Merge pull request #127 from Y-zu-don-maintenance-org/features/4.1.9

4.1.10

This commit is contained in commit 393ae412db.
(deleted file: CircleCI configuration, 225 lines)
@@ -1,225 +0,0 @@
version: 2.1

orbs:
  ruby: circleci/ruby@2.0.0
  node: circleci/node@5.0.3

executors:
  default:
    parameters:
      ruby-version:
        type: string
    docker:
      - image: cimg/ruby:<< parameters.ruby-version >>
        environment:
          BUNDLE_JOBS: 3
          BUNDLE_RETRY: 3
          CONTINUOUS_INTEGRATION: true
          DB_HOST: localhost
          DB_USER: root
          DISABLE_SIMPLECOV: true
          RAILS_ENV: test
      - image: cimg/postgres:14.5
        environment:
          POSTGRES_USER: root
          POSTGRES_HOST_AUTH_METHOD: trust
      - image: cimg/redis:7.0

commands:
  install-system-dependencies:
    steps:
      - run:
          name: Install system dependencies
          command: |
            sudo apt-get update
            sudo apt-get install -y libicu-dev libidn11-dev
  install-ruby-dependencies:
    parameters:
      ruby-version:
        type: string
    steps:
      - run:
          command: |
            bundle config clean 'true'
            bundle config frozen 'true'
            bundle config without 'development production'
          name: Set bundler settings
      - ruby/install-deps:
          bundler-version: '2.3.26'
          key: ruby<< parameters.ruby-version >>-gems-v1
  wait-db:
    steps:
      - run:
          command: dockerize -wait tcp://localhost:5432 -wait tcp://localhost:6379 -timeout 1m
          name: Wait for PostgreSQL and Redis

jobs:
  build:
    docker:
      - image: cimg/ruby:3.0-node
        environment:
          RAILS_ENV: test
    steps:
      - checkout
      - install-system-dependencies
      - install-ruby-dependencies:
          ruby-version: '3.0'
      - node/install-packages:
          cache-version: v1
          pkg-manager: yarn
      - run:
          command: |
            export NODE_OPTIONS=--openssl-legacy-provider
            ./bin/rails assets:precompile
          name: Precompile assets
      - persist_to_workspace:
          paths:
            - public/assets
            - public/packs-test
          root: .

  test:
    parameters:
      ruby-version:
        type: string
    executor:
      name: default
      ruby-version: << parameters.ruby-version >>
    environment:
      ALLOW_NOPAM: true
      PAM_ENABLED: true
      PAM_DEFAULT_SERVICE: pam_test
      PAM_CONTROLLED_SERVICE: pam_test_controlled
    parallelism: 4
    steps:
      - checkout
      - install-system-dependencies
      - run:
          command: sudo apt-get install -y ffmpeg imagemagick libpam-dev
          name: Install additional system dependencies
      - run:
          command: bundle config with 'pam_authentication'
          name: Enable PAM authentication
      - install-ruby-dependencies:
          ruby-version: << parameters.ruby-version >>
      - attach_workspace:
          at: .
      - wait-db
      - run:
          command: ./bin/rails db:create db:schema:load db:seed
          name: Load database schema
      - ruby/rspec-test

  test-migrations:
    executor:
      name: default
      ruby-version: '3.0'
    steps:
      - checkout
      - install-system-dependencies
      - install-ruby-dependencies:
          ruby-version: '3.0'
      - wait-db
      - run:
          command: ./bin/rails db:create
          name: Create database
      - run:
          command: ./bin/rails db:migrate VERSION=20171010025614
          name: Run migrations up to v2.0.0
      - run:
          command: ./bin/rails tests:migrations:populate_v2
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate VERSION=20180514140000
          name: Run migrations up to v2.4.0
      - run:
          command: ./bin/rails tests:migrations:populate_v2_4
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate VERSION=20180707154237
          name: Run migrations up to v2.4.3
      - run:
          command: ./bin/rails tests:migrations:populate_v2_4_3
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate
          name: Run all remaining migrations
      - run:
          command: ./bin/rails tests:migrations:check_database
          name: Check migration result

  test-two-step-migrations:
    executor:
      name: default
      ruby-version: '3.0'
    steps:
      - checkout
      - install-system-dependencies
      - install-ruby-dependencies:
          ruby-version: '3.0'
      - wait-db
      - run:
          command: ./bin/rails db:create
          name: Create database
      - run:
          command: ./bin/rails db:migrate VERSION=20171010025614
          name: Run migrations up to v2.0.0
      - run:
          command: ./bin/rails tests:migrations:populate_v2
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate VERSION=20180514140000
          name: Run pre-deployment migrations up to v2.4.0
          environment:
            SKIP_POST_DEPLOYMENT_MIGRATIONS: true
      - run:
          command: ./bin/rails tests:migrations:populate_v2_4
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate VERSION=20180707154237
          name: Run migrations up to v2.4.3
          environment:
            SKIP_POST_DEPLOYMENT_MIGRATIONS: true
      - run:
          command: ./bin/rails tests:migrations:populate_v2_4_3
          name: Populate database with test data
      - run:
          command: ./bin/rails db:migrate
          name: Run all remaining pre-deployment migrations
          environment:
            SKIP_POST_DEPLOYMENT_MIGRATIONS: true
      - run:
          command: ./bin/rails db:migrate
          name: Run all post-deployment migrations
      - run:
          command: ./bin/rails tests:migrations:check_database
          name: Check migration result

workflows:
  version: 2
  build-and-test:
    jobs:
      - build
      - test:
          matrix:
            parameters:
              ruby-version:
                - '2.7'
                - '3.0'
          name: test-ruby<< matrix.ruby-version >>
          requires:
            - build
      - test-migrations:
          requires:
            - build
      - test-two-step-migrations:
          requires:
            - build
      - node/run:
          cache-version: v1
          name: test-webui
          pkg-manager: yarn
          requires:
            - build
          version: '16.18'
          yarn-run: test:jest
92  .github/workflows/build-container-image.yml  (new file, vendored)
@@ -0,0 +1,92 @@
on:
  workflow_call:
    inputs:
      platforms:
        required: true
        type: string
      cache:
        type: boolean
        default: true
      use_native_arm64_builder:
        type: boolean
      push_to_images:
        type: string
      flavor:
        type: string
      tags:
        type: string
      labels:
        type: string

jobs:
  build-image:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - uses: docker/setup-qemu-action@v2
        if: contains(inputs.platforms, 'linux/arm64') && !inputs.use_native_arm64_builder

      - uses: docker/setup-buildx-action@v2
        id: buildx
        if: ${{ !(inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')) }}

      - name: Start a local Docker Builder
        if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
        run: |
          docker run --rm -d --name buildkitd -p 1234:1234 --privileged moby/buildkit:latest --addr tcp://0.0.0.0:1234

      - uses: docker/setup-buildx-action@v2
        id: buildx-native
        if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
        with:
          driver: remote
          endpoint: tcp://localhost:1234
          platforms: linux/amd64
          append: |
            - endpoint: tcp://${{ vars.DOCKER_BUILDER_HETZNER_ARM64_01_HOST }}:13865
              platforms: linux/arm64
              name: mastodon-docker-builder-arm64-01
              driver-opts:
                - servername=mastodon-docker-builder-arm64-01
        env:
          BUILDER_NODE_1_AUTH_TLS_CACERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CACERT }}
          BUILDER_NODE_1_AUTH_TLS_CERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CERT }}
          BUILDER_NODE_1_AUTH_TLS_KEY: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_KEY }}

      - name: Log in to Docker Hub
        if: contains(inputs.push_to_images, 'tootsuite')
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to the Github Container registry
        if: contains(inputs.push_to_images, 'ghcr.io')
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - uses: docker/metadata-action@v4
        id: meta
        if: ${{ inputs.push_to_images != '' }}
        with:
          images: ${{ inputs.push_to_images }}
          flavor: ${{ inputs.flavor }}
          tags: ${{ inputs.tags }}
          labels: ${{ inputs.labels }}

      - uses: docker/build-push-action@v4
        with:
          context: .
          platforms: ${{ inputs.platforms }}
          provenance: false
          builder: ${{ steps.buildx.outputs.name || steps.buildx-native.outputs.name }}
          push: ${{ inputs.push_to_images != '' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: ${{ inputs.cache && 'type=gha' || '' }}
          cache-to: ${{ inputs.cache && 'type=gha,mode=max' || '' }}
70  .github/workflows/build-image.yml  (deleted file, vendored)
@@ -1,70 +0,0 @@
name: Build container image
on:
  workflow_dispatch:
  push:
    branches:
      - 'main'
    tags:
      - '*'
  pull_request:
    paths:
      - .github/workflows/build-image.yml
      - Dockerfile
permissions:
  contents: read
  packages: write

jobs:
  build-image:
    runs-on: ubuntu-latest

    concurrency:
      group: ${{ github.ref }}
      cancel-in-progress: true

    steps:
      - uses: actions/checkout@v3
      - uses: hadolint/hadolint-action@v3.1.0
      - uses: docker/setup-qemu-action@v2
      - uses: docker/setup-buildx-action@v2

      - name: Log in to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
        if: github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request'

      - name: Log in to the Github Container registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
        if: github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request'

      - uses: docker/metadata-action@v4
        id: meta
        with:
          images: |
            tootsuite/mastodon
            ghcr.io/mastodon/mastodon
          flavor: |
            latest=auto
          tags: |
            type=edge,branch=main
            type=pep440,pattern={{raw}}
            type=pep440,pattern=v{{major}}.{{minor}}
            type=ref,event=pr

      - uses: docker/build-push-action@v4
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          provenance: false
          builder: ${{ steps.buildx.outputs.name }}
          push: ${{ github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
27  .github/workflows/build-releases.yml  (new file, vendored)
@@ -0,0 +1,27 @@
name: Build container release images
on:
  push:
    tags:
      - '*'

permissions:
  contents: read
  packages: write

jobs:
  build-image:
    uses: ./.github/workflows/build-container-image.yml
    with:
      platforms: linux/amd64,linux/arm64
      use_native_arm64_builder: true
      push_to_images: |
        tootsuite/mastodon
        ghcr.io/mastodon/mastodon
      # Do not use cache when building releases, so apt update is always run and the release always contains the latest packages
      cache: false
      flavor: |
        latest=false
      tags: |
        type=pep440,pattern={{raw}}
        type=pep440,pattern=v{{major}}.{{minor}}
    secrets: inherit
41  .github/workflows/lint-ruby.yml  (deleted file, vendored)
@@ -1,41 +0,0 @@
name: Ruby Linting
on:
  push:
    branches-ignore:
      - 'dependabot/**'
    paths:
      - 'Gemfile*'
      - '.rubocop.yml'
      - '**/*.rb'
      - '**/*.rake'
      - '.github/workflows/lint-ruby.yml'

  pull_request:
    paths:
      - 'Gemfile*'
      - '.rubocop.yml'
      - '**/*.rb'
      - '**/*.rake'
      - '.github/workflows/lint-ruby.yml'

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Set-up RuboCop Problem Matcher
        uses: r7kamura/rubocop-problem-matchers-action@v1

      - name: Run rubocop
        uses: github/super-linter@v4
        env:
          DEFAULT_BRANCH: main
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          LINTER_RULES_PATH: .
          RUBY_CONFIG_FILE: .rubocop.yml
          VALIDATE_ALL_CODEBASE: false
          VALIDATE_RUBY: true
15  .github/workflows/test-image-build.yml  (new file, vendored)
@@ -0,0 +1,15 @@
name: Test container image build
on:
  pull_request:
permissions:
  contents: read

jobs:
  build-image:
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true

    uses: ./.github/workflows/build-container-image.yml
    with:
      platforms: linux/amd64 # Testing only on native platform so it is performant
78  CHANGELOG.md
@@ -3,6 +3,84 @@ Changelog
 
 All notable changes to this project will be documented in this file.
 
+## [4.1.10] - 2023-10-10
+
+### Changed
+
+- Change some worker lock TTLs to be shorter-lived ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27246))
+- Change user archive export allowed period from 7 days to 6 days ([suddjian](https://github.com/mastodon/mastodon/pull/27200))
+
+### Fixed
+
+- Fix mentions being matched in some URL query strings ([mjankowski](https://github.com/mastodon/mastodon/pull/25656))
+- Fix multiple instances of the trend refresh scheduler sometimes running at once ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27253))
+- Fix importer returning negative row estimates ([jgillich](https://github.com/mastodon/mastodon/pull/27258))
+- Fix filtering audit log for entries about disabling 2FA ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27186))
+- Fix tIME chunk not being properly removed from PNG uploads ([TheEssem](https://github.com/mastodon/mastodon/pull/27111))
+- Fix inefficient queries in “Follows and followers” as well as several admin pages ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27116), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/27306))
+
+## [4.1.9] - 2023-09-20
+
+### Fixed
+
+- Fix post translation erroring out ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26990))
+
+## [4.1.8] - 2023-09-19
+
+### Fixed
+
+- Fix post edits not being forwarded as expected ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26936))
+- Fix moderator rights inconsistencies ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26729))
+- Fix crash when encountering invalid URL ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26814))
+- Fix cached posts including stale stats ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26409))
+- Fix uploading of video files for which `ffprobe` reports `0/0` average framerate ([NicolaiSoeborg](https://github.com/mastodon/mastodon/pull/26500))
+- Fix unexpected audio stream transcoding when uploaded video is eligible to passthrough ([yufushiro](https://github.com/mastodon/mastodon/pull/26608))
+
+### Security
+
+- Fix missing HTML sanitization in translation API (CVE-2023-42452)
+- Fix incorrect domain name normalization (CVE-2023-42451)
+
+## [4.1.7] - 2023-09-05
+
+### Changed
+
+- Change remote report processing to accept reports with long comments, but truncate them ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25028))
+
+### Fixed
+
+- **Fix blocking subdomains of an already-blocked domain** ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26392))
+- Fix `/api/v1/timelines/tag/:hashtag` allowing for unauthenticated access when public preview is disabled ([danielmbrasil](https://github.com/mastodon/mastodon/pull/26237))
+- Fix inefficiencies in `PlainTextFormatter` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26727))
+
+## [4.1.6] - 2023-07-31
+
+### Fixed
+
+- Fix memory leak in streaming server ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/26228))
+- Fix wrong filters sometimes applying in streaming ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26159), [ThisIsMissEm](https://github.com/mastodon/mastodon/pull/26213), [renchap](https://github.com/mastodon/mastodon/pull/26233))
+- Fix incorrect connect timeout in outgoing requests ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26116))
+
+## [4.1.5] - 2023-07-21
+
+### Added
+
+- Add check preventing Sidekiq workers from running with Makara configured ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25850))
+
+### Changed
+
+- Change request timeout handling to use a longer deadline ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26055))
+
+### Fixed
+
+- Fix moderation interface for remote instances with a .zip TLD ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25886))
+- Fix remote accounts being possibly persisted to database with incomplete protocol values ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25886))
+- Fix trending publishers table not rendering correctly on narrow screens ([vmstan](https://github.com/mastodon/mastodon/pull/25945))
+
+### Security
+
+- Fix CSP headers being unintentionally wide ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26105))
+
 ## [4.1.4] - 2023-07-07
 
 ### Fixed
@@ -17,6 +17,7 @@ COPY Gemfile* package.json yarn.lock /opt/mastodon/
 
 # hadolint ignore=DL3008
 RUN apt-get update && \
+    apt-get -yq dist-upgrade && \
     apt-get install -y --no-install-recommends build-essential \
         ca-certificates \
         git \
122  Gemfile.lock
@@ -10,40 +10,40 @@ GIT
 GEM
   remote: https://rubygems.org/
   specs:
-    actioncable (6.1.7.4)
+    actioncable (6.1.7.6)
-      actionpack (= 6.1.7.4)
+      actionpack (= 6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
       nio4r (~> 2.0)
       websocket-driver (>= 0.6.1)
-    actionmailbox (6.1.7.4)
+    actionmailbox (6.1.7.6)
-      actionpack (= 6.1.7.4)
+      actionpack (= 6.1.7.6)
-      activejob (= 6.1.7.4)
+      activejob (= 6.1.7.6)
-      activerecord (= 6.1.7.4)
+      activerecord (= 6.1.7.6)
-      activestorage (= 6.1.7.4)
+      activestorage (= 6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
       mail (>= 2.7.1)
-    actionmailer (6.1.7.4)
+    actionmailer (6.1.7.6)
-      actionpack (= 6.1.7.4)
+      actionpack (= 6.1.7.6)
-      actionview (= 6.1.7.4)
+      actionview (= 6.1.7.6)
-      activejob (= 6.1.7.4)
+      activejob (= 6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
       mail (~> 2.5, >= 2.5.4)
       rails-dom-testing (~> 2.0)
-    actionpack (6.1.7.4)
+    actionpack (6.1.7.6)
-      actionview (= 6.1.7.4)
+      actionview (= 6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
       rack (~> 2.0, >= 2.0.9)
       rack-test (>= 0.6.3)
       rails-dom-testing (~> 2.0)
       rails-html-sanitizer (~> 1.0, >= 1.2.0)
-    actiontext (6.1.7.4)
+    actiontext (6.1.7.6)
-      actionpack (= 6.1.7.4)
+      actionpack (= 6.1.7.6)
-      activerecord (= 6.1.7.4)
+      activerecord (= 6.1.7.6)
-      activestorage (= 6.1.7.4)
+      activestorage (= 6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
       nokogiri (>= 1.8.5)
-    actionview (6.1.7.4)
+    actionview (6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
       builder (~> 3.1)
       erubi (~> 1.4)
       rails-dom-testing (~> 2.0)
@@ -54,22 +54,22 @@ GEM
       case_transform (>= 0.2)
       jsonapi-renderer (>= 0.1.1.beta1, < 0.3)
     active_record_query_trace (1.8)
-    activejob (6.1.7.4)
+    activejob (6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
       globalid (>= 0.3.6)
-    activemodel (6.1.7.4)
+    activemodel (6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
-    activerecord (6.1.7.4)
+    activerecord (6.1.7.6)
-      activemodel (= 6.1.7.4)
+      activemodel (= 6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
-    activestorage (6.1.7.4)
+    activestorage (6.1.7.6)
-      actionpack (= 6.1.7.4)
+      actionpack (= 6.1.7.6)
-      activejob (= 6.1.7.4)
+      activejob (= 6.1.7.6)
-      activerecord (= 6.1.7.4)
+      activerecord (= 6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
       marcel (~> 1.0)
       mini_mime (>= 1.1.0)
-    activesupport (6.1.7.4)
+    activesupport (6.1.7.6)
       concurrent-ruby (~> 1.0, >= 1.0.2)
       i18n (>= 1.6, < 2)
       minitest (>= 5.1)
@@ -404,13 +404,13 @@ GEM
     mime-types (3.4.1)
       mime-types-data (~> 3.2015)
     mime-types-data (3.2022.0105)
-    mini_mime (1.1.2)
+    mini_mime (1.1.5)
-    mini_portile2 (2.8.2)
+    mini_portile2 (2.8.4)
    minitest (5.17.0)
     msgpack (1.6.0)
     multi_json (1.15.0)
     multipart-post (2.1.1)
-    net-imap (0.3.6)
+    net-imap (0.3.7)
       date
       net-protocol
     net-ldap (0.17.1)
@@ -491,13 +491,13 @@ GEM
     pry-rails (0.3.9)
       pry (>= 0.10.4)
     public_suffix (5.0.1)
-    puma (5.6.5)
+    puma (5.6.7)
       nio4r (~> 2.0)
     pundit (2.3.0)
       activesupport (>= 3.0.0)
     raabro (1.4.0)
     racc (1.6.2)
-    rack (2.2.7)
+    rack (2.2.8)
     rack-attack (6.6.1)
       rack (>= 1.0, < 3)
     rack-cors (1.1.1)
@@ -512,20 +512,20 @@ GEM
       rack
     rack-test (2.0.2)
       rack (>= 1.3)
-    rails (6.1.7.4)
+    rails (6.1.7.6)
-      actioncable (= 6.1.7.4)
+      actioncable (= 6.1.7.6)
-      actionmailbox (= 6.1.7.4)
+      actionmailbox (= 6.1.7.6)
-      actionmailer (= 6.1.7.4)
+      actionmailer (= 6.1.7.6)
-      actionpack (= 6.1.7.4)
+      actionpack (= 6.1.7.6)
-      actiontext (= 6.1.7.4)
+      actiontext (= 6.1.7.6)
-      actionview (= 6.1.7.4)
+      actionview (= 6.1.7.6)
-      activejob (= 6.1.7.4)
+      activejob (= 6.1.7.6)
-      activemodel (= 6.1.7.4)
+      activemodel (= 6.1.7.6)
-      activerecord (= 6.1.7.4)
+      activerecord (= 6.1.7.6)
-      activestorage (= 6.1.7.4)
+      activestorage (= 6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
       bundler (>= 1.15.0)
-      railties (= 6.1.7.4)
+      railties (= 6.1.7.6)
       sprockets-rails (>= 2.0.0)
     rails-controller-testing (1.0.5)
       actionpack (>= 5.0.1.rc1)
@@ -541,9 +541,9 @@ GEM
       railties (>= 6.0.0, < 7)
     rails-settings-cached (0.6.6)
       rails (>= 4.2.0)
-    railties (6.1.7.4)
+    railties (6.1.7.6)
-      actionpack (= 6.1.7.4)
+      actionpack (= 6.1.7.6)
-      activesupport (= 6.1.7.4)
+      activesupport (= 6.1.7.6)
       method_source
       rake (>= 12.2)
       thor (~> 1.0)
@@ -634,7 +634,7 @@ GEM
       activerecord (>= 4.0.0)
       railties (>= 4.0.0)
     semantic_range (3.0.0)
-    sidekiq (6.5.8)
+    sidekiq (6.5.11)
       connection_pool (>= 2.2.5, < 3)
       rack (~> 2.0)
       redis (>= 4.5.0, < 5)
@@ -746,14 +746,14 @@ GEM
       rack-proxy (>= 0.6.1)
       railties (>= 5.2)
       semantic_range (>= 2.3.0)
-    websocket-driver (0.7.5)
+    websocket-driver (0.7.6)
       websocket-extensions (>= 0.1.0)
     websocket-extensions (0.1.5)
     wisper (2.0.1)
     xorcist (1.1.3)
     xpath (3.2.0)
       nokogiri (~> 1.8)
-    zeitwerk (2.6.8)
+    zeitwerk (2.6.12)
 
 PLATFORMS
   ruby
@@ -37,7 +37,7 @@ module Admin
         @domain_block.errors.delete(:domain)
         render :new
       else
-        if existing_domain_block.present?
+        if existing_domain_block.present? && existing_domain_block.domain == TagManager.instance.normalize_domain(@domain_block.domain.strip)
           @domain_block = existing_domain_block
           @domain_block.update(resource_params)
         end
@@ -1,6 +1,7 @@
 # frozen_string_literal: true
 
 class Api::V1::Timelines::TagController < Api::BaseController
+  before_action -> { doorkeeper_authorize! :read, :'read:statuses' }, only: :show, if: :require_auth?
   before_action :load_tag
   after_action :insert_pagination_headers, unless: -> { @statuses.empty? }
 
@@ -15,6 +16,10 @@ class Api::V1::Timelines::TagController < Api::BaseController
 
   private
 
+  def require_auth?
+    !Setting.timeline_preview
+  end
+
   def load_tag
     @tag = Tag.find_normalized(params[:id])
   end
@@ -16,7 +16,7 @@ class ActivityPub::Activity::Flag < ActivityPub::Activity
         @account,
         target_account,
         status_ids: target_statuses.nil? ? [] : target_statuses.map(&:id),
-        comment: @json['content'] || '',
+        comment: report_comment,
         uri: report_uri
       )
     end
@@ -35,4 +35,8 @@ class ActivityPub::Activity::Flag < ActivityPub::Activity
   def report_uri
     @json['id'] unless @json['id'].nil? || invalid_origin?(@json['id'])
   end
+
+  def report_comment
+    (@json['content'] || '')[0...5000]
+  end
 end
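The new report_comment helper relies on plain Ruby range slicing; a quick illustration with assumed inputs (not taken from the PR):

    # An exclusive-range slice caps the remote comment at 5,000 characters and
    # never raises, even when the comment is shorter than the limit or missing.
    comment = 'a' * 12_000
    comment[0...5000].length  # => 5000
    'short'[0...5000]         # => "short"
    (nil || '')[0...5000]     # => ""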
@@ -28,6 +28,6 @@ class ActivityPub::Activity::Update < ActivityPub::Activity
 
     return if @status.nil?
 
-    ActivityPub::ProcessStatusUpdateService.new.call(@status, @object, request_id: @options[:request_id])
+    ActivityPub::ProcessStatusUpdateService.new.call(@status, @json, @object, request_id: @options[:request_id])
   end
 end
@@ -27,6 +27,8 @@ class ActivityPub::TagManager
     when :note, :comment, :activity
       return activity_account_status_url(target.account, target) if target.reblog?
       short_account_status_url(target.account, target)
+    when :flag
+      target.uri
     end
   end
 
@@ -41,6 +43,8 @@ class ActivityPub::TagManager
       account_status_url(target.account, target)
     when :emoji
       emoji_url(target)
+    when :flag
+      target.uri
     end
   end
 
9  app/lib/admin/account_statuses_filter.rb  (new file)
@@ -0,0 +1,9 @@
# frozen_string_literal: true

class Admin::AccountStatusesFilter < AccountStatusesFilter
  private

  def blocked?
    false
  end
end
@@ -34,7 +34,9 @@ class Importer::BaseImporter
   # Estimate the amount of documents that would be indexed. Not exact!
   # @returns [Integer]
   def estimate!
-    ActiveRecord::Base.connection_pool.with_connection { |connection| connection.select_one("SELECT reltuples AS estimate FROM pg_class WHERE relname = '#{index.adapter.target.table_name}'")['estimate'].to_i }
+    reltuples = ActiveRecord::Base.connection_pool.with_connection { |connection| connection.select_one("SELECT reltuples FROM pg_class WHERE relname = '#{index.adapter.target.table_name}'")['reltuples'].to_i }
+
+    # If the table has never yet been vacuumed or analyzed, reltuples contains -1
+    [reltuples, 0].max
   end
 
   # Import data from the database into the index
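Background for the estimate! change, stated here as a general PostgreSQL detail rather than something shown in the diff: pg_class.reltuples reports -1 for a table that has never been vacuumed or analyzed, so the raw value has to be clamped before it is used as a row estimate. A minimal sketch with assumed values:

    reltuples = -1       # what pg_class reports before the first VACUUM/ANALYZE
    [reltuples, 0].max   # => 0, instead of a negative row estimate
    [123_456, 0].max     # => 123456, real estimates pass through unchanged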
@@ -1,9 +1,7 @@
 # frozen_string_literal: true
 
 class PlainTextFormatter
-  include ActionView::Helpers::TextHelper
-
-  NEWLINE_TAGS_RE = /(<br \/>|<br>|<\/p>)+/.freeze
+  NEWLINE_TAGS_RE = %r{(<br />|<br>|</p>)+}
 
   attr_reader :text, :local
 
@@ -18,7 +16,10 @@ class PlainTextFormatter
     if local?
       text
     else
-      html_entities.decode(strip_tags(insert_newlines)).chomp
+      node = Nokogiri::HTML.fragment(insert_newlines)
+      # Elements that are entirely removed with our Sanitize config
+      node.xpath('.//iframe|.//math|.//noembed|.//noframes|.//noscript|.//plaintext|.//script|.//style|.//svg|.//xmp').remove
+      node.text.chomp
     end
   end
 
@@ -27,8 +28,4 @@ class PlainTextFormatter
   def insert_newlines
     text.gsub(NEWLINE_TAGS_RE) { |match| "#{match}\n" }
   end
-
-  def html_entities
-    HTMLEntities.new
-  end
 end
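A small sketch of the Nokogiri-based stripping used above, with an assumed input string: disallowed elements are removed wholesale before taking the text, and entity decoding comes from the parser itself rather than a separate HTMLEntities pass.

    require 'nokogiri'

    html = '<p>Hello &amp; welcome<script>alert(1)</script></p>'
    node = Nokogiri::HTML.fragment(html)
    node.xpath('.//script|.//style').remove  # subset of the element list used in the diff
    node.text  # => "Hello & welcome"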
@@ -4,14 +4,22 @@ require 'ipaddr'
 require 'socket'
 require 'resolv'
 
-# Monkey-patch the HTTP.rb timeout class to avoid using a timeout block
+# Use our own timeout class to avoid using HTTP.rb's timeout block
 # around the Socket#open method, since we use our own timeout blocks inside
 # that method
 #
 # Also changes how the read timeout behaves so that it is cumulative (closer
 # to HTTP::Timeout::Global, but still having distinct timeouts for other
 # operation types)
-class HTTP::Timeout::PerOperation
+class PerOperationWithDeadline < HTTP::Timeout::PerOperation
+  READ_DEADLINE = 30
+
+  def initialize(*args)
+    super
+
+    @read_deadline = options.fetch(:read_deadline, READ_DEADLINE)
+  end
+
   def connect(socket_class, host, port, nodelay = false)
     @socket = socket_class.open(host, port)
     @socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) if nodelay
@@ -24,7 +32,7 @@ class HTTP::Timeout::PerOperation
 
   # Read data from the socket
   def readpartial(size, buffer = nil)
-    @deadline ||= Process.clock_gettime(Process::CLOCK_MONOTONIC) + @read_timeout
+    @deadline ||= Process.clock_gettime(Process::CLOCK_MONOTONIC) + @read_deadline
 
     timeout = false
     loop do
@@ -33,7 +41,8 @@ class HTTP::Timeout::PerOperation
       return :eof if result.nil?
 
       remaining_time = @deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
-      raise HTTP::TimeoutError, "Read timed out after #{@read_timeout} seconds" if timeout || remaining_time <= 0
+      raise HTTP::TimeoutError, "Read timed out after #{@read_timeout} seconds" if timeout
+      raise HTTP::TimeoutError, "Read timed out after a total of #{@read_deadline} seconds" if remaining_time <= 0
       return result if result != :wait_readable
 
       # marking the socket for timeout. Why is this not being raised immediately?
@@ -46,7 +55,7 @@ class HTTP::Timeout::PerOperation
       # timeout. Else, the first timeout was a proper timeout.
       # This hack has to be done because io/wait#wait_readable doesn't provide a value for when
      # the socket is closed by the server, and HTTP::Parser doesn't provide the limit for the chunks.
-      timeout = true unless @socket.to_io.wait_readable(remaining_time)
+      timeout = true unless @socket.to_io.wait_readable([remaining_time, @read_timeout].min)
     end
   end
 end
@@ -57,7 +66,7 @@ class Request
   # We enforce a 5s timeout on DNS resolving, 5s timeout on socket opening
   # and 5s timeout on the TLS handshake, meaning the worst case should take
   # about 15s in total
-  TIMEOUT = { connect: 5, read: 10, write: 10 }.freeze
+  TIMEOUT = { connect_timeout: 5, read_timeout: 10, write_timeout: 10, read_deadline: 30 }.freeze
 
   include RoutingHelper
 
@@ -69,6 +78,7 @@ class Request
     @http_client = options.delete(:http_client)
     @allow_local = options.delete(:allow_local)
     @options = options.merge(socket_class: use_proxy? || @allow_local ? ProxySocket : Socket)
+    @options = @options.merge(timeout_class: PerOperationWithDeadline, timeout_options: TIMEOUT)
     @options = @options.merge(proxy_url) if use_proxy?
     @headers = {}
 
@@ -129,7 +139,7 @@ class Request
   end
 
   def http_client
-    HTTP.use(:auto_inflate).timeout(TIMEOUT.dup).follow(max_hops: 3)
+    HTTP.use(:auto_inflate).follow(max_hops: 3)
   end
 end
 
@@ -275,11 +285,11 @@ class Request
       end
 
       until socks.empty?
-        _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect])
+        _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect_timeout])
 
         if available_socks.nil?
          socks.each(&:close)
-          raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect]} seconds"
+          raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect_timeout]} seconds"
         end
 
         available_socks.each do |sock|
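A stand-alone sketch of the deadline idea introduced by PerOperationWithDeadline (the timings and the loop are illustrative, not the class from the diff): each individual read is still bounded by the per-operation timeout, but a single monotonic deadline caps the total time spent reading one response, so a slow-drip peer cannot stretch the request indefinitely.

    READ_TIMEOUT  = 10.0  # seconds allowed for a single read
    READ_DEADLINE = 30.0  # seconds allowed for all reads combined

    deadline = Process.clock_gettime(Process::CLOCK_MONOTONIC) + READ_DEADLINE

    3.times do
      remaining = deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
      raise 'read deadline exceeded' if remaining <= 0

      # Each wait is bounded by both limits, so the total read budget is fixed.
      per_wait = [remaining, READ_TIMEOUT].min
      puts format('would wait up to %.1fs for the next chunk', per_wait)
    end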
@@ -7,18 +7,18 @@ class TagManager
   include RoutingHelper
 
   def web_domain?(domain)
-    domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.web_domain).zero?
+    domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.web_domain).zero?
   end
 
   def local_domain?(domain)
-    domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.local_domain).zero?
+    domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.local_domain).zero?
   end
 
   def normalize_domain(domain)
     return if domain.nil?
 
     uri = Addressable::URI.new
-    uri.host = domain.gsub(/[\/]/, '')
+    uri.host = domain.delete_suffix('/')
     uri.normalized_host
   end
 
@@ -28,7 +28,7 @@ class TagManager
     domain = uri.host + (uri.port ? ":#{uri.port}" : '')
 
     TagManager.instance.web_domain?(domain)
-  rescue Addressable::URI::InvalidURIError
+  rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
     false
   end
 end
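The behavioural difference behind the normalization change, shown on assumed example strings: gsub deleted every slash anywhere in the value, while delete_suffix only trims one trailing slash and leaves anything else for later validation to reject.

    'example.com/'.gsub(%r{/}, '')          # => "example.com"
    'example.com/'.delete_suffix('/')       # => "example.com"

    'evil.example/.com'.gsub(%r{/}, '')     # => "evil.example.com"  (slash silently folded away)
    'evil.example/.com'.delete_suffix('/')  # => "evil.example/.com" (left intact)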
@@ -46,7 +46,7 @@ class TranslationService::DeepL < TranslationService
 
     raise UnexpectedResponseError unless json.is_a?(Hash)
 
-    Translation.new(text: json.dig('translations', 0, 'text'), detected_source_language: json.dig('translations', 0, 'detected_source_language')&.downcase, provider: 'DeepL.com')
+    Translation.new(text: Sanitize.fragment(json.dig('translations', 0, 'text'), Sanitize::Config::MASTODON_STRICT), detected_source_language: json.dig('translations', 0, 'detected_source_language')&.downcase, provider: 'DeepL.com')
   rescue Oj::ParseError
     raise UnexpectedResponseError
   end
@@ -37,7 +37,7 @@ class TranslationService::LibreTranslate < TranslationService
 
     raise UnexpectedResponseError unless json.is_a?(Hash)
 
-    Translation.new(text: json['translatedText'], detected_source_language: source_language, provider: 'LibreTranslate')
+    Translation.new(text: Sanitize.fragment(json['translatedText'], Sanitize::Config::MASTODON_STRICT), detected_source_language: source_language, provider: 'LibreTranslate')
   rescue Oj::ParseError
     raise UnexpectedResponseError
   end
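A usage sketch for the sanitization added to both providers; the input string is an assumed example, and MASTODON_STRICT is the application's strict Sanitize configuration referenced in the diff, not part of the sanitize gem itself:

    raw = '<img src="x" onerror="alert(1)">Bonjour'  # assumed provider output
    Sanitize.fragment(raw, Sanitize::Config::MASTODON_STRICT)
    # expected result: "Bonjour"; markup the strict config does not allow is dropped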
@@ -43,6 +43,9 @@ class VideoMetadataExtractor
       @height = video_stream[:height]
       @frame_rate = video_stream[:avg_frame_rate] == '0/0' ? nil : Rational(video_stream[:avg_frame_rate])
       @r_frame_rate = video_stream[:r_frame_rate] == '0/0' ? nil : Rational(video_stream[:r_frame_rate])
+      # For some video streams the frame_rate reported by `ffprobe` will be 0/0, but for these streams we
+      # should use `r_frame_rate` instead. Video screencasts generated by Gnome Screencast have this issue.
+      @frame_rate ||= @r_frame_rate
     end
 
     if (audio_stream = audio_streams.first)
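Why the 0/0 guard matters, with illustrative values: Rational raises ZeroDivisionError for a 0/0 frame rate, so the extractor skips it and falls back to r_frame_rate.

    Rational('30000/1001')   # => (30000/1001), a normal ffprobe avg_frame_rate
    # Rational('0/0')        # would raise ZeroDivisionError, the value some screencasts report

    avg_frame_rate = '0/0'   # assumed ffprobe output
    r_frame_rate   = '30/1'
    frame_rate   = avg_frame_rate == '0/0' ? nil : Rational(avg_frame_rate)
    frame_rate ||= r_frame_rate == '0/0' ? nil : Rational(r_frame_rate)
    frame_rate               # => (30/1)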
@@ -61,9 +61,9 @@ class Account < ApplicationRecord
     trust_level
   )
 
-  USERNAME_RE = /[a-z0-9_]+([a-z0-9_\.-]+[a-z0-9_]+)?/i
+  USERNAME_RE = /[a-z0-9_]+([a-z0-9_.-]+[a-z0-9_]+)?/i
-  MENTION_RE = /(?<=^|[^\/[:word:]])@((#{USERNAME_RE})(?:@[[:word:]\.\-]+[[:word:]]+)?)/i
+  MENTION_RE = %r{(?<![=/[:word:]])@((#{USERNAME_RE})(?:@[[:word:].-]+[[:word:]]+)?)}i
-  URL_PREFIX_RE = /\Ahttp(s?):\/\/[^\/]+/
+  URL_PREFIX_RE = %r{\Ahttp(s?)://[^/]+}
   USERNAME_ONLY_RE = /\A#{USERNAME_RE}\z/i
 
   include Attachmentable
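A quick check of what the reworked lookbehind changes, using assumed sample strings: an @ preceded by '=' or '/', as happens inside URL query strings, no longer counts as a mention, while ordinary mentions still match.

    USERNAME_RE = /[a-z0-9_]+([a-z0-9_.-]+[a-z0-9_]+)?/i
    MENTION_RE  = %r{(?<![=/[:word:]])@((#{USERNAME_RE})(?:@[[:word:].-]+[[:word:]]+)?)}i

    'hello @alice'.match?(MENTION_RE)                           # => true
    'https://example.com/?reply_to=@alice'.match?(MENTION_RE)   # => false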
@@ -114,8 +114,8 @@ class Account < ApplicationRecord
   scope :searchable, -> { without_unapproved.without_suspended.where(moved_to_account_id: nil) }
   scope :discoverable, -> { searchable.without_silenced.where(discoverable: true).left_outer_joins(:account_stat) }
   scope :followable_by, ->(account) { joins(arel_table.join(Follow.arel_table, Arel::Nodes::OuterJoin).on(arel_table[:id].eq(Follow.arel_table[:target_account_id]).and(Follow.arel_table[:account_id].eq(account.id))).join_sources).where(Follow.arel_table[:id].eq(nil)).joins(arel_table.join(FollowRequest.arel_table, Arel::Nodes::OuterJoin).on(arel_table[:id].eq(FollowRequest.arel_table[:target_account_id]).and(FollowRequest.arel_table[:account_id].eq(account.id))).join_sources).where(FollowRequest.arel_table[:id].eq(nil)) }
-  scope :by_recent_status, -> { order(Arel.sql('(case when account_stats.last_status_at is null then 1 else 0 end) asc, account_stats.last_status_at desc, accounts.id desc')) }
+  scope :by_recent_status, -> { includes(:account_stat).merge(AccountStat.order('last_status_at DESC NULLS LAST')).references(:account_stat) }
-  scope :by_recent_sign_in, -> { order(Arel.sql('(case when users.current_sign_in_at is null then 1 else 0 end) asc, users.current_sign_in_at desc, accounts.id desc')) }
+  scope :by_recent_sign_in, -> { order(Arel.sql('users.current_sign_in_at DESC NULLS LAST')) }
   scope :popular, -> { order('account_stats.followers_count desc') }
   scope :by_domain_and_subdomains, ->(domain) { where(domain: domain).or(where(arel_table[:domain].matches("%.#{domain}"))) }
   scope :not_excluded_by_account, ->(account) { where.not(id: account.excluded_from_timeline_account_ids) }
@@ -38,7 +38,7 @@ class Admin::ActionLogFilter
     destroy_status: { target_type: 'Status', action: 'destroy' }.freeze,
     destroy_user_role: { target_type: 'UserRole', action: 'destroy' }.freeze,
     destroy_canonical_email_block: { target_type: 'CanonicalEmailBlock', action: 'destroy' }.freeze,
-    disable_2fa_user: { target_type: 'User', action: 'disable' }.freeze,
+    disable_2fa_user: { target_type: 'User', action: 'disable_2fa' }.freeze,
     disable_custom_emoji: { target_type: 'CustomEmoji', action: 'disable' }.freeze,
     disable_user: { target_type: 'User', action: 'disable' }.freeze,
     enable_custom_emoji: { target_type: 'CustomEmoji', action: 'enable' }.freeze,
@@ -140,6 +140,6 @@ class Admin::StatusBatchAction
   end
 
   def allowed_status_ids
-    AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
+    Admin::AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
   end
 end
@@ -18,7 +18,7 @@ module AccountAvatar
 
   included do
     # Avatar upload
-    has_attached_file :avatar, styles: ->(f) { avatar_styles(f) }, convert_options: { all: '+profile "!icc,*" +set modify-date +set create-date' }, processors: [:lazy_thumbnail]
+    has_attached_file :avatar, styles: ->(f) { avatar_styles(f) }, convert_options: { all: '+profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, processors: [:lazy_thumbnail]
     validates_attachment_content_type :avatar, content_type: IMAGE_MIME_TYPES
     validates_attachment_size :avatar, less_than: LIMIT
     remotable_attachment :avatar, LIMIT, suppress_errors: false
@@ -19,7 +19,7 @@ module AccountHeader
 
   included do
     # Header upload
-    has_attached_file :header, styles: ->(f) { header_styles(f) }, convert_options: { all: '+profile "!icc,*" +set modify-date +set create-date' }, processors: [:lazy_thumbnail]
+    has_attached_file :header, styles: ->(f) { header_styles(f) }, convert_options: { all: '+profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, processors: [:lazy_thumbnail]
     validates_attachment_content_type :header, content_type: IMAGE_MIME_TYPES
     validates_attachment_size :header, less_than: LIMIT
     remotable_attachment :header, LIMIT, suppress_errors: false
@@ -37,7 +37,7 @@ class CustomEmoji < ApplicationRecord
   belongs_to :category, class_name: 'CustomEmojiCategory', optional: true
   has_one :local_counterpart, -> { where(domain: nil) }, class_name: 'CustomEmoji', primary_key: :shortcode, foreign_key: :shortcode
 
-  has_attached_file :image, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set modify-date +set create-date' } }, validate_media_type: false
+  has_attached_file :image, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' } }, validate_media_type: false
 
   before_validation :downcase_domain
 
@@ -169,7 +169,7 @@ class MediaAttachment < ApplicationRecord
   }.freeze
 
   GLOBAL_CONVERT_OPTIONS = {
-    all: '-quality 90 +profile "!icc,*" +set modify-date +set create-date',
+    all: '-quality 90 +profile "!icc,*" +set date:modify +set date:create +set date:timestamp',
   }.freeze
 
   belongs_to :account, inverse_of: :media_attachments, optional: true
@@ -50,7 +50,7 @@ class PreviewCard < ApplicationRecord
   has_and_belongs_to_many :statuses
   has_one :trend, class_name: 'PreviewCardTrend', inverse_of: :preview_card, dependent: :destroy
 
-  has_attached_file :image, processors: [:thumbnail, :blurhash_transcoder], styles: ->(f) { image_styles(f) }, convert_options: { all: '-quality 90 +profile "!icc,*" +set modify-date +set create-date' }, validate_media_type: false
+  has_attached_file :image, processors: [:thumbnail, :blurhash_transcoder], styles: ->(f) { image_styles(f) }, convert_options: { all: '-quality 90 +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, validate_media_type: false
 
   validates :url, presence: true, uniqueness: true
   validates_attachment_content_type :image, content_type: IMAGE_MIME_TYPES
@@ -25,7 +25,7 @@ class PreviewCardProvider < ApplicationRecord
 
   validates :domain, presence: true, uniqueness: true, domain: true
 
-  has_attached_file :icon, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set modify-date +set create-date' } }, validate_media_type: false
+  has_attached_file :icon, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' } }, validate_media_type: false
   validates_attachment :icon, content_type: { content_type: ICON_MIME_TYPES }, size: { less_than: LIMIT }
   remotable_attachment :icon, LIMIT
 
@@ -60,13 +60,13 @@ class RelationshipFilter
  def relationship_scope(value)
    case value
    when 'following'
-      account.following.eager_load(:account_stat).reorder(nil)
+      account.following.includes(:account_stat).reorder(nil)
    when 'followed_by'
-      account.followers.eager_load(:account_stat).reorder(nil)
+      account.followers.includes(:account_stat).reorder(nil)
    when 'mutual'
-      account.followers.eager_load(:account_stat).reorder(nil).merge(Account.where(id: account.following))
+      account.followers.includes(:account_stat).reorder(nil).merge(Account.where(id: account.following))
    when 'invited'
-      Account.joins(user: :invite).merge(Invite.where(user: account.user)).eager_load(:account_stat).reorder(nil)
+      Account.joins(user: :invite).merge(Invite.where(user: account.user)).includes(:account_stat).reorder(nil)
    else
      raise Mastodon::InvalidParameterError, "Unknown relationship: #{value}"
    end
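Note on the eager_load to includes swap above: with eager_load, Active Record always folds the association into a single LEFT OUTER JOIN query, whereas includes is free to preload account_stat in a separate query when nothing in the relation references the joined table, which keeps the join from being dragged through every downstream scope. A rough sketch of the difference (illustrative only, not Mastodon's exact SQL):

  # eager_load: one query with a LEFT OUTER JOIN
  account.following.eager_load(:account_stat).to_a
  #   SELECT accounts.*, account_stats.* FROM accounts LEFT OUTER JOIN account_stats ...

  # includes: typically two queries, the associated rows loaded separately
  account.following.includes(:account_stat).to_a
  #   SELECT accounts.* FROM accounts ...
  #   SELECT account_stats.* FROM account_stats WHERE account_id IN (...)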
@@ -112,7 +112,7 @@ class RelationshipFilter
  def activity_scope(value)
    case value
    when 'dormant'
-      AccountStat.where(last_status_at: nil).or(AccountStat.where(AccountStat.arel_table[:last_status_at].lt(1.month.ago)))
+      Account.joins(:account_stat).where(account_stat: { last_status_at: [nil, ...1.month.ago] })
    else
      raise Mastodon::InvalidParameterError, "Unknown activity: #{value}"
    end
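Note on the rewritten 'dormant' branch: the old scope returned AccountStat records, which could not be merged cleanly with the Account scopes produced by relationship_scope above; the new one stays on Account and uses a beginless range inside an array so both NULL and old last_status_at values match. Roughly, the generated SQL looks like this (sketch only, exact output varies by Rails version):

  Account.joins(:account_stat).where(account_stat: { last_status_at: [nil, ...1.month.ago] })
  #   SELECT accounts.* FROM accounts
  #   INNER JOIN account_stats ON account_stats.account_id = accounts.id
  #   WHERE (account_stats.last_status_at < '...one month ago...'
  #          OR account_stats.last_status_at IS NULL)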
@@ -39,7 +39,10 @@ class Report < ApplicationRecord
  scope :resolved, -> { where.not(action_taken_at: nil) }
  scope :with_accounts, -> { includes([:account, :target_account, :action_taken_by_account, :assigned_account].index_with({ user: [:invite_request, :invite] })) }

-  validates :comment, length: { maximum: 1_000 }
+  # A report is considered local if the reporter is local
+  delegate :local?, to: :account
+
+  validates :comment, length: { maximum: 1_000 }, if: :local?
  validates :rule_ids, absence: true, unless: :violation?

  validate :validate_rule_ids
@@ -50,10 +53,6 @@ class Report < ApplicationRecord
    violation: 2_000,
  }

-  def local?
-    false # Force uri_for to use uri attribute
-  end
-
  before_validation :set_uri, only: :create

  after_create_commit :trigger_webhooks
@@ -40,7 +40,7 @@ class SiteUpload < ApplicationRecord
    mascot: {}.freeze,
  }.freeze

-  has_attached_file :file, styles: ->(file) { STYLES[file.instance.var.to_sym] }, convert_options: { all: '-coalesce +profile "!icc,*" +set modify-date +set create-date' }, processors: [:lazy_thumbnail, :blurhash_transcoder, :type_corrector]
+  has_attached_file :file, styles: ->(file) { STYLES[file.instance.var.to_sym] }, convert_options: { all: '-coalesce +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, processors: [:lazy_thumbnail, :blurhash_transcoder, :type_corrector]

  validates_attachment_content_type :file, content_type: /\Aimage\/.*\z/
  validates :file, presence: true
@@ -371,13 +371,25 @@ class Status < ApplicationRecord

      account_ids.uniq!

+      status_ids = cached_items.map { |item| item.reblog? ? item.reblog_of_id : item.id }.uniq
+
      return if account_ids.empty?

      accounts = Account.where(id: account_ids).includes(:account_stat, :user).index_by(&:id)

+      status_stats = StatusStat.where(status_id: status_ids).index_by(&:status_id)
+
      cached_items.each do |item|
        item.account = accounts[item.account_id]
        item.reblog.account = accounts[item.reblog.account_id] if item.reblog?
+
+        if item.reblog?
+          status_stat = status_stats[item.reblog.id]
+          item.reblog.status_stat = status_stat if status_stat.present?
+        else
+          status_stat = status_stats[item.id]
+          item.status_stat = status_stat if status_stat.present?
+        end
      end
    end
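Note on the added status_stats preloading: counters such as favourites_count live on StatusStat, so statuses rehydrated from the Rails cache previously kept whatever counter values were serialized when the cache entry was written; attaching freshly loaded StatusStat rows keeps the cached statuses but refreshes their counts (the new CacheConcern spec further down exercises exactly this). A minimal sketch of the idea, with a hypothetical id:

  status_id = 123 # hypothetical
  status = Rails.cache.fetch("statuses/#{status_id}") { Status.find(status_id) }
  fresh_stat = StatusStat.find_by(status_id: status.id)
  status.status_stat = fresh_stat if fresh_stat.present?
  status.favourites_count # now reflects current interactions, not the cached snapshot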
@@ -12,7 +12,7 @@ class Admin::StatusPolicy < ApplicationPolicy
  end

  def show?
-    role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported?)
+    role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported? || viewable_through_normal_policy?)
  end

  def destroy?
@@ -26,4 +26,10 @@ class Admin::StatusPolicy < ApplicationPolicy
  def review?
    role.can?(:manage_taxonomies)
  end

+  private
+
+  def viewable_through_normal_policy?
+    StatusPolicy.new(current_account, record, @preloaded_relations).show?
+  end
end
@@ -1,7 +1,7 @@
# frozen_string_literal: true

class BackupPolicy < ApplicationPolicy
-  MIN_AGE = 1.week
+  MIN_AGE = 6.days

  def create?
    user_signed_in? && current_user.backups.where('created_at >= ?', MIN_AGE.ago).count.zero?
@@ -8,6 +8,6 @@ class ActivityPub::FetchRemotePollService < BaseService

    return unless supported_context?(json)

-    ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json)
+    ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json, json)
  end
end
@@ -76,6 +76,9 @@ class ActivityPub::ProcessAccountService < BaseService
    @account.suspended_at = domain_block.created_at if auto_suspend?
    @account.suspension_origin = :local if auto_suspend?
    @account.silenced_at = domain_block.created_at if auto_silence?

+    set_immediate_protocol_attributes!
+
    @account.save
  end
@@ -5,10 +5,11 @@ class ActivityPub::ProcessStatusUpdateService < BaseService
  include Redisable
  include Lockable

-  def call(status, json, request_id: nil)
+  def call(status, activity_json, object_json, request_id: nil)
    raise ArgumentError, 'Status has unsaved changes' if status.changed?

-    @json = json
+    @activity_json = activity_json
+    @json = object_json
    @status_parser = ActivityPub::Parser::StatusParser.new(@json)
    @uri = @status_parser.uri
    @status = status
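Note on the widened signature: the service now receives the wrapping activity document separately from the object document, so the forwarder (next hunk) can relay the original Update activity while status parsing keeps using the object JSON. Callers that only have an object simply pass it twice; both shapes appear elsewhere in this diff:

  # with a full Update activity and its embedded object
  ActivityPub::ProcessStatusUpdateService.new.call(status, activity_json, object_json, request_id: request_id)

  # with a bare object, such as a re-fetched poll
  ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json, json)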
@@ -308,6 +309,6 @@ class ActivityPub::ProcessStatusUpdateService < BaseService
  end

  def forwarder
-    @forwarder ||= ActivityPub::Forwarder.new(@account, @json, @status)
+    @forwarder ||= ActivityPub::Forwarder.new(@account, @activity_json, @status)
  end
end
@@ -12,7 +12,9 @@ class TranslateStatusService < BaseService
    @content = status_content_format(@status)
    @target_language = target_language

-    Rails.cache.fetch("translations/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) { translation_backend.translate(@content, @status.language, @target_language) }
+    Rails.cache.fetch("translations:v2/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) do
+      translation_backend.translate(@content, @status.language, @target_language)
+    end
  end

  private
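Note on the cache key change: moving from the translations/ prefix to translations:v2/ acts as a namespace bump, so every translation cached under the old scheme is silently orphaned and left to expire via its TTL rather than having to be purged. A tiny sketch of the pattern, with hypothetical names:

  # bumping the version segment invalidates all previously cached values at once
  Rails.cache.fetch("translations:v2/#{source_lang}/#{target_lang}/#{digest}", expires_in: 1.day) do
    backend.translate(text, source_lang, target_lang) # hypothetical backend object
  end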
@@ -29,7 +29,7 @@
  - Trends::PreviewCardProviderFilter::KEYS.each do |key|
    = hidden_field_tag key, params[key] if params[key].present?

-  .batch-table.optional
+  .batch-table
    .batch-table__toolbar
      %label.batch-table__toolbar__select.batch-checkbox-all
        = check_box_tag :batch_checkbox_all, nil, false
@@ -3,7 +3,7 @@
class AccountDeletionWorker
  include Sidekiq::Worker

-  sidekiq_options queue: 'pull', lock: :until_executed
+  sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.week.to_i

  def perform(account_id, options = {})
    reserve_username = options.with_indifferent_access.fetch(:reserve_username, true)
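Note on the sidekiq_options changes in this and the following worker hunks: these jobs already declare uniqueness locks (lock: :until_executed, presumably via the sidekiq-unique-jobs gem Mastodon ships with), and the new lock_ttl gives each lock an expiry so that a lock orphaned by a crashed process cannot block the job from ever being enqueued again. A minimal sketch with a hypothetical worker:

  class ExampleCleanupWorker # hypothetical name, not part of the diff
    include Sidekiq::Worker

    # only one instance of this job at a time; a stale lock self-expires after a day
    sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.day.to_i

    def perform(record_id)
      # ... work ...
    end
  end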
@@ -3,7 +3,7 @@
class ActivityPub::SynchronizeFeaturedCollectionWorker
  include Sidekiq::Worker

-  sidekiq_options queue: 'pull', lock: :until_executed
+  sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.day.to_i

  def perform(account_id, options = {})
    options = { note: true, hashtag: false }.deep_merge(options.deep_symbolize_keys)
@@ -3,7 +3,7 @@
class ActivityPub::SynchronizeFeaturedTagsCollectionWorker
  include Sidekiq::Worker

-  sidekiq_options queue: 'pull', lock: :until_executed
+  sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.day.to_i

  def perform(account_id, url)
    ActivityPub::FetchFeaturedTagsCollectionService.new.call(Account.find(account_id), url)
@@ -1,7 +1,7 @@
# frozen_string_literal: true

class ActivityPub::UpdateDistributionWorker < ActivityPub::RawDistributionWorker
-  sidekiq_options queue: 'push', lock: :until_executed
+  sidekiq_options queue: 'push', lock: :until_executed, lock_ttl: 1.day.to_i

  # Distribute an profile update to servers that might have a copy
  # of the account in question
@@ -3,7 +3,7 @@
class Admin::AccountDeletionWorker
  include Sidekiq::Worker

-  sidekiq_options queue: 'pull', lock: :until_executed
+  sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.week.to_i

  def perform(account_id)
    DeleteAccountService.new.call(Account.find(account_id), reserve_username: true, reserve_email: true)
@@ -3,7 +3,7 @@
class Admin::DomainPurgeWorker
  include Sidekiq::Worker

-  sidekiq_options queue: 'pull', lock: :until_executed
+  sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.week.to_i

  def perform(domain)
    PurgeDomainService.new.call(domain)
@@ -3,7 +3,7 @@
class PublishScheduledStatusWorker
  include Sidekiq::Worker

-  sidekiq_options lock: :until_executed
+  sidekiq_options lock: :until_executed, lock_ttl: 1.hour.to_i

  def perform(scheduled_status_id)
    scheduled_status = ScheduledStatus.find(scheduled_status_id)
@@ -3,7 +3,7 @@
class ResolveAccountWorker
  include Sidekiq::Worker

-  sidekiq_options queue: 'pull', lock: :until_executed
+  sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.day.to_i

  def perform(uri)
    ResolveAccountService.new.call(uri)
@@ -4,7 +4,7 @@ class Scheduler::IndexingScheduler
  include Sidekiq::Worker
  include Redisable

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 30.minutes.to_i

  IMPORT_BATCH_SIZE = 1000
  SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE
@@ -3,7 +3,7 @@
class Scheduler::ScheduledStatusesScheduler
  include Sidekiq::Worker

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.hour.to_i

  def perform
    publish_scheduled_statuses!
@@ -3,7 +3,7 @@
class Scheduler::Trends::RefreshScheduler
  include Sidekiq::Worker

-  sidekiq_options retry: 0
+  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 30.minutes.to_i

  def perform
    Trends.refresh!
@@ -3,7 +3,7 @@
class VerifyAccountLinksWorker
  include Sidekiq::Worker

-  sidekiq_options queue: 'default', retry: false, lock: :until_executed
+  sidekiq_options queue: 'default', retry: false, lock: :until_executed, lock_ttl: 1.hour.to_i

  def perform(account_id)
    account = Account.find(account_id)
@@ -3,7 +3,7 @@
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy

def host_to_url(str)
-  "http#{Rails.configuration.x.use_https ? 's' : ''}://#{str}".split('/').first if str.present?
+  "http#{Rails.configuration.x.use_https ? 's' : ''}://#{str.split('/').first}" if str.present?
end

base_host = Rails.configuration.x.web_domain
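Note on the host_to_url fix: the old version interpolated the host first and then split the finished URL on '/', so any configured value containing a path collapsed to just the scheme; the new version splits the raw value before interpolating. A quick before/after illustration (assuming use_https is true):

  str = 'files.example.com/media'

  "https://#{str}".split('/').first   # old behaviour => "https:"
  "https://#{str.split('/').first}"   # new behaviour => "https://files.example.com"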
@@ -3,6 +3,11 @@
require_relative '../../lib/mastodon/sidekiq_middleware'

Sidekiq.configure_server do |config|
+  if Rails.configuration.database_configuration.dig('production', 'adapter') == 'postgresql_makara'
+    STDERR.puts 'ERROR: Database replication is not currently supported in Sidekiq workers. Check your configuration.'
+    exit 1
+  end
+
  config.redis = REDIS_SIDEKIQ_PARAMS

  config.server_middleware do |chain|
@@ -292,7 +292,7 @@ Rails.application.routes.draw do
    end
  end

-  resources :instances, only: [:index, :show, :destroy], constraints: { id: /[^\/]+/ } do
+  resources :instances, only: [:index, :show, :destroy], constraints: { id: /[^\/]+/ }, format: 'html' do
    member do
      post :clear_delivery_errors
      post :restart_delivery
@@ -56,7 +56,7 @@ services:

  web:
    build: .
-    image: ghcr.io/mastodon/mastodon
+    image: ghcr.io/mastodon/mastodon:v4.1.10
    restart: always
    env_file: .env.production
    command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000"
@@ -77,7 +77,7 @@ services:

  streaming:
    build: .
-    image: ghcr.io/mastodon/mastodon
+    image: ghcr.io/mastodon/mastodon:v4.1.10
    restart: always
    env_file: .env.production
    command: node ./streaming
@@ -95,7 +95,7 @@ services:

  sidekiq:
    build: .
-    image: ghcr.io/mastodon/mastodon
+    image: ghcr.io/mastodon/mastodon:v4.1.10
    restart: always
    env_file: .env.production
    command: bundle exec sidekiq
|
@ -13,7 +13,7 @@ module Mastodon
|
|||||||
end
|
end
|
||||||
|
|
||||||
def patch
|
def patch
|
||||||
4
|
10
|
||||||
end
|
end
|
||||||
|
|
||||||
def flags
|
def flags
|
||||||
|
@ -37,14 +37,16 @@ module Paperclip
|
|||||||
@output_options['f'] = 'image2'
|
@output_options['f'] = 'image2'
|
||||||
@output_options['vframes'] = 1
|
@output_options['vframes'] = 1
|
||||||
when 'mp4'
|
when 'mp4'
|
||||||
|
unless eligible_to_passthrough?(metadata)
|
||||||
@output_options['acodec'] = 'aac'
|
@output_options['acodec'] = 'aac'
|
||||||
@output_options['strict'] = 'experimental'
|
@output_options['strict'] = 'experimental'
|
||||||
|
|
||||||
if high_vfr?(metadata) && !eligible_to_passthrough?(metadata)
|
if high_vfr?(metadata)
|
||||||
@output_options['vsync'] = 'vfr'
|
@output_options['vsync'] = 'vfr'
|
||||||
@output_options['r'] = @vfr_threshold
|
@output_options['r'] = @vfr_threshold
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
end
|
||||||
|
|
||||||
command_arguments, interpolations = prepare_command(destination)
|
command_arguments, interpolations = prepare_command(destination)
|
||||||
|
|
||||||
|
@@ -40,24 +40,36 @@ describe Admin::StatusesController do
    end

    describe 'POST #batch' do
-      before do
-        post :batch, params: { account_id: account.id, action => '', admin_status_batch_action: { status_ids: status_ids } }
-      end
+      subject { post :batch, params: { :account_id => account.id, action => '', :admin_status_batch_action => { status_ids: status_ids } } }

      let(:status_ids) { [media_attached_status.id] }

-      context 'when action is report' do
+      shared_examples 'when action is report' do
        let(:action) { 'report' }

        it 'creates a report' do
+          subject
+
          report = Report.last
          expect(report.target_account_id).to eq account.id
          expect(report.status_ids).to eq status_ids
        end

        it 'redirects to report page' do
+          subject
+
          expect(response).to redirect_to(admin_report_path(Report.last.id))
        end
      end
+
+      it_behaves_like 'when action is report'
+
+      context 'when the moderator is blocked by the author' do
+        before do
+          account.block!(user.account)
+        end
+
+        it_behaves_like 'when action is report'
+      end
    end
  end
@@ -6,35 +6,65 @@ describe Api::V1::Timelines::TagController do
  render_views

  let(:user) { Fabricate(:user) }
+  let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read:statuses') }

  before do
    allow(controller).to receive(:doorkeeper_token) { token }
  end

-  context 'with a user context' do
-    let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id) }
-
  describe 'GET #show' do
+    subject do
+      get :show, params: { id: 'test' }
+    end
+
    before do
      PostStatusService.new.call(user.account, text: 'It is a #test')
    end

-    it 'returns http success' do
-      get :show, params: { id: 'test' }
+    context 'when the instance allows public preview' do
+      context 'when the user is not authenticated' do
+        let(:token) { nil }
+
+        it 'returns http success', :aggregate_failures do
+          subject
+
+          expect(response).to have_http_status(200)
+          expect(response.headers['Link'].links.size).to eq(2)
+        end
+      end
+
+      context 'when the user is authenticated' do
+        it 'returns http success', :aggregate_failures do
+          subject
+
          expect(response).to have_http_status(200)
          expect(response.headers['Link'].links.size).to eq(2)
        end
      end
    end

-  context 'without a user context' do
-    let(:token) { Fabricate(:accessible_access_token, resource_owner_id: nil) }
+    context 'when the instance does not allow public preview' do
+      before do
+        Form::AdminSettings.new(timeline_preview: false).save
+      end
+
+      context 'when the user is not authenticated' do
+        let(:token) { nil }
+
+        it 'returns http unauthorized' do
+          subject
+
+          expect(response).to have_http_status(401)
+        end
+      end
+
+      context 'when the user is authenticated' do
+        it 'returns http success', :aggregate_failures do
+          subject

-    describe 'GET #show' do
-      it 'returns http success' do
-        get :show, params: { id: 'test' }
          expect(response).to have_http_status(200)
-        expect(response.headers['Link']).to be_nil
+          expect(response.headers['Link'].links.size).to eq(2)
+        end
      end
    end
  end
end
@@ -13,12 +13,17 @@ RSpec.describe CacheConcern, type: :controller do
    def empty_relation
      render plain: cache_collection(Status.none, Status).size
    end
+
+    def account_statuses_favourites
+      render plain: cache_collection(Status.where(account_id: params[:id]), Status).map(&:favourites_count)
+    end
  end

  before do
    routes.draw do
      get 'empty_array' => 'anonymous#empty_array'
-      post 'empty_relation' => 'anonymous#empty_relation'
+      get 'empty_relation' => 'anonymous#empty_relation'
+      get 'account_statuses_favourites' => 'anonymous#account_statuses_favourites'
    end
  end

@@ -36,5 +41,20 @@ RSpec.describe CacheConcern, type: :controller do
        expect(response.body).to eq '0'
      end
    end
+
+    context 'when given a collection of statuses' do
+      let!(:account) { Fabricate(:account) }
+      let!(:status) { Fabricate(:status, account: account) }
+
+      it 'correctly updates with new interactions' do
+        get :account_statuses_favourites, params: { id: account.id }
+        expect(response.body).to eq '[0]'
+
+        FavouriteService.new.call(account, status)
+
+        get :account_statuses_favourites, params: { id: account.id }
+        expect(response.body).to eq '[1]'
+      end
+    end
  end
end
@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
Fabricator(:account_stat) do
-  account nil
-  statuses_count ""
-  following_count ""
-  followers_count ""
+  account { Fabricate.build(:account) }
+  statuses_count '123'
+  following_count '456'
+  followers_count '789'
end
@@ -37,6 +37,37 @@ RSpec.describe ActivityPub::Activity::Flag do
    end
  end

+  context 'when the report comment is excessively long' do
+    subject do
+      described_class.new({
+        '@context': 'https://www.w3.org/ns/activitystreams',
+        id: flag_id,
+        type: 'Flag',
+        content: long_comment,
+        actor: ActivityPub::TagManager.instance.uri_for(sender),
+        object: [
+          ActivityPub::TagManager.instance.uri_for(flagged),
+          ActivityPub::TagManager.instance.uri_for(status),
+        ],
+      }.with_indifferent_access, sender)
+    end
+
+    let(:long_comment) { Faker::Lorem.characters(number: 6000) }
+
+    before do
+      subject.perform
+    end
+
+    it 'creates a report but with a truncated comment' do
+      report = Report.find_by(account: sender, target_account: flagged)
+
+      expect(report).to_not be_nil
+      expect(report.comment.length).to eq 5000
+      expect(report.comment).to eq long_comment[0...5000]
+      expect(report.status_ids).to eq [status.id]
+    end
+  end
+
  context 'when the reported status is private and should not be visible to the remote server' do
    let(:status) { Fabricate(:status, account: flagged, uri: 'foobar', visibility: :private) }

@@ -695,7 +695,7 @@ RSpec.describe Account, type: :model do
      expect(subject.match('Check this out https://medium.com/@alice/some-article#.abcdef123')).to be_nil
    end

-    xit 'does not match URL querystring' do
+    it 'does not match URL query string' do
      expect(subject.match('https://example.com/?x=@alice')).to be_nil
    end
  end
@@ -6,32 +6,60 @@ describe RelationshipFilter
  let(:account) { Fabricate(:account) }

  describe '#results' do
-    context 'when default params are used' do
-      let(:subject) do
-        RelationshipFilter.new(account, 'order' => 'active').results
-      end
+    let(:account_of_7_months) { Fabricate(:account_stat, statuses_count: 1, last_status_at: 7.months.ago).account }
+    let(:account_of_1_day) { Fabricate(:account_stat, statuses_count: 1, last_status_at: 1.day.ago).account }
+    let(:account_of_3_days) { Fabricate(:account_stat, statuses_count: 1, last_status_at: 3.days.ago).account }
+    let(:silent_account) { Fabricate(:account_stat, statuses_count: 0, last_status_at: nil).account }

    before do
-      add_following_account_with(last_status_at: 7.days.ago)
-      add_following_account_with(last_status_at: 1.day.ago)
-      add_following_account_with(last_status_at: 3.days.ago)
+      account.follow!(account_of_7_months)
+      account.follow!(account_of_1_day)
+      account.follow!(account_of_3_days)
+      account.follow!(silent_account)
+    end
+
+    context 'when ordering by last activity' do
+      context 'when not filtering' do
+        subject do
+          described_class.new(account, 'order' => 'active').results
        end

        it 'returns followings ordered by last activity' do
-        expected_result = account.following.eager_load(:account_stat).reorder(nil).by_recent_status
-
-        expect(subject).to eq expected_result
+          expect(subject).to eq [account_of_1_day, account_of_3_days, account_of_7_months, silent_account]
+        end
+      end
+
+      context 'when filtering for dormant accounts' do
+        subject do
+          described_class.new(account, 'order' => 'active', 'activity' => 'dormant').results
+        end
+
+        it 'returns dormant followings ordered by last activity' do
+          expect(subject).to eq [account_of_7_months, silent_account]
+        end
      end
    end
-  end

-  def add_following_account_with(last_status_at:)
-    following_account = Fabricate(:account)
-    Fabricate(:account_stat, account: following_account,
-                             last_status_at: last_status_at,
-                             statuses_count: 1,
-                             following_count: 0,
-                             followers_count: 0)
-    Fabricate(:follow, account: account, target_account: following_account).account
+    context 'when ordering by account creation' do
+      context 'when not filtering' do
+        subject do
+          described_class.new(account, 'order' => 'recent').results
+        end
+
+        it 'returns followings ordered by last account creation' do
+          expect(subject).to eq [silent_account, account_of_3_days, account_of_1_day, account_of_7_months]
+        end
+      end
+
+      context 'when filtering for dormant accounts' do
+        subject do
+          described_class.new(account, 'order' => 'recent', 'activity' => 'dormant').results
+        end
+
+        it 'returns dormant followings ordered by last activity' do
+          expect(subject).to eq [silent_account, account_of_7_months]
+        end
+      end
+    end
  end
end
@@ -125,10 +125,17 @@ describe Report do
      expect(report).to be_valid
    end

-    it 'is invalid if comment is longer than 1000 characters' do
+    let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
+
+    it 'is invalid if comment is longer than 1000 characters only if reporter is local' do
      report = Fabricate.build(:report, comment: Faker::Lorem.characters(number: 1001))
-      report.valid?
+      expect(report.valid?).to be false
      expect(report).to model_have_error_on_field(:comment)
    end

+    it 'is valid if comment is longer than 1000 characters and reporter is not local' do
+      report = Fabricate.build(:report, account: remote_account, comment: Faker::Lorem.characters(number: 1001))
+      expect(report.valid?).to be true
+    end
  end
end
spec/requests/content_security_policy_spec.rb (new file, 27 additions)
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe 'Content-Security-Policy' do
+  it 'sets the expected CSP headers' do
+    allow(SecureRandom).to receive(:base64).with(16).and_return('ZbA+JmE7+bK8F5qvADZHuQ==')
+
+    get '/'
+    expect(response.headers['Content-Security-Policy'].split(';').map(&:strip)).to contain_exactly(
+      "base-uri 'none'",
+      "default-src 'none'",
+      "frame-ancestors 'none'",
+      "font-src 'self' https://cb6e6126.ngrok.io",
+      "img-src 'self' https: data: blob: https://cb6e6126.ngrok.io",
+      "style-src 'self' https://cb6e6126.ngrok.io 'nonce-ZbA+JmE7+bK8F5qvADZHuQ=='",
+      "media-src 'self' https: data: https://cb6e6126.ngrok.io",
+      "frame-src 'self' https:",
+      "manifest-src 'self' https://cb6e6126.ngrok.io",
+      "form-action 'self'",
+      "child-src 'self' blob: https://cb6e6126.ngrok.io",
+      "worker-src 'self' blob: https://cb6e6126.ngrok.io",
+      "connect-src 'self' data: blob: https://cb6e6126.ngrok.io https://cb6e6126.ngrok.io ws://localhost:4000",
+      "script-src 'self' https://cb6e6126.ngrok.io 'wasm-unsafe-eval'"
+    )
+  end
+end
@@ -41,12 +41,12 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do

  describe '#call' do
    it 'updates text' do
-      subject.call(status, json)
+      subject.call(status, json, json)
      expect(status.reload.text).to eq 'Hello universe'
    end

    it 'updates content warning' do
-      subject.call(status, json)
+      subject.call(status, json, json)
      expect(status.reload.spoiler_text).to eq 'Show more'
    end
@@ -64,7 +64,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    end

    before do
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'does not create any edits' do
@@ -87,7 +87,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    end

    before do
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'does not create any edits' do
@@ -135,7 +135,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    end

    before do
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'does not create any edits' do
@@ -188,7 +188,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    end

    before do
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'does not create any edits' do
@@ -216,11 +216,11 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    end

    it 'does not create any edits' do
-      expect { subject.call(status, json) }.not_to change { status.reload.edits.pluck(&:id) }
+      expect { subject.call(status, json, json) }.to_not(change { status.reload.edits.pluck(&:id) })
    end

    it 'does not update the text, spoiler_text or edited_at' do
-      expect { subject.call(status, json) }.not_to change { s = status.reload; [s.text, s.spoiler_text, s.edited_at] }
+      expect { subject.call(status, json, json) }.to_not(change { s = status.reload; [s.text, s.spoiler_text, s.edited_at] })
    end
  end

@@ -235,7 +235,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    end

    before do
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'does not create any edits' do
@@ -259,7 +259,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do

    before do
      status.update(ordered_media_attachment_ids: nil)
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'does not create any edits' do
@@ -273,7 +273,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do

  context 'originally without tags' do
    before do
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'updates tags' do
@@ -299,7 +299,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    end

    before do
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'updates tags' do
@@ -309,7 +309,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do

  context 'originally without mentions' do
    before do
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'updates mentions' do
@@ -321,7 +321,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    let(:mentions) { [alice, bob] }

    before do
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'updates mentions' do
@@ -332,7 +332,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
  context 'originally without media attachments' do
    before do
      stub_request(:get, 'https://example.com/foo.png').to_return(body: attachment_fixture('emojo.png'))
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    let(:payload) do
@@ -382,7 +382,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do

    before do
      allow(RedownloadMediaWorker).to receive(:perform_async)
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'updates the existing media attachment in-place' do
@@ -410,7 +410,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    before do
      poll = Fabricate(:poll, status: status)
      status.update(preloadable_poll: poll)
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'removes poll' do
@@ -440,7 +440,7 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    end

    before do
-      subject.call(status, json)
+      subject.call(status, json, json)
    end

    it 'creates a poll' do
@@ -456,12 +456,12 @@ RSpec.describe ActivityPub::ProcessStatusUpdateService, type: :service do
    end

    it 'creates edit history' do
-      subject.call(status, json)
+      subject.call(status, json, json)
      expect(status.edits.reload.map(&:text)).to eq ['Hello world', 'Hello universe']
    end

    it 'sets edited timestamp' do
-      subject.call(status, json)
+      subject.call(status, json, json)
      expect(status.reload.edited_at.to_s).to eq '2021-09-08 22:39:25 UTC'
    end
  end
@@ -4,6 +4,14 @@ RSpec.describe ReportService, type: :service do
  subject { described_class.new }

  let(:source_account) { Fabricate(:account) }
+  let(:target_account) { Fabricate(:account) }
+
+  context 'with a local account' do
+    it 'has a uri' do
+      report = subject.call(source_account, target_account)
+      expect(report.uri).to_not be_nil
+    end
+  end

  context 'for a remote account' do
    let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
@@ -226,9 +226,15 @@ const startWorker = async (workerId) => {
    callbacks.forEach(callback => callback(json));
  };

+  /**
+   * @callback SubscriptionListener
+   * @param {ReturnType<parseJSON>} json of the message
+   * @returns void
+   */
+
  /**
   * @param {string} channel
-   * @param {function(string): void} callback
+   * @param {SubscriptionListener} callback
   */
  const subscribe = (channel, callback) => {
    log.silly(`Adding listener for ${channel}`);
@@ -245,7 +251,7 @@ const startWorker = async (workerId) => {

  /**
   * @param {string} channel
-   * @param {function(Object<string, any>): void} callback
+   * @param {SubscriptionListener} callback
   */
  const unsubscribe = (channel, callback) => {
    log.silly(`Removing listener for ${channel}`);
@ -623,51 +629,66 @@ const startWorker = async (workerId) => {
|
|||||||
* @param {string[]} ids
|
* @param {string[]} ids
|
||||||
* @param {any} req
|
* @param {any} req
|
||||||
* @param {function(string, string): void} output
|
* @param {function(string, string): void} output
|
||||||
* @param {function(string[], function(string): void): void} attachCloseHandler
|
* @param {undefined | function(string[], SubscriptionListener): void} attachCloseHandler
|
||||||
* @param {boolean=} needsFiltering
|
* @param {boolean=} needsFiltering
|
||||||
* @returns {function(object): void}
|
* @returns {SubscriptionListener}
|
||||||
*/
|
*/
|
||||||
const streamFrom = (ids, req, output, attachCloseHandler, needsFiltering = false) => {
|
const streamFrom = (ids, req, output, attachCloseHandler, needsFiltering = false) => {
|
||||||
const accountId = req.accountId || req.remoteAddress;
|
const accountId = req.accountId || req.remoteAddress;
|
||||||
|
|
||||||
log.verbose(req.requestId, `Starting stream from ${ids.join(', ')} for ${accountId}`);
|
log.verbose(req.requestId, `Starting stream from ${ids.join(', ')} for ${accountId}`);
|
||||||
|
|
||||||
// Currently message is of type string, soon it'll be Record<string, any>
|
const transmit = (event, payload) => {
|
||||||
const listener = message => {
|
// TODO: Replace "string"-based delete payloads with object payloads:
|
||||||
const { event, payload, queued_at } = message;
|
|
||||||
|
|
||||||
const transmit = () => {
|
|
||||||
const now = new Date().getTime();
|
|
||||||
const delta = now - queued_at;
|
|
||||||
const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
|
const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
|
||||||
|
|
||||||
log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload} Delay: ${delta}ms`);
|
log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload}`);
|
||||||
output(event, encodedPayload);
|
output(event, encodedPayload);
|
||||||
};
|
};
|
||||||
|
|
||||||
// Only messages that may require filtering are statuses, since notifications
|
// The listener used to process each message off the redis subscription,
|
||||||
// are already personalized and deletes do not matter
|
// message here is an object with an `event` and `payload` property. Some
|
||||||
if (!needsFiltering || event !== 'update') {
|
// events also include a queued_at value, but this is being removed shortly.
|
||||||
transmit();
|
/** @type {SubscriptionListener} */
|
||||||
|
const listener = message => {
|
||||||
|
const { event, payload } = message;
|
||||||
|
|
||||||
|
// Streaming only needs to apply filtering to some channels and only to
|
||||||
|
// some events. This is because majority of the filtering happens on the
|
||||||
|
// Ruby on Rails side when producing the event for streaming.
|
||||||
|
//
|
||||||
|
// The only events that require filtering from the streaming server are
|
||||||
|
// `update` and `status.update`, all other events are transmitted to the
|
||||||
|
// client as soon as they're received (pass-through).
|
||||||
|
//
|
||||||
|
// The channels that need filtering are determined in the function
|
||||||
|
// `channelNameToIds` defined below:
|
||||||
|
if (!needsFiltering || (event !== 'update' && event !== 'status.update')) {
|
||||||
|
transmit(event, payload);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const unpackedPayload = payload;
|
// The rest of the logic from here on in this function is to handle
|
||||||
const targetAccountIds = [unpackedPayload.account.id].concat(unpackedPayload.mentions.map(item => item.id));
|
// filtering of statuses:
|
||||||
const accountDomain = unpackedPayload.account.acct.split('@')[1];
|
|
||||||
|
|
||||||
if (Array.isArray(req.chosenLanguages) && unpackedPayload.language !== null && req.chosenLanguages.indexOf(unpackedPayload.language) === -1) {
|
// Filter based on language:
|
||||||
log.silly(req.requestId, `Message ${unpackedPayload.id} filtered by language (${unpackedPayload.language})`);
|
if (Array.isArray(req.chosenLanguages) && payload.language !== null && req.chosenLanguages.indexOf(payload.language) === -1) {
|
||||||
|
log.silly(req.requestId, `Message ${payload.id} filtered by language (${payload.language})`);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// When the account is not logged in, it is not necessary to confirm the block or mute
|
// When the account is not logged in, it is not necessary to confirm the block or mute
|
||||||
if (!req.accountId) {
|
if (!req.accountId) {
|
||||||
transmit();
|
transmit(event, payload);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
pgPool.connect((err, client, done) => {
|
// Filter based on domain blocks, blocks, mutes, or custom filters:
|
||||||
|
const targetAccountIds = [payload.account.id].concat(payload.mentions.map(item => item.id));
|
||||||
|
const accountDomain = payload.account.acct.split('@')[1];
|
||||||
|
|
||||||
|
// TODO: Move this logic out of the message handling loop
|
||||||
|
pgPool.connect((err, client, releasePgConnection) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
log.error(err);
|
log.error(err);
|
||||||
return;
|
return;
|
||||||
@ -682,40 +703,57 @@ const startWorker = async (workerId) => {
|
|||||||
SELECT 1
|
SELECT 1
|
||||||
FROM mutes
|
FROM mutes
|
||||||
WHERE account_id = $1
|
WHERE account_id = $1
|
||||||
AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, unpackedPayload.account.id].concat(targetAccountIds)),
|
AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, payload.account.id].concat(targetAccountIds)),
|
||||||
];
|
];
|
||||||
|
|
||||||
if (accountDomain) {
|
if (accountDomain) {
|
||||||
queries.push(client.query('SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2', [req.accountId, accountDomain]));
|
queries.push(client.query('SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2', [req.accountId, accountDomain]));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!unpackedPayload.filtered && !req.cachedFilters) {
|
if (!payload.filtered && !req.cachedFilters) {
|
||||||
queries.push(client.query('SELECT filter.id AS id, filter.phrase AS title, filter.context AS context, filter.expires_at AS expires_at, filter.action AS filter_action, keyword.keyword AS keyword, keyword.whole_word AS whole_word FROM custom_filter_keywords keyword JOIN custom_filters filter ON keyword.custom_filter_id = filter.id WHERE filter.account_id = $1 AND (filter.expires_at IS NULL OR filter.expires_at > NOW())', [req.accountId]));
|
queries.push(client.query('SELECT filter.id AS id, filter.phrase AS title, filter.context AS context, filter.expires_at AS expires_at, filter.action AS filter_action, keyword.keyword AS keyword, keyword.whole_word AS whole_word FROM custom_filter_keywords keyword JOIN custom_filters filter ON keyword.custom_filter_id = filter.id WHERE filter.account_id = $1 AND (filter.expires_at IS NULL OR filter.expires_at > NOW())', [req.accountId]));
|
||||||
}
|
}
|
||||||
|
|
||||||
Promise.all(queries).then(values => {
|
Promise.all(queries).then(values => {
|
||||||
done();
|
releasePgConnection();
|
||||||
|
|
||||||
|
// Handling blocks & mutes and domain blocks: If one of those applies,
|
||||||
|
// then we don't transmit the payload of the event to the client
|
||||||
if (values[0].rows.length > 0 || (accountDomain && values[1].rows.length > 0)) {
|
if (values[0].rows.length > 0 || (accountDomain && values[1].rows.length > 0)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!unpackedPayload.filtered && !req.cachedFilters) {
|
// If the payload already contains the `filtered` property, it means
|
||||||
|
// that filtering has been applied on the ruby on rails side, as
|
||||||
|
// such, we don't need to construct or apply the filters in streaming:
|
||||||
|
if (Object.prototype.hasOwnProperty.call(payload, "filtered")) {
|
||||||
|
transmit(event, payload);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handling for constructing the custom filters and caching them on the request
|
||||||
|
// TODO: Move this logic out of the message handling lifecycle
|
||||||
|
if (!req.cachedFilters) {
|
||||||
const filterRows = values[accountDomain ? 2 : 1].rows;
|
const filterRows = values[accountDomain ? 2 : 1].rows;
|
||||||
|
|
||||||
req.cachedFilters = filterRows.reduce((cache, row) => {
|
req.cachedFilters = filterRows.reduce((cache, filter) => {
|
||||||
if (cache[row.id]) {
|
if (cache[filter.id]) {
|
||||||
cache[row.id].keywords.push([row.keyword, row.whole_word]);
|
cache[filter.id].keywords.push([filter.keyword, filter.whole_word]);
|
||||||
} else {
|
} else {
|
||||||
cache[row.id] = {
|
cache[filter.id] = {
|
||||||
keywords: [[row.keyword, row.whole_word]],
|
keywords: [[filter.keyword, filter.whole_word]],
|
||||||
expires_at: row.expires_at,
|
expires_at: filter.expires_at,
|
||||||
repr: {
|
filter: {
|
||||||
id: row.id,
|
id: filter.id,
|
||||||
title: row.title,
|
title: filter.title,
|
||||||
context: row.context,
|
context: filter.context,
|
||||||
expires_at: row.expires_at,
|
expires_at: filter.expires_at,
|
||||||
filter_action: ['warn', 'hide'][row.filter_action],
|
// filter.filter_action is the value from the
|
||||||
|
// custom_filters.action database column, it is an integer
|
||||||
|
// representing a value in an enum defined by Ruby on Rails:
|
||||||
|
//
|
||||||
|
// enum { warn: 0, hide: 1 }
|
||||||
|
filter_action: ['warn', 'hide'][filter.filter_action],
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@ -723,6 +761,10 @@ const startWorker = async (workerId) => {
|
|||||||
return cache;
|
return cache;
|
||||||
}, {});
|
}, {});
|
||||||
|
|
||||||
|
// Construct the regular expressions for the custom filters: This
|
||||||
|
// needs to be done in a separate loop as the database returns one
|
||||||
|
// filterRow per keyword, so we need all the keywords before
|
||||||
|
// constructing the regular expression
|
||||||
Object.keys(req.cachedFilters).forEach((key) => {
|
Object.keys(req.cachedFilters).forEach((key) => {
|
||||||
req.cachedFilters[key].regexp = new RegExp(req.cachedFilters[key].keywords.map(([keyword, whole_word]) => {
|
req.cachedFilters[key].regexp = new RegExp(req.cachedFilters[key].keywords.map(([keyword, whole_word]) => {
|
||||||
let expr = keyword.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
let expr = keyword.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||||
@@ -742,31 +784,58 @@ const startWorker = async (workerId) => {
             });
           }
 
-        // Check filters
-        if (req.cachedFilters && !unpackedPayload.filtered) {
-          const status = unpackedPayload;
-          const searchContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/<br\s*\/?>/g, '\n').replace(/<\/p><p>/g, '\n\n');
-          const searchIndex = JSDOM.fragment(searchContent).textContent;
+        // Apply cachedFilters against the payload, constructing a
+        // `filter_results` array of FilterResult entities
+        if (req.cachedFilters) {
+          const status = payload;
+          // TODO: Calculate searchableContent in Ruby on Rails:
+          const searchableContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/<br\s*\/?>/g, '\n').replace(/<\/p><p>/g, '\n\n');
+          const searchableTextContent = JSDOM.fragment(searchableContent).textContent;
 
           const now = new Date();
-          payload.filtered = [];
-          Object.values(req.cachedFilters).forEach((cachedFilter) => {
-            if ((cachedFilter.expires_at === null || cachedFilter.expires_at > now)) {
-              const keyword_matches = searchIndex.match(cachedFilter.regexp);
-              if (keyword_matches) {
-                payload.filtered.push({
-                  filter: cachedFilter.repr,
-                  keyword_matches,
-                });
-              }
-            }
-          });
-        }
-
-        transmit();
+          const filter_results = Object.values(req.cachedFilters).reduce((results, cachedFilter) => {
+            // Check the filter hasn't expired before applying:
+            if (cachedFilter.expires_at !== null && cachedFilter.expires_at < now) {
+              return results;
+            }
+
+            // Just in case JSDOM fails to find textContent in searchableContent
+            if (!searchableTextContent) {
+              return results;
+            }
+
+            const keyword_matches = searchableTextContent.match(cachedFilter.regexp);
+            if (keyword_matches) {
+              // results is an Array of FilterResult; status_matches is always
+              // null as we are only applying the keyword-based custom
+              // filters, not the status-based custom filters.
+              // https://docs.joinmastodon.org/entities/FilterResult/
+              results.push({
+                filter: cachedFilter.filter,
+                keyword_matches,
+                status_matches: null
+              });
+            }
+
+            return results;
+          }, []);
+
+          // Send the payload + the FilterResults as the `filtered` property
+          // to the streaming connection. To reach this code, the `event` must
+          // have been either `update` or `status.update`, meaning the
+          // `payload` is a Status entity, which has a `filtered` property:
+          //
+          // filtered: https://docs.joinmastodon.org/entities/Status/#filtered
+          transmit(event, {
+            ...payload,
+            filtered: filter_results
+          });
+        } else {
+          transmit(event, payload);
+        }
       }).catch(err => {
+        releasePgConnection();
         log.error(err);
-        done();
       });
     });
   };
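The net effect on the wire: rather than mutating payload.filtered in place, the stream now transmits a copy of the Status with the computed FilterResult entries attached. A rough sketch of such a transmitted payload, with placeholder values:

// Illustrative payload only; the field values are placeholders.
const transmitted = {
  // ...all of the original Status entity fields (id, content, account, etc.)
  filtered: [
    {
      filter: {
        id: '3',
        title: 'Spoilers',
        context: ['home'],
        expires_at: null,
        filter_action: 'hide',
      },
      keyword_matches: ['spoiler'],
      status_matches: null, // streaming only evaluates keyword-based filters
    },
  ],
};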
@@ -775,7 +844,7 @@ const startWorker = async (workerId) => {
       subscribe(`${redisPrefix}${id}`, listener);
     });
 
-    if (attachCloseHandler) {
+    if (typeof attachCloseHandler === 'function') {
       attachCloseHandler(ids.map(id => `${redisPrefix}${id}`), listener);
     }
 
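A small sketch of why the stricter guard matters: with a plain truthiness check, a truthy non-function value reaching this point would be invoked and throw, whereas the typeof check simply skips it. The wrapper below is illustrative, not code from this file:

const maybeAttach = (attachCloseHandler, channelIds, listener) => {
  // Only call attachCloseHandler when it is actually callable; a truthy
  // non-function value (e.g. `true`) would otherwise raise a TypeError here.
  if (typeof attachCloseHandler === 'function') {
    attachCloseHandler(channelIds, listener);
  }
};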
@@ -812,12 +881,13 @@ const startWorker = async (workerId) => {
   /**
    * @param {any} req
    * @param {function(): void} [closeHandler]
-   * @return {function(string[]): void}
+   * @returns {function(string[], SubscriptionListener): void}
    */
-  const streamHttpEnd = (req, closeHandler = undefined) => (ids) => {
+
+  const streamHttpEnd = (req, closeHandler = undefined) => (ids, listener) => {
     req.on('close', () => {
       ids.forEach(id => {
-        unsubscribe(id);
+        unsubscribe(id, listener);
       });
 
       if (closeHandler) {
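Passing the listener through to unsubscribe matters when several streams on one connection share a channel: only that stream's callback is removed, and the channel stays subscribed for the others. A minimal sketch of the pattern, using a hypothetical channel-to-listeners registry rather than the actual subscription bookkeeping in this file:

// Hypothetical registry mapping a channel name to its listeners.
const subscriptions = new Map();

const subscribe = (channel, listener) => {
  if (!subscriptions.has(channel)) {
    subscriptions.set(channel, new Set());
  }
  subscriptions.get(channel).add(listener);
};

const unsubscribe = (channel, listener) => {
  const listeners = subscriptions.get(channel);
  if (!listeners) return;
  listeners.delete(listener); // remove only this stream's callback
  if (listeners.size === 0) {
    subscriptions.delete(channel); // last listener gone: drop the channel
  }
};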
@@ -1077,7 +1147,7 @@ const startWorker = async (workerId) => {
    * @typedef WebSocketSession
    * @property {any} socket
    * @property {any} request
-   * @property {Object.<string, { listener: function(string): void, stopHeartbeat: function(): void }>} subscriptions
+   * @property {Object.<string, { listener: SubscriptionListener, stopHeartbeat: function(): void }>} subscriptions
    */
 
   /**