Split out moderation backend (#1970)

* mv appview

* copy

* finalize copy

* package names

* big WIP

* first pass at mod service

* some tidy

* tidy & fix compiler errors

* rename to ozone, db migrations, add to dev-env & pds cfg

* getRecord & getRepo mostly working

* fix open handle

* get record tests all working

* moderation events working

* statuses working

* tidy test suite

* search repos

* server & db tests

* moderation tests

* wip daemon + push events

* pds fanout working

* fix db test

* fanning takedowns out to appview

* rm try/catch

* bsky moderation test

* introduce mod subject wrappers

* more tidy

* refactor event reversal

* tidy some db stuff

* tidy

* rename service to mod-service

* fix test

* tidy config

* refactor auth in bsky

* wip patching up auto-mod

* add label ingester in appview

* fix a couple build issues

* fix some timing bugs

* tidy polling logic

* fix up tests

* fix some pds tests

* eslint ignore

* fix ozone tests

* move seeds to dev-env

* move images around

* fix db schemas

* use service auth admin reqs

* fix remaining tests

* auth tests bsky

* another test

* random tidy

* fix up search

* clean up bsky mod service

* more tidy

* default attempts to 0

* tidy old test

* random tidy

* tidy package.json

* tidy logger

* takedownId -> takedownRef

* misc pr feedback

* split daemon out from ozone application

* fix blob takedown migration

* refactor ozone config

* do push event fanout on write instead of on read

* make suspend error work again

* add attempts check & add supporting index

* fix takedown test ref

* get tests working

* rm old test

* fix timing bug in event pusher tests

* attempt another fix for timing bug

* await req

* service files

* remove labelerDid cfg

* update snaps for labeler did + some cfg changes

* fix more snaps

* pnpm i

* build ozone images

* build

* make label provider optional

* fix build issues

* fix build

* fix build

* build pds

* build on ghcr

* fix syntax in entry

* another fix

* use correct import

* export logger

* remove event reverser

* adjust push event fanout

* push out multiple

* remove builds
This commit is contained in:
Daniel Holmgren 2024-01-05 17:06:54 -06:00 committed by GitHub
parent 65254ab148
commit de2dbc2903
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
468 changed files with 26632 additions and 4879 deletions

View File

@ -1,3 +1,4 @@
packages/api/src/client
packages/bsky/src/lexicon
packages/pds/src/lexicon
packages/ozone/src/lexicon

View File

@ -3,7 +3,6 @@ on:
push:
branches:
- main
- appeal-report
env:
REGISTRY: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_REGISTRY }}
USERNAME: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_USERNAME }}

View File

@ -0,0 +1,54 @@
# Builds the ozone service container image and pushes it to the AWS ECR
# package registry on every push to main.
name: build-and-push-ozone-aws
on:
  push:
    branches:
      - main
env:
  REGISTRY: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_REGISTRY }}
  USERNAME: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_USERNAME }}
  PASSWORD: ${{ secrets.AWS_ECR_REGISTRY_USEAST2_PACKAGES_PASSWORD }}
  IMAGE_NAME: ozone
jobs:
  ozone-container-aws:
    # Guard: never run on forks, which lack the registry secrets.
    if: github.repository == 'bluesky-social/atproto'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      - name: Setup Docker buildx
        uses: docker/setup-buildx-action@v2
      - name: Log into registry ${{ env.REGISTRY }}
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          # fixed: expression was "${{ env.USERNAME}}" (missing space before
          # closing braces), inconsistent with the ghcr sibling workflow
          username: ${{ env.USERNAME }}
          password: ${{ env.PASSWORD }}
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            # Tag each image with the full commit SHA.
            type=sha,enable=true,priority=100,prefix=,suffix=,format=long
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v4
        with:
          context: .
          # Only push images for push events (not pull requests).
          push: ${{ github.event_name != 'pull_request' }}
          file: ./services/ozone/Dockerfile
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          # Use the GitHub Actions layer cache to speed up rebuilds.
          cache-from: type=gha
          cache-to: type=gha,mode=max

View File

@ -0,0 +1,56 @@
# Builds the ozone service container image and pushes it to the GitHub
# Container Registry (ghcr.io) on every push to main.
name: build-and-push-ozone-ghcr
on:
  push:
    branches:
      - main
env:
  REGISTRY: ghcr.io
  USERNAME: ${{ github.actor }}
  PASSWORD: ${{ secrets.GITHUB_TOKEN }}
  # github.repository as <account>/<repo>
  IMAGE_NAME: ${{ github.repository }}
jobs:
  ozone-container-ghcr:
    # Guard: never run on forks, which should not publish images.
    if: github.repository == 'bluesky-social/atproto'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      - name: Setup Docker buildx
        uses: docker/setup-buildx-action@v2
      - name: Log into registry ${{ env.REGISTRY }}
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ env.USERNAME }}
          password: ${{ env.PASSWORD }}
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            # Tag each image as "ozone:<full commit SHA>".
            type=sha,enable=true,priority=100,prefix=ozone:,suffix=,format=long
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v4
        with:
          context: .
          # Only push images for push events (not pull requests).
          push: ${{ github.event_name != 'pull_request' }}
          file: ./services/ozone/Dockerfile
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          # Use the GitHub Actions layer cache to speed up rebuilds.
          cache-from: type=gha
          cache-to: type=gha,mode=max

View File

@ -31,6 +31,7 @@ codegen: ## Re-generate packages from lexicon/ files
cd packages/api; pnpm run codegen
cd packages/pds; pnpm run codegen
cd packages/bsky; pnpm run codegen
cd packages/ozone; pnpm run codegen
# clean up codegen output
pnpm format

View File

@ -294,6 +294,7 @@
"did": { "type": "string", "format": "did" },
"handle": { "type": "string", "format": "handle" },
"email": { "type": "string" },
"relatedRecords": { "type": "array", "items": { "type": "unknown" } },
"indexedAt": { "type": "string", "format": "datetime" },
"invitedBy": {
"type": "ref",

View File

@ -0,0 +1,36 @@
{
"lexicon": 1,
"id": "com.atproto.admin.getAccountInfos",
"defs": {
"main": {
"type": "query",
"description": "Get details about some accounts.",
"parameters": {
"type": "params",
"required": ["dids"],
"properties": {
"dids": {
"type": "array",
"items": { "type": "string", "format": "did" }
}
}
},
"output": {
"encoding": "application/json",
"schema": {
"type": "object",
"required": ["infos"],
"properties": {
"infos": {
"type": "array",
"items": {
"type": "ref",
"ref": "com.atproto.admin.defs#accountView"
}
}
}
}
}
}
}
}

View File

@ -14,6 +14,7 @@ import * as ComAtprotoAdminDisableInviteCodes from './types/com/atproto/admin/di
import * as ComAtprotoAdminEmitModerationEvent from './types/com/atproto/admin/emitModerationEvent'
import * as ComAtprotoAdminEnableAccountInvites from './types/com/atproto/admin/enableAccountInvites'
import * as ComAtprotoAdminGetAccountInfo from './types/com/atproto/admin/getAccountInfo'
import * as ComAtprotoAdminGetAccountInfos from './types/com/atproto/admin/getAccountInfos'
import * as ComAtprotoAdminGetInviteCodes from './types/com/atproto/admin/getInviteCodes'
import * as ComAtprotoAdminGetModerationEvent from './types/com/atproto/admin/getModerationEvent'
import * as ComAtprotoAdminGetRecord from './types/com/atproto/admin/getRecord'
@ -153,6 +154,7 @@ export * as ComAtprotoAdminDisableInviteCodes from './types/com/atproto/admin/di
export * as ComAtprotoAdminEmitModerationEvent from './types/com/atproto/admin/emitModerationEvent'
export * as ComAtprotoAdminEnableAccountInvites from './types/com/atproto/admin/enableAccountInvites'
export * as ComAtprotoAdminGetAccountInfo from './types/com/atproto/admin/getAccountInfo'
export * as ComAtprotoAdminGetAccountInfos from './types/com/atproto/admin/getAccountInfos'
export * as ComAtprotoAdminGetInviteCodes from './types/com/atproto/admin/getInviteCodes'
export * as ComAtprotoAdminGetModerationEvent from './types/com/atproto/admin/getModerationEvent'
export * as ComAtprotoAdminGetRecord from './types/com/atproto/admin/getRecord'
@ -441,6 +443,17 @@ export class AdminNS {
})
}
getAccountInfos(
params?: ComAtprotoAdminGetAccountInfos.QueryParams,
opts?: ComAtprotoAdminGetAccountInfos.CallOptions,
): Promise<ComAtprotoAdminGetAccountInfos.Response> {
return this._service.xrpc
.call('com.atproto.admin.getAccountInfos', params, undefined, opts)
.catch((e) => {
throw ComAtprotoAdminGetAccountInfos.toKnownErr(e)
})
}
getInviteCodes(
params?: ComAtprotoAdminGetInviteCodes.QueryParams,
opts?: ComAtprotoAdminGetInviteCodes.CallOptions,

View File

@ -436,6 +436,12 @@ export const schemaDict = {
email: {
type: 'string',
},
relatedRecords: {
type: 'array',
items: {
type: 'unknown',
},
},
indexedAt: {
type: 'string',
format: 'datetime',
@ -1046,6 +1052,45 @@ export const schemaDict = {
},
},
},
ComAtprotoAdminGetAccountInfos: {
lexicon: 1,
id: 'com.atproto.admin.getAccountInfos',
defs: {
main: {
type: 'query',
description: 'Get details about some accounts.',
parameters: {
type: 'params',
required: ['dids'],
properties: {
dids: {
type: 'array',
items: {
type: 'string',
format: 'did',
},
},
},
},
output: {
encoding: 'application/json',
schema: {
type: 'object',
required: ['infos'],
properties: {
infos: {
type: 'array',
items: {
type: 'ref',
ref: 'lex:com.atproto.admin.defs#accountView',
},
},
},
},
},
},
},
},
ComAtprotoAdminGetInviteCodes: {
lexicon: 1,
id: 'com.atproto.admin.getInviteCodes',
@ -7875,6 +7920,7 @@ export const ids = {
ComAtprotoAdminEmitModerationEvent: 'com.atproto.admin.emitModerationEvent',
ComAtprotoAdminEnableAccountInvites: 'com.atproto.admin.enableAccountInvites',
ComAtprotoAdminGetAccountInfo: 'com.atproto.admin.getAccountInfo',
ComAtprotoAdminGetAccountInfos: 'com.atproto.admin.getAccountInfos',
ComAtprotoAdminGetInviteCodes: 'com.atproto.admin.getInviteCodes',
ComAtprotoAdminGetModerationEvent: 'com.atproto.admin.getModerationEvent',
ComAtprotoAdminGetRecord: 'com.atproto.admin.getRecord',

View File

@ -255,6 +255,7 @@ export interface AccountView {
did: string
handle: string
email?: string
relatedRecords?: {}[]
indexedAt: string
invitedBy?: ComAtprotoServerDefs.InviteCode
invites?: ComAtprotoServerDefs.InviteCode[]

View File

@ -0,0 +1,36 @@
/**
* GENERATED CODE - DO NOT MODIFY
*/
import { Headers, XRPCError } from '@atproto/xrpc'
import { ValidationResult, BlobRef } from '@atproto/lexicon'
import { isObj, hasProp } from '../../../../util'
import { lexicons } from '../../../../lexicons'
import { CID } from 'multiformats/cid'
import * as ComAtprotoAdminDefs from './defs'
export interface QueryParams {
dids: string[]
}
export type InputSchema = undefined
export interface OutputSchema {
infos: ComAtprotoAdminDefs.AccountView[]
[k: string]: unknown
}
export interface CallOptions {
headers?: Headers
}
export interface Response {
success: boolean
headers: Headers
data: OutputSchema
}
export function toKnownErr(e: any) {
if (e instanceof XRPCError) {
}
return e
}

View File

@ -16,18 +16,16 @@ import { ModerationService } from '../../../../services/moderation'
export default function (server: Server, ctx: AppContext) {
const getProfile = createPipeline(skeleton, hydration, noRules, presentation)
server.app.bsky.actor.getProfile({
auth: ctx.authOptionalAccessOrRoleVerifier,
auth: ctx.authVerifier.optionalStandardOrRole,
handler: async ({ auth, params, res }) => {
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const modService = ctx.services.moderation(ctx.db.getPrimary())
const viewer = 'did' in auth.credentials ? auth.credentials.did : null
const canViewTakendownProfile =
auth.credentials.type === 'role' && auth.credentials.triage
const { viewer, canViewTakedowns } = ctx.authVerifier.parseCreds(auth)
const [result, repoRev] = await Promise.allSettled([
getProfile(
{ ...params, viewer, canViewTakendownProfile },
{ ...params, viewer, canViewTakedowns },
{ db, actorService, modService },
),
actorService.getRepoRev(viewer),
@ -52,15 +50,14 @@ const skeleton = async (
params: Params,
ctx: Context,
): Promise<SkeletonState> => {
const { actorService, modService } = ctx
const { canViewTakendownProfile } = params
const { actorService } = ctx
const { canViewTakedowns } = params
const actor = await actorService.getActor(params.actor, true)
if (!actor) {
throw new InvalidRequestError('Profile not found')
}
if (!canViewTakendownProfile && softDeleted(actor)) {
const isSuspended = await modService.isSubjectSuspended(actor.did)
if (isSuspended) {
if (!canViewTakedowns && softDeleted(actor)) {
if (actor.takedownRef?.includes('SUSPEND')) {
throw new InvalidRequestError(
'Account has been temporarily suspended',
'AccountTakedown',
@ -78,10 +75,10 @@ const skeleton = async (
const hydration = async (state: SkeletonState, ctx: Context) => {
const { actorService } = ctx
const { params, actor } = state
const { viewer, canViewTakendownProfile } = params
const { viewer, canViewTakedowns } = params
const hydration = await actorService.views.profileDetailHydration(
[actor.did],
{ viewer, includeSoftDeleted: canViewTakendownProfile },
{ viewer, includeSoftDeleted: canViewTakedowns },
)
return { ...state, ...hydration }
}
@ -110,7 +107,7 @@ type Context = {
type Params = QueryParams & {
viewer: string | null
canViewTakendownProfile: boolean
canViewTakedowns: boolean
}
type SkeletonState = { params: Params; actor: Actor }

View File

@ -13,11 +13,11 @@ import { createPipeline, noRules } from '../../../../pipeline'
export default function (server: Server, ctx: AppContext) {
const getProfile = createPipeline(skeleton, hydration, noRules, presentation)
server.app.bsky.actor.getProfiles({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ auth, params, res }) => {
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const [result, repoRev] = await Promise.all([
getProfile({ ...params, viewer }, { db, actorService }),

View File

@ -17,12 +17,12 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.actor.getSuggestions({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth }) => {
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const graphService = ctx.services.graph(db)
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const result = await getSuggestions(
{ ...params, viewer },

View File

@ -1,18 +1,13 @@
import { sql } from 'kysely'
import AppContext from '../../../../context'
import { Server } from '../../../../lexicon'
import {
cleanQuery,
getUserSearchQuery,
SearchKeyset,
} from '../../../../services/util/search'
import { cleanQuery } from '../../../../services/util/search'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.actor.searchActors({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ auth, params }) => {
const { cursor, limit } = params
const requester = auth.credentials.did
const requester = auth.credentials.iss
const rawQuery = params.q ?? params.term
const query = cleanQuery(rawQuery || '')
const db = ctx.db.getReplica('search')
@ -29,15 +24,11 @@ export default function (server: Server, ctx: AppContext) {
results = res.data.actors.map((a) => a.did)
resCursor = res.data.cursor
} else {
const res = query
? await getUserSearchQuery(db, { query, limit, cursor })
.select('distance')
.selectAll('actor')
.execute()
: []
results = res.map((a) => a.did)
const keyset = new SearchKeyset(sql``, sql``)
resCursor = keyset.packFromResult(res)
const res = await ctx.services
.actor(ctx.db.getReplica('search'))
.getSearchResults({ query, limit, cursor })
results = res.results.map((a) => a.did)
resCursor = res.cursor
}
const actors = await ctx.services

View File

@ -7,10 +7,10 @@ import {
export default function (server: Server, ctx: AppContext) {
server.app.bsky.actor.searchActorsTypeahead({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth }) => {
const { limit } = params
const requester = auth.credentials.did
const requester = auth.credentials.iss
const rawQuery = params.q ?? params.term
const query = cleanQuery(rawQuery || '')
const db = ctx.db.getReplica('search')

View File

@ -6,10 +6,10 @@ import { TimeCidKeyset, paginate } from '../../../../db/pagination'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.feed.getActorFeeds({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ auth, params }) => {
const { actor, limit, cursor } = params
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)

View File

@ -23,9 +23,9 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.feed.getActorLikes({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth, res }) => {
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const feedService = ctx.services.feed(db)

View File

@ -23,14 +23,13 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.feed.getAuthorFeed({
auth: ctx.authOptionalAccessOrRoleVerifier,
auth: ctx.authVerifier.optionalStandardOrRole,
handler: async ({ params, auth, res }) => {
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const feedService = ctx.services.feed(db)
const graphService = ctx.services.graph(db)
const viewer =
auth.credentials.type === 'access' ? auth.credentials.did : null
const { viewer } = ctx.authVerifier.parseCreds(auth)
const [result, repoRev] = await Promise.all([
getAuthorFeed(

View File

@ -33,11 +33,11 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.feed.getFeed({
auth: ctx.authOptionalVerifierAnyAudience,
auth: ctx.authVerifier.standardOptionalAnyAud,
handler: async ({ params, auth, req }) => {
const db = ctx.db.getReplica()
const feedService = ctx.services.feed(db)
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const { timerSkele, timerHydr, ...result } = await getFeed(
{ ...params, viewer },

View File

@ -9,10 +9,10 @@ import AppContext from '../../../../context'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.feed.getFeedGenerator({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth }) => {
const { feed } = params
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const db = ctx.db.getReplica()
const feedService = ctx.services.feed(db)

View File

@ -14,10 +14,10 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.feed.getFeedGenerators({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth }) => {
const { feeds } = params
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const db = ctx.db.getReplica()
const feedService = ctx.services.feed(db)
const actorService = ctx.services.actor(db)

View File

@ -5,10 +5,10 @@ import { toSkeletonItem } from '../../../../feed-gen/types'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.feed.getFeedSkeleton({
auth: ctx.authVerifierAnyAudience,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth }) => {
const { feed } = params
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const localAlgo = ctx.algos[feed]
if (!localAlgo) {

View File

@ -13,12 +13,12 @@ import { createPipeline } from '../../../../pipeline'
export default function (server: Server, ctx: AppContext) {
const getLikes = createPipeline(skeleton, hydration, noBlocks, presentation)
server.app.bsky.feed.getLikes({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth }) => {
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const graphService = ctx.services.graph(db)
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const result = await getLikes(
{ ...params, viewer },

View File

@ -22,9 +22,9 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.feed.getListFeed({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth, res }) => {
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const feedService = ctx.services.feed(db)

View File

@ -31,9 +31,9 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.feed.getPostThread({
auth: ctx.authOptionalAccessOrRoleVerifier,
auth: ctx.authVerifier.optionalStandardOrRole,
handler: async ({ params, auth, res }) => {
const viewer = 'did' in auth.credentials ? auth.credentials.did : null
const { viewer } = ctx.authVerifier.parseCreds(auth)
const db = ctx.db.getReplica('thread')
const feedService = ctx.services.feed(db)
const actorService = ctx.services.actor(db)

View File

@ -14,12 +14,12 @@ import { ActorService } from '../../../../services/actor'
export default function (server: Server, ctx: AppContext) {
const getPosts = createPipeline(skeleton, hydration, noBlocks, presentation)
server.app.bsky.feed.getPosts({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth }) => {
const db = ctx.db.getReplica()
const feedService = ctx.services.feed(db)
const actorService = ctx.services.actor(db)
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const results = await getPosts(
{ ...params, viewer },

View File

@ -18,12 +18,12 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.feed.getRepostedBy({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth }) => {
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const graphService = ctx.services.graph(db)
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const result = await getRepostedBy(
{ ...params, viewer },

View File

@ -4,9 +4,9 @@ import AppContext from '../../../../context'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.feed.getSuggestedFeeds({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ auth }) => {
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const db = ctx.db.getReplica()
const feedService = ctx.services.feed(db)

View File

@ -22,9 +22,9 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.feed.getTimeline({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ params, auth, res }) => {
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const db = ctx.db.getReplica('timeline')
const feedService = ctx.services.feed(db)
const actorService = ctx.services.actor(db)

View File

@ -21,9 +21,9 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.feed.searchPosts({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ auth, params }) => {
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const db = ctx.db.getReplica('search')
const feedService = ctx.services.feed(db)
const actorService = ctx.services.actor(db)

View File

@ -5,10 +5,10 @@ import { notSoftDeletedClause } from '../../../../db/util'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.graph.getBlocks({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ params, auth }) => {
const { limit, cursor } = params
const requester = auth.credentials.did
const requester = auth.credentials.iss
const db = ctx.db.getReplica()
const { ref } = db.db.dynamic

View File

@ -19,17 +19,15 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.graph.getFollowers({
auth: ctx.authOptionalAccessOrRoleVerifier,
auth: ctx.authVerifier.optionalStandardOrRole,
handler: async ({ params, auth }) => {
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const graphService = ctx.services.graph(db)
const viewer = 'did' in auth.credentials ? auth.credentials.did : null
const canViewTakendownProfile =
auth.credentials.type === 'role' && auth.credentials.triage
const { viewer, canViewTakedowns } = ctx.authVerifier.parseCreds(auth)
const result = await getFollowers(
{ ...params, viewer, canViewTakendownProfile },
{ ...params, viewer, canViewTakedowns },
{ db, actorService, graphService },
)
@ -46,10 +44,10 @@ const skeleton = async (
ctx: Context,
): Promise<SkeletonState> => {
const { db, actorService } = ctx
const { limit, cursor, actor, canViewTakendownProfile } = params
const { limit, cursor, actor, canViewTakedowns } = params
const { ref } = db.db.dynamic
const subject = await actorService.getActor(actor, canViewTakendownProfile)
const subject = await actorService.getActor(actor, canViewTakedowns)
if (!subject) {
throw new InvalidRequestError(`Actor not found: ${actor}`)
}
@ -58,7 +56,7 @@ const skeleton = async (
.selectFrom('follow')
.where('follow.subjectDid', '=', subject.did)
.innerJoin('actor as creator', 'creator.did', 'follow.creator')
.if(!canViewTakendownProfile, (qb) =>
.if(!canViewTakedowns, (qb) =>
qb.where(notSoftDeletedClause(ref('creator'))),
)
.selectAll('creator')
@ -130,7 +128,7 @@ type Context = {
type Params = QueryParams & {
viewer: string | null
canViewTakendownProfile: boolean
canViewTakedowns: boolean
}
type SkeletonState = {

View File

@ -19,17 +19,15 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.graph.getFollows({
auth: ctx.authOptionalAccessOrRoleVerifier,
auth: ctx.authVerifier.optionalStandardOrRole,
handler: async ({ params, auth }) => {
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const graphService = ctx.services.graph(db)
const viewer = 'did' in auth.credentials ? auth.credentials.did : null
const canViewTakendownProfile =
auth.credentials.type === 'role' && auth.credentials.triage
const { viewer, canViewTakedowns } = ctx.authVerifier.parseCreds(auth)
const result = await getFollows(
{ ...params, viewer, canViewTakendownProfile },
{ ...params, viewer, canViewTakedowns },
{ db, actorService, graphService },
)
@ -46,10 +44,10 @@ const skeleton = async (
ctx: Context,
): Promise<SkeletonState> => {
const { db, actorService } = ctx
const { limit, cursor, actor, canViewTakendownProfile } = params
const { limit, cursor, actor, canViewTakedowns } = params
const { ref } = db.db.dynamic
const creator = await actorService.getActor(actor, canViewTakendownProfile)
const creator = await actorService.getActor(actor, canViewTakedowns)
if (!creator) {
throw new InvalidRequestError(`Actor not found: ${actor}`)
}
@ -58,7 +56,7 @@ const skeleton = async (
.selectFrom('follow')
.where('follow.creator', '=', creator.did)
.innerJoin('actor as subject', 'subject.did', 'follow.subjectDid')
.if(!canViewTakendownProfile, (qb) =>
.if(!canViewTakedowns, (qb) =>
qb.where(notSoftDeletedClause(ref('subject'))),
)
.selectAll('subject')
@ -131,7 +129,7 @@ type Context = {
type Params = QueryParams & {
viewer: string | null
canViewTakendownProfile: boolean
canViewTakedowns: boolean
}
type SkeletonState = {

View File

@ -13,12 +13,12 @@ import { createPipeline, noRules } from '../../../../pipeline'
export default function (server: Server, ctx: AppContext) {
const getList = createPipeline(skeleton, hydration, noRules, presentation)
server.app.bsky.graph.getList({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth }) => {
const db = ctx.db.getReplica()
const graphService = ctx.services.graph(db)
const actorService = ctx.services.actor(db)
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const result = await getList(
{ ...params, viewer },

View File

@ -17,12 +17,12 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.graph.getListBlocks({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ params, auth }) => {
const db = ctx.db.getReplica()
const graphService = ctx.services.graph(db)
const actorService = ctx.services.actor(db)
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const result = await getListBlocks(
{ ...params, viewer },

View File

@ -5,10 +5,10 @@ import AppContext from '../../../../context'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.graph.getListMutes({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ params, auth }) => {
const { limit, cursor } = params
const requester = auth.credentials.did
const requester = auth.credentials.iss
const db = ctx.db.getReplica()
const { ref } = db.db.dynamic

View File

@ -6,10 +6,10 @@ import AppContext from '../../../../context'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.graph.getLists({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ params, auth }) => {
const { actor, limit, cursor } = params
const requester = auth.credentials.did
const requester = auth.credentials.iss
const db = ctx.db.getReplica()
const { ref } = db.db.dynamic

View File

@ -5,10 +5,10 @@ import { notSoftDeletedClause } from '../../../../db/util'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.graph.getMutes({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ params, auth }) => {
const { limit, cursor } = params
const requester = auth.credentials.did
const requester = auth.credentials.iss
const db = ctx.db.getReplica()
const { ref } = db.db.dynamic

View File

@ -9,10 +9,10 @@ const RESULT_LENGTH = 10
export default function (server: Server, ctx: AppContext) {
server.app.bsky.graph.getSuggestedFollowsByActor({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ auth, params }) => {
const { actor } = params
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)

View File

@ -4,10 +4,10 @@ import AppContext from '../../../../context'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.graph.muteActor({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ auth, input }) => {
const { actor } = input.body
const requester = auth.credentials.did
const requester = auth.credentials.iss
const db = ctx.db.getPrimary()
const subjectDid = await ctx.services.actor(db).getActorDid(actor)

View File

@ -6,10 +6,10 @@ import { AtUri } from '@atproto/syntax'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.graph.muteActorList({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ auth, input }) => {
const { list } = input.body
const requester = auth.credentials.did
const requester = auth.credentials.iss
const db = ctx.db.getPrimary()

View File

@ -4,10 +4,10 @@ import AppContext from '../../../../context'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.graph.unmuteActor({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ auth, input }) => {
const { actor } = input.body
const requester = auth.credentials.did
const requester = auth.credentials.iss
const db = ctx.db.getPrimary()
const subjectDid = await ctx.services.actor(db).getActorDid(actor)

View File

@ -3,10 +3,10 @@ import AppContext from '../../../../context'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.graph.unmuteActorList({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ auth, input }) => {
const { list } = input.body
const requester = auth.credentials.did
const requester = auth.credentials.iss
const db = ctx.db.getPrimary()
await ctx.services.graph(db).unmuteActorList({

View File

@ -6,9 +6,9 @@ import AppContext from '../../../../context'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.notification.getUnreadCount({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ auth, params }) => {
const requester = auth.credentials.did
const requester = auth.credentials.iss
if (params.seenAt) {
throw new InvalidRequestError('The seenAt parameter is unsupported')
}

View File

@ -20,13 +20,13 @@ export default function (server: Server, ctx: AppContext) {
presentation,
)
server.app.bsky.notification.listNotifications({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ params, auth }) => {
const db = ctx.db.getReplica()
const actorService = ctx.services.actor(db)
const graphService = ctx.services.graph(db)
const labelService = ctx.services.label(db)
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const result = await listNotifications(
{ ...params, viewer },

View File

@ -5,13 +5,11 @@ import { Platform } from '../../../../notifications'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.notification.registerPush({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ auth, input }) => {
const { token, platform, serviceDid, appId } = input.body
const {
credentials: { did },
} = auth
if (serviceDid !== auth.artifacts.aud) {
const did = auth.credentials.iss
if (serviceDid !== auth.credentials.aud) {
throw new InvalidRequestError('Invalid serviceDid.')
}
const { notifServer } = ctx

View File

@ -5,10 +5,10 @@ import { excluded } from '../../../../db/util'
export default function (server: Server, ctx: AppContext) {
server.app.bsky.notification.updateSeen({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ input, auth }) => {
const { seenAt } = input.body
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
let parsed: string
try {

View File

@ -8,10 +8,10 @@ import { GeneratorView } from '../../../../lexicon/types/app/bsky/feed/defs'
// THIS IS A TEMPORARY UNSPECCED ROUTE
export default function (server: Server, ctx: AppContext) {
server.app.bsky.unspecced.getPopularFeedGenerators({
auth: ctx.authOptionalVerifier,
auth: ctx.authVerifier.standardOptional,
handler: async ({ auth, params }) => {
const { limit, cursor, query } = params
const requester = auth.credentials.did
const requester = auth.credentials.iss
const db = ctx.db.getReplica()
const { ref } = db.db.dynamic
const feedService = ctx.services.feed(db)

View File

@ -6,11 +6,11 @@ import { toSkeletonItem } from '../../../../feed-gen/types'
// THIS IS A TEMPORARY UNSPECCED ROUTE
export default function (server: Server, ctx: AppContext) {
server.app.bsky.unspecced.getTimelineSkeleton({
auth: ctx.authVerifier,
auth: ctx.authVerifier.standard,
handler: async ({ auth, params }) => {
const db = ctx.db.getReplica('timeline')
const feedService = ctx.services.feed(db)
const viewer = auth.credentials.did
const viewer = auth.credentials.iss
const result = await skeleton({ ...params, viewer }, { db, feedService })

View File

@ -10,7 +10,6 @@ import AppContext from '../context'
import { httpLogger as log } from '../logger'
import { retryHttp } from '../util/retry'
import { Database } from '../db'
import { sql } from 'kysely'
// Resolve and verify blob from its origin host
@ -88,10 +87,10 @@ export async function resolveBlob(
const [{ pds }, takedown] = await Promise.all([
idResolver.did.resolveAtprotoData(did), // @TODO cache did info
db.db
.selectFrom('moderation_subject_status')
.select('id')
.where('blobCids', '@>', sql`CAST(${JSON.stringify([cidStr])} AS JSONB)`)
.where('takendown', 'is', true)
.selectFrom('blob_takedown')
.select('takedownRef')
.where('did', '=', did)
.where('cid', '=', cid.toString())
.executeTakeFirst(),
])
if (takedown) {

View File

@ -1,220 +0,0 @@
import { CID } from 'multiformats/cid'
import { AtUri } from '@atproto/syntax'
import {
AuthRequiredError,
InvalidRequestError,
UpstreamFailureError,
} from '@atproto/xrpc-server'
import { Server } from '../../../../lexicon'
import AppContext from '../../../../context'
import { getSubject } from '../moderation/util'
import {
isModEventLabel,
isModEventReverseTakedown,
isModEventTakedown,
} from '../../../../lexicon/types/com/atproto/admin/defs'
import { TakedownSubjects } from '../../../../services/moderation'
import { retryHttp } from '../../../../util/retry'
export default function (server: Server, ctx: AppContext) {
server.com.atproto.admin.emitModerationEvent({
auth: ctx.roleVerifier,
handler: async ({ input, auth }) => {
const access = auth.credentials
const db = ctx.db.getPrimary()
const moderationService = ctx.services.moderation(db)
const { subject, createdBy, subjectBlobCids, event } = input.body
const isTakedownEvent = isModEventTakedown(event)
const isReverseTakedownEvent = isModEventReverseTakedown(event)
const isLabelEvent = isModEventLabel(event)
// apply access rules
// if less than moderator access then can not takedown an account
if (!access.moderator && isTakedownEvent && 'did' in subject) {
throw new AuthRequiredError(
'Must be a full moderator to perform an account takedown',
)
}
// if less than moderator access then can only take ack and escalation actions
if (!access.moderator && (isTakedownEvent || isReverseTakedownEvent)) {
throw new AuthRequiredError(
'Must be a full moderator to take this type of action',
)
}
// if less than moderator access then can not apply labels
if (!access.moderator && isLabelEvent) {
throw new AuthRequiredError('Must be a full moderator to label content')
}
if (isLabelEvent) {
validateLabels([
...(event.createLabelVals ?? []),
...(event.negateLabelVals ?? []),
])
}
const subjectInfo = getSubject(subject)
if (isTakedownEvent || isReverseTakedownEvent) {
const isSubjectTakendown = await moderationService.isSubjectTakendown(
subjectInfo,
)
if (isSubjectTakendown && isTakedownEvent) {
throw new InvalidRequestError(`Subject is already taken down`)
}
if (!isSubjectTakendown && isReverseTakedownEvent) {
throw new InvalidRequestError(`Subject is not taken down`)
}
}
const { result: moderationEvent, takenDown } = await db.transaction(
async (dbTxn) => {
const moderationTxn = ctx.services.moderation(dbTxn)
const labelTxn = ctx.services.label(dbTxn)
const result = await moderationTxn.logEvent({
event,
subject: subjectInfo,
subjectBlobCids:
subjectBlobCids?.map((cid) => CID.parse(cid)) ?? [],
createdBy,
})
let takenDown: TakedownSubjects | undefined
if (
result.subjectType === 'com.atproto.admin.defs#repoRef' &&
result.subjectDid
) {
// No credentials to revoke on appview
if (isTakedownEvent) {
takenDown = await moderationTxn.takedownRepo({
takedownId: result.id,
did: result.subjectDid,
})
}
if (isReverseTakedownEvent) {
await moderationTxn.reverseTakedownRepo({
did: result.subjectDid,
})
takenDown = {
subjects: [
{
$type: 'com.atproto.admin.defs#repoRef',
did: result.subjectDid,
},
],
did: result.subjectDid,
}
}
}
if (
result.subjectType === 'com.atproto.repo.strongRef' &&
result.subjectUri
) {
const blobCids = subjectBlobCids?.map((cid) => CID.parse(cid)) ?? []
if (isTakedownEvent) {
takenDown = await moderationTxn.takedownRecord({
takedownId: result.id,
uri: new AtUri(result.subjectUri),
// TODO: I think this will always be available for strongRefs?
cid: CID.parse(result.subjectCid as string),
blobCids,
})
}
if (isReverseTakedownEvent) {
await moderationTxn.reverseTakedownRecord({
uri: new AtUri(result.subjectUri),
})
takenDown = {
did: result.subjectDid,
subjects: [
{
$type: 'com.atproto.repo.strongRef',
uri: result.subjectUri,
cid: result.subjectCid ?? '',
},
...blobCids.map((cid) => ({
$type: 'com.atproto.admin.defs#repoBlobRef',
did: result.subjectDid,
cid: cid.toString(),
recordUri: result.subjectUri,
})),
],
}
}
}
if (isLabelEvent) {
await labelTxn.formatAndCreate(
ctx.cfg.labelerDid,
result.subjectUri ?? result.subjectDid,
result.subjectCid,
{
create: result.createLabelVals?.length
? result.createLabelVals.split(' ')
: undefined,
negate: result.negateLabelVals?.length
? result.negateLabelVals.split(' ')
: undefined,
},
)
}
return { result, takenDown }
},
)
if (takenDown && ctx.moderationPushAgent) {
const { did, subjects } = takenDown
if (did && subjects.length > 0) {
const agent = ctx.moderationPushAgent
const results = await Promise.allSettled(
subjects.map((subject) =>
retryHttp(() =>
agent.api.com.atproto.admin.updateSubjectStatus({
subject,
takedown: isTakedownEvent
? {
applied: true,
ref: moderationEvent.id.toString(),
}
: {
applied: false,
},
}),
),
),
)
const hadFailure = results.some((r) => r.status === 'rejected')
if (hadFailure) {
throw new UpstreamFailureError('failed to apply action on PDS')
}
}
}
return {
encoding: 'application/json',
body: await moderationService.views.event(moderationEvent),
}
},
})
}
const validateLabels = (labels: string[]) => {
for (const label of labels) {
for (const char of badChars) {
if (label.includes(char)) {
throw new InvalidRequestError(`Invalid label: ${label}`)
}
}
}
}
const badChars = [' ', ',', ';', `'`, `"`]

View File

@ -0,0 +1,42 @@
import { Server } from '../../../../lexicon'
import AppContext from '../../../../context'
import { Actor } from '../../../../db/tables/actor'
import { mapDefined } from '@atproto/common'
import { INVALID_HANDLE } from '@atproto/syntax'
export default function (server: Server, ctx: AppContext) {
server.com.atproto.admin.getAccountInfos({
auth: ctx.authVerifier.roleOrAdminService,
handler: async ({ params }) => {
const { dids } = params
const db = ctx.db.getPrimary()
const actorService = ctx.services.actor(db)
const [actors, profiles] = await Promise.all([
actorService.getActors(dids, true),
actorService.getProfileRecords(dids, true),
])
const actorByDid = actors.reduce((acc, cur) => {
return acc.set(cur.did, cur)
}, new Map<string, Actor>())
const infos = mapDefined(dids, (did) => {
const info = actorByDid.get(did)
if (!info) return
const profile = profiles.get(did)
return {
did,
handle: info.handle ?? INVALID_HANDLE,
relatedRecords: profile ? [profile] : undefined,
indexedAt: info.indexedAt,
}
})
return {
encoding: 'application/json',
body: {
infos,
},
}
},
})
}

View File

@ -1,19 +0,0 @@
import { Server } from '../../../../lexicon'
import AppContext from '../../../../context'
export default function (server: Server, ctx: AppContext) {
server.com.atproto.admin.getModerationEvent({
auth: ctx.roleVerifier,
handler: async ({ params }) => {
const { id } = params
const db = ctx.db.getPrimary()
const moderationService = ctx.services.moderation(db)
const event = await moderationService.getEventOrThrow(id)
const eventDetail = await moderationService.views.eventDetail(event)
return {
encoding: 'application/json',
body: eventDetail,
}
},
})
}

View File

@ -0,0 +1,73 @@
import { InvalidRequestError } from '@atproto/xrpc-server'
import { Server } from '../../../../lexicon'
import AppContext from '../../../../context'
import { OutputSchema } from '../../../../lexicon/types/com/atproto/admin/getSubjectStatus'
export default function (server: Server, ctx: AppContext) {
server.com.atproto.admin.getSubjectStatus({
auth: ctx.authVerifier.roleOrAdminService,
handler: async ({ params }) => {
const { did, uri, blob } = params
const modService = ctx.services.moderation(ctx.db.getPrimary())
let body: OutputSchema | null = null
if (blob) {
if (!did) {
throw new InvalidRequestError(
'Must provide a did to request blob state',
)
}
const takedown = await modService.getBlobTakedownRef(did, blob)
if (takedown) {
body = {
subject: {
$type: 'com.atproto.admin.defs#repoBlobRef',
did: did,
cid: blob,
},
takedown,
}
}
} else if (uri) {
const [takedown, cidRes] = await Promise.all([
modService.getRecordTakedownRef(uri),
ctx.db
.getPrimary()
.db.selectFrom('record')
.where('uri', '=', uri)
.select('cid')
.executeTakeFirst(),
])
if (cidRes && takedown) {
body = {
subject: {
$type: 'com.atproto.repo.strongRef',
uri,
cid: cidRes.cid,
},
takedown,
}
}
} else if (did) {
const takedown = await modService.getRepoTakedownRef(did)
if (takedown) {
body = {
subject: {
$type: 'com.atproto.admin.defs#repoRef',
did: did,
},
takedown,
}
}
} else {
throw new InvalidRequestError('No provided subject')
}
if (body === null) {
throw new InvalidRequestError('Subject not found', 'NotFound')
}
return {
encoding: 'application/json',
body,
}
},
})
}

View File

@ -1,27 +0,0 @@
import { Server } from '../../../../lexicon'
import AppContext from '../../../../context'
export default function (server: Server, ctx: AppContext) {
server.com.atproto.admin.searchRepos({
auth: ctx.roleVerifier,
handler: async ({ params }) => {
const db = ctx.db.getPrimary()
const moderationService = ctx.services.moderation(db)
const { limit, cursor } = params
// prefer new 'q' query param over deprecated 'term'
const query = params.q ?? params.term
const { results, cursor: resCursor } = await ctx.services
.actor(db)
.getSearchResults({ query, limit, cursor, includeSoftDeleted: true })
return {
encoding: 'application/json',
body: {
cursor: resCursor,
repos: await moderationService.views.repo(results),
},
}
},
})
}

View File

@ -0,0 +1,74 @@
import { AtUri } from '@atproto/syntax'
import { Server } from '../../../../lexicon'
import AppContext from '../../../../context'
import {
isRepoRef,
isRepoBlobRef,
} from '../../../../lexicon/types/com/atproto/admin/defs'
import { isMain as isStrongRef } from '../../../../lexicon/types/com/atproto/repo/strongRef'
import { AuthRequiredError, InvalidRequestError } from '@atproto/xrpc-server'
import { CID } from 'multiformats/cid'
export default function (server: Server, ctx: AppContext) {
server.com.atproto.admin.updateSubjectStatus({
auth: ctx.authVerifier.roleOrAdminService,
handler: async ({ input, auth }) => {
const { canPerformTakedown } = ctx.authVerifier.parseCreds(auth)
if (!canPerformTakedown) {
throw new AuthRequiredError(
'Must be a full moderator to update subject state',
)
}
const modService = ctx.services.moderation(ctx.db.getPrimary())
const { subject, takedown } = input.body
if (takedown) {
if (isRepoRef(subject)) {
const did = subject.did
if (takedown.applied) {
await modService.takedownRepo({
takedownRef: takedown.ref ?? new Date().toISOString(),
did,
})
} else {
await modService.reverseTakedownRepo({ did })
}
} else if (isStrongRef(subject)) {
const uri = new AtUri(subject.uri)
const cid = CID.parse(subject.cid)
if (takedown.applied) {
await modService.takedownRecord({
takedownRef: takedown.ref ?? new Date().toISOString(),
uri,
cid,
})
} else {
await modService.reverseTakedownRecord({ uri })
}
} else if (isRepoBlobRef(subject)) {
const { did, cid } = subject
if (takedown.applied) {
await modService.takedownBlob({
takedownRef: takedown.ref ?? new Date().toISOString(),
did,
cid,
})
} else {
await modService.reverseTakedownBlob({ did, cid })
}
} else {
throw new InvalidRequestError('Invalid subject')
}
}
return {
encoding: 'application/json',
body: {
subject,
takedown,
},
}
},
})
}

View File

@ -1,53 +0,0 @@
import { AuthRequiredError, ForbiddenError } from '@atproto/xrpc-server'
import { Server } from '../../../../lexicon'
import AppContext from '../../../../context'
import { getReasonType, getSubject } from './util'
import { softDeleted } from '../../../../db/util'
import { REASONAPPEAL } from '../../../../lexicon/types/com/atproto/moderation/defs'
export default function (server: Server, ctx: AppContext) {
server.com.atproto.moderation.createReport({
// @TODO anonymous reports w/ optional auth are a temporary measure
auth: ctx.authOptionalVerifier,
handler: async ({ input, auth }) => {
const { reasonType, reason, subject } = input.body
const requester = auth.credentials.did
const db = ctx.db.getPrimary()
if (requester) {
// Don't accept reports from users that are fully taken-down
const actor = await ctx.services.actor(db).getActor(requester, true)
if (actor && softDeleted(actor)) {
throw new AuthRequiredError()
}
}
const reportReasonType = getReasonType(reasonType)
const reportSubject = getSubject(subject)
const subjectDid =
'did' in reportSubject ? reportSubject.did : reportSubject.uri.host
// If the report is an appeal, the requester must be the author of the subject
if (reasonType === REASONAPPEAL && requester !== subjectDid) {
throw new ForbiddenError('You cannot appeal this report')
}
const report = await db.transaction(async (dbTxn) => {
const moderationTxn = ctx.services.moderation(dbTxn)
return moderationTxn.report({
reasonType: reportReasonType,
reason,
subject: reportSubject,
reportedBy: requester || ctx.cfg.serverDid,
})
})
const moderationService = ctx.services.moderation(db)
return {
encoding: 'application/json',
body: moderationService.views.reportPublic(report),
}
},
})
}

View File

@ -40,16 +40,11 @@ import updateSeen from './app/bsky/notification/updateSeen'
import registerPush from './app/bsky/notification/registerPush'
import getPopularFeedGenerators from './app/bsky/unspecced/getPopularFeedGenerators'
import getTimelineSkeleton from './app/bsky/unspecced/getTimelineSkeleton'
import createReport from './com/atproto/moderation/createReport'
import emitModerationEvent from './com/atproto/admin/emitModerationEvent'
import searchRepos from './com/atproto/admin/searchRepos'
import adminGetRecord from './com/atproto/admin/getRecord'
import getRepo from './com/atproto/admin/getRepo'
import queryModerationStatuses from './com/atproto/admin/queryModerationStatuses'
import getSubjectStatus from './com/atproto/admin/getSubjectStatus'
import updateSubjectStatus from './com/atproto/admin/updateSubjectStatus'
import getAccountInfos from './com/atproto/admin/getAccountInfos'
import resolveHandle from './com/atproto/identity/resolveHandle'
import getRecord from './com/atproto/repo/getRecord'
import queryModerationEvents from './com/atproto/admin/queryModerationEvents'
import getModerationEvent from './com/atproto/admin/getModerationEvent'
import fetchLabels from './com/atproto/temp/fetchLabels'
export * as health from './health'
@ -101,14 +96,9 @@ export default function (server: Server, ctx: AppContext) {
getPopularFeedGenerators(server, ctx)
getTimelineSkeleton(server, ctx)
// com.atproto
createReport(server, ctx)
emitModerationEvent(server, ctx)
searchRepos(server, ctx)
adminGetRecord(server, ctx)
getRepo(server, ctx)
getModerationEvent(server, ctx)
queryModerationEvents(server, ctx)
queryModerationStatuses(server, ctx)
getSubjectStatus(server, ctx)
updateSubjectStatus(server, ctx)
getAccountInfos(server, ctx)
resolveHandle(server, ctx)
getRecord(server, ctx)
fetchLabels(server, ctx)

View File

@ -0,0 +1,275 @@
import {
AuthRequiredError,
verifyJwt as verifyServiceJwt,
} from '@atproto/xrpc-server'
import { IdResolver } from '@atproto/identity'
import * as ui8 from 'uint8arrays'
import express from 'express'
type ReqCtx = {
req: express.Request
}
export enum RoleStatus {
Valid,
Invalid,
Missing,
}
type NullOutput = {
credentials: {
type: 'null'
iss: null
}
}
type StandardOutput = {
credentials: {
type: 'standard'
aud: string
iss: string
}
}
type RoleOutput = {
credentials: {
type: 'role'
admin: boolean
moderator: boolean
triage: boolean
}
}
type AdminServiceOutput = {
credentials: {
type: 'admin_service'
aud: string
iss: string
}
}
export type AuthVerifierOpts = {
ownDid: string
adminDid: string
adminPass: string
moderatorPass: string
triagePass: string
}
export class AuthVerifier {
private _adminPass: string
private _moderatorPass: string
private _triagePass: string
public ownDid: string
public adminDid: string
constructor(public idResolver: IdResolver, opts: AuthVerifierOpts) {
this._adminPass = opts.adminPass
this._moderatorPass = opts.moderatorPass
this._triagePass = opts.triagePass
this.ownDid = opts.ownDid
this.adminDid = opts.adminDid
}
// verifiers (arrow fns to preserve scope)
standard = async (ctx: ReqCtx): Promise<StandardOutput> => {
const { iss, aud } = await this.verifyServiceJwt(ctx, {
aud: this.ownDid,
iss: null,
})
return { credentials: { type: 'standard', iss, aud } }
}
standardOptional = async (
ctx: ReqCtx,
): Promise<StandardOutput | NullOutput> => {
if (isBearerToken(ctx.req)) {
return this.standard(ctx)
}
return this.nullCreds()
}
standardOptionalAnyAud = async (
ctx: ReqCtx,
): Promise<StandardOutput | NullOutput> => {
if (!isBearerToken(ctx.req)) {
return this.nullCreds()
}
const { iss, aud } = await this.verifyServiceJwt(ctx, {
aud: null,
iss: null,
})
return { credentials: { type: 'standard', iss, aud } }
}
role = (ctx: ReqCtx): RoleOutput => {
const creds = this.parseRoleCreds(ctx.req)
if (creds.status !== RoleStatus.Valid) {
throw new AuthRequiredError()
}
return {
credentials: {
...creds,
type: 'role',
},
}
}
standardOrRole = async (
ctx: ReqCtx,
): Promise<StandardOutput | RoleOutput> => {
if (isBearerToken(ctx.req)) {
return this.standard(ctx)
} else {
return this.role(ctx)
}
}
optionalStandardOrRole = async (
ctx: ReqCtx,
): Promise<StandardOutput | RoleOutput | NullOutput> => {
if (isBearerToken(ctx.req)) {
return await this.standard(ctx)
} else {
const creds = this.parseRoleCreds(ctx.req)
if (creds.status === RoleStatus.Valid) {
return {
credentials: {
...creds,
type: 'role',
},
}
} else if (creds.status === RoleStatus.Missing) {
return this.nullCreds()
} else {
throw new AuthRequiredError()
}
}
}
adminService = async (reqCtx: ReqCtx): Promise<AdminServiceOutput> => {
const { iss, aud } = await this.verifyServiceJwt(reqCtx, {
aud: this.ownDid,
iss: [this.adminDid],
})
return { credentials: { type: 'admin_service', aud, iss } }
}
roleOrAdminService = async (
reqCtx: ReqCtx,
): Promise<RoleOutput | AdminServiceOutput> => {
if (isBearerToken(reqCtx.req)) {
return this.adminService(reqCtx)
} else {
return this.role(reqCtx)
}
}
parseRoleCreds(req: express.Request) {
const parsed = parseBasicAuth(req.headers.authorization || '')
const { Missing, Valid, Invalid } = RoleStatus
if (!parsed) {
return { status: Missing, admin: false, moderator: false, triage: false }
}
const { username, password } = parsed
if (username === 'admin' && password === this._adminPass) {
return { status: Valid, admin: true, moderator: true, triage: true }
}
if (username === 'admin' && password === this._moderatorPass) {
return { status: Valid, admin: false, moderator: true, triage: true }
}
if (username === 'admin' && password === this._triagePass) {
return { status: Valid, admin: false, moderator: false, triage: true }
}
return { status: Invalid, admin: false, moderator: false, triage: false }
}
async verifyServiceJwt(
reqCtx: ReqCtx,
opts: { aud: string | null; iss: string[] | null },
) {
const getSigningKey = async (
did: string,
forceRefresh: boolean,
): Promise<string> => {
if (opts.iss !== null && !opts.iss.includes(did)) {
throw new AuthRequiredError('Untrusted issuer', 'UntrustedIss')
}
return this.idResolver.did.resolveAtprotoKey(did, forceRefresh)
}
const jwtStr = bearerTokenFromReq(reqCtx.req)
if (!jwtStr) {
throw new AuthRequiredError('missing jwt', 'MissingJwt')
}
const payload = await verifyServiceJwt(jwtStr, opts.aud, getSigningKey)
return { iss: payload.iss, aud: payload.aud }
}
nullCreds(): NullOutput {
return {
credentials: {
type: 'null',
iss: null,
},
}
}
parseCreds(
creds: StandardOutput | RoleOutput | AdminServiceOutput | NullOutput,
) {
const viewer =
creds.credentials.type === 'standard' ? creds.credentials.iss : null
const canViewTakedowns =
(creds.credentials.type === 'role' && creds.credentials.triage) ||
creds.credentials.type === 'admin_service'
const canPerformTakedown =
(creds.credentials.type === 'role' && creds.credentials.moderator) ||
creds.credentials.type === 'admin_service'
return {
viewer,
canViewTakedowns,
canPerformTakedown,
}
}
}
// HELPERS
// ---------
const BEARER = 'Bearer '
const BASIC = 'Basic '
const isBearerToken = (req: express.Request): boolean => {
return req.headers.authorization?.startsWith(BEARER) ?? false
}
const bearerTokenFromReq = (req: express.Request) => {
const header = req.headers.authorization || ''
if (!header.startsWith(BEARER)) return null
return header.slice(BEARER.length).trim()
}
export const parseBasicAuth = (
token: string,
): { username: string; password: string } | null => {
if (!token.startsWith(BASIC)) return null
const b64 = token.slice(BASIC.length)
let parsed: string[]
try {
parsed = ui8.toString(ui8.fromString(b64, 'base64pad'), 'utf8').split(':')
} catch (err) {
return null
}
const [username, password] = parsed
if (!username || !password) return null
return { username, password }
}
export const buildBasicAuth = (username: string, password: string): string => {
return (
BASIC +
ui8.toString(ui8.fromString(`${username}:${password}`, 'utf8'), 'base64pad')
)
}

View File

@ -6,16 +6,12 @@ import { PrimaryDatabase } from '../db'
import { IdResolver } from '@atproto/identity'
import { BackgroundQueue } from '../background'
import { IndexerConfig } from '../indexer/config'
import { buildBasicAuth } from '../auth'
import { buildBasicAuth } from '../auth-verifier'
import { CID } from 'multiformats/cid'
import { LabelService } from '../services/label'
import { ModerationService } from '../services/moderation'
import { ImageFlagger } from './abyss'
import { HiveLabeler, ImgLabeler } from './hive'
import { KeywordLabeler, TextLabeler } from './keyword'
import { ids } from '../lexicon/lexicons'
import { ImageUriBuilder } from '../image/uri'
import { ImageInvalidator } from '../image/invalidator'
import { Abyss } from './abyss'
import { FuzzyMatcher, TextFlagger } from './fuzzy-matcher'
import {
@ -24,43 +20,21 @@ import {
} from '../lexicon/types/com/atproto/moderation/defs'
export class AutoModerator {
public pushAgent?: AtpAgent
public pushAgent: AtpAgent
public imageFlagger?: ImageFlagger
public textFlagger?: TextFlagger
public imgLabeler?: ImgLabeler
public textLabeler?: TextLabeler
services: {
label: (db: PrimaryDatabase) => LabelService
moderation?: (db: PrimaryDatabase) => ModerationService
}
constructor(
public ctx: {
db: PrimaryDatabase
idResolver: IdResolver
cfg: IndexerConfig
backgroundQueue: BackgroundQueue
imgUriBuilder?: ImageUriBuilder
imgInvalidator?: ImageInvalidator
},
) {
const { imgUriBuilder, imgInvalidator } = ctx
const { hiveApiKey, abyssEndpoint, abyssPassword } = ctx.cfg
this.services = {
label: LabelService.creator(null),
}
if (imgUriBuilder && imgInvalidator) {
this.services.moderation = ModerationService.creator(
imgUriBuilder,
imgInvalidator,
)
} else {
log.error(
{ imgUriBuilder, imgInvalidator },
'moderation service not properly configured',
)
}
this.imgLabeler = hiveApiKey ? new HiveLabeler(hiveApiKey, ctx) : undefined
this.textLabeler = new KeywordLabeler(ctx.cfg.labelerKeywords)
if (abyssEndpoint && abyssPassword) {
@ -79,7 +53,6 @@ export class AutoModerator {
)
}
if (ctx.cfg.moderationPushUrl) {
const url = new URL(ctx.cfg.moderationPushUrl)
this.pushAgent = new AtpAgent({ service: url.origin })
this.pushAgent.api.setHeader(
@ -87,7 +60,6 @@ export class AutoModerator {
buildBasicAuth(url.username, url.password),
)
}
}
processRecord(uri: AtUri, cid: CID, obj: unknown) {
this.ctx.backgroundQueue.add(async () => {
@ -133,7 +105,7 @@ export class AutoModerator {
...imgs.map((cid) => this.imgLabeler?.labelImg(uri.host, cid)),
])
const labels = dedupe(allLabels.flat())
await this.storeLabels(uri, recordCid, labels)
await this.pushLabels(uri, recordCid, labels)
}
async flagRecordText(uri: AtUri, cid: CID, text: string[]) {
@ -156,22 +128,22 @@ export class AutoModerator {
if (!this.textFlagger) return
const matches = this.textFlagger.getMatches(text)
if (matches.length < 1) return
await this.ctx.db.transaction(async (dbTxn) => {
if (!this.services.moderation) {
log.error(
{ subject, text, matches },
'no moderation service setup to flag record text',
)
return
const formattedSubject =
'did' in subject
? {
$type: 'com.atproto.admin.defs#repoRef',
did: subject.did,
}
return this.services.moderation(dbTxn).report({
: {
$type: 'com.atproto.repo.strongRef',
uri: subject.uri.toString(),
cid: subject.cid.toString(),
}
await this.pushAgent.api.com.atproto.moderation.createReport({
reasonType: REASONOTHER,
reason: `Automatically flagged for possible slurs: ${matches.join(
', ',
)}`,
subject,
reportedBy: this.ctx.cfg.labelerDid,
})
reason: `Automatically flagged for possible slurs: ${matches.join(', ')}`,
subject: formattedSubject,
reportedBy: this.ctx.cfg.serverDid,
})
}
@ -226,27 +198,17 @@ export class AutoModerator {
'hard takedown of record (and blobs) based on auto-matching',
)
if (this.services.moderation) {
await this.ctx.db.transaction(async (dbTxn) => {
// directly/locally create report, even if we use pushAgent for the takedown. don't have acctual account credentials for pushAgent, only admin auth
if (!this.services.moderation) {
// checked above, outside the transaction
return
}
const modSrvc = this.services.moderation(dbTxn)
await modSrvc.report({
reportedBy: this.ctx.cfg.labelerDid,
await this.pushAgent.com.atproto.moderation.createReport({
reportedBy: this.ctx.cfg.serverDid,
reasonType: REASONVIOLATION,
subject: {
uri: uri,
cid: recordCid,
$type: 'com.atproto.repo.strongRef',
uri: uri.toString(),
cid: recordCid.toString(),
},
reason: reportReason,
})
})
}
if (this.pushAgent) {
await this.pushAgent.com.atproto.admin.emitModerationEvent({
event: {
$type: 'com.atproto.admin.defs#modEventTakedown',
@ -258,61 +220,27 @@ export class AutoModerator {
cid: recordCid.toString(),
},
subjectBlobCids: takedownCids.map((c) => c.toString()),
createdBy: this.ctx.cfg.labelerDid,
createdBy: this.ctx.cfg.serverDid,
})
} else {
await this.ctx.db.transaction(async (dbTxn) => {
if (!this.services.moderation) {
throw new Error('no mod push agent or uri invalidator setup')
}
const modSrvc = this.services.moderation(dbTxn)
const action = await modSrvc.logEvent({
event: {
$type: 'com.atproto.admin.defs#modEventTakedown',
comment: takedownReason,
},
subject: { uri, cid: recordCid },
subjectBlobCids: takedownCids,
createdBy: this.ctx.cfg.labelerDid,
})
await modSrvc.takedownRecord({
takedownId: action.id,
uri: uri,
cid: recordCid,
blobCids: takedownCids,
})
})
}
}
async storeLabels(uri: AtUri, cid: CID, labels: string[]): Promise<void> {
async pushLabels(uri: AtUri, cid: CID, labels: string[]): Promise<void> {
if (labels.length < 1) return
// Given that moderation service is available, log the labeling event for historical purposes
if (this.services.moderation) {
await this.ctx.db.transaction(async (dbTxn) => {
if (!this.services.moderation) return
const modSrvc = this.services.moderation(dbTxn)
await modSrvc.logEvent({
await this.pushAgent.com.atproto.admin.emitModerationEvent({
event: {
$type: 'com.atproto.admin.defs#modEventLabel',
comment: '[AutoModerator]: Applying labels',
createLabelVals: labels,
negateLabelVals: [],
comment: '[AutoModerator]: Applying labels',
},
subject: { uri, cid },
createdBy: this.ctx.cfg.labelerDid,
subject: {
$type: 'com.atproto.repo.strongRef',
uri: uri.toString(),
cid: cid.toString(),
},
createdBy: this.ctx.cfg.serverDid,
})
})
}
const labelSrvc = this.services.label(this.ctx.db)
await labelSrvc.formatAndCreate(
this.ctx.cfg.labelerDid,
uri.toString(),
cid.toString(),
{ create: labels },
)
}
async processAll() {

View File

@ -31,11 +31,10 @@ export interface ServerConfigValues {
imgUriEndpoint?: string
blobCacheLocation?: string
searchEndpoint?: string
labelerDid: string
adminPassword: string
moderatorPassword?: string
triagePassword?: string
moderationPushUrl?: string
moderatorPassword: string
triagePassword: string
modServiceDid: string
rateLimitsEnabled: boolean
rateLimitBypassKey?: string
rateLimitBypassIps?: string[]
@ -110,14 +109,17 @@ export class ServerConfig {
)
const dbPostgresSchema = process.env.DB_POSTGRES_SCHEMA
assert(dbPrimaryPostgresUrl)
const adminPassword = process.env.ADMIN_PASSWORD || 'admin'
const adminPassword = process.env.ADMIN_PASSWORD || undefined
assert(adminPassword)
const moderatorPassword = process.env.MODERATOR_PASSWORD || undefined
assert(moderatorPassword)
const triagePassword = process.env.TRIAGE_PASSWORD || undefined
const labelerDid = process.env.LABELER_DID || 'did:example:labeler'
const moderationPushUrl =
overrides?.moderationPushUrl ||
process.env.MODERATION_PUSH_URL ||
assert(triagePassword)
const modServiceDid =
overrides?.modServiceDid ||
process.env.MODERATION_SERVICE_DID ||
undefined
assert(modServiceDid)
const rateLimitsEnabled = process.env.RATE_LIMITS_ENABLED === 'true'
const rateLimitBypassKey = process.env.RATE_LIMIT_BYPASS_KEY
const rateLimitBypassIps = process.env.RATE_LIMIT_BYPASS_IPS
@ -150,11 +152,10 @@ export class ServerConfig {
imgUriEndpoint,
blobCacheLocation,
searchEndpoint,
labelerDid,
adminPassword,
moderatorPassword,
triagePassword,
moderationPushUrl,
modServiceDid,
rateLimitsEnabled,
rateLimitBypassKey,
rateLimitBypassIps,
@ -267,10 +268,6 @@ export class ServerConfig {
return this.cfg.searchEndpoint
}
get labelerDid() {
return this.cfg.labelerDid
}
get adminPassword() {
return this.cfg.adminPassword
}
@ -283,8 +280,8 @@ export class ServerConfig {
return this.cfg.triagePassword
}
get moderationPushUrl() {
return this.cfg.moderationPushUrl
get modServiceDid() {
return this.cfg.modServiceDid
}
get rateLimitsEnabled() {

View File

@ -7,15 +7,14 @@ import { DatabaseCoordinator } from './db'
import { ServerConfig } from './config'
import { ImageUriBuilder } from './image/uri'
import { Services } from './services'
import * as auth from './auth'
import DidRedisCache from './did-cache'
import { BackgroundQueue } from './background'
import { MountedAlgos } from './feed-gen/types'
import { NotificationServer } from './notifications'
import { Redis } from './redis'
import { AuthVerifier } from './auth-verifier'
export class AppContext {
public moderationPushAgent: AtpAgent | undefined
constructor(
private opts: {
db: DatabaseCoordinator
@ -30,17 +29,9 @@ export class AppContext {
searchAgent?: AtpAgent
algos: MountedAlgos
notifServer: NotificationServer
authVerifier: AuthVerifier
},
) {
if (opts.cfg.moderationPushUrl) {
const url = new URL(opts.cfg.moderationPushUrl)
this.moderationPushAgent = new AtpAgent({ service: url.origin })
this.moderationPushAgent.api.setHeader(
'authorization',
auth.buildBasicAuth(url.username, url.password),
)
}
}
) {}
get db(): DatabaseCoordinator {
return this.opts.db
@ -86,30 +77,8 @@ export class AppContext {
return this.opts.searchAgent
}
get authVerifier() {
return auth.authVerifier(this.idResolver, { aud: this.cfg.serverDid })
}
get authVerifierAnyAudience() {
return auth.authVerifier(this.idResolver, { aud: null })
}
get authOptionalVerifierAnyAudience() {
return auth.authOptionalVerifier(this.idResolver, { aud: null })
}
get authOptionalVerifier() {
return auth.authOptionalVerifier(this.idResolver, {
aud: this.cfg.serverDid,
})
}
get authOptionalAccessOrRoleVerifier() {
return auth.authOptionalAccessOrRoleVerifier(this.idResolver, this.cfg)
}
get roleVerifier() {
return auth.roleVerifier(this.cfg)
get authVerifier(): AuthVerifier {
return this.opts.authVerifier
}
async serviceAuthJwt(aud: string) {

View File

@ -30,6 +30,7 @@ import * as algo from './tables/algo'
import * as viewParam from './tables/view-param'
import * as suggestedFollow from './tables/suggested-follow'
import * as suggestedFeed from './tables/suggested-feed'
import * as blobTakedown from './tables/blob-takedown'
export type DatabaseSchemaType = duplicateRecord.PartialDB &
profile.PartialDB &
@ -61,7 +62,8 @@ export type DatabaseSchemaType = duplicateRecord.PartialDB &
algo.PartialDB &
viewParam.PartialDB &
suggestedFollow.PartialDB &
suggestedFeed.PartialDB
suggestedFeed.PartialDB &
blobTakedown.PartialDB
export type DatabaseSchema = Kysely<DatabaseSchemaType>

View File

@ -1,23 +0,0 @@
import { Kysely } from 'kysely'
/**
 * Migration (up): add appeal tracking to `moderation_subject_status` —
 * a `lastAppealedAt` varchar column and an `appealed` boolean column.
 */
export async function up(db: Kysely<unknown>): Promise<void> {
  // Kysely builders are immutable, so the shared base can be reused
  // for both single-column alterations.
  const alter = db.schema.alterTable('moderation_subject_status')
  await alter.addColumn('lastAppealedAt', 'varchar').execute()
  await alter.addColumn('appealed', 'boolean').execute()
}
/**
 * Migration (down): remove the appeal tracking columns added by `up`
 * (`lastAppealedAt`, `appealed`) from `moderation_subject_status`.
 */
export async function down(db: Kysely<unknown>): Promise<void> {
  // Mirror of `up`: drop each column in its own statement, same table.
  const alter = db.schema.alterTable('moderation_subject_status')
  await alter.dropColumn('lastAppealedAt').execute()
  await alter.dropColumn('appealed').execute()
}

View File

@ -0,0 +1,66 @@
import { Kysely } from 'kysely'
/**
 * Migration (up): introduce blob-level takedowns and switch actor/record
 * takedown tracking from an integer fkey into `moderation_event` to a
 * free-form `takedownRef` string.
 */
export async function up(db: Kysely<unknown>): Promise<void> {
  // New table tracking blob takedowns, keyed by (did, cid).
  await db.schema
    .createTable('blob_takedown')
    .addColumn('did', 'varchar', (col) => col.notNull())
    .addColumn('cid', 'varchar', (col) => col.notNull())
    .addColumn('takedownRef', 'varchar', (col) => col.notNull())
    .addPrimaryKeyConstraint('blob_takedown_pkey', ['did', 'cid'])
    .execute()

  // Both tables undergo the identical column swap; the fkey constraint
  // must be dropped before its column can be removed.
  for (const table of ['actor', 'record'] as const) {
    await db.schema
      .alterTable(table)
      .dropConstraint(`${table}_takedown_id_fkey`)
      .execute()
    await db.schema.alterTable(table).dropColumn('takedownId').execute()
    await db.schema
      .alterTable(table)
      .addColumn('takedownRef', 'varchar')
      .execute()
  }
}
/**
 * Migration (down): drop the `blob_takedown` table and restore the
 * integer `takedownId` column (with its fkey into `moderation_event`)
 * on both `actor` and `record`.
 */
export async function down(db: Kysely<unknown>): Promise<void> {
  await db.schema.dropTable('blob_takedown').execute()

  // Reverse of `up`, applied identically to both tables: remove the
  // string ref, re-add the integer column, then reattach the fkey.
  for (const table of ['actor', 'record'] as const) {
    await db.schema.alterTable(table).dropColumn('takedownRef').execute()
    await db.schema
      .alterTable(table)
      .addColumn('takedownId', 'integer')
      .execute()
    await db.schema
      .alterTable(table)
      .addForeignKeyConstraint(
        `${table}_takedown_id_fkey`,
        ['takedownId'],
        'moderation_event',
        ['id'],
      )
      .execute()
  }
}

View File

@ -32,4 +32,4 @@ export * as _20230920T213858047Z from './20230920T213858047Z-add-tags-to-post'
export * as _20230929T192920807Z from './20230929T192920807Z-record-cursor-indexes'
export * as _20231003T202833377Z from './20231003T202833377Z-create-moderation-subject-status'
export * as _20231205T000257238Z from './20231205T000257238Z-remove-did-cache'
export * as _20231213T181744386Z from './20231213T181744386Z-moderation-subject-appeal'
export * as _20231220T225126090Z from './20231220T225126090Z-blob-takedowns'

View File

@ -1,125 +0,0 @@
import { wait } from '@atproto/common'
import { Leader } from './leader'
import { dbLogger } from '../logger'
import AppContext from '../context'
import { AtUri } from '@atproto/api'
import { ModerationSubjectStatusRow } from '../services/moderation/types'
import { CID } from 'multiformats/cid'
import AtpAgent from '@atproto/api'
import { retryHttp } from '../util/retry'
// Advisory lock id used for leader election of the reversal job.
export const MODERATION_ACTION_REVERSAL_ID = 1011

/**
 * Background job that finds moderation subjects whose action is due for
 * reversal and reverts them. Runs on a once-a-minute cadence, with
 * db-backed leader election so only one instance is active at a time.
 */
export class PeriodicModerationEventReversal {
  leader = new Leader(
    MODERATION_ACTION_REVERSAL_ID,
    this.appContext.db.getPrimary(),
  )
  destroyed = false
  // Optional agent used to fan reversed takedowns out to a push endpoint.
  pushAgent?: AtpAgent

  constructor(private appContext: AppContext) {
    this.pushAgent = appContext.moderationPushAgent
  }

  /**
   * Revert the last reversible event for one subject, inside its own
   * transaction. When a takedown is reversed and a push agent is
   * configured, also notifies the remote that the takedown no longer
   * applies (best-effort with retries).
   */
  async revertState(eventRow: ModerationSubjectStatusRow) {
    await this.appContext.db.getPrimary().transaction(async (dbTxn) => {
      const moderationTxn = this.appContext.services.moderation(dbTxn)
      const originalEvent =
        await moderationTxn.getLastReversibleEventForSubject(eventRow)
      if (originalEvent) {
        const { restored } = await moderationTxn.revertState({
          action: originalEvent.action,
          createdBy: originalEvent.createdBy,
          comment:
            '[SCHEDULED_REVERSAL] Reverting action as originally scheduled',
          // Subject is a record ref when a record path/cid is present on
          // the status row, otherwise the whole repo (did).
          subject:
            eventRow.recordPath && eventRow.recordCid
              ? {
                  uri: AtUri.make(
                    eventRow.did,
                    ...eventRow.recordPath.split('/'),
                  ),
                  cid: CID.parse(eventRow.recordCid),
                }
              : { did: eventRow.did },
          createdAt: new Date(),
        })

        const { pushAgent } = this
        if (
          originalEvent.action === 'com.atproto.admin.defs#modEventTakedown' &&
          restored?.subjects?.length &&
          pushAgent
        ) {
          // allSettled: one failed push should not abort the others.
          await Promise.allSettled(
            restored.subjects.map((subject) =>
              retryHttp(() =>
                pushAgent.api.com.atproto.admin.updateSubjectStatus({
                  subject,
                  takedown: {
                    applied: false,
                  },
                }),
              ),
            ),
          )
        }
      }
    })
  }

  /** Find all subjects due for reversal and revert them in parallel. */
  async findAndRevertDueActions() {
    const moderationService = this.appContext.services.moderation(
      this.appContext.db.getPrimary(),
    )
    const subjectsDueForReversal =
      await moderationService.getSubjectsDueForReversal()

    // We shouldn't have too many actions due for reversal at any given time, so running in parallel is probably fine
    // Internally, each reversal runs within its own transaction
    await Promise.all(subjectsDueForReversal.map(this.revertState.bind(this)))
  }

  /**
   * Main loop: compete for leadership, then run the reversal sweep once a
   * minute, aligned to wall-clock minute boundaries. On error or loss of
   * leadership, back off ~10s (with jitter) and retry until destroyed.
   */
  async run() {
    while (!this.destroyed) {
      try {
        const { ran } = await this.leader.run(async ({ signal }) => {
          while (!signal.aborted) {
            // super basic synchronization by agreeing when the intervals land relative to unix timestamp
            const now = Date.now()
            const intervalMs = 1000 * 60
            const nextIteration = Math.ceil(now / intervalMs)
            const nextInMs = nextIteration * intervalMs - now
            await wait(nextInMs)
            if (signal.aborted) break
            await this.findAndRevertDueActions()
          }
        })
        // The leader callback only returns once its signal aborts, so a
        // completed run while not destroyed indicates something is wrong.
        if (ran && !this.destroyed) {
          throw new Error('View maintainer completed, but should be persistent')
        }
      } catch (err) {
        dbLogger.error(
          {
            err,
            lockId: MODERATION_ACTION_REVERSAL_ID,
          },
          'moderation action reversal errored',
        )
      }
      if (!this.destroyed) {
        await wait(10000 + jitter(2000))
      }
    }
  }

  /** Stop the loop and release leadership. */
  destroy() {
    this.destroyed = true
    this.leader.destroy()
  }
}
/** Uniform random integer jitter in the range [-maxMs, maxMs]. */
function jitter(maxMs) {
  const centered = Math.random() * 2 - 1 // uniform in [-1, 1)
  return Math.round(centered * maxMs)
}

View File

@ -2,7 +2,7 @@ export interface Actor {
did: string
handle: string | null
indexedAt: string
takedownId: number | null // @TODO(bsky)
takedownRef: string | null
}
export const tableName = 'actor'

View File

@ -0,0 +1,9 @@
// Row shape for the `blob_takedown` table: one takedown record per
// (did, cid) pair — see the blob-takedowns migration, where the pair is
// the primary key and all columns are non-null.
export interface BlobTakedown {
  did: string
  cid: string
  takedownRef: string
}

export const tableName = 'blob_takedown'

// Kysely partial-schema contribution, merged into DatabaseSchemaType.
export type PartialDB = { [tableName]: BlobTakedown }

View File

@ -20,7 +20,6 @@ export interface ModerationEvent {
| 'com.atproto.admin.defs#modEventMute'
| 'com.atproto.admin.defs#modEventReverseTakedown'
| 'com.atproto.admin.defs#modEventEmail'
| 'com.atproto.admin.defs#modEventResolveAppeal'
subjectType: 'com.atproto.admin.defs#repoRef' | 'com.atproto.repo.strongRef'
subjectDid: string
subjectUri: string | null
@ -48,11 +47,9 @@ export interface ModerationSubjectStatus {
lastReviewedBy: string | null
lastReviewedAt: string | null
lastReportedAt: string | null
lastAppealedAt: string | null
muteUntil: string | null
suspendUntil: string | null
takendown: boolean
appealed: boolean | null
comment: string | null
}

View File

@ -4,7 +4,7 @@ export interface Record {
did: string
json: string
indexedAt: string
takedownId: number | null // @TODO(bsky)
takedownRef: string | null
}
export const tableName = 'record'

View File

@ -20,11 +20,11 @@ export const actorWhereClause = (actor: string) => {
// Applies to actor or record table
export const notSoftDeletedClause = (alias: DbRef) => {
return sql`${alias}."takedownId" is null`
return sql`${alias}."takedownRef" is null`
}
export const softDeleted = (actorOrRecord: { takedownId: number | null }) => {
return actorOrRecord.takedownId !== null
export const softDeleted = (actorOrRecord: { takedownRef: string | null }) => {
return actorOrRecord.takedownRef !== null
}
export const countAll = sql<number>`count(*)`

View File

@ -33,20 +33,20 @@ import { NotificationServer } from './notifications'
import { AtpAgent } from '@atproto/api'
import { Keypair } from '@atproto/crypto'
import { Redis } from './redis'
import { AuthVerifier } from './auth-verifier'
export type { ServerConfigValues } from './config'
export type { MountedAlgos } from './feed-gen/types'
export { ServerConfig } from './config'
export { Database, PrimaryDatabase, DatabaseCoordinator } from './db'
export { PeriodicModerationEventReversal } from './db/periodic-moderation-event-reversal'
export { Redis } from './redis'
export { ViewMaintainer } from './db/views'
export { AppContext } from './context'
export type { ImageInvalidator } from './image/invalidator'
export { makeAlgos } from './feed-gen'
export * from './daemon'
export * from './indexer'
export * from './ingester'
export { MigrateModerationData } from './migrate-moderation-data'
export class BskyAppView {
public ctx: AppContext
@ -127,6 +127,14 @@ export class BskyAppView {
},
})
const authVerifier = new AuthVerifier(idResolver, {
ownDid: config.serverDid,
adminDid: config.modServiceDid,
adminPass: config.adminPassword,
moderatorPass: config.moderatorPassword,
triagePass: config.triagePassword,
})
const ctx = new AppContext({
db,
cfg: config,
@ -140,6 +148,7 @@ export class BskyAppView {
searchAgent,
algos,
notifServer,
authVerifier,
})
const xrpcOpts: XrpcServerOptions = {

View File

@ -3,6 +3,7 @@ import { DAY, HOUR, parseIntWithFallback } from '@atproto/common'
export interface IndexerConfigValues {
version: string
serverDid: string
dbPostgresUrl: string
dbPostgresSchema?: string
redisHost?: string // either set redis host, or both sentinel name and hosts
@ -13,7 +14,6 @@ export interface IndexerConfigValues {
didCacheStaleTTL: number
didCacheMaxTTL: number
handleResolveNameservers?: string[]
labelerDid: string
hiveApiKey?: string
abyssEndpoint?: string
abyssPassword?: string
@ -21,7 +21,7 @@ export interface IndexerConfigValues {
fuzzyMatchB64?: string
fuzzyFalsePositiveB64?: string
labelerKeywords: Record<string, string>
moderationPushUrl?: string
moderationPushUrl: string
indexerConcurrency?: number
indexerPartitionIds: number[]
indexerPartitionBatchSize?: number
@ -37,6 +37,7 @@ export class IndexerConfig {
static readEnv(overrides?: Partial<IndexerConfigValues>) {
const version = process.env.BSKY_VERSION || '0.0.0'
const serverDid = process.env.SERVER_DID || 'did:example:test'
const dbPostgresUrl =
overrides?.dbPostgresUrl || process.env.DB_PRIMARY_POSTGRES_URL
const dbPostgresSchema =
@ -66,11 +67,11 @@ export class IndexerConfig {
const handleResolveNameservers = process.env.HANDLE_RESOLVE_NAMESERVERS
? process.env.HANDLE_RESOLVE_NAMESERVERS.split(',')
: []
const labelerDid = process.env.LABELER_DID || 'did:example:labeler'
const moderationPushUrl =
overrides?.moderationPushUrl ||
process.env.MODERATION_PUSH_URL ||
undefined
assert(moderationPushUrl)
const hiveApiKey = process.env.HIVE_API_KEY || undefined
const abyssEndpoint = process.env.ABYSS_ENDPOINT
const abyssPassword = process.env.ABYSS_PASSWORD
@ -101,6 +102,7 @@ export class IndexerConfig {
assert(indexerPartitionIds.length > 0)
return new IndexerConfig({
version,
serverDid,
dbPostgresUrl,
dbPostgresSchema,
redisHost,
@ -111,7 +113,6 @@ export class IndexerConfig {
didCacheStaleTTL,
didCacheMaxTTL,
handleResolveNameservers,
labelerDid,
moderationPushUrl,
hiveApiKey,
abyssEndpoint,
@ -136,6 +137,10 @@ export class IndexerConfig {
return this.cfg.version
}
get serverDid() {
return this.cfg.serverDid
}
get dbPostgresUrl() {
return this.cfg.dbPostgresUrl
}
@ -176,10 +181,6 @@ export class IndexerConfig {
return this.cfg.handleResolveNameservers
}
get labelerDid() {
return this.cfg.labelerDid
}
get moderationPushUrl() {
return this.cfg.moderationPushUrl
}

View File

@ -13,8 +13,6 @@ import { AutoModerator } from '../auto-moderator'
import { Redis } from '../redis'
import { NotificationServer } from '../notifications'
import { CloseFn, createServer, startServer } from './server'
import { ImageUriBuilder } from '../image/uri'
import { ImageInvalidator } from '../image/invalidator'
export { IndexerConfig } from './config'
export type { IndexerConfigValues } from './config'
@ -42,7 +40,6 @@ export class BskyIndexer {
redis: Redis
redisCache: Redis
cfg: IndexerConfig
imgInvalidator?: ImageInvalidator
}): BskyIndexer {
const { db, redis, redisCache, cfg } = opts
const didCache = new DidRedisCache(redisCache.withNamespace('did-doc'), {
@ -56,17 +53,11 @@ export class BskyIndexer {
})
const backgroundQueue = new BackgroundQueue(db)
const imgUriBuilder = cfg.imgUriEndpoint
? new ImageUriBuilder(cfg.imgUriEndpoint)
: undefined
const imgInvalidator = opts.imgInvalidator
const autoMod = new AutoModerator({
db,
idResolver,
cfg,
backgroundQueue,
imgUriBuilder,
imgInvalidator,
})
const notifServer = cfg.pushNotificationEndpoint

View File

@ -9,6 +9,7 @@ export interface IngesterConfigValues {
redisSentinelHosts?: string[]
redisPassword?: string
repoProvider: string
labelProvider?: string
ingesterPartitionCount: number
ingesterNamespace?: string
ingesterSubLockId?: number
@ -40,6 +41,7 @@ export class IngesterConfig {
const redisPassword =
overrides?.redisPassword || process.env.REDIS_PASSWORD || undefined
const repoProvider = overrides?.repoProvider || process.env.REPO_PROVIDER // E.g. ws://abc.com:4000
const labelProvider = overrides?.labelProvider || process.env.LABEL_PROVIDER
const ingesterPartitionCount =
overrides?.ingesterPartitionCount ||
maybeParseInt(process.env.INGESTER_PARTITION_COUNT)
@ -69,6 +71,7 @@ export class IngesterConfig {
redisSentinelHosts,
redisPassword,
repoProvider,
labelProvider,
ingesterPartitionCount,
ingesterSubLockId,
ingesterNamespace,
@ -110,6 +113,10 @@ export class IngesterConfig {
return this.cfg.repoProvider
}
get labelProvider() {
return this.cfg.labelProvider
}
get ingesterPartitionCount() {
return this.cfg.ingesterPartitionCount
}

View File

@ -1,6 +1,7 @@
import { PrimaryDatabase } from '../db'
import { Redis } from '../redis'
import { IngesterConfig } from './config'
import { LabelSubscription } from './label-subscription'
export class IngesterContext {
constructor(
@ -8,6 +9,7 @@ export class IngesterContext {
db: PrimaryDatabase
redis: Redis
cfg: IngesterConfig
labelSubscription?: LabelSubscription
},
) {}
@ -22,6 +24,10 @@ export class IngesterContext {
get cfg(): IngesterConfig {
return this.opts.cfg
}
get labelSubscription(): LabelSubscription | undefined {
return this.opts.labelSubscription
}
}
export default IngesterContext

View File

@ -5,6 +5,7 @@ import { Redis } from '../redis'
import { IngesterConfig } from './config'
import { IngesterContext } from './context'
import { IngesterSubscription } from './subscription'
import { LabelSubscription } from './label-subscription'
export { IngesterConfig } from './config'
export type { IngesterConfigValues } from './config'
@ -26,7 +27,15 @@ export class BskyIngester {
cfg: IngesterConfig
}): BskyIngester {
const { db, redis, cfg } = opts
const ctx = new IngesterContext({ db, redis, cfg })
const labelSubscription = cfg.labelProvider
? new LabelSubscription(db, cfg.labelProvider)
: undefined
const ctx = new IngesterContext({
db,
redis,
cfg,
labelSubscription,
})
const sub = new IngesterSubscription(ctx, {
service: cfg.repoProvider,
subLockId: cfg.ingesterSubLockId,
@ -63,11 +72,13 @@ export class BskyIngester {
'ingester stats',
)
}, 500)
await this.ctx.labelSubscription?.start()
this.sub.run()
return this
}
async destroy(opts?: { skipDb: boolean }): Promise<void> {
await this.ctx.labelSubscription?.destroy()
await this.sub.destroy()
clearInterval(this.subStatsInterval)
await this.ctx.redis.destroy()

View File

@ -0,0 +1,76 @@
import AtpAgent from '@atproto/api'
import { PrimaryDatabase } from '../db'
import { sql } from 'kysely'
import { dbLogger } from '../logger'
import { SECOND } from '@atproto/common'
/**
 * Polls a label provider's `com.atproto.temp.fetchLabels` endpoint once per
 * second and upserts the returned labels into the local `label` table.
 */
export class LabelSubscription {
  destroyed = false
  // In-flight fetch cycle; awaited on destroy so shutdown doesn't race a write.
  promise: Promise<void> = Promise.resolve()
  timer: NodeJS.Timer | undefined
  // Polling cursor: unix millis of the most recently stored label's cts.
  lastLabel: number | undefined
  labelAgent: AtpAgent

  constructor(public db: PrimaryDatabase, public labelProvider: string) {
    this.labelAgent = new AtpAgent({ service: labelProvider })
  }

  /** Initialize the cursor from the newest label already in the db, then poll. */
  async start() {
    const res = await this.db.db
      .selectFrom('label')
      .select('cts')
      .orderBy('cts', 'desc')
      .limit(1)
      .executeTakeFirst()
    this.lastLabel = res ? new Date(res.cts).getTime() : undefined
    this.poll()
  }

  /** One poll cycle; reschedules itself until destroyed. */
  poll() {
    if (this.destroyed) return
    this.promise = this.fetchLabels()
      .catch((err) =>
        dbLogger.error({ err }, 'failed to fetch and store labels'),
      )
      .finally(() => {
        // Schedule the next poll only after the current fetch settles,
        // so fetches never overlap.
        this.timer = setTimeout(() => this.poll(), SECOND)
      })
  }

  /** Fetch labels since the cursor, upsert them, and advance the cursor. */
  async fetchLabels() {
    const res = await this.labelAgent.api.com.atproto.temp.fetchLabels({
      since: this.lastLabel,
    })
    const last = res.data.labels.at(-1)
    if (!last) {
      // No new labels; leave the cursor untouched.
      return
    }
    // Normalize optional fields for the db's non-null columns.
    const dbVals = res.data.labels.map((l) => ({
      ...l,
      cid: l.cid ?? '',
      neg: l.neg ?? false,
    }))
    const { ref } = this.db.db.dynamic
    const excluded = (col: string) => ref(`excluded.${col}`)
    // Upsert on the label identity (src, uri, cid, val), refreshing the
    // negation flag and timestamp on conflict.
    await this.db
      .asPrimary()
      .db.insertInto('label')
      .values(dbVals)
      .onConflict((oc) =>
        oc.columns(['src', 'uri', 'cid', 'val']).doUpdateSet({
          neg: sql`${excluded('neg')}`,
          cts: sql`${excluded('cts')}`,
        }),
      )
      .execute()
    this.lastLabel = new Date(last.cts).getTime()
  }

  /** Stop polling and wait for any in-flight fetch to finish. */
  async destroy() {
    this.destroyed = true
    if (this.timer) {
      clearTimeout(this.timer)
    }
    await this.promise
  }
}

View File

@ -15,6 +15,7 @@ import * as ComAtprotoAdminDisableInviteCodes from './types/com/atproto/admin/di
import * as ComAtprotoAdminEmitModerationEvent from './types/com/atproto/admin/emitModerationEvent'
import * as ComAtprotoAdminEnableAccountInvites from './types/com/atproto/admin/enableAccountInvites'
import * as ComAtprotoAdminGetAccountInfo from './types/com/atproto/admin/getAccountInfo'
import * as ComAtprotoAdminGetAccountInfos from './types/com/atproto/admin/getAccountInfos'
import * as ComAtprotoAdminGetInviteCodes from './types/com/atproto/admin/getInviteCodes'
import * as ComAtprotoAdminGetModerationEvent from './types/com/atproto/admin/getModerationEvent'
import * as ComAtprotoAdminGetRecord from './types/com/atproto/admin/getRecord'
@ -265,6 +266,17 @@ export class AdminNS {
return this._server.xrpc.method(nsid, cfg)
}
getAccountInfos<AV extends AuthVerifier>(
cfg: ConfigOf<
AV,
ComAtprotoAdminGetAccountInfos.Handler<ExtractAuth<AV>>,
ComAtprotoAdminGetAccountInfos.HandlerReqCtx<ExtractAuth<AV>>
>,
) {
const nsid = 'com.atproto.admin.getAccountInfos' // @ts-ignore
return this._server.xrpc.method(nsid, cfg)
}
getInviteCodes<AV extends AuthVerifier>(
cfg: ConfigOf<
AV,

View File

@ -436,6 +436,12 @@ export const schemaDict = {
email: {
type: 'string',
},
relatedRecords: {
type: 'array',
items: {
type: 'unknown',
},
},
indexedAt: {
type: 'string',
format: 'datetime',
@ -1046,6 +1052,45 @@ export const schemaDict = {
},
},
},
ComAtprotoAdminGetAccountInfos: {
lexicon: 1,
id: 'com.atproto.admin.getAccountInfos',
defs: {
main: {
type: 'query',
description: 'Get details about some accounts.',
parameters: {
type: 'params',
required: ['dids'],
properties: {
dids: {
type: 'array',
items: {
type: 'string',
format: 'did',
},
},
},
},
output: {
encoding: 'application/json',
schema: {
type: 'object',
required: ['infos'],
properties: {
infos: {
type: 'array',
items: {
type: 'ref',
ref: 'lex:com.atproto.admin.defs#accountView',
},
},
},
},
},
},
},
},
ComAtprotoAdminGetInviteCodes: {
lexicon: 1,
id: 'com.atproto.admin.getInviteCodes',
@ -7875,6 +7920,7 @@ export const ids = {
ComAtprotoAdminEmitModerationEvent: 'com.atproto.admin.emitModerationEvent',
ComAtprotoAdminEnableAccountInvites: 'com.atproto.admin.enableAccountInvites',
ComAtprotoAdminGetAccountInfo: 'com.atproto.admin.getAccountInfo',
ComAtprotoAdminGetAccountInfos: 'com.atproto.admin.getAccountInfos',
ComAtprotoAdminGetInviteCodes: 'com.atproto.admin.getInviteCodes',
ComAtprotoAdminGetModerationEvent: 'com.atproto.admin.getModerationEvent',
ComAtprotoAdminGetRecord: 'com.atproto.admin.getRecord',

View File

@ -255,6 +255,7 @@ export interface AccountView {
did: string
handle: string
email?: string
relatedRecords?: {}[]
indexedAt: string
invitedBy?: ComAtprotoServerDefs.InviteCode
invites?: ComAtprotoServerDefs.InviteCode[]

View File

@ -0,0 +1,46 @@
/**
* GENERATED CODE - DO NOT MODIFY
*/
import express from 'express'
import { ValidationResult, BlobRef } from '@atproto/lexicon'
import { lexicons } from '../../../../lexicons'
import { isObj, hasProp } from '../../../../util'
import { CID } from 'multiformats/cid'
import { HandlerAuth } from '@atproto/xrpc-server'
import * as ComAtprotoAdminDefs from './defs'
export interface QueryParams {
dids: string[]
}
export type InputSchema = undefined
export interface OutputSchema {
infos: ComAtprotoAdminDefs.AccountView[]
[k: string]: unknown
}
export type HandlerInput = undefined
export interface HandlerSuccess {
encoding: 'application/json'
body: OutputSchema
headers?: { [key: string]: string }
}
export interface HandlerError {
status: number
message?: string
}
export type HandlerOutput = HandlerError | HandlerSuccess
export type HandlerReqCtx<HA extends HandlerAuth = never> = {
auth: HA
params: QueryParams
input: HandlerInput
req: express.Request
res: express.Response
}
export type Handler<HA extends HandlerAuth = never> = (
ctx: HandlerReqCtx<HA>,
) => Promise<HandlerOutput> | HandlerOutput

View File

@ -1,414 +0,0 @@
import { sql } from 'kysely'
import { DatabaseCoordinator, PrimaryDatabase } from './index'
import { adjustModerationSubjectStatus } from './services/moderation/status'
import { ModerationEventRow } from './services/moderation/types'
type ModerationActionRow = Omit<ModerationEventRow, 'comment' | 'meta'> & {
reason: string | null
}
// Connection settings for the database being migrated, with defaults
// suitable for a local dev postgres.
const getEnv = () => ({
  DB_URL:
    process.env.MODERATION_MIGRATION_DB_URL ||
    'postgresql://pg:password@127.0.0.1:5433/postgres',
  DB_POOL_SIZE: Number(process.env.MODERATION_MIGRATION_DB_POOL_SIZE) || 10,
  DB_SCHEMA: process.env.MODERATION_MIGRATION_DB_SCHEMA || 'bsky',
})
// Count rows in the legacy moderation_action / moderation_report tables.
// The @ts-ignore directives are needed because those tables are presumably
// no longer part of the current Kysely schema type — verify if it changes.
const countEntries = async (db: PrimaryDatabase) => {
  const [allActions, allReports] = await Promise.all([
    db.db
      // @ts-ignore
      .selectFrom('moderation_action')
      // @ts-ignore
      .select((eb) => eb.fn.count<number>('id').as('count'))
      .executeTakeFirstOrThrow(),
    db.db
      // @ts-ignore
      .selectFrom('moderation_report')
      // @ts-ignore
      .select((eb) => eb.fn.count<number>('id').as('count'))
      .executeTakeFirstOrThrow(),
  ])

  return { reportsCount: allReports.count, actionsCount: allActions.count }
}
// Total number of rows currently in moderation_event.
const countEvents = async (db: PrimaryDatabase) => {
  const { count } = await db.db
    .selectFrom('moderation_event')
    .select((eb) => eb.fn.count<number>('id').as('count'))
    .executeTakeFirstOrThrow()
  return count
}
// Highest legacyRefId among already-migrated report events, i.e. the last
// legacy report that has been carried over.
const getLatestReportLegacyRefId = async (db: PrimaryDatabase) => {
  const { latestLegacyRefId } = await db.db
    .selectFrom('moderation_event')
    .where('action', '=', 'com.atproto.admin.defs#modEventReport')
    .select((eb) => eb.fn.max('legacyRefId').as('latestLegacyRefId'))
    .executeTakeFirstOrThrow()
  return latestLegacyRefId
}
// Total number of rows currently in moderation_subject_status.
const countStatuses = async (db: PrimaryDatabase) => {
  const { count } = await db.db
    .selectFrom('moderation_subject_status')
    .select((eb) => eb.fn.count<number>('id').as('count'))
    .executeTakeFirstOrThrow()
  return count
}
// Re-run subject-status adjustment for the migrated report events whose
// legacyRefId is in the given list. Runs all adjustments in one
// transaction, in ascending legacyRefId order.
const processLegacyReports = async (
  db: PrimaryDatabase,
  legacyIds: number[],
) => {
  if (!legacyIds.length) {
    console.log('No legacy reports to process')
    return
  }
  const reports = await db.db
    .selectFrom('moderation_event')
    .where('action', '=', 'com.atproto.admin.defs#modEventReport')
    .where('legacyRefId', 'in', legacyIds)
    .orderBy('legacyRefId', 'asc')
    .selectAll()
    .execute()

  console.log(`Processing ${reports.length} reports from ${legacyIds.length}`)
  await db.transaction(async (tx) => {
    // This will be slow but we need to run this in sequence
    for (const report of reports) {
      await adjustModerationSubjectStatus(tx, report)
    }
  })
  console.log(`Completed processing ${reports.length} reports`)
}
// legacyRefIds of migrated report events newer than the given legacy id.
const getReportEventsAboveLegacyId = async (
  db: PrimaryDatabase,
  aboveLegacyId: number,
) => {
  return db.db
    .selectFrom('moderation_event')
    .select(sql<number>`"legacyRefId"`.as('legacyRefId'))
    .where('action', '=', 'com.atproto.admin.defs#modEventReport')
    .where('legacyRefId', '>', aboveLegacyId)
    .execute()
}
// Backfill moderation_event rows from the legacy moderation_action and
// moderation_report tables. Without opts, migrates both (preserving action
// ids and resetting the id sequence); with opts.onlyReportsAboveId, only
// migrates reports newer than that legacy id (incremental catch-up).
// The @ts-ignore directives cover the legacy tables, which are presumably
// absent from the current Kysely schema type.
const createEvents = async (
  db: PrimaryDatabase,
  opts?: { onlyReportsAboveId: number },
) => {
  // Columns shared by both legacy sources, as selected from the source...
  const commonColumnsToSelect = [
    'subjectDid',
    'subjectUri',
    'subjectType',
    'subjectCid',
    sql`reason`.as('comment'),
    'createdAt',
  ]
  // ...and as inserted into moderation_event.
  const commonColumnsToInsert = [
    'subjectDid',
    'subjectUri',
    'subjectType',
    'subjectCid',
    'comment',
    'createdAt',
    'action',
    'createdBy',
  ] as const

  let totalActions: number
  if (!opts?.onlyReportsAboveId) {
    await db.db
      .insertInto('moderation_event')
      .columns([
        'id',
        ...commonColumnsToInsert,
        'createLabelVals',
        'negateLabelVals',
        'durationInHours',
        'expiresAt',
      ])
      .expression((eb) =>
        eb
          // @ts-ignore
          .selectFrom('moderation_action')
          // @ts-ignore
          .select([
            'id',
            ...commonColumnsToSelect,
            // Map legacy action names to new event names by capitalizing the
            // fragment, e.g. '...#takedown' -> '...#modEventTakedown'.
            sql`CONCAT('com.atproto.admin.defs#modEvent', UPPER(SUBSTRING(SPLIT_PART(action, '#', 2) FROM 1 FOR 1)), SUBSTRING(SPLIT_PART(action, '#', 2) FROM 2))`.as(
              'action',
            ),
            'createdBy',
            'createLabelVals',
            'negateLabelVals',
            'durationInHours',
            'expiresAt',
          ])
          .orderBy('id', 'asc'),
      )
      .execute()

    totalActions = await countEvents(db)
    console.log(`Created ${totalActions} events from actions`)

    // Ids were copied verbatim from moderation_action, so the serial
    // sequence must be bumped past the max id before new inserts.
    await sql`SELECT setval(pg_get_serial_sequence('moderation_event', 'id'), (select max(id) from moderation_event))`.execute(
      db.db,
    )
    console.log('Reset the id sequence for moderation_event')
  } else {
    totalActions = await countEvents(db)
  }

  await db.db
    .insertInto('moderation_event')
    .columns([...commonColumnsToInsert, 'meta', 'legacyRefId'])
    .expression((eb) => {
      const builder = eb
        // @ts-ignore
        .selectFrom('moderation_report')
        // @ts-ignore
        .select([
          ...commonColumnsToSelect,
          sql`'com.atproto.admin.defs#modEventReport'`.as('action'),
          sql`"reportedByDid"`.as('createdBy'),
          sql`json_build_object('reportType', "reasonType")`.as('meta'),
          // Keep the legacy report id around for incremental migration.
          sql`id`.as('legacyRefId'),
        ])

      if (opts?.onlyReportsAboveId) {
        // @ts-ignore
        return builder.where('id', '>', opts.onlyReportsAboveId)
      }

      return builder
    })
    .execute()

  const totalEvents = await countEvents(db)
  console.log(`Created ${totalEvents - totalActions} events from reports`)
  return
}
// Sync moderation_subject_status.lastReportedAt from the latest legacy
// report per subject. Done in two passes: did-level subjects (no record
// uri, matched on recordPath = ''), then record-level subjects (matched by
// recordPath appearing within the report's subjectUri). Only moves the
// timestamp forward, never backward.
const setReportedAtTimestamp = async (db: PrimaryDatabase) => {
  console.log('Initiating lastReportedAt timestamp sync')
  const didUpdate = await sql`
    UPDATE moderation_subject_status
    SET "lastReportedAt" = reports."createdAt"
    FROM (
      select "subjectDid", "subjectUri", MAX("createdAt") as "createdAt"
      from moderation_report
      where "subjectUri" is null
      group by "subjectDid", "subjectUri"
    ) as reports
    WHERE reports."subjectDid" = moderation_subject_status."did"
      AND "recordPath" = ''
      AND ("lastReportedAt" is null OR "lastReportedAt" < reports."createdAt")
  `.execute(db.db)

  console.log(
    `Updated lastReportedAt for ${didUpdate.numUpdatedOrDeletedRows} did subject`,
  )

  const contentUpdate = await sql`
    UPDATE moderation_subject_status
    SET "lastReportedAt" = reports."createdAt"
    FROM (
      select "subjectDid", "subjectUri", MAX("createdAt") as "createdAt"
      from moderation_report
      where "subjectUri" is not null
      group by "subjectDid", "subjectUri"
    ) as reports
    WHERE reports."subjectDid" = moderation_subject_status."did"
      AND "recordPath" is not null
      AND POSITION(moderation_subject_status."recordPath" IN reports."subjectUri") > 0
      AND ("lastReportedAt" is null OR "lastReportedAt" < reports."createdAt")
  `.execute(db.db)

  console.log(
    `Updated lastReportedAt for ${contentUpdate.numUpdatedOrDeletedRows} subject with uri`,
  )
}
// Build moderation_subject_status rows by replaying every non-reversed
// legacy moderation_action through adjustModerationSubjectStatus, in id
// order, within a single transaction. Actions are paged by id in chunks
// of 2500 to bound memory; chunk counters are for logging only.
// The @ts-ignore directives cover the legacy table, which is presumably
// absent from the current Kysely schema type.
const createStatusFromActions = async (db: PrimaryDatabase) => {
  const allEvents = await db.db
    // @ts-ignore
    .selectFrom('moderation_action')
    // @ts-ignore
    .where('reversedAt', 'is', null)
    // @ts-ignore
    .select((eb) => eb.fn.count<number>('id').as('count'))
    .executeTakeFirstOrThrow()

  const chunkSize = 2500
  const totalChunks = Math.ceil(allEvents.count / chunkSize)

  console.log(`Processing ${allEvents.count} actions in ${totalChunks} chunks`)

  await db.transaction(async (tx) => {
    // This is not used for pagination but only for logging purposes
    let currentChunk = 1
    let lastProcessedId: undefined | number = 0
    do {
      const eventsQuery = tx.db
        // @ts-ignore
        .selectFrom('moderation_action')
        // @ts-ignore
        .where('reversedAt', 'is', null)
        // @ts-ignore
        .where('id', '>', lastProcessedId)
        .limit(chunkSize)
        .selectAll()
      const events = (await eventsQuery.execute()) as ModerationActionRow[]

      for (const event of events) {
        // Remap action to event data type
        const actionParts = event.action.split('#')
        await adjustModerationSubjectStatus(tx, {
          ...event,
          // Capitalize the fragment: '...#takedown' -> '...#modEventTakedown'.
          action: `com.atproto.admin.defs#modEvent${actionParts[1]
            .charAt(0)
            .toUpperCase()}${actionParts[1].slice(
            1,
          )}` as ModerationEventRow['action'],
          comment: event.reason,
          meta: null,
        })
      }

      console.log(`Processed events chunk ${currentChunk} of ${totalChunks}`)
      // Loop terminates when a chunk comes back empty (at(-1) is undefined).
      lastProcessedId = events.at(-1)?.id
      currentChunk++
    } while (lastProcessedId !== undefined)
  })

  console.log(`Events migration complete!`)
  const totalStatuses = await countStatuses(db)
  console.log(`Created ${totalStatuses} statuses`)
}
/**
 * Rewrites legacy "flag" moderation events as "acknowledge" events.
 * (Name typo is preserved: existing callers reference it as-is.)
 */
const remapFlagToAcknlowedge = async (db: PrimaryDatabase) => {
  console.log('Initiating flag to ack remap')
  const updated = await sql`
UPDATE moderation_event
SET "action" = 'com.atproto.admin.defs#modEventAcknowledge'
WHERE action = 'com.atproto.admin.defs#modEventFlag'
`.execute(db.db)
  const count = updated.numUpdatedOrDeletedRows
  console.log(`Remapped ${count} flag actions to ack`)
}
/**
 * Copies blob CIDs from un-reversed legacy blob takedown actions onto the
 * matching `moderation_subject_status` rows. Matching is by subject DID plus
 * the status recordPath occurring as a substring of the action's subjectUri.
 */
const syncBlobCids = async (db: PrimaryDatabase) => {
console.log('Initiating blob cid sync')
const results = await sql`
UPDATE moderation_subject_status
SET "blobCids" = blob_action."cids"
FROM (
SELECT moderation_action."subjectUri", moderation_action."subjectDid", jsonb_agg(moderation_action_subject_blob."cid") as cids
FROM moderation_action_subject_blob
JOIN moderation_action
ON moderation_action.id = moderation_action_subject_blob."actionId"
WHERE moderation_action."reversedAt" is NULL
GROUP by moderation_action."subjectUri", moderation_action."subjectDid"
) as blob_action
WHERE did = "subjectDid" AND position("recordPath" IN "subjectUri") > 0
`.execute(db.db)
console.log(`Updated blob cids on ${results.numUpdatedOrDeletedRows} rows`)
}
/**
 * Finds legacy reports with no corresponding row in
 * `moderation_report_resolution` (i.e. still unresolved) and replays their
 * ids through `processLegacyReports` so subject statuses reflect the open
 * reports.
 */
async function updateStatusFromUnresolvedReports(db: PrimaryDatabase) {
const { ref } = db.db.dynamic
const reports = await db.db
// @ts-ignore
.selectFrom('moderation_report')
.whereNotExists((qb) =>
qb
.selectFrom('moderation_report_resolution')
.selectAll()
// @ts-ignore
.whereRef('reportId', '=', ref('moderation_report.id')),
)
.select(sql<number>`moderation_report.id`.as('legacyId'))
.execute()
console.log('Updating statuses based on unresolved reports')
await processLegacyReports(
db,
reports.map((report) => report.legacyId),
)
console.log('Completed updating statuses based on unresolved reports')
}
/**
 * Entry point for the legacy moderation-data migration. Connects using env
 * config, then either (a) performs an incremental catch-up of reports only
 * when events already exist from a prior run, or (b) runs the full
 * migration: events, flag->ack remap, statuses from actions, unresolved
 * reports, lastReportedAt sync, and blob cid sync.
 */
export async function MigrateModerationData() {
const env = getEnv()
const db = new DatabaseCoordinator({
schema: env.DB_SCHEMA,
primary: {
url: env.DB_URL,
poolSize: env.DB_POOL_SIZE,
},
replicas: [],
})
const primaryDb = db.getPrimary()
const [counts, existingEventsCount] = await Promise.all([
countEntries(primaryDb),
countEvents(primaryDb),
])
// If there are existing events in the moderation_event table, we assume that the migration has already been run
// so we just bring over any new reports since last run
if (existingEventsCount) {
console.log(
`Found ${existingEventsCount} existing events. Migrating ${counts.reportsCount} reports only, ignoring actions`,
)
const reportMigrationStartedAt = Date.now()
const latestReportLegacyRefId = await getLatestReportLegacyRefId(primaryDb)
if (latestReportLegacyRefId) {
// Incremental path: only reports newer than the last migrated legacy id.
await createEvents(primaryDb, {
onlyReportsAboveId: latestReportLegacyRefId,
})
const newReportEvents = await getReportEventsAboveLegacyId(
primaryDb,
latestReportLegacyRefId,
)
await processLegacyReports(
primaryDb,
newReportEvents.map((evt) => evt.legacyRefId),
)
await setReportedAtTimestamp(primaryDb)
} else {
console.log('No reports have been migrated into events yet, bailing.')
}
console.log(
`Time spent: ${(Date.now() - reportMigrationStartedAt) / 1000} seconds`,
)
console.log('Migration complete!')
return
}
// Full migration path: nothing migrated yet.
const totalEntries = counts.actionsCount + counts.reportsCount
console.log(`Migrating ${totalEntries} rows of actions and reports`)
const startedAt = Date.now()
await createEvents(primaryDb)
// Important to run this before creation statuses from actions to ensure that we are not attempting to map flag actions
await remapFlagToAcknlowedge(primaryDb)
await createStatusFromActions(primaryDb)
await updateStatusFromUnresolvedReports(primaryDb)
await setReportedAtTimestamp(primaryDb)
await syncBlobCids(primaryDb)
console.log(`Time spent: ${(Date.now() - startedAt) / 1000 / 60} minutes`)
console.log('Migration complete!')
}

View File

@ -10,6 +10,8 @@ import { SearchKeyset, getUserSearchQuery } from '../util/search'
import { FromDb } from '../types'
import { GraphService } from '../graph'
import { LabelService } from '../label'
import { AtUri } from '@atproto/syntax'
import { ids } from '../../lexicon/lexicons'
export * from './types'
@ -96,6 +98,26 @@ export class ActorService {
})
}
/**
 * Loads the `app.bsky.actor.profile/self` record for each given DID.
 * Soft-deleted actors are excluded unless `includeSoftDeleted` is set.
 * @returns Map of did -> parsed profile record JSON; dids without a
 *          profile record are simply absent from the map.
 */
async getProfileRecords(dids: string[], includeSoftDeleted = false) {
  // Type the empty result like the populated one so callers always get a
  // Map<string, JSON> (the bare `new Map()` previously inferred Map<any, any>).
  if (dids.length === 0) return new Map<string, JSON>()
  const profileUris = dids.map((did) =>
    AtUri.make(did, ids.AppBskyActorProfile, 'self').toString(),
  )
  const { ref } = this.db.db.dynamic
  const res = await this.db.db
    .selectFrom('record')
    .innerJoin('actor', 'actor.did', 'record.did')
    .if(!includeSoftDeleted, (qb) =>
      qb.where(notSoftDeletedClause(ref('actor'))),
    )
    .where('uri', 'in', profileUris)
    .select(['record.did', 'record.json'])
    .execute()
  return res.reduce((acc, cur) => {
    return acc.set(cur.did, JSON.parse(cur.json))
  }, new Map<string, JSON>())
}
async getSearchResults({
cursor,
limit = 25,

View File

@ -1,37 +1,9 @@
import { CID } from 'multiformats/cid'
import { AtUri } from '@atproto/syntax'
import { InvalidRequestError } from '@atproto/xrpc-server'
import { PrimaryDatabase } from '../../db'
import { ModerationViews } from './views'
import { ImageUriBuilder } from '../../image/uri'
import { Main as StrongRef } from '../../lexicon/types/com/atproto/repo/strongRef'
import { ImageInvalidator } from '../../image/invalidator'
import {
isModEventComment,
isModEventLabel,
isModEventMute,
isModEventReport,
isModEventTakedown,
isModEventEmail,
RepoRef,
RepoBlobRef,
} from '../../lexicon/types/com/atproto/admin/defs'
import { addHoursToDate } from '../../util/date'
import {
adjustModerationSubjectStatus,
getStatusIdentifierFromSubject,
} from './status'
import {
ModEventType,
ModerationEventRow,
ModerationEventRowWithHandle,
ModerationSubjectStatusRow,
ReversibleModerationEvent,
SubjectInfo,
} from './types'
import { ModerationEvent } from '../../db/tables/moderation'
import { paginate } from '../../db/pagination'
import { StatusKeyset, TimeIdKeyset } from './pagination'
import { StatusAttr } from '../../lexicon/types/com/atproto/admin/defs'
export class ModerationService {
constructor(
@ -48,630 +20,99 @@ export class ModerationService {
new ModerationService(db, imgUriBuilder, imgInvalidator)
}
views = new ModerationViews(this.db)
/** Fetches a single moderation event row by primary key, if present. */
async getEvent(id: number): Promise<ModerationEventRow | undefined> {
  const row = await this.db.db
    .selectFrom('moderation_event')
    .where('id', '=', id)
    .selectAll()
    .executeTakeFirst()
  return row
}
/** Like getEvent, but rejects with InvalidRequestError when no row exists. */
async getEventOrThrow(id: number): Promise<ModerationEventRow> {
  const found = await this.getEvent(id)
  if (found) return found
  throw new InvalidRequestError('Moderation event not found')
}
/**
 * Lists moderation events with optional filtering by subject, creator and
 * event type, paginated on (createdAt, id) via TimeIdKeyset. Handles are
 * joined in for both the event creator and the subject actor.
 *
 * When `includeAllUserRecords` is true a record subject (at-uri) is widened
 * to every event for the subject's DID; otherwise the subject must match
 * either the repo-level events (subjectUri null) or the exact uri.
 */
async getEvents(opts: {
subject?: string
createdBy?: string
limit: number
cursor?: string
includeAllUserRecords: boolean
types: ModerationEvent['action'][]
sortDirection?: 'asc' | 'desc'
}): Promise<{ cursor?: string; events: ModerationEventRowWithHandle[] }> {
const {
subject,
createdBy,
limit,
cursor,
includeAllUserRecords,
sortDirection = 'desc',
types,
} = opts
let builder = this.db.db
.selectFrom('moderation_event')
.leftJoin(
'actor as creatorActor',
'creatorActor.did',
'moderation_event.createdBy',
)
.leftJoin(
'actor as subjectActor',
'subjectActor.did',
'moderation_event.subjectDid',
)
if (subject) {
builder = builder.where((qb) => {
if (includeAllUserRecords) {
// If subject is an at-uri, we need to extract the DID from the at-uri
// otherwise, subject is probably a DID already
if (subject.startsWith('at://')) {
const uri = new AtUri(subject)
return qb.where('subjectDid', '=', uri.hostname)
}
return qb.where('subjectDid', '=', subject)
}
// Exact subject match: either a repo-level event for this DID
// (subjectUri null) or an event whose subjectUri equals the input.
return qb
.where((subQb) =>
subQb
.where('subjectDid', '=', subject)
.where('subjectUri', 'is', null),
)
.orWhere('subjectUri', '=', subject)
})
}
if (types.length) {
// Single-type filters use '=' so the planner can use a simpler predicate.
builder = builder.where((qb) => {
if (types.length === 1) {
return qb.where('action', '=', types[0])
}
return qb.where('action', 'in', types)
})
}
if (createdBy) {
builder = builder.where('createdBy', '=', createdBy)
}
const { ref } = this.db.db.dynamic
// Keyset over (createdAt, id) gives a stable total order for pagination.
const keyset = new TimeIdKeyset(
ref(`moderation_event.createdAt`),
ref('moderation_event.id'),
)
const paginatedBuilder = paginate(builder, {
limit,
cursor,
keyset,
direction: sortDirection,
tryIndex: true,
})
const result = await paginatedBuilder
.selectAll(['moderation_event'])
.select([
'subjectActor.handle as subjectHandle',
'creatorActor.handle as creatorHandle',
])
.execute()
return { cursor: keyset.packFromResult(result), events: result }
}
/** Fetches an event by id, but only if it is a report event. */
async getReport(id: number): Promise<ModerationEventRow | undefined> {
  return await this.db.db
    .selectFrom('moderation_event')
    .where('id', '=', id)
    .where('action', '=', 'com.atproto.admin.defs#modEventReport')
    .selectAll()
    .executeTakeFirst()
}
/**
 * Returns moderation subject status rows for a repo (by DID) or a record
 * (by at-uri). A cids-only subject currently returns all rows unfiltered.
 */
async getCurrentStatus(
  subject: { did: string } | { uri: AtUri } | { cids: CID[] },
) {
  const base = this.db.db.selectFrom('moderation_subject_status').selectAll()
  if ('did' in subject) {
    return await base.where('did', '=', subject.did).execute()
  }
  if ('uri' in subject) {
    return await base.where('recordPath', '=', subject.uri.toString()).execute()
  }
  // TODO: Handle the cid status
  return await base.execute()
}
/**
 * Normalizes a moderation subject into the SubjectInfo columns stored on an
 * event row. Repo subjects become repoRef rows (and must not carry blob
 * cids); record subjects become strongRef rows.
 * @throws InvalidRequestError when blob cids accompany a repo subject
 */
buildSubjectInfo(
  subject: { did: string } | { uri: AtUri; cid: CID },
  subjectBlobCids?: CID[],
): SubjectInfo {
  if (!('did' in subject)) {
    // Allowing records/blobs that may not exist: may have been deleted but
    // needs to remain actionable.
    return {
      subjectType: 'com.atproto.repo.strongRef',
      subjectDid: subject.uri.host,
      subjectUri: subject.uri.toString(),
      subjectCid: subject.cid.toString(),
    }
  }
  if (subjectBlobCids?.length) {
    throw new InvalidRequestError('Blobs do not apply to repo subjects')
  }
  // Allowing dids that may not exist: may have been deleted but needs to
  // remain actionable.
  return {
    subjectType: 'com.atproto.admin.defs#repoRef',
    subjectDid: subject.did,
    subjectUri: null,
    subjectCid: null,
  }
}
/**
 * Inserts a moderation event row for the given subject and immediately
 * adjusts the subject's status via adjustModerationSubjectStatus. Must be
 * called inside a transaction (asserted below) so the event insert and the
 * status adjustment commit together.
 * @returns the inserted moderation event row
 */
async logEvent(info: {
event: ModEventType
subject: { did: string } | { uri: AtUri; cid: CID }
subjectBlobCids?: CID[]
createdBy: string
createdAt?: Date
}): Promise<ModerationEventRow> {
this.db.assertTransaction()
const {
event,
createdBy,
subject,
subjectBlobCids,
createdAt = new Date(),
} = info
// Resolve subject info
const subjectInfo = this.buildSubjectInfo(subject, subjectBlobCids)
// Label values are persisted as single space-separated strings.
const createLabelVals =
isModEventLabel(event) && event.createLabelVals.length > 0
? event.createLabelVals.join(' ')
: undefined
const negateLabelVals =
isModEventLabel(event) && event.negateLabelVals.length > 0
? event.negateLabelVals.join(' ')
: undefined
// Event-type-specific extras go into the meta jsonb column.
const meta: Record<string, string | boolean> = {}
if (isModEventReport(event)) {
meta.reportType = event.reportType
}
if (isModEventComment(event) && event.sticky) {
meta.sticky = event.sticky
}
if (isModEventEmail(event)) {
meta.subjectLine = event.subjectLine
}
const modEvent = await this.db.db
.insertInto('moderation_event')
.values({
comment: event.comment ? `${event.comment}` : null,
action: event.$type as ModerationEvent['action'],
createdAt: createdAt.toISOString(),
createdBy,
createLabelVals,
negateLabelVals,
durationInHours: event.durationInHours
? Number(event.durationInHours)
: null,
meta,
// Only temporary takedowns/mutes get an expiry, derived from duration.
expiresAt:
(isModEventTakedown(event) || isModEventMute(event)) &&
event.durationInHours
? addHoursToDate(event.durationInHours, createdAt).toISOString()
: undefined,
...subjectInfo,
})
.returningAll()
.executeTakeFirstOrThrow()
await adjustModerationSubjectStatus(this.db, modEvent, subjectBlobCids)
return modEvent
}
/**
 * Finds the most recent reversible moderation event (a temporary takedown
 * or mute) for a subject whose restriction window has elapsed.
 *
 * NOTE(review): `isSuspended`/`isMuted` are true when the corresponding
 * `*Until` timestamp is in the PAST — i.e. the restriction has expired and
 * is due for reversal, consistent with getSubjectsDueForReversal(). The
 * names read as "currently suspended/muted"; confirm intent with callers.
 * @returns the latest matching event row, null when neither window applies,
 *          or undefined when no matching event exists
 */
async getLastReversibleEventForSubject({
did,
muteUntil,
recordPath,
suspendUntil,
}: ModerationSubjectStatusRow) {
const isSuspended = suspendUntil && new Date(suspendUntil) < new Date()
const isMuted = muteUntil && new Date(muteUntil) < new Date()
// If the subject is neither suspended nor muted don't bother finding the last reversible event
// Ideally, this should never happen because the caller of this method should only call this
// after ensuring that the suspended or muted subjects are being reversed
if (!isSuspended && !isMuted) {
return null
}
let builder = this.db.db
.selectFrom('moderation_event')
.where('subjectDid', '=', did)
if (recordPath) {
// Record-level subjects: recordPath is matched as a substring of subjectUri.
builder = builder.where('subjectUri', 'like', `%${recordPath}%`)
}
// Means the subject was suspended and needs to be unsuspended
if (isSuspended) {
builder = builder
.where('action', '=', 'com.atproto.admin.defs#modEventTakedown')
.where('durationInHours', 'is not', null)
}
if (isMuted) {
builder = builder
.where('action', '=', 'com.atproto.admin.defs#modEventMute')
.where('durationInHours', 'is not', null)
}
return await builder
.orderBy('id', 'desc')
.selectAll()
.limit(1)
.executeTakeFirst()
}
/**
 * Lists every subject whose suspension or mute window has already passed
 * and therefore needs its restriction reversed.
 */
async getSubjectsDueForReversal(): Promise<ModerationSubjectStatusRow[]> {
  const now = new Date().toISOString()
  return await this.db.db
    .selectFrom('moderation_subject_status')
    .where('suspendUntil', '<', now)
    .orWhere('muteUntil', '<', now)
    .selectAll()
    .execute()
}
/**
 * True when the repo-level status row for this DID (empty recordPath) has a
 * suspension that is still in effect (suspendUntil in the future).
 */
async isSubjectSuspended(did: string): Promise<boolean> {
  const match = await this.db.db
    .selectFrom('moderation_subject_status')
    .where('recordPath', '=', '')
    .where('did', '=', did)
    .where('suspendUntil', '>', new Date().toISOString())
    .select('did')
    .limit(1)
    .executeTakeFirst()
  return match !== undefined
}
/**
 * Reverses a prior moderation action: logs a reverse-takedown or unmute
 * event, and for takedowns also clears the takedown state on the repo or
 * record and reports what was restored so callers can fan the reversal out.
 * Must run inside a transaction (asserted below).
 *
 * NOTE(review): the subject-status lookup below uses `uri.host` while
 * `did` is taken from `uri.hostname` — presumably equivalent for at-uris,
 * but worth confirming against AtUri's semantics.
 * @returns the logged reversal event, plus the restored subjects when a
 *          takedown was reversed
 */
async revertState({
createdBy,
createdAt,
comment,
action,
subject,
}: ReversibleModerationEvent): Promise<{
result: ModerationEventRow
restored?: TakedownSubjects
}> {
const isRevertingTakedown =
action === 'com.atproto.admin.defs#modEventTakedown'
this.db.assertTransaction()
const result = await this.logEvent({
event: {
$type: isRevertingTakedown
? 'com.atproto.admin.defs#modEventReverseTakedown'
: 'com.atproto.admin.defs#modEventUnmute',
comment: comment ?? undefined,
},
createdAt,
createdBy,
subject,
})
let restored: TakedownSubjects | undefined
// Unmutes need no further state changes; only takedowns restore content.
if (!isRevertingTakedown) {
return { result, restored }
}
if (
result.subjectType === 'com.atproto.admin.defs#repoRef' &&
result.subjectDid
) {
await this.reverseTakedownRepo({
did: result.subjectDid,
})
restored = {
did: result.subjectDid,
subjects: [
{
$type: 'com.atproto.admin.defs#repoRef',
did: result.subjectDid,
},
],
}
}
if (
result.subjectType === 'com.atproto.repo.strongRef' &&
result.subjectUri
) {
const uri = new AtUri(result.subjectUri)
await this.reverseTakedownRecord({
uri,
})
const did = uri.hostname
// TODO: MOD_EVENT This bit needs testing
// Pull any blob cids attached to the record's status so they are also
// reported as restored subjects.
const subjectStatus = await this.db.db
.selectFrom('moderation_subject_status')
.where('did', '=', uri.host)
.where('recordPath', '=', `${uri.collection}/${uri.rkey}`)
.select('blobCids')
.executeTakeFirst()
const blobCids = subjectStatus?.blobCids || []
restored = {
did,
subjects: [
{
$type: 'com.atproto.repo.strongRef',
uri: result.subjectUri,
cid: result.subjectCid ?? '',
},
...blobCids.map((cid) => ({
$type: 'com.atproto.admin.defs#repoBlobRef',
did,
cid,
recordUri: result.subjectUri,
})),
],
}
}
return { result, restored }
}
async takedownRepo(info: {
takedownId: number
did: string
}): Promise<TakedownSubjects> {
const { takedownId, did } = info
async takedownRepo(info: { takedownRef: string; did: string }) {
const { takedownRef, did } = info
await this.db.db
.updateTable('actor')
.set({ takedownId })
.set({ takedownRef })
.where('did', '=', did)
.where('takedownId', 'is', null)
.where('takedownRef', 'is', null)
.executeTakeFirst()
return {
did,
subjects: [
{
$type: 'com.atproto.admin.defs#repoRef',
did,
},
],
}
}
async reverseTakedownRepo(info: { did: string }) {
await this.db.db
.updateTable('actor')
.set({ takedownId: null })
.set({ takedownRef: null })
.where('did', '=', info.did)
.execute()
}
async takedownRecord(info: {
takedownId: number
uri: AtUri
cid: CID
blobCids?: CID[]
}): Promise<TakedownSubjects> {
const { takedownId, uri, cid, blobCids } = info
const did = uri.hostname
this.db.assertTransaction()
async takedownRecord(info: { takedownRef: string; uri: AtUri; cid: CID }) {
const { takedownRef, uri } = info
await this.db.db
.updateTable('record')
.set({ takedownId })
.set({ takedownRef })
.where('uri', '=', uri.toString())
.where('takedownId', 'is', null)
.where('takedownRef', 'is', null)
.executeTakeFirst()
if (blobCids) {
await Promise.all(
blobCids.map(async (cid) => {
const paths = ImageUriBuilder.presets.map((id) => {
const imgUri = this.imgUriBuilder.getPresetUri(id, uri.host, cid)
return imgUri.replace(this.imgUriBuilder.endpoint, '')
})
await this.imgInvalidator.invalidate(cid.toString(), paths)
}),
)
}
return {
did,
subjects: [
{
$type: 'com.atproto.repo.strongRef',
uri: uri.toString(),
cid: cid.toString(),
},
...(blobCids || []).map((cid) => ({
$type: 'com.atproto.admin.defs#repoBlobRef',
did,
cid: cid.toString(),
recordUri: uri.toString(),
})),
],
}
}
async reverseTakedownRecord(info: { uri: AtUri }) {
this.db.assertTransaction()
await this.db.db
.updateTable('record')
.set({ takedownId: null })
.set({ takedownRef: null })
.where('uri', '=', info.uri.toString())
.execute()
}
async report(info: {
reasonType: NonNullable<ModerationEventRow['meta']>['reportType']
reason?: string
subject: { did: string } | { uri: AtUri; cid: CID }
reportedBy: string
createdAt?: Date
}): Promise<ModerationEventRow> {
const {
reasonType,
reason,
reportedBy,
createdAt = new Date(),
subject,
} = info
const event = await this.logEvent({
event: {
$type: 'com.atproto.admin.defs#modEventReport',
reportType: reasonType,
comment: reason,
},
createdBy: reportedBy,
subject,
createdAt,
})
return event
}
async getSubjectStatuses({
cursor,
limit = 50,
takendown,
appealed,
reviewState,
reviewedAfter,
reviewedBefore,
reportedAfter,
reportedBefore,
includeMuted,
ignoreSubjects,
sortDirection,
lastReviewedBy,
sortField,
subject,
}: {
cursor?: string
limit?: number
takendown?: boolean
appealed?: boolean | null
reviewedBefore?: string
reviewState?: ModerationSubjectStatusRow['reviewState']
reviewedAfter?: string
reportedAfter?: string
reportedBefore?: string
includeMuted?: boolean
subject?: string
ignoreSubjects?: string[]
sortDirection: 'asc' | 'desc'
lastReviewedBy?: string
sortField: 'lastReviewedAt' | 'lastReportedAt'
}) {
let builder = this.db.db
.selectFrom('moderation_subject_status')
.leftJoin('actor', 'actor.did', 'moderation_subject_status.did')
if (subject) {
const subjectInfo = getStatusIdentifierFromSubject(subject)
builder = builder
.where('moderation_subject_status.did', '=', subjectInfo.did)
.where((qb) =>
subjectInfo.recordPath
? qb.where('recordPath', '=', subjectInfo.recordPath)
: qb.where('recordPath', '=', ''),
)
}
if (ignoreSubjects?.length) {
builder = builder
.where('moderation_subject_status.did', 'not in', ignoreSubjects)
.where('recordPath', 'not in', ignoreSubjects)
}
if (reviewState) {
builder = builder.where('reviewState', '=', reviewState)
}
if (lastReviewedBy) {
builder = builder.where('lastReviewedBy', '=', lastReviewedBy)
}
if (reviewedAfter) {
builder = builder.where('lastReviewedAt', '>', reviewedAfter)
}
if (reviewedBefore) {
builder = builder.where('lastReviewedAt', '<', reviewedBefore)
}
if (reportedAfter) {
builder = builder.where('lastReviewedAt', '>', reportedAfter)
}
if (reportedBefore) {
builder = builder.where('lastReportedAt', '<', reportedBefore)
}
if (takendown) {
builder = builder.where('takendown', '=', true)
}
if (appealed !== undefined) {
builder =
appealed === null
? builder.where('appealed', 'is', null)
: builder.where('appealed', '=', appealed)
}
if (!includeMuted) {
builder = builder.where((qb) =>
qb
.where('muteUntil', '<', new Date().toISOString())
.orWhere('muteUntil', 'is', null),
)
}
const { ref } = this.db.db.dynamic
const keyset = new StatusKeyset(
ref(`moderation_subject_status.${sortField}`),
ref('moderation_subject_status.id'),
)
const paginatedBuilder = paginate(builder, {
limit,
cursor,
keyset,
direction: sortDirection,
tryIndex: true,
nullsLast: true,
})
const results = await paginatedBuilder
.select('actor.handle as handle')
.selectAll('moderation_subject_status')
async takedownBlob(info: { takedownRef: string; did: string; cid: string }) {
const { takedownRef, did, cid } = info
await this.db.db
.insertInto('blob_takedown')
.values({ did, cid, takedownRef })
.onConflict((oc) => oc.doNothing())
.execute()
return { statuses: results, cursor: keyset.packFromResult(results) }
const paths = ImageUriBuilder.presets.map((id) => {
const imgUri = this.imgUriBuilder.getPresetUri(id, did, cid)
return imgUri.replace(this.imgUriBuilder.endpoint, '')
})
await this.imgInvalidator.invalidate(cid.toString(), paths)
}
async isSubjectTakendown(
subject: { did: string } | { uri: AtUri },
): Promise<boolean> {
const { did, recordPath } = getStatusIdentifierFromSubject(
'did' in subject ? subject.did : subject.uri,
)
const builder = this.db.db
.selectFrom('moderation_subject_status')
async reverseTakedownBlob(info: { did: string; cid: string }) {
const { did, cid } = info
await this.db.db
.deleteFrom('blob_takedown')
.where('did', '=', did)
.where('recordPath', '=', recordPath || '')
.where('cid', '=', cid)
.execute()
}
const result = await builder.select('takendown').executeTakeFirst()
async getRepoTakedownRef(did: string): Promise<StatusAttr | null> {
const res = await this.db.db
.selectFrom('actor')
.where('did', '=', did)
.selectAll()
.executeTakeFirst()
return res ? formatStatus(res.takedownRef) : null
}
return !!result?.takendown
async getRecordTakedownRef(uri: string): Promise<StatusAttr | null> {
const res = await this.db.db
.selectFrom('record')
.where('uri', '=', uri)
.selectAll()
.executeTakeFirst()
return res ? formatStatus(res.takedownRef) : null
}
async getBlobTakedownRef(
did: string,
cid: string,
): Promise<StatusAttr | null> {
const res = await this.db.db
.selectFrom('blob_takedown')
.where('did', '=', did)
.where('cid', '=', cid)
.selectAll()
.executeTakeFirst()
// this table only tracks takedowns not all blobs
// so if no result is returned then the blob is not taken down (rather than not found)
return formatStatus(res?.takedownRef ?? null)
}
}
export type TakedownSubjects = {
did: string
subjects: (RepoRef | RepoBlobRef | StrongRef)[]
// Normalizes a nullable takedown ref into the lexicon StatusAttr shape:
// a present ref means the takedown is applied; otherwise it is not.
const formatStatus = (ref: string | null): StatusAttr => {
  if (ref) {
    return { applied: true, ref }
  }
  return { applied: false }
}

View File

@ -1,96 +0,0 @@
import { InvalidRequestError } from '@atproto/xrpc-server'
import { DynamicModule, sql } from 'kysely'
import { Cursor, GenericKeyset } from '../../db/pagination'
type StatusKeysetParam = {
lastReviewedAt: string | null
lastReportedAt: string | null
id: number
}
/**
 * Keyset for paginating moderation subject statuses sorted by a nullable
 * timestamp column (lastReviewedAt or lastReportedAt) with id as the
 * tiebreaker. The cursor's primary component is the timestamp as epoch
 * millis, or the empty string when the row's timestamp is null.
 */
export class StatusKeyset extends GenericKeyset<StatusKeysetParam, Cursor> {
labelResult(result: StatusKeysetParam): Cursor
labelResult(result: StatusKeysetParam) {
// Which timestamp column drives the sort is inferred from the dynamic
// reference the keyset was constructed with.
const primaryField = (
this.primary as ReturnType<DynamicModule['ref']>
).dynamicReference.includes('lastReviewedAt')
? 'lastReviewedAt'
: 'lastReportedAt'
return {
primary: result[primaryField]
? new Date(`${result[primaryField]}`).getTime().toString()
: '',
secondary: result.id.toString(),
}
}
labeledResultToCursor(labeled: Cursor) {
return {
primary: labeled.primary,
secondary: labeled.secondary,
}
}
cursorToLabeledResult(cursor: Cursor) {
// Empty primary round-trips as empty: it marks rows with a null timestamp.
return {
primary: cursor.primary
? new Date(parseInt(cursor.primary, 10)).toISOString()
: '',
secondary: cursor.secondary,
}
}
unpackCursor(cursorStr?: string): Cursor | undefined {
if (!cursorStr) return
const result = cursorStr.split('::')
const [primary, secondary, ...others] = result
if (!secondary || others.length > 0) {
throw new InvalidRequestError('Malformed cursor')
}
return {
primary,
secondary,
}
}
// This is specifically built to handle nullable columns as primary sorting column
getSql(labeled?: Cursor, direction?: 'asc' | 'desc') {
if (labeled === undefined) return
if (direction === 'asc') {
return !labeled.primary
? sql`(${this.primary} IS NULL AND ${this.secondary} > ${labeled.secondary})`
: sql`((${this.primary}, ${this.secondary}) > (${labeled.primary}, ${labeled.secondary}) OR (${this.primary} is null))`
} else {
return !labeled.primary
? sql`(${this.primary} IS NULL AND ${this.secondary} < ${labeled.secondary})`
: sql`((${this.primary}, ${this.secondary}) < (${labeled.primary}, ${labeled.secondary}) OR (${this.primary} is null))`
}
}
}
type TimeIdKeysetParam = {
id: number
createdAt: string
}
type TimeIdResult = TimeIdKeysetParam
/**
 * Keyset for paginating rows sorted by (createdAt, id). The cursor stores
 * createdAt as epoch millis; unpacking validates it parses back to a date.
 */
export class TimeIdKeyset extends GenericKeyset<TimeIdKeysetParam, Cursor> {
labelResult(result: TimeIdResult): Cursor
labelResult(result: TimeIdResult) {
return { primary: result.createdAt, secondary: result.id.toString() }
}
labeledResultToCursor(labeled: Cursor) {
// Timestamps are serialized as epoch millis for a compact cursor.
return {
primary: new Date(labeled.primary).getTime().toString(),
secondary: labeled.secondary,
}
}
cursorToLabeledResult(cursor: Cursor) {
const primaryDate = new Date(parseInt(cursor.primary, 10))
if (isNaN(primaryDate.getTime())) {
throw new InvalidRequestError('Malformed cursor')
}
return {
primary: primaryDate.toISOString(),
secondary: cursor.secondary,
}
}
}

View File

@ -1,551 +0,0 @@
import { sql } from 'kysely'
import { ArrayEl } from '@atproto/common'
import { AtUri } from '@atproto/syntax'
import { INVALID_HANDLE } from '@atproto/syntax'
import { BlobRef, jsonStringToLex } from '@atproto/lexicon'
import { Database } from '../../db'
import { Actor } from '../../db/tables/actor'
import { Record as RecordRow } from '../../db/tables/record'
import {
ModEventView,
RepoView,
RepoViewDetail,
RecordView,
RecordViewDetail,
ReportViewDetail,
BlobView,
SubjectStatusView,
ModEventViewDetail,
} from '../../lexicon/types/com/atproto/admin/defs'
import { OutputSchema as ReportOutput } from '../../lexicon/types/com/atproto/moderation/createReport'
import { Label } from '../../lexicon/types/com/atproto/label/defs'
import {
ModerationEventRowWithHandle,
ModerationSubjectStatusRowWithHandle,
} from './types'
import { getSelfLabels } from '../label'
import { REASONOTHER } from '../../lexicon/types/com/atproto/moderation/defs'
export class ModerationViews {
constructor(private db: Database) {}
repo(result: RepoResult): Promise<RepoView>
repo(result: RepoResult[]): Promise<RepoView[]>
/**
 * Hydrates admin RepoViews for one or many actor rows: joins in the profile
 * record json and the current moderation subject status for each DID.
 * Returns an array iff an array was passed in.
 */
async repo(
result: RepoResult | RepoResult[],
): Promise<RepoView | RepoView[]> {
const results = Array.isArray(result) ? result : [result]
if (results.length === 0) return []
const [info, subjectStatuses] = await Promise.all([
await this.db.db
.selectFrom('actor')
.leftJoin('profile', 'profile.creator', 'actor.did')
.leftJoin(
'record as profile_record',
'profile_record.uri',
'profile.uri',
)
.where(
'actor.did',
'in',
results.map((r) => r.did),
)
.select(['actor.did as did', 'profile_record.json as profileJson'])
.execute(),
this.getSubjectStatus(results.map((r) => ({ did: r.did }))),
])
// Index both lookups by DID for O(1) access per result row.
const infoByDid = info.reduce(
(acc, cur) => Object.assign(acc, { [cur.did]: cur }),
{} as Record<string, ArrayEl<typeof info>>,
)
const subjectStatusByDid = subjectStatuses.reduce(
(acc, cur) =>
Object.assign(acc, { [cur.did ?? '']: this.subjectStatus(cur) }),
{},
)
const views = results.map((r) => {
const { profileJson } = infoByDid[r.did] ?? {}
const relatedRecords: object[] = []
if (profileJson) {
relatedRecords.push(
jsonStringToLex(profileJson) as Record<string, unknown>,
)
}
return {
// No email or invite info on appview
did: r.did,
handle: r.handle ?? INVALID_HANDLE,
relatedRecords,
indexedAt: r.indexedAt,
moderation: {
subjectStatus: subjectStatusByDid[r.did] ?? undefined,
},
}
})
return Array.isArray(result) ? views : views[0]
}
event(result: EventResult): Promise<ModEventView[]>
event(result: EventResult[]): Promise<ModEventView[]>
/**
 * Maps moderation event rows to lexicon ModEventViews, expanding the stored
 * columns (durations, space-separated label vals, meta jsonb) into the
 * per-action event shapes. Returns an array iff an array was passed in.
 */
async event(
result: EventResult | EventResult[],
): Promise<ModEventView | ModEventView[]> {
const results = Array.isArray(result) ? result : [result]
if (results.length === 0) return []
const views = results.map((res) => {
const eventView: ModEventView = {
id: res.id,
event: {
$type: res.action,
comment: res.comment ?? undefined,
},
subject:
res.subjectType === 'com.atproto.admin.defs#repoRef'
? {
$type: 'com.atproto.admin.defs#repoRef',
did: res.subjectDid,
}
: {
$type: 'com.atproto.repo.strongRef',
uri: res.subjectUri,
cid: res.subjectCid,
},
subjectBlobCids: [],
createdBy: res.createdBy,
createdAt: res.createdAt,
subjectHandle: res.subjectHandle ?? undefined,
creatorHandle: res.creatorHandle ?? undefined,
}
// Takedowns and mutes may carry a duration.
if (
[
'com.atproto.admin.defs#modEventTakedown',
'com.atproto.admin.defs#modEventMute',
].includes(res.action)
) {
eventView.event = {
...eventView.event,
durationInHours: res.durationInHours ?? undefined,
}
}
// Label events expand the space-separated created/negated label columns.
if (res.action === 'com.atproto.admin.defs#modEventLabel') {
eventView.event = {
...eventView.event,
createLabelVals: res.createLabelVals?.length
? res.createLabelVals.split(' ')
: [],
negateLabelVals: res.negateLabelVals?.length
? res.negateLabelVals.split(' ')
: [],
}
}
// This is for legacy data only, for new events, these types of events won't have labels attached
if (
[
'com.atproto.admin.defs#modEventAcknowledge',
'com.atproto.admin.defs#modEventTakedown',
'com.atproto.admin.defs#modEventEscalate',
].includes(res.action)
) {
if (res.createLabelVals?.length) {
eventView.event = {
...eventView.event,
createLabelVals: res.createLabelVals.split(' '),
}
}
if (res.negateLabelVals?.length) {
eventView.event = {
...eventView.event,
negateLabelVals: res.negateLabelVals.split(' '),
}
}
}
// Remaining action-specific fields come from the meta jsonb column.
if (res.action === 'com.atproto.admin.defs#modEventReport') {
eventView.event = {
...eventView.event,
reportType: res.meta?.reportType ?? undefined,
}
}
if (res.action === 'com.atproto.admin.defs#modEventEmail') {
eventView.event = {
...eventView.event,
subjectLine: res.meta?.subjectLine ?? '',
}
}
if (
res.action === 'com.atproto.admin.defs#modEventComment' &&
res.meta?.sticky
) {
eventView.event.sticky = true
}
return eventView
})
return Array.isArray(result) ? views : views[0]
}
/**
 * Builds the detailed view of one moderation event: the event view, the
 * hydrated subject, and only those of the subject's blobs that the event
 * actually references.
 */
async eventDetail(result: EventResult): Promise<ModEventViewDetail> {
  const [event, subject] = await Promise.all([
    this.event(result),
    this.subject(result),
  ])
  const referencedBlobs = findBlobRefs(subject.value).filter((blob) =>
    event.subjectBlobCids.includes(blob.ref.toString()),
  )
  const subjectBlobs = await this.blob(referencedBlobs)
  return {
    ...event,
    subject,
    subjectBlobs,
  }
}
/** Builds the detailed repo view: the base RepoView plus its labels. */
async repoDetail(result: RepoResult): Promise<RepoViewDetail> {
  const repoPromise = this.repo(result)
  const labelsPromise = this.labels(result.did)
  const [repo, labels] = await Promise.all([repoPromise, labelsPromise])
  return {
    ...repo,
    moderation: {
      ...repo.moderation,
    },
    labels,
  }
}
record(result: RecordResult): Promise<RecordView>
record(result: RecordResult[]): Promise<RecordView[]>
/**
 * Hydrates admin RecordViews for one or many record rows: loads the owning
 * repo views and any moderation subject status, keyed by `did/recordPath`.
 * Returns an array iff an array was passed in.
 * @throws when a record's owning repo row is missing
 */
async record(
  result: RecordResult | RecordResult[],
): Promise<RecordView | RecordView[]> {
  const results = Array.isArray(result) ? result : [result]
  if (results.length === 0) return []
  const [repoResults, subjectStatuses] = await Promise.all([
    this.db.db
      .selectFrom('actor')
      .where(
        'actor.did',
        'in',
        results.map((r) => didFromUri(r.uri)),
      )
      .selectAll()
      .execute(),
    this.getSubjectStatus(results.map((r) => didAndRecordPathFromUri(r.uri))),
  ])
  const repos = await this.repo(repoResults)
  const reposByDid = repos.reduce(
    (acc, cur) => Object.assign(acc, { [cur.did]: cur }),
    {} as Record<string, ArrayEl<typeof repos>>,
  )
  const subjectStatusByUri = subjectStatuses.reduce(
    (acc, cur) =>
      Object.assign(acc, {
        // A template literal is never nullish, so the previous `?? ''`
        // fallback on this key was dead code; the key is always
        // `did/recordPath`, matching the lookup below.
        [`${cur.did}/${cur.recordPath}`]: this.subjectStatus(cur),
      }),
    {},
  )
  const views = results.map((res) => {
    const repo = reposByDid[didFromUri(res.uri)]
    const { did, recordPath } = didAndRecordPathFromUri(res.uri)
    const subjectStatus = subjectStatusByUri[`${did}/${recordPath}`]
    if (!repo) throw new Error(`Record repo is missing: ${res.uri}`)
    const value = jsonStringToLex(res.json) as Record<string, unknown>
    return {
      uri: res.uri,
      cid: res.cid,
      value,
      blobCids: findBlobRefs(value).map((blob) => blob.ref.toString()),
      indexedAt: res.indexedAt,
      repo,
      moderation: {
        subjectStatus,
      },
    }
  })
  return Array.isArray(result) ? views : views[0]
}
// Hydrates a detailed record view: the base record view plus its blobs,
// labels (including labels the record applies to itself), and the
// record's moderation subject status.
async recordDetail(result: RecordResult): Promise<RecordViewDetail> {
  const [record, statusRows] = await Promise.all([
    this.record(result),
    this.getSubjectStatus(didAndRecordPathFromUri(result.uri)),
  ])
  const [blobs, labels] = await Promise.all([
    this.blob(findBlobRefs(record.value)),
    this.labels(record.uri),
  ])
  const subjectStatus = statusRows?.length
    ? this.subjectStatus(statusRows[0])
    : undefined
  // Labels the record declares on itself, pulled from the record body.
  const selfLabels = getSelfLabels({
    uri: result.uri,
    cid: result.cid,
    record: jsonStringToLex(result.json) as Record<string, unknown>,
  })
  return {
    ...record,
    blobs,
    moderation: {
      ...record.moderation,
      subjectStatus,
    },
    labels: [...labels, ...selfLabels],
  }
}
// Public-facing projection of a report row (reports are stored as
// moderation events).
reportPublic(report: ReportResult): ReportOutput {
  const subject =
    report.subjectType === 'com.atproto.admin.defs#repoRef'
      ? {
          $type: 'com.atproto.admin.defs#repoRef',
          did: report.subjectDid,
        }
      : {
          $type: 'com.atproto.repo.strongRef',
          uri: report.subjectUri,
          cid: report.subjectCid,
        }
  // The schema does not guarantee a reportType on every row, so any
  // legacy/malformed entry defaults to 'other'.
  const reasonType = report.meta?.reportType
    ? (report.meta?.reportType as string)
    : REASONOTHER
  return {
    id: report.id,
    createdAt: report.createdAt,
    reasonType,
    reason: report.comment ?? undefined,
    reportedBy: report.createdBy,
    subject,
  }
}
// Partial view for subjects
//
// Resolves an event/report subject reference into a view. A repoRef
// subject becomes a repoView (or repoViewNotFound when the actor row no
// longer exists); a strongRef subject becomes a recordView (or
// recordViewNotFound when the record row no longer exists).
async subject(result: SubjectResult): Promise<SubjectView> {
  let subject: SubjectView
  if (result.subjectType === 'com.atproto.admin.defs#repoRef') {
    const repoResult = await this.db.db
      .selectFrom('actor')
      .selectAll()
      .where('did', '=', result.subjectDid)
      .executeTakeFirst()
    if (repoResult) {
      subject = await this.repo(repoResult)
      subject.$type = 'com.atproto.admin.defs#repoView'
    } else {
      // Actor is gone — emit a minimal not-found view carrying the did.
      subject = { did: result.subjectDid }
      subject.$type = 'com.atproto.admin.defs#repoViewNotFound'
    }
  } else if (
    result.subjectType === 'com.atproto.repo.strongRef' &&
    result.subjectUri !== null
  ) {
    const recordResult = await this.db.db
      .selectFrom('record')
      .selectAll()
      .where('uri', '=', result.subjectUri)
      .executeTakeFirst()
    if (recordResult) {
      subject = await this.record(recordResult)
      subject.$type = 'com.atproto.admin.defs#recordView'
    } else {
      // Record is gone — emit a minimal not-found view carrying the uri.
      subject = { uri: result.subjectUri }
      subject.$type = 'com.atproto.admin.defs#recordViewNotFound'
    }
  } else {
    // Any other subjectType, or a strongRef without a uri, is bad data.
    throw new Error(`Bad subject data: (${result.id}) ${result.subjectType}`)
  }
  return subject
}
// Partial view for blobs
/**
 * Hydrates blob views for the given blob refs, attaching the moderation
 * subject status (if any) that tracks each blob's cid.
 * Intentionally missing details field, since we don't have any on appview.
 */
async blob(blobs: BlobRef[]): Promise<BlobView[]> {
  if (!blobs.length) return []
  const { ref } = this.db.db.dynamic
  // Find a moderation status row whose tracked blobCids json array
  // contains all of the requested cids (postgres jsonb containment: @>).
  const modStatusResults = await this.db.db
    .selectFrom('moderation_subject_status')
    .where(
      sql<string>`${ref(
        'moderation_subject_status.blobCids',
      )} @> ${JSON.stringify(blobs.map((blob) => blob.ref.toString()))}`,
    )
    .selectAll()
    .executeTakeFirst()
  // Index the matched status row by each cid it covers. Idiom fix: the
  // previous `(x || [])?.reduce` optional-chained a value that can never
  // be nullish after the fallback; `?? []` with a plain call is correct.
  const statusByCid = (modStatusResults?.blobCids ?? []).reduce(
    (acc, cur) => Object.assign(acc, { [cur]: modStatusResults }),
    {},
  )
  // We also don't know when the blob was created, so we use a canned creation time.
  const unknownTime = new Date(0).toISOString()
  return blobs.map((blob) => {
    const cid = blob.ref.toString()
    const subjectStatus = statusByCid[cid]
      ? this.subjectStatus(statusByCid[cid])
      : undefined
    return {
      cid,
      mimeType: blob.mimeType,
      size: blob.size,
      createdAt: unknownTime,
      moderation: {
        subjectStatus,
      },
    }
  })
}
// Fetches the labels applied to a subject (a did or record uri).
// Negation labels are filtered out unless includeNeg is set.
async labels(subject: string, includeNeg?: boolean): Promise<Label[]> {
  const rows = await this.db.db
    .selectFrom('label')
    .where('label.uri', '=', subject)
    .if(!includeNeg, (qb) => qb.where('neg', '=', false))
    .selectAll()
    .execute()
  return rows.map((row) => ({
    ...row,
    // An empty-string cid marks a subject-wide label; surface it as undefined.
    cid: row.cid === '' ? undefined : row.cid,
    neg: row.neg,
  }))
}
// Looks up moderation_subject_status rows for one or many subjects (a
// did, optionally scoped to a record path), left-joined against the actor
// table so each row also carries the subject repo's handle.
async getSubjectStatus(
  subject:
    | { did: string; recordPath?: string }
    | { did: string; recordPath?: string }[],
): Promise<ModerationSubjectStatusRowWithHandle[]> {
  const subjectFilters = Array.isArray(subject) ? subject : [subject]
  // Builds the (did AND recordPath) predicate for one subject. A missing
  // recordPath matches the repo-level status row, which is stored as ''.
  const filterForSubject =
    ({ did, recordPath }: { did: string; recordPath?: string }) =>
    // TODO: Fix the typing here?
    (clause: any) => {
      clause = clause
        .where('moderation_subject_status.did', '=', did)
        .where('moderation_subject_status.recordPath', '=', recordPath || '')
      return clause
    }
  const builder = this.db.db
    .selectFrom('moderation_subject_status')
    .leftJoin('actor', 'actor.did', 'moderation_subject_status.did')
    .where((clause) => {
      // OR the per-subject predicates together: the first seeds the
      // clause via `where`, subsequent ones attach via `orWhere`.
      subjectFilters.forEach(({ did, recordPath }, i) => {
        const applySubjectFilter = filterForSubject({ did, recordPath })
        if (i === 0) {
          clause = clause.where(applySubjectFilter)
        } else {
          clause = clause.orWhere(applySubjectFilter)
        }
      })
      return clause
    })
    .selectAll('moderation_subject_status')
    .select('actor.handle as handle')
  return builder.execute()
}
subjectStatus(result: ModerationSubjectStatusRowWithHandle): SubjectStatusView
subjectStatus(
  result: ModerationSubjectStatusRowWithHandle[],
): SubjectStatusView[]
// Projects moderation_subject_status rows into API subject-status views.
// Accepts a single row or a list and returns the matching shape.
subjectStatus(
  result:
    | ModerationSubjectStatusRowWithHandle
    | ModerationSubjectStatusRowWithHandle[],
): SubjectStatusView | SubjectStatusView[] {
  const rows = Array.isArray(result) ? result : [result]
  if (rows.length === 0) return []
  const views = rows.map((row) => {
    // A missing recordPath means the status targets the whole repo;
    // otherwise it targets a specific record.
    const subject = !row.recordPath
      ? {
          $type: 'com.atproto.admin.defs#repoRef',
          did: row.did,
        }
      : {
          $type: 'com.atproto.repo.strongRef',
          // Not too intuitive but the recordPath is basically
          // <collection>/<rkey>, which is what the last 2 arguments of
          // AtUri.make() are.
          uri: AtUri.make(row.did, ...row.recordPath.split('/')).toString(),
          cid: row.recordCid,
        }
    return {
      id: row.id,
      reviewState: row.reviewState,
      createdAt: row.createdAt,
      updatedAt: row.updatedAt,
      comment: row.comment ?? undefined,
      lastReviewedBy: row.lastReviewedBy ?? undefined,
      lastReviewedAt: row.lastReviewedAt ?? undefined,
      lastReportedAt: row.lastReportedAt ?? undefined,
      lastAppealedAt: row.lastAppealedAt ?? undefined,
      muteUntil: row.muteUntil ?? undefined,
      suspendUntil: row.suspendUntil ?? undefined,
      takendown: row.takendown ?? undefined,
      appealed: row.appealed ?? undefined,
      subjectRepoHandle: row.handle ?? undefined,
      subjectBlobCids: row.blobCids || [],
      subject,
    }
  })
  return Array.isArray(result) ? views : views[0]
}
}
// Row-level result types consumed by the view builders above.
type RepoResult = Actor
type EventResult = ModerationEventRowWithHandle
// Reports are stored as moderation events, so they share a row shape.
type ReportResult = ModerationEventRowWithHandle
type RecordResult = RecordRow
// The subset of event/report columns needed to resolve a subject view.
type SubjectResult = Pick<
  EventResult & ReportResult,
  'id' | 'subjectType' | 'subjectDid' | 'subjectUri' | 'subjectCid'
>
// A subject may appear in event-detail or report-detail views.
type SubjectView = ModEventViewDetail['subject'] & ReportViewDetail['subject']
// Extracts the repo did (the authority/host segment) from an at:// uri.
function didFromUri(uri: string) {
  const { host } = new AtUri(uri)
  return host
}
function didAndRecordPathFromUri(uri: string) {
const atUri = new AtUri(uri)
return { did: atUri.host, recordPath: `${atUri.collection}/${atUri.rkey}` }
}
// Recursively collects every BlobRef found anywhere inside a record
// value, accumulating into (and returning) `refs`.
function findBlobRefs(value: unknown, refs: BlobRef[] = []) {
  if (value instanceof BlobRef) {
    refs.push(value)
    return refs
  }
  if (Array.isArray(value)) {
    for (const item of value) {
      findBlobRefs(item, refs)
    }
  } else if (value && typeof value === 'object') {
    for (const item of Object.values(value)) {
      findBlobRefs(item, refs)
    }
  }
  return refs
}

View File

@ -1,14 +0,0 @@
/**
 * Returns a new Date that is `hours` hours after the provided starting
 * date, or after the current date and time when no starting date is given.
 *
 * @param {number} hours - The number of hours to add.
 * @param {Date} startingDate - If provided, the function will add `hours` to the provided date instead of the current date.
 * @returns {Date} - A new Date object offset by `hours` from the base date.
 */
export function addHoursToDate(hours: number, startingDate?: Date): Date {
  // When a date is passed, let's clone before calling `setHours()` so that we are not mutating the original date
  const currentDate = startingDate ? new Date(startingDate) : new Date()
  currentDate.setHours(currentDate.getHours() + hours)
  return currentDate
}

View File

@ -461,12 +461,12 @@ Array [
"$type": "app.bsky.embed.images#view",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(5)@jpeg",
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(5)@jpeg",
},
Object {
"alt": "tests/sample-img/key-alt.jpg",
"alt": "../dev-env/src/seed/img/key-alt.jpg",
"fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(6)@jpeg",
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(6)@jpeg",
},
@ -517,7 +517,7 @@ Array [
"$type": "app.bsky.embed.images",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",
@ -528,7 +528,7 @@ Array [
},
},
Object {
"alt": "tests/sample-img/key-alt.jpg",
"alt": "../dev-env/src/seed/img/key-alt.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",
@ -721,12 +721,12 @@ Array [
"$type": "app.bsky.embed.images#view",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(5)@jpeg",
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(5)@jpeg",
},
Object {
"alt": "tests/sample-img/key-alt.jpg",
"alt": "../dev-env/src/seed/img/key-alt.jpg",
"fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(6)@jpeg",
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(6)@jpeg",
},
@ -777,7 +777,7 @@ Array [
"$type": "app.bsky.embed.images",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",
@ -788,7 +788,7 @@ Array [
},
},
Object {
"alt": "tests/sample-img/key-alt.jpg",
"alt": "../dev-env/src/seed/img/key-alt.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",
@ -937,12 +937,12 @@ Array [
"$type": "app.bsky.embed.images#view",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(4)@jpeg",
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(4)@jpeg",
},
Object {
"alt": "tests/sample-img/key-alt.jpg",
"alt": "../dev-env/src/seed/img/key-alt.jpg",
"fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(5)@jpeg",
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(5)@jpeg",
},
@ -987,7 +987,7 @@ Array [
"$type": "app.bsky.embed.images",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",
@ -998,7 +998,7 @@ Array [
},
},
Object {
"alt": "tests/sample-img/key-alt.jpg",
"alt": "../dev-env/src/seed/img/key-alt.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",
@ -1222,12 +1222,12 @@ Array [
"$type": "app.bsky.embed.images#view",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(4)@jpeg",
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(4)@jpeg",
},
Object {
"alt": "tests/sample-img/key-alt.jpg",
"alt": "../dev-env/src/seed/img/key-alt.jpg",
"fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(5)@jpeg",
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(5)@jpeg",
},
@ -1278,7 +1278,7 @@ Array [
"$type": "app.bsky.embed.images",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",
@ -1289,7 +1289,7 @@ Array [
},
},
Object {
"alt": "tests/sample-img/key-alt.jpg",
"alt": "../dev-env/src/seed/img/key-alt.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",

View File

@ -101,7 +101,7 @@ Array [
"$type": "app.bsky.embed.images#view",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(2)/cids(5)@jpeg",
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(2)/cids(5)@jpeg",
},
@ -113,7 +113,7 @@ Array [
"cid": "cids(3)",
"cts": "1970-01-01T00:00:00.000Z",
"neg": false,
"src": "did:example:labeler",
"src": "user(3)",
"uri": "record(3)",
"val": "test-label",
},
@ -121,7 +121,7 @@ Array [
"cid": "cids(3)",
"cts": "1970-01-01T00:00:00.000Z",
"neg": false,
"src": "did:example:labeler",
"src": "user(3)",
"uri": "record(3)",
"val": "test-label-2",
},
@ -134,7 +134,7 @@ Array [
"$type": "app.bsky.embed.images",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",
@ -207,7 +207,7 @@ Array [
"record": Object {
"$type": "app.bsky.embed.record#viewRecord",
"author": Object {
"did": "user(3)",
"did": "user(4)",
"handle": "dan.test",
"labels": Array [],
"viewer": Object {
@ -223,7 +223,7 @@ Array [
"record": Object {
"$type": "app.bsky.embed.record#viewRecord",
"author": Object {
"did": "user(4)",
"did": "user(5)",
"handle": "carol.test",
"labels": Array [],
"viewer": Object {
@ -245,7 +245,7 @@ Array [
"$type": "app.bsky.embed.images",
"images": Array [
Object {
"alt": "tests/sample-img/key-landscape-small.jpg",
"alt": "../dev-env/src/seed/img/key-landscape-small.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",
@ -256,7 +256,7 @@ Array [
},
},
Object {
"alt": "tests/sample-img/key-alt.jpg",
"alt": "../dev-env/src/seed/img/key-alt.jpg",
"image": Object {
"$type": "blob",
"mimeType": "image/jpeg",
@ -317,7 +317,7 @@ Array [
"cid": "cids(6)",
"cts": "1970-01-01T00:00:00.000Z",
"neg": false,
"src": "did:example:labeler",
"src": "user(3)",
"uri": "record(6)",
"val": "test-label",
},
@ -416,7 +416,7 @@ Array [
"cursor": "0000000000000::bafycid",
"follows": Array [
Object {
"did": "user(3)",
"did": "user(4)",
"handle": "dan.test",
"labels": Array [],
"viewer": Object {

View File

@ -0,0 +1,164 @@
import { SeedClient, usersSeed, TestNetwork } from '@atproto/dev-env'
import AtpAgent from '@atproto/api'
import { Secp256k1Keypair } from '@atproto/crypto'
import { createServiceAuthHeaders } from '@atproto/xrpc-server'
import { RepoRef } from '../../src/lexicon/types/com/atproto/admin/defs'
// Exercises inter-service auth on bsky admin endpoints: only the
// configured mod service did may make admin requests, and both the jwt
// signature and audience are verified against the caller's resolved key.
describe('admin auth', () => {
  let network: TestNetwork
  let agent: AtpAgent
  let sc: SeedClient
  let repoSubject: RepoRef
  const modServiceDid = 'did:example:mod'
  const altModDid = 'did:example:alt'
  let modServiceKey: Secp256k1Keypair
  let bskyDid: string
  beforeAll(async () => {
    network = await TestNetwork.create({
      dbPostgresSchema: 'bsky_admin_auth',
      bsky: {
        modServiceDid,
      },
    })
    bskyDid = network.bsky.ctx.cfg.serverDid
    modServiceKey = await Secp256k1Keypair.create()
    // Stub did resolution so both test mod dids resolve to our keypair.
    const origResolve = network.bsky.ctx.idResolver.did.resolveAtprotoKey
    network.bsky.ctx.idResolver.did.resolveAtprotoKey = async (
      did: string,
      forceRefresh?: boolean,
    ) => {
      if (did === modServiceDid || did === altModDid) {
        return modServiceKey.did()
      }
      return origResolve(did, forceRefresh)
    }
    agent = network.bsky.getClient()
    sc = network.getSeedClient()
    await usersSeed(sc)
    await network.processAll()
    repoSubject = {
      $type: 'com.atproto.admin.defs#repoRef',
      did: sc.dids.bob,
    }
  })
  afterAll(async () => {
    await network.close()
  })
  it('allows service auth requests from the configured appview did', async () => {
    const headers = await createServiceAuthHeaders({
      iss: modServiceDid,
      aud: bskyDid,
      keypair: modServiceKey,
    })
    await agent.api.com.atproto.admin.updateSubjectStatus(
      {
        subject: repoSubject,
        takedown: { applied: true, ref: 'test-repo' },
      },
      {
        ...headers,
        encoding: 'application/json',
      },
    )
    // Read the status back to confirm the takedown was applied.
    const res = await agent.api.com.atproto.admin.getSubjectStatus(
      {
        did: repoSubject.did,
      },
      headers,
    )
    expect(res.data.subject.did).toBe(repoSubject.did)
    expect(res.data.takedown?.applied).toBe(true)
  })
  it('does not allow requests from another did', async () => {
    // altModDid resolves to a valid key but is not the configured service.
    const headers = await createServiceAuthHeaders({
      iss: altModDid,
      aud: bskyDid,
      keypair: modServiceKey,
    })
    const attempt = agent.api.com.atproto.admin.updateSubjectStatus(
      {
        subject: repoSubject,
        takedown: { applied: true, ref: 'test-repo' },
      },
      {
        ...headers,
        encoding: 'application/json',
      },
    )
    await expect(attempt).rejects.toThrow('Untrusted issuer')
  })
  it('does not allow requests from an authenticated user', async () => {
    // A regular user's own signing key must not grant admin access.
    const aliceKey = await network.pds.ctx.actorStore.keypair(sc.dids.alice)
    const headers = await createServiceAuthHeaders({
      iss: sc.dids.alice,
      aud: bskyDid,
      keypair: aliceKey,
    })
    const attempt = agent.api.com.atproto.admin.updateSubjectStatus(
      {
        subject: repoSubject,
        takedown: { applied: true, ref: 'test-repo' },
      },
      {
        ...headers,
        encoding: 'application/json',
      },
    )
    await expect(attempt).rejects.toThrow('Untrusted issuer')
  })
  it('does not allow requests with a bad signature', async () => {
    // Signed with a key that does not match the issuer's resolved key.
    const badKey = await Secp256k1Keypair.create()
    const headers = await createServiceAuthHeaders({
      iss: modServiceDid,
      aud: bskyDid,
      keypair: badKey,
    })
    const attempt = agent.api.com.atproto.admin.updateSubjectStatus(
      {
        subject: repoSubject,
        takedown: { applied: true, ref: 'test-repo' },
      },
      {
        ...headers,
        encoding: 'application/json',
      },
    )
    await expect(attempt).rejects.toThrow(
      'jwt signature does not match jwt issuer',
    )
  })
  it('does not allow requests with a bad aud', async () => {
    // repo subject is bob, so we set alice as the audience
    const headers = await createServiceAuthHeaders({
      iss: modServiceDid,
      aud: sc.dids.alice,
      keypair: modServiceKey,
    })
    const attempt = agent.api.com.atproto.admin.updateSubjectStatus(
      {
        subject: repoSubject,
        takedown: { applied: true, ref: 'test-repo' },
      },
      {
        ...headers,
        encoding: 'application/json',
      },
    )
    await expect(attempt).rejects.toThrow(
      'jwt audience does not match service did',
    )
  })
})

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,5 @@
import AtpAgent, { AtUri } from '@atproto/api'
import { TestNetwork, SeedClient } from '@atproto/dev-env'
import basicSeed from '../seeds/basic'
import { TestNetwork, SeedClient, basicSeed } from '@atproto/dev-env'
import { makeAlgos } from '../../src'
describe('algo hot-classic', () => {
@ -40,7 +39,7 @@ describe('algo hot-classic', () => {
it('returns well liked posts', async () => {
const img = await sc.uploadFile(
alice,
'tests/sample-img/key-landscape-small.jpg',
'../dev-env/src/seed/img/key-landscape-small.jpg',
'image/jpeg',
)
const one = await sc.post(alice, 'first post', undefined, [img])

View File

@ -1,6 +1,5 @@
import AtpAgent from '@atproto/api'
import { SeedClient, TestNetwork } from '@atproto/dev-env'
import usersSeed from './seeds/users'
import { SeedClient, TestNetwork, usersSeed } from '@atproto/dev-env'
import { createServiceJwt } from '@atproto/xrpc-server'
import { Keypair, Secp256k1Keypair } from '@atproto/crypto'

View File

@ -1,6 +1,5 @@
import { TestNetwork, SeedClient } from '@atproto/dev-env'
import { TestNetwork, SeedClient, basicSeed } from '@atproto/dev-env'
import { FuzzyMatcher, encode } from '../../src/auto-moderator/fuzzy-matcher'
import basicSeed from '../seeds/basic'
import { AtpAgent } from '@atproto/api'
import { ImageInvalidator } from '../../src/image/invalidator'
@ -35,9 +34,8 @@ describe('fuzzy matcher', () => {
})
const getAllReports = () => {
return network.bsky.ctx.db
.getPrimary()
.db.selectFrom('moderation_event')
return network.ozone.ctx.db.db
.selectFrom('moderation_event')
.where('action', '=', 'com.atproto.admin.defs#modEventReport')
.selectAll()
.orderBy('id', 'asc')

View File

@ -1,16 +1,13 @@
import { TestNetwork } from '@atproto/dev-env'
import { TestNetwork, usersSeed } from '@atproto/dev-env'
import { AtUri, BlobRef } from '@atproto/api'
import { Readable } from 'stream'
import { AutoModerator } from '../../src/auto-moderator'
import IndexerContext from '../../src/indexer/context'
import { cidForRecord } from '@atproto/repo'
import { TID } from '@atproto/common'
import { LabelService } from '../../src/services/label'
import usersSeed from '../seeds/users'
import { CID } from 'multiformats/cid'
import { ImgLabeler } from '../../src/auto-moderator/hive'
import { ModerationService } from '../../src/services/moderation'
import { ImageInvalidator } from '../../src/image/invalidator'
import { TestOzone } from '@atproto/dev-env/src/ozone'
// outside of test suite so that TestLabeler can access them
let badCid1: CID | undefined = undefined
@ -18,10 +15,9 @@ let badCid2: CID | undefined = undefined
describe('labeler', () => {
let network: TestNetwork
let ozone: TestOzone
let autoMod: AutoModerator
let labelSrvc: LabelService
let ctx: IndexerContext
let labelerDid: string
let badBlob1: BlobRef
let badBlob2: BlobRef
let goodBlob: BlobRef
@ -32,12 +28,11 @@ describe('labeler', () => {
network = await TestNetwork.create({
dbPostgresSchema: 'bsky_labeler',
})
ozone = network.ozone
ctx = network.bsky.indexer.ctx
const pdsCtx = network.pds.ctx
labelerDid = ctx.cfg.labelerDid
autoMod = ctx.autoMod
autoMod.imgLabeler = new TestImgLabeler()
labelSrvc = ctx.services.label(ctx.db)
const sc = network.getSeedClient()
await usersSeed(sc)
await network.processAll()
@ -54,11 +49,7 @@ describe('labeler', () => {
constraints: {},
}
await store.repo.blob.verifyBlobAndMakePermanent(preparedBlobRef)
await store.repo.blob.associateBlob(
preparedBlobRef,
postUri(),
TID.nextStr(),
)
await store.repo.blob.associateBlob(preparedBlobRef, postUri())
return blobRef
})
}
@ -76,11 +67,15 @@ describe('labeler', () => {
await network.close()
})
const getLabels = async (subject: string) => {
return ozone.ctx.db.db
.selectFrom('label')
.selectAll()
.where('uri', '=', subject)
.execute()
}
it('labels text in posts', async () => {
autoMod.services.moderation = ModerationService.creator(
new NoopImageUriBuilder(''),
new NoopInvalidator(),
)
const post = {
$type: 'app.bsky.feed.post',
text: 'blah blah label_me',
@ -89,11 +84,11 @@ describe('labeler', () => {
const cid = await cidForRecord(post)
const uri = postUri()
autoMod.processRecord(uri, cid, post)
await autoMod.processAll()
const labels = await labelSrvc.getLabels(uri.toString())
await network.processAll()
const labels = await getLabels(uri.toString())
expect(labels.length).toBe(1)
expect(labels[0]).toMatchObject({
src: labelerDid,
src: ozone.ctx.cfg.service.did,
uri: uri.toString(),
cid: cid.toString(),
val: 'test-label',
@ -102,7 +97,7 @@ describe('labeler', () => {
// Verify that along with applying the labels, we are also leaving trace of the label as moderation event
// Temporarily assign an instance of moderation service to the autoMod so that we can validate label event
const modSrvc = autoMod.services.moderation(ctx.db)
const modSrvc = ozone.ctx.modService(ozone.ctx.db)
const { events } = await modSrvc.getEvents({
includeAllUserRecords: false,
subject: uri.toString(),
@ -116,11 +111,8 @@ describe('labeler', () => {
createLabelVals: 'test-label',
negateLabelVals: null,
comment: `[AutoModerator]: Applying labels`,
createdBy: labelerDid,
createdBy: network.bsky.indexer.ctx.cfg.serverDid,
})
// Cleanup the temporary assignment, knowing that by default, moderation service is not available
autoMod.services.moderation = undefined
})
it('labels embeds in posts', async () => {
@ -150,36 +142,12 @@ describe('labeler', () => {
const cid = await cidForRecord(post)
autoMod.processRecord(uri, cid, post)
await autoMod.processAll()
const dbLabels = await labelSrvc.getLabels(uri.toString())
const dbLabels = await getLabels(uri.toString())
const labels = dbLabels.map((row) => row.val).sort()
expect(labels).toEqual(
['test-label', 'test-label-2', 'img-label', 'other-img-label'].sort(),
)
})
it('retrieves repo labels on profile views', async () => {
await ctx.db.db
.insertInto('label')
.values({
src: labelerDid,
uri: alice,
cid: '',
val: 'repo-label',
neg: false,
cts: new Date().toISOString(),
})
.execute()
const labels = await labelSrvc.getLabelsForProfile(alice)
expect(labels.length).toBe(1)
expect(labels[0]).toMatchObject({
src: labelerDid,
uri: alice,
val: 'repo-label',
neg: false,
})
})
})
class TestImgLabeler implements ImgLabeler {
@ -193,14 +161,3 @@ class TestImgLabeler implements ImgLabeler {
return []
}
}
class NoopInvalidator implements ImageInvalidator {
async invalidate() {}
}
class NoopImageUriBuilder {
constructor(public endpoint: string) {}
getPresetUri() {
return ''
}
}

View File

@ -1,16 +1,16 @@
import fs from 'fs/promises'
import { TestNetwork, SeedClient, ImageRef } from '@atproto/dev-env'
import { TestNetwork, SeedClient, ImageRef, usersSeed } from '@atproto/dev-env'
import { AtpAgent } from '@atproto/api'
import { AutoModerator } from '../../src/auto-moderator'
import IndexerContext from '../../src/indexer/context'
import { sha256RawToCid } from '@atproto/common'
import usersSeed from '../seeds/users'
import { CID } from 'multiformats/cid'
import { AtUri } from '@atproto/syntax'
import { ImageFlagger } from '../../src/auto-moderator/abyss'
import { ImageInvalidator } from '../../src/image/invalidator'
import { sha256 } from '@atproto/crypto'
import { ids } from '../../src/lexicon/lexicons'
import { TestOzone } from '@atproto/dev-env/src/ozone'
import { PrimaryDatabase } from '../../src'
// outside of test suite so that TestLabeler can access them
let badCid1: CID | undefined = undefined
@ -18,9 +18,10 @@ let badCid2: CID | undefined = undefined
describe('takedowner', () => {
let network: TestNetwork
let ozone: TestOzone
let bskyDb: PrimaryDatabase
let autoMod: AutoModerator
let testInvalidator: TestInvalidator
let ctx: IndexerContext
let pdsAgent: AtpAgent
let sc: SeedClient
let alice: string
@ -36,8 +37,9 @@ describe('takedowner', () => {
imgInvalidator: testInvalidator,
},
})
ctx = network.bsky.indexer.ctx
autoMod = ctx.autoMod
ozone = network.ozone
bskyDb = network.bsky.ctx.db.getPrimary()
autoMod = network.bsky.indexer.ctx.autoMod
autoMod.imageFlagger = new TestFlagger()
pdsAgent = new AtpAgent({ service: network.pds.url })
sc = network.getSeedClient()
@ -45,26 +47,26 @@ describe('takedowner', () => {
await network.processAll()
alice = sc.dids.alice
const fileBytes1 = await fs.readFile(
'tests/sample-img/key-portrait-small.jpg',
'../dev-env/src/seed/img/key-portrait-small.jpg',
)
const fileBytes2 = await fs.readFile(
'tests/sample-img/key-portrait-large.jpg',
'../dev-env/src/seed/img/key-portrait-large.jpg',
)
badCid1 = sha256RawToCid(await sha256(fileBytes1))
badCid2 = sha256RawToCid(await sha256(fileBytes2))
goodBlob = await sc.uploadFile(
alice,
'tests/sample-img/key-landscape-small.jpg',
'../dev-env/src/seed/img/key-landscape-small.jpg',
'image/jpeg',
)
badBlob1 = await sc.uploadFile(
alice,
'tests/sample-img/key-portrait-small.jpg',
'../dev-env/src/seed/img/key-portrait-small.jpg',
'image/jpeg',
)
badBlob2 = await sc.uploadFile(
alice,
'tests/sample-img/key-portrait-large.jpg',
'../dev-env/src/seed/img/key-portrait-large.jpg',
'image/jpeg',
)
})
@ -76,9 +78,8 @@ describe('takedowner', () => {
it('takes down flagged content in posts', async () => {
const post = await sc.post(alice, 'blah', undefined, [goodBlob, badBlob1])
await network.processAll()
await autoMod.processAll()
const [modStatus, takedownEvent] = await Promise.all([
ctx.db.db
ozone.ctx.db.db
.selectFrom('moderation_subject_status')
.where('did', '=', alice)
.where(
@ -88,7 +89,7 @@ describe('takedowner', () => {
)
.select(['takendown', 'id'])
.executeTakeFirst(),
ctx.db.db
ozone.ctx.db.db
.selectFrom('moderation_event')
.where('subjectDid', '=', alice)
.where('action', '=', 'com.atproto.admin.defs#modEventTakedown')
@ -99,12 +100,12 @@ describe('takedowner', () => {
throw new Error('expected mod action')
}
expect(modStatus.takendown).toEqual(true)
const record = await ctx.db.db
const record = await bskyDb.db
.selectFrom('record')
.where('uri', '=', post.ref.uriStr)
.select('takedownId')
.select('takedownRef')
.executeTakeFirst()
expect(record?.takedownId).toBeGreaterThan(0)
expect(record?.takedownRef).toEqual(`BSKY-TAKEDOWN-${takedownEvent.id}`)
const recordPds = await network.pds.ctx.actorStore.read(
post.ref.uri.hostname,
@ -115,7 +116,7 @@ describe('takedowner', () => {
.select('takedownRef')
.executeTakeFirst(),
)
expect(recordPds?.takedownRef).toEqual(takedownEvent.id.toString())
expect(recordPds?.takedownRef).toEqual(`BSKY-TAKEDOWN-${takedownEvent.id}`)
expect(testInvalidator.invalidated.length).toBe(1)
expect(testInvalidator.invalidated[0].subject).toBe(
@ -137,13 +138,13 @@ describe('takedowner', () => {
)
await network.processAll()
const [modStatus, takedownEvent] = await Promise.all([
ctx.db.db
ozone.ctx.db.db
.selectFrom('moderation_subject_status')
.where('did', '=', alice)
.where('recordPath', '=', `${ids.AppBskyActorProfile}/self`)
.select(['takendown', 'id'])
.executeTakeFirst(),
ctx.db.db
ozone.ctx.db.db
.selectFrom('moderation_event')
.where('subjectDid', '=', alice)
.where(
@ -159,12 +160,12 @@ describe('takedowner', () => {
throw new Error('expected mod action')
}
expect(modStatus.takendown).toEqual(true)
const record = await ctx.db.db
const recordBsky = await bskyDb.db
.selectFrom('record')
.where('uri', '=', res.data.uri)
.select('takedownId')
.select('takedownRef')
.executeTakeFirst()
expect(record?.takedownId).toBeGreaterThan(0)
expect(recordBsky?.takedownRef).toEqual(`BSKY-TAKEDOWN-${takedownEvent.id}`)
const recordPds = await network.pds.ctx.actorStore.read(alice, (store) =>
store.db.db
@ -173,7 +174,7 @@ describe('takedowner', () => {
.select('takedownRef')
.executeTakeFirst(),
)
expect(recordPds?.takedownRef).toEqual(takedownEvent.id.toString())
expect(recordPds?.takedownRef).toEqual(`BSKY-TAKEDOWN-${takedownEvent.id}`)
expect(testInvalidator.invalidated.length).toBe(2)
expect(testInvalidator.invalidated[1].subject).toBe(

View File

@ -1,8 +1,7 @@
import axios, { AxiosInstance } from 'axios'
import { CID } from 'multiformats/cid'
import { verifyCidForBytes } from '@atproto/common'
import { TestNetwork } from '@atproto/dev-env'
import basicSeed from './seeds/basic'
import { TestNetwork, basicSeed } from '@atproto/dev-env'
import { randomBytes } from '@atproto/crypto'
describe('blob resolver', () => {

Some files were not shown because too many files have changed in this diff Show More