import assert from 'assert'
import { XRPCError } from '@atproto/xrpc'
import { AuthRequiredError } from '@atproto/xrpc-server'
import { TID } from '@atproto/common'
import { AtUri, AtpAgent } from '@atproto/api'
import {
  TestNetwork,
  TestFeedGen,
  SeedClient,
  RecordRef,
  basicSeed,
} from '@atproto/dev-env'
import { Handler as SkeletonHandler } from '../src/lexicon/types/app/bsky/feed/getFeedSkeleton'
import { ids } from '../src/lexicon/lexicons'
import {
  FeedViewPost,
  SkeletonFeedPost,
} from '../src/lexicon/types/app/bsky/feed/defs'
import { forSnapshot, paginateAll } from './_util'

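// End-to-end tests for feed generation: creating and updating feed generator
// records, discovery (getActorFeeds, getSuggestedFeeds, getPopularFeedGenerators),
// and skeleton fetching/hydration via app.bsky.feed.getFeed against an
// in-process TestFeedGen.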
describe('feed generation', () => {
  let network: TestNetwork
  let agent: AtpAgent
  let pdsAgent: AtpAgent
  let sc: SeedClient
  let gen: TestFeedGen

  let alice: string
  let feedUriAll: string
  let feedUriAllRef: RecordRef
  let feedUriEven: string
  let feedUriOdd: string // Unsupported by feed gen
  let feedUriBadPagination: string
  let feedUriPrime: string // Taken-down
  let feedUriPrimeRef: RecordRef
  let feedUriNeedsAuth: string

  beforeAll(async () => {
    network = await TestNetwork.create({
      dbPostgresSchema: 'bsky_feed_generation',
    })
    agent = network.bsky.getClient()
    pdsAgent = network.pds.getClient()
    sc = network.getSeedClient()
    await basicSeed(sc)
    await network.processAll()
    alice = sc.dids.alice
    const allUri = AtUri.make(alice, 'app.bsky.feed.generator', 'all')
    const badPaginationUri = AtUri.make(
      alice,
      'app.bsky.feed.generator',
      'bad-pagination',
    )
    const evenUri = AtUri.make(alice, 'app.bsky.feed.generator', 'even')
    const primeUri = AtUri.make(alice, 'app.bsky.feed.generator', 'prime')
    const needsAuthUri = AtUri.make(
      alice,
      'app.bsky.feed.generator',
      'needs-auth',
    )
    gen = await network.createFeedGen({
      [allUri.toString()]: feedGenHandler('all'),
      [evenUri.toString()]: feedGenHandler('even'),
      [badPaginationUri.toString()]: feedGenHandler('bad-pagination'),
      [primeUri.toString()]: feedGenHandler('prime'),
      [needsAuthUri.toString()]: feedGenHandler('needs-auth'),
    })

    const feedSuggestions = [
      { uri: allUri.toString(), order: 1 },
      { uri: evenUri.toString(), order: 2 },
      { uri: badPaginationUri.toString(), order: 3 },
      { uri: primeUri.toString(), order: 4 },
    ]
    await network.bsky.db.db
      .insertInto('suggested_feed')
      .values(feedSuggestions)
      .execute()
  })

  afterAll(async () => {
    await gen.close()
    await network.close()
  })

  it('feed gen records can be created.', async () => {
    const all = await pdsAgent.api.app.bsky.feed.generator.create(
      { repo: alice, rkey: 'all' },
      {
        did: gen.did,
        displayName: 'All',
        description: 'Provides all feed candidates',
        createdAt: new Date().toISOString(),
      },
      sc.getHeaders(alice),
    )
    const even = await pdsAgent.api.app.bsky.feed.generator.create(
      { repo: alice, rkey: 'even' },
      {
        did: gen.did,
        displayName: 'Even',
        description: 'Provides even-indexed feed candidates',
        createdAt: new Date().toISOString(),
      },
      sc.getHeaders(alice),
    )
    // Unsupported by feed gen
    const odd = await pdsAgent.api.app.bsky.feed.generator.create(
      { repo: alice, rkey: 'odd' },
      {
        did: gen.did,
        displayName: 'Temp', // updated in next test
        description: 'Temp', // updated in next test
        createdAt: new Date().toISOString(),
      },
      sc.getHeaders(alice),
    )
    const badPagination = await pdsAgent.api.app.bsky.feed.generator.create(
      { repo: alice, rkey: 'bad-pagination' },
      {
        did: gen.did,
        displayName: 'Bad Pagination',
        description:
          'Provides all feed candidates, blindly ignoring pagination limit',
        createdAt: new Date().toISOString(),
      },
      sc.getHeaders(alice),
    )
    // Taken-down
    const prime = await pdsAgent.api.app.bsky.feed.generator.create(
      { repo: alice, rkey: 'prime' },
      {
        did: gen.did,
        displayName: 'Prime',
        description: 'Provides prime-indexed feed candidates',
        createdAt: new Date().toISOString(),
      },
      sc.getHeaders(alice),
    )
    const needsAuth = await pdsAgent.api.app.bsky.feed.generator.create(
      { repo: alice, rkey: 'needs-auth' },
      {
        did: gen.did,
        displayName: 'Needs Auth',
        description: 'Provides all feed candidates when authed',
        createdAt: new Date().toISOString(),
      },
      sc.getHeaders(alice),
    )
    await network.processAll()
    await network.bsky.ctx.dataplane.takedownRecord({
      recordUri: prime.uri,
    })

    feedUriAll = all.uri
    feedUriAllRef = new RecordRef(all.uri, all.cid)
    feedUriEven = even.uri
    feedUriOdd = odd.uri
    feedUriBadPagination = badPagination.uri
    feedUriPrime = prime.uri
    feedUriPrimeRef = new RecordRef(prime.uri, prime.cid)
    feedUriNeedsAuth = needsAuth.uri
  })

  it('feed gen records can be updated', async () => {
    await pdsAgent.api.com.atproto.repo.putRecord(
      {
        repo: alice,
        collection: ids.AppBskyFeedGenerator,
        rkey: 'odd',
        record: {
          did: gen.did,
          displayName: 'Odd',
          description: 'Provides odd-indexed feed candidates',
          createdAt: new Date().toISOString(),
        },
      },
      { headers: sc.getHeaders(alice), encoding: 'application/json' },
    )
    await network.processAll()
  })

  it('getActorFeeds fetches feed generators by actor.', async () => {
    // add some likes
    await sc.like(sc.dids.bob, feedUriAllRef)
    await sc.like(sc.dids.carol, feedUriAllRef)
    await network.processAll()

    const results = (results) => results.flatMap((res) => res.feeds)
    const paginator = async (cursor?: string) => {
      const res = await agent.api.app.bsky.feed.getActorFeeds(
        { actor: alice, cursor, limit: 2 },
        { headers: await network.serviceHeaders(sc.dids.bob) },
      )
      return res.data
    }

    const paginatedAll = results(await paginateAll(paginator))

    expect(paginatedAll.length).toEqual(5)
    expect(paginatedAll[0].uri).toEqual(feedUriOdd)
    expect(paginatedAll[1].uri).toEqual(feedUriNeedsAuth)
    expect(paginatedAll[2].uri).toEqual(feedUriBadPagination)
    expect(paginatedAll[3].uri).toEqual(feedUriEven)
    expect(paginatedAll[4].uri).toEqual(feedUriAll)
    expect(paginatedAll.map((fg) => fg.uri)).not.toContain(feedUriPrime) // taken-down
    expect(forSnapshot(paginatedAll)).toMatchSnapshot()
  })

  it('embeds feed generator records in posts', async () => {
    const res = await pdsAgent.api.app.bsky.feed.post.create(
      { repo: sc.dids.bob },
      {
        text: 'cool feed!',
        embed: {
          $type: 'app.bsky.embed.record',
          record: feedUriAllRef.raw,
        },
        createdAt: new Date().toISOString(),
      },
      sc.getHeaders(sc.dids.bob),
    )
    await network.processAll()
    const view = await agent.api.app.bsky.feed.getPosts(
      { uris: [res.uri] },
      { headers: await network.serviceHeaders(sc.dids.bob) },
    )
    expect(view.data.posts.length).toBe(1)
    expect(forSnapshot(view.data.posts[0])).toMatchSnapshot()
  })

  it('does not embed taken-down feed generator records in posts', async () => {
    const res = await pdsAgent.api.app.bsky.feed.post.create(
      { repo: sc.dids.bob },
      {
        text: 'weird feed',
        embed: {
          $type: 'app.bsky.embed.record',
          record: feedUriPrimeRef.raw,
        },
        createdAt: new Date().toISOString(),
      },
      sc.getHeaders(sc.dids.bob),
    )
    await network.processAll()
    const view = await agent.api.app.bsky.feed.getPosts(
      { uris: [res.uri] },
      { headers: await network.serviceHeaders(sc.dids.bob) },
    )
    expect(view.data.posts.length).toBe(1)
    expect(forSnapshot(view.data.posts[0])).toMatchSnapshot()
  })

  describe('getFeedGenerator', () => {
    it('describes a feed gen & returns online status', async () => {
      const resEven = await agent.api.app.bsky.feed.getFeedGenerator(
        { feed: feedUriAll },
        { headers: await network.serviceHeaders(sc.dids.bob) },
      )
      expect(forSnapshot(resEven.data)).toMatchSnapshot()
      expect(resEven.data.isOnline).toBe(true)
      expect(resEven.data.isValid).toBe(true)
    })

    it('does not describe taken-down feed', async () => {
      const tryGetFeed = agent.api.app.bsky.feed.getFeedGenerator(
        { feed: feedUriPrime },
        { headers: await network.serviceHeaders(sc.dids.bob) },
      )
      await expect(tryGetFeed).rejects.toThrow('could not find feed')
    })

    // @TODO temporarily skipping while external feedgens catch-up on describeFeedGenerator
    it.skip('handles an unsupported algo', async () => {
      const resOdd = await agent.api.app.bsky.feed.getFeedGenerator(
        { feed: feedUriOdd },
        { headers: await network.serviceHeaders(sc.dids.bob) },
      )
      expect(resOdd.data.isOnline).toBe(true)
      expect(resOdd.data.isValid).toBe(false)
    })

    // @TODO temporarily skipping while external feedgens catch-up on describeFeedGenerator
    it.skip('handles an offline feed', async () => {
      // make an invalid feed gen in bob's repo
      const allUriBob = AtUri.make(
        sc.dids.bob,
        'app.bsky.feed.generator',
        'all',
      )
      const bobFg = await network.createFeedGen({
        [allUriBob.toString()]: feedGenHandler('all'),
      })

      await pdsAgent.api.app.bsky.feed.generator.create(
        { repo: sc.dids.bob, rkey: 'all' },
        {
          did: bobFg.did,
          displayName: 'All by bob',
          description: 'Provides all feed candidates - by bob',
          createdAt: new Date().toISOString(),
        },
        sc.getHeaders(sc.dids.bob),
      )
      await network.processAll()

      // now take it offline
      await bobFg.close()

      const res = await agent.api.app.bsky.feed.getFeedGenerator(
        {
          feed: allUriBob.toString(),
        },
        { headers: await network.serviceHeaders(sc.dids.alice) },
      )
      expect(res.data.isOnline).toBe(false)
      expect(res.data.isValid).toBe(false)
    })
  })

  describe('getFeedGenerators', () => {
    it('describes multiple feed gens', async () => {
      const resEven = await agent.api.app.bsky.feed.getFeedGenerators(
        { feeds: [feedUriEven, feedUriAll, feedUriPrime] },
        { headers: await network.serviceHeaders(sc.dids.bob) },
      )
      expect(forSnapshot(resEven.data)).toMatchSnapshot()
      expect(resEven.data.feeds.map((fg) => fg.uri)).not.toContain(feedUriPrime) // taken-down
    })
  })

  describe('getSuggestedFeeds', () => {
    it('returns list of suggested feed generators', async () => {
      const resEven = await agent.api.app.bsky.feed.getSuggestedFeeds(
        {},
        { headers: await network.serviceHeaders(sc.dids.bob) },
      )
      expect(forSnapshot(resEven.data)).toMatchSnapshot()
      expect(resEven.data.feeds.map((fg) => fg.uri)).not.toContain(feedUriPrime) // taken-down
    })
  })

  describe('getPopularFeedGenerators', () => {
    it('gets popular feed generators', async () => {
      const res = await agent.api.app.bsky.unspecced.getPopularFeedGenerators(
        {},
        { headers: await network.serviceHeaders(sc.dids.bob) },
      )
      expect(res.data.feeds.map((f) => f.uri)).not.toContain(feedUriPrime) // taken-down
      expect(res.data.feeds.map((f) => f.uri)).toEqual([
        feedUriAll,
        feedUriEven,
        feedUriBadPagination,
      ])
    })

    it('searches feed generators', async () => {
      const res = await agent.api.app.bsky.unspecced.getPopularFeedGenerators(
        { query: 'all' },
        { headers: await network.serviceHeaders(sc.dids.bob) },
      )
      expect(res.data.feeds.map((f) => f.uri)).toEqual([feedUriAll])
    })

    it('paginates', async () => {
      const resFull =
        await agent.api.app.bsky.unspecced.getPopularFeedGenerators(
          {},
          { headers: await network.serviceHeaders(sc.dids.bob) },
        )
      const resOne =
        await agent.api.app.bsky.unspecced.getPopularFeedGenerators(
          { limit: 2 },
          { headers: await network.serviceHeaders(sc.dids.bob) },
        )
      const resTwo =
        await agent.api.app.bsky.unspecced.getPopularFeedGenerators(
          { cursor: resOne.data.cursor },
          { headers: await network.serviceHeaders(sc.dids.bob) },
        )
      expect([...resOne.data.feeds, ...resTwo.data.feeds]).toEqual(
        resFull.data.feeds,
      )
    })
  })

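  // app.bsky.feed.getFeed drives the full path: the appview requests a
  // skeleton from the feed generator, then hydrates it into feed view posts.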
  describe('getFeed', () => {
    it('resolves basic feed contents.', async () => {
      const feed = await agent.api.app.bsky.feed.getFeed(
        { feed: feedUriEven },
        { headers: await network.serviceHeaders(alice, gen.did) },
      )
      expect(feed.data.feed.map((item) => item.post.uri)).toEqual([
        sc.posts[sc.dids.alice][0].ref.uriStr,
        sc.posts[sc.dids.carol][0].ref.uriStr,
        sc.replies[sc.dids.carol][0].ref.uriStr,
      ])
      expect(forSnapshot(feed.data.feed)).toMatchSnapshot()
    })

    it('resolves basic feed contents without auth.', async () => {
      const feed = await agent.api.app.bsky.feed.getFeed({ feed: feedUriEven })
      expect(feed.data.feed.map((item) => item.post.uri)).toEqual([
        sc.posts[sc.dids.alice][0].ref.uriStr,
        sc.posts[sc.dids.carol][0].ref.uriStr,
        sc.replies[sc.dids.carol][0].ref.uriStr,
      ])
      expect(forSnapshot(feed.data.feed)).toMatchSnapshot()
    })

    it('paginates, handling replies and reposts.', async () => {
      const results = (results) => results.flatMap((res) => res.feed)
      const paginator = async (cursor?: string) => {
        const res = await agent.api.app.bsky.feed.getFeed(
          { feed: feedUriAll, cursor, limit: 2 },
          { headers: await network.serviceHeaders(alice, gen.did) },
        )
        return res.data
      }

      const paginatedAll: FeedViewPost[] = results(await paginateAll(paginator))

      // Unknown post uri is omitted
      expect(paginatedAll.map((item) => item.post.uri)).toEqual([
        sc.posts[sc.dids.alice][0].ref.uriStr,
        sc.posts[sc.dids.bob][0].ref.uriStr,
        sc.posts[sc.dids.carol][0].ref.uriStr,
        sc.replies[sc.dids.carol][0].ref.uriStr,
        sc.posts[sc.dids.dan][1].ref.uriStr,
      ])
      expect(forSnapshot(paginatedAll)).toMatchSnapshot()
    })

    it('paginates, handling feed not respecting limit.', async () => {
      const res = await agent.api.app.bsky.feed.getFeed(
        { feed: feedUriBadPagination, limit: 3 },
        { headers: await network.serviceHeaders(alice, gen.did) },
      )
      // refused to respect pagination limit, so it got cut short by appview but the cursor remains.
      expect(res.data.feed.length).toBeLessThanOrEqual(3)
      expect(parseInt(res.data.cursor || '', 10)).toBeGreaterThanOrEqual(3)
      expect(res.data.feed.map((item) => item.post.uri)).toEqual([
        sc.posts[sc.dids.alice][0].ref.uriStr,
        sc.posts[sc.dids.bob][0].ref.uriStr,
        sc.posts[sc.dids.carol][0].ref.uriStr,
      ])
    })

    it('fails on unknown feed.', async () => {
      const tryGetFeed = agent.api.app.bsky.feed.getFeed(
        { feed: feedUriOdd },
        { headers: await network.serviceHeaders(alice, gen.did) },
      )
      await expect(tryGetFeed).rejects.toMatchObject({
        error: 'UnknownFeed',
      })
    })

    it('resolves contents of taken-down feed.', async () => {
      const tryGetFeed = agent.api.app.bsky.feed.getFeed(
        { feed: feedUriPrime },
        { headers: await network.serviceHeaders(alice) },
      )
      await expect(tryGetFeed).resolves.toBeDefined()
    })

    it('receives proper auth details.', async () => {
      const feed = await agent.api.app.bsky.feed.getFeed(
        { feed: feedUriEven },
        { headers: await network.serviceHeaders(alice, gen.did) },
      )
      expect(feed.data['$auth']?.['aud']).toEqual(gen.did)
      expect(feed.data['$auth']?.['iss']).toEqual(alice)
    })

    it('passes through auth error from feed.', async () => {
      const tryGetFeed = agent.api.app.bsky.feed.getFeed({
        feed: feedUriNeedsAuth,
      })
      const err = await tryGetFeed.catch((err) => err)
      assert(err instanceof XRPCError)
      expect(err.status).toBe(401)
      expect(err.message).toBe('This feed requires auth')
    })

    it('provides timing info in server-timing header.', async () => {
      const result = await agent.api.app.bsky.feed.getFeed(
        { feed: feedUriEven },
        { headers: await network.serviceHeaders(alice, gen.did) },
      )
      expect(result.headers['server-timing']).toMatch(
        /^skele;dur=\d+, hydr;dur=\d+$/,
      )
    })

    it('returns an upstream failure error when the feed is down.', async () => {
      await gen.close() // @NOTE must be last test
      const tryGetFeed = agent.api.app.bsky.feed.getFeed(
        { feed: feedUriEven },
        { headers: await network.serviceHeaders(alice, gen.did) },
      )
      await expect(tryGetFeed).rejects.toThrow('feed unavailable')
    })
  })

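  // Skeleton handler shared by all test feeds. Each named feed filters the
  // same candidate list; the cursor is the last returned candidate's index
  // (plus one) within the filtered feed, as a decimal string.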
  const feedGenHandler =
    (
      feedName: 'even' | 'all' | 'prime' | 'bad-pagination' | 'needs-auth',
    ): SkeletonHandler =>
    async ({ req, params }) => {
      if (feedName === 'needs-auth' && !req.headers.authorization) {
        throw new AuthRequiredError('This feed requires auth')
      }
      const { limit, cursor } = params
      const candidates: SkeletonFeedPost[] = [
        { post: sc.posts[sc.dids.alice][0].ref.uriStr },
        { post: sc.posts[sc.dids.bob][0].ref.uriStr },
        { post: sc.posts[sc.dids.carol][0].ref.uriStr },
        { post: `at://did:plc:unknown/app.bsky.feed.post/${TID.nextStr()}` }, // Doesn't exist
        { post: sc.replies[sc.dids.carol][0].ref.uriStr }, // Reply
        // Repost (accurate)
        {
          post: sc.posts[sc.dids.dan][1].ref.uriStr,
          reason: {
            $type: 'app.bsky.feed.defs#skeletonReasonRepost',
            repost: sc.reposts[sc.dids.carol][0].uriStr,
          },
        },
        // Repost (inaccurate)
        {
          post: sc.posts[alice][1].ref.uriStr,
          reason: {
            $type: 'app.bsky.feed.defs#skeletonReasonRepost',
            repost: sc.reposts[sc.dids.carol][0].uriStr,
          },
        },
      ]
      const offset = cursor ? parseInt(cursor, 10) : 0
      const fullFeed = candidates.filter((_, i) => {
        if (feedName === 'even') {
          return i % 2 === 0
        }
        if (feedName === 'prime') {
          return [2, 3, 5, 7, 11, 13].includes(i)
        }
        return true
      })
      const feedResults =
        feedName === 'bad-pagination'
          ? fullFeed.slice(offset) // does not respect limit
          : fullFeed.slice(offset, offset + limit)
      const lastResult = feedResults.at(-1)
      return {
        encoding: 'application/json',
        body: {
          feed: feedResults,
          cursor: lastResult
            ? (fullFeed.indexOf(lastResult) + 1).toString()
            : undefined,
          $auth: jwtBody(req.headers.authorization), // for testing purposes
        },
      }
    }
})

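// Decodes a JWT payload without verifying the signature. Sufficient for these
// tests, which only inspect the iss/aud claims echoed back via $auth.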
const jwtBody = (authHeader?: string): Record<string, unknown> | undefined => {
  if (!authHeader?.startsWith('Bearer')) return undefined
  const jwt = authHeader.replace('Bearer ', '')
  const [, bodyb64] = jwt.split('.')
  const body = JSON.parse(Buffer.from(bodyb64, 'base64').toString())
  if (!body || typeof body !== 'object') return undefined
  return body
}