Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit. Hold shift + click to select a range.
22060ff
copy Omicron's sort order for mock API items
charliepark Feb 5, 2026
f7b01d8
A few tweaks for datetime comparisons
charliepark Feb 5, 2026
9a664ef
locale-agnostic comparisons; handle invalid dates
charliepark Feb 5, 2026
942f882
refactor; handle different page tokens
charliepark Feb 5, 2026
bb63b6d
npm run fmt
charliepark Feb 5, 2026
c47a58e
normalizeTime util for tokens
charliepark Feb 5, 2026
5dc74d3
Improvement for missing time data
charliepark Feb 5, 2026
d53f306
Merge branch 'main' into sort_mock_api_items
charliepark Feb 6, 2026
f0d0c23
Use Remeda's sortBy to clean up ordering
charliepark Feb 6, 2026
c181dd3
npm run fmt
charliepark Feb 6, 2026
5b5199e
Update tests to use first item in list
charliepark Feb 6, 2026
c761321
Revert to edge case for snapshot-max-size
charliepark Feb 6, 2026
24d2a0d
Remove unnecessary comments
charliepark Feb 6, 2026
6e92ed2
tiebreaker
charliepark Feb 6, 2026
def619f
Merge main
charliepark Feb 18, 2026
f64e35b
A few util updates
charliepark Feb 18, 2026
070e518
Test refactors
charliepark Feb 18, 2026
0ce8b3c
PR review fixes
charliepark Feb 18, 2026
a89939a
Revert to working test
charliepark Feb 18, 2026
e07eac2
npm run fmt
charliepark Feb 18, 2026
434be21
Remeda is your friend
charliepark Feb 18, 2026
a5c5cc5
no need to spread items array
charliepark Feb 19, 2026
169fcbc
Another comment patch
charliepark Feb 20, 2026
0713623
use first image in e2e test helper when no string passed
charliepark Feb 20, 2026
c5367ff
use existing types
charliepark Feb 21, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 7 additions & 2 deletions app/forms/disk-create.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ import { Radio } from '~/ui/lib/Radio'
import { RadioGroup } from '~/ui/lib/RadioGroup'
import { Slash } from '~/ui/lib/Slash'
import { TipIcon } from '~/ui/lib/TipIcon'
import { ALL_ISH } from '~/util/consts'
import { toLocaleDateString } from '~/util/date'
import { docLinks } from '~/util/links'
import { diskSizeNearest10 } from '~/util/math'
Expand Down Expand Up @@ -127,7 +128,9 @@ export function CreateDiskSideModalForm({
)
const areImagesLoading = projectImages.isPending || siloImages.isPending

const snapshotsQuery = useQuery(q(api.snapshotList, { query: { project } }))
const snapshotsQuery = useQuery(
q(api.snapshotList, { query: { project, limit: ALL_ISH } })
)
const snapshots = snapshotsQuery.data?.items || []

// validate disk source size
Expand Down Expand Up @@ -417,7 +420,9 @@ const DiskNameFromId = ({ disk }: { disk: string }) => {

const SnapshotSelectField = ({ control }: { control: Control<DiskCreateForm> }) => {
const { project } = useProjectSelector()
const snapshotsQuery = useQuery(q(api.snapshotList, { query: { project } }))
const snapshotsQuery = useQuery(
q(api.snapshotList, { query: { project, limit: ALL_ISH } })
Copy link
Contributor Author

@charliepark charliepark Feb 18, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Our snapshots dropdown was getting cut off; we should think about a more robust fix in a future PR for very long snapshot lists

)

const snapshots = snapshotsQuery.data?.items || []
const diskSizeField = useController({ control, name: 'size' }).field
Expand Down
7 changes: 4 additions & 3 deletions mock-api/msw/db.ts
Original file line number Diff line number Diff line change
Expand Up @@ -68,14 +68,15 @@ export const resolvePoolSelector = (
| { pool: string; type: 'explicit' }
| { type: 'auto'; ip_version?: IpVersion | null }
| undefined,
poolType?: IpPoolType
poolType?: IpPoolType,
siloId: string = defaultSilo.id
) => {
if (poolSelector?.type === 'explicit') {
return lookup.ipPool({ pool: poolSelector.pool })
}

// For 'auto' type, find the default pool for the specified IP version and pool type
const silo = lookup.silo({ silo: defaultSilo.id })
const silo = lookup.silo({ silo: siloId })
const links = db.ipPoolSilos.filter((ips) => ips.silo_id === silo.id && ips.is_default)

// Filter candidate pools by both IP version and pool type
Expand Down Expand Up @@ -114,7 +115,7 @@ export const resolvePoolSelector = (
if (!link) {
const typeStr = poolType ? ` ${poolType}` : ''
const versionStr = poolSelector?.ip_version ? ` ${poolSelector.ip_version}` : ''
throw notFoundErr(`default${typeStr}${versionStr} pool for silo '${defaultSilo.id}'`)
throw notFoundErr(`default${typeStr}${versionStr} pool for silo '${siloId}'`)
}
return lookupById(db.ipPools, link.ip_pool_id)
}
Expand Down
11 changes: 6 additions & 5 deletions mock-api/msw/handlers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -305,10 +305,10 @@ export const handlers = makeHandlers({
return true // For mock purposes, just use first unicast pool
})
})
pool = poolWithIp || resolvePoolSelector(undefined, 'unicast')
pool = poolWithIp || resolvePoolSelector(undefined, 'unicast', project.silo_id)
} else {
// type === 'auto'
pool = resolvePoolSelector(addressAllocator.pool_selector, 'unicast')
pool = resolvePoolSelector(addressAllocator.pool_selector, 'unicast', project.silo_id)
ip = getIpFromPool(pool)
}

Expand Down Expand Up @@ -553,7 +553,7 @@ export const handlers = makeHandlers({
// which aren't quite as good as checking that there are actually IPs
// available, but they are good things to check
// Ephemeral IPs must use unicast pools
const pool = resolvePoolSelector(ip.pool_selector, 'unicast')
const pool = resolvePoolSelector(ip.pool_selector, 'unicast', project.silo_id)
getIpFromPool(pool)

// Validate that external IP version matches NIC's IP stack
Expand Down Expand Up @@ -694,7 +694,7 @@ export const handlers = makeHandlers({
floatingIp.instance_id = instanceId
} else if (ip.type === 'ephemeral') {
// Ephemeral IPs must use unicast pools
const pool = resolvePoolSelector(ip.pool_selector, 'unicast')
const pool = resolvePoolSelector(ip.pool_selector, 'unicast', project.silo_id)
const firstAvailableAddress = getIpFromPool(pool)

db.ephemeralIps.push({
Expand Down Expand Up @@ -876,8 +876,9 @@ export const handlers = makeHandlers({
},
instanceEphemeralIpAttach({ path, query: projectParams, body }) {
const instance = lookup.instance({ ...path, ...projectParams })
const instanceProject = lookup.project(projectParams)
// Ephemeral IPs must use unicast pools
const pool = resolvePoolSelector(body.pool_selector, 'unicast')
const pool = resolvePoolSelector(body.pool_selector, 'unicast', instanceProject.silo_id)
const ip = getIpFromPool(pool)

// Validate that external IP version matches primary NIC's IP stack
Expand Down
138 changes: 134 additions & 4 deletions mock-api/msw/util.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
*
* Copyright Oxide Computer Company
*/
import * as R from 'remeda'
import { describe, expect, it } from 'vitest'

import { FLEET_ID } from '@oxide/api'
Expand All @@ -20,12 +21,26 @@ describe('paginated', () => {
expect(page.next_page).toBeNull()
})

it('defaults to name_ascending for items with a name field', () => {
const items = [
{ id: 'z', name: 'zebra' },
{ id: 'a', name: 'antelope' },
{ id: 'm', name: 'moose' },
]
const page = paginated({}, items)
expect(page.items.map((i) => i.name)).toEqual(['antelope', 'moose', 'zebra'])
expect(page.next_page).toBeNull()
})

it('should return the first 100 items with no limit passed', () => {
const items = Array.from({ length: 200 }).map((_, i) => ({ id: 'i' + i }))
const page = paginated({}, items)
expect(page.items.length).toBe(100)
expect(page.items).toEqual(items.slice(0, 100))
expect(page.next_page).toBe('i100')
// Items are sorted by id lexicographically (matching Omicron's UUID sorting behavior)
// Use locale-agnostic comparison to match the implementation
const sortedItems = R.sortBy(items, (i) => i.id)
expect(page.items).toEqual(sortedItems.slice(0, 100))
expect(page.next_page).toBe(sortedItems[99].id)
})

it('should return page with null `next_page` if items equal page', () => {
Expand Down Expand Up @@ -59,16 +74,131 @@ describe('paginated', () => {
const page = paginated({ limit: 5 }, items)
expect(page.items.length).toBe(5)
expect(page.items).toEqual(items.slice(0, 5))
expect(page.next_page).toBe('f')
expect(page.next_page).toBe('e')
})

it('should return the second page when given a `page_token`', () => {
const items = [{ id: 'a' }, { id: 'b' }, { id: 'c' }, { id: 'd' }]
const page = paginated({ pageToken: 'b' }, items)
// token 'a' is exclusive: start after 'a'
const page = paginated({ pageToken: 'a' }, items)
expect(page.items.length).toBe(3)
expect(page.items).toEqual([{ id: 'b' }, { id: 'c' }, { id: 'd' }])
expect(page.next_page).toBeNull()
})

it('returns empty page for limit 0', () => {
const items = [{ id: 'a' }, { id: 'b' }]
const page = paginated({ limit: 0 }, items)
expect(page.items).toEqual([])
expect(page.next_page).toBeNull()
})

it('pages through id_ascending with no overlap and no gap', () => {
// Items a..j; next_page is the last item on each page (exclusive cursor per Dropshot)
const items = Array.from({ length: 10 }, (_, i) => ({
id: String.fromCharCode(97 + i),
}))
const p1 = paginated({ limit: 3 }, items)
expect(p1.items.map((i) => i.id)).toEqual(['a', 'b', 'c'])
expect(p1.next_page).toBe('c')

const p2 = paginated({ limit: 3, pageToken: p1.next_page }, items)
expect(p2.items.map((i) => i.id)).toEqual(['d', 'e', 'f'])
expect(p2.next_page).toBe('f')

const p3 = paginated({ limit: 3, pageToken: p2.next_page }, items)
expect(p3.items.map((i) => i.id)).toEqual(['g', 'h', 'i'])
expect(p3.next_page).toBe('i')

const p4 = paginated({ limit: 3, pageToken: p3.next_page }, items)
expect(p4.items.map((i) => i.id)).toEqual(['j'])
expect(p4.next_page).toBeNull()
})

it('sorts name_descending with id ascending as tiebreaker', () => {
const items = [
{ id: 'z', name: 'beta' },
{ id: 'a', name: 'alpha' },
{ id: 'b', name: 'alpha' }, // same name, id 'a' < 'b'
]
const page = paginated({ sortBy: 'name_descending' }, items)
// beta descends first, then alpha items sorted ascending by id
expect(page.items.map((i) => i.id)).toEqual(['z', 'a', 'b'])
})

it('pages through name_descending with no overlap and no gap', () => {
const items = [
{ id: 'd', name: 'zest' },
{ id: 'c', name: 'yak' },
{ id: 'b', name: 'xerox' },
{ id: 'a', name: 'walrus' },
]
const p1 = paginated({ sortBy: 'name_descending', limit: 2 }, items)
expect(p1.items.map((i) => i.name)).toEqual(['zest', 'yak'])
// next_page token format is "name|id" — last item on page, not first of next
expect(p1.next_page).toBe('yak|c')

const p2 = paginated(
{ sortBy: 'name_descending', limit: 2, pageToken: p1.next_page },
items
)
expect(p2.items.map((i) => i.name)).toEqual(['xerox', 'walrus'])
expect(p2.next_page).toBeNull()
})

it('sorts time_and_id_ascending with id tiebreaker', () => {
const t1 = '2024-01-01T00:00:00.000Z'
const t2 = '2024-02-01T00:00:00.000Z'
const items = [
{ id: 'b', time_created: t2 },
{ id: 'c', time_created: t1 }, // same time as 'a', id 'a' < 'c'
{ id: 'a', time_created: t1 },
]
const page = paginated({ sortBy: 'time_and_id_ascending' }, items)
expect(page.items.map((i) => i.id)).toEqual(['a', 'c', 'b'])
})

it('pages through time_and_id_ascending with no overlap and no gap', () => {
const t1 = '2024-01-01T00:00:00.000Z'
const t2 = '2024-02-01T00:00:00.000Z'
const items = [
{ id: 'b', time_created: t2 },
{ id: 'c', time_created: t1 },
{ id: 'a', time_created: t1 },
]
const p1 = paginated({ sortBy: 'time_and_id_ascending', limit: 2 }, items)
expect(p1.items.map((i) => i.id)).toEqual(['a', 'c'])
// next_page token format is "timestamp|id" — last item on page, not first of next
expect(p1.next_page).toBe(`${t1}|c`)

const p2 = paginated(
{ sortBy: 'time_and_id_ascending', limit: 2, pageToken: p1.next_page },
items
)
expect(p2.items.map((i) => i.id)).toEqual(['b'])
expect(p2.next_page).toBeNull()
})

it('pages through time_and_id_descending with no overlap and no gap', () => {
const t1 = '2024-01-01T00:00:00.000Z'
const t2 = '2024-02-01T00:00:00.000Z'
const items = [
{ id: 'b', time_created: t2 },
{ id: 'c', time_created: t1 },
{ id: 'a', time_created: t1 },
]
// Descending by time: t2 first, then t1 items by id ascending
const p1 = paginated({ sortBy: 'time_and_id_descending', limit: 2 }, items)
expect(p1.items.map((i) => i.id)).toEqual(['b', 'a'])
expect(p1.next_page).toBe(`${t1}|a`)

const p2 = paginated(
{ sortBy: 'time_and_id_descending', limit: 2, pageToken: p1.next_page },
items
)
expect(p2.items.map((i) => i.id)).toEqual(['c'])
expect(p2.next_page).toBeNull()
})
})

describe('userHasRole', () => {
Expand Down
Loading
Loading