Fix seeder: origin mismatch + robust Dockerfile
Two root causes for seeding not working in production:

1. Origin mismatch: the seeder writes content IDs as http://localhost:7777/content/..., but the app was using window.location.origin (the user's actual browser URL) to query the relay. Introduced a VITE_CONTENT_ORIGIN env var, baked in at Docker build time, so both sides use the same origin.

2. Dockerfile.seed fragility: replaced --omit=dev plus a global tsx install with a cleaner approach that strips sharp from package.json (the only native dependency that fails to build on Alpine) and then runs a full npm install, ensuring tsx/esbuild and all applesauce transitive dependencies resolve correctly.

Also improved wait-for-relay to accept any HTTP response as "alive" (some relays return 4xx for a plain GET because they expect a WebSocket upgrade) and increased the maximum number of attempts.

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
@@ -20,6 +20,10 @@ ENV VITE_NOSTR_RELAYS=""
|
||||
# so auth and content use built-in mock/local data instead of timing out
|
||||
ENV VITE_USE_MOCK_DATA=true
|
||||
|
||||
# Content origin must match the seeder's ORIGIN so that relay queries find
|
||||
# the seeded data, regardless of how users access the app in their browser
|
||||
ENV VITE_CONTENT_ORIGIN=http://localhost:7777
|
||||
|
||||
# Build the application
|
||||
RUN npm run build
|
||||
|
||||
|
||||
@@ -7,12 +7,15 @@ FROM node:20-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install tsx globally (isolated from project deps, avoids lockfile conflicts)
|
||||
RUN npm install -g tsx
|
||||
|
||||
# Install only production deps (no sharp or other heavy native devDeps)
|
||||
COPY package*.json ./
|
||||
RUN npm ci --omit=dev
|
||||
# Copy package.json, strip sharp (native dep that needs build tools not on
|
||||
# Alpine), then do a full npm install so tsx/esbuild get their binaries and
|
||||
# all applesauce transitive deps resolve correctly.
|
||||
COPY package.json ./
|
||||
RUN node -e "\
|
||||
const p = JSON.parse(require('fs').readFileSync('package.json','utf8'));\
|
||||
if (p.devDependencies) delete p.devDependencies.sharp;\
|
||||
require('fs').writeFileSync('package.json', JSON.stringify(p, null, 2));\
|
||||
" && npm install --no-package-lock
|
||||
|
||||
# Copy only what the seed scripts need
|
||||
COPY scripts/ ./scripts/
|
||||
@@ -24,4 +27,4 @@ ENV RELAY_URL=ws://relay:8080
|
||||
ENV ORIGIN=http://localhost:7777
|
||||
|
||||
# set -ex: print each command before running, exit on first failure
|
||||
CMD ["sh", "-c", "set -ex && node scripts/wait-for-relay.mjs && tsx scripts/seed-profiles.ts && tsx scripts/seed-activity.ts && echo 'Seeding complete!'"]
|
||||
CMD ["sh", "-c", "set -ex && node scripts/wait-for-relay.mjs && npx tsx scripts/seed-profiles.ts && npx tsx scripts/seed-activity.ts && echo 'Seeding complete!'"]
|
||||
|
||||
@@ -9,7 +9,7 @@ import http from 'node:http'
|
||||
|
||||
const wsUrl = process.env.RELAY_URL || 'ws://localhost:7777'
|
||||
const httpUrl = wsUrl.replace('ws://', 'http://').replace('wss://', 'https://')
|
||||
const maxAttempts = 30
|
||||
const maxAttempts = 45
|
||||
const intervalMs = 2000
|
||||
|
||||
console.log(`Waiting for relay at ${httpUrl} ...`)
|
||||
@@ -18,9 +18,14 @@ for (let i = 1; i <= maxAttempts; i++) {
|
||||
const ok = await new Promise((resolve) => {
|
||||
const req = http.get(httpUrl, (res) => {
|
||||
res.resume() // drain response
|
||||
resolve(res.statusCode >= 200 && res.statusCode < 400)
|
||||
// Accept ANY HTTP response as "alive" — some relays return 4xx
|
||||
// for plain GET because they expect a WebSocket upgrade
|
||||
resolve(true)
|
||||
})
|
||||
req.on('error', (err) => {
|
||||
if (i === 1) console.log(` (${err.code || err.message})`)
|
||||
resolve(false)
|
||||
})
|
||||
req.on('error', () => resolve(false))
|
||||
req.setTimeout(3000, () => {
|
||||
req.destroy()
|
||||
resolve(false)
|
||||
|
||||
@@ -170,11 +170,10 @@ export function useContentDiscovery() {
|
||||
if (!effectiveAlgo) return contents
|
||||
|
||||
const statsMap = contentStatsMap.value
|
||||
const origin = window.location.origin
|
||||
|
||||
// Build entries array: [Content, stats] for each content item
|
||||
const withStats: [Content, ContentStats][] = contents.map((c) => {
|
||||
const externalId = `${origin}/content/${c.id}`
|
||||
const externalId = getExternalContentId(c.id)
|
||||
return [c, statsMap.get(externalId) || EMPTY_STATS]
|
||||
})
|
||||
|
||||
|
||||
@@ -8,12 +8,20 @@ import { eventStore, pool, appRelays, factory, APP_RELAYS } from '../lib/nostr'
|
||||
import { accountManager } from '../lib/accounts'
|
||||
import { useObservable } from './useObservable'
|
||||
|
||||
/**
|
||||
* The origin used for Nostr content identifiers.
|
||||
* In Docker/production the env var is baked in at build time so it matches
|
||||
* the origin the seeder uses, regardless of how users access the app.
|
||||
*/
|
||||
const CONTENT_ORIGIN: string =
|
||||
import.meta.env.VITE_CONTENT_ORIGIN || window.location.origin
|
||||
|
||||
/**
|
||||
* Build the external content identifier used in Nostr tags.
|
||||
* Matches indeehub convention: {origin}/content/{contentId}
|
||||
*/
|
||||
export function getExternalContentId(contentId: string): string {
|
||||
return `${window.location.origin}/content/${contentId}`
|
||||
return `${CONTENT_ORIGIN}/content/${contentId}`
|
||||
}
|
||||
|
||||
// --- Comment Tree Types ---
|
||||
|
||||
1
src/env.d.ts
vendored
1
src/env.d.ts
vendored
@@ -27,6 +27,7 @@ interface ImportMetaEnv {
|
||||
readonly VITE_ENABLE_LIGHTNING: string
|
||||
readonly VITE_ENABLE_RENTALS: string
|
||||
readonly VITE_USE_MOCK_DATA: string
|
||||
readonly VITE_CONTENT_ORIGIN: string
|
||||
}
|
||||
|
||||
interface ImportMeta {
|
||||
|
||||
Reference in New Issue
Block a user