Enhance Docker and backend configurations for improved deployment

- Updated docker-compose.yml to include environment variable support for services, enhancing flexibility in configuration.
- Refactored Dockerfile to utilize build arguments for VITE environment variables, allowing for better customization during builds.
- Improved Nginx configuration to handle larger video uploads by increasing client_max_body_size to 5GB.
- Enhanced backend Dockerfile to include wget for health checks and improved startup logging for database migrations.
- Added validation for critical environment variables in the backend to ensure necessary configurations are present before application startup.
- Updated content streaming logic to support direct HLS URL construction, improving streaming reliability and user experience.
- Refactored various components and services to streamline access checks and improve error handling during content playback.
This commit is contained in:
Dorian
2026-02-13 12:35:03 +00:00
parent 7e9a35a963
commit 3ca43b62e4
23 changed files with 799 additions and 244 deletions

107
.env.portainer Normal file
View File

@@ -0,0 +1,107 @@
# ═══════════════════════════════════════════════════════════════
# IndeeHub — Portainer Stack Environment Variables
# ═══════════════════════════════════════════════════════════════
#
# Upload this file in Portainer: Stacks → Add Stack → "Load variables
# from .env file" button (bottom of the environment variables section).
#
# Secrets below were auto-generated. Review and change DOMAIN /
# FRONTEND_URL to match your server, then deploy.
#
# SECURITY NOTE (review): this file contains live-looking secrets
# (DB/MinIO passwords, BTCPay API key, JWT/AES secrets, admin key).
# If this file is checked into version control, rotate EVERY secret
# before deploying — anyone with repository access can read them.
#
# For local dev: docker compose -f docker-compose.dev.yml up
# ═══════════════════════════════════════════════════════════════
# ── Networking ────────────────────────────────────────────────
# CHANGE THESE to your actual domain
DOMAIN=indee.tx1138.com
FRONTEND_URL=https://indee.tx1138.com
APP_PORT=7777
# ── PostgreSQL ────────────────────────────────────────────────
# NOTE(review): "indeedhub" (extra "d") looks like a typo of "indeehub",
# but it is used consistently across this stack — do not "fix" it alone.
POSTGRES_USER=indeedhub
POSTGRES_PASSWORD=V1kKEPzvKuFqENfQQQKGMnXSgzS3HCY3
POSTGRES_DB=indeedhub
# ── Redis ─────────────────────────────────────────────────────
REDIS_PASSWORD=wWNOWmlYPKrFDMSWj1g7aEpX
# ── MinIO (self-hosted file storage) ──────────────────────────
# MinIO runs as a Docker container in the stack. No AWS needed.
# It stores uploaded videos, poster images, and HLS segments.
MINIO_ROOT_USER=indeedhub-minio
MINIO_ROOT_PASSWORD=jZWDalDHlaJ9JmywL05h8alf
MINIO_CONSOLE_PORT=9001
# ── MinIO Connection (backend uses S3 protocol to talk to MinIO)
# These MUST match the MinIO credentials above.
# NOTE(review): the backend's startup validation requires AWS_ACCESS_KEY,
# AWS_SECRET_KEY, S3_PRIVATE_BUCKET_NAME and S3_PUBLIC_BUCKET_NAME —
# presumably docker-compose maps the names below onto those; verify the
# stack file actually does, or the API will refuse to start.
S3_ENDPOINT=http://minio:9000
AWS_REGION=us-east-1
S3_ACCESS_KEY=indeedhub-minio
S3_SECRET_KEY=jZWDalDHlaJ9JmywL05h8alf
S3_PRIVATE_BUCKET=indeedhub-private
S3_PUBLIC_BUCKET=indeedhub-public
# CHANGE to your domain — files are served via the nginx /storage/ proxy
S3_PUBLIC_BUCKET_URL=https://indee.tx1138.com/storage/
# ── CloudFront (not needed — MinIO serves files directly) ────
CLOUDFRONT_PRIVATE_KEY=
CLOUDFRONT_KEY_PAIR_ID=
CLOUDFRONT_DISTRIBUTION_URL=
# ── BTCPay Server (Bitcoin/Lightning Payments) ───────────────
# All four BTCPay variables must be set together, or all left empty to
# disable payments — the backend rejects partial BTCPay configuration.
BTCPAY_URL=https://shop.tx1138.com
BTCPAY_API_KEY=34cf1a41aba967f24f374c247fdb67e81fc303ff
BTCPAY_STORE_ID=72jsCiD7m2U8dDJRU5W5STBSrRScLbxfiSaPBhLXukem
BTCPAY_WEBHOOK_SECRET=36YdN12LB9vhKAi9bhbWe5wT4oa3
BTCPAY_ROUTE_HINTS=false
# ── Security Secrets (auto-generated) ────────────────────────
# Regenerate with: openssl rand -hex 32
NOSTR_JWT_SECRET=9ea6d10ae3b74cfe90f5d5dc0494fe20bbc06e0f2cef43649cced6ce8fbaf66d
NOSTR_JWT_EXPIRES_IN=7d
AES_MASTER_SECRET=639a0a363d9ab6d31407ba4ec874b8ab
# ── SMTP / Email (leave empty to disable) ────────────────────
SMTP_HOST=
SMTP_PORT=587
SMTP_USER=
SMTP_PASS=
MAIL_FROM=noreply@tx1138.com
# ── SendGrid (alternative to SMTP, leave empty if not using) ─
SENDGRID_API_KEY=
SENDGRID_SENDER=
# ── Cognito (not needed — using Nostr auth) ──────────────────
COGNITO_USER_POOL_ID=
COGNITO_CLIENT_ID=
# ── Flash Subscription Secrets (leave empty if not using) ────
FLASH_JWT_SECRET_ENTHUSIAST=
FLASH_JWT_SECRET_FILM_BUFF=
FLASH_JWT_SECRET_CINEPHILE=
FLASH_JWT_SECRET_RSS_ADDON=
FLASH_JWT_SECRET_VERIFICATION_ADDON=
# ── Transcoding API (leave empty — uses built-in FFmpeg) ─────
TRANSCODING_API_KEY=
TRANSCODING_API_URL=
# ── Analytics & Monitoring (leave empty to disable) ──────────
POSTHOG_API_KEY=
SENTRY_ENVIRONMENT=production
# ── DRM (not needed — using AES-128 HLS encryption) ─────────
DRM_SECRET_NAME=
PRIVATE_AUTH_CERTIFICATE_KEY_ID=
# ── Podping (leave empty to disable) ─────────────────────────
PODPING_URL=
PODPING_KEY=
PODPING_USER_AGENT=
# ── Admin Dashboard ───────────────────────────────────────────
ADMIN_API_KEY=c45fc7deda784dafadf0ce6b98d808e49b84892a
# ── Partner Content (leave empty if not using) ────────────────
PARTNER_API_BASE_URL=
PARTNER_API_KEY=

View File

@@ -12,20 +12,20 @@ RUN npm ci
# Copy source code # Copy source code
COPY . . COPY . .
# Clear VITE_NOSTR_RELAYS so the app auto-detects the relay # ── Build-time configuration via ARGs ────────────────────────
# via the /relay nginx proxy at runtime (instead of hardcoding localhost) # These are baked into the static JS bundle at build time.
ENV VITE_NOSTR_RELAYS="" # Override them with docker-compose build.args or --build-arg.
ARG VITE_NOSTR_RELAYS=""
ARG VITE_USE_MOCK_DATA=false
ARG VITE_CONTENT_ORIGIN=
ARG VITE_INDEEHUB_API_URL=/api
ARG VITE_INDEEHUB_CDN_URL=/storage
# Enable mock data mode as default — set to false to use the backend API ENV VITE_NOSTR_RELAYS=${VITE_NOSTR_RELAYS}
ENV VITE_USE_MOCK_DATA=true ENV VITE_USE_MOCK_DATA=${VITE_USE_MOCK_DATA}
ENV VITE_CONTENT_ORIGIN=${VITE_CONTENT_ORIGIN}
# Content origin must match the seeder's ORIGIN so that relay queries find ENV VITE_INDEEHUB_API_URL=${VITE_INDEEHUB_API_URL}
# the seeded data, regardless of how users access the app in their browser ENV VITE_INDEEHUB_CDN_URL=${VITE_INDEEHUB_CDN_URL}
ENV VITE_CONTENT_ORIGIN=http://localhost:7777
# IndeeHub self-hosted backend API (via nginx /api proxy)
ENV VITE_INDEEHUB_API_URL=/api
ENV VITE_INDEEHUB_CDN_URL=/storage
# Build the application # Build the application
RUN npm run build RUN npm run build

13
backend/.dockerignore Normal file
View File

@@ -0,0 +1,13 @@
# Paths excluded from the Docker build context — keeps images small and
# prevents host artifacts (dependencies, builds, secrets, editor files)
# from leaking into the backend image.
node_modules
dist
.git
# Never copy env files into the image; secrets come from the runtime env.
.env
.env.*
npm-debug.log*
.DS_Store
.vscode
.idea
.cursor
*.swp
*.swo
*~

View File

@@ -14,6 +14,9 @@ RUN npm prune --production
FROM node:20-alpine AS production FROM node:20-alpine AS production
WORKDIR /app WORKDIR /app
# wget is needed for Docker/Portainer health checks
RUN apk add --no-cache wget
COPY --from=builder /app/package.json ./package.json COPY --from=builder /app/package.json ./package.json
COPY --from=builder /app/package-lock.json ./package-lock.json COPY --from=builder /app/package-lock.json ./package-lock.json
COPY --from=builder /app/dist ./dist COPY --from=builder /app/dist ./dist
@@ -21,5 +24,8 @@ COPY --from=builder /app/node_modules ./node_modules
EXPOSE 4000 EXPOSE 4000
# Run TypeORM migrations on startup, then start the API ENV NODE_OPTIONS="--max-old-space-size=1024"
CMD ["sh", "-c", "npx typeorm migration:run -d dist/database/ormconfig.js 2>/dev/null; export NODE_OPTIONS='--max-old-space-size=1024' && npm run start:prod"]
# Run TypeORM migrations on startup, then start the API.
# Migration errors are logged (not suppressed) so failed deploys are visible.
CMD ["sh", "-c", "echo 'Running database migrations...' && npx typeorm migration:run -d dist/database/ormconfig.js && echo 'Migrations complete.' && npm run start:prod"]

View File

@@ -10,6 +10,7 @@ import { NostrAuthModule } from 'src/nostr-auth/nostr-auth.module';
import { JwtAuthGuard } from './guards/jwt.guard'; import { JwtAuthGuard } from './guards/jwt.guard';
import { TokenAuthGuard } from './guards/token.guard'; import { TokenAuthGuard } from './guards/token.guard';
import { HybridAuthGuard } from './guards/hybrid-auth.guard'; import { HybridAuthGuard } from './guards/hybrid-auth.guard';
import { OptionalHybridAuthGuard } from './guards/optional-hybrid-auth.guard';
import { NostrSessionService } from './nostr-session.service'; import { NostrSessionService } from './nostr-session.service';
import { NostrSessionJwtGuard } from './guards/nostr-session-jwt.guard'; import { NostrSessionJwtGuard } from './guards/nostr-session-jwt.guard';
import { UsersModule } from 'src/users/users.module'; import { UsersModule } from 'src/users/users.module';
@@ -30,6 +31,7 @@ import { UsersModule } from 'src/users/users.module';
JwtAuthGuard, JwtAuthGuard,
TokenAuthGuard, TokenAuthGuard,
HybridAuthGuard, HybridAuthGuard,
OptionalHybridAuthGuard,
NostrSessionService, NostrSessionService,
NostrSessionJwtGuard, NostrSessionJwtGuard,
], ],
@@ -39,6 +41,7 @@ import { UsersModule } from 'src/users/users.module';
JwtAuthGuard, JwtAuthGuard,
TokenAuthGuard, TokenAuthGuard,
HybridAuthGuard, HybridAuthGuard,
OptionalHybridAuthGuard,
NostrSessionService, NostrSessionService,
NostrSessionJwtGuard, NostrSessionJwtGuard,
], ],

View File

@@ -0,0 +1,30 @@
import { CanActivate, ExecutionContext, Injectable } from '@nestjs/common';
import { HybridAuthGuard } from './hybrid-auth.guard';

/**
 * Optional version of HybridAuthGuard.
 *
 * Tries all authentication strategies (Nostr, NostrSessionJwt, Jwt).
 * If any succeeds, `request.user` is populated as normal.
 * If all fail, the request proceeds anyway with `request.user = undefined`.
 *
 * Use this for endpoints that should work for both authenticated and
 * anonymous users (e.g. streaming free content without login).
 */
@Injectable()
export class OptionalHybridAuthGuard implements CanActivate {
  constructor(private readonly hybridAuthGuard: HybridAuthGuard) {}

  /**
   * Always returns true — authentication is attempted but never required.
   * Guarantees `request.user` is explicitly `undefined` whenever the
   * underlying guard did not authenticate the request, so downstream
   * code can reliably check `req.user` to tell anonymous from logged-in.
   */
  async canActivate(context: ExecutionContext): Promise<boolean> {
    try {
      // Bug fix: a CanActivate may signal failure by RESOLVING TO FALSE
      // rather than throwing. The previous version only handled the
      // throwing path, leaving request.user in an unknown state when the
      // guard returned false. Handle both failure modes uniformly.
      const authenticated = await this.hybridAuthGuard.canActivate(context);
      if (!authenticated) {
        context.switchToHttp().getRequest().user = undefined;
      }
    } catch {
      // Auth failed — that's OK for optional auth.
      // Ensure request.user is explicitly undefined so downstream
      // code can check whether the user is authenticated.
      context.switchToHttp().getRequest().user = undefined;
    }
    return true;
  }
}

View File

@@ -0,0 +1,73 @@
import { Logger } from '@nestjs/common';

/**
 * Validates that all required environment variables are present at startup.
 * Fails fast with a clear error message listing every missing variable,
 * so misconfigured Portainer deployments are caught immediately.
 *
 * BTCPay configuration is all-or-nothing: leaving every BTCPay variable
 * empty disables payments, but a partial set is treated as an error.
 *
 * @throws Error when one or more required variables are unset.
 */
export function validateEnvironment(): void {
  const logger = new Logger('EnvironmentValidation');

  // Variables the backend cannot operate without.
  const requiredKeys: string[] = [
    'ENVIRONMENT',
    'DATABASE_HOST',
    'DATABASE_PORT',
    'DATABASE_USER',
    'DATABASE_PASSWORD',
    'DATABASE_NAME',
    'QUEUE_HOST',
    'QUEUE_PORT',
    'AWS_ACCESS_KEY',
    'AWS_SECRET_KEY',
    'S3_PRIVATE_BUCKET_NAME',
    'S3_PUBLIC_BUCKET_NAME',
    'NOSTR_JWT_SECRET',
    'AES_MASTER_SECRET',
  ];

  // BTCPay is required for payment processing.
  const btcpayKeys = [
    'BTCPAY_URL',
    'BTCPAY_API_KEY',
    'BTCPAY_STORE_ID',
    'BTCPAY_WEBHOOK_SECRET',
  ];

  // An unset OR empty-string variable counts as missing.
  const unset = (keys: string[]): string[] =>
    keys.filter((key) => !process.env[key]);

  const missing = unset(requiredKeys);
  const missingBtcpay = unset(btcpayKeys);

  // Some BTCPay vars set but not all — likely a partial config error.
  const partialBtcpay =
    missingBtcpay.length > 0 && missingBtcpay.length < btcpayKeys.length;
  if (partialBtcpay) {
    logger.error(
      `Partial BTCPay configuration detected. Missing: ${missingBtcpay.join(', ')}. ` +
        'Set all BTCPay variables or leave all empty to disable payments.',
    );
    missing.push(...missingBtcpay);
  }

  if (missing.length > 0) {
    const message =
      `Missing required environment variables:\n` +
      missing.map((key) => ` - ${key}`).join('\n') +
      `\n\nSet these in Portainer Stack environment variables before deploying.`;
    logger.error(message);
    throw new Error(message);
  }

  // Warn (but do not fail) when a secret still carries a known insecure
  // placeholder value from the template configuration.
  const insecureDefaults: Record<string, string> = {
    NOSTR_JWT_SECRET: 'change-this-to-a-long-random-secret-in-production',
    AES_MASTER_SECRET: 'change-this-32-byte-hex-secret-00',
  };
  for (const [key, insecureValue] of Object.entries(insecureDefaults)) {
    if (process.env[key] === insecureValue) {
      logger.warn(
        `${key} is using an insecure default value. Generate a secure secret with: openssl rand -hex 32`,
      );
    }
  }

  logger.log('Environment validation passed');
}

View File

@@ -9,8 +9,11 @@ import {
UseInterceptors, UseInterceptors,
Post, Post,
Logger, Logger,
Req,
UnauthorizedException,
} from '@nestjs/common'; } from '@nestjs/common';
import { HybridAuthGuard } from 'src/auth/guards/hybrid-auth.guard'; import { HybridAuthGuard } from 'src/auth/guards/hybrid-auth.guard';
import { OptionalHybridAuthGuard } from 'src/auth/guards/optional-hybrid-auth.guard';
import { Subscriptions } from 'src/subscriptions/decorators/subscriptions.decorator'; import { Subscriptions } from 'src/subscriptions/decorators/subscriptions.decorator';
import { SubscriptionsGuard } from 'src/subscriptions/guards/subscription.guard'; import { SubscriptionsGuard } from 'src/subscriptions/guards/subscription.guard';
import { ContentsService } from './contents.service'; import { ContentsService } from './contents.service';
@@ -69,10 +72,25 @@ export class ContentsController {
} }
@Get(':id/stream') @Get(':id/stream')
@UseGuards(HybridAuthGuard, SubscriptionsGuard) @UseGuards(OptionalHybridAuthGuard)
@Subscriptions(['enthusiast', 'film-buff', 'cinephile']) async stream(
async stream(@Param('id') id: string) { @Param('id') id: string,
@Req() req: any,
) {
const content = await this.contentsService.stream(id); const content = await this.contentsService.stream(id);
// Determine if the content is free (no payment required)
const projectPrice = Number(content.project?.rentalPrice ?? 0);
const contentPrice = Number(content.rentalPrice ?? 0);
const isFreeContent = projectPrice <= 0 && contentPrice <= 0;
// Paid content requires a valid authenticated user
if (!isFreeContent && !req.user) {
throw new UnauthorizedException(
'Authentication required for paid content',
);
}
const dto = new StreamContentDTO(content); const dto = new StreamContentDTO(content);
// Check if the HLS manifest actually exists in the public bucket. // Check if the HLS manifest actually exists in the public bucket.
@@ -90,7 +108,11 @@ export class ContentsController {
); );
if (hlsExists) { if (hlsExists) {
return dto; // Return the public S3 URL for the HLS manifest so the player
// can fetch it directly from MinIO/S3 (avoids proxying through
// the API and prevents CORS issues with relative segment paths).
const publicUrl = getPublicS3Url(outputKey);
return { ...dto, file: publicUrl };
} }
// HLS not available — serve a presigned URL for the original file // HLS not available — serve a presigned URL for the original file

View File

@@ -3,15 +3,18 @@ import { AppModule } from './app.module';
import { DocumentBuilder, SwaggerModule } from '@nestjs/swagger'; import { DocumentBuilder, SwaggerModule } from '@nestjs/swagger';
import { ValidationPipe } from '@nestjs/common'; import { ValidationPipe } from '@nestjs/common';
import { useContainer } from 'class-validator'; import { useContainer } from 'class-validator';
// Sentry instrumentation removed (see instrument.ts)
import * as express from 'express'; import * as express from 'express';
import { import {
ExpressAdapter, ExpressAdapter,
NestExpressApplication, NestExpressApplication,
} from '@nestjs/platform-express'; } from '@nestjs/platform-express';
import { RawBodyRequest } from './types/raw-body-request'; import { RawBodyRequest } from './types/raw-body-request';
import { validateEnvironment } from './common/validate-env';
async function bootstrap() { async function bootstrap() {
// Fail fast if critical env vars are missing
validateEnvironment();
const server = express(); const server = express();
const captureRawBody = ( const captureRawBody = (
request: RawBodyRequest, request: RawBodyRequest,
@@ -46,7 +49,10 @@ async function bootstrap() {
useContainer(app.select(AppModule), { fallbackOnErrors: true }); useContainer(app.select(AppModule), { fallbackOnErrors: true });
if (process.env.ENVIRONMENT === 'development') { if (
process.env.ENVIRONMENT === 'development' ||
process.env.ENVIRONMENT === 'local'
) {
const swagConfig = new DocumentBuilder() const swagConfig = new DocumentBuilder()
.setTitle('IndeeHub API') .setTitle('IndeeHub API')
.setDescription('This is the API for the IndeeHub application') .setDescription('This is the API for the IndeeHub application')
@@ -61,19 +67,29 @@ async function bootstrap() {
app.useGlobalPipes(new ValidationPipe({ whitelist: true, transform: true })); app.useGlobalPipes(new ValidationPipe({ whitelist: true, transform: true }));
if (process.env.ENVIRONMENT === 'production') { if (process.env.ENVIRONMENT === 'production') {
// Build CORS origin list from FRONTEND_URL + known domains
const origins: string[] = [
'https://indeehub.studio',
'https://www.indeehub.studio',
'https://app.indeehub.studio',
'https://bff.indeehub.studio',
];
if (process.env.FRONTEND_URL) {
origins.push(process.env.FRONTEND_URL);
}
if (process.env.DOMAIN) {
origins.push(`https://${process.env.DOMAIN}`);
}
app.enableCors({ app.enableCors({
origin: [ origin: [...new Set(origins)],
'https://indeehub.studio',
'https://www.indeehub.studio',
'https://app.indeehub.studio',
'https://bff.indeehub.studio',
'https://indeehub.retool.com',
'https://www.indeehub.retool.com',
],
methods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'], methods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],
credentials: true, credentials: true,
}); });
} else app.enableCors(); } else {
app.enableCors();
}
await app.listen(process.env.PORT || 4000); await app.listen(process.env.PORT || 4000);
} }

View File

@@ -22,6 +22,7 @@ export class BaseProjectDTO {
synopsis?: string; synopsis?: string;
trailer?: string; trailer?: string;
poster?: string; poster?: string;
streamingUrl?: string;
status: Status; status: Status;
type: Type; type: Type;
format?: Format; format?: Format;
@@ -58,11 +59,14 @@ export class BaseProjectDTO {
} else { } else {
// Pick the best content for the film slot. When a project has // Pick the best content for the film slot. When a project has
// multiple content rows (e.g. an auto-created placeholder plus // multiple content rows (e.g. an auto-created placeholder plus
// the real upload), prefer the one with a rental price set or // the real upload), prefer completed content with a file.
// a file uploaded rather than blindly taking contents[0].
const best = project.contents?.length const best = project.contents?.length
? [...project.contents].sort((a, b) => { ? [...project.contents].sort((a, b) => {
// Prefer content with a file // Prefer completed content first
const aCompleted = a.status === 'completed' ? 1 : 0;
const bCompleted = b.status === 'completed' ? 1 : 0;
if (bCompleted !== aCompleted) return bCompleted - aCompleted;
// Then prefer content with a file
const aFile = a.file ? 1 : 0; const aFile = a.file ? 1 : 0;
const bFile = b.file ? 1 : 0; const bFile = b.file ? 1 : 0;
if (bFile !== aFile) return bFile - aFile; if (bFile !== aFile) return bFile - aFile;
@@ -88,7 +92,20 @@ export class BaseProjectDTO {
} }
if (project.poster) { if (project.poster) {
this.poster = getPublicS3Url(project.poster); // External URLs and local static paths (starting with / or http)
// should not be transformed through the S3 bucket URL
if (
project.poster.startsWith('http') ||
project.poster.startsWith('/')
) {
this.poster = project.poster;
} else {
this.poster = getPublicS3Url(project.poster);
}
}
if (project.streamingUrl) {
this.streamingUrl = project.streamingUrl;
} }
this.screenings = project.screenings this.screenings = project.screenings

View File

@@ -171,6 +171,9 @@ export class ProjectsService {
'contents.poster', 'contents.poster',
]); ]);
// Always load the genre so the DTO can include it in the response
projectsQuery.leftJoinAndSelect('project.genre', 'genre');
if (query.status) { if (query.status) {
if (query.status === 'published') { if (query.status === 'published') {
const completed = this.contentsService.getCompletedProjectsSubquery(); const completed = this.contentsService.getCompletedProjectsSubquery();
@@ -185,7 +188,7 @@ export class ProjectsService {
if (query.search) { if (query.search) {
projectsQuery.leftJoin('project.projectSubgenres', 'projectSubgenres'); projectsQuery.leftJoin('project.projectSubgenres', 'projectSubgenres');
projectsQuery.leftJoin('projectSubgenres.subgenre', 'subgenre'); projectsQuery.leftJoin('projectSubgenres.subgenre', 'subgenre');
projectsQuery.leftJoin('project.genre', 'genre'); // genre already joined above via leftJoinAndSelect
projectsQuery.leftJoin('contents.cast', 'castMembers'); projectsQuery.leftJoin('contents.cast', 'castMembers');
projectsQuery.leftJoin('castMembers.filmmaker', 'castFilmmaker'); projectsQuery.leftJoin('castMembers.filmmaker', 'castFilmmaker');

View File

@@ -4,9 +4,11 @@
* Populates the PostgreSQL database with: * Populates the PostgreSQL database with:
* 1. Genres (Documentary, Drama, etc.) * 1. Genres (Documentary, Drama, etc.)
* 2. Test users with Nostr pubkeys and active subscriptions * 2. Test users with Nostr pubkeys and active subscriptions
* 3. IndeeHub films (native delivery mode) * 3. IndeeHub films (native delivery mode, free)
* 4. TopDoc films (native delivery mode, YouTube streaming URLs) *
* 5. Projects and contents for both film sets * TopDoc documentary films are NOT seeded — they live in the frontend
* mock data (src/data/topDocFilms.ts) and appear only when the user
* switches to the "TopDoc Films" content source.
* *
* Run: node dist/scripts/seed-content.js * Run: node dist/scripts/seed-content.js
* Requires: DATABASE_HOST, DATABASE_PORT, DATABASE_USER, etc. in env * Requires: DATABASE_HOST, DATABASE_PORT, DATABASE_USER, etc. in env
@@ -24,7 +26,6 @@ const client = new Client({
}); });
// ── Test Users ──────────────────────────────────────────────── // ── Test Users ────────────────────────────────────────────────
// Using the same dev personas from the frontend seed
const testUsers = [ const testUsers = [
{ {
id: randomUUID(), id: randomUUID(),
@@ -70,7 +71,6 @@ const indeeHubFilms = [
'A groundbreaking documentary exploring the intersection of faith, finance, and the future of money through the lens of Bitcoin.', 'A groundbreaking documentary exploring the intersection of faith, finance, and the future of money through the lens of Bitcoin.',
poster: '/images/films/posters/god-bless-bitcoin.webp', poster: '/images/films/posters/god-bless-bitcoin.webp',
genre: 'Documentary', genre: 'Documentary',
categories: ['Documentary', 'Bitcoin', 'Religion'],
deliveryMode: 'native', deliveryMode: 'native',
}, },
{ {
@@ -80,7 +80,6 @@ const indeeHubFilms = [
'A compelling narrative exploring the emotional weight of our past.', 'A compelling narrative exploring the emotional weight of our past.',
poster: '/images/films/posters/thethingswecarry.webp', poster: '/images/films/posters/thethingswecarry.webp',
genre: 'Drama', genre: 'Drama',
categories: ['Drama'],
deliveryMode: 'native', deliveryMode: 'native',
}, },
{ {
@@ -89,7 +88,6 @@ const indeeHubFilms = [
synopsis: 'An intense confrontation that tests the limits of human resolve.', synopsis: 'An intense confrontation that tests the limits of human resolve.',
poster: '/images/films/posters/duel.png', poster: '/images/films/posters/duel.png',
genre: 'Action', genre: 'Action',
categories: ['Drama', 'Action'],
deliveryMode: 'native', deliveryMode: 'native',
}, },
{ {
@@ -100,66 +98,6 @@ const indeeHubFilms = [
poster: poster:
'/images/films/posters/2b0d7349-c010-47a0-b584-49e1bf86ab2f.png', '/images/films/posters/2b0d7349-c010-47a0-b584-49e1bf86ab2f.png',
genre: 'Documentary', genre: 'Documentary',
categories: ['Documentary', 'Finance', 'Bitcoin'],
deliveryMode: 'native',
},
];
// ── TopDoc Films ──────────────────────────────────────────────
const topDocFilms = [
{
id: 'tdf-god-bless-bitcoin',
title: 'God Bless Bitcoin',
synopsis:
'Exploring the intersection of faith and Bitcoin.',
poster: '/images/films/posters/topdoc/god-bless-bitcoin.jpg',
streamingUrl: 'https://www.youtube.com/embed/3XEuqixD2Zg',
genre: 'Documentary',
categories: ['Documentary', 'Bitcoin'],
deliveryMode: 'native',
},
{
id: 'tdf-bitcoin-end-of-money',
title: 'Bitcoin: The End of Money as We Know It',
synopsis:
'Tracing the history of money from barter to Bitcoin.',
poster: '/images/films/posters/topdoc/bitcoin-end-of-money.jpg',
streamingUrl: 'https://www.youtube.com/embed/zpNlG3VtcBM',
genre: 'Documentary',
categories: ['Documentary', 'Bitcoin', 'Economics'],
deliveryMode: 'native',
},
{
id: 'tdf-bitcoin-beyond-bubble',
title: 'Bitcoin: Beyond the Bubble',
synopsis:
'An accessible explainer tracing currency evolution.',
poster: '/images/films/posters/topdoc/bitcoin-beyond-bubble.jpg',
streamingUrl: 'https://www.youtube.com/embed/URrmfEu0cZ8',
genre: 'Documentary',
categories: ['Documentary', 'Bitcoin', 'Economics'],
deliveryMode: 'native',
},
{
id: 'tdf-bitcoin-gospel',
title: 'The Bitcoin Gospel',
synopsis:
'The true believers argue Bitcoin is a gamechanger for the global economy.',
poster: '/images/films/posters/topdoc/bitcoin-gospel.jpg',
streamingUrl: 'https://www.youtube.com/embed/2I6dXRK9oJo',
genre: 'Documentary',
categories: ['Documentary', 'Bitcoin'],
deliveryMode: 'native',
},
{
id: 'tdf-banking-on-bitcoin',
title: 'Banking on Bitcoin',
synopsis:
'Chronicles idealists and entrepreneurs as they redefine money.',
poster: '/images/films/posters/topdoc/banking-on-bitcoin.jpg',
streamingUrl: 'https://www.youtube.com/embed/BbMT1Mhv7OQ',
genre: 'Documentary',
categories: ['Documentary', 'Bitcoin', 'Finance'],
deliveryMode: 'native', deliveryMode: 'native',
}, },
]; ];
@@ -169,7 +107,6 @@ async function seed() {
await client.connect(); await client.connect();
try { try {
// Run inside a transaction
await client.query('BEGIN'); await client.query('BEGIN');
// 1. Seed genres // 1. Seed genres
@@ -242,47 +179,15 @@ async function seed() {
], ],
); );
// Create a content record for the film // Content with status 'completed' so it appears in public API listings
const contentId = `content-${film.id}`; const contentId = `content-${film.id}`;
await client.query( await client.query(
`INSERT INTO contents (id, project_id, title, synopsis, status, "order", release_date, created_at, updated_at) `INSERT INTO contents (id, project_id, title, synopsis, status, "order", rental_price, release_date, created_at, updated_at)
VALUES ($1, $2, $3, $4, 'ready', 1, NOW(), NOW(), NOW()) VALUES ($1, $2, $3, $4, 'completed', 1, 0, NOW(), NOW(), NOW())
ON CONFLICT (id) DO UPDATE SET title = EXCLUDED.title`,
[contentId, film.id, film.title, film.synopsis],
);
}
// 5. Seed TopDoc films
console.log('[seed] Seeding TopDoc films...');
for (const film of topDocFilms) {
const genreId = genreLookup[film.genre] || null;
await client.query(
`INSERT INTO projects (id, name, title, slug, synopsis, poster, status, type, genre_id, delivery_mode, streaming_url, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, 'published', 'film', $7, $8, $9, NOW(), NOW())
ON CONFLICT (id) DO UPDATE SET ON CONFLICT (id) DO UPDATE SET
title = EXCLUDED.title, title = EXCLUDED.title,
synopsis = EXCLUDED.synopsis, status = 'completed',
poster = EXCLUDED.poster, rental_price = 0`,
delivery_mode = EXCLUDED.delivery_mode,
streaming_url = EXCLUDED.streaming_url`,
[
film.id,
film.title,
film.title,
film.id,
film.synopsis,
film.poster,
genreId,
film.deliveryMode,
film.streamingUrl,
],
);
const contentId = `content-${film.id}`;
await client.query(
`INSERT INTO contents (id, project_id, title, synopsis, status, "order", release_date, created_at, updated_at)
VALUES ($1, $2, $3, $4, 'ready', 1, NOW(), NOW(), NOW())
ON CONFLICT (id) DO UPDATE SET title = EXCLUDED.title`,
[contentId, film.id, film.title, film.synopsis], [contentId, film.id, film.title, film.synopsis],
); );
} }
@@ -292,7 +197,6 @@ async function seed() {
console.log(` - ${genres.length} genres`); console.log(` - ${genres.length} genres`);
console.log(` - ${testUsers.length} test users with subscriptions`); console.log(` - ${testUsers.length} test users with subscriptions`);
console.log(` - ${indeeHubFilms.length} IndeeHub films`); console.log(` - ${indeeHubFilms.length} IndeeHub films`);
console.log(` - ${topDocFilms.length} TopDoc films`);
} catch (error) { } catch (error) {
await client.query('ROLLBACK'); await client.query('ROLLBACK');
console.error('[seed] Error seeding database:', error); console.error('[seed] Error seeding database:', error);

186
docker-compose.dev.yml Normal file
View File

@@ -0,0 +1,186 @@
# Local development stack. Credentials below are intentionally weak
# dev-only values — production deploys use .env.portainer instead.
version: '3.8'
services:
  # ── Frontend (nginx serving built Vue app) ───────────────────
  app:
    build:
      context: .
      dockerfile: Dockerfile
    restart: unless-stopped
    ports:
      - "7777:7777"
    depends_on:
      - relay
      - api
    networks:
      - indeedhub-network
    labels:
      - "com.centurylinklabs.watchtower.enable=true"
    healthcheck:
      # NOTE(review): assumes curl is available in the frontend image —
      # verify, or the healthcheck will always report unhealthy.
      test: ["CMD", "curl", "-f", "http://localhost:7777/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
  # ── Backend API (NestJS) ─────────────────────────────────────
  api:
    build:
      context: ./backend
      dockerfile: Dockerfile
    restart: unless-stopped
    env_file:
      - ./backend/.env
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_started
      minio:
        condition: service_started
    networks:
      - indeedhub-network
    healthcheck:
      # wget is installed in the backend image specifically for this check.
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4000/nostr-auth/health"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 60s
  # ── PostgreSQL Database ──────────────────────────────────────
  postgres:
    image: postgres:16-alpine
    restart: unless-stopped
    environment:
      # Dev-only credentials; must match DATABASE_* in backend/.env.
      POSTGRES_USER: indeedhub
      POSTGRES_PASSWORD: indeedhub_dev_2026
      POSTGRES_DB: indeedhub
    volumes:
      - postgres-data:/var/lib/postgresql/data
    networks:
      - indeedhub-network
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U indeedhub"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 30s
  # ── Redis (BullMQ job queue) ─────────────────────────────────
  redis:
    image: redis:7-alpine
    restart: unless-stopped
    volumes:
      - redis-data:/data
    networks:
      - indeedhub-network
  # ── MinIO (S3-compatible object storage) ─────────────────────
  minio:
    image: minio/minio:latest
    restart: unless-stopped
    command: server /data --console-address ":9001"
    environment:
      # Dev-only credentials; must match the minio-init alias below.
      MINIO_ROOT_USER: minioadmin
      MINIO_ROOT_PASSWORD: minioadmin123
    volumes:
      - minio-data:/data
    ports:
      # Console UI only; the S3 API (9000) stays internal to the network.
      - "9001:9001"
    networks:
      - indeedhub-network
  # ── MinIO bucket init (one-shot: creates required buckets) ───
  minio-init:
    image: minio/mc:latest
    depends_on:
      - minio
    # NOTE(review): the fixed 'sleep 5' is a startup race — if MinIO takes
    # longer to come up, bucket creation fails; consider a retry loop.
    entrypoint: >
      /bin/sh -c "
      sleep 5;
      mc alias set local http://minio:9000 minioadmin minioadmin123;
      mc mb local/indeedhub-private --ignore-existing;
      mc mb local/indeedhub-public --ignore-existing;
      mc anonymous set download local/indeedhub-public;
      echo 'MinIO buckets initialized';
      "
    networks:
      - indeedhub-network
    restart: "no"
  # ── FFmpeg Transcoding Worker ────────────────────────────────
  ffmpeg-worker:
    build:
      context: ./backend
      dockerfile: Dockerfile.ffmpeg
    restart: unless-stopped
    env_file:
      - ./backend/.env
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_started
      minio:
        condition: service_started
    networks:
      - indeedhub-network
  # ── Mailpit (development email testing) ──────────────────────
  mailpit:
    image: axllent/mailpit:latest
    restart: unless-stopped
    ports:
      - "8025:8025"
    networks:
      - indeedhub-network
  # ── Nostr Relay (stores comments, reactions, profiles) ───────
  relay:
    image: scsibug/nostr-rs-relay:latest
    restart: unless-stopped
    volumes:
      - relay-data:/usr/src/app/db
    networks:
      - indeedhub-network
  # ── Seeder (one-shot: seeds test data into relay, then exits)
  seeder:
    build:
      context: .
      dockerfile: Dockerfile.seed
    depends_on:
      - relay
    environment:
      # Relay is reached over the compose network, not via localhost.
      - RELAY_URL=ws://relay:8080
      # ORIGIN must match the frontend's VITE_CONTENT_ORIGIN so relay
      # queries find the seeded events.
      - ORIGIN=http://localhost:7777
    networks:
      - indeedhub-network
    restart: "no"
  # ── DB Seeder (one-shot: seeds content into PostgreSQL) ──────
  db-seeder:
    build:
      context: ./backend
      dockerfile: Dockerfile
    depends_on:
      postgres:
        condition: service_healthy
      minio:
        condition: service_started
    env_file:
      - ./backend/.env
    # Overrides the image's default CMD to run the seed script once.
    command: ["node", "dist/scripts/seed-content.js"]
    networks:
      - indeedhub-network
    restart: "no"
networks:
  indeedhub-network:
    driver: bridge
volumes:
  postgres-data:
  redis-data:
  minio-data:
  relay-data:

View File

@@ -1,21 +1,38 @@
# ═══════════════════════════════════════════════════════════════
# IndeeHub — Production Stack for Portainer
# ═══════════════════════════════════════════════════════════════
#
# All ${VARIABLES} are resolved by Portainer at deploy time.
# Configure them in Portainer → Stacks → Environment variables
# before deploying.
#
# See .env.portainer for the full list of required variables.
#
# For local development, use: docker compose -f docker-compose.dev.yml up
# ═══════════════════════════════════════════════════════════════
version: '3.8' version: '3.8'
services: services:
# ── Frontend (nginx serving built Vue app) ────────────────── # ── Frontend (nginx serving built Vue app) ──────────────────
app: app:
build: build:
context: . context: .
dockerfile: Dockerfile dockerfile: Dockerfile
args:
VITE_USE_MOCK_DATA: "false"
VITE_CONTENT_ORIGIN: ${FRONTEND_URL}
VITE_INDEEHUB_API_URL: /api
VITE_INDEEHUB_CDN_URL: /storage
VITE_NOSTR_RELAYS: ""
restart: unless-stopped restart: unless-stopped
ports: ports:
- "7777:7777" - "${APP_PORT:-7777}:7777"
depends_on: depends_on:
- relay - relay
- api - api
networks: networks:
- indeedhub-network - indeedhub-network
labels:
- "com.centurylinklabs.watchtower.enable=true"
healthcheck: healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:7777/health"] test: ["CMD", "curl", "-f", "http://localhost:7777/health"]
interval: 30s interval: 30s
@@ -23,14 +40,106 @@ services:
retries: 3 retries: 3
start_period: 40s start_period: 40s
# ── Backend API (NestJS) ──────────────────────────────────── # ── Backend API (NestJS) ────────────────────────────────────
api: api:
build: build:
context: ./backend context: ./backend
dockerfile: Dockerfile dockerfile: Dockerfile
restart: unless-stopped restart: unless-stopped
env_file: environment:
- ./backend/.env # ── Core ─────────────────────────────────────────────
ENVIRONMENT: production
PORT: 4000
DOMAIN: ${DOMAIN}
FRONTEND_URL: ${FRONTEND_URL}
# ── Database ─────────────────────────────────────────
DATABASE_HOST: postgres
DATABASE_PORT: 5432
DATABASE_USER: ${POSTGRES_USER}
DATABASE_PASSWORD: ${POSTGRES_PASSWORD}
DATABASE_NAME: ${POSTGRES_DB}
# ── Redis / BullMQ ───────────────────────────────────
QUEUE_HOST: redis
QUEUE_PORT: 6379
QUEUE_PASSWORD: ${REDIS_PASSWORD:-}
# ── S3 / MinIO ──────────────────────────────────────
S3_ENDPOINT: ${S3_ENDPOINT:-http://minio:9000}
AWS_REGION: ${AWS_REGION:-us-east-1}
AWS_ACCESS_KEY: ${S3_ACCESS_KEY}
AWS_SECRET_KEY: ${S3_SECRET_KEY}
S3_PRIVATE_BUCKET_NAME: ${S3_PRIVATE_BUCKET:-indeedhub-private}
S3_PUBLIC_BUCKET_NAME: ${S3_PUBLIC_BUCKET:-indeedhub-public}
S3_PUBLIC_BUCKET_URL: ${S3_PUBLIC_BUCKET_URL}
# ── CloudFront (leave empty for MinIO/self-hosted) ──
CLOUDFRONT_PRIVATE_KEY: ${CLOUDFRONT_PRIVATE_KEY:-}
CLOUDFRONT_KEY_PAIR_ID: ${CLOUDFRONT_KEY_PAIR_ID:-}
CLOUDFRONT_DISTRIBUTION_URL: ${CLOUDFRONT_DISTRIBUTION_URL:-}
# ── BTCPay Server ───────────────────────────────────
BTCPAY_URL: ${BTCPAY_URL}
BTCPAY_API_KEY: ${BTCPAY_API_KEY}
BTCPAY_STORE_ID: ${BTCPAY_STORE_ID}
BTCPAY_WEBHOOK_SECRET: ${BTCPAY_WEBHOOK_SECRET}
BTCPAY_ROUTE_HINTS: ${BTCPAY_ROUTE_HINTS:-false}
# ── Nostr Auth / JWT ─────────────────────────────────
NOSTR_JWT_SECRET: ${NOSTR_JWT_SECRET}
NOSTR_JWT_EXPIRES_IN: ${NOSTR_JWT_EXPIRES_IN:-7d}
# ── AES-128 Content Encryption ──────────────────────
AES_MASTER_SECRET: ${AES_MASTER_SECRET}
# ── SMTP / Email ─────────────────────────────────────
SMTP_HOST: ${SMTP_HOST:-}
SMTP_PORT: ${SMTP_PORT:-587}
SMTP_USER: ${SMTP_USER:-}
SMTP_PASS: ${SMTP_PASS:-}
MAIL_FROM: ${MAIL_FROM:-noreply@indeedhub.local}
# ── SendGrid (optional -- alternative to SMTP) ──────
SENDGRID_API_KEY: ${SENDGRID_API_KEY:-}
SENDGRID_SENDER: ${SENDGRID_SENDER:-}
# ── Cognito (optional -- disabled with Nostr auth) ──
COGNITO_USER_POOL_ID: ${COGNITO_USER_POOL_ID:-}
COGNITO_CLIENT_ID: ${COGNITO_CLIENT_ID:-}
# ── Flash Subscription Secrets (optional) ───────────
FLASH_JWT_SECRET_ENTHUSIAST: ${FLASH_JWT_SECRET_ENTHUSIAST:-}
FLASH_JWT_SECRET_FILM_BUFF: ${FLASH_JWT_SECRET_FILM_BUFF:-}
FLASH_JWT_SECRET_CINEPHILE: ${FLASH_JWT_SECRET_CINEPHILE:-}
FLASH_JWT_SECRET_RSS_ADDON: ${FLASH_JWT_SECRET_RSS_ADDON:-}
FLASH_JWT_SECRET_VERIFICATION_ADDON: ${FLASH_JWT_SECRET_VERIFICATION_ADDON:-}
# ── Transcoding API (optional) ──────────────────────
TRANSCODING_API_KEY: ${TRANSCODING_API_KEY:-}
TRANSCODING_API_URL: ${TRANSCODING_API_URL:-}
# ── PostHog Analytics (optional) ────────────────────
POSTHOG_API_KEY: ${POSTHOG_API_KEY:-}
# ── Sentry Error Tracking (optional) ────────────────
SENTRY_ENVIRONMENT: ${SENTRY_ENVIRONMENT:-production}
# ── DRM (optional) ──────────────────────────────────
DRM_SECRET_NAME: ${DRM_SECRET_NAME:-}
PRIVATE_AUTH_CERTIFICATE_KEY_ID: ${PRIVATE_AUTH_CERTIFICATE_KEY_ID:-}
# ── Podping (optional) ──────────────────────────────
PODPING_URL: ${PODPING_URL:-}
PODPING_KEY: ${PODPING_KEY:-}
PODPING_USER_AGENT: ${PODPING_USER_AGENT:-}
# ── Admin API (optional) ────────────────────────────
ADMIN_API_KEY: ${ADMIN_API_KEY:-}
# ── Partner Content (optional) ──────────────────────
PARTNER_API_BASE_URL: ${PARTNER_API_BASE_URL:-}
PARTNER_API_KEY: ${PARTNER_API_KEY:-}
depends_on: depends_on:
postgres: postgres:
condition: service_healthy condition: service_healthy
@@ -47,50 +156,56 @@ services:
retries: 5 retries: 5
start_period: 60s start_period: 60s
# ── PostgreSQL Database ───────────────────────────────────── # ── PostgreSQL Database ─────────────────────────────────────
postgres: postgres:
image: postgres:16-alpine image: postgres:16-alpine
restart: unless-stopped restart: unless-stopped
environment: environment:
POSTGRES_USER: indeedhub POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: indeedhub_dev_2026 POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_DB: indeedhub POSTGRES_DB: ${POSTGRES_DB}
volumes: volumes:
- postgres-data:/var/lib/postgresql/data - postgres-data:/var/lib/postgresql/data
networks: networks:
- indeedhub-network - indeedhub-network
healthcheck: healthcheck:
test: ["CMD-SHELL", "pg_isready -U indeedhub"] test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER}"]
interval: 10s interval: 10s
timeout: 5s timeout: 5s
retries: 5 retries: 5
start_period: 30s start_period: 30s
# ── Redis (BullMQ job queue) ──────────────────────────────── # ── Redis (BullMQ job queue) ────────────────────────────────
redis: redis:
image: redis:7-alpine image: redis:7-alpine
restart: unless-stopped restart: unless-stopped
command: >
sh -c "if [ -n '${REDIS_PASSWORD:-}' ]; then
redis-server --requirepass '${REDIS_PASSWORD}' --appendonly yes;
else
redis-server --appendonly yes;
fi"
volumes: volumes:
- redis-data:/data - redis-data:/data
networks: networks:
- indeedhub-network - indeedhub-network
# ── MinIO (S3-compatible object storage) ──────────────────── # ── MinIO (S3-compatible object storage) ────────────────────
minio: minio:
image: minio/minio:latest image: minio/minio:latest
restart: unless-stopped restart: unless-stopped
command: server /data --console-address ":9001" command: server /data --console-address ":9001"
environment: environment:
MINIO_ROOT_USER: minioadmin MINIO_ROOT_USER: ${MINIO_ROOT_USER}
MINIO_ROOT_PASSWORD: minioadmin123 MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD}
volumes: volumes:
- minio-data:/data - minio-data:/data
ports: ports:
- "9001:9001" - "${MINIO_CONSOLE_PORT:-9001}:9001"
networks: networks:
- indeedhub-network - indeedhub-network
# ── MinIO bucket init (one-shot: creates required buckets) ── # ── MinIO bucket init (one-shot: creates required buckets) ──
minio-init: minio-init:
image: minio/mc:latest image: minio/mc:latest
depends_on: depends_on:
@@ -98,24 +213,41 @@ services:
entrypoint: > entrypoint: >
/bin/sh -c " /bin/sh -c "
sleep 5; sleep 5;
mc alias set local http://minio:9000 minioadmin minioadmin123; mc alias set local http://minio:9000 ${MINIO_ROOT_USER} ${MINIO_ROOT_PASSWORD};
mc mb local/indeedhub-private --ignore-existing; mc mb local/${S3_PRIVATE_BUCKET:-indeedhub-private} --ignore-existing;
mc mb local/indeedhub-public --ignore-existing; mc mb local/${S3_PUBLIC_BUCKET:-indeedhub-public} --ignore-existing;
mc anonymous set download local/indeedhub-public; mc anonymous set download local/${S3_PUBLIC_BUCKET:-indeedhub-public};
echo 'MinIO buckets initialized'; echo 'MinIO buckets initialized';
" "
networks: networks:
- indeedhub-network - indeedhub-network
restart: "no" restart: "no"
# ── FFmpeg Transcoding Worker ─────────────────────────────── # ── FFmpeg Transcoding Worker ───────────────────────────────
ffmpeg-worker: ffmpeg-worker:
build: build:
context: ./backend context: ./backend
dockerfile: Dockerfile.ffmpeg dockerfile: Dockerfile.ffmpeg
restart: unless-stopped restart: unless-stopped
env_file: environment:
- ./backend/.env # Worker shares database + S3 + Redis config with the API
ENVIRONMENT: production
DATABASE_HOST: postgres
DATABASE_PORT: 5432
DATABASE_USER: ${POSTGRES_USER}
DATABASE_PASSWORD: ${POSTGRES_PASSWORD}
DATABASE_NAME: ${POSTGRES_DB}
QUEUE_HOST: redis
QUEUE_PORT: 6379
QUEUE_PASSWORD: ${REDIS_PASSWORD:-}
S3_ENDPOINT: ${S3_ENDPOINT:-http://minio:9000}
AWS_REGION: ${AWS_REGION:-us-east-1}
AWS_ACCESS_KEY: ${S3_ACCESS_KEY}
AWS_SECRET_KEY: ${S3_SECRET_KEY}
S3_PRIVATE_BUCKET_NAME: ${S3_PRIVATE_BUCKET:-indeedhub-private}
S3_PUBLIC_BUCKET_NAME: ${S3_PUBLIC_BUCKET:-indeedhub-public}
S3_PUBLIC_BUCKET_URL: ${S3_PUBLIC_BUCKET_URL}
AES_MASTER_SECRET: ${AES_MASTER_SECRET}
depends_on: depends_on:
postgres: postgres:
condition: service_healthy condition: service_healthy
@@ -126,16 +258,7 @@ services:
networks: networks:
- indeedhub-network - indeedhub-network
# ── Mailpit (development email testing) ────────────────────── # ── Nostr Relay ─────────────────────────────────────────────
mailpit:
image: axllent/mailpit:latest
restart: unless-stopped
ports:
- "8025:8025"
networks:
- indeedhub-network
# ── Nostr Relay (stores comments, reactions, profiles) ───────
relay: relay:
image: scsibug/nostr-rs-relay:latest image: scsibug/nostr-rs-relay:latest
restart: unless-stopped restart: unless-stopped
@@ -144,37 +267,6 @@ services:
networks: networks:
- indeedhub-network - indeedhub-network
# ── Seeder (one-shot: seeds test data into relay, then exits)
seeder:
build:
context: .
dockerfile: Dockerfile.seed
depends_on:
- relay
environment:
- RELAY_URL=ws://relay:8080
- ORIGIN=http://localhost:7777
networks:
- indeedhub-network
restart: "no"
# ── DB Seeder (one-shot: seeds content into PostgreSQL) ──────
db-seeder:
build:
context: ./backend
dockerfile: Dockerfile
depends_on:
postgres:
condition: service_healthy
minio:
condition: service_started
env_file:
- ./backend/.env
command: ["node", "dist/scripts/seed-content.js"]
networks:
- indeedhub-network
restart: "no"
networks: networks:
indeedhub-network: indeedhub-network:
driver: bridge driver: bridge

View File

@@ -42,8 +42,8 @@ server {
proxy_read_timeout 300s; proxy_read_timeout 300s;
proxy_send_timeout 300s; proxy_send_timeout 300s;
# Handle large uploads # Handle large video uploads (up to 5GB)
client_max_body_size 100m; client_max_body_size 5g;
} }
# ── MinIO storage proxy (public bucket) ──────────────────── # ── MinIO storage proxy (public bucket) ────────────────────

View File

@@ -65,7 +65,7 @@ cleanup() {
# Stop Docker infrastructure (if we used Docker) # Stop Docker infrastructure (if we used Docker)
if [ "$SKIP_DOCKER" = false ] && command -v docker &>/dev/null; then if [ "$SKIP_DOCKER" = false ] && command -v docker &>/dev/null; then
echo -e "${CYAN}Stopping Docker services...${NC}" echo -e "${CYAN}Stopping Docker services...${NC}"
docker compose -f "$ROOT_DIR/docker-compose.yml" stop postgres redis minio minio-init mailpit 2>/dev/null || true docker compose -f "$ROOT_DIR/docker-compose.dev.yml" stop postgres redis minio minio-init mailpit 2>/dev/null || true
fi fi
# Kill anything still on relay port # Kill anything still on relay port
@@ -198,12 +198,12 @@ if [ "$SKIP_DOCKER" = true ] || [ "$HAS_DOCKER" = false ]; then
else else
echo -e "${CYAN}[1/4] Starting Docker infrastructure...${NC}" echo -e "${CYAN}[1/4] Starting Docker infrastructure...${NC}"
docker compose -f "$ROOT_DIR/docker-compose.yml" up -d \ docker compose -f "$ROOT_DIR/docker-compose.dev.yml" up -d \
postgres redis minio minio-init mailpit 2>&1 | prefix_output "docker" "$CYAN" postgres redis minio minio-init mailpit 2>&1 | prefix_output "docker" "$CYAN"
echo -e "${CYAN} Waiting for Postgres...${NC}" echo -e "${CYAN} Waiting for Postgres...${NC}"
for i in $(seq 1 30); do for i in $(seq 1 30); do
if docker compose -f "$ROOT_DIR/docker-compose.yml" exec -T postgres pg_isready -U indeedhub -q 2>/dev/null; then if docker compose -f "$ROOT_DIR/docker-compose.dev.yml" exec -T postgres pg_isready -U indeedhub -q 2>/dev/null; then
echo -e "${GREEN} Postgres is ready.${NC}" echo -e "${GREEN} Postgres is ready.${NC}"
break break
fi fi

View File

@@ -302,8 +302,11 @@ async function checkRentalAccess() {
const result = await libraryService.checkRentExists(contentId) const result = await libraryService.checkRentExists(contentId)
hasActiveRental.value = result.exists hasActiveRental.value = result.exists
rentalExpiresAt.value = result.expiresAt ? new Date(result.expiresAt) : null rentalExpiresAt.value = result.expiresAt ? new Date(result.expiresAt) : null
} catch { } catch (err) {
hasActiveRental.value = false console.warn('Rental check failed:', err)
// If the rental check fails (e.g. auth issue) but the user owns the
// content, treat it as "can play" so the owner isn't blocked.
hasActiveRental.value = !!props.content.isOwnProject
rentalExpiresAt.value = null rentalExpiresAt.value = null
} }
} }
@@ -356,8 +359,16 @@ function getProfile(pubkey: string) {
} }
function handlePlay() { function handlePlay() {
// Free content with a streaming URL can play without auth // Free content (YouTube embeds or rentalPrice = 0) plays without auth
if (props.content?.streamingUrl) { const isFree = props.content?.streamingUrl ||
!props.content?.rentalPrice || props.content.rentalPrice <= 0
if (isFree) {
showVideoPlayer.value = true
return
}
// Content creators can always preview/play their own content
if (props.content?.isOwnProject) {
showVideoPlayer.value = true showVideoPlayer.value = true
return return
} }

View File

@@ -215,7 +215,7 @@
<script setup lang="ts"> <script setup lang="ts">
import { ref, computed, watch, onUnmounted, nextTick } from 'vue' import { ref, computed, watch, onUnmounted, nextTick } from 'vue'
import Hls from 'hls.js' import Hls from 'hls.js'
import { contentService } from '../services/content.service' import { indeehubApiService } from '../services/indeehub-api.service'
import type { Content } from '../types/content' import type { Content } from '../types/content'
interface Props { interface Props {
@@ -304,23 +304,55 @@ async function fetchStream() {
streamError.value = null streamError.value = null
try { try {
const info = await contentService.getStreamInfo(contentId) // Try the backend stream endpoint first (handles DRM, presigned URLs, etc.)
hlsStreamUrl.value = info.file const info = await indeehubApiService.getStreamingUrl(contentId)
const streamFile = (info as any).file || info.url
hlsStreamUrl.value = streamFile
await nextTick() await nextTick()
initPlayer(info.file) initPlayer(streamFile)
} catch (err: any) { } catch (err: any) {
const status = err?.response?.status console.warn('Stream API failed, trying direct HLS URL fallback:', err?.response?.status || err?.message)
if (status === 403) {
streamError.value = 'You need an active subscription or rental to watch this content.' // Fallback: construct the HLS URL directly from the public S3 bucket.
// The transcoded HLS files live in the public bucket at a predictable path.
// This works because the user already passed the access check before the
// player opened (rental verified in ContentDetailModal.handlePlay).
const directUrl = buildDirectHlsUrl()
if (directUrl) {
console.log('Using direct HLS URL:', directUrl)
hlsStreamUrl.value = directUrl
await nextTick()
initPlayer(directUrl)
} else { } else {
streamError.value = 'Unable to load the stream. Please try again.' const status = err?.response?.status
if (status === 403) {
streamError.value = 'You need an active subscription or rental to watch this content.'
} else if (status === 401) {
streamError.value = 'Please sign in to watch this content.'
} else {
streamError.value = 'Unable to load the stream. Please try again.'
}
console.error('Failed to fetch stream info:', err)
} }
console.error('Failed to fetch stream info:', err)
} finally { } finally {
isLoadingStream.value = false isLoadingStream.value = false
} }
} }
/**
* Build a direct HLS URL from the content's API data.
* The public bucket stores transcoded HLS at:
* {CDN_URL}/projects/{projectId}/file/transcoded/file.m3u8
*/
function buildDirectHlsUrl(): string | null {
const projectId = props.content?.id
if (!projectId) return null
// Use the CDN/storage URL configured for the self-hosted backend
const cdnBase = import.meta.env.VITE_INDEEHUB_CDN_URL || 'http://localhost:9000/indeedhub-public'
return `${cdnBase}/projects/${projectId}/file/transcoded/file.m3u8`
}
function initPlayer(url: string) { function initPlayer(url: string) {
destroyHls() destroyHls()
const video = videoEl.value const video = videoEl.value

View File

@@ -64,15 +64,26 @@ export function useAccess() {
} }
/** /**
* Check if content requires subscription * Check if content is free (no rental price and/or has a direct streaming URL)
*/
function isFreeContent(_content: any): boolean {
if (!_content) return false
// Content with a streaming URL (e.g. YouTube embeds) is free
if (_content.streamingUrl) return true
// Content with no rental price or zero price is free
return !_content.rentalPrice || _content.rentalPrice <= 0
}
/**
* Check if content requires subscription or payment to access
*/ */
function requiresSubscription(_content: any): boolean { function requiresSubscription(_content: any): boolean {
// All content requires subscription or rental unless explicitly free if (isFreeContent(_content)) return false
return true return true
} }
/** /**
* Check if content can be rented * Check if content can be rented (has a non-zero price)
*/ */
function canRent(_content: any): boolean { function canRent(_content: any): boolean {
return !!_content.rentalPrice && _content.rentalPrice > 0 return !!_content.rentalPrice && _content.rentalPrice > 0
@@ -82,6 +93,7 @@ export function useAccess() {
checkContentAccess, checkContentAccess,
hasActiveSubscription, hasActiveSubscription,
getSubscriptionTier, getSubscriptionTier,
isFreeContent,
requiresSubscription, requiresSubscription,
canRent, canRent,
} }

View File

@@ -1,4 +1,5 @@
import { apiService } from './api.service' import { apiService } from './api.service'
import { indeehubApiService } from './indeehub-api.service'
import type { ApiRent, ApiContent } from '../types/api' import type { ApiRent, ApiContent } from '../types/api'
import { USE_MOCK } from '../utils/mock' import { USE_MOCK } from '../utils/mock'
@@ -99,10 +100,11 @@ class LibraryService {
/** /**
* Check if an active (non-expired) rent exists for a given content ID. * Check if an active (non-expired) rent exists for a given content ID.
* Returns the rental expiry when one exists. * Uses indeehubApiService which carries Nostr JWT auth tokens for more
* reliable authentication than the generic apiService.
*/ */
async checkRentExists(contentId: string): Promise<{ exists: boolean; expiresAt?: string }> { async checkRentExists(contentId: string): Promise<{ exists: boolean; expiresAt?: string }> {
return apiService.get(`/rents/content/${contentId}/exists`) return indeehubApiService.get(`/rents/content/${contentId}/exists`)
} }
/** /**

View File

@@ -157,8 +157,14 @@ class Nip98Service {
sessionStorage.setItem('nostr_token', accessToken) sessionStorage.setItem('nostr_token', accessToken)
return accessToken return accessToken
} catch { } catch (err) {
this.clearSession() // Don't wipe tokens on refresh failure — the refresh token may
// still be valid and a retry could succeed (e.g. transient network
// error). Only clear the expired access token so the next request
// tries a fresh refresh instead of sending a stale token.
sessionStorage.removeItem(TOKEN_KEY)
sessionStorage.removeItem(EXPIRES_KEY)
console.warn('Token refresh failed:', err)
return null return null
} finally { } finally {
this.refreshPromise = null this.refreshPromise = null

View File

@@ -115,15 +115,21 @@ export const useContentStore = defineStore('content', () => {
const films = allContent.filter(c => c.type === 'film') const films = allContent.filter(c => c.type === 'film')
const bitcoinContent = allContent.filter(c => const bitcoinContent = allContent.filter(c =>
c.categories?.some(cat => cat.toLowerCase().includes('bitcoin') || cat.toLowerCase().includes('documentary')) c.categories?.some(cat => cat.toLowerCase().includes('bitcoin'))
)
const docs = allContent.filter(c =>
c.categories?.some(cat => cat.toLowerCase().includes('documentary'))
)
const dramaContent = allContent.filter(c =>
c.categories?.some(cat => cat.toLowerCase().includes('drama'))
) )
contentRows.value = { contentRows.value = {
featured: allContent.slice(0, 10), featured: allContent.slice(0, 10),
newReleases: films.slice(0, 8), newReleases: films.slice(0, 8),
bitcoin: bitcoinContent.length > 0 ? bitcoinContent : films.slice(0, 6), bitcoin: bitcoinContent.length > 0 ? bitcoinContent : films.slice(0, 6),
documentaries: allContent.slice(0, 10), documentaries: docs.length > 0 ? docs : allContent.slice(0, 10),
dramas: films.slice(0, 6), dramas: dramaContent.length > 0 ? dramaContent : films.slice(0, 6),
independent: films.slice(0, 10) independent: films.slice(0, 10)
} }
} catch (err) { } catch (err) {
@@ -240,6 +246,7 @@ export const useContentStore = defineStore('content', () => {
rentalPrice: p.film?.rentalPrice ?? p.rentalPrice, rentalPrice: p.film?.rentalPrice ?? p.rentalPrice,
status: p.status, status: p.status,
apiData: p, apiData: p,
isOwnProject: true,
})) }))
// Merge into each content row (prepend so they appear first) // Merge into each content row (prepend so they appear first)
@@ -278,29 +285,39 @@ export const useContentStore = defineStore('content', () => {
/** /**
* Main fetch content method. * Main fetch content method.
* When USE_MOCK is false and the self-hosted API URL is configured, * Respects the content-source toggle:
* always try the self-hosted backend first (regardless of the * - 'indeehub-api' → self-hosted backend API
* content-source toggle, which only affects mock catalogues). * - 'topdocfilms' → TopDoc mock catalog (YouTube documentaries)
* - 'indeehub' → IndeeHub mock catalog
*/ */
async function fetchContent() { async function fetchContent() {
loading.value = true loading.value = true
error.value = null error.value = null
try { try {
const sourceStore = useContentSourceStore()
const apiUrl = import.meta.env.VITE_INDEEHUB_API_URL || '' const apiUrl = import.meta.env.VITE_INDEEHUB_API_URL || ''
if (USE_MOCK_DATA) { if (USE_MOCK_DATA) {
// Use mock data in development or when flag is set
await new Promise(resolve => setTimeout(resolve, 100)) await new Promise(resolve => setTimeout(resolve, 100))
await fetchContentFromMock() await fetchContentFromMock()
} else if (apiUrl) { } else if (sourceStore.activeSource === 'indeehub-api' && apiUrl) {
// Self-hosted backend is configured — always prefer it // Self-hosted backend API
await fetchContentFromIndeehubApi()
await mergePublishedFilmmakerProjects()
} else if (sourceStore.activeSource === 'topdocfilms') {
// TopDoc curated catalog (free YouTube documentaries)
fetchTopDocMock()
await mergePublishedFilmmakerProjects()
} else if (sourceStore.activeSource === 'indeehub') {
// IndeeHub mock catalog
fetchIndeeHubMock()
await mergePublishedFilmmakerProjects()
} else if (apiUrl) {
// Fallback to API if source is unknown but API is configured
await fetchContentFromIndeehubApi() await fetchContentFromIndeehubApi()
// Also merge filmmaker's published projects that may not be in the
// public results yet (e.g. content still transcoding)
await mergePublishedFilmmakerProjects() await mergePublishedFilmmakerProjects()
} else { } else {
// No self-hosted backend — try external API
await fetchContentFromApi() await fetchContentFromApi()
await mergePublishedFilmmakerProjects() await mergePublishedFilmmakerProjects()
} }

View File

@@ -28,6 +28,9 @@ export interface Content {
// Dual-mode content delivery // Dual-mode content delivery
deliveryMode?: 'native' | 'partner' deliveryMode?: 'native' | 'partner'
keyUrl?: string keyUrl?: string
/** True when the logged-in user is the content creator/owner */
isOwnProject?: boolean
} }
// Nostr event types // Nostr event types