initial commit

Author: Kar k1
Date: 2025-08-30 18:18:57 +05:30
Commit: 7219108342
270 changed files with 70221 additions and 0 deletions

75
lib/admin-middleware.ts Normal file

@@ -0,0 +1,75 @@
import { NextRequest, NextResponse } from 'next/server'
import jwt from 'jsonwebtoken'
import { connectDB } from './mongodb'
import { User } from '@/models/user'
export interface AdminUser {
id: string
email: string
name: string
role: 'admin'
siliconId: string
}
export async function verifyAdminToken(request: NextRequest): Promise<AdminUser | null> {
try {
const authHeader = request.headers.get('authorization')
const cookieToken = request.cookies.get('accessToken')?.value
// Extract token from auth header, but only if it's not "Bearer undefined"
let headerToken = null
if (authHeader && authHeader !== 'Bearer undefined' && authHeader.startsWith('Bearer ')) {
headerToken = authHeader.replace('Bearer ', '')
}
const token = headerToken || cookieToken
if (!token) {
return null
}
const JWT_SECRET = process.env.JWT_SECRET || 'your-jwt-secret-change-in-production'
const decoded = jwt.verify(token, JWT_SECRET) as any
await connectDB()
const user = await User.findById(decoded.userId).select('-password -refreshToken')
if (!user || user.role !== 'admin') {
return null
}
return {
id: user._id.toString(),
email: user.email,
name: user.name,
role: user.role,
siliconId: user.siliconId,
}
} catch (error) {
console.error('Admin token verification failed:', error)
return null
}
}
export function createAdminResponse(message: string, status: number = 403) {
return NextResponse.json(
{
error: message,
code: 'ADMIN_ACCESS_REQUIRED',
},
{ status }
)
}
export async function withAdminAuth(
request: NextRequest,
handler: (request: NextRequest, admin: AdminUser) => Promise<NextResponse>
): Promise<NextResponse> {
const admin = await verifyAdminToken(request)
if (!admin) {
return createAdminResponse('Admin access required')
}
return handler(request, admin)
}
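/**
 * Example usage in an API route (a minimal sketch; the route path
 * app/api/admin/stats/route.ts and the response body are illustrative assumptions):
 *
 *   // app/api/admin/stats/route.ts
 *   import { NextRequest, NextResponse } from 'next/server'
 *   import { withAdminAuth } from '@/lib/admin-middleware'
 *
 *   export async function GET(request: NextRequest) {
 *     return withAdminAuth(request, async (_req, admin) => {
 *       // `admin` is the verified AdminUser; non-admin callers get a 403 before this runs
 *       return NextResponse.json({ admin: { id: admin.id, email: admin.email } })
 *     })
 *   }
 */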

310
lib/analytics.tsx Normal file

@@ -0,0 +1,310 @@
/**
* Analytics utilities and tracking setup
* Supports Google Analytics, Plausible, and custom tracking
*/
// Types for analytics events
export interface AnalyticsEvent {
action: string
category: string
label?: string
value?: number
}
export interface PageViewEvent {
page_title: string
page_location: string
page_path: string
}
export interface CustomEvent {
event_name: string
[key: string]: any
}
// Analytics providers configuration
interface AnalyticsConfig {
googleAnalytics?: {
measurementId: string
enabled: boolean
}
plausible?: {
domain: string
enabled: boolean
}
customTracking?: {
enabled: boolean
endpoint?: string
}
debug?: boolean
}
class Analytics {
private config: AnalyticsConfig
private isInitialized = false
constructor(config: AnalyticsConfig) {
this.config = config
}
// Initialize analytics
init() {
if (this.isInitialized || typeof window === 'undefined') return
// Initialize Google Analytics
if (this.config.googleAnalytics?.enabled && this.config.googleAnalytics.measurementId) {
this.initGoogleAnalytics(this.config.googleAnalytics.measurementId)
}
// Initialize Plausible
if (this.config.plausible?.enabled && this.config.plausible.domain) {
this.initPlausible(this.config.plausible.domain)
}
this.isInitialized = true
if (this.config.debug) {
console.log('Analytics initialized with config:', this.config)
}
}
// Initialize Google Analytics
private initGoogleAnalytics(measurementId: string) {
// Load gtag script
const script1 = document.createElement('script')
script1.async = true
script1.src = `https://www.googletagmanager.com/gtag/js?id=${measurementId}`
document.head.appendChild(script1)
// Initialize gtag
const script2 = document.createElement('script')
script2.innerHTML = `
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', '${measurementId}', {
page_title: document.title,
page_location: window.location.href,
});
`
document.head.appendChild(script2)
// Make gtag available globally
;(window as any).gtag =
(window as any).gtag ||
function () {
;(window as any).dataLayer = (window as any).dataLayer || []
;(window as any).dataLayer.push(arguments)
}
}
// Initialize Plausible
private initPlausible(domain: string) {
const script = document.createElement('script')
script.defer = true
script.setAttribute('data-domain', domain)
script.src = 'https://plausible.io/js/script.js'
document.head.appendChild(script)
}
// Track page view
pageView(event: Partial<PageViewEvent> = {}) {
if (typeof window === 'undefined') return
const pageViewData = {
page_title: document.title,
page_location: window.location.href,
page_path: window.location.pathname,
...event,
}
// Google Analytics
if (this.config.googleAnalytics?.enabled && (window as any).gtag) {
;(window as any).gtag('config', this.config.googleAnalytics.measurementId, {
page_title: pageViewData.page_title,
page_location: pageViewData.page_location,
})
}
// Plausible (automatically tracks page views)
// Custom tracking
if (this.config.customTracking?.enabled) {
this.customTrack('page_view', pageViewData)
}
if (this.config.debug) {
console.log('Page view tracked:', pageViewData)
}
}
// Track custom event
track(eventName: string, properties: Record<string, any> = {}) {
if (typeof window === 'undefined') return
// Google Analytics
if (this.config.googleAnalytics?.enabled && (window as any).gtag) {
;(window as any).gtag('event', eventName, properties)
}
// Plausible
if (this.config.plausible?.enabled && (window as any).plausible) {
;(window as any).plausible(eventName, { props: properties })
}
// Custom tracking
if (this.config.customTracking?.enabled) {
this.customTrack(eventName, properties)
}
if (this.config.debug) {
console.log('Event tracked:', eventName, properties)
}
}
// Track user signup
trackSignup(method: string = 'email') {
this.track('sign_up', { method })
}
// Track user login
trackLogin(method: string = 'email') {
this.track('login', { method })
}
// Track user logout
trackLogout() {
this.track('logout')
}
// Track form submission
trackFormSubmit(formName: string, success: boolean = true) {
this.track('form_submit', {
form_name: formName,
success,
})
}
// Track button click
trackButtonClick(buttonName: string, location?: string) {
this.track('button_click', {
button_name: buttonName,
location,
})
}
// Track search
trackSearch(searchTerm: string, resultCount?: number) {
this.track('search', {
search_term: searchTerm,
result_count: resultCount,
})
}
// Track file download
trackDownload(fileName: string, fileType?: string) {
this.track('file_download', {
file_name: fileName,
file_type: fileType,
})
}
// Custom tracking implementation
private async customTrack(eventName: string, properties: Record<string, any>) {
if (!this.config.customTracking?.endpoint) return
try {
await fetch(this.config.customTracking.endpoint, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
event: eventName,
properties: {
...properties,
timestamp: new Date().toISOString(),
user_agent: navigator.userAgent,
url: window.location.href,
},
}),
})
} catch (error) {
if (this.config.debug) {
console.error('Custom tracking failed:', error)
}
}
}
// Identify user
identify(userId: string, traits: Record<string, any> = {}) {
// Google Analytics
if (this.config.googleAnalytics?.enabled && (window as any).gtag) {
;(window as any).gtag('config', this.config.googleAnalytics.measurementId, {
user_id: userId,
custom_map: traits,
})
}
// Custom tracking
if (this.config.customTracking?.enabled) {
this.customTrack('identify', { user_id: userId, traits })
}
if (this.config.debug) {
console.log('User identified:', userId, traits)
}
}
}
// Create analytics instance
const analyticsConfig: AnalyticsConfig = {
googleAnalytics: {
measurementId: process.env.NEXT_PUBLIC_GA_MEASUREMENT_ID || '',
enabled: !!(process.env.NEXT_PUBLIC_GA_MEASUREMENT_ID && process.env.NODE_ENV === 'production'),
},
plausible: {
domain: process.env.NEXT_PUBLIC_PLAUSIBLE_DOMAIN || '',
enabled: !!(process.env.NEXT_PUBLIC_PLAUSIBLE_DOMAIN && process.env.NODE_ENV === 'production'),
},
customTracking: {
enabled: false,
endpoint: process.env.NEXT_PUBLIC_ANALYTICS_ENDPOINT,
},
debug: process.env.NODE_ENV === 'development',
}
export const analytics = new Analytics(analyticsConfig)
// React hook for analytics
import { useEffect } from 'react'
import { usePathname } from 'next/navigation'
export function useAnalytics() {
const pathname = usePathname()
useEffect(() => {
analytics.init()
}, [])
useEffect(() => {
analytics.pageView({
page_path: pathname,
})
}, [pathname])
return analytics
}
// Higher-order component for analytics
import React from 'react'
export function withAnalytics<P extends object>(Component: React.ComponentType<P>) {
return function AnalyticsWrapper(props: P) {
useAnalytics()
return <Component {...props} />
}
}
// Export analytics instance
export default analytics
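/**
 * Example usage in a client component (a minimal sketch; the component name and
 * button copy are illustrative assumptions):
 *
 *   // components/signup-button.tsx
 *   'use client'
 *   import { useAnalytics } from '@/lib/analytics'
 *
 *   export function SignupButton() {
 *     const analytics = useAnalytics() // initializes providers and tracks page views
 *     return (
 *       <button onClick={() => analytics.trackButtonClick('signup', 'hero')}>Sign up</button>
 *     )
 *   }
 */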

101
lib/auth-middleware.ts Normal file

@@ -0,0 +1,101 @@
import { NextRequest, NextResponse } from 'next/server'
import { verifyAccessToken } from './jwt'
export interface AuthenticatedRequest extends NextRequest {
user?: {
userId: string
email: string
role: string
}
}
export const withAuth = (handler: (req: AuthenticatedRequest) => Promise<NextResponse>) => {
return async (req: AuthenticatedRequest): Promise<NextResponse> => {
try {
// Get token from Authorization header or cookie
const authHeader = req.headers.get('authorization')
const token = authHeader?.startsWith('Bearer ')
? authHeader.slice(7)
: req.cookies.get('accessToken')?.value
if (!token) {
return NextResponse.json(
{
success: false,
error: { message: 'No authentication token provided', code: 'NO_TOKEN' },
},
{ status: 401 }
)
}
const payload = verifyAccessToken(token)
if (!payload) {
return NextResponse.json(
{ success: false, error: { message: 'Invalid or expired token', code: 'INVALID_TOKEN' } },
{ status: 401 }
)
}
// Add user info to request
req.user = {
userId: payload.userId,
email: payload.email,
role: payload.role,
}
return handler(req)
} catch (error) {
console.error('Auth middleware error:', error)
return NextResponse.json(
{ success: false, error: { message: 'Authentication error', code: 'AUTH_ERROR' } },
{ status: 401 }
)
}
}
}
export const withAdminAuth = (handler: (req: AuthenticatedRequest) => Promise<NextResponse>) => {
return withAuth(async (req: AuthenticatedRequest): Promise<NextResponse> => {
if (req.user?.role !== 'admin') {
return NextResponse.json(
{
success: false,
error: { message: 'Admin access required', code: 'INSUFFICIENT_PERMISSIONS' },
},
{ status: 403 }
)
}
return handler(req)
})
}
// Helper function to get authenticated user from request
export const authMiddleware = async (
request: NextRequest
): Promise<{ id: string; email: string; role: string } | null> => {
try {
// Get token from Authorization header or cookie
const authHeader = request.headers.get('authorization')
const token = authHeader?.startsWith('Bearer ')
? authHeader.slice(7)
: request.cookies.get('accessToken')?.value
if (!token) {
return null
}
const payload = verifyAccessToken(token)
if (!payload) {
return null
}
return {
id: payload.userId,
email: payload.email,
role: payload.role,
}
} catch (error) {
console.error('Auth middleware error:', error)
return null
}
}
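/**
 * Example usage in an API route (a minimal sketch; the route path and response
 * payload are illustrative assumptions):
 *
 *   // app/api/profile/route.ts
 *   import { NextResponse } from 'next/server'
 *   import { withAuth, AuthenticatedRequest } from '@/lib/auth-middleware'
 *
 *   export const GET = withAuth(async (req: AuthenticatedRequest) => {
 *     // req.user is populated by the middleware after token verification
 *     return NextResponse.json({ success: true, data: { userId: req.user?.userId } })
 *   })
 */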

141
lib/balance-service.ts Normal file

@@ -0,0 +1,141 @@
/**
* Balance Service Utility Functions
* Provides helper functions for balance operations in service deployments
*/
interface BalanceDeductionResult {
success: boolean
transactionId?: string
previousBalance?: number
newBalance?: number
error?: string
}
interface ServicePurchase {
amount: number
service: string
serviceId?: string
description?: string
transactionId?: string
}
/**
* Deduct balance for service purchase
*/
export async function deductBalance(purchase: ServicePurchase): Promise<BalanceDeductionResult> {
try {
const response = await fetch('/api/balance/deduct', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
credentials: 'include',
body: JSON.stringify(purchase),
})
const data = await response.json()
if (!response.ok) {
return {
success: false,
error: data.error?.message || `Balance deduction failed with status ${response.status}`,
}
}
if (data.success) {
return {
success: true,
transactionId: data.data.transactionId,
previousBalance: data.data.previousBalance,
newBalance: data.data.newBalance,
}
} else {
return {
success: false,
error: data.error?.message || 'Balance deduction failed',
}
}
} catch (error) {
console.error('Balance deduction error:', error)
return {
success: false,
error: error instanceof Error ? error.message : 'Network error during balance deduction',
}
}
}
/**
* Check if user has sufficient balance
*/
export async function checkSufficientBalance(
requiredAmount: number
): Promise<{ sufficient: boolean; currentBalance?: number; error?: string }> {
try {
const response = await fetch('/api/user/balance', {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
credentials: 'include',
})
const data = await response.json()
if (!response.ok) {
return {
sufficient: false,
error: data.error?.message || 'Failed to check balance',
}
}
if (data.success) {
const currentBalance = data.data.balance
return {
sufficient: currentBalance >= requiredAmount,
currentBalance,
}
} else {
return {
sufficient: false,
error: data.error?.message || 'Failed to retrieve balance',
}
}
} catch (error) {
console.error('Balance check error:', error)
return {
sufficient: false,
error: error instanceof Error ? error.message : 'Network error during balance check',
}
}
}
/**
* Format currency amount for display
*/
export function formatCurrency(amount: number, currency: string = 'INR'): string {
return new Intl.NumberFormat('en-IN', {
style: 'currency',
currency,
minimumFractionDigits: 0,
maximumFractionDigits: 2,
}).format(amount)
}
/**
* Validate if balance is sufficient for a given amount
*/
export function validateBalance(currentBalance: number, requiredAmount: number): boolean {
return currentBalance >= requiredAmount
}
/**
* Generate service description for transaction
*/
export function generateServiceDescription(
serviceType: string,
serviceName: string,
cycle?: string
): string {
const cycleText = cycle ? ` (${cycle})` : ''
return `${serviceType}: ${serviceName}${cycleText}`
}
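/**
 * Example client-side purchase flow (a minimal sketch; the service name, plan and
 * billing cycle are illustrative assumptions):
 *
 *   import { checkSufficientBalance, deductBalance, generateServiceDescription } from '@/lib/balance-service'
 *
 *   async function purchaseVps(price: number) {
 *     const check = await checkSufficientBalance(price)
 *     if (!check.sufficient) {
 *       throw new Error(check.error || 'Insufficient balance')
 *     }
 *     return deductBalance({
 *       amount: price,
 *       service: 'VPS Hosting',
 *       description: generateServiceDescription('vps', 'Starter Plan', 'monthly'),
 *     })
 *   }
 */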

375
lib/billing-service.ts Normal file

@@ -0,0 +1,375 @@
import { Billing, IBilling, BillingSchema } from '@/models/billing'
import { Transaction } from '@/models/transaction'
import { User as UserModel } from '@/models/user'
import connectDB from '@/lib/mongodb'
// Utility function to generate unique billing ID
function toBase62(num: number): string {
const base62 = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
let encoded = ''
let n = num
while (n > 0) {
encoded = base62[n % 62] + encoded
n = Math.floor(n / 62)
}
return encoded
}
export function generateBillingId(): string {
const epochStart = new Date('2020-01-01').getTime()
const now = Date.now()
const timestamp = (now - epochStart) % 1000000000000
const randomNum = Math.floor(Math.random() * 100)
const finalNum = timestamp * 100 + randomNum
return `bill_${toBase62(finalNum).padStart(8, '0')}`
}
export function generateTransactionId(serviceType: string): string {
const timestamp = Date.now()
const randomStr = Math.random().toString(36).substr(2, 9)
return `txn_${serviceType}_${timestamp}_${randomStr}`
}
// Interface for creating billing records
export interface CreateBillingParams {
user: {
id: string
email: string
siliconId?: string
}
service: {
name: string
type:
| 'vps'
| 'kubernetes'
| 'developer_hire'
| 'vpn'
| 'hosting'
| 'storage'
| 'database'
| 'ai_service'
| 'custom'
id?: string
clusterId?: string
instanceId?: string
config?: Record<string, any>
}
billing: {
amount: number
currency?: 'INR' | 'USD'
cycle: 'onetime' | 'hourly' | 'daily' | 'weekly' | 'monthly' | 'quarterly' | 'yearly'
discountApplied?: number
taxAmount?: number
autoRenew?: boolean
billingPeriodStart?: Date
billingPeriodEnd?: Date
nextBillingDate?: Date
}
payment: {
transactionId?: string
status: 'pending' | 'paid' | 'failed' | 'refunded'
}
status: {
service: 'pending' | 'active' | 'completed' | 'cancelled' | 'failed' | 'refunded'
serviceStatus: 0 | 1 | 2 // 0: inactive, 1: active, 2: suspended
}
metadata?: {
remarks?: string
deploymentSuccess?: boolean
deploymentResponse?: any
userAgent?: string
ip?: string
[key: string]: any
}
}
export class BillingService {
/**
* Create a comprehensive billing record for any service
*/
static async createBillingRecord(params: CreateBillingParams): Promise<IBilling> {
await connectDB()
const billingId = generateBillingId()
const totalAmount =
params.billing.amount +
(params.billing.taxAmount || 0) -
(params.billing.discountApplied || 0)
// Validate the billing data
const billingData = {
billing_id: billingId,
service: params.service.name,
service_type: params.service.type,
amount: params.billing.amount,
currency: params.billing.currency || 'INR',
user_email: params.user.email,
silicon_id: params.user.siliconId || params.user.id,
user_id: params.user.id,
status: params.status.service,
payment_status: params.payment.status,
cycle: params.billing.cycle,
service_id: params.service.id,
cluster_id: params.service.clusterId,
instance_id: params.service.instanceId,
service_name: params.service.name,
service_config: params.service.config,
billing_period_start: params.billing.billingPeriodStart,
billing_period_end: params.billing.billingPeriodEnd,
next_billing_date: params.billing.nextBillingDate,
auto_renew: params.billing.autoRenew || false,
discount_applied: params.billing.discountApplied || 0,
tax_amount: params.billing.taxAmount || 0,
total_amount: totalAmount,
transaction_id: params.payment.transactionId,
remarks: params.metadata?.remarks,
metadata: params.metadata,
service_status: params.status.serviceStatus,
created_by: params.user.email,
}
// Create billing record directly without Zod validation to avoid field conflicts
// The Mongoose schema will handle validation
const newBilling = new Billing(billingData)
return await newBilling.save()
}
/**
* Update billing record status
*/
static async updateBillingStatus(
billingId: string,
updates: {
status?: 'pending' | 'active' | 'completed' | 'cancelled' | 'failed' | 'refunded'
paymentStatus?: 'pending' | 'paid' | 'failed' | 'refunded'
serviceStatus?: 0 | 1 | 2
remarks?: string
updatedBy?: string
}
): Promise<IBilling | null> {
await connectDB()
const updateData: any = {}
if (updates.status) updateData.status = updates.status
if (updates.paymentStatus) updateData.payment_status = updates.paymentStatus
if (updates.serviceStatus !== undefined) updateData.service_status = updates.serviceStatus
if (updates.remarks) updateData.remarks = updates.remarks
if (updates.updatedBy) updateData.updated_by = updates.updatedBy
return await Billing.findOneAndUpdate(
{ billing_id: billingId },
{ $set: updateData },
{ new: true }
)
}
/**
* Get billing records for a user
*/
static async getUserBillings(
userEmail: string,
siliconId: string,
options?: {
serviceType?: string
status?: string
limit?: number
offset?: number
}
): Promise<IBilling[]> {
await connectDB()
const query: any = {
$or: [{ user_email: userEmail }, { silicon_id: siliconId }],
}
if (options?.serviceType) {
query.service_type = options.serviceType
}
if (options?.status) {
query.status = options.status
}
let billingQuery = Billing.find(query).sort({ createdAt: -1 })
if (options?.limit) {
billingQuery = billingQuery.limit(options.limit)
}
if (options?.offset) {
billingQuery = billingQuery.skip(options.offset)
}
return await billingQuery.exec()
}
/**
* Get active services for a user
*/
static async getActiveServices(userEmail: string, siliconId: string): Promise<IBilling[]> {
await connectDB()
return await (Billing as any).findActiveServices(userEmail, siliconId)
}
/**
* Process service deployment billing (deduct balance + create billing record)
*/
static async processServiceDeployment(params: {
user: { id: string; email: string; siliconId?: string }
service: {
name: string
type:
| 'vps'
| 'kubernetes'
| 'developer_hire'
| 'vpn'
| 'hosting'
| 'storage'
| 'database'
| 'ai_service'
| 'custom'
id?: string
clusterId?: string
instanceId?: string
config?: Record<string, any>
}
amount: number
currency?: 'INR' | 'USD'
cycle?: 'onetime' | 'hourly' | 'daily' | 'weekly' | 'monthly' | 'quarterly' | 'yearly'
deploymentSuccess: boolean
deploymentResponse?: any
metadata?: Record<string, any>
}): Promise<{
billing: IBilling
transaction?: any
balanceUpdated: boolean
}> {
await connectDB()
// Get user data and check balance
const userData = await UserModel.findOne({ email: params.user.email })
if (!userData) {
throw new Error('User not found')
}
const currentBalance = userData.balance || 0
let balanceUpdated = false
let transactionRecord = null
// Deduct balance if deployment was successful and amount > 0
if (params.deploymentSuccess && params.amount > 0) {
if (currentBalance < params.amount) {
throw new Error(
`Insufficient balance. Required: ₹${params.amount}, Available: ₹${currentBalance}`
)
}
const newBalance = currentBalance - params.amount
await UserModel.updateOne({ email: params.user.email }, { $set: { balance: newBalance } })
balanceUpdated = true
// Create transaction record
const transactionId = generateTransactionId(params.service.type)
try {
const newTransaction = new Transaction({
transactionId,
userId: params.user.id,
email: params.user.email,
type: 'debit',
amount: params.amount,
service: params.service.name,
serviceId: params.service.id || params.service.clusterId || params.service.instanceId,
description: `Payment for ${params.service.name} - ${params.service.type}`,
status: 'completed',
previousBalance: currentBalance,
newBalance,
metadata: {
serviceType: params.service.type,
deploymentSuccess: params.deploymentSuccess,
...params.metadata,
},
})
transactionRecord = await newTransaction.save()
} catch (transactionError) {
console.error('Failed to create transaction record:', transactionError)
// Continue with billing creation even if transaction fails
}
}
// Create billing record
const billing = await this.createBillingRecord({
user: params.user,
service: params.service,
billing: {
amount: params.amount,
currency: params.currency || 'INR',
cycle: params.cycle || 'onetime',
},
payment: {
transactionId: transactionRecord?.transactionId,
status: params.deploymentSuccess && params.amount > 0 ? 'paid' : 'pending',
},
status: {
service: params.deploymentSuccess ? 'active' : 'failed',
serviceStatus: params.deploymentSuccess ? 1 : 0,
},
metadata: {
remarks: params.deploymentSuccess ? 'Deployment Success' : 'Deployment Failed',
deploymentSuccess: params.deploymentSuccess,
deploymentResponse: params.deploymentResponse,
...params.metadata,
},
})
return {
billing,
transaction: transactionRecord,
balanceUpdated,
}
}
/**
* Get billing statistics for a user
*/
static async getBillingStats(
userEmail: string,
siliconId: string
): Promise<{
totalSpent: number
activeServices: number
totalServices: number
serviceBreakdown: Record<string, { count: number; amount: number }>
}> {
await connectDB()
const billings = await this.getUserBillings(userEmail, siliconId)
const stats = {
totalSpent: 0,
activeServices: 0,
totalServices: billings.length,
serviceBreakdown: {} as Record<string, { count: number; amount: number }>,
}
billings.forEach((billing) => {
stats.totalSpent += billing.total_amount || billing.amount
if (billing.status === 'active' && billing.service_status === 1) {
stats.activeServices++
}
const serviceType = billing.service_type
if (!stats.serviceBreakdown[serviceType]) {
stats.serviceBreakdown[serviceType] = { count: 0, amount: 0 }
}
stats.serviceBreakdown[serviceType].count++
stats.serviceBreakdown[serviceType].amount += billing.total_amount || billing.amount
})
return stats
}
}
export default BillingService
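/**
 * Example usage after a deployment attempt (a minimal sketch; `user` is assumed to be a
 * loaded Mongoose user document and the service details and amount are illustrative):
 *
 *   import BillingService from '@/lib/billing-service'
 *
 *   const result = await BillingService.processServiceDeployment({
 *     user: { id: user._id.toString(), email: user.email, siliconId: user.siliconId },
 *     service: { name: 'VPS Starter', type: 'vps', instanceId: 'vps-123' },
 *     amount: 499,
 *     cycle: 'monthly',
 *     deploymentSuccess: true,
 *   })
 *   // result.billing is the saved record; result.balanceUpdated tells whether funds were deducted
 */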

141
lib/cache.ts Normal file

@@ -0,0 +1,141 @@
import { connectRedis } from './redis'
/**
* Cache management utilities for dashboard and topic data
*/
export class DashboardCache {
/**
* Invalidate dashboard cache for a specific user
*/
static async invalidateUserDashboard(userId: string): Promise<void> {
try {
const redisClient = await connectRedis()
const cacheKey = `dashboard:user:${userId}`
await redisClient.del(cacheKey)
console.log(`Dashboard cache invalidated for user ${userId}`)
} catch (error) {
console.warn('Failed to invalidate dashboard cache:', error)
}
}
/**
* Invalidate dashboard caches for multiple users
*/
static async invalidateMultipleUserDashboards(userIds: string[]): Promise<void> {
try {
const redisClient = await connectRedis()
const cacheKeys = userIds.map((userId) => `dashboard:user:${userId}`)
if (cacheKeys.length > 0) {
await redisClient.del(cacheKeys)
console.log(`Dashboard caches invalidated for ${userIds.length} users`)
}
} catch (error) {
console.warn('Failed to invalidate multiple dashboard caches:', error)
}
}
/**
* Get cached dashboard data for a user
*/
static async getUserDashboard(userId: string): Promise<any | null> {
try {
const redisClient = await connectRedis()
const cacheKey = `dashboard:user:${userId}`
const cachedData = await redisClient.get(cacheKey)
if (!cachedData) return null
// Handle both string and Buffer responses
const dataString = Buffer.isBuffer(cachedData)
? cachedData.toString('utf-8')
: String(cachedData)
return JSON.parse(dataString)
} catch (error) {
console.warn('Failed to get cached dashboard data:', error)
return null
}
}
/**
* Set dashboard cache for a user
*/
static async setUserDashboard(userId: string, data: any, ttlSeconds = 300): Promise<void> {
try {
const redisClient = await connectRedis()
const cacheKey = `dashboard:user:${userId}`
await redisClient.setEx(cacheKey, ttlSeconds, JSON.stringify(data))
console.log(`Dashboard data cached for user ${userId} (TTL: ${ttlSeconds}s)`)
} catch (error) {
console.warn('Failed to cache dashboard data:', error)
}
}
}
export class TopicCache {
/**
* Invalidate topic-specific caches when a topic is modified
*/
static async invalidateTopicCaches(topicSlug: string, authorId: string): Promise<void> {
try {
const redisClient = await connectRedis()
// Invalidate patterns that might be affected
const cachePatterns = [
`topic:${topicSlug}`, // Individual topic cache
`topics:public:*`, // Public topic listings
`topics:user:${authorId}:*`, // User's topic listings
`dashboard:user:${authorId}`, // User's dashboard
]
for (const pattern of cachePatterns) {
if (pattern.includes('*')) {
// For patterns with wildcards, we need to scan and delete
const keys = await redisClient.keys(pattern)
if (keys.length > 0) {
await redisClient.del(keys)
console.log(`Invalidated ${keys.length} cache keys matching pattern: ${pattern}`)
}
} else {
// Direct key deletion
await redisClient.del(pattern)
console.log(`Cache key deleted: ${pattern}`)
}
}
} catch (error) {
console.warn('Failed to invalidate topic caches:', error)
}
}
/**
* Invalidate public topic listing caches
*/
static async invalidatePublicTopicCaches(): Promise<void> {
try {
const redisClient = await connectRedis()
const keys = await redisClient.keys('topics:public:*')
if (keys.length > 0) {
await redisClient.del(keys)
console.log(`Invalidated ${keys.length} public topic cache keys`)
}
} catch (error) {
console.warn('Failed to invalidate public topic caches:', error)
}
}
}
/**
* Utility function to generate consistent cache keys
*/
export const CacheKeys = {
userDashboard: (userId: string) => `dashboard:user:${userId}`,
userTopics: (userId: string, page = 1, limit = 10) =>
`topics:user:${userId}:page:${page}:limit:${limit}`,
publicTopics: (page = 1, limit = 10, filters?: string) => {
const filterKey = filters ? `:filters:${Buffer.from(filters).toString('base64')}` : ''
return `topics:public:page:${page}:limit:${limit}${filterKey}`
},
individualTopic: (slug: string) => `topic:${slug}`,
relatedTopics: (topicId: string) => `topics:related:${topicId}`,
}
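/**
 * Example cache-aside usage for the dashboard (a minimal sketch; loadDashboardFromDb is a
 * hypothetical helper, not defined in this commit):
 *
 *   import { DashboardCache } from '@/lib/cache'
 *
 *   async function getDashboard(userId: string) {
 *     const cached = await DashboardCache.getUserDashboard(userId)
 *     if (cached) return cached
 *     const fresh = await loadDashboardFromDb(userId) // hypothetical DB query
 *     await DashboardCache.setUserDashboard(userId, fresh, 300) // cache for 5 minutes
 *     return fresh
 *   }
 */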

199
lib/env.ts Normal file

@@ -0,0 +1,199 @@
/**
* Environment variable validation and management
* Ensures all required environment variables are present and valid
*/
import { z } from 'zod'
// Define the schema for environment variables
const envSchema = z.object({
// Node environment
NODE_ENV: z.enum(['development', 'production', 'test']).default('development'),
// Server configuration
PORT: z.string().default('3006').transform(Number),
// Database
MONGODB_URI: z.string().min(1, 'MongoDB URI is required'),
// Redis
REDIS_URL: z.string().min(1, 'Redis URL is required'),
// Authentication secrets (required)
SESSION_SECRET: z.string().min(32, 'Session secret must be at least 32 characters'),
JWT_SECRET: z.string().min(1, 'JWT secret is required'),
JWT_REFRESH_SECRET: z.string().min(1, 'JWT refresh secret is required'),
// Optional configuration
NEXT_PUBLIC_SITE_URL: z.string().url().optional(),
NEXT_PUBLIC_APP_URL: z.string().url().optional(),
// Analytics (optional)
NEXT_PUBLIC_GA_MEASUREMENT_ID: z.string().optional(),
NEXT_PUBLIC_PLAUSIBLE_DOMAIN: z.string().optional(),
// Email (optional)
SMTP_HOST: z.string().optional(),
SMTP_PORT: z
.string()
.optional()
.transform((val) => (val ? Number(val) : undefined)),
SMTP_USER: z.string().optional(),
SMTP_PASS: z.string().optional(),
// File storage (optional) - using dev-portfolio naming convention
MINIO_ENDPOINT: z.string().optional(),
MINIO_PORT: z
.string()
.optional()
.transform((val) => (val ? Number(val) : undefined)),
MINIO_KEY: z.string().optional(),
MINIO_SECRET: z.string().optional(),
MINIO_IMAGE_BUCKET: z.string().optional(),
// Legacy names for backward compatibility
MINIO_ACCESS_KEY: z.string().optional(),
MINIO_SECRET_KEY: z.string().optional(),
MINIO_BUCKET: z.string().optional(),
// External file upload API
FILE_UPLOAD_TOKEN: z.string().optional(),
// Monitoring (optional)
SENTRY_DSN: z.string().optional(),
SENTRY_ORG: z.string().optional(),
SENTRY_PROJECT: z.string().optional(),
})
// Parse and validate environment variables
function validateEnv() {
// Skip validation during build if SKIP_ENV_VALIDATION is set
if (process.env.SKIP_ENV_VALIDATION === 'true') {
console.log('⚠️ Skipping environment validation (SKIP_ENV_VALIDATION=true)')
// Return process.env with unknown first to bypass TypeScript's type checking
return process.env as unknown as z.infer<typeof envSchema>
}
try {
return envSchema.parse(process.env)
} catch (error) {
if (error instanceof z.ZodError) {
const missingVars = error.issues.map((issue) => `${issue.path.join('.')}: ${issue.message}`)
console.error('❌ Invalid environment variables:')
missingVars.forEach((error) => console.error(` - ${error}`))
throw new Error('Environment validation failed')
}
throw error
}
}
// Export validated environment variables
export const env = validateEnv() as z.infer<typeof envSchema>
// Utility functions for environment checks
export const isDevelopment = env.NODE_ENV === 'development'
export const isProduction = env.NODE_ENV === 'production'
export const isTest = env.NODE_ENV === 'test'
// Database configuration
export const dbConfig = {
uri: env.MONGODB_URI,
}
// Redis configuration
export const redisConfig = {
url: env.REDIS_URL,
}
// Authentication configuration
export const authConfig = {
sessionSecret: env.SESSION_SECRET,
jwtSecret: env.JWT_SECRET,
jwtRefreshSecret: env.JWT_REFRESH_SECRET,
}
// App configuration
export const appConfig = {
port: env.PORT,
siteUrl: env.NEXT_PUBLIC_SITE_URL || env.NEXT_PUBLIC_APP_URL || `http://localhost:${env.PORT}`,
appUrl: env.NEXT_PUBLIC_APP_URL || env.NEXT_PUBLIC_SITE_URL || `http://localhost:${env.PORT}`,
}
// Analytics configuration
export const analyticsConfig = {
googleAnalytics: {
measurementId: env.NEXT_PUBLIC_GA_MEASUREMENT_ID,
enabled: !!env.NEXT_PUBLIC_GA_MEASUREMENT_ID && isProduction,
},
plausible: {
domain: env.NEXT_PUBLIC_PLAUSIBLE_DOMAIN,
enabled: !!env.NEXT_PUBLIC_PLAUSIBLE_DOMAIN && isProduction,
},
}
// Email configuration
export const emailConfig = {
smtp: {
host: env.SMTP_HOST,
port: env.SMTP_PORT || 587,
user: env.SMTP_USER,
pass: env.SMTP_PASS,
},
enabled: !!(env.SMTP_HOST && env.SMTP_USER && env.SMTP_PASS),
}
// File storage configuration - prioritize dev-portfolio naming
export const storageConfig = {
minio: {
endpoint: env.MINIO_ENDPOINT,
port: env.MINIO_PORT || 9000,
accessKey: env.MINIO_KEY || env.MINIO_ACCESS_KEY,
secretKey: env.MINIO_SECRET || env.MINIO_SECRET_KEY,
bucket: env.MINIO_IMAGE_BUCKET || env.MINIO_BUCKET,
},
enabled: !!(
env.MINIO_ENDPOINT &&
(env.MINIO_KEY || env.MINIO_ACCESS_KEY) &&
(env.MINIO_SECRET || env.MINIO_SECRET_KEY)
),
}
// Monitoring configuration
export const monitoringConfig = {
sentry: {
dsn: env.SENTRY_DSN,
org: env.SENTRY_ORG,
project: env.SENTRY_PROJECT,
},
enabled: !!env.SENTRY_DSN && isProduction,
}
// Environment info for debugging
export const envInfo = {
nodeEnv: env.NODE_ENV,
port: env.PORT,
features: {
analytics: analyticsConfig.googleAnalytics.enabled || analyticsConfig.plausible.enabled,
email: emailConfig.enabled,
storage: storageConfig.enabled,
monitoring: monitoringConfig.enabled,
},
}
// Log environment info in development
if (isDevelopment) {
console.log('🔧 Environment configuration:')
console.log(` - Node: ${env.NODE_ENV}`)
console.log(` - Port: ${env.PORT}`)
console.log(` - Database: ${env.MONGODB_URI.includes('localhost') ? 'Local' : 'Remote'}`)
console.log(` - Redis: ${env.REDIS_URL.includes('localhost') ? 'Local' : 'Remote'}`)
console.log(
` - Features: ${
Object.entries(envInfo.features)
.filter(([, enabled]) => enabled)
.map(([name]) => name)
.join(', ') || 'None'
}`
)
}
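/**
 * Example usage (a minimal sketch; importing this module validates process.env once at
 * startup and throws if a required variable is missing, unless SKIP_ENV_VALIDATION=true):
 *
 *   import { env, appConfig, isProduction } from '@/lib/env'
 *
 *   console.log(`Starting on port ${env.PORT} (${isProduction ? 'production' : 'development'})`)
 *   console.log(`Public site URL: ${appConfig.siteUrl}`)
 */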

152
lib/file-vault.ts Normal file

@@ -0,0 +1,152 @@
/**
* File upload service using external API
* Replaces MINIO with custom file upload API
*/
import { env } from './env'
// File upload result from the API
export interface FileUploadResponse {
success: boolean
url: string
filename: string
}
// File upload result for internal use
export interface UploadResult {
url: string
filename: string
originalName: string
size: number
type: string
uploadedAt: string
}
/**
* Upload file to external API
*/
export async function uploadFile(
buffer: Buffer,
filename: string,
contentType: string,
userId?: string
): Promise<UploadResult> {
console.log('📤 Starting file upload:', { filename, contentType, bufferSize: buffer.length })
try {
// Create FormData for multipart upload
const formData = new FormData()
const blob = new Blob([buffer], { type: contentType })
formData.append('file', blob, filename)
// Prepare headers
const headers: Record<string, string> = {
'x-user-data': userId || 'default-user',
Authorization: `Bearer ${env.FILE_UPLOAD_TOKEN || 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IlRlc3QgVXNlciIsImlhdCI6MTUxNjIzOTAyMn0.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c'}`,
}
// The external upload endpoint must be configured before we can proceed
const uploadApiUrl = process.env.UPLOAD_API_URL
if (!uploadApiUrl) {
throw new Error('UPLOAD_API_URL environment variable is not set')
}
console.log('📤 Uploading to external API:', {
url: uploadApiUrl,
filename,
size: buffer.length,
userId: userId || 'default-user',
})
// Make the upload request
const response = await fetch(uploadApiUrl, {
method: 'POST',
headers,
body: formData,
})
if (!response.ok) {
const errorText = await response.text()
throw new Error(`Upload failed: ${response.status} ${response.statusText} - ${errorText}`)
}
const result: FileUploadResponse = await response.json()
if (!result.success) {
throw new Error('Upload API returned success: false')
}
console.log('✅ File uploaded successfully:', result)
return {
url: result.url,
filename: result.filename,
originalName: filename,
size: buffer.length,
type: contentType,
uploadedAt: new Date().toISOString(),
}
} catch (error) {
console.error('❌ Error uploading file:', error)
throw error
}
}
/**
* Generate unique filename with timestamp and random string
*/
export function generateUniqueFilename(originalName: string): string {
const timestamp = Date.now()
const randomString = Math.random().toString(36).substring(2)
const extension = originalName.split('.').pop()
return `${timestamp}-${randomString}.${extension}`
}
/**
* Validate file type
*/
export function validateFileType(mimetype: string, allowedTypes: string[]): boolean {
return allowedTypes.includes(mimetype)
}
/**
* Validate file size (in bytes)
*/
export function validateFileSize(size: number, maxSize: number): boolean {
return size <= maxSize
}
/**
* Get file URL (files are already publicly accessible)
*/
export async function getFileUrl(filePath: string): Promise<string> {
// Files from the upload API are already publicly accessible
// If filePath is already a full URL, return it as is
if (filePath.startsWith('http')) {
return filePath
}
// Otherwise, construct the URL using the delivery base URL
return `${process.env.DELIVERY_BASE_URL}/${filePath}`
}
/**
* Delete file (not supported by the current API)
*/
export async function deleteFile(filePath: string): Promise<void> {
console.warn('⚠️ File deletion not supported by current upload API:', filePath)
// The external API doesn't provide a delete endpoint
// This is a no-op for now
}
/**
* Move file to permanent storage (not needed with new API)
*/
export async function moveToPermStorage(tempPath: string, permanentPath: string): Promise<void> {
console.log('Move to permanent storage not needed with new API')
// The new API directly uploads to permanent storage
// This is a no-op
}
/**
* Initialize bucket (not needed with new API)
*/
export async function initializeBucket(): Promise<void> {
console.log('Bucket initialization not needed with new API')
// The new API handles storage internally
// This is a no-op
}
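/**
 * Example usage in an upload API route (a minimal sketch; the route path and form field
 * name are illustrative assumptions):
 *
 *   // app/api/upload/route.ts
 *   import { NextRequest, NextResponse } from 'next/server'
 *   import { uploadFile, generateUniqueFilename, validateFileType } from '@/lib/file-vault'
 *
 *   export async function POST(request: NextRequest) {
 *     const formData = await request.formData()
 *     const file = formData.get('file') as File
 *     if (!validateFileType(file.type, ['image/png', 'image/jpeg'])) {
 *       return NextResponse.json({ error: 'Unsupported file type' }, { status: 400 })
 *     }
 *     const buffer = Buffer.from(await file.arrayBuffer())
 *     const result = await uploadFile(buffer, generateUniqueFilename(file.name), file.type)
 *     return NextResponse.json(result)
 *   }
 */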

82
lib/google-oauth.ts Normal file

@@ -0,0 +1,82 @@
import { OAuth2Client } from 'google-auth-library'
const client = new OAuth2Client(
process.env.GOOGLE_CLIENT_ID,
process.env.GOOGLE_CLIENT_SECRET,
process.env.GOOGLE_REDIRECT_URI ||
`${process.env.NEXT_PUBLIC_APP_URL || 'http://localhost:4023'}/api/auth/google/callback`
)
export interface GoogleUserInfo {
id: string
email: string
name: string
picture?: string
given_name?: string
family_name?: string
verified_email: boolean
}
export const getGoogleAuthURL = () => {
const scopes = ['openid', 'profile', 'email']
return client.generateAuthUrl({
access_type: 'offline',
scope: scopes,
prompt: 'consent',
state: 'google_oauth',
})
}
export const getGoogleUser = async (code: string): Promise<GoogleUserInfo> => {
const { tokens } = await client.getToken(code)
if (!tokens.access_token) {
throw new Error('No access token received from Google')
}
// Get user info from Google
const response = await fetch(
`https://www.googleapis.com/oauth2/v2/userinfo?access_token=${tokens.access_token}`
)
if (!response.ok) {
throw new Error('Failed to fetch user info from Google')
}
const userInfo = (await response.json()) as GoogleUserInfo
if (!userInfo.verified_email) {
throw new Error('Google email not verified')
}
return userInfo
}
export const verifyGoogleToken = async (token: string): Promise<GoogleUserInfo> => {
try {
const ticket = await client.verifyIdToken({
idToken: token,
audience: process.env.GOOGLE_CLIENT_ID,
})
const payload = ticket.getPayload()
if (!payload) {
throw new Error('Invalid Google token payload')
}
return {
id: payload.sub,
email: payload.email!,
name: payload.name!,
picture: payload.picture,
given_name: payload.given_name,
family_name: payload.family_name,
verified_email: payload.email_verified || false,
}
} catch (error) {
throw new Error('Failed to verify Google token')
}
}
export { client as googleOAuthClient }
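/**
 * Example OAuth callback handler (a minimal sketch; the post-login user lookup and the
 * redirect target are illustrative assumptions):
 *
 *   // app/api/auth/google/callback/route.ts
 *   import { NextRequest, NextResponse } from 'next/server'
 *   import { getGoogleUser } from '@/lib/google-oauth'
 *
 *   export async function GET(request: NextRequest) {
 *     const code = request.nextUrl.searchParams.get('code')
 *     if (!code) {
 *       return NextResponse.json({ error: 'Missing authorization code' }, { status: 400 })
 *     }
 *     const googleUser = await getGoogleUser(code)
 *     // Look up or create the local user for googleUser.email, then issue tokens
 *     return NextResponse.redirect(new URL('/dashboard', request.url))
 *   }
 */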

47
lib/jwt.ts Normal file

@@ -0,0 +1,47 @@
import jwt from 'jsonwebtoken'
const JWT_SECRET = process.env.JWT_SECRET || 'your-jwt-secret-change-in-production'
const JWT_REFRESH_SECRET =
process.env.JWT_REFRESH_SECRET || 'your-jwt-refresh-secret-change-in-production'
export interface TokenPayload {
userId: string
email: string
role: string
}
export const generateTokens = (payload: TokenPayload) => {
const accessToken = jwt.sign(payload, JWT_SECRET, {
expiresIn: '15m', // Short-lived access token
})
const refreshToken = jwt.sign(payload, JWT_REFRESH_SECRET, {
expiresIn: '7d', // Long-lived refresh token
})
return { accessToken, refreshToken }
}
export const verifyAccessToken = (token: string): TokenPayload | null => {
try {
return jwt.verify(token, JWT_SECRET) as TokenPayload
} catch (error) {
return null
}
}
export const verifyRefreshToken = (token: string): TokenPayload | null => {
try {
return jwt.verify(token, JWT_REFRESH_SECRET) as TokenPayload
} catch (error) {
return null
}
}
export const decodeToken = (token: string): TokenPayload | null => {
try {
return jwt.decode(token) as TokenPayload
} catch (error) {
return null
}
}
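/**
 * Example token issuance on login (a minimal sketch; `user` is assumed to be a loaded
 * user document, and the refresh-token cookie options are illustrative assumptions):
 *
 *   import { NextResponse } from 'next/server'
 *   import { generateTokens } from '@/lib/jwt'
 *
 *   const { accessToken, refreshToken } = generateTokens({
 *     userId: user._id.toString(),
 *     email: user.email,
 *     role: user.role,
 *   })
 *   const response = NextResponse.json({ success: true })
 *   response.cookies.set('accessToken', accessToken, { httpOnly: true, maxAge: 15 * 60 })
 *   response.cookies.set('refreshToken', refreshToken, { httpOnly: true, maxAge: 7 * 24 * 60 * 60 })
 */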

193
lib/lazy-loading.tsx Normal file

@@ -0,0 +1,193 @@
/**
* Lazy loading utilities for performance optimization
*/
import { lazy, ComponentType } from 'react'
/**
* Create a lazy-loaded component with better error handling
*/
export function createLazyComponent<T extends ComponentType<any>>(
importFunction: () => Promise<{ default: T }>,
displayName?: string
): T {
const LazyComponent = lazy(importFunction)
if (displayName) {
;(LazyComponent as any).displayName = `Lazy(${displayName})`
}
return LazyComponent as unknown as T
}
/**
* Intersection Observer hook for lazy loading elements
*/
import { useEffect, useRef, useState } from 'react'
interface UseIntersectionObserverOptions {
threshold?: number
root?: Element | null
rootMargin?: string
freezeOnceVisible?: boolean
}
export function useIntersectionObserver(options: UseIntersectionObserverOptions = {}) {
const { threshold = 0, root = null, rootMargin = '0%', freezeOnceVisible = false } = options
const [entry, setEntry] = useState<IntersectionObserverEntry>()
const [node, setNode] = useState<Element | null>(null)
const observer = useRef<IntersectionObserver | null>(null)
const frozen = entry?.isIntersecting && freezeOnceVisible
const updateEntry = ([entry]: IntersectionObserverEntry[]): void => {
setEntry(entry)
}
useEffect(() => {
const hasIOSupport = !!window.IntersectionObserver
if (!hasIOSupport || frozen || !node) return
const observerParams = { threshold, root, rootMargin }
const isNewObserver = !observer.current
const hasOptionsChanged =
observer.current &&
(observer.current.root !== root ||
observer.current.rootMargin !== rootMargin ||
observer.current.thresholds[0] !== threshold)
if (isNewObserver || hasOptionsChanged) {
if (observer.current) {
observer.current.disconnect()
}
observer.current = new window.IntersectionObserver(updateEntry, observerParams)
}
observer.current.observe(node)
return () => observer.current?.disconnect()
}, [node, threshold, root, rootMargin, frozen])
const cleanup = () => {
if (observer.current) {
observer.current.disconnect()
observer.current = null
}
}
useEffect(() => {
return cleanup
}, [])
return [setNode, !!entry?.isIntersecting, entry, cleanup] as const
}
/**
* Lazy loading image component
*/
import Image from 'next/image'
import { cn } from './utils'
interface LazyImageProps {
src: string
alt: string
width?: number
height?: number
className?: string
fill?: boolean
placeholder?: 'blur' | 'empty'
blurDataURL?: string
priority?: boolean
sizes?: string
}
export function LazyImage({
src,
alt,
width,
height,
className,
fill = false,
placeholder = 'empty',
blurDataURL,
priority = false,
sizes,
...props
}: LazyImageProps) {
const [setRef, isVisible] = useIntersectionObserver({
threshold: 0.1,
freezeOnceVisible: true,
})
return (
<div ref={setRef} className={cn('overflow-hidden', className)}>
{(isVisible || priority) && (
<Image
src={src}
alt={alt}
width={width}
height={height}
fill={fill}
placeholder={placeholder}
blurDataURL={blurDataURL}
priority={priority}
sizes={sizes}
className="transition-opacity duration-300"
{...props}
/>
)}
</div>
)
}
/**
* Lazy loading wrapper for any content
*/
interface LazyContentProps {
children: React.ReactNode
className?: string
fallback?: React.ReactNode
threshold?: number
rootMargin?: string
}
export function LazyContent({
children,
className,
fallback = null,
threshold = 0.1,
rootMargin = '50px',
}: LazyContentProps) {
const [setRef, isVisible] = useIntersectionObserver({
threshold,
rootMargin,
freezeOnceVisible: true,
})
return (
<div ref={setRef} className={className}>
{isVisible ? children : fallback}
</div>
)
}
/**
* Preload images for better performance
*/
export function preloadImage(src: string): Promise<void> {
return new Promise((resolve, reject) => {
const img = new window.Image()
img.onload = () => resolve()
img.onerror = reject
img.src = src
})
}
/**
* Preload multiple images
*/
export async function preloadImages(sources: string[]): Promise<void[]> {
return Promise.all(sources.map(preloadImage))
}
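/**
 * Example usage in a page section (a minimal sketch; the image path, dimensions and
 * placeholder styling are illustrative assumptions):
 *
 *   'use client'
 *   import { LazyImage, LazyContent } from '@/lib/lazy-loading'
 *
 *   export function Gallery() {
 *     return (
 *       <LazyContent fallback={<div className="h-64 animate-pulse bg-gray-100" />}>
 *         <LazyImage src="/images/hero.jpg" alt="Hero" width={1200} height={630} />
 *       </LazyContent>
 *     )
 *   }
 */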

186
lib/minio.ts Normal file

@@ -0,0 +1,186 @@
import { Client } from 'minio'
import { env } from './env'
// Get MinIO credentials with dev-portfolio naming first, fallback to legacy names
const minioAccessKey = env.MINIO_KEY || env.MINIO_ACCESS_KEY
const minioSecretKey = env.MINIO_SECRET || env.MINIO_SECRET_KEY
const minioBucket = env.MINIO_IMAGE_BUCKET || env.MINIO_BUCKET
// Only initialize MinIO client if ALL required configuration is available
export const minioClient = (env.MINIO_ENDPOINT && minioAccessKey && minioSecretKey)
? new Client({
endPoint: env.MINIO_ENDPOINT,
port: env.MINIO_PORT || 9000,
useSSL: env.NODE_ENV === 'production',
accessKey: minioAccessKey,
secretKey: minioSecretKey,
})
: null
// Log MinIO initialization status
if (!minioClient) {
console.warn('⚠️ MinIO client not initialized. Missing configuration:', {
hasEndpoint: !!env.MINIO_ENDPOINT,
hasKey: !!minioAccessKey,
hasSecret: !!minioSecretKey,
checkedVars: ['MINIO_KEY/MINIO_ACCESS_KEY', 'MINIO_SECRET/MINIO_SECRET_KEY']
})
} else {
console.log('✅ MinIO client initialized successfully with endpoint:', env.MINIO_ENDPOINT)
}
export const BUCKET_NAME = minioBucket || 'nextjs-boilerplate'
// Initialize bucket if it doesn't exist
export async function initializeBucket() {
if (!minioClient) {
console.error('❌ MinIO client not configured for bucket initialization')
throw new Error(
'MinIO client not configured. Please set MINIO_ENDPOINT, MINIO_ACCESS_KEY, and MINIO_SECRET_KEY environment variables.'
)
}
try {
console.log('🔍 Checking if bucket exists:', BUCKET_NAME)
const bucketExists = await minioClient.bucketExists(BUCKET_NAME)
if (!bucketExists) {
console.log('🚀 Creating bucket:', BUCKET_NAME)
await minioClient.makeBucket(BUCKET_NAME, 'us-east-1')
console.log('✅ Bucket created successfully:', BUCKET_NAME)
} else {
console.log('✅ Bucket already exists:', BUCKET_NAME)
}
} catch (error: any) {
console.error('❌ Error initializing bucket:', {
error: error.message,
code: error.code,
statusCode: error.statusCode,
bucketName: BUCKET_NAME,
endpoint: env.MINIO_ENDPOINT,
port: env.MINIO_PORT
})
throw error
}
}
// Generate unique filename with timestamp and random string
export function generateUniqueFilename(originalName: string): string {
const timestamp = Date.now()
const randomString = Math.random().toString(36).substring(2)
const extension = originalName.split('.').pop()
return `${timestamp}-${randomString}.${extension}`
}
// Upload file to MinIO
export async function uploadFile(
buffer: Buffer,
filename: string,
contentType: string
): Promise<string> {
console.log('📤 Starting file upload:', { filename, contentType, bufferSize: buffer.length })
if (!minioClient) {
console.error('❌ MinIO client not configured')
throw new Error(
'MinIO client not configured. Please set MINIO_ENDPOINT, MINIO_ACCESS_KEY, and MINIO_SECRET_KEY environment variables.'
)
}
try {
console.log('🔧 Initializing bucket:', BUCKET_NAME)
await initializeBucket()
const uniqueFilename = generateUniqueFilename(filename)
const tempPath = `temp/${uniqueFilename}`
console.log('📤 Uploading to MinIO:', {
bucket: BUCKET_NAME,
path: tempPath,
size: buffer.length,
endpoint: env.MINIO_ENDPOINT,
port: env.MINIO_PORT,
useSSL: env.NODE_ENV === 'production'
})
await minioClient.putObject(BUCKET_NAME, tempPath, buffer, buffer.length, {
'Content-Type': contentType,
})
console.log('✅ File uploaded successfully:', tempPath)
return tempPath
} catch (error: any) {
console.error('❌ Error uploading file:', {
error: error.message,
code: error.code,
statusCode: error.statusCode,
resource: error.resource,
region: error.region,
bucketName: error.bucketName,
objectName: error.objectName,
})
throw error
}
}
// Move file from temp to permanent storage
export async function moveToPermStorage(tempPath: string, permanentPath: string): Promise<void> {
if (!minioClient) {
throw new Error(
'MinIO client not configured. Please set MINIO_ENDPOINT, MINIO_ACCESS_KEY, and MINIO_SECRET_KEY environment variables.'
)
}
try {
// Copy from temp to permanent location
await minioClient.copyObject(BUCKET_NAME, permanentPath, `${BUCKET_NAME}/${tempPath}`)
// Remove from temp location
await minioClient.removeObject(BUCKET_NAME, tempPath)
} catch (error) {
console.error('Error moving file to permanent storage:', error)
throw error
}
}
// Delete file from storage
export async function deleteFile(filePath: string): Promise<void> {
if (!minioClient) {
throw new Error(
'MinIO client not configured. Please set MINIO_ENDPOINT, MINIO_ACCESS_KEY, and MINIO_SECRET_KEY environment variables.'
)
}
try {
await minioClient.removeObject(BUCKET_NAME, filePath)
} catch (error) {
console.error('Error deleting file:', error)
throw error
}
}
// Get file URL (for serving files)
export async function getFileUrl(filePath: string): Promise<string> {
if (!minioClient) {
throw new Error(
'MinIO client not configured. Please set MINIO_ENDPOINT, MINIO_ACCESS_KEY, and MINIO_SECRET_KEY environment variables.'
)
}
try {
return await minioClient.presignedGetObject(BUCKET_NAME, filePath, 7 * 24 * 60 * 60) // 7 days
} catch (error) {
console.error('Error getting file URL:', error)
throw error
}
}
// Validate file type
export function validateFileType(mimetype: string, allowedTypes: string[]): boolean {
return allowedTypes.includes(mimetype)
}
// Validate file size (in bytes)
export function validateFileSize(size: number, maxSize: number): boolean {
return size <= maxSize
}
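/**
 * Example upload flow (a minimal sketch; the avatar filename and permanent path layout
 * are illustrative assumptions):
 *
 *   import { uploadFile, moveToPermStorage, getFileUrl } from '@/lib/minio'
 *
 *   async function saveAvatar(userId: string, buffer: Buffer) {
 *     const tempPath = await uploadFile(buffer, 'avatar.png', 'image/png')
 *     const permanentPath = `avatars/${userId}.png`
 *     await moveToPermStorage(tempPath, permanentPath)
 *     return getFileUrl(permanentPath) // presigned URL, valid for 7 days
 *   }
 */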

87
lib/mongodb.ts Normal file

@@ -0,0 +1,87 @@
import mongoose from 'mongoose'
const MONGODB_URI = process.env.MONGODB_URI || 'mongodb://localhost:27017/siliconpin'
if (!MONGODB_URI) {
throw new Error('Please define the MONGODB_URI environment variable inside .env.local')
}
interface CachedConnection {
conn: typeof mongoose | null
promise: Promise<typeof mongoose> | null
}
// Global variable to cache the database connection across hot reloads in development
declare global {
var mongoose: CachedConnection | undefined
}
let cached: CachedConnection = global.mongoose || { conn: null, promise: null }
if (!global.mongoose) {
global.mongoose = cached
}
async function connectDB(): Promise<typeof mongoose> {
// Skip database connection during build phase
const isBuildTime = process.env.NEXT_PHASE === 'phase-production-build'
if (isBuildTime) {
throw new Error('Database connection skipped during build phase')
}
if (cached.conn) {
return cached.conn
}
if (!cached.promise) {
const opts = {
bufferCommands: false,
maxPoolSize: 10,
minPoolSize: 5,
socketTimeoutMS: 45000,
connectTimeoutMS: 10000,
serverSelectionTimeoutMS: 10000,
}
cached.promise = mongoose.connect(MONGODB_URI, opts)
}
try {
cached.conn = await cached.promise
console.log('Connected to MongoDB')
return cached.conn
} catch (error) {
cached.promise = null
console.error('MongoDB connection error:', error)
throw error
}
}
// Test MongoDB connection
export async function testMongoConnection(): Promise<boolean> {
try {
const connection = await connectDB()
// Test database operations
await connection.connection.db.admin().ping()
console.log('✅ MongoDB connection test successful')
// Optional: Test if we can create a test collection
const testCollection = connection.connection.db.collection('connection_test')
await testCollection.insertOne({ test: true, timestamp: new Date() })
await testCollection.deleteOne({ test: true })
console.log('✅ MongoDB read/write test successful')
return true
} catch (error) {
console.error('❌ MongoDB connection test failed:', error)
console.error('Please check:')
console.error('1. MongoDB is running (docker-compose up -d in /mongo directory)')
console.error('2. MONGODB_URI in .env is correct')
console.error('3. MongoDB credentials are valid')
return false
}
}
export default connectDB
export { connectDB }
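/**
 * Example usage in an API route or service (a minimal sketch; the helper name is an
 * illustrative assumption, while the model import path matches the other lib modules):
 *
 *   import connectDB from '@/lib/mongodb'
 *   import { User } from '@/models/user'
 *
 *   export async function findUserByEmail(email: string) {
 *     await connectDB() // reuses the cached connection on subsequent calls
 *     return User.findOne({ email }).select('-password -refreshToken')
 *   }
 */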

126
lib/proxy.ts Normal file

@@ -0,0 +1,126 @@
/**
* Development proxy configuration
* Useful for proxying external APIs during development
*/
import { NextRequest, NextResponse } from 'next/server'
export interface ProxyConfig {
target: string
pathRewrite?: Record<string, string>
changeOrigin?: boolean
headers?: Record<string, string>
}
/**
* Simple proxy utility for API routes
* @param request - The incoming request
* @param config - Proxy configuration
*/
export async function proxyRequest(
request: NextRequest,
config: ProxyConfig
): Promise<NextResponse> {
try {
const url = new URL(request.url)
let targetPath = url.pathname
// Apply path rewrites
if (config.pathRewrite) {
Object.entries(config.pathRewrite).forEach(([pattern, replacement]) => {
const regex = new RegExp(pattern)
if (regex.test(targetPath)) {
targetPath = targetPath.replace(regex, replacement)
}
})
}
// Build target URL
const targetUrl = new URL(targetPath + url.search, config.target)
// Prepare headers
const headers = new Headers()
// Copy headers from original request
request.headers.forEach((value, key) => {
// Skip host header to avoid conflicts
if (key.toLowerCase() !== 'host') {
headers.set(key, value)
}
})
// Add custom headers
if (config.headers) {
Object.entries(config.headers).forEach(([key, value]) => {
headers.set(key, value)
})
}
// Change origin if specified
if (config.changeOrigin) {
headers.set('Host', targetUrl.host)
headers.set('Origin', config.target)
}
// Forward the request
const response = await fetch(targetUrl.toString(), {
method: request.method,
headers,
body:
request.method !== 'GET' && request.method !== 'HEAD'
? await request.arrayBuffer()
: undefined,
})
// Create response with proper headers
const responseHeaders = new Headers()
// Copy response headers
response.headers.forEach((value, key) => {
// Skip some headers that might cause issues
if (
!['content-encoding', 'content-length', 'transfer-encoding'].includes(key.toLowerCase())
) {
responseHeaders.set(key, value)
}
})
// Add CORS headers for development
if (process.env.NODE_ENV === 'development') {
responseHeaders.set('Access-Control-Allow-Origin', '*')
responseHeaders.set('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS')
responseHeaders.set('Access-Control-Allow-Headers', 'Content-Type, Authorization')
}
const responseBody = await response.arrayBuffer()
return new NextResponse(responseBody, {
status: response.status,
statusText: response.statusText,
headers: responseHeaders,
})
} catch (error) {
console.error('Proxy error:', error)
return NextResponse.json({ error: 'Proxy request failed' }, { status: 500 })
}
}
/**
* Example usage in an API route:
*
* // app/api/proxy/external/route.ts
* import { proxyRequest } from '@/lib/proxy';
*
* export async function GET(request: NextRequest) {
* return proxyRequest(request, {
* target: 'https://api.external-service.com',
* pathRewrite: {
* '^/api/proxy/external': ''
* },
* changeOrigin: true,
* headers: {
* 'X-API-Key': process.env.EXTERNAL_API_KEY || ''
* }
* });
* }
*/

49
lib/redis.ts Normal file

@@ -0,0 +1,49 @@
import { createClient } from 'redis'
// Only create Redis client if REDIS_URL is provided
let client: ReturnType<typeof createClient> | null = null
if (process.env.REDIS_URL) {
client = createClient({
url: process.env.REDIS_URL,
})
client.on('error', (err) => {
console.error('Redis Client Error:', err)
})
client.on('connect', () => {
console.log('Connected to Redis')
})
client.on('ready', () => {
console.log('Redis Client Ready')
})
client.on('end', () => {
console.log('Redis Client Disconnected')
})
} else {
console.warn('Redis disabled - REDIS_URL not provided')
}
// Connect to Redis
const connectRedis = async () => {
if (!client) {
console.warn('Redis client not initialized - sessions will use memory store')
return null
}
if (!client.isOpen) {
try {
await client.connect()
} catch (error) {
console.error('Failed to connect to Redis:', error)
// Don't throw error - let app continue without Redis
return null
}
}
return client
}
export { client as redisClient, connectRedis }
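/**
 * Example usage with the nullable client (a minimal sketch; the cache key and TTL are
 * illustrative assumptions):
 *
 *   import { connectRedis } from '@/lib/redis'
 *
 *   async function cacheValue(key: string, value: unknown) {
 *     const client = await connectRedis()
 *     if (!client) return // Redis disabled or unreachable; caller continues without caching
 *     await client.setEx(key, 60, JSON.stringify(value))
 *   }
 */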

321
lib/seo.ts Normal file

@@ -0,0 +1,321 @@
/**
* SEO utilities for Next.js App Router
* Uses the new Metadata API for better SEO
*/
import { Metadata } from 'next'
export interface SEOConfig {
title?: string
description?: string
keywords?: string[]
authors?: Array<{ name: string; url?: string }>
creator?: string
publisher?: string
robots?: {
index?: boolean
follow?: boolean
nocache?: boolean
googleBot?: {
index?: boolean
follow?: boolean
noimageindex?: boolean
'max-video-preview'?: number
'max-image-preview'?: 'none' | 'standard' | 'large'
'max-snippet'?: number
}
}
openGraph?: {
title?: string
description?: string
url?: string
siteName?: string
images?: Array<{
url: string
width?: number
height?: number
alt?: string
}>
type?: 'website' | 'article'
publishedTime?: string
modifiedTime?: string
authors?: string[]
section?: string
tags?: string[]
}
twitter?: {
card?: 'summary' | 'summary_large_image' | 'app' | 'player'
title?: string
description?: string
siteId?: string
creator?: string
creatorId?: string
images?: string | Array<{ url: string; alt?: string }>
}
alternates?: {
canonical?: string
languages?: Record<string, string>
}
icons?: {
icon?: string | Array<{ url: string; sizes?: string; type?: string }>
shortcut?: string
apple?: string | Array<{ url: string; sizes?: string; type?: string }>
}
}
const defaultSEOConfig: SEOConfig = {
title: 'NextJS Boilerplate',
description:
'A production-ready NextJS boilerplate with TypeScript, Tailwind CSS, and authentication',
keywords: ['Next.js', 'React', 'TypeScript', 'Tailwind CSS', 'Authentication', 'Boilerplate'],
authors: [{ name: 'NextJS Boilerplate Team' }],
creator: 'NextJS Boilerplate',
publisher: 'NextJS Boilerplate',
robots: {
index: true,
follow: true,
googleBot: {
index: true,
follow: true,
'max-video-preview': -1,
'max-image-preview': 'large',
'max-snippet': -1,
},
},
openGraph: {
type: 'website',
siteName: 'NextJS Boilerplate',
images: [
{
url: '/og-image.png',
width: 1200,
height: 630,
alt: 'NextJS Boilerplate',
},
],
},
twitter: {
card: 'summary_large_image',
creator: '@nextjs_boilerplate',
},
icons: {
icon: '/favicon.ico',
shortcut: '/favicon.ico',
apple: '/apple-touch-icon.png',
},
}
/**
* Generate metadata for pages
*/
export function generateMetadata(config: SEOConfig = {}): Metadata {
const mergedConfig = {
...defaultSEOConfig,
...config,
openGraph: {
...defaultSEOConfig.openGraph,
...config.openGraph,
},
twitter: {
...defaultSEOConfig.twitter,
...config.twitter,
},
robots: {
...defaultSEOConfig.robots,
...config.robots,
},
}
return {
title: mergedConfig.title,
description: mergedConfig.description,
keywords: mergedConfig.keywords,
authors: mergedConfig.authors,
creator: mergedConfig.creator,
publisher: mergedConfig.publisher,
robots: mergedConfig.robots,
openGraph: mergedConfig.openGraph,
twitter: mergedConfig.twitter,
alternates: config.alternates,
icons: mergedConfig.icons,
}
}
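/**
 * Example usage in a route segment (a sketch; the page and strings are
 * illustrative). Aliasing the import avoids clashing with Next.js's own
 * generateMetadata export convention:
 *
 * // app/about/page.tsx
 * import { generateMetadata as buildMetadata } from '@/lib/seo'
 *
 * export const metadata = buildMetadata({
 *   title: 'About',
 *   description: 'Who we are and what we build',
 *   alternates: { canonical: 'https://example.com/about' },
 * })
 */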
/**
* Generate metadata for article pages
*/
export function generateArticleMetadata(
title: string,
description: string,
config: {
publishedTime?: string
modifiedTime?: string
authors?: string[]
section?: string
tags?: string[]
image?: string
url?: string
} = {}
): Metadata {
return generateMetadata({
title,
description,
openGraph: {
type: 'article',
title,
description,
publishedTime: config.publishedTime,
modifiedTime: config.modifiedTime,
authors: config.authors,
section: config.section,
tags: config.tags,
url: config.url,
images: config.image
? [
{
url: config.image,
width: 1200,
height: 630,
alt: title,
},
]
: undefined,
},
twitter: {
card: 'summary_large_image',
title,
description,
images: config.image ? [{ url: config.image, alt: title }] : undefined,
},
})
}
/**
* Generate structured data (JSON-LD)
*/
export function generateStructuredData(type: string, data: Record<string, any>) {
const structuredData = {
'@context': 'https://schema.org',
'@type': type,
...data,
}
return {
__html: JSON.stringify(structuredData),
}
}
/**
* Website structured data
*/
export function generateWebsiteStructuredData(
name: string,
url: string,
description?: string,
logo?: string
) {
return generateStructuredData('WebSite', {
name,
url,
description,
logo,
potentialAction: {
'@type': 'SearchAction',
target: {
'@type': 'EntryPoint',
urlTemplate: `${url}/search?q={search_term_string}`,
},
'query-input': 'required name=search_term_string',
},
})
}
/**
* Organization structured data
*/
export function generateOrganizationStructuredData(
name: string,
url: string,
logo?: string,
contactPoint?: {
telephone?: string
contactType?: string
email?: string
}
) {
return generateStructuredData('Organization', {
name,
url,
logo,
contactPoint: contactPoint
? {
'@type': 'ContactPoint',
...contactPoint,
}
: undefined,
})
}
/**
* Article structured data
*/
export function generateArticleStructuredData(
headline: string,
description: string,
author: string,
publishedDate: string,
modifiedDate?: string,
image?: string,
url?: string
) {
return generateStructuredData('Article', {
headline,
description,
author: {
'@type': 'Person',
name: author,
},
datePublished: publishedDate,
dateModified: modifiedDate || publishedDate,
image,
url,
publisher: {
'@type': 'Organization',
name: 'NextJS Boilerplate',
logo: {
'@type': 'ImageObject',
url: '/logo.png',
},
},
})
}
/**
* Breadcrumb structured data
*/
export function generateBreadcrumbStructuredData(items: Array<{ name: string; url: string }>) {
return generateStructuredData('BreadcrumbList', {
itemListElement: items.map((item, index) => ({
'@type': 'ListItem',
position: index + 1,
name: item.name,
item: item.url,
})),
})
}
/**
* FAQ structured data
*/
export function generateFAQStructuredData(faqs: Array<{ question: string; answer: string }>) {
return generateStructuredData('FAQPage', {
mainEntity: faqs.map((faq) => ({
'@type': 'Question',
name: faq.question,
acceptedAnswer: {
'@type': 'Answer',
text: faq.answer,
},
})),
})
}
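/**
 * The structured-data helpers above return an object shaped for
 * dangerouslySetInnerHTML. A minimal rendering sketch (the page and FAQ
 * content are illustrative):
 *
 * // app/faq/page.tsx
 * import { generateFAQStructuredData } from '@/lib/seo'
 *
 * const faqJsonLd = generateFAQStructuredData([
 *   { question: 'Is Redis required?', answer: 'No, sessions fall back to the memory store.' },
 * ])
 *
 * export default function FAQPage() {
 *   return <script type="application/ld+json" dangerouslySetInnerHTML={faqJsonLd} />
 * }
 */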

55
lib/session.ts Normal file
View File

@@ -0,0 +1,55 @@
import session from 'express-session'
import { RedisStore } from 'connect-redis'
import { redisClient } from './redis'
// Extend the session interface to include our custom properties
declare module 'express-session' {
interface SessionData {
userId?: string
user?: {
id: string
email: string
name: string
role: string
avatar?: string
}
accessToken?: string
refreshToken?: string
}
}
// Create session config - use Redis if available, otherwise use memory store
const createSessionConfig = () => {
const baseConfig = {
secret: process.env.SESSION_SECRET || 'your-super-secret-session-key-change-in-production',
resave: false,
saveUninitialized: false,
rolling: true, // Reset expiry on each request
cookie: {
secure: process.env.NODE_ENV === 'production', // HTTPS only in production
httpOnly: true,
maxAge: 24 * 60 * 60 * 1000, // 24 hours
sameSite: 'lax' as const,
},
name: 'sessionId', // Don't use default session name
}
// Only use Redis store if Redis client is available
if (redisClient) {
console.log('Using Redis store for sessions')
return {
...baseConfig,
store: new RedisStore({
client: redisClient,
prefix: 'sess:',
}),
}
} else {
console.warn('Using memory store for sessions - sessions will not persist across server restarts')
return baseConfig
}
}
export const sessionConfig = createSessionConfig()
export const sessionMiddleware = session(sessionConfig)
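/**
 * sessionMiddleware is Express middleware, so it only applies behind a custom
 * server (a sketch; server.ts is illustrative and not part of this commit):
 *
 * // server.ts
 * import express from 'express'
 * import next from 'next'
 * import { sessionMiddleware } from './lib/session'
 *
 * const app = next({ dev: process.env.NODE_ENV !== 'production' })
 * const handle = app.getRequestHandler()
 *
 * app.prepare().then(() => {
 *   const server = express()
 *   server.use(sessionMiddleware)
 *   server.all('*', (req, res) => handle(req, res))
 *   server.listen(Number(process.env.PORT) || 3006)
 * })
 */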

36
lib/siliconId.ts Normal file
View File

@@ -0,0 +1,36 @@
/**
* Encode number to Base62
*/
function toBase62(num: number): string {
const base62 = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
let encoded = ''
let n = num
if (n === 0) return '0'
while (n > 0) {
encoded = base62[n % 62] + encoded
n = Math.floor(n / 62)
}
return encoded
}
/**
* Generate compact Silicon ID
 * Format: base62(timestamp * 1000 + random), padded to at least 10 chars
*/
export function generateSiliconId(): string {
const epochStart = new Date('2020-01-01').getTime()
const now = Date.now()
// relative timestamp since epochStart
const timestamp = (now - epochStart) % 1_000_000_000_000
// add randomness (0-999)
const randomNum = Math.floor(Math.random() * 1000)
// combine into one big number
const finalNum = timestamp * 1000 + randomNum
// convert to base62
return toBase62(finalNum).padStart(10, '0') // always at least 10 chars
}
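/**
 * Example usage (a sketch; the sample value shows the shape, not a real output):
 *
 * import { generateSiliconId } from '@/lib/siliconId'
 *
 * const siliconId = generateSiliconId() // e.g. '0002q7TfKx' (at least 10 Base62 chars)
 * await User.create({ email, name, siliconId })
 *
 * Two calls in the same millisecond can collide if the random suffix repeats,
 * so a unique index on the siliconId field is still worth keeping.
 */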

48
lib/startup.ts Normal file
View File

@@ -0,0 +1,48 @@
// Server-side startup checks - runs when Next.js server starts
let startupComplete = false
export async function runStartupChecks() {
if (startupComplete) return // Only run once
console.log('\n🚀 NextJS Boilerplate - Running startup checks...')
console.log('='.repeat(60))
try {
// Test MongoDB connection
const { testMongoConnection } = await import('./mongodb')
const mongoStatus = await testMongoConnection()
// Test Redis connection (optional)
let redisStatus = false
try {
const { connectRedis } = await import('./redis')
const redisClient = await connectRedis()
if (!redisClient) throw new Error('Redis client not configured')
await redisClient.ping()
console.log('✅ Redis connection successful')
redisStatus = true
} catch (error) {
console.log('⚠️ Redis connection failed (optional service)')
console.log(' → Redis is used for session storage but app will work without it')
}
console.log('='.repeat(60))
if (mongoStatus) {
console.log('🎉 All critical services connected - App ready!')
console.log(` → Local: http://localhost:${process.env.PORT || 3006}`)
console.log(` → Database: Connected to MongoDB`)
console.log(` → Sessions: ${redisStatus ? 'Redis enabled' : 'Fallback to memory'}`)
} else {
console.log('❌ Critical services failed - App may not work properly')
console.log(' → Check MongoDB connection and try again')
}
console.log('='.repeat(60))
startupComplete = true
} catch (error) {
console.error('❌ Startup checks failed:', error)
console.log('='.repeat(60))
}
}
// Export for explicit use in layout
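/**
 * Example usage from the root layout (a sketch):
 *
 * // app/layout.tsx
 * import { runStartupChecks } from '@/lib/startup'
 *
 * export default async function RootLayout({ children }: { children: React.ReactNode }) {
 *   await runStartupChecks()
 *   return (
 *     <html lang="en">
 *       <body>{children}</body>
 *     </html>
 *   )
 * }
 */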

229
lib/storage.ts Normal file
View File

@@ -0,0 +1,229 @@
/**
* Storage utilities for file management
* Provides high-level functions for common file operations
*/
import {
uploadFile,
moveToPermStorage,
deleteFile,
getFileUrl,
validateFileType,
validateFileSize,
generateUniqueFilename,
} from './file-vault'
// File type configurations
export const FILE_CONFIGS = {
image: {
allowedTypes: ['image/jpeg', 'image/png', 'image/webp', 'image/gif', 'image/svg+xml'],
maxSize: 10 * 1024 * 1024, // 10MB
folder: 'images',
},
document: {
allowedTypes: [
'application/pdf',
'application/msword',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'text/plain',
'text/csv',
],
maxSize: 20 * 1024 * 1024, // 20MB
folder: 'documents',
},
avatar: {
allowedTypes: ['image/jpeg', 'image/png', 'image/webp'],
maxSize: 5 * 1024 * 1024, // 5MB
folder: 'avatars',
},
} as const
export type FileType = keyof typeof FILE_CONFIGS
// File upload result
export interface UploadResult {
tempPath: string
filename: string
size: number
type: string
uploadedAt: string
}
// File confirmation result
export interface ConfirmResult {
permanentPath: string
filename: string
folder: string
url: string
confirmedAt: string
}
/**
* Upload file with type validation
*/
export async function uploadFileWithValidation(
file: File,
fileType: FileType
): Promise<UploadResult> {
const config = FILE_CONFIGS[fileType]
// Validate file type
if (!validateFileType(file.type, [...config.allowedTypes])) {
throw new Error(`Invalid file type. Allowed: ${config.allowedTypes.join(', ')}`)
}
// Validate file size
if (!validateFileSize(file.size, config.maxSize)) {
throw new Error(`File too large. Max size: ${config.maxSize / (1024 * 1024)}MB`)
}
// Convert file to buffer
const bytes = await file.arrayBuffer()
const buffer = Buffer.from(bytes)
// Upload to external API (no temporary storage needed)
const uploadResult = await uploadFile(buffer, file.name, file.type)
return {
tempPath: uploadResult.url, // Use URL as tempPath for compatibility
filename: uploadResult.filename,
size: file.size,
type: file.type,
uploadedAt: new Date().toISOString(),
}
}
/**
* Confirm upload and move to permanent storage
* With the new API, files are already in permanent storage
*/
export async function confirmUpload(
tempPath: string,
fileType: FileType,
customFilename?: string
): Promise<ConfirmResult> {
const config = FILE_CONFIGS[fileType]
// With the new API, tempPath is already the permanent URL
const url = tempPath
const filename = url.split('/').pop() || 'file'
return {
permanentPath: url,
filename: filename,
folder: config.folder,
url: url,
confirmedAt: new Date().toISOString(),
}
}
/**
* Delete file from storage
*/
export async function removeFile(filePath: string): Promise<void> {
await deleteFile(filePath)
}
/**
* Get signed URL for file access
*/
export async function getSignedUrl(filePath: string): Promise<string> {
return await getFileUrl(filePath)
}
/**
* Validate file before upload
*/
export function validateFile(file: File, fileType: FileType): { valid: boolean; error?: string } {
const config = FILE_CONFIGS[fileType]
// Check file type
if (!validateFileType(file.type, [...config.allowedTypes])) {
return {
valid: false,
error: `Invalid file type. Allowed: ${config.allowedTypes.join(', ')}`,
}
}
// Check file size
if (!validateFileSize(file.size, config.maxSize)) {
return {
valid: false,
error: `File too large. Max size: ${config.maxSize / (1024 * 1024)}MB`,
}
}
return { valid: true }
}
/**
* Batch file operations
*/
export class FileBatch {
private tempPaths: string[] = []
async uploadFile(file: File, fileType: FileType): Promise<UploadResult> {
const result = await uploadFileWithValidation(file, fileType)
this.tempPaths.push(result.tempPath)
return result
}
async confirmAll(fileType: FileType): Promise<ConfirmResult[]> {
const results: ConfirmResult[] = []
for (const tempPath of this.tempPaths) {
const result = await confirmUpload(tempPath, fileType)
results.push(result)
}
this.tempPaths = [] // Clear after confirmation
return results
}
async cleanup(): Promise<void> {
for (const tempPath of this.tempPaths) {
try {
await removeFile(tempPath)
} catch (error) {
console.error(`Failed to cleanup ${tempPath}:`, error)
}
}
this.tempPaths = []
}
}
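/**
 * Example batch usage (a sketch; front and back are File objects from a form):
 *
 * const batch = new FileBatch()
 * try {
 *   await batch.uploadFile(front, 'image')
 *   await batch.uploadFile(back, 'image')
 *   const confirmed = await batch.confirmAll('image')
 * } catch (error) {
 *   await batch.cleanup() // remove anything already uploaded if a step failed
 *   throw error
 * }
 */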
/**
* File metadata for database storage
*/
export interface FileMetadata {
id: string
filename: string
originalName: string
path: string
size: number
mimeType: string
fileType: FileType
uploadedBy: string
uploadedAt: Date
url?: string
}
/**
* Helper to create file metadata
*/
export function createFileMetadata(
uploadResult: UploadResult,
confirmResult: ConfirmResult,
fileType: FileType,
uploadedBy: string
): Omit<FileMetadata, 'id'> {
return {
filename: confirmResult.filename,
originalName: uploadResult.filename,
path: confirmResult.permanentPath,
size: uploadResult.size,
mimeType: uploadResult.type,
fileType,
uploadedBy,
uploadedAt: new Date(uploadResult.uploadedAt),
url: confirmResult.url,
}
}
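/**
 * Example end-to-end upload route (a sketch; the route path and the hard-coded
 * uploader id are illustrative, not part of this commit):
 *
 * // app/api/upload/avatar/route.ts
 * import { NextRequest, NextResponse } from 'next/server'
 * import { uploadFileWithValidation, confirmUpload, createFileMetadata } from '@/lib/storage'
 *
 * export async function POST(request: NextRequest) {
 *   const formData = await request.formData()
 *   const file = formData.get('file')
 *   if (!(file instanceof File)) {
 *     return NextResponse.json({ error: 'No file provided' }, { status: 400 })
 *   }
 *   const uploaded = await uploadFileWithValidation(file, 'avatar')
 *   const confirmed = await confirmUpload(uploaded.tempPath, 'avatar')
 *   const metadata = createFileMetadata(uploaded, confirmed, 'avatar', 'user-id-from-session')
 *   return NextResponse.json({ url: confirmed.url, metadata })
 * }
 */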

162
lib/structured-data.ts Normal file
View File

@@ -0,0 +1,162 @@
import { ITopic } from '@/models/topic'
interface ArticleStructuredData {
title: string
description: string
image: string
publishedTime: string
authorName: string
url: string
readingTime?: string
tags?: string[]
}
export function generateArticleStructuredData({
title,
description,
image,
publishedTime,
authorName,
url,
readingTime,
tags = [],
}: ArticleStructuredData): string {
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://siliconpin.com'
const structuredData = {
'@context': 'https://schema.org',
'@type': 'Article',
headline: title,
description: description,
image: {
'@type': 'ImageObject',
url: image.startsWith('http') ? image : `${baseUrl}${image}`,
width: 800,
height: 450,
},
author: {
'@type': 'Person',
name: authorName,
},
publisher: {
'@type': 'Organization',
name: 'SiliconPin',
logo: {
'@type': 'ImageObject',
url: `${baseUrl}/favicon.ico`,
width: 32,
height: 32,
},
},
datePublished: publishedTime,
dateModified: publishedTime,
mainEntityOfPage: {
'@type': 'WebPage',
'@id': `${baseUrl}${url}`,
},
url: `${baseUrl}${url}`,
...(readingTime && {
timeRequired: readingTime,
}),
...(tags.length > 0 && {
keywords: tags.join(', '),
about: tags.map(tag => ({
'@type': 'Thing',
name: tag,
})),
}),
}
return JSON.stringify(structuredData, null, 2)
}
export function generateTopicListingStructuredData(topics: ITopic[]): string {
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://siliconpin.com'
const structuredData = {
'@context': 'https://schema.org',
'@type': 'Blog', // schema.org has no 'Topic' type; Blog/BlogPosting is the closest valid vocabulary
name: 'SiliconPin Topics',
description: 'Technical articles, tutorials, and insights from the SiliconPin community',
url: `${baseUrl}/topics`,
publisher: {
'@type': 'Organization',
name: 'SiliconPin',
logo: {
'@type': 'ImageObject',
url: `${baseUrl}/favicon.ico`,
width: 32,
height: 32,
},
},
blogPost: topics.slice(0, 10).map(topic => ({
'@type': 'BlogPosting',
headline: topic.title,
description: topic.excerpt,
url: `${baseUrl}/topics/${topic.slug}`,
datePublished: new Date(topic.publishedAt).toISOString(),
author: {
'@type': 'Person',
name: topic.author,
},
image: {
'@type': 'ImageObject',
url: topic.coverImage.startsWith('http') ? topic.coverImage : `${baseUrl}${topic.coverImage}`,
width: 800,
height: 450,
},
keywords: topic.tags.map(tag => tag.name).join(', '),
})),
}
return JSON.stringify(structuredData, null, 2)
}
export function generateWebsiteStructuredData(): string {
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://siliconpin.com'
const structuredData = {
'@context': 'https://schema.org',
'@type': 'WebSite',
name: 'SiliconPin',
description: 'Modern web hosting and cloud solutions with integrated topic platform',
url: baseUrl,
potentialAction: {
'@type': 'SearchAction',
target: {
'@type': 'EntryPoint',
urlTemplate: `${baseUrl}/search?q={search_term_string}`,
},
'query-input': 'required name=search_term_string',
},
publisher: {
'@type': 'Organization',
name: 'SiliconPin',
logo: {
'@type': 'ImageObject',
url: `${baseUrl}/favicon.ico`,
width: 32,
height: 32,
},
},
}
return JSON.stringify(structuredData, null, 2)
}
export function generateBreadcrumbStructuredData(items: Array<{ name: string; url: string }>): string {
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://siliconpin.com'
const structuredData = {
'@context': 'https://schema.org',
'@type': 'BreadcrumbList',
itemListElement: items.map((item, index) => ({
'@type': 'ListItem',
position: index + 1,
name: item.name,
item: item.url.startsWith('http') ? item.url : `${baseUrl}${item.url}`,
})),
}
return JSON.stringify(structuredData, null, 2)
}
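/**
 * These helpers return JSON strings, so they are embedded with an explicit
 * __html wrapper (a sketch; the page path and topic fields follow the ITopic
 * shape used above):
 *
 * // app/topics/[slug]/page.tsx
 * import { generateArticleStructuredData } from '@/lib/structured-data'
 *
 * const jsonLd = generateArticleStructuredData({
 *   title: topic.title,
 *   description: topic.excerpt,
 *   image: topic.coverImage,
 *   publishedTime: new Date(topic.publishedAt).toISOString(),
 *   authorName: topic.author,
 *   url: `/topics/${topic.slug}`,
 * })
 *
 * <script type="application/ld+json" dangerouslySetInnerHTML={{ __html: jsonLd }} />
 */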

81
lib/system-settings.ts Normal file
View File

@@ -0,0 +1,81 @@
import { SystemSettings, ISystemSettings } from '@/models/system-settings'
import { connectDB } from '@/lib/mongodb'
// Cache for system settings to avoid database calls on every request
let settingsCache: any = null
let cacheTimestamp = 0
const CACHE_DURATION = 60000 // 1 minute cache
async function getSettingsFromDB() {
await connectDB()
let settings = await (SystemSettings as any).findOne({})
if (!settings) {
// Create default settings if none exist
settings = new SystemSettings({
maintenanceMode: false,
registrationEnabled: true,
emailVerificationRequired: true,
maxUserBalance: 1000000,
defaultUserRole: 'user',
systemMessage: '',
paymentGatewayEnabled: true,
developerHireEnabled: true,
vpsDeploymentEnabled: true,
kubernetesDeploymentEnabled: true,
vpnServiceEnabled: true,
lastUpdated: new Date(),
updatedBy: 'System',
})
await settings.save()
}
return settings
}
export async function getSystemSettings() {
const now = Date.now()
// Return cached settings if still valid
if (settingsCache && now - cacheTimestamp < CACHE_DURATION) {
return settingsCache
}
// Fetch fresh settings from database
const settings = await getSettingsFromDB()
settingsCache = settings.toObject()
cacheTimestamp = now
return settingsCache
}
export async function updateSystemSettings(newSettings: any) {
await connectDB()
const settings = await (SystemSettings as any).findOneAndUpdate(
{},
{ ...newSettings, lastUpdated: new Date() },
{ new: true, upsert: true }
)
// Clear cache to force refresh
settingsCache = null
cacheTimestamp = 0
return settings
}
export async function checkServiceAvailability(
service: 'vps' | 'developer' | 'kubernetes' | 'vpn'
): Promise<boolean> {
const serviceMap = {
vps: 'vpsDeploymentEnabled',
developer: 'developerHireEnabled',
kubernetes: 'kubernetesDeploymentEnabled',
vpn: 'vpnServiceEnabled',
}
const settings = await getSystemSettings()
const settingKey = serviceMap[service]
return settings[settingKey] as boolean
}
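/**
 * Example gate in a deployment route (a sketch; the route path is illustrative):
 *
 * // app/api/deploy/vps/route.ts
 * import { NextResponse } from 'next/server'
 * import { checkServiceAvailability } from '@/lib/system-settings'
 *
 * export async function POST() {
 *   if (!(await checkServiceAvailability('vps'))) {
 *     return NextResponse.json({ error: 'VPS deployment is currently disabled' }, { status: 503 })
 *   }
 *   // ...proceed with deployment
 *   return NextResponse.json({ ok: true })
 * }
 */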

6
lib/utils.ts Normal file
View File

@@ -0,0 +1,6 @@
import { type ClassValue, clsx } from 'clsx'
import { twMerge } from 'tailwind-merge'
export function cn(...inputs: ClassValue[]) {
return twMerge(clsx(inputs))
}
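/**
 * Example usage (a sketch): later class strings win conflicts via tailwind-merge.
 *
 * cn('px-2 py-1 bg-gray-100', isActive && 'bg-primary text-white', className)
 * // => 'px-2 py-1 bg-primary text-white ...' when isActive is true
 */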