This commit is contained in:
Kar
2026-02-05 23:17:59 +05:30
parent 64c56d8a51
commit 1785548da8
2 changed files with 113 additions and 1 deletions

View File

@@ -4,6 +4,7 @@ import (
"database/sql"
"fmt"
"sitemap-api/models"
"strings"
"time"
_ "github.com/mattn/go-sqlite3"
@@ -193,6 +194,51 @@ func (db *DB) GetAllSites() ([]*models.Site, error) {
return sites, nil
}
// GetSitesByUUIDs returns the sites whose uuid matches any of the given
// UUIDs, newest first. An empty uuids slice yields an empty (non-nil)
// slice without touching the database.
func (db *DB) GetSitesByUUIDs(uuids []string) ([]*models.Site, error) {
	if len(uuids) == 0 {
		return []*models.Site{}, nil
	}

	// Build one "?" placeholder per UUID so the values are bound as
	// parameters rather than interpolated into the SQL text.
	placeholders := make([]string, len(uuids))
	args := make([]interface{}, len(uuids))
	for i, id := range uuids {
		placeholders[i] = "?"
		args[i] = id
	}

	query := fmt.Sprintf(`
		SELECT id, uuid, domain, url, max_depth, page_count, status,
		ip_address, user_agent, browser, browser_version, os, device_type,
		session_id, cookies, referrer, created_at, completed_at, last_crawled
		FROM sites WHERE uuid IN (%s) ORDER BY created_at DESC
	`, strings.Join(placeholders, ","))

	rows, err := db.conn.Query(query, args...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	sites := []*models.Site{}
	for rows.Next() {
		site := &models.Site{}
		err := rows.Scan(
			&site.ID, &site.UUID, &site.Domain, &site.URL, &site.MaxDepth,
			&site.PageCount, &site.Status, &site.IPAddress, &site.UserAgent,
			&site.Browser, &site.BrowserVersion, &site.OS, &site.DeviceType,
			&site.SessionID, &site.Cookies, &site.Referrer, &site.CreatedAt,
			&site.CompletedAt, &site.LastCrawled,
		)
		if err != nil {
			return nil, err
		}
		sites = append(sites, site)
	}
	// Surface any error that terminated iteration; without this check a
	// mid-stream failure would be silently returned as a partial result.
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return sites, nil
}
func (db *DB) UpdateSiteStatus(uuid string, status string, pageCount int) error {
query := `
UPDATE sites

View File

@@ -151,6 +151,26 @@ func (h *Handler) GenerateSitemapXML(w http.ResponseWriter, r *http.Request) {
h.streamManager.CloseStream(generatedUUID)
}()
// Add UUID to user UUIDs cookie
userUUIDs := getUserUUIDsFromCookie(r)
userUUIDs = append(userUUIDs, generatedUUID)
// Keep only last 20 UUIDs and remove duplicates
uniqueUUIDs := removeDuplicateUUIDs(userUUIDs)
if len(uniqueUUIDs) > 20 {
uniqueUUIDs = uniqueUUIDs[len(uniqueUUIDs)-20:]
}
// Set cookie with user UUIDs
http.SetCookie(w, &http.Cookie{
Name: "user_uuids",
Value: strings.Join(uniqueUUIDs, ","),
Path: "/",
MaxAge: 86400 * 90, // 90 days
HttpOnly: false,
SameSite: http.SameSiteLaxMode,
})
// Return immediately with UUID
response := map[string]interface{}{
"uuid": generatedUUID,
@@ -241,7 +261,20 @@ func (h *Handler) DownloadSitemap(w http.ResponseWriter, r *http.Request) {
// GetSites handles GET /sites
func (h *Handler) GetSites(w http.ResponseWriter, r *http.Request) {
sites, err := h.db.GetAllSites()
// Get user's UUIDs from cookie
userUUIDs := getUserUUIDsFromCookie(r)
var sites []*models.Site
var err error
if len(userUUIDs) > 0 {
// Get only user's sites
sites, err = h.db.GetSitesByUUIDs(userUUIDs)
} else {
// No UUIDs found, return empty list
sites = []*models.Site{}
}
if err != nil {
http.Error(w, "Failed to retrieve sites", http.StatusInternalServerError)
return
@@ -421,6 +454,39 @@ func getOrCreateSession(r *http.Request) string {
return uuid.New().String()
}
func getUserUUIDsFromCookie(r *http.Request) []string {
// Get user UUIDs from cookie
cookie, err := r.Cookie("user_uuids")
if err != nil || cookie.Value == "" {
return []string{}
}
// Parse comma-separated UUIDs
uuids := strings.Split(cookie.Value, ",")
// Filter out empty strings
var result []string
for _, uuid := range uuids {
if strings.TrimSpace(uuid) != "" {
result = append(result, strings.TrimSpace(uuid))
}
}
return result
}
// removeDuplicateUUIDs returns the input UUIDs with duplicates removed,
// keeping only the first occurrence of each value and preserving the
// original order. An input with no unique elements yields nil.
func removeDuplicateUUIDs(uuids []string) []string {
	seen := map[string]bool{}
	var unique []string
	for _, id := range uuids {
		if seen[id] {
			continue
		}
		seen[id] = true
		unique = append(unique, id)
	}
	return unique
}
func extractCookies(r *http.Request) string {
cookies := r.Cookies()
if len(cookies) == 0 {