author    Serguey Parkhomovsky <xindigo@gmail.com>  2025-12-25 22:24:41 -0800
committer Serguey Parkhomovsky <xindigo@gmail.com>  2025-12-25 22:44:18 -0800
commit    8676c7271085fb4dee8f157a6df9e1e7ff2552f5 (patch)
tree      75e0468553304cc6357bec8d3fd0af640fe154b6 /main.go
parent    f61d7b3171f8e38a6dc814bdb4b136588cb3f2b1 (diff)
parent    38977b259a46b60f0ba9a223a0c9ee4e908facd9 (diff)
Merge main branches
Diffstat (limited to 'main.go')
-rw-r--r--  main.go  329
1 file changed, 210 insertions(+), 119 deletions(-)
diff --git a/main.go b/main.go
index 00c3779..9985c1e 100644
--- a/main.go
+++ b/main.go
@@ -7,12 +7,21 @@ import (
"io"
"log"
"net/http"
+ "net/url"
"strconv"
"strings"
"time"
_ "github.com/ncruces/go-sqlite3/driver"
_ "github.com/ncruces/go-sqlite3/embed"
+ "github.com/PuerkitoBio/goquery"
+)
+
+const (
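+ // Delays between requests keep load on the server polite; maxPages is a
+ // safety cap against runaway pagination.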
+ artistFetchDelay = 250 * time.Millisecond
+ pageFetchDelay = 500 * time.Millisecond
+ maxPages = 1000
+ itemsPerPage = 1000
)
// RSS structures
@@ -50,6 +59,44 @@ type TitleAuthor struct {
Author string
}
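+// extractUsername treats the first path segment of a track URL as the
+// artist's username.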
+func extractUsername(trackURL string) (string, error) {
+ u, err := url.Parse(trackURL)
+ if err != nil {
+ return "", err
+ }
+
+ parts := strings.Split(strings.Trim(u.Path, "/"), "/")
+ if parts[0] == "" {
+ return "", fmt.Errorf("invalid URL format: %s", trackURL)
+ }
+
+ return parts[0], nil
+}
+
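+// fetchArtistName loads a track page and scrapes the artist's display name
+// from the "#item_user a" element, trimming the leading "By ".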
+func fetchArtistName(trackURL string) (string, error) {
+ resp, err := http.Get(trackURL)
+ if err != nil {
+ return "", err
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode != 200 {
+ return "", fmt.Errorf("HTTP %d", resp.StatusCode)
+ }
+
+ doc, err := goquery.NewDocumentFromReader(resp.Body)
+ if err != nil {
+ return "", err
+ }
+
+ artistName := strings.TrimSpace(strings.TrimPrefix(doc.Find("#item_user a").Text(), "By "))
+ if artistName == "" {
+ return "", fmt.Errorf("artist name not found")
+ }
+
+ return artistName, nil
+}
+
func parseYearWeek(date string) (YearWeek, error) {
parts := strings.Split(date, "/")
if len(parts) != 2 {
@@ -95,149 +142,145 @@ func isValidXMLByte(b byte) bool {
return b == 0x09 || b == 0x0A || b == 0x0D || b >= 0x20
}
-func main() {
- fmt.Println("Program started")
- // Create SQLite database
- db, err := sql.Open("sqlite3", "weeklybeats.db")
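+// fetchRSSPage downloads one page of the Weekly Beats RSS feed for a year,
+// strips bytes that are invalid in XML, and unmarshals the result.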
+func fetchRSSPage(year, page int) (*RSS, error) {
+ feedURL := fmt.Sprintf("https://weeklybeats.com/music/rss?limit=%d&year=%d&page=%d", itemsPerPage, year, page)
+
+ resp, err := http.Get(feedURL)
if err != nil {
- log.Fatal("Failed to open database:", err)
+ return nil, fmt.Errorf("failed to fetch: %w", err)
}
- defer db.Close()
+ defer resp.Body.Close()
- // Create table
- createTable := `
- CREATE TABLE IF NOT EXISTS tracks (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- title TEXT NOT NULL,
- link TEXT NOT NULL,
- author TEXT,
- week INTEGER,
- year INTEGER,
- url TEXT,
- UNIQUE(author, week, year)
- );`
+ if resp.StatusCode != 200 {
+ return nil, fmt.Errorf("received status %d", resp.StatusCode)
+ }
- _, err = db.Exec(createTable)
+ body, err := io.ReadAll(resp.Body)
if err != nil {
- log.Fatal("Failed to create table:", err)
+ return nil, fmt.Errorf("failed to read response: %w", err)
}
- fmt.Println("Starting to scrape Weekly Beats RSS feed...")
+ cleanedBody := filterInvalidXMLBytes(body)
- // Prepare insert statement
- insertStmt, err := db.Prepare(`
- INSERT OR IGNORE INTO tracks (title, link, author, week, year, url)
- VALUES (?, ?, ?, ?, ?, ?)
- `)
+ var rss RSS
+ err = xml.Unmarshal(cleanedBody, &rss)
if err != nil {
- log.Fatal("Failed to prepare insert statement:", err)
+ return nil, fmt.Errorf("failed to parse XML: %w", err)
}
- defer insertStmt.Close()
- year := 2012
+ return &rss, nil
+}
+
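+// getArtistNameFromTitle falls back to the author embedded in the RSS title.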
+func getArtistNameFromTitle(title string) (string, error) {
+ titleAuthor, err := parseTitle(title)
+ if err != nil {
+ return "", err
+ }
+ return titleAuthor.Author, nil
+}
+
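+// resolveArtistName prefers the cached name for a username, then scrapes the
+// track page, and finally falls back to the author in the RSS title.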
+func resolveArtistName(item Item, cache map[string]string) (string, error) {
+ username, err := extractUsername(item.Link)
+ if err != nil {
+ log.Printf("Failed to extract username from '%s': %v", item.Link, err)
+ return getArtistNameFromTitle(item.Title)
+ }
+
+ // Try cache first
+ if artistName := cache[username]; artistName != "" {
+ return artistName, nil
+ }
+
+ // Not in cache, fetch from track page
+ artistName, err := fetchArtistName(item.Link)
+ if err != nil {
+ log.Printf("Failed to fetch artist for '%s': %v, using RSS fallback", username, err)
+ return getArtistNameFromTitle(item.Title)
+ }
+
+ cache[username] = artistName
+ fmt.Printf("Cached artist name '%s' for username '%s'\n", artistName, username)
+ time.Sleep(artistFetchDelay)
+
+ return artistName, nil
+}
+
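+// insertItems resolves and inserts each feed item, skipping items that fail
+// to parse; it returns the number of rows inserted.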
+func insertItems(insertStmt *sql.Stmt, items []Item, cache map[string]string) int {
+ inserted := 0
+ for _, item := range items {
+ artistName, err := resolveArtistName(item, cache)
+ if err != nil {
+ log.Printf("Failed to resolve artist for item '%s': %v", item.Title, err)
+ continue
+ }
+
+ titleAuthor, err := parseTitle(item.Title)
+ if err != nil {
+ log.Printf("Failed to parse title for item '%s': %v", item.Title, err)
+ continue
+ }
+
+ yearWeek, err := parseYearWeek(item.Category)
+ if err != nil {
+ log.Printf("Failed to parse date for item '%s': %v", item.Title, err)
+ continue
+ }
+
+ _, err = insertStmt.Exec(titleAuthor.Title, item.Link, artistName, yearWeek.Week, yearWeek.Year, item.Enclosure.URL)
+ if err != nil {
+ log.Printf("Failed to insert item '%s': %v", item.Title, err)
+ continue
+ }
+ inserted++
+ }
+ return inserted
+}
+
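+// scrapeYear pages through a year's feed until an empty page, a fetch error,
+// or the maxPages safety cap.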
+func scrapeYear(year int, insertStmt *sql.Stmt, cache map[string]string) int {
totalItems := 0
- for year <= 2024 {
- page := 1
-
- for {
- fmt.Printf("Fetching page %d for year %d...\n", page, year)
-
- // Construct URL with page parameter
- url := fmt.Sprintf("https://weeklybeats.com/music/rss?limit=1000&year=%d&page=%d", year, page)
-
- // Fetch RSS feed
- resp, err := http.Get(url)
- if err != nil {
- log.Printf("Failed to fetch page %d: %v", page, err)
- break
- }
-
- if resp.StatusCode != 200 {
- fmt.Printf("Received status %d for page %d, stopping\n", resp.StatusCode, page)
- resp.Body.Close()
- break
- }
-
- body, err := io.ReadAll(resp.Body)
- resp.Body.Close()
-
- if err != nil {
- log.Printf("Failed to read response body for page %d: %v", page, err)
- break
- }
-
- cleanedBody := filterInvalidXMLBytes(body)
-
- // Parse XML
- var rss RSS
- err = xml.Unmarshal(cleanedBody, &rss)
- if err != nil {
- log.Printf("Failed to parse XML for page %d: %v", page, err)
- break
- }
-
- // Check if we got any items
- if len(rss.Channel.Items) == 0 {
- fmt.Printf("No items found on page %d, stopping\n", page)
- break
- }
-
- fmt.Printf("Found %d items on page %d\n", len(rss.Channel.Items), page)
-
- // Insert items into database
- pageItems := 0
- for _, item := range rss.Channel.Items {
- title, err := parseTitle(item.Title)
- if err != nil {
- log.Printf("Failed to parse title for item '%s': %v", title, err)
- continue
- }
-
- yearWeek, err := parseYearWeek(item.Category)
- if err != nil {
- log.Printf("Failed to parse date for item '%s': %v", title, err)
- continue
- }
-
- _, err = insertStmt.Exec(title, item.Link, item.Author, yearWeek.Week, yearWeek.Year, item.Enclosure.URL)
- if err != nil {
- log.Printf("Failed to insert item '%s': %v", item.Title, err)
- continue
- }
- pageItems++
- }
-
- fmt.Printf("Inserted %d new items from page %d\n", pageItems, page)
- totalItems += pageItems
-
- // Be respectful to the server
- time.Sleep(500 * time.Millisecond)
-
- page++
-
- // Safety check to prevent infinite loops
- if page > 1000 {
- fmt.Println("Reached maximum page limit (1000), stopping")
- break
- }
+ page := 1
+
+ for {
+ fmt.Printf("Fetching page %d for year %d...\n", page, year)
+
+ rss, err := fetchRSSPage(year, page)
+ if err != nil {
+ log.Printf("Failed to fetch page %d: %v", page, err)
+ break
}
- year += 2
+ if len(rss.Channel.Items) == 0 {
+ fmt.Printf("No items found on page %d, stopping\n", page)
+ break
+ }
+
+ fmt.Printf("Found %d items on page %d\n", len(rss.Channel.Items), page)
+
+ pageItems := insertItems(insertStmt, rss.Channel.Items, cache)
+ fmt.Printf("Inserted %d new items from page %d\n", pageItems, page)
+ totalItems += pageItems
+
+ time.Sleep(pageFetchDelay)
+ page++
+
+ if page > maxPages {
+ fmt.Printf("Reached maximum page limit (%d), stopping\n", maxPages)
+ break
+ }
}
- // Print summary
- fmt.Printf("\nScraping complete! Total items processed: %d\n", totalItems)
+ return totalItems
+}
- // Query and display some statistics
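+// printStatistics reports the total row count and a five-row sample from the
+// tracks table.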
+func printStatistics(db *sql.DB) {
var count int
- err = db.QueryRow("SELECT COUNT(*) FROM tracks").Scan(&count)
+ err := db.QueryRow("SELECT COUNT(*) FROM tracks").Scan(&count)
if err != nil {
log.Printf("Failed to count records: %v", err)
} else {
fmt.Printf("Total records in database: %d\n", count)
}
- // Show sample of data
fmt.Println("\nSample of stored data:")
rows, err := db.Query("SELECT title, author, link FROM tracks LIMIT 5")
if err != nil {
@@ -256,3 +299,51 @@ func main() {
fmt.Printf("- %s by %s (%s)\n", title, author, link)
}
}
+
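+// main opens the database, prepares the insert statement, and scrapes every
+// other year from 2012 through 2024 (the even years Weekly Beats runs).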
+func main() {
+ fmt.Println("Program started")
+
+ db, err := sql.Open("sqlite3", "weeklybeats.db")
+ if err != nil {
+ log.Fatal("Failed to open database:", err)
+ }
+ defer db.Close()
+
+ createTable := `
+ CREATE TABLE IF NOT EXISTS tracks (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ title TEXT NOT NULL,
+ link TEXT NOT NULL,
+ author TEXT,
+ week INTEGER,
+ year INTEGER,
+ url TEXT,
+ UNIQUE(author, week, year)
+ );`
+
+ _, err = db.Exec(createTable)
+ if err != nil {
+ log.Fatal("Failed to create table:", err)
+ }
+
+ fmt.Println("Starting to scrape Weekly Beats RSS feed...")
+
+ insertStmt, err := db.Prepare(`
+ INSERT OR IGNORE INTO tracks (title, link, author, week, year, url)
+ VALUES (?, ?, ?, ?, ?, ?)
+ `)
+ if err != nil {
+ log.Fatal("Failed to prepare insert statement:", err)
+ }
+ defer insertStmt.Close()
+
+ cache := make(map[string]string)
+
+ totalItems := 0
+ for year := 2012; year <= 2024; year += 2 {
+ totalItems += scrapeYear(year, insertStmt, cache)
+ }
+
+ fmt.Printf("\nScraping complete! Total items processed: %d\n", totalItems)
+ printStatistics(db)
+}