summary refs log tree commit diff
diff options
context:
space:
mode:
authorSerguey Parkhomovsky <xindigo@gmail.com>2025-12-25 18:53:53 -0800
committerSerguey Parkhomovsky <xindigo@gmail.com>2025-12-25 21:55:34 -0800
commit7446cb7120afa221070ac996cf327e0bea1f9ff7 (patch)
tree441841b51165daa970aa68d2a5c9c8a0d06fd2ce
parent4df3b2195e9a0a4204bc9c0829420e267c678f5a (diff)
refactor
-rw-r--r--main.go241
1 file changed, 122 insertions, 119 deletions
diff --git a/main.go b/main.go
index f92c5fa..2474d39 100644
--- a/main.go
+++ b/main.go
@@ -93,149 +93,106 @@ func isValidXMLByte(b byte) bool {
return b == 0x09 || b == 0x0A || b == 0x0D || b >= 0x20
}
-func main() {
- fmt.Println("Program started")
- // Create SQLite database
- db, err := sql.Open("sqlite3", "weeklybeats.db")
+func fetchRSSPage(year, page int) (*RSS, error) {
+ url := fmt.Sprintf("https://weeklybeats.com/music/rss?limit=1000&year=%d&page=%d", year, page)
+
+ resp, err := http.Get(url)
if err != nil {
- log.Fatal("Failed to open database:", err)
+ return nil, fmt.Errorf("failed to fetch: %w", err)
}
- defer db.Close()
+ defer resp.Body.Close()
- // Create table
- createTable := `
- CREATE TABLE IF NOT EXISTS tracks (
- id INTEGER PRIMARY KEY AUTOINCREMENT,
- title TEXT NOT NULL,
- link TEXT NOT NULL,
- author TEXT,
- week INTEGER,
- year INTEGER,
- url TEXT,
- UNIQUE(author, week, year)
- );`
+ if resp.StatusCode != 200 {
+ return nil, fmt.Errorf("received status %d", resp.StatusCode)
+ }
- _, err = db.Exec(createTable)
+ body, err := io.ReadAll(resp.Body)
if err != nil {
- log.Fatal("Failed to create table:", err)
+ return nil, fmt.Errorf("failed to read response: %w", err)
}
- fmt.Println("Starting to scrape Weekly Beats RSS feed...")
+ cleanedBody := filterInvalidXMLBytes(body)
- // Prepare insert statement
- insertStmt, err := db.Prepare(`
- INSERT OR IGNORE INTO tracks (title, link, author, week, year, url)
- VALUES (?, ?, ?, ?, ?, ?)
- `)
+ var rss RSS
+ err = xml.Unmarshal(cleanedBody, &rss)
if err != nil {
- log.Fatal("Failed to prepare insert statement:", err)
+ return nil, fmt.Errorf("failed to parse XML: %w", err)
}
- defer insertStmt.Close()
- year := 2012
+ return &rss, nil
+}
+
+func insertItems(insertStmt *sql.Stmt, items []Item) int {
+ inserted := 0
+ for _, item := range items {
+ titleAuthor, err := parseTitle(item.Title)
+ if err != nil {
+ log.Printf("Failed to parse title for item '%s': %v", item.Title, err)
+ continue
+ }
+
+ yearWeek, err := parseYearWeek(item.Category)
+ if err != nil {
+ log.Printf("Failed to parse date for item '%s': %v", item.Title, err)
+ continue
+ }
+
+ _, err = insertStmt.Exec(titleAuthor.Title, item.Link, titleAuthor.Author, yearWeek.Week, yearWeek.Year, item.Enclosure.URL)
+ if err != nil {
+ log.Printf("Failed to insert item '%s': %v", item.Title, err)
+ continue
+ }
+ inserted++
+ }
+ return inserted
+}
+
+func scrapeYear(year int, insertStmt *sql.Stmt) int {
totalItems := 0
- for year <= 2024 {
- page := 1
-
- for {
- fmt.Printf("Fetching page %d for year %d...\n", page, year)
-
- // Construct URL with page parameter
- url := fmt.Sprintf("https://weeklybeats.com/music/rss?limit=1000&year=%d&page=%d", year, page)
-
- // Fetch RSS feed
- resp, err := http.Get(url)
- if err != nil {
- log.Printf("Failed to fetch page %d: %v", page, err)
- break
- }
-
- if resp.StatusCode != 200 {
- fmt.Printf("Received status %d for page %d, stopping\n", resp.StatusCode, page)
- resp.Body.Close()
- break
- }
-
- body, err := io.ReadAll(resp.Body)
- resp.Body.Close()
-
- if err != nil {
- log.Printf("Failed to read response body for page %d: %v", page, err)
- break
- }
-
- cleanedBody := filterInvalidXMLBytes(body)
-
- // Parse XML
- var rss RSS
- err = xml.Unmarshal(cleanedBody, &rss)
- if err != nil {
- log.Printf("Failed to parse XML for page %d: %v", page, err)
- break
- }
-
- // Check if we got any items
- if len(rss.Channel.Items) == 0 {
- fmt.Printf("No items found on page %d, stopping\n", page)
- break
- }
-
- fmt.Printf("Found %d items on page %d\n", len(rss.Channel.Items), page)
-
- // Insert items into database
- pageItems := 0
- for _, item := range rss.Channel.Items {
- titleAuthor, err := parseTitle(item.Title)
- if err != nil {
- log.Printf("Failed to parse title for item '%s': %v", item.Title, err)
- continue
- }
-
- yearWeek, err := parseYearWeek(item.Category)
- if err != nil {
- log.Printf("Failed to parse date for item '%s': %v", item.Title, err)
- continue
- }
-
- _, err = insertStmt.Exec(titleAuthor.Title, item.Link, titleAuthor.Author, yearWeek.Week, yearWeek.Year, item.Enclosure.URL)
- if err != nil {
- log.Printf("Failed to insert item '%s': %v", item.Title, err)
- continue
- }
- pageItems++
- }
-
- fmt.Printf("Inserted %d new items from page %d\n", pageItems, page)
- totalItems += pageItems
-
- // Be respectful to the server
- time.Sleep(500 * time.Millisecond)
-
- page++
-
- // Safety check to prevent infinite loops
- if page > 1000 {
- fmt.Println("Reached maximum page limit (1000), stopping")
- break
- }
+ page := 1
+
+ for {
+ fmt.Printf("Fetching page %d for year %d...\n", page, year)
+
+ rss, err := fetchRSSPage(year, page)
+ if err != nil {
+ log.Printf("Failed to fetch page %d: %v", page, err)
+ break
}
- year += 2
+ if len(rss.Channel.Items) == 0 {
+ fmt.Printf("No items found on page %d, stopping\n", page)
+ break
+ }
+
+ fmt.Printf("Found %d items on page %d\n", len(rss.Channel.Items), page)
+
+ pageItems := insertItems(insertStmt, rss.Channel.Items)
+ fmt.Printf("Inserted %d new items from page %d\n", pageItems, page)
+ totalItems += pageItems
+
+ time.Sleep(500 * time.Millisecond)
+
+ page++
+
+ if page > 1000 {
+ fmt.Println("Reached maximum page limit (1000), stopping")
+ break
+ }
}
- // Print summary
- fmt.Printf("\nScraping complete! Total items processed: %d\n", totalItems)
+ return totalItems
+}
- // Query and display some statistics
+func printStatistics(db *sql.DB) {
var count int
- err = db.QueryRow("SELECT COUNT(*) FROM tracks").Scan(&count)
+ err := db.QueryRow("SELECT COUNT(*) FROM tracks").Scan(&count)
if err != nil {
log.Printf("Failed to count records: %v", err)
} else {
fmt.Printf("Total records in database: %d\n", count)
}
- // Show sample of data
fmt.Println("\nSample of stored data:")
rows, err := db.Query("SELECT title, author, link FROM tracks LIMIT 5")
if err != nil {
@@ -254,3 +211,49 @@ func main() {
fmt.Printf("- %s by %s (%s)\n", title, author, link)
}
}
+
+func main() {
+ fmt.Println("Program started")
+
+ db, err := sql.Open("sqlite3", "weeklybeats.db")
+ if err != nil {
+ log.Fatal("Failed to open database:", err)
+ }
+ defer db.Close()
+
+ createTable := `
+ CREATE TABLE IF NOT EXISTS tracks (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ title TEXT NOT NULL,
+ link TEXT NOT NULL,
+ author TEXT,
+ week INTEGER,
+ year INTEGER,
+ url TEXT,
+ UNIQUE(author, week, year)
+ );`
+
+ _, err = db.Exec(createTable)
+ if err != nil {
+ log.Fatal("Failed to create table:", err)
+ }
+
+ fmt.Println("Starting to scrape Weekly Beats RSS feed...")
+
+ insertStmt, err := db.Prepare(`
+ INSERT OR IGNORE INTO tracks (title, link, author, week, year, url)
+ VALUES (?, ?, ?, ?, ?, ?)
+ `)
+ if err != nil {
+ log.Fatal("Failed to prepare insert statement:", err)
+ }
+ defer insertStmt.Close()
+
+ totalItems := 0
+ for year := 2012; year <= 2024; year += 2 {
+ totalItems += scrapeYear(year, insertStmt)
+ }
+
+ fmt.Printf("\nScraping complete! Total items processed: %d\n", totalItems)
+ printStatistics(db)
+}