Neue API + Webscraper

This commit is contained in:
Marc
2025-06-05 14:02:24 +00:00
parent d9f5176904
commit 3f4d5282be
1021 changed files with 113850 additions and 8 deletions

View File

@@ -0,0 +1,30 @@
// /scraper/scrapeScoreboard.js
const axios = require("axios");
const cheerio = require("cheerio");
module.exports = async function scrapeScoreboard(url) {
const res = await axios.get(url);
const $ = cheerio.load(res.data);
const tabelle = [];
$("table.samsDataTable tbody tr").each((_, row) => {
const tds = $(row).find("td");
const platzText = $(tds[0]).text().trim();
const platz = parseInt(platzText, 10);
if (isNaN(platz)) return;
const team = $(tds[1]).find("a span").first().text().trim();
const statusRaw = $(tds[2]).text().trim();
const status = statusRaw.includes("▲") ? "Aufstieg" : statusRaw.includes("▼") ? "Abstieg" : "-";
const spiele = parseInt($(tds[3]).text().trim(), 10);
const siege = parseInt($(tds[4]).text().trim(), 10);
const saetze = $(tds[5]).text().trim();
const punkte = parseInt($(tds[6]).text().trim(), 10);
tabelle.push({ platz, team, status, spiele, siege, saetze, punkte });
});
return tabelle;
};

25
scraper/scrapeSpiele.js Normal file
View File

@@ -0,0 +1,25 @@
// /scraper/scrapeSpiele.js
const axios = require("axios");
const cheerio = require("cheerio");
module.exports = async function scrapeSpiele(url) {
const { data } = await axios.get(url);
const $ = cheerio.load(data);
const ergebnisse = [];
$("table.samsDataTable tbody tr").each((_, row) => {
const tds = $(row).find("td");
if (tds.length === 0) return;
const datum = $(tds[0]).text().trim().replace(/\s+/g, " ");
const team1 = $(tds[4]).text().trim();
const team2 = $(tds[5]).text().trim();
const ergebnisDiv = $(tds[6]).find(".samsMatchResultSetPoints span").text().trim();
const satzpunkte = $(tds[6]).find(".samsMatchResultBallPoints").text().trim().replace(/\s+/g, " ");
ergebnisse.push({ datum, team1, team2, ergebnis: ergebnisDiv, satzverlauf: satzpunkte });
});
return ergebnisse;
};

53
scraper/updateLiveData.js Normal file
View File

@@ -0,0 +1,53 @@
// /scraper/updateLiveData.js
require("dotenv").config({ path: "../.env" });
const { Pool } = require("pg");
const scrapeScoreboard = require("./scrapeScoreboard");
const scrapeSpiele = require("./scrapeSpiele");

const pool = new Pool({
  user: process.env.DB_USER,
  host: "127.0.0.1",
  database: process.env.DB_NAME,
  password: process.env.DB_PASSWORD,
  port: process.env.DB_PORT,
});

console.log("🔌 Verbinde mit DB:", {
  host: "127.0.0.1",
  port: process.env.DB_PORT,
  user: process.env.DB_USER,
  db: process.env.DB_NAME
});

/**
 * Fetches fresh scoreboard + match data for every team that has all
 * scraping metadata configured, and upserts the results as JSON into
 * `team_live_data` (one row per team, keyed on team_id).
 *
 * A failure for one team is logged and does not abort the remaining teams.
 *
 * @returns {Promise<void>}
 */
async function updateAllTeams() {
  const { rows: teams } = await pool.query(`
    SELECT id, name, scraper_identifier, tabellenlink, spielelink
    FROM teams
    WHERE tabellenlink IS NOT NULL AND spielelink IS NOT NULL AND scraper_identifier IS NOT NULL
  `);
  for (const team of teams) {
    try {
      console.log(`🔄 ${team.name} (ID ${team.id})`);
      // The two scrapes are independent — run them in parallel.
      const [scoreboard, spiele] = await Promise.all([
        scrapeScoreboard(team.tabellenlink),
        scrapeSpiele(team.spielelink),
      ]);
      await pool.query(`
        INSERT INTO team_live_data (team_id, scoreboard, spiele)
        VALUES ($1, $2, $3)
        ON CONFLICT (team_id)
        DO UPDATE SET scoreboard = EXCLUDED.scoreboard, spiele = EXCLUDED.spiele, last_updated = now()
      `, [team.id, JSON.stringify(scoreboard), JSON.stringify(spiele)]);
      console.log(`✅ Gespeichert: ${team.name}`);
    } catch (err) {
      // Best-effort per team: log and continue with the next one.
      console.error(`❌ Fehler bei ${team.name}:`, err.message);
    }
  }
}

// Top-level entry point: handle a rejected run (e.g. the teams query
// failing) instead of leaving a floating promise, and always close the
// pool so the process can exit cleanly.
updateAllTeams()
  .catch((err) => {
    console.error("❌ Update fehlgeschlagen:", err.message);
    process.exitCode = 1;
  })
  .finally(() => pool.end());