feat: complete bookmarklet implementation (v1.7.0)

This commit is contained in:
2026-02-12 10:06:42 +01:00
parent bc98a19fc6
commit 5f961403d0
100 changed files with 1355 additions and 14064 deletions

0
.agent/rules/rules.md Normal file → Executable file
View File

0
.env.example Normal file → Executable file
View File

0
.gitignore vendored Normal file → Executable file
View File

View File

@@ -1,63 +0,0 @@
# BASE: linuxserver Webtop image (Ubuntu + XFCE desktop, served via browser).
# NOTE(review): tag is floating — pin a version/digest for reproducible builds.
FROM lscr.io/linuxserver/webtop:ubuntu-xfce

# METADATA
LABEL maintainer="DeinName"
LABEL description="Google Antigravity IDE (Official Repo)"

# Fail piped RUN commands (curl | gpg, curl | bash) when any pipe stage fails.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# PREP: tools required to register the Google APT repository (curl, gnupg).
# update+install share one layer (no stale apt cache), the list cache is
# removed in the same layer so it never ships in the image, and
# --no-install-recommends keeps the layer minimal.
RUN apt-get update && apt-get install -y --no-install-recommends \
      curl \
      gnupg \
    && rm -rf /var/lib/apt/lists/*

# INSTALL: add the Google Antigravity repository
# 1. create the keyring directory
# 2. download and dearmor the repository signing key
# 3. register the repository under sources.list.d
RUN mkdir -p /etc/apt/keyrings && \
    curl -fsSL https://us-central1-apt.pkg.dev/doc/repo-signing-key.gpg | gpg --dearmor -o /etc/apt/keyrings/antigravity-repo-key.gpg && \
    echo "deb [signed-by=/etc/apt/keyrings/antigravity-repo-key.gpg] https://us-central1-apt.pkg.dev/projects/antigravity-auto-updater-dev/ antigravity-debian main" | tee /etc/apt/sources.list.d/antigravity.list

# INSTALL: Antigravity itself. `update` runs in the same layer so the repo
# added above is actually visible and not served from a stale cached layer.
RUN apt-get update && \
    apt-get install -y antigravity && \
    rm -rf /var/lib/apt/lists/*

# INSTALL: Node.js and npm for scraper development (NodeSource, current 20.x).
# NOTE(review): curl | bash executes an unpinned remote script; consider
# verifying a checksum or installing the versioned .deb directly.
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && \
    apt-get install -y nodejs && \
    rm -rf /var/lib/apt/lists/*

# ENVIRONMENT for Chromium, Playwright and Puppeteer.
# These help avoid common browser-in-Docker issues (GPU, /dev/shm, sandbox).
# NOTE: no --headless flag, so the browser stays visible for monitoring and
# manual intervention inside the Webtop desktop.
# NOTE(review): CHROME_BIN / PUPPETEER_EXECUTABLE_PATH assume a
# chromium-browser binary at /usr/bin — confirm the base image provides it.
ENV CHROME_BIN=/usr/bin/chromium-browser \
    CHROMIUM_FLAGS="--disable-gpu --remote-debugging-port=9222 --no-sandbox --disable-dev-shm-usage" \
    PLAYWRIGHT_BROWSERS_PATH=/usr/bin \
    PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1 \
    PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser \
    PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true

# PATCH the .desktop launcher.
# The original file typically lives at /usr/share/applications/antigravity.desktop.
# Rewrite its 'Exec=' line to add the flags --no-sandbox and --disable-gpu,
# which the app needs when launched inside the container.
RUN DESKTOP_FILE="/usr/share/applications/antigravity.desktop" && \
    if [ -f "$DESKTOP_FILE" ]; then \
        sed -i 's|Exec=/usr/share/antigravity/antigravity %F|Exec=/usr/share/antigravity/antigravity --no-sandbox --disable-gpu %F|g' "$DESKTOP_FILE" && \
        echo "INFO: antigravity.desktop wurde erfolgreich gepatcht." || \
        echo "FEHLER: sed-Befehl konnte die Datei $DESKTOP_FILE nicht patchen." ; \
    else \
        echo "WARNUNG: $DESKTOP_FILE nicht gefunden. Überspringe Patch." ; \
    fi

# OPTIONAL: autostart — copy the .desktop file into the autostart folder.
# Left disabled; '~' would not resolve to the runtime user's home at build time.
#RUN mkdir -p /defaults/autostart 2> /dev/null && \
#    cp /usr/share/applications/antigravity.desktop ~/.config/autostart/ 2>/dev/null

# VOLUME for persistent user/session data (declared last, after all writes).
VOLUME /config

0
README.md Normal file → Executable file
View File

0
REQUIREMENTS.md Normal file → Executable file
View File

0
RESEARCH.md Normal file → Executable file
View File

View File

@@ -1,113 +0,0 @@
{
"cookies": [
{
"name": "__cmpcc",
"value": "1",
"domain": "web.bessa.app",
"path": "/",
"expires": 1804426749,
"size": 8,
"httpOnly": false,
"secure": true,
"session": false,
"sameSite": "Lax",
"priority": "Medium",
"sameParty": false,
"sourceScheme": "Secure",
"sourcePort": 443
},
{
"name": "g_state",
"value": "{\"i_l\":0,\"i_ll\":1770298749698,\"i_b\":\"OwmjB/xOTtJtApPjLsCx6Vw3vtuduXvkmTEMjNJSXuE\",\"i_e\":{\"enable_itp_optimization\":15}}",
"domain": "web.bessa.app",
"path": "/",
"expires": 1785850749,
"size": 126,
"httpOnly": false,
"secure": false,
"session": false,
"priority": "Medium",
"sameParty": false,
"sourceScheme": "Secure",
"sourcePort": 443
},
{
"name": "_fbp",
"value": "fb.1.1770298749735.57921644138573676",
"domain": ".bessa.app",
"path": "/",
"expires": 1778074808,
"size": 40,
"httpOnly": false,
"secure": false,
"session": false,
"sameSite": "Lax",
"priority": "Medium",
"sameParty": false,
"sourceScheme": "Secure",
"sourcePort": 443
},
{
"name": "_ga",
"value": "GA1.1.531786136.1770298750",
"domain": ".bessa.app",
"path": "/",
"expires": 1804858750.164727,
"size": 29,
"httpOnly": false,
"secure": false,
"session": false,
"priority": "Medium",
"sameParty": false,
"sourceScheme": "Secure",
"sourcePort": 443
},
{
"name": "__cmpconsent16021",
"value": "CQfJhpgQfJhpgAfS8BENCQFgAAAAAAAAAAigF5wAQF5gXnABAXmAAA",
"domain": ".bessa.app",
"path": "/",
"expires": 1801834763,
"size": 71,
"httpOnly": false,
"secure": true,
"session": false,
"sameSite": "None",
"priority": "Medium",
"sameParty": false,
"sourceScheme": "Secure",
"sourcePort": 443
},
{
"name": "__cmpcccu16021",
"value": "aCQfLid1gA6XgGsY5PMqNQExowBZVC0A0QwAgQIRoBSm",
"domain": ".bessa.app",
"path": "/",
"expires": 1801834763,
"size": 58,
"httpOnly": false,
"secure": true,
"session": false,
"sameSite": "None",
"priority": "Medium",
"sameParty": false,
"sourceScheme": "Secure",
"sourcePort": 443
},
{
"name": "_ga_NT5W7DSRT4",
"value": "GS2.1.s1770298750$o1$g1$t1770298809$j1$l0$h0",
"domain": ".bessa.app",
"path": "/",
"expires": 1804858809.787043,
"size": 58,
"httpOnly": false,
"secure": false,
"session": false,
"priority": "Medium",
"sameParty": false,
"sourceScheme": "Secure",
"sourcePort": 443
}
]
}

File diff suppressed because one or more lines are too long

View File

@@ -1 +0,0 @@
{"lastExternalReferrerTime":"1770298749727","__cmpconsent16021_.bessa.app":"CQfJhpgQfJhpgAfS8BENCQFgAAAAAAAAAAigF5wAQF5gXnABAXmAAA","topicsLastReferenceTime":"1770298808783","__cmpconsent16021_expire_.bessa.app":"1801834763719","lastExternalReferrer":"empty","__cmpcccu16021_.bessa.app":"aCQfLid1gA6XgGsY5PMqNQExowBZVC0A0QwAgQIRoBSm","__cmpcccu16021_expire_.bessa.app":"1801834763719","AkitaStores":"{\"$cache\":{\"auth\":false},\"auth\":{\"askedForNotifications\":false,\"token\":\"dba7d86e83c7f462fd8af96521dea41c4facd8a5\",\"user\":{\"id\":85567,\"lastLogin\":\"2026-02-05T13:40:04.251Z\",\"created\":\"2025-11-26T22:22:46.535Z\",\"updated\":\"2025-12-09T07:26:02.309Z\",\"email\":\"knapp-2041@bessa.app\",\"firstName\":\"Michael\",\"lastName\":\"Kaufmann\",\"locale\":\"de_de\",\"country\":\"\",\"language\":\"de\",\"profile\":null,\"uuid\":\"de0e6518-f917-4679-afd1-47a6f5e22a55\",\"groups\":[\"Managed\"],\"dateOfBirth\":null,\"passwordChanged\":\"2025-12-09T07:26:02.304Z\",\"gender\":1}}}"}

File diff suppressed because it is too large Load Diff

View File

@@ -1,131 +0,0 @@
{
"capturedAt": "2026-02-11T10:15:00Z",
"description": "Captured API traffic from Bessa web app during order placement and cancellation",
"apiCalls": [
{
"step": "1. Fetch available dates (with existing orders)",
"method": "GET",
"url": "https://api.bessa.app/v1/venues/591/menu/dates/",
"requestHeaders": {
"Accept": "application/json",
"Authorization": "Token dba7d86e83c7f462fd8af96521dea41c4facd8a5",
"X-Client-Version": "1.7.0_prod/2026-01-26"
},
"responseStatus": 200,
"responseBody": {
"results": [
{
"date": "2026-02-11",
"id": 691,
"orders": [
{
"id": 1522671,
"order_state": 8,
"total": "5.50",
"items": [
{
"name": "M1 Herzhaftes"
}
]
}
]
},
{
"date": "2026-02-12",
"id": 692,
"orders": [
{
"id": 1522672,
"order_state": 5,
"total": "5.50",
"items": [
{
"name": "M5F Salat mit Gebäck DO 2"
}
]
}
]
},
{
"date": "2026-02-13",
"id": 698,
"orders": []
}
]
},
"duration": 202
},
{
"step": "2. Place order (POST)",
"method": "POST",
"url": "https://api.bessa.app/v1/user/orders/",
"requestHeaders": {
"Accept": "application/json",
"Authorization": "Token dba7d86e83c7f462fd8af96521dea41c4facd8a5",
"Content-Type": "application/json"
},
"requestBody": {
"customer": {
"email": "knapp-2041@bessa.app",
"first_name": "Michael",
"last_name": "Kaufmann"
},
"date": "2026-02-13T10:00:00.000Z",
"items": [
{
"amount": 1,
"article": 182378,
"name": "M1 W2",
"price": 4,
"vat": "10.00"
}
],
"order_type": 7,
"payment_method": "payroll",
"total": 4,
"venue": 591
},
"responseStatus": 201,
"responseBody": {
"id": 1535066,
"hash_id": "o_xlOaq",
"order_state": 5,
"total": "4.00"
},
"duration": 269
},
{
"step": "3. Cancel order (PATCH)",
"method": "PATCH",
"url": "https://api.bessa.app/v1/user/orders/1535066/cancel/",
"requestHeaders": {
"Accept": "application/json",
"Authorization": "Token dba7d86e83c7f462fd8af96521dea41c4facd8a5",
"Content-Type": "application/json"
},
"requestBody": {},
"responseStatus": 200,
"responseBody": {
"order_id": "o_xlOaq",
"state": "order cancelled."
},
"duration": 133
}
],
"orderStates": {
"5": "Transmitted/Active (new order)",
"8": "Accepted/Processed (confirmed by kitchen)"
},
"configurationDetails": {
"orderType": 7,
"configName": "canteen",
"paymentTypes": [
"payroll"
],
"cancellationCutoff": 3600,
"allowCancel": true,
"preorderDelta": 3600,
"venueId": 591,
"venueTimezone": "CET"
}
}

File diff suppressed because one or more lines are too long

0
bessa-openapi.yaml Normal file → Executable file
View File

136
build-bookmarklet.sh Executable file
View File

@@ -0,0 +1,136 @@
#!/bin/bash
# Build script for Kantine Bookmarklet.
# Creates a self-contained bookmarklet URL and standalone HTML file.
#
# Outputs (in dist/):
#   kantine-standalone.html  - full page with inlined CSS/JS for local testing
#   bookmarklet-payload.js   - un-encoded injection payload (for inspection)
#   bookmarklet.txt          - percent-encoded javascript: URL for bookmarks
#   install.html             - drag-and-drop installer page
#
# Requires: bash, coreutils, python3 (JSON escaping + URL encoding).
set -e

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
DIST_DIR="$SCRIPT_DIR/dist"
CSS_FILE="$SCRIPT_DIR/public/style.css"
JS_FILE="$SCRIPT_DIR/kantine.js"

mkdir -p "$DIST_DIR"
echo "=== Kantine Bookmarklet Builder ==="

# Check inputs exist before doing any work.
if [ ! -f "$CSS_FILE" ]; then echo "ERROR: $CSS_FILE not found"; exit 1; fi
if [ ! -f "$JS_FILE" ]; then echo "ERROR: $JS_FILE not found"; exit 1; fi
# python3 performs the JSON escaping and URL encoding below.
# BUGFIX: the previous sed pipeline used 's/ */ /g', whose pattern matches the
# empty string and therefore inserted a space between EVERY character,
# corrupting both the escaped CSS and the flattened payload. The sed-based
# fallback for JSON-escaping the JS was equally unsafe, so python3 is now a
# hard requirement.
if ! command -v python3 > /dev/null 2>&1; then
    echo "ERROR: python3 is required"; exit 1
fi

# === 1. Build standalone HTML (for local testing/dev) ===
cat > "$DIST_DIR/kantine-standalone.html" << 'HTMLEOF'
<!DOCTYPE html>
<html lang="de">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Kantine Weekly Menu (Standalone)</title>
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap" rel="stylesheet">
<link href="https://fonts.googleapis.com/icon?family=Material+Icons+Round" rel="stylesheet">
<style>
HTMLEOF
# Inject CSS verbatim (a <style> block needs no escaping).
cat "$CSS_FILE" >> "$DIST_DIR/kantine-standalone.html"
cat >> "$DIST_DIR/kantine-standalone.html" << 'HTMLEOF'
</style>
</head>
<body>
<script>
HTMLEOF
# Inject app JS verbatim.
cat "$JS_FILE" >> "$DIST_DIR/kantine-standalone.html"
cat >> "$DIST_DIR/kantine-standalone.html" << 'HTMLEOF'
</script>
</body>
</html>
HTMLEOF
echo "✅ Standalone HTML: $DIST_DIR/kantine-standalone.html"

# === 2. Build bookmarklet (JavaScript URL) ===
# The bookmarklet injects CSS + JS into the current page. The payload is
# generated entirely in python so quotes, backslashes and newlines in the
# CSS/JS are JSON-escaped correctly instead of via fragile sed chains.
python3 - "$CSS_FILE" "$JS_FILE" > "$DIST_DIR/bookmarklet-payload.js" << 'PYEOF'
import json
import sys

# Collapse all whitespace runs in the CSS to single spaces to keep the URL short.
css = ' '.join(open(sys.argv[1], encoding='utf-8').read().split())
js = open(sys.argv[2], encoding='utf-8').read()

print(
    "(function(){"
    "if(window.__KANTINE_LOADED){alert('Kantine Wrapper already loaded!');return;}"
    "var s=document.createElement('style');"
    "s.textContent=" + json.dumps(css) + ";"
    "document.head.appendChild(s);"
    "var sc=document.createElement('script');"
    "sc.textContent=" + json.dumps(js) + ";"
    "document.head.appendChild(sc);"
    "})();"
)
PYEOF

# URL-encode for bookmark.
# BUGFIX: the previous version only collapsed whitespace; raw spaces, quotes
# and '%' in a javascript: URL are mangled by browsers' bookmark managers.
python3 - "$DIST_DIR/bookmarklet-payload.js" > "$DIST_DIR/bookmarklet.txt" << 'PYEOF'
import sys
import urllib.parse

payload = open(sys.argv[1], encoding='utf-8').read().strip()
print('javascript:' + urllib.parse.quote(payload, safe="()!*'~._-"))
PYEOF
echo "✅ Bookmarklet URL: $DIST_DIR/bookmarklet.txt"

# === 3. Create an easy-to-use HTML installer page ===
cat > "$DIST_DIR/install.html" << 'INSTALLEOF'
<!DOCTYPE html>
<html lang="de">
<head>
<meta charset="UTF-8">
<title>Kantine Wrapper Installer</title>
<style>
body { font-family: 'Inter', sans-serif; max-width: 600px; margin: 40px auto; padding: 20px; background: #1a1a2e; color: #eee; }
h1 { color: #e94560; }
.instructions { background: #16213e; padding: 20px; border-radius: 12px; margin: 20px 0; }
.instructions ol li { margin: 10px 0; }
a.bookmarklet { display: inline-block; background: #e94560; color: white; padding: 12px 24px; border-radius: 8px; text-decoration: none; font-weight: 600; font-size: 18px; cursor: grab; }
a.bookmarklet:hover { background: #c73652; }
code { background: #0f3460; padding: 2px 6px; border-radius: 4px; }
</style>
</head>
<body>
<h1>🍽️ Kantine Wrapper</h1>
<div class="instructions">
<h2>Installation</h2>
<ol>
<li>Ziehe den Button unten in deine <strong>Lesezeichen-Leiste</strong> (Drag & Drop)</li>
<li>Navigiere zu <a href="https://web.bessa.app/knapp-kantine" style="color:#e94560">web.bessa.app/knapp-kantine</a></li>
<li>Klicke auf das Lesezeichen <code>Kantine Wrapper</code></li>
</ol>
</div>
<p>👇 Diesen Button in die Lesezeichen-Leiste ziehen:</p>
<p><a class="bookmarklet" id="bookmarklet-link" href="#">⏳ Wird generiert...</a></p>
<script>
INSTALLEOF
# Embed the bookmarklet URL inline as a JS string assignment.
echo "document.getElementById('bookmarklet-link').href = " >> "$DIST_DIR/install.html"
cat "$JS_FILE" | python3 -c "
import sys, json
js = sys.stdin.read()
css = ' '.join(open('$CSS_FILE', encoding='utf-8').read().split())
bmk = '''javascript:(function(){if(window.__KANTINE_LOADED){alert(\"Already loaded\");return;}var s=document.createElement(\"style\");s.textContent=''' + json.dumps(css) + ''';document.head.appendChild(s);var sc=document.createElement(\"script\");sc.textContent=''' + json.dumps(js) + ''';document.head.appendChild(sc);})();'''
print(json.dumps(bmk) + ';')
" 2>/dev/null >> "$DIST_DIR/install.html" || echo "'javascript:alert(\"Build error\")'" >> "$DIST_DIR/install.html"
cat >> "$DIST_DIR/install.html" << 'INSTALLEOF'
document.getElementById('bookmarklet-link').textContent = '🍽️ Kantine Wrapper';
</script>
</body>
</html>
INSTALLEOF
echo "✅ Installer page: $DIST_DIR/install.html"

echo ""
echo "=== Build Complete ==="
echo "Files in $DIST_DIR:"
ls -la "$DIST_DIR/"

View File

@@ -1 +0,0 @@
// Placeholder for data directory

View File

@@ -1,21 +0,0 @@
// Quick probe: dump the Bessa menu JSON for venue 591 / menu 7 on 2026-02-13.
// NOTE(review): SESS_KEY is a hardcoded captured token — rotate/remove before
// committing this anywhere public.
const SESS_KEY = 'c3418725e95a9f90e3645cbc846b4d67c7c66131';
const MENU_URL = 'https://web.bessa.app/api/v1/venues/591/menu/7/2026-02-13/';

// Headers mirroring what the web client sends.
const REQUEST_HEADERS = {
    'Authorization': `Token ${SESS_KEY}`,
    'Accept': 'application/json',
    'X-Client-Version': '3.10.2'
};

async function run() {
    try {
        const res = await fetch(MENU_URL, { headers: REQUEST_HEADERS });
        const payload = await res.json();
        console.log(JSON.stringify(payload, null, 2));
    } catch (err) {
        console.error(err);
    }
}

run();

View File

@@ -1,58 +0,0 @@
// Probe for a "current user" endpoint on the Bessa API using the captured
// token: tries /v1/users/me/ first, then /v1/auth/user/, printing the JSON of
// whichever responds OK, otherwise the HTTP status of each failure.
// NOTE(review): both token constants below are captured session/guest tokens
// hardcoded for this one-off probe — do not keep them in the repo.
const GUEST_TOKEN = 'c3418725e95a9f90e3645cbc846b4d67c7c66131';

async function checkMe() {
    const token = 'c3418725e95a9f90e3645cbc846b4d67c7c66131';
    const headers = {
        'Authorization': `Token ${token}`,
        'Accept': 'application/json'
    };
    // Candidate endpoints with their progress messages, in probe order.
    const candidates = [
        ['Testing /users/me/ ...', 'https://api.bessa.app/v1/users/me/'],
        ['Testing /auth/user/ ...', 'https://api.bessa.app/v1/auth/user/']
    ];
    try {
        for (const [label, endpoint] of candidates) {
            console.log(label);
            const res = await fetch(endpoint, { headers });
            if (res.ok) {
                // First endpoint that answers wins.
                console.log(await res.json());
                return;
            }
            console.log(`Failed: ${res.status}`);
        }
    } catch (e) {
        console.error(e);
    }
}

checkMe();

File diff suppressed because one or more lines are too long

View File

1187
kantine.js Executable file

File diff suppressed because it is too large Load Diff

2637
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,35 +0,0 @@
{
"name": "kantine-wrapper",
"version": "1.0.0",
"description": "Bessa Knapp-Kantine Menu Scraper",
"main": "dist/index.js",
"type": "module",
"scripts": {
"build": "tsc",
"scrape": "node dist/index.js",
"dev": "tsx src/index.ts",
"analyze": "tsx src/scraper/interactive-analyzer.ts",
"test:api": "tsx src/scraper/api-test.ts",
"server": "tsx src/server.ts",
"type-check": "tsc --noEmit"
},
"keywords": [
"scraper",
"puppeteer",
"bessa",
"kantine"
],
"author": "",
"license": "MIT",
"dependencies": {
"dotenv": "^16.4.5",
"express": "^5.2.1",
"puppeteer": "^22.0.0"
},
"devDependencies": {
"@types/express": "^5.0.6",
"@types/node": "^20.11.0",
"tsx": "^4.7.0",
"typescript": "^5.3.3"
}
}

0
public/app.js Normal file → Executable file
View File

0
public/index.html Normal file → Executable file
View File

32
public/style.css Normal file → Executable file
View File

@@ -55,6 +55,18 @@ body {
-webkit-font-smoothing: antialiased; -webkit-font-smoothing: antialiased;
} }
/* Fix scrolling bug: Reset html/body styles from host page */
html,
body {
height: auto !important;
min-height: 100% !important;
overflow-y: auto !important;
overflow-x: hidden !important;
position: static !important;
margin: 0 !important;
padding: 0 !important;
}
/* Header */ /* Header */
.app-header { .app-header {
position: sticky; position: sticky;
@@ -121,6 +133,26 @@ body {
line-height: 1.3; line-height: 1.3;
} }
.header-center-wrapper {
display: flex;
flex-direction: row;
align-items: center;
gap: 1.5rem;
justify-content: center;
}
.weekly-cost {
white-space: nowrap;
font-size: 0.9rem;
font-weight: 600;
color: var(--success-color);
background-color: var(--bg-body);
padding: 0.25rem 0.75rem;
border-radius: 20px;
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05);
border: 1px solid var(--border-color);
}
.header-week-title { .header-week-title {
font-size: 1.1rem; font-size: 1.1rem;
font-weight: 600; font-weight: 600;

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 49 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 57 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 55 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 55 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 53 KiB

View File

@@ -1,42 +0,0 @@
// Configuration for scraper
import dotenv from 'dotenv';
dotenv.config();
// Central scraper configuration, assembled from environment variables
// (loaded via dotenv above) plus fixed defaults; frozen with `as const`.
export const config = {
    // Login credentials read from the environment (.env). Empty string when
    // unset — validateConfig() rejects that case at startup.
    credentials: {
        employeeNumber: process.env.BESSA_EMPLOYEE_NUMBER || '',
        password: process.env.BESSA_PASSWORD || '',
    },
    // Puppeteer settings: headless unless PUPPETEER_HEADLESS is exactly the
    // string 'false'; timeouts are in milliseconds.
    puppeteer: {
        headless: process.env.PUPPETEER_HEADLESS !== 'false',
        defaultTimeout: 30000,
        navigationTimeout: 60000,
    },
    // Scraper pacing (milliseconds) and retry budget.
    scraper: {
        waitAfterClick: 1000,
        waitAfterNavigation: 2000,
        maxRetries: 3,
    },
    // Storage locations, relative to the process working directory.
    storage: {
        dataDir: './data',
        menuFile: './data/menus.json',
    },
} as const;
/**
 * Fail fast when required login credentials are missing.
 * Throws an Error naming the first unset .env variable, in declaration order.
 */
export function validateConfig(): void {
    const checks: ReadonlyArray<[string, string]> = [
        [config.credentials.employeeNumber, 'BESSA_EMPLOYEE_NUMBER is required in .env file'],
        [config.credentials.password, 'BESSA_PASSWORD is required in .env file'],
    ];
    for (const [value, message] of checks) {
        if (!value) {
            throw new Error(message);
        }
    }
}

View File

@@ -1,57 +0,0 @@
#!/usr/bin/env node
import { MenuScraper } from './scraper/menu-scraper.js';
import { mergeWeeklyMenu } from './storage/menu-store.js';
import { config, validateConfig } from './config.js';
import { logger } from './utils/logger.js';
/**
* Main entry point for the scraper
*/
async function main() {
try {
// Validate configuration
logger.info('Validating configuration...');
validateConfig();
// Initialize scraper
const scraper = new MenuScraper();
await scraper.init();
try {
// Scrape menus
logger.info('Starting scrape of menus (multi-week)...');
const weeklyMenu = await scraper.scrapeMenus();
// Save to storage
logger.info('Saving scraped data...');
await mergeWeeklyMenu(weeklyMenu);
// Print summary
logger.success('\\n=== Scraping Complete ===');
logger.info(`Week: ${weeklyMenu.year}-W${weeklyMenu.weekNumber}`);
logger.info(`Days scraped: ${weeklyMenu.days.length}`);
for (const day of weeklyMenu.days) {
logger.info(` ${day.weekday}: ${day.items.length} items`);
}
const totalItems = weeklyMenu.days.reduce((sum, day) => sum + day.items.length, 0);
logger.success(`Total menu items: ${totalItems}`);
} finally {
// Always close browser
await scraper.close();
}
} catch (error) {
logger.error('Scraping failed:', error);
process.exit(1);
}
}
// Run only when invoked directly as a script (argv[1] is this module's path);
// importing the module elsewhere does not trigger a scrape.
if (import.meta.url === `file://${process.argv[1]}`) {
    main();
}

export { main };

View File

@@ -1,51 +0,0 @@
import { logger } from '../utils/logger.js';
/**
 * Smoke-test the Bessa menu API with the token captured from local storage:
 * builds today's menu URL for venue 591 / menu 7, performs the request, and
 * logs either the pretty-printed JSON payload or the error body.
 * NOTE(review): the token is a captured session value and will eventually
 * expire — replace via configuration before relying on this script.
 */
async function runApiTest() {
    logger.info('Starting API Test with cached token...');

    // Token from local_storage.json
    const cachedToken = 'dba7d86e83c7f462fd8af96521dea41c4facd8a5';

    // Today's date as YYYY-MM-DD. (Late hours/weekends may have no menu.)
    const dateStr = new Date().toISOString().split('T')[0];

    const venueId = 591;
    const menuId = 7;
    const apiUrl = `https://api.bessa.app/v1/venues/${venueId}/menu/${menuId}/${dateStr}/`;

    logger.info(`Testing API call to: ${apiUrl}`);
    logger.info(`Using Token: ${cachedToken.substring(0, 10)}...`);

    // User-Agent copied from a real browser session used during analysis.
    const requestHeaders = {
        'Authorization': `Token ${cachedToken}`,
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36'
    };

    try {
        const response = await fetch(apiUrl, { headers: requestHeaders });
        logger.info(`Response Status: ${response.status} ${response.statusText}`);

        if (!response.ok) {
            logger.error('API Call Failed.');
            const text = await response.text();
            console.log('Response Body:', text);
            return;
        }

        const data = await response.json();
        logger.success('API Call Successful!');
        console.log(JSON.stringify(data, null, 2));
    } catch (error) {
        logger.error('Fetch failed:', error);
    }
}

runApiTest();

View File

@@ -1,157 +0,0 @@
import puppeteer from 'puppeteer';
import fs from 'fs/promises';
import path from 'path';
import * as readline from 'readline';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Ensure we have a place to save logs
const ARTIFACTS_DIR = process.env.ANTIGRAVITY_ARTIFACTS_DIR || path.join(process.cwd(), 'analysis_results');
// Create `dir` (including any missing parents) unless it already exists.
async function ensureDir(dir: string) {
    const exists = await fs.access(dir).then(() => true, () => false);
    if (!exists) {
        await fs.mkdir(dir, { recursive: true });
    }
}
/**
 * Interactive network-analysis session against web.bessa.app.
 *
 * Launches a visible (non-headless) Chromium via Puppeteer, records API
 * traffic from the relevant hosts while the operator logs in and browses
 * manually, then — once Enter is pressed in the terminal — dumps the final
 * page HTML, cookies, local/session storage and the captured network log
 * into ARTIFACTS_DIR, and terminates the process with exit code 0.
 */
async function runInteractiveAnalysis() {
    await ensureDir(ARTIFACTS_DIR);
    console.log('--- INTERACTIVE ANALYSIS TOOL ---');
    console.log('Starting Browser (Headless: FALSE)...');
    console.log('Artifacts will be saved to:', ARTIFACTS_DIR);
    const browser = await puppeteer.launch({
        headless: false, // User wants to see and interact
        defaultViewport: null, // Full window
        // NOTE(review): hardcoded system Chromium path — assumes the image
        // installs it at /usr/bin/chromium; confirm when run outside Docker.
        executablePath: '/usr/bin/chromium',
        args: [
            '--start-maximized',
            '--no-sandbox',
            '--disable-setuid-sandbox',
            '--disable-dev-shm-usage'
        ],
        devtools: true // Useful for the user to see what's happening
    });
    const page = await browser.newPage();
    // Setup Data Collection: every relevant response is appended here.
    const networkLogs: any[] = [];
    const relevantHosts = ['bessa.app', 'web.bessa.app'];
    // Interception must be enabled for the handlers below; all requests are
    // passed through unmodified.
    await page.setRequestInterception(true);
    page.on('request', (request) => {
        // Continue all requests
        request.continue();
    });
    page.on('response', async (response) => {
        const url = response.url();
        const type = response.request().resourceType();
        // Filter: We are mainly interested in XHR, Fetch, and Documents (for initial load)
        // And only from relevant hosts to avoid noise (analytics, external fonts, etc.)
        const isRelevantHost = relevantHosts.some(host => url.includes(host));
        const isRelevantType = ['xhr', 'fetch', 'document', 'script'].includes(type);
        if (isRelevantHost && isRelevantType) {
            try {
                // Capture the body: JSON first, then truncated text, else a marker.
                let responseBody = null;
                if (url.includes('/api/') || type === 'xhr' || type === 'fetch') {
                    try {
                        responseBody = await response.json();
                    } catch (e) {
                        // Not JSON, maybe text?
                        try {
                            // Limit text size
                            const text = await response.text();
                            responseBody = text.length > 2000 ? text.substring(0, 2000) + '...[TRUNCATED]' : text;
                        } catch (e2) {
                            responseBody = '[COULD NOT READ BODY]';
                        }
                    }
                }
                networkLogs.push({
                    timestamp: new Date().toISOString(),
                    method: response.request().method(),
                    url: url,
                    status: response.status(),
                    type: type,
                    requestHeaders: response.request().headers(),
                    responseHeaders: response.headers(),
                    body: responseBody
                });
                // Real-time feedback in the terminal for API hits.
                if (url.includes('/api/')) {
                    console.log(`[API CAPTURED] ${response.request().method()} ${url}`);
                }
            } catch (err) {
                // Ignore errors reading response (e.g. redirects or closed)
            }
        }
    });
    // Initial navigation
    console.log('Navigating to base URL...');
    await page.goto('https://web.bessa.app/knapp-kantine', { waitUntil: 'networkidle2' });
    console.log('\n================================================================================');
    console.log('BROWSER IS OPEN. PLEASE ACTION REQUIRED:');
    console.log('1. Log in manually in the browser window.');
    console.log('2. Navigate to the menu view (Day Selection -> Select Day -> Menu).');
    console.log('3. Browse around to trigger API calls.');
    console.log('\nWHEN YOU ARE DONE, PRESS [ENTER] IN THIS TERMINAL TO SAVE AND EXIT.');
    console.log('================================================================================\n');
    // Block until the operator presses Enter in the terminal.
    const rl = readline.createInterface({
        input: process.stdin,
        output: process.stdout
    });
    await new Promise<void>(resolve => {
        rl.question('Press Enter to finish analysis...', () => {
            rl.close();
            resolve();
        });
    });
    console.log('Capturing final state...');
    // 1. Save Full Page HTML
    const html = await page.content();
    await fs.writeFile(path.join(ARTIFACTS_DIR, 'final_page_state.html'), html);
    // 2. Save Cookies/Storage (for Auth Replication) via a CDP session.
    const client = await page.target().createCDPSession();
    const cookies = await client.send('Network.getAllCookies');
    await fs.writeFile(path.join(ARTIFACTS_DIR, 'cookies.json'), JSON.stringify(cookies, null, 2));
    const localStorageData = await page.evaluate(() => {
        return JSON.stringify(localStorage);
    });
    await fs.writeFile(path.join(ARTIFACTS_DIR, 'local_storage.json'), localStorageData);
    const sessionStorageData = await page.evaluate(() => {
        return JSON.stringify(sessionStorage);
    });
    await fs.writeFile(path.join(ARTIFACTS_DIR, 'session_storage.json'), sessionStorageData);
    // 3. Save Network Logs
    await fs.writeFile(path.join(ARTIFACTS_DIR, 'network_traffic.json'), JSON.stringify(networkLogs, null, 2));
    console.log('Analysis data saved to:', ARTIFACTS_DIR);
    await browser.close();
    process.exit(0);
}

runInteractiveAnalysis().catch(console.error);

View File

@@ -1,745 +0,0 @@
/// <reference lib="dom" />
import puppeteer, { Browser, Page } from 'puppeteer';
import { WeeklyMenu, DayMenu, MenuItem } from '../types.js';
import { SELECTORS, URLS } from './selectors.js';
import { config } from '../config.js';
import { logger } from '../utils/logger.js';
import path from 'path';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Response shapes of the Bessa venue-menu endpoint as consumed by the scraper.
// NOTE(review): field meanings are inferred from captured API traffic —
// confirm against the actual API before relying on them elsewhere.

// One menu item (dish) within a day's menu.
interface ApiMenuItem {
    id: number;
    name: string;
    description: string;
    price: string;            // decimal price as a string, e.g. "5.50"
    available_amount: string; // remaining stock as a string — TODO confirm semantics
    created: string;          // timestamp string — presumably ISO 8601; verify
    updated: string;          // timestamp string — presumably ISO 8601; verify
}

// One day's menu: its id, the items offered, and the date string.
interface ApiMenuResult {
    id: number;
    items: ApiMenuItem[];
    date: string;
}

// Top-level payload: day menus are listed under `results`.
interface ApiMenuResponse {
    results: ApiMenuResult[];
}
export class MenuScraper {
private browser: Browser | null = null;
private page: Page | null = null;
/**
* Initialize browser and page
*/
async init(): Promise<void> {
  logger.info('[TRACE] Starting browser initialization...');
  logger.info(`[TRACE] Using Chromium at: /usr/bin/chromium`);
  logger.info(`[TRACE] Headless mode: ${config.puppeteer.headless}`);
  // Sandbox/dev-shm flags are the usual requirements for running Chromium
  // inside a container; executable path is pinned to the system Chromium.
  this.browser = await puppeteer.launch({
    headless: config.puppeteer.headless,
    executablePath: '/usr/bin/chromium',
    args: [
      '--no-sandbox',
      '--disable-setuid-sandbox',
      '--disable-dev-shm-usage',
    ],
  });
  logger.info('[TRACE] Puppeteer launch completed');
  logger.info('[TRACE] Creating new page...');
  this.page = await this.browser.newPage();
  logger.info('[TRACE] Setting viewport to 1280x1024...');
  await this.page.setViewport({ width: 1280, height: 1024 });
  logger.info(`[TRACE] Setting default timeout to ${config.puppeteer.defaultTimeout}ms`);
  await this.page.setDefaultTimeout(config.puppeteer.defaultTimeout);
  // Set realistic User-Agent
  await this.page.setUserAgent('Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36');
  // Capture console logs with more detail
  this.page.on('console', msg => {
    const type = msg.type();
    const text = msg.text();
    // Only forward errors/warnings plus anything auth-related, to keep logs usable.
    if (type === 'error' || type === 'warn' || text.includes('auth') || text.includes('login')) {
      logger.info(`[BROWSER ${type.toUpperCase()}] ${text}`);
    }
  });
  // Capture all requests/responses for auth debugging
  this.page.on('request', request => {
    const url = request.url();
    if (url.includes('auth') || url.includes('login') || url.includes('session') || url.includes('bessa.app/api')) {
      logger.info(`[NETWORK REQ] ${request.method()} ${url}`);
    }
  });
  this.page.on('response', response => {
    const url = response.url();
    if (url.includes('auth') || url.includes('login') || url.includes('session') || url.includes('bessa.app/api')) {
      const status = response.status();
      logger.info(`[NETWORK RES] ${status} ${url}`);
      if (status >= 400) {
        logger.warn(`[NETWORK ERR] ${status} for ${url}`);
      }
    }
  });
  // Capture failed requests
  this.page.on('requestfailed', request => {
    const url = request.url();
    const error = request.failure()?.errorText;
    logger.warn(`[NETWORK FAILURE] ${url} - ${error}`);
  });
  logger.success('[TRACE] Browser initialized successfully');
}
/**
* Save a screenshot for debugging
*/
// Capture a PNG of the current page for post-mortem debugging.
// Screenshots go to $ANTIGRAVITY_ARTIFACTS_DIR when set, otherwise to a
// local ./screenshots directory (created on demand). Returns the saved file
// path, or null when no page exists or saving fails.
private async saveScreenshot(name: string): Promise<string | null> {
  if (!this.page) return null;
  try {
    const stamp = new Date().toISOString().replace(/[:.]/g, '-');
    const artifactsDir = process.env.ANTIGRAVITY_ARTIFACTS_DIR;
    const targetDir = artifactsDir || path.join(process.cwd(), 'screenshots');
    const target = path.join(targetDir, `${name}_${stamp}.png`);
    if (!artifactsDir) {
      // The local fallback directory may not exist yet.
      const fsp = await import('fs/promises');
      await fsp.mkdir(targetDir, { recursive: true });
    }
    await this.page.screenshot({ path: target });
    logger.info(`[TRACE] Screenshot saved to: ${target}`);
    return target;
  } catch (error) {
    logger.error(`[TRACE] Failed to save screenshot: ${error}`);
    return null;
  }
}
/**
* Close browser
*/
async close(): Promise<void> {
logger.info('[TRACE] Closing browser...');
if (this.browser) {
await this.browser.close();
logger.success('[TRACE] Browser closed');
} else {
logger.warn('[TRACE] Browser was already null, nothing to close');
}
}
/**
* Navigate to Bessa and handle cookie consent
*/
private async navigateAndAcceptCookies(): Promise<void> {
  if (!this.page) throw new Error('Page not initialized');
  logger.info(`[TRACE] Navigating to ${URLS.BASE}...`);
  logger.info('[TRACE] Waiting for networkidle2...');
  await this.page.goto(URLS.BASE, { waitUntil: 'networkidle2' });
  const currentUrl = this.page.url();
  logger.success(`[TRACE] Navigation complete. Current URL: ${currentUrl}`);
  // Accept cookies if banner is present
  logger.info(`[TRACE] Looking for cookie banner (selector: ${SELECTORS.COOKIE_ACCEPT_ALL})...`);
  logger.info('[TRACE] Timeout: 5000ms');
  try {
    await this.page.waitForSelector(SELECTORS.COOKIE_ACCEPT_ALL, { timeout: 5000 });
    logger.success('[TRACE] Cookie banner found!');
    logger.info('[TRACE] Clicking "Accept all" button...');
    await this.page.click(SELECTORS.COOKIE_ACCEPT_ALL);
    logger.info(`[TRACE] Waiting ${config.scraper.waitAfterClick}ms after click...`);
    await this.wait(config.scraper.waitAfterClick);
    logger.success('[TRACE] Cookies accepted successfully');
  } catch (error) {
    // A missing banner is the normal case on repeat visits; timeout is not an error.
    logger.info('[TRACE] No cookie banner found (timeout reached)');
  }
  logger.info(`[TRACE] Current URL after cookie handling: ${this.page.url()}`);
}
/**
* Helper to reliably fill an input and trigger validation events
*/
private async fillInput(selector: string, value: string): Promise<void> {
  if (!this.page) return;
  await this.page.waitForSelector(selector);
  await this.page.focus(selector);
  // Clear field first
  await this.page.evaluate((sel) => {
    const el = document.querySelector(sel) as HTMLInputElement;
    if (el) el.value = '';
  }, selector);
  // Human-like typing delay; some frameworks ignore instant value assignment.
  await this.page.type(selector, value, { delay: 50 });
  // Trigger validation events for Angular/React/etc.
  await this.page.evaluate((sel) => {
    const el = document.querySelector(sel) as HTMLInputElement;
    if (el) {
      el.dispatchEvent(new Event('input', { bubbles: true }));
      el.dispatchEvent(new Event('change', { bubbles: true }));
      el.dispatchEvent(new Event('blur', { bubbles: true }));
    }
  }, selector);
}
/**
* Check if the user is currently logged in based on page content
*/
// Heuristically decide whether the session is authenticated, based on
// text markers in the page body or the presence of an account menu button.
private async isLoggedIn(): Promise<boolean> {
  if (!this.page) return false;
  return await this.page.evaluate(() => {
    const text = document.body.innerText;
    const markers = ['Log Out', 'Abmelden', 'Mein Konto'];
    if (markers.some(marker => text.includes(marker))) {
      return true;
    }
    return !!document.querySelector('button[mat-menu-item]');
  });
}
/**
* Perform login
*/
private async login(): Promise<void> {
  if (!this.page) throw new Error('Page not initialized');
  logger.info('[TRACE] ===== LOGIN FLOW START =====');
  logger.info(`[TRACE] Current URL before login: ${this.page.url()}`);
  // Detect if already logged in
  if (await this.isLoggedIn()) {
    logger.success('[TRACE] Already logged in detected! Skipping login modal flow.');
    await this.navigateToDaySelection();
    return;
  }
  // Step 1: open the pre-order flow, which triggers the login modal.
  logger.info(`[TRACE] Waiting for Pre-order menu button (selector: ${SELECTORS.PREORDER_MENU_BUTTON})...`);
  await this.page.waitForSelector(SELECTORS.PREORDER_MENU_BUTTON);
  logger.success('[TRACE] Pre-order menu button found!');
  logger.info('[TRACE] Clicking Pre-order menu button...');
  await this.page.click(SELECTORS.PREORDER_MENU_BUTTON);
  logger.success('[TRACE] Click executed');
  logger.info(`[TRACE] Waiting ${config.scraper.waitAfterClick}ms for modal to appear...`);
  await this.wait(config.scraper.waitAfterClick);
  logger.info(`[TRACE] Current URL after button click: ${this.page.url()}`);
  // Step 2: fill the login form inside the modal.
  logger.info('[TRACE] ----- LOGIN FORM FILLING START -----');
  logger.info(`[TRACE] Waiting for modal container (selector: ${SELECTORS.LOGIN_MODAL_CONTAINER})...`);
  // NOTE(review): the log claims a 30000ms timeout but waitForSelector below is
  // called without an explicit timeout (uses the page default) — TODO reconcile.
  logger.info('[TRACE] Timeout: 30000ms');
  try {
    await this.page.waitForSelector(SELECTORS.LOGIN_MODAL_CONTAINER);
    logger.success('[TRACE] Modal container found!');
  } catch (error: any) {
    // Dump a screenshot plus page text so the failure can be diagnosed offline.
    logger.error(`[TRACE] Modal container NOT found! Error: ${error.message}`);
    await this.saveScreenshot('failed_login_modal');
    const bodyText = await this.page.evaluate(() => document.body.innerText);
    logger.info(`[TRACE] Page body text (first 500 chars): ${bodyText.substring(0, 500)}`);
    throw error;
  }
  logger.info('[TRACE] Typing access code...');
  await this.fillInput(SELECTORS.LOGIN_ACCESS_CODE, config.credentials.employeeNumber);
  // Verify value
  const accessCodeValue = await this.page.$eval(SELECTORS.LOGIN_ACCESS_CODE, (el: any) => el.value);
  if (accessCodeValue !== config.credentials.employeeNumber) {
    logger.warn(`[TRACE] Access code value verification failed!`);
  }
  logger.success(`[TRACE] Access code entered`);
  await this.saveScreenshot('after_access_code_input');
  logger.info(`[TRACE] Waiting for password field (selector: ${SELECTORS.LOGIN_PASSWORD})...`);
  await this.page.waitForSelector(SELECTORS.LOGIN_PASSWORD);
  logger.success('[TRACE] Password field found!');
  logger.info('[TRACE] Typing password...');
  await this.fillInput(SELECTORS.LOGIN_PASSWORD, config.credentials.password);
  // Verify value
  const passwordValue = await this.page.$eval(SELECTORS.LOGIN_PASSWORD, (el: any) => el.value);
  if (passwordValue !== config.credentials.password) {
    logger.warn('[TRACE] Password value verification failed!');
  }
  logger.success('[TRACE] Password entered');
  await this.saveScreenshot('after_password_input');
  logger.info(`[TRACE] Checking for error messages before clicking login button...`);
  const errorMessage = await this.page.evaluate((selector) => {
    const errorElement = document.querySelector(selector);
    return errorElement ? (errorElement as HTMLElement).innerText : null;
  }, SELECTORS.LOGIN_ERROR_MESSAGE);
  if (errorMessage) {
    logger.warn(`[TRACE] Found error message before login click: "${errorMessage}". Attempting to proceed anyway.`);
  } else {
    logger.info('[TRACE] No error messages found.');
  }
  // Step 3: submit via three redundant triggers (Enter key, page.click,
  // DOM click) because the button may be disabled by client-side validation.
  logger.info(`[TRACE] Clicking login button and pressing Enter: ${SELECTORS.LOGIN_SUBMIT}...`);
  try {
    await this.page.waitForSelector(SELECTORS.LOGIN_SUBMIT, { timeout: 10000 });
    // Check if button is disabled (Angular validation might have failed)
    const btnState = await this.page.$eval(SELECTORS.LOGIN_SUBMIT, (el: any) => ({
      disabled: el.disabled,
      text: el.innerText,
      classes: el.className
    }));
    logger.info(`[TRACE] Button state: disabled=${btnState.disabled}, classes="${btnState.classes}"`);
    if (btnState.disabled) {
      logger.warn(`[TRACE] Login button is DISABLED! Forcing direct click via evaluate and Enter key...`);
    }
    // Strategy: Focus password and press Enter + click button
    await this.page.focus(SELECTORS.LOGIN_PASSWORD);
    await this.page.keyboard.press('Enter');
    // Allow a small gap
    await this.wait(500);
    // Perform single click via page.click
    await this.page.click(SELECTORS.LOGIN_SUBMIT, { delay: 100 });
    // Fallback: trigger click via evaluate as well if Enter didn't work
    await this.page.evaluate((selector) => {
      const btn = document.querySelector(selector) as HTMLButtonElement;
      if (btn) btn.click();
    }, SELECTORS.LOGIN_SUBMIT);
    logger.success('[TRACE] Login triggers executed (Enter + Click)');
  } catch (error) {
    logger.error(`[TRACE] Failed to interact with login button: ${error}`);
    await this.saveScreenshot('failed_login_button_interaction');
    logger.info('[TRACE] Attempting "Enter" key final fallback on password field...');
    await this.page.focus(SELECTORS.LOGIN_PASSWORD);
    await this.page.keyboard.press('Enter');
  }
  logger.info(`[TRACE] Waiting ${config.scraper.waitAfterNavigation * 2}ms for transition... (increased for stability)`);
  await this.wait(config.scraper.waitAfterNavigation * 2);
  // Step 4: verify the transition; if it failed, fall back to a page refresh.
  // Transition check & Refresh Strategy
  // NOTE(review): isAtDaySelection is written in several branches but never
  // read afterwards — candidate for removal.
  let isAtDaySelection = false;
  try {
    // Check for dialog or redirect
    await this.page.waitForSelector(SELECTORS.DAY_SELECTION_DIALOG, { timeout: 15000 });
    isAtDaySelection = true;
    logger.success('[TRACE] Day selection dialog appeared directly after login');
  } catch (e) {
    logger.warn('[TRACE] Day selection dialog not found after login. Investigating state...');
    await this.saveScreenshot('login_stuck_before_action');
    // Check if login modal is still present
    const isModalStillThere = await this.page.evaluate((sel1, sel2) => {
      return !!document.querySelector(sel1) || !!document.querySelector(sel2);
    }, SELECTORS.LOGIN_MODAL_CONTAINER, SELECTORS.LOGIN_ACCESS_CODE);
    if (isModalStillThere) {
      logger.warn('[TRACE] Login modal or fields are STILL present! Submit might have failed silently.');
      // Check for error messages specifically
      const postLoginError = await this.page.evaluate((selector) => {
        const el = document.querySelector(selector);
        return el ? (el as HTMLElement).innerText : null;
      }, SELECTORS.LOGIN_ERROR_MESSAGE);
      if (postLoginError) {
        logger.error(`[TRACE] Login failed with error message: "${postLoginError}"`);
        throw new Error(`Login failed on page: ${postLoginError}`);
      }
    }
    // Strategy: Check if we are at least logged in now (even if modal is weird)
    if (await this.isLoggedIn()) {
      logger.success('[TRACE] Detected as logged in after wait! Navigating to day selection.');
      await this.navigateToDaySelection();
      isAtDaySelection = true;
      return;
    }
    logger.warn('[TRACE] Not logged in and dialog missing. Applying Refresh Strategy...');
    // User's suggestion: Refresh the page and try again
    logger.info(`[TRACE] Refreshing page by navigating to ${URLS.BASE}...`);
    await this.page.goto(URLS.BASE, { waitUntil: 'networkidle2' });
    // Per user feedback: We must click the button again to see if the session is picked up
    logger.info('[TRACE] Refresh done. Clicking Pre-order button to trigger session check...');
    await this.navigateToDaySelection();
    // Re-verify login status
    if (await this.isLoggedIn()) {
      logger.success('[TRACE] Refresh confirmed: We are logged in! Day selection should be open.');
      isAtDaySelection = true;
    } else {
      logger.error('[TRACE] Refresh failed: Still not logged in. Login might have truly failed.');
      await this.saveScreenshot('login_failed_after_refresh');
      throw new Error('Login failed: Not logged in even after refresh.');
    }
  }
  logger.info(`[TRACE] Current URL after login attempt: ${this.page.url()}`);
  logger.success('[TRACE] ===== LOGIN FLOW ATTEMPT COMPLETE =====');
}
/**
* Common logic to click the pre-order button and wait for the dialog
*/
private async navigateToDaySelection(): Promise<void> {
  if (!this.page) throw new Error('Page not initialized');
  logger.info(`[TRACE] Clicking Pre-order menu button (selector: ${SELECTORS.PREORDER_MENU_BUTTON})...`);
  await this.page.waitForSelector(SELECTORS.PREORDER_MENU_BUTTON);
  await this.page.click(SELECTORS.PREORDER_MENU_BUTTON);
  logger.success('[TRACE] Pre-order menu button clicked');
  // Fixed delay instead of waiting for the dialog selector; callers verify the result.
  logger.info(`[TRACE] Waiting ${config.scraper.waitAfterNavigation}ms for transition to dialog...`);
  await this.wait(config.scraper.waitAfterNavigation);
}
// ISO-8601 week number for the given date (defaults to "now").
// Uses the nearest-Thursday rule: a date belongs to the ISO week that
// contains that week's Thursday.
private getWeekNumber(date: Date = new Date()): number {
  const utc = new Date(Date.UTC(date.getFullYear(), date.getMonth(), date.getDate()));
  // Map Sunday (0) to 7 so Monday counts as the first day of the week.
  const isoDay = utc.getUTCDay() || 7;
  // Shift to the Thursday of the current ISO week.
  utc.setUTCDate(utc.getUTCDate() + 4 - isoDay);
  const jan1 = new Date(Date.UTC(utc.getUTCFullYear(), 0, 1));
  const msPerDay = 86400000;
  return Math.ceil(((utc.getTime() - jan1.getTime()) / msPerDay + 1) / 7);
}
/**
* Extract current week number and year from the page
*/
private async extractWeekInfo(): Promise<{ year: number; weekNumber: number }> {
  if (!this.page) throw new Error('Page not initialized');
  logger.info('[TRACE] Extracting week information from page...');
  try {
    await this.page.waitForSelector(SELECTORS.WEEK_HEADER, { timeout: 10000 });
  } catch (e) {
    // Missing header is tolerable: the regex below also scans title and body.
    logger.warn('[TRACE] Week header selector not found, attempting anyway...');
  }
  const { weekText, headerTitle, bodyText } = await this.page.evaluate((selInfo) => {
    const h = document.querySelector(selInfo);
    return {
      weekText: h?.textContent || '',
      headerTitle: document.title || '',
      bodyText: document.body.innerText || ''
    };
  }, SELECTORS.WEEK_HEADER);
  logger.info(`[TRACE] Week header text: "${weekText}" (Title: "${headerTitle}")`);
  // Parse "CW 6", "KW 6", "Week 6", "Woche 6"
  // Try multiple sources: Header element, Page title, and Page body as a fallback
  const cwMatch = weekText.match(/(?:CW|KW|Week|Woche|W)\s*(\d+)/i) ||
    headerTitle.match(/(?:CW|KW|Week|Woche|W)\s*(\d+)/i) ||
    bodyText.match(/(?:CW|KW|Week|Woche|W)\s*(\d+)/i);
  // Fall back to the computed ISO week when the page shows no week label.
  const weekNumber = cwMatch ? parseInt(cwMatch[1]) : this.getWeekNumber();
  logger.info(`[TRACE] Parsed week number: ${weekNumber}`);
  // Get current year
  // NOTE(review): around New Year the displayed week may belong to the other
  // calendar year than new Date().getFullYear() — TODO confirm acceptable.
  const year = new Date().getFullYear();
  logger.info(`[TRACE] Using year: ${year}`);
  logger.success(`[TRACE] Detected week: ${year}-W${weekNumber}`);
  return { year, weekNumber };
}
/**
* Extract Authentication Token from LocalStorage
*/
// Read the Bessa auth token out of the Akita state blob the web app
// persists under the "AkitaStores" LocalStorage key.
// @throws when the page is missing, the store is absent, or no token is set.
private async getAuthToken(): Promise<string> {
  if (!this.page) throw new Error('Page not initialized');
  const token = await this.page.evaluate(() => {
    const raw = localStorage.getItem('AkitaStores');
    if (!raw) return null;
    try {
      return JSON.parse(raw).auth?.token;
    } catch (e) {
      return null;
    }
  });
  if (!token) {
    throw new Error('Authentication token not found in LocalStorage (AkitaStores)');
  }
  return token;
}
/**
* Fetch menu for a specific date using the Bessa API
*/
private async fetchMenuForDate(token: string, date: string, weekday: string): Promise<DayMenu> {
  if (!this.page) throw new Error('Page not initialized');
  // Venue/menu IDs are fixed for this deployment.
  const venueId = 591;
  const menuId = 7; // "Bestellung" / configured menu ID
  const apiUrl = `${URLS.API_BASE}/venues/${venueId}/menu/${menuId}/${date}/`;
  // Execute fetch inside the browser context
  // (keeps the request on the page's origin/session rather than Node's).
  const responseData = await this.page.evaluate(async (url, authToken) => {
    try {
      const res = await fetch(url, {
        headers: {
          'Authorization': `Token ${authToken}`,
          'Accept': 'application/json'
        }
      });
      if (!res.ok) {
        return { error: `Status ${res.status}: ${res.statusText}` };
      }
      return await res.json();
    } catch (e: any) {
      return { error: e.toString() };
    }
  }, apiUrl, token);
  if (responseData.error) {
    // 404 might just mean no menu for that day (e.g. weekend)
    logger.warn(`[TRACE] API fetch warning for ${date}: ${responseData.error}`);
    // Return empty menu for that day
    return { date, weekday, items: [] };
  }
  const apiResponse = responseData as ApiMenuResponse;
  const items: MenuItem[] = [];
  // Parse results
  if (apiResponse.results && apiResponse.results.length > 0) {
    for (const group of apiResponse.results) {
      if (group.items) {
        for (const item of group.items) {
          items.push({
            // Compose a stable per-day ID from date and API item ID.
            id: `${date}_${item.id}`,
            name: item.name,
            description: item.description,
            price: parseFloat(item.price),
            available: parseInt(item.available_amount) > 0 || item.available_amount === null // Null sometimes acts as available
          });
        }
      }
    }
  }
  return {
    date,
    weekday,
    items
  };
}
/**
* Scrape menu for the current week using API
*/
/**
* Scrape menus starting from current week until no more data is found (min 2 weeks)
*/
async scrapeMenus(saveToFile: boolean = true): Promise<WeeklyMenu> {
await this.init();
try {
logger.info('[TRACE] ========== SCRAPING MENUS (MULTI-WEEK) ==========');
// 1. Navigate and Login (uses env credentials by default if not previously logged in in this instance,
// but here we assume login() called before or we use default)
// Ideally scrapeMenus should rely on session but current flow navigates again.
// Let's ensure we don't double login if already on page?
// Actually, for the /api/login flow, we will call scraper.login(user, pass) explicitly.
// But scrapeMenus calls this.login() internally. We should refactor scrapeMenus to accept credentials or skip login if already done.
// BETTER: separate init/login from scraping loop.
// For now, to keep it compatible:
if (!this.page) await this.init(); // Re-init if needed
// If we are calling from /api/login, we might have already logged in.
// But scrapeMenus does full flow.
// Let's modify scrapeMenus to ONLY scrape and assume login is handled IF we want to separate them.
// However, existing `main()` calls `scrapeMenus()` which does everything.
// Let's just make it check if we are already logged in? Hard with Puppeteer statelessness efficiently.
// Simpler approach for this task:
// Let scrapeMenus take optional credentials too? No, keep it simple.
// We will let the server call `login` then `scrapeMenus`.
// BUT `scrapeMenus` calls `login`. We need to remove `login` from `scrapeMenus` or make it conditional.
// Let's make `scrapeMenus` NOT strictly require login if we are already there, OR just call `login` again (idempotent-ish).
// Actually, `login` types in credentials.
// Modification: scrapeMenus will use whatever credentials are set or env.
// But wait, `scrapeMenus` has `await this.login()` hardcoded.
// We will invoke `navigateAndAcceptCookies` and `login` ONLY if we are seemingly not ready.
// Or easier: Make a new method `scrapeMenusRaw` or just reuse `scrapeMenus` but pass a flag `skipLogin`.
await this.navigateAndAcceptCookies();
// We'll rely on the caller to have called login if they wanted custom creds,
// OR we call login here with defaults if not.
// ISSUE: `this.login()` uses env vars if no args.
// If the server calls `await scraper.login(u,p)`, then calls `await scraper.scrapeMenus()`,
// `scrapeMenus` will call `this.login()` (no args) -> uses env vars -> OVERWRITES the user session with default admin!
// FIX: Add `skipLogin` param.
} catch (e) { /*...*/ }
return { days: [], weekNumber: 0, year: 0, scrapedAt: '' }; // stub
}
// RETHINKING: The tool requires REPLACE.
// Let's change signature to `async scrapeMenus(saveToFile: boolean = true, skipLogin: boolean = false): Promise<WeeklyMenu>`
/**
 * Scrape menus starting from Monday of the current week, walking forward day
 * by day (weekends skipped) until an empty day is found after at least two
 * weeks of coverage, with a hard cap of MAX_WEEKS.
 *
 * @param saveToFile NOTE(review): accepted but never read inside this method —
 *                   kept for interface compatibility; TODO confirm intent.
 * @param skipLogin  when true, assumes the caller already navigated and
 *                   authenticated this scraper instance.
 */
async scrapeMenus(saveToFile: boolean = true, skipLogin: boolean = false): Promise<WeeklyMenu> {
  await this.init();
  try {
    logger.info('[TRACE] ========== SCRAPING MENUS (MULTI-WEEK) ==========');
    if (!skipLogin) {
      await this.navigateAndAcceptCookies();
      await this.login();
    }
    // 2. Get Auth Token
    logger.info('[TRACE] Retrieving Auth Token...');
    const token = await this.getAuthToken();
    logger.success(`[TRACE] Auth token retrieved: ${token.substring(0, 10)}...`);
    // 3. Determine Start Date (Monday of current week)
    const today = new Date();
    const weekInfo = await this.extractWeekInfo();
    const currentDay = today.getUTCDay() || 7; // Sunday is 0 -> 7
    const startMonday = new Date(today);
    startMonday.setUTCDate(today.getUTCDate() - currentDay + 1);
    // Reset time to avoid drift
    startMonday.setUTCHours(0, 0, 0, 0);
    logger.info(`[TRACE] Starting scrape from ${startMonday.toISOString().split('T')[0]}`);
    const days: DayMenu[] = [];
    const dayNames = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'];
    const MAX_WEEKS = 8;
    const MIN_DAYS_COVERAGE = 14;
    let currentDate = new Date(startMonday);
    let daysProcessed = 0;
    while (true) {
      // Safety break to prevent infinite loops (approx 8 weeks)
      if (daysProcessed > MAX_WEEKS * 7) {
        logger.warn('[TRACE] Reached maximum week limit (safety break). Stopping.');
        break;
      }
      const dayOfWeek = currentDate.getUTCDay(); // 0=Sun, 1=Mon, ..., 6=Sat
      const isWeekend = dayOfWeek === 0 || dayOfWeek === 6;
      if (isWeekend) {
        // Skip weekends, just advance
        currentDate.setUTCDate(currentDate.getUTCDate() + 1);
        daysProcessed++;
        continue;
      }
      const dateStr = currentDate.toISOString().split('T')[0];
      // Map 0(Sun)->6, 1(Mon)->0, etc.
      const dayNameIndex = (dayOfWeek + 6) % 7;
      const weekday = dayNames[dayNameIndex];
      logger.info(`[TRACE] Fetching menu for ${weekday} (${dateStr})...`);
      try {
        const dayMenu = await this.fetchMenuForDate(token, dateStr, weekday);
        if (dayMenu.items.length === 0) {
          // Check if we have covered enough time
          // Calculate difference in days from start
          const diffTime = Math.abs(currentDate.getTime() - startMonday.getTime());
          const daysCovered = Math.ceil(diffTime / (1000 * 60 * 60 * 24));
          if (daysCovered >= MIN_DAYS_COVERAGE) {
            logger.info(`[TRACE] Stopping scraping at ${dateStr} (No items found and > 2 weeks covered)`);
            break;
          } else {
            logger.info(`[TRACE] Empty menu at ${dateStr}, but only covered ${daysCovered} days. Continuing...`);
            // Record empty weekdays to preserve the day structure.
            days.push(dayMenu);
          }
        } else {
          days.push(dayMenu);
        }
      } catch (error) {
        // A single failed day does not abort the run; record it as empty.
        logger.error(`[TRACE] Failed to fetch menu for ${dateStr}: ${error}`);
        days.push({ date: dateStr, weekday, items: [] });
      }
      // Advance to next day
      currentDate.setUTCDate(currentDate.getUTCDate() + 1);
      daysProcessed++;
      // Be nice to the API
      await this.wait(150);
    }
    const resultMenu: WeeklyMenu = {
      year: weekInfo.year,
      weekNumber: weekInfo.weekNumber,
      days: days,
      scrapedAt: new Date().toISOString()
    };
    logger.success(`[TRACE] Scraping completed. Found ${days.length} days of menus.`);
    return resultMenu;
  } catch (error) {
    logger.error(`[TRACE] Scraping failed: ${error}`);
    await this.saveScreenshot('scrape_error');
    throw error;
  } finally {
    // Always release the browser, even on failure.
    await this.close();
  }
}
/**
* Helper to wait
*/
// Promise-based sleep; resolves after `ms` milliseconds.
private async wait(ms: number): Promise<void> {
  await new Promise<void>(resolve => setTimeout(resolve, ms));
}
}

View File

@@ -1,46 +0,0 @@
// CSS Selectors based on screen documentation
// NOTE: `::-p-text(...)` is Puppeteer's text pseudo-selector, not standard CSS.
export const SELECTORS = {
  // Cookie Consent (Screen #1)
  COOKIE_ACCEPT_ALL: 'button::-p-text(Accept all), button::-p-text(Alle akzeptieren), button::-p-text(Zustimmen), .cmpboxbtnyes',
  // Landing Page (Screen #2)
  PREORDER_MENU_BUTTON: 'button.order-type-button.button.high::-p-text(Pre-order menu)',
  // Login Modal (Screen #5)
  LOGIN_MODAL_CONTAINER: 'app-access-code-dialog, app-access-code-login',
  LOGIN_ACCESS_CODE: 'input[formcontrolname="accessCode"]',
  LOGIN_PASSWORD: 'input[formcontrolname="password"]',
  LOGIN_SUBMIT: 'button[bessa-button].base-button.button',
  LOGIN_ERROR_MESSAGE: '.mat-error, .toast-error, app-message, .error, [class*="error"]',
  // Day Selection Dialog (Screen #10, #11)
  DAY_SELECTION_DIALOG: 'app-canteen-dialog, app-bessa-select-day-dialog',
  WEEK_CHEVRON_NEXT: 'button[aria-label="next week"]',
  WEEK_CHEVRON_PREV: 'button[aria-label="previous week"]',
  WEEK_HEADER: 'h2, [class*="week-header"], .calendar-week',
  DAY_ROW: 'app-date-line',
  ADD_ORDER_LINK: 'div.clickable',
  // Menu Overview (Screen #14)
  MENU_CARD: '.menu-card, .dish-card, app-bessa-menu-card, [class*="menu-item"]',
  MENU_ITEM_TITLE: 'h3, .menu-title, [class*="title"]',
  MENU_ITEM_DESCRIPTION: 'p, .menu-description, [class*="description"]',
  MENU_ITEM_PRICE: '.price, [class*="price"], .amount',
  MENU_ITEM_ADD_BUTTON: 'button::-p-text(+), button.add-button',
  NOT_AVAILABLE_TEXT: '::-p-text(Not available), ::-p-text(Nicht verfügbar)',
  // Week/Date Display
  CALENDAR_WEEK_DISPLAY: '[class*="week"]',
  DATE_DISPLAY: '[class*="date"]',
  // Close/Back buttons
  CLOSE_BUTTON: 'button[aria-label="close"], .close-btn, button.close, mat-icon::-p-text(close)',
  BACK_BUTTON: 'button[aria-label="back"], .back-arrow, button.back, mat-icon::-p-text(arrow_back)',
  DONE_BUTTON: 'button::-p-text(Done), button::-p-text(Fertig)',
} as const;
// Base URLs for the Bessa web app and its REST API.
export const URLS = {
  BASE: 'https://web.bessa.app/knapp-kantine',
  API_BASE: 'https://api.bessa.app/v1',
} as const;

View File

@@ -1,748 +0,0 @@
import express from 'express';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import fs from 'fs/promises';
import { config } from './config.js';
import { logger } from './utils/logger.js';
import { FlagStore, FlaggedItem } from './storage/flag-store.js';
import { SseManager } from './services/sse-manager.js';
import { PollingOrchestrator } from './services/polling-orchestrator.js';
const app = express();
const port = 3005; // Changed from 3000 to avoid conflicts
// Get current directory
// (ESM has no __dirname/__filename; derive them from import.meta.url.)
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Project root (assuming we are in src/)
const projectRoot = join(__dirname, '..');
const publicDir = join(projectRoot, 'public');
const dataFile = join(projectRoot, 'data', 'menus.json');
const dataDir = join(projectRoot, 'data');
// Initialize Services
const flagStore = new FlagStore(dataDir);
const sseManager = new SseManager();
// Orchestrator coordinates polling and pushes updates to SSE subscribers.
const orchestrator = new PollingOrchestrator(flagStore, sseManager);
// Bessa API Constants
const BESSA_API_BASE = 'https://api.bessa.app/v1';
// SECURITY(review): the guest token below was committed in plain text.
// Allow overriding both secrets via the environment; the literals remain
// only as backward-compatible fallbacks so existing deployments keep working.
const GUEST_TOKEN = process.env.BESSA_GUEST_TOKEN ?? 'c3418725e95a9f90e3645cbc846b4d67c7c66131';
// Version string the Bessa backend expects in the X-Client-Version header.
const CLIENT_VERSION = process.env.BESSA_CLIENT_VERSION ?? '1.7.0_prod/2026-01-26';
// Middleware
app.use(express.json()); // parse JSON request bodies for all API routes
// API Routes
// POST /api/login — authenticate an employee against the Bessa API.
// Body: { employeeId, password }. On success returns { key, firstName?, lastName? }.
app.post('/api/login', async (req, res) => {
  const { employeeId, password } = req.body;
  if (!employeeId || !password) {
    return res.status(400).json({ error: 'Employee ID and password are required' });
  }
  // Transform employee ID to email format as expected by Bessa API
  const email = `knapp-${employeeId}@bessa.app`;
  try {
    // The login endpoint itself requires the shared guest token.
    const response = await fetch(`${BESSA_API_BASE}/auth/login/`, {
      method: 'POST',
      headers: {
        'Authorization': `Token ${GUEST_TOKEN}`,
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      },
      body: JSON.stringify({ email, password })
    });
    const data = await response.json();
    if (response.ok) {
      const token = data.key;
      // Fetch user details to get First Name
      try {
        const userResponse = await fetch(`${BESSA_API_BASE}/auth/user/`, {
          headers: {
            'Authorization': `Token ${token}`,
            'Accept': 'application/json',
            'X-Client-Version': CLIENT_VERSION
          }
        });
        if (userResponse.ok) {
          const userData = await userResponse.json();
          res.json({
            key: token,
            firstName: userData.first_name,
            lastName: userData.last_name
          });
        } else {
          // Fallback if user fetch fails
          // (login still succeeds — the name is cosmetic).
          logger.warn(`Failed to fetch user details for ${email}`);
          res.json({ key: token });
        }
      } catch (userError) {
        logger.error(`Error fetching user details: ${userError}`);
        res.json({ key: token });
      }
    } else {
      logger.error(`Login failed for ${email}: ${JSON.stringify(data)}`);
      res.status(response.status).json({ error: data.non_field_errors?.[0] || 'Login failed' });
    }
  } catch (error) {
    logger.error(`Login error: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});
// GET /api/me — proxy the Bessa "current user" endpoint.
// Forwards the caller's Authorization header unchanged and maps the
// upstream snake_case fields to camelCase.
app.get('/api/me', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) {
    return res.status(401).json({ error: 'No token provided' });
  }
  try {
    const upstream = await fetch(`${BESSA_API_BASE}/auth/user/`, {
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      }
    });
    if (!upstream.ok) {
      return res.status(upstream.status).json({ error: 'Failed to fetch user details' });
    }
    const user = await upstream.json();
    res.json({
      firstName: user.first_name,
      lastName: user.last_name,
      email: user.email
    });
  } catch (error) {
    logger.error(`Error fetching user details: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});
// GET /api/user/orders — fetch per-date order data from the Bessa menu/dates
// endpoint and reshape it into { dateOrders: [{ date, orders }] } for the UI.
app.get('/api/user/orders', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) {
    return res.status(401).json({ error: 'Authorization header is required' });
  }
  try {
    const response = await fetch(`${BESSA_API_BASE}/venues/591/menu/dates/`, {
      method: 'GET',
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      }
    });
    const data = await response.json();
    if (response.ok) {
      // Return full order details per date for orderMap building
      const dateOrders = data.results.map((day: any) => ({
        date: day.date,
        orders: (day.orders || []).map((order: any) => ({
          id: order.id,
          state: order.order_state,
          total: order.total,
          items: (order.items || []).map((item: any) => ({
            name: item.name,
            articleId: item.article,
            price: item.price
          }))
        }))
      }));
      res.json({ dateOrders });
    } else {
      logger.error(`Failed to fetch orders: ${JSON.stringify(data)}`);
      res.status(response.status).json({ error: 'Failed to fetch orders' });
    }
  } catch (error) {
    logger.error(`Orders fetch error: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});
// Place an order via Bessa API
// Place an order via Bessa API
// POST /api/order — body: { date, articleId, name, price, vat?, description? }.
// Builds the exact payload shape the Bessa backend expects; do not reorder or
// rename fields without re-verifying against captured traffic.
app.post('/api/order', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) {
    return res.status(401).json({ error: 'Authorization header is required' });
  }
  const { date, articleId, name, price, vat, description } = req.body;
  if (!date || !articleId || !name || price === undefined) {
    return res.status(400).json({ error: 'Missing required fields: date, articleId, name, price' });
  }
  try {
    // Fetch user details for customer object
    const userResponse = await fetch(`${BESSA_API_BASE}/auth/user/`, {
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      }
    });
    if (!userResponse.ok) {
      return res.status(401).json({ error: 'Failed to fetch user details' });
    }
    const userData = await userResponse.json();
    const now = new Date().toISOString();
    // Construct order payload matching exact Bessa format
    const orderPayload = {
      uuid: crypto.randomUUID(),
      created: now,
      updated: now,
      order_type: 7,
      items: [
        {
          article: articleId,
          course_group: null,
          modifiers: [],
          uuid: crypto.randomUUID(),
          name: name,
          description: description || '',
          price: String(parseFloat(price)),
          amount: 1,
          vat: vat || '10.00',
          comment: ''
        }
      ],
      table: null,
      total: parseFloat(price),
      tip: 0,
      currency: 'EUR',
      venue: 591,
      states: [],
      order_state: 1,
      // Fixed 10:00 UTC timestamp on the requested day.
      date: `${date}T10:00:00.000Z`,
      payment_method: 'payroll',
      customer: {
        first_name: userData.first_name,
        last_name: userData.last_name,
        email: userData.email,
        newsletter: false
      },
      preorder: false,
      delivery_fee: 0,
      cash_box_table_name: null,
      take_away: false
    };
    logger.info(`Placing order: ${name} for ${date} (article ${articleId})`);
    const orderResponse = await fetch(`${BESSA_API_BASE}/user/orders/`, {
      method: 'POST',
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      },
      body: JSON.stringify(orderPayload)
    });
    const orderData = await orderResponse.json();
    if (orderResponse.ok || orderResponse.status === 201) {
      logger.success(`Order placed: ID ${orderData.id} (${name})`);
      res.status(201).json({
        orderId: orderData.id,
        hashId: orderData.hash_id,
        state: orderData.order_state,
        total: orderData.total
      });
    } else {
      logger.error(`Order failed: ${JSON.stringify(orderData)}`);
      res.status(orderResponse.status).json({
        error: orderData.detail || orderData.non_field_errors?.[0] || 'Order failed'
      });
    }
  } catch (error) {
    logger.error(`Order error: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});
// Cancel an order via Bessa API
// Body: { orderId }. Proxies a PATCH to Bessa's cancel endpoint.
app.post('/api/order/cancel', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) {
    return res.status(401).json({ error: 'Authorization header is required' });
  }
  const { orderId } = req.body;
  if (!orderId) {
    return res.status(400).json({ error: 'Missing required field: orderId' });
  }
  try {
    logger.info(`Cancelling order: ${orderId}`);
    const bessaHeaders = {
      'Authorization': authHeader,
      'Accept': 'application/json',
      'Content-Type': 'application/json',
      'X-Client-Version': CLIENT_VERSION
    };
    const cancelResponse = await fetch(`${BESSA_API_BASE}/user/orders/${orderId}/cancel/`, {
      method: 'PATCH',
      headers: bessaHeaders,
      body: JSON.stringify({})
    });
    const cancelData = await cancelResponse.json();
    if (!cancelResponse.ok) {
      logger.error(`Cancel failed for ${orderId}: ${JSON.stringify(cancelData)}`);
      return res.status(cancelResponse.status).json({
        error: cancelData.detail || 'Cancellation failed'
      });
    }
    logger.success(`Order ${orderId} cancelled`);
    res.json({ success: true, orderId: cancelData.order_id, state: cancelData.state });
  } catch (error) {
    logger.error(`Cancel error: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});
// --- Flagging & Polling API ---
// List flags. Returns ALL active flags with no per-user filtering.
// NOTE(review): flagging is conceptually per-user, but there is no robust
// user session here (only the Bessa token), so the client is trusted to
// filter what it displays; returning everything also keeps debugging simple.
app.get('/api/flags', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) return res.status(401).json({ error: 'Unauthorized' });
  const flags = await flagStore.getAllFlags();
  res.json(flags);
});
// Create a flag for a menu item. 409 when the id already exists.
app.post('/api/flags', async (req, res) => {
  const { id, date, articleId, userId, cutoff, description, name } = req.body;
  const hasRequired = id && date && articleId && userId && cutoff;
  if (!hasRequired) {
    return res.status(400).json({ error: 'Missing required fields' });
  }
  const item: FlaggedItem = {
    id, date, articleId, userId, cutoff, description, name,
    createdAt: new Date().toISOString()
  };
  if (await flagStore.addFlag(item)) {
    logger.info(`Flag added: ${name} (${id}) by ${userId}`);
    res.status(201).json({ success: true });
  } else {
    res.status(409).json({ error: 'Flag already exists' });
  }
});
// Remove a flag by id. 404 when it does not exist.
app.delete('/api/flags/:id', async (req, res) => {
  const flagId = req.params.id;
  const removed = await flagStore.removeFlag(flagId);
  if (!removed) {
    return res.status(404).json({ error: 'Flag not found' });
  }
  logger.info(`Flag removed: ${flagId}`);
  res.json({ success: true });
});
// Check one article's availability on one date using the user's token.
// URL pattern: /venues/591/menu/7/{date}/ (menu ID 7 assumed standard).
app.post('/api/check-item', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) return res.status(401).json({ error: 'Unauthorized' });
  const { date, articleId } = req.body;
  if (!date || !articleId) return res.status(400).json({ error: 'Missing date or articleId' });
  try {
    const response = await fetch(`${BESSA_API_BASE}/venues/591/menu/7/${date}/`, {
      method: 'GET',
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      }
    });
    if (!response.ok) {
      // A 404 usually just means no menu exists for that day.
      if (response.status === 404) {
        return res.json({ available: false, error: 'Menu not found' });
      }
      return res.status(response.status).json({ error: 'Failed to fetch menu from Bessa' });
    }
    const data = await response.json();
    const groups = data.results || [];
    // Flatten all menu groups, then look the article up by either key.
    const allItems = groups.flatMap((g: any) => g.items || []);
    const foundItem = allItems.find((i: any) => i.article === articleId || i.id === articleId);
    if (!foundItem) {
      logger.warn(`Check Item ${articleId} on ${date}: Item not found in menu`);
      return res.json({ available: false, error: 'Item not found in menu' });
    }
    // Available when stock tracking is off, or tracked stock is positive.
    const isUnlimited = foundItem.amount_tracking === false;
    const hasStock = parseInt(foundItem.available_amount) > 0;
    const isAvailable = isUnlimited || hasStock;
    logger.info(`Check Item ${articleId} on ${date}: ${isAvailable ? 'AVAILABLE' : 'SOLD OUT'}`);
    res.json({ available: isAvailable });
  } catch (error) {
    logger.error(`Check Item Error: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});
// Receive a client's poll result and hand it to the orchestrator.
// NOTE(review): unlike the other endpoints this one performs no auth
// check — confirm whether that is intentional.
app.post('/api/poll-result', async (req, res) => {
  const { flagId, isAvailable } = req.body;
  if (!flagId) {
    return res.status(400).json({ error: 'Missing flagId' });
  }
  await orchestrator.handlePollResult(flagId, isAvailable);
  res.json({ success: true });
});
// SSE endpoint: register the connection and announce its id.
app.get('/api/events', (req, res) => {
  // Standard SSE handshake headers.
  const sseHeaders: Array<[string, string]> = [
    ['Content-Type', 'text/event-stream'],
    ['Cache-Control', 'no-cache'],
    ['Connection', 'keep-alive']
  ];
  for (const [headerName, headerValue] of sseHeaders) {
    res.setHeader(headerName, headerValue);
  }
  res.flushHeaders();
  const clientId = sseManager.addClient(res);
  // Initial ping so the client learns its id immediately.
  sseManager.sendToClient(clientId, 'connected', { clientId });
});
// SSE endpoint for menu refresh progress.
// Streams progress events while it: (1) fetches the list of available
// menu dates, (2) fetches per-day menu details, (3) groups days into ISO
// weeks, and (4) smart-merges with the existing on-disk data before
// writing `dataFile`. Ends with an `event: done` (or `event: error`) frame.
app.get('/api/refresh-progress', async (req, res) => {
  logger.info(`[DEBUG] Received SSE request with token query: ${req.query.token ? 'YES' : 'NO'}`);
  // Get token from query parameter (EventSource doesn't support custom headers);
  // fall back to the guest token when none is supplied.
  const token = req.query.token as string;
  const authHeader = token ? `Token ${token}` : `Token ${GUEST_TOKEN}`;
  // Set headers for SSE
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.flushHeaders();
  // Each progress update is one unnamed SSE data frame.
  const sendProgress = (data: any) => {
    res.write(`data: ${JSON.stringify(data)}\n\n`);
  };
  try {
    sendProgress({ step: 'start', message: 'Hole verfügbare Daten...', current: 0, total: 100 });
    // 1. Fetch available dates
    logger.info('Fetching available dates...');
    const datesResponse = await fetch(`${BESSA_API_BASE}/venues/591/menu/dates/`, {
      method: 'GET',
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      }
    });
    if (!datesResponse.ok) {
      throw new Error(`Failed to fetch dates: ${datesResponse.status}`);
    }
    const datesData = await datesResponse.json();
    let availableDates = datesData.results || [];
    // Filter for future dates or recent past (last 7 days + future),
    // comparing ISO date strings lexicographically.
    const today = new Date();
    today.setDate(today.getDate() - 7);
    const cutoffDate = today.toISOString().split('T')[0];
    availableDates = availableDates
      .filter((d: any) => d.date >= cutoffDate)
      .sort((a: any, b: any) => a.date.localeCompare(b.date));
    // Limit to reasonable amount (e.g. next 30 days)
    availableDates = availableDates.slice(0, 30);
    const totalDates = availableDates.length;
    sendProgress({ step: 'dates_fetched', message: `${totalDates} Tage gefunden. Lade Details...`, current: 0, total: totalDates });
    // 2. Fetch details for each date (sequentially, with progress frames)
    const allDays: any[] = [];
    let completed = 0;
    for (const dateObj of availableDates) {
      const dateStr = dateObj.date;
      sendProgress({
        step: 'fetching_details',
        message: `Lade Menü für ${dateStr}...`,
        current: completed + 1,
        total: totalDates
      });
      try {
        // Menu ID 7 seems to be the standard lunch menu
        const menuDetailUrl = `${BESSA_API_BASE}/venues/591/menu/7/${dateStr}/`;
        const detailResponse = await fetch(menuDetailUrl, {
          method: 'GET',
          headers: {
            'Authorization': authHeader,
            'Accept': 'application/json',
            'X-Client-Version': CLIENT_VERSION
          }
        });
        if (detailResponse.ok) {
          const detailData = await detailResponse.json();
          // Upstream structure: { results: [ { name: "Menü", items: [...] } ] }
          const menuGroups = detailData.results || [];
          let dayItems: any[] = [];
          for (const group of menuGroups) {
            if (group.items && Array.isArray(group.items)) {
              dayItems = dayItems.concat(group.items);
            }
          }
          // Days without any items are dropped entirely.
          if (dayItems.length > 0) {
            allDays.push({
              date: dateStr,
              // Use the dateObj to get weekday if possible, or compute it
              menu_items: dayItems,
              orders: dateObj.orders || [] // Store orders for cutoff extraction
            });
          }
        }
      } catch (err) {
        // A single failed day is logged and skipped; the refresh continues.
        logger.error(`Failed to fetch details for ${dateStr}: ${err}`);
      }
      completed++;
      // Small delay between upstream requests
      await new Promise(resolve => setTimeout(resolve, 100));
    }
    // 3. Group by Week
    const weeksMap = new Map<string, any>();
    // Helper to get the ISO week-numbering year (year of the week's Thursday)
    const getWeekYear = (d: Date) => {
      const date = new Date(d.getTime());
      date.setDate(date.getDate() + 3 - (date.getDay() + 6) % 7);
      return date.getFullYear();
    };
    for (const day of allDays) {
      const date = new Date(day.date);
      const weekNum = getISOWeek(date);
      const year = getWeekYear(date);
      const key = `${year}-${weekNum}`;
      if (!weeksMap.has(key)) {
        weeksMap.set(key, {
          year: year,
          weekNumber: weekNum,
          days: []
        });
      }
      const weekday = date.toLocaleDateString('en-US', { weekday: 'long' });
      // Calculate order cutoff time: same day at 10:00 AM local time
      const orderCutoffDate = new Date(day.date);
      orderCutoffDate.setHours(10, 0, 0, 0); // 10:00 AM local time
      const orderCutoff = orderCutoffDate.toISOString();
      weeksMap.get(key).days.push({
        date: day.date,
        weekday: weekday,
        orderCutoff: orderCutoff, // Add the cutoff time
        items: (day.menu_items || []).map((item: any) => {
          // Available = tracking disabled, or tracked stock > 0.
          const isUnlimited = item.amount_tracking === false;
          const hasStock = parseInt(item.available_amount) > 0;
          return {
            id: `${day.date}_${item.id}`,
            name: item.name || 'Unknown',
            description: item.description || '',
            price: parseFloat(item.price) || 0,
            available: isUnlimited || hasStock,
            availableAmount: parseInt(item.available_amount) || 0,
            amountTracking: item.amount_tracking !== false // Default to true if missing
          };
        })
      });
    }
    const menuData = {
      weeks: Array.from(weeksMap.values()).sort((a: any, b: any) => {
        if (a.year !== b.year) return a.year - b.year;
        return a.weekNumber - b.weekNumber;
      }),
      scrapedAt: new Date().toISOString()
    };
    // 4. Smart merge: preserve current-week data on refresh, purge older weeks
    sendProgress({ step: 'saving', message: 'Daten werden gespeichert...', current: totalDates, total: totalDates });
    const currentISOWeek = getISOWeek(new Date());
    const currentISOYear = getWeekYear(new Date());
    let finalData = menuData;
    try {
      const existingRaw = await fs.readFile(dataFile, 'utf-8');
      const existingData = JSON.parse(existingRaw);
      if (existingData.weeks && Array.isArray(existingData.weeks)) {
        const mergedWeeks = new Map<string, any>();
        // Add all fresh weeks first (fresh data wins on conflicts)
        for (const week of menuData.weeks) {
          mergedWeeks.set(`${week.year}-${week.weekNumber}`, week);
        }
        // Merge existing current-week data (preserve days not in fresh data)
        for (const existingWeek of existingData.weeks) {
          const key = `${existingWeek.year}-${existingWeek.weekNumber}`;
          const isCurrentOrFuture =
            existingWeek.year > currentISOYear ||
            (existingWeek.year === currentISOYear && existingWeek.weekNumber >= currentISOWeek);
          if (!isCurrentOrFuture) {
            // Older week: purge (don't keep)
            continue;
          }
          if (mergedWeeks.has(key)) {
            // Merge: keep existing days that aren't in fresh data
            const freshWeek = mergedWeeks.get(key);
            const freshDates = new Set(freshWeek.days.map((d: any) => d.date));
            for (const existDay of existingWeek.days) {
              if (!freshDates.has(existDay.date)) {
                freshWeek.days.push(existDay);
              }
            }
            // Sort days by date
            freshWeek.days.sort((a: any, b: any) => a.date.localeCompare(b.date));
          } else {
            // Future week not in fresh data: keep as-is
            mergedWeeks.set(key, existingWeek);
          }
        }
        finalData = {
          weeks: Array.from(mergedWeeks.values()).sort((a: any, b: any) => {
            if (a.year !== b.year) return a.year - b.year;
            return a.weekNumber - b.weekNumber;
          }),
          scrapedAt: new Date().toISOString()
        };
      }
    } catch (e) {
      // No existing data or parse error — use fresh data as-is
      logger.info('No existing menu data to merge, using fresh data.');
    }
    await fs.writeFile(dataFile, JSON.stringify(finalData, null, 2), 'utf-8');
    sendProgress({ step: 'complete', message: 'Aktualisierung abgeschlossen!', current: totalDates, total: totalDates });
    res.write('event: done\ndata: {}\n\n');
    res.end();
  } catch (error) {
    logger.error(`Refresh error: ${error}`);
    sendProgress({ step: 'error', message: `Fehler: ${error}`, current: 0, total: 100 });
    res.write('event: error\ndata: {}\n\n');
    res.end();
  }
});
// Helper: ISO-8601 week number (1–53) for a given calendar date.
function getISOWeek(date: Date): number {
  // Work in UTC to avoid local-timezone drift.
  const probe = new Date(Date.UTC(date.getFullYear(), date.getMonth(), date.getDate()));
  // Shift to the Thursday of this ISO week (ISO days: Mon=1 … Sun=7).
  const isoDay = probe.getUTCDay() === 0 ? 7 : probe.getUTCDay();
  probe.setUTCDate(probe.getUTCDate() + 4 - isoDay);
  // Week number = 1-based count of 7-day spans since Jan 1 of the Thursday's year.
  const jan1 = new Date(Date.UTC(probe.getUTCFullYear(), 0, 1));
  const daysSinceJan1 = (probe.getTime() - jan1.getTime()) / 86400000;
  return Math.ceil((daysSinceJan1 + 1) / 7);
}
// Serve the cached menu database as raw JSON.
app.get('/api/menus', async (req, res) => {
  try {
    // Read directly and let a missing file throw; the previous separate
    // fs.access() pre-check was redundant and a TOCTOU race.
    const data = await fs.readFile(dataFile, 'utf-8');
    res.header('Content-Type', 'application/json');
    res.send(data);
  } catch (error) {
    logger.error(`Failed to read menu data: ${error}`);
    // Empty fallback now matches the shape this server actually writes
    // ({ weeks, scrapedAt }) instead of the stale { days, updated } shape.
    res.json({ weeks: [], scrapedAt: null });
  }
});
// Serve Static Files
app.use(express.static(publicDir));
// SPA fallback: any unmatched GET gets index.html. Non-GET requests now
// receive an explicit 404 — previously they got no response and hung.
app.use((req, res) => {
  if (req.method === 'GET') {
    res.sendFile(join(publicDir, 'index.html'));
  } else {
    res.status(404).end();
  }
});
// Start Server and the background polling loop.
app.listen(port, () => {
  logger.success(`Web Interface running at http://localhost:${port}`);
  logger.info(`Serving static files from: ${publicDir}`);
  // Start Polling Orchestrator (begins distributing flag polls to SSE clients)
  orchestrator.start();
});

View File

@@ -1,92 +0,0 @@
import { FlagStore, FlaggedItem } from '../storage/flag-store.js';
import { SseManager } from './sse-manager.js';
import { logger } from '../utils/logger.js';
/**
 * Coordinates periodic availability polling of flagged menu items by
 * fanning tasks out to connected SSE clients (round-robin) and
 * broadcasting to everyone when a flagged item becomes available.
 */
export class PollingOrchestrator {
  private flagStore: FlagStore;
  private sseManager: SseManager;
  private intervalId: NodeJS.Timeout | null = null;
  private intervalMs: number = 5 * 60 * 1000; // poll cycle: 5 minutes

  constructor(flagStore: FlagStore, sseManager: SseManager) {
    this.flagStore = flagStore;
    this.sseManager = sseManager;
  }

  /** Begin the polling loop (idempotent). Runs one cycle immediately. */
  start(): void {
    if (this.intervalId) return;
    logger.info('Starting Polling Orchestrator...');
    this.distributeTasks();
    this.intervalId = setInterval(() => this.distributeTasks(), this.intervalMs);
  }

  /** Stop the polling loop if it is running. */
  stop(): void {
    if (!this.intervalId) return;
    clearInterval(this.intervalId);
    this.intervalId = null;
  }

  /**
   * One polling cycle: prune expired flags, then assign each remaining
   * flag to a connected client in round-robin order.
   */
  async distributeTasks(): Promise<void> {
    const clientIds = this.sseManager.getAllClientIds();
    if (clientIds.length === 0) {
      logger.info('No active clients to poll. Skipping cycle.');
      return;
    }
    // Drop flags whose cutoff has passed before handing out work.
    await this.flagStore.pruneExpiredFlags();
    const flags = await this.flagStore.getAllFlags();
    if (flags.length === 0) return;
    logger.info(`Distributing ${flags.length} polling tasks across ${clientIds.length} clients.`);
    flags.forEach((flag, index) => {
      // Round-robin assignment across the connected clients.
      const clientId = clientIds[index % clientIds.length];
      this.sseManager.sendToClient(clientId, 'poll_request', {
        flagId: flag.id,
        date: flag.date,
        articleId: flag.articleId,
        name: flag.name
      });
      logger.info(`Assigned flag ${flag.id} to client ${clientId}`);
    });
  }

  /**
   * Handle a client's poll result. When an item became available, notify
   * ALL clients. The flag is intentionally kept afterwards: the user can
   * remove it manually, or it expires at its cutoff.
   */
  async handlePollResult(flagId: string, isAvailable: boolean): Promise<void> {
    if (!isAvailable) return;
    const flag = await this.flagStore.getFlag(flagId);
    if (!flag) return; // flag may have been removed in the meantime
    logger.success(`Item ${flag.name} (${flag.id}) is now AVAILABLE! Broadcasting...`);
    this.sseManager.broadcast('item_update', {
      flagId: flag.id,
      status: 'available',
      name: flag.name,
      date: flag.date,
      articleId: flag.articleId
    });
  }
}

View File

@@ -1,69 +0,0 @@
import { Response } from 'express';
import { logger } from '../utils/logger.js';
import { randomUUID } from 'crypto';
// A single open SSE connection, keyed by a generated id.
interface ConnectedClient {
  id: string; // random UUID assigned at connect time
  res: Response; // the live Express response stream to write events to
  userId?: string; // If authenticated
}
/**
 * Tracks connected Server-Sent-Events clients and provides targeted
 * send, broadcast, and simple load-balancing helpers.
 */
export class SseManager {
  private clients: Map<string, ConnectedClient> = new Map();

  /** Register a new SSE connection; returns its generated client id. */
  addClient(res: Response, userId?: string): string {
    const id = randomUUID();
    this.clients.set(id, { id, res, userId });
    // Forget the client automatically when the connection closes.
    res.on('close', () => {
      this.clients.delete(id);
      logger.info(`SSE Client disconnected: ${id}`);
    });
    logger.info(`SSE Client connected: ${id} (User: ${userId || 'Guest'})`);
    return id;
  }

  /** Explicitly end and forget a client connection. */
  removeClient(id: string): void {
    const client = this.clients.get(id);
    if (!client) return;
    client.res.end();
    this.clients.delete(id);
  }

  /** Send one event to one client; returns false when the id is unknown. */
  sendToClient(clientId: string, event: string, data: any): boolean {
    const client = this.clients.get(clientId);
    if (!client) return false;
    this.writeEvent(client, event, data);
    return true;
  }

  /** Send one event to every connected client. */
  broadcast(event: string, data: any): void {
    for (const client of this.clients.values()) {
      this.writeEvent(client, event, data);
    }
  }

  /** Number of currently connected clients. */
  getActiveClientCount(): number {
    return this.clients.size;
  }

  /** Ids of all currently connected clients. */
  getAllClientIds(): string[] {
    return [...this.clients.keys()];
  }

  /** Pick a uniformly random client id, or null when none are connected. */
  getRandomClient(): string | null {
    const ids = [...this.clients.keys()];
    if (ids.length === 0) return null;
    return ids[Math.floor(Math.random() * ids.length)];
  }

  /** Serialize one SSE frame (event name + JSON data) onto a client stream. */
  private writeEvent(client: ConnectedClient, event: string, data: any): void {
    client.res.write(`event: ${event}\n`);
    client.res.write(`data: ${JSON.stringify(data)}\n\n`);
  }
}

View File

@@ -1,109 +0,0 @@
import fs from 'fs/promises';
import { join } from 'path';
import { logger } from '../utils/logger.js';
// A user-flagged menu item being watched for availability.
export interface FlaggedItem {
  id: string; // composite key: date_articleId
  date: string; // menu date, ISO format (YYYY-MM-DD)
  articleId: number; // Bessa article id
  userId: string; // Who flagged it (first user)
  cutoff: string; // ISO date string; flag expires after this time
  createdAt: string; // ISO timestamp of flag creation
  description?: string; // Optional: Store name/desc for notifications
  name?: string; // optional display name for notifications
}
/**
 * JSON-file-backed store for flagged menu items. Flags are cached in
 * memory and persisted to <dataDir>/flags.json on every mutation.
 */
export class FlagStore {
  private filePath: string;
  private flags: Map<string, FlaggedItem> = new Map();
  private initialized: boolean = false;

  constructor(dataDir: string) {
    this.filePath = join(dataDir, 'flags.json');
  }

  /** Lazily load flags from disk; safe to call multiple times. */
  async init(): Promise<void> {
    if (this.initialized) return;
    try {
      const raw = await fs.readFile(this.filePath, 'utf-8');
      const parsed = JSON.parse(raw);
      if (Array.isArray(parsed)) {
        for (const item of parsed as FlaggedItem[]) {
          this.flags.set(item.id, item);
        }
      }
      logger.info(`Loaded ${this.flags.size} flags from storage.`);
    } catch (error) {
      // Missing or unreadable file: start with an empty store.
      logger.info('No existing flags found, starting with empty store.');
    }
    this.initialized = true;
  }

  /** Persist the current flag set to disk (best effort, errors logged). */
  async save(): Promise<void> {
    try {
      const snapshot = [...this.flags.values()];
      await fs.writeFile(this.filePath, JSON.stringify(snapshot, null, 2), 'utf-8');
    } catch (error) {
      logger.error(`Failed to save flags: ${error}`);
    }
  }

  /** Add a flag; returns false when the id already exists. */
  async addFlag(item: FlaggedItem): Promise<boolean> {
    if (!this.initialized) await this.init();
    if (this.flags.has(item.id)) return false;
    this.flags.set(item.id, item);
    await this.save();
    return true;
  }

  /** Remove a flag by id; returns true when something was removed. */
  async removeFlag(id: string): Promise<boolean> {
    if (!this.initialized) await this.init();
    const existed = this.flags.delete(id);
    if (existed) await this.save();
    return existed;
  }

  /** Look up a single flag by id. */
  async getFlag(id: string): Promise<FlaggedItem | undefined> {
    if (!this.initialized) await this.init();
    return this.flags.get(id);
  }

  /** All flags currently in the store. */
  async getAllFlags(): Promise<FlaggedItem[]> {
    if (!this.initialized) await this.init();
    return [...this.flags.values()];
  }

  /** Delete every flag whose cutoff is in the past; returns the count. */
  async pruneExpiredFlags(): Promise<number> {
    if (!this.initialized) await this.init();
    const now = new Date();
    let pruned = 0;
    for (const [id, item] of this.flags) {
      if (now > new Date(item.cutoff)) {
        this.flags.delete(id);
        pruned++;
      }
    }
    if (pruned > 0) {
      await this.save();
      logger.info(`Pruned ${pruned} expired flags.`);
    }
    return pruned;
  }
}

View File

@@ -1,87 +0,0 @@
import fs from 'fs/promises';
import path from 'path';
import { MenuDatabase, WeeklyMenu } from '../types.js';
import { config } from '../config.js';
import { logger } from '../utils/logger.js';
/**
 * Load the existing menu database from its JSON file.
 * A missing file (ENOENT) yields a fresh empty database; any other
 * read/parse failure is propagated to the caller.
 */
export async function loadMenus(): Promise<MenuDatabase> {
  try {
    const raw = await fs.readFile(config.storage.menuFile, 'utf-8');
    return JSON.parse(raw);
  } catch (error: any) {
    if (error.code !== 'ENOENT') throw error;
    logger.info('No existing menus.json found, creating new database');
    return {
      lastUpdated: new Date().toISOString(),
      weeks: [],
    };
  }
}
/**
 * Save the menu database to its JSON file (pretty-printed), refreshing
 * the lastUpdated timestamp and creating the data directory if needed.
 */
export async function saveMenus(db: MenuDatabase): Promise<void> {
  await fs.mkdir(config.storage.dataDir, { recursive: true });
  db.lastUpdated = new Date().toISOString();
  const serialized = JSON.stringify(db, null, 2);
  await fs.writeFile(config.storage.menuFile, serialized, 'utf-8');
  logger.success(`Saved menu database to ${config.storage.menuFile}`);
}
/**
 * Merge a new weekly menu into the database: replaces the matching
 * (year, weekNumber) entry when present, otherwise appends it, then
 * keeps the weeks sorted chronologically and persists the result.
 */
export async function mergeWeeklyMenu(weeklyMenu: WeeklyMenu): Promise<void> {
  const db = await loadMenus();
  const existingIndex = db.weeks.findIndex(
    w => w.year === weeklyMenu.year && w.weekNumber === weeklyMenu.weekNumber
  );
  if (existingIndex >= 0) {
    db.weeks[existingIndex] = weeklyMenu;
    logger.info(`Updated existing week ${weeklyMenu.year}-W${weeklyMenu.weekNumber}`);
  } else {
    db.weeks.push(weeklyMenu);
    logger.info(`Added new week ${weeklyMenu.year}-W${weeklyMenu.weekNumber}`);
  }
  // Chronological order: by year, then ISO week number.
  db.weeks.sort((a, b) => (a.year - b.year) || (a.weekNumber - b.weekNumber));
  await saveMenus(db);
}
/**
 * Get the menu for a specific ISO date ("YYYY-MM-DD"), or null when no
 * stored week contains that day.
 */
export async function getMenuForDate(date: string): Promise<import('../types.js').DayMenu | null> {
  const db = await loadMenus();
  for (const week of db.weeks) {
    const match = week.days.find(d => d.date === date);
    if (match) return match;
  }
  return null;
}

View File

@@ -1,27 +0,0 @@
// TypeScript type definitions for menu data structures
export interface MenuItem {
  id: string; // e.g., "2026-02-03_M1_Herzhaftes" (date + menu ID for uniqueness)
  name: string; // e.g., "M1 Herzhaftes"
  description: string; // ingredients plus allergen codes
  price: number; // e.g. 5.50
  available: boolean; // false when the item is sold out
}
export interface DayMenu {
  date: string; // ISO format: "2026-02-03"
  weekday: string; // "Monday", "Tuesday", ...
  items: MenuItem[];
}
export interface WeeklyMenu {
  year: number; // 2026 (year before week for readability)
  weekNumber: number; // ISO week number, e.g. 6
  days: DayMenu[];
  scrapedAt: string; // ISO timestamp of when this week was scraped
}
export interface MenuDatabase {
  lastUpdated: string; // ISO timestamp of the last save
  weeks: WeeklyMenu[];
}

View File

@@ -1,25 +0,0 @@
// Simple console logger utility with level markers per method.
export const logger = {
  info: (message: string, ...args: any[]) => {
    console.log(`[INFO] ${message}`, ...args);
  },
  // Fixed: success messages previously carried no marker at all (a bare
  // `${message}` template, likely a stripped emoji), making them
  // indistinguishable from plain output; mark them like ❌/⚠️ below.
  success: (message: string, ...args: any[]) => {
    console.log(`✅ ${message}`, ...args);
  },
  error: (message: string, ...args: any[]) => {
    console.error(`❌ [ERROR] ${message}`, ...args);
  },
  // Only emitted when the DEBUG environment variable is set.
  debug: (message: string, ...args: any[]) => {
    if (process.env.DEBUG) {
      console.log(`[DEBUG] ${message}`, ...args);
    }
  },
  warn: (message: string, ...args: any[]) => {
    console.warn(`⚠️ [WARN] ${message}`, ...args);
  },
};

View File

@@ -1,85 +0,0 @@
% Total % Received % Xferd Average Speed Time Time Time Current
Dload Upload Total Spent Left Speed
0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0* Host api.bessa.app:443 was resolved.
* IPv6: 2a05:d014:6d3:a901:93fa:c6a7:8fc7:f92d
* IPv4: 3.124.122.174
* Trying 3.124.122.174:443...
* Connected to api.bessa.app (3.124.122.174) port 443
* ALPN: curl offers h2,http/1.1
} [5 bytes data]
* TLSv1.3 (OUT), TLS handshake, Client hello (1):
} [512 bytes data]
* CAfile: /etc/ssl/certs/ca-certificates.crt
* CApath: /etc/ssl/certs
0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0{ [5 bytes data]
* TLSv1.3 (IN), TLS handshake, Server hello (2):
{ [122 bytes data]
* TLSv1.3 (IN), TLS handshake, Encrypted Extensions (8):
{ [19 bytes data]
* TLSv1.3 (IN), TLS handshake, Certificate (11):
{ [2925 bytes data]
* TLSv1.3 (IN), TLS handshake, CERT verify (15):
{ [520 bytes data]
* TLSv1.3 (IN), TLS handshake, Finished (20):
{ [52 bytes data]
* TLSv1.3 (OUT), TLS change cipher, Change cipher spec (1):
} [1 bytes data]
* TLSv1.3 (OUT), TLS handshake, Finished (20):
} [52 bytes data]
* SSL connection using TLSv1.3 / TLS_AES_256_GCM_SHA384 / X25519 / RSASSA-PSS
* ALPN: server accepted h2
* Server certificate:
* subject: CN=api.bessa.app
* start date: Dec 18 10:19:21 2025 GMT
* expire date: Mar 18 10:19:20 2026 GMT
* subjectAltName: host "api.bessa.app" matched cert's "api.bessa.app"
* issuer: C=US; O=Let's Encrypt; CN=R12
* SSL certificate verify ok.
* Certificate level 0: Public key type RSA (4096/152 Bits/secBits), signed using sha256WithRSAEncryption
* Certificate level 1: Public key type RSA (2048/112 Bits/secBits), signed using sha256WithRSAEncryption
* Certificate level 2: Public key type RSA (4096/152 Bits/secBits), signed using sha256WithRSAEncryption
} [5 bytes data]
* using HTTP/2
* [HTTP/2] [1] OPENED stream for https://api.bessa.app/v1/venues/591/menu/7/2026-02-10/
* [HTTP/2] [1] [:method: GET]
* [HTTP/2] [1] [:scheme: https]
* [HTTP/2] [1] [:authority: api.bessa.app]
* [HTTP/2] [1] [:path: /v1/venues/591/menu/7/2026-02-10/]
* [HTTP/2] [1] [user-agent: curl/8.5.0]
* [HTTP/2] [1] [accept: */*]
* [HTTP/2] [1] [authorization: Token c3418725e95a9f90e3645cbc846b4d67c7c66131]
* [HTTP/2] [1] [x-client-version: 1.7.0_prod/2026-01-26]
} [5 bytes data]
> GET /v1/venues/591/menu/7/2026-02-10/ HTTP/2
> Host: api.bessa.app
> User-Agent: curl/8.5.0
> Accept: */*
> Authorization: Token c3418725e95a9f90e3645cbc846b4d67c7c66131
> X-Client-Version: 1.7.0_prod/2026-01-26
>
{ [5 bytes data]
* TLSv1.3 (IN), TLS handshake, Newsession Ticket (4):
{ [57 bytes data]
* TLSv1.3 (IN), TLS handshake, Newsession Ticket (4):
{ [57 bytes data]
* old SSL session ID is stale, removing
{ [5 bytes data]
< HTTP/2 200
< server: nginx/1.27.3
< date: Mon, 09 Feb 2026 08:54:00 GMT
< content-type: application/json
< content-length: 4685
< vary: Accept, Accept-Language, origin
< allow: GET, HEAD, OPTIONS
< x-frame-options: DENY
< content-language: en-us
< strict-transport-security: max-age=16000000
< x-content-type-options: nosniff
< referrer-policy: same-origin
< cross-origin-opener-policy: same-origin
< strict-transport-security: max-age=31536000
<
{ [4685 bytes data]
{"next":null,"previous":null,"results":[{"id":11452,"items":[{"id":178714,"price":"5.50","name":"M1 Herzhaftes","description":"Erdäpfelrahmsuppe L,M,C,G, potato soup, Acht Schätze (Rind) L,M,C,F,O, mit Eiernudeln A,C, Kuchen A,C,G,H,O, Silced beef asia with egg noodles","allergens":"","nutrition_facts":"","hash_id":"ar_7BzGM","image_thumbnail":null,"modifiers":[],"created":"2025-12-04T13:02:07.266780Z","updated":"2026-02-03T13:15:20.845052Z","deleted":null,"uuid":"4064152d-6e82-4f4d-9bea-0936ab307b1c","image":null,"number":"dfaa35f6-4604-4e14-9f86-1862a05a5881","type":1,"vat":"10.00","points":"0.00","minimum_amount":"0","available_amount":"136","amount_tracking":true,"dispenser_id":"","course_group":null},{"id":178728,"price":"5.50","name":"M3 Süß","description":"Erdäpfelrahmsuppe L,M,C,G, potato soup, Milchrahmstrudel mit Vanillesauce A,C,G, milk cream strudel with vanilla sauce","allergens":"","nutrition_facts":"","hash_id":"ar_KalR9","image_thumbnail":null,"modifiers":[],"created":"2025-12-04T13:02:07.268455Z","updated":"2026-02-03T13:14:37.728274Z","deleted":null,"uuid":"12b75bc5-88ab-4df3-ad9b-00b864729ce3","image":null,"number":"a33ce321-1943-430e-a435-b83f87ff1080","type":1,"vat":"10.00","points":"0.00","minimum_amount":"0","available_amount":"34","amount_tracking":true,"dispenser_id":"","course_group":null},{"id":178735,"price":"5.50","name":"M4 Ginko vegan","description":"Tomaten Kokoscremesuppe, tomato coconut soup, Hirselaibchen A, mit Fisolen Karottengemüse u. 
Schnittlauchdip F,M, millet patties with fisole vegetable","allergens":"","nutrition_facts":"","hash_id":"ar_YLxwb","image_thumbnail":null,"modifiers":[],"created":"2025-12-04T13:02:07.269340Z","updated":"2026-02-06T09:35:17.671235Z","deleted":null,"uuid":"6b3017a7-c950-4428-a474-8c61a819402d","image":null,"number":"88b59af4-f3fb-45a7-ba0f-9fce88d4710f","type":1,"vat":"10.00","points":"0.00","minimum_amount":"0","available_amount":"63","amount_tracking":true,"dispenser_id":"","course_group":null},{"id":178742,"price":"5.50","name":"M5 Salat mit Gebäck","description":"Erdäpflerahmsuppe G,L,M, Salatteller mit Prosciutto und Parmesan, salad with prosciutto and parmesan","allergens":"","nutrition_facts":"","hash_id":"ar_Qydpp","image_thumbnail":null,"modifiers":[],"created":"2025-12-04T13:02:07.270215Z","updated":"2026-02-03T13:16:43.665954Z","deleted":null,"uuid":"1c1a14dd-cf4f-4355-a4ba-cb8543ae7e5a","image":null,"number":"ea309dc8-38bf-4e78-9b51-7c9a7efb2763","type":1,"vat":"10.00","points":"0.00","minimum_amount":"0","available_amount":"0","amount_tracking":true,"dispenser_id":"","course_group":null},{"id":178749,"price":"3.00","name":"M6 Suppe, kleiner Salat + Dessert D","description":"Suppe, kleiner Salat + Dessert","allergens":"","nutrition_facts":"","hash_id":"ar_Nyoxb","image_thumbnail":null,"modifiers":[],"created":"2025-12-04T13:02:07.271125Z","updated":"2026-02-03T13:17:39.906619Z","deleted":null,"uuid":"c2956eb9-6ee0-4e65-96a7-b8b0615f314c","image":null,"number":"f6d5bd6e-2e14-4930-8f27-be027fa477b2","type":1,"vat":"10.00","points":"0.00","minimum_amount":"0","available_amount":"29","amount_tracking":true,"dispenser_id":"","course_group":null},{"id":178756,"price":"4.30","name":"M7F Kleines Hauptspeisenmenü DI 2","description":"Erdäpflerahmsuppe G,L,M, potato soup, kleine Hauptspeise von Menü 
1","allergens":"","nutrition_facts":"","hash_id":"ar_BzGr7","image_thumbnail":null,"modifiers":[],"created":"2025-12-04T13:02:07.272048Z","updated":"2026-02-03T13:18:12.181876Z","deleted":null,"uuid":"6ad46891-a26b-4e22-9a34-ae1cbb225788","ima

File diff suppressed because one or more lines are too long

View File

@@ -1,27 +0,0 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "bundler",
"lib": [
"ES2022"
],
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"declaration": true,
"declarationMap": true,
"sourceMap": true
},
"include": [
"src/**/*"
],
"exclude": [
"node_modules",
"dist"
]
}