feat: complete bookmarklet implementation (v1.7.0)
@@ -1,42 +0,0 @@
// Configuration for scraper

import dotenv from 'dotenv';

dotenv.config();

export const config = {
  // Credentials from environment
  credentials: {
    employeeNumber: process.env.BESSA_EMPLOYEE_NUMBER || '',
    password: process.env.BESSA_PASSWORD || '',
  },

  // Puppeteer settings
  puppeteer: {
    headless: process.env.PUPPETEER_HEADLESS !== 'false',
    defaultTimeout: 30000,
    navigationTimeout: 60000,
  },

  // Scraper settings
  scraper: {
    waitAfterClick: 1000,
    waitAfterNavigation: 2000,
    maxRetries: 3,
  },

  // Storage
  storage: {
    dataDir: './data',
    menuFile: './data/menus.json',
  },
} as const;

// Validation
export function validateConfig(): void {
  if (!config.credentials.employeeNumber) {
    throw new Error('BESSA_EMPLOYEE_NUMBER is required in .env file');
  }
  if (!config.credentials.password) {
    throw new Error('BESSA_PASSWORD is required in .env file');
  }
}
57  src/index.ts
@@ -1,57 +0,0 @@
#!/usr/bin/env node
import { MenuScraper } from './scraper/menu-scraper.js';
import { mergeWeeklyMenu } from './storage/menu-store.js';
import { config, validateConfig } from './config.js';
import { logger } from './utils/logger.js';

/**
 * Main entry point for the scraper
 */
async function main() {
  try {
    // Validate configuration
    logger.info('Validating configuration...');
    validateConfig();

    // Initialize scraper
    const scraper = new MenuScraper();
    await scraper.init();

    try {
      // Scrape menus
      logger.info('Starting scrape of menus (multi-week)...');
      const weeklyMenu = await scraper.scrapeMenus();

      // Save to storage
      logger.info('Saving scraped data...');
      await mergeWeeklyMenu(weeklyMenu);

      // Print summary
      logger.success('\n=== Scraping Complete ===');
      logger.info(`Week: ${weeklyMenu.year}-W${weeklyMenu.weekNumber}`);
      logger.info(`Days scraped: ${weeklyMenu.days.length}`);

      for (const day of weeklyMenu.days) {
        logger.info(`  ${day.weekday}: ${day.items.length} items`);
      }

      const totalItems = weeklyMenu.days.reduce((sum, day) => sum + day.items.length, 0);
      logger.success(`Total menu items: ${totalItems}`);
    } finally {
      // Always close browser
      await scraper.close();
    }
  } catch (error) {
    logger.error('Scraping failed:', error);
    process.exit(1);
  }
}

// Run if called directly
if (import.meta.url === `file://${process.argv[1]}`) {
  main();
}

export { main };
@@ -1,51 +0,0 @@
import { logger } from '../utils/logger.js';

async function runApiTest() {
  logger.info('Starting API Test with cached token...');

  // Token from local_storage.json
  const cachedToken = 'dba7d86e83c7f462fd8af96521dea41c4facd8a5';

  // Date calculation
  const today = new Date();
  const dateStr = today.toISOString().split('T')[0];

  // Try a few dates (sometimes today has no menu if it's late or weekend)
  // But let's stick to today or tomorrow.

  const venueId = 591;
  const menuId = 7;
  const apiUrl = `https://api.bessa.app/v1/venues/${venueId}/menu/${menuId}/${dateStr}/`;

  logger.info(`Testing API call to: ${apiUrl}`);
  logger.info(`Using Token: ${cachedToken.substring(0, 10)}...`);

  try {
    const response = await fetch(apiUrl, {
      headers: {
        'Authorization': `Token ${cachedToken}`,
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36'
      }
    });

    logger.info(`Response Status: ${response.status} ${response.statusText}`);

    if (response.ok) {
      const data = await response.json();
      logger.success('API Call Successful!');
      console.log(JSON.stringify(data, null, 2));
    } else {
      logger.error('API Call Failed.');
      const text = await response.text();
      console.log('Response Body:', text);
    }
  } catch (error) {
    logger.error('Fetch failed:', error);
  }
}

runApiTest();
@@ -1,157 +0,0 @@
import puppeteer from 'puppeteer';
import fs from 'fs/promises';
import path from 'path';
import * as readline from 'readline';
import { fileURLToPath } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Ensure we have a place to save logs
const ARTIFACTS_DIR = process.env.ANTIGRAVITY_ARTIFACTS_DIR || path.join(process.cwd(), 'analysis_results');

async function ensureDir(dir: string) {
  try {
    await fs.access(dir);
  } catch {
    await fs.mkdir(dir, { recursive: true });
  }
}

async function runInteractiveAnalysis() {
  await ensureDir(ARTIFACTS_DIR);
  console.log('--- INTERACTIVE ANALYSIS TOOL ---');
  console.log('Starting Browser (Headless: FALSE)...');
  console.log('Artifacts will be saved to:', ARTIFACTS_DIR);

  const browser = await puppeteer.launch({
    headless: false, // User wants to see and interact
    defaultViewport: null, // Full window
    executablePath: '/usr/bin/chromium',
    args: [
      '--start-maximized',
      '--no-sandbox',
      '--disable-setuid-sandbox',
      '--disable-dev-shm-usage'
    ],
    devtools: true // Useful for the user to see what's happening
  });

  const page = await browser.newPage();

  // Setup Data Collection
  const networkLogs: any[] = [];
  const relevantHosts = ['bessa.app', 'web.bessa.app'];

  await page.setRequestInterception(true);

  page.on('request', (request) => {
    // Continue all requests
    request.continue();
  });

  page.on('response', async (response) => {
    const url = response.url();
    const type = response.request().resourceType();

    // Filter: We are mainly interested in XHR, Fetch, and Documents (for initial load),
    // and only from relevant hosts to avoid noise (analytics, external fonts, etc.)
    const isRelevantHost = relevantHosts.some(host => url.includes(host));
    const isRelevantType = ['xhr', 'fetch', 'document', 'script'].includes(type);

    if (isRelevantHost && isRelevantType) {
      try {
        // Try to get JSON response
        let responseBody = null;
        if (url.includes('/api/') || type === 'xhr' || type === 'fetch') {
          try {
            responseBody = await response.json();
          } catch (e) {
            // Not JSON, maybe text?
            try {
              // Limit text size
              const text = await response.text();
              responseBody = text.length > 2000 ? text.substring(0, 2000) + '...[TRUNCATED]' : text;
            } catch (e2) {
              responseBody = '[COULD NOT READ BODY]';
            }
          }
        }

        networkLogs.push({
          timestamp: new Date().toISOString(),
          method: response.request().method(),
          url: url,
          status: response.status(),
          type: type,
          requestHeaders: response.request().headers(),
          responseHeaders: response.headers(),
          body: responseBody
        });

        // Real-time feedback
        if (url.includes('/api/')) {
          console.log(`[API CAPTURED] ${response.request().method()} ${url}`);
        }
      } catch (err) {
        // Ignore errors reading response (e.g. redirects or closed)
      }
    }
  });

  // Initial navigation
  console.log('Navigating to base URL...');
  await page.goto('https://web.bessa.app/knapp-kantine', { waitUntil: 'networkidle2' });

  console.log('\n================================================================================');
  console.log('BROWSER IS OPEN. ACTION REQUIRED:');
  console.log('1. Log in manually in the browser window.');
  console.log('2. Navigate to the menu view (Day Selection -> Select Day -> Menu).');
  console.log('3. Browse around to trigger API calls.');
  console.log('\nWHEN YOU ARE DONE, PRESS [ENTER] IN THIS TERMINAL TO SAVE AND EXIT.');
  console.log('================================================================================\n');

  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });

  await new Promise<void>(resolve => {
    rl.question('Press Enter to finish analysis...', () => {
      rl.close();
      resolve();
    });
  });

  console.log('Capturing final state...');

  // 1. Save Full Page HTML
  const html = await page.content();
  await fs.writeFile(path.join(ARTIFACTS_DIR, 'final_page_state.html'), html);

  // 2. Save Cookies/Storage (for Auth Replication)
  const client = await page.target().createCDPSession();
  const cookies = await client.send('Network.getAllCookies');
  await fs.writeFile(path.join(ARTIFACTS_DIR, 'cookies.json'), JSON.stringify(cookies, null, 2));

  const localStorageData = await page.evaluate(() => {
    return JSON.stringify(localStorage);
  });
  await fs.writeFile(path.join(ARTIFACTS_DIR, 'local_storage.json'), localStorageData);

  const sessionStorageData = await page.evaluate(() => {
    return JSON.stringify(sessionStorage);
  });
  await fs.writeFile(path.join(ARTIFACTS_DIR, 'session_storage.json'), sessionStorageData);

  // 3. Save Network Logs
  await fs.writeFile(path.join(ARTIFACTS_DIR, 'network_traffic.json'), JSON.stringify(networkLogs, null, 2));

  console.log('Analysis data saved to:', ARTIFACTS_DIR);
  await browser.close();
  process.exit(0);
}

runInteractiveAnalysis().catch(console.error);
@@ -1,745 +0,0 @@
/// <reference lib="dom" />
import puppeteer, { Browser, Page } from 'puppeteer';
import { WeeklyMenu, DayMenu, MenuItem } from '../types.js';
import { SELECTORS, URLS } from './selectors.js';
import { config } from '../config.js';
import { logger } from '../utils/logger.js';
import path from 'path';
import { fileURLToPath } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

interface ApiMenuItem {
  id: number;
  name: string;
  description: string;
  price: string;
  available_amount: string;
  created: string;
  updated: string;
}

interface ApiMenuResult {
  id: number;
  items: ApiMenuItem[];
  date: string;
}

interface ApiMenuResponse {
  results: ApiMenuResult[];
}

export class MenuScraper {
  private browser: Browser | null = null;
  private page: Page | null = null;

  /**
   * Initialize browser and page
   */
  async init(): Promise<void> {
    logger.info('[TRACE] Starting browser initialization...');
    logger.info(`[TRACE] Using Chromium at: /usr/bin/chromium`);
    logger.info(`[TRACE] Headless mode: ${config.puppeteer.headless}`);

    this.browser = await puppeteer.launch({
      headless: config.puppeteer.headless,
      executablePath: '/usr/bin/chromium',
      args: [
        '--no-sandbox',
        '--disable-setuid-sandbox',
        '--disable-dev-shm-usage',
      ],
    });
    logger.info('[TRACE] Puppeteer launch completed');

    logger.info('[TRACE] Creating new page...');
    this.page = await this.browser.newPage();

    logger.info('[TRACE] Setting viewport to 1280x1024...');
    await this.page.setViewport({ width: 1280, height: 1024 });

    logger.info(`[TRACE] Setting default timeout to ${config.puppeteer.defaultTimeout}ms`);
    await this.page.setDefaultTimeout(config.puppeteer.defaultTimeout);

    // Set realistic User-Agent
    await this.page.setUserAgent('Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36');

    // Capture console logs with more detail
    this.page.on('console', msg => {
      const type = msg.type();
      const text = msg.text();
      if (type === 'error' || type === 'warn' || text.includes('auth') || text.includes('login')) {
        logger.info(`[BROWSER ${type.toUpperCase()}] ${text}`);
      }
    });

    // Capture all requests/responses for auth debugging
    this.page.on('request', request => {
      const url = request.url();
      if (url.includes('auth') || url.includes('login') || url.includes('session') || url.includes('bessa.app/api')) {
        logger.info(`[NETWORK REQ] ${request.method()} ${url}`);
      }
    });

    this.page.on('response', response => {
      const url = response.url();
      if (url.includes('auth') || url.includes('login') || url.includes('session') || url.includes('bessa.app/api')) {
        const status = response.status();
        logger.info(`[NETWORK RES] ${status} ${url}`);
        if (status >= 400) {
          logger.warn(`[NETWORK ERR] ${status} for ${url}`);
        }
      }
    });

    // Capture failed requests
    this.page.on('requestfailed', request => {
      const url = request.url();
      const error = request.failure()?.errorText;
      logger.warn(`[NETWORK FAILURE] ${url} - ${error}`);
    });

    logger.success('[TRACE] Browser initialized successfully');
  }

  /**
   * Save a screenshot for debugging
   */
  private async saveScreenshot(name: string): Promise<string | null> {
    if (!this.page) return null;
    try {
      const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
      const fileName = `${name}_${timestamp}.png`;

      // Use current session brain path or fallback to a local screenshots directory
      const brainPath = process.env.ANTIGRAVITY_ARTIFACTS_DIR || path.join(process.cwd(), 'screenshots');
      const filePath = path.join(brainPath, fileName);

      // Ensure directory exists if it's the local fallback
      if (!process.env.ANTIGRAVITY_ARTIFACTS_DIR) {
        const fs = await import('fs/promises');
        await fs.mkdir(brainPath, { recursive: true });
      }

      await this.page.screenshot({ path: filePath });
      logger.info(`[TRACE] Screenshot saved to: ${filePath}`);
      return filePath;
    } catch (error) {
      logger.error(`[TRACE] Failed to save screenshot: ${error}`);
      return null;
    }
  }

  /**
   * Close browser
   */
  async close(): Promise<void> {
    logger.info('[TRACE] Closing browser...');
    if (this.browser) {
      await this.browser.close();
      logger.success('[TRACE] Browser closed');
    } else {
      logger.warn('[TRACE] Browser was already null, nothing to close');
    }
  }

  /**
   * Navigate to Bessa and handle cookie consent
   */
  private async navigateAndAcceptCookies(): Promise<void> {
    if (!this.page) throw new Error('Page not initialized');

    logger.info(`[TRACE] Navigating to ${URLS.BASE}...`);
    logger.info('[TRACE] Waiting for networkidle2...');

    await this.page.goto(URLS.BASE, { waitUntil: 'networkidle2' });

    const currentUrl = this.page.url();
    logger.success(`[TRACE] Navigation complete. Current URL: ${currentUrl}`);

    // Accept cookies if banner is present
    logger.info(`[TRACE] Looking for cookie banner (selector: ${SELECTORS.COOKIE_ACCEPT_ALL})...`);
    logger.info('[TRACE] Timeout: 5000ms');

    try {
      await this.page.waitForSelector(SELECTORS.COOKIE_ACCEPT_ALL, { timeout: 5000 });
      logger.success('[TRACE] Cookie banner found!');

      logger.info('[TRACE] Clicking "Accept all" button...');
      await this.page.click(SELECTORS.COOKIE_ACCEPT_ALL);

      logger.info(`[TRACE] Waiting ${config.scraper.waitAfterClick}ms after click...`);
      await this.wait(config.scraper.waitAfterClick);

      logger.success('[TRACE] Cookies accepted successfully');
    } catch (error) {
      logger.info('[TRACE] No cookie banner found (timeout reached)');
    }

    logger.info(`[TRACE] Current URL after cookie handling: ${this.page.url()}`);
  }

  /**
   * Helper to reliably fill an input and trigger validation events
   */
  private async fillInput(selector: string, value: string): Promise<void> {
    if (!this.page) return;
    await this.page.waitForSelector(selector);
    await this.page.focus(selector);

    // Clear field first
    await this.page.evaluate((sel) => {
      const el = document.querySelector(sel) as HTMLInputElement;
      if (el) el.value = '';
    }, selector);

    await this.page.type(selector, value, { delay: 50 });

    // Trigger validation events for Angular/React/etc.
    await this.page.evaluate((sel) => {
      const el = document.querySelector(sel) as HTMLInputElement;
      if (el) {
        el.dispatchEvent(new Event('input', { bubbles: true }));
        el.dispatchEvent(new Event('change', { bubbles: true }));
        el.dispatchEvent(new Event('blur', { bubbles: true }));
      }
    }, selector);
  }

  /**
   * Check if the user is currently logged in based on page content
   */
  private async isLoggedIn(): Promise<boolean> {
    if (!this.page) return false;
    return await this.page.evaluate(() => {
      const bodyText = document.body.innerText;
      return bodyText.includes('Log Out') ||
        bodyText.includes('Abmelden') ||
        bodyText.includes('Mein Konto') ||
        !!document.querySelector('button[mat-menu-item]');
    });
  }

  /**
   * Perform login
   */
  private async login(): Promise<void> {
    if (!this.page) throw new Error('Page not initialized');

    logger.info('[TRACE] ===== LOGIN FLOW START =====');
    logger.info(`[TRACE] Current URL before login: ${this.page.url()}`);

    // Detect if already logged in
    if (await this.isLoggedIn()) {
      logger.success('[TRACE] Already logged in detected! Skipping login modal flow.');
      await this.navigateToDaySelection();
      return;
    }

    logger.info(`[TRACE] Waiting for Pre-order menu button (selector: ${SELECTORS.PREORDER_MENU_BUTTON})...`);
    await this.page.waitForSelector(SELECTORS.PREORDER_MENU_BUTTON);
    logger.success('[TRACE] Pre-order menu button found!');

    logger.info('[TRACE] Clicking Pre-order menu button...');
    await this.page.click(SELECTORS.PREORDER_MENU_BUTTON);
    logger.success('[TRACE] Click executed');

    logger.info(`[TRACE] Waiting ${config.scraper.waitAfterClick}ms for modal to appear...`);
    await this.wait(config.scraper.waitAfterClick);
    logger.info(`[TRACE] Current URL after button click: ${this.page.url()}`);

    logger.info('[TRACE] ----- LOGIN FORM FILLING START -----');
    logger.info(`[TRACE] Waiting for modal container (selector: ${SELECTORS.LOGIN_MODAL_CONTAINER})...`);
    logger.info('[TRACE] Timeout: 30000ms');

    try {
      await this.page.waitForSelector(SELECTORS.LOGIN_MODAL_CONTAINER);
      logger.success('[TRACE] Modal container found!');
    } catch (error: any) {
      logger.error(`[TRACE] Modal container NOT found! Error: ${error.message}`);
      await this.saveScreenshot('failed_login_modal');
      const bodyText = await this.page.evaluate(() => document.body.innerText);
      logger.info(`[TRACE] Page body text (first 500 chars): ${bodyText.substring(0, 500)}`);
      throw error;
    }

    logger.info('[TRACE] Typing access code...');
    await this.fillInput(SELECTORS.LOGIN_ACCESS_CODE, config.credentials.employeeNumber);

    // Verify value
    const accessCodeValue = await this.page.$eval(SELECTORS.LOGIN_ACCESS_CODE, (el: any) => el.value);
    if (accessCodeValue !== config.credentials.employeeNumber) {
      logger.warn(`[TRACE] Access code value verification failed!`);
    }
    logger.success(`[TRACE] Access code entered`);
    await this.saveScreenshot('after_access_code_input');

    logger.info(`[TRACE] Waiting for password field (selector: ${SELECTORS.LOGIN_PASSWORD})...`);
    await this.page.waitForSelector(SELECTORS.LOGIN_PASSWORD);
    logger.success('[TRACE] Password field found!');

    logger.info('[TRACE] Typing password...');
    await this.fillInput(SELECTORS.LOGIN_PASSWORD, config.credentials.password);

    // Verify value
    const passwordValue = await this.page.$eval(SELECTORS.LOGIN_PASSWORD, (el: any) => el.value);
    if (passwordValue !== config.credentials.password) {
      logger.warn('[TRACE] Password value verification failed!');
    }
    logger.success('[TRACE] Password entered');
    await this.saveScreenshot('after_password_input');

    logger.info(`[TRACE] Checking for error messages before clicking login button...`);
    const errorMessage = await this.page.evaluate((selector) => {
      const errorElement = document.querySelector(selector);
      return errorElement ? (errorElement as HTMLElement).innerText : null;
    }, SELECTORS.LOGIN_ERROR_MESSAGE);

    if (errorMessage) {
      logger.warn(`[TRACE] Found error message before login click: "${errorMessage}". Attempting to proceed anyway.`);
    } else {
      logger.info('[TRACE] No error messages found.');
    }

    logger.info(`[TRACE] Clicking login button and pressing Enter: ${SELECTORS.LOGIN_SUBMIT}...`);
    try {
      await this.page.waitForSelector(SELECTORS.LOGIN_SUBMIT, { timeout: 10000 });

      // Check if button is disabled (Angular validation might have failed)
      const btnState = await this.page.$eval(SELECTORS.LOGIN_SUBMIT, (el: any) => ({
        disabled: el.disabled,
        text: el.innerText,
        classes: el.className
      }));

      logger.info(`[TRACE] Button state: disabled=${btnState.disabled}, classes="${btnState.classes}"`);

      if (btnState.disabled) {
        logger.warn(`[TRACE] Login button is DISABLED! Forcing direct click via evaluate and Enter key...`);
      }

      // Strategy: Focus password and press Enter + click button
      await this.page.focus(SELECTORS.LOGIN_PASSWORD);
      await this.page.keyboard.press('Enter');

      // Allow a small gap
      await this.wait(500);

      // Perform single click via page.click
      await this.page.click(SELECTORS.LOGIN_SUBMIT, { delay: 100 });

      // Fallback: trigger click via evaluate as well if Enter didn't work
      await this.page.evaluate((selector) => {
        const btn = document.querySelector(selector) as HTMLButtonElement;
        if (btn) btn.click();
      }, SELECTORS.LOGIN_SUBMIT);

      logger.success('[TRACE] Login triggers executed (Enter + Click)');
    } catch (error) {
      logger.error(`[TRACE] Failed to interact with login button: ${error}`);
      await this.saveScreenshot('failed_login_button_interaction');
      logger.info('[TRACE] Attempting "Enter" key final fallback on password field...');
      await this.page.focus(SELECTORS.LOGIN_PASSWORD);
      await this.page.keyboard.press('Enter');
    }

    logger.info(`[TRACE] Waiting ${config.scraper.waitAfterNavigation * 2}ms for transition... (increased for stability)`);
    await this.wait(config.scraper.waitAfterNavigation * 2);

    // Transition check & Refresh Strategy
    let isAtDaySelection = false;
    try {
      // Check for dialog or redirect
      await this.page.waitForSelector(SELECTORS.DAY_SELECTION_DIALOG, { timeout: 15000 });
      isAtDaySelection = true;
      logger.success('[TRACE] Day selection dialog appeared directly after login');
    } catch (e) {
      logger.warn('[TRACE] Day selection dialog not found after login. Investigating state...');
      await this.saveScreenshot('login_stuck_before_action');

      // Check if login modal is still present
      const isModalStillThere = await this.page.evaluate((sel1, sel2) => {
        return !!document.querySelector(sel1) || !!document.querySelector(sel2);
      }, SELECTORS.LOGIN_MODAL_CONTAINER, SELECTORS.LOGIN_ACCESS_CODE);

      if (isModalStillThere) {
        logger.warn('[TRACE] Login modal or fields are STILL present! Submit might have failed silently.');

        // Check for error messages specifically
        const postLoginError = await this.page.evaluate((selector) => {
          const el = document.querySelector(selector);
          return el ? (el as HTMLElement).innerText : null;
        }, SELECTORS.LOGIN_ERROR_MESSAGE);

        if (postLoginError) {
          logger.error(`[TRACE] Login failed with error message: "${postLoginError}"`);
          throw new Error(`Login failed on page: ${postLoginError}`);
        }
      }

      // Strategy: Check if we are at least logged in now (even if modal is weird)
      if (await this.isLoggedIn()) {
        logger.success('[TRACE] Detected as logged in after wait! Navigating to day selection.');
        await this.navigateToDaySelection();
        isAtDaySelection = true;
        return;
      }

      logger.warn('[TRACE] Not logged in and dialog missing. Applying Refresh Strategy...');

      // User's suggestion: Refresh the page and try again
      logger.info(`[TRACE] Refreshing page by navigating to ${URLS.BASE}...`);
      await this.page.goto(URLS.BASE, { waitUntil: 'networkidle2' });

      // Per user feedback: We must click the button again to see if the session is picked up
      logger.info('[TRACE] Refresh done. Clicking Pre-order button to trigger session check...');
      await this.navigateToDaySelection();

      // Re-verify login status
      if (await this.isLoggedIn()) {
        logger.success('[TRACE] Refresh confirmed: We are logged in! Day selection should be open.');
        isAtDaySelection = true;
      } else {
        logger.error('[TRACE] Refresh failed: Still not logged in. Login might have truly failed.');
        await this.saveScreenshot('login_failed_after_refresh');
        throw new Error('Login failed: Not logged in even after refresh.');
      }
    }

    logger.info(`[TRACE] Current URL after login attempt: ${this.page.url()}`);
    logger.success('[TRACE] ===== LOGIN FLOW ATTEMPT COMPLETE =====');
  }

  /**
   * Common logic to click the pre-order button and wait for the dialog
   */
  private async navigateToDaySelection(): Promise<void> {
    if (!this.page) throw new Error('Page not initialized');

    logger.info(`[TRACE] Clicking Pre-order menu button (selector: ${SELECTORS.PREORDER_MENU_BUTTON})...`);
    await this.page.waitForSelector(SELECTORS.PREORDER_MENU_BUTTON);
    await this.page.click(SELECTORS.PREORDER_MENU_BUTTON);
    logger.success('[TRACE] Pre-order menu button clicked');

    logger.info(`[TRACE] Waiting ${config.scraper.waitAfterNavigation}ms for transition to dialog...`);
    await this.wait(config.scraper.waitAfterNavigation);
  }

  private getWeekNumber(date: Date = new Date()): number {
    const d = new Date(Date.UTC(date.getFullYear(), date.getMonth(), date.getDate()));
    const dayNum = d.getUTCDay() || 7;
    d.setUTCDate(d.getUTCDate() + 4 - dayNum);
    const yearStart = new Date(Date.UTC(d.getUTCFullYear(), 0, 1));
    return Math.ceil((((d.getTime() - yearStart.getTime()) / 86400000) + 1) / 7);
  }

  /**
   * Extract current week number and year from the page
   */
  private async extractWeekInfo(): Promise<{ year: number; weekNumber: number }> {
    if (!this.page) throw new Error('Page not initialized');

    logger.info('[TRACE] Extracting week information from page...');

    try {
      await this.page.waitForSelector(SELECTORS.WEEK_HEADER, { timeout: 10000 });
    } catch (e) {
      logger.warn('[TRACE] Week header selector not found, attempting anyway...');
    }

    const { weekText, headerTitle, bodyText } = await this.page.evaluate((selInfo) => {
      const h = document.querySelector(selInfo);
      return {
        weekText: h?.textContent || '',
        headerTitle: document.title || '',
        bodyText: document.body.innerText || ''
      };
    }, SELECTORS.WEEK_HEADER);

    logger.info(`[TRACE] Week header text: "${weekText}" (Title: "${headerTitle}")`);

    // Parse "CW 6", "KW 6", "Week 6", "Woche 6"
    // Try multiple sources: Header element, Page title, and Page body as a fallback
    const cwMatch = weekText.match(/(?:CW|KW|Week|Woche|W)\s*(\d+)/i) ||
      headerTitle.match(/(?:CW|KW|Week|Woche|W)\s*(\d+)/i) ||
      bodyText.match(/(?:CW|KW|Week|Woche|W)\s*(\d+)/i);

    const weekNumber = cwMatch ? parseInt(cwMatch[1]) : this.getWeekNumber();

    logger.info(`[TRACE] Parsed week number: ${weekNumber}`);

    // Get current year
    const year = new Date().getFullYear();
    logger.info(`[TRACE] Using year: ${year}`);

    logger.success(`[TRACE] Detected week: ${year}-W${weekNumber}`);
    return { year, weekNumber };
  }

  /**
   * Extract Authentication Token from LocalStorage
   */
  private async getAuthToken(): Promise<string> {
    if (!this.page) throw new Error('Page not initialized');

    const token = await this.page.evaluate(() => {
      const store = localStorage.getItem('AkitaStores');
      if (!store) return null;
      try {
        const parsed = JSON.parse(store);
        return parsed.auth?.token;
      } catch (e) {
        return null;
      }
    });

    if (!token) {
      throw new Error('Authentication token not found in LocalStorage (AkitaStores)');
    }
    return token;
  }

  /**
   * Fetch menu for a specific date using the Bessa API
   */
  private async fetchMenuForDate(token: string, date: string, weekday: string): Promise<DayMenu> {
    if (!this.page) throw new Error('Page not initialized');

    const venueId = 591;
    const menuId = 7; // "Bestellung" / configured menu ID
    const apiUrl = `${URLS.API_BASE}/venues/${venueId}/menu/${menuId}/${date}/`;

    // Execute fetch inside the browser context
    const responseData = await this.page.evaluate(async (url, authToken) => {
      try {
        const res = await fetch(url, {
          headers: {
            'Authorization': `Token ${authToken}`,
            'Accept': 'application/json'
          }
        });

        if (!res.ok) {
          return { error: `Status ${res.status}: ${res.statusText}` };
        }

        return await res.json();
      } catch (e: any) {
        return { error: e.toString() };
      }
    }, apiUrl, token);

    if (responseData.error) {
      // 404 might just mean no menu for that day (e.g. weekend)
      logger.warn(`[TRACE] API fetch warning for ${date}: ${responseData.error}`);
      // Return empty menu for that day
      return { date, weekday, items: [] };
    }

    const apiResponse = responseData as ApiMenuResponse;
    const items: MenuItem[] = [];

    // Parse results
    if (apiResponse.results && apiResponse.results.length > 0) {
      for (const group of apiResponse.results) {
        if (group.items) {
          for (const item of group.items) {
            items.push({
              id: `${date}_${item.id}`,
              name: item.name,
              description: item.description,
              price: parseFloat(item.price),
              available: parseInt(item.available_amount) > 0 || item.available_amount === null // Null sometimes acts as available
            });
          }
        }
      }
    }

    return {
      date,
      weekday,
      items
    };
  }

  /**
   * Scrape menus starting from the current week until no more data is found (minimum two weeks).
   * Pass skipLogin=true when the caller has already authenticated this scraper instance
   * (e.g. via the server's /api/login flow); otherwise login() runs with the env credentials.
   */
  async scrapeMenus(saveToFile: boolean = true, skipLogin: boolean = false): Promise<WeeklyMenu> {
    await this.init();
    try {
      logger.info('[TRACE] ========== SCRAPING MENUS (MULTI-WEEK) ==========');

      if (!skipLogin) {
        await this.navigateAndAcceptCookies();
        await this.login();
      }

      // 2. Get Auth Token
      logger.info('[TRACE] Retrieving Auth Token...');
      const token = await this.getAuthToken();
      logger.success(`[TRACE] Auth token retrieved: ${token.substring(0, 10)}...`);

      // 3. Determine Start Date (Monday of current week)
      const today = new Date();
      const weekInfo = await this.extractWeekInfo();

      const currentDay = today.getUTCDay() || 7; // Sunday is 0 -> 7
      const startMonday = new Date(today);
      startMonday.setUTCDate(today.getUTCDate() - currentDay + 1);
      // Reset time to avoid drift
      startMonday.setUTCHours(0, 0, 0, 0);

      logger.info(`[TRACE] Starting scrape from ${startMonday.toISOString().split('T')[0]}`);

      const days: DayMenu[] = [];
      const dayNames = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'];

      const MAX_WEEKS = 8;
      const MIN_DAYS_COVERAGE = 14;

      let currentDate = new Date(startMonday);
      let daysProcessed = 0;

      while (true) {
        // Safety break to prevent infinite loops (approx 8 weeks)
        if (daysProcessed > MAX_WEEKS * 7) {
          logger.warn('[TRACE] Reached maximum week limit (safety break). Stopping.');
          break;
        }

        const dayOfWeek = currentDate.getUTCDay(); // 0=Sun, 1=Mon, ..., 6=Sat
        const isWeekend = dayOfWeek === 0 || dayOfWeek === 6;

        if (isWeekend) {
          // Skip weekends, just advance
          currentDate.setUTCDate(currentDate.getUTCDate() + 1);
          daysProcessed++;
          continue;
        }

        const dateStr = currentDate.toISOString().split('T')[0];
        // Map 0(Sun)->6, 1(Mon)->0, etc.
        const dayNameIndex = (dayOfWeek + 6) % 7;
        const weekday = dayNames[dayNameIndex];

        logger.info(`[TRACE] Fetching menu for ${weekday} (${dateStr})...`);

        try {
          const dayMenu = await this.fetchMenuForDate(token, dateStr, weekday);

          if (dayMenu.items.length === 0) {
            // Check if we have covered enough time: difference in days from start
            const diffTime = Math.abs(currentDate.getTime() - startMonday.getTime());
            const daysCovered = Math.ceil(diffTime / (1000 * 60 * 60 * 24));

            if (daysCovered >= MIN_DAYS_COVERAGE) {
              logger.info(`[TRACE] Stopping scraping at ${dateStr} (No items found and > 2 weeks covered)`);
              break;
            } else {
              logger.info(`[TRACE] Empty menu at ${dateStr}, but only covered ${daysCovered} days. Continuing...`);
              // Record empty weekdays so the day structure is preserved
              days.push(dayMenu);
            }
          } else {
            days.push(dayMenu);
          }
        } catch (error) {
          logger.error(`[TRACE] Failed to fetch menu for ${dateStr}: ${error}`);
          days.push({ date: dateStr, weekday, items: [] });
        }

        // Advance to next day
        currentDate.setUTCDate(currentDate.getUTCDate() + 1);
        daysProcessed++;

        // Be nice to the API
        await this.wait(150);
      }

      const resultMenu: WeeklyMenu = {
        year: weekInfo.year,
        weekNumber: weekInfo.weekNumber,
        days: days,
        scrapedAt: new Date().toISOString()
      };

      logger.success(`[TRACE] Scraping completed. Found ${days.length} days of menus.`);
      return resultMenu;
    } catch (error) {
      logger.error(`[TRACE] Scraping failed: ${error}`);
      await this.saveScreenshot('scrape_error');
      throw error;
    } finally {
      await this.close();
    }
  }

  /**
   * Helper to wait
   */
  private async wait(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms));
  }
}

@@ -1,46 +0,0 @@
// CSS Selectors based on screen documentation

export const SELECTORS = {
  // Cookie Consent (Screen #1)
  COOKIE_ACCEPT_ALL: 'button::-p-text(Accept all), button::-p-text(Alle akzeptieren), button::-p-text(Zustimmen), .cmpboxbtnyes',

  // Landing Page (Screen #2)
  PREORDER_MENU_BUTTON: 'button.order-type-button.button.high::-p-text(Pre-order menu)',

  // Login Modal (Screen #5)
  LOGIN_MODAL_CONTAINER: 'app-access-code-dialog, app-access-code-login',
  LOGIN_ACCESS_CODE: 'input[formcontrolname="accessCode"]',
  LOGIN_PASSWORD: 'input[formcontrolname="password"]',
  LOGIN_SUBMIT: 'button[bessa-button].base-button.button',
  LOGIN_ERROR_MESSAGE: '.mat-error, .toast-error, app-message, .error, [class*="error"]',

  // Day Selection Dialog (Screen #10, #11)
  DAY_SELECTION_DIALOG: 'app-canteen-dialog, app-bessa-select-day-dialog',
  WEEK_CHEVRON_NEXT: 'button[aria-label="next week"]',
  WEEK_CHEVRON_PREV: 'button[aria-label="previous week"]',
  WEEK_HEADER: 'h2, [class*="week-header"], .calendar-week',
  DAY_ROW: 'app-date-line',
  ADD_ORDER_LINK: 'div.clickable',

  // Menu Overview (Screen #14)
  MENU_CARD: '.menu-card, .dish-card, app-bessa-menu-card, [class*="menu-item"]',
  MENU_ITEM_TITLE: 'h3, .menu-title, [class*="title"]',
  MENU_ITEM_DESCRIPTION: 'p, .menu-description, [class*="description"]',
  MENU_ITEM_PRICE: '.price, [class*="price"], .amount',
  MENU_ITEM_ADD_BUTTON: 'button::-p-text(+), button.add-button',
  NOT_AVAILABLE_TEXT: '::-p-text(Not available), ::-p-text(Nicht verfügbar)',

  // Week/Date Display
  CALENDAR_WEEK_DISPLAY: '[class*="week"]',
  DATE_DISPLAY: '[class*="date"]',

  // Close/Back buttons
  CLOSE_BUTTON: 'button[aria-label="close"], .close-btn, button.close, mat-icon::-p-text(close)',
  BACK_BUTTON: 'button[aria-label="back"], .back-arrow, button.back, mat-icon::-p-text(arrow_back)',
  DONE_BUTTON: 'button::-p-text(Done), button::-p-text(Fertig)',
} as const;

export const URLS = {
  BASE: 'https://web.bessa.app/knapp-kantine',
  API_BASE: 'https://api.bessa.app/v1',
} as const;
748  src/server.ts
@@ -1,748 +0,0 @@
import express from 'express';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import fs from 'fs/promises';
import { config } from './config.js';
import { logger } from './utils/logger.js';
import { FlagStore, FlaggedItem } from './storage/flag-store.js';
import { SseManager } from './services/sse-manager.js';
import { PollingOrchestrator } from './services/polling-orchestrator.js';

const app = express();
const port = 3005; // Changed from 3000 to avoid conflicts

// Get current directory
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// Project root (assuming we are in src/)
const projectRoot = join(__dirname, '..');
const publicDir = join(projectRoot, 'public');
const dataFile = join(projectRoot, 'data', 'menus.json');
const dataDir = join(projectRoot, 'data');

// Initialize Services
const flagStore = new FlagStore(dataDir);
const sseManager = new SseManager();
const orchestrator = new PollingOrchestrator(flagStore, sseManager);

// Bessa API Constants
const BESSA_API_BASE = 'https://api.bessa.app/v1';
const GUEST_TOKEN = 'c3418725e95a9f90e3645cbc846b4d67c7c66131';
const CLIENT_VERSION = '1.7.0_prod/2026-01-26';

// Middleware
app.use(express.json());

// API Routes
app.post('/api/login', async (req, res) => {
  const { employeeId, password } = req.body;

  if (!employeeId || !password) {
    return res.status(400).json({ error: 'Employee ID and password are required' });
  }

  // Transform employee ID to email format as expected by Bessa API
  const email = `knapp-${employeeId}@bessa.app`;

  try {
    const response = await fetch(`${BESSA_API_BASE}/auth/login/`, {
      method: 'POST',
      headers: {
        'Authorization': `Token ${GUEST_TOKEN}`,
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      },
      body: JSON.stringify({ email, password })
    });

    const data = await response.json();

    if (response.ok) {
      const token = data.key;

      // Fetch user details to get First Name
      try {
        const userResponse = await fetch(`${BESSA_API_BASE}/auth/user/`, {
          headers: {
            'Authorization': `Token ${token}`,
            'Accept': 'application/json',
            'X-Client-Version': CLIENT_VERSION
          }
        });

        if (userResponse.ok) {
          const userData = await userResponse.json();
          res.json({
            key: token,
            firstName: userData.first_name,
            lastName: userData.last_name
          });
        } else {
          // Fallback if user fetch fails
          logger.warn(`Failed to fetch user details for ${email}`);
          res.json({ key: token });
        }
      } catch (userError) {
        logger.error(`Error fetching user details: ${userError}`);
        res.json({ key: token });
      }
    } else {
      logger.error(`Login failed for ${email}: ${JSON.stringify(data)}`);
      res.status(response.status).json({ error: data.non_field_errors?.[0] || 'Login failed' });
    }
  } catch (error) {
    logger.error(`Login error: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});

app.get('/api/me', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) {
    return res.status(401).json({ error: 'No token provided' });
  }

  try {
    const userResponse = await fetch(`${BESSA_API_BASE}/auth/user/`, {
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      }
    });

    if (userResponse.ok) {
      const userData = await userResponse.json();
      res.json({
        firstName: userData.first_name,
        lastName: userData.last_name,
        email: userData.email
      });
    } else {
      res.status(userResponse.status).json({ error: 'Failed to fetch user details' });
    }
  } catch (error) {
    logger.error(`Error fetching user details: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});

app.get('/api/user/orders', async (req, res) => {
  const authHeader = req.headers.authorization;

  if (!authHeader) {
    return res.status(401).json({ error: 'Authorization header is required' });
  }

  try {
    const response = await fetch(`${BESSA_API_BASE}/venues/591/menu/dates/`, {
      method: 'GET',
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      }
    });

    const data = await response.json();

    if (response.ok) {
      // Return full order details per date for orderMap building
      const dateOrders = data.results.map((day: any) => ({
        date: day.date,
        orders: (day.orders || []).map((order: any) => ({
          id: order.id,
          state: order.order_state,
          total: order.total,
          items: (order.items || []).map((item: any) => ({
            name: item.name,
            articleId: item.article,
            price: item.price
          }))
        }))
      }));
      res.json({ dateOrders });
    } else {
      logger.error(`Failed to fetch orders: ${JSON.stringify(data)}`);
      res.status(response.status).json({ error: 'Failed to fetch orders' });
    }
  } catch (error) {
    logger.error(`Orders fetch error: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});

// Place an order via Bessa API
app.post('/api/order', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) {
    return res.status(401).json({ error: 'Authorization header is required' });
  }

  const { date, articleId, name, price, vat, description } = req.body;
  if (!date || !articleId || !name || price === undefined) {
    return res.status(400).json({ error: 'Missing required fields: date, articleId, name, price' });
  }

  try {
    // Fetch user details for customer object
    const userResponse = await fetch(`${BESSA_API_BASE}/auth/user/`, {
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      }
    });

    if (!userResponse.ok) {
      return res.status(401).json({ error: 'Failed to fetch user details' });
    }

    const userData = await userResponse.json();
    const now = new Date().toISOString();

    // Construct order payload matching exact Bessa format
    const orderPayload = {
      uuid: crypto.randomUUID(),
      created: now,
      updated: now,
      order_type: 7,
      items: [
        {
          article: articleId,
          course_group: null,
          modifiers: [],
          uuid: crypto.randomUUID(),
          name: name,
          description: description || '',
          price: String(parseFloat(price)),
          amount: 1,
          vat: vat || '10.00',
          comment: ''
        }
      ],
      table: null,
      total: parseFloat(price),
      tip: 0,
      currency: 'EUR',
      venue: 591,
      states: [],
      order_state: 1,
      date: `${date}T10:00:00.000Z`,
      payment_method: 'payroll',
      customer: {
        first_name: userData.first_name,
        last_name: userData.last_name,
        email: userData.email,
        newsletter: false
      },
      preorder: false,
      delivery_fee: 0,
      cash_box_table_name: null,
      take_away: false
    };

    logger.info(`Placing order: ${name} for ${date} (article ${articleId})`);

    const orderResponse = await fetch(`${BESSA_API_BASE}/user/orders/`, {
      method: 'POST',
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      },
      body: JSON.stringify(orderPayload)
    });

    const orderData = await orderResponse.json();

    if (orderResponse.ok || orderResponse.status === 201) {
      logger.success(`Order placed: ID ${orderData.id} (${name})`);
      res.status(201).json({
        orderId: orderData.id,
        hashId: orderData.hash_id,
        state: orderData.order_state,
        total: orderData.total
      });
    } else {
      logger.error(`Order failed: ${JSON.stringify(orderData)}`);
      res.status(orderResponse.status).json({
        error: orderData.detail || orderData.non_field_errors?.[0] || 'Order failed'
      });
    }
  } catch (error) {
    logger.error(`Order error: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});

// Cancel an order via Bessa API
app.post('/api/order/cancel', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) {
    return res.status(401).json({ error: 'Authorization header is required' });
  }

  const { orderId } = req.body;
  if (!orderId) {
    return res.status(400).json({ error: 'Missing required field: orderId' });
  }

  try {
    logger.info(`Cancelling order: ${orderId}`);

    const cancelResponse = await fetch(`${BESSA_API_BASE}/user/orders/${orderId}/cancel/`, {
      method: 'PATCH',
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      },
      body: JSON.stringify({})
    });

    const cancelData = await cancelResponse.json();

    if (cancelResponse.ok) {
      logger.success(`Order ${orderId} cancelled`);
      res.json({ success: true, orderId: cancelData.order_id, state: cancelData.state });
    } else {
      logger.error(`Cancel failed for ${orderId}: ${JSON.stringify(cancelData)}`);
      res.status(cancelResponse.status).json({
        error: cancelData.detail || 'Cancellation failed'
      });
    }
  } catch (error) {
    logger.error(`Cancel error: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});

// --- Flagging & Polling API ---

app.get('/api/flags', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) return res.status(401).json({ error: 'Unauthorized' });

  // There is no robust per-user session here (only the Bessa token), so flags are not yet
  // filtered per user; return all active flags and let the client decide what to display.
const flags = await flagStore.getAllFlags();
|
||||
res.json(flags);
|
||||
});
|
||||
|
||||
app.post('/api/flags', async (req, res) => {
|
||||
const { id, date, articleId, userId, cutoff, description, name } = req.body;
|
||||
if (!id || !date || !articleId || !userId || !cutoff) {
|
||||
return res.status(400).json({ error: 'Missing required fields' });
|
||||
}
|
||||
|
||||
const item: FlaggedItem = {
|
||||
id, date, articleId, userId, cutoff, description, name,
|
||||
createdAt: new Date().toISOString()
|
||||
};
|
||||
|
||||
const success = await flagStore.addFlag(item);
|
||||
if (success) {
|
||||
logger.info(`Flag added: ${name} (${id}) by ${userId}`);
|
||||
res.status(201).json({ success: true });
|
||||
} else {
|
||||
res.status(409).json({ error: 'Flag already exists' });
|
||||
}
|
||||
});
|
||||
|
||||
app.delete('/api/flags/:id', async (req, res) => {
|
||||
const { id } = req.params;
|
||||
const success = await flagStore.removeFlag(id);
|
||||
if (success) {
|
||||
logger.info(`Flag removed: ${id}`);
|
||||
res.json({ success: true });
|
||||
} else {
|
||||
res.status(404).json({ error: 'Flag not found' });
|
||||
}
|
||||
});
|
||||
|
||||
app.post('/api/check-item', async (req, res) => {
  const authHeader = req.headers.authorization;
  if (!authHeader) return res.status(401).json({ error: 'Unauthorized' });

  const { date, articleId } = req.body;
  if (!date || !articleId) return res.status(400).json({ error: 'Missing date or articleId' });

  try {
    // Fetch menu details for the specific date using User's Token
    // URL Pattern: /venues/591/menu/7/{date}/
    // Assumption: Menu ID 7 is standard.
    const response = await fetch(`${BESSA_API_BASE}/venues/591/menu/7/${date}/`, {
      method: 'GET',
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      }
    });

    if (!response.ok) {
      // If 404, maybe no menu for that day?
      if (response.status === 404) {
        return res.json({ available: false, error: 'Menu not found' });
      }
      return res.status(response.status).json({ error: 'Failed to fetch menu from Bessa' });
    }

    const data = await response.json();
    const results = data.results || [];

    // Find the item
    let foundItem = null;
    for (const group of results) {
      if (group.items) {
        foundItem = group.items.find((i: any) => i.article === articleId || i.id === articleId);
        if (foundItem) break;
      }
    }

    if (foundItem) {
      // Check availability
      const isUnlimited = foundItem.amount_tracking === false;
      const hasStock = parseInt(foundItem.available_amount) > 0;
      const isAvailable = isUnlimited || hasStock;

      logger.info(`Check Item ${articleId} on ${date}: ${isAvailable ? 'AVAILABLE' : 'SOLD OUT'}`);
      res.json({ available: isAvailable });
    } else {
      logger.warn(`Check Item ${articleId} on ${date}: Item not found in menu`);
      res.json({ available: false, error: 'Item not found in menu' });
    }

  } catch (error) {
    logger.error(`Check Item Error: ${error}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});

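// Example call from the front end (illustrative values; assumes the JSON body parser
// is registered earlier in this file and that <token> is a valid Bessa token):
//
//   fetch('/api/check-item', {
//     method: 'POST',
//     headers: { 'Content-Type': 'application/json', 'Authorization': 'Token <token>' },
//     body: JSON.stringify({ date: '2026-02-03', articleId: 4711 })
//   }).then(r => r.json()); // -> { available: true } or { available: false, error: '...' }
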
app.post('/api/poll-result', async (req, res) => {
  const { flagId, isAvailable } = req.body;
  if (!flagId) return res.status(400).json({ error: 'Missing flagId' });

  await orchestrator.handlePollResult(flagId, isAvailable);
  res.json({ success: true });
});

app.get('/api/events', (req, res) => {
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.flushHeaders();

  const clientId = sseManager.addClient(res);

  // Send initial ping/id
  sseManager.sendToClient(clientId, 'connected', { clientId });
});

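// A browser client is expected to consume this stream with EventSource, roughly as
// sketched below (handler details are assumptions, not part of the endpoint itself):
//
//   const events = new EventSource('/api/events');
//   events.addEventListener('poll_request', (e) => {
//     const task = JSON.parse(e.data); // { flagId, date, articleId, name }
//     // ...check availability via POST /api/check-item, then report back:
//     fetch('/api/poll-result', {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json' },
//       body: JSON.stringify({ flagId: task.flagId, isAvailable: true })
//     });
//   });
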
// SSE endpoint for menu refresh progress
app.get('/api/refresh-progress', async (req, res) => {
  logger.info(`[DEBUG] Received SSE request with token query: ${req.query.token ? 'YES' : 'NO'}`);

  // Get token from query parameter (EventSource doesn't support custom headers)
  const token = req.query.token as string;
  const authHeader = token ? `Token ${token}` : `Token ${GUEST_TOKEN}`;

  // Set headers for SSE
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  res.flushHeaders();

  const sendProgress = (data: any) => {
    res.write(`data: ${JSON.stringify(data)}\n\n`);
  };

  try {
    sendProgress({ step: 'start', message: 'Hole verfügbare Daten...', current: 0, total: 100 });

    // 1. Fetch available dates
    logger.info('Fetching available dates...');
    const datesResponse = await fetch(`${BESSA_API_BASE}/venues/591/menu/dates/`, {
      method: 'GET',
      headers: {
        'Authorization': authHeader,
        'Accept': 'application/json',
        'X-Client-Version': CLIENT_VERSION
      }
    });

    if (!datesResponse.ok) {
      throw new Error(`Failed to fetch dates: ${datesResponse.status}`);
    }

    const datesData = await datesResponse.json();
    let availableDates = datesData.results || [];

    // Filter for future dates or recent past (e.g. last 7 days + future)
    const today = new Date();
    today.setDate(today.getDate() - 7);
    const cutoffDate = today.toISOString().split('T')[0];

    availableDates = availableDates
      .filter((d: any) => d.date >= cutoffDate)
      .sort((a: any, b: any) => a.date.localeCompare(b.date));

    // Limit to reasonable amount (e.g. next 30 days)
    availableDates = availableDates.slice(0, 30);
    const totalDates = availableDates.length;

    sendProgress({ step: 'dates_fetched', message: `${totalDates} Tage gefunden. Lade Details...`, current: 0, total: totalDates });

    // 2. Fetch details for each date
    const allDays: any[] = [];
    let completed = 0;

    for (const dateObj of availableDates) {
      const dateStr = dateObj.date;

      sendProgress({
        step: 'fetching_details',
        message: `Lade Menü für ${dateStr}...`,
        current: completed + 1,
        total: totalDates
      });

      try {
        // Menu ID 7 seems to be the standard lunch menu
        const menuDetailUrl = `${BESSA_API_BASE}/venues/591/menu/7/${dateStr}/`;
        const detailResponse = await fetch(menuDetailUrl, {
          method: 'GET',
          headers: {
            'Authorization': authHeader,
            'Accept': 'application/json',
            'X-Client-Version': CLIENT_VERSION
          }
        });

        if (detailResponse.ok) {
          const detailData = await detailResponse.json();

          // Structure: { results: [ { name: "Menü", items: [...] } ] }
          const menuGroups = detailData.results || [];
          let dayItems: any[] = [];

          for (const group of menuGroups) {
            if (group.items && Array.isArray(group.items)) {
              dayItems = dayItems.concat(group.items);
            }
          }

          if (dayItems.length > 0) {
            allDays.push({
              date: dateStr,
              // Use the dateObj to get weekday if possible, or compute it
              menu_items: dayItems,
              orders: dateObj.orders || [] // Store orders for cutoff extraction
            });
          }
        }
      } catch (err) {
        logger.error(`Failed to fetch details for ${dateStr}: ${err}`);
      }

      completed++;
      // Small delay
      await new Promise(resolve => setTimeout(resolve, 100));
    }

    // Group by Week
    const weeksMap = new Map<string, any>();

    // Helper to get ISO week year
    const getWeekYear = (d: Date) => {
      const date = new Date(d.getTime());
      date.setDate(date.getDate() + 3 - (date.getDay() + 6) % 7);
      return date.getFullYear();
    };

    for (const day of allDays) {
      const date = new Date(day.date);
      const weekNum = getISOWeek(date);
      const year = getWeekYear(date);
      const key = `${year}-${weekNum}`;

      if (!weeksMap.has(key)) {
        weeksMap.set(key, {
          year: year,
          weekNumber: weekNum,
          days: []
        });
      }

      const weekday = date.toLocaleDateString('en-US', { weekday: 'long' });

      // Calculate order cutoff time: same day at 10:00 AM local time
      const orderCutoffDate = new Date(day.date);
      orderCutoffDate.setHours(10, 0, 0, 0); // 10:00 AM local time
      const orderCutoff = orderCutoffDate.toISOString();

      weeksMap.get(key).days.push({
        date: day.date,
        weekday: weekday,
        orderCutoff: orderCutoff, // Add the cutoff time
        items: (day.menu_items || []).map((item: any) => {
          const isUnlimited = item.amount_tracking === false;
          const hasStock = parseInt(item.available_amount) > 0;

          return {
            id: `${day.date}_${item.id}`,
            name: item.name || 'Unknown',
            description: item.description || '',
            price: parseFloat(item.price) || 0,
            available: isUnlimited || hasStock,
            availableAmount: parseInt(item.available_amount) || 0,
            amountTracking: item.amount_tracking !== false // Default to true if missing
          };
        })
      });
    }

    const menuData = {
      weeks: Array.from(weeksMap.values()).sort((a: any, b: any) => {
        if (a.year !== b.year) return a.year - b.year;
        return a.weekNumber - b.weekNumber;
      }),
      scrapedAt: new Date().toISOString()
    };

    // Smart merge: preserve current-week data on refresh, purge older weeks
    sendProgress({ step: 'saving', message: 'Daten werden gespeichert...', current: totalDates, total: totalDates });

    const currentISOWeek = getISOWeek(new Date());
    const currentISOYear = getWeekYear(new Date());

    let finalData = menuData;

    try {
      const existingRaw = await fs.readFile(dataFile, 'utf-8');
      const existingData = JSON.parse(existingRaw);

      if (existingData.weeks && Array.isArray(existingData.weeks)) {
        const mergedWeeks = new Map<string, any>();

        // Add all fresh weeks first
        for (const week of menuData.weeks) {
          mergedWeeks.set(`${week.year}-${week.weekNumber}`, week);
        }

        // Merge existing current-week data (preserve days not in fresh data)
        for (const existingWeek of existingData.weeks) {
          const key = `${existingWeek.year}-${existingWeek.weekNumber}`;
          const isCurrentOrFuture =
            existingWeek.year > currentISOYear ||
            (existingWeek.year === currentISOYear && existingWeek.weekNumber >= currentISOWeek);

          if (!isCurrentOrFuture) {
            // Older week: purge (don't keep)
            continue;
          }

          if (mergedWeeks.has(key)) {
            // Merge: keep existing days that aren't in fresh data
            const freshWeek = mergedWeeks.get(key);
            const freshDates = new Set(freshWeek.days.map((d: any) => d.date));

            for (const existDay of existingWeek.days) {
              if (!freshDates.has(existDay.date)) {
                freshWeek.days.push(existDay);
              }
            }

            // Sort days by date
            freshWeek.days.sort((a: any, b: any) => a.date.localeCompare(b.date));
          } else {
            // Future week not in fresh data: keep as-is
            mergedWeeks.set(key, existingWeek);
          }
        }

        finalData = {
          weeks: Array.from(mergedWeeks.values()).sort((a: any, b: any) => {
            if (a.year !== b.year) return a.year - b.year;
            return a.weekNumber - b.weekNumber;
          }),
          scrapedAt: new Date().toISOString()
        };
      }
    } catch (e) {
      // No existing data or parse error; use fresh data as-is
      logger.info('No existing menu data to merge, using fresh data.');
    }

    await fs.writeFile(dataFile, JSON.stringify(finalData, null, 2), 'utf-8');

    sendProgress({ step: 'complete', message: 'Aktualisierung abgeschlossen!', current: totalDates, total: totalDates });
    res.write('event: done\ndata: {}\n\n');
    res.end();

  } catch (error) {
    logger.error(`Refresh error: ${error}`);
    sendProgress({ step: 'error', message: `Fehler: ${error}`, current: 0, total: 100 });
    res.write('event: error\ndata: {}\n\n');
    res.end();
  }
});

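// The front end is expected to consume this progress stream with EventSource and the
// token query parameter, roughly as sketched here (the UI update is an assumption):
//
//   const es = new EventSource(`/api/refresh-progress?token=${token}`);
//   es.onmessage = (e) => {
//     const p = JSON.parse(e.data); // { step, message, current, total }
//     progressBar.value = p.total ? p.current / p.total : 0;
//   };
//   es.addEventListener('done', () => es.close());
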
// Helper function for ISO week number
function getISOWeek(date: Date): number {
  const d = new Date(Date.UTC(date.getFullYear(), date.getMonth(), date.getDate()));
  const dayNum = d.getUTCDay() || 7;
  d.setUTCDate(d.getUTCDate() + 4 - dayNum);
  const yearStart = new Date(Date.UTC(d.getUTCFullYear(), 0, 1));
  return Math.ceil((((d.getTime() - yearStart.getTime()) / 86400000) + 1) / 7);
}

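// Worked example: getISOWeek(new Date('2026-02-03')) shifts the date to the Thursday
// of its ISO week (2026-02-05, day 36 of the year), so the result is
// Math.ceil(36 / 7) = 6, i.e. the "2026-6" key used in weeksMap above.
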
app.get('/api/menus', async (req, res) => {
  try {
    await fs.access(dataFile);
    const data = await fs.readFile(dataFile, 'utf-8');
    res.header('Content-Type', 'application/json');
    res.send(data);
  } catch (error) {
    logger.error(`Failed to read menu data: ${error}`);
    // If file doesn't exist, return empty structure
    res.json({ days: [], updated: null });
  }
});

// Serve Static Files
app.use(express.static(publicDir));

// Fallback to index.html for any other request
app.use((req, res) => {
  if (req.method === 'GET') {
    res.sendFile(join(publicDir, 'index.html'));
  }
});

// Start Server
app.listen(port, () => {
  logger.success(`Web Interface running at http://localhost:${port}`);
  logger.info(`Serving static files from: ${publicDir}`);

  // Start Polling Orchestrator
  orchestrator.start();
});
@@ -1,92 +0,0 @@

import { FlagStore, FlaggedItem } from '../storage/flag-store.js';
import { SseManager } from './sse-manager.js';
import { logger } from '../utils/logger.js';

export class PollingOrchestrator {
  private flagStore: FlagStore;
  private sseManager: SseManager;
  private intervalId: NodeJS.Timeout | null = null;
  private intervalMs: number = 5 * 60 * 1000; // 5 minutes

  constructor(flagStore: FlagStore, sseManager: SseManager) {
    this.flagStore = flagStore;
    this.sseManager = sseManager;
  }

  start(): void {
    if (this.intervalId) return;

    logger.info('Starting Polling Orchestrator...');
    // Run immediately then interval
    this.distributeTasks();
    this.intervalId = setInterval(() => this.distributeTasks(), this.intervalMs);
  }

  stop(): void {
    if (this.intervalId) {
      clearInterval(this.intervalId);
      this.intervalId = null;
    }
  }

  async distributeTasks(): Promise<void> {
    const clients = this.sseManager.getAllClientIds();
    if (clients.length === 0) {
      logger.info('No active clients to poll. Skipping cycle.');
      return;
    }

    // Clean up expired flags first
    await this.flagStore.pruneExpiredFlags();

    const flags = await this.flagStore.getAllFlags();
    if (flags.length === 0) return;

    logger.info(`Distributing ${flags.length} polling tasks across ${clients.length} clients.`);

    // Simple Load Balancing: Round Robin
    let clientIndex = 0;

    for (const flag of flags) {
      const clientId = clients[clientIndex];

      // Send poll request to client
      this.sseManager.sendToClient(clientId, 'poll_request', {
        flagId: flag.id,
        date: flag.date,
        articleId: flag.articleId,
        name: flag.name
      });

      logger.info(`Assigned flag ${flag.id} to client ${clientId}`);

      // Move to next client
      clientIndex = (clientIndex + 1) % clients.length;
    }
  }

  async handlePollResult(flagId: string, isAvailable: boolean): Promise<void> {
    if (!isAvailable) return;

    const flag = await this.flagStore.getFlag(flagId);
    if (!flag) return; // Flag might have been removed

    logger.success(`Item ${flag.name} (${flag.id}) is now AVAILABLE! Broadcasting...`);

    // Notify ALL clients
    this.sseManager.broadcast('item_update', {
      flagId: flag.id,
      status: 'available',
      name: flag.name,
      date: flag.date,
      articleId: flag.articleId
    });

    // Keep the flag after notifying: the requirement is to remove flags only once the
    // order cutoff is reached, and availability could flip back before then. The user
    // can unflag manually after ordering; otherwise the flag expires via pruning.
  }
}
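
// Wiring sketch (assumed to mirror the server setup above; the data directory path is
// an assumption): construct the orchestrator once with the shared stores and start it
// after the HTTP server is listening:
//
//   const flagStore = new FlagStore('./data');
//   const sseManager = new SseManager();
//   const orchestrator = new PollingOrchestrator(flagStore, sseManager);
//   orchestrator.start(); // distributes poll_request tasks every 5 minutes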
@@ -1,69 +0,0 @@

import { Response } from 'express';
import { logger } from '../utils/logger.js';
import { randomUUID } from 'crypto';

interface ConnectedClient {
  id: string;
  res: Response;
  userId?: string; // If authenticated
}

export class SseManager {
  private clients: Map<string, ConnectedClient> = new Map();

  addClient(res: Response, userId?: string): string {
    const id = randomUUID();
    const client: ConnectedClient = { id, res, userId };
    this.clients.set(id, client);

    // Remove client on connection close
    res.on('close', () => {
      this.clients.delete(id);
      logger.info(`SSE Client disconnected: ${id}`);
    });

    logger.info(`SSE Client connected: ${id} (User: ${userId || 'Guest'})`);
    return id;
  }

  removeClient(id: string): void {
    const client = this.clients.get(id);
    if (client) {
      client.res.end();
      this.clients.delete(id);
    }
  }

  sendToClient(clientId: string, event: string, data: any): boolean {
    const client = this.clients.get(clientId);
    if (!client) return false;

    client.res.write(`event: ${event}\n`);
    client.res.write(`data: ${JSON.stringify(data)}\n\n`);
    return true;
  }

  broadcast(event: string, data: any): void {
    this.clients.forEach(client => {
      client.res.write(`event: ${event}\n`);
      client.res.write(`data: ${JSON.stringify(data)}\n\n`);
    });
  }

  getActiveClientCount(): number {
    return this.clients.size;
  }

  getAllClientIds(): string[] {
    return Array.from(this.clients.keys());
  }

  // Helper to get a random client for load balancing
  getRandomClient(): string | null {
    const keys = Array.from(this.clients.keys());
    if (keys.length === 0) return null;
    const randomIndex = Math.floor(Math.random() * keys.length);
    return keys[randomIndex];
  }
}
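
// The frames written by sendToClient and broadcast follow the standard SSE wire format.
// For example, sendToClient(id, 'poll_request', { flagId: 'f1' }) makes the browser's
// EventSource receive (values illustrative):
//
//   event: poll_request
//   data: {"flagId":"f1"}
//
// followed by a blank line that terminates the event.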
@@ -1,109 +0,0 @@

import fs from 'fs/promises';
import { join } from 'path';
import { logger } from '../utils/logger.js';

export interface FlaggedItem {
  id: string; // composite: date_articleId
  date: string;
  articleId: number;
  userId: string; // Who flagged it (first user)
  cutoff: string; // ISO date string
  createdAt: string;
  description?: string; // Optional: Store name/desc for notifications
  name?: string;
}

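// Example record (hypothetical values) as persisted in flags.json under the data dir:
//
//   {
//     "id": "2026-02-03_4711",
//     "date": "2026-02-03",
//     "articleId": 4711,
//     "userId": "12345",
//     "cutoff": "2026-02-03T09:00:00.000Z",
//     "createdAt": "2026-02-02T18:21:07.000Z",
//     "name": "M1 Herzhaftes"
//   }
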
export class FlagStore {
  private filePath: string;
  private flags: Map<string, FlaggedItem> = new Map();
  private initialized: boolean = false;

  constructor(dataDir: string) {
    this.filePath = join(dataDir, 'flags.json');
  }

  async init(): Promise<void> {
    if (this.initialized) return;

    try {
      const data = await fs.readFile(this.filePath, 'utf-8');
      const parsed = JSON.parse(data);
      if (Array.isArray(parsed)) {
        parsed.forEach((item: FlaggedItem) => {
          this.flags.set(item.id, item);
        });
      }
      logger.info(`Loaded ${this.flags.size} flags from storage.`);
    } catch (error) {
      // If file doesn't exist, start empty
      logger.info('No existing flags found, starting with empty store.');
    }

    this.initialized = true;
  }

  async save(): Promise<void> {
    try {
      const data = Array.from(this.flags.values());
      await fs.writeFile(this.filePath, JSON.stringify(data, null, 2), 'utf-8');
    } catch (error) {
      logger.error(`Failed to save flags: ${error}`);
    }
  }

  async addFlag(item: FlaggedItem): Promise<boolean> {
    if (!this.initialized) await this.init();

    if (this.flags.has(item.id)) {
      return false; // Already exists
    }

    this.flags.set(item.id, item);
    await this.save();
    return true;
  }

  async removeFlag(id: string): Promise<boolean> {
    if (!this.initialized) await this.init();

    if (this.flags.has(id)) {
      this.flags.delete(id);
      await this.save();
      return true;
    }
    return false;
  }

  async getFlag(id: string): Promise<FlaggedItem | undefined> {
    if (!this.initialized) await this.init();
    return this.flags.get(id);
  }

  async getAllFlags(): Promise<FlaggedItem[]> {
    if (!this.initialized) await this.init();
    return Array.from(this.flags.values());
  }

  async pruneExpiredFlags(): Promise<number> {
    if (!this.initialized) await this.init();

    const now = new Date();
    let pruned = 0;

    for (const [id, item] of this.flags.entries()) {
      const cutoff = new Date(item.cutoff);
      if (now > cutoff) {
        this.flags.delete(id);
        pruned++;
      }
    }

    if (pruned > 0) {
      await this.save();
      logger.info(`Pruned ${pruned} expired flags.`);
    }

    return pruned;
  }
}
@@ -1,87 +0,0 @@
import fs from 'fs/promises';
import path from 'path';
import { MenuDatabase, WeeklyMenu } from '../types.js';
import { config } from '../config.js';
import { logger } from '../utils/logger.js';

/**
 * Load existing menu database from JSON file
 */
export async function loadMenus(): Promise<MenuDatabase> {
  try {
    const content = await fs.readFile(config.storage.menuFile, 'utf-8');
    return JSON.parse(content);
  } catch (error: any) {
    if (error.code === 'ENOENT') {
      logger.info('No existing menus.json found, creating new database');
      return {
        lastUpdated: new Date().toISOString(),
        weeks: [],
      };
    }
    throw error;
  }
}

/**
 * Save menu database to JSON file
 */
export async function saveMenus(db: MenuDatabase): Promise<void> {
  // Ensure data directory exists
  await fs.mkdir(config.storage.dataDir, { recursive: true });

  // Update timestamp
  db.lastUpdated = new Date().toISOString();

  // Write with pretty formatting
  await fs.writeFile(
    config.storage.menuFile,
    JSON.stringify(db, null, 2),
    'utf-8'
  );

  logger.success(`Saved menu database to ${config.storage.menuFile}`);
}

/**
 * Merge a new weekly menu into the database
 * Replaces existing week if found, otherwise adds it
 */
export async function mergeWeeklyMenu(weeklyMenu: WeeklyMenu): Promise<void> {
  const db = await loadMenus();

  // Find and replace existing week, or add new one
  const existingIndex = db.weeks.findIndex(
    w => w.year === weeklyMenu.year && w.weekNumber === weeklyMenu.weekNumber
  );

  if (existingIndex >= 0) {
    db.weeks[existingIndex] = weeklyMenu;
    logger.info(`Updated existing week ${weeklyMenu.year}-W${weeklyMenu.weekNumber}`);
  } else {
    db.weeks.push(weeklyMenu);
    logger.info(`Added new week ${weeklyMenu.year}-W${weeklyMenu.weekNumber}`);
  }

  // Sort weeks by year and week number
  db.weeks.sort((a, b) => {
    if (a.year !== b.year) return a.year - b.year;
    return a.weekNumber - b.weekNumber;
  });

  await saveMenus(db);
}

/**
 * Get menu for a specific date
 */
export async function getMenuForDate(date: string): Promise<import('../types.js').DayMenu | null> {
  const db = await loadMenus();

  for (const week of db.weeks) {
    const day = week.days.find(d => d.date === date);
    if (day) return day;
  }

  return null;
}
27
src/types.ts
27
src/types.ts
@@ -1,27 +0,0 @@
// TypeScript type definitions for menu data structures

export interface MenuItem {
  id: string; // e.g., "2026-02-03_M1_Herzhaftes" (date + menu ID for uniqueness)
  name: string; // e.g., "M1 Herzhaftes"
  description: string; // ingredients + allergen codes
  price: number; // 5.50
  available: boolean;
}

export interface DayMenu {
  date: string; // ISO format: "2026-02-03"
  weekday: string; // "Monday", "Tuesday", ...
  items: MenuItem[];
}

export interface WeeklyMenu {
  year: number; // 2026 (year before week for readability)
  weekNumber: number; // 6
  days: DayMenu[];
  scrapedAt: string; // ISO timestamp
}

export interface MenuDatabase {
  lastUpdated: string; // ISO timestamp
  weeks: WeeklyMenu[];
}
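
// Example shape of the persisted menu database built from these types
// (values are hypothetical, nested items elided):
//
//   {
//     "lastUpdated": "2026-02-02T18:30:00.000Z",
//     "weeks": [
//       {
//         "year": 2026, "weekNumber": 6, "scrapedAt": "2026-02-02T18:30:00.000Z",
//         "days": [ { "date": "2026-02-03", "weekday": "Tuesday", "items": [ ... ] } ]
//       }
//     ]
//   }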
@@ -1,25 +0,0 @@
// Simple logger utility

export const logger = {
  info: (message: string, ...args: any[]) => {
    console.log(`[INFO] ${message}`, ...args);
  },

  success: (message: string, ...args: any[]) => {
    console.log(`✅ ${message}`, ...args);
  },

  error: (message: string, ...args: any[]) => {
    console.error(`❌ [ERROR] ${message}`, ...args);
  },

  debug: (message: string, ...args: any[]) => {
    if (process.env.DEBUG) {
      console.log(`[DEBUG] ${message}`, ...args);
    }
  },

  warn: (message: string, ...args: any[]) => {
    console.warn(`⚠️ [WARN] ${message}`, ...args);
  },
};