const express = require("express");
const multer = require("multer");
const next = require("next");
const path = require("path");
const fs = require("fs");
const dotenv = require("dotenv");
const data = require("./src/helpers/data");
const sharp = require("sharp");
const https = require("https");

//const getPort = require("get-port");
let getPort;
// get-port is ESM-only in recent versions, so it has to be loaded through a dynamic import
import("get-port").then((module) => {
  getPort = module.default;
  // Initialize any logic that relies on getPort here
});

process.env.TZ = "Europe/Sofia";

// Global variable to store the base URL
let baseUrlGlobal;

// if (process.env.NODE_ENV === 'test') {
//   // Load environment variables from .env.test
//   require('dotenv').config({ path: '.env.test' });
// } else {
//   // Load default environment variables
//   require('dotenv').config();
// }
console.log("initial process.env.NODE_ENV = ", process.env.NODE_ENV);
dotenv.config({ path: `.env.${process.env.NODE_ENV}` });
console.log("process.env.NODE_ENV = ", process.env.NODE_ENV);

const PORT = process.env.NEXT_PUBLIC_PORT || 3000;
const HOST = process.env.NEXT_PUBLIC_HOST;
const dev = process.env.NODE_ENV !== "production";
const PROTOCOL = process.env.NEXT_PUBLIC_PROTOCOL;

const app = next({ dev });
const handle = app.getRequestHandler();

console.log("process.env.SSL_ENABLED = ", process.env.SSL_ENABLED);
console.log("process.env.NEXTAUTH_URL = ", process.env.NEXTAUTH_URL);
console.log("process.env.NEXT_PUBLIC_PORT = ", process.env.NEXT_PUBLIC_PORT);

//require('module-alias/register');

// import helpers
const email = require("./src/helpers/email");
const excel = require("./src/helpers/excel");
const common = require("./src/helpers/common");
//const common = require("@common");
const { Shift, Publisher, PrismaClient } = require("@prisma/client");
const { default: test } = require("node:test");

// Configure multer storage
const storageMem = multer.memoryStorage(); // Store the file in memory temporarily
const uploadTmp = multer({ storage: storageMem });

const prisma = common.getPrismaClient();

// handlers
app
  .prepare()
  .then(() => {
    const server = express();

    // Middleware: set the 'x-forwarded-host' header and capture the base URL
    // from the first incoming request
    server.use((req, res, next) => {
      req.headers["x-forwarded-host"] = req.headers["x-forwarded-host"] || req.headers.host;
      // ---------------
      if (!baseUrlGlobal) {
        const protocol = req.headers["x-forwarded-proto"] || "http";
        const host = req.headers.host;
        const baseUrl = `${protocol}://${host}`;
        baseUrlGlobal = baseUrl;
        fs.writeFileSync(path.join(__dirname, "baseUrl.txt"), baseUrlGlobal, "utf8");
        console.log("baseUrlGlobal set to: " + baseUrlGlobal);
      }
      next();
    });

    server.use("/favicon.ico", express.static("styles/favicon_io/favicon.ico"));

    server.get("/last_schedule_json", (req, res) => {
      // var data = JSON.parse(fs.readFileSync("./content/sources/march_flat.json", "utf8"));
      // const newData = data.map((item) => {
      //   const names = item.names.filter((name) => {
      //     return !name.startsWith('Прибира количка') && !name.startsWith('Докарва количка');
      //   });
      //   return { ...item, names };
      // });
      // Parse before sending so the response is JSON rather than a quoted string
      res.json(JSON.parse(fs.readFileSync("./content/sources/march_flat.json", "utf8")));
    });

    server.post("/countNames", uploadTmp.single("file"), (req, res) => {
      try {
        if (!req.file) {
          return res.status(400).json({ message: "No file uploaded" });
        }
        // Read the file buffer from the uploaded file
        const fileContent = req.file.buffer.toString("utf8");
        // Parse the JSON content
        const data = JSON.parse(fileContent);
        res.json(countNames(data));
      } catch (error) {
        console.error("Error:", error.message);
        res.status(500).json({ message: "An error occurred while processing the file" });
      }
    });
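    // NOTE: countNames() is called by the /countNames route above but is not
    // defined anywhere in this file. The helper below is a minimal sketch,
    // assuming the uploaded JSON is an array of schedule items that each carry
    // a `names` array (the shape hinted at in /last_schedule_json); replace it
    // with the real helper if it lives elsewhere.
    function countNames(items) {
      const counts = {};
      for (const item of items) {
        for (const name of item.names || []) {
          counts[name] = (counts[name] || 0) + 1;
        }
      }
      // Return a list of { name, count } pairs, most frequent first
      return Object.entries(counts)
        .map(([name, count]) => ({ name, count }))
        .sort((a, b) => b.count - a.count);
    }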
    server.post("/readword/:year?/:month?/:date?", uploadTmp.single("file"), async (req, res) => {
      let { year = "2023", month = "4", date } = req.params;
      if (!req.file) {
        return res.status(400).json({ message: "No file uploaded" });
      }
      await excel.ReadDocxFileForMonth(null, req.file.buffer, month, year);
      res.send({ message: "ok" });
    });

    server.post("/shiftimportJson", uploadTmp.single("file"), async (req, res) => {
      try {
        if (!req.file) {
          return res.status(400).json({ message: "No file uploaded" });
        }
        // Read the file buffer from the uploaded file
        const fileContent = req.file.buffer.toString("utf8");
        const data = JSON.parse(fileContent);
        res.json(await excel.processEvents(data));
      } catch (error) {
        console.error("Error:", error.message);
        res.status(500).json({ message: "An error occurred while processing the file" });
      }
    });

    // content upload
    const storageDisk = multer.diskStorage({
      destination: function (req, file, cb) {
        const contentDir = path.join(__dirname, "public/content/uploads");
        if (!fs.existsSync(contentDir)) {
          fs.mkdirSync(contentDir, { recursive: true });
        }
        cb(null, contentDir);
      },
      filename: function (req, file, cb) {
        const prefix = req.body.prefix || "image";
        const timestamp = Date.now();
        const fileExt = path.extname(file.originalname);
        cb(null, `${prefix}-${timestamp}${fileExt}`);
      },
    });
    const uploadContent = multer({ storage: storageDisk });

    // Image upload endpoint
    server.post("/upload", uploadContent.array("image"), async (req, res) => {
      if (!req.files || req.files.length === 0) {
        return res.status(400).json({ error: "No files uploaded." });
      }
      const directory = path.dirname(req.files[0].path); // path.join(__dirname, 'public/content/uploads')
      const thumbDirectory = path.join(directory, "thumb");
      if (!fs.existsSync(thumbDirectory)) {
        fs.mkdirSync(thumbDirectory, { recursive: true });
      }
      try {
        // Process all files and get an array of objects with originalUrl and thumbUrl
        const processedFiles = await Promise.all(req.files.map(async (file) => {
          // Use the provided prefix as the filename, or fall back to the original filename
          const prefix = req.body.prefix || path.parse(file.originalname).name;
          const fileExtension = path.extname(file.originalname);
          const newFilename = prefix + fileExtension;
          const originalPath = path.join(directory, newFilename);
          const thumbPath = path.join(thumbDirectory, newFilename);

          // Maximum width for the original image. 1920 is a common width for
          // full HD resolution, suitable for fullscreen display on most devices.
          const maxWidth = 1920;

          // Resize and compress the original image (any existing file is overwritten)
          await sharp(file.path)
            .resize({
              width: maxWidth,
              fit: sharp.fit.inside,
              withoutEnlargement: true, // Do not enlarge images smaller than maxWidth
            })
            .jpeg({ quality: 80 }) // 80 is a good balance between quality and file size
            .toFile(originalPath); // Saves the optimized original image under the new filename

          // Resize to at most 320 pixels on the longest side for the thumbnail
          await sharp(file.path)
            .resize(320, 320, {
              fit: sharp.fit.inside,
              withoutEnlargement: true,
            })
            .toFile(thumbPath); // Saves the thumbnail

          const originalUrl = `/uploads/${newFilename}`;
          const thumbUrl = `/uploads/thumb/${newFilename}`;

          try {
            // Delete the temporary uploaded file
            fs.unlinkSync(file.path);
          } catch (err) {
            console.error("Error deleting file:", err);
          }
          return { originalUrl, thumbUrl };
        }));

        // Respond with the array of processed files
        res.json(processedFiles);
      } catch (error) {
        console.error("Error processing files:", error);
        res.status(500).json({ error: "Error processing files." });
      }
    });
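    // Example client call for /upload (illustrative only — the field names
    // `image` and `prefix` come from the multer configuration above, and the
    // response is the array produced by res.json(processedFiles)):
    //
    //   const form = new FormData();
    //   form.append("prefix", "gallery");
    //   form.append("image", fileInput.files[0]);
    //   const resp = await fetch("/upload", { method: "POST", body: form });
    //   const uploaded = await resp.json(); // [{ originalUrl, thumbUrl }, ...]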
    // Static route to serve uploaded images
    server.use("/uploads", express.static(path.join(__dirname, "public/content/uploads")));
    // server.use('/guidelines', express.static(path.join(__dirname, 'public/content/guidelines')));

    server.get("/uploads-list", (req, res) => {
      const uploadsDir = path.join(__dirname, "public/content/uploads");
      fs.readdir(uploadsDir, (err, files) => {
        if (err) {
          console.error("Error reading uploads directory:", err);
          return res.status(500).json({ error: "Internal Server Error" });
        }
        const imageUrls = files.map((file) => `${req.protocol}://${req.get("host")}/content/uploads/${file}`);
        res.json({ imageUrls });
      });
    });

    server.get("/getDocxFile/:year/:month", async (req, res) => {
      // let { year = '2023', month = parseInt(req.params.month || '5') - 1 } = req.params;
      const year = req.params.year;
      let month = parseInt(req.params.month) - 1;
      // const fromDate = new Date(year, month, 1); // month is 0 based
      // to the last day of the month; special case: December
      // const toDate = new Date(year, month + 1, 0); // month is 0 based
      const monthInfo = common.getMonthDatesInfo(new Date(year, month, 1));
      const fromDate = monthInfo.firstMonday;
      const toDate = monthInfo.lastSunday;
      console.log("getting word file for month: " + month + " year: " + year);

      var shifts = await prisma.shift.findMany({
        where: {
          isactive: true,
          startTime: {
            gte: fromDate,
            lt: toDate,
          },
        },
        include: {
          assignments: {
            where: {},
            include: {
              publisher: true,
            },
          },
          cartEvent: {
            include: {
              location: true,
            },
          },
        },
      });

      let json = JSON.stringify(shifts);
      //fs.writeFileSync("./content/shifts.json", json, "utf8");
      //res.send(shifts);

      // Transform the source data to the format needed for the Word template:
      // group the shifts by day and time
      const groupedShifts = {};
      const startDate = new Date(shifts[0].startTime);
      const monthName = common.getMonthName(shifts[0].startTime.getMonth());
      let i = 0;
      try {
        for (const shift of shifts) {
          i++;
          const date = new Date(shift.startTime);
          const day = common.getISODateOnly(date);
          const time = common.getTimeRange(shift.startTime, shift.endTime); //common.getLocalTime(date);
          if (!groupedShifts[day]) {
            groupedShifts[day] = {};
          }
          if (!groupedShifts[day][time]) {
            groupedShifts[day][time] = [];
          }
          let shiftSchedule = {
            date: date,
            placeOfEvent: shift.cartEvent.location.name,
            time: time,
            // bold the text after "-" in the notes
            notes: shift.notes.substring(0, shift.notes.indexOf("-") + 1),
            notes_bold: shift.notes.substring(shift.notes.indexOf("-") + 1),
            names: shift.assignments
              .map((assignment) => {
                return (
                  assignment.publisher.firstName + " " + assignment.publisher.lastName
                );
              })
              .join(", "),
          };
          groupedShifts[day][time].push(shiftSchedule);
        }
      } catch (err) {
        // i has already been incremented, so the failing shift is at i - 1
        console.log(err + " " + JSON.stringify(shifts[i - 1]));
      }
      // Create the output object in the format of the second JSON file
      const monthlySchedule = {
        month: monthName,
        year: startDate.getFullYear(),
        events: [],
      };
      for (const day in groupedShifts) {
        var dayEvent = null;
        for (const time in groupedShifts[day]) {
          if (dayEvent == null) {
            const shift = groupedShifts[day][time][0];
            if (!shift) {
              console.log("shift is null");
              continue;
            }
            let weekday = common.getDayOfWeekName(shift.date);
            weekday = weekday.charAt(0).toUpperCase() + weekday.slice(1);
            let weekNr = common.getWeekNumber(shift.date);
            console.log("weekday = " + weekday, " weekNr = " + weekNr);
            dayEvent = {
              week: weekNr,
              dayOfWeek: weekday,
              dayOfMonth: shift.date.getDate(),
              placeOfEvent: shift.placeOfEvent,
              shifts: [],
              //transport: shift.notes,
            };
          }
          dayEvent.shifts.push(...groupedShifts[day][time]);
        }
        monthlySchedule.events.push(dayEvent);
      }

      month += 1;
      const outputPath = path.join(process.cwd(), "public", "content", "output");
      if (!fs.existsSync(outputPath)) {
        fs.mkdirSync(outputPath, { recursive: true });
      }
      fs.writeFileSync(path.join(outputPath, `shifts ${year}.${month}.json`), JSON.stringify(monthlySchedule), "utf8");

      // Load the Handlebars template from a file
      const template = fs.readFileSync("./src/templates/word.html", "utf8");
      const filename = path.join(outputPath, `schedule ${year}.${month}.html`);
      generateWordFile(filename, monthlySchedule, template).then((result) => {
        fs.writeFileSync(path.join(outputPath, `Schedule ${year}.${month}.docx`), result);
        // const buffer = Buffer.from(await docx.arrayBuffer());
        // const formattedDate = new Date().toISOString().replace(/[:T-]/g, '').slice(0, -5);
        // const filename = outputFilename + `/График ${formattedDate}.docx`;
        // fs.writeFileSync(filename, buffer);
        res.setHeader(
          "Content-Type",
          "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
        );
        res.setHeader(
          "Content-Disposition",
          // Template literal so the year and month are actually interpolated
          `attachment; filename=Grafik ${year}.${month}.docx`
        );
        res.end(result);
      });
    });
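    // For reference, the `monthlySchedule` object handed to generateWordFile()
    // and the Handlebars template above has roughly this shape (values are
    // illustrative placeholders, not real data):
    //
    //   {
    //     month: "...",            // from common.getMonthName()
    //     year: 2024,
    //     events: [
    //       {
    //         week: 14,
    //         dayOfWeek: "...",    // capitalized weekday name
    //         dayOfMonth: 1,
    //         placeOfEvent: "...",
    //         shifts: [
    //           { date: ..., placeOfEvent: "...", time: "...",
    //             notes: "...", notes_bold: "...", names: "First Last, First Last" }
    //         ]
    //       }
    //     ]
    //   }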
    // test email sending
    server.get("/testemail", (req, res) => {
      email.SendTestEmail("doby_p@abv.bg").then((result) => {
        // Respond only once the send has completed (avoids writing to the response twice)
        res.send(result || email.GetLastResult());
      });
    });

    server.get("/sendmails/:year/:month", async (req, res) => {
      const year = req.params.year;
      const month = parseInt(req.params.month) - 1;
      const fromDate = new Date(year, month, 1); // month is 0 based
      // to the last day of the month; special case: December
      const toDate = new Date(year, month + 1, 0); // month is 0 based
      // toDate.setMonth(fromDate.getMonth() + 1);

      var publishers = await prisma.publisher.findMany({
        where: {
          isactive: true,
          email: {
            not: "",
          },
          assignments: {
            some: {
              shift: {
                startTime: {
                  gte: fromDate,
                  lt: toDate,
                },
              },
            },
          },
        },
        include: {
          assignments: {
            where: {
              shift: {
                startTime: {
                  gte: fromDate,
                  lt: toDate,
                },
              },
            },
            include: {
              shift: {
                include: {
                  cartEvent: {
                    include: {
                      location: true,
                    },
                  },
                },
              },
            },
          },
          // shifts: {
          //   where: {
          //     startTime: {
          //       gte: fromDate,
          //       lt: toDate,
          //     },
          //   },
          //   include: {
          //     cartEvent: {
          //       include: {
          //         location: true,
          //       },
          //     },
          //   },
          // },
        },
      });

      if (publishers.length == 0) {
        res.send("no publishers found");
        res.end();
        return;
      }

      // For now only the test publisher receives mail; sending to everyone is commented out below
      var testPublisher = publishers.find((p) => p.email == "dobromir.popov@gmail.com");
      if (testPublisher != null) {
        // inform about all assignments which have not been sent yet
        var shifts = testPublisher.assignments
          .filter((a) => !a.isMailSent)
          .map((a) => a.shift);
        if (shifts.length == 0) {
          res.send("no new shifts for " + testPublisher.email);
          res.end();
          return;
        }
        email.SendEmail_NewShifts(testPublisher, shifts)
          .then((result) => {
            console.log(
              "sending email to " + testPublisher.email +
              " for " + shifts.length + " shifts: " +
              shifts.map((s) => s.id).join(",") +
              " result = " + result
            );
            // mark all assignments as sent
            testPublisher.assignments.forEach(async (a) => {
              var updated = await prisma.assignment.update({
                where: { id: a.id },
                data: { isMailSent: true },
              });
              console.log("updated assignment " + a.id + " isMailSent = " + updated.isMailSent);
            });
            res.send(result || email.GetLastResult());
          });
      }

      // send emails to all publishers
      // publishers.forEach((publisher) => {
      //   email
      //     .SendEmail_NewShifts(publisher, publisher.assignments)
      //     .then((result) => {
      //       res.send(result);
      //       res.end(email.GetLastResult());
      //     });
      // });
    });

    // --------------- EXCEL EXPORT ROUTE ----------------
    server.get("/generatexcel/:year/:month/:process", async (req, res) => {
      await excel.GenerateExcel(req, res);
    });

    server.get("/p/test/:id", (req, res) => {
      const actualPage = "/post";
      const queryParams = { title: req.params.id };
      return res.end("custom express route for /p/test/:id");
    });

    // Hand everything else to the Next.js request handler
    server.get("*", (req, res) => {
      return handle(req, res);
    });
    server.post("*", (req, res) => {
      return handle(req, res);
    });
    server.put("*", (req, res) => {
      return handle(req, res);
    });
    server.patch("*", (req, res) => {
      return handle(req, res);
    });
    server.delete("*", (req, res) => {
      return handle(req, res);
    });

    // check if SSL is enabled
    if (process.env.SSL_ENABLED === "true") {
      console.log("SSL_ENABLED = true");
      // Redirect from http to https
      // server.use((req, res, next) => {
      //   if (req.headers['x-forwarded-proto'] !== 'https') {
      //     return res.redirect(`https://${req.headers.host}${req.url}`);
      //   }
      //   next();
      // });
      if (process.env.SSL_KEY && process.env.SSL_CERT) {
        const options = {
          key: fs.readFileSync(process.env.SSL_KEY),
          cert: fs.readFileSync(process.env.SSL_CERT),
          secureProtocol: "TLSv1_2_method", // Example: force TLS 1.2
        };
        https.createServer(options, server).listen(PORT, () => {
          console.log(`> Ready on https://${HOST}:${PORT}`);
        });
      }
    } else {
      server.listen(PORT, (err) => {
        if (err) throw err;
        console.log(`> Ready on ${PROTOCOL}://${HOST}:${PORT}`);
      });
    }
  })
  .catch((ex) => {
    console.warn(`Error starting server on ${HOST}:${PORT}`);
    console.error(ex.stack);
    process.exit(1);
  });

// Read a Word file from disk and parse its content. The file contains separate
// tables for each day of the month; it is used as a template for the next month's schedule.
const generateWordFile = async (outputFilename, data, templateSrc) => {
  const handlebars = require("handlebars");
  const htmlDocx = require("html-docx-js");

  // Compile the Handlebars template
  const template = handlebars.compile(templateSrc);
  // Generate the HTML output using the template and the events data
  const html = template(data);
  // Save the generated HTML to disk (the caller already passes a path ending in .html)
  fs.writeFileSync(outputFilename, html, "utf8");

  // Convert the HTML to DOCX using html-docx-js
  const docx = htmlDocx.asBlob(html, {
    orientation: "portrait",
    margins: { top: 720, bottom: 720, left: 720, right: 720 },
    pageNumberStart: 1,
    pageNumberFormatType: "cardinalText",
    pageSize: {
      width: 12240,
      height: 15840,
    },
  });
  const buffer = Buffer.from(await docx.arrayBuffer());
  // const formattedDate = new Date().toISOString().replace(/[:T-]/g, '').slice(0, -5);
  // const filename = outputFilename + `/График ${formattedDate}.docx`;
  // fs.writeFileSync(filename, buffer);
  return buffer;
};

// #################### statistics ####################
const telegramBot = require("./src/telegram");

async function Stat() {
  var date = new Date();
  const monthInfo = common.getMonthDatesInfo(new Date(date.getFullYear(), date.getMonth(), 1));

  // get all shifts for the previous 2 months
  //console.log("finding shifts for previous 3 months for statistics (between " + new Date(date.getFullYear(), date.getMonth() - 2, 1) + " and " + monthInfo.firstMonday + ")");
  var previousShifts = await prisma.shift.findMany({
    where: {
      startTime: {
        gte: new Date(date.getFullYear(), date.getMonth() - 2, 1),
        lt: monthInfo.firstMonday,
      },
    },
  });

  // if there are no locations, execute prisma/seed.sql to create them
  let locations = await prisma.location.findMany();
  if (locations.length == 0) {
    console.log("no locations found, seeding the database.");
    // we execute prisma/seed.sql to create them; use Prisma to run the SQL query
    data.runSqlFile(path.join(__dirname, "prisma/seed.sql"));
    locations = await prisma.location.findMany();
  }

  telegramBot.Initialize();
  console.log("found " + previousShifts.length + " shifts for previous 2 months");
}
Stat().catch((err) => console.error("Stat() failed:", err));

// Note: this captures the value at module load time, before any request has set it;
// consumers may prefer to read baseUrl.txt instead.
exports.baseUrlGlobal = baseUrlGlobal;
exports.default = app;
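// Example startup commands (illustrative only — the actual entry-point name,
// scripts, and env files depend on package.json and the .env.* files, which
// are not shown here):
//   NODE_ENV=development node server.js   # loads .env.development
//   NODE_ENV=production node server.js    # loads .env.production (run `next build` first)
//   SSL_ENABLED=true SSL_KEY=./key.pem SSL_CERT=./cert.pem NODE_ENV=production node server.js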