Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: adjust last update time logic while the menu is offline #41

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions app/controllers/meals_controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ export default class MealsController {
const lastHash = await WebsiteHash.query()
.orderBy("updatedAt", "desc")
.first();
// This case is somewhat odd now, but it occurs very rarely
if (lastHash === null) {
return response
.status(200)
Expand Down
17 changes: 10 additions & 7 deletions scripts/menu_scrapper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,20 @@ export const url = "https://sks.pwr.edu.pl/menu/";

export async function runScrapper() {
const trx = await db.transaction();
const response = await fetch(url);
const data = await response.text();
const $ = cheerio.load(data);

try {
const currentHash = await cacheMenu();
const storedHash = await WebsiteHash.query()
.where("hash", currentHash)
.first();

if (storedHash !== null) {
await storedHash.merge({ updatedAt: DateTime.now() }).save();
if ($("#menu_table").text().trim() === "") {
await storedHash.merge({ updatedAt: DateTime.now() }).save();
}
logger.info(
"Hash already exists in the database. Not proceeding with scraping.",
);
Expand All @@ -33,7 +39,7 @@ export async function runScrapper() {
{ hash: currentHash },
{ client: trx },
);
const meals = await scrapeMenu();
const meals = await scrapeMenu(data);

for (const meal of meals) {
if (meal.price === 0) {
Expand Down Expand Up @@ -64,10 +70,8 @@ export async function runScrapper() {
}
}

export async function scrapeMenu() {
const response = await fetch(url);
const data = await response.text();
const $ = cheerio.load(data);
export async function scrapeMenu(html: string) {
const $ = cheerio.load(html);

return $(".category")
.map((_, category) => {
Expand Down Expand Up @@ -106,7 +110,6 @@ export async function scrapeMenu() {
export async function cacheMenu() {
const response = await fetch(url);
const data = await response.text();
console.log(data);
return createHash("sha256").update(data).digest("hex");
}

Expand Down
16 changes: 8 additions & 8 deletions tests/unit/menuScrapper/scrape_menu.spec.ts
Original file line number Diff line number Diff line change
@@ -1,20 +1,20 @@
import nock from "nock";
import fs from "node:fs";
import path from "node:path";

import { test } from "@japa/runner";

import { expectedResponse } from "#tests/fixtures/parsed_menu_expected_response";

import { scrapeMenu, url } from "../../../scripts/menu_scrapper.js";
import { scrapeMenu } from "../../../scripts/menu_scrapper.js";

test.group("Menu scrapper scrape menu", () => {
test("should parse the external menu response", async ({ assert }) => {
nock(url)
.get("/")
.replyWithFile(200, "./tests/fixtures/external_menu_response.html", {
"Content-Type": "text/html; charset=UTF-8",
});
const htmlResponse = fs.readFileSync(
path.resolve("./tests/fixtures/external_menu_response.html"),
"utf8",
);

const response = await scrapeMenu();
const response = await scrapeMenu(htmlResponse);
assert.deepEqual(response, expectedResponse);
});
});
Loading