fix(catalogue): Entries with no associated companies
Princesseuh committed Jul 4, 2024
1 parent c9a6383 commit 70a710a
Showing 7 changed files with 40 additions and 21 deletions.
29 changes: 16 additions & 13 deletions db/seed.ts
@@ -6,16 +6,16 @@ import type { LocalImageServiceWithPlaceholder } from "src/imageService";

// https://astro.build/db/seed
export default async function seed() {
  await prepareDB();
}

export async function prepareDB() {
  const games = await getCollection("games");
  const movies = await getCollection("movies");
  const shows = await getCollection("shows");
  const books = await getCollection("books");

  const catalogueContent = [...games, ...movies, ...shows, ...books];
  for (const entry of catalogueContent) {
    await addCatalogueEntry(entry);
  }
@@ -26,16 +26,19 @@ export async function addCatalogueEntry(entry: allCatalogueTypes) {
  const [processedCover, placeholderURL] = await getCoverAndPlaceholder(cover);
  const metadata = await getCatalogueData(entry);

  const author = getAuthorFromEntryMetadata(type, metadata);

-  const coverId = await db.insert(Cover).values({
-    src: processedCover.src,
-    width: processedCover.attributes.width,
-    height: processedCover.attributes.height,
-    placeholder: placeholderURL,
-  }).returning({ id: Cover.id });
+  const coverId = await db
+    .insert(Cover)
+    .values({
+      src: processedCover.src,
+      width: processedCover.attributes.width,
+      height: processedCover.attributes.height,
+      placeholder: placeholderURL,
+    })
+    .returning({ id: Cover.id });

  const firstCoverId = coverId[0]?.id ?? -1;

  const insertData = {
    type: type,
@@ -71,11 +74,11 @@ async function getCoverAndPlaceholder(cover: ImageMetadata) {
function getAuthorFromEntryMetadata(type: CatalogueType, metadata: any) {
  switch (type) {
    case "game":
-      return metadata.companies[0].name;
+      return metadata?.companies?.[0]?.name ?? "Unknown";
    case "book":
      return metadata.authors[0] ?? "Unknown";
    case "movie":
    case "show":
-      return metadata.companies[0];
+      return metadata?.companies[0]?.name ?? "Unknown";
  }
}
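
The author lookup is where entries with no associated companies used to fail: metadata.companies[0].name throws as soon as the companies array is empty. With optional chaining and a nullish-coalescing fallback, the seed step now degrades to a placeholder author instead. A minimal sketch of the before/after behaviour, using hypothetical metadata objects rather than code from this repository:

// Hypothetical metadata shapes, for illustration only.
const withCompanies = { companies: [{ id: 1, name: "FromSoftware" }] };
const withoutCompanies = { companies: [] as { id: number; name: string }[] };

// Old lookup: crashes when the array is empty.
// withoutCompanies.companies[0].name
// -> TypeError: Cannot read properties of undefined (reading 'name')

// New lookup: short-circuits on the missing element and falls back to a default.
const a = withCompanies.companies?.[0]?.name ?? "Unknown";    // "FromSoftware"
const b = withoutCompanies.companies?.[0]?.name ?? "Unknown"; // "Unknown"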
2 changes: 1 addition & 1 deletion scripts/catalogueUtils.ts
@@ -6,7 +6,7 @@ export function getContentDirs(type: "games" | "books" | "movies" | "shows"): URL[] {
  const dirs = fs
    .readdirSync(dirPath, { withFileTypes: true })
    .filter((dir) => dir.isDirectory())
-    .map((dir) => new URL(dir.name + "/", dirPath));
+    .map((dir) => new URL(`${dir.name}/`, dirPath));

  return dirs;
}
4 changes: 2 additions & 2 deletions scripts/getInfoCoverBook.ts
@@ -32,11 +32,11 @@ export async function getDataForBooks() {
    Logger.info(`Getting data for ${bold(dirBasename)}...`);
    const dataFilePath = new URL("./_data.json", bookDir);
    if (fs.existsSync(dataFilePath)) {
-      Logger.info(gray(`Data already exists, skipping...`));
+      Logger.info(gray("Data already exists, skipping..."));
      continue;
    }

-    const markdownContent = fs.readFileSync(new URL(dirBasename + ".mdoc", bookDir)).toString();
+    const markdownContent = fs.readFileSync(new URL(`${dirBasename}.mdoc`, bookDir)).toString();
    const isbn = matter(markdownContent).data.isbn;
    const response = await fetch(
      `https://openlibrary.org/api/books?bibkeys=ISBN:${isbn}&jscmd=details&format=json`,
6 changes: 3 additions & 3 deletions scripts/getInfoCoverGame.ts
@@ -50,12 +50,12 @@ export async function getDataForGames() {

    Logger.info(`Getting data for ${bold(dirBasename)}...`);
    if (fs.existsSync(dataFilePath)) {
-      Logger.info(gray(`Data already exists, skipping...`));
+      Logger.info(gray("Data already exists, skipping..."));
      continue;
    }

    const markdownContent = fs
-      .readFileSync(new URL(path.basename(gameDir.pathname) + ".mdoc", gameDir))
+      .readFileSync(new URL(`${path.basename(gameDir.pathname)}.mdoc`, gameDir))
      .toString();
    const gameID = matter(markdownContent).data.igdb;
    const response = await client
@@ -77,7 +77,7 @@ export async function getDataForGames() {
    const { id, cover, involved_companies, ...cleanedData } = gameData;
    const resultData = {
      ...cleanedData,
-      companies: gameData.involved_companies
+      companies: (gameData?.involved_companies ?? [])
        .filter((company) => !company.supporting)
        .map((company) => ({
          id: company.company.id,
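
On the data-fetching side, the same class of entry is handled by defaulting involved_companies to an empty array, so a game that IGDB lists without any company still produces a valid _data.json with an empty companies list, the same shape as the Daughters of Ash fixture added below. A rough sketch under assumed, simplified types (the real IGDB response shape and the rest of the mapping are not shown in this diff):

// Simplified, assumed shapes for illustration; not the project's actual types.
type InvolvedCompany = { supporting: boolean; company: { id: number; name: string } };
type GameData = { id: number; name: string; involved_companies?: InvolvedCompany[] };

// A mod like Daughters of Ash can come back with no involved_companies at all.
const gameData: GameData = { id: 0 /* placeholder */, name: "Dark Souls: Daughters of Ash" };

// Old code: gameData.involved_companies.filter(...) throws on undefined.
// New code: the fallback turns the missing field into an empty list.
const companies = (gameData?.involved_companies ?? [])
  .filter((company) => !company.supporting)
  .map((company) => ({ id: company.company.id, name: company.company.name }));

console.log(companies); // []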
4 changes: 2 additions & 2 deletions scripts/getInfoCoverMovieShow.ts
@@ -29,12 +29,12 @@ export async function getDataForMoviesAndShows(type: "movies" | "shows") {

    Logger.info(`Getting data for ${type}/${bold(dirBasename)}...`);
    if (fs.existsSync(dataFilePath)) {
-      Logger.info(gray(`Data already exists, skipping...`));
+      Logger.info(gray("Data already exists, skipping..."));
      continue;
    }

    const markdownContent = fs
-      .readFileSync(new URL(path.basename(movieShowDir.pathname) + ".mdoc", movieShowDir))
+      .readFileSync(new URL(`${path.basename(movieShowDir.pathname)}.mdoc`, movieShowDir))
      .toString();
    const id = matter(markdownContent).data.tmdb;
    const response = (await fetch(
16 changes: 16 additions & 0 deletions src/content/games/dark-souls-daughters-of-ash/_data.json
@@ -0,0 +1,16 @@
+{
+  "first_release_date": 1546387200,
+  "genres": [
+    {
+      "id": 31,
+      "name": "Adventure"
+    }
+  ],
+  "platforms": [
+    {
+      "id": 6,
+      "abbreviation": "PC"
+    }
+  ],
+  "companies": []
+}