
Commit

fix: fixed logger
mbret committed Mar 17, 2024
1 parent a696008 commit 6c19693
Showing 18 changed files with 187 additions and 93 deletions.
132 changes: 127 additions & 5 deletions package-lock.json

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions package.json
@@ -67,6 +67,7 @@
"@emotion/cache": "^11.11.0",
"@mui/material-nextjs": "^5.15.11",
"arg": "^5.0.2",
"pino": "^8.19.0",
"react-markdown": "^9.0.1",
"reactjrx": "^1.79.1",
"remark-gfm": "^4.0.0",
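The dependency change above is the heart of the commit: the shared Logger now appears to be backed by pino, and the remaining files rename Logger.namespace(...) to Logger.child({ ... }) and Logger.log(...) to Logger.info(...), which matches pino's API (child loggers carry bindings, and the level methods are trace/debug/info/warn/error/fatal rather than log). The @libs/logger module itself is not among the rendered hunks, so the following is only a minimal sketch of what it could look like; the LOG_LEVEL knob is an assumption.

import pino from "pino"

// Root logger shared across the API package (sketch, not the repo's actual module).
export const Logger = pino({
  level: process.env.LOG_LEVEL ?? "info"
})

// Call sites derive per-module child loggers, e.g.:
//   const logger = Logger.child({ module: "retrieveMetadataAndSaveCover" })
//   logger.info("syncMetadata run")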
4 changes: 3 additions & 1 deletion packages/api/src/functions/refreshMetadata/handler.ts
@@ -39,4 +39,6 @@ const lambda: ValidatedEventAPIGatewayProxyEvent<typeof schema> = async (
}
}

- export const main = withMiddy(lambda)
+ export const main = withMiddy(lambda, {
+   schema
+ })
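The handler now passes its schema to withMiddy. The wrapper's implementation is not part of this diff, so the sketch below is hypothetical: it only illustrates how a schema option like this could be wired into middy's validator middleware, and the option shape and middleware choices are assumptions based on this call site.

import middy from "@middy/core"
import httpJsonBodyParser from "@middy/http-json-body-parser"
import validator from "@middy/validator"
import { transpileSchema } from "@middy/validator/transpile"
import type { Handler } from "aws-lambda"

type WithMiddyOptions = {
  schema?: Record<string, unknown>
}

export const withMiddy = (handler: Handler, options: WithMiddyOptions = {}) => {
  const wrapped = middy(handler).use(httpJsonBodyParser())

  // Only validate incoming events when the caller supplies a schema,
  // as the refreshMetadata handler above now does.
  if (options.schema) {
    wrapped.use(validator({ eventSchema: transpileSchema(options.schema) }))
  }

  return wrapped
}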
@@ -73,7 +73,7 @@ const lambda: ValidatedEventAPIGatewayProxyEvent<typeof schema> = async (

await deleteLock(supabase, lockId)

- Logger.log(`lambda executed with success for ${collection._id}`)
+ Logger.info(`lambda executed with success for ${collection._id}`)

return {
statusCode: 200,
@@ -114,7 +114,7 @@ const lambda: ValidatedEventAPIGatewayProxyEvent<typeof schema> = async (
deleteLock(supabase, lockId)
])

- Logger.log(`lambda executed with success for ${book._id}`)
+ Logger.info(`lambda executed with success for ${book._id}`)

return {
statusCode: 200,
12 changes: 6 additions & 6 deletions packages/api/src/libs/books/retrieveMetadataAndSaveCover.ts
@@ -23,7 +23,7 @@ import { isBookProtected } from "@libs/couch/isBookProtected"
import nano from "nano"
import { atomicUpdate } from "@libs/couch/dbHelpers"

- const logger = Logger.namespace("retrieveMetadataAndSaveCover")
+ const logger = Logger.child({module: "retrieveMetadataAndSaveCover"})

export type RetrieveMetadataAndSaveCoverContext = {
userName: string
@@ -39,7 +39,7 @@ export const retrieveMetadataAndSaveCover = async (
db: nano.DocumentScope<unknown>
}
) => {
- logger.log(
+ logger.info(
`syncMetadata run for user ${ctx.userName} with book ${ctx.book._id}`
)
let bookNameForDebug = ""
@@ -49,7 +49,7 @@
try {
bookNameForDebug = reduceMetadata(ctx.book.metadata).title || ""

- logger.log(
+ logger.info(
`syncMetadata processing ${ctx.book._id} with resource id ${ctx.link.resourceId}`
)

@@ -189,8 +189,8 @@
)
.sort()[0]

- Logger.log(`coverRelativePath`, coverRelativePath)
- Logger.log(`opfBasePath`, opfBasePath)
+ logger.info(`coverRelativePath`, coverRelativePath)
+ logger.info(`opfBasePath`, opfBasePath)

metadataList.push({
type: "file",
@@ -210,7 +210,7 @@
console.log(`No cover path found for ${tmpFilePath}`)
}
} else {
- logger.log(
+ logger.info(
`${contentType} cannot be extracted to retrieve information (cover, etc)`
)
}
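A side note on the two cover-path log lines above: pino treats a leading object argument as structured bindings, while arguments after the message string are printf-style interpolation data rather than merged fields. Assuming logger here is a pino child logger, the more conventional form would be a single call with a merging object; the message text below is only illustrative.

// Emits the paths as JSON fields on the log line instead of positional arguments.
logger.info({ coverRelativePath, opfBasePath }, "resolved cover path from opf")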
6 changes: 3 additions & 3 deletions packages/api/src/libs/books/saveCoverFromArchiveToBucket.ts
@@ -5,7 +5,7 @@ import { Logger } from "@libs/logger"
import { saveCoverFromBufferToBucket } from "./saveCoverFromBufferToBucket"
import { asError } from "@libs/utils"

- const logger = Logger.namespace("saveCoverFromArchiveToBucket")
+ const logger = Logger.child({ module: "saveCoverFromArchiveToBucket" })

type Context = {
userNameHex: string
@@ -27,7 +27,7 @@ export const saveCoverFromArchiveToBucket = async (
folderBasePath === `` ? coverPath : `${folderBasePath}/${coverPath}`
const objectKey = `cover-${ctx.userNameHex}-${book._id}`

- logger.log(`prepare to save cover ${objectKey}`)
+ Logger.info(`prepare to save cover ${objectKey}`)

const zip = fs
.createReadStream(epubFilepath)
@@ -40,7 +40,7 @@

await saveCoverFromBufferToBucket(entryAsBuffer, objectKey)

- logger.log(`cover ${objectKey} has been saved/updated`)
+ Logger.info(`cover ${objectKey} has been saved/updated`)
} else {
entry.autodrain()
}
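The truncated pipeline above (fs.createReadStream(epubFilepath).pipe(...) with entry.autodrain() for skipped entries) looks like unzipper's Parse stream. Under that assumption, here is a sketch of pulling a single archive entry into a buffer; the function and parameter names are illustrative, not the repo's.

import fs from "node:fs"
import { Parse, type Entry } from "unzipper"

const extractEntryAsBuffer = (epubFilepath: string, coverPathInArchive: string) =>
  new Promise<Buffer | undefined>((resolve, reject) => {
    fs.createReadStream(epubFilepath)
      .pipe(Parse())
      .on("entry", async (entry: Entry) => {
        if (entry.path === coverPathInArchive) {
          // Buffer only the entry we care about so it can be uploaded.
          resolve(await entry.buffer())
        } else {
          // Drain everything else to keep the zip stream flowing.
          entry.autodrain()
        }
      })
      .on("error", reject)
      .on("close", () => resolve(undefined))
  })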
@@ -3,7 +3,7 @@ import { Logger } from "@libs/logger"
import axios from "axios"
import { saveCoverFromBufferToBucket } from "./saveCoverFromBufferToBucket"

- const logger = Logger.namespace("saveCoverFromExternalLinkToBucket")
+ const logger = Logger.child({module: "saveCoverFromExternalLinkToBucket"})

type Context = {
userNameHex: string
@@ -16,7 +16,7 @@ export const saveCoverFromExternalLinkToBucket = async (
) => {
const objectKey = `cover-${ctx.userNameHex}-${book._id}`

- logger.log(`prepare to save cover ${objectKey}`)
+ Logger.info(`prepare to save cover ${objectKey}`)

try {
// @todo request is deprecated, switch to something else
@@ -28,7 +28,7 @@

await saveCoverFromBufferToBucket(entryAsBuffer, objectKey)

- Logger.log(`cover ${objectKey} has been saved/updated`)
+ Logger.info(`cover ${objectKey} has been saved/updated`)
} catch (e) {
logger.error(e)
}
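The hidden lines in this file presumably download the remote cover into entryAsBuffer before calling saveCoverFromBufferToBucket (the file imports axios). A sketch of such a download step with axios; the function name and URL parameter are assumptions.

import axios from "axios"

const downloadCoverAsBuffer = async (coverUrl: string) => {
  // "arraybuffer" makes axios return raw bytes instead of a parsed body.
  const response = await axios.get(coverUrl, { responseType: "arraybuffer" })

  return Buffer.from(response.data)
}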
6 changes: 3 additions & 3 deletions packages/api/src/libs/collections/refreshMetadata.ts
@@ -24,7 +24,7 @@ export const refreshMetadata = async (
}
) => {
if (collection.type !== "series") {
- Logger.log(`Ignoring ${collection._id} because of type ${collection.type}`)
+ Logger.info(`Ignoring ${collection._id} because of type ${collection.type}`)

return
}
@@ -74,7 +74,7 @@
*/

if (soft && isCollectionAlreadyUpdatedFromLink) {
- Logger.log(`${collection._id} already has metadata, ignoring it!`)
+ Logger.info(`${collection._id} already has metadata, ignoring it!`)

return {
statusCode: 200,
@@ -83,7 +83,7 @@
}

if (soft && !metadataLink && collection.lastMetadataUpdatedAt) {
- Logger.log(
+ Logger.info(
`${collection._id} does not have link and is already refreshed, ignoring it!`
)

