Skip to content

Commit

Permalink
Merge pull request #1362 from nickgros/SWC-7064e
Browse files Browse the repository at this point in the history
  • Loading branch information
nickgros authored Nov 12, 2024
2 parents 123c604 + 1f072f9 commit cadec49
Show file tree
Hide file tree
Showing 17 changed files with 2,487 additions and 529 deletions.
4 changes: 2 additions & 2 deletions apps/SageAccountWeb/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,8 @@
"universal-cookie": "^4.0.4"
},
"devDependencies": {
"@testing-library/jest-dom": "^6.4.6",
"@testing-library/react": "^16.0.0",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.0.1",
"@testing-library/user-event": "^14.5.2",
"@types/katex": "^0.5.0",
"@types/node": "^20.14.10",
Expand Down
2 changes: 1 addition & 1 deletion apps/synapse-oauth-signin/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
},
"devDependencies": {
"@sage-bionetworks/synapse-types": "workspace:*",
"@testing-library/react": "16.0.0",
"@testing-library/react": "16.0.1",
"@testing-library/user-event": "^14.5.2",
"@types/isomorphic-fetch": "^0.0.36",
"@types/jest": "^29.5.12",
Expand Down
4 changes: 2 additions & 2 deletions apps/synapse-portal-framework/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,8 @@
"type-check": "tsc --noEmit"
},
"devDependencies": {
"@testing-library/jest-dom": "^6.4.6",
"@testing-library/react": "^16.0.0",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.0.1",
"@testing-library/user-event": "^14.5.2",
"@types/katex": "^0.5.0",
"@types/lodash": "^4.17.0",
Expand Down
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@
"overrides": {
"word-wrap": "^1.2.4",
"semver": "^7.5.4",
"@types/react": "18.2.64",
"@types/react": "18.3.12",
"@types/react-dom": "18.3.1",
"goober": "2.1.9",
"react-hot-toast": "2.2.0",
"postcss": "^8.4.31"
Expand Down
2 changes: 2 additions & 0 deletions packages/synapse-react-client/jest.config.cjs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ const esModules = [
'lodash-es',
'nanoid',
'mui-one-time-password-input',
'p-limit',
'yocto-queue',
]

/** @type {import('jest').Config} */
Expand Down
7 changes: 4 additions & 3 deletions packages/synapse-react-client/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -157,9 +157,9 @@
"@storybook/testing-library": "^0.2.2",
"@storybook/theming": "^8.2.4",
"@svgr/plugin-jsx": "^8.1.0",
"@testing-library/dom": "^10.3.0",
"@testing-library/jest-dom": "^6.4.6",
"@testing-library/react": "^16.0.0",
"@testing-library/dom": "^10.4.0",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.0.1",
"@testing-library/user-event": "^14.5.2",
"@types/brainhubeu__react-carousel": "1.15.0",
"@types/dagre": "^0.7.52",
Expand Down Expand Up @@ -214,6 +214,7 @@
"memfs": "^3.5.3",
"msw": "^1.3.2",
"msw-storybook-addon": "^1.10.0",
"p-limit": "^6.1.0",
"path-browserify": "^1.0.1",
"postcss-normalize": "^10.0.1",
"prettier": "^2.8.8",
Expand Down
167 changes: 91 additions & 76 deletions packages/synapse-react-client/src/synapse-client/SynapseClient.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { JSONSchema7 } from 'json-schema'
import { memoize } from 'lodash-es'
import SparkMD5 from 'spark-md5'
import UniversalCookies from 'universal-cookie'
import {
Expand Down Expand Up @@ -2074,6 +2075,8 @@ export const uploadFile = (
contentType: string = file.type,
progressCallback?: (progress: ProgressCallback) => void,
getIsCancelled?: () => boolean,
onMd5Computed?: () => void,
abortController?: AbortController,
) => {
return new Promise<FileUploadComplete>(
(fileUploadResolve, fileUploadReject) => {
Expand All @@ -2089,6 +2092,9 @@ export const uploadFile = (
storageLocationId,
}
calculateMd5(file).then((md5: string) => {
if (onMd5Computed) {
onMd5Computed()
}
request.contentMD5Hex = md5
startMultipartUpload(
accessToken,
Expand All @@ -2099,54 +2105,57 @@ export const uploadFile = (
fileUploadReject,
progressCallback,
getIsCancelled,
abortController,
)
})
},
)
}

export const calculateMd5 = (fileBlob: File | Blob): Promise<string> => {
/**
* Calculate the MD5 of the data in a Blob. This function is memoized, so if the same {@link Blob} object is
* passed after the MD5 is computed, no computation will occur.
*/
export const calculateMd5: (blob: Blob) => Promise<string> = memoize(blob => {
// code taken from md5 example from library
return new Promise((resolve, reject) => {
const blobSlice = File.prototype.slice,
file = fileBlob,
chunkSize = 2097152, // Read in chunks of 2MB
chunks = Math.ceil(file.size / chunkSize),
spark = new SparkMD5.ArrayBuffer(),
fileReader = new FileReader()
const chunkSize = 2097152 // Read in chunks of 2M
const chunks = Math.ceil(blob.size / chunkSize)
const spark = new SparkMD5.ArrayBuffer()
const fileReader = new FileReader()
let currentChunk = 0

fileReader.onload = function (e) {
console.log('read chunk nr', currentChunk + 1, 'of', chunks)
console.debug('read chunk nr', currentChunk + 1, 'of', chunks)
spark.append(fileReader.result as ArrayBuffer) // Append array buffer
currentChunk++

if (currentChunk < chunks) {
loadNext()
} else {
console.log('finished loading')
console.debug('finished loading')
const md5: string = spark.end()
console.info('computed hash', md5) // Compute hash
console.debug('computed hash', md5) // Compute hash
resolve(md5)
}
}

fileReader.onerror = function () {
console.warn('oops, something went wrong.')
console.warn('oops, something went wrong.', fileReader.error)
reject(fileReader.error)
}

const loadNext = () => {
const start = currentChunk * chunkSize,
end = start + chunkSize >= file.size ? file.size : start + chunkSize
end = start + chunkSize >= blob.size ? blob.size : start + chunkSize

fileReader.readAsArrayBuffer(blobSlice.call(file, start, end))
fileReader.readAsArrayBuffer(blob.slice(start, end))
}
loadNext()
})
}
})

const processFilePart = (
const processFilePart = async (
partNumber: number,
multipartUploadStatus: MultipartUploadStatus,
clientSidePartsState: boolean[],
Expand All @@ -2158,6 +2167,7 @@ const processFilePart = (
fileUploadReject: (reason: any) => void,
updateProgress: () => void,
getIsCancelled?: () => boolean,
abortController?: AbortController,
) => {
if (clientSidePartsState![partNumber - 1]) {
// no-op. this part has already been processed!
Expand All @@ -2172,73 +2182,75 @@ const processFilePart = (
partNumbers: [partNumber],
}
const presignedUrlUrl = `/file/v1/file/multipart/${uploadId}/presigned/url/batch`
doPost<BatchPresignedUploadUrlResponse>(

const presignedUrlResponse = await doPost<BatchPresignedUploadUrlResponse>(
presignedUrlUrl,
presignedUploadUrlRequest,
accessToken,
BackendDestinationEnum.REPO_ENDPOINT,
).then(async (presignedUrlResponse: BatchPresignedUploadUrlResponse) => {
const presignedUrl =
presignedUrlResponse.partPresignedUrls[0].uploadPresignedUrl
// calculate the byte range
const startByte = (partNumber - 1) * request.partSizeBytes
let endByte = partNumber * request.partSizeBytes - 1
if (endByte >= request.fileSizeBytes) {
endByte = request.fileSizeBytes - 1
}
const fileSlice = file.slice(
startByte,
endByte + 1,
presignedUploadUrlRequest.contentType,
)
const presignedUrl =
presignedUrlResponse.partPresignedUrls[0].uploadPresignedUrl
// calculate the byte range
const startByte = (partNumber - 1) * request.partSizeBytes
let endByte = partNumber * request.partSizeBytes - 1
if (endByte >= request.fileSizeBytes) {
endByte = request.fileSizeBytes - 1
}
const fileSlice = file.slice(
startByte,
endByte + 1,
presignedUploadUrlRequest.contentType,
)
await uploadFilePart(
presignedUrl,
fileSlice,
presignedUploadUrlRequest.contentType,
getIsCancelled,
abortController,
)

// uploaded the part. calculate md5 of the part and add the part to the upload
const md5 = await calculateMd5(fileSlice)
const addPartUrl = `/file/v1/file/multipart/${uploadId}/add/${partNumber}?partMD5Hex=${md5}`
const addPartResponse = await doPut<AddPartResponse>(
addPartUrl,
undefined,
accessToken,
BackendDestinationEnum.REPO_ENDPOINT,
)
if (addPartResponse.addPartState === 'ADD_SUCCESS') {
// done with this part!
clientSidePartsState![partNumber - 1] = true
updateProgress()
checkUploadComplete(
multipartUploadStatus,
clientSidePartsState,
fileName,
accessToken,
fileUploadResolve,
fileUploadReject,
)
await uploadFilePart(
presignedUrl,
fileSlice,
presignedUploadUrlRequest.contentType,
} else {
// retry after a brief delay
await delay(1000)
await processFilePart(
partNumber,
multipartUploadStatus,
clientSidePartsState,
accessToken,
fileName,
file,
request,
fileUploadResolve,
fileUploadReject,
updateProgress,
getIsCancelled,
abortController,
)
// uploaded the part. calculate md5 of the part and add the part to the upload
calculateMd5(fileSlice).then((md5: string) => {
const addPartUrl = `/file/v1/file/multipart/${uploadId}/add/${partNumber}?partMD5Hex=${md5}`
doPut<AddPartResponse>(
addPartUrl,
undefined,
accessToken,
BackendDestinationEnum.REPO_ENDPOINT,
).then((addPartResponse: AddPartResponse) => {
if (addPartResponse.addPartState === 'ADD_SUCCESS') {
// done with this part!
clientSidePartsState![partNumber - 1] = true
updateProgress()
checkUploadComplete(
multipartUploadStatus,
clientSidePartsState,
fileName,
accessToken,
fileUploadResolve,
fileUploadReject,
)
} else {
// retry after a brief delay
delay(1000).then(() => {
processFilePart(
partNumber,
multipartUploadStatus,
clientSidePartsState,
accessToken,
fileName,
file,
request,
fileUploadResolve,
fileUploadReject,
updateProgress,
)
})
}
})
})
})
}
}

export const checkUploadComplete = (
status: MultipartUploadStatus,
clientSidePartsState: boolean[],
Expand Down Expand Up @@ -2277,14 +2289,15 @@ const uploadFilePart = async (
file: Blob,
contentType: string,
getIsCancelled?: () => boolean,
abortController?: AbortController,
) => {
const controller = new AbortController()
const controller = abortController || new AbortController()
const signal = controller.signal

const checkIsCancelled = () => {
if (getIsCancelled) {
const isCancelled = getIsCancelled()
if (isCancelled) {
if (isCancelled && !controller.signal.aborted) {
controller.abort()
}
}
Expand Down Expand Up @@ -2314,6 +2327,7 @@ export const startMultipartUpload = (
fileUploadReject: (reason: any) => void,
progressCallback?: (progress: ProgressCallback) => void,
getIsCancelled?: () => boolean,
abortController?: AbortController,
) => {
const url = '/file/v1/file/multipart'
doPost<MultipartUploadStatus>(
Expand Down Expand Up @@ -2353,6 +2367,7 @@ export const startMultipartUpload = (
fileUploadReject,
updateProgress,
getIsCancelled,
abortController,
)
} else {
updateProgress()
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import { SynapseClientError } from '@sage-bionetworks/synapse-client'
import { FileUploadComplete } from '@sage-bionetworks/synapse-types'
import { useMutation, UseMutationOptions } from '@tanstack/react-query'
import { uploadFile } from '../../synapse-client/SynapseClient'
import { useSynapseContext } from '../../utils/index'
import { FileUploadArgs } from './FileUploadArgs'

/**
 * Arguments accepted by the `useSynapseMultipartUpload` mutation: the shared
 * file-upload arguments plus the destination storage location.
 * `storageLocationId` may be `undefined` — presumably the backend then uses
 * the default storage location; TODO confirm against `uploadFile`'s contract.
 */
type UseSynapseMultipartUploadArgs = FileUploadArgs & {
  readonly storageLocationId: number | undefined
}

/**
 * React Query mutation hook that uploads a blob to Synapse using the
 * multipart upload protocol (delegates to `uploadFile` in SynapseClient).
 *
 * The access token is read from the Synapse context; all other upload inputs
 * are provided per-mutation via {@link UseSynapseMultipartUploadArgs}.
 *
 * @param options Optional React Query mutation options (callbacks, retry,
 *   etc.), spread into the underlying `useMutation` call.
 * @returns The mutation object; `mutate`/`mutateAsync` resolve to a
 *   `FileUploadComplete` on success.
 */
export function useSynapseMultipartUpload(
  options?: UseMutationOptions<
    FileUploadComplete,
    SynapseClientError,
    UseSynapseMultipartUploadArgs
  >,
) {
  const { accessToken } = useSynapseContext()
  return useMutation({
    ...options,
    mutationFn: ({
      blob,
      fileName,
      storageLocationId,
      contentType,
      progressCallback,
      abortController,
      onMd5Computed,
    }: UseSynapseMultipartUploadArgs) =>
      uploadFile(
        accessToken,
        fileName,
        blob,
        storageLocationId,
        contentType,
        progressCallback,
        // getIsCancelled is intentionally omitted; cancellation is driven by
        // the AbortController passed below.
        undefined,
        onMd5Computed,
        abortController,
      ),
  })
}
Loading

0 comments on commit cadec49

Please sign in to comment.