Skip to content

Commit

Permalink
Merge pull request #1071 from AI4Bharat/revert-1069-revert-1059-shoonya-backend-audio
Browse files Browse the repository at this point in the history

Revert "Revert "use shoonya-backend for audio data""
  • Loading branch information
ishvindersethi22 authored Jul 4, 2024
2 parents 19d6701 + 8867e53 commit 61c1780
Show file tree
Hide file tree
Showing 6 changed files with 124 additions and 25 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ import ArrowRightIcon from "@mui/icons-material/ArrowRight";
import ArrowBackIcon from "@mui/icons-material/ArrowBack";
import getTaskAssignedUsers from '../../../../utils/getTaskAssignedUsers';
import LightTooltip from "../../component/common/Tooltip";
import configs from '../../../../config/config';

const AllAudioTranscriptionLandingPage = () => {
const classes = AudioTranscriptionLandingStyle();
Expand Down Expand Up @@ -84,6 +85,7 @@ const AllAudioTranscriptionLandingPage = () => {
const [advancedWaveformSettings, setAdvancedWaveformSettings] = useState(false);
const [assignedUsers, setAssignedUsers] = useState(null);
const [waveSurfer, setWaveSurfer] = useState(true);
const [audioURL, setAudioURL] = useState("");

const handleCollapseClick = () => {
!showNotes && setShowStdTranscript(false);
Expand Down Expand Up @@ -113,11 +115,32 @@ const AllAudioTranscriptionLandingPage = () => {
});
} else {
setTaskData(resp);
if (resp?.data?.audio_duration < 700){
if (resp?.data?.audio_duration < 1000){
setWaveSurfer(false);
}else{
setWaveSurfer(true);
}
const fetchAudioData = await fetch(String(resp?.data?.audio_url).replace("https://asr-transcription.objectstore.e2enetworks.net/", `${configs.BASE_URL_AUTO}/task/get_audio_file/?audio_url=`), {
method: "GET",
headers: ProjectObj.getHeaders().headers
})
if (!fetchAudioData.ok){
setAudioURL(resp?.data?.audio_url)
}else{
try {
var base64data = await fetchAudioData.json();
var binaryData = atob(base64data);
var buffer = new ArrayBuffer(binaryData.length);
var view = new Uint8Array(buffer);
for (var i = 0; i < binaryData.length; i++) {
view[i] = binaryData.charCodeAt(i);
}
var blob = new Blob([view], { type: 'audio/mpeg' });
setAudioURL(URL.createObjectURL(blob));
} catch {
setAudioURL(resp?.data?.audio_url)
}
}
}
setLoading(false);
};
Expand Down Expand Up @@ -472,11 +495,14 @@ const AllAudioTranscriptionLandingPage = () => {
</Tooltip>
</Grid>
</Grid>
<AudioPanel
setCurrentTime={setCurrentTime}
setPlaying={setPlaying}
taskData={taskData}
/>
{audioURL &&
<AudioPanel
setCurrentTime={setCurrentTime}
setPlaying={setPlaying}
taskData={taskData}
audioUrl={audioURL}
/>
}
<Grid container spacing={1} sx={{ pt: 1, pl: 2, pr : 3}} justifyContent="flex-end">
<Stack spacing={2} direction="row" sx={{ mb: 1 }} alignItems="center" justifyContent="flex-end" width="fit-content">
<Typography fontSize={14} fontWeight={"medium"} color="#555">
Expand Down Expand Up @@ -704,7 +730,7 @@ const AllAudioTranscriptionLandingPage = () => {
position="fixed"
bottom={1}
>
{waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskData?.id} waveformSettings={waveformSettings} />}
{audioURL && (waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskData?.id} waveformSettings={waveformSettings} />)}
</Grid>
</>
);
Expand Down
5 changes: 3 additions & 2 deletions src/ui/pages/container/CL-Transcription/AudioPanel.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ import APITransport from "../../../../redux/actions/apitransport/apitransport";
const AudioPanel = memo( ({
setCurrentTime,
setPlaying,
taskData
taskData,
audioUrl
}) => {
const classes = AudioTranscriptionLandingStyle();
const dispatch = useDispatch();
Expand Down Expand Up @@ -67,7 +68,7 @@ const AudioPanel = memo( ({
id ="audio-panel"
controls
controlsList="nodownload"
src={TaskDetails?.data?.audio_url}
src={audioUrl}
preload="metadata"
type="audio"
// style={{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ import ArrowRightIcon from "@mui/icons-material/ArrowRight";
import ArrowBackIcon from "@mui/icons-material/ArrowBack";
import getTaskAssignedUsers from '../../../../utils/getTaskAssignedUsers';
import LightTooltip from "../../component/common/Tooltip";
import configs from '../../../../config/config';

const AudioTranscriptionLandingPage = () => {
const classes = AudioTranscriptionLandingStyle();
Expand Down Expand Up @@ -112,6 +113,7 @@ const AudioTranscriptionLandingPage = () => {
const [autoSave, setAutoSave] = useState(true);
const [waveSurfer, setWaveSurfer] = useState(true);
const [autoSaveTrigger, setAutoSaveTrigger] = useState(false);
const [audioURL, setAudioURL] = useState("");

// useEffect(() => {
// let intervalId;
Expand Down Expand Up @@ -288,11 +290,32 @@ const AudioTranscriptionLandingPage = () => {
});
} else {
setTaskData(resp);
if (resp?.data?.audio_duration < 700){
if (resp?.data?.audio_duration < 1000){
setWaveSurfer(false);
}else{
setWaveSurfer(true);
}
const fetchAudioData = await fetch(String(resp?.data?.audio_url).replace("https://asr-transcription.objectstore.e2enetworks.net/", `${configs.BASE_URL_AUTO}/task/get_audio_file/?audio_url=`), {
method: "GET",
headers: ProjectObj.getHeaders().headers
})
if (!fetchAudioData.ok){
setAudioURL(resp?.data?.audio_url)
}else{
try {
var base64data = await fetchAudioData.json();
var binaryData = atob(base64data);
var buffer = new ArrayBuffer(binaryData.length);
var view = new Uint8Array(buffer);
for (var i = 0; i < binaryData.length; i++) {
view[i] = binaryData.charCodeAt(i);
}
var blob = new Blob([view], { type: 'audio/mpeg' });
setAudioURL(URL.createObjectURL(blob));
} catch {
setAudioURL(resp?.data?.audio_url)
}
}
}
setLoading(false);
};
Expand Down Expand Up @@ -926,14 +949,15 @@ useEffect(() => {
filterMessage={filterMessage}
taskData={taskData}
/>
<AudioPanel
{audioURL && <AudioPanel
setCurrentTime={setCurrentTime}
setPlaying={setPlaying}
handleAnnotationClick={handleAnnotationClick}
onNextAnnotation={onNextAnnotation}
AnnotationsTaskDetails={AnnotationsTaskDetails}
taskData={taskData}
/>
audioUrl={audioURL}
/>}
<Grid container spacing={1} sx={{ pt: 1, pl: 2, pr : 3}} justifyContent="flex-end">
<Stack spacing={2} direction="row" sx={{ mb: 1 }} alignItems="center" justifyContent="flex-end" width="fit-content">
<Typography fontSize={14} fontWeight={"medium"} color="#555">
Expand Down Expand Up @@ -1209,7 +1233,7 @@ useEffect(() => {
bottom={1}
// style={fullscreen ? { visibility: "hidden" } : {}}
>
{waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskData?.id} waveformSettings={waveformSettings}/>}
{audioURL && (waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskData?.id} waveformSettings={waveformSettings}/>)}
</Grid>
</>
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ import ArrowRightIcon from "@mui/icons-material/ArrowRight";
import ArrowBackIcon from "@mui/icons-material/ArrowBack";
import getTaskAssignedUsers from '../../../../utils/getTaskAssignedUsers';
import LightTooltip from "../../component/common/Tooltip"
import configs from '../../../../config/config';

const ReviewAudioTranscriptionLandingPage = () => {
const classes = AudioTranscriptionLandingStyle();
Expand Down Expand Up @@ -116,6 +117,7 @@ const ReviewAudioTranscriptionLandingPage = () => {
const [autoSave, setAutoSave] = useState(true);
const [waveSurfer, setWaveSurfer] = useState(true);
const [autoSaveTrigger, setAutoSaveTrigger] = useState(false);
const [audioURL, setAudioURL] = useState("");

// useEffect(() => {
// let intervalId;
Expand Down Expand Up @@ -304,11 +306,33 @@ const ReviewAudioTranscriptionLandingPage = () => {
variant: "error",
});
}else{setTaskDetailList(resp);
if (resp?.data?.audio_duration < 700){
if (resp?.data?.audio_duration < 1000){
setWaveSurfer(false);
}else{
setWaveSurfer(true);
}}
}
const fetchAudioData = await fetch(String(resp?.data?.audio_url).replace("https://asr-transcription.objectstore.e2enetworks.net/", `${configs.BASE_URL_AUTO}/task/get_audio_file/?audio_url=`), {
method: "GET",
headers: ProjectObj.getHeaders().headers
})
if (!fetchAudioData.ok){
setAudioURL(resp?.data?.audio_url)
}else{
try {
var base64data = await fetchAudioData.json();
var binaryData = atob(base64data);
var buffer = new ArrayBuffer(binaryData.length);
var view = new Uint8Array(buffer);
for (var i = 0; i < binaryData.length; i++) {
view[i] = binaryData.charCodeAt(i);
}
var blob = new Blob([view], { type: 'audio/mpeg' });
setAudioURL(URL.createObjectURL(blob));
} catch {
setAudioURL(resp?.data?.audio_url)
}
}
}
setLoading(false);
};

Expand Down Expand Up @@ -1124,13 +1148,14 @@ useEffect(() => {
disableButton={disableButton}
anchorEl={anchorEl} setAnchorEl={setAnchorEl}
/>
<AudioPanel
{audioURL && <AudioPanel
setCurrentTime={setCurrentTime}
setPlaying={setPlaying}
onNextAnnotation={onNextAnnotation}
AnnotationsTaskDetails={AnnotationsTaskDetails}
taskData={taskDetailList}
/>
audioUrl={audioURL}
/>}
<Grid container spacing={1} sx={{ pt: 1, pl: 2, pr : 3}} justifyContent="flex-end">
<Stack spacing={2} direction="row" sx={{ mb: 1 }} alignItems="center" justifyContent="flex-end" width="fit-content">
<Typography fontSize={14} fontWeight={"medium"} color="#555">
Expand Down Expand Up @@ -1431,7 +1456,7 @@ useEffect(() => {
bottom={1}
// style={fullscreen ? { visibility: "hidden" } : {}}
>
{waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskDetailList} waveformSettings={waveformSettings}/>}
{audioURL && (waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskDetailList} waveformSettings={waveformSettings}/>)}
</Grid>
</>
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ import ArrowRightIcon from "@mui/icons-material/ArrowRight";
import ArrowBackIcon from "@mui/icons-material/ArrowBack";
import getTaskAssignedUsers from '../../../../utils/getTaskAssignedUsers';
import LightTooltip from "../../component/common/Tooltip";
import configs from '../../../../config/config';

const SuperCheckerAudioTranscriptionLandingPage = () => {
const classes = AudioTranscriptionLandingStyle();
Expand Down Expand Up @@ -114,6 +115,7 @@ const SuperCheckerAudioTranscriptionLandingPage = () => {
const [autoSave, setAutoSave] = useState(true);
const [waveSurfer, setWaveSurfer] = useState(false);
const [autoSaveTrigger, setAutoSaveTrigger] = useState(false);
const [audioURL, setAudioURL] = useState("");

// useEffect(() => {
// let intervalId;
Expand Down Expand Up @@ -225,11 +227,33 @@ const SuperCheckerAudioTranscriptionLandingPage = () => {
variant: "error",
});
}else{setTaskDetailList(resp);
if (resp?.data?.audio_duration < 700){
if (resp?.data?.audio_duration < 1000){
setWaveSurfer(false);
}else{
setWaveSurfer(true);
}}
}
const fetchAudioData = await fetch(String(resp?.data?.audio_url).replace("https://asr-transcription.objectstore.e2enetworks.net/", `${configs.BASE_URL_AUTO}/task/get_audio_file/?audio_url=`), {
method: "GET",
headers: ProjectObj.getHeaders().headers
})
if (!fetchAudioData.ok){
setAudioURL(resp?.data?.audio_url)
}else{
try {
var base64data = await fetchAudioData.json();
var binaryData = atob(base64data);
var buffer = new ArrayBuffer(binaryData.length);
var view = new Uint8Array(buffer);
for (var i = 0; i < binaryData.length; i++) {
view[i] = binaryData.charCodeAt(i);
}
var blob = new Blob([view], { type: 'audio/mpeg' });
setAudioURL(URL.createObjectURL(blob));
} catch {
setAudioURL(resp?.data?.audio_url)
}
}
}
setLoading(false);
};

Expand Down Expand Up @@ -969,14 +993,16 @@ useEffect(() => {
anchorEl={anchorEl}
setAnchorEl={setAnchorEl}
/>
{audioURL &&
<AudioPanel
setCurrentTime={setCurrentTime}
setPlaying={setPlaying}
// handleAnnotationClick={handleAnnotationClick}
onNextAnnotation={onNextAnnotation}
AnnotationsTaskDetails={AnnotationsTaskDetails}
taskData={taskDetailList}
/>
audioUrl={audioURL}
/>}
<Grid container spacing={1} sx={{ pt: 1, pl: 2, pr : 3}} justifyContent="flex-end">
<Stack spacing={2} direction="row" sx={{ mb: 1 }} alignItems="center" justifyContent="flex-end" width="fit-content">
<Typography fontSize={14} fontWeight={"medium"} color="#555">
Expand Down Expand Up @@ -1248,7 +1274,7 @@ useEffect(() => {
bottom={1}
// style={fullscreen ? { visibility: "hidden" } : {}}
>
{waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskDetailList} waveformSettings={waveformSettings} />}
{audioURL && (waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskDetailList} waveformSettings={waveformSettings} />)}
</Grid>
</>
);
Expand Down
3 changes: 0 additions & 3 deletions src/ui/pages/container/CL-Transcription/wavesurfer.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,6 @@ const Timeline2 = ({ key, details, waveformSettings }) => {
barRadius: waveformSettings.barRadius,
barHeight: waveformSettings.barHeight,
mediaControls: true,
url: details?.data?.audio_url,
hideScrollbar: true,
autoCenter: true,
autoScroll: true,
Expand Down Expand Up @@ -115,7 +114,6 @@ const Timeline2 = ({ key, details, waveformSettings }) => {
miniMap.current = WaveSurfer.create({
container: document.querySelector('#minimap'),
height: '20',
url: details?.data?.audio_url,
hideScrollbar: true,
mediaControls: true,
media: document.querySelector('audio'),
Expand Down Expand Up @@ -190,7 +188,6 @@ const Timeline2 = ({ key, details, waveformSettings }) => {
miniMap.current = WaveSurfer.create({
container: document.querySelector('#minimap'),
height: '20',
url: details?.data?.audio_url,
mediaControls: true,
media: document.querySelector('audio'),
hideScrollbar: true,
Expand Down

0 comments on commit 61c1780

Please sign in to comment.