<!-- CallOverlay.svelte -->
  1. <script lang="ts">
  2. import { config, settings, showCallOverlay } from '$lib/stores';
  3. import { onMount, tick, getContext } from 'svelte';
  4. import { blobToFile, calculateSHA256, extractSentences, findWordIndices } from '$lib/utils';
  5. import { synthesizeOpenAISpeech, transcribeAudio } from '$lib/apis/audio';
  6. import { toast } from 'svelte-sonner';
  7. import Tooltip from '$lib/components/common/Tooltip.svelte';
  8. import VideoInputMenu from './CallOverlay/VideoInputMenu.svelte';
  9. import { get } from 'svelte/store';
const i18n = getContext('i18n');

// Submits the transcribed text as a chat prompt; resolves to response contents.
export let submitPrompt: Function;
// Bound file list on the parent; receives the camera screenshot attachment.
export let files;

let loading = false; // true while transcribing / waiting for the model response
let confirmed = false; // set by silence detection: current take should be transcribed
let camera = false; // camera pane visible
let cameraStream = null;

let assistantSpeaking = false; // assistant TTS currently playing
let assistantAudio = {}; // idx -> Audio, one clip per synthesized sentence
let assistantAudioIdx = null; // idx of the clip currently queued on the <audio> element

let rmsLevel = 0; // mic loudness (0..1), drives the visualizer blob size
let hasStartedSpeaking = false;

let currentUtterance = null; // active SpeechSynthesisUtterance (browser TTS path)

let mediaRecorder;
let audioChunks = [];

const MIN_DECIBELS = -45; // analyser noise floor used for speech detection
const VISUALIZER_BUFFER_LENGTH = 300;
  27. // Function to calculate the RMS level from time domain data
  28. const calculateRMS = (data: Uint8Array) => {
  29. let sumSquares = 0;
  30. for (let i = 0; i < data.length; i++) {
  31. const normalizedValue = (data[i] - 128) / 128; // Normalize the data
  32. sumSquares += normalizedValue * normalizedValue;
  33. }
  34. return Math.sqrt(sumSquares / data.length);
  35. };
  36. const normalizeRMS = (rms) => {
  37. rms = rms * 10;
  38. const exp = 1.5; // Adjust exponent value; values greater than 1 expand larger numbers more and compress smaller numbers more
  39. const scaledRMS = Math.pow(rms, exp);
  40. // Scale between 0.01 (1%) and 1.0 (100%)
  41. return Math.min(1.0, Math.max(0.01, scaledRMS));
  42. };
// Continuously analyse the mic stream on a requestAnimationFrame loop:
// updates rmsLevel for the visualizer, detects when the user starts
// speaking (barge-in stops assistant audio), and stops the recorder after
// ~2s of silence so the captured utterance can be transcribed.
const analyseAudio = (stream) => {
	const audioContext = new AudioContext();
	const audioStreamSource = audioContext.createMediaStreamSource(stream);

	const analyser = audioContext.createAnalyser();
	analyser.minDecibels = MIN_DECIBELS; // frequency bins below -45 dB read as 0
	audioStreamSource.connect(analyser);

	const bufferLength = analyser.frequencyBinCount;

	const domainData = new Uint8Array(bufferLength); // frequency-domain snapshot
	const timeDomainData = new Uint8Array(analyser.fftSize); // time-domain snapshot

	let lastSoundTime = Date.now();
	hasStartedSpeaking = false;

	const detectSound = () => {
		const processFrame = () => {
			// Exit the loop (stopping the recorder on the way out) once the
			// session was torn down or the overlay is closed.
			if (!mediaRecorder || !$showCallOverlay) {
				if (mediaRecorder) {
					mediaRecorder.stop();
				}
				return;
			}

			analyser.getByteTimeDomainData(timeDomainData);
			analyser.getByteFrequencyData(domainData);

			// Calculate RMS level from time domain data
			rmsLevel = calculateRMS(timeDomainData);

			// Check if initial speech/noise has started.
			// Any bin above the minDecibels floor counts as sound.
			const hasSound = domainData.some((value) => value > 0);
			if (hasSound) {
				// User is talking: interrupt assistant playback (barge-in)
				// and push the silence deadline forward.
				stopAllAudio();
				hasStartedSpeaking = true;
				lastSoundTime = Date.now();
			}

			// Start silence detection only after initial speech/noise has been detected
			if (hasStartedSpeaking) {
				if (Date.now() - lastSoundTime > 2000) {
					confirmed = true; // tells stopRecordingCallback to transcribe this take

					if (mediaRecorder) {
						mediaRecorder.stop();
					}
					// Loop continues until the recorder's onstop clears mediaRecorder.
				}
			}

			window.requestAnimationFrame(processFrame);
		};

		window.requestAnimationFrame(processFrame);
	};

	detectSound();
};
  88. const stopAllAudio = () => {
  89. if (currentUtterance) {
  90. speechSynthesis.cancel();
  91. currentUtterance = null;
  92. }
  93. if (assistantAudio[assistantAudioIdx]) {
  94. assistantAudio[assistantAudioIdx].pause();
  95. assistantAudio[assistantAudioIdx].currentTime = 0;
  96. }
  97. const audioElement = document.getElementById('audioElement');
  98. audioElement.pause();
  99. audioElement.currentTime = 0;
  100. assistantSpeaking = false;
  101. };
// Play the idx-th synthesized sentence through the shared <audio> element.
// Resolves when the clip finishes so clips can be chained sequentially via
// the lastPlayedAudioPromise chain in assistantSpeakingHandler.
const playAudio = (idx) => {
	if ($showCallOverlay) {
		return new Promise((res) => {
			assistantAudioIdx = idx;
			const audioElement = document.getElementById('audioElement');
			const audio = assistantAudio[idx];

			audioElement.src = audio.src; // Assume `assistantAudio` has objects with a `src` property

			// Start muted, unmute once play() succeeds.
			// NOTE(review): presumably to satisfy autoplay policies — confirm
			// before changing.
			audioElement.muted = true;

			audioElement
				.play()
				.then(() => {
					audioElement.muted = false;
				})
				.catch((error) => {
					toast.error(error);
				});

			audioElement.onended = async (e) => {
				// Brief pause between sentences.
				await new Promise((r) => setTimeout(r, 300));

				// Last clip in the queue -> the assistant is done talking.
				if (Object.keys(assistantAudio).length - 1 === idx) {
					assistantSpeaking = false;
				}

				res(e);
			};
		});
	} else {
		// Overlay closed: skip playback but keep the promise chain intact.
		return Promise.resolve();
	}
};
// Fetch a single TTS clip for `text` from the OpenAI-compatible audio API
// and keep it as an Audio object.
// NOTE(review): this assigns a bare Audio to `assistantAudio`, which the
// rest of the file treats as an idx->Audio map — confirm whether this
// helper is still used, or should store into an indexed slot instead.
const getOpenAISpeech = async (text) => {
	const res = await synthesizeOpenAISpeech(
		localStorage.token,
		// Per-user voice setting wins over the server-wide default.
		$settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice,
		text
	).catch((error) => {
		toast.error(error);
		assistantSpeaking = false;
		return null;
	});

	if (res) {
		const blob = await res.blob();
		const blobUrl = URL.createObjectURL(blob);
		const audio = new Audio(blobUrl);
		assistantAudio = audio;
	}
};
  147. const transcribeHandler = async (audioBlob) => {
  148. // Create a blob from the audio chunks
  149. await tick();
  150. const file = blobToFile(audioBlob, 'recording.wav');
  151. const res = await transcribeAudio(localStorage.token, file).catch((error) => {
  152. toast.error(error);
  153. return null;
  154. });
  155. if (res) {
  156. console.log(res.text);
  157. if (res.text !== '') {
  158. const _responses = await submitPrompt(res.text);
  159. console.log(_responses);
  160. if (_responses.at(0)) {
  161. const content = _responses[0];
  162. if (content) {
  163. assistantSpeakingHandler(content);
  164. }
  165. }
  166. }
  167. }
  168. };
// Speak `content` aloud. Two paths depending on the configured TTS engine:
// '' (default) -> the browser's Web Speech API; 'openai' -> per-sentence
// synthesis through the backend, with clips played back in order.
const assistantSpeakingHandler = async (content) => {
	assistantSpeaking = true;

	if (($config.audio.tts.engine ?? '') == '') {
		// Browser TTS. getVoices() may return an empty list until the voice
		// list has loaded, so poll every 100ms until it is populated.
		let voices = [];
		const getVoicesLoop = setInterval(async () => {
			voices = await speechSynthesis.getVoices();
			if (voices.length > 0) {
				clearInterval(getVoicesLoop);

				// Prefer the configured voice; fall back to the browser default.
				const voice =
					voices
						?.filter(
							(v) => v.voiceURI === ($settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice)
						)
						?.at(0) ?? undefined;

				currentUtterance = new SpeechSynthesisUtterance(content);

				if (voice) {
					currentUtterance.voice = voice;
				}

				speechSynthesis.speak(currentUtterance);
			}
		}, 100);
	} else if ($config.audio.tts.engine === 'openai') {
		console.log('openai');

		// Split the reply into sentences, merging any fragment shorter than
		// two words into its predecessor so tiny clips are not synthesized.
		const sentences = extractSentences(content).reduce((mergedTexts, currentText) => {
			const lastIndex = mergedTexts.length - 1;
			if (lastIndex >= 0) {
				const previousText = mergedTexts[lastIndex];
				const wordCount = previousText.split(/\s+/).length;
				if (wordCount < 2) {
					mergedTexts[lastIndex] = previousText + ' ' + currentText;
				} else {
					mergedTexts.push(currentText);
				}
			} else {
				mergedTexts.push(currentText);
			}
			return mergedTexts;
		}, []);

		console.log(sentences);

		// Synthesize sentences one at a time; chain playback so each clip
		// starts only after the previous one has finished.
		let lastPlayedAudioPromise = Promise.resolve(); // Initialize a promise that resolves immediately

		for (const [idx, sentence] of sentences.entries()) {
			const res = await synthesizeOpenAISpeech(
				localStorage.token,
				$settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice,
				sentence
			).catch((error) => {
				toast.error(error);
				assistantSpeaking = false;
				return null;
			});

			if (res) {
				const blob = await res.blob();
				const blobUrl = URL.createObjectURL(blob);
				const audio = new Audio(blobUrl);
				assistantAudio[idx] = audio;
				lastPlayedAudioPromise = lastPlayedAudioPromise.then(() => playAudio(idx));
			}
		}
	}
};
// MediaRecorder onstop handler. If the stop was triggered by silence
// detection (`confirmed`), transcribe the take — attaching a camera
// screenshot when a stream is live — then loop into a fresh recording
// while the overlay is still open.
const stopRecordingCallback = async () => {
	if ($showCallOverlay) {
		if (confirmed) {
			loading = true;

			// Include a snapshot of the camera/screen feed with the prompt.
			if (cameraStream) {
				const imageUrl = takeScreenshot();

				files = [
					{
						type: 'image',
						url: imageUrl
					}
				];
			}

			const audioBlob = new Blob(audioChunks, { type: 'audio/wav' });
			await transcribeHandler(audioBlob);

			confirmed = false;
			loading = false;
		}

		audioChunks = [];
		mediaRecorder = false; // sentinel: ends the analyser's rAF loop

		startRecording(); // keep listening for the next utterance
	} else {
		// Overlay was closed: drop state without restarting.
		audioChunks = [];
		mediaRecorder = false;
	}
};
  255. const startRecording = async () => {
  256. const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  257. mediaRecorder = new MediaRecorder(stream);
  258. mediaRecorder.onstart = () => {
  259. console.log('Recording started');
  260. audioChunks = [];
  261. analyseAudio(stream);
  262. };
  263. mediaRecorder.ondataavailable = (event) => {
  264. if (hasStartedSpeaking) {
  265. audioChunks.push(event.data);
  266. }
  267. };
  268. mediaRecorder.onstop = async () => {
  269. console.log('Recording stopped');
  270. await stopRecordingCallback();
  271. };
  272. mediaRecorder.start();
  273. };
  274. let videoInputDevices = [];
  275. let selectedVideoInputDeviceId = null;
  276. const getVideoInputDevices = async () => {
  277. const devices = await navigator.mediaDevices.enumerateDevices();
  278. videoInputDevices = devices.filter((device) => device.kind === 'videoinput');
  279. if (!!navigator.mediaDevices.getDisplayMedia) {
  280. videoInputDevices = [
  281. ...videoInputDevices,
  282. {
  283. deviceId: 'screen',
  284. label: 'Screen Share'
  285. }
  286. ];
  287. }
  288. console.log(videoInputDevices);
  289. if (selectedVideoInputDeviceId === null && videoInputDevices.length > 0) {
  290. selectedVideoInputDeviceId = videoInputDevices[0].deviceId;
  291. }
  292. };
  293. const startCamera = async () => {
  294. await getVideoInputDevices();
  295. if (cameraStream === null) {
  296. camera = true;
  297. await tick();
  298. try {
  299. await startVideoStream();
  300. } catch (err) {
  301. console.error('Error accessing webcam: ', err);
  302. }
  303. }
  304. };
  305. const startVideoStream = async () => {
  306. const video = document.getElementById('camera-feed');
  307. if (video) {
  308. if (selectedVideoInputDeviceId === 'screen') {
  309. cameraStream = await navigator.mediaDevices.getDisplayMedia({
  310. video: {
  311. cursor: 'always'
  312. },
  313. audio: false
  314. });
  315. } else {
  316. cameraStream = await navigator.mediaDevices.getUserMedia({
  317. video: {
  318. deviceId: selectedVideoInputDeviceId ? { exact: selectedVideoInputDeviceId } : undefined
  319. }
  320. });
  321. }
  322. if (cameraStream) {
  323. await getVideoInputDevices();
  324. video.srcObject = cameraStream;
  325. await video.play();
  326. }
  327. }
  328. };
  329. const stopVideoStream = async () => {
  330. if (cameraStream) {
  331. const tracks = cameraStream.getTracks();
  332. tracks.forEach((track) => track.stop());
  333. }
  334. cameraStream = null;
  335. };
  336. const takeScreenshot = () => {
  337. const video = document.getElementById('camera-feed');
  338. const canvas = document.getElementById('camera-canvas');
  339. if (!canvas) {
  340. return;
  341. }
  342. const context = canvas.getContext('2d');
  343. // Make the canvas match the video dimensions
  344. canvas.width = video.videoWidth;
  345. canvas.height = video.videoHeight;
  346. // Draw the image from the video onto the canvas
  347. context.drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
  348. // Convert the canvas to a data base64 URL and console log it
  349. const dataURL = canvas.toDataURL('image/png');
  350. console.log(dataURL);
  351. return dataURL;
  352. };
// Release the media stream, then hide the camera pane.
const stopCamera = async () => {
	await stopVideoStream();
	camera = false;
};
// Drive the component off the overlay store: start listening whenever the
// overlay opens, and tear the camera down when it closes. (Recording stops
// itself via the analyser loop once $showCallOverlay turns false.)
$: if ($showCallOverlay) {
	startRecording();
} else {
	stopCamera();
}
  362. </script>
{#if $showCallOverlay}
	<!-- Hidden sink element used to play the per-sentence TTS clips. -->
	<audio id="audioElement" src="" style="display: none;" />
	<div class=" absolute w-full h-screen max-h-[100dvh] flex z-[999] overflow-hidden">
		<div
			class="absolute w-full h-screen max-h-[100dvh] bg-white text-gray-700 dark:bg-black dark:text-gray-300 flex justify-center"
		>
			<div class="max-w-lg w-full h-screen max-h-[100dvh] flex flex-col justify-between p-3 md:p-6">
				<!-- Compact status row shown above the feed while the camera is on:
				     a small spinner while thinking, otherwise a pulsing blob whose
				     size tracks mic loudness (rmsLevel). -->
				{#if camera}
					<div class="flex justify-center items-center w-full min-h-20">
						{#if loading}
							<svg
								class="size-12 text-gray-900 dark:text-gray-400"
								viewBox="0 0 24 24"
								fill="currentColor"
								xmlns="http://www.w3.org/2000/svg"
								><style>
									.spinner_qM83 {
										animation: spinner_8HQG 1.05s infinite;
									}
									.spinner_oXPr {
										animation-delay: 0.1s;
									}
									.spinner_ZTLf {
										animation-delay: 0.2s;
									}
									@keyframes spinner_8HQG {
										0%,
										57.14% {
											animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
											transform: translate(0);
										}
										28.57% {
											animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
											transform: translateY(-6px);
										}
										100% {
											transform: translate(0);
										}
									}
								</style><circle class="spinner_qM83" cx="4" cy="12" r="3" /><circle
									class="spinner_qM83 spinner_oXPr"
									cx="12"
									cy="12"
									r="3"
								/><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="3" /></svg
							>
						{:else}
							<div
								class=" {rmsLevel * 100 > 4
									? ' size-[4.5rem]'
									: rmsLevel * 100 > 2
										? ' size-16'
										: rmsLevel * 100 > 1
											? 'size-14'
											: 'size-12'} transition-all bg-black dark:bg-white rounded-full"
							/>
						{/if}
						<!-- navbar -->
					</div>
				{/if}

				<!-- Main area: large spinner/blob when no camera, otherwise the
				     live video feed with a hidden screenshot canvas. -->
				<div class="flex justify-center items-center flex-1 h-full w-full max-h-full">
					{#if !camera}
						{#if loading}
							<svg
								class="size-44 text-gray-900 dark:text-gray-400"
								viewBox="0 0 24 24"
								fill="currentColor"
								xmlns="http://www.w3.org/2000/svg"
								><style>
									.spinner_qM83 {
										animation: spinner_8HQG 1.05s infinite;
									}
									.spinner_oXPr {
										animation-delay: 0.1s;
									}
									.spinner_ZTLf {
										animation-delay: 0.2s;
									}
									@keyframes spinner_8HQG {
										0%,
										57.14% {
											animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
											transform: translate(0);
										}
										28.57% {
											animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
											transform: translateY(-6px);
										}
										100% {
											transform: translate(0);
										}
									}
								</style><circle class="spinner_qM83" cx="4" cy="12" r="3" /><circle
									class="spinner_qM83 spinner_oXPr"
									cx="12"
									cy="12"
									r="3"
								/><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="3" /></svg
							>
						{:else}
							<div
								class=" {rmsLevel * 100 > 4
									? ' size-52'
									: rmsLevel * 100 > 2
										? 'size-48'
										: rmsLevel * 100 > 1
											? 'size-[11.5rem]'
											: 'size-44'} transition-all bg-black dark:bg-white rounded-full"
							/>
						{/if}
					{:else}
						<div
							class="relative flex video-container w-full max-h-full pt-2 pb-4 md:py-6 px-2 h-full"
						>
							<video
								id="camera-feed"
								autoplay
								class="rounded-2xl h-full min-w-full object-cover object-center"
								playsinline
							/>
							<!-- Hidden canvas used by takeScreenshot() to grab frames. -->
							<canvas id="camera-canvas" style="display:none;" />
							<div class=" absolute top-4 md:top-8 left-4">
								<!-- Close-camera button overlaid on the feed. -->
								<button
									type="button"
									class="p-1.5 text-white cursor-pointer backdrop-blur-xl bg-black/10 rounded-full"
									on:click={() => {
										stopCamera();
									}}
								>
									<svg
										xmlns="http://www.w3.org/2000/svg"
										viewBox="0 0 16 16"
										fill="currentColor"
										class="size-6"
									>
										<path
											d="M5.28 4.22a.75.75 0 0 0-1.06 1.06L6.94 8l-2.72 2.72a.75.75 0 1 0 1.06 1.06L8 9.06l2.72 2.72a.75.75 0 1 0 1.06-1.06L9.06 8l2.72-2.72a.75.75 0 0 0-1.06-1.06L8 6.94 5.28 4.22Z"
										/>
									</svg>
								</button>
							</div>
						</div>
					{/if}
				</div>

				<!-- Bottom control bar: camera/device selector, status label, hang-up. -->
				<div class="flex justify-between items-center pb-2 w-full">
					<div>
						{#if camera}
							<!-- Device picker; switching restarts the video stream. -->
							<VideoInputMenu
								devices={videoInputDevices}
								on:change={async (e) => {
									console.log(e.detail);
									selectedVideoInputDeviceId = e.detail;
									await stopVideoStream();
									await startVideoStream();
								}}
							>
								<button class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900" type="button">
									<svg
										xmlns="http://www.w3.org/2000/svg"
										viewBox="0 0 20 20"
										fill="currentColor"
										class="size-5"
									>
										<path
											fill-rule="evenodd"
											d="M15.312 11.424a5.5 5.5 0 0 1-9.201 2.466l-.312-.311h2.433a.75.75 0 0 0 0-1.5H3.989a.75.75 0 0 0-.75.75v4.242a.75.75 0 0 0 1.5 0v-2.43l.31.31a7 7 0 0 0 11.712-3.138.75.75 0 0 0-1.449-.39Zm1.23-3.723a.75.75 0 0 0 .219-.53V2.929a.75.75 0 0 0-1.5 0V5.36l-.31-.31A7 7 0 0 0 3.239 8.188a.75.75 0 1 0 1.448.389A5.5 5.5 0 0 1 13.89 6.11l.311.31h-2.432a.75.75 0 0 0 0 1.5h4.243a.75.75 0 0 0 .53-.219Z"
											clip-rule="evenodd"
										/>
									</svg>
								</button>
							</VideoInputMenu>
						{:else}
							<Tooltip content="{$i18n.t('Camera')}">
								<button
									class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900"
									type="button"
									on:click={() => {
										startCamera();
									}}
								>
									<svg
										xmlns="http://www.w3.org/2000/svg"
										fill="none"
										viewBox="0 0 24 24"
										stroke-width="1.5"
										stroke="currentColor"
										class="size-5"
									>
										<path
											stroke-linecap="round"
											stroke-linejoin="round"
											d="M6.827 6.175A2.31 2.31 0 0 1 5.186 7.23c-.38.054-.757.112-1.134.175C2.999 7.58 2.25 8.507 2.25 9.574V18a2.25 2.25 0 0 0 2.25 2.25h15A2.25 2.25 0 0 0 21.75 18V9.574c0-1.067-.75-1.994-1.802-2.169a47.865 47.865 0 0 0-1.134-.175 2.31 2.31 0 0 1-1.64-1.055l-.822-1.316a2.192 2.192 0 0 0-1.736-1.039 48.774 48.774 0 0 0-5.232 0 2.192 2.192 0 0 0-1.736 1.039l-.821 1.316Z"
										/>
										<path
											stroke-linecap="round"
											stroke-linejoin="round"
											d="M16.5 12.75a4.5 4.5 0 1 1-9 0 4.5 4.5 0 0 1 9 0ZM18.75 10.5h.008v.008h-.008V10.5Z"
										/>
									</svg>
								</button>
							</Tooltip>
						{/if}
					</div>

					<!-- Pipeline status label. -->
					<div>
						<button type="button">
							<div class=" line-clamp-1 text-sm font-medium">
								{#if loading}
									{$i18n.t('Thinking...')}
								{:else}
									{$i18n.t('Listening...')}
								{/if}
							</div>
						</button>
					</div>

					<!-- Hang-up: closing the store tears down recording and camera. -->
					<div>
						<button
							class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900"
							on:click={async () => {
								showCallOverlay.set(false);
							}}
							type="button"
						>
							<svg
								xmlns="http://www.w3.org/2000/svg"
								viewBox="0 0 20 20"
								fill="currentColor"
								class="size-5"
							>
								<path
									d="M6.28 5.22a.75.75 0 0 0-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 1 0 1.06 1.06L10 11.06l3.72 3.72a.75.75 0 1 0 1.06-1.06L11.06 10l3.72-3.72a.75.75 0 0 0-1.06-1.06L10 8.94 6.28 5.22Z"
								/>
							</svg>
						</button>
					</div>
				</div>
			</div>
		</div>
	</div>
{/if}