---
license: other
datasets:
- MERaLiON/Multitask-National-Speech-Corpus-v1
language:
- en
- zh
- ms
- ta
- id
- th
- vi

metrics:
- wer
- bleu
base_model:
- openai/whisper-large-v3
- google/gemma-2-9b-it
library_name: transformers
tags:
- meralion
- meralion-2
---

<h1 align="center">🔥 MERaLiON-2 🔥</h1>

<p align="center">
  <a href="https://huggingface.co/MERaLiON/MERaLiON-2-10B">🚀 MERaLiON-2-10B</a> | 
  <a href="https://huggingface.co/MERaLiON/MERaLiON-2-10B-ASR">🚀 MERaLiON-2-10B-ASR</a> | 
  <a href="https://huggingface.co/MERaLiON/MERaLiON-2-3B">🚀 MERaLiON-2-3B</a>
</p>

<p align="center">
  <a href="https://meralion.org/demo/">💻 Web Demo</a> |
  <a href="https://huggingface.co/MERaLiON/MERaLiON-2-10B/blob/main/vllm_plugin_meralion2/readme.md">⚙️ vLLM</a>
</p>

## Introduction 
We are pleased to announce the release of **MERaLiON-2**, the latest addition to the MERaLiON family of speech-text large language models. Our flagship model, [**MERaLiON-2-10B**](https://huggingface.co/MERaLiON/MERaLiON-2-10B), demonstrates competitive performance across benchmark evaluations in tasks such as multilingual automatic speech recognition (ASR), speech translation (ST), audio scene understanding, emotion recognition, and general speech comprehension. These results are comparable to those achieved by other state-of-the-art open-source AudioLLMs, including Qwen2.5-Omni-7B and Phi-4-multimodal-instruct.

MERaLiON-2-10B is specifically designed to follow complex instructions with a nuanced understanding of **Singapore’s multilingual and multicultural context**. It integrates a localized Whisper-large-v3 speech encoder and Gemma-2-9b text decoder. The following graph presents task-specific evaluation scores, assessed using the **LLM-as-a-Judge** framework across multiple datasets. For the speech translation task, performance is measured using the BLEU metric, where higher scores indicate better translation quality.

<img src="radar_task.png" alt="model_capability" width="800" style="margin-left:'auto' margin-right:'auto' display:'block'"/>


In addition, we introduce an ASR-optimized variant, [**MERaLiON-2-10B-ASR**](https://huggingface.co/MERaLiON/MERaLiON-2-10B-ASR), which delivers a **5–30%** performance improvement over OpenAI’s `whisper-large-v3` on speech recognition tasks. This enhancement spans Singapore’s 4 official languages—**English**, **Mandarin**, **Malay**, and **Tamil**—as well as 3 South-East Asian languages: **Indonesian**, **Thai**, and **Vietnamese**. The model also demonstrates robust handling of **code-switching scenarios** and local colloquialisms, reflecting its adaptability to Singapore’s diverse linguistic landscape.

The following visualization illustrates the **1 - Word Error Rate (WER)** metric across these seven languages, comparing MERaLiON-2-10B-ASR with other leading models. A higher value indicates better transcription accuracy.

<img src="radar_asr.png" alt="model_capability" width="800" style="margin-left:'auto' margin-right:'auto' display:'block'"/>

We also provide [MERaLiON-2-3B](https://huggingface.co/MERaLiON/MERaLiON-2-3B), which balances performance with reduced computational requirements, enabling broader accessibility and lightweight deployment. 


- **Extended Audio Length**: Supports audio inputs of up to 300 seconds (5 minutes) for audio and speech question answering tasks; for speech transcription (ASR) and speech translation (ST) tasks, keep inputs to **30 seconds** for satisfactory performance. 

- **Expanded Language Coverage**: In addition to English, Chinese, and Singlish, MERaLiON-2 introduces support for Malay, Tamil, and other Southeast Asian languages, including Indonesian, Thai, and Vietnamese.

- **Improved Performance**: Achieves higher performance across a wide range of tasks. See the [Performance](#performance) section for detailed benchmarks.

- **Higher Quality Training Data**: Trained on 120,000 hours of curated speech and audio data, filtered for quality and diversity, with an emphasis on local and multilingual audio sources.

- **Three Model Variants**: Available in general-purpose ([MERaLiON-2-10B](https://huggingface.co/MERaLiON/MERaLiON-2-10B)), ASR-optimized ([MERaLiON-2-10B-ASR](https://huggingface.co/MERaLiON/MERaLiON-2-10B-ASR)), and lightweight ([MERaLiON-2-3B](https://huggingface.co/MERaLiON/MERaLiON-2-3B)) configurations to balance latency, compute efficiency, and task performance across different deployment needs.

## Model Description

MERaLiON stands for **M**ultimodal **E**mpathetic **R**easoning **a**nd **L**earning **i**n **O**ne **N**etwork. 

MERaLiON-2 is a family of Speech-Text Large Language Models tailored for **Singapore’s multilingual and multicultural landscape**, as well as the wider **Southeast Asian region**. 
The 10B model integrates a localized [Whisper-Large-V3](https://huggingface.co/openai/whisper-large-v3) speech encoder with the [Gemma2-9b-IT](https://huggingface.co/google/gemma-2-9b-it) text decoder.
The 3B model integrates a localized [Whisper-Large-V3](https://huggingface.co/openai/whisper-large-v3) speech encoder with the [Gemma2-2b-IT](https://huggingface.co/google/gemma-2-2b-it) text decoder.

MERaLiON-2-10B is finetuned on **120,000 hours of speech and audio data** across **6 diverse tasks**: Automatic Speech Recognition (ASR), Spoken Question Answering (SQA), Spoken Dialogue Summarization (SDS), Audio Captioning (AC), Audio-Scene Question Answering (ASQA) and Paralinguistic Question Answering (PQA). 
The model supports long-form audio inputs of up to 300 seconds (5 minutes) and is specifically adapted to handle the linguistic nuances, accents, and dialects commonly found across Singapore and neighboring countries.

- **Developed by:** I<sup>2</sup>R, A\*STAR, Singapore
- **Model type:** Multimodal LLM
- **Language(s):** Primarily English (Global and Singapore) and Chinese, with audio support for regional languages including Malay, Tamil, Indonesian, Thai, and Vietnamese.
- **Audio:** **Mono**-channel audio, **16,000 Hz** sampling rate, up to **300** seconds.
- **License:** [MERaLiON Public License](MERaLiON-Public-Licence-v2.pdf)
- **Demo:** [MERaLiON-AudioLLM Web Demo](https://meralion.org/demo/)

**MERaLiON-2** is an upgraded version of [MERaLiON-AudioLLM](https://huggingface.co/MERaLiON/MERaLiON-AudioLLM-Whisper-SEA-LION).


## Performance

We benchmark the MERaLiON-2 series of models with the extended [AudioBench benchmark](https://huggingface.co/spaces/MERaLiON/AudioBench-Leaderboard) against several recently released open-source multimodal models (SALMONN-7B, the Qwen2.5-Omni series, and Phi-4-Multimodal), as well as two cascade models. 


**Better Automatic Speech Recognition (ASR) Accuracy**

MERaLiON-2-10B-ASR and MERaLiON-2-10B demonstrate leading performance in Singlish, Mandarin, Malay, Tamil, and other Southeast Asian languages, while maintaining competitive results in English compared to `Whisper-large-v3`. The following table shows the average transcription `Word Error Rate` (lower is better) by language for the MERaLiON family and other leading AudioLLMs. The `Private Dataset` comprises locally accented Singaporean speech with code-switching.
Please visit [AudioBench benchmark](https://huggingface.co/spaces/MERaLiON/AudioBench-Leaderboard) for dataset-level evaluation results.
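
Here, `Word Error Rate` is the word-level edit distance (substitutions, deletions, and insertions) between the reference transcript and the model output, normalised by the number of reference words. A minimal illustration using the `jiwer` package (chosen purely for illustration; it is not part of the MERaLiON evaluation stack):

```python
# pip install jiwer
import jiwer

reference = "please transcribe this speech"
hypothesis = "please transcribe the speech"

# WER = (substitutions + deletions + insertions) / number of reference words
print(jiwer.wer(reference, hypothesis))  # 0.25: one substitution over four reference words
```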

<style type="text/css">
#T_0910c th {
  text-align: center;
}
#T_0910c_row0_col0, #T_0910c_row1_col0, #T_0910c_row2_col0, #T_0910c_row3_col0, #T_0910c_row4_col0, #T_0910c_row5_col0, #T_0910c_row6_col7, #T_0910c_row7_col0, #T_0910c_row8_col0 {
  font-weight: bold;
  text-decoration: underline;
  text-align: center;
}
#T_0910c_row0_col1, #T_0910c_row1_col1, #T_0910c_row2_col1, #T_0910c_row3_col1, #T_0910c_row4_col1, #T_0910c_row5_col1, #T_0910c_row6_col1, #T_0910c_row7_col1, #T_0910c_row8_col1 {
  text-align: center;
}
#T_0910c_row0_col2, #T_0910c_row0_col3, #T_0910c_row0_col4, #T_0910c_row0_col5, #T_0910c_row0_col6, #T_0910c_row0_col7, #T_0910c_row0_col8, #T_0910c_row0_col9, #T_0910c_row0_col10, #T_0910c_row0_col11, #T_0910c_row1_col2, #T_0910c_row1_col3, #T_0910c_row1_col4, #T_0910c_row1_col5, #T_0910c_row1_col6, #T_0910c_row1_col7, #T_0910c_row1_col8, #T_0910c_row1_col9, #T_0910c_row1_col10, #T_0910c_row1_col11, #T_0910c_row2_col2, #T_0910c_row2_col3, #T_0910c_row2_col4, #T_0910c_row2_col5, #T_0910c_row2_col6, #T_0910c_row2_col7, #T_0910c_row2_col8, #T_0910c_row2_col9, #T_0910c_row2_col10, #T_0910c_row2_col11, #T_0910c_row3_col2, #T_0910c_row3_col3, #T_0910c_row3_col4, #T_0910c_row3_col5, #T_0910c_row3_col6, #T_0910c_row3_col7, #T_0910c_row3_col8, #T_0910c_row3_col9, #T_0910c_row3_col10, #T_0910c_row3_col11, #T_0910c_row4_col2, #T_0910c_row4_col3, #T_0910c_row4_col4, #T_0910c_row4_col5, #T_0910c_row4_col6, #T_0910c_row4_col7, #T_0910c_row4_col8, #T_0910c_row4_col9, #T_0910c_row4_col10, #T_0910c_row4_col11, #T_0910c_row5_col2, #T_0910c_row5_col3, #T_0910c_row5_col4, #T_0910c_row5_col5, #T_0910c_row5_col6, #T_0910c_row5_col7, #T_0910c_row5_col8, #T_0910c_row5_col9, #T_0910c_row5_col10, #T_0910c_row5_col11, #T_0910c_row6_col0, #T_0910c_row6_col2, #T_0910c_row6_col3, #T_0910c_row6_col4, #T_0910c_row6_col5, #T_0910c_row6_col6, #T_0910c_row6_col8, #T_0910c_row6_col9, #T_0910c_row6_col10, #T_0910c_row6_col11, #T_0910c_row7_col2, #T_0910c_row7_col3, #T_0910c_row7_col4, #T_0910c_row7_col5, #T_0910c_row7_col6, #T_0910c_row7_col7, #T_0910c_row7_col8, #T_0910c_row7_col9, #T_0910c_row7_col10, #T_0910c_row7_col11, #T_0910c_row8_col2, #T_0910c_row8_col3, #T_0910c_row8_col4, #T_0910c_row8_col5, #T_0910c_row8_col6, #T_0910c_row8_col7, #T_0910c_row8_col8, #T_0910c_row8_col9, #T_0910c_row8_col10, #T_0910c_row8_col11 {
  text-align: center;
}
</style>
<table id="T_0910c">
  <thead>
    <tr>
      <th class="blank level0" >&nbsp;</th>
      <th id="T_0910c_level0_col0" class="col_heading level0 col0" >MERaLiON-2-10B-ASR</th>
      <th id="T_0910c_level0_col1" class="col_heading level0 col1" >MERaLiON-2-10B</th>
      <th id="T_0910c_level0_col2" class="col_heading level0 col2" >MERaLiON-2-3B</th>
      <th id="T_0910c_level0_col3" class="col_heading level0 col3" >whisper_large_v3</th>
      <th id="T_0910c_level0_col4" class="col_heading level0 col4" >cascade-whisper_large_v3-llama_3_8b_instruct</th>
      <th id="T_0910c_level0_col5" class="col_heading level0 col5" >cascade-whisper_large_v2-gemma2_9b_cpt-sea_lionv3_instruct</th>
      <th id="T_0910c_level0_col6" class="col_heading level0 col6" >MERaLiON-AudioLLM-Whisper-SEA-LION</th>
      <th id="T_0910c_level0_col7" class="col_heading level0 col7" >Qwen2.5-Omni-7B</th>
      <th id="T_0910c_level0_col8" class="col_heading level0 col8" >SeaLLMs-Audio-7B</th>
      <th id="T_0910c_level0_col9" class="col_heading level0 col9" >Qwen2.5-Omni-3B</th>
      <th id="T_0910c_level0_col10" class="col_heading level0 col10" >SALMONN_7B</th>
      <th id="T_0910c_level0_col11" class="col_heading level0 col11" >phi_4_multimodal_instruct</th>
    </tr>
  </thead>
  <tbody>
    <tr>
      <th id="T_0910c_level0_row0" class="row_heading level0 row0" >Thai</th>
      <td id="T_0910c_row0_col0" class="data row0 col0" >0.096526</td>
      <td id="T_0910c_row0_col1" class="data row0 col1" >0.109365</td>
      <td id="T_0910c_row0_col2" class="data row0 col2" >0.107279</td>
      <td id="T_0910c_row0_col3" class="data row0 col3" >0.121073</td>
      <td id="T_0910c_row0_col4" class="data row0 col4" >0.120257</td>
      <td id="T_0910c_row0_col5" class="data row0 col5" >0.172105</td>
      <td id="T_0910c_row0_col6" class="data row0 col6" >0.919330</td>
      <td id="T_0910c_row0_col7" class="data row0 col7" >0.126497</td>
      <td id="T_0910c_row0_col8" class="data row0 col8" >0.117152</td>
      <td id="T_0910c_row0_col9" class="data row0 col9" >0.163150</td>
      <td id="T_0910c_row0_col10" class="data row0 col10" >1.191099</td>
      <td id="T_0910c_row0_col11" class="data row0 col11" >1.510068</td>
    </tr>
    <tr>
      <th id="T_0910c_level0_row1" class="row_heading level0 row1" >Tamil</th>
      <td id="T_0910c_row1_col0" class="data row1 col0" >0.271279</td>
      <td id="T_0910c_row1_col1" class="data row1 col1" >0.327081</td>
      <td id="T_0910c_row1_col2" class="data row1 col2" >0.344081</td>
      <td id="T_0910c_row1_col3" class="data row1 col3" >0.441483</td>
      <td id="T_0910c_row1_col4" class="data row1 col4" >0.475225</td>
      <td id="T_0910c_row1_col5" class="data row1 col5" >0.492336</td>
      <td id="T_0910c_row1_col6" class="data row1 col6" >0.561315</td>
      <td id="T_0910c_row1_col7" class="data row1 col7" >1.024916</td>
      <td id="T_0910c_row1_col8" class="data row1 col8" >2.325402</td>
      <td id="T_0910c_row1_col9" class="data row1 col9" >1.315143</td>
      <td id="T_0910c_row1_col10" class="data row1 col10" >1.306694</td>
      <td id="T_0910c_row1_col11" class="data row1 col11" >1.876722</td>
    </tr>
    <tr>
      <th id="T_0910c_level0_row2" class="row_heading level0 row2" >Singlish</th>
      <td id="T_0910c_row2_col0" class="data row2 col0" >0.129830</td>
      <td id="T_0910c_row2_col1" class="data row2 col1" >0.168813</td>
      <td id="T_0910c_row2_col2" class="data row2 col2" >0.180395</td>
      <td id="T_0910c_row2_col3" class="data row2 col3" >0.248945</td>
      <td id="T_0910c_row2_col4" class="data row2 col4" >0.251608</td>
      <td id="T_0910c_row2_col5" class="data row2 col5" >0.255717</td>
      <td id="T_0910c_row2_col6" class="data row2 col6" >0.143800</td>
      <td id="T_0910c_row2_col7" class="data row2 col7" >0.439071</td>
      <td id="T_0910c_row2_col8" class="data row2 col8" >0.795990</td>
      <td id="T_0910c_row2_col9" class="data row2 col9" >0.389393</td>
      <td id="T_0910c_row2_col10" class="data row2 col10" >0.441490</td>
      <td id="T_0910c_row2_col11" class="data row2 col11" >0.448863</td>
    </tr>
    <tr>
      <th id="T_0910c_level0_row3" class="row_heading level0 row3" >Malay</th>
      <td id="T_0910c_row3_col0" class="data row3 col0" >0.194638</td>
      <td id="T_0910c_row3_col1" class="data row3 col1" >0.209074</td>
      <td id="T_0910c_row3_col2" class="data row3 col2" >0.279891</td>
      <td id="T_0910c_row3_col3" class="data row3 col3" >0.219692</td>
      <td id="T_0910c_row3_col4" class="data row3 col4" >0.311921</td>
      <td id="T_0910c_row3_col5" class="data row3 col5" >0.314378</td>
      <td id="T_0910c_row3_col6" class="data row3 col6" >0.289895</td>
      <td id="T_0910c_row3_col7" class="data row3 col7" >1.460664</td>
      <td id="T_0910c_row3_col8" class="data row3 col8" >0.765565</td>
      <td id="T_0910c_row3_col9" class="data row3 col9" >2.943750</td>
      <td id="T_0910c_row3_col10" class="data row3 col10" >1.085867</td>
      <td id="T_0910c_row3_col11" class="data row3 col11" >3.762933</td>
    </tr>
    <tr>
      <th id="T_0910c_level0_row4" class="row_heading level0 row4" >English</th>
      <td id="T_0910c_row4_col0" class="data row4 col0" >0.078544</td>
      <td id="T_0910c_row4_col1" class="data row4 col1" >0.088259</td>
      <td id="T_0910c_row4_col2" class="data row4 col2" >0.122295</td>
      <td id="T_0910c_row4_col3" class="data row4 col3" >0.080841</td>
      <td id="T_0910c_row4_col4" class="data row4 col4" >0.081568</td>
      <td id="T_0910c_row4_col5" class="data row4 col5" >0.104830</td>
      <td id="T_0910c_row4_col6" class="data row4 col6" >0.110567</td>
      <td id="T_0910c_row4_col7" class="data row4 col7" >0.134216</td>
      <td id="T_0910c_row4_col8" class="data row4 col8" >0.197824</td>
      <td id="T_0910c_row4_col9" class="data row4 col9" >0.110353</td>
      <td id="T_0910c_row4_col10" class="data row4 col10" >0.191492</td>
      <td id="T_0910c_row4_col11" class="data row4 col11" >0.098225</td>
    </tr>
    <tr>
      <th id="T_0910c_level0_row5" class="row_heading level0 row5" >Indonesian</th>
      <td id="T_0910c_row5_col0" class="data row5 col0" >0.121020</td>
      <td id="T_0910c_row5_col1" class="data row5 col1" >0.142813</td>
      <td id="T_0910c_row5_col2" class="data row5 col2" >0.131950</td>
      <td id="T_0910c_row5_col3" class="data row5 col3" >0.137102</td>
      <td id="T_0910c_row5_col4" class="data row5 col4" >0.135390</td>
      <td id="T_0910c_row5_col5" class="data row5 col5" >0.159476</td>
      <td id="T_0910c_row5_col6" class="data row5 col6" >0.298365</td>
      <td id="T_0910c_row5_col7" class="data row5 col7" >0.168659</td>
      <td id="T_0910c_row5_col8" class="data row5 col8" >0.220227</td>
      <td id="T_0910c_row5_col9" class="data row5 col9" >0.205216</td>
      <td id="T_0910c_row5_col10" class="data row5 col10" >1.653502</td>
      <td id="T_0910c_row5_col11" class="data row5 col11" >3.565510</td>
    </tr>
    <tr>
      <th id="T_0910c_level0_row6" class="row_heading level0 row6" >Mandarian</th>
      <td id="T_0910c_row6_col0" class="data row6 col0" >0.103694</td>
      <td id="T_0910c_row6_col1" class="data row6 col1" >0.132025</td>
      <td id="T_0910c_row6_col2" class="data row6 col2" >0.145878</td>
      <td id="T_0910c_row6_col3" class="data row6 col3" >0.170980</td>
      <td id="T_0910c_row6_col4" class="data row6 col4" >0.196867</td>
      <td id="T_0910c_row6_col5" class="data row6 col5" >0.291733</td>
      <td id="T_0910c_row6_col6" class="data row6 col6" >0.291183</td>
      <td id="T_0910c_row6_col7" class="data row6 col7" >0.102419</td>
      <td id="T_0910c_row6_col8" class="data row6 col8" >0.309782</td>
      <td id="T_0910c_row6_col9" class="data row6 col9" >0.130429</td>
      <td id="T_0910c_row6_col10" class="data row6 col10" >0.939545</td>
      <td id="T_0910c_row6_col11" class="data row6 col11" >0.238879</td>
    </tr>
    <tr>
      <th id="T_0910c_level0_row7" class="row_heading level0 row7" >Vietnamese</th>
      <td id="T_0910c_row7_col0" class="data row7 col0" >0.118693</td>
      <td id="T_0910c_row7_col1" class="data row7 col1" >0.134808</td>
      <td id="T_0910c_row7_col2" class="data row7 col2" >0.155110</td>
      <td id="T_0910c_row7_col3" class="data row7 col3" >0.148474</td>
      <td id="T_0910c_row7_col4" class="data row7 col4" >0.136075</td>
      <td id="T_0910c_row7_col5" class="data row7 col5" >0.164078</td>
      <td id="T_0910c_row7_col6" class="data row7 col6" >0.952040</td>
      <td id="T_0910c_row7_col7" class="data row7 col7" >0.205491</td>
      <td id="T_0910c_row7_col8" class="data row7 col8" >0.222001</td>
      <td id="T_0910c_row7_col9" class="data row7 col9" >0.186786</td>
      <td id="T_0910c_row7_col10" class="data row7 col10" >1.521174</td>
      <td id="T_0910c_row7_col11" class="data row7 col11" >1.805643</td>
    </tr>
    <tr>
      <th id="T_0910c_level0_row8" class="row_heading level0 row8" >Private Dataset</th>
      <td id="T_0910c_row8_col0" class="data row8 col0" >0.106150</td>
      <td id="T_0910c_row8_col1" class="data row8 col1" >0.112360</td>
      <td id="T_0910c_row8_col2" class="data row8 col2" >0.147258</td>
      <td id="T_0910c_row8_col3" class="data row8 col3" >0.116630</td>
      <td id="T_0910c_row8_col4" class="data row8 col4" >0.118434</td>
      <td id="T_0910c_row8_col5" class="data row8 col5" >0.143812</td>
      <td id="T_0910c_row8_col6" class="data row8 col6" >0.130667</td>
      <td id="T_0910c_row8_col7" class="data row8 col7" >0.222770</td>
      <td id="T_0910c_row8_col8" class="data row8 col8" >0.496540</td>
      <td id="T_0910c_row8_col9" class="data row8 col9" >0.164556</td>
      <td id="T_0910c_row8_col10" class="data row8 col10" >0.273304</td>
      <td id="T_0910c_row8_col11" class="data row8 col11" >0.229450</td>
    </tr>
  </tbody>
</table>


**Better Instruction Following and Audio Understanding**

**MERaLiON-2-10B** exhibits substantial advancements in speech and audio understanding, as well as paralinguistic tasks. Notably, it adeptly handles complex instructions and responds with enhanced flexibility, effectively preserving the pre-trained knowledge from Gemma during the audio fine-tuning process. This capability enables MERaLiON-2-10B to provide detailed explanations regarding speech content and the speaker's emotional state. Furthermore, with appropriate prompt adjustments, the model can assume various roles, such as a voice assistant, virtual caregiver, or an integral component of sophisticated multi-agent AI systems and software solutions.
Please visit [AudioBench benchmark](https://huggingface.co/spaces/MERaLiON/AudioBench-Leaderboard) for dataset-level evaluation results.

<style type="text/css">
#T_b6ba8 th {
  text-align: center;
}
#T_b6ba8_row0_col0, #T_b6ba8_row2_col0, #T_b6ba8_row3_col0, #T_b6ba8_row5_col0, #T_b6ba8_row6_col0, #T_b6ba8_row8_col0, #T_b6ba8_row9_col0, #T_b6ba8_row10_col0 {
  text-align: center;
}
#T_b6ba8_row0_col1, #T_b6ba8_row0_col2, #T_b6ba8_row0_col3, #T_b6ba8_row0_col4, #T_b6ba8_row0_col5, #T_b6ba8_row0_col6, #T_b6ba8_row0_col7, #T_b6ba8_row0_col8, #T_b6ba8_row0_col9, #T_b6ba8_row0_col11, #T_b6ba8_row0_col12, #T_b6ba8_row0_col13, #T_b6ba8_row1_col1, #T_b6ba8_row1_col2, #T_b6ba8_row1_col3, #T_b6ba8_row1_col4, #T_b6ba8_row1_col5, #T_b6ba8_row1_col6, #T_b6ba8_row1_col7, #T_b6ba8_row1_col8, #T_b6ba8_row1_col9, #T_b6ba8_row1_col10, #T_b6ba8_row1_col11, #T_b6ba8_row1_col12, #T_b6ba8_row1_col13, #T_b6ba8_row2_col2, #T_b6ba8_row2_col3, #T_b6ba8_row2_col4, #T_b6ba8_row2_col5, #T_b6ba8_row2_col6, #T_b6ba8_row2_col7, #T_b6ba8_row2_col8, #T_b6ba8_row2_col9, #T_b6ba8_row2_col10, #T_b6ba8_row2_col11, #T_b6ba8_row2_col12, #T_b6ba8_row2_col13, #T_b6ba8_row3_col1, #T_b6ba8_row3_col3, #T_b6ba8_row3_col4, #T_b6ba8_row3_col5, #T_b6ba8_row3_col6, #T_b6ba8_row3_col7, #T_b6ba8_row3_col8, #T_b6ba8_row3_col9, #T_b6ba8_row3_col10, #T_b6ba8_row3_col11, #T_b6ba8_row3_col12, #T_b6ba8_row3_col13, #T_b6ba8_row4_col1, #T_b6ba8_row4_col2, #T_b6ba8_row4_col3, #T_b6ba8_row4_col4, #T_b6ba8_row4_col5, #T_b6ba8_row4_col6, #T_b6ba8_row4_col7, #T_b6ba8_row4_col8, #T_b6ba8_row4_col9, #T_b6ba8_row4_col10, #T_b6ba8_row4_col11, #T_b6ba8_row4_col12, #T_b6ba8_row4_col13, #T_b6ba8_row5_col1, #T_b6ba8_row5_col2, #T_b6ba8_row5_col3, #T_b6ba8_row5_col5, #T_b6ba8_row5_col6, #T_b6ba8_row5_col7, #T_b6ba8_row5_col8, #T_b6ba8_row5_col9, #T_b6ba8_row5_col10, #T_b6ba8_row5_col11, #T_b6ba8_row5_col12, #T_b6ba8_row5_col13, #T_b6ba8_row6_col1, #T_b6ba8_row6_col3, #T_b6ba8_row6_col4, #T_b6ba8_row6_col5, #T_b6ba8_row6_col6, #T_b6ba8_row6_col7, #T_b6ba8_row6_col8, #T_b6ba8_row6_col9, #T_b6ba8_row6_col10, #T_b6ba8_row6_col11, #T_b6ba8_row6_col12, #T_b6ba8_row6_col13, #T_b6ba8_row7_col1, #T_b6ba8_row7_col2, #T_b6ba8_row7_col3, #T_b6ba8_row7_col4, #T_b6ba8_row7_col5, #T_b6ba8_row7_col6, #T_b6ba8_row7_col7, #T_b6ba8_row7_col8, #T_b6ba8_row7_col9, #T_b6ba8_row7_col10, #T_b6ba8_row7_col11, #T_b6ba8_row7_col12, #T_b6ba8_row7_col13, #T_b6ba8_row8_col1, #T_b6ba8_row8_col2, #T_b6ba8_row8_col3, #T_b6ba8_row8_col4, #T_b6ba8_row8_col6, #T_b6ba8_row8_col7, #T_b6ba8_row8_col8, #T_b6ba8_row8_col9, #T_b6ba8_row8_col10, #T_b6ba8_row8_col11, #T_b6ba8_row8_col12, #T_b6ba8_row8_col13, #T_b6ba8_row9_col1, #T_b6ba8_row9_col2, #T_b6ba8_row9_col4, #T_b6ba8_row9_col5, #T_b6ba8_row9_col6, #T_b6ba8_row9_col7, #T_b6ba8_row9_col8, #T_b6ba8_row9_col9, #T_b6ba8_row9_col10, #T_b6ba8_row9_col11, #T_b6ba8_row9_col12, #T_b6ba8_row9_col13, #T_b6ba8_row10_col1, #T_b6ba8_row10_col3, #T_b6ba8_row10_col4, #T_b6ba8_row10_col5, #T_b6ba8_row10_col6, #T_b6ba8_row10_col7, #T_b6ba8_row10_col8, #T_b6ba8_row10_col9, #T_b6ba8_row10_col10, #T_b6ba8_row10_col11, #T_b6ba8_row10_col12, #T_b6ba8_row10_col13 {
  text-align: center;
}
#T_b6ba8_row0_col10, #T_b6ba8_row2_col1, #T_b6ba8_row3_col2, #T_b6ba8_row5_col4, #T_b6ba8_row6_col2, #T_b6ba8_row8_col5, #T_b6ba8_row9_col3, #T_b6ba8_row10_col2 {
  font-weight: bold;
  text-decoration: underline;
  text-align: center;
}
#T_b6ba8_row1_col0, #T_b6ba8_row4_col0, #T_b6ba8_row7_col0 {
  font-weight: bold;
  text-decoration: underline;
  text-align: center;
}
</style>
<table id="T_b6ba8">
  <thead>
    <tr>
      <th class="blank level0" >&nbsp;</th>
      <th id="T_b6ba8_level0_col0" class="col_heading level0 col0" >MERaLiON-2-10B</th>
      <th id="T_b6ba8_level0_col1" class="col_heading level0 col1" >MERaLiON-AudioLLM-Whisper-SEA-LION</th>
      <th id="T_b6ba8_level0_col2" class="col_heading level0 col2" >MERaLiON-2-10B-ASR</th>
      <th id="T_b6ba8_level0_col3" class="col_heading level0 col3" >MERaLiON-2-3B</th>
      <th id="T_b6ba8_level0_col4" class="col_heading level0 col4" >SeaLLMs-Audio-7B</th>
      <th id="T_b6ba8_level0_col5" class="col_heading level0 col5" >Qwen2-Audio-7B-Instruct</th>
      <th id="T_b6ba8_level0_col6" class="col_heading level0 col6" >Qwen2.5-Omni-3B</th>
      <th id="T_b6ba8_level0_col7" class="col_heading level0 col7" >phi_4_multimodal_instruct</th>
      <th id="T_b6ba8_level0_col8" class="col_heading level0 col8" >cascade-whisper_large_v3-llama_3_8b_instruct</th>
      <th id="T_b6ba8_level0_col9" class="col_heading level0 col9" >Qwen2.5-Omni-7B</th>
      <th id="T_b6ba8_level0_col10" class="col_heading level0 col10" >cascade-whisper_large_v2-gemma2_9b_cpt-sea_lionv3_instruct</th>
      <th id="T_b6ba8_level0_col11" class="col_heading level0 col11" >Qwen-Audio-Chat</th>
      <th id="T_b6ba8_level0_col12" class="col_heading level0 col12" >SALMONN_7B</th>
      <th id="T_b6ba8_level0_col13" class="col_heading level0 col13" >WavLLM_fairseq</th>
    </tr>
  </thead>
  <tbody>
    <tr>
      <th id="T_b6ba8_level0_row0" class="row_heading level0 row0" >Speech Instruction</th>
      <td id="T_b6ba8_row0_col0" class="data row0 col0" >70.200000</td>
      <td id="T_b6ba8_row0_col1" class="data row0 col1" >70.800000</td>
      <td id="T_b6ba8_row0_col2" class="data row0 col2" >13.400000</td>
      <td id="T_b6ba8_row0_col3" class="data row0 col3" >19.100000</td>
      <td id="T_b6ba8_row0_col4" class="data row0 col4" >66.900000</td>
      <td id="T_b6ba8_row0_col5" class="data row0 col5" >48.700000</td>
      <td id="T_b6ba8_row0_col6" class="data row0 col6" >65.000000</td>
      <td id="T_b6ba8_row0_col7" class="data row0 col7" >36.200000</td>
      <td id="T_b6ba8_row0_col8" class="data row0 col8" >66.100000</td>
      <td id="T_b6ba8_row0_col9" class="data row0 col9" >58.300000</td>
      <td id="T_b6ba8_row0_col10" class="data row0 col10" >72.900000</td>
      <td id="T_b6ba8_row0_col11" class="data row0 col11" >10.200000</td>
      <td id="T_b6ba8_row0_col12" class="data row0 col12" >12.900000</td>
      <td id="T_b6ba8_row0_col13" class="data row0 col13" >20.400000</td>
    </tr>
    <tr>
      <th id="T_b6ba8_level0_row1" class="row_heading level0 row1" >Emotion Recognition</th>
      <td id="T_b6ba8_row1_col0" class="data row1 col0" >63.736268</td>
      <td id="T_b6ba8_row1_col1" class="data row1 col1" >48.577313</td>
      <td id="T_b6ba8_row1_col2" class="data row1 col2" >53.693298</td>
      <td id="T_b6ba8_row1_col3" class="data row1 col3" >54.040797</td>
      <td id="T_b6ba8_row1_col4" class="data row1 col4" >52.007576</td>
      <td id="T_b6ba8_row1_col5" class="data row1 col5" >49.846540</td>
      <td id="T_b6ba8_row1_col6" class="data row1 col6" >33.037836</td>
      <td id="T_b6ba8_row1_col7" class="data row1 col7" >40.677800</td>
      <td id="T_b6ba8_row1_col8" class="data row1 col8" >50.937578</td>
      <td id="T_b6ba8_row1_col9" class="data row1 col9" >31.469397</td>
      <td id="T_b6ba8_row1_col10" class="data row1 col10" >48.214969</td>
      <td id="T_b6ba8_row1_col11" class="data row1 col11" >41.671551</td>
      <td id="T_b6ba8_row1_col12" class="data row1 col12" >33.584869</td>
      <td id="T_b6ba8_row1_col13" class="data row1 col13" >50.801545</td>
    </tr>
    <tr>
      <th id="T_b6ba8_level0_row2" class="row_heading level0 row2" >Audio Scene Question Answering</th>
      <td id="T_b6ba8_row2_col0" class="data row2 col0" >51.140374</td>
      <td id="T_b6ba8_row2_col1" class="data row2 col1" >52.207756</td>
      <td id="T_b6ba8_row2_col2" class="data row2 col2" >49.511886</td>
      <td id="T_b6ba8_row2_col3" class="data row2 col3" >46.141353</td>
      <td id="T_b6ba8_row2_col4" class="data row2 col4" >50.193739</td>
      <td id="T_b6ba8_row2_col5" class="data row2 col5" >47.048025</td>
      <td id="T_b6ba8_row2_col6" class="data row2 col6" >48.123228</td>
      <td id="T_b6ba8_row2_col7" class="data row2 col7" >42.217143</td>
      <td id="T_b6ba8_row2_col8" class="data row2 col8" >21.876943</td>
      <td id="T_b6ba8_row2_col9" class="data row2 col9" >45.669153</td>
      <td id="T_b6ba8_row2_col10" class="data row2 col10" >18.043681</td>
      <td id="T_b6ba8_row2_col11" class="data row2 col11" >51.618622</td>
      <td id="T_b6ba8_row2_col12" class="data row2 col12" >51.816958</td>
      <td id="T_b6ba8_row2_col13" class="data row2 col13" >33.034083</td>
    </tr>
    <tr>
      <th id="T_b6ba8_level0_row3" class="row_heading level0 row3" >Gender Recognition</th>
      <td id="T_b6ba8_row3_col0" class="data row3 col0" >95.109423</td>
      <td id="T_b6ba8_row3_col1" class="data row3 col1" >97.177396</td>
      <td id="T_b6ba8_row3_col2" class="data row3 col2" >97.220335</td>
      <td id="T_b6ba8_row3_col3" class="data row3 col3" >93.810266</td>
      <td id="T_b6ba8_row3_col4" class="data row3 col4" >75.449392</td>
      <td id="T_b6ba8_row3_col5" class="data row3 col5" >95.963266</td>
      <td id="T_b6ba8_row3_col6" class="data row3 col6" >47.867210</td>
      <td id="T_b6ba8_row3_col7" class="data row3 col7" >70.718047</td>
      <td id="T_b6ba8_row3_col8" class="data row3 col8" >57.039409</td>
      <td id="T_b6ba8_row3_col9" class="data row3 col9" >48.724711</td>
      <td id="T_b6ba8_row3_col10" class="data row3 col10" >19.421130</td>
      <td id="T_b6ba8_row3_col11" class="data row3 col11" >60.349349</td>
      <td id="T_b6ba8_row3_col12" class="data row3 col12" >84.365092</td>
      <td id="T_b6ba8_row3_col13" class="data row3 col13" >60.773275</td>
    </tr>
    <tr>
      <th id="T_b6ba8_level0_row4" class="row_heading level0 row4" >Spoken QA (Singlish)</th>
      <td id="T_b6ba8_row4_col0" class="data row4 col0" >66.550000</td>
      <td id="T_b6ba8_row4_col1" class="data row4 col1" >58.900000</td>
      <td id="T_b6ba8_row4_col2" class="data row4 col2" >61.850000</td>
      <td id="T_b6ba8_row4_col3" class="data row4 col3" >59.700000</td>
      <td id="T_b6ba8_row4_col4" class="data row4 col4" >51.350000</td>
      <td id="T_b6ba8_row4_col5" class="data row4 col5" >46.700000</td>
      <td id="T_b6ba8_row4_col6" class="data row4 col6" >60.500000</td>
      <td id="T_b6ba8_row4_col7" class="data row4 col7" >61.950000</td>
      <td id="T_b6ba8_row4_col8" class="data row4 col8" >59.350000</td>
      <td id="T_b6ba8_row4_col9" class="data row4 col9" >58.400000</td>
      <td id="T_b6ba8_row4_col10" class="data row4 col10" >53.750000</td>
      <td id="T_b6ba8_row4_col11" class="data row4 col11" >42.300000</td>
      <td id="T_b6ba8_row4_col12" class="data row4 col12" >43.200000</td>
      <td id="T_b6ba8_row4_col13" class="data row4 col13" >51.200000</td>
    </tr>
    <tr>
      <th id="T_b6ba8_level0_row5" class="row_heading level0 row5" >Audio Captioning</th>
      <td id="T_b6ba8_row5_col0" class="data row5 col0" >35.604270</td>
      <td id="T_b6ba8_row5_col1" class="data row5 col1" >36.976419</td>
      <td id="T_b6ba8_row5_col2" class="data row5 col2" >34.466710</td>
      <td id="T_b6ba8_row5_col3" class="data row5 col3" >33.243839</td>
      <td id="T_b6ba8_row5_col4" class="data row5 col4" >45.089372</td>
      <td id="T_b6ba8_row5_col5" class="data row5 col5" >37.278810</td>
      <td id="T_b6ba8_row5_col6" class="data row5 col6" >39.200328</td>
      <td id="T_b6ba8_row5_col7" class="data row5 col7" >30.832409</td>
      <td id="T_b6ba8_row5_col8" class="data row5 col8" >2.915778</td>
      <td id="T_b6ba8_row5_col9" class="data row5 col9" >31.896243</td>
      <td id="T_b6ba8_row5_col10" class="data row5 col10" >3.140568</td>
      <td id="T_b6ba8_row5_col11" class="data row5 col11" >39.988663</td>
      <td id="T_b6ba8_row5_col12" class="data row5 col12" >28.880570</td>
      <td id="T_b6ba8_row5_col13" class="data row5 col13" >6.200867</td>
    </tr>
    <tr>
      <th id="T_b6ba8_level0_row6" class="row_heading level0 row6" >Spoken Dialogue Summarisation</th>
      <td id="T_b6ba8_row6_col0" class="data row6 col0" >53.100000</td>
      <td id="T_b6ba8_row6_col1" class="data row6 col1" >53.600000</td>
      <td id="T_b6ba8_row6_col2" class="data row6 col2" >55.800000</td>
      <td id="T_b6ba8_row6_col3" class="data row6 col3" >48.550000</td>
      <td id="T_b6ba8_row6_col4" class="data row6 col4" >45.450000</td>
      <td id="T_b6ba8_row6_col5" class="data row6 col5" >36.300000</td>
      <td id="T_b6ba8_row6_col6" class="data row6 col6" >46.750000</td>
      <td id="T_b6ba8_row6_col7" class="data row6 col7" >50.750000</td>
      <td id="T_b6ba8_row6_col8" class="data row6 col8" >45.850000</td>
      <td id="T_b6ba8_row6_col9" class="data row6 col9" >43.150000</td>
      <td id="T_b6ba8_row6_col10" class="data row6 col10" >51.000000</td>
      <td id="T_b6ba8_row6_col11" class="data row6 col11" >25.250000</td>
      <td id="T_b6ba8_row6_col12" class="data row6 col12" >14.400000</td>
      <td id="T_b6ba8_row6_col13" class="data row6 col13" >39.450000</td>
    </tr>
    <tr>
      <th id="T_b6ba8_level0_row7" class="row_heading level0 row7" >Spoken QA (English)</th>
      <td id="T_b6ba8_row7_col0" class="data row7 col0" >79.735049</td>
      <td id="T_b6ba8_row7_col1" class="data row7 col1" >63.711481</td>
      <td id="T_b6ba8_row7_col2" class="data row7 col2" >73.975834</td>
      <td id="T_b6ba8_row7_col3" class="data row7 col3" >68.715179</td>
      <td id="T_b6ba8_row7_col4" class="data row7 col4" >70.920519</td>
      <td id="T_b6ba8_row7_col5" class="data row7 col5" >68.888565</td>
      <td id="T_b6ba8_row7_col6" class="data row7 col6" >67.818546</td>
      <td id="T_b6ba8_row7_col7" class="data row7 col7" >75.513152</td>
      <td id="T_b6ba8_row7_col8" class="data row7 col8" >78.526569</td>
      <td id="T_b6ba8_row7_col9" class="data row7 col9" >68.415131</td>
      <td id="T_b6ba8_row7_col10" class="data row7 col10" >67.814538</td>
      <td id="T_b6ba8_row7_col11" class="data row7 col11" >66.069047</td>
      <td id="T_b6ba8_row7_col12" class="data row7 col12" >60.649071</td>
      <td id="T_b6ba8_row7_col13" class="data row7 col13" >70.595242</td>
    </tr>
    <tr>
      <th id="T_b6ba8_level0_row8" class="row_heading level0 row8" >Music Understanding</th>
      <td id="T_b6ba8_row8_col0" class="data row8 col0" >63.942713</td>
      <td id="T_b6ba8_row8_col1" class="data row8 col1" >51.347936</td>
      <td id="T_b6ba8_row8_col2" class="data row8 col2" >60.657119</td>
      <td id="T_b6ba8_row8_col3" class="data row8 col3" >55.602359</td>
      <td id="T_b6ba8_row8_col4" class="data row8 col4" >63.689975</td>
      <td id="T_b6ba8_row8_col5" class="data row8 col5" >71.609099</td>
      <td id="T_b6ba8_row8_col6" class="data row8 col6" >59.309183</td>
      <td id="T_b6ba8_row8_col7" class="data row8 col7" >55.265375</td>
      <td id="T_b6ba8_row8_col8" class="data row8 col8" >56.697557</td>
      <td id="T_b6ba8_row8_col9" class="data row8 col9" >47.598989</td>
      <td id="T_b6ba8_row8_col10" class="data row8 col10" >50.463353</td>
      <td id="T_b6ba8_row8_col11" class="data row8 col11" >59.056445</td>
      <td id="T_b6ba8_row8_col12" class="data row8 col12" >49.705139</td>
      <td id="T_b6ba8_row8_col13" class="data row8 col13" >44.313395</td>
    </tr>
    <tr>
      <th id="T_b6ba8_level0_row9" class="row_heading level0 row9" >Accent Recognition</th>
      <td id="T_b6ba8_row9_col0" class="data row9 col0" >41.815396</td>
      <td id="T_b6ba8_row9_col1" class="data row9 col1" >43.799799</td>
      <td id="T_b6ba8_row9_col2" class="data row9 col2" >47.788864</td>
      <td id="T_b6ba8_row9_col3" class="data row9 col3" >60.054981</td>
      <td id="T_b6ba8_row9_col4" class="data row9 col4" >10.143836</td>
      <td id="T_b6ba8_row9_col5" class="data row9 col5" >10.901397</td>
      <td id="T_b6ba8_row9_col6" class="data row9 col6" >0.478694</td>
      <td id="T_b6ba8_row9_col7" class="data row9 col7" >3.097615</td>
      <td id="T_b6ba8_row9_col8" class="data row9 col8" >21.398482</td>
      <td id="T_b6ba8_row9_col9" class="data row9 col9" >0.587293</td>
      <td id="T_b6ba8_row9_col10" class="data row9 col10" >25.929693</td>
      <td id="T_b6ba8_row9_col11" class="data row9 col11" >17.550294</td>
      <td id="T_b6ba8_row9_col12" class="data row9 col12" >11.577381</td>
      <td id="T_b6ba8_row9_col13" class="data row9 col13" >14.294613</td>
    </tr>
    <tr>
      <th id="T_b6ba8_level0_row10" class="row_heading level0 row10" >Speech Translation</th>
      <td id="T_b6ba8_row10_col0" class="data row10 col0" >27.391115</td>
      <td id="T_b6ba8_row10_col1" class="data row10 col1" >27.086366</td>
      <td id="T_b6ba8_row10_col2" class="data row10 col2" >28.540359</td>
      <td id="T_b6ba8_row10_col3" class="data row10 col3" >22.130258</td>
      <td id="T_b6ba8_row10_col4" class="data row10 col4" >21.143215</td>
      <td id="T_b6ba8_row10_col5" class="data row10 col5" >10.826666</td>
      <td id="T_b6ba8_row10_col6" class="data row10 col6" >21.776628</td>
      <td id="T_b6ba8_row10_col7" class="data row10 col7" >13.827110</td>
      <td id="T_b6ba8_row10_col8" class="data row10 col8" >13.536272</td>
      <td id="T_b6ba8_row10_col9" class="data row10 col9" >20.688241</td>
      <td id="T_b6ba8_row10_col10" class="data row10 col10" >21.437997</td>
      <td id="T_b6ba8_row10_col11" class="data row10 col11" >4.973184</td>
      <td id="T_b6ba8_row10_col12" class="data row10 col12" >13.486003</td>
      <td id="T_b6ba8_row10_col13" class="data row10 col13" >9.046791</td>
    </tr>
  </tbody>
</table>


## How to Use
> [!WARNING]
> **Out-of-scope use**: This model is not intended for tool calling, math, or coding tasks.


MERaLiON-2 requires `transformers` version `4.50.1`:

```
pip install transformers==4.50.1
pip install librosa
```

To run on GPU, MERaLiON-2 requires `flash-attn`:

```
pip install flash-attn --no-build-isolation
```

> [!TIP]
> Should you face any difficulties installing the above packages, you can try installing them within the Docker container `pytorch/pytorch:2.5.1-cuda12.1-cudnn9-devel` instead, whose CUDA and PyTorch environments have been verified to work.

### Audio Input

- For ASR tasks, we suggest limiting audio inputs to 30 seconds at a 16,000 Hz sampling rate.
- For general speech and audio understanding tasks, audio inputs of up to 300 seconds at a 16,000 Hz sampling rate are supported (see the preparation sketch below).
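
A minimal sketch of preparing an audio clip to these constraints with `librosa` (the file path is a placeholder):

```python
import librosa

# Placeholder path; replace with your own audio file.
AUDIO_PATH = "/path/to/your/audio/file"

# librosa resamples to the requested rate and downmixes to mono by default.
audio_array, sample_rate = librosa.load(AUDIO_PATH, sr=16000, mono=True)

# For ASR / ST, keep at most 30 seconds; general audio understanding supports up to 300 seconds.
MAX_ASR_SECONDS = 30
audio_array = audio_array[: MAX_ASR_SECONDS * sample_rate]

print(f"{len(audio_array) / sample_rate:.1f} seconds at {sample_rate} Hz")
```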

### Text Prompt

MERaLiON-2 is trained with this prompt template: 

```
Instruction: <TextHere> \nFollow the text instruction based on the following audio: <SpeechHere>
```

It is generally recommended to follow this template, i.e., replace `<TextHere>` with your text instruction while leaving `<SpeechHere>` untouched. We list a few useful example prompts here: 

**Standard prompts for better accuracy**

```python
prompt_template = "Instruction: {query} \nFollow the text instruction based on the following audio: <SpeechHere>"

transcription_prompt = prompt_template.format(query="Please transcribe the speech")
translation_prompt = prompt_template.format(query="Please translate the speech into xxx")
```

> [!WARNING]
> Other prompts might not perform well on MERaLiON-2-10B-ASR. 

### Hugging Face CPU Inference

```python
import librosa
from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor

repo_id = "MERaLiON/MERaLiON-2-10B-ASR"

processor = AutoProcessor.from_pretrained(
    repo_id, 
    trust_remote_code=True,
    )
model = AutoModelForSpeechSeq2Seq.from_pretrained(
    repo_id,
    use_safetensors=True,
    trust_remote_code=True,
)

prompt_template = "Instruction: {query} \nFollow the text instruction based on the following audio: <SpeechHere>"
transcribe_prompt = "Please transcribe this speech."
translate_prompt = "Can you please translate this speech into written Chinese?"

# batch inference of 2 samples
conversation = [
    [{"role": "user", "content": prompt_template.format(query=transcribe_prompt)}],
    [{"role": "user", "content": prompt_template.format(query=translate_prompt)}],
]

chat_prompt = processor.tokenizer.apply_chat_template(
    conversation=conversation,
    tokenize=False,
    add_generation_prompt=True
)

# Load the audio at a 16,000 Hz sampling rate.
audio_array, sample_rate = librosa.load("/path/to/your/audio/file", sr=16000)
audio_array = [audio_array]*2
inputs = processor(text=chat_prompt, audios=audio_array)

# adjust the `max_new_tokens` based on your use case.
outputs = model.generate(**inputs, max_new_tokens=256)
generated_ids = outputs[:, inputs['input_ids'].size(1):]
response = processor.batch_decode(generated_ids, skip_special_tokens=True)
```
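
`response` is a list with one decoded string per conversation in the batch; in this two-sample example, `response[0]` contains the transcription and `response[1]` the Chinese translation.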

### Hugging Face GPU Inference

```python
import torch
import librosa
from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor

repo_id = "MERaLiON/MERaLiON-2-10B-ASR"
device = "cuda"

processor = AutoProcessor.from_pretrained(
    repo_id, 
    trust_remote_code=True,
    )
model = AutoModelForSpeechSeq2Seq.from_pretrained(
    repo_id,
    use_safetensors=True,
    trust_remote_code=True,
    attn_implementation="flash_attention_2",
    torch_dtype=torch.bfloat16
).to(device)

prompt_template = "Instruction: {query} \nFollow the text instruction based on the following audio: <SpeechHere>"
transcribe_prompt = "Please transcribe this speech."
translate_prompt = "Can you please translate this speech into written Chinese?"

# batch inference of 2 samples
conversation = [
    [{"role": "user", "content": prompt_template.format(query=transcribe_prompt)}],
    [{"role": "user", "content": prompt_template.format(query=translate_prompt)}],
]

chat_prompt = processor.tokenizer.apply_chat_template(
    conversation=conversation,
    tokenize=False,
    add_generation_prompt=True
)

# Load the audio at a 16,000 Hz sampling rate.
audio_array, sample_rate = librosa.load("/path/to/your/audio/file", sr=16000)
audio_array = [audio_array]*2
inputs = processor(text=chat_prompt, audios=audio_array)

for key, value in inputs.items():
    if isinstance(value, torch.Tensor):
        inputs[key] = inputs[key].to(device)

        if value.dtype == torch.float32:
            inputs[key] = inputs[key].to(torch.bfloat16)

# adjust the `max_new_tokens` based on your use case.
outputs = model.generate(**inputs, max_new_tokens=256)
generated_ids = outputs[:, inputs['input_ids'].size(1):]
response = processor.batch_decode(generated_ids, skip_special_tokens=True)
```
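
For recordings longer than the suggested 30-second ASR limit, one simple workaround (not an official recipe) is to split the audio into 30-second chunks and transcribe each chunk as a separate sample in the batch, reusing the `processor`, `model`, `prompt_template`, and `transcribe_prompt` defined above. A minimal sketch; fixed-boundary chunking can split words, so treat the stitched result as an approximation:

```python
chunk_seconds = 30
audio_array, sample_rate = librosa.load("/path/to/your/audio/file", sr=16000)

# Split into fixed-length 30-second chunks (the last chunk may be shorter).
chunk_size = chunk_seconds * sample_rate
chunks = [audio_array[i:i + chunk_size] for i in range(0, len(audio_array), chunk_size)]

# One transcription prompt per chunk.
conversation = [
    [{"role": "user", "content": prompt_template.format(query=transcribe_prompt)}]
    for _ in chunks
]
chat_prompt = processor.tokenizer.apply_chat_template(
    conversation=conversation,
    tokenize=False,
    add_generation_prompt=True
)

inputs = processor(text=chat_prompt, audios=chunks)
for key, value in inputs.items():
    if isinstance(value, torch.Tensor):
        inputs[key] = value.to(device)
        if value.dtype == torch.float32:
            inputs[key] = inputs[key].to(torch.bfloat16)

outputs = model.generate(**inputs, max_new_tokens=256)
generated_ids = outputs[:, inputs["input_ids"].size(1):]
chunk_texts = processor.batch_decode(generated_ids, skip_special_tokens=True)
transcript = " ".join(text.strip() for text in chunk_texts)
```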

## ⚠️ Disclaimer

The current MERaLiON-2 has not been specifically aligned for safety and may generate content that is inappropriate, offensive, or harmful. Developers and users are responsible for performing their own safety fine-tuning and implementing necessary security measures. The authors shall not be held liable for any claims, damages, or other liabilities arising from the use of the released models, weights, or code.

### Compute and Infrastructure

MERaLiON-2 was trained on the [**ASPIRE 2A+**](https://help.nscc.sg/aspire2aplus/about/) Supercomputer Cluster, provided by the [**National Supercomputing Centre (NSCC)**](https://www.nscc.sg/), Singapore. The ASPIRE 2A+ cluster provides multiple H100 nodes, each equipped with 8 NVIDIA H100 GPUs, 2 TB of RAM, and 30 TB of locally attached NVMe storage. These nodes are interconnected via a rail-optimised, full fat-tree topology using 400 Gb/s NDR InfiniBand cables. Additionally, the cluster incorporates a 2.5 PB SSD-based Lustre file system, linked to the H100 nodes through high-speed InfiniBand connections. 

With a global batch size of 768, we trained the current release of MERaLiON-2 for around 200k steps, which took around 2 days using 16 nodes (128 H100 GPUs).

## 📚 Citation

If you find our work useful, please cite our papers:

[MERaLiON-AudioLLM: Bridging Audio and Language with Large Language Models](https://arxiv.org/abs/2412.09818) <br>
[AudioBench: A Universal Benchmark for Audio Large Language Models](https://aclanthology.org/2025.naacl-long.218/) <br>
[Advancing Singlish Understanding: Bridging the Gap with Datasets and Multimodal Models](https://arxiv.org/abs/2501.01034) <br>
[MoWE-Audio: Multitask AudioLLMs with Mixture of Weak Encoders](https://arxiv.org/abs/2409.06635) <br>

```
@misc{he2024meralionaudiollmtechnicalreport,
      title={MERaLiON-AudioLLM: Bridging Audio and Language with Large Language Models}, 
      author={{MERaLiON Team}},
      year={2024},
      eprint={2412.09818},
      archivePrefix={arXiv},
      primaryClass={cs.CL},
      url={https://arxiv.org/abs/2412.09818}, 
}
```

```
@article{wang2024audiobench,
    title={AudioBench: A Universal Benchmark for Audio Large Language Models},
    author={Wang, Bin and Zou, Xunlong and Lin, Geyu and Sun, Shuo and Liu, Zhuohan and Zhang, Wenyu and Liu, Zhengyuan and Aw, AiTi and Chen, Nancy F},
    journal={NAACL},
    year={2025}
    }
```

```
@article{wang2025advancing,
    title={Advancing Singlish Understanding: Bridging the Gap with Datasets and Multimodal Models},
    author={Wang, Bin and Zou, Xunlong and Sun, Shuo and Zhang, Wenyu and He, Yingxu and Liu, Zhuohan and Wei, Chengwei and Chen, Nancy F and Aw, AiTi},
    journal={arXiv preprint arXiv:2501.01034},
    year={2025}
    }
```

```
@article{zhang2024mowe,
    title={MoWE-Audio: Multitask AudioLLMs with Mixture of Weak Encoders},
    author={Zhang, Wenyu and Sun, Shuo and Wang, Bin and Zou, Xunlong and Liu, Zhuohan and He, Yingxu and Lin, Geyu and Chen, Nancy F and Aw, Ai Ti},
    journal={ICASSP},
    year={2025}
    }
```