
WIP Rustification of the example

pull/83/head
Andrea Ciliberti 2 years ago
parent commit 169d353d9f
  1. ctru-rs/examples/audio_filters.rs (144 lines changed)
  2. ctru-rs/src/services/ndsp.rs (32 lines changed)

ctru-rs/examples/audio_filters.rs (144 lines changed)

@@ -1,8 +1,10 @@
 #![feature(allocator_api)]
-use ctru::prelude::*;
-use ctru::services::ndsp::{Ndsp, OutputMode, InterpolationType};
 use ctru::linear::LinearAllocator;
+use ctru::prelude::*;
+use ctru::services::ndsp::{
+    AudioFormat, InterpolationType, Ndsp, OutputMode, WaveBuffer, WaveInfo,
+};
 
 const SAMPLERATE: u32 = 22050;
 const SAMPLESPERBUF: u32 = SAMPLERATE / 30; // 735
@@ -13,18 +15,14 @@ fn array_size(array: &[u8]) -> usize {
 } // (sizeof(array)/sizeof(array[0]))
 
 // audioBuffer is stereo PCM16
-void fill_buffer(void* audioBuffer, size_t offset, size_t size, int frequency) {
-    u32* dest = (u32*) audioBuffer;
-    for (int i = 0; i < size; i++) {
+fn fill_buffer(audioData: &mut Box<[u8], LinearAlloc>, frequency: i32) {
+    for i in 0..size {
         // This is a simple sine wave, with a frequency of `frequency` Hz, and an amplitude 30% of maximum.
-        s16 sample = 0.3 * 0x7FFF * sin(frequency * (2 * M_PI) * (offset + i) / SAMPLERATE);
+        let sample: i16 = 0.3 * 0x7FFF * sin(frequency * (2 * std::f32::PI) * i / SAMPLERATE);
         // Stereo samples are interleaved: left and right channels.
-        dest[i] = (sample << 16) | (sample & 0xffff);
+        audioData[i] = (sample << 16) | (sample & 0xffff);
     }
-
-    DSP_FlushDataCache(audioBuffer, size);
 }
 
 fn main() {
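As committed, the Rust side of fill_buffer above does not compile yet: size is not a parameter, std::f32::PI is not a valid path (std::f32::consts::PI is), sin is a method rather than a free function, and the f32/i16 arithmetic mixes types. A minimal sketch of one way it could look, assuming the buffer holds interleaved little-endian stereo PCM16 bytes in LINEAR memory; the chunks_exact_mut/to_le_bytes packing is this sketch's own choice, not part of the commit:

fn fill_buffer(audio_data: &mut Box<[u8], LinearAllocator>, frequency: i32) {
    // Each stereo frame is 4 bytes: left i16 + right i16, little-endian.
    for (i, frame) in audio_data.chunks_exact_mut(4).enumerate() {
        // Simple sine wave at `frequency` Hz, 30% of maximum amplitude.
        let angle = frequency as f32 * 2.0 * std::f32::consts::PI * i as f32 / SAMPLERATE as f32;
        let sample = (0.3 * i16::MAX as f32 * angle.sin()) as i16;
        let bytes = sample.to_le_bytes();
        // Stereo samples are interleaved: left and right channels.
        frame[0..2].copy_from_slice(&bytes);
        frame[2..4].copy_from_slice(&bytes);
    }
}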
@@ -36,7 +34,15 @@ fn main() {
     println!("libctru filtered streamed audio\n");
 
-    let audioBuffer = Box::new_in([0u32; (SAMPLESPERBUF * BYTESPERSAMPLE * 2)], LinearAllocator);
+    let audioBuffer = Box::new_in(
+        [0u32; (SAMPLESPERBUF * BYTESPERSAMPLE * 2)],
+        LinearAllocator,
+    );
+
+    fill_buffer(audioBuffer, notefreq[note]);
+
+    let audioBuffer1 =
+        WaveBuffer::new(audioBuffer, AudioFormat::PCM16Stereo).expect("Couldn't sync DSP cache");
+    let audioBuffer2 = audioBuffer1.clone();
 
     let fillBlock = false;
@@ -60,10 +66,7 @@ fn main() {
     // Note Frequencies
     let notefreq = [
-        220,
-        440, 880, 1760, 3520, 7040,
-        14080,
-        7040, 3520, 1760, 880, 440
+        220, 440, 880, 1760, 3520, 7040, 14080, 7040, 3520, 1760, 880, 440,
     ];
 
     let note: i32 = 4;
@@ -76,7 +79,7 @@ fn main() {
         "High-Pass",
         "Band-Pass",
         "Notch",
-        "Peaking"
+        "Peaking",
     ];
 
     let filter = 0;
@@ -84,97 +87,84 @@ fn main() {
     // We set up two wave buffers and alternate between the two,
     // effectively streaming an infinitely long sine wave.
-    ndspWaveBuf waveBuf[2];
-    memset(waveBuf,0,sizeof(waveBuf));
-    waveBuf[0].data_vaddr = &audioBuffer[0];
-    waveBuf[0].nsamples = SAMPLESPERBUF;
-    waveBuf[1].data_vaddr = &audioBuffer[SAMPLESPERBUF];
-    waveBuf[1].nsamples = SAMPLESPERBUF;
-
-    let stream_offset = 0;
-
-    fill_buffer(audioBuffer,stream_offset, SAMPLESPERBUF * 2, notefreq[note]);
-
-    stream_offset += SAMPLESPERBUF;
-
-    channel_zero.add_wave_buffer(&waveBuf[0]);
-    channel_zero.add_wave_buffer(&waveBuf[1]);
+    let mut buf1 = WaveInfo::new(&mut audioBuffer1, false);
+    let mut buf2 = WaveInfo::new(&mut audioBuffer2, false);
+
+    unsafe {
+        channel_zero.add_wave_buffer(buf1);
+        channel_zero.add_wave_buffer(buf2);
+    };
 
     println!("Press up/down to change tone frequency\n");
     println!("Press left/right to change filter\n");
     println!("\x1b[6;1Hnote = {} Hz ", notefreq[note]);
     println!("\x1b[7;1Hfilter = {} ", filter_names[filter]);
 
-    while(aptMainLoop()) {
-
-        gfxSwapBuffers();
-        gfxFlushBuffers();
-        gspWaitForVBlank();
-
-        hidScanInput();
-        u32 kDown = hidKeysDown();
+    while apt.main_loop() {
+        hid.scan_input();
+
+        let keys_down = hid.keys_down();
 
-        if (kDown & KEY_START)
-            break; // break in order to return to hbmenu
+        if keys_down.contains(KeyPad::KEY_START) {
+            break;
+        } // break in order to return to hbmenu
 
-        if (kDown & KEY_DOWN) {
-            note--;
+        if keys_down.contains(KeyPad::KEY_DOWN) {
+            note -= 1;
             if (note < 0) {
-                note = ARRAY_SIZE(notefreq) - 1;
+                note = notefreq.len() - 1;
             }
             println!("\x1b[6;1Hnote = {} Hz ", notefreq[note]);
-        } else if (kDown & KEY_UP) {
-            note++;
-            if (note >= ARRAY_SIZE(notefreq)) {
+        } else if keys_down.contains(KeyPad::KEY_UP) {
+            note += 1;
+            if (note >= notefreq.len()) {
                 note = 0;
             }
             println!("\x1b[6;1Hnote = {} Hz ", notefreq[note]);
         }
 
-        bool update_params = false;
-        if (kDown & KEY_LEFT) {
-            filter--;
+        let update_params = false;
+        if keys_down.contains(KeyPad::KEY_LEFT) {
+            filter -= 1;
             if (filter < 0) {
-                filter = ARRAY_SIZE(filter_names) - 1;
+                filter = filter_names.len() - 1;
             }
             update_params = true;
-        } else if (kDown & KEY_RIGHT) {
-            filter++;
-            if (filter >= ARRAY_SIZE(filter_names)) {
+        } else if keys_down.contains(KeyPad::KEY_LEFT) {
+            filter += 1;
+            if (filter >= filter_names.len()) {
                 filter = 0;
             }
             update_params = true;
         }
 
-        if (update_params) {
+        if update_params {
             println!("\x1b[7;1Hfilter = {} ", filter_names[filter]);
-            switch (filter) {
-            default:
-                ndspChnIirBiquadSetEnable(0, false);
-                break;
-            case 1:
-                ndspChnIirBiquadSetParamsLowPassFilter(0, 1760.f, 0.707f);
-                break;
-            case 2:
-                ndspChnIirBiquadSetParamsHighPassFilter(0, 1760.f, 0.707f);
-                break;
-            case 3:
-                ndspChnIirBiquadSetParamsBandPassFilter(0, 1760.f, 0.707f);
-                break;
-            case 4:
-                ndspChnIirBiquadSetParamsNotchFilter(0, 1760.f, 0.707f);
-                break;
-            case 5:
-                ndspChnIirBiquadSetParamsPeakingEqualizer(0, 1760.f, 0.707f, 3.0f);
-                break;
+            match filter {
+                1 => ndspChnIirBiquadSetParamsLowPassFilter(0, 1760., 0.707),
+                2 => ndspChnIirBiquadSetParamsHighPassFilter(0, 1760., 0.707),
+                3 => ndspChnIirBiquadSetParamsBandPassFilter(0, 1760., 0.707),
+                4 => ndspChnIirBiquadSetParamsNotchFilter(0, 1760., 0.707),
+                5 => ndspChnIirBiquadSetParamsPeakingEqualizer(0, 1760., 0.707, 3.0),
+                _ => ndspChnIirBiquadSetEnable(0, false),
             }
         }
 
-        if (waveBuf[fillBlock].status == NDSP_WBUF_DONE) {
-            fill_buffer(waveBuf[fillBlock].data_pcm16, stream_offset, waveBuf[fillBlock].nsamples, notefreq[note]);
-            ndspChnWaveBufAdd(0, &waveBuf[fillBlock]);
-            stream_offset += waveBuf[fillBlock].nsamples;
+        if waveBuf[fillBlock].status == NDSP_WBUF_DONE {
+            if fillBlock {
+                fill_buffer(buf1.data_pcm16, notefreq[note]);
+                channel_zero.add_wave_buffer(buf1);
+            } else {
+                fill_buffer(waveBuf[fillBlock].data_pcm16, notefreq[note]);
+                channel_zero.add_wave_buffer(buf2);
+            }
             fillBlock = !fillBlock;
         }
+
+        // Flush and swap framebuffers
+        gfx.flush_buffers();
+        gfx.swap_buffers();
+
+        //Wait for VBlank
+        gfx.wait_for_vblank();
     }
 }
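The refill check at the bottom of the loop still reads waveBuf[fillBlock].status and buf1.data_pcm16, leftovers from the old C ndspWaveBuf array that no longer exist on the Rust side. One possible shape for that block, assuming a hypothetical WaveInfo::status() accessor over ndspWaveBuf.status (not part of this commit) and the get_mut_wavebuffer/get_mut_data accessors added in ndsp.rs below; the exact ownership taken by add_wave_buffer is also still open in this WIP:

// Hypothetical sketch: `status()` does not exist yet in this commit.
let current = if fillBlock { &mut buf1 } else { &mut buf2 };
if current.status() == ctru_sys::NDSP_WBUF_DONE {
    // Refill the finished buffer and queue it again on channel 0.
    fill_buffer(current.get_mut_wavebuffer().get_mut_data(), notefreq[note]);
    channel_zero.add_wave_buffer(current);
    fillBlock = !fillBlock;
}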

ctru-rs/src/services/ndsp.rs (32 lines changed)

@@ -32,17 +32,19 @@ pub enum AudioFormat {
 }
 
 /// Base struct to represent audio wave data. This requires audio format information.
+#[derive(Debug, Clone)]
 pub struct WaveBuffer {
     /// Buffer data. This data must be allocated on the LINEAR memory.
     data: Box<[u8], LinearAllocator>,
     audio_format: AudioFormat,
-    length: usize,
+    nsamples: usize, // We don't use the slice's length here because depending on the format it may vary
     // adpcm_data: AdpcmData, TODO: Requires research on how this format is handled.
 }
 
-pub struct WaveInfo {
+/// Informational struct holding the raw audio data and playaback info. This corresponds to [ctru_sys::ndspWaveBuf]
+pub struct WaveInfo<'b> {
     /// Data block of the audio wave (plus its format information).
-    buffer: WaveBuffer,
+    buffer: &'b mut WaveBuffer,
     // Holding the data with the raw format is necessary since `libctru` will access it.
     raw_data: ctru_sys::ndspWaveBuf,
 }
@@ -178,12 +180,22 @@ impl AudioFormat {
 }
 
 impl WaveBuffer {
-    pub fn new(data: Box<[u8], LinearAllocator>, audio_format: AudioFormat) -> Self {
-        WaveBuffer {
+    pub fn new(data: Box<[u8], LinearAllocator>, audio_format: AudioFormat) -> crate::Result<Self> {
+        let nsamples = data.len() / format.size();
+
+        unsafe {
+            ResultCode(ctru_sys::DSP_FlushDataCache(data.as_ptr(), data.len()))?;
+        }
+
+        Ok(WaveBuffer {
             data,
             audio_format,
-            length: data.len() / format.size(),
-        }
+            nsamples,
+        })
+    }
+
+    pub fn get_mut_data(&mut self) -> &mut Box<[u8], LinearAllocator> {
+        &mut self.data
     }
 
     pub fn format(&self) -> AudioFormat {
@@ -195,8 +207,8 @@ impl WaveBuffer {
     }
 }
 
-impl WaveInfo {
-    pub fn new(buffer: WaveBuffer, looping: bool) -> Self {
+impl<'b> WaveInfo<'b> {
+    pub fn new(buffer: &'b mut WaveBuffer, looping: bool) -> Self {
         let raw_data = ctru_sys::ndspWaveBuf {
             __bindgen_anon_1: buffer.data.as_ptr(), // Buffer data virtual address
             nsamples: buffer.length,
@@ -211,6 +223,10 @@ impl WaveInfo {
         Self { buffer, raw_data }
     }
+
+    pub fn get_mut_wavebuffer(&mut self) -> &'b mut WaveBuffer {
+        &mut self.buffer
+    }
 }
 
 impl Drop for Ndsp {
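Taken together, the new types are meant to be used roughly as follows; a minimal sketch assuming only the constructors and accessors shown in this diff (the buffer size and format are illustrative, and .expect mirrors the example's error handling rather than a recommendation):

#![feature(allocator_api)]
use ctru::linear::LinearAllocator;
use ctru::services::ndsp::{AudioFormat, WaveBuffer, WaveInfo};

fn build_wave() {
    // Raw stereo PCM16 bytes, allocated in LINEAR memory so the DSP can reach them.
    let data: Box<[u8], LinearAllocator> = Box::new_in([0u8; 735 * 4], LinearAllocator);

    // `WaveBuffer::new` now flushes the DSP data cache, so it can fail.
    let mut wave_buffer =
        WaveBuffer::new(data, AudioFormat::PCM16Stereo).expect("Couldn't sync DSP cache");

    // `WaveInfo` borrows the buffer mutably for as long as libctru may touch it.
    let _wave_info = WaveInfo::new(&mut wave_buffer, false);
}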
