Browse Source

Merge branch 'roton'

improved_timing
Ian Burgmyer 6 years ago
parent
commit
beea4e816e
  1. 39
      DotSDL/Audio/AudioBuffer.cs
  2. 84
      DotSDL/Audio/Channel.cs
  3. 26
      DotSDL/Audio/ChannelCount.cs
  4. 251
      DotSDL/Audio/FormatConverter.cs
  5. 26
      DotSDL/Audio/Playback.cs
  6. 12
      README.md
  7. 6
      Samples/Sample.Audio/Window.cs

39
DotSDL/Audio/AudioBuffer.cs

@ -2,10 +2,45 @@
/// <summary>
/// Represents an audio buffer.
/// </summary>
public class AudioBuffer {
    private ChannelCount _channels = ChannelCount.Stereo;
    private int _bufferLength = 0;

    /// <summary>
    /// The samples in the audio buffer. The first index selects the channel
    /// (see <see cref="Channel"/> for per-layout indices) and the second
    /// selects the sample within that channel.
    /// </summary>
    public double[][] Samples;

    /// <summary>
    /// The number of audio channels that are contained in this buffer.
    /// Setting this value clears and reallocates the buffer.
    /// </summary>
    public ChannelCount Channels {
        get => _channels;
        set {
            _channels = value;
            InitializeBuffer();
        }
    }

    /// <summary>
    /// Returns the number of samples represented by this <see cref="AudioBuffer"/>.
    /// Setting this value clears and reallocates the buffer.
    /// </summary>
    public int Length {
        get => _bufferLength;
        set {
            _bufferLength = value;
            InitializeBuffer();
        }
    }

    /// <summary>
    /// Creates a new <see cref="AudioBuffer"/>. The backing arrays are
    /// allocated immediately so that <see cref="Samples"/> is never null,
    /// even before <see cref="Channels"/> or <see cref="Length"/> is set.
    /// </summary>
    public AudioBuffer() {
        InitializeBuffer();
    }

    /// <summary>
    /// Initializes the buffer. This should be called every time the buffer is resized
    /// or the number of channels changes. This will clear the buffer.
    /// </summary>
    private void InitializeBuffer() {
        Samples = new double[(int)_channels][];
        for(var i = 0; i < (int)_channels; i++)
            Samples[i] = new double[_bufferLength];
    }
}
}

84
DotSDL/Audio/Channel.cs

@ -0,0 +1,84 @@
namespace DotSDL.Audio {
    /// <summary>
    /// Describes an individual audio channel. This is typically used to target
    /// a specific speaker when populating an <see cref="AudioBuffer"/>.
    /// </summary>
    /// <remarks>
    /// These indices are logical constants, so they are declared <c>const</c>
    /// rather than as mutable static fields; a writable field would let any
    /// caller reassign a channel index and silently corrupt every mix.
    /// </remarks>
    public static class Channel {
        /********
         * Mono *
         ********/

        /// <summary>
        /// Describes the single channel in a monoaural configuration.
        /// </summary>
        public const int Mono = 0;

        /**********
         * Stereo *
         **********/

        /// <summary>
        /// Describes the left channel in a stereo configuration.
        /// </summary>
        public const int StereoLeft = 0;

        /// <summary>
        /// Describes the right channel in a stereo configuration.
        /// </summary>
        public const int StereoRight = 1;

        /****************
         * Quadraphonic *
         ****************/

        /// <summary>
        /// Describes the front-left channel in a quadraphonic configuration.
        /// </summary>
        public const int QuadFrontLeft = 0;

        /// <summary>
        /// Describes the front-right channel in a quadraphonic configuration.
        /// </summary>
        public const int QuadFrontRight = 1;

        /// <summary>
        /// Describes the rear-left channel in a quadraphonic configuration.
        /// </summary>
        public const int QuadRearLeft = 2;

        /// <summary>
        /// Describes the rear-right channel in a quadraphonic configuration.
        /// </summary>
        public const int QuadRearRight = 3;

        /*******
         * 5.1 *
         *******/

        /// <summary>
        /// Describes the front-left channel in a 5.1 configuration.
        /// </summary>
        public const int FiveOneFrontLeft = 0;

        /// <summary>
        /// Describes the front-right channel in a 5.1 configuration.
        /// </summary>
        public const int FiveOneFrontRight = 1;

        /// <summary>
        /// Describes the center channel in a 5.1 configuration.
        /// </summary>
        public const int FiveOneCenter = 2;

        /// <summary>
        /// Describes the subwoofer channel in a 5.1 configuration.
        /// </summary>
        public const int FiveOneLfe = 3;

        /// <summary>
        /// Describes the rear-left channel in a 5.1 configuration.
        /// </summary>
        public const int FiveOneRearLeft = 4;

        /// <summary>
        /// Describes the rear-right channel in a 5.1 configuration.
        /// </summary>
        public const int FiveOneRearRight = 5;
    }
}

26
DotSDL/Audio/ChannelCount.cs

@ -0,0 +1,26 @@
namespace DotSDL.Audio {
    /// <summary>
    /// Describes the number of channels that a sound source supports. Each
    /// member's numeric value is the number of discrete channels in that
    /// configuration; see <see cref="Channel"/> for the per-layout indices.
    /// </summary>
    public enum ChannelCount {
        /// <summary>
        /// Monoaural audio. A single channel.
        /// </summary>
        Mono = 1,
        /// <summary>
        /// Stereo audio. Two channels: left and right.
        /// </summary>
        Stereo = 2,
        /// <summary>
        /// Quadraphonic audio. Four channels: front and rear stereo pairs.
        /// </summary>
        Quadraphonic = 4,
        /// <summary>
        /// 5.1 audio. Six channels: front pair, center, LFE, and rear pair.
        /// </summary>
        FiveOne = 6
    }
}

251
DotSDL/Audio/FormatConverter.cs

@ -12,31 +12,216 @@ namespace DotSDL.Audio {
/// <param name="buffer">A DotSDL <see cref="AudioBuffer"/>.</param>
/// <param name="stream">The byte array to write the converted data to.</param>
/// <param name="format">The SDL <see cref="AudioFormat"/> to convert to.</param>
/// <param name="channels">The number of channels that the final mix should be converted to.</param>
internal static void ConvertFormat(ref AudioBuffer buffer, ref byte[] stream, SdlAudio.AudioFormat format, ChannelCount channels) {
    double[] samples;

    // First, fold the buffer's per-channel sample planes into a single
    // interleaved stream with the requested channel count.
    switch(channels) {
        case ChannelCount.Mono:
            ToMono(ref buffer, out samples);
            break;
        case ChannelCount.Stereo:
            ToStereo(ref buffer, out samples);
            break;
        case ChannelCount.Quadraphonic:
            ToQuadraphonic(ref buffer, out samples);
            break;
        case ChannelCount.FiveOne:
            ToFiveOne(ref buffer, out samples);
            break;
        default:
            throw new NotImplementedException();
    }

    // Next, quantize the interleaved double-precision samples into the
    // byte layout of the target SDL sample format.
    switch(format) {
        case SdlAudio.AudioFormat.SignedByte:
        case SdlAudio.AudioFormat.UnsignedByte:
            ToInt8(ref samples, ref stream);
            break;
        case SdlAudio.AudioFormat.SignedShortLittleEndian:
        case SdlAudio.AudioFormat.UnsignedShortLittleEndian:
            ToInt16(ref samples, ref stream, true);
            break;
        case SdlAudio.AudioFormat.SignedShortBigEndian:
        case SdlAudio.AudioFormat.UnsignedShortBigEndian:
            ToInt16(ref samples, ref stream, false);
            break;
        case SdlAudio.AudioFormat.SignedIntLittleEndian:
            ToInt32(ref samples, ref stream, true);
            break;
        case SdlAudio.AudioFormat.SignedIntBigEndian:
            ToInt32(ref samples, ref stream, false);
            break;
        case SdlAudio.AudioFormat.FloatLittleEndian:
            ToFloat32(ref samples, ref stream, true);
            break;
        case SdlAudio.AudioFormat.FloatBigEndian:
            ToFloat32(ref samples, ref stream, false);
            break;
        default:
            throw new NotImplementedException();
    }
}
/// <summary>
/// Converts the contents of an <see cref="AudioBuffer"/> into a monophonic stream.
/// </summary>
/// <param name="buffer">An <see cref="AudioBuffer"/> to process.</param>
/// <param name="samples">The double array to write the converted data to.</param>
private static void ToMono(ref AudioBuffer buffer, out double[] samples) {
    switch(buffer.Channels) {
        case ChannelCount.Mono:
            // Already mono--hand the channel plane back directly instead of
            // allocating a copy. Downstream converters only read the array,
            // so sharing the buffer's backing storage is safe here.
            samples = buffer.Samples[Channel.Mono];
            break;
        case ChannelCount.Stereo:
            samples = new double[buffer.Length];
            for(var i = 0; i < buffer.Length; i++)
                samples[i] = (buffer.Samples[Channel.StereoLeft][i]
                              + buffer.Samples[Channel.StereoRight][i]) / 2;
            break;
        case ChannelCount.Quadraphonic:
            samples = new double[buffer.Length];
            for(var i = 0; i < buffer.Length; i++)
                samples[i] = (buffer.Samples[Channel.QuadFrontLeft][i]
                              + buffer.Samples[Channel.QuadFrontRight][i]
                              + buffer.Samples[Channel.QuadRearLeft][i]
                              + buffer.Samples[Channel.QuadRearRight][i]) / 4;
            break;
        case ChannelCount.FiveOne:
            // Naive equal-weight average of the five speaker channels,
            // matching the style of the quadraphonic downmix above. The LFE
            // channel is discarded. TODO: confirm the desired downmix weighting.
            samples = new double[buffer.Length];
            for(var i = 0; i < buffer.Length; i++)
                samples[i] = (buffer.Samples[Channel.FiveOneFrontLeft][i]
                              + buffer.Samples[Channel.FiveOneFrontRight][i]
                              + buffer.Samples[Channel.FiveOneCenter][i]
                              + buffer.Samples[Channel.FiveOneRearLeft][i]
                              + buffer.Samples[Channel.FiveOneRearRight][i]) / 5;
            break;
        default:
            throw new NotImplementedException();
    }
}
/// <summary>
/// Converts the contents of an <see cref="AudioBuffer"/> into a stereo stream.
/// </summary>
/// <param name="buffer">An <see cref="AudioBuffer"/> to process.</param>
/// <param name="samples">The double array to write the converted data to.</param>
private static void ToStereo(ref AudioBuffer buffer, out double[] samples) {
    const int ch = (int)ChannelCount.Stereo;
    samples = new double[buffer.Length * ch];
    switch(buffer.Channels) {
        case ChannelCount.Mono:
            // Duplicate the mono channel into both speakers.
            for(var i = 0; i < buffer.Length; i++) {
                samples[i * ch] = buffer.Samples[Channel.Mono][i];
                samples[i * ch + 1] = buffer.Samples[Channel.Mono][i];
            }
            break;
        case ChannelCount.Stereo:
            for(var i = 0; i < buffer.Length; i++) {
                samples[i * ch] = buffer.Samples[Channel.StereoLeft][i];
                samples[i * ch + 1] = buffer.Samples[Channel.StereoRight][i];
            }
            break;
        case ChannelCount.Quadraphonic:
            // Average each side's front and rear speakers.
            for(var i = 0; i < buffer.Length; i++) {
                samples[i * ch] = (buffer.Samples[Channel.QuadFrontLeft][i]
                                   + buffer.Samples[Channel.QuadRearLeft][i]) / 2;
                samples[i * ch + 1] = (buffer.Samples[Channel.QuadFrontRight][i]
                                       + buffer.Samples[Channel.QuadRearRight][i]) / 2;
            }
            break;
        case ChannelCount.FiveOne:
            // Naive downmix: each side averages its front and rear speakers
            // with the shared center channel. The LFE channel is discarded.
            // TODO: confirm the desired downmix weighting.
            for(var i = 0; i < buffer.Length; i++) {
                samples[i * ch] = (buffer.Samples[Channel.FiveOneFrontLeft][i]
                                   + buffer.Samples[Channel.FiveOneCenter][i]
                                   + buffer.Samples[Channel.FiveOneRearLeft][i]) / 3;
                samples[i * ch + 1] = (buffer.Samples[Channel.FiveOneFrontRight][i]
                                       + buffer.Samples[Channel.FiveOneCenter][i]
                                       + buffer.Samples[Channel.FiveOneRearRight][i]) / 3;
            }
            break;
        default:
            throw new NotImplementedException();
    }
}
/// <summary>
/// Converts the contents of an <see cref="AudioBuffer"/> into a quadraphonic stream.
/// </summary>
/// <param name="buffer">An <see cref="AudioBuffer"/> to process.</param>
/// <param name="samples">The double array to write the converted data to.</param>
private static void ToQuadraphonic(ref AudioBuffer buffer, out double[] samples) {
    const int ch = (int)ChannelCount.Quadraphonic;
    samples = new double[buffer.Length * ch];
    switch(buffer.Channels) {
        case ChannelCount.Mono:
            // Duplicate the mono channel into all four speakers.
            for(var i = 0; i < buffer.Length; i++) {
                samples[i * ch] = buffer.Samples[Channel.Mono][i];
                samples[i * ch + 1] = buffer.Samples[Channel.Mono][i];
                samples[i * ch + 2] = buffer.Samples[Channel.Mono][i];
                samples[i * ch + 3] = buffer.Samples[Channel.Mono][i];
            }
            break;
        case ChannelCount.Stereo:
            // Duplicate the stereo pair into the front and rear speakers.
            for(var i = 0; i < buffer.Length; i++) {
                samples[i * ch] = buffer.Samples[Channel.StereoLeft][i];
                samples[i * ch + 1] = buffer.Samples[Channel.StereoRight][i];
                samples[i * ch + 2] = buffer.Samples[Channel.StereoLeft][i];
                samples[i * ch + 3] = buffer.Samples[Channel.StereoRight][i];
            }
            break;
        case ChannelCount.Quadraphonic:
            for(var i = 0; i < buffer.Length; i++) {
                samples[i * ch] = buffer.Samples[Channel.QuadFrontLeft][i];
                samples[i * ch + 1] = buffer.Samples[Channel.QuadFrontRight][i];
                samples[i * ch + 2] = buffer.Samples[Channel.QuadRearLeft][i];
                samples[i * ch + 3] = buffer.Samples[Channel.QuadRearRight][i];
            }
            break;
        case ChannelCount.FiveOne:
            // Naive downmix: the center channel is averaged into both front
            // speakers, the rears pass through, and the LFE channel is
            // discarded. TODO: confirm the desired downmix weighting.
            for(var i = 0; i < buffer.Length; i++) {
                samples[i * ch] = (buffer.Samples[Channel.FiveOneFrontLeft][i]
                                   + buffer.Samples[Channel.FiveOneCenter][i]) / 2;
                samples[i * ch + 1] = (buffer.Samples[Channel.FiveOneFrontRight][i]
                                       + buffer.Samples[Channel.FiveOneCenter][i]) / 2;
                samples[i * ch + 2] = buffer.Samples[Channel.FiveOneRearLeft][i];
                samples[i * ch + 3] = buffer.Samples[Channel.FiveOneRearRight][i];
            }
            break;
        default:
            throw new NotImplementedException();
    }
}
/// <summary>
/// Converts the contents of an <see cref="AudioBuffer"/> into a 5.1 stream.
/// </summary>
/// <param name="buffer">An <see cref="AudioBuffer"/> to process.</param>
/// <param name="samples">The double array to write the converted data to.</param>
private static void ToFiveOne(ref AudioBuffer buffer, out double[] samples) {
const int ch = (int)ChannelCount.FiveOne;
samples = new double[buffer.Length * (int)ChannelCount.FiveOne];
// TODO: Improve upmixing.
switch(buffer.Channels) {
case ChannelCount.Mono:
for(var i = 0; i < buffer.Length; i++) {
samples[i * ch] = buffer.Samples[Channel.Mono][i];
samples[i * ch + 1] = buffer.Samples[Channel.Mono][i];
samples[i * ch + 2] = 0; // Center channel.
samples[i * ch + 3] = 0; // LFE.
samples[i * ch + 4] = buffer.Samples[Channel.Mono][i];
samples[i * ch + 5] = buffer.Samples[Channel.Mono][i];
}
break;
case ChannelCount.Stereo:
for(var i = 0; i < buffer.Length; i++) {
samples[i * ch] = buffer.Samples[Channel.StereoLeft][i];
samples[i * ch + 1] = buffer.Samples[Channel.StereoRight][i];
samples[i * ch + 2] = 0; // Center channel.
samples[i * ch + 3] = 0; // LFE.
samples[i * ch + 4] = buffer.Samples[Channel.StereoLeft][i];
samples[i * ch + 5] = buffer.Samples[Channel.StereoRight][i];
}
break;
case ChannelCount.Quadraphonic:
for(var i = 0; i < buffer.Length; i++) {
samples[i * ch] = buffer.Samples[Channel.QuadFrontLeft][i];
samples[i * ch + 1] = buffer.Samples[Channel.QuadFrontRight][i];
samples[i * ch + 2] = 0; // Center channel.
samples[i * ch + 3] = 0; // LFE.
samples[i * ch + 4] = buffer.Samples[Channel.QuadRearLeft][i];
samples[i * ch + 5] = buffer.Samples[Channel.QuadRearRight][i];
}
break;
case ChannelCount.FiveOne:
for(var i = 0; i < buffer.Length; i++) {
samples[i * ch] = buffer.Samples[Channel.FiveOneFrontLeft][i];
samples[i * ch + 1] = buffer.Samples[Channel.FiveOneFrontRight][i];
samples[i * ch + 2] = buffer.Samples[Channel.FiveOneCenter][i];
samples[i * ch + 3] = buffer.Samples[Channel.FiveOneLfe][i];
samples[i * ch + 4] = buffer.Samples[Channel.FiveOneRearLeft][i];
samples[i * ch + 5] = buffer.Samples[Channel.FiveOneRearRight][i];
}
break;
default:
throw new NotImplementedException();
@ -46,11 +231,11 @@ namespace DotSDL.Audio {
/// <summary>
/// Converts an audio buffer to a byte array with 8-bit samples.
/// </summary>
/// <param name="buffer">A DotSDL <see cref="AudioBuffer"/>.</param>
/// <param name="samples">An array of double-precision floating point samples.</param>
/// <param name="stream">The byte array to write the converted data to.</param>
private static void ToInt8(AudioBuffer buffer, ref byte[] stream) {
for(var i = 0; i < buffer.Samples.Length; i++) {
var sample = buffer.Samples[i];
private static void ToInt8(ref double[] samples, ref byte[] stream) {
for(var i = 0; i < samples.Length; i++) {
var sample = samples[i];
var newSample = (sbyte)(sample * sbyte.MaxValue);
stream[i] = (byte)newSample;
}
@ -59,22 +244,22 @@ namespace DotSDL.Audio {
/// <summary>
/// Converts an audio buffer to a byte array with 16-bit samples.
/// </summary>
/// <param name="buffer">A DotSDL <see cref="AudioBuffer"/>.</param>
/// <param name="samples">An array of double-precision floating point samples.</param>
/// <param name="stream">The byte array to write the converted data to.</param>
/// <param name="littleEndian">Indicates whether the target byte stream should be little endian.</param>
private static void ToInt16(AudioBuffer buffer, ref byte[] stream, bool littleEndian) {
private static void ToInt16(ref double[] samples, ref byte[] stream, bool littleEndian) {
if((BitConverter.IsLittleEndian && littleEndian) || (!BitConverter.IsLittleEndian && !littleEndian)) {
// Sample endian == system endian.
for(var i = 0; i < buffer.Samples.Length; i++) {
var sample = buffer.Samples[i];
for(var i = 0; i < samples.Length; i++) {
var sample = samples[i];
var newSample = (short)(sample * short.MaxValue);
stream[i * 2] = (byte)newSample;
stream[i * 2 + 1] = (byte)(newSample >> 8);
}
} else {
// Sample endian != system endian. Flip bytes.
for(var i = 0; i < buffer.Samples.Length; i++) {
var sample = buffer.Samples[i];
for(var i = 0; i < samples.Length; i++) {
var sample = samples[i];
var newSample = (short)(sample * short.MaxValue);
stream[i * 2] = (byte)(newSample >> 8);
stream[i * 2 + 1] = (byte)newSample;
@ -83,16 +268,16 @@ namespace DotSDL.Audio {
}
/// <summary>
/// Converts an audio buffer to a byte array with little-endian 32-bit samples.
/// Converts an audio buffer to a byte array with 32-bit integer samples.
/// </summary>
/// <param name="buffer">A DotSDL <see cref="AudioBuffer"/>.</param>
/// <param name="samples">An array of double-precision floating point samples.</param>
/// <param name="stream">The byte array to write the converted data to.</param>
/// <param name="littleEndian">Indicates whether the target byte stream should be little endian.</param>
private static void ToInt32(AudioBuffer buffer, ref byte[] stream, bool littleEndian) {
private static void ToInt32(ref double[] samples, ref byte[] stream, bool littleEndian) {
if((BitConverter.IsLittleEndian && littleEndian) || (!BitConverter.IsLittleEndian && !littleEndian)) {
// Sample endian == system endian.
for(var i = 0; i < buffer.Samples.Length; i++) {
var sample = buffer.Samples[i];
for(var i = 0; i < samples.Length; i++) {
var sample = samples[i];
var newSample = (int)(sample * int.MaxValue);
stream[i * 4] = (byte)newSample;
stream[i * 4 + 1] = (byte)(newSample >> 8);
@ -101,8 +286,8 @@ namespace DotSDL.Audio {
}
} else {
// Sample endian != system endian. Flip bytes.
for(var i = 0; i < buffer.Samples.Length; i++) {
var sample = buffer.Samples[i];
for(var i = 0; i < samples.Length; i++) {
var sample = samples[i];
var newSample = (int)(sample * int.MaxValue);
stream[i * 4] = (byte)(newSample >> 24);
stream[i * 4 + 1] = (byte)(newSample >> 16);
@ -112,11 +297,17 @@ namespace DotSDL.Audio {
}
}
private static void ToFloat32(AudioBuffer buffer, ref byte[] stream, bool littleEndian) {
/// <summary>
/// Converts an audio buffer to a byte array with little-endian floating point samples.
/// </summary>
/// <param name="samples">An array of double-precision floating point samples.</param>
/// <param name="stream">The byte array to write the converted data to.</param>
/// <param name="littleEndian">Indicates whether the target byte stream should be little endian.</param>
private static void ToFloat32(ref double[] samples, ref byte[] stream, bool littleEndian) {
if((BitConverter.IsLittleEndian && littleEndian) || (!BitConverter.IsLittleEndian && !littleEndian)) {
// Sample endian == system endian.
for(var i = 0; i < buffer.Samples.Length; i++) {
var sample = (float)buffer.Samples[i];
for(var i = 0; i < samples.Length; i++) {
var sample = (float)samples[i];
var newSample = BitConverter.GetBytes(sample);
stream[i * 4] = newSample[0];
stream[i * 4 + 1] = newSample[1];
@ -125,8 +316,8 @@ namespace DotSDL.Audio {
}
} else {
// Sample endian != system endian. Flip bytes.
for(var i = 0; i < buffer.Samples.Length; i++) {
var sample = (float)buffer.Samples[i];
for(var i = 0; i < samples.Length; i++) {
var sample = (float)samples[i];
var newSample = BitConverter.GetBytes(sample);
stream[i * 4] = newSample[3];
stream[i * 4 + 1] = newSample[2];

26
DotSDL/Audio/Playback.cs

@ -8,7 +8,7 @@ namespace DotSDL.Audio {
/// Represents a streaming audio playback engine.
/// </summary>
public class Playback {
private const ushort DefaultBufferSize = 4096;
private const ushort DefaultBufferSize = 1024;
private readonly SdlInit _sdlInit = SdlInit.Instance;
private readonly uint _deviceId;
@ -31,7 +31,7 @@ namespace DotSDL.Audio {
/// <summary>
/// The number of sound channels for the open audio device.
/// </summary>
public byte Channels { get; }
public ChannelCount Channels { get; }
/// <summary>
/// The buffer size for the open audio device, in bytes.
@ -58,13 +58,18 @@ namespace DotSDL.Audio {
/// </summary>
public event EventHandler<AudioBuffer> BufferEmpty;
/// <summary>
/// The number of channels that the application is using.
/// </summary>
private ChannelCount RequestedChannels { get; set; }
/// <summary>
/// Initializes a new instance of the audio engine.
/// </summary>
/// <param name="freqency">The desired frequency, in hertz.</param>
/// <param name="format">The desired audio format.</param>
/// <param name="channels">The desired number of channels.</param>
public Playback(int freqency, AudioFormat format, byte channels)
public Playback(int freqency, AudioFormat format, ChannelCount channels)
: this(freqency, format, channels, DefaultBufferSize) { }
/// <summary>
@ -74,14 +79,15 @@ namespace DotSDL.Audio {
/// <param name="format">The desired audio format.</param>
/// <param name="channels">The desired number of channels.</param>
/// <param name="buffer">The desired buffer size, in samples.</param>
public Playback(int freqency, AudioFormat format, byte channels, ushort buffer) {
public Playback(int freqency, AudioFormat format, ChannelCount channels, ushort buffer) {
_sdlInit.InitSubsystem(Init.SubsystemFlags.Audio);
SdlAudio.AudioSpec actual;
RequestedChannels = channels;
var desired = new SdlAudio.AudioSpec {
Freq = freqency,
Format = GetBestAudioFormat(format),
Channels = channels,
Channels = (byte)channels,
Silence = 0,
Samples = buffer,
Padding = 0,
@ -90,11 +96,11 @@ namespace DotSDL.Audio {
Userdata = IntPtr.Zero
};
_deviceId = SdlAudio.OpenAudioDevice(IntPtr.Zero, 0, ref desired, out actual, SdlAudio.AllowedChanges.AllowAnyChange);
_deviceId = SdlAudio.OpenAudioDevice(IntPtr.Zero, 0, ref desired, out SdlAudio.AudioSpec actual, SdlAudio.AllowedChanges.AllowAnyChange);
// Populate the obtained values in the object properties.
Frequency = actual.Freq;
Channels = actual.Channels;
Channels = (ChannelCount)actual.Channels;
BufferSizeSamples = actual.Samples;
BufferSizeBytes = actual.Size;
BitSize = SdlAudio.BitSize((ushort)actual.Format);
@ -120,11 +126,11 @@ namespace DotSDL.Audio {
private void Callback(IntPtr userdata, IntPtr stream, int len) {
if(BufferEmpty == null) return;
var buffer = new AudioBuffer { Samples = new double[BufferSizeSamples] };
var buffer = new AudioBuffer { Channels = RequestedChannels, Length = BufferSizeSamples };
BufferEmpty(this, buffer);
var newStream = new byte[len];
FormatConverter.ConvertFormat(buffer, ref newStream, _sdlAudioSpec.Format);
FormatConverter.ConvertFormat(ref buffer, ref newStream, _sdlAudioSpec.Format, Channels);
Marshal.Copy(newStream, 0, stream, len);
}

12
README.md

@ -12,12 +12,14 @@ At this time, DotSDL supports the following features:
* Audio
* Support for all audio formats supported by SDL.
* Mono output.
* Full upmixing and downmixing for mono, stereo, and quadraphonic audio.
* 5.1 audio is supported, but upmixing and downmixing support for it is
currently limited.
* Input
* Keyboard input.
* Window events.
* Graphics
* A single 32-bit ARGB canvas (useful for simple pixel plotting).
* A single 32-bit ARGB canvas (useful for pixel plotting).
* Power
* Battery state.
@ -29,5 +31,7 @@ the sample/test projects and to read over the XMLDocs on the classes and
methods.
If you would still like to play around with DotSDL, the project can be built
using Microsoft Visual Studio 2017. You will also need a SDL2.dll binary for
each architecture that you plan to build your project against.
using the .NET Core SDK or any IDE that supports .NET Standard projects. You
will also need a native SDL2 library for each architecture that you plan to
build your project against.

6
Samples/Sample.Audio/Window.cs

@ -32,7 +32,7 @@ namespace Sample.Audio {
KeyPressed += Window_KeyPressed;
KeyReleased += Window_KeyReleased;
_audio = new Playback(44100, AudioFormat.Integer16, 1);
_audio = new Playback(44100, AudioFormat.Integer16, ChannelCount.Mono);
_audioFreq = _audio.Frequency;
var floatingPointText = _audio.FloatingPoint ? "floating-point, " : "";
@ -50,8 +50,8 @@ namespace Sample.Audio {
private void Audio_BufferEmpty(object sender, AudioBuffer e) {
var t = (Math.PI * 2.0 * _freq) / _audioFreq;
for(var i = 0; i < e.Samples.Length; i++)
e.Samples[i] = Math.Sin(_time++ * t);
for(var i = 0; i < e.Length; i++)
e.Samples[Channel.Mono][i] = Math.Sin(_time++ * t);
}
private void DrawGlyph(ref Canvas canvas, char ch, int xPos, Color c) {

Loading…
Cancel
Save