本文整理汇总了C#中SharpDX.XAudio2.XAudio2类的典型用法代码示例。如果您正苦于以下问题:C# XAudio2类的具体用法?C# XAudio2怎么用?C# XAudio2使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
XAudio2类属于SharpDX.XAudio2命名空间,在下文中一共展示了XAudio2类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C#代码示例。
示例1: SubmixVoice
/// <summary>
/// Creates and configures a submix voice with an optional effect chain.
/// </summary>
/// <param name="device">an instance of <see cref = "SharpDX.XAudio2.XAudio2" /></param>
/// <param name="inputChannels">[in] Number of channels in the input audio data of the submix voice. InputChannels must be less than or equal to XAUDIO2_MAX_AUDIO_CHANNELS. </param>
/// <param name="inputSampleRate">[in] Sample rate of the input audio data of submix voice. This rate must be a multiple of XAUDIO2_QUANTUM_DENOMINATOR. InputSampleRate must be between XAUDIO2_MIN_SAMPLE_RATE and XAUDIO2_MAX_SAMPLE_RATE. </param>
/// <param name="flags">[in] Flags that specify the behavior of the submix voice. Can be 0 or the following: ValueDescriptionXAUDIO2_VOICE_USEFILTERThe filter effect should be available on this voice.? </param>
/// <param name="processingStage">[in] An arbitrary number that specifies when this voice is processed with respect to other submix voices, if the XAudio2 engine is running other submix voices. The voice is processed after all other voices that include a smaller ProcessingStage value, and before all other voices that include a larger ProcessingStage value. Voices that include the same ProcessingStage value are processed in any order. A submix voice cannot send to another submix voice with a lower or equal ProcessingStage value; this prevents audio being lost due to a submix cycle. </param>
/// <param name="effectDescriptors">[in, optional] Effect chain to attach to the voice; null or an empty array means no effect chain.</param>
/// <returns>No documentation.</returns>
/// <unmanaged>HRESULT IXAudio2::CreateSubmixVoice([Out] IXAudio2SubmixVoice** ppSubmixVoice,[None] UINT32 InputChannels,[None] UINT32 InputSampleRate,[None] UINT32 Flags,[None] UINT32 ProcessingStage,[In, Optional] const XAUDIO2_VOICE_SENDS* pSendList,[In, Optional] const XAUDIO2_EFFECT_CHAIN* pEffectChain)</unmanaged>
public SubmixVoice(XAudio2 device, int inputChannels, int inputSampleRate, SubmixVoiceFlags flags, int processingStage, EffectDescriptor[] effectDescriptors)
    : base(IntPtr.Zero)
{
    // Fix: the original dereferenced effectDescriptors[0] for any non-null array,
    // so an empty array threw IndexOutOfRangeException. An empty chain is now
    // treated the same as null (no effect chain).
    if (effectDescriptors != null && effectDescriptors.Length > 0)
    {
        unsafe
        {
            // Marshal each managed descriptor into its native layout.
            var effectChain = new EffectChain();
            var nativeDescriptors = new EffectDescriptor.__Native[effectDescriptors.Length];
            for (int i = 0; i < nativeDescriptors.Length; i++)
                effectDescriptors[i].__MarshalTo(ref nativeDescriptors[i]);
            effectChain.EffectCount = nativeDescriptors.Length;

            // Pin the native array only for the duration of the native call.
            fixed (void* pEffectDescriptors = &nativeDescriptors[0])
            {
                effectChain.EffectDescriptorPointer = (IntPtr)pEffectDescriptors;
                device.CreateSubmixVoice(this, inputChannels, inputSampleRate, unchecked((int)flags), processingStage, null, effectChain);
            }
        }
    }
    else
    {
        // No effect chain requested: pass null straight through to the native API.
        device.CreateSubmixVoice(this, inputChannels, inputSampleRate, unchecked((int)flags), processingStage, null, null);
    }
}
开发者ID:QuantumDeveloper,项目名称:SharpDX,代码行数:36,代码来源:SubmixVoice.cs
示例2: XAudioDevice
/// <summary>
/// Creates the XAudio2 engine and its mastering voice, skipping engine
/// creation entirely when started from the NUnit console (but not NCrunch).
/// </summary>
public XAudioDevice()
{
    bool runningUnderNUnitConsole = StackTraceExtensions.StartedFromNUnitConsoleButNotFromNCrunch;
    if (!runningUnderNUnitConsole)
    {
        XAudio = new XAudio2();
        MasteringVoice = new MasteringVoice(XAudio);
    }
}
开发者ID:BEEden,项目名称:DeltaEngine.SharpDX,代码行数:7,代码来源:XAudioDevice.cs
示例3: InitializeMediaPlayer
/// <summary>
/// Initialize the media element for playback
/// </summary>
/// <param name="streamConfig">Object containing stream configuration details</param>
/// <param name="streamSource">Source that supplies the decoded AV samples.</param>
void InitializeMediaPlayer(MoonlightStreamConfiguration streamConfig, AvStreamSource streamSource)
{
    // NOTE(review): streamConfig is unused in this overload — confirm intentional.
    this._streamSource = streamSource;

    // Video: seek-less, zero-buffered H.264 stream so samples are rendered as
    // soon as they arrive rather than being queued.
    _videoMss = new MediaStreamSource(new VideoStreamDescriptor(VideoEncodingProperties.CreateH264()));
    _videoMss.BufferTime = TimeSpan.Zero;
    _videoMss.CanSeek = false;
    _videoMss.Duration = TimeSpan.Zero;
    _videoMss.SampleRequested += _videoMss_SampleRequested;

    // Audio: 48 kHz, 16-bit stereo through XAudio2.
    // NOTE(review): the mastering voice local is never stored anywhere —
    // presumably the engine keeps it alive; confirm it is not disposed early.
    XAudio2 xaudio = new XAudio2();
    MasteringVoice masteringVoice = new MasteringVoice(xaudio, 2, 48000);
    WaveFormat format = new WaveFormat(48000, 16, 2);

    // Set for low latency playback
    StreamDisplay.RealTimePlayback = true;
    // Render on the full window to avoid extra compositing
    StreamDisplay.IsFullWindow = true;
    // Disable built-in transport controls
    StreamDisplay.AreTransportControlsEnabled = false;
    // Start playing right away
    StreamDisplay.AutoPlay = true;
    StreamDisplay.SetMediaStreamSource(_videoMss);

    // Route the audio stream through a source voice matching the format above.
    AvStream.SetSourceVoice(new SourceVoice(xaudio, format));
}
开发者ID:ramonvc,项目名称:moonlight-windows,代码行数:34,代码来源:MediaPlayer.cs
示例4: AudioPlayer
/// <summary>
/// Initializes a new instance of the <see cref="AudioPlayer" /> class.
/// </summary>
/// <param name="xaudio2">The xaudio2 engine.</param>
/// <param name="audioStream">The input audio stream.</param>
public AudioPlayer(XAudio2 xaudio2, Stream audioStream)
{
    this.xaudio2 = xaudio2;
    audioDecoder = new AudioDecoder(audioStream);
    //sourceVoice = new SourceVoice(xaudio2, audioDecoder.WaveFormat);
    sourceVoice = new SourceVoice(xaudio2, audioDecoder.WaveFormat,0,1.0f);
    localVolume = 1.0f;

    // Recycle buffers as the voice finishes consuming them, then start the voice.
    sourceVoice.BufferEnd += sourceVoice_BufferEnd;
    sourceVoice.Start();

    // Synchronization for the playback thread.
    // NOTE(review): bufferEndEvent is presumably signaled from
    // sourceVoice_BufferEnd; playEvent gates the play loop; confirm in PlayAsync.
    bufferEndEvent = new AutoResetEvent(false);
    playEvent = new ManualResetEvent(false);
    waitForPlayToOutput = new ManualResetEvent(false);
    clock = new Stopwatch();

    // Pre-allocate buffers
    // Ring of 3 unmanaged 32 KB chunks reused for the player's whole lifetime
    // to avoid per-read allocations. Must be freed elsewhere (unmanaged memory).
    audioBuffersRing = new AudioBuffer[3];
    memBuffers = new DataPointer[audioBuffersRing.Length];
    for (int i = 0; i < audioBuffersRing.Length; i++)
    {
        audioBuffersRing[i] = new AudioBuffer();
        memBuffers[i].Size = 32 * 1024; // default size 32Kb
        memBuffers[i].Pointer = Utilities.AllocateMemory(memBuffers[i].Size);
    }

    // Initialize to stopped
    State = AudioPlayerState.Stopped;

    // Starts the playing thread
    // LongRunning hints the scheduler to give the loop a dedicated thread.
    playingTask = Task.Factory.StartNew(PlayAsync, TaskCreationOptions.LongRunning);
}
开发者ID:vulcanlee,项目名称:Windows8Lab,代码行数:38,代码来源:AudioPlayer.cs
示例5: WaveManager
/// <summary>
/// Sets up the XAudio2 engine with a full-volume mastering voice and starts it.
/// </summary>
public WaveManager()
{
    xAudio = new XAudio2();
    var masterVoice = new MasteringVoice(xAudio);
    // Volume 1.0, applied immediately (operation set 0).
    masterVoice.SetVolume(1, 0);
    xAudio.StartEngine();
}
开发者ID:epolekoff,项目名称:SynesthesiaChaos,代码行数:7,代码来源:WaveManager.cs
示例6: InitializeMediaPlayer
/// <summary>
/// Initialize the media element for playback
/// </summary>
/// <param name="streamConfig">Object containing stream configuration details</param>
/// <param name="streamSource">Source that supplies the decoded AV samples.</param>
void InitializeMediaPlayer(MoonlightStreamConfiguration streamConfig, AvStreamSource streamSource)
{
    this._streamSource = streamSource;

    // This code is based upon the MS FFmpegInterop project on GitHub
    // Describe the incoming H.264 stream (profile, resolution, bitrate) from
    // the negotiated stream configuration before creating the source.
    VideoEncodingProperties videoProps = VideoEncodingProperties.CreateH264();
    videoProps.ProfileId = H264ProfileIds.High;
    videoProps.Width = (uint)streamConfig.GetWidth();
    videoProps.Height = (uint)streamConfig.GetHeight();
    videoProps.Bitrate = (uint)streamConfig.GetBitrate();

    // Seek-less, zero-buffered source: samples render as soon as they arrive.
    _videoMss = new MediaStreamSource(new VideoStreamDescriptor(videoProps));
    _videoMss.BufferTime = TimeSpan.Zero;
    _videoMss.CanSeek = false;
    _videoMss.Duration = TimeSpan.Zero;
    _videoMss.SampleRequested += _videoMss_SampleRequested;

    // Audio: 48 kHz, 16-bit stereo through XAudio2.
    // NOTE(review): the mastering voice local is never stored anywhere —
    // presumably the engine keeps it alive; confirm it is not disposed early.
    XAudio2 xaudio = new XAudio2();
    MasteringVoice masteringVoice = new MasteringVoice(xaudio, 2, 48000);
    WaveFormat format = new WaveFormat(48000, 16, 2);

    // Set for low latency playback
    StreamDisplay.RealTimePlayback = true;
    // Render on the full window to avoid extra compositing
    StreamDisplay.IsFullWindow = true;
    // Disable built-in transport controls
    StreamDisplay.AreTransportControlsEnabled = false;
    StreamDisplay.SetMediaStreamSource(_videoMss);

    // Route the audio stream through a source voice matching the format above.
    AvStream.SetSourceVoice(new SourceVoice(xaudio, format));
}
开发者ID:cmathser,项目名称:moonlight-windows,代码行数:37,代码来源:MediaPlayer.cs
示例7: EffectManager
/// <summary>
/// Creates an effect manager bound to an XAudio2 engine.
/// </summary>
/// <param name="xaudio2">The XAudio2 engine to use.</param>
/// <param name="maxInstances">Maximum number of simultaneous effect instances.</param>
/// <param name="soundPath">Path the effect sound files are loaded from.</param>
public EffectManager(XAudio2 xaudio2, int maxInstances, string soundPath)
{
    // Voice cache keyed by wave format — presumably so voices can be reused
    // per format; confirm against the play methods.
    this.instances = new Dictionary<WaveFormat, List<SourceVoice>>();
    this.soundPath = soundPath;
    this.maxInstances = maxInstances;
    this.xaudio2 = xaudio2;
}
开发者ID:HakanL,项目名称:animatroller,代码行数:8,代码来源:EffectManager.cs
示例8: MySourceVoice
/// <summary>
/// Wraps a new XAudio2 source voice created on the given engine and format.
/// </summary>
/// <param name="device">Engine the voice is created on.</param>
/// <param name="sourceFormat">Wave format of the audio this voice will play.</param>
public MySourceVoice(XAudio2 device, WaveFormat sourceFormat)
{
    // NOTE(review): the third argument (true) selects a SourceVoice ctor
    // overload — presumably enabling callbacks; confirm against SharpDX docs.
    m_voice = new SourceVoice(device, sourceFormat, true);
    m_voice.BufferEnd += OnStopPlaying;
    m_valid = true;
    // Reset any queued state before first use.
    Flush();
}
开发者ID:fluxit,项目名称:SpaceEngineers,代码行数:8,代码来源:MySourceVoice.cs
示例9: HasDeviceChanged
/// <summary>
/// Detects whether the audio device behind the engine no longer matches the
/// given display name, by invoking a method directly through the COM vtable.
/// </summary>
/// <param name="engine">Engine whose underlying device is inspected.</param>
/// <param name="displayName">Display name to compare against.</param>
/// <returns>True when the device's display name differs from <paramref name="displayName"/>.</returns>
public static unsafe bool HasDeviceChanged(XAudio2 engine, string displayName)
{
    // Vtable slot 4 — NOTE(review): assumed to be IXAudio2::GetDeviceDetails
    // for this XAudio2 version; the offset breaks if the interface layout changes.
    const int GetDeviceDetailsMethodOffset = 4;
    XAUDIO2_DEVICE_DETAILS details;
    // Raw native call: (this = engine, index 0, out details struct).
    var result = (Result)NativeCall.Function<int, IntPtr, int, IntPtr>(new NativeFunction(engine.NativePointer, GetDeviceDetailsMethodOffset), engine.NativePointer, 0, new IntPtr(&details));
    // Throws if the native call returned a failing HRESULT.
    result.CheckError();
    // NOTE(review): the (string, 256) Equals overload presumably compares up to
    // the fixed-size native name field length — confirm its semantics.
    return !displayName.Equals(details.DisplayName, 256);
}
开发者ID:ChristianHeinz71,项目名称:SpaceEngineers,代码行数:9,代码来源:MyAudio_Native.cs
示例10: MySourceVoice
/// <summary>
/// Wraps a new XAudio2 source voice that plays queued data streams.
/// </summary>
/// <param name="device">Engine the voice is created on.</param>
/// <param name="sourceFormat">Wave format of the audio this voice will play.</param>
public MySourceVoice(XAudio2 device, WaveFormat sourceFormat)
{
    // NOTE(review): the third argument (true) selects a SourceVoice ctor
    // overload — presumably enabling callbacks; confirm against SharpDX docs.
    m_voice = new SourceVoice(device, sourceFormat, true);
    m_voice.BufferEnd += OnStopPlayingBuffered;
    m_valid = true;
    // Pending data streams consumed by the buffered playback path.
    m_dataStreams = new Queue<DataStream>();
    // Reset any queued state before first use.
    Flush();
}
开发者ID:2asoft,项目名称:SpaceEngineers,代码行数:9,代码来源:MySourceVoice.cs
示例11: SoundManager
/// <summary>
/// Creates the XAudio2 engine with a half-volume mastering voice and
/// allocates parallel slots for the requested number of sounds.
/// </summary>
/// <param name="sounds">Number of sound slots to allocate.</param>
public SoundManager(int sounds)
{
    _audio = new XAudio2();
    _masteringVoice = new MasteringVoice(_audio);
    _masteringVoice.SetVolume(0.5f);

    // One voice/buffer/stream per slot, kept in parallel arrays.
    _sourceVoices = new SourceVoice[sounds];
    _audioBuffers = new AudioBuffer[sounds];
    _soundStreams = new SoundStream[sounds];
}
开发者ID:quibsorg,项目名称:CsGoAimbot,代码行数:9,代码来源:SoundManager.cs
示例12: Audio
/// <summary>
/// Creates an XAudio2 device and loads the given file from the Content
/// folder into a single end-of-stream audio buffer.
/// </summary>
/// <param name="fileName">File name relative to the "Content/" folder.</param>
public Audio(String fileName)
{
    device = new XAudio2();
    masteringVoice = new MasteringVoice(device);

    stream = new SoundStream(File.OpenRead("Content/" + fileName));
    buffer = new AudioBuffer();
    buffer.Stream = stream.ToDataStream();
    buffer.AudioBytes = (int)stream.Length;
    buffer.Flags = BufferFlags.EndOfStream;
    // The data stream already holds the samples, so the sound stream can close.
    stream.Close();
}
开发者ID:Jojendersie,项目名称:Voxelseeds,代码行数:9,代码来源:Audio.cs
示例13: SoundManager
/// <summary>
/// Initializes the XAudio2 engine with a half-volume mastering voice and
/// allocates voice, buffer, and stream slots.
/// </summary>
/// <param name="cntVoices">Number of concurrent voices to support.</param>
public SoundManager(int cntVoices)
{
    audio = new XAudio2();
    masteringVoice = new MasteringVoice(audio);
    masteringVoice.SetVolume(0.5f);

    // Parallel arrays: slot i ties a stream to its buffer and source voice.
    streams = new SoundStream[cntVoices];
    buffers = new AudioBuffer[cntVoices];
    voices = new SourceVoice[cntVoices];
}
开发者ID:nkarpey,项目名称:Zat-s-External-CSGO-Multihack,代码行数:9,代码来源:SoundManager.cs
示例14: MySourceVoicePool
/// <summary>
/// Creates an empty source-voice pool for the given engine and wave format.
/// </summary>
/// <param name="audioEngine">Engine the pooled voices belong to.</param>
/// <param name="waveformat">Wave format shared by all voices in this pool.</param>
/// <param name="owner">Cue bank that owns this pool.</param>
public MySourceVoicePool(XAudio2 audioEngine, WaveFormat waveformat, MyCueBank owner)
{
    m_owner = owner;
    m_waveFormat = waveformat;
    m_audioEngine = audioEngine;

    // Start with no voices; the queue's 32 is the MyConcurrentQueue ctor
    // argument — presumably an initial capacity, confirm its semantics.
    m_currentCount = 0;
    m_availableVoices = new MyConcurrentQueue<MySourceVoice>(32);
    m_fadingOutVoices = new List<MySourceVoice>();
}
开发者ID:fluxit,项目名称:SpaceEngineers,代码行数:9,代码来源:MySourceVoicePool.cs
示例15: InitializeSoundEffect
/// <summary>
/// Lazily creates the shared XAudio2 device and mastering voice used for
/// sound effects; on any failure the device is released and nulled so the
/// game runs without audio support.
/// </summary>
internal static void InitializeSoundEffect()
{
    try
    {
        if (Device == null)
        {
#if !WINRT && DEBUG
            try
            {
                //Fails if the XAudio2 SDK is not installed
                Device = new XAudio2(XAudio2Flags.DebugEngine, ProcessorSpecifier.DefaultProcessor);
                Device.StartEngine();
            }
            catch
#endif
            // In debug desktop builds this block is the fallback when the debug
            // engine is unavailable; otherwise it is the only path.
            {
                Device = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.DefaultProcessor);
                Device.StartEngine();
            }
        }

        // Just use the default device.
#if WINRT
        string deviceId = null;
#else
        const int deviceId = 0;
#endif

        if (MasterVoice == null)
        {
            // Let windows autodetect number of channels and sample rate.
            MasterVoice = new MasteringVoice(Device, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, deviceId);
            MasterVoice.SetVolume(_masterVolume, 0);
        }

        // The autodetected value of MasterVoice.ChannelMask corresponds to the speaker layout.
#if WINRT
        Speakers = (Speakers)MasterVoice.ChannelMask;
#else
        var deviceDetails = Device.GetDeviceDetails(deviceId);
        Speakers = deviceDetails.OutputFormat.ChannelMask;
#endif
    }
    catch
    {
        // Release the device and null it as
        // we have no audio support.
        if (Device != null)
        {
            Device.Dispose();
            Device = null;
        }

        MasterVoice = null;
    }
}
开发者ID:Breadmouth,项目名称:Gravitas,代码行数:56,代码来源:SoundEffect.XAudio.cs
示例16: X3DAudioEngine
/// <summary>
/// Creates the XAudio2 engine, a mastering voice, and a 3D audio processor
/// matched to the default device's channel mask, with the listener at the origin.
/// </summary>
public X3DAudioEngine()
{
    _xaudio2 = new XAudio2();
    _masteringVoice = new MasteringVoice(_xaudio2);

    // Query the default device (index 0) so 3D processing matches its layout.
    var deviceDetails = _xaudio2.GetDeviceDetails(0);
    _deviceFormat = deviceDetails.OutputFormat;
    _x3dAudio = new X3DAudio(_deviceFormat.ChannelMask);

    // Listener starts at the origin with no rotation.
    Position = new Vector3D(0, 0, 0);
    Rotation = System.Windows.Media.Media3D.Quaternion.Identity;
}
开发者ID:gongfuPanada,项目名称:VrPlayer,代码行数:11,代码来源:X3DAudioEngine.cs
示例17: SetAudioEngine
/// <summary>
/// Switches the pool to a different XAudio2 engine, discarding all cached
/// voices; a no-op when the same engine instance is passed again.
/// </summary>
/// <param name="audioEngine">The engine to switch to.</param>
public void SetAudioEngine(XAudio2 audioEngine)
{
    if (m_audioEngine == audioEngine)
        return;

    m_audioEngine = audioEngine;

    // Cached voices are tied to the previous engine — presumably unusable
    // after the swap, so drop all of them and reset the counter.
    m_playingVoices.Clear();
    m_availableVoices.Clear();
    m_fadingOutVoices.Clear();
    m_maxCount = 0;
}
开发者ID:ChristianHeinz71,项目名称:SpaceEngineers,代码行数:11,代码来源:MySourceVoicePool.cs
示例18: TrackPlayer
/// <summary>
/// Creates a track player that plays from the given set of audio files.
/// </summary>
/// <param name="xaudio2">The XAudio2 engine used for playback.</param>
/// <param name="filenames">Audio files to play; must contain at least one entry.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="filenames"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="filenames"/> is empty.</exception>
public TrackPlayer(XAudio2 xaudio2, string[] filenames)
{
    // Fix: the original read filenames.Length before any null check, so a null
    // argument surfaced as a NullReferenceException instead of a clear error.
    if (filenames == null)
        throw new ArgumentNullException("filenames");
    if (filenames.Length == 0)
        throw new ArgumentException("No filenames specified");

    this.xaudio2 = xaudio2;
    this.filenames = filenames;
    this.rnd = new Random();
    this.players = new List<AudioPlayer>();
}
开发者ID:HakanL,项目名称:animatroller,代码行数:11,代码来源:TrackPlayer.cs
示例19: SetAudioEngine
/// <summary>
/// Switches the pool to a different XAudio2 engine, raising the change event
/// and discarding cached voices; a no-op for the same engine instance.
/// </summary>
/// <param name="audioEngine">The engine to switch to.</param>
public void SetAudioEngine(XAudio2 audioEngine)
{
    if (m_audioEngine == audioEngine)
        return;

    // Snapshot the delegate before invoking (standard null-safe raise pattern).
    var handler = OnAudioEngineChanged;
    if (handler != null)
        handler();

    m_audioEngine = audioEngine;

    // Cached voices belong to the previous engine — drop them and reset.
    m_availableVoices.Clear();
    m_fadingOutVoices.Clear();
    m_currentCount = 0;
}
开发者ID:fluxit,项目名称:SpaceEngineers,代码行数:11,代码来源:MySourceVoicePool.cs
示例20: XAudioSound
/// <summary>
/// Loads a wave file from the Content folder and prepares its audio buffer
/// and decoding info for playback on the device's XAudio2 engine.
/// </summary>
/// <param name="filename">Sound name without folder or extension.</param>
/// <param name="device">Audio device providing the XAudio2 engine.</param>
public XAudioSound(string filename, XAudioDevice device)
    : base(filename, device)
{
    xAudio = device.XAudio2;

    // The stream is only needed while copying format, length, buffer data,
    // and packet info; dispose it as soon as those are captured.
    using (var soundStream = LoadStream("Content/" + filename + ".wav"))
    {
        format = soundStream.Format;
        length = CalculateLengthInSeconds(format, (int)soundStream.Length);
        buffer = CreateAudioBuffer(soundStream.ToDataStream());
        decodedInfo = soundStream.DecodedPacketsInfo;
    }
}
开发者ID:hillwhite,项目名称:DeltaEngine,代码行数:12,代码来源:XAudioSound.cs
注:本文中的SharpDX.XAudio2.XAudio2类示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论