本文整理汇总了C#中System.Windows.Media.MediaStreamDescription类的典型用法代码示例。如果您正苦于以下问题:C# MediaStreamDescription类的具体用法?C# MediaStreamDescription怎么用?C# MediaStreamDescription使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
MediaStreamDescription类属于System.Windows.Media命名空间,在下文中一共展示了MediaStreamDescription类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C#代码示例。
示例1: OpenMediaAsync
// Opens the MP3 "media": reads the first MP3 frame from the input stream,
// builds an MPEG Layer 3 WAVEFORMATEX description from it, and reports the
// single audio stream to the media pipeline.
protected override void OpenMediaAsync ()
{
Dictionary<MediaSourceAttributesKeys, string> media_attributes = new Dictionary<MediaSourceAttributesKeys, string> ();
List<MediaStreamDescription> media_streams = new List<MediaStreamDescription> ();
Dictionary<MediaStreamAttributeKeys, string> stream_attributes = new Dictionary<MediaStreamAttributeKeys,string> ();
MediaStreamDescription media_stream = new MediaStreamDescription (MediaStreamType.Audio, stream_attributes);
// NOTE(review): 60 * 10000 ticks is only 60 ms (1 s = 10,000,000 ticks) —
// confirm whether 60 seconds was intended here.
long duration = 60 * 10000;
WaveFormatEx wave = new WaveFormatEx ();
// The first frame's header supplies bitrate, channel count and sample rate.
Mp3Frame frame = Mp3Frame.Read (stream);
wave.FormatTag = 85; // 85 (0x0055) = WAVE_FORMAT_MPEGLAYER3
wave.AvgBytesPerSec = (uint) frame.Bitrate / 8; // bits/s -> bytes/s
wave.BitsPerSample = 0; // conventional for compressed MP3 data
wave.BlockAlign = 1;
wave.Channels = (ushort) frame.Channels;
// NOTE(review): the ushort cast truncates sample rates above 65535 Hz; safe
// for standard MP3 rates (<= 48000 Hz) but verify the target field's type.
wave.SamplesPerSec = (ushort) frame.SampleRate;
wave.Size = 12; // cbSize: 12 extra bytes (MPEGLAYER3WAVEFORMAT tail)
media_attributes.Add (MediaSourceAttributesKeys.CanSeek, "0");
media_attributes.Add (MediaSourceAttributesKeys.Duration, duration.ToString ());
stream_attributes [MediaStreamAttributeKeys.CodecPrivateData] = wave.Encoded;
media_streams.Add (media_stream);
try {
this.frame = frame;
this.description = media_stream;
ReportOpenMediaCompleted (media_attributes, media_streams);
opened = DateTime.Now;
} catch (Exception ex) {
// Best-effort logging only; a failure here leaves the source unopened.
Console.WriteLine (ex);
}
}
开发者ID:dfr0,项目名称:moon,代码行数:33,代码来源:Mp3Demuxer.cs
示例2: OpenMediaAsync
/// <summary>
/// Opens the Dirac media: reads the whole video stream into memory, decodes
/// it, and reports a single RGBA video stream to the media pipeline.
/// </summary>
/// <exception cref="IOException">The video stream could not be fully read.</exception>
protected override void OpenMediaAsync()
{
    Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();

    // Pull the entire video stream into memory for the decoder.
    byte[] videoData = new byte[this.videoStream.Length];
    if (videoData.Length != this.videoStream.Read(videoData, 0, videoData.Length))
    {
        throw new IOException("Could not read in the VideoStream");
    }

    dec = new org.diracvideo.Jirac.Decoder();
    dec.Push(videoData, 0, videoData.Length);
    dec.Decode();

    mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
    // BUGFIX: Height/Width were swapped — Height was previously populated
    // from format.width and Width from format.height.
    mediaStreamAttributes[MediaStreamAttributeKeys.Height] = dec.format.height.ToString ();
    mediaStreamAttributes[MediaStreamAttributeKeys.Width] = dec.format.width.ToString ();
    this.streamDescription = new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);
    mediaStreamDescriptions.Add(streamDescription);

    // Fixed 5-minute duration keeps this sample simple; seeking is disabled.
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromMinutes(5).Ticks.ToString(CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString ();

    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
开发者ID:mono,项目名称:mooncodecs,代码行数:29,代码来源:DiracStreamSource.cs
示例3: OpenMediaAsync
/// <summary>
/// Opens the media: describes one RGBA video stream at the source's pixel
/// dimensions, marks the source as infinite and non-seekable, and reports
/// completion to the media pipeline.
/// </summary>
protected override void OpenMediaAsync()
{
    // One frame every 1/30 of a second, expressed in 100 ns ticks.
    _frameTime = (int)TimeSpan.FromSeconds((double)1 / 30).Ticks;

    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.VideoFourCC, "RGBA" },
        { MediaStreamAttributeKeys.Height, format.PixelHeight.ToString() },
        { MediaStreamAttributeKeys.Width, format.PixelWidth.ToString() }
    };
    _videoDescription = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);

    var availableStreams = new List<MediaStreamDescription> { _videoDescription };

    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>
    {
        // A zero duration marks the video as infinite.
        { MediaSourceAttributesKeys.Duration, TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture) },
        { MediaSourceAttributesKeys.CanSeek, false.ToString() }
    };

    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
开发者ID:kindohm,项目名称:silverlight-app-does-what,代码行数:27,代码来源:VideoDecoder.cs
示例4: OpenMediaAsync
/// <summary>
/// Opens the WAV media: parses and validates the wave header, then reports
/// the single audio stream (codec private data + duration) to the pipeline.
/// </summary>
protected override void OpenMediaAsync()
{
    try
    {
        this.wavParser = new WavParser(this.stream);
        this.wavParser.ParseWaveHeader();
        this.wavParser.WaveFormatEx.ValidateWaveFormat();

        // PCM payload begins at the parser-reported data position.
        this.startPosition = this.currentPosition = this.wavParser.DataPosition;

        Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
        Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
        List<MediaStreamDescription> availableStreams = new List<MediaStreamDescription>();

        streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.wavParser.WaveFormatEx.ToHexString();
        MediaStreamDescription msd = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
        this.audioDesc = msd;
        availableStreams.Add(this.audioDesc);

        sourceAttributes[MediaSourceAttributesKeys.Duration] = this.wavParser.Duration.ToString();
        ReportOpenMediaCompleted(sourceAttributes, availableStreams);
    }
    catch (Exception e)
    {
        // BUGFIX: failures were previously swallowed by an empty catch,
        // leaving the media pipeline waiting forever for OpenMedia to
        // complete. Surface the error to the pipeline instead.
        ErrorOccurred(e.Message);
    }
}
开发者ID:Povt81Barnaul,项目名称:SLAVINYAK,代码行数:27,代码来源:WaveMediaStreamSource.cs
示例5: H264Parser
/// <summary>
/// Constructs an H.264 parser, taking the video resolution from the HLS
/// playlist metadata when available and falling back to 1280x720 otherwise.
/// </summary>
/// <param name="outputBuffer">Buffer that receives parsed samples.</param>
/// <param name="metadata">Container metadata; may carry a Resolution attribute ("WxH").</param>
/// <param name="hlsStream">The HLS stream being parsed.</param>
public H264Parser(SampleBuffer outputBuffer, IContainerMetadata metadata, HLSStream hlsStream)
    : base(outputBuffer, hlsStream)
{
    string[] resolution = null;
    string resolutionTag;
    if (metadata.Attributes != null &&
        metadata.Attributes.TryGetValue(HLSPlaylistMetaKeys.Resolution, out resolutionTag))
    {
        // Resolution is encoded as "<width>x<height>".
        string[] parts = resolutionTag.Split(new char[] { 'x' });
        if (parts != null && parts.Length == 2)
            resolution = parts;
    }

    if (resolution == null)
    {
        HLSTrace.WriteLine("Missing 'Resolution' tag in HLS MetaKeys, defaulting to the maximum supported resolution of 1280x720.");
        resolution = new string[] { "1280", "720" };
    }

    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    streamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
    streamAttributes[MediaStreamAttributeKeys.Width] = resolution[0];
    streamAttributes[MediaStreamAttributeKeys.Height] = resolution[1];
    Description = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);
}
开发者ID:randyhhh,项目名称:HLSSample_dev-CL376551,代码行数:30,代码来源:H264Parser.cs
示例6: MediaStreamSample
/// <summary>
/// Creates a media sample that wraps a region of <paramref name="stream"/>
/// for the given stream description. Values are captured as-is; no
/// validation is performed here.
/// </summary>
public MediaStreamSample (MediaStreamDescription mediaStreamDescription, Stream stream, long offset, long count, long timestamp, long duration, IDictionary<MediaSampleAttributeKeys, string> attributes)
{
    this.media_stream_description = mediaStreamDescription;
    this.stream = stream;
    this.offset = offset;
    this.count = count;
    this.timestamp = timestamp;
    this.duration = duration;
    this.attributes = attributes;
}
开发者ID:dfr0,项目名称:moon,代码行数:10,代码来源:MediaStreamSample.cs
示例7: OpenMediaAsync
// Opens the media: hand-encodes a PCM WAVEFORMATEX as a hex string (fields
// little-endian via SwapBytes) and reports one seekable audio stream.
protected override void OpenMediaAsync()
{
int channels = this.Asap.GetInfo().GetChannels();
// Bytes per sample frame: channels * BitsPerSample / 8.
int blockSize = channels * BitsPerSample >> 3;
// Layout: FormatTag=0x0001 (PCM), nChannels, nSamplesPerSec,
// nAvgBytesPerSec, nBlockAlign, wBitsPerSample, cbSize=0.
string waveFormatHex = string.Format("0100{0:X2}00{1:X8}{2:X8}{3:X2}00{4:X2}000000",
channels, SwapBytes(ASAP.SampleRate), SwapBytes(ASAP.SampleRate * blockSize), blockSize, BitsPerSample);
Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = waveFormatHex;
this.MediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "True";
// Negative Duration means unknown (reported as 0); otherwise the *10000
// factor converts to 100 ns ticks — presumably Duration is in
// milliseconds; verify against the ASAP API.
sourceAttributes[MediaSourceAttributesKeys.Duration] = (this.Duration < 0 ? 0 : this.Duration * 10000).ToString();
ReportOpenMediaCompleted(sourceAttributes, new MediaStreamDescription[1] { this.MediaStreamDescription });
}
开发者ID:hudokkow,项目名称:audiodecoder.asap,代码行数:16,代码来源:SilverASAP.cs
示例8: OpenMediaAsync
/// <summary>
/// Opens the media: describes a single PCM audio stream from the supplied
/// parameters and reports completion to the media pipeline.
/// </summary>
protected override void OpenMediaAsync()
{
    var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    var mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    var mediaStreamDescriptions = new List<MediaStreamDescription>();

    // NOTE(review): BlockAlign = 0 is unusual for PCM (normally
    // channels * bits / 8) — confirm the consumer tolerates it.
    var wfx = new MediaParsers.WaveFormatExtensible () {
        FormatTag = 1, // PCM
        Channels = parameters.Channels,
        SamplesPerSec = parameters.SamplesPerSecond,
        AverageBytesPerSecond = parameters.SamplesPerSecond * 2 * 2,
        BlockAlign = 0,
        BitsPerSample = parameters.BitsPerSample,
        Size = 0 };
    mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = wfx.ToHexString();

    this.media_desc = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
    mediaStreamDescriptions.Add(this.media_desc);

    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.track_duration.Ticks.ToString (CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = true.ToString ();

    // BUGFIX: ReportOpenMediaCompleted was never called, so the media
    // pipeline never finished opening and no samples were ever requested.
    ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
开发者ID:atsushieno,项目名称:synthesis,代码行数:23,代码来源:AudioQueueMediaStreamSource.cs
示例9: OpenMediaAsync
/// <summary>
/// Opens the media: hand-builds a little-endian WAVEFORMATEX hex blob for
/// raw stereo PCM and reports one non-seekable audio stream.
/// </summary>
protected override void OpenMediaAsync()
{
    startPosition = 0;
    currentPosition = 0;

    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    var availableStreams = new List<MediaStreamDescription>();

    // WAVEFORMATEX fields in order, each hex-encoded then byte-swapped to
    // little-endian by ToLittleEndianString.
    string codecPrivateData = string.Concat(
        ToLittleEndianString(string.Format("{0:X4}", 1)),                       // wFormatTag: PCM
        ToLittleEndianString(string.Format("{0:X4}", Constants.ChannelCount)),  // nChannels
        ToLittleEndianString(string.Format("{0:X8}", Constants.SampleRate)),    // nSamplesPerSec
        ToLittleEndianString(string.Format("{0:X8}", byteRate)),                // nAvgBytesPerSec
        ToLittleEndianString(string.Format("{0:X4}", blockAlign)),              // nBlockAlign
        ToLittleEndianString(string.Format("{0:X4}", Constants.BitsPerSample)), // wBitsPerSample
        ToLittleEndianString(string.Format("{0:X4}", 0)));                      // cbSize
    streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = codecPrivateData;

    mediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
    availableStreams.Add(mediaStreamDescription);

    sourceAttributes[MediaSourceAttributesKeys.Duration] = "0";
    sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "false";

    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
开发者ID:cmcginn,项目名称:MHFinance,代码行数:24,代码来源:StereoPcmStreamSource.cs
示例10: OpenMediaAsync
/// <summary>
/// Opens the Dirac media: loads the whole video stream into memory, runs the
/// decoder to obtain the stream format, and reports one non-seekable video
/// stream to the media pipeline.
/// </summary>
/// <exception cref="IOException">The video stream could not be fully read.</exception>
protected override void OpenMediaAsync()
{
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    var streamDescriptions = new List<MediaStreamDescription>();

    // Pull the entire video stream into a single buffer.
    byte[] videoData = new byte[this.videoStream.Length];
    int bytesRead = this.videoStream.Read(videoData, 0, videoData.Length);
    if (bytesRead != videoData.Length)
    {
        throw new IOException("Could not read in the VideoStream");
    }

    //TODO parse until first frame
    //todo find what is the offset of first frame and put it in push len param
    org.diracvideo.Jirac.Decoder dec = new org.diracvideo.Jirac.Decoder();
    dec.Push(videoData, 0, videoData.Length);
    dec.Decode();

    streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = dec.format.ToString();
    this.streamDescription = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);
    streamDescriptions.Add(streamDescription);

    // A fixed 5-minute duration keeps this sample simple; seeking disabled.
    sourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromMinutes(5).Ticks.ToString(CultureInfo.InvariantCulture);
    sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";

    // Internal state is ready; the pipeline may now request Dirac samples.
    this.ReportOpenMediaCompleted(sourceAttributes, streamDescriptions);
}
开发者ID:mono,项目名称:csdirac,代码行数:36,代码来源:DiracStreamSource.cs
示例11: OpenMediaAsync
/// <summary>
/// Opens the media: reads and validates the WAVE header, derives the PCM
/// payload length and duration, and reports the audio stream description
/// to the media pipeline.
/// </summary>
protected override void OpenMediaAsync() {
    header = WaveFormatExtensible.ReadHeader(stream);
    header.ValidateWaveFormat();

    // Bytes per sample frame times the number of frames per media sample.
    sampleSize = (long)header.Channels * header.BitsPerSample / 8 * numSamples;

    // PCM payload starts immediately after the header just consumed.
    startPosition = currentPosition = stream.Position;
    pcmDataLen = stream.Length - startPosition;
    duration = header.AudioDurationFromDataLen(pcmDataLen);

    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    var availableStreams = new List<MediaStreamDescription>();

    streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = header.ToHexString();
    this.audioDesc = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
    availableStreams.Add(this.audioDesc);

    sourceAttributes[MediaSourceAttributesKeys.Duration] = duration.ToString();
    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
开发者ID:PavelPZ,项目名称:REW,代码行数:28,代码来源:WaveMediaStreamSource2.cs
示例12: OpenMediaAsync
/// <summary>
/// Initialises the data structures to pass data to the media pipeline via
/// the MediaStreamSource, then starts a one-second FPS timer.
/// </summary>
protected override void OpenMediaAsync()
{
    CameraStreamSourceDataSingleton dataSource = CameraStreamSourceDataSingleton.Instance;

    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.VideoFourCC, "RGBA" },
        { MediaStreamAttributeKeys.Width, dataSource.FrameWidth.ToString() },
        { MediaStreamAttributeKeys.Height, dataSource.FrameHeight.ToString() }
    };
    videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, streamAttributes);

    var streamDescriptions = new List<MediaStreamDescription> { videoStreamDescription };

    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>
    {
        // A zero duration marks this as an infinite (live) video source.
        { MediaSourceAttributesKeys.Duration, TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture) },
        { MediaSourceAttributesKeys.CanSeek, false.ToString() }
    };

    frameTime = (int)TimeSpan.FromSeconds((double)0).Ticks;

    // Internal state is ready; the pipeline may now request frame samples.
    ReportOpenMediaCompleted(sourceAttributes, streamDescriptions);

    DispatcherTimer fpsTimer = new DispatcherTimer();
    fpsTimer.Interval = TimeSpan.FromSeconds(1);
    fpsTimer.Tick += Fps_Tick;
    fpsTimer.Start();
}
开发者ID:ghstshdw,项目名称:PaperPhoto,代码行数:40,代码来源:CameraStreamSource.cs
示例13: ReadPastId3v2TagsCallback
/// <summary>
/// Callback which handles setting up an MSS once the first MpegFrame after Id3v2 data has been read.
/// </summary>
/// <param name="mpegLayer3Frame">First MpegFrame</param>
/// <param name="mediaStreamAttributes">Empty dictionary for MediaStreamAttributes</param>
/// <param name="mediaStreamDescriptions">Empty list for MediaStreamDescriptions</param>
/// <param name="mediaSourceAttributes">Empty dictionary for MediaSourceAttributes</param>
/// <exception cref="InvalidOperationException">The first frame's size is not positive.</exception>
private void ReadPastId3v2TagsCallback(
    MpegFrame mpegLayer3Frame,
    Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes,
    List<MediaStreamDescription> mediaStreamDescriptions,
    Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes)
{
    if (mpegLayer3Frame.FrameSize <= 0)
    {
        // BUGFIX: the message previously said "cannot be negative" although
        // the guard also rejects a zero frame size.
        throw new InvalidOperationException("MpegFrame's FrameSize must be positive");
    }

    // Initialize the Mp3 data structures used by the Media pipeline with state from the first frame.
    WaveFormatExtensible wfx = new WaveFormatExtensible();
    this.MpegLayer3WaveFormat = new MpegLayer3WaveFormat();
    this.MpegLayer3WaveFormat.WaveFormatExtensible = wfx;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.FormatTag = 85; // WAVE_FORMAT_MPEGLAYER3
    this.MpegLayer3WaveFormat.WaveFormatExtensible.Channels = (short)((mpegLayer3Frame.Channels == Channel.SingleChannel) ? 1 : 2);
    this.MpegLayer3WaveFormat.WaveFormatExtensible.SamplesPerSec = mpegLayer3Frame.SamplingRate;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond = mpegLayer3Frame.Bitrate / 8; // bits/s -> bytes/s
    this.MpegLayer3WaveFormat.WaveFormatExtensible.BlockAlign = 1;
    this.MpegLayer3WaveFormat.WaveFormatExtensible.BitsPerSample = 0; // compressed audio
    this.MpegLayer3WaveFormat.WaveFormatExtensible.ExtraDataSize = 12; // MPEGLAYER3WAVEFORMAT tail
    this.MpegLayer3WaveFormat.Id = 1;
    this.MpegLayer3WaveFormat.BitratePaddingMode = 0;
    this.MpegLayer3WaveFormat.FramesPerBlock = 1;
    this.MpegLayer3WaveFormat.BlockSize = (short)mpegLayer3Frame.FrameSize;
    this.MpegLayer3WaveFormat.CodecDelay = 0;

    mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.MpegLayer3WaveFormat.ToHexString();
    this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
    mediaStreamDescriptions.Add(this.audioStreamDescription);

    // NOTE(review): a zero AverageBytesPerSecond (Bitrate < 8) would divide
    // by zero here — confirm MpegFrame guarantees a positive bitrate.
    this.trackDuration = new TimeSpan(0, 0, (int)(this.audioStreamLength / MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond));
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);
    if (this.audioStream.CanSeek)
    {
        mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "1";
    }
    else
    {
        mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";
    }

    // Report that the Mp3MediaStreamSource has finished initializing its internal state and can now
    // pass in Mp3 Samples.
    this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

    this.currentFrame = mpegLayer3Frame;
    this.currentFrameStartPosition = MpegFrame.FrameHeaderSize;
}
开发者ID:nstuke,项目名称:ManagedMediaHelpers,代码行数:60,代码来源:Mp3MediaStreamSource.cs
示例14: OpenMediaAsync
// Opens the CMMB media: builds an HE-AAC (ADTS) audio description and an
// H.264 video description (SPS/PPS taken from the first dequeued video
// block), then reports both streams to the media pipeline.
protected override void OpenMediaAsync()
{
//WaveFormatEx
HeAacWaveFormat aacf = new HeAacWaveFormat();
WaveFormatExtensible wfx = new WaveFormatExtensible();
aacf.WaveFormatExtensible = wfx;
aacf.WaveFormatExtensible.FormatTag = 0x1610; //0xFF;//0x1610;
aacf.WaveFormatExtensible.Channels = 2; //
aacf.WaveFormatExtensible.BlockAlign = 1;
aacf.WaveFormatExtensible.BitsPerSample = 0;//16; //unkonw set to 0
aacf.WaveFormatExtensible.SamplesPerSec = 24000; // from 8000 to 96000 Hz
aacf.WaveFormatExtensible.AverageBytesPerSecond = 0;//wfx.SamplesPerSec * wfx.Channels * wfx.BitsPerSample / wfx.BlockAlign;
aacf.WaveFormatExtensible.Size = 12;
// Extra 3 words in WAVEFORMATEX
// refer to http://msdn.microsoft.com/en-us/library/windows/desktop/dd757806(v=vs.85).aspx
aacf.wPayloadType = 0x0; //Audio Data Transport Stream (ADTS). The stream contains an adts_sequence, as defined by MPEG-2.
aacf.wAudioProfileLevelIndication = 0xFE;
aacf.wStructType = 0;
string codecPrivateData = aacf.ToHexString();
Dictionary<MediaStreamAttributeKeys, string> audioStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
audioStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = codecPrivateData;
audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, audioStreamAttributes);
// Blocks until the first video block is available.
m_vbuffer.WaitForWorkItem();
m_curVideoBlk = m_vbuffer.Dequeue().CommandParameter as MSF.VideoBlock;
// NOTE(review): on a failed cast this returns WITHOUT calling
// ReportOpenMediaCompleted or ErrorOccurred, which stalls the pipeline —
// confirm this early return is intentional.
if (m_curVideoBlk == null)
return;
vIdx = 0;
fNum = (int)m_curVideoBlk.VideoFrameNum;
// Codec private data for H.264 is built from the first I-frame's SPS/PPS.
H264NalFormat h264f = new H264NalFormat();
h264f.sps = m_curVideoBlk.FirstIFrameInfo.sps;
h264f.pps = m_curVideoBlk.FirstIFrameInfo.pps;
string s = h264f.ToHexString();
//Video
Dictionary<MediaStreamAttributeKeys, string> videoStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
videoStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "H264";
videoStreamAttributes[MediaStreamAttributeKeys.Height] = "240";
videoStreamAttributes[MediaStreamAttributeKeys.Width] = "320";
videoStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = s;//"0000016742E00D96520283F40500000168CE388000";
videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, videoStreamAttributes);
//Media
Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(6).Ticks.ToString(CultureInfo.InvariantCulture);
mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";
List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();
#if !DEBUG
// Emulator does not support HE-AAC
mediaStreamDescriptions.Add(audioStreamDescription);
#endif
mediaStreamDescriptions.Add(videoStreamDescription);
this.AudioBufferLength = 500;
this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
开发者ID:yspxman,项目名称:CMTVDemo,代码行数:66,代码来源:CmmbStreamSource2.cs
示例15: ParseAvcConfig
/// <summary>
/// Extracts the SPS and PPS NAL units from an AVC decoder configuration
/// record ("avcC" box) and queues each one as a zero-timestamp sample.
/// </summary>
/// <param name="stream">Stream description the samples belong to.</param>
/// <param name="samples">List that receives the parameter-set samples.</param>
/// <param name="data">Raw avcC bytes: 5-byte header, then the SPS/PPS sets.</param>
/// <exception cref="System.IO.EndOfStreamException">The record is truncated.</exception>
private static void ParseAvcConfig(
    MediaStreamDescription stream,
    List<MediaStreamSample> samples,
    byte[] data)
{
    System.IO.Stream ios = new System.IO.MemoryStream(data);
    // Skip configurationVersion, profile, compatibility, level and the
    // lengthSizeMinusOne byte (5 bytes total).
    ios.Seek(5, System.IO.SeekOrigin.Begin);

    int num_sps = ios.ReadByte() & 0x1f; // low 5 bits: SPS count
    for (int i = 0; i < num_sps; ++i)
    {
        int len_sps = (ios.ReadByte() << 8) | ios.ReadByte(); // big-endian length
        byte[] sps = new byte[len_sps];
        // BUGFIX: the return value of Read was previously ignored, which
        // could silently queue a truncated parameter set (CA2022).
        if (ios.Read(sps, 0, len_sps) != len_sps)
        {
            throw new System.IO.EndOfStreamException("Truncated SPS in AVC configuration record");
        }
        samples.Add(new MediaStreamSample(
            stream,
            new System.IO.MemoryStream(sps),
            0,
            len_sps,
            0,
            new Dictionary<MediaSampleAttributeKeys, string>()));
    }

    int num_pps = ios.ReadByte();
    for (int i = 0; i < num_pps; ++i)
    {
        int len_pps = (ios.ReadByte() << 8) | ios.ReadByte(); // big-endian length
        byte[] pps = new byte[len_pps];
        if (ios.Read(pps, 0, len_pps) != len_pps)
        {
            throw new System.IO.EndOfStreamException("Truncated PPS in AVC configuration record");
        }
        samples.Add(new MediaStreamSample(
            stream,
            new System.IO.MemoryStream(pps),
            0,
            len_pps,
            0,
            new Dictionary<MediaSampleAttributeKeys, string>()));
    }
}
开发者ID:uvbs,项目名称:MyProjects,代码行数:36,代码来源:PpboxSource.cs
示例16: OpenMediaCallback
// Demuxer open-completion callback: on success, translates the native media
// description into MediaStreamDescriptions (video, audio, script) and
// reports them to the media pipeline; on failure, raises ErrorOccurred.
private void OpenMediaCallback(
Error ec)
{
if (ec != Error.success)
{
ErrorOccurred(ec.ToString());
return;
}
Media media;
demuxer_.get_media(out media);
Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes =
new Dictionary<MediaSourceAttributesKeys, string>();
mediaSourceAttributes[MediaSourceAttributesKeys.Duration] =
media.duration.ToString();
// ulong.MaxValue is the demuxer's sentinel for "unknown/live duration";
// such sources are reported as non-seekable.
mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] =
(media.duration != ulong.MaxValue).ToString();
List<MediaStreamDescription> mediaStreamDescriptions =
new List<MediaStreamDescription>();
for (int i = 0; i < media.streams.Length; ++i)
{
Stream stream = media.streams[i];
Dictionary<MediaStreamAttributeKeys, string> mediaStreamAttributes =
new Dictionary<MediaStreamAttributeKeys, string>();
// NOTE(review): codec_data.ToString() is used as CodecPrivateData here,
// while the hex encoding below is commented out — confirm which encoding
// the pipeline expects.
mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] =
stream.codec_data.ToString();
if (stream.type == StreamType.video)
{
// Map the native sub_type to a FourCC string via the lookup table.
mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] =
FourCC[(int)stream.sub_type];
mediaStreamAttributes[MediaStreamAttributeKeys.Width] =
stream.video.width.ToString();
mediaStreamAttributes[MediaStreamAttributeKeys.Height] =
stream.video.height.ToString();
char[] CodecPrivateDataHex = new char[stream.codec_data.Length * 2];
int index = 0;
ToHexHelper(CodecPrivateDataHex, ref index, stream.codec_data); // ExtraData
//mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] =
// new String(CodecPrivateDataHex);
MediaStreamDescription videoStreamDescription =
new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);
mediaStreamDescriptions.Add(videoStreamDescription);
// Side tables used later when samples are requested per stream type.
mediaStreamTypes_.Add(MediaStreamType.Video);
mediaStreamDescriptions_[MediaStreamType.Video] = videoStreamDescription;
mediaStreamSamples_[MediaStreamType.Video] = new List<MediaStreamSample>();
//ParseAvcConfig(videoStreamDescription, mediaStreamSamples_[MediaStreamType.Video], stream.codec_data);
}
else if (stream.type == StreamType.audio)
{
// Hand-build a hex WAVEFORMATEX: 9 fixed fields (2 hex chars per byte)
// followed by the codec extra data.
char[] WaveFormatExHex = new char[9 * 4 + stream.codec_data.Length * 2];
int index = 0;
ToHexHelper(WaveFormatExHex, ref index, 2, 255); // FormatTag
ToHexHelper(WaveFormatExHex, ref index, 2, stream.audio.channel_count); // Channels
ToHexHelper(WaveFormatExHex, ref index, 4, stream.audio.sample_rate); // SamplesPerSec
ToHexHelper(WaveFormatExHex, ref index, 4, 0); // AverageBytesPerSecond
ToHexHelper(WaveFormatExHex, ref index, 2, 1); // BlockAlign
ToHexHelper(WaveFormatExHex, ref index, 2, stream.audio.sample_size); // BitsPerSample
ToHexHelper(WaveFormatExHex, ref index, 2, stream.codec_data.Length); // ExtraDataSize
ToHexHelper(WaveFormatExHex, ref index, stream.codec_data); // ExtraData
mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] =
new String(WaveFormatExHex);
MediaStreamDescription audioStreamDescription =
new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);
mediaStreamDescriptions.Add(audioStreamDescription);
mediaStreamTypes_.Add(MediaStreamType.Audio);
mediaStreamDescriptions_[MediaStreamType.Audio] = audioStreamDescription;
mediaStreamSamples_[MediaStreamType.Audio] = new List<MediaStreamSample>();
}
else
{
// Non-A/V streams are tracked by type only and not exposed to the pipeline.
mediaStreamTypes_.Add(MediaStreamType.Script);
}
} // for
ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
开发者ID:uvbs,项目名称:MyProjects,代码行数:78,代码来源:PpboxSource.cs
示例17: OpenMediaAsync
/// <summary>
/// Opens the media: resets playback state, creates the PCM scratch buffer,
/// describes a single non-seekable audio stream, starts the tone timer, and
/// reports completion to the media pipeline.
/// </summary>
protected override void OpenMediaAsync()
{
    currentPosition = 0;
    currentTimeStamp = 0;
    memoryStream = new MemoryStream();
    binaryWriter = new BinaryWriter(memoryStream);

    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>(1)
    {
        { MediaStreamAttributeKeys.CodecPrivateData, Formatter.ToPcmBase16String(DefaultWaveFormat) }
    };
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>(3)
    {
        { MediaSourceAttributesKeys.CanSeek, "false" },
        { MediaSourceAttributesKeys.Duration, "0" }
    };

    streamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
    timer.Start();
    ReportOpenMediaCompleted(sourceAttributes, new[] { streamDescription });
}
开发者ID:pcwiek,项目名称:PureToneAudiometer,代码行数:19,代码来源:PureToneSource.cs
示例18: PrepareVideo
/// <summary>
/// Builds the H264 video stream description using the same dimensions as
/// the video capture and stores it in _videoDesc.
/// </summary>
private void PrepareVideo()
{
    Debug.WriteLine("VideoMediaStreamSource::PrepareVideo()");

    var attributes = new Dictionary<MediaStreamAttributeKeys, string>
    {
        { MediaStreamAttributeKeys.VideoFourCC, "H264" },
        { MediaStreamAttributeKeys.Height, _frameHeight.ToString() },
        { MediaStreamAttributeKeys.Width, _frameWidth.ToString() }
    };
    _videoDesc = new MediaStreamDescription(MediaStreamType.Video, attributes);
}
开发者ID:sarandogou,项目名称:boghe,代码行数:17,代码来源:VideoMediaStreamSource.cs
示例19: SwitchMediaStreamAsync
/// <summary>
/// Asynchronously switches playback to the specified media stream.
/// </summary>
/// <param name="mediaStreamDescription">The stream to switch to.</param>
protected abstract void SwitchMediaStreamAsync (MediaStreamDescription mediaStreamDescription);
开发者ID:kangaroo,项目名称:moon,代码行数:1,代码来源:MediaStreamSource.cs
示例20: ReportSwitchMediaStreamCompleted
/// <summary>
/// Notifies the native demuxer that a stream switch has completed. Throws
/// InvalidOperationException if the source is closed, detached from its
/// media element, or has no native demuxer.
/// </summary>
/// <param name="mediaStreamDescription">The stream that was switched to.</param>
protected void ReportSwitchMediaStreamCompleted (MediaStreamDescription mediaStreamDescription)
{
// FIXME: wrong/overzealous validations wrt SL2 (see unit tests)
if (closed || media_element == null || demuxer == IntPtr.Zero)
throw new InvalidOperationException ();
// FIXME: where is the mediaStreamDescription parameter being used ?
NativeMethods.imedia_demuxer_report_get_frame_completed (demuxer, IntPtr.Zero);
}
开发者ID:kangaroo,项目名称:moon,代码行数:9,代码来源:MediaStreamSource.cs
注:本文中的System.Windows.Media.MediaStreamDescription类示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论