This article collects typical usage examples of the C# class MediaBrowser.Model.Dto.MediaSourceInfo. If you are wondering what MediaSourceInfo is for and how to use it, the curated class examples below should help.
The MediaSourceInfo class belongs to the MediaBrowser.Model.Dto namespace. Twenty code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# samples.
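Before diving into the examples, here is a minimal, hypothetical sketch of building a MediaSourceInfo by hand. The property names are taken from the examples on this page, but the values (URL, container) are placeholders and the using directives are best-guess assumptions, not taken from any particular project.

using System.Collections.Generic;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.MediaInfo;

// Hypothetical helper: builds a MediaSourceInfo for an HTTP stream.
// Property names mirror the examples below; the concrete values are placeholders.
public static class MediaSourceInfoSample
{
    public static MediaSourceInfo CreateHttpSource(string url)
    {
        return new MediaSourceInfo
        {
            Path = url,                       // e.g. "http://example.com/stream.ts" (placeholder)
            Protocol = MediaProtocol.Http,
            Container = "ts",                 // assumed container
            MediaStreams = new List<MediaStream>
            {
                // Index = -1: the exact stream index within the container is unknown
                new MediaStream { Type = MediaStreamType.Video, Index = -1, IsInterlaced = true },
                new MediaStream { Type = MediaStreamType.Audio, Index = -1 }
            },
            ReadAtNativeFramerate = true
        };
    }
}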
Example 1: GetChannelStreamMediaSources
protected override async Task<List<MediaSourceInfo>> GetChannelStreamMediaSources(TunerHostInfo info, string channelId, CancellationToken cancellationToken)
{
    var urlHash = info.Url.GetMD5().ToString("N");
    var prefix = ChannelIdPrefix + urlHash;
    if (!channelId.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
    {
        return null;
    }

    var channels = await GetChannels(info, true, cancellationToken).ConfigureAwait(false);
    var m3uchannels = channels.Cast<M3UChannel>();

    var channel = m3uchannels.FirstOrDefault(c => string.Equals(c.Id, channelId, StringComparison.OrdinalIgnoreCase));
    if (channel != null)
    {
        var path = channel.Path;
        MediaProtocol protocol = MediaProtocol.File;
        if (path.StartsWith("http", StringComparison.OrdinalIgnoreCase))
        {
            protocol = MediaProtocol.Http;
        }
        else if (path.StartsWith("rtmp", StringComparison.OrdinalIgnoreCase))
        {
            protocol = MediaProtocol.Rtmp;
        }
        else if (path.StartsWith("rtsp", StringComparison.OrdinalIgnoreCase))
        {
            protocol = MediaProtocol.Rtsp;
        }

        var mediaSource = new MediaSourceInfo
        {
            Path = channel.Path,
            Protocol = protocol,
            MediaStreams = new List<MediaStream>
            {
                new MediaStream
                {
                    Type = MediaStreamType.Video,
                    // Set the index to -1 because we don't know the exact index of the video stream within the container
                    Index = -1,
                    IsInterlaced = true
                },
                new MediaStream
                {
                    Type = MediaStreamType.Audio,
                    // Set the index to -1 because we don't know the exact index of the audio stream within the container
                    Index = -1
                }
            },
            RequiresOpening = false,
            RequiresClosing = false,
            ReadAtNativeFramerate = true
        };

        return new List<MediaSourceInfo> { mediaSource };
    }

    return new List<MediaSourceInfo>();
}
Author: softworkz, Project: Emby, Lines: 60, Source: M3UTunerHost.cs
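Example 1 infers the MediaProtocol from the URL prefix of an M3U channel path. The same idea can be lifted into a small standalone helper; the sketch below is a hypothetical refactoring distilled from the example above, not code from the Emby project.

// Hypothetical helper distilled from Example 1: guess the MediaProtocol from a path prefix.
private static MediaProtocol GuessProtocol(string path)
{
    if (path.StartsWith("http", StringComparison.OrdinalIgnoreCase))
    {
        return MediaProtocol.Http;
    }
    if (path.StartsWith("rtmp", StringComparison.OrdinalIgnoreCase))
    {
        return MediaProtocol.Rtmp;
    }
    if (path.StartsWith("rtsp", StringComparison.OrdinalIgnoreCase))
    {
        return MediaProtocol.Rtsp;
    }
    return MediaProtocol.File;
}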
Example 2: ToMediaSource
public MediaSourceInfo ToMediaSource()
{
    var id = Path.GetMD5().ToString("N");

    var source = new MediaSourceInfo
    {
        MediaStreams = GetMediaStreams(this).ToList(),
        Container = Container,
        Protocol = Protocol,
        Path = Path,
        RequiredHttpHeaders = RequiredHttpHeaders,
        RunTimeTicks = RunTimeTicks,
        Name = id,
        Id = id,
        ReadAtNativeFramerate = ReadAtNativeFramerate
    };

    var bitrate = (AudioBitrate ?? 0) + (VideoBitrate ?? 0);
    if (bitrate > 0)
    {
        source.Bitrate = bitrate;
    }

    return source;
}
Author: Ceten, Project: MediaBrowser, Lines: 27, Source: ChannelMediaInfo.cs
Example 3: Record
public async Task Record(MediaSourceInfo mediaSource, string targetFile, TimeSpan duration, Action onStarted, CancellationToken cancellationToken)
{
    var httpRequestOptions = new HttpRequestOptions()
    {
        Url = mediaSource.Path
    };

    httpRequestOptions.BufferContent = false;

    using (var response = await _httpClient.SendAsync(httpRequestOptions, "GET").ConfigureAwait(false))
    {
        _logger.Info("Opened recording stream from tuner provider");

        using (var output = _fileSystem.GetFileStream(targetFile, FileMode.Create, FileAccess.Write, FileShare.Read))
        {
            onStarted();

            _logger.Info("Copying recording stream to file {0}", targetFile);

            // The media source is infinite, so we need to handle stopping ourselves
            var durationToken = new CancellationTokenSource(duration);
            cancellationToken = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, durationToken.Token).Token;

            await CopyUntilCancelled(response.Content, output, cancellationToken).ConfigureAwait(false);
        }
    }

    _logger.Info("Recording completed to file {0}", targetFile);
}
Author: t-andre, Project: Emby, Lines: 29, Source: DirectRecorder.cs
Example 4: Record
public async Task Record(MediaSourceInfo mediaSource, string targetFile, Action onStarted, CancellationToken cancellationToken)
{
    _targetPath = targetFile;
    _fileSystem.CreateDirectory(Path.GetDirectoryName(targetFile));

    var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            CreateNoWindow = true,
            UseShellExecute = false,

            // Must consume both stdout and stderr or deadlocks may occur
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            RedirectStandardInput = true,

            FileName = _mediaEncoder.EncoderPath,
            Arguments = GetCommandLineArgs(mediaSource, targetFile),

            WindowStyle = ProcessWindowStyle.Hidden,
            ErrorDialog = false
        },

        EnableRaisingEvents = true
    };

    _process = process;

    var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
    _logger.Info(commandLineLogMessage);

    var logFilePath = Path.Combine(_appPaths.LogDirectoryPath, "record-transcode-" + Guid.NewGuid() + ".txt");
    _fileSystem.CreateDirectory(Path.GetDirectoryName(logFilePath));

    // FFMpeg writes debug/error info to stderr. This is useful when debugging, so let's put it in the log directory.
    _logFileStream = _fileSystem.GetFileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, true);

    var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(_json.SerializeToString(mediaSource) + Environment.NewLine + Environment.NewLine + commandLineLogMessage + Environment.NewLine + Environment.NewLine);
    await _logFileStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationToken).ConfigureAwait(false);

    process.Exited += (sender, args) => OnFfMpegProcessExited(process);

    process.Start();

    cancellationToken.Register(Stop);

    // MUST read both stdout and stderr asynchronously or a deadlock may occur
    process.BeginOutputReadLine();

    // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
    StartStreamingLog(process.StandardError.BaseStream, _logFileStream);

    // Wait for the file to exist before proceeding
    while (!_hasExited)
    {
        await Task.Delay(100, cancellationToken).ConfigureAwait(false);
    }
}
Author: ratanparai, Project: Emby, Lines: 59, Source: EncodedRecorder.cs
Example 5: Record
public async Task Record(MediaSourceInfo mediaSource, string targetFile, TimeSpan duration, Action onStarted, CancellationToken cancellationToken)
{
    var durationToken = new CancellationTokenSource(duration);
    cancellationToken = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, durationToken.Token).Token;

    await RecordFromFile(mediaSource, mediaSource.Path, targetFile, duration, onStarted, cancellationToken).ConfigureAwait(false);

    _logger.Info("Recording completed to file {0}", targetFile);
}
Author: t-andre, Project: Emby, Lines: 9, Source: EncodedRecorder.cs
Example 6: HdHomerunLiveStream
public HdHomerunLiveStream(MediaSourceInfo mediaSource, string originalStreamId, IFileSystem fileSystem, IHttpClient httpClient, ILogger logger, IServerApplicationPaths appPaths, IServerApplicationHost appHost)
    : base(mediaSource)
{
    _fileSystem = fileSystem;
    _httpClient = httpClient;
    _logger = logger;
    _appPaths = appPaths;
    _appHost = appHost;
    OriginalStreamId = originalStreamId;

    _multicastStream = new MulticastStream(_logger);
}
Author: t-andre, Project: Emby, Lines: 11, Source: HdHomerunLiveStream.cs
Example 7: RecordInternal
public async Task RecordInternal(MediaSourceInfo mediaSource, string tempFile, string targetFile, TimeSpan duration, Action onStarted, CancellationToken cancellationToken)
{
    var httpRequestOptions = new HttpRequestOptions()
    {
        Url = mediaSource.Path
    };

    httpRequestOptions.BufferContent = false;

    using (var response = await _httpClient.SendAsync(httpRequestOptions, "GET").ConfigureAwait(false))
    {
        _logger.Info("Opened recording stream from tuner provider");

        Directory.CreateDirectory(Path.GetDirectoryName(tempFile));

        using (var output = _fileSystem.GetFileStream(tempFile, FileMode.Create, FileAccess.Write, FileShare.Read))
        {
            //onStarted();

            _logger.Info("Copying recording stream to file {0}", tempFile);

            var bufferMs = 5000;

            if (mediaSource.RunTimeTicks.HasValue)
            {
                // The media source already has a fixed duration,
                // but add another stop 1 minute later just in case the recording gets stuck for any reason
                var durationToken = new CancellationTokenSource(duration.Add(TimeSpan.FromMinutes(1)));
                cancellationToken = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, durationToken.Token).Token;
            }
            else
            {
                // The media source is infinite, so we need to handle stopping ourselves
                var durationToken = new CancellationTokenSource(duration.Add(TimeSpan.FromMilliseconds(bufferMs)));
                cancellationToken = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, durationToken.Token).Token;
            }

            var tempFileTask = response.Content.CopyToAsync(output, StreamDefaults.DefaultCopyToBufferSize, cancellationToken);

            // Give the temp file a little time to build up
            await Task.Delay(bufferMs, cancellationToken).ConfigureAwait(false);

            var recordTask = Task.Run(() => RecordFromFile(mediaSource, tempFile, targetFile, duration, onStarted, cancellationToken), cancellationToken);

            await tempFileTask.ConfigureAwait(false);
            await recordTask.ConfigureAwait(false);
        }
    }

    _logger.Info("Recording completed to file {0}", targetFile);
}
Author: softworkz, Project: Emby, Lines: 52, Source: EncodedRecorder.cs
Example 8: GetOutputPath
public string GetOutputPath(MediaSourceInfo mediaSource, string targetFile)
{
    if (_liveTvOptions.EnableOriginalAudioWithEncodedRecordings)
    {
        // If the audio is aac_latm, stream copying to mp4 will fail
        var streams = mediaSource.MediaStreams ?? new List<MediaStream>();
        if (streams.Any(i => i.Type == MediaStreamType.Audio && (i.Codec ?? string.Empty).IndexOf("aac", StringComparison.OrdinalIgnoreCase) != -1))
        {
            return Path.ChangeExtension(targetFile, ".mkv");
        }
    }

    return Path.ChangeExtension(targetFile, ".mp4");
}
Author: paul-777, Project: Emby, Lines: 14, Source: EncodedRecorder.cs
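As a quick illustration of the rule in Example 8, the hypothetical snippet below builds a MediaSourceInfo with an aac_latm audio stream and applies the same codec check; when original audio is kept, such a source is steered to an .mkv container. The snippet is a standalone sketch, not part of the recorder class above.

// Hypothetical illustration of the container choice in Example 8 (requires System.Linq).
var source = new MediaSourceInfo
{
    MediaStreams = new List<MediaStream>
    {
        new MediaStream { Type = MediaStreamType.Audio, Index = -1, Codec = "aac_latm" }
    }
};

var hasAac = (source.MediaStreams ?? new List<MediaStream>())
    .Any(i => i.Type == MediaStreamType.Audio && (i.Codec ?? string.Empty).IndexOf("aac", StringComparison.OrdinalIgnoreCase) != -1);

// aac_latm cannot be stream-copied into mp4, so .mkv is used instead.
var extension = hasAac ? ".mkv" : ".mp4";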
Example 9: Play
public void Play(string path, long startPositionTicks, bool isVideo, MediaSourceInfo mediaSource, BaseItemDto item, bool isFullScreen)
{
    _isFadingOut = false;
    _isVideo = isVideo;

    var forcedVideoRenderer = isFullScreen ? null : "evr";

    if (_player.PlayState != PlayState.Idle)
    {
        _player.Stop();
    }

    _player.Play(path, startPositionTicks, isVideo, item, mediaSource, forcedVideoRenderer);
}
Author: kabellrics, Project: Emby.Theater.Windows, Lines: 14, Source: DirectShowPlayerBridge.cs
Example 10: Run
private async Task Run(Video item, string itemModifier, MediaSourceInfo mediaSource, int width, CancellationToken cancellationToken)
{
    if (!HasBif(item, itemModifier, width, mediaSource))
    {
        await BifWriterSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false);

        try
        {
            if (!HasBif(item, itemModifier, width, mediaSource))
            {
                await CreateBif(item, itemModifier, width, mediaSource, cancellationToken).ConfigureAwait(false);
            }
        }
        finally
        {
            BifWriterSemaphore.Release();
        }
    }
}
Author: heksesang, Project: Emby.Plugins, Lines: 19, Source: VideoProcessor.cs
Example 11: Record
public async Task Record(MediaSourceInfo mediaSource, string targetFile, TimeSpan duration, Action onStarted, CancellationToken cancellationToken)
{
    var tempfile = Path.Combine(_appPaths.TranscodingTempPath, Guid.NewGuid().ToString("N") + ".ts");

    try
    {
        await RecordInternal(mediaSource, tempfile, targetFile, duration, onStarted, cancellationToken)
            .ConfigureAwait(false);
    }
    finally
    {
        try
        {
            File.Delete(tempfile);
        }
        catch (Exception ex)
        {
            _logger.ErrorException("Error deleting recording temp file", ex);
        }
    }
}
Author: softworkz, Project: Emby, Lines: 21, Source: EncodedRecorder.cs
Example 12: AddMediaSource
private void AddMediaSource(List<MediaSourceInfo> list,
    LocalItem item,
    MediaSourceInfo mediaSource,
    IServerSyncProvider provider,
    SyncTarget target)
{
    SetStaticMediaSourceInfo(item, mediaSource);

    var requiresDynamicAccess = provider as IHasDynamicAccess;
    if (requiresDynamicAccess != null)
    {
        mediaSource.RequiresOpening = true;

        var keyList = new List<string>();
        keyList.Add(provider.GetType().FullName.GetMD5().ToString("N"));
        keyList.Add(target.Id.GetMD5().ToString("N"));
        keyList.Add(item.Id);
        mediaSource.OpenToken = string.Join(StreamIdDelimeterString, keyList.ToArray());
    }

    list.Add(mediaSource);
}
Author: ratanparai, Project: Emby, Lines: 23, Source: SyncedMediaSourceProvider.cs
Example 13: Record
public async Task Record(MediaSourceInfo mediaSource, string targetFile, Action onStarted, CancellationToken cancellationToken)
{
    var httpRequestOptions = new HttpRequestOptions()
    {
        Url = mediaSource.Path
    };

    httpRequestOptions.BufferContent = false;

    using (var response = await _httpClient.SendAsync(httpRequestOptions, "GET").ConfigureAwait(false))
    {
        _logger.Info("Opened recording stream from tuner provider");

        using (var output = _fileSystem.GetFileStream(targetFile, FileMode.Create, FileAccess.Write, FileShare.Read))
        {
            onStarted();

            _logger.Info("Copying recording stream to file stream");

            await response.Content.CopyToAsync(output, StreamDefaults.DefaultCopyToBufferSize, cancellationToken).ConfigureAwait(false);
        }
    }
}
Author: ratanparai, Project: Emby, Lines: 23, Source: DirectRecorder.cs
Example 14: GetMediaSource
private MediaSourceInfo GetMediaSource(TunerHostInfo info, string channelId, string profile)
{
    int? width = null;
    int? height = null;
    bool isInterlaced = true;
    var videoCodec = !string.IsNullOrWhiteSpace(GetEncodingOptions().HardwareAccelerationType) ? null : "mpeg2video";

    int? videoBitrate = null;

    if (string.Equals(profile, "mobile", StringComparison.OrdinalIgnoreCase))
    {
        width = 1280;
        height = 720;
        isInterlaced = false;
        videoCodec = "h264";
        videoBitrate = 2000000;
    }
    else if (string.Equals(profile, "heavy", StringComparison.OrdinalIgnoreCase))
    {
        width = 1920;
        height = 1080;
        isInterlaced = false;
        videoCodec = "h264";
        videoBitrate = 15000000;
    }
    else if (string.Equals(profile, "internet720", StringComparison.OrdinalIgnoreCase))
    {
        width = 1280;
        height = 720;
        isInterlaced = false;
        videoCodec = "h264";
        videoBitrate = 8000000;
    }
    else if (string.Equals(profile, "internet540", StringComparison.OrdinalIgnoreCase))
    {
        width = 1280;
        height = 720;
        isInterlaced = false;
        videoCodec = "h264";
        videoBitrate = 2500000;
    }
    else if (string.Equals(profile, "internet480", StringComparison.OrdinalIgnoreCase))
    {
        width = 848;
        height = 480;
        isInterlaced = false;
        videoCodec = "h264";
        videoBitrate = 2000000;
    }
    else if (string.Equals(profile, "internet360", StringComparison.OrdinalIgnoreCase))
    {
        width = 640;
        height = 360;
        isInterlaced = false;
        videoCodec = "h264";
        videoBitrate = 1500000;
    }
    else if (string.Equals(profile, "internet240", StringComparison.OrdinalIgnoreCase))
    {
        width = 432;
        height = 240;
        isInterlaced = false;
        videoCodec = "h264";
        videoBitrate = 1000000;
    }

    var url = GetApiUrl(info, true) + "/auto/v" + channelId;

    if (!string.IsNullOrWhiteSpace(profile) && !string.Equals(profile, "native", StringComparison.OrdinalIgnoreCase))
    {
        url += "?transcode=" + profile;
    }

    var mediaSource = new MediaSourceInfo
    {
        Path = url,
        Protocol = MediaProtocol.Http,
        MediaStreams = new List<MediaStream>
        {
            new MediaStream
            {
                Type = MediaStreamType.Video,
                // Set the index to -1 because we don't know the exact index of the video stream within the container
                Index = -1,
                IsInterlaced = isInterlaced,
                Codec = videoCodec,
                Width = width,
                Height = height,
                BitRate = videoBitrate
            },
            new MediaStream
            {
                Type = MediaStreamType.Audio,
                // Set the index to -1 because we don't know the exact index of the audio stream within the container
                Index = -1,
                Codec = "ac3",
                BitRate = 192000
            }
        },
        //......... part of the code is omitted here .........
Author: rezafouladian, Project: Emby, Lines: 101, Source: HdHomerunHost.cs
Example 15: GetVersionInfo
private static MediaSourceInfo GetVersionInfo(bool enablePathSubstitution, Video i, MediaSourceType type)
{
    var mediaStreams = MediaSourceManager.GetMediaStreams(i.Id)
        .ToList();

    var locationType = i.LocationType;

    var info = new MediaSourceInfo
    {
        Id = i.Id.ToString("N"),
        IsoType = i.IsoType,
        Protocol = locationType == LocationType.Remote ? MediaProtocol.Http : MediaProtocol.File,
        MediaStreams = mediaStreams,
        Name = GetMediaSourceName(i, mediaStreams),
        Path = enablePathSubstitution ? GetMappedPath(i.Path, locationType) : i.Path,
        RunTimeTicks = i.RunTimeTicks,
        Video3DFormat = i.Video3DFormat,
        VideoType = i.VideoType,
        Container = i.Container,
        Size = i.Size,
        Timestamp = i.Timestamp,
        Type = type,
        PlayableStreamFileNames = i.PlayableStreamFileNames.ToList(),
        SupportsDirectStream = i.VideoType == VideoType.VideoFile
    };

    if (info.Protocol == MediaProtocol.File)
    {
        info.ETag = i.DateModified.Ticks.ToString(CultureInfo.InvariantCulture).GetMD5().ToString("N");
    }

    if (i.IsShortcut)
    {
        info.Path = i.ShortcutPath;

        if (info.Path.StartsWith("Http", StringComparison.OrdinalIgnoreCase))
        {
            info.Protocol = MediaProtocol.Http;
        }
        else if (info.Path.StartsWith("Rtmp", StringComparison.OrdinalIgnoreCase))
        {
            info.Protocol = MediaProtocol.Rtmp;
        }
        else if (info.Path.StartsWith("Rtsp", StringComparison.OrdinalIgnoreCase))
        {
            info.Protocol = MediaProtocol.Rtsp;
        }
        else
        {
            info.Protocol = MediaProtocol.File;
        }
    }

    if (string.IsNullOrEmpty(info.Container))
    {
        if (i.VideoType == VideoType.VideoFile || i.VideoType == VideoType.Iso)
        {
            if (!string.IsNullOrWhiteSpace(i.Path) && locationType != LocationType.Remote && locationType != LocationType.Virtual)
            {
                info.Container = System.IO.Path.GetExtension(i.Path).TrimStart('.');
            }
        }
    }

    try
    {
        var bitrate = i.TotalBitrate ??
            info.MediaStreams.Where(m => m.Type != MediaStreamType.Subtitle && !string.Equals(m.Codec, "mjpeg", StringComparison.OrdinalIgnoreCase))
                .Select(m => m.BitRate ?? 0)
                .Sum();

        if (bitrate > 0)
        {
            info.Bitrate = bitrate;
        }
    }
    catch (OverflowException ex)
    {
        Logger.ErrorException("Error calculating total bitrate", ex);
    }

    return info;
}
Author: 7illusions, Project: Emby, Lines: 83, Source: Video.cs
Example 16: AddMediaInfo
private async Task AddMediaInfo(MediaSourceInfo mediaSource, bool isAudio, CancellationToken cancellationToken)
{
    var originalRuntime = mediaSource.RunTimeTicks;

    mediaSource.DefaultSubtitleStreamIndex = null;

    // Null this out so that it will be treated like a live stream
    if (!originalRuntime.HasValue)
    {
        mediaSource.RunTimeTicks = null;
    }

    var audioStream = mediaSource.MediaStreams.FirstOrDefault(i => i.Type == Model.Entities.MediaStreamType.Audio);

    if (audioStream == null || audioStream.Index == -1)
    {
        mediaSource.DefaultAudioStreamIndex = null;
    }
    else
    {
        mediaSource.DefaultAudioStreamIndex = audioStream.Index;
    }

    var videoStream = mediaSource.MediaStreams.FirstOrDefault(i => i.Type == Model.Entities.MediaStreamType.Video);
    if (videoStream != null)
    {
        if (!videoStream.BitRate.HasValue)
        {
            var width = videoStream.Width ?? 1920;

            if (width >= 1900)
            {
                videoStream.BitRate = 8000000;
            }
            else if (width >= 1260)
            {
                videoStream.BitRate = 3000000;
            }
            else if (width >= 700)
            {
                videoStream.BitRate = 1000000;
            }
        }
    }

    // Try to estimate this
    if (!mediaSource.Bitrate.HasValue)
    {
        var total = mediaSource.MediaStreams.Select(i => i.BitRate ?? 0).Sum();

        if (total > 0)
        {
            mediaSource.Bitrate = total;
        }
    }
}
Author: rezafouladian, Project: Emby, Lines: 58, Source: LiveTvMediaSourceProvider.cs
Example 17: GetVideoDirectPlayProfile
private PlayMethod? GetVideoDirectPlayProfile(DeviceProfile profile,
    MediaSourceInfo mediaSource,
    MediaStream videoStream,
    MediaStream audioStream,
    bool isEligibleForDirectPlay,
    bool isEligibleForDirectStream)
{
    // See if it can be direct played
    DirectPlayProfile directPlay = null;
    foreach (DirectPlayProfile i in profile.DirectPlayProfiles)
    {
        if (i.Type == DlnaProfileType.Video && IsVideoDirectPlaySupported(i, mediaSource, videoStream, audioStream))
        {
            directPlay = i;
            break;
        }
    }

    if (directPlay == null)
    {
        _logger.Debug("Profile: {0}, No direct play profiles found for Path: {1}",
            profile.Name ?? "Unknown Profile",
            mediaSource.Path ?? "Unknown path");

        return null;
    }

    string container = mediaSource.Container;

    List<ProfileCondition> conditions = new List<ProfileCondition>();
    foreach (ContainerProfile i in profile.ContainerProfiles)
    {
        if (i.Type == DlnaProfileType.Video &&
            ListHelper.ContainsIgnoreCase(i.GetContainers(), container))
        {
            foreach (ProfileCondition c in i.Conditions)
            {
                conditions.Add(c);
            }
        }
    }

    ConditionProcessor conditionProcessor = new ConditionProcessor();

    int? width = videoStream == null ? null : videoStream.Width;
    int? height = videoStream == null ? null : videoStream.Height;
    int? bitDepth = videoStream == null ? null : videoStream.BitDepth;
    int? videoBitrate = videoStream == null ? null : videoStream.BitRate;
    double? videoLevel = videoStream == null ? null : videoStream.Level;
    string videoProfile = videoStream == null ? null : videoStream.Profile;
    // Fall back to the real frame rate when the average frame rate is unknown
    float? videoFramerate = videoStream == null ? null : videoStream.AverageFrameRate ?? videoStream.RealFrameRate;
    bool? isAnamorphic = videoStream == null ? null : videoStream.IsAnamorphic;
    bool? isCabac = videoStream == null ? null : videoStream.IsCabac;

    int? audioBitrate = audioStream == null ? null : audioStream.BitRate;
    int? audioChannels = audioStream == null ? null : audioStream.Channels;
    string audioProfile = audioStream == null ? null : audioStream.Profile;

    TransportStreamTimestamp? timestamp = videoStream == null ? TransportStreamTimestamp.None : mediaSource.Timestamp;
    int? packetLength = videoStream == null ? null : videoStream.PacketLength;
    int? refFrames = videoStream == null ? null : videoStream.RefFrames;

    int? numAudioStreams = mediaSource.GetStreamCount(MediaStreamType.Audio);
    int? numVideoStreams = mediaSource.GetStreamCount(MediaStreamType.Video);

    // Check container conditions
    foreach (ProfileCondition i in conditions)
    {
        if (!conditionProcessor.IsVideoConditionSatisfied(i, width, height, bitDepth, videoBitrate, videoProfile, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, isCabac, refFrames, numVideoStreams, numAudioStreams))
        {
            LogConditionFailure(profile, "VideoContainerProfile", i, mediaSource);

            return null;
        }
    }

    string videoCodec = videoStream == null ? null : videoStream.Codec;

    if (string.IsNullOrEmpty(videoCodec))
    {
        _logger.Debug("Profile: {0}, DirectPlay=false. Reason=Unknown video codec. Path: {1}",
            profile.Name ?? "Unknown Profile",
            mediaSource.Path ?? "Unknown path");

        return null;
    }

    conditions = new List<ProfileCondition>();
    foreach (CodecProfile i in profile.CodecProfiles)
    {
        if (i.Type == CodecType.Video && i.ContainsCodec(videoCodec, container))
        {
            foreach (ProfileCondition c in i.Conditions)
            {
                conditions.Add(c);
            }
        }
    }

    foreach (ProfileCondition i in conditions)
    //......... part of the code is omitted here .........
Author: jrags56, Project: MediaBrowser, Lines: 101, Source: StreamBuilder.cs
Example 18: Normalize
private void Normalize(MediaSourceInfo mediaSource, bool isVideo)
{
    if (mediaSource.MediaStreams.Count == 0)
    {
        if (isVideo)
        {
            mediaSource.MediaStreams.AddRange(new List<MediaStream>
            {
                new MediaStream
                {
                    Type = MediaStreamType.Video,
                    // Set the index to -1 because we don't know the exact index of the video stream within the container
                    Index = -1,

                    // Set to true if unknown to enable deinterlacing
                    IsInterlaced = true
                },
                new MediaStream
                {
                    Type = MediaStreamType.Audio,
                    // Set the index to -1 because we don't know the exact index of the audio stream within the container
                    Index = -1
                }
            });
        }
        else
        {
            mediaSource.MediaStreams.AddRange(new List<MediaStream>
            {
                new MediaStream
                {
                    Type = MediaStreamType.Audio,
                    // Set the index to -1 because we don't know the exact index of the audio stream within the container
                    Index = -1
                }
            });
        }
    }

    // Clean some bad data coming from providers
    foreach (var stream in mediaSource.MediaStreams)
    {
        if (stream.BitRate.HasValue && stream.BitRate <= 0)
        {
            stream.BitRate = null;
        }
        if (stream.Channels.HasValue && stream.Channels <= 0)
        {
            stream.Channels = null;
        }
        if (stream.AverageFrameRate.HasValue && stream.AverageFrameRate <= 0)
        {
            stream.AverageFrameRate = null;
        }
        if (stream.RealFrameRate.HasValue && stream.RealFrameRate <= 0)
        {
            stream.RealFrameRate = null;
        }
        if (stream.Width.HasValue && stream.Width <= 0)
        {
            stream.Width = null;
        }
        if (stream.Height.HasValue && stream.Height <= 0)
        {
            stream.Height = null;
        }
        if (stream.SampleRate.HasValue && stream.SampleRate <= 0)
        {
            stream.SampleRate = null;
        }
        if (stream.Level.HasValue && stream.Level <= 0)
        {
            stream.Level = null;
        }
    }

    var indexes = mediaSource.MediaStreams.Select(i => i.Index).Distinct().ToList();

    // If there are duplicate stream indexes, set them all to unknown
    if (indexes.Count != mediaSource.MediaStreams.Count)
    {
        foreach (var stream in mediaSource.MediaStreams)
        {
            stream.Index = -1;
        }
    }

    // Set the total bitrate if not already supplied
    if (!mediaSource.Bitrate.HasValue)
    {
        var total = mediaSource.MediaStreams.Select(i => i.BitRate ?? 0).Sum();

        if (total > 0)
        {
            mediaSource.Bitrate = total;
        }
    }
}
Author: rezafouladian, Project: Emby, Lines: 98, Source: LiveTvManager.cs
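Normalize is a private method of LiveTvManager, so it cannot be called directly from outside the class. The hypothetical snippet below reproduces its two key effects on a hand-built MediaSourceInfo, purely to illustrate what the method above does: non-positive provider data is reset to unknown, and the container bitrate is estimated from the stream bitrates.

// Hypothetical illustration of the cleanup performed by Normalize in Example 18 (requires System.Linq).
var source = new MediaSourceInfo
{
    MediaStreams = new List<MediaStream>
    {
        new MediaStream { Type = MediaStreamType.Video, Index = -1, BitRate = 4000000, Width = 0 },   // Width = 0 is bad provider data
        new MediaStream { Type = MediaStreamType.Audio, Index = -1, BitRate = 192000, Channels = -1 } // Channels = -1 is bad provider data
    }
};

foreach (var stream in source.MediaStreams)
{
    // Non-positive values are treated as unknown
    if (stream.Width.HasValue && stream.Width <= 0) stream.Width = null;
    if (stream.Channels.HasValue && stream.Channels <= 0) stream.Channels = null;
}

// Estimate the container bitrate from the stream bitrates when it was not supplied
if (!source.Bitrate.HasValue)
{
    var total = source.MediaStreams.Select(i => i.BitRate ?? 0).Sum();
    if (total > 0)
    {
        source.Bitrate = total; // 4,192,000 in this example
    }
}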
Example 19: LiveStream
public LiveStream(MediaSourceInfo mediaSource)
{
    OriginalMediaSource = mediaSource;
    OpenedMediaSource = mediaSource;
    EnableStreamSharing = true;
}
Author: t-andre, Project: Emby, Lines: 6, Source: LiveStream.cs
Example 20: Normalize
private void Normalize(MediaSourceInfo mediaSource, ILiveTvService service, bool isVideo)
{
    if (mediaSource.MediaStreams.Count == 0)
    {
        if (isVideo)
        {
            mediaSource.MediaStreams.AddRange(new List<MediaStream>
            {
                new MediaStream
                {
                    Type = MediaStreamType.Video,
                    // Set the index to -1 because we don't know the exact index of the video stream within the container
                    Index = -1,

                    // Set to true if unknown to enable deinterlacing
                    IsInterlaced = true
                },
                new MediaStream
                {
                    Type = MediaStreamType.Audio,
                    // Set the index to -1 because we don't know the exact index of the audio stream within the container
                    Index = -1
                }
            });
        }
        else
        {
            mediaSource.MediaStreams.AddRange(new List<MediaStream>
            {
                new MediaStream
                {
                    Type = MediaStreamType.Audio,
                    // Set the index to -1 because we don't know the exact index of the audio stream within the container
                    Index = -1
                }
            });
        }
    }

    // Clean some bad data coming from providers
    foreach (var stream in mediaSource.MediaStreams)
    {
        if (stream.BitRate.HasValue && stream.BitRate <= 0)
        {
            stream.BitRate = null;
        }
        if (stream.Channels.HasValue && stream.Channels <= 0)
        {
            stream.Channels = null;
        }
        if (stream.AverageFrameRate.HasValue && stream.AverageFrameRate <= 0)
        {
            stream.AverageFrameRate = null;
        }
        if (stream.RealFrameRate.HasValue && stream.RealFrameRate <= 0)
        {
            stream.RealFrameRate = null;
        }
        //......... part of the code is omitted here .........