Skip to content

Commit

Permalink
Merge branch 'develop'
Browse files Browse the repository at this point in the history
  • Loading branch information
kingslay committed Apr 15, 2024
2 parents abb198b + 39dda2f commit 5dd6262
Show file tree
Hide file tree
Showing 11 changed files with 58 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ jobs:
xcodebuild -version
- name: Build
run: |
swift build -v
swift build
swift build --sdk `xcrun -sdk iphonesimulator -show-sdk-path` -Xswiftc -target -Xswiftc x86_64-apple-ios13.0-simulator
swift build --sdk `xcrun -sdk appletvsimulator -show-sdk-path` -Xswiftc -target -Xswiftc x86_64-apple-tvos13.0-simulator
swift build --sdk `xcrun -sdk xrsimulator -show-sdk-path` -Xswiftc -target -Xswiftc x86_64-apple-xros1.0-simulator
Expand Down
9 changes: 4 additions & 5 deletions Sources/KSPlayer/AVPlayer/KSOptions.swift
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ open class KSOptions {
*/
public var seekFlags = Int32(1)
// ffmpeg only cache http
// This switch cannot be used, because ff_tempfile: Cannot open temporary file
public var cache = false
// record stream
public var outputURL: URL?
Expand Down Expand Up @@ -357,13 +358,10 @@ open class KSOptions {
}

open func videoClockSync(main: KSClock, nextVideoTime: TimeInterval, fps: Float, frameCount: Int) -> (Double, ClockProcessType) {
var desire = main.getTime() - videoDelay
#if !os(macOS)
desire -= AVAudioSession.sharedInstance().outputLatency
#endif
let desire = main.getTime() - videoDelay
let diff = nextVideoTime - desire
// print("[video] video diff \(diff) nextVideoTime \(nextVideoTime) main \(main.time.seconds)")
if diff >= 1 / Double(fps * 2) {
if diff >= 1 / Double(max(fps * 2, 100)) {
videoClockDelayCount = 0
return (diff, .remain)
} else {
Expand Down Expand Up @@ -480,6 +478,7 @@ public extension KSOptions {
static var canStartPictureInPictureAutomaticallyFromInline = true
static var preferredFrame = true
static var displayCriteriaFormatDescriptionEnabled = false
static var useSystemHTTPProxy = true
/// 日志级别
static var logLevel = LogLevel.warning
static var logger: LogHandler = OSLog(lable: "KSPlayer")
Expand Down
3 changes: 3 additions & 0 deletions Sources/KSPlayer/AVPlayer/MediaPlayerProtocol.swift
Original file line number Diff line number Diff line change
Expand Up @@ -319,6 +319,9 @@ public extension CMFormatDescription {
}

func setHttpProxy() {
guard KSOptions.useSystemHTTPProxy else {
return
}
guard let proxySettings = CFNetworkCopySystemProxySettings()?.takeUnretainedValue() as? NSDictionary else {
unsetenv("http_proxy")
return
Expand Down
8 changes: 6 additions & 2 deletions Sources/KSPlayer/MEPlayer/AudioEnginePlayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ public protocol AudioOutput: FrameOutput {
var isMuted: Bool { get set }
init()
func prepare(audioFormat: AVAudioFormat)
func play()
}

public protocol AudioDynamicsProcessor {
Expand Down Expand Up @@ -307,7 +306,12 @@ public class AudioEnginePlayer: AudioOutput {
if let currentRender {
let currentPreparePosition = currentRender.timestamp + currentRender.duration * Int64(currentRenderReadOffset) / Int64(currentRender.numberOfSamples)
if currentPreparePosition > 0 {
renderSource?.setAudio(time: currentRender.timebase.cmtime(for: currentPreparePosition), position: currentRender.position)
var time = currentRender.timebase.cmtime(for: currentPreparePosition)
#if !os(macOS)
// AVSampleBufferAudioRenderer does not need to compensate for outputLatency, but all other audio outputs do. Without Bluetooth, outputLatency is 0.015; with Bluetooth headphones it is 0.176.
time = time - CMTime(seconds: AVAudioSession.sharedInstance().outputLatency, preferredTimescale: time.timescale)
#endif
renderSource?.setAudio(time: time, position: currentRender.position)
}
}
}
Expand Down
6 changes: 5 additions & 1 deletion Sources/KSPlayer/MEPlayer/AudioGraphPlayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -285,7 +285,11 @@ extension AudioGraphPlayer {
if let currentRender {
let currentPreparePosition = currentRender.timestamp + currentRender.duration * Int64(currentRenderReadOffset) / Int64(currentRender.numberOfSamples)
if currentPreparePosition > 0 {
renderSource?.setAudio(time: currentRender.timebase.cmtime(for: currentPreparePosition), position: currentRender.position)
var time = currentRender.timebase.cmtime(for: currentPreparePosition)
#if !os(macOS)
time = time - CMTime(seconds: AVAudioSession.sharedInstance().outputLatency, preferredTimescale: time.timescale)
#endif
renderSource?.setAudio(time: time, position: currentRender.position)
}
}
}
Expand Down
17 changes: 14 additions & 3 deletions Sources/KSPlayer/MEPlayer/AudioRendererPlayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,12 @@ public class AudioRendererPlayer: AudioOutput {
// }
}

public func prepare(audioFormat _: AVAudioFormat) {}
public func prepare(audioFormat: AVAudioFormat) {
#if !os(macOS)
try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(Int(audioFormat.channelCount))
KSLog("[audio] set preferredOutputNumberOfChannels: \(audioFormat.channelCount)")
#endif
}

public func play() {
let time: CMTime
Expand Down Expand Up @@ -85,7 +90,7 @@ public class AudioRendererPlayer: AudioOutput {
}
self.request()
}
periodicTimeObserver = synchronizer.addPeriodicTimeObserver(forInterval: CMTime(seconds: 0.02), queue: .main) { [weak self] time in
periodicTimeObserver = synchronizer.addPeriodicTimeObserver(forInterval: CMTime(seconds: 0.01), queue: .main) { [weak self] time in
guard let self else {
return
}
Expand Down Expand Up @@ -124,8 +129,14 @@ public class AudioRendererPlayer: AudioOutput {
render = AudioFrame(array: array)
}
if let sampleBuffer = render.toCMSampleBuffer() {
renderer.audioTimePitchAlgorithm = render.audioFormat.channelCount > 2 ? .spectral : .timeDomain
let channelCount = render.audioFormat.channelCount
renderer.audioTimePitchAlgorithm = channelCount > 2 ? .spectral : .timeDomain
renderer.enqueue(sampleBuffer)
#if !os(macOS)
if AVAudioSession.sharedInstance().preferredInputNumberOfChannels != channelCount {
try? AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(Int(channelCount))
}
#endif
}
}
}
Expand Down
6 changes: 5 additions & 1 deletion Sources/KSPlayer/MEPlayer/AudioUnitPlayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,11 @@ extension AudioUnitPlayer {
if let currentRender {
let currentPreparePosition = currentRender.timestamp + currentRender.duration * Int64(currentRenderReadOffset) / Int64(currentRender.numberOfSamples)
if currentPreparePosition > 0 {
renderSource?.setAudio(time: currentRender.timebase.cmtime(for: currentPreparePosition), position: currentRender.position)
var time = currentRender.timebase.cmtime(for: currentPreparePosition)
#if !os(macOS)
time = time - CMTime(seconds: AVAudioSession.sharedInstance().outputLatency, preferredTimescale: time.timescale)
#endif
renderSource?.setAudio(time: time, position: currentRender.position)
}
}
}
Expand Down
1 change: 1 addition & 0 deletions Sources/KSPlayer/MEPlayer/KSMEPlayer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -327,6 +327,7 @@ extension KSMEPlayer: MediaPlayerProtocol {
}
self.options = options
playerItem.delegate = self
audioOutput.flush()
audioOutput.renderSource = playerItem
videoOutput?.renderSource = playerItem
videoOutput?.options = options
Expand Down
1 change: 0 additions & 1 deletion Sources/KSPlayer/MEPlayer/MetalPlayView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ public protocol VideoOutput: FrameOutput {
var pixelBuffer: PixelBufferProtocol? { get }
init(options: KSOptions)
func invalidate()
func play()
func readNextFrame()
}

Expand Down
2 changes: 1 addition & 1 deletion Sources/KSPlayer/MEPlayer/Model.swift
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ public protocol FrameOutput: AnyObject {
var renderSource: OutputRenderSourceDelegate? { get set }
func pause()
func flush()
func play()
}

protocol MEFrame: ObjectQueueItem {
Expand All @@ -81,7 +82,6 @@ public extension KSOptions {
static var enableSensor = true
static var stackSize = 65536
static var isClearVideoWhereReplace = true
/// true: AVSampleBufferAudioRenderer false: AVAudioEngine
static var audioPlayerType: AudioOutput.Type = AudioEnginePlayer.self
static var videoPlayerType: (VideoOutput & UIView).Type = MetalPlayView.self
static var yadifMode = 1
Expand Down
24 changes: 18 additions & 6 deletions Sources/KSPlayer/Subtitle/KSSubtitle.swift
Original file line number Diff line number Diff line change
Expand Up @@ -240,26 +240,38 @@ open class SubtitleModel: ObservableObject {
case .smaller:
#if os(tvOS) || os(xrOS)
return 48
#elseif os(macOS)
#elseif os(macOS) || os(xrOS)
return 20
#else
return 12
if UI_USER_INTERFACE_IDIOM() == .phone {
return 12
} else {
return 20
}
#endif
case .standard:
#if os(tvOS) || os(xrOS)
return 58
#elseif os(macOS)
#elseif os(macOS) || os(xrOS)
return 26
#else
return 16
if UI_USER_INTERFACE_IDIOM() == .phone {
return 16
} else {
return 26
}
#endif
case .large:
#if os(tvOS) || os(xrOS)
return 68
#elseif os(macOS)
#elseif os(macOS) || os(xrOS)
return 32
#else
return 20
if UI_USER_INTERFACE_IDIOM() == .phone {
return 20
} else {
return 32
}
#endif
}
}
Expand Down

0 comments on commit 5dd6262

Please sign in to comment.