I need to load all of the videos from the documents directory, merge them into a single video, and then upload that video to a server. However, the code below only merges two videos. How can I merge multiple videos?
func mergeVideo(index: Int) {
    print("index:\(index)")
    let item: AVPlayerItem!
    Utility.showActivityIndicator()
    var url = String()
    var url2 = String()
    url = fileManager.appending(videoFileNames[index] + ".MOV")
    url2 = fileManager.appending(videoFileNames[index + 1] + ".MOV")
    let avAsset = AVAsset(url: NSURL(fileURLWithPath: url) as URL)
    let avAsset2 = AVAsset(url: NSURL(fileURLWithPath: url2) as URL)
    firstAsset = avAsset
    secondAsset = avAsset2
    if let firstAsset = firstAsset, let secondAsset = secondAsset {
        // 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
        let mixComposition = AVMutableComposition()
        // 2 - Create two video tracks
        let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration), of: firstAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
        } catch _ {
            print("Failed to load first track")
        }
        let secondTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration), of: secondAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: firstAsset.duration)
        } catch _ {
            print("Failed to load second track")
        }
        // 2.1
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration))
        // 2.2
        let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
        firstInstruction.setOpacity(0.0, at: firstAsset.duration)
        let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)
        // 2.3
        mainInstruction.layerInstructions = [firstInstruction, secondInstruction]
        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
        item = AVPlayerItem(asset: mixComposition)
        item.videoComposition = mainComposition
        // 3 - Code for Audio track
        /*
        if let loadedAudioAsset = audioAsset {
            let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: 0)
            do {
                try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)),
                                               of: loadedAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0],
                                               at: kCMTimeZero)
            } catch _ {
                print("Failed to load Audio track")
            }
        }
        */
        // 4 - Get path
        let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .long
        dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: Date())
        let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo-\(date).mov")
        MergeURL = URL(fileURLWithPath: savePath)
        // 5 - Create Exporter
        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
        exporter.outputURL = MergeURL
        exporter.outputFileType = AVFileTypeQuickTimeMovie
        exporter.shouldOptimizeForNetworkUse = true
        exporter.videoComposition = mainComposition
        // 6 - Perform the Export
        exporter.exportAsynchronously() {
            DispatchQueue.main.async {
                self.exportDidFinish(exporter)
            }
        }
    }
}

func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
    var assetOrientation = UIImageOrientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .right
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .left
        isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
        assetOrientation = .up
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .down
    }
    return (assetOrientation, isPortrait)
}

func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let transform = assetTrack.preferredTransform
    let assetInfo = orientationFromTransform(transform)
    var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
    if assetInfo.isPortrait {
        scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), at: kCMTimeZero)
    } else {
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
        if assetInfo.orientation == .down {
            let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(M_PI))
            let windowBounds = UIScreen.main.bounds
            let yFix = assetTrack.naturalSize.height + windowBounds.height
            let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
            concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
        }
        instruction.setTransform(concat, at: kCMTimeZero)
    }
    return instruction
}
Best answer
func mergeVideo(_ mAssetsList: [AVAsset]) {
    let mainComposition = AVMutableVideoComposition()
    var startDuration: CMTime = kCMTimeZero
    let mainInstruction = AVMutableVideoCompositionInstruction()
    let mixComposition = AVMutableComposition()
    var allVideoInstruction = [AVMutableVideoCompositionLayerInstruction]()
    var assets = mAssetsList
    var strCaption = EMPTY_STRING
    for i in 0 ..< assets.count {
        let currentAsset: AVAsset = assets[i] // Current asset.
        let currentTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try currentTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, currentAsset.duration), of: currentAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: startDuration)
            // Creates the layer instruction for the current video asset.
            let currentInstruction: AVMutableVideoCompositionLayerInstruction = videoCompositionInstructionForTrack(currentTrack, asset: currentAsset)
            // Fade-in effect at the start of the video.
            currentInstruction.setOpacityRamp(fromStartOpacity: 0.0,
                                              toEndOpacity: 1.0,
                                              timeRange: CMTimeRangeMake(startDuration, CMTimeMake(1, 1)))
            if i != assets.count - 1 {
                // Sets the fade-out effect at the end of the video.
                currentInstruction.setOpacityRamp(fromStartOpacity: 1.0,
                                                  toEndOpacity: 0.0,
                                                  timeRange: CMTimeRangeMake(CMTimeSubtract(CMTimeAdd(currentAsset.duration, startDuration), CMTimeMake(1, 1)),
                                                                             CMTimeMake(2, 1)))
            }
            let transform: CGAffineTransform = currentTrack.preferredTransform
            if orientationFromTransform(transform).isPortrait {
                let outputSize: CGSize = CGSize(width: VIDEO_WIDTH, height: VIDEO_HEIGHT)
                let horizontalRatio = CGFloat(outputSize.width) / currentTrack.naturalSize.width
                let verticalRatio = CGFloat(outputSize.height) / currentTrack.naturalSize.height
                let scaleToFitRatio = max(horizontalRatio, verticalRatio) // scaleAspectFill
                let FirstAssetScaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                if currentAsset.g_orientation == .landscapeLeft {
                    let rotation = CGAffineTransform(rotationAngle: .pi)
                    let translateToCenter = CGAffineTransform(translationX: VIDEO_WIDTH, y: VIDEO_HEIGHT)
                    let mixedTransform = rotation.concatenating(translateToCenter)
                    currentInstruction.setTransform(currentTrack.preferredTransform.concatenating(FirstAssetScaleFactor).concatenating(mixedTransform), at: kCMTimeZero)
                } else {
                    currentInstruction.setTransform(currentTrack.preferredTransform.concatenating(FirstAssetScaleFactor), at: kCMTimeZero)
                }
            }
            allVideoInstruction.append(currentInstruction) // Add the video instruction to the instructions array.
            startDuration = CMTimeAdd(startDuration, currentAsset.duration)
        } catch _ {
            print(ERROR_LOADING_VIDEO)
        }
    }
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, startDuration)
    mainInstruction.layerInstructions = allVideoInstruction
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = CGSize(width: 640, height: 480)
    // Create path to store the merged video.
    let savePath = (getDocumentsDirectory() as NSString).appendingPathComponent("\(MERGED_VIDEO).mp4")
    let url = URL(fileURLWithPath: savePath)
    deleteFileAtPath(savePath)
    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVEXPORT_PRESET_NAME) else { return }
    exporter.outputURL = url
    exporter.outputFileType = AVFileTypeQuickTimeMovie
    exporter.shouldOptimizeForNetworkUse = false
    exporter.videoComposition = mainComposition
    // Perform the export.
    exporter.exportAsynchronously() {
        DispatchQueue.main.async {
            self.exportDidFinish(exporter)
        }
    }
}

func exportDidFinish(_ session: AVAssetExportSession) {
    if session.status == AVAssetExportSessionStatus.completed {
        print(session.outputURL)
    }
}

func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    return instruction
}

extension AVAsset {
    var g_size: CGSize {
        return tracks(withMediaType: AVMediaTypeVideo).first?.naturalSize ?? .zero
    }
    var g_orientation: UIInterfaceOrientation {
        guard let transform = tracks(withMediaType: AVMediaTypeVideo).first?.preferredTransform else {
            return .portrait
        }
        switch (transform.tx, transform.ty) {
        case (0, 0):
            return .landscapeRight
        case (g_size.width, g_size.height):
            return .landscapeLeft
        case (0, g_size.width):
            return .portraitUpsideDown
        default:
            return .portrait
        }
    }
}
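The answer refers to a few constants and helpers that were not posted (EMPTY_STRING, VIDEO_WIDTH, VIDEO_HEIGHT, MERGED_VIDEO, ERROR_LOADING_VIDEO, AVEXPORT_PRESET_NAME, getDocumentsDirectory(), deleteFileAtPath()). A minimal sketch of plausible definitions, purely as an assumption about what the author intended:

// Assumed constants/helpers: the names come from the answer above, but their
// definitions were not posted, so the values here are only a guess.
let EMPTY_STRING = ""
let VIDEO_WIDTH: CGFloat = 640
let VIDEO_HEIGHT: CGFloat = 480
let MERGED_VIDEO = "mergedVideo"
let ERROR_LOADING_VIDEO = "Failed to load video track"
let AVEXPORT_PRESET_NAME = AVAssetExportPresetHighestQuality

func getDocumentsDirectory() -> String {
    return NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
}

func deleteFileAtPath(_ path: String) {
    // Remove any previous export at the same path so AVAssetExportSession can write the new file.
    if FileManager.default.fileExists(atPath: path) {
        try? FileManager.default.removeItem(atPath: path)
    }
}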
Create an array of AVAssets from all the videos in the documents directory and pass it to the mergeVideo function to merge them all.
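For example, the call site might look like the following sketch, assuming the clips are .MOV files stored directly in the documents directory and using the hypothetical getDocumentsDirectory() helper from above:

// Build an AVAsset for every .mov/.MOV file in the Documents directory and merge them.
// This is only a sketch; adjust the extension filter and sort order to your file naming.
func mergeAllVideosInDocuments() {
    let documentsPath = getDocumentsDirectory()
    let fileNames = (try? FileManager.default.contentsOfDirectory(atPath: documentsPath)) ?? []
    let assets: [AVAsset] = fileNames
        .filter { $0.lowercased().hasSuffix(".mov") }
        .sorted()
        .map { AVAsset(url: URL(fileURLWithPath: (documentsPath as NSString).appendingPathComponent($0))) }
    guard assets.count > 1 else { return }
    mergeVideo(assets)
}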
On the topic of ios - merging multiple videos from a directory, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/41973286/