#import <MobileCoreServices/MobileCoreServices.h> // for kUTTypeMovie
#import <AVFoundation/AVFoundation.h>             // for AVURLAsset and AVAssetImageGenerator

/*!
 * Shutter button (starts recording)
 */
- (IBAction)shutterButtonTapped:(id)sender {
    // Record a movie with UIImagePickerController
    UIImagePickerController *picker = [[UIImagePickerController alloc] init];
    picker.sourceType = UIImagePickerControllerSourceTypeCamera;
    picker.mediaTypes = @[(NSString *)kUTTypeMovie];
    picker.cameraCaptureMode = UIImagePickerControllerCameraCaptureModeVideo;
    picker.videoQuality = UIImagePickerControllerQualityType640x480;
    picker.videoMaximumDuration = 10.0; // seconds
    picker.allowsEditing = NO;
    picker.delegate = self;
    [self presentViewController:picker animated:YES completion:nil];
}
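/*!
 * Cancel handler: a minimal sketch of the standard
 * UIImagePickerControllerDelegate callback; without it the picker would
 * stay on screen when the user taps Cancel.
 */
- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker
{
    [picker dismissViewControllerAnimated:YES completion:nil];
}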
/*!
 * Called when recording finishes
 */
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    // Dismiss the picker
    [picker dismissViewControllerAnimated:YES completion:nil];
    // Locate the ~/Documents directory
    NSArray *directories = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentDirectory = directories[0];
    // Wrap the recorded movie in an asset
    NSURL *assetURL = info[UIImagePickerControllerMediaURL];
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetURL options:nil];
    // Build the array of timestamps at which frames will be extracted
    Float64 frameInterval = 0.033; // ~1/30 s between extracted frames (≈30 fps)
    Float64 durationSeconds = CMTimeGetSeconds([asset duration]); // movie length in seconds
    Float64 frameTimeStamp = 0;
    NSMutableArray *times = [NSMutableArray array];
    while (frameTimeStamp <= durationSeconds) {
        // timescale 600 is a common multiple of typical frame rates (24/25/30 fps)
        [times addObject:[NSValue valueWithCMTime:CMTimeMakeWithSeconds(frameTimeStamp, 600)]];
        frameTimeStamp += frameInterval;
    }
    // Create the image generator
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES;
    generator.requestedTimeToleranceBefore = kCMTimeZero;
    generator.requestedTimeToleranceAfter = kCMTimeZero;
    // Generate the image series (the handler below is called once per entry in times)
    [generator generateCGImagesAsynchronouslyForTimes:times
                                    completionHandler:^(CMTime requestedTime, CGImageRef image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error) {
        NSString *requestedTimeString = (NSString *)CFBridgingRelease(CMTimeCopyDescription(NULL, requestedTime));
        NSString *actualTimeString = (NSString *)CFBridgingRelease(CMTimeCopyDescription(NULL, actualTime));
        NSLog(@"Requested: %@; actual: %@", requestedTimeString, actualTimeString);
        if (result == AVAssetImageGeneratorSucceeded) {
            // Convert the frame to a JPEG and save it
            [self writeCGImageTo:documentDirectory image:image];
            // iCnt++;
            // Should completion be detected by comparing [times count] with the
            // number of saved images? e.g. if [times count] == iCnt, dismiss
            // SVProgressHUD (one possible approach is sketched after this listing)
        }
        if (result == AVAssetImageGeneratorFailed) {
            NSLog(@"Failed with error: %@", [error localizedDescription]);
        }
        if (result == AVAssetImageGeneratorCancelled) {
            NSLog(@"Cancelled");
        }
    }];
}
/*!
 * Converts a CGImageRef extracted from the movie into a JPEG image (file names
 * are numbered sequentially via imageNameNumber) and writes it to the given directory
 */
- (BOOL)writeCGImageTo:(NSString *)path image:(CGImageRef)cgImage
{
    NSLog(@"Called");
    NSString *str = [NSString stringWithFormat:@"img_%@.jpg", @(self.imageNameNumber++)];
    path = [path stringByAppendingPathComponent:str];
    UIImage *saveImage = [UIImage imageWithCGImage:cgImage];
    NSData *data = UIImageJPEGRepresentation(saveImage, 1.0); // quality: 0.0 (lowest) to 1.0 (highest)
    BOOL result = [data writeToFile:path atomically:YES];
    return result;
}
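/*!
 * One way to resolve the TODO above: count completion-handler callbacks and
 * compare the count with the total number of requested times. A minimal
 * sketch, assuming SVProgressHUD was shown before generation started and that
 * a (hypothetical) NSUInteger property named processedFrameCount exists.
 * Every callback is counted, not just successes, so a failed frame cannot
 * stall the check; the HUD is dismissed on the main queue because the
 * generator invokes its handler on a background queue.
 */
- (void)frameProcessed:(NSUInteger)totalFrameCount
{
    // Call this once per callback from the completion handler above, e.g.
    // [self frameProcessed:[times count]];
    self.processedFrameCount++;
    if (self.processedFrameCount == totalFrameCount) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [SVProgressHUD dismiss]; // all frames handled: stop the spinner
        });
    }
}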