UIVideoEditorController: video trimming


No replies to this topic

#1 macQUB

  • 334 posts
  • From: Podkarpacie

Posted 03 December 2014 - 12:53

I have a problem with this controller: after trimming, the video loses quality. I set videoQuality to UIImagePickerControllerQualityTypeIFrame1280x720, but the resulting video still comes out at 360 × 640, i.e. half the size.

My code:

Trimming:

- (void)viewDidLoad {
  [super viewDidLoad];
  if ([UIVideoEditorController canEditVideoAtPath:self.videoPath]) {

      self.editor = [UIVideoEditorController new];
      self.editor.videoPath = self.videoPath;
      self.editor.videoMaximumDuration = 180.0;
      self.editor.videoQuality =
          UIImagePickerControllerQualityTypeIFrame1280x720;
      self.editor.delegate = self;

      [self.navigationController presentViewController:self.editor
                                              animated:NO
                                            completion:nil];

  }
}

- (void)videoEditorController:(UIVideoEditorController *)editor
     didSaveEditedVideoToPath:(NSString *)editedVideoPath {

  [self removeImage:editor.videoPath];

  [editor dismissViewControllerAnimated:YES
                             completion:^{

                             }];
#if DEBUG
  AVAsset *asset =
      [AVAsset assetWithURL:[NSURL fileURLWithPath:editedVideoPath]];
  NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];

  // Grab the video track first, then read its format description and size.
  AVAssetTrack *videoTrack = nil;
  if ([videoTracks count] > 0)
    videoTrack = [videoTracks objectAtIndex:0];

  CMFormatDescriptionRef formatDescription = NULL;
  NSArray *formatDescriptions = [videoTrack formatDescriptions];
  if ([formatDescriptions count] > 0)
    formatDescription =
        (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];

  CGSize trackDimensions = [videoTrack naturalSize];

  int width = trackDimensions.width;
  int height = trackDimensions.height;
  DLog(@"Resolution = %d X %d", width, height);
   
#endif
   
  if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(editedVideoPath)) {

    [self convertVideo:[NSURL fileURLWithPath:editedVideoPath]];

  } else {
    DLog(@"cannot crop video on path %@", editedVideoPath);
  }
} 
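For comparison, a minimal sketch (not part of the code above; the trim range, output URL handling and method name are placeholders) of trimming with AVAssetExportSession and the passthrough preset, which remuxes instead of re-encoding and therefore keeps the source resolution:

// Sketch only: trim by remuxing with the passthrough preset (keeps source resolution).
// The 180-second range and output file type are assumptions for illustration.
- (void)trimVideoAtURL:(NSURL *)sourceURL toURL:(NSURL *)outputURL {
  AVAsset *asset = [AVAsset assetWithURL:sourceURL];
  AVAssetExportSession *exporter =
      [[AVAssetExportSession alloc] initWithAsset:asset
                                       presetName:AVAssetExportPresetPassthrough];
  exporter.outputURL = outputURL;
  exporter.outputFileType = AVFileTypeQuickTimeMovie;
  // Keep only the first 180 seconds; replace with the user's selection.
  exporter.timeRange =
      CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180.0, 600));
  [exporter exportAsynchronouslyWithCompletionHandler:^{
    if (exporter.status == AVAssetExportSessionStatusCompleted)
      DLog(@"Trimmed without re-encoding: %@", outputURL);
    else
      DLog(@"Export failed: %@", exporter.error);
  }];
}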

Recording:

- (void)setupCamera {
  //---------------------------------
  //----- SETUP CAPTURE SESSION -----
  //---------------------------------
  DLog(@"Setting up capture session");
  captureSession = [[AVCaptureSession alloc] init];
 
  //----- ADD INPUTS -----
  DLog(@"Adding video input");
 
  // ADD VIDEO INPUT
  AVCaptureDevice *VideoDevice =
      [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  if (VideoDevice) {
    NSError *error;
    videoInputDevice =
        [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
    if (!error) {
      if ([captureSession canAddInput:videoInputDevice])
        [captureSession addInput:videoInputDevice];
      else
        DLog(@"Couldn't add video input");
    } else {
      DLog(@"Couldn't create video input");
    }
  } else {
    DLog(@"Couldn't create video capture device");
  }
 
  //----- ADD OUTPUTS -----
 
  // ADD VIDEO PREVIEW LAYER
  DLog(@"Adding video preview layer");
  [self setPreviewLayer:[[AVCaptureVideoPreviewLayer alloc]
                            initWithSession:captureSession]];
 
  //  _PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight;
  //  <<SET ORIENTATION. You can deliberately set this wrong to flip the image,
  //  and may actually need to set it wrong to get the right image.
 
  [[self previewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
 
  // ADD MOVIE FILE OUTPUT
  DLog(@"Adding movie file output");
  movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
   
  Float64 TotalSeconds = 180;       // Maximum recording length in seconds
  int32_t preferredTimeScale = 100; // Timescale for CMTime (ticks per second)
 
  CMTime maxDuration = CMTimeMakeWithSeconds(
      TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
  movieFileOutput.maxRecordedDuration = maxDuration;
 
  // Minimum free space (in bytes) that must remain on the volume for recording to continue.
  movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024;
  if ([captureSession canAddOutput:movieFileOutput])
    [captureSession addOutput:movieFileOutput];
 
  // SET THE CONNECTION PROPERTIES (output properties)
  [self CameraSetOutputProperties]; // (A separate method because it also has to
                                    // be done after switching cameras.)
  //  AVCaptureConnection *captureConnection =
  //      [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
  //  if (captureConnection.supportsVideoMinFrameDuration)
  //      captureConnection.videoMinFrameDuration =
  //          CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
  //  if (captureConnection.supportsVideoMaxFrameDuration)
  //      captureConnection.videoMaxFrameDuration =
  //          CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
 
  //----- SET THE IMAGE QUALITY / RESOLUTION -----
  // Options:
  //   AVCaptureSessionPresetHigh     - Highest recording quality (varies per device)
  //   AVCaptureSessionPresetMedium   - Suitable for WiFi sharing (actual values may change)
  //   AVCaptureSessionPresetLow      - Suitable for 3G sharing (actual values may change)
  //   AVCaptureSessionPreset640x480  - 640x480 VGA (check it's supported before setting it)
  //   AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check it's supported before setting it)
  //   AVCaptureSessionPresetPhoto    - Full photo resolution (not supported for video output)
  DLog(@"Setting image quality");
 
  // Check that size-based presets are supported before setting them.
  if ([captureSession canSetSessionPreset:AVCaptureSessionPresetiFrame1280x720])
    [captureSession setSessionPreset:AVCaptureSessionPresetiFrame1280x720];
 
  //----- DISPLAY THE PREVIEW LAYER -----
  // Display it full screen under our view controller's existing controls
  DLog(@"Display the preview layer");
  CGRect layerRect = self.containerFrame;
  [self.previewLayer setBounds:layerRect];
  [self.previewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),
                                             CGRectGetMidY(layerRect))];
  //[[[self view] layer] addSublayer:[[self CaptureManager] previewLayer]];
  // We use this instead so it goes on a layer behind our UI controls (avoids us
  // having to manually bring each control to the front):
  UIView *CameraView = [[UIView alloc] init];
  [self.containerView addSubview:CameraView];
  // Keep the camera view behind the other controls in the container.
  [self.containerView sendSubviewToBack:CameraView];
  [[CameraView layer] addSublayer:self.previewLayer];
 
  //----- START THE CAPTURE SESSION RUNNING -----
  [captureSession startRunning];
  WeAreRecording = NO;
}
 
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void)CameraSetOutputProperties {
  // SET THE CONNECTION PROPERTIES (output properties)
 
  AVCaptureDevice *device =
      [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] firstObject];
  if ([device isTorchModeSupported:AVCaptureTorchModeOn]) {
    [device lockForConfiguration:nil];
    // Set frame rate (if required)
    CMTimeShow(device.activeVideoMinFrameDuration);
    CMTimeShow(device.activeVideoMaxFrameDuration);
 
    device.activeVideoMinFrameDuration =
        CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
 
    device.activeVideoMaxFrameDuration =
        CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
 
    CMTimeShow(device.activeVideoMinFrameDuration);
    CMTimeShow(device.activeVideoMaxFrameDuration);
    [device unlockForConfiguration];
  }
}
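A small sketch (an assumption, not in the code above): before forcing activeVideoMin/MaxFrameDuration, the requested rate can be checked against the active format's supported ranges (iOS 7 API):

// Sketch only: verify CAPTURE_FRAMES_PER_SECOND against the active format's
// supported frame-rate ranges before locking it in.
AVCaptureDevice *device =
    [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] firstObject];
BOOL rateSupported = NO;
for (AVFrameRateRange *range in device.activeFormat.videoSupportedFrameRateRanges) {
  if (CAPTURE_FRAMES_PER_SECOND >= range.minFrameRate &&
      CAPTURE_FRAMES_PER_SECOND <= range.maxFrameRate) {
    rateSupported = YES;
    break;
  }
}
if (rateSupported && [device lockForConfiguration:NULL]) {
  device.activeVideoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
  device.activeVideoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
  [device unlockForConfiguration];
}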
 
//********** GET CAMERA IN SPECIFIED POSITION IF IT EXISTS **********
- (AVCaptureDevice *)CameraWithPosition:(AVCaptureDevicePosition)Position {
  NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  for (AVCaptureDevice *Device in Devices) {
    if ([Device position] == Position) {
      return Device;
    }
  }
  return nil;
}
 
- (void)StartStopButtonPressed {
 
  if (!WeAreRecording) {
    //----- START RECORDING -----
    DLog(@"START RECORDING");
    WeAreRecording = YES;
 
    // Create temporary URL to record to
    NSString *outputPath = [[NSString alloc]
        initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
      NSError *error;
      if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
        // Error - handle if required
      }
    }
    // Start recording
    [movieFileOutput startRecordingToOutputFileURL:outputURL
                                 recordingDelegate:self];
  } else {
    //----- STOP RECORDING -----
    DLog(@"STOP RECORDING");
    WeAreRecording = NO;
 
    [movieFileOutput stopRecording];
  }
}
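The AVCaptureFileOutputRecordingDelegate callback isn't shown above; a sketch of how the recorded file could be handed on to the editor (the error check and the use of self.videoPath are assumptions about the wiring):

// Sketch of the recording delegate wiring (assumed, not from the code above).
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
    didFinishRecordingToOutputFileURL:(NSURL *)outputFileURL
                      fromConnections:(NSArray *)connections
                                error:(NSError *)error {
  BOOL recordedOK = YES;
  if (error &&
      ![error.userInfo[AVErrorRecordingSuccessfullyFinishedKey] boolValue])
    recordedOK = NO;

  if (recordedOK)
    self.videoPath = outputFileURL.path; // later consumed by UIVideoEditorController
  else
    DLog(@"Recording failed: %@", error);
}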

iPhone 5, iOS 7.


macQUB edited this post on 03 December 2014 - 12:53




