I need to add a visual effect (blur) view with a centered UIActivityIndicatorView and a "Loading" label, fullscreen on all devices, using Objective-C. I am using a storyboard. As shown in the image posted below, I can't get the visual effect (blur) view to sit on top of everything and cover the whole screen.

Sanju
- You should do it in code, and you'd better write a base class to handle it. – Lumialxk Jan 06 '16 at 05:14
- This answer does something similar: it creates a blur effect, and you can add the activity indicator to that view. To implement it, create a base class and call your code there. https://stackoverflow.com/questions/32208830/modal-form-sheet-on-ipad-transparent-in-swift/32209870#32209870 – Gjchoza Jan 07 '16 at 18:45
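Following the suggestion in the comments, a minimal sketch of such a base class might look like the following. This is only an illustration: the class, property, and method names (LoadingViewController, loadingOverlay, showLoadingOverlay, hideLoadingOverlay) are made up for this example, and it assumes iOS 8 or later for UIVisualEffectView.

// LoadingViewController.h (hypothetical base class)
#import <UIKit/UIKit.h>

@interface LoadingViewController : UIViewController
- (void)showLoadingOverlay;
- (void)hideLoadingOverlay;
@end

// LoadingViewController.m
#import "LoadingViewController.h"

@interface LoadingViewController ()
@property (nonatomic, strong) UIVisualEffectView *loadingOverlay;
@end

@implementation LoadingViewController

- (void)showLoadingOverlay {
    if (self.loadingOverlay) { return; }

    // Fullscreen blur that resizes with the view on every device.
    UIBlurEffect *blur = [UIBlurEffect effectWithStyle:UIBlurEffectStyleLight];
    UIVisualEffectView *overlay = [[UIVisualEffectView alloc] initWithEffect:blur];
    overlay.frame = self.view.bounds;
    overlay.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;

    // Centered activity indicator.
    UIActivityIndicatorView *spinner = [[UIActivityIndicatorView alloc]
        initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
    spinner.center = CGPointMake(CGRectGetMidX(overlay.bounds), CGRectGetMidY(overlay.bounds));
    spinner.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleBottomMargin |
                               UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin;
    [spinner startAnimating];

    // "Loading" label just below the indicator.
    UILabel *label = [[UILabel alloc] initWithFrame:CGRectZero];
    label.text = @"Loading";
    label.textColor = [UIColor darkGrayColor];
    [label sizeToFit];
    label.center = CGPointMake(spinner.center.x, spinner.center.y + 40.0);
    label.autoresizingMask = spinner.autoresizingMask;

    [overlay.contentView addSubview:spinner];
    [overlay.contentView addSubview:label];
    [self.view addSubview:overlay];
    self.loadingOverlay = overlay;
}

- (void)hideLoadingOverlay {
    [self.loadingOverlay removeFromSuperview];
    self.loadingOverlay = nil;
}

@end

Any view controller that subclasses it can then call [self showLoadingOverlay] before starting a long task and [self hideLoadingOverlay] when the task finishes.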
2 Answers
The easiest and cleanest way is to just drag a Visual Effect View into (or over) your UIView in the storyboard. You then need to set Auto Layout constraints on the Visual Effect View so that it fills the screen on every device. (In the screenshot, the selected view is the Visual Effect View.)
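If you add the Visual Effect View in code instead of dragging it into the storyboard, the equivalent "pin to all four edges" constraints can be sketched like this (layout anchors require iOS 9; the variable name effectView is illustrative):

UIVisualEffectView *effectView = [[UIVisualEffectView alloc]
    initWithEffect:[UIBlurEffect effectWithStyle:UIBlurEffectStyleLight]];
effectView.translatesAutoresizingMaskIntoConstraints = NO;
[self.view addSubview:effectView];

// Pin the effect view to all four edges of the controller's view.
[NSLayoutConstraint activateConstraints:@[
    [effectView.topAnchor constraintEqualToAnchor:self.view.topAnchor],
    [effectView.bottomAnchor constraintEqualToAnchor:self.view.bottomAnchor],
    [effectView.leadingAnchor constraintEqualToAnchor:self.view.leadingAnchor],
    [effectView.trailingAnchor constraintEqualToAnchor:self.view.trailingAnchor],
]];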

vhong
You can try the code below to add the blur effect. Here loginView is a BlurView laid over your content, and backgroundImageView is the view that gets snapshotted and blurred:
// Usage
[loginView blurBackgroundWithScreenshotView:backgroundImageView
                                   withType:BlurSubView
                                  withColor:BlurBlackView
                                  withPoint:CGPointZero];
//
// UIImage+Blur.h
//
// Created by Ankit Thakur on 07/03/14.
//
#import <UIKit/UIKit.h>
@interface UIImage (Blur)

// Two tint variants, matching the BlurWhiteView / BlurBlackView options used by BlurView.
- (UIImage *)blurredWhiteSnapshot;
- (UIImage *)blurredBlackSnapshot;

@end
//
// UIImage+Blur.m
//
// Created by Ankit Thakur on 07/03/14.
//
#import "UIImage+Blur.h"
#import <Accelerate/Accelerate.h>
@implementation UIImage (Blur)
- (UIImage *)applyBlurWithRadius:(CGFloat)blurRadius tintColor:(UIColor *)tintColor saturationDeltaFactor:(CGFloat)saturationDeltaFactor maskImage:(UIImage *)maskImage
{
    // Check pre-conditions.
    if (self.size.width < 1 || self.size.height < 1) {
        NSLog(@"*** error: invalid size: (%.2f x %.2f). Both dimensions must be >= 1: %@", self.size.width, self.size.height, self);
        return nil;
    }
    if (!self.CGImage) {
        NSLog(@"*** error: image must be backed by a CGImage: %@", self);
        return nil;
    }
    if (maskImage && !maskImage.CGImage) {
        NSLog(@"*** error: maskImage must be backed by a CGImage: %@", maskImage);
        return nil;
    }

    CGRect imageRect = { CGPointZero, self.size };
    UIImage *effectImage = self;

    BOOL hasBlur = blurRadius > __FLT_EPSILON__;
    BOOL hasSaturationChange = fabs(saturationDeltaFactor - 1.) > __FLT_EPSILON__;
    if (hasBlur || hasSaturationChange) {
        UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
        CGContextRef effectInContext = UIGraphicsGetCurrentContext();
        CGContextScaleCTM(effectInContext, 1.0, -1.0);
        CGContextTranslateCTM(effectInContext, 0, -self.size.height);
        CGContextDrawImage(effectInContext, imageRect, self.CGImage);

        vImage_Buffer effectInBuffer;
        effectInBuffer.data     = CGBitmapContextGetData(effectInContext);
        effectInBuffer.width    = CGBitmapContextGetWidth(effectInContext);
        effectInBuffer.height   = CGBitmapContextGetHeight(effectInContext);
        effectInBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectInContext);

        UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
        CGContextRef effectOutContext = UIGraphicsGetCurrentContext();
        vImage_Buffer effectOutBuffer;
        effectOutBuffer.data     = CGBitmapContextGetData(effectOutContext);
        effectOutBuffer.width    = CGBitmapContextGetWidth(effectOutContext);
        effectOutBuffer.height   = CGBitmapContextGetHeight(effectOutContext);
        effectOutBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectOutContext);

        if (hasBlur) {
            // A description of how to compute the box kernel width from the Gaussian
            // radius (aka standard deviation) appears in the SVG spec:
            // http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement
            //
            // For larger values of 's' (s >= 2.0), an approximation can be used: Three
            // successive box-blurs build a piece-wise quadratic convolution kernel, which
            // approximates the Gaussian kernel to within roughly 3%.
            //
            // let d = floor(s * 3*sqrt(2*pi)/4 + 0.5)
            //
            // ... if d is odd, use three box-blurs of size 'd', centered on the output pixel.
            //
            CGFloat inputRadius = blurRadius * [[UIScreen mainScreen] scale];
            NSUInteger radius = floor(inputRadius * 3. * sqrt(2 * M_PI) / 4 + 0.5);
            if (radius % 2 != 1) {
                radius += 1; // force radius to be odd so that the three box-blur methodology works.
            }
            vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, radius, radius, 0, kvImageEdgeExtend);
            vImageBoxConvolve_ARGB8888(&effectOutBuffer, &effectInBuffer, NULL, 0, 0, radius, radius, 0, kvImageEdgeExtend);
            vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, radius, radius, 0, kvImageEdgeExtend);
        }

        BOOL effectImageBuffersAreSwapped = NO;
        if (hasSaturationChange) {
            CGFloat s = saturationDeltaFactor;
            CGFloat floatingPointSaturationMatrix[] = {
                0.0722 + 0.9278 * s,  0.0722 - 0.0722 * s,  0.0722 - 0.0722 * s,  0,
                0.7152 - 0.7152 * s,  0.7152 + 0.2848 * s,  0.7152 - 0.7152 * s,  0,
                0.2126 - 0.2126 * s,  0.2126 - 0.2126 * s,  0.2126 + 0.7873 * s,  0,
                0,                    0,                    0,                    1,
            };
            const int32_t divisor = 256;
            NSUInteger matrixSize = sizeof(floatingPointSaturationMatrix)/sizeof(floatingPointSaturationMatrix[0]);
            int16_t saturationMatrix[matrixSize];
            for (NSUInteger i = 0; i < matrixSize; ++i) {
                saturationMatrix[i] = (int16_t)roundf(floatingPointSaturationMatrix[i] * divisor);
            }
            if (hasBlur) {
                vImageMatrixMultiply_ARGB8888(&effectOutBuffer, &effectInBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags);
                effectImageBuffersAreSwapped = YES;
            }
            else {
                vImageMatrixMultiply_ARGB8888(&effectInBuffer, &effectOutBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags);
            }
        }

        if (!effectImageBuffersAreSwapped)
            effectImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        if (effectImageBuffersAreSwapped)
            effectImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
    }

    // Set up output context.
    UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]);
    CGContextRef outputContext = UIGraphicsGetCurrentContext();
    CGContextScaleCTM(outputContext, 1.0, -1.0);
    CGContextTranslateCTM(outputContext, 0, -self.size.height);

    // Draw base image.
    CGContextDrawImage(outputContext, imageRect, self.CGImage);

    // Draw effect image.
    if (hasBlur) {
        CGContextSaveGState(outputContext);
        if (maskImage) {
            CGContextClipToMask(outputContext, imageRect, maskImage.CGImage);
        }
        CGContextDrawImage(outputContext, imageRect, effectImage.CGImage);
        CGContextRestoreGState(outputContext);
    }

    // Add in color tint.
    if (tintColor) {
        CGContextSaveGState(outputContext);
        CGContextSetFillColorWithColor(outputContext, tintColor.CGColor);
        CGContextFillRect(outputContext, imageRect);
        CGContextRestoreGState(outputContext);
    }

    // Output image is ready.
    UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return outputImage;
}
- (UIImage *)blurredWhiteSnapshot {
    // Light, semi-transparent tint over the blurred image.
    UIColor *tintColor = [UIColor colorWithWhite:0.8 alpha:0.7];
    return [self applyBlurWithRadius:5 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil];
}

- (UIImage *)blurredBlackSnapshot {
    // Dark tint variant; these exact white/alpha values are an assumption.
    UIColor *tintColor = [UIColor colorWithWhite:0.1 alpha:0.7];
    return [self applyBlurWithRadius:5 tintColor:tintColor saturationDeltaFactor:1.8 maskImage:nil];
}
@end
//
// BlurView.h
//
// Created by Ankit Thakur on 07/03/14.
//
#import <UIKit/UIKit.h>
typedef NS_ENUM(NSUInteger, BlurViewType) {
    BlurSubView,
    BlurPopView
};

typedef NS_ENUM(NSUInteger, BlurViewColorType) {
    BlurWhiteView,
    BlurBlackView
};

@interface BlurView : UIView

- (void)blurBackgroundWithScreenshotView:(UIView *)screenshotView withType:(BlurViewType)type withColor:(BlurViewColorType)colorType withPoint:(CGPoint)point;

@end
//
// BlurView.m
//
// Created by Ankit Thakur on 07/03/14.
//
#import "BlurView.h"
#import "UIImage+Blur.h"
@interface BlurView ()
@end
@implementation BlurView
- (id)initWithCoder:(NSCoder *)aDecoder
{
    self = [super initWithCoder:aDecoder];
    if (self) {
        // Initialization code
    }
    return self;
}
- (void)blurBackgroundWithScreenshotView:(UIView *)screenshotView withType:(BlurViewType)type withColor:(BlurViewColorType)colorType withPoint:(CGPoint)point {
    NSLog(@"%ld", (long)screenshotView.tag);

    // Remove any blur image left over from a previous call.
    if ([self viewWithTag:111]) {
        [[self viewWithTag:111] removeFromSuperview];
    }

    @autoreleasepool {
        // Work out where to start rendering the screenshot so the blur lines up
        // with the portion of the screen this view covers.
        CGPoint origin = self.frame.origin;
        switch (type) {
            case BlurSubView:
                origin.x = -origin.x;
                origin.y = -origin.y;
                break;
            case BlurPopView:
                origin.x = -point.x;
                origin.y = -point.y;
                break;
            default:
                break;
        }

        // Render the screenshot view into an image the size of this view.
        UIImage *blurImage = nil;
        UIGraphicsBeginImageContext(self.bounds.size);
        CGContextTranslateCTM(UIGraphicsGetCurrentContext(), origin.x, origin.y);
        [screenshotView.layer renderInContext:UIGraphicsGetCurrentContext()];
        blurImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        NSData *imageData = UIImageJPEGRepresentation(blurImage, 1.0);
        blurImage = [UIImage imageWithData:imageData];

        // Apply the requested tint variant.
        switch (colorType) {
            case BlurWhiteView:
                blurImage = [blurImage blurredWhiteSnapshot];
                break;
            case BlurBlackView:
                blurImage = [blurImage blurredBlackSnapshot];
                break;
            default:
                break;
        }

        // Show the blurred image behind any other subviews of this view.
        UIImageView *imageView = [[UIImageView alloc] initWithFrame:self.bounds];
        imageView.tag = 111;
        imageView.image = blurImage;
        [self addSubview:imageView];
        [self sendSubviewToBack:imageView];
        blurImage = nil;
    }
}
@end
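To tie this back to the original question, one possible way to use the BlurView above from a view controller is sketched below. The spinner and label layering is an assumption on top of the answer's code, and loadingBlurView is an illustrative name:

// In a view controller (sketch only).
BlurView *loadingBlurView = [[BlurView alloc] initWithFrame:self.view.bounds];
loadingBlurView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
[self.view addSubview:loadingBlurView];

// Blur whatever is currently on screen.
[loadingBlurView blurBackgroundWithScreenshotView:self.view
                                         withType:BlurSubView
                                        withColor:BlurBlackView
                                        withPoint:CGPointZero];

// Centered spinner and "Loading" label on top of the blurred image.
UIActivityIndicatorView *spinner = [[UIActivityIndicatorView alloc]
    initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
spinner.center = CGPointMake(CGRectGetMidX(loadingBlurView.bounds), CGRectGetMidY(loadingBlurView.bounds));
[spinner startAnimating];
[loadingBlurView addSubview:spinner];

UILabel *loadingLabel = [[UILabel alloc] init];
loadingLabel.text = @"Loading";
loadingLabel.textColor = [UIColor whiteColor];
[loadingLabel sizeToFit];
loadingLabel.center = CGPointMake(spinner.center.x, spinner.center.y + 40.0);
[loadingBlurView addSubview:loadingLabel];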

Ankit Thakur