Three Ways to Implement Frosted Glass and Image Blur Effects on iOS

App designs often call for blur or frosted-glass effects, and iOS already provides APIs that make them easy to implement. Below are three approaches: Core Image, the vImage functions in the Accelerate framework, and a frosted-glass routine found online.

Method 1: Blurring with Core Image

// Blurs an image with Core Image's CIGaussianBlur filter.
// Assumes self.context is a reusable CIContext property, e.g.
// self.context = [CIContext contextWithOptions:nil];
- (UIImage *)blurryImage:(UIImage *)image
           withBlurLevel:(CGFloat)blur {
    CIImage *inputImage = [CIImage imageWithCGImage:image.CGImage];
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"
                                  keysAndValues:kCIInputImageKey, inputImage,
                                                kCIInputRadiusKey, @(blur),
                                                nil];

    CIImage *outputImage = filter.outputImage;

    // Render the filter output back into a CGImage.
    CGImageRef outImage = [self.context createCGImage:outputImage
                                             fromRect:[outputImage extent]];
    UIImage *result = [UIImage imageWithCGImage:outImage];
    CGImageRelease(outImage); // createCGImage returns a +1 reference, so release it
    return result;
}
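A minimal call site might look like the sketch below. The `context` and `imageView` properties and the asset name are assumptions made for illustration; here `blur` is the Gaussian radius in pixels, and values around 5–10 are typical.

// Hypothetical usage inside a view controller that declares
// @property (nonatomic, strong) CIContext *context;
// @property (nonatomic, strong) UIImageView *imageView;
- (void)applyCoreImageBlur {
    if (self.context == nil) {
        // Create the CIContext once and reuse it; creating it is expensive.
        self.context = [CIContext contextWithOptions:nil];
    }
    UIImage *source = [UIImage imageNamed:@"photo"]; // placeholder asset name
    self.imageView.image = [self blurryImage:source withBlurLevel:8.0]; // radius in pixels
}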

Method 2: Blurring with the vImage API (Accelerate framework)

// Blurs an image with a box convolution from the Accelerate framework.
// Requires: #import <Accelerate/Accelerate.h>
- (UIImage *)blurryImage:(UIImage *)image withBlurLevel:(CGFloat)blur {
    // Clamp the blur level to (0, 1] and map it to an odd box-kernel size.
    if (blur < 0.f || blur > 1.f) {
        blur = 0.5f;
    }
    int boxSize = (int)(blur * 100);
    boxSize = boxSize - (boxSize % 2) + 1; // the kernel size must be odd

    CGImageRef img = image.CGImage;

    vImage_Buffer inBuffer, outBuffer;
    vImage_Error error;

    void *pixelBuffer;

    // Copy the source image's raw bitmap data into the input buffer.
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);

    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

    // Allocate a destination buffer of the same size.
    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));
    if (pixelBuffer == NULL) {
        NSLog(@"No pixelbuffer");
        CFRelease(inBitmapData);
        return image;
    }

    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);

    // Apply a single box blur; a larger boxSize means a stronger blur.
    error = vImageBoxConvolve_ARGB8888(&inBuffer,
                                       &outBuffer,
                                       NULL,
                                       0,
                                       0,
                                       boxSize,
                                       boxSize,
                                       NULL,
                                       kvImageEdgeExtend);

    if (error != kvImageNoError) {
        NSLog(@"error from convolution %ld", error);
    }

    // Wrap the blurred pixel data back into a CGImage/UIImage.
    // Note: this assumes the source bitmap is 8-bit-per-channel RGBA
    // with no alpha that needs to be preserved.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                             outBuffer.width,
                                             outBuffer.height,
                                             8,
                                             outBuffer.rowBytes,
                                             colorSpace,
                                             kCGImageAlphaNoneSkipLast);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

    // Clean up.
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(imageRef);
    free(pixelBuffer);
    CFRelease(inBitmapData);

    return returnImage;
}
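Calling it looks the same as the Core Image version, but note that here `blur` is a 0–1 strength that is mapped to the box-kernel size, not a radius in pixels. Because the convolution is CPU-bound, it is worth running it off the main thread for large images; the sketch below does that, with the asset name and `imageView` property as placeholder assumptions.

// Hypothetical usage: run the CPU-bound convolution on a background queue,
// then update the UI back on the main queue.
UIImage *source = [UIImage imageNamed:@"photo"]; // placeholder asset name
dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
    UIImage *blurred = [self blurryImage:source withBlurLevel:0.4]; // 0–1 strength
    dispatch_async(dispatch_get_main_queue(), ^{
        self.imageView.image = blurred;
    });
});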

Method 3: A frosted-glass effect found online (implemented as a UIImage category)

// Core method: applies the frosted-glass effect. Parameters: blur radius, tint color,
// saturation factor, and an optional mask image. It operates on self (a UIImage category)
// and requires #import <Accelerate/Accelerate.h>.
- (UIImage *)imageBluredWithRadius:(CGFloat)blurRadius tintColor:(UIColor *)tintColor saturationDeltaFactor:(CGFloat)saturationDeltaFactor maskImage:(UIImage *)maskImage { 
    CGRect imageRect = { CGPointZero, self.size }; 
    UIImage *effectImage = self; 
    BOOL hasBlur = blurRadius > __FLT_EPSILON__; 
    BOOL hasSaturationChange = fabs(saturationDeltaFactor - 1.) > __FLT_EPSILON__; 
    if (hasBlur || hasSaturationChange) { 
        UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]); 
        CGContextRef effectInContext = UIGraphicsGetCurrentContext(); 
        CGContextScaleCTM(effectInContext, 1.0, -1.0); 
        CGContextTranslateCTM(effectInContext, 0, -self.size.height); 
        CGContextDrawImage(effectInContext, imageRect, self.CGImage); 
         
        vImage_Buffer effectInBuffer; 
        effectInBuffer.data     = CGBitmapContextGetData(effectInContext); 
        effectInBuffer.width    = CGBitmapContextGetWidth(effectInContext); 
        effectInBuffer.height   = CGBitmapContextGetHeight(effectInContext); 
        effectInBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectInContext); 
         
        UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]); 
        CGContextRef effectOutContext = UIGraphicsGetCurrentContext(); 
        vImage_Buffer effectOutBuffer; 
        effectOutBuffer.data     = CGBitmapContextGetData(effectOutContext); 
        effectOutBuffer.width    = CGBitmapContextGetWidth(effectOutContext); 
        effectOutBuffer.height   = CGBitmapContextGetHeight(effectOutContext); 
        effectOutBuffer.rowBytes = CGBitmapContextGetBytesPerRow(effectOutContext); 
         
        if (hasBlur) { 
            CGFloat inputRadius = blurRadius * [[UIScreen mainScreen] scale]; 
            NSUInteger radius = floor(inputRadius * 3. * sqrt(2 * M_PI) / 4 + 0.5); 
            if (radius % 2 != 1) { 
                radius += 1; // force radius to be odd so that the three box-blur methodology works. 
            } 
            vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, (short)radius, (short)radius, 0, kvImageEdgeExtend); 
            vImageBoxConvolve_ARGB8888(&effectOutBuffer, &effectInBuffer, NULL, 0, 0, (short)radius, (short)radius, 0, kvImageEdgeExtend); 
            vImageBoxConvolve_ARGB8888(&effectInBuffer, &effectOutBuffer, NULL, 0, 0, (short)radius, (short)radius, 0, kvImageEdgeExtend); 
        } 
        BOOL effectImageBuffersAreSwapped = NO; 
        if (hasSaturationChange) { 
            CGFloat s = saturationDeltaFactor; 
            CGFloat floatingPointSaturationMatrix[] = { 
                0.0722 + 0.9278 * s,  0.0722 - 0.0722 * s,  0.0722 - 0.0722 * s,  0, 
                0.7152 - 0.7152 * s,  0.7152 + 0.2848 * s,  0.7152 - 0.7152 * s,  0, 
                0.2126 - 0.2126 * s,  0.2126 - 0.2126 * s,  0.2126 + 0.7873 * s,  0, 
                0,                    0,                    0,  1, 
            }; 
            const int32_t divisor = 256; 
            NSUInteger matrixSize = sizeof(floatingPointSaturationMatrix)/sizeof(floatingPointSaturationMatrix[0]); 
            int16_t saturationMatrix[matrixSize]; 
            for (NSUInteger i = 0; i < matrixSize; ++i) { 
                saturationMatrix[i] = (int16_t)roundf(floatingPointSaturationMatrix[i] * divisor); 
            } 
            if (hasBlur) { 
                vImageMatrixMultiply_ARGB8888(&effectOutBuffer, &effectInBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags); 
                effectImageBuffersAreSwapped = YES; 
            } 
            else { 
                vImageMatrixMultiply_ARGB8888(&effectInBuffer, &effectOutBuffer, saturationMatrix, divisor, NULL, NULL, kvImageNoFlags); 
            } 
        } 
        if (!effectImageBuffersAreSwapped) 
            effectImage = UIGraphicsGetImageFromCurrentImageContext(); 
        UIGraphicsEndImageContext(); 
         
        if (effectImageBuffersAreSwapped) 
            effectImage = UIGraphicsGetImageFromCurrentImageContext(); 
        UIGraphicsEndImageContext(); 
    } 
     
    // Open a context to compose the output image
    UIGraphicsBeginImageContextWithOptions(self.size, NO, [[UIScreen mainScreen] scale]); 
    CGContextRef outputContext = UIGraphicsGetCurrentContext(); 
    CGContextScaleCTM(outputContext, 1.0, -1.0); 
    CGContextTranslateCTM(outputContext, 0, -self.size.height); 
     
    // Draw the original image as the base layer
    CGContextDrawImage(outputContext, imageRect, self.CGImage); 
     
    // Draw the blurred image on top (clipped by the mask, if any)
    if (hasBlur) { 
        CGContextSaveGState(outputContext); 
        if (maskImage) { 
            CGContextClipToMask(outputContext, imageRect, maskImage.CGImage); 
        } 
        CGContextDrawImage(outputContext, imageRect, effectImage.CGImage); 
        CGContextRestoreGState(outputContext); 
    } 
     
    // Apply the tint color overlay
    if (tintColor) { 
        CGContextSaveGState(outputContext); 
        CGContextSetFillColorWithColor(outputContext, tintColor.CGColor); 
        CGContextFillRect(outputContext, imageRect); 
        CGContextRestoreGState(outputContext); 
    } 
     
    // Produce the final image and end the context
    UIImage *outputImage = UIGraphicsGetImageFromCurrentImageContext(); 
    UIGraphicsEndImageContext(); 
     
    return outputImage; 
}
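Since the method lives in a UIImage category, it is called on the image itself. The values below are only an example; a light translucent white tint with a saturation factor around 1.8 gives the familiar iOS 7-style frosted look. The category name, asset name, and `imageView` property are assumptions for illustration.

// Hypothetical usage, assuming the method above is declared in a
// UIImage category such as UIImage (Blur).
UIImage *source = [UIImage imageNamed:@"photo"]; // placeholder asset name
UIImage *frosted = [source imageBluredWithRadius:30
                                       tintColor:[UIColor colorWithWhite:1.0 alpha:0.3]
                           saturationDeltaFactor:1.8
                                       maskImage:nil];
self.imageView.image = frosted;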

Original article by 奋斗. If you repost it, please credit the source: https://blog.ytso.com/5173.html
