A solution found online
typedef NS_ENUM(NSUInteger, webviewLoadingStatus) {
    WebViewNormalStatus = 0, // normal
    WebViewErrorStatus,      // white (blank) screen
    WebViewPendStatus,       // pending
};
// Determine whether the page is a white screen
- (void)judgeLoadingStatus:(WKWebView *)webview withBlock:(void (^)(webviewLoadingStatus status))completionBlock {
    __block webviewLoadingStatus status = WebViewPendStatus;
    if (@available(iOS 11.0, *)) {
        if (webview && [webview isKindOfClass:[WKWebView class]]) {
            CGFloat statusBarHeight = [[UIApplication sharedApplication] statusBarFrame].size.height; // status bar height
            // viewController here is presumably a UIView category helper from the original project that returns the view's owning controller
            CGFloat navigationHeight = webview.viewController.navigationController.navigationBar.frame.size.height; // navigation bar height
            WKSnapshotConfiguration *shotConfiguration = [[WKSnapshotConfiguration alloc] init];
            // Only snapshot and check the content below the navigation bar
            shotConfiguration.rect = CGRectMake(0, statusBarHeight + navigationHeight, webview.bounds.size.width, webview.bounds.size.height - navigationHeight - statusBarHeight);
            [webview takeSnapshotWithConfiguration:shotConfiguration completionHandler:^(UIImage * _Nullable snapshotImage, NSError * _Nullable error) {
                if (snapshotImage) {
                    UIImage *scaleImage = [self scaleImage:snapshotImage];
                    BOOL isWhiteScreen = [self searchEveryPixel:scaleImage];
                    if (isWhiteScreen) {
                        status = WebViewErrorStatus;
                    } else {
                        status = WebViewNormalStatus;
                    }
                }
                if (completionBlock) {
                    completionBlock(status);
                }
            }];
        }
    }
}
// Traverse every pixel; if more than 95% of the pixels are pure white, treat the page as a white screen
- (BOOL)searchEveryPixel:(UIImage *)image {
    CGImageRef cgImage = [image CGImage];
    size_t width = CGImageGetWidth(cgImage);
    size_t height = CGImageGetHeight(cgImage);
    size_t bytesPerRow = CGImageGetBytesPerRow(cgImage);
    size_t bitsPerPixel = CGImageGetBitsPerPixel(cgImage); // each pixel holds 4 bytes: r, g, b, a
    CGDataProviderRef dataProvider = CGImageGetDataProvider(cgImage);
    CFDataRef data = CGDataProviderCopyData(dataProvider);
    UInt8 *buffer = (UInt8 *)CFDataGetBytePtr(data);
    int whiteCount = 0;
    int totalCount = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            UInt8 *pt = buffer + j * bytesPerRow + i * (bitsPerPixel / 8);
            UInt8 red   = *pt;
            UInt8 green = *(pt + 1);
            UInt8 blue  = *(pt + 2);
            // UInt8 alpha = *(pt + 3);
            totalCount++;
            if (red == 255 && green == 255 && blue == 255) {
                whiteCount++;
            }
        }
    }
    if (data) {
        CFRelease(data); // CGDataProviderCopyData follows the Create/Copy rule, so release it to avoid a leak
    }
    float proportion = (float)whiteCount / totalCount;
    NSLog(@"total pixels: %d, white pixels: %d, proportion: %f", totalCount, whiteCount, proportion);
    if (proportion > 0.95) {
        return YES;
    } else {
        return NO;
    }
}
// Scale the image down
- (UIImage *)scaleImage:(UIImage *)image {
    CGFloat scale = 0.2;
    CGSize newsize;
    newsize.width = floor(image.size.width * scale);
    newsize.height = floor(image.size.height * scale);
    if (@available(iOS 10.0, *)) {
        UIGraphicsImageRenderer *renderer = [[UIGraphicsImageRenderer alloc] initWithSize:newsize];
        return [renderer imageWithActions:^(UIGraphicsImageRendererContext * _Nonnull rendererContext) {
            [image drawInRect:CGRectMake(0, 0, newsize.width, newsize.height)];
        }];
    } else {
        return image;
    }
}
Problems with the solution above:
1. It takes a screen snapshot, so it does not work for pages with a semi-transparent background.
2. CFDataGetBytePtr crashes with a certain probability (see the defensive sketch after this list).
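Regarding point 2, below is a defensive sketch of the pixel scan; it is not the original code, just an illustration of checking every pointer returned by the CoreGraphics calls before dereferencing it, staying within the buffer's actual length, and releasing the copied data. The helper name safeWhitePixelRatio: is made up for this example.

// Hypothetical helper: returns the ratio of pure-white pixels, or -1 if the bitmap data cannot be read safely.
- (CGFloat)safeWhitePixelRatio:(UIImage *)image {
    CGImageRef cgImage = image.CGImage;
    if (cgImage == NULL) return -1;
    CGDataProviderRef provider = CGImageGetDataProvider(cgImage);
    if (provider == NULL) return -1;
    CFDataRef data = CGDataProviderCopyData(provider); // may return NULL for some images
    if (data == NULL) return -1;
    const UInt8 *buffer = CFDataGetBytePtr(data);
    size_t length = CFDataGetLength(data);
    size_t width = CGImageGetWidth(cgImage);
    size_t height = CGImageGetHeight(cgImage);
    size_t bytesPerRow = CGImageGetBytesPerRow(cgImage);
    size_t bytesPerPixel = CGImageGetBitsPerPixel(cgImage) / 8;
    if (buffer == NULL || bytesPerPixel == 0) {
        CFRelease(data);
        return -1;
    }
    size_t whiteCount = 0, totalCount = 0;
    for (size_t y = 0; y < height; y++) {
        for (size_t x = 0; x < width; x++) {
            size_t offset = y * bytesPerRow + x * bytesPerPixel;
            if (offset + 2 >= length) continue; // never read past the end of the buffer
            totalCount++;
            if (buffer[offset] == 255 && buffer[offset + 1] == 255 && buffer[offset + 2] == 255) {
                whiteCount++;
            }
        }
    }
    CFRelease(data);
    return totalCount == 0 ? -1 : (CGFloat)whiteCount / totalCount;
}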
The evolved solution
- (void)webView:(WKWebView *)webView didFinishNavigation:(WKNavigation *)navigation {
    @weakify(self, webView)
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(3 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        @strongify(self, webView)
        if (self.isHalfScreen) {
            if (self.whiteScreenCheckCount > 0) {
                UIImage *img = [self shotShareImageFromView:webView];
                UIImage *scaleSmallImg = [self scaleImage:img];
                BOOL isWhiteScreen = [self searchEveryPixel:scaleSmallImg];
                self.whiteScreenCheckCount--;
                if (isWhiteScreen) {
                    DDLogInfo(@"[WebView] white screen, %@", webView.URL.absoluteString);
                    [webView reload];
                }
            }
            else {
                DDLogInfo(@"[WebView] white screen, 2 retries exhausted; show the error view, wait 1 second, then dismiss %@", webView.URL.absoluteString);
                [self.statusManager showOnlyErrorView];
                self.statusManager.currentView.userInteractionEnabled = NO;
                dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                    @strongify(self)
                    if (self.gobackBlock) {
                        self.gobackBlock();
                    }
                });
            }
        }
    });
    // Note: the method below snapshots the screen and is flawed; the approach above
    // (rendering the webview into an image) is now used to detect a white screen.
    // if ([webView.URL.absoluteString containsString:@"mallpay"]) {
    //     [self judgeLoadingStatus:webView withBlock:^(webviewLoadingStatus status) {
    //         if (status == WebViewNormalStatus) {
    //             // page state is normal
    //             // [self stopIndicatorWithImmediate:NO afterDelay:1.5f indicatorString:@"Page loaded" complete:nil];
    //             // dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
    //             //     [self.webView evaluateJavaScript:@"signjsResult()" completionHandler:^(id _Nullable response, NSError * _Nullable error) {
    //             //     }];
    //             // });
    //         } else {
    //             // a white screen may have occurred; reload the page
    //             dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
    //                 NSURLRequest *request = [[NSURLRequest alloc] initWithURL:webView.URL];
    //                 [self.webView loadRequest:request];
    //             });
    //         }
    //     }];
    // }
}
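For context, whiteScreenCheckCount, isHalfScreen, statusManager, and gobackBlock are properties of the hosting controller that the post never shows. A minimal sketch of how the retry counter might be declared and reset before each load follows; the controller name MyWebViewController and the loadPageWithURL: method are made up for illustration.

// Assumed declarations (would live in the controller's class extension); the post does not show them.
@interface MyWebViewController ()
@property (nonatomic, assign) NSInteger whiteScreenCheckCount; // remaining white-screen reload attempts
@property (nonatomic, assign) BOOL isHalfScreen;               // whether the page is shown as a half-screen panel
@end

// Hypothetical load entry point that resets the retry budget before each load
- (void)loadPageWithURL:(NSURL *)url {
    self.whiteScreenCheckCount = 2; // two reloads allowed, matching the "2 retries exhausted" log above
    [self.webView loadRequest:[NSURLRequest requestWithURL:url]];
}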
// Render a UIView into an image: a snapshot the size of the view's frame
- (UIImage *)shotShareImageFromView:(UIView *)view
{
    // Blurry approach
    // UIGraphicsBeginImageContext(CGSizeMake(view.layer.bounds.size.width, self.layer.bounds.size.height));
    // CGContextRef context = UIGraphicsGetCurrentContext();
    // [view.layer renderInContext:context];
    // UIImage *tImage = UIGraphicsGetImageFromCurrentImageContext();
    // UIGraphicsEndImageContext();
    // return tImage;

    // High-resolution approach
    // 1st parameter: size of the area; 2nd: whether it is opaque, pass NO if semi-transparency must be preserved, otherwise YES; 3rd: the screen scale
    CGSize size = CGSizeMake(view.layer.bounds.size.width, view.layer.bounds.size.height);
    UIGraphicsBeginImageContextWithOptions(size, NO, [UIScreen mainScreen].scale);
    [view.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}
/// Determine whether the page is a white screen (older, screen-snapshot-based version)
//- (void)judgeLoadingStatus:(WKWebView *)webview withBlock:(void (^)(webviewLoadingStatus status))completionBlock{
//    __block webviewLoadingStatus status = WebViewPendStatus;
//    if (@available(iOS 11.0, *)) {
//        if (webview && [webview isKindOfClass:[WKWebView class]]) {
//
//            CGFloat statusBarHeight = [[UIApplication sharedApplication] statusBarFrame].size.height; // status bar height
//            CGFloat navigationHeight = 80; // navigation bar height
//            WKSnapshotConfiguration *shotConfiguration = [[WKSnapshotConfiguration alloc] init];
//            // Only snapshot and check the content below the navigation bar
//            shotConfiguration.rect = CGRectMake(0, statusBarHeight + navigationHeight, webview.bounds.size.width, (webview.bounds.size.height - navigationHeight - statusBarHeight));
//            [webview takeSnapshotWithConfiguration:shotConfiguration completionHandler:^(UIImage * _Nullable snapshotImage, NSError * _Nullable error) {
//                if (snapshotImage) {
//                    UIImage *scaleImage = [self scaleImage:snapshotImage];
//
//                    BOOL isWhiteScreen = [self searchEveryPixel:scaleImage];
//                    if (isWhiteScreen) {
//                        status = WebViewErrorStatus;
//                    } else {
//                        status = WebViewNormalStatus;
//                    }
//                }
//                if (completionBlock) {
//                    completionBlock(status);
//                }
//            }];
//        }
//    }
//}
- (void)webView:(WKWebView *)webView didFailProvisionalNavigation:(WKNavigation *)navigation withError:(NSError *)error {
    [self.statusManager showErrorView];
}
- (BOOL)searchEveryPixel:(UIImage *)image {
    BOOL isBlankPage = YES;
    // Extract the rgb components of the webview's background color for the blank-page check
    UIColor *color = self.webView.backgroundColor;
    CGFloat bgRed = 0.0;
    CGFloat bgGreen = 0.0;
    CGFloat bgBlue = 0.0;
    CGFloat alpha = 0.0;
    [color getRed:&bgRed green:&bgGreen blue:&bgBlue alpha:&alpha];
    bgRed = (NSInteger)(bgRed * 255);
    bgGreen = (NSInteger)(bgGreen * 255);
    bgBlue = (NSInteger)(bgBlue * 255);
    // NSLog(@"__++- %@ %@ %@", @(bgRed), @(bgGreen), @(bgBlue));

    // Step 1: normally the image would be scaled down first to speed things up (the smaller it gets,
    // the larger the possible error), but the image passed in here is already scaled, so this step
    // simply draws it into a bitmap context.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast;
    CGSize thumbSize = image.size;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL, thumbSize.width, thumbSize.height, 8, thumbSize.width * 4, colorSpace, bitmapInfo);
    CGRect drawRect = CGRectMake(0, 0, thumbSize.width, thumbSize.height);
    CGContextDrawImage(context, drawRect, image.CGImage);
    CGColorSpaceRelease(colorSpace);

    // Step 2: read every pixel
    unsigned char *data = CGBitmapContextGetData(context);
    if (!data) {
        DDLogInfo(@"[WebView] white-screen check: failed to get bitmap data");
        CGContextRelease(context); // safe even if context is NULL
        return isBlankPage;
    }
    size_t bytesPerRow = CGBitmapContextGetBytesPerRow(context);
    int whiteCount = 0;
    int totalCount = 0;
    for (int x = 0; x < thumbSize.width; x++) {
        for (int y = 0; y < thumbSize.height; y++) {
            // The original code computed the offset as 4 * (x * y), which only ever samples a few
            // pixels; the row-major offset below visits every pixel.
            size_t offset = y * bytesPerRow + x * 4;
            int red = data[offset];
            int green = data[offset + 1];
            int blue = data[offset + 2];
            // int alpha = data[offset + 3];
            totalCount++;
            // NSLog(@"++- %@, %@, %@", @(red), @(green), @(blue));
            if ((bgRed == red) && (bgGreen == green) && (bgBlue == blue)) {
                // NSLog(@"++- ++- %@, %@, %@", @(red), @(green), @(blue));
                whiteCount++;
            }
        }
    }
    CGContextRelease(context);
    float proportion = (float)whiteCount / totalCount;
    // NSLog(@"total pixels: %d, background-colored pixels: %d, proportion: %f", totalCount, whiteCount, proportion);
    if (proportion < 1) {
        isBlankPage = NO;
    }
    return isBlankPage;
}
// Extract the rgb components (0-255) from a UIColor
//- (void)getRGBComponents:(CGFloat [3])components forColor:(UIColor *)color {
- (void)getRGBComponents:(int [3])components forColor:(UIColor *)color {
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    unsigned char resultingPixel[4];
    CGContextRef context = CGBitmapContextCreate(resultingPixel,
                                                 1,
                                                 1,
                                                 8,
                                                 4,
                                                 rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipLast);
    CGContextSetFillColorWithColor(context, [color CGColor]);
    CGContextFillRect(context, CGRectMake(0, 0, 1, 1));
    CGContextRelease(context);
    CGColorSpaceRelease(rgbColorSpace);
    for (int component = 0; component < 3; component++) {
        // components[component] = resultingPixel[component] / 255.0f;
        components[component] = resultingPixel[component];
    }
}
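Call it with a plain int[3] buffer, for example:

int rgb[3] = {0, 0, 0};
[self getRGBComponents:rgb forColor:[UIColor whiteColor]];
NSLog(@"r=%d g=%d b=%d", rgb[0], rgb[1], rgb[2]); // expected: r=255 g=255 b=255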
/// Scale the image down
- (UIImage *)scaleImage:(UIImage *)image {
    CGFloat scale = 0.2;
    CGSize newsize;
    newsize.width = floor(image.size.width * scale);
    newsize.height = floor(image.size.height * scale);
    if (@available(iOS 10.0, *)) {
        UIGraphicsImageRenderer *renderer = [[UIGraphicsImageRenderer alloc] initWithSize:newsize];
        return [renderer imageWithActions:^(UIGraphicsImageRendererContext * _Nonnull rendererContext) {
            [image drawInRect:CGRectMake(0, 0, newsize.width, newsize.height)];
        }];
    } else {
        return image;
    }
}
/// Traverse every pixel; if more than 95% of the pixels are white, treat the page as a white screen
//- (BOOL)searchEveryPixel:(UIImage *)image {
//    // Extract the rgb components of the background color for the white-screen check
//    UIColor *color = self.webView.backgroundColor;
//    int components[3];
//    [self getRGBComponents:components forColor:color];
//    NSLog(@"++- %@ %@ %@", @(components[0]), @(components[1]), @(components[2]));
//    int bgRed = components[0];
//    int bgGreen = components[1];
//    int bgBlue = components[2];
//
//    CGImageRef cgImage = [image CGImage];
//    size_t width = CGImageGetWidth(cgImage);
//    size_t height = CGImageGetHeight(cgImage);
//    size_t bytesPerRow = CGImageGetBytesPerRow(cgImage);
//    size_t bitsPerPixel = CGImageGetBitsPerPixel(cgImage); // each pixel holds 4 bytes: r, g, b, a
//
//    CGDataProviderRef dataProvider = CGImageGetDataProvider(cgImage);
//    CFDataRef data = CGDataProviderCopyData(dataProvider);
//    UInt8 *buffer = (UInt8 *)CFDataGetBytePtr(data);
//
//    int whiteCount = 0;
//    int totalCount = 0;
//
//    // Count all pixels and the blank (white or background-colored) pixels
//    for (int j = 0; j < height; j++) {
//        for (int i = 0; i < width; i++) {
//            UInt8 *pt = buffer + j * bytesPerRow + i * (bitsPerPixel / 8);
//            UInt8 red   = *pt;
//            UInt8 green = *(pt + 1);
//            UInt8 blue  = *(pt + 2);
//
//            totalCount++;
//            if (red == 255 && green == 255 && blue == 255) {
//                whiteCount++;
//            }
//            // if ((bgRed == red) && (bgGreen == green) && (bgBlue == blue)) {
//            //     whiteCount++;
//            // }
//        }
//    }
//    if (data) {
//        CFRelease(data);
//    }
//    float proportion = (float)whiteCount / totalCount;
//    NSLog(@"total pixels: %d, white pixels: %d, proportion: %f", totalCount, whiteCount, proportion);
//    if (proportion > 0.95) {
//        return YES;
//    } else {
//        return NO;
//    }
//}
Reference 1: getting an image's dominant or most prominent color on iOS
Getting the prominent color of an image:
https://github.com/tangdiforx/iOSPalette
pod 'iOSPalette'
Getting the color that takes up the largest share of an image:
/// Get the dominant color of an image
/// @param image the image
+ (UIColor *)GetImageMostColor:(UIImage *)image
{
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast;
    // Step 1: shrink the image first to speed up the computation (the smaller it gets, the larger the possible error)
    CGSize thumbSize = CGSizeMake(image.size.width / 6, image.size.height / 6);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL,
                                                 thumbSize.width,
                                                 thumbSize.height,
                                                 8, // bits per component
                                                 thumbSize.width * 4,
                                                 colorSpace,
                                                 bitmapInfo);
    CGRect drawRect = CGRectMake(0, 0, thumbSize.width, thumbSize.height);
    CGContextDrawImage(context, drawRect, image.CGImage);
    CGColorSpaceRelease(colorSpace);

    // Step 2: read every pixel
    unsigned char *data = CGBitmapContextGetData(context);
    if (data == NULL) {
        CGContextRelease(context); // safe even if context is NULL
        return nil;
    }
    size_t bytesPerRow = CGBitmapContextGetBytesPerRow(context);
    NSCountedSet *cls = [NSCountedSet setWithCapacity:thumbSize.width * thumbSize.height];
    for (int x = 0; x < thumbSize.width; x++) {
        for (int y = 0; y < thumbSize.height; y++) {
            // The original offset 4 * (x * y) only samples a few pixels; the row-major offset below visits them all.
            size_t offset = y * bytesPerRow + x * 4;
            int red = data[offset];
            int green = data[offset + 1];
            int blue = data[offset + 2];
            int alpha = data[offset + 3];
            if (alpha > 0) { // skip transparent pixels
                if (red == 255 && green == 255 && blue == 255) {
                    // skip white pixels
                } else {
                    NSArray *clr = @[@(red), @(green), @(blue), @(alpha)];
                    [cls addObject:clr];
                }
            }
        }
    }
    CGContextRelease(context);

    // Step 3: find the color that occurs most often
    NSEnumerator *enumerator = [cls objectEnumerator];
    NSArray *currentColorArray = nil;
    NSArray *maxColorArray = nil;
    NSUInteger maxCount = 0;
    while ((currentColorArray = [enumerator nextObject]) != nil) {
        NSUInteger tmpCount = [cls countForObject:currentColorArray];
        if (tmpCount < maxCount) continue;
        maxCount = tmpCount;
        maxColorArray = currentColorArray;
    }
    if (maxColorArray == nil) {
        return nil; // every pixel was white or transparent
    }
    return [UIColor colorWithRed:([maxColorArray[0] intValue] / 255.0f)
                           green:([maxColorArray[1] intValue] / 255.0f)
                            blue:([maxColorArray[2] intValue] / 255.0f)
                           alpha:([maxColorArray[3] intValue] / 255.0f)];
}
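A quick usage sketch follows; the class this method belongs to is not named in the source, so calling it on UIImage below is only an assumption.

// Assumed placement: a class method declared in a hypothetical UIImage category.
UIImage *snapshot = [self shotShareImageFromView:self.webView];
UIColor *mostColor = [UIImage GetImageMostColor:snapshot];
NSLog(@"dominant color: %@", mostColor);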
1. Create a bitmap context; the context must be the same size as the image being sampled.
2. Draw the image into the context.
3. Call CGBitmapContextGetData() to get the raw bitmap data.
4. The raw data is laid out pixel by pixel over width * height; locate the pixel for the sample point (4 bytes: alpha, red, green, blue) and read its RGB values.
The reference code is as follows:
// UIImage+ARGB.m
// Create by QuinceyYang
- (CGContextRef)createARGBBitmapContext {
    // Get image width, height
    size_t pixelsWide = CGImageGetWidth(self.CGImage);
    size_t pixelsHigh = CGImageGetHeight(self.CGImage);
    // Declare the number of bytes per row
    NSInteger bitmapBytesPerRow = (pixelsWide * 4);
    NSInteger bitmapByteCount = (bitmapBytesPerRow * pixelsHigh);
    // Use the generic RGB color space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    if (colorSpace == NULL) {
        fprintf(stderr, "Error allocating color space\n");
        return NULL;
    }
    // Allocate memory for image data
    void *bitmapData = malloc(bitmapByteCount);
    if (bitmapData == NULL) {
        fprintf(stderr, "Memory not allocated!");
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }
    // Create the bitmap context
    CGContextRef context = CGBitmapContextCreate(bitmapData,
                                                 pixelsWide,
                                                 pixelsHigh,
                                                 8, // bits per component
                                                 bitmapBytesPerRow,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    if (context == NULL) {
        free(bitmapData);
        fprintf(stderr, "Context not created!");
    }
    // Release the color space before returning
    CGColorSpaceRelease(colorSpace);
    return context;
}
- (UIColor *)getPixelColorAtPoint:(CGPoint)point
{
    UIColor *color = nil;
    CGImageRef inImage = self.CGImage;
    // Create a bitmap context to draw the image into
    CGContextRef cgctx = [self createARGBBitmapContext];
    if (cgctx == NULL) {
        return nil; /* error */
    }
    size_t w = CGImageGetWidth(inImage);
    size_t h = CGImageGetHeight(inImage);
    CGRect rect = {{0, 0}, {w, h}};
    // Draw the image into the bitmap context
    CGContextDrawImage(cgctx, rect, inImage);
    // Get the image data
    unsigned char *data = CGBitmapContextGetData(cgctx);
    if (data != NULL) {
        // offset locates the pixel in the data from x,y.
        // 4 for 4 bytes of data per pixel, w is the width of one row of data.
        int offset = 4 * ((w * round(point.y)) + round(point.x));
        int alpha = data[offset];
        int red = data[offset + 1];
        int green = data[offset + 2];
        int blue = data[offset + 3];
        // NSLog(@"offset: %i colors: RGB A %i %i %i %i", offset, red, green, blue, alpha);
        color = [UIColor colorWithRed:(red / 255.0f) green:(green / 255.0f) blue:(blue / 255.0f) alpha:(alpha / 255.0f)];
    }
    // Release the context
    CGContextRelease(cgctx);
    // Free the image data
    if (data) {
        free(data);
    }
    return color;
}
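Once both methods sit in the UIImage+ARGB category named in the header comment above, sampling a pixel is a one-liner; the header file name in the import is an assumption:

#import "UIImage+ARGB.h" // assumed header name for the category above

UIImage *snapshot = [self shotShareImageFromView:self.webView];
// Sample the pixel at (10, 10); note the method does not bounds-check the point.
UIColor *pixelColor = [snapshot getPixelColorAtPoint:CGPointMake(10, 10)];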
Getting the RGB values for a given color
The code is as follows:
- (NSMutableArray *)changeUIColorToRGB:(UIColor *)color
{
    NSMutableArray *RGBStrValueArr = [[NSMutableArray alloc] init];
    NSString *RGBStr = nil;
    // Get the color's description string (this relies on the "ColorSpace r g b a" description format)
    NSString *RGBValue = [NSString stringWithFormat:@"%@", color];
    // Split the description into components
    NSArray *RGBArr = [RGBValue componentsSeparatedByString:@" "];
    // Red component
    float r = [RGBArr[1] floatValue];
    RGBStr = [NSString stringWithFormat:@"%.2lf", r];
    [RGBStrValueArr addObject:RGBStr];
    // Green component
    float g = [RGBArr[2] floatValue];
    RGBStr = [NSString stringWithFormat:@"%.2lf", g];
    [RGBStrValueArr addObject:RGBStr];
    // Blue component
    float b = [RGBArr[3] floatValue];
    RGBStr = [NSString stringWithFormat:@"%.2lf", b];
    [RGBStrValueArr addObject:RGBStr];
    // Alpha component
    float a = [RGBArr[4] floatValue];
    RGBStr = [NSString stringWithFormat:@"%.2lf", a];
    [RGBStrValueArr addObject:RGBStr];
    // Return the array holding the RGB values
    return RGBStrValueArr;
}
After some more searching I also found the relevant API for this, so here is an update; the code is as follows:
- (NSArray *)getRGBWithColor:(UIColor *)color
{
    CGFloat red = 0.0;
    CGFloat green = 0.0;
    CGFloat blue = 0.0;
    CGFloat alpha = 0.0;
    [color getRed:&red green:&green blue:&blue alpha:&alpha];
    return @[@(red), @(green), @(blue), @(alpha)];
}
When calling it, just pass in a color, for example redColor; the return value is an array containing the corresponding RGB components plus the alpha value, all in the 0-1 range that getRed:green:blue:alpha: reports.
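For example:

NSArray *rgba = [self getRGBWithColor:[UIColor redColor]];
NSLog(@"%@", rgba); // logs 1, 0, 0, 1 for red, green, blue, alpha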