[Repost] A High-Performance UITableView Built with Core Text

    Common causes of UITableView stutter include cells with too many layers, code in the cell that triggers offscreen rendering (for example using cornerRadius together with masksToBounds), misaligned pixels, and relying on UITableView's automatic cell-height calculation. This article starts from the cell's layer hierarchy and uses a WeChat-Moments-style demo to show how to keep a list scrolling smoothly; the project's source code is linked at the end. That said, premature optimization is the root of all evil: only start optimizing once the project actually hits a performance bottleneck.

    First, the view hierarchy as seen in Reveal:

    (screenshot: 2b08a274-46aa-11e6-9936-79cbdcaf3869.png)

    Then the result on a real iPhone 5 running iOS 9.3:

    (screen recording: 506d7242-46b4-11e6-9bad-6b4a5d05c0a5.gif)

    1. Drawing Text

    With Core Text, text can be drawn into a CGContextRef. The result is then turned into an image with UIGraphicsGetImageFromCurrentImageContext() and assigned to cell.contentView.layer, which reduces the number of layers in the cell.

    Drawing plain text (such as the user's nickname) into the context; the relevant comments are in the code:

    - (void)drawInContext:(CGContextRef)context withPosition:(CGPoint)p andFont:(UIFont *)font andTextColor:(UIColor *)color andHeight:(float)height andWidth:(float)width lineBreakMode:(CTLineBreakMode)lineBreakMode {
        CGSize size = CGSizeMake(width, height);
        // Flip the coordinate system (Core Text draws with a bottom-left origin)
        CGContextSetTextMatrix(context,CGAffineTransformIdentity);
        CGContextTranslateCTM(context,0,height);
        CGContextScaleCTM(context,1.0,-1.0);
        NSMutableDictionary * attributes = [StringAttributes attributeFont:font andTextColor:color lineBreakMode:lineBreakMode];
        // Create the drawing area (path)
        CGMutablePathRef path = CGPathCreateMutable();
        CGPathAddRect(path,NULL,CGRectMake(p.x, height-p.y-size.height,(size.width),(size.height)));
        // Create the attributed string
        NSMutableAttributedString *attributedStr = [[NSMutableAttributedString alloc] initWithString:self attributes:attributes];
        CFAttributedStringRef attributedString = (__bridge CFAttributedStringRef)attributedStr;
        // Create the framesetter and draw the frame
        CTFramesetterRef framesetter = CTFramesetterCreateWithAttributedString((CFAttributedStringRef)attributedString);
        CTFrameRef ctframe = CTFramesetterCreateFrame(framesetter, CFRangeMake(0,0),path,NULL);
        CTFrameDraw(ctframe,context);
        CGPathRelease(path);
        CFRelease(framesetter);
        CFRelease(ctframe);
        [[attributedStr mutableString] setString:@""];
        CGContextSetTextMatrix(context,CGAffineTransformIdentity);
        CGContextTranslateCTM(context,0, height);
        CGContextScaleCTM(context,1.0,-1.0);
    }
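
    The StringAttributes helper called above is not listed in the post. Below is a minimal sketch of what it might return, assuming it simply assembles the Core Text attribute dictionary; the class and method names come from the call site, everything else is an assumption:

    #import <CoreText/CoreText.h>

    @implementation StringAttributes
    + (NSMutableDictionary *)attributeFont:(UIFont *)font andTextColor:(UIColor *)color lineBreakMode:(CTLineBreakMode)lineBreakMode {
        // Wrap the UIFont in a CTFont so Core Text can use it
        CTFontRef ctFont = CTFontCreateWithName((__bridge CFStringRef)font.fontName, font.pointSize, NULL);
        // Carry the requested line break mode in a paragraph style
        CTParagraphStyleSetting settings[] = {
            { kCTParagraphStyleSpecifierLineBreakMode, sizeof(lineBreakMode), &lineBreakMode }
        };
        CTParagraphStyleRef paragraphStyle = CTParagraphStyleCreate(settings, 1);
        NSMutableDictionary *attributes = [@{
            (__bridge NSString *)kCTFontAttributeName : (__bridge id)ctFont,
            (__bridge NSString *)kCTForegroundColorAttributeName : (__bridge id)color.CGColor,
            (__bridge NSString *)kCTParagraphStyleAttributeName : (__bridge id)paragraphStyle
        } mutableCopy];
        CFRelease(ctFont);
        CFRelease(paragraphStyle);
        return attributes;
    }
    @end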

    Drawing the post's body text (which may contain links) into the context. I have not implemented the collapse-when-too-long behaviour yet. Unlike the plain text above, this needs an attributed string with link styling and line-by-line drawing with CTLineRef:

    - (NSMutableAttributedString *)highlightText:(NSMutableAttributedString *)coloredString{
        // Build the attributed string with link highlighting
        NSString* string = coloredString.string;
        NSRange range = NSMakeRange(0,[string length]);
        NSDataDetector *linkDetector = [NSDataDetector dataDetectorWithTypes:NSTextCheckingTypeLink error:nil];
        NSArray *matches = [linkDetector matchesInString:string options:0 range:range];
        for(NSTextCheckingResult* match in matches) {
            [self.ranges addObject:NSStringFromRange(match.range)];
            UIColor *highlightColor = UIColorFromRGB(0x297bc1);
            [coloredString addAttribute:(NSString*)kCTForegroundColorAttributeName
                                  value:(id)highlightColor.CGColor range:match.range];
        }
        return coloredString;
    }
    - (void)drawFramesetter:(CTFramesetterRef)framesetter
           attributedString:(NSAttributedString *)attributedString
                  textRange:(CFRange)textRange
                     inRect:(CGRect)rect
                    context:(CGContextRef)c {
        CGMutablePathRef path = CGPathCreateMutable();
        CGPathAddRect(path, NULL, rect);
        CTFrameRef frame = CTFramesetterCreateFrame(framesetter, textRange, path, NULL);
        CGFloat ContentHeight = CGRectGetHeight(rect);
        CFArrayRef lines = CTFrameGetLines(frame);
        NSInteger numberOfLines = CFArrayGetCount(lines);
        CGPoint lineOrigins[numberOfLines];
        CTFrameGetLineOrigins(frame, CFRangeMake(0, numberOfLines), lineOrigins);
        // Iterate over each line
        for (CFIndex lineIndex = 0; lineIndex < numberOfLines; lineIndex++) {
            CGPoint lineOrigin = lineOrigins[lineIndex];
            CTLineRef line = CFArrayGetValueAtIndex(lines, lineIndex);
            CGFloat descent = 0.0f, ascent = 0.0f, lineLeading = 0.0f;
            CTLineGetTypographicBounds((CTLineRef)line, &ascent, &descent, &lineLeading);
            CGFloat penOffset = (CGFloat)CTLineGetPenOffsetForFlush(line, NSTextAlignmentLeft, rect.size.width);
            CGFloat y = lineOrigin.y - descent - self.font.descender;
            // Set the text position for this line
            CGContextSetTextPosition(c, penOffset + self.xOffset, y - self.yOffset);
            CTLineDraw(line, c);
            // A CTRun is a span of text within a line that shares the same attributes (color, font, etc.); here it is used to detect and highlight links
            CFArrayRef runs = CTLineGetGlyphRuns(line);
            for (int j = 0; j < CFArrayGetCount(runs); j++) {
                CGFloat runAscent, runDescent, lineLeading1;
                CTRunRef run = CFArrayGetValueAtIndex(runs, j);
                NSDictionary *attributes = (__bridge NSDictionary*)CTRunGetAttributes(run);
                // Check whether this run is a link (its color differs from the normal text color)
                if (!CGColorEqualToColor((__bridge CGColorRef)([attributes valueForKey:@"CTForegroundColor"]), self.textColor.CGColor)) {
                    CFRange range = CTRunGetStringRange(run);
                    float offset = CTLineGetOffsetForStringIndex(line, range.location, NULL);
                    // Compute the link's CGRect
                    CGRect runRect;
                    runRect.size.width = CTRunGetTypographicBounds(run, CFRangeMake(0,0), &runAscent, &runDescent, &lineLeading1);
                    runRect.size.height = self.font.lineHeight;
                    runRect.origin.x = lineOrigin.x + offset+ self.xOffset;
                    runRect.origin.y = lineOrigin.y;
                    runRect.origin.y -= descent + self.yOffset;
                    // Because the coordinate system is flipped, the link's actual rect is obtained by applying a CGAffineTransform
                    CGAffineTransform transform = CGAffineTransformMakeTranslation(0, ContentHeight);
                    transform = CGAffineTransformScale(transform, 1.f, -1.f);
                    CGRect flipRect = CGRectApplyAffineTransform(runRect, transform);
                    // Store the link's CGRect
                    NSRange nRange = NSMakeRange(range.location, range.length);
                    self.framesDict[NSStringFromRange(nRange)] = [NSValue valueWithCGRect:flipRect];
                    // Group all rects belonging to the same link, used to draw the tap-highlight background
                    for (NSString *rangeString in self.ranges) {
                        NSRange range = NSRangeFromString(rangeString);
                        if (NSLocationInRange(nRange.location, range)) {
                            NSMutableArray *array = self.relationDict[rangeString];
                            if (array) {
                                [array addObject:NSStringFromCGRect(flipRect)];
                                self.relationDict[rangeString] = array;
                            } else {
                                self.relationDict[rangeString] = [NSMutableArray arrayWithObject:NSStringFromCGRect(flipRect)];
                            }
                        }
                    }
                }
            }
        }
        CFRelease(frame);
        CFRelease(path);
    }

    Putting the methods above to use:

    - (void)fillData:(CGContextRef)context {
        [self.nickname drawInContext:context withPosition:(CGPoint){kTextXOffset, kSpec} andFont:kNicknameFont
                        andTextColor:UIColorFromRGB(0x556c95) andHeight:self.nicknameSize.height
                            andWidth:self.nicknameSize.width lineBreakMode:kCTLineBreakByTruncatingTail];
        [self.drawer setText:self.contentString context:context contentSize:self.contentSize
             backgroundColor:[UIColor whiteColor] font:kContentTextFont textColor:[UIColor blackColor]
                       block:nil xOffset:kTextXOffset yOffset:kSpec * 2 + self.nicknameSize.height];
    }
    - (void)fillContents:(NSArray *)array {
        UIGraphicsBeginImageContextWithOptions(CGSizeMake(self.size.width, self.size.height), YES, 0);
        CGContextRef context = UIGraphicsGetCurrentContext();
        [UIColorFromRGB(0xffffff) set];
        CGContextFillRect(context, CGRectMake(0, 0, self.size.width, self.size.height));
        // Fill the background of any link rects that should appear highlighted
        if (array) {
            for (NSString *string in array) {
                CGRect rect = CGRectFromString(string);
                [UIColorFromRGB(0xe5e5e5) set];
                CGContextFillRect(context, rect);
            }
        }
        [self fillData:context];
        UIImage *temp = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        self.contentView.layer.contents = (__bridge id _Nullable)(temp.CGImage);
    }

    That completes the text rendering.
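
    The post never shows how nicknameSize and contentSize (used above for layout and for the cell height) are measured. A minimal sketch using CTFramesetterSuggestFrameSizeWithConstraints, with a hypothetical helper name:

    // Hypothetical helper: measure the size attributed text will occupy when wrapped to a given width
    - (CGSize)suggestedSizeForAttributedString:(NSAttributedString *)attributedString constrainedToWidth:(CGFloat)width {
        CTFramesetterRef framesetter = CTFramesetterCreateWithAttributedString((__bridge CFAttributedStringRef)attributedString);
        CFRange fitRange;
        CGSize size = CTFramesetterSuggestFrameSizeWithConstraints(framesetter,
                                                                   CFRangeMake(0, attributedString.length),
                                                                   NULL,
                                                                   CGSizeMake(width, CGFLOAT_MAX),
                                                                   &fitRange);
        CFRelease(framesetter);
        // Round up so a fractional-point frame doesn't clip the last line
        return CGSizeMake(ceil(size.width), ceil(size.height));
    }

    Caching these sizes on the model object, rather than recomputing them in cellForRowAtIndexPath:, keeps the height calculation off the scrolling hot path.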

    2. Displaying Images

    The images are the user's avatar and the photos in a post. They are displayed by adding CALayers directly to contentView.layer; concretely, a CALayer subclass implements the necessary behaviour.

    Displaying an image from a URL:

    - (void)setContentsWithURLString:(NSString *)urlString {
        self.contents = (__bridge id _Nullable)([UIImage imageNamed:@"placeholder"].CGImage);
        @weakify(self)
        SDWebImageManager *manager = [SDWebImageManager sharedManager];
        [manager downloadImageWithURL:[NSURL URLWithString:urlString]
                              options:SDWebImageCacheMemoryOnly
                             progress:nil
                            completed:^(UIImage *image, NSError *error, SDImageCacheType cacheType, BOOL finished, NSURL *imageURL) {
                                if (image) {
                                    @strongify(self)
                                    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
                                        if (!_observer) {
                                            // Register a run loop observer so the layer's contents are set when the main run loop is about to sleep, rather than while it is busy scrolling
                                            _observer = CFRunLoopObserverCreateWithHandler(kCFAllocatorDefault, kCFRunLoopBeforeWaiting | kCFRunLoopExit, false, POPAnimationApplyRunLoopOrder, ^(CFRunLoopObserverRef observer, CFRunLoopActivity activity) {
                                                self.contents = (__bridge id _Nullable)(image.CGImage);
                                            });
                                            if (_observer) {
                                                CFRunLoopAddObserver(CFRunLoopGetMain(), _observer,  kCFRunLoopCommonModes);
                                            }
                                        }
                                    });
                                    self.originImage = image;
                                }
                            }];
    }
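
    The post does not show how this layer subclass is attached to the cell; a sketch of the wiring described above, where the PhotoLayer class name and the frame values are assumptions:

    // Hypothetical wiring: add the image layer to the cell's contentView.layer and start the download
    PhotoLayer *photoLayer = [PhotoLayer layer];
    photoLayer.frame = CGRectMake(kTextXOffset, imageTop, thumbnailSide, thumbnailSide); // imageTop/thumbnailSide are placeholders
    photoLayer.contentsGravity = kCAGravityResizeAspectFill;
    photoLayer.masksToBounds = YES;
    [cell.contentView.layer addSublayer:photoLayer];
    [photoLayer setContentsWithURLString:model.imageURLString];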

    The rest is straightforward, so I won't go into it here.

    3. Playing Short Videos

    An earlier article described how to build a simple video player, and that work comes in handy here. The CALayer that shows the video's cover image is reused to display the decoded video frames.

    An NSOperationQueue is used to keep video playback smooth. The relevant code of VideoDecodeOperation, an NSOperation subclass, is below:

    - (void)main {
        @autoreleasepool {
            if (self.isCancelled) {
                _newVideoFrameBlock = nil;
                _decodeFinishedBlock = nil;
                return;
            }
            AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:self.filePath] options:nil];
            NSError *error;
            AVAssetReader* reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
            if (error) {
                return;
            }
            NSArray* videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
            AVAssetTrack* videoTrack = [videoTracks objectAtIndex:0];
            // For playback, m_pixelFormatType = kCVPixelFormatType_32BGRA
            // For other uses such as video compression, m_pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
            int m_pixelFormatType = kCVPixelFormatType_32BGRA;
            NSDictionary* options = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt: (int)m_pixelFormatType]
                                                                forKey:(id)kCVPixelBufferPixelFormatTypeKey];
            AVAssetReaderTrackOutput* videoReaderOutput = [[AVAssetReaderTrackOutput alloc]
                    initWithTrack:videoTrack outputSettings:options];
            [reader addOutput:videoReaderOutput];
            [reader startReading];
            // Make sure nominalFrameRate > 0; we have seen 0-fps videos recorded on Android
            if (self.isCancelled) {
                _newVideoFrameBlock = nil;
                _decodeFinishedBlock = nil;
                return;
            }
            while ([reader status] == AVAssetReaderStatusReading && videoTrack.nominalFrameRate > 0) {
                if (self.isCancelled) {
                    _newVideoFrameBlock = nil;
                    _decodeFinishedBlock = nil;
                    return;
                }
                CMSampleBufferRef sampleBuffer = [videoReaderOutput copyNextSampleBuffer];
                CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
                // Lock the base address of the pixel buffer
                CVPixelBufferLockBaseAddress(imageBuffer, 0);
                // Get the number of bytes per row for the pixel buffer
                size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
                // Get the pixel buffer width and height
                size_t width = CVPixelBufferGetWidth(imageBuffer);
                size_t height = CVPixelBufferGetHeight(imageBuffer);
                // Generate an image we can work with
                unsigned char* pixel = (unsigned char *)CVPixelBufferGetBaseAddress(imageBuffer);
                CGColorSpaceRef colorSpace=CGColorSpaceCreateDeviceRGB();
                CGContextRef context=CGBitmapContextCreate(pixel, width, height, 8, bytesPerRow, colorSpace,
                                                           kCGBitmapByteOrder32Little|kCGImageAlphaPremultipliedFirst);
                if (context != NULL) {
                    CGImageRef imageRef = CGBitmapContextCreateImage(context);
                    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
                    CGColorSpaceRelease(colorSpace);
                    CGContextRelease(context);
                    // Decode the image
                    size_t width = CGImageGetWidth(imageRef);
                    size_t height = CGImageGetHeight(imageRef);
                    size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
                    // CGImageGetBytesPerRow() calculates incorrectly in iOS 5.0, so defer to CGBitmapContextCreate
                    size_t bytesPerRow = 0;
                    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
                    CGColorSpaceModel colorSpaceModel = CGColorSpaceGetModel(colorSpace);
                    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
                    if (colorSpaceModel == kCGColorSpaceModelRGB) {
                        uint32_t alpha = (bitmapInfo & kCGBitmapAlphaInfoMask);
    #pragma clang diagnostic push
    #pragma clang diagnostic ignored "-Wassign-enum"
                        if (alpha == kCGImageAlphaNone) {
                            bitmapInfo &= ~kCGBitmapAlphaInfoMask;
                            bitmapInfo |= kCGImageAlphaNoneSkipFirst;
                        } else if (!(alpha == kCGImageAlphaNoneSkipFirst || alpha == kCGImageAlphaNoneSkipLast)) {
                            bitmapInfo &= ~kCGBitmapAlphaInfoMask;
                            bitmapInfo |= kCGImageAlphaPremultipliedFirst;
                        }
    #pragma clang diagnostic pop
                    }
                    CGContextRef context = CGBitmapContextCreate(NULL, width, height, bitsPerComponent,
                                                                 bytesPerRow, colorSpace, bitmapInfo);
                    CGColorSpaceRelease(colorSpace);
                    if (!context) {
                        if (self.newVideoFrameBlock) {
                            dispatch_async(dispatch_get_main_queue(), ^{
                                if (self.isCancelled) {
                                    _newVideoFrameBlock = nil;
                                    _decodeFinishedBlock = nil;
                                    return;
                                }
                                self.newVideoFrameBlock(imageRef, self.filePath);
                                CGImageRelease(imageRef);
                            });
                        }
                    } else {
                        CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, width, height), imageRef);
                        CGImageRef inflatedImageRef = CGBitmapContextCreateImage(context);
                        CGContextRelease(context);
                        if (self.newVideoFrameBlock) {
                            dispatch_async(dispatch_get_main_queue(), ^{
                                if (self.isCancelled) {
                                    _newVideoFrameBlock = nil;
                                    _decodeFinishedBlock = nil;
                                    return;
                                }
                                self.newVideoFrameBlock(inflatedImageRef, self.filePath);
                                CGImageRelease(inflatedImageRef);
                            });
                        }
                        CGImageRelease(imageRef);
                    }
                    if(sampleBuffer) {
                        CMSampleBufferInvalidate(sampleBuffer);
                        CFRelease(sampleBuffer);
                        sampleBuffer = NULL;
                    } else {
                        break;
                    }
                }
                [NSThread sleepForTimeInterval:CMTimeGetSeconds(videoTrack.minFrameDuration)];
            }
            if (self.isCancelled) {
                _newVideoFrameBlock = nil;
                _decodeFinishedBlock = nil;
                return;
            }
            if (self.decodeFinishedBlock) {
                self.decodeFinishedBlock(self.filePath);
            }
        }
    }
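
    The NSOperationQueue that drives these operations is not shown; a sketch of how VideoDecodeOperation might be enqueued, assuming a serial queue so only one video decodes at a time (the queue property and the block setters are assumptions based on how the operation uses them):

    // Hypothetical queue setup, e.g. inside VideoPlayerManager
    self.decodeQueue = [[NSOperationQueue alloc] init];
    self.decodeQueue.maxConcurrentOperationCount = 1; // decode one video at a time

    // Cancel whatever is still decoding before starting a new video
    [self.decodeQueue cancelAllOperations];
    VideoDecodeOperation *operation = [[VideoDecodeOperation alloc] init];
    operation.filePath = filePath;
    operation.newVideoFrameBlock = newVideoFrameBlock;   // delivers each decoded frame (called on the main queue)
    operation.decodeFinishedBlock = decodeFinishedBlock; // called once the file has been fully decoded
    [self.decodeQueue addOperation:operation];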

    The image is decoded explicitly because UIImage normally defers decoding until the image is about to be displayed, which can stall the main thread; decompressing it on a background thread avoids that.
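
    The same trick can be applied to still images before they are handed to a layer (SDWebImage can also decompress for you; this standalone sketch only illustrates the technique the paragraph describes):

    // Force-decode a UIImage by drawing it into a bitmap context on a background queue,
    // so the JPEG/PNG data is decompressed now rather than at display time on the main thread
    static UIImage *DecodedImage(UIImage *image) {
        CGImageRef imageRef = image.CGImage;
        size_t width = CGImageGetWidth(imageRef);
        size_t height = CGImageGetHeight(imageRef);
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, colorSpace,
                                                     kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGColorSpaceRelease(colorSpace);
        if (!context) {
            return image; // fall back to the undecoded image
        }
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
        CGImageRef decodedRef = CGBitmapContextCreateImage(context);
        CGContextRelease(context);
        UIImage *decoded = [UIImage imageWithCGImage:decodedRef scale:image.scale orientation:image.imageOrientation];
        CGImageRelease(decodedRef);
        return decoded;
    }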

    Here is how it is used:

    - (void)playVideoWithFilePath:(NSString *)filePath_ type:(NSString *)type {
        @weakify(self)
        [[VideoPlayerManager shareInstance] decodeVideo:filePath_
                                  withVideoPerDataBlock:^(CGImageRef imageData, NSString *filePath) {
                                      @strongify(self)
                                      if ([type isEqualToString:@"video"]) {
                                          if ([filePath isEqualToString:self.filePath]) {
                                              [self.sources.firstObject
                                                      setContents:(__bridge id _Nullable)(imageData)];
                                          }
                                      }
                                  } decodeFinishBlock:^(NSString *filePath){
                    [self playVideoWithFilePath:filePath type:type];
                }];
    }

    4. Miscellaneous

    1. Touch handling is implemented by overriding the following methods:

    - (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
    - (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
    - (void)touchesCancelled:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
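
    The post does not include the bodies of these methods. A sketch of how the link rects saved in framesDict/relationDict during drawing could be hit-tested in touchesEnded:, assuming the cell can reach the drawer's dictionaries (the redraw call is a placeholder for however the demo refreshes the highlight):

    - (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
        CGPoint point = [[touches anyObject] locationInView:self];
        // framesDict maps a link's string range to the rect it occupies on screen
        for (NSString *rangeString in self.drawer.framesDict) {
            CGRect linkRect = [self.drawer.framesDict[rangeString] CGRectValue];
            if (!CGRectContainsPoint(linkRect, point)) {
                continue;
            }
            // relationDict groups every rect of the same link (a link may wrap across lines)
            for (NSString *linkRange in self.drawer.relationDict) {
                if (NSLocationInRange(NSRangeFromString(rangeString).location, NSRangeFromString(linkRange))) {
                    [self fillContents:self.drawer.relationDict[linkRange]]; // redraw with the tapped link highlighted
                    break;
                }
            }
            return;
        }
    }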

    2. The on-screen FPS counter uses YYFPSLabel from the YYKit project.

    3. The test data was gathered from Weibo; the short videos are from GIF Kuaishou.

    The code for this article is at https://github.com/hawk0620/PYQFeedDemo

    Author: Hawk0620 (伯乐在线)

    Original post: https://www.cnblogs.com/axclogo/p/5682123.html