In my application I'm building an overlay for Google Maps by converting a map of float values into pixel data, but it takes forever, while the same thing on Android is nearly instant. My code looks like this:

private func imageFromPixels(pixels: [PixelData], width: Int, height: Int) -> UIImage? {

    let bitsPerComponent = 8
    let bitsPerPixel = bitsPerComponent * 4
    let bytesPerRow = bitsPerPixel * width / 8

    let providerRef = CGDataProvider(
        data: NSData(bytes: pixels, length: height * width * 4)
    )

    let cgimage = CGImage(
        width: width,
        height: height,
        bitsPerComponent: bitsPerComponent,
        bitsPerPixel: bitsPerPixel,
        bytesPerRow: bytesPerRow,
        space: CGColorSpaceCreateDeviceRGB(),
        bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue),
        provider: providerRef!,
        decode: nil,
        shouldInterpolate: true,
        intent: .defaultIntent
    )

    if cgimage == nil {
        print("CGImage is not supposed to be nil")
        return nil
    }
    return UIImage(cgImage: cgimage!)
}

Any suggestions as to why this takes so long? I can see that it uses about 96% of the CPU. This is the function that builds the pixel data:

func fromData(pair: AllocationPair) -> UIImage? {

    let table = pair.table
    let data = pair.data

    prepareColors(allocations: table.allocations)

    let height = data.count
    let width = data[0].count

    var colors = [PixelData]()

    for row in data {
        for val in row {

            if (val == 0.0) {
                colors.append(PixelData(a: 0, r: 0, g: 0, b: 0))
                continue
            }

            if let interval = findInterval(table: table, value: val) {
                if let color = intervalColorDict[interval] {
                    colors.append(PixelData(a: color.a, r: color.r, g: color.g, b: color.b))
                }
            }
        }
    }

    return imageFromPixels(pixels: colors, width: width, height: height)
}

I tried time-profiling it, and this is the output showing where the time goes:

[Time Profiler screenshot]

Best answer

I tried your code and found that the problem is not in this function.

I think you should use a pixel struct based on UInt8 rather than on CGFloat.
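To see why the layout matters, compare the stride of the two struct variants (a minimal sketch; FloatPixel here is a hypothetical stand-in for a CGFloat-based struct, since the question doesn't show its definition). The NSData length of height * width * 4 and the 32 bitsPerPixel passed to CGImage both assume 4 bytes per pixel, which only the UInt8-based struct actually occupies:

import CoreGraphics

// Hypothetical CGFloat-based pixel struct, standing in for what the question presumably used.
struct FloatPixel {
    var a: CGFloat
    var r: CGFloat
    var g: CGFloat
    var b: CGFloat
}

// UInt8-based pixel struct, as used below.
struct BytePixel {
    var a: UInt8
    var r: UInt8
    var g: UInt8
    var b: UInt8
}

print(MemoryLayout<BytePixel>.stride)  // 4 bytes per pixel, matching the bitmap parameters
print(MemoryLayout<FloatPixel>.stride) // 32 bytes per pixel on 64-bit, eight times what CGImage is told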

I translated your code for a Cocoa application, and this is the result:

public struct PixelData {
    var a: UInt8
    var r: UInt8
    var g: UInt8
    var b: UInt8
}

func imageFromPixels(pixels: [PixelData], width: Int, height: Int) -> NSImage? {

    let bitsPerComponent = 8
    let bitsPerPixel = bitsPerComponent * 4
    let bytesPerRow = bitsPerPixel * width / 8

    let providerRef = CGDataProvider(
        data: NSData(bytes: pixels, length: height * width * 4)
    )

    let cgimage = CGImage(
        width: width,
        height: height,
        bitsPerComponent: bitsPerComponent,
        bitsPerPixel: bitsPerPixel,
        bytesPerRow: bytesPerRow,
        space: CGColorSpaceCreateDeviceRGB(),
        bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue),
        provider: providerRef!,
        decode: nil,
        shouldInterpolate: true,
        intent: .defaultIntent
    )

    if cgimage == nil {
        print("CGImage is not supposed to be nil")
        return nil
    }
    return NSImage(cgImage: cgimage!, size: NSSize(width: width, height: height))
}

var img = [PixelData]()

// Build a solid red 20x20 test image.
for _ in 0 ..< 20 {
    for _ in 0 ..< 20 {
        img.append(PixelData(a: 255, r: 255, g: 0, b: 0))
    }
}

let ns = imageFromPixels(pixels: img, width: 20, height: 20)

The code is quick and easy, and these are the debug system impact values:

[debug gauges screenshot]

I think the problem is in the part of your code that loads the pixel data; check it and make sure it works properly.
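For instance, the fromData loop in the question grows the colors array one append at a time and, when findInterval or the dictionary lookup fails, appends nothing at all, so the buffer can end up smaller than width * height. Below is a rough sketch of the same loop with the capacity reserved up front and a transparent fallback pixel; it reuses the question's own types and helpers, so it only illustrates the shape of the change, not a drop-in fix:

func fromDataPreallocated(pair: AllocationPair) -> UIImage? {
    let table = pair.table
    let data = pair.data

    prepareColors(allocations: table.allocations)

    let height = data.count
    let width = data[0].count

    var colors = [PixelData]()
    colors.reserveCapacity(width * height)  // avoid repeated reallocation while appending

    for row in data {
        for val in row {
            // Look up the colour once; fall back to a transparent pixel so the
            // buffer always ends up with exactly width * height entries.
            if val != 0.0,
               let interval = findInterval(table: table, value: val),
               let color = intervalColorDict[interval] {
                colors.append(PixelData(a: color.a, r: color.r, g: color.g, b: color.b))
            } else {
                colors.append(PixelData(a: 0, r: 0, g: 0, b: 0))
            }
        }
    }

    return imageFromPixels(pixels: colors, width: width, height: height)
}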
