-
-
Save shadcn/de147c42d7b3063ef7bc to your computer and use it in GitHub Desktop.
// Creates a UIColor from a 6-digit hex string ("RRGGBB", optional "#" prefix).
// Strings that are not exactly 6 hex digits fall back to opaque gray.
// Fixes vs. original: strips the "| |" table-markup residue that made every
// line invalid Swift, replaces Swift-1-only countElements/substringFromIndex(Int),
// passes CGFloat (not Float) to UIColor's initializer, and parses the whole
// value in a single scan instead of three per-component scans.
func colorWithHexString (hex:String) -> UIColor {
    var cString:String = hex.stringByTrimmingCharactersInSet(NSCharacterSet.whitespaceAndNewlineCharacterSet()).uppercaseString
    if cString.hasPrefix("#") {
        cString = (cString as NSString).substringFromIndex(1)
    }
    // Only 24-bit RGB is supported; anything else yields the gray fallback.
    if cString.characters.count != 6 {
        return UIColor.grayColor()
    }
    var rgbValue: UInt32 = 0
    NSScanner(string: cString).scanHexInt(&rgbValue)
    return UIColor(
        red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
        green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
        blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
        alpha: 1.0)
}
Consider parsing the entire hex string at once, as well as supporting short hex colors and ARGB:
import UIKit
extension String {
    /// Parses the receiver as a hex color — "RGB" (12-bit), "RRGGBB" (24-bit),
    /// or "AARRGGBB" (32-bit), "#" prefix optional — and returns the matching
    /// UIColor. Any other digit count yields a clear color.
    var hexColor: UIColor {
        // Trim everything that is not alphanumeric from both ends (handles "#").
        let digits = self.stringByTrimmingCharactersInSet(NSCharacterSet.alphanumericCharacterSet().invertedSet)
        var packed: UInt32 = 0
        NSScanner(string: digits).scanHexInt(&packed)
        let alpha: UInt32
        let red: UInt32
        let green: UInt32
        let blue: UInt32
        switch digits.characters.count {
        case 3:
            // Shorthand RGB: each 4-bit nibble expands to a byte (0xF * 17 == 0xFF).
            alpha = 255
            red = (packed >> 8) * 17
            green = (packed >> 4 & 0xF) * 17
            blue = (packed & 0xF) * 17
        case 6:
            // Standard RRGGBB, fully opaque.
            alpha = 255
            red = packed >> 16
            green = packed >> 8 & 0xFF
            blue = packed & 0xFF
        case 8:
            // AARRGGBB with the alpha in the top byte.
            alpha = packed >> 24
            red = packed >> 16 & 0xFF
            green = packed >> 8 & 0xFF
            blue = packed & 0xFF
        default:
            return UIColor.clearColor()
        }
        return UIColor(red: CGFloat(red) / 255, green: CGFloat(green) / 255, blue: CGFloat(blue) / 255, alpha: CGFloat(alpha) / 255)
    }
}
"#f00".hexColor // r 1.0 g 0.0 b 0.0 a 1.0
"#be1337".hexColor // r 0.745 g 0.075 b 0.216 a 1.0
"#12345678".hexColor // r 0.204 g 0.337 b 0.471 a 0.071
It can also be a good idea to make the result nillable, so that the caller can determine whether parsing failed, or if the string was actually the color you defaulted to: https://gist.github.com/blixt/821a0748257dc8d3581f
Thanks @nabilmaad
UIColor extension. Xcode 7 & Swift 2
extension UIColor {
/// Creates a UIColor from a hex string in "RGB" (12-bit), "RRGGBB" (24-bit),
/// or "AARRGGBB" (32-bit) form; non-alphanumeric characters at the ends
/// (e.g. a leading "#") are trimmed before parsing.
convenience init(hexString: String) {
let hex = hexString.stringByTrimmingCharactersInSet(NSCharacterSet.alphanumericCharacterSet().invertedSet)
var int = UInt32()
// Parse the whole string as one hex number; if scanning fails, int stays 0.
NSScanner(string: hex).scanHexInt(&int)
let a, r, g, b: UInt32
switch hex.characters.count {
case 3: // RGB (12-bit): each nibble expands to a full byte (0xF * 17 == 0xFF)
(a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
case 6: // RGB (24-bit), fully opaque
(a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
case 8: // ARGB (32-bit), alpha in the top byte
(a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
default:
// NOTE(review): this fallback gives alpha 1/255 — a nearly invisible
// color rather than a visible default; confirm that is intended.
(a, r, g, b) = (1, 1, 1, 0)
}
self.init(red: CGFloat(r) / 255, green: CGFloat(g) / 255, blue: CGFloat(b) / 255, alpha: CGFloat(a) / 255)
}
}
Thanks @nabilmaad, @blixt, @lucatorella — they work very well.
@lucatorella 👍
This worked perfectly for NSColor Swift 2:
// Builds an NSColor from a "#RRGGBB" (or bare "RRGGBB") hex string.
// Anything that is not exactly 6 hex digits after trimming produces gray.
func colorWithHexString (hex:String) -> NSColor {
    var hexDigits = hex.stringByTrimmingCharactersInSet(NSCharacterSet.whitespaceAndNewlineCharacterSet()).uppercaseString
    if hexDigits.hasPrefix("#") {
        hexDigits = (hexDigits as NSString).substringFromIndex(1)
    }
    guard hexDigits.characters.count == 6 else {
        return NSColor.grayColor()
    }
    var packed: UInt32 = 0
    NSScanner(string: hexDigits).scanHexInt(&packed)
    // Unpack the three bytes into 0...1 components.
    let red = CGFloat((packed >> 16) & 0xFF) / 255.0
    let green = CGFloat((packed >> 8) & 0xFF) / 255.0
    let blue = CGFloat(packed & 0xFF) / 255.0
    return NSColor(red: red, green: green, blue: blue, alpha: 1.0)
}
Update version for Swift 2:
extension UIColor {
    /// Creates a UIColor from a "#RRGGBB"/"RRGGBB" hex string.
    /// Strings that are not exactly 6 hex digits produce 50% gray.
    convenience init(hexString: String) {
        var digits = hexString.stringByTrimmingCharactersInSet(NSCharacterSet.whitespaceAndNewlineCharacterSet()).uppercaseString
        if digits.hasPrefix("#") {
            digits = (digits as NSString).substringFromIndex(1)
        }
        if digits.characters.count != 6 {
            self.init(white: 0.5, alpha: 1.0)
        } else {
            // Each component is scanned independently, so a malformed pair
            // leaves only that component at 0 (matches the original behavior).
            let ns = digits as NSString
            var red: CUnsignedInt = 0
            var green: CUnsignedInt = 0
            var blue: CUnsignedInt = 0
            NSScanner(string: ns.substringToIndex(2)).scanHexInt(&red)
            NSScanner(string: (ns.substringFromIndex(2) as NSString).substringToIndex(2)).scanHexInt(&green)
            NSScanner(string: (ns.substringFromIndex(4) as NSString).substringToIndex(2)).scanHexInt(&blue)
            self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: CGFloat(1))
        }
    }
}
Thanks guys, you just saved me quite some work! 👍
Thanks!
With Hexstring and Alpha separate.
extension UIColor {
    /// Creates a UIColor from an "RGB" or "RRGGBB" hex string (with or without
    /// a "#" prefix), with the alpha supplied as a separate parameter
    /// (defaults to fully opaque).
    convenience init(hexString: String, alpha: Double = 1.0) {
        let digits = hexString.stringByTrimmingCharactersInSet(NSCharacterSet.alphanumericCharacterSet().invertedSet)
        var packed: UInt32 = 0
        NSScanner(string: digits).scanHexInt(&packed)
        let red: UInt32
        let green: UInt32
        let blue: UInt32
        switch digits.characters.count {
        case 3:
            // Shorthand RGB: nibble * 17 expands 0xF to 0xFF.
            red = (packed >> 8) * 17
            green = (packed >> 4 & 0xF) * 17
            blue = (packed & 0xF) * 17
        case 6:
            red = packed >> 16
            green = packed >> 8 & 0xFF
            blue = packed & 0xFF
        default:
            // Unsupported length: near-black fallback (r = g = 1/255, b = 0).
            red = 1
            green = 1
            blue = 0
        }
        self.init(red: CGFloat(red) / 255, green: CGFloat(green) / 255, blue: CGFloat(blue) / 255, alpha: CGFloat(255 * alpha) / 255)
    }
}
UIColor.init(hexString: "000000", alpha: 0.5) // r 0.0 g 0.0 b 0.0 a 0.5
UIColor.init(hexString: "#fff", alpha: 0.2) // r 1.0 g 1.0 b 1.0 a 0.2
UIColor.init(hexString: "00ff00", alpha: 1.0) // r 0.0 g 1.0 b 0.0 a 1.0
UIColor.init(hexString: "0000ff") // r 0.0 g 0.0 b 1.0 a 1.0 with alpha optional which defaults to 1
I've made another UIColor extension in Swift 2.2, wish to help some one:
extension UIColor {
    /// Creates a UIColor from a packed 0xRRGGBB integer, with an optional
    /// alpha (defaults to fully opaque).
    convenience init(hex: Int, alpha: Double = 1.0) {
        // Unpack each byte and normalize to 0...1.
        let red = CGFloat((hex >> 16) & 0xFF) / 255.0
        let green = CGFloat((hex >> 8) & 0xFF) / 255.0
        let blue = CGFloat(hex & 0xFF) / 255.0
        self.init(red: red, green: green, blue: blue, alpha: CGFloat(255 * alpha) / 255)
    }
}
Sample to use:
UIColor(hex: 0xffffff) // r 1.0 g 1.0 b 1.0 a 1.0
UIColor(hex: 0xffffff, alpha: 0.5) // r 1.0 g 1.0 b 1.0 a 0.5
P.S.: Sorry for the abnormal code formatting — the 'Insert code' feature doesn't work well for me.
For Swift 3.0
extension String {
    /// Interprets the receiver as a hex color — "RGB" (12-bit), "RRGGBB"
    /// (24-bit), or "AARRGGBB" (32-bit); a "#" prefix is allowed. Any other
    /// digit count yields `.clear`. (Swift 3)
    var hexColor: UIColor {
        let digits = trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var packed: UInt32 = 0
        Scanner(string: digits).scanHexInt32(&packed)
        let alpha: UInt32, red: UInt32, green: UInt32, blue: UInt32
        switch digits.characters.count {
        case 3: // shorthand RGB: nibble * 17 expands 0xF to 0xFF
            (alpha, red, green, blue) = (255, (packed >> 8) * 17, (packed >> 4 & 0xF) * 17, (packed & 0xF) * 17)
        case 6: // RRGGBB, fully opaque
            (alpha, red, green, blue) = (255, packed >> 16, packed >> 8 & 0xFF, packed & 0xFF)
        case 8: // AARRGGBB, alpha in the top byte
            (alpha, red, green, blue) = (packed >> 24, packed >> 16 & 0xFF, packed >> 8 & 0xFF, packed & 0xFF)
        default:
            return .clear
        }
        return UIColor(red: CGFloat(red) / 255, green: CGFloat(green) / 255, blue: CGFloat(blue) / 255, alpha: CGFloat(alpha) / 255)
    }
}
@arvidurs Thanks 👍
thanks @arvidurs
@arvidurs Really helped!
Just want to add that you have to put this extension at file scope, outside of any class. Otherwise you will probably get a compile error.
nice work! @arvidurs
label.textColor = "#007aff".hexColor
in swift 4.0
/// Creates a UIColor from a "#RRGGBB" / "RRGGBB" hex string (Swift 4).
/// Strings that are not exactly 6 hex digits fall back to opaque white.
convenience init(hex: String)
{
    // Bug fix: the original force-cast `NSCharacterSet.whitespacesAndNewlines
    // as! NSMutableCharacterSet` casts an immutable character set to its
    // mutable subclass and crashes at runtime. Mutating a value-type
    // CharacterSet copy is both safe and simpler.
    var characterSet = CharacterSet.whitespacesAndNewlines
    characterSet.insert(charactersIn: "#")
    let cString = hex.trimmingCharacters(in: characterSet).uppercased()
    if (cString.count != 6) {
        self.init(white: 1.0, alpha: 1.0)
    } else {
        var rgbValue: UInt32 = 0
        Scanner(string: cString).scanHexInt32(&rgbValue)
        self.init(red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
                  green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
                  blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
                  alpha: CGFloat(1.0))
    }
}
Thanks @arshad and @nabilmaad. The version updated for Swift 2.0 works fine for me, except for the character-count `if` statement.