import Foundation
let bitSize = UInt16.bitWidth // 16
// Print the binary representation of the bits, zero-padded to bitSize digits
func printBinary(_ bits: UInt16) {
    let binary = String(bits, radix: 2).leftPadding(toLength: bitSize, withPad: "0")
    print(binary)
}
// Find the index of the first set bit (least significant), or nil if none
func findFirstSetBit(_ bits: UInt16) -> Int? {
    for i in 0..<bitSize {
        if (bits >> i) & 1 == 1 {
            return i
        }
    }
    return nil
}
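// Aside: the standard library exposes this directly. A minimal sketch, using
// BinaryInteger's trailingZeroBitCount (which equals bitWidth for a zero
// value, hence the guard); the name findFirstSetBitFast is ours, for
// illustration only.
func findFirstSetBitFast(_ bits: UInt16) -> Int? {
    bits == 0 ? nil : bits.trailingZeroBitCount
}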
// Print the indexes of all set bits, least significant first
func printSetBitIndexes(_ bits: UInt16) {
    let indexes = (0..<bitSize).filter { (bits >> $0) & 1 == 1 }
    print(indexes.map(String.init).joined(separator: " "))
}
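// Related: when only the number of set bits matters, FixedWidthInteger's
// nonzeroBitCount does the counting in one call. A small sketch; the helper
// name popCount is hypothetical.
func popCount(_ bits: UInt16) -> Int {
    bits.nonzeroBitCount
}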
// String padding extension
extension String {
    func leftPadding(toLength: Int, withPad character: Character) -> String {
        let padCount = toLength - self.count
        guard padCount > 0 else { return self }
        return String(repeating: character, count: padCount) + self
    }
}
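// Quick sanity check of the padding helper in isolation (an addition of this
// write-up, not part of the original demo):
assert("101".leftPadding(toLength: 8, withPad: "0") == "00000101")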
// Build a value with bits 3, 5, 11, and 14 set
var bits: UInt16 = 0
bits |= 1 << 3
bits |= 1 << 5
bits |= 1 << 11
bits |= 1 << 14
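// The same shift pattern handles clearing (& with the inverted mask) and
// toggling (^). A sketch with hypothetical helper names, kept out of the
// demo below so its output is unchanged:
func clearingBit(_ value: UInt16, at index: Int) -> UInt16 {
    value & ~(1 << index)
}
func togglingBit(_ value: UInt16, at index: Int) -> UInt16 {
    value ^ (1 << index)
}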
printBinary(bits)
if let first = findFirstSetBit(bits) {
    print("First set bit at index: \(first)")
} else {
    print("No bits are set.")
}
print("All the set bits indexes:")
printSetBitIndexes(bits)
/*
run:
0100100000101000
First set bit at index: 3
All set bit indexes:
3 5 11 14
*/