I have the following methods:
var photos: [MWPhoto] = [MWPhoto]()
func numberOfPhotosInPhotoBrowser(photoBrowser: MWPhotoBrowser!) -> UInt {
return self.photos.count
}
func photoBrowser(photoBrowser: MWPhotoBrowser!, photoAtIndex index: UInt) -> MWPhotoProtocol! {
return self.photos[index]
}
However, for the first one I get Int is not convertible to UInt (since self.photos.count is an Int), and for the second one UInt is not convertible to Int, since self.photos[...] can only take an Int as its index.
How do I properly convert UInt to Int and back?
In the first one, the return type is UInt, but you are returning an Int, since count returns an Int.
Basically, UInt has initializers that accept various value types such as Int, CGFloat, Double, or even String, and return a new UInt value:
func numberOfPhotosInPhotoBrowser(photoBrowser: MWPhotoBrowser!) -> UInt {
return UInt(self.photos.count)
}
For the second one, the array subscript expects an Int value but you are passing a UInt, so create a new Int value from the UInt:
func photoBrowser(photoBrowser: MWPhotoBrowser!, photoAtIndex index: UInt) -> MWPhotoProtocol! {
return self.photos[Int(index)]
}
// initializing Int
var someInt: Int = 8
someInt
// Converting Int to UInt
var someIntToUInt: UInt = UInt(someInt)
someIntToUInt
// initializing UInt
var someUInt: UInt = 10
someUInt
// Converting UInt to Int
var someUIntToInt: Int = Int(someUInt)
someUIntToInt
If you want an unsigned integer from a negative value, use the UInt(bitPattern:) initializer:
let intVal = -1
let uintVal = UInt(bitPattern: intVal) // uintVal == 0xffffffffffffffff
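Note that the plain UInt(_:) initializer traps at runtime when given a negative value. If you want a conversion that can fail gracefully instead of reinterpreting the bits, Swift 4 and later also offer the failable UInt(exactly:) initializer; a small sketch (not from the original answer):
let negative = -1
// UInt(negative)                   // would trap: negative values cannot be represented
if let value = UInt(exactly: negative) {
    print("converted to \(value)")
} else {
    print("-1 cannot be represented as a UInt")  // this branch runs
}
let reinterpreted = UInt(bitPattern: negative)   // 0xffffffffffffffff on a 64-bit platform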
Add this anywhere outside of a class:
extension UInt {
/// SwiftExtensionKit
var toInt: Int { return Int(self) }
}
Then simply call:
self.photos[index.toInt]
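Going the other way, a mirror extension on Int can be handy for the first delegate method. This is just a sketch of my own (the toUInt name is not part of SwiftExtensionKit), and it traps if the value is negative:
extension Int {
    /// Hypothetical counterpart of toInt; traps for negative values
    var toUInt: UInt { return UInt(self) }
}
// Usage in the first delegate method:
// return self.photos.count.toUInt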
I got so frustrated with Swift's cryptic method parameters bitPattern: and truncatingBitPattern:, and my inability to remember which one to use when, that I created the following class containing a large number of conversion methods.
I'm not necessarily recommending that you include this in your program. I'm sure many people will say that Swift is trying to protect us from ourselves and that sabotaging that effort is dumb. So maybe you should just keep this file somewhere as a kind of cheat sheet, so you can quickly determine how to do a conversion and copy the parameters into your program when needed.
Incidentally, "JDI" stands for "just do it".
/// Class containing a large number of static methods to convert an Int to a UInt or vice-versa, and
/// also to perform conversions between different bit sizes, for example UInt32 to UInt8.
///
/// Many of these "conversions" are trivial, and are only included for the sake of completeness.
///
/// A few of the conversions involving Int and UInt can give different results when run on 32-bit
/// and 64-bit systems. All of the conversion where the bit size of both the source and the target
/// are specified will always give the same result independent of platform.
public class JDI {
// MARK: - To signed Int
// To Int8
public static func ToInt8(_ x : Int8) -> Int8 {
return x
}
public static func ToInt8(_ x : Int32) -> Int8 {
return Int8(truncatingBitPattern: x)
}
public static func ToInt8(_ x : Int64) -> Int8 {
return Int8(truncatingBitPattern: x)
}
public static func ToInt8(_ x : Int) -> Int8 {
return Int8(truncatingBitPattern: x)
}
public static func ToInt8(_ x : UInt8) -> Int8 {
return Int8(bitPattern: x)
}
public static func ToInt8(_ x : UInt32) -> Int8 {
return Int8(truncatingBitPattern: x)
}
public static func ToInt8(_ x : UInt64) -> Int8 {
return Int8(truncatingBitPattern: x)
}
public static func ToInt8(_ x : UInt) -> Int8 {
return Int8(truncatingBitPattern: x)
}
// To Int32
public static func ToInt32(_ x : Int8) -> Int32 {
return Int32(x)
}
public static func ToInt32(_ x : Int32) -> Int32 {
return x
}
public static func ToInt32(_ x : Int64) -> Int32 {
return Int32(truncatingBitPattern: x)
}
public static func ToInt32(_ x : Int) -> Int32 {
return Int32(truncatingBitPattern: x)
}
public static func ToInt32(_ x : UInt8) -> Int32 {
return Int32(x)
}
public static func ToInt32(_ x : UInt32) -> Int32 {
return Int32(bitPattern: x)
}
public static func ToInt32(_ x : UInt64) -> Int32 {
return Int32(truncatingBitPattern: x)
}
public static func ToInt32(_ x : UInt) -> Int32 {
return Int32(truncatingBitPattern: x)
}
// To Int64
public static func ToInt64(_ x : Int8) -> Int64 {
return Int64(x)
}
public static func ToInt64(_ x : Int32) -> Int64 {
return Int64(x)
}
public static func ToInt64(_ x : Int64) -> Int64 {
return x
}
public static func ToInt64(_ x : Int) -> Int64 {
return Int64(x)
}
public static func ToInt64(_ x : UInt8) -> Int64 {
return Int64(x)
}
public static func ToInt64(_ x : UInt32) -> Int64 {
return Int64(x)
}
public static func ToInt64(_ x : UInt64) -> Int64 {
return Int64(bitPattern: x)
}
public static func ToInt64(_ x : UInt) -> Int64 {
return Int64(bitPattern: UInt64(x)) // Does not extend high bit of 32-bit input
}
// To Int
public static func ToInt(_ x : Int8) -> Int {
return Int(x)
}
public static func ToInt(_ x : Int32) -> Int {
return Int(x)
}
public static func ToInt(_ x : Int64) -> Int {
return Int(truncatingBitPattern: x)
}
public static func ToInt(_ x : Int) -> Int {
return x
}
public static func ToInt(_ x : UInt8) -> Int {
return Int(x)
}
public static func ToInt(_ x : UInt32) -> Int {
if MemoryLayout<Int>.size == MemoryLayout<Int32>.size {
return Int(Int32(bitPattern: x)) // For 32-bit systems: reinterpret the bit pattern, so large values become negative
}
return Int(x)
}
public static func ToInt(_ x : UInt64) -> Int {
return Int(truncatingBitPattern: x)
}
public static func ToInt(_ x : UInt) -> Int {
return Int(bitPattern: x)
}
// MARK: - To unsigned Int
// To UInt8
public static func ToUInt8(_ x : Int8) -> UInt8 {
return UInt8(bitPattern: x)
}
public static func ToUInt8(_ x : Int32) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
public static func ToUInt8(_ x : Int64) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
public static func ToUInt8(_ x : Int) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
public static func ToUInt8(_ x : UInt8) -> UInt8 {
return x
}
public static func ToUInt8(_ x : UInt32) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
public static func ToUInt8(_ x : UInt64) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
public static func ToUInt8(_ x : UInt) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
// To UInt32
public static func ToUInt32(_ x : Int8) -> UInt32 {
return UInt32(bitPattern: Int32(x)) // Extend sign bit, assume minus input significant
}
public static func ToUInt32(_ x : Int32) -> UInt32 {
return UInt32(bitPattern: x)
}
public static func ToUInt32(_ x : Int64) -> UInt32 {
return UInt32(truncatingBitPattern: x)
}
public static func ToUInt32(_ x : Int) -> UInt32 {
return UInt32(truncatingBitPattern: x)
}
public static func ToUInt32(_ x : UInt8) -> UInt32 {
return UInt32(x)
}
public static func ToUInt32(_ x : UInt32) -> UInt32 {
return x
}
public static func ToUInt32(_ x : UInt64) -> UInt32 {
return UInt32(truncatingBitPattern: x)
}
public static func ToUInt32(_ x : UInt) -> UInt32 {
return UInt32(truncatingBitPattern: x)
}
// To UInt64
public static func ToUInt64(_ x : Int8) -> UInt64 {
return UInt64(bitPattern: Int64(x)) // Extend sign bit, assume minus input significant
}
public static func ToUInt64(_ x : Int32) -> UInt64 {
return UInt64(bitPattern: Int64(x)) // Extend sign bit, assume minus input significant
}
public static func ToUInt64(_ x : Int64) -> UInt64 {
return UInt64(bitPattern: x)
}
public static func ToUInt64(_ x : Int) -> UInt64 {
return UInt64(bitPattern: Int64(x)) // Extend sign bit if necessary, assume minus input significant
}
public static func ToUInt64(_ x : UInt8) -> UInt64 {
return UInt64(x)
}
public static func ToUInt64(_ x : UInt32) -> UInt64 {
return UInt64(x)
}
public static func ToUInt64(_ x : UInt64) -> UInt64 {
return x
}
public static func ToUInt64(_ x : UInt) -> UInt64 {
return UInt64(x) // Does not extend high bit of 32-bit input
}
// To UInt
public static func ToUInt(_ x : Int8) -> UInt {
return UInt(bitPattern: Int(x)) // Extend sign bit, assume minus input significant
}
public static func ToUInt(_ x : Int32) -> UInt {
return UInt(truncatingBitPattern: Int64(x)) // Extend sign bit, assume minus input significant
}
public static func ToUInt(_ x : Int64) -> UInt {
return UInt(truncatingBitPattern: x)
}
public static func ToUInt(_ x : Int) -> UInt {
return UInt(bitPattern: x)
}
public static func ToUInt(_ x : UInt8) -> UInt {
return UInt(x)
}
public static func ToUInt(_ x : UInt32) -> UInt {
return UInt(x)
}
public static func ToUInt(_ x : UInt64) -> UInt {
return UInt(truncatingBitPattern: x)
}
public static func ToUInt(_ x : UInt) -> UInt {
return x
}
}
Here is some test code:
public func doTest() {
// To Int8
assert(JDI.ToInt8(42 as Int8) == 42)
assert(JDI.ToInt8(-13 as Int8) == -13)
assert(JDI.ToInt8(42 as Int32) == 42)
assert(JDI.ToInt8(257 as Int32) == 1)
assert(JDI.ToInt8(42 as Int64) == 42)
assert(JDI.ToInt8(257 as Int64) == 1)
assert(JDI.ToInt8(42 as Int) == 42)
assert(JDI.ToInt8(257 as Int) == 1)
assert(JDI.ToInt8(42 as UInt8) == 42)
assert(JDI.ToInt8(0xf3 as UInt8) == -13)
assert(JDI.ToInt8(42 as UInt32) == 42)
assert(JDI.ToInt8(0xfffffff3 as UInt32) == -13)
assert(JDI.ToInt8(42 as UInt64) == 42)
assert(JDI.ToInt8(UInt64.max - 12) == -13)
assert(JDI.ToInt8(42 as UInt) == 42)
assert(JDI.ToInt8(UInt.max - 12) == -13)
// To Int32
assert(JDI.ToInt32(42 as Int8) == 42)
assert(JDI.ToInt32(-13 as Int8) == -13)
assert(JDI.ToInt32(42 as Int32) == 42)
assert(JDI.ToInt32(-13 as Int32) == -13)
assert(JDI.ToInt32(42 as Int64) == 42)
assert(JDI.ToInt32(Int64(Int32.min) - 1) == Int32.max)
assert(JDI.ToInt32(42 as Int) == 42)
assert(JDI.ToInt32(-13 as Int) == -13)
assert(JDI.ToInt32(42 as UInt8) == 42)
assert(JDI.ToInt32(0xf3 as UInt8) == 243)
assert(JDI.ToInt32(42 as UInt32) == 42)
assert(JDI.ToInt32(0xfffffff3 as UInt32) == -13)
assert(JDI.ToInt32(42 as UInt64) == 42)
assert(JDI.ToInt32(UInt64.max - 12) == -13)
assert(JDI.ToInt32(42 as UInt) == 42)
assert(JDI.ToInt32(UInt.max - 12) == -13)
// To Int64
assert(JDI.ToInt64(42 as Int8) == 42)
assert(JDI.ToInt64(-13 as Int8) == -13)
assert(JDI.ToInt64(42 as Int32) == 42)
assert(JDI.ToInt64(-13 as Int32) == -13)
assert(JDI.ToInt64(42 as Int64) == 42)
assert(JDI.ToInt64(-13 as Int64) == -13)
assert(JDI.ToInt64(42 as Int) == 42)
assert(JDI.ToInt64(-13 as Int) == -13)
assert(JDI.ToInt64(42 as UInt8) == 42)
assert(JDI.ToInt64(0xf3 as UInt8) == 243)
assert(JDI.ToInt64(42 as UInt32) == 42)
assert(JDI.ToInt64(0xfffffff3 as UInt32) == 4294967283)
assert(JDI.ToInt64(42 as UInt64) == 42)
assert(JDI.ToInt64(UInt64.max - 12) == -13)
assert(JDI.ToInt64(42 as UInt) == 42)
#if arch(i386) || arch(arm)
assert(JDI.ToInt64(UInt.max - 12) == 4294967283) // For 32-bit systems
#else
assert(JDI.ToInt64(UInt.max - 12) == -13) // For 64-bit systems
#endif
// To Int
assert(JDI.ToInt(42 as Int8) == 42)
assert(JDI.ToInt(-13 as Int8) == -13)
assert(JDI.ToInt(42 as Int32) == 42)
assert(JDI.ToInt(-13 as Int32) == -13)
assert(JDI.ToInt(42 as Int64) == 42)
assert(JDI.ToInt(-13 as Int64) == -13)
assert(JDI.ToInt(42 as Int) == 42)
assert(JDI.ToInt(-13 as Int) == -13)
assert(JDI.ToInt(42 as UInt8) == 42)
assert(JDI.ToInt(0xf3 as UInt8) == 243)
assert(JDI.ToInt(42 as UInt32) == 42)
#if arch(i386) || arch(arm)
assert(JDI.ToInt(0xfffffff3 as UInt32) == -13) // For 32-bit systems
#else
assert(JDI.ToInt(0xfffffff3 as UInt32) == 4294967283) // For 64-bit systems
#endif
assert(JDI.ToInt(42 as UInt64) == 42)
assert(JDI.ToInt(UInt64.max - 12) == -13)
assert(JDI.ToInt(42 as UInt) == 42)
assert(JDI.ToInt(UInt.max - 12) == -13)
// To UInt8
assert(JDI.ToUInt8(42 as Int8) == 42)
assert(JDI.ToUInt8(-13 as Int8) == 0xf3)
assert(JDI.ToUInt8(42 as Int32) == 42)
assert(JDI.ToUInt8(-13 as Int32) == 0xf3)
assert(JDI.ToUInt8(42 as Int64) == 42)
assert(JDI.ToUInt8(-13 as Int64) == 0xf3)
assert(JDI.ToUInt8(Int64.max - 12) == 0xf3)
assert(JDI.ToUInt8(42 as Int) == 42)
assert(JDI.ToUInt8(-13 as Int) == 0xf3)
assert(JDI.ToUInt8(Int.max - 12) == 0xf3)
assert(JDI.ToUInt8(42 as UInt8) == 42)
assert(JDI.ToUInt8(0xf3 as UInt8) == 0xf3)
assert(JDI.ToUInt8(42 as UInt32) == 42)
assert(JDI.ToUInt8(0xfffffff3 as UInt32) == 0xf3)
assert(JDI.ToUInt8(42 as UInt64) == 42)
assert(JDI.ToUInt8(UInt64.max - 12) == 0xf3)
assert(JDI.ToUInt8(42 as UInt) == 42)
assert(JDI.ToUInt8(UInt.max - 12) == 0xf3)
// To UInt32
assert(JDI.ToUInt32(42 as Int8) == 42)
assert(JDI.ToUInt32(-13 as Int8) == 0xfffffff3)
assert(JDI.ToUInt32(42 as Int32) == 42)
assert(JDI.ToUInt32(-13 as Int32) == 0xfffffff3)
assert(JDI.ToUInt32(42 as Int64) == 42)
assert(JDI.ToUInt32(-13 as Int64) == 0xfffffff3)
assert(JDI.ToUInt32(Int64.max - 12) == 0xfffffff3)
assert(JDI.ToUInt32(42 as Int) == 42)
assert(JDI.ToUInt32(-13 as Int) == 0xfffffff3)
#if arch(i386) || arch(arm)
assert(JDI.ToUInt32(Int.max - 12) == 0x7ffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt32(Int.max - 12) == 0xfffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt32(42 as UInt8) == 42)
assert(JDI.ToUInt32(0xf3 as UInt8) == 0xf3)
assert(JDI.ToUInt32(42 as UInt32) == 42)
assert(JDI.ToUInt32(0xfffffff3 as UInt32) == 0xfffffff3)
assert(JDI.ToUInt32(42 as UInt64) == 42)
assert(JDI.ToUInt32(UInt64.max - 12) == 0xfffffff3)
assert(JDI.ToUInt32(42 as UInt) == 42)
assert(JDI.ToUInt32(UInt.max - 12) == 0xfffffff3)
// To UInt64
assert(JDI.ToUInt64(42 as Int8) == 42)
assert(JDI.ToUInt64(-13 as Int8) == 0xfffffffffffffff3)
assert(JDI.ToUInt64(42 as Int32) == 42)
assert(JDI.ToUInt64(-13 as Int32) == 0xfffffffffffffff3)
assert(JDI.ToUInt64(42 as Int64) == 42)
assert(JDI.ToUInt64(-13 as Int64) == 0xfffffffffffffff3)
assert(JDI.ToUInt64(Int64.max - 12) == (UInt64.max >> 1) - 12)
assert(JDI.ToUInt64(42 as Int) == 42)
assert(JDI.ToUInt64(-13 as Int) == 0xfffffffffffffff3)
#if arch(i386) || arch(arm)
assert(JDI.ToUInt64(Int.max - 12) == 0x7ffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt64(Int.max - 12) == 0x7ffffffffffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt64(42 as UInt8) == 42)
assert(JDI.ToUInt64(0xf3 as UInt8) == 0xf3)
assert(JDI.ToUInt64(42 as UInt32) == 42)
assert(JDI.ToUInt64(0xfffffff3 as UInt32) == 0xfffffff3)
assert(JDI.ToUInt64(42 as UInt64) == 42)
assert(JDI.ToUInt64(UInt64.max - 12) == 0xfffffffffffffff3)
assert(JDI.ToUInt64(42 as UInt) == 42)
#if arch(i386) || arch(arm)
assert(JDI.ToUInt64(UInt.max - 12) == 0xfffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt64(UInt.max - 12) == 0xfffffffffffffff3) // For 64-bit systems
#endif
// To UInt
assert(JDI.ToUInt(42 as Int8) == 42)
#if arch(i386) || arch(arm)
assert(JDI.ToUInt(-13 as Int8) == 0xfffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt(-13 as Int8) == 0xfffffffffffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt(42 as Int32) == 42)
#if arch(i386) || arch(arm)
assert(JDI.ToUInt(-13 as Int32) == 0xfffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt(-13 as Int32) == 0xfffffffffffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt(42 as Int64) == 42)
#if arch(i386) || arch(arm)
assert(JDI.ToUInt(-13 as Int64) == 0xfffffff3) // For 32-bit systems
assert(JDI.ToUInt(Int64.max - 12) == 0xfffffff3)
#else
assert(JDI.ToUInt(-13 as Int64) == 0xfffffffffffffff3) // For 64-bit systems
assert(JDI.ToUInt(Int64.max - 12) == 0x7ffffffffffffff3)
#endif
assert(JDI.ToUInt(42 as Int) == 42)
#if arch(i386) || arch(arm)
assert(JDI.ToUInt(Int.max - 12) == 0x7ffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt(Int.max - 12) == 0x7ffffffffffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt(42 as UInt8) == 42)
assert(JDI.ToUInt(0xf3 as UInt8) == 0xf3)
assert(JDI.ToUInt(42 as UInt32) == 42)
assert(JDI.ToUInt(0xfffffff3 as UInt32) == 0xfffffff3)
assert(JDI.ToUInt(42 as UInt64) == 42)
#if arch(i386) || arch(arm)
assert(JDI.ToUInt(UInt64.max - 12) == 0xfffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt(UInt64.max - 12) == 0xfffffffffffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt(42 as UInt) == 42)
#if arch(i386) || arch(arm)
assert(JDI.ToUInt(UInt.max - 12) == 0xfffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt(UInt.max - 12) == 0xfffffffffffffff3) // For 64-bit systems
#endif
print("\nTesting JDI complete.\n")
}
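As a side note, Swift 4 (SE-0104) removed the truncatingBitPattern: label; the equivalent initializer is spelled truncatingIfNeeded: there, while the bitPattern: initializers are unchanged. Assuming Swift 4 or later, two of the methods above could be rewritten roughly like this (the _v4 names are only for illustration):
extension JDI {
    // Swift 4+ spelling: truncatingIfNeeded: replaces truncatingBitPattern:
    public static func ToInt8_v4(_ x: Int64) -> Int8 {
        return Int8(truncatingIfNeeded: x)
    }
    public static func ToUInt32_v4(_ x: Int) -> UInt32 {
        return UInt32(truncatingIfNeeded: x)
    }
}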