diff --git a/hack/vendor.sh b/hack/vendor.sh
index 0616d9bbe2..1626de87f3 100755
--- a/hack/vendor.sh
+++ b/hack/vendor.sh
@@ -38,6 +38,11 @@
 clone git github.com/hashicorp/consul v0.5.2
 clone git github.com/docker/distribution cd8ff553b6b1911be23dfeabb73e33108bcbf147
 clone git github.com/vbatts/tar-split v0.9.4
+clone git github.com/docker/notary 77bced079e83d80f40c1f0a544b1a8a3b97fb052
+clone git github.com/endophage/gotuf 374908abc8af7e953a2813c5c2b3944ab625ca68
+clone git github.com/tent/canonical-json-go 96e4ba3a7613a1216cbd1badca4efe382adea337
+clone git github.com/agl/ed25519 d2b94fd789ea21d12fac1a4443dd3a3f79cda72c
+
 clone git github.com/opencontainers/runc v0.0.2 # libcontainer
 # libcontainer deps (see src/github.com/docker/libcontainer/update-vendor.sh)
 clone git github.com/coreos/go-systemd v2
diff --git a/vendor/src/github.com/agl/ed25519/LICENSE b/vendor/src/github.com/agl/ed25519/LICENSE
new file mode 100644
index 0000000000..7448756763
--- /dev/null
+++ b/vendor/src/github.com/agl/ed25519/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2012 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+   * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+   * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/src/github.com/agl/ed25519/ed25519.go b/vendor/src/github.com/agl/ed25519/ed25519.go
new file mode 100644
index 0000000000..700938ddda
--- /dev/null
+++ b/vendor/src/github.com/agl/ed25519/ed25519.go
@@ -0,0 +1,125 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package ed25519 implements the Ed25519 signature algorithm. See
+// http://ed25519.cr.yp.to/.
+package ed25519
+
+// This code is a port of the public domain, "ref10" implementation of ed25519
+// from SUPERCOP.
+
+import (
+	"crypto/sha512"
+	"crypto/subtle"
+	"io"
+
+	"github.com/agl/ed25519/edwards25519"
+)
+
+const (
+	PublicKeySize  = 32
+	PrivateKeySize = 64
+	SignatureSize  = 64
+)
+
+// GenerateKey generates a public/private key pair using randomness from rand.
+func GenerateKey(rand io.Reader) (publicKey *[PublicKeySize]byte, privateKey *[PrivateKeySize]byte, err error) {
+	privateKey = new([64]byte)
+	publicKey = new([32]byte)
+	_, err = io.ReadFull(rand, privateKey[:32])
+	if err != nil {
+		return nil, nil, err
+	}
+
+	h := sha512.New()
+	h.Write(privateKey[:32])
+	digest := h.Sum(nil)
+
+	digest[0] &= 248
+	digest[31] &= 127
+	digest[31] |= 64
+
+	var A edwards25519.ExtendedGroupElement
+	var hBytes [32]byte
+	copy(hBytes[:], digest)
+	edwards25519.GeScalarMultBase(&A, &hBytes)
+	A.ToBytes(publicKey)
+
+	copy(privateKey[32:], publicKey[:])
+	return
+}
+
+// Sign signs the message with privateKey and returns a signature.
+func Sign(privateKey *[PrivateKeySize]byte, message []byte) *[SignatureSize]byte {
+	h := sha512.New()
+	h.Write(privateKey[:32])
+
+	var digest1, messageDigest, hramDigest [64]byte
+	var expandedSecretKey [32]byte
+	h.Sum(digest1[:0])
+	copy(expandedSecretKey[:], digest1[:])
+	expandedSecretKey[0] &= 248
+	expandedSecretKey[31] &= 63
+	expandedSecretKey[31] |= 64
+
+	h.Reset()
+	h.Write(digest1[32:])
+	h.Write(message)
+	h.Sum(messageDigest[:0])
+
+	var messageDigestReduced [32]byte
+	edwards25519.ScReduce(&messageDigestReduced, &messageDigest)
+	var R edwards25519.ExtendedGroupElement
+	edwards25519.GeScalarMultBase(&R, &messageDigestReduced)
+
+	var encodedR [32]byte
+	R.ToBytes(&encodedR)
+
+	h.Reset()
+	h.Write(encodedR[:])
+	h.Write(privateKey[32:])
+	h.Write(message)
+	h.Sum(hramDigest[:0])
+	var hramDigestReduced [32]byte
+	edwards25519.ScReduce(&hramDigestReduced, &hramDigest)
+
+	var s [32]byte
+	edwards25519.ScMulAdd(&s, &hramDigestReduced, &expandedSecretKey, &messageDigestReduced)
+
+	signature := new([64]byte)
+	copy(signature[:], encodedR[:])
+	copy(signature[32:], s[:])
+	return signature
+}
+
+// Verify returns true iff sig is a valid signature of message by publicKey.
+func Verify(publicKey *[PublicKeySize]byte, message []byte, sig *[SignatureSize]byte) bool {
+	if sig[63]&224 != 0 {
+		return false
+	}
+
+	var A edwards25519.ExtendedGroupElement
+	if !A.FromBytes(publicKey) {
+		return false
+	}
+
+	h := sha512.New()
+	h.Write(sig[:32])
+	h.Write(publicKey[:])
+	h.Write(message)
+	var digest [64]byte
+	h.Sum(digest[:0])
+
+	var hReduced [32]byte
+	edwards25519.ScReduce(&hReduced, &digest)
+
+	var R edwards25519.ProjectiveGroupElement
+	var b [32]byte
+	copy(b[:], sig[32:])
+	edwards25519.GeDoubleScalarMultVartime(&R, &hReduced, &A, &b)
+
+	var checkR [32]byte
+	R.ToBytes(&checkR)
+	return subtle.ConstantTimeCompare(sig[:32], checkR[:]) == 1
+}
diff --git a/vendor/src/github.com/agl/ed25519/edwards25519/const.go b/vendor/src/github.com/agl/ed25519/edwards25519/const.go
new file mode 100644
index 0000000000..ea5b77a710
--- /dev/null
+++ b/vendor/src/github.com/agl/ed25519/edwards25519/const.go
@@ -0,0 +1,1411 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+ +package edwards25519 + +var d = FieldElement{ + -10913610, 13857413, -15372611, 6949391, 114729, -8787816, -6275908, -3247719, -18696448, -12055116, +} + +var d2 = FieldElement{ + -21827239, -5839606, -30745221, 13898782, 229458, 15978800, -12551817, -6495438, 29715968, 9444199, +} + +var SqrtM1 = FieldElement{ + -32595792, -7943725, 9377950, 3500415, 12389472, -272473, -25146209, -2005654, 326686, 11406482, +} + +var A = FieldElement{ + 486662, 0, 0, 0, 0, 0, 0, 0, 0, 0, +} + +var bi = [8]PreComputedGroupElement{ + { + FieldElement{25967493, -14356035, 29566456, 3660896, -12694345, 4014787, 27544626, -11754271, -6079156, 2047605}, + FieldElement{-12545711, 934262, -2722910, 3049990, -727428, 9406986, 12720692, 5043384, 19500929, -15469378}, + FieldElement{-8738181, 4489570, 9688441, -14785194, 10184609, -12363380, 29287919, 11864899, -24514362, -4438546}, + }, + { + FieldElement{15636291, -9688557, 24204773, -7912398, 616977, -16685262, 27787600, -14772189, 28944400, -1550024}, + FieldElement{16568933, 4717097, -11556148, -1102322, 15682896, -11807043, 16354577, -11775962, 7689662, 11199574}, + FieldElement{30464156, -5976125, -11779434, -15670865, 23220365, 15915852, 7512774, 10017326, -17749093, -9920357}, + }, + { + FieldElement{10861363, 11473154, 27284546, 1981175, -30064349, 12577861, 32867885, 14515107, -15438304, 10819380}, + FieldElement{4708026, 6336745, 20377586, 9066809, -11272109, 6594696, -25653668, 12483688, -12668491, 5581306}, + FieldElement{19563160, 16186464, -29386857, 4097519, 10237984, -4348115, 28542350, 13850243, -23678021, -15815942}, + }, + { + FieldElement{5153746, 9909285, 1723747, -2777874, 30523605, 5516873, 19480852, 5230134, -23952439, -15175766}, + FieldElement{-30269007, -3463509, 7665486, 10083793, 28475525, 1649722, 20654025, 16520125, 30598449, 7715701}, + FieldElement{28881845, 14381568, 9657904, 3680757, -20181635, 7843316, -31400660, 1370708, 29794553, -1409300}, + }, + { + FieldElement{-22518993, -6692182, 14201702, -8745502, -23510406, 8844726, 18474211, -1361450, -13062696, 13821877}, + FieldElement{-6455177, -7839871, 3374702, -4740862, -27098617, -10571707, 31655028, -7212327, 18853322, -14220951}, + FieldElement{4566830, -12963868, -28974889, -12240689, -7602672, -2830569, -8514358, -10431137, 2207753, -3209784}, + }, + { + FieldElement{-25154831, -4185821, 29681144, 7868801, -6854661, -9423865, -12437364, -663000, -31111463, -16132436}, + FieldElement{25576264, -2703214, 7349804, -11814844, 16472782, 9300885, 3844789, 15725684, 171356, 6466918}, + FieldElement{23103977, 13316479, 9739013, -16149481, 817875, -15038942, 8965339, -14088058, -30714912, 16193877}, + }, + { + FieldElement{-33521811, 3180713, -2394130, 14003687, -16903474, -16270840, 17238398, 4729455, -18074513, 9256800}, + FieldElement{-25182317, -4174131, 32336398, 5036987, -21236817, 11360617, 22616405, 9761698, -19827198, 630305}, + FieldElement{-13720693, 2639453, -24237460, -7406481, 9494427, -5774029, -6554551, -15960994, -2449256, -14291300}, + }, + { + FieldElement{-3151181, -5046075, 9282714, 6866145, -31907062, -863023, -18940575, 15033784, 25105118, -7894876}, + FieldElement{-24326370, 15950226, -31801215, -14592823, -11662737, -5090925, 1573892, -2625887, 2198790, -15804619}, + FieldElement{-3099351, 10324967, -2241613, 7453183, -5446979, -2735503, -13812022, -16236442, -32461234, -12290683}, + }, +} + +var base = [32][8]PreComputedGroupElement{ + { + { + FieldElement{25967493, -14356035, 29566456, 3660896, -12694345, 4014787, 27544626, -11754271, -6079156, 
2047605}, + FieldElement{-12545711, 934262, -2722910, 3049990, -727428, 9406986, 12720692, 5043384, 19500929, -15469378}, + FieldElement{-8738181, 4489570, 9688441, -14785194, 10184609, -12363380, 29287919, 11864899, -24514362, -4438546}, + }, + { + FieldElement{-12815894, -12976347, -21581243, 11784320, -25355658, -2750717, -11717903, -3814571, -358445, -10211303}, + FieldElement{-21703237, 6903825, 27185491, 6451973, -29577724, -9554005, -15616551, 11189268, -26829678, -5319081}, + FieldElement{26966642, 11152617, 32442495, 15396054, 14353839, -12752335, -3128826, -9541118, -15472047, -4166697}, + }, + { + FieldElement{15636291, -9688557, 24204773, -7912398, 616977, -16685262, 27787600, -14772189, 28944400, -1550024}, + FieldElement{16568933, 4717097, -11556148, -1102322, 15682896, -11807043, 16354577, -11775962, 7689662, 11199574}, + FieldElement{30464156, -5976125, -11779434, -15670865, 23220365, 15915852, 7512774, 10017326, -17749093, -9920357}, + }, + { + FieldElement{-17036878, 13921892, 10945806, -6033431, 27105052, -16084379, -28926210, 15006023, 3284568, -6276540}, + FieldElement{23599295, -8306047, -11193664, -7687416, 13236774, 10506355, 7464579, 9656445, 13059162, 10374397}, + FieldElement{7798556, 16710257, 3033922, 2874086, 28997861, 2835604, 32406664, -3839045, -641708, -101325}, + }, + { + FieldElement{10861363, 11473154, 27284546, 1981175, -30064349, 12577861, 32867885, 14515107, -15438304, 10819380}, + FieldElement{4708026, 6336745, 20377586, 9066809, -11272109, 6594696, -25653668, 12483688, -12668491, 5581306}, + FieldElement{19563160, 16186464, -29386857, 4097519, 10237984, -4348115, 28542350, 13850243, -23678021, -15815942}, + }, + { + FieldElement{-15371964, -12862754, 32573250, 4720197, -26436522, 5875511, -19188627, -15224819, -9818940, -12085777}, + FieldElement{-8549212, 109983, 15149363, 2178705, 22900618, 4543417, 3044240, -15689887, 1762328, 14866737}, + FieldElement{-18199695, -15951423, -10473290, 1707278, -17185920, 3916101, -28236412, 3959421, 27914454, 4383652}, + }, + { + FieldElement{5153746, 9909285, 1723747, -2777874, 30523605, 5516873, 19480852, 5230134, -23952439, -15175766}, + FieldElement{-30269007, -3463509, 7665486, 10083793, 28475525, 1649722, 20654025, 16520125, 30598449, 7715701}, + FieldElement{28881845, 14381568, 9657904, 3680757, -20181635, 7843316, -31400660, 1370708, 29794553, -1409300}, + }, + { + FieldElement{14499471, -2729599, -33191113, -4254652, 28494862, 14271267, 30290735, 10876454, -33154098, 2381726}, + FieldElement{-7195431, -2655363, -14730155, 462251, -27724326, 3941372, -6236617, 3696005, -32300832, 15351955}, + FieldElement{27431194, 8222322, 16448760, -3907995, -18707002, 11938355, -32961401, -2970515, 29551813, 10109425}, + }, + }, + { + { + FieldElement{-13657040, -13155431, -31283750, 11777098, 21447386, 6519384, -2378284, -1627556, 10092783, -4764171}, + FieldElement{27939166, 14210322, 4677035, 16277044, -22964462, -12398139, -32508754, 12005538, -17810127, 12803510}, + FieldElement{17228999, -15661624, -1233527, 300140, -1224870, -11714777, 30364213, -9038194, 18016357, 4397660}, + }, + { + FieldElement{-10958843, -7690207, 4776341, -14954238, 27850028, -15602212, -26619106, 14544525, -17477504, 982639}, + FieldElement{29253598, 15796703, -2863982, -9908884, 10057023, 3163536, 7332899, -4120128, -21047696, 9934963}, + FieldElement{5793303, 16271923, -24131614, -10116404, 29188560, 1206517, -14747930, 4559895, -30123922, -10897950}, + }, + { + FieldElement{-27643952, -11493006, 16282657, -11036493, 28414021, 
-15012264, 24191034, 4541697, -13338309, 5500568}, + FieldElement{12650548, -1497113, 9052871, 11355358, -17680037, -8400164, -17430592, 12264343, 10874051, 13524335}, + FieldElement{25556948, -3045990, 714651, 2510400, 23394682, -10415330, 33119038, 5080568, -22528059, 5376628}, + }, + { + FieldElement{-26088264, -4011052, -17013699, -3537628, -6726793, 1920897, -22321305, -9447443, 4535768, 1569007}, + FieldElement{-2255422, 14606630, -21692440, -8039818, 28430649, 8775819, -30494562, 3044290, 31848280, 12543772}, + FieldElement{-22028579, 2943893, -31857513, 6777306, 13784462, -4292203, -27377195, -2062731, 7718482, 14474653}, + }, + { + FieldElement{2385315, 2454213, -22631320, 46603, -4437935, -15680415, 656965, -7236665, 24316168, -5253567}, + FieldElement{13741529, 10911568, -33233417, -8603737, -20177830, -1033297, 33040651, -13424532, -20729456, 8321686}, + FieldElement{21060490, -2212744, 15712757, -4336099, 1639040, 10656336, 23845965, -11874838, -9984458, 608372}, + }, + { + FieldElement{-13672732, -15087586, -10889693, -7557059, -6036909, 11305547, 1123968, -6780577, 27229399, 23887}, + FieldElement{-23244140, -294205, -11744728, 14712571, -29465699, -2029617, 12797024, -6440308, -1633405, 16678954}, + FieldElement{-29500620, 4770662, -16054387, 14001338, 7830047, 9564805, -1508144, -4795045, -17169265, 4904953}, + }, + { + FieldElement{24059557, 14617003, 19037157, -15039908, 19766093, -14906429, 5169211, 16191880, 2128236, -4326833}, + FieldElement{-16981152, 4124966, -8540610, -10653797, 30336522, -14105247, -29806336, 916033, -6882542, -2986532}, + FieldElement{-22630907, 12419372, -7134229, -7473371, -16478904, 16739175, 285431, 2763829, 15736322, 4143876}, + }, + { + FieldElement{2379352, 11839345, -4110402, -5988665, 11274298, 794957, 212801, -14594663, 23527084, -16458268}, + FieldElement{33431127, -11130478, -17838966, -15626900, 8909499, 8376530, -32625340, 4087881, -15188911, -14416214}, + FieldElement{1767683, 7197987, -13205226, -2022635, -13091350, 448826, 5799055, 4357868, -4774191, -16323038}, + }, + }, + { + { + FieldElement{6721966, 13833823, -23523388, -1551314, 26354293, -11863321, 23365147, -3949732, 7390890, 2759800}, + FieldElement{4409041, 2052381, 23373853, 10530217, 7676779, -12885954, 21302353, -4264057, 1244380, -12919645}, + FieldElement{-4421239, 7169619, 4982368, -2957590, 30256825, -2777540, 14086413, 9208236, 15886429, 16489664}, + }, + { + FieldElement{1996075, 10375649, 14346367, 13311202, -6874135, -16438411, -13693198, 398369, -30606455, -712933}, + FieldElement{-25307465, 9795880, -2777414, 14878809, -33531835, 14780363, 13348553, 12076947, -30836462, 5113182}, + FieldElement{-17770784, 11797796, 31950843, 13929123, -25888302, 12288344, -30341101, -7336386, 13847711, 5387222}, + }, + { + FieldElement{-18582163, -3416217, 17824843, -2340966, 22744343, -10442611, 8763061, 3617786, -19600662, 10370991}, + FieldElement{20246567, -14369378, 22358229, -543712, 18507283, -10413996, 14554437, -8746092, 32232924, 16763880}, + FieldElement{9648505, 10094563, 26416693, 14745928, -30374318, -6472621, 11094161, 15689506, 3140038, -16510092}, + }, + { + FieldElement{-16160072, 5472695, 31895588, 4744994, 8823515, 10365685, -27224800, 9448613, -28774454, 366295}, + FieldElement{19153450, 11523972, -11096490, -6503142, -24647631, 5420647, 28344573, 8041113, 719605, 11671788}, + FieldElement{8678025, 2694440, -6808014, 2517372, 4964326, 11152271, -15432916, -15266516, 27000813, -10195553}, + }, + { + FieldElement{-15157904, 7134312, 8639287, -2814877, 
-7235688, 10421742, 564065, 5336097, 6750977, -14521026}, + FieldElement{11836410, -3979488, 26297894, 16080799, 23455045, 15735944, 1695823, -8819122, 8169720, 16220347}, + FieldElement{-18115838, 8653647, 17578566, -6092619, -8025777, -16012763, -11144307, -2627664, -5990708, -14166033}, + }, + { + FieldElement{-23308498, -10968312, 15213228, -10081214, -30853605, -11050004, 27884329, 2847284, 2655861, 1738395}, + FieldElement{-27537433, -14253021, -25336301, -8002780, -9370762, 8129821, 21651608, -3239336, -19087449, -11005278}, + FieldElement{1533110, 3437855, 23735889, 459276, 29970501, 11335377, 26030092, 5821408, 10478196, 8544890}, + }, + { + FieldElement{32173121, -16129311, 24896207, 3921497, 22579056, -3410854, 19270449, 12217473, 17789017, -3395995}, + FieldElement{-30552961, -2228401, -15578829, -10147201, 13243889, 517024, 15479401, -3853233, 30460520, 1052596}, + FieldElement{-11614875, 13323618, 32618793, 8175907, -15230173, 12596687, 27491595, -4612359, 3179268, -9478891}, + }, + { + FieldElement{31947069, -14366651, -4640583, -15339921, -15125977, -6039709, -14756777, -16411740, 19072640, -9511060}, + FieldElement{11685058, 11822410, 3158003, -13952594, 33402194, -4165066, 5977896, -5215017, 473099, 5040608}, + FieldElement{-20290863, 8198642, -27410132, 11602123, 1290375, -2799760, 28326862, 1721092, -19558642, -3131606}, + }, + }, + { + { + FieldElement{7881532, 10687937, 7578723, 7738378, -18951012, -2553952, 21820786, 8076149, -27868496, 11538389}, + FieldElement{-19935666, 3899861, 18283497, -6801568, -15728660, -11249211, 8754525, 7446702, -5676054, 5797016}, + FieldElement{-11295600, -3793569, -15782110, -7964573, 12708869, -8456199, 2014099, -9050574, -2369172, -5877341}, + }, + { + FieldElement{-22472376, -11568741, -27682020, 1146375, 18956691, 16640559, 1192730, -3714199, 15123619, 10811505}, + FieldElement{14352098, -3419715, -18942044, 10822655, 32750596, 4699007, -70363, 15776356, -28886779, -11974553}, + FieldElement{-28241164, -8072475, -4978962, -5315317, 29416931, 1847569, -20654173, -16484855, 4714547, -9600655}, + }, + { + FieldElement{15200332, 8368572, 19679101, 15970074, -31872674, 1959451, 24611599, -4543832, -11745876, 12340220}, + FieldElement{12876937, -10480056, 33134381, 6590940, -6307776, 14872440, 9613953, 8241152, 15370987, 9608631}, + FieldElement{-4143277, -12014408, 8446281, -391603, 4407738, 13629032, -7724868, 15866074, -28210621, -8814099}, + }, + { + FieldElement{26660628, -15677655, 8393734, 358047, -7401291, 992988, -23904233, 858697, 20571223, 8420556}, + FieldElement{14620715, 13067227, -15447274, 8264467, 14106269, 15080814, 33531827, 12516406, -21574435, -12476749}, + FieldElement{236881, 10476226, 57258, -14677024, 6472998, 2466984, 17258519, 7256740, 8791136, 15069930}, + }, + { + FieldElement{1276410, -9371918, 22949635, -16322807, -23493039, -5702186, 14711875, 4874229, -30663140, -2331391}, + FieldElement{5855666, 4990204, -13711848, 7294284, -7804282, 1924647, -1423175, -7912378, -33069337, 9234253}, + FieldElement{20590503, -9018988, 31529744, -7352666, -2706834, 10650548, 31559055, -11609587, 18979186, 13396066}, + }, + { + FieldElement{24474287, 4968103, 22267082, 4407354, 24063882, -8325180, -18816887, 13594782, 33514650, 7021958}, + FieldElement{-11566906, -6565505, -21365085, 15928892, -26158305, 4315421, -25948728, -3916677, -21480480, 12868082}, + FieldElement{-28635013, 13504661, 19988037, -2132761, 21078225, 6443208, -21446107, 2244500, -12455797, -8089383}, + }, + { + FieldElement{-30595528, 13793479, -5852820, 
319136, -25723172, -6263899, 33086546, 8957937, -15233648, 5540521}, + FieldElement{-11630176, -11503902, -8119500, -7643073, 2620056, 1022908, -23710744, -1568984, -16128528, -14962807}, + FieldElement{23152971, 775386, 27395463, 14006635, -9701118, 4649512, 1689819, 892185, -11513277, -15205948}, + }, + { + FieldElement{9770129, 9586738, 26496094, 4324120, 1556511, -3550024, 27453819, 4763127, -19179614, 5867134}, + FieldElement{-32765025, 1927590, 31726409, -4753295, 23962434, -16019500, 27846559, 5931263, -29749703, -16108455}, + FieldElement{27461885, -2977536, 22380810, 1815854, -23033753, -3031938, 7283490, -15148073, -19526700, 7734629}, + }, + }, + { + { + FieldElement{-8010264, -9590817, -11120403, 6196038, 29344158, -13430885, 7585295, -3176626, 18549497, 15302069}, + FieldElement{-32658337, -6171222, -7672793, -11051681, 6258878, 13504381, 10458790, -6418461, -8872242, 8424746}, + FieldElement{24687205, 8613276, -30667046, -3233545, 1863892, -1830544, 19206234, 7134917, -11284482, -828919}, + }, + { + FieldElement{11334899, -9218022, 8025293, 12707519, 17523892, -10476071, 10243738, -14685461, -5066034, 16498837}, + FieldElement{8911542, 6887158, -9584260, -6958590, 11145641, -9543680, 17303925, -14124238, 6536641, 10543906}, + FieldElement{-28946384, 15479763, -17466835, 568876, -1497683, 11223454, -2669190, -16625574, -27235709, 8876771}, + }, + { + FieldElement{-25742899, -12566864, -15649966, -846607, -33026686, -796288, -33481822, 15824474, -604426, -9039817}, + FieldElement{10330056, 70051, 7957388, -9002667, 9764902, 15609756, 27698697, -4890037, 1657394, 3084098}, + FieldElement{10477963, -7470260, 12119566, -13250805, 29016247, -5365589, 31280319, 14396151, -30233575, 15272409}, + }, + { + FieldElement{-12288309, 3169463, 28813183, 16658753, 25116432, -5630466, -25173957, -12636138, -25014757, 1950504}, + FieldElement{-26180358, 9489187, 11053416, -14746161, -31053720, 5825630, -8384306, -8767532, 15341279, 8373727}, + FieldElement{28685821, 7759505, -14378516, -12002860, -31971820, 4079242, 298136, -10232602, -2878207, 15190420}, + }, + { + FieldElement{-32932876, 13806336, -14337485, -15794431, -24004620, 10940928, 8669718, 2742393, -26033313, -6875003}, + FieldElement{-1580388, -11729417, -25979658, -11445023, -17411874, -10912854, 9291594, -16247779, -12154742, 6048605}, + FieldElement{-30305315, 14843444, 1539301, 11864366, 20201677, 1900163, 13934231, 5128323, 11213262, 9168384}, + }, + { + FieldElement{-26280513, 11007847, 19408960, -940758, -18592965, -4328580, -5088060, -11105150, 20470157, -16398701}, + FieldElement{-23136053, 9282192, 14855179, -15390078, -7362815, -14408560, -22783952, 14461608, 14042978, 5230683}, + FieldElement{29969567, -2741594, -16711867, -8552442, 9175486, -2468974, 21556951, 3506042, -5933891, -12449708}, + }, + { + FieldElement{-3144746, 8744661, 19704003, 4581278, -20430686, 6830683, -21284170, 8971513, -28539189, 15326563}, + FieldElement{-19464629, 10110288, -17262528, -3503892, -23500387, 1355669, -15523050, 15300988, -20514118, 9168260}, + FieldElement{-5353335, 4488613, -23803248, 16314347, 7780487, -15638939, -28948358, 9601605, 33087103, -9011387}, + }, + { + FieldElement{-19443170, -15512900, -20797467, -12445323, -29824447, 10229461, -27444329, -15000531, -5996870, 15664672}, + FieldElement{23294591, -16632613, -22650781, -8470978, 27844204, 11461195, 13099750, -2460356, 18151676, 13417686}, + FieldElement{-24722913, -4176517, -31150679, 5988919, -26858785, 6685065, 1661597, -12551441, 15271676, -15452665}, + }, + }, + { + 
{ + FieldElement{11433042, -13228665, 8239631, -5279517, -1985436, -725718, -18698764, 2167544, -6921301, -13440182}, + FieldElement{-31436171, 15575146, 30436815, 12192228, -22463353, 9395379, -9917708, -8638997, 12215110, 12028277}, + FieldElement{14098400, 6555944, 23007258, 5757252, -15427832, -12950502, 30123440, 4617780, -16900089, -655628}, + }, + { + FieldElement{-4026201, -15240835, 11893168, 13718664, -14809462, 1847385, -15819999, 10154009, 23973261, -12684474}, + FieldElement{-26531820, -3695990, -1908898, 2534301, -31870557, -16550355, 18341390, -11419951, 32013174, -10103539}, + FieldElement{-25479301, 10876443, -11771086, -14625140, -12369567, 1838104, 21911214, 6354752, 4425632, -837822}, + }, + { + FieldElement{-10433389, -14612966, 22229858, -3091047, -13191166, 776729, -17415375, -12020462, 4725005, 14044970}, + FieldElement{19268650, -7304421, 1555349, 8692754, -21474059, -9910664, 6347390, -1411784, -19522291, -16109756}, + FieldElement{-24864089, 12986008, -10898878, -5558584, -11312371, -148526, 19541418, 8180106, 9282262, 10282508}, + }, + { + FieldElement{-26205082, 4428547, -8661196, -13194263, 4098402, -14165257, 15522535, 8372215, 5542595, -10702683}, + FieldElement{-10562541, 14895633, 26814552, -16673850, -17480754, -2489360, -2781891, 6993761, -18093885, 10114655}, + FieldElement{-20107055, -929418, 31422704, 10427861, -7110749, 6150669, -29091755, -11529146, 25953725, -106158}, + }, + { + FieldElement{-4234397, -8039292, -9119125, 3046000, 2101609, -12607294, 19390020, 6094296, -3315279, 12831125}, + FieldElement{-15998678, 7578152, 5310217, 14408357, -33548620, -224739, 31575954, 6326196, 7381791, -2421839}, + FieldElement{-20902779, 3296811, 24736065, -16328389, 18374254, 7318640, 6295303, 8082724, -15362489, 12339664}, + }, + { + FieldElement{27724736, 2291157, 6088201, -14184798, 1792727, 5857634, 13848414, 15768922, 25091167, 14856294}, + FieldElement{-18866652, 8331043, 24373479, 8541013, -701998, -9269457, 12927300, -12695493, -22182473, -9012899}, + FieldElement{-11423429, -5421590, 11632845, 3405020, 30536730, -11674039, -27260765, 13866390, 30146206, 9142070}, + }, + { + FieldElement{3924129, -15307516, -13817122, -10054960, 12291820, -668366, -27702774, 9326384, -8237858, 4171294}, + FieldElement{-15921940, 16037937, 6713787, 16606682, -21612135, 2790944, 26396185, 3731949, 345228, -5462949}, + FieldElement{-21327538, 13448259, 25284571, 1143661, 20614966, -8849387, 2031539, -12391231, -16253183, -13582083}, + }, + { + FieldElement{31016211, -16722429, 26371392, -14451233, -5027349, 14854137, 17477601, 3842657, 28012650, -16405420}, + FieldElement{-5075835, 9368966, -8562079, -4600902, -15249953, 6970560, -9189873, 16292057, -8867157, 3507940}, + FieldElement{29439664, 3537914, 23333589, 6997794, -17555561, -11018068, -15209202, -15051267, -9164929, 6580396}, + }, + }, + { + { + FieldElement{-12185861, -7679788, 16438269, 10826160, -8696817, -6235611, 17860444, -9273846, -2095802, 9304567}, + FieldElement{20714564, -4336911, 29088195, 7406487, 11426967, -5095705, 14792667, -14608617, 5289421, -477127}, + FieldElement{-16665533, -10650790, -6160345, -13305760, 9192020, -1802462, 17271490, 12349094, 26939669, -3752294}, + }, + { + FieldElement{-12889898, 9373458, 31595848, 16374215, 21471720, 13221525, -27283495, -12348559, -3698806, 117887}, + FieldElement{22263325, -6560050, 3984570, -11174646, -15114008, -566785, 28311253, 5358056, -23319780, 541964}, + FieldElement{16259219, 3261970, 2309254, -15534474, -16885711, -4581916, 24134070, -16705829, 
-13337066, -13552195}, + }, + { + FieldElement{9378160, -13140186, -22845982, -12745264, 28198281, -7244098, -2399684, -717351, 690426, 14876244}, + FieldElement{24977353, -314384, -8223969, -13465086, 28432343, -1176353, -13068804, -12297348, -22380984, 6618999}, + FieldElement{-1538174, 11685646, 12944378, 13682314, -24389511, -14413193, 8044829, -13817328, 32239829, -5652762}, + }, + { + FieldElement{-18603066, 4762990, -926250, 8885304, -28412480, -3187315, 9781647, -10350059, 32779359, 5095274}, + FieldElement{-33008130, -5214506, -32264887, -3685216, 9460461, -9327423, -24601656, 14506724, 21639561, -2630236}, + FieldElement{-16400943, -13112215, 25239338, 15531969, 3987758, -4499318, -1289502, -6863535, 17874574, 558605}, + }, + { + FieldElement{-13600129, 10240081, 9171883, 16131053, -20869254, 9599700, 33499487, 5080151, 2085892, 5119761}, + FieldElement{-22205145, -2519528, -16381601, 414691, -25019550, 2170430, 30634760, -8363614, -31999993, -5759884}, + FieldElement{-6845704, 15791202, 8550074, -1312654, 29928809, -12092256, 27534430, -7192145, -22351378, 12961482}, + }, + { + FieldElement{-24492060, -9570771, 10368194, 11582341, -23397293, -2245287, 16533930, 8206996, -30194652, -5159638}, + FieldElement{-11121496, -3382234, 2307366, 6362031, -135455, 8868177, -16835630, 7031275, 7589640, 8945490}, + FieldElement{-32152748, 8917967, 6661220, -11677616, -1192060, -15793393, 7251489, -11182180, 24099109, -14456170}, + }, + { + FieldElement{5019558, -7907470, 4244127, -14714356, -26933272, 6453165, -19118182, -13289025, -6231896, -10280736}, + FieldElement{10853594, 10721687, 26480089, 5861829, -22995819, 1972175, -1866647, -10557898, -3363451, -6441124}, + FieldElement{-17002408, 5906790, 221599, -6563147, 7828208, -13248918, 24362661, -2008168, -13866408, 7421392}, + }, + { + FieldElement{8139927, -6546497, 32257646, -5890546, 30375719, 1886181, -21175108, 15441252, 28826358, -4123029}, + FieldElement{6267086, 9695052, 7709135, -16603597, -32869068, -1886135, 14795160, -7840124, 13746021, -1742048}, + FieldElement{28584902, 7787108, -6732942, -15050729, 22846041, -7571236, -3181936, -363524, 4771362, -8419958}, + }, + }, + { + { + FieldElement{24949256, 6376279, -27466481, -8174608, -18646154, -9930606, 33543569, -12141695, 3569627, 11342593}, + FieldElement{26514989, 4740088, 27912651, 3697550, 19331575, -11472339, 6809886, 4608608, 7325975, -14801071}, + FieldElement{-11618399, -14554430, -24321212, 7655128, -1369274, 5214312, -27400540, 10258390, -17646694, -8186692}, + }, + { + FieldElement{11431204, 15823007, 26570245, 14329124, 18029990, 4796082, -31446179, 15580664, 9280358, -3973687}, + FieldElement{-160783, -10326257, -22855316, -4304997, -20861367, -13621002, -32810901, -11181622, -15545091, 4387441}, + FieldElement{-20799378, 12194512, 3937617, -5805892, -27154820, 9340370, -24513992, 8548137, 20617071, -7482001}, + }, + { + FieldElement{-938825, -3930586, -8714311, 16124718, 24603125, -6225393, -13775352, -11875822, 24345683, 10325460}, + FieldElement{-19855277, -1568885, -22202708, 8714034, 14007766, 6928528, 16318175, -1010689, 4766743, 3552007}, + FieldElement{-21751364, -16730916, 1351763, -803421, -4009670, 3950935, 3217514, 14481909, 10988822, -3994762}, + }, + { + FieldElement{15564307, -14311570, 3101243, 5684148, 30446780, -8051356, 12677127, -6505343, -8295852, 13296005}, + FieldElement{-9442290, 6624296, -30298964, -11913677, -4670981, -2057379, 31521204, 9614054, -30000824, 12074674}, + FieldElement{4771191, -135239, 14290749, -13089852, 27992298, 
14998318, -1413936, -1556716, 29832613, -16391035}, + }, + { + FieldElement{7064884, -7541174, -19161962, -5067537, -18891269, -2912736, 25825242, 5293297, -27122660, 13101590}, + FieldElement{-2298563, 2439670, -7466610, 1719965, -27267541, -16328445, 32512469, -5317593, -30356070, -4190957}, + FieldElement{-30006540, 10162316, -33180176, 3981723, -16482138, -13070044, 14413974, 9515896, 19568978, 9628812}, + }, + { + FieldElement{33053803, 199357, 15894591, 1583059, 27380243, -4580435, -17838894, -6106839, -6291786, 3437740}, + FieldElement{-18978877, 3884493, 19469877, 12726490, 15913552, 13614290, -22961733, 70104, 7463304, 4176122}, + FieldElement{-27124001, 10659917, 11482427, -16070381, 12771467, -6635117, -32719404, -5322751, 24216882, 5944158}, + }, + { + FieldElement{8894125, 7450974, -2664149, -9765752, -28080517, -12389115, 19345746, 14680796, 11632993, 5847885}, + FieldElement{26942781, -2315317, 9129564, -4906607, 26024105, 11769399, -11518837, 6367194, -9727230, 4782140}, + FieldElement{19916461, -4828410, -22910704, -11414391, 25606324, -5972441, 33253853, 8220911, 6358847, -1873857}, + }, + { + FieldElement{801428, -2081702, 16569428, 11065167, 29875704, 96627, 7908388, -4480480, -13538503, 1387155}, + FieldElement{19646058, 5720633, -11416706, 12814209, 11607948, 12749789, 14147075, 15156355, -21866831, 11835260}, + FieldElement{19299512, 1155910, 28703737, 14890794, 2925026, 7269399, 26121523, 15467869, -26560550, 5052483}, + }, + }, + { + { + FieldElement{-3017432, 10058206, 1980837, 3964243, 22160966, 12322533, -6431123, -12618185, 12228557, -7003677}, + FieldElement{32944382, 14922211, -22844894, 5188528, 21913450, -8719943, 4001465, 13238564, -6114803, 8653815}, + FieldElement{22865569, -4652735, 27603668, -12545395, 14348958, 8234005, 24808405, 5719875, 28483275, 2841751}, + }, + { + FieldElement{-16420968, -1113305, -327719, -12107856, 21886282, -15552774, -1887966, -315658, 19932058, -12739203}, + FieldElement{-11656086, 10087521, -8864888, -5536143, -19278573, -3055912, 3999228, 13239134, -4777469, -13910208}, + FieldElement{1382174, -11694719, 17266790, 9194690, -13324356, 9720081, 20403944, 11284705, -14013818, 3093230}, + }, + { + FieldElement{16650921, -11037932, -1064178, 1570629, -8329746, 7352753, -302424, 16271225, -24049421, -6691850}, + FieldElement{-21911077, -5927941, -4611316, -5560156, -31744103, -10785293, 24123614, 15193618, -21652117, -16739389}, + FieldElement{-9935934, -4289447, -25279823, 4372842, 2087473, 10399484, 31870908, 14690798, 17361620, 11864968}, + }, + { + FieldElement{-11307610, 6210372, 13206574, 5806320, -29017692, -13967200, -12331205, -7486601, -25578460, -16240689}, + FieldElement{14668462, -12270235, 26039039, 15305210, 25515617, 4542480, 10453892, 6577524, 9145645, -6443880}, + FieldElement{5974874, 3053895, -9433049, -10385191, -31865124, 3225009, -7972642, 3936128, -5652273, -3050304}, + }, + { + FieldElement{30625386, -4729400, -25555961, -12792866, -20484575, 7695099, 17097188, -16303496, -27999779, 1803632}, + FieldElement{-3553091, 9865099, -5228566, 4272701, -5673832, -16689700, 14911344, 12196514, -21405489, 7047412}, + FieldElement{20093277, 9920966, -11138194, -5343857, 13161587, 12044805, -32856851, 4124601, -32343828, -10257566}, + }, + { + FieldElement{-20788824, 14084654, -13531713, 7842147, 19119038, -13822605, 4752377, -8714640, -21679658, 2288038}, + FieldElement{-26819236, -3283715, 29965059, 3039786, -14473765, 2540457, 29457502, 14625692, -24819617, 12570232}, + FieldElement{-1063558, -11551823, 16920318, 
12494842, 1278292, -5869109, -21159943, -3498680, -11974704, 4724943}, + }, + { + FieldElement{17960970, -11775534, -4140968, -9702530, -8876562, -1410617, -12907383, -8659932, -29576300, 1903856}, + FieldElement{23134274, -14279132, -10681997, -1611936, 20684485, 15770816, -12989750, 3190296, 26955097, 14109738}, + FieldElement{15308788, 5320727, -30113809, -14318877, 22902008, 7767164, 29425325, -11277562, 31960942, 11934971}, + }, + { + FieldElement{-27395711, 8435796, 4109644, 12222639, -24627868, 14818669, 20638173, 4875028, 10491392, 1379718}, + FieldElement{-13159415, 9197841, 3875503, -8936108, -1383712, -5879801, 33518459, 16176658, 21432314, 12180697}, + FieldElement{-11787308, 11500838, 13787581, -13832590, -22430679, 10140205, 1465425, 12689540, -10301319, -13872883}, + }, + }, + { + { + FieldElement{5414091, -15386041, -21007664, 9643570, 12834970, 1186149, -2622916, -1342231, 26128231, 6032912}, + FieldElement{-26337395, -13766162, 32496025, -13653919, 17847801, -12669156, 3604025, 8316894, -25875034, -10437358}, + FieldElement{3296484, 6223048, 24680646, -12246460, -23052020, 5903205, -8862297, -4639164, 12376617, 3188849}, + }, + { + FieldElement{29190488, -14659046, 27549113, -1183516, 3520066, -10697301, 32049515, -7309113, -16109234, -9852307}, + FieldElement{-14744486, -9309156, 735818, -598978, -20407687, -5057904, 25246078, -15795669, 18640741, -960977}, + FieldElement{-6928835, -16430795, 10361374, 5642961, 4910474, 12345252, -31638386, -494430, 10530747, 1053335}, + }, + { + FieldElement{-29265967, -14186805, -13538216, -12117373, -19457059, -10655384, -31462369, -2948985, 24018831, 15026644}, + FieldElement{-22592535, -3145277, -2289276, 5953843, -13440189, 9425631, 25310643, 13003497, -2314791, -15145616}, + FieldElement{-27419985, -603321, -8043984, -1669117, -26092265, 13987819, -27297622, 187899, -23166419, -2531735}, + }, + { + FieldElement{-21744398, -13810475, 1844840, 5021428, -10434399, -15911473, 9716667, 16266922, -5070217, 726099}, + FieldElement{29370922, -6053998, 7334071, -15342259, 9385287, 2247707, -13661962, -4839461, 30007388, -15823341}, + FieldElement{-936379, 16086691, 23751945, -543318, -1167538, -5189036, 9137109, 730663, 9835848, 4555336}, + }, + { + FieldElement{-23376435, 1410446, -22253753, -12899614, 30867635, 15826977, 17693930, 544696, -11985298, 12422646}, + FieldElement{31117226, -12215734, -13502838, 6561947, -9876867, -12757670, -5118685, -4096706, 29120153, 13924425}, + FieldElement{-17400879, -14233209, 19675799, -2734756, -11006962, -5858820, -9383939, -11317700, 7240931, -237388}, + }, + { + FieldElement{-31361739, -11346780, -15007447, -5856218, -22453340, -12152771, 1222336, 4389483, 3293637, -15551743}, + FieldElement{-16684801, -14444245, 11038544, 11054958, -13801175, -3338533, -24319580, 7733547, 12796905, -6335822}, + FieldElement{-8759414, -10817836, -25418864, 10783769, -30615557, -9746811, -28253339, 3647836, 3222231, -11160462}, + }, + { + FieldElement{18606113, 1693100, -25448386, -15170272, 4112353, 10045021, 23603893, -2048234, -7550776, 2484985}, + FieldElement{9255317, -3131197, -12156162, -1004256, 13098013, -9214866, 16377220, -2102812, -19802075, -3034702}, + FieldElement{-22729289, 7496160, -5742199, 11329249, 19991973, -3347502, -31718148, 9936966, -30097688, -10618797}, + }, + { + FieldElement{21878590, -5001297, 4338336, 13643897, -3036865, 13160960, 19708896, 5415497, -7360503, -4109293}, + FieldElement{27736861, 10103576, 12500508, 8502413, -3413016, -9633558, 10436918, -1550276, -23659143, -8132100}, 
+ FieldElement{19492550, -12104365, -29681976, -852630, -3208171, 12403437, 30066266, 8367329, 13243957, 8709688}, + }, + }, + { + { + FieldElement{12015105, 2801261, 28198131, 10151021, 24818120, -4743133, -11194191, -5645734, 5150968, 7274186}, + FieldElement{2831366, -12492146, 1478975, 6122054, 23825128, -12733586, 31097299, 6083058, 31021603, -9793610}, + FieldElement{-2529932, -2229646, 445613, 10720828, -13849527, -11505937, -23507731, 16354465, 15067285, -14147707}, + }, + { + FieldElement{7840942, 14037873, -33364863, 15934016, -728213, -3642706, 21403988, 1057586, -19379462, -12403220}, + FieldElement{915865, -16469274, 15608285, -8789130, -24357026, 6060030, -17371319, 8410997, -7220461, 16527025}, + FieldElement{32922597, -556987, 20336074, -16184568, 10903705, -5384487, 16957574, 52992, 23834301, 6588044}, + }, + { + FieldElement{32752030, 11232950, 3381995, -8714866, 22652988, -10744103, 17159699, 16689107, -20314580, -1305992}, + FieldElement{-4689649, 9166776, -25710296, -10847306, 11576752, 12733943, 7924251, -2752281, 1976123, -7249027}, + FieldElement{21251222, 16309901, -2983015, -6783122, 30810597, 12967303, 156041, -3371252, 12331345, -8237197}, + }, + { + FieldElement{8651614, -4477032, -16085636, -4996994, 13002507, 2950805, 29054427, -5106970, 10008136, -4667901}, + FieldElement{31486080, 15114593, -14261250, 12951354, 14369431, -7387845, 16347321, -13662089, 8684155, -10532952}, + FieldElement{19443825, 11385320, 24468943, -9659068, -23919258, 2187569, -26263207, -6086921, 31316348, 14219878}, + }, + { + FieldElement{-28594490, 1193785, 32245219, 11392485, 31092169, 15722801, 27146014, 6992409, 29126555, 9207390}, + FieldElement{32382935, 1110093, 18477781, 11028262, -27411763, -7548111, -4980517, 10843782, -7957600, -14435730}, + FieldElement{2814918, 7836403, 27519878, -7868156, -20894015, -11553689, -21494559, 8550130, 28346258, 1994730}, + }, + { + FieldElement{-19578299, 8085545, -14000519, -3948622, 2785838, -16231307, -19516951, 7174894, 22628102, 8115180}, + FieldElement{-30405132, 955511, -11133838, -15078069, -32447087, -13278079, -25651578, 3317160, -9943017, 930272}, + FieldElement{-15303681, -6833769, 28856490, 1357446, 23421993, 1057177, 24091212, -1388970, -22765376, -10650715}, + }, + { + FieldElement{-22751231, -5303997, -12907607, -12768866, -15811511, -7797053, -14839018, -16554220, -1867018, 8398970}, + FieldElement{-31969310, 2106403, -4736360, 1362501, 12813763, 16200670, 22981545, -6291273, 18009408, -15772772}, + FieldElement{-17220923, -9545221, -27784654, 14166835, 29815394, 7444469, 29551787, -3727419, 19288549, 1325865}, + }, + { + FieldElement{15100157, -15835752, -23923978, -1005098, -26450192, 15509408, 12376730, -3479146, 33166107, -8042750}, + FieldElement{20909231, 13023121, -9209752, 16251778, -5778415, -8094914, 12412151, 10018715, 2213263, -13878373}, + FieldElement{32529814, -11074689, 30361439, -16689753, -9135940, 1513226, 22922121, 6382134, -5766928, 8371348}, + }, + }, + { + { + FieldElement{9923462, 11271500, 12616794, 3544722, -29998368, -1721626, 12891687, -8193132, -26442943, 10486144}, + FieldElement{-22597207, -7012665, 8587003, -8257861, 4084309, -12970062, 361726, 2610596, -23921530, -11455195}, + FieldElement{5408411, -1136691, -4969122, 10561668, 24145918, 14240566, 31319731, -4235541, 19985175, -3436086}, + }, + { + FieldElement{-13994457, 16616821, 14549246, 3341099, 32155958, 13648976, -17577068, 8849297, 65030, 8370684}, + FieldElement{-8320926, -12049626, 31204563, 5839400, -20627288, -1057277, -19442942, 
6922164, 12743482, -9800518}, + FieldElement{-2361371, 12678785, 28815050, 4759974, -23893047, 4884717, 23783145, 11038569, 18800704, 255233}, + }, + { + FieldElement{-5269658, -1773886, 13957886, 7990715, 23132995, 728773, 13393847, 9066957, 19258688, -14753793}, + FieldElement{-2936654, -10827535, -10432089, 14516793, -3640786, 4372541, -31934921, 2209390, -1524053, 2055794}, + FieldElement{580882, 16705327, 5468415, -2683018, -30926419, -14696000, -7203346, -8994389, -30021019, 7394435}, + }, + { + FieldElement{23838809, 1822728, -15738443, 15242727, 8318092, -3733104, -21672180, -3492205, -4821741, 14799921}, + FieldElement{13345610, 9759151, 3371034, -16137791, 16353039, 8577942, 31129804, 13496856, -9056018, 7402518}, + FieldElement{2286874, -4435931, -20042458, -2008336, -13696227, 5038122, 11006906, -15760352, 8205061, 1607563}, + }, + { + FieldElement{14414086, -8002132, 3331830, -3208217, 22249151, -5594188, 18364661, -2906958, 30019587, -9029278}, + FieldElement{-27688051, 1585953, -10775053, 931069, -29120221, -11002319, -14410829, 12029093, 9944378, 8024}, + FieldElement{4368715, -3709630, 29874200, -15022983, -20230386, -11410704, -16114594, -999085, -8142388, 5640030}, + }, + { + FieldElement{10299610, 13746483, 11661824, 16234854, 7630238, 5998374, 9809887, -16694564, 15219798, -14327783}, + FieldElement{27425505, -5719081, 3055006, 10660664, 23458024, 595578, -15398605, -1173195, -18342183, 9742717}, + FieldElement{6744077, 2427284, 26042789, 2720740, -847906, 1118974, 32324614, 7406442, 12420155, 1994844}, + }, + { + FieldElement{14012521, -5024720, -18384453, -9578469, -26485342, -3936439, -13033478, -10909803, 24319929, -6446333}, + FieldElement{16412690, -4507367, 10772641, 15929391, -17068788, -4658621, 10555945, -10484049, -30102368, -4739048}, + FieldElement{22397382, -7767684, -9293161, -12792868, 17166287, -9755136, -27333065, 6199366, 21880021, -12250760}, + }, + { + FieldElement{-4283307, 5368523, -31117018, 8163389, -30323063, 3209128, 16557151, 8890729, 8840445, 4957760}, + FieldElement{-15447727, 709327, -6919446, -10870178, -29777922, 6522332, -21720181, 12130072, -14796503, 5005757}, + FieldElement{-2114751, -14308128, 23019042, 15765735, -25269683, 6002752, 10183197, -13239326, -16395286, -2176112}, + }, + }, + { + { + FieldElement{-19025756, 1632005, 13466291, -7995100, -23640451, 16573537, -32013908, -3057104, 22208662, 2000468}, + FieldElement{3065073, -1412761, -25598674, -361432, -17683065, -5703415, -8164212, 11248527, -3691214, -7414184}, + FieldElement{10379208, -6045554, 8877319, 1473647, -29291284, -12507580, 16690915, 2553332, -3132688, 16400289}, + }, + { + FieldElement{15716668, 1254266, -18472690, 7446274, -8448918, 6344164, -22097271, -7285580, 26894937, 9132066}, + FieldElement{24158887, 12938817, 11085297, -8177598, -28063478, -4457083, -30576463, 64452, -6817084, -2692882}, + FieldElement{13488534, 7794716, 22236231, 5989356, 25426474, -12578208, 2350710, -3418511, -4688006, 2364226}, + }, + { + FieldElement{16335052, 9132434, 25640582, 6678888, 1725628, 8517937, -11807024, -11697457, 15445875, -7798101}, + FieldElement{29004207, -7867081, 28661402, -640412, -12794003, -7943086, 31863255, -4135540, -278050, -15759279}, + FieldElement{-6122061, -14866665, -28614905, 14569919, -10857999, -3591829, 10343412, -6976290, -29828287, -10815811}, + }, + { + FieldElement{27081650, 3463984, 14099042, -4517604, 1616303, -6205604, 29542636, 15372179, 17293797, 960709}, + FieldElement{20263915, 11434237, -5765435, 11236810, 13505955, -10857102, 
-16111345, 6493122, -19384511, 7639714}, + FieldElement{-2830798, -14839232, 25403038, -8215196, -8317012, -16173699, 18006287, -16043750, 29994677, -15808121}, + }, + { + FieldElement{9769828, 5202651, -24157398, -13631392, -28051003, -11561624, -24613141, -13860782, -31184575, 709464}, + FieldElement{12286395, 13076066, -21775189, -1176622, -25003198, 4057652, -32018128, -8890874, 16102007, 13205847}, + FieldElement{13733362, 5599946, 10557076, 3195751, -5557991, 8536970, -25540170, 8525972, 10151379, 10394400}, + }, + { + FieldElement{4024660, -16137551, 22436262, 12276534, -9099015, -2686099, 19698229, 11743039, -33302334, 8934414}, + FieldElement{-15879800, -4525240, -8580747, -2934061, 14634845, -698278, -9449077, 3137094, -11536886, 11721158}, + FieldElement{17555939, -5013938, 8268606, 2331751, -22738815, 9761013, 9319229, 8835153, -9205489, -1280045}, + }, + { + FieldElement{-461409, -7830014, 20614118, 16688288, -7514766, -4807119, 22300304, 505429, 6108462, -6183415}, + FieldElement{-5070281, 12367917, -30663534, 3234473, 32617080, -8422642, 29880583, -13483331, -26898490, -7867459}, + FieldElement{-31975283, 5726539, 26934134, 10237677, -3173717, -605053, 24199304, 3795095, 7592688, -14992079}, + }, + { + FieldElement{21594432, -14964228, 17466408, -4077222, 32537084, 2739898, 6407723, 12018833, -28256052, 4298412}, + FieldElement{-20650503, -11961496, -27236275, 570498, 3767144, -1717540, 13891942, -1569194, 13717174, 10805743}, + FieldElement{-14676630, -15644296, 15287174, 11927123, 24177847, -8175568, -796431, 14860609, -26938930, -5863836}, + }, + }, + { + { + FieldElement{12962541, 5311799, -10060768, 11658280, 18855286, -7954201, 13286263, -12808704, -4381056, 9882022}, + FieldElement{18512079, 11319350, -20123124, 15090309, 18818594, 5271736, -22727904, 3666879, -23967430, -3299429}, + FieldElement{-6789020, -3146043, 16192429, 13241070, 15898607, -14206114, -10084880, -6661110, -2403099, 5276065}, + }, + { + FieldElement{30169808, -5317648, 26306206, -11750859, 27814964, 7069267, 7152851, 3684982, 1449224, 13082861}, + FieldElement{10342826, 3098505, 2119311, 193222, 25702612, 12233820, 23697382, 15056736, -21016438, -8202000}, + FieldElement{-33150110, 3261608, 22745853, 7948688, 19370557, -15177665, -26171976, 6482814, -10300080, -11060101}, + }, + { + FieldElement{32869458, -5408545, 25609743, 15678670, -10687769, -15471071, 26112421, 2521008, -22664288, 6904815}, + FieldElement{29506923, 4457497, 3377935, -9796444, -30510046, 12935080, 1561737, 3841096, -29003639, -6657642}, + FieldElement{10340844, -6630377, -18656632, -2278430, 12621151, -13339055, 30878497, -11824370, -25584551, 5181966}, + }, + { + FieldElement{25940115, -12658025, 17324188, -10307374, -8671468, 15029094, 24396252, -16450922, -2322852, -12388574}, + FieldElement{-21765684, 9916823, -1300409, 4079498, -1028346, 11909559, 1782390, 12641087, 20603771, -6561742}, + FieldElement{-18882287, -11673380, 24849422, 11501709, 13161720, -4768874, 1925523, 11914390, 4662781, 7820689}, + }, + { + FieldElement{12241050, -425982, 8132691, 9393934, 32846760, -1599620, 29749456, 12172924, 16136752, 15264020}, + FieldElement{-10349955, -14680563, -8211979, 2330220, -17662549, -14545780, 10658213, 6671822, 19012087, 3772772}, + FieldElement{3753511, -3421066, 10617074, 2028709, 14841030, -6721664, 28718732, -15762884, 20527771, 12988982}, + }, + { + FieldElement{-14822485, -5797269, -3707987, 12689773, -898983, -10914866, -24183046, -10564943, 3299665, -12424953}, + FieldElement{-16777703, -15253301, -9642417, 
4978983, 3308785, 8755439, 6943197, 6461331, -25583147, 8991218}, + FieldElement{-17226263, 1816362, -1673288, -6086439, 31783888, -8175991, -32948145, 7417950, -30242287, 1507265}, + }, + { + FieldElement{29692663, 6829891, -10498800, 4334896, 20945975, -11906496, -28887608, 8209391, 14606362, -10647073}, + FieldElement{-3481570, 8707081, 32188102, 5672294, 22096700, 1711240, -33020695, 9761487, 4170404, -2085325}, + FieldElement{-11587470, 14855945, -4127778, -1531857, -26649089, 15084046, 22186522, 16002000, -14276837, -8400798}, + }, + { + FieldElement{-4811456, 13761029, -31703877, -2483919, -3312471, 7869047, -7113572, -9620092, 13240845, 10965870}, + FieldElement{-7742563, -8256762, -14768334, -13656260, -23232383, 12387166, 4498947, 14147411, 29514390, 4302863}, + FieldElement{-13413405, -12407859, 20757302, -13801832, 14785143, 8976368, -5061276, -2144373, 17846988, -13971927}, + }, + }, + { + { + FieldElement{-2244452, -754728, -4597030, -1066309, -6247172, 1455299, -21647728, -9214789, -5222701, 12650267}, + FieldElement{-9906797, -16070310, 21134160, 12198166, -27064575, 708126, 387813, 13770293, -19134326, 10958663}, + FieldElement{22470984, 12369526, 23446014, -5441109, -21520802, -9698723, -11772496, -11574455, -25083830, 4271862}, + }, + { + FieldElement{-25169565, -10053642, -19909332, 15361595, -5984358, 2159192, 75375, -4278529, -32526221, 8469673}, + FieldElement{15854970, 4148314, -8893890, 7259002, 11666551, 13824734, -30531198, 2697372, 24154791, -9460943}, + FieldElement{15446137, -15806644, 29759747, 14019369, 30811221, -9610191, -31582008, 12840104, 24913809, 9815020}, + }, + { + FieldElement{-4709286, -5614269, -31841498, -12288893, -14443537, 10799414, -9103676, 13438769, 18735128, 9466238}, + FieldElement{11933045, 9281483, 5081055, -5183824, -2628162, -4905629, -7727821, -10896103, -22728655, 16199064}, + FieldElement{14576810, 379472, -26786533, -8317236, -29426508, -10812974, -102766, 1876699, 30801119, 2164795}, + }, + { + FieldElement{15995086, 3199873, 13672555, 13712240, -19378835, -4647646, -13081610, -15496269, -13492807, 1268052}, + FieldElement{-10290614, -3659039, -3286592, 10948818, 23037027, 3794475, -3470338, -12600221, -17055369, 3565904}, + FieldElement{29210088, -9419337, -5919792, -4952785, 10834811, -13327726, -16512102, -10820713, -27162222, -14030531}, + }, + { + FieldElement{-13161890, 15508588, 16663704, -8156150, -28349942, 9019123, -29183421, -3769423, 2244111, -14001979}, + FieldElement{-5152875, -3800936, -9306475, -6071583, 16243069, 14684434, -25673088, -16180800, 13491506, 4641841}, + FieldElement{10813417, 643330, -19188515, -728916, 30292062, -16600078, 27548447, -7721242, 14476989, -12767431}, + }, + { + FieldElement{10292079, 9984945, 6481436, 8279905, -7251514, 7032743, 27282937, -1644259, -27912810, 12651324}, + FieldElement{-31185513, -813383, 22271204, 11835308, 10201545, 15351028, 17099662, 3988035, 21721536, -3148940}, + FieldElement{10202177, -6545839, -31373232, -9574638, -32150642, -8119683, -12906320, 3852694, 13216206, 14842320}, + }, + { + FieldElement{-15815640, -10601066, -6538952, -7258995, -6984659, -6581778, -31500847, 13765824, -27434397, 9900184}, + FieldElement{14465505, -13833331, -32133984, -14738873, -27443187, 12990492, 33046193, 15796406, -7051866, -8040114}, + FieldElement{30924417, -8279620, 6359016, -12816335, 16508377, 9071735, -25488601, 15413635, 9524356, -7018878}, + }, + { + FieldElement{12274201, -13175547, 32627641, -1785326, 6736625, 13267305, 5237659, -5109483, 15663516, 4035784}, + 
FieldElement{-2951309, 8903985, 17349946, 601635, -16432815, -4612556, -13732739, -15889334, -22258478, 4659091}, + FieldElement{-16916263, -4952973, -30393711, -15158821, 20774812, 15897498, 5736189, 15026997, -2178256, -13455585}, + }, + }, + { + { + FieldElement{-8858980, -2219056, 28571666, -10155518, -474467, -10105698, -3801496, 278095, 23440562, -290208}, + FieldElement{10226241, -5928702, 15139956, 120818, -14867693, 5218603, 32937275, 11551483, -16571960, -7442864}, + FieldElement{17932739, -12437276, -24039557, 10749060, 11316803, 7535897, 22503767, 5561594, -3646624, 3898661}, + }, + { + FieldElement{7749907, -969567, -16339731, -16464, -25018111, 15122143, -1573531, 7152530, 21831162, 1245233}, + FieldElement{26958459, -14658026, 4314586, 8346991, -5677764, 11960072, -32589295, -620035, -30402091, -16716212}, + FieldElement{-12165896, 9166947, 33491384, 13673479, 29787085, 13096535, 6280834, 14587357, -22338025, 13987525}, + }, + { + FieldElement{-24349909, 7778775, 21116000, 15572597, -4833266, -5357778, -4300898, -5124639, -7469781, -2858068}, + FieldElement{9681908, -6737123, -31951644, 13591838, -6883821, 386950, 31622781, 6439245, -14581012, 4091397}, + FieldElement{-8426427, 1470727, -28109679, -1596990, 3978627, -5123623, -19622683, 12092163, 29077877, -14741988}, + }, + { + FieldElement{5269168, -6859726, -13230211, -8020715, 25932563, 1763552, -5606110, -5505881, -20017847, 2357889}, + FieldElement{32264008, -15407652, -5387735, -1160093, -2091322, -3946900, 23104804, -12869908, 5727338, 189038}, + FieldElement{14609123, -8954470, -6000566, -16622781, -14577387, -7743898, -26745169, 10942115, -25888931, -14884697}, + }, + { + FieldElement{20513500, 5557931, -15604613, 7829531, 26413943, -2019404, -21378968, 7471781, 13913677, -5137875}, + FieldElement{-25574376, 11967826, 29233242, 12948236, -6754465, 4713227, -8940970, 14059180, 12878652, 8511905}, + FieldElement{-25656801, 3393631, -2955415, -7075526, -2250709, 9366908, -30223418, 6812974, 5568676, -3127656}, + }, + { + FieldElement{11630004, 12144454, 2116339, 13606037, 27378885, 15676917, -17408753, -13504373, -14395196, 8070818}, + FieldElement{27117696, -10007378, -31282771, -5570088, 1127282, 12772488, -29845906, 10483306, -11552749, -1028714}, + FieldElement{10637467, -5688064, 5674781, 1072708, -26343588, -6982302, -1683975, 9177853, -27493162, 15431203}, + }, + { + FieldElement{20525145, 10892566, -12742472, 12779443, -29493034, 16150075, -28240519, 14943142, -15056790, -7935931}, + FieldElement{-30024462, 5626926, -551567, -9981087, 753598, 11981191, 25244767, -3239766, -3356550, 9594024}, + FieldElement{-23752644, 2636870, -5163910, -10103818, 585134, 7877383, 11345683, -6492290, 13352335, -10977084}, + }, + { + FieldElement{-1931799, -5407458, 3304649, -12884869, 17015806, -4877091, -29783850, -7752482, -13215537, -319204}, + FieldElement{20239939, 6607058, 6203985, 3483793, -18386976, -779229, -20723742, 15077870, -22750759, 14523817}, + FieldElement{27406042, -6041657, 27423596, -4497394, 4996214, 10002360, -28842031, -4545494, -30172742, -4805667}, + }, + }, + { + { + FieldElement{11374242, 12660715, 17861383, -12540833, 10935568, 1099227, -13886076, -9091740, -27727044, 11358504}, + FieldElement{-12730809, 10311867, 1510375, 10778093, -2119455, -9145702, 32676003, 11149336, -26123651, 4985768}, + FieldElement{-19096303, 341147, -6197485, -239033, 15756973, -8796662, -983043, 13794114, -19414307, -15621255}, + }, + { + FieldElement{6490081, 11940286, 25495923, -7726360, 8668373, -8751316, 3367603, 
6970005, -1691065, -9004790}, + FieldElement{1656497, 13457317, 15370807, 6364910, 13605745, 8362338, -19174622, -5475723, -16796596, -5031438}, + FieldElement{-22273315, -13524424, -64685, -4334223, -18605636, -10921968, -20571065, -7007978, -99853, -10237333}, + }, + { + FieldElement{17747465, 10039260, 19368299, -4050591, -20630635, -16041286, 31992683, -15857976, -29260363, -5511971}, + FieldElement{31932027, -4986141, -19612382, 16366580, 22023614, 88450, 11371999, -3744247, 4882242, -10626905}, + FieldElement{29796507, 37186, 19818052, 10115756, -11829032, 3352736, 18551198, 3272828, -5190932, -4162409}, + }, + { + FieldElement{12501286, 4044383, -8612957, -13392385, -32430052, 5136599, -19230378, -3529697, 330070, -3659409}, + FieldElement{6384877, 2899513, 17807477, 7663917, -2358888, 12363165, 25366522, -8573892, -271295, 12071499}, + FieldElement{-8365515, -4042521, 25133448, -4517355, -6211027, 2265927, -32769618, 1936675, -5159697, 3829363}, + }, + { + FieldElement{28425966, -5835433, -577090, -4697198, -14217555, 6870930, 7921550, -6567787, 26333140, 14267664}, + FieldElement{-11067219, 11871231, 27385719, -10559544, -4585914, -11189312, 10004786, -8709488, -21761224, 8930324}, + FieldElement{-21197785, -16396035, 25654216, -1725397, 12282012, 11008919, 1541940, 4757911, -26491501, -16408940}, + }, + { + FieldElement{13537262, -7759490, -20604840, 10961927, -5922820, -13218065, -13156584, 6217254, -15943699, 13814990}, + FieldElement{-17422573, 15157790, 18705543, 29619, 24409717, -260476, 27361681, 9257833, -1956526, -1776914}, + FieldElement{-25045300, -10191966, 15366585, 15166509, -13105086, 8423556, -29171540, 12361135, -18685978, 4578290}, + }, + { + FieldElement{24579768, 3711570, 1342322, -11180126, -27005135, 14124956, -22544529, 14074919, 21964432, 8235257}, + FieldElement{-6528613, -2411497, 9442966, -5925588, 12025640, -1487420, -2981514, -1669206, 13006806, 2355433}, + FieldElement{-16304899, -13605259, -6632427, -5142349, 16974359, -10911083, 27202044, 1719366, 1141648, -12796236}, + }, + { + FieldElement{-12863944, -13219986, -8318266, -11018091, -6810145, -4843894, 13475066, -3133972, 32674895, 13715045}, + FieldElement{11423335, -5468059, 32344216, 8962751, 24989809, 9241752, -13265253, 16086212, -28740881, -15642093}, + FieldElement{-1409668, 12530728, -6368726, 10847387, 19531186, -14132160, -11709148, 7791794, -27245943, 4383347}, + }, + }, + { + { + FieldElement{-28970898, 5271447, -1266009, -9736989, -12455236, 16732599, -4862407, -4906449, 27193557, 6245191}, + FieldElement{-15193956, 5362278, -1783893, 2695834, 4960227, 12840725, 23061898, 3260492, 22510453, 8577507}, + FieldElement{-12632451, 11257346, -32692994, 13548177, -721004, 10879011, 31168030, 13952092, -29571492, -3635906}, + }, + { + FieldElement{3877321, -9572739, 32416692, 5405324, -11004407, -13656635, 3759769, 11935320, 5611860, 8164018}, + FieldElement{-16275802, 14667797, 15906460, 12155291, -22111149, -9039718, 32003002, -8832289, 5773085, -8422109}, + FieldElement{-23788118, -8254300, 1950875, 8937633, 18686727, 16459170, -905725, 12376320, 31632953, 190926}, + }, + { + FieldElement{-24593607, -16138885, -8423991, 13378746, 14162407, 6901328, -8288749, 4508564, -25341555, -3627528}, + FieldElement{8884438, -5884009, 6023974, 10104341, -6881569, -4941533, 18722941, -14786005, -1672488, 827625}, + FieldElement{-32720583, -16289296, -32503547, 7101210, 13354605, 2659080, -1800575, -14108036, -24878478, 1541286}, + }, + { + FieldElement{2901347, -1117687, 3880376, -10059388, -17620940, 
-3612781, -21802117, -3567481, 20456845, -1885033}, + FieldElement{27019610, 12299467, -13658288, -1603234, -12861660, -4861471, -19540150, -5016058, 29439641, 15138866}, + FieldElement{21536104, -6626420, -32447818, -10690208, -22408077, 5175814, -5420040, -16361163, 7779328, 109896}, + }, + { + FieldElement{30279744, 14648750, -8044871, 6425558, 13639621, -743509, 28698390, 12180118, 23177719, -554075}, + FieldElement{26572847, 3405927, -31701700, 12890905, -19265668, 5335866, -6493768, 2378492, 4439158, -13279347}, + FieldElement{-22716706, 3489070, -9225266, -332753, 18875722, -1140095, 14819434, -12731527, -17717757, -5461437}, + }, + { + FieldElement{-5056483, 16566551, 15953661, 3767752, -10436499, 15627060, -820954, 2177225, 8550082, -15114165}, + FieldElement{-18473302, 16596775, -381660, 15663611, 22860960, 15585581, -27844109, -3582739, -23260460, -8428588}, + FieldElement{-32480551, 15707275, -8205912, -5652081, 29464558, 2713815, -22725137, 15860482, -21902570, 1494193}, + }, + { + FieldElement{-19562091, -14087393, -25583872, -9299552, 13127842, 759709, 21923482, 16529112, 8742704, 12967017}, + FieldElement{-28464899, 1553205, 32536856, -10473729, -24691605, -406174, -8914625, -2933896, -29903758, 15553883}, + FieldElement{21877909, 3230008, 9881174, 10539357, -4797115, 2841332, 11543572, 14513274, 19375923, -12647961}, + }, + { + FieldElement{8832269, -14495485, 13253511, 5137575, 5037871, 4078777, 24880818, -6222716, 2862653, 9455043}, + FieldElement{29306751, 5123106, 20245049, -14149889, 9592566, 8447059, -2077124, -2990080, 15511449, 4789663}, + FieldElement{-20679756, 7004547, 8824831, -9434977, -4045704, -3750736, -5754762, 108893, 23513200, 16652362}, + }, + }, + { + { + FieldElement{-33256173, 4144782, -4476029, -6579123, 10770039, -7155542, -6650416, -12936300, -18319198, 10212860}, + FieldElement{2756081, 8598110, 7383731, -6859892, 22312759, -1105012, 21179801, 2600940, -9988298, -12506466}, + FieldElement{-24645692, 13317462, -30449259, -15653928, 21365574, -10869657, 11344424, 864440, -2499677, -16710063}, + }, + { + FieldElement{-26432803, 6148329, -17184412, -14474154, 18782929, -275997, -22561534, 211300, 2719757, 4940997}, + FieldElement{-1323882, 3911313, -6948744, 14759765, -30027150, 7851207, 21690126, 8518463, 26699843, 5276295}, + FieldElement{-13149873, -6429067, 9396249, 365013, 24703301, -10488939, 1321586, 149635, -15452774, 7159369}, + }, + { + FieldElement{9987780, -3404759, 17507962, 9505530, 9731535, -2165514, 22356009, 8312176, 22477218, -8403385}, + FieldElement{18155857, -16504990, 19744716, 9006923, 15154154, -10538976, 24256460, -4864995, -22548173, 9334109}, + FieldElement{2986088, -4911893, 10776628, -3473844, 10620590, -7083203, -21413845, 14253545, -22587149, 536906}, + }, + { + FieldElement{4377756, 8115836, 24567078, 15495314, 11625074, 13064599, 7390551, 10589625, 10838060, -15420424}, + FieldElement{-19342404, 867880, 9277171, -3218459, -14431572, -1986443, 19295826, -15796950, 6378260, 699185}, + FieldElement{7895026, 4057113, -7081772, -13077756, -17886831, -323126, -716039, 15693155, -5045064, -13373962}, + }, + { + FieldElement{-7737563, -5869402, -14566319, -7406919, 11385654, 13201616, 31730678, -10962840, -3918636, -9669325}, + FieldElement{10188286, -15770834, -7336361, 13427543, 22223443, 14896287, 30743455, 7116568, -21786507, 5427593}, + FieldElement{696102, 13206899, 27047647, -10632082, 15285305, -9853179, 10798490, -4578720, 19236243, 12477404}, + }, + { + FieldElement{-11229439, 11243796, -17054270, -8040865, -788228, 
-8167967, -3897669, 11180504, -23169516, 7733644}, + FieldElement{17800790, -14036179, -27000429, -11766671, 23887827, 3149671, 23466177, -10538171, 10322027, 15313801}, + FieldElement{26246234, 11968874, 32263343, -5468728, 6830755, -13323031, -15794704, -101982, -24449242, 10890804}, + }, + { + FieldElement{-31365647, 10271363, -12660625, -6267268, 16690207, -13062544, -14982212, 16484931, 25180797, -5334884}, + FieldElement{-586574, 10376444, -32586414, -11286356, 19801893, 10997610, 2276632, 9482883, 316878, 13820577}, + FieldElement{-9882808, -4510367, -2115506, 16457136, -11100081, 11674996, 30756178, -7515054, 30696930, -3712849}, + }, + { + FieldElement{32988917, -9603412, 12499366, 7910787, -10617257, -11931514, -7342816, -9985397, -32349517, 7392473}, + FieldElement{-8855661, 15927861, 9866406, -3649411, -2396914, -16655781, -30409476, -9134995, 25112947, -2926644}, + FieldElement{-2504044, -436966, 25621774, -5678772, 15085042, -5479877, -24884878, -13526194, 5537438, -13914319}, + }, + }, + { + { + FieldElement{-11225584, 2320285, -9584280, 10149187, -33444663, 5808648, -14876251, -1729667, 31234590, 6090599}, + FieldElement{-9633316, 116426, 26083934, 2897444, -6364437, -2688086, 609721, 15878753, -6970405, -9034768}, + FieldElement{-27757857, 247744, -15194774, -9002551, 23288161, -10011936, -23869595, 6503646, 20650474, 1804084}, + }, + { + FieldElement{-27589786, 15456424, 8972517, 8469608, 15640622, 4439847, 3121995, -10329713, 27842616, -202328}, + FieldElement{-15306973, 2839644, 22530074, 10026331, 4602058, 5048462, 28248656, 5031932, -11375082, 12714369}, + FieldElement{20807691, -7270825, 29286141, 11421711, -27876523, -13868230, -21227475, 1035546, -19733229, 12796920}, + }, + { + FieldElement{12076899, -14301286, -8785001, -11848922, -25012791, 16400684, -17591495, -12899438, 3480665, -15182815}, + FieldElement{-32361549, 5457597, 28548107, 7833186, 7303070, -11953545, -24363064, -15921875, -33374054, 2771025}, + FieldElement{-21389266, 421932, 26597266, 6860826, 22486084, -6737172, -17137485, -4210226, -24552282, 15673397}, + }, + { + FieldElement{-20184622, 2338216, 19788685, -9620956, -4001265, -8740893, -20271184, 4733254, 3727144, -12934448}, + FieldElement{6120119, 814863, -11794402, -622716, 6812205, -15747771, 2019594, 7975683, 31123697, -10958981}, + FieldElement{30069250, -11435332, 30434654, 2958439, 18399564, -976289, 12296869, 9204260, -16432438, 9648165}, + }, + { + FieldElement{32705432, -1550977, 30705658, 7451065, -11805606, 9631813, 3305266, 5248604, -26008332, -11377501}, + FieldElement{17219865, 2375039, -31570947, -5575615, -19459679, 9219903, 294711, 15298639, 2662509, -16297073}, + FieldElement{-1172927, -7558695, -4366770, -4287744, -21346413, -8434326, 32087529, -1222777, 32247248, -14389861}, + }, + { + FieldElement{14312628, 1221556, 17395390, -8700143, -4945741, -8684635, -28197744, -9637817, -16027623, -13378845}, + FieldElement{-1428825, -9678990, -9235681, 6549687, -7383069, -468664, 23046502, 9803137, 17597934, 2346211}, + FieldElement{18510800, 15337574, 26171504, 981392, -22241552, 7827556, -23491134, -11323352, 3059833, -11782870}, + }, + { + FieldElement{10141598, 6082907, 17829293, -1947643, 9830092, 13613136, -25556636, -5544586, -33502212, 3592096}, + FieldElement{33114168, -15889352, -26525686, -13343397, 33076705, 8716171, 1151462, 1521897, -982665, -6837803}, + FieldElement{-32939165, -4255815, 23947181, -324178, -33072974, -12305637, -16637686, 3891704, 26353178, 693168}, + }, + { + FieldElement{30374239, 1595580, 
-16884039, 13186931, 4600344, 406904, 9585294, -400668, 31375464, 14369965}, + FieldElement{-14370654, -7772529, 1510301, 6434173, -18784789, -6262728, 32732230, -13108839, 17901441, 16011505}, + FieldElement{18171223, -11934626, -12500402, 15197122, -11038147, -15230035, -19172240, -16046376, 8764035, 12309598}, + }, + }, + { + { + FieldElement{5975908, -5243188, -19459362, -9681747, -11541277, 14015782, -23665757, 1228319, 17544096, -10593782}, + FieldElement{5811932, -1715293, 3442887, -2269310, -18367348, -8359541, -18044043, -15410127, -5565381, 12348900}, + FieldElement{-31399660, 11407555, 25755363, 6891399, -3256938, 14872274, -24849353, 8141295, -10632534, -585479}, + }, + { + FieldElement{-12675304, 694026, -5076145, 13300344, 14015258, -14451394, -9698672, -11329050, 30944593, 1130208}, + FieldElement{8247766, -6710942, -26562381, -7709309, -14401939, -14648910, 4652152, 2488540, 23550156, -271232}, + FieldElement{17294316, -3788438, 7026748, 15626851, 22990044, 113481, 2267737, -5908146, -408818, -137719}, + }, + { + FieldElement{16091085, -16253926, 18599252, 7340678, 2137637, -1221657, -3364161, 14550936, 3260525, -7166271}, + FieldElement{-4910104, -13332887, 18550887, 10864893, -16459325, -7291596, -23028869, -13204905, -12748722, 2701326}, + FieldElement{-8574695, 16099415, 4629974, -16340524, -20786213, -6005432, -10018363, 9276971, 11329923, 1862132}, + }, + { + FieldElement{14763076, -15903608, -30918270, 3689867, 3511892, 10313526, -21951088, 12219231, -9037963, -940300}, + FieldElement{8894987, -3446094, 6150753, 3013931, 301220, 15693451, -31981216, -2909717, -15438168, 11595570}, + FieldElement{15214962, 3537601, -26238722, -14058872, 4418657, -15230761, 13947276, 10730794, -13489462, -4363670}, + }, + { + FieldElement{-2538306, 7682793, 32759013, 263109, -29984731, -7955452, -22332124, -10188635, 977108, 699994}, + FieldElement{-12466472, 4195084, -9211532, 550904, -15565337, 12917920, 19118110, -439841, -30534533, -14337913}, + FieldElement{31788461, -14507657, 4799989, 7372237, 8808585, -14747943, 9408237, -10051775, 12493932, -5409317}, + }, + { + FieldElement{-25680606, 5260744, -19235809, -6284470, -3695942, 16566087, 27218280, 2607121, 29375955, 6024730}, + FieldElement{842132, -2794693, -4763381, -8722815, 26332018, -12405641, 11831880, 6985184, -9940361, 2854096}, + FieldElement{-4847262, -7969331, 2516242, -5847713, 9695691, -7221186, 16512645, 960770, 12121869, 16648078}, + }, + { + FieldElement{-15218652, 14667096, -13336229, 2013717, 30598287, -464137, -31504922, -7882064, 20237806, 2838411}, + FieldElement{-19288047, 4453152, 15298546, -16178388, 22115043, -15972604, 12544294, -13470457, 1068881, -12499905}, + FieldElement{-9558883, -16518835, 33238498, 13506958, 30505848, -1114596, -8486907, -2630053, 12521378, 4845654}, + }, + { + FieldElement{-28198521, 10744108, -2958380, 10199664, 7759311, -13088600, 3409348, -873400, -6482306, -12885870}, + FieldElement{-23561822, 6230156, -20382013, 10655314, -24040585, -11621172, 10477734, -1240216, -3113227, 13974498}, + FieldElement{12966261, 15550616, -32038948, -1615346, 21025980, -629444, 5642325, 7188737, 18895762, 12629579}, + }, + }, + { + { + FieldElement{14741879, -14946887, 22177208, -11721237, 1279741, 8058600, 11758140, 789443, 32195181, 3895677}, + FieldElement{10758205, 15755439, -4509950, 9243698, -4879422, 6879879, -2204575, -3566119, -8982069, 4429647}, + FieldElement{-2453894, 15725973, -20436342, -10410672, -5803908, -11040220, -7135870, -11642895, 18047436, -15281743}, + }, + { + 
FieldElement{-25173001, -11307165, 29759956, 11776784, -22262383, -15820455, 10993114, -12850837, -17620701, -9408468}, + FieldElement{21987233, 700364, -24505048, 14972008, -7774265, -5718395, 32155026, 2581431, -29958985, 8773375}, + FieldElement{-25568350, 454463, -13211935, 16126715, 25240068, 8594567, 20656846, 12017935, -7874389, -13920155}, + }, + { + FieldElement{6028182, 6263078, -31011806, -11301710, -818919, 2461772, -31841174, -5468042, -1721788, -2776725}, + FieldElement{-12278994, 16624277, 987579, -5922598, 32908203, 1248608, 7719845, -4166698, 28408820, 6816612}, + FieldElement{-10358094, -8237829, 19549651, -12169222, 22082623, 16147817, 20613181, 13982702, -10339570, 5067943}, + }, + { + FieldElement{-30505967, -3821767, 12074681, 13582412, -19877972, 2443951, -19719286, 12746132, 5331210, -10105944}, + FieldElement{30528811, 3601899, -1957090, 4619785, -27361822, -15436388, 24180793, -12570394, 27679908, -1648928}, + FieldElement{9402404, -13957065, 32834043, 10838634, -26580150, -13237195, 26653274, -8685565, 22611444, -12715406}, + }, + { + FieldElement{22190590, 1118029, 22736441, 15130463, -30460692, -5991321, 19189625, -4648942, 4854859, 6622139}, + FieldElement{-8310738, -2953450, -8262579, -3388049, -10401731, -271929, 13424426, -3567227, 26404409, 13001963}, + FieldElement{-31241838, -15415700, -2994250, 8939346, 11562230, -12840670, -26064365, -11621720, -15405155, 11020693}, + }, + { + FieldElement{1866042, -7949489, -7898649, -10301010, 12483315, 13477547, 3175636, -12424163, 28761762, 1406734}, + FieldElement{-448555, -1777666, 13018551, 3194501, -9580420, -11161737, 24760585, -4347088, 25577411, -13378680}, + FieldElement{-24290378, 4759345, -690653, -1852816, 2066747, 10693769, -29595790, 9884936, -9368926, 4745410}, + }, + { + FieldElement{-9141284, 6049714, -19531061, -4341411, -31260798, 9944276, -15462008, -11311852, 10931924, -11931931}, + FieldElement{-16561513, 14112680, -8012645, 4817318, -8040464, -11414606, -22853429, 10856641, -20470770, 13434654}, + FieldElement{22759489, -10073434, -16766264, -1871422, 13637442, -10168091, 1765144, -12654326, 28445307, -5364710}, + }, + { + FieldElement{29875063, 12493613, 2795536, -3786330, 1710620, 15181182, -10195717, -8788675, 9074234, 1167180}, + FieldElement{-26205683, 11014233, -9842651, -2635485, -26908120, 7532294, -18716888, -9535498, 3843903, 9367684}, + FieldElement{-10969595, -6403711, 9591134, 9582310, 11349256, 108879, 16235123, 8601684, -139197, 4242895}, + }, + }, + { + { + FieldElement{22092954, -13191123, -2042793, -11968512, 32186753, -11517388, -6574341, 2470660, -27417366, 16625501}, + FieldElement{-11057722, 3042016, 13770083, -9257922, 584236, -544855, -7770857, 2602725, -27351616, 14247413}, + FieldElement{6314175, -10264892, -32772502, 15957557, -10157730, 168750, -8618807, 14290061, 27108877, -1180880}, + }, + { + FieldElement{-8586597, -7170966, 13241782, 10960156, -32991015, -13794596, 33547976, -11058889, -27148451, 981874}, + FieldElement{22833440, 9293594, -32649448, -13618667, -9136966, 14756819, -22928859, -13970780, -10479804, -16197962}, + FieldElement{-7768587, 3326786, -28111797, 10783824, 19178761, 14905060, 22680049, 13906969, -15933690, 3797899}, + }, + { + FieldElement{21721356, -4212746, -12206123, 9310182, -3882239, -13653110, 23740224, -2709232, 20491983, -8042152}, + FieldElement{9209270, -15135055, -13256557, -6167798, -731016, 15289673, 25947805, 15286587, 30997318, -6703063}, + FieldElement{7392032, 16618386, 23946583, -8039892, -13265164, -1533858, -14197445, 
-2321576, 17649998, -250080}, + }, + { + FieldElement{-9301088, -14193827, 30609526, -3049543, -25175069, -1283752, -15241566, -9525724, -2233253, 7662146}, + FieldElement{-17558673, 1763594, -33114336, 15908610, -30040870, -12174295, 7335080, -8472199, -3174674, 3440183}, + FieldElement{-19889700, -5977008, -24111293, -9688870, 10799743, -16571957, 40450, -4431835, 4862400, 1133}, + }, + { + FieldElement{-32856209, -7873957, -5422389, 14860950, -16319031, 7956142, 7258061, 311861, -30594991, -7379421}, + FieldElement{-3773428, -1565936, 28985340, 7499440, 24445838, 9325937, 29727763, 16527196, 18278453, 15405622}, + FieldElement{-4381906, 8508652, -19898366, -3674424, -5984453, 15149970, -13313598, 843523, -21875062, 13626197}, + }, + { + FieldElement{2281448, -13487055, -10915418, -2609910, 1879358, 16164207, -10783882, 3953792, 13340839, 15928663}, + FieldElement{31727126, -7179855, -18437503, -8283652, 2875793, -16390330, -25269894, -7014826, -23452306, 5964753}, + FieldElement{4100420, -5959452, -17179337, 6017714, -18705837, 12227141, -26684835, 11344144, 2538215, -7570755}, + }, + { + FieldElement{-9433605, 6123113, 11159803, -2156608, 30016280, 14966241, -20474983, 1485421, -629256, -15958862}, + FieldElement{-26804558, 4260919, 11851389, 9658551, -32017107, 16367492, -20205425, -13191288, 11659922, -11115118}, + FieldElement{26180396, 10015009, -30844224, -8581293, 5418197, 9480663, 2231568, -10170080, 33100372, -1306171}, + }, + { + FieldElement{15121113, -5201871, -10389905, 15427821, -27509937, -15992507, 21670947, 4486675, -5931810, -14466380}, + FieldElement{16166486, -9483733, -11104130, 6023908, -31926798, -1364923, 2340060, -16254968, -10735770, -10039824}, + FieldElement{28042865, -3557089, -12126526, 12259706, -3717498, -6945899, 6766453, -8689599, 18036436, 5803270}, + }, + }, + { + { + FieldElement{-817581, 6763912, 11803561, 1585585, 10958447, -2671165, 23855391, 4598332, -6159431, -14117438}, + FieldElement{-31031306, -14256194, 17332029, -2383520, 31312682, -5967183, 696309, 50292, -20095739, 11763584}, + FieldElement{-594563, -2514283, -32234153, 12643980, 12650761, 14811489, 665117, -12613632, -19773211, -10713562}, + }, + { + FieldElement{30464590, -11262872, -4127476, -12734478, 19835327, -7105613, -24396175, 2075773, -17020157, 992471}, + FieldElement{18357185, -6994433, 7766382, 16342475, -29324918, 411174, 14578841, 8080033, -11574335, -10601610}, + FieldElement{19598397, 10334610, 12555054, 2555664, 18821899, -10339780, 21873263, 16014234, 26224780, 16452269}, + }, + { + FieldElement{-30223925, 5145196, 5944548, 16385966, 3976735, 2009897, -11377804, -7618186, -20533829, 3698650}, + FieldElement{14187449, 3448569, -10636236, -10810935, -22663880, -3433596, 7268410, -10890444, 27394301, 12015369}, + FieldElement{19695761, 16087646, 28032085, 12999827, 6817792, 11427614, 20244189, -1312777, -13259127, -3402461}, + }, + { + FieldElement{30860103, 12735208, -1888245, -4699734, -16974906, 2256940, -8166013, 12298312, -8550524, -10393462}, + FieldElement{-5719826, -11245325, -1910649, 15569035, 26642876, -7587760, -5789354, -15118654, -4976164, 12651793}, + FieldElement{-2848395, 9953421, 11531313, -5282879, 26895123, -12697089, -13118820, -16517902, 9768698, -2533218}, + }, + { + FieldElement{-24719459, 1894651, -287698, -4704085, 15348719, -8156530, 32767513, 12765450, 4940095, 10678226}, + FieldElement{18860224, 15980149, -18987240, -1562570, -26233012, -11071856, -7843882, 13944024, -24372348, 16582019}, + FieldElement{-15504260, 4970268, -29893044, 4175593, 
-20993212, -2199756, -11704054, 15444560, -11003761, 7989037}, + }, + { + FieldElement{31490452, 5568061, -2412803, 2182383, -32336847, 4531686, -32078269, 6200206, -19686113, -14800171}, + FieldElement{-17308668, -15879940, -31522777, -2831, -32887382, 16375549, 8680158, -16371713, 28550068, -6857132}, + FieldElement{-28126887, -5688091, 16837845, -1820458, -6850681, 12700016, -30039981, 4364038, 1155602, 5988841}, + }, + { + FieldElement{21890435, -13272907, -12624011, 12154349, -7831873, 15300496, 23148983, -4470481, 24618407, 8283181}, + FieldElement{-33136107, -10512751, 9975416, 6841041, -31559793, 16356536, 3070187, -7025928, 1466169, 10740210}, + FieldElement{-1509399, -15488185, -13503385, -10655916, 32799044, 909394, -13938903, -5779719, -32164649, -15327040}, + }, + { + FieldElement{3960823, -14267803, -28026090, -15918051, -19404858, 13146868, 15567327, 951507, -3260321, -573935}, + FieldElement{24740841, 5052253, -30094131, 8961361, 25877428, 6165135, -24368180, 14397372, -7380369, -6144105}, + FieldElement{-28888365, 3510803, -28103278, -1158478, -11238128, -10631454, -15441463, -14453128, -1625486, -6494814}, + }, + }, + { + { + FieldElement{793299, -9230478, 8836302, -6235707, -27360908, -2369593, 33152843, -4885251, -9906200, -621852}, + FieldElement{5666233, 525582, 20782575, -8038419, -24538499, 14657740, 16099374, 1468826, -6171428, -15186581}, + FieldElement{-4859255, -3779343, -2917758, -6748019, 7778750, 11688288, -30404353, -9871238, -1558923, -9863646}, + }, + { + FieldElement{10896332, -7719704, 824275, 472601, -19460308, 3009587, 25248958, 14783338, -30581476, -15757844}, + FieldElement{10566929, 12612572, -31944212, 11118703, -12633376, 12362879, 21752402, 8822496, 24003793, 14264025}, + FieldElement{27713862, -7355973, -11008240, 9227530, 27050101, 2504721, 23886875, -13117525, 13958495, -5732453}, + }, + { + FieldElement{-23481610, 4867226, -27247128, 3900521, 29838369, -8212291, -31889399, -10041781, 7340521, -15410068}, + FieldElement{4646514, -8011124, -22766023, -11532654, 23184553, 8566613, 31366726, -1381061, -15066784, -10375192}, + FieldElement{-17270517, 12723032, -16993061, 14878794, 21619651, -6197576, 27584817, 3093888, -8843694, 3849921}, + }, + { + FieldElement{-9064912, 2103172, 25561640, -15125738, -5239824, 9582958, 32477045, -9017955, 5002294, -15550259}, + FieldElement{-12057553, -11177906, 21115585, -13365155, 8808712, -12030708, 16489530, 13378448, -25845716, 12741426}, + FieldElement{-5946367, 10645103, -30911586, 15390284, -3286982, -7118677, 24306472, 15852464, 28834118, -7646072}, + }, + { + FieldElement{-17335748, -9107057, -24531279, 9434953, -8472084, -583362, -13090771, 455841, 20461858, 5491305}, + FieldElement{13669248, -16095482, -12481974, -10203039, -14569770, -11893198, -24995986, 11293807, -28588204, -9421832}, + FieldElement{28497928, 6272777, -33022994, 14470570, 8906179, -1225630, 18504674, -14165166, 29867745, -8795943}, + }, + { + FieldElement{-16207023, 13517196, -27799630, -13697798, 24009064, -6373891, -6367600, -13175392, 22853429, -4012011}, + FieldElement{24191378, 16712145, -13931797, 15217831, 14542237, 1646131, 18603514, -11037887, 12876623, -2112447}, + FieldElement{17902668, 4518229, -411702, -2829247, 26878217, 5258055, -12860753, 608397, 16031844, 3723494}, + }, + { + FieldElement{-28632773, 12763728, -20446446, 7577504, 33001348, -13017745, 17558842, -7872890, 23896954, -4314245}, + FieldElement{-20005381, -12011952, 31520464, 605201, 2543521, 5991821, -2945064, 7229064, -9919646, -8826859}, + 
FieldElement{28816045, 298879, -28165016, -15920938, 19000928, -1665890, -12680833, -2949325, -18051778, -2082915}, + }, + { + FieldElement{16000882, -344896, 3493092, -11447198, -29504595, -13159789, 12577740, 16041268, -19715240, 7847707}, + FieldElement{10151868, 10572098, 27312476, 7922682, 14825339, 4723128, -32855931, -6519018, -10020567, 3852848}, + FieldElement{-11430470, 15697596, -21121557, -4420647, 5386314, 15063598, 16514493, -15932110, 29330899, -15076224}, + }, + }, + { + { + FieldElement{-25499735, -4378794, -15222908, -6901211, 16615731, 2051784, 3303702, 15490, -27548796, 12314391}, + FieldElement{15683520, -6003043, 18109120, -9980648, 15337968, -5997823, -16717435, 15921866, 16103996, -3731215}, + FieldElement{-23169824, -10781249, 13588192, -1628807, -3798557, -1074929, -19273607, 5402699, -29815713, -9841101}, + }, + { + FieldElement{23190676, 2384583, -32714340, 3462154, -29903655, -1529132, -11266856, 8911517, -25205859, 2739713}, + FieldElement{21374101, -3554250, -33524649, 9874411, 15377179, 11831242, -33529904, 6134907, 4931255, 11987849}, + FieldElement{-7732, -2978858, -16223486, 7277597, 105524, -322051, -31480539, 13861388, -30076310, 10117930}, + }, + { + FieldElement{-29501170, -10744872, -26163768, 13051539, -25625564, 5089643, -6325503, 6704079, 12890019, 15728940}, + FieldElement{-21972360, -11771379, -951059, -4418840, 14704840, 2695116, 903376, -10428139, 12885167, 8311031}, + FieldElement{-17516482, 5352194, 10384213, -13811658, 7506451, 13453191, 26423267, 4384730, 1888765, -5435404}, + }, + { + FieldElement{-25817338, -3107312, -13494599, -3182506, 30896459, -13921729, -32251644, -12707869, -19464434, -3340243}, + FieldElement{-23607977, -2665774, -526091, 4651136, 5765089, 4618330, 6092245, 14845197, 17151279, -9854116}, + FieldElement{-24830458, -12733720, -15165978, 10367250, -29530908, -265356, 22825805, -7087279, -16866484, 16176525}, + }, + { + FieldElement{-23583256, 6564961, 20063689, 3798228, -4740178, 7359225, 2006182, -10363426, -28746253, -10197509}, + FieldElement{-10626600, -4486402, -13320562, -5125317, 3432136, -6393229, 23632037, -1940610, 32808310, 1099883}, + FieldElement{15030977, 5768825, -27451236, -2887299, -6427378, -15361371, -15277896, -6809350, 2051441, -15225865}, + }, + { + FieldElement{-3362323, -7239372, 7517890, 9824992, 23555850, 295369, 5148398, -14154188, -22686354, 16633660}, + FieldElement{4577086, -16752288, 13249841, -15304328, 19958763, -14537274, 18559670, -10759549, 8402478, -9864273}, + FieldElement{-28406330, -1051581, -26790155, -907698, -17212414, -11030789, 9453451, -14980072, 17983010, 9967138}, + }, + { + FieldElement{-25762494, 6524722, 26585488, 9969270, 24709298, 1220360, -1677990, 7806337, 17507396, 3651560}, + FieldElement{-10420457, -4118111, 14584639, 15971087, -15768321, 8861010, 26556809, -5574557, -18553322, -11357135}, + FieldElement{2839101, 14284142, 4029895, 3472686, 14402957, 12689363, -26642121, 8459447, -5605463, -7621941}, + }, + { + FieldElement{-4839289, -3535444, 9744961, 2871048, 25113978, 3187018, -25110813, -849066, 17258084, -7977739}, + FieldElement{18164541, -10595176, -17154882, -1542417, 19237078, -9745295, 23357533, -15217008, 26908270, 12150756}, + FieldElement{-30264870, -7647865, 5112249, -7036672, -1499807, -6974257, 43168, -5537701, -32302074, 16215819}, + }, + }, + { + { + FieldElement{-6898905, 9824394, -12304779, -4401089, -31397141, -6276835, 32574489, 12532905, -7503072, -8675347}, + FieldElement{-27343522, -16515468, -27151524, -10722951, 946346, 16291093, 
254968, 7168080, 21676107, -1943028}, + FieldElement{21260961, -8424752, -16831886, -11920822, -23677961, 3968121, -3651949, -6215466, -3556191, -7913075}, + }, + { + FieldElement{16544754, 13250366, -16804428, 15546242, -4583003, 12757258, -2462308, -8680336, -18907032, -9662799}, + FieldElement{-2415239, -15577728, 18312303, 4964443, -15272530, -12653564, 26820651, 16690659, 25459437, -4564609}, + FieldElement{-25144690, 11425020, 28423002, -11020557, -6144921, -15826224, 9142795, -2391602, -6432418, -1644817}, + }, + { + FieldElement{-23104652, 6253476, 16964147, -3768872, -25113972, -12296437, -27457225, -16344658, 6335692, 7249989}, + FieldElement{-30333227, 13979675, 7503222, -12368314, -11956721, -4621693, -30272269, 2682242, 25993170, -12478523}, + FieldElement{4364628, 5930691, 32304656, -10044554, -8054781, 15091131, 22857016, -10598955, 31820368, 15075278}, + }, + { + FieldElement{31879134, -8918693, 17258761, 90626, -8041836, -4917709, 24162788, -9650886, -17970238, 12833045}, + FieldElement{19073683, 14851414, -24403169, -11860168, 7625278, 11091125, -19619190, 2074449, -9413939, 14905377}, + FieldElement{24483667, -11935567, -2518866, -11547418, -1553130, 15355506, -25282080, 9253129, 27628530, -7555480}, + }, + { + FieldElement{17597607, 8340603, 19355617, 552187, 26198470, -3176583, 4593324, -9157582, -14110875, 15297016}, + FieldElement{510886, 14337390, -31785257, 16638632, 6328095, 2713355, -20217417, -11864220, 8683221, 2921426}, + FieldElement{18606791, 11874196, 27155355, -5281482, -24031742, 6265446, -25178240, -1278924, 4674690, 13890525}, + }, + { + FieldElement{13609624, 13069022, -27372361, -13055908, 24360586, 9592974, 14977157, 9835105, 4389687, 288396}, + FieldElement{9922506, -519394, 13613107, 5883594, -18758345, -434263, -12304062, 8317628, 23388070, 16052080}, + FieldElement{12720016, 11937594, -31970060, -5028689, 26900120, 8561328, -20155687, -11632979, -14754271, -10812892}, + }, + { + FieldElement{15961858, 14150409, 26716931, -665832, -22794328, 13603569, 11829573, 7467844, -28822128, 929275}, + FieldElement{11038231, -11582396, -27310482, -7316562, -10498527, -16307831, -23479533, -9371869, -21393143, 2465074}, + FieldElement{20017163, -4323226, 27915242, 1529148, 12396362, 15675764, 13817261, -9658066, 2463391, -4622140}, + }, + { + FieldElement{-16358878, -12663911, -12065183, 4996454, -1256422, 1073572, 9583558, 12851107, 4003896, 12673717}, + FieldElement{-1731589, -15155870, -3262930, 16143082, 19294135, 13385325, 14741514, -9103726, 7903886, 2348101}, + FieldElement{24536016, -16515207, 12715592, -3862155, 1511293, 10047386, -3842346, -7129159, -28377538, 10048127}, + }, + }, + { + { + FieldElement{-12622226, -6204820, 30718825, 2591312, -10617028, 12192840, 18873298, -7297090, -32297756, 15221632}, + FieldElement{-26478122, -11103864, 11546244, -1852483, 9180880, 7656409, -21343950, 2095755, 29769758, 6593415}, + FieldElement{-31994208, -2907461, 4176912, 3264766, 12538965, -868111, 26312345, -6118678, 30958054, 8292160}, + }, + { + FieldElement{31429822, -13959116, 29173532, 15632448, 12174511, -2760094, 32808831, 3977186, 26143136, -3148876}, + FieldElement{22648901, 1402143, -22799984, 13746059, 7936347, 365344, -8668633, -1674433, -3758243, -2304625}, + FieldElement{-15491917, 8012313, -2514730, -12702462, -23965846, -10254029, -1612713, -1535569, -16664475, 8194478}, + }, + { + FieldElement{27338066, -7507420, -7414224, 10140405, -19026427, -6589889, 27277191, 8855376, 28572286, 3005164}, + FieldElement{26287124, 4821776, 25476601, 
-4145903, -3764513, -15788984, -18008582, 1182479, -26094821, -13079595}, + FieldElement{-7171154, 3178080, 23970071, 6201893, -17195577, -4489192, -21876275, -13982627, 32208683, -1198248}, + }, + { + FieldElement{-16657702, 2817643, -10286362, 14811298, 6024667, 13349505, -27315504, -10497842, -27672585, -11539858}, + FieldElement{15941029, -9405932, -21367050, 8062055, 31876073, -238629, -15278393, -1444429, 15397331, -4130193}, + FieldElement{8934485, -13485467, -23286397, -13423241, -32446090, 14047986, 31170398, -1441021, -27505566, 15087184}, + }, + { + FieldElement{-18357243, -2156491, 24524913, -16677868, 15520427, -6360776, -15502406, 11461896, 16788528, -5868942}, + FieldElement{-1947386, 16013773, 21750665, 3714552, -17401782, -16055433, -3770287, -10323320, 31322514, -11615635}, + FieldElement{21426655, -5650218, -13648287, -5347537, -28812189, -4920970, -18275391, -14621414, 13040862, -12112948}, + }, + { + FieldElement{11293895, 12478086, -27136401, 15083750, -29307421, 14748872, 14555558, -13417103, 1613711, 4896935}, + FieldElement{-25894883, 15323294, -8489791, -8057900, 25967126, -13425460, 2825960, -4897045, -23971776, -11267415}, + FieldElement{-15924766, -5229880, -17443532, 6410664, 3622847, 10243618, 20615400, 12405433, -23753030, -8436416}, + }, + { + FieldElement{-7091295, 12556208, -20191352, 9025187, -17072479, 4333801, 4378436, 2432030, 23097949, -566018}, + FieldElement{4565804, -16025654, 20084412, -7842817, 1724999, 189254, 24767264, 10103221, -18512313, 2424778}, + FieldElement{366633, -11976806, 8173090, -6890119, 30788634, 5745705, -7168678, 1344109, -3642553, 12412659}, + }, + { + FieldElement{-24001791, 7690286, 14929416, -168257, -32210835, -13412986, 24162697, -15326504, -3141501, 11179385}, + FieldElement{18289522, -14724954, 8056945, 16430056, -21729724, 7842514, -6001441, -1486897, -18684645, -11443503}, + FieldElement{476239, 6601091, -6152790, -9723375, 17503545, -4863900, 27672959, 13403813, 11052904, 5219329}, + }, + }, + { + { + FieldElement{20678546, -8375738, -32671898, 8849123, -5009758, 14574752, 31186971, -3973730, 9014762, -8579056}, + FieldElement{-13644050, -10350239, -15962508, 5075808, -1514661, -11534600, -33102500, 9160280, 8473550, -3256838}, + FieldElement{24900749, 14435722, 17209120, -15292541, -22592275, 9878983, -7689309, -16335821, -24568481, 11788948}, + }, + { + FieldElement{-3118155, -11395194, -13802089, 14797441, 9652448, -6845904, -20037437, 10410733, -24568470, -1458691}, + FieldElement{-15659161, 16736706, -22467150, 10215878, -9097177, 7563911, 11871841, -12505194, -18513325, 8464118}, + FieldElement{-23400612, 8348507, -14585951, -861714, -3950205, -6373419, 14325289, 8628612, 33313881, -8370517}, + }, + { + FieldElement{-20186973, -4967935, 22367356, 5271547, -1097117, -4788838, -24805667, -10236854, -8940735, -5818269}, + FieldElement{-6948785, -1795212, -32625683, -16021179, 32635414, -7374245, 15989197, -12838188, 28358192, -4253904}, + FieldElement{-23561781, -2799059, -32351682, -1661963, -9147719, 10429267, -16637684, 4072016, -5351664, 5596589}, + }, + { + FieldElement{-28236598, -3390048, 12312896, 6213178, 3117142, 16078565, 29266239, 2557221, 1768301, 15373193}, + FieldElement{-7243358, -3246960, -4593467, -7553353, -127927, -912245, -1090902, -4504991, -24660491, 3442910}, + FieldElement{-30210571, 5124043, 14181784, 8197961, 18964734, -11939093, 22597931, 7176455, -18585478, 13365930}, + }, + { + FieldElement{-7877390, -1499958, 8324673, 4690079, 6261860, 890446, 24538107, -8570186, -9689599, 
-3031667}, + FieldElement{25008904, -10771599, -4305031, -9638010, 16265036, 15721635, 683793, -11823784, 15723479, -15163481}, + FieldElement{-9660625, 12374379, -27006999, -7026148, -7724114, -12314514, 11879682, 5400171, 519526, -1235876}, + }, + { + FieldElement{22258397, -16332233, -7869817, 14613016, -22520255, -2950923, -20353881, 7315967, 16648397, 7605640}, + FieldElement{-8081308, -8464597, -8223311, 9719710, 19259459, -15348212, 23994942, -5281555, -9468848, 4763278}, + FieldElement{-21699244, 9220969, -15730624, 1084137, -25476107, -2852390, 31088447, -7764523, -11356529, 728112}, + }, + { + FieldElement{26047220, -11751471, -6900323, -16521798, 24092068, 9158119, -4273545, -12555558, -29365436, -5498272}, + FieldElement{17510331, -322857, 5854289, 8403524, 17133918, -3112612, -28111007, 12327945, 10750447, 10014012}, + FieldElement{-10312768, 3936952, 9156313, -8897683, 16498692, -994647, -27481051, -666732, 3424691, 7540221}, + }, + { + FieldElement{30322361, -6964110, 11361005, -4143317, 7433304, 4989748, -7071422, -16317219, -9244265, 15258046}, + FieldElement{13054562, -2779497, 19155474, 469045, -12482797, 4566042, 5631406, 2711395, 1062915, -5136345}, + FieldElement{-19240248, -11254599, -29509029, -7499965, -5835763, 13005411, -6066489, 12194497, 32960380, 1459310}, + }, + }, + { + { + FieldElement{19852034, 7027924, 23669353, 10020366, 8586503, -6657907, 394197, -6101885, 18638003, -11174937}, + FieldElement{31395534, 15098109, 26581030, 8030562, -16527914, -5007134, 9012486, -7584354, -6643087, -5442636}, + FieldElement{-9192165, -2347377, -1997099, 4529534, 25766844, 607986, -13222, 9677543, -32294889, -6456008}, + }, + { + FieldElement{-2444496, -149937, 29348902, 8186665, 1873760, 12489863, -30934579, -7839692, -7852844, -8138429}, + FieldElement{-15236356, -15433509, 7766470, 746860, 26346930, -10221762, -27333451, 10754588, -9431476, 5203576}, + FieldElement{31834314, 14135496, -770007, 5159118, 20917671, -16768096, -7467973, -7337524, 31809243, 7347066}, + }, + { + FieldElement{-9606723, -11874240, 20414459, 13033986, 13716524, -11691881, 19797970, -12211255, 15192876, -2087490}, + FieldElement{-12663563, -2181719, 1168162, -3804809, 26747877, -14138091, 10609330, 12694420, 33473243, -13382104}, + FieldElement{33184999, 11180355, 15832085, -11385430, -1633671, 225884, 15089336, -11023903, -6135662, 14480053}, + }, + { + FieldElement{31308717, -5619998, 31030840, -1897099, 15674547, -6582883, 5496208, 13685227, 27595050, 8737275}, + FieldElement{-20318852, -15150239, 10933843, -16178022, 8335352, -7546022, -31008351, -12610604, 26498114, 66511}, + FieldElement{22644454, -8761729, -16671776, 4884562, -3105614, -13559366, 30540766, -4286747, -13327787, -7515095}, + }, + { + FieldElement{-28017847, 9834845, 18617207, -2681312, -3401956, -13307506, 8205540, 13585437, -17127465, 15115439}, + FieldElement{23711543, -672915, 31206561, -8362711, 6164647, -9709987, -33535882, -1426096, 8236921, 16492939}, + FieldElement{-23910559, -13515526, -26299483, -4503841, 25005590, -7687270, 19574902, 10071562, 6708380, -6222424}, + }, + { + FieldElement{2101391, -4930054, 19702731, 2367575, -15427167, 1047675, 5301017, 9328700, 29955601, -11678310}, + FieldElement{3096359, 9271816, -21620864, -15521844, -14847996, -7592937, -25892142, -12635595, -9917575, 6216608}, + FieldElement{-32615849, 338663, -25195611, 2510422, -29213566, -13820213, 24822830, -6146567, -26767480, 7525079}, + }, + { + FieldElement{-23066649, -13985623, 16133487, -7896178, -3389565, 778788, -910336, -2782495, 
-19386633, 11994101}, + FieldElement{21691500, -13624626, -641331, -14367021, 3285881, -3483596, -25064666, 9718258, -7477437, 13381418}, + FieldElement{18445390, -4202236, 14979846, 11622458, -1727110, -3582980, 23111648, -6375247, 28535282, 15779576}, + }, + { + FieldElement{30098053, 3089662, -9234387, 16662135, -21306940, 11308411, -14068454, 12021730, 9955285, -16303356}, + FieldElement{9734894, -14576830, -7473633, -9138735, 2060392, 11313496, -18426029, 9924399, 20194861, 13380996}, + FieldElement{-26378102, -7965207, -22167821, 15789297, -18055342, -6168792, -1984914, 15707771, 26342023, 10146099}, + }, + }, + { + { + FieldElement{-26016874, -219943, 21339191, -41388, 19745256, -2878700, -29637280, 2227040, 21612326, -545728}, + FieldElement{-13077387, 1184228, 23562814, -5970442, -20351244, -6348714, 25764461, 12243797, -20856566, 11649658}, + FieldElement{-10031494, 11262626, 27384172, 2271902, 26947504, -15997771, 39944, 6114064, 33514190, 2333242}, + }, + { + FieldElement{-21433588, -12421821, 8119782, 7219913, -21830522, -9016134, -6679750, -12670638, 24350578, -13450001}, + FieldElement{-4116307, -11271533, -23886186, 4843615, -30088339, 690623, -31536088, -10406836, 8317860, 12352766}, + FieldElement{18200138, -14475911, -33087759, -2696619, -23702521, -9102511, -23552096, -2287550, 20712163, 6719373}, + }, + { + FieldElement{26656208, 6075253, -7858556, 1886072, -28344043, 4262326, 11117530, -3763210, 26224235, -3297458}, + FieldElement{-17168938, -14854097, -3395676, -16369877, -19954045, 14050420, 21728352, 9493610, 18620611, -16428628}, + FieldElement{-13323321, 13325349, 11432106, 5964811, 18609221, 6062965, -5269471, -9725556, -30701573, -16479657}, + }, + { + FieldElement{-23860538, -11233159, 26961357, 1640861, -32413112, -16737940, 12248509, -5240639, 13735342, 1934062}, + FieldElement{25089769, 6742589, 17081145, -13406266, 21909293, -16067981, -15136294, -3765346, -21277997, 5473616}, + FieldElement{31883677, -7961101, 1083432, -11572403, 22828471, 13290673, -7125085, 12469656, 29111212, -5451014}, + }, + { + FieldElement{24244947, -15050407, -26262976, 2791540, -14997599, 16666678, 24367466, 6388839, -10295587, 452383}, + FieldElement{-25640782, -3417841, 5217916, 16224624, 19987036, -4082269, -24236251, -5915248, 15766062, 8407814}, + FieldElement{-20406999, 13990231, 15495425, 16395525, 5377168, 15166495, -8917023, -4388953, -8067909, 2276718}, + }, + { + FieldElement{30157918, 12924066, -17712050, 9245753, 19895028, 3368142, -23827587, 5096219, 22740376, -7303417}, + FieldElement{2041139, -14256350, 7783687, 13876377, -25946985, -13352459, 24051124, 13742383, -15637599, 13295222}, + FieldElement{33338237, -8505733, 12532113, 7977527, 9106186, -1715251, -17720195, -4612972, -4451357, -14669444}, + }, + { + FieldElement{-20045281, 5454097, -14346548, 6447146, 28862071, 1883651, -2469266, -4141880, 7770569, 9620597}, + FieldElement{23208068, 7979712, 33071466, 8149229, 1758231, -10834995, 30945528, -1694323, -33502340, -14767970}, + FieldElement{1439958, -16270480, -1079989, -793782, 4625402, 10647766, -5043801, 1220118, 30494170, -11440799}, + }, + { + FieldElement{-5037580, -13028295, -2970559, -3061767, 15640974, -6701666, -26739026, 926050, -1684339, -13333647}, + FieldElement{13908495, -3549272, 30919928, -6273825, -21521863, 7989039, 9021034, 9078865, 3353509, 4033511}, + FieldElement{-29663431, -15113610, 32259991, -344482, 24295849, -12912123, 23161163, 8839127, 27485041, 7356032}, + }, + }, + { + { + FieldElement{9661027, 705443, 11980065, -5370154, 
-1628543, 14661173, -6346142, 2625015, 28431036, -16771834}, + FieldElement{-23839233, -8311415, -25945511, 7480958, -17681669, -8354183, -22545972, 14150565, 15970762, 4099461}, + FieldElement{29262576, 16756590, 26350592, -8793563, 8529671, -11208050, 13617293, -9937143, 11465739, 8317062}, + }, + { + FieldElement{-25493081, -6962928, 32500200, -9419051, -23038724, -2302222, 14898637, 3848455, 20969334, -5157516}, + FieldElement{-20384450, -14347713, -18336405, 13884722, -33039454, 2842114, -21610826, -3649888, 11177095, 14989547}, + FieldElement{-24496721, -11716016, 16959896, 2278463, 12066309, 10137771, 13515641, 2581286, -28487508, 9930240}, + }, + { + FieldElement{-17751622, -2097826, 16544300, -13009300, -15914807, -14949081, 18345767, -13403753, 16291481, -5314038}, + FieldElement{-33229194, 2553288, 32678213, 9875984, 8534129, 6889387, -9676774, 6957617, 4368891, 9788741}, + FieldElement{16660756, 7281060, -10830758, 12911820, 20108584, -8101676, -21722536, -8613148, 16250552, -11111103}, + }, + { + FieldElement{-19765507, 2390526, -16551031, 14161980, 1905286, 6414907, 4689584, 10604807, -30190403, 4782747}, + FieldElement{-1354539, 14736941, -7367442, -13292886, 7710542, -14155590, -9981571, 4383045, 22546403, 437323}, + FieldElement{31665577, -12180464, -16186830, 1491339, -18368625, 3294682, 27343084, 2786261, -30633590, -14097016}, + }, + { + FieldElement{-14467279, -683715, -33374107, 7448552, 19294360, 14334329, -19690631, 2355319, -19284671, -6114373}, + FieldElement{15121312, -15796162, 6377020, -6031361, -10798111, -12957845, 18952177, 15496498, -29380133, 11754228}, + FieldElement{-2637277, -13483075, 8488727, -14303896, 12728761, -1622493, 7141596, 11724556, 22761615, -10134141}, + }, + { + FieldElement{16918416, 11729663, -18083579, 3022987, -31015732, -13339659, -28741185, -12227393, 32851222, 11717399}, + FieldElement{11166634, 7338049, -6722523, 4531520, -29468672, -7302055, 31474879, 3483633, -1193175, -4030831}, + FieldElement{-185635, 9921305, 31456609, -13536438, -12013818, 13348923, 33142652, 6546660, -19985279, -3948376}, + }, + { + FieldElement{-32460596, 11266712, -11197107, -7899103, 31703694, 3855903, -8537131, -12833048, -30772034, -15486313}, + FieldElement{-18006477, 12709068, 3991746, -6479188, -21491523, -10550425, -31135347, -16049879, 10928917, 3011958}, + FieldElement{-6957757, -15594337, 31696059, 334240, 29576716, 14796075, -30831056, -12805180, 18008031, 10258577}, + }, + { + FieldElement{-22448644, 15655569, 7018479, -4410003, -30314266, -1201591, -1853465, 1367120, 25127874, 6671743}, + FieldElement{29701166, -14373934, -10878120, 9279288, -17568, 13127210, 21382910, 11042292, 25838796, 4642684}, + FieldElement{-20430234, 14955537, -24126347, 8124619, -5369288, -5990470, 30468147, -13900640, 18423289, 4177476}, + }, + }, +} diff --git a/vendor/src/github.com/agl/ed25519/edwards25519/edwards25519.go b/vendor/src/github.com/agl/ed25519/edwards25519/edwards25519.go new file mode 100644 index 0000000000..184b4a8596 --- /dev/null +++ b/vendor/src/github.com/agl/ed25519/edwards25519/edwards25519.go @@ -0,0 +1,2127 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package edwards25519 implements operations in GF(2**255-19) and on an +// Edwards curve that is isomorphic to curve25519. See +// http://ed25519.cr.yp.to/. 
+package edwards25519
+
+// This code is a port of the public domain, "ref10" implementation of ed25519
+// from SUPERCOP.
+
+// FieldElement represents an element of the field GF(2^255 - 19). An element
+// t, entries t[0]...t[9], represents the integer t[0]+2^26 t[1]+2^51 t[2]+2^77
+// t[3]+2^102 t[4]+...+2^230 t[9]. Bounds on each t[i] vary depending on
+// context.
+type FieldElement [10]int32
+
+func FeZero(fe *FieldElement) {
+	for i := range fe {
+		fe[i] = 0
+	}
+}
+
+func FeOne(fe *FieldElement) {
+	FeZero(fe)
+	fe[0] = 1
+}
+
+func FeAdd(dst, a, b *FieldElement) {
+	for i := range dst {
+		dst[i] = a[i] + b[i]
+	}
+}
+
+func FeSub(dst, a, b *FieldElement) {
+	for i := range dst {
+		dst[i] = a[i] - b[i]
+	}
+}
+
+func FeCopy(dst, src *FieldElement) {
+	for i := range dst {
+		dst[i] = src[i]
+	}
+}
+
+// Replace (f,g) with (g,g) if b == 1;
+// replace (f,g) with (f,g) if b == 0.
+//
+// Preconditions: b in {0,1}.
+func FeCMove(f, g *FieldElement, b int32) {
+	var x FieldElement
+	b = -b
+	for i := range x {
+		x[i] = b & (f[i] ^ g[i])
+	}
+
+	for i := range f {
+		f[i] ^= x[i]
+	}
+}
+
+func load3(in []byte) int64 {
+	var r int64
+	r = int64(in[0])
+	r |= int64(in[1]) << 8
+	r |= int64(in[2]) << 16
+	return r
+}
+
+func load4(in []byte) int64 {
+	var r int64
+	r = int64(in[0])
+	r |= int64(in[1]) << 8
+	r |= int64(in[2]) << 16
+	r |= int64(in[3]) << 24
+	return r
+}
+
+func FeFromBytes(dst *FieldElement, src *[32]byte) {
+	h0 := load4(src[:])
+	h1 := load3(src[4:]) << 6
+	h2 := load3(src[7:]) << 5
+	h3 := load3(src[10:]) << 3
+	h4 := load3(src[13:]) << 2
+	h5 := load4(src[16:])
+	h6 := load3(src[20:]) << 7
+	h7 := load3(src[23:]) << 5
+	h8 := load3(src[26:]) << 4
+	h9 := (load3(src[29:]) & 8388607) << 2
+
+	var carry [10]int64
+	carry[9] = (h9 + 1<<24) >> 25
+	h0 += carry[9] * 19
+	h9 -= carry[9] << 25
+	carry[1] = (h1 + 1<<24) >> 25
+	h2 += carry[1]
+	h1 -= carry[1] << 25
+	carry[3] = (h3 + 1<<24) >> 25
+	h4 += carry[3]
+	h3 -= carry[3] << 25
+	carry[5] = (h5 + 1<<24) >> 25
+	h6 += carry[5]
+	h5 -= carry[5] << 25
+	carry[7] = (h7 + 1<<24) >> 25
+	h8 += carry[7]
+	h7 -= carry[7] << 25
+
+	carry[0] = (h0 + 1<<25) >> 26
+	h1 += carry[0]
+	h0 -= carry[0] << 26
+	carry[2] = (h2 + 1<<25) >> 26
+	h3 += carry[2]
+	h2 -= carry[2] << 26
+	carry[4] = (h4 + 1<<25) >> 26
+	h5 += carry[4]
+	h4 -= carry[4] << 26
+	carry[6] = (h6 + 1<<25) >> 26
+	h7 += carry[6]
+	h6 -= carry[6] << 26
+	carry[8] = (h8 + 1<<25) >> 26
+	h9 += carry[8]
+	h8 -= carry[8] << 26
+
+	dst[0] = int32(h0)
+	dst[1] = int32(h1)
+	dst[2] = int32(h2)
+	dst[3] = int32(h3)
+	dst[4] = int32(h4)
+	dst[5] = int32(h5)
+	dst[6] = int32(h6)
+	dst[7] = int32(h7)
+	dst[8] = int32(h8)
+	dst[9] = int32(h9)
+}
+
+// FeToBytes marshals h to s.
+// Preconditions:
+//   |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
+//
+// Write p=2^255-19; q=floor(h/p).
+// Basic claim: q = floor(2^(-255)(h + 19 2^(-25)h9 + 2^(-1))).
+//
+// Proof:
+//   Have |h|<=p so |q|<=1 so |19^2 2^(-255) q|<1/4.
+//   Also have |h-2^230 h9|<2^230 so |19 2^(-255)(h-2^230 h9)|<1/4.
+//
+//   Write y=2^(-1)-19^2 2^(-255)q-19 2^(-255)(h-2^230 h9).
+//   Then 0<y<1.
+//
+//   Write r=h-pq.
+//   Have 0<=r<=p-1=2^255-20.
+//   Thus 0<=r+19(2^-255)r<r+19(2^-255)2^255<=2^255-1.
+//
+//   Write x=r+19(2^-255)r+y.
+//   Then 0<x<2^255 so floor(2^(-255)x) = 0 so floor(q+2^(-255)x) = q.
+//
+//   Have q+2^(-255)x = 2^(-255)(h + 19 2^(-25)h9 + 2^(-1))
+//   so floor(2^(-255)(h + 19 2^(-25)h9 + 2^(-1))) = q.
+func FeToBytes(s *[32]byte, h *FieldElement) {
+	var carry [10]int32
+
+	q := (19*h[9] + (1 << 24)) >> 25
+	q = (h[0] + q) >> 26
+	q = (h[1] + q) >> 25
+	q = (h[2] + q) >> 26
+	q = (h[3] + q) >> 25
+	q = (h[4] + q) >> 26
+	q = (h[5] + q) >> 25
+	q = (h[6] + q) >> 26
+	q = (h[7] + q) >> 25
+	q = (h[8] + q) >> 26
+	q = (h[9] + q) >> 25
+
+	// Goal: Output h-(2^255-19)q, which is between 0 and 2^255-20.
+	h[0] += 19 * q
+	// Goal: Output h-2^255 q, which is between 0 and 2^255-20.
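+	// Editorial note, not part of the upstream ref10 port: the carry chain
+	// that follows renormalizes the limbs after the 19*q adjustment above,
+	// bringing each even-indexed limb below 2^26 and each odd-indexed limb
+	// below 2^25 and discarding the final carry out of h[9], so that the
+	// byte-packing step at the end can read the reduced 255-bit value
+	// directly from h[0]..h[9].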
+ + carry[0] = h[0] >> 26 + h[1] += carry[0] + h[0] -= carry[0] << 26 + carry[1] = h[1] >> 25 + h[2] += carry[1] + h[1] -= carry[1] << 25 + carry[2] = h[2] >> 26 + h[3] += carry[2] + h[2] -= carry[2] << 26 + carry[3] = h[3] >> 25 + h[4] += carry[3] + h[3] -= carry[3] << 25 + carry[4] = h[4] >> 26 + h[5] += carry[4] + h[4] -= carry[4] << 26 + carry[5] = h[5] >> 25 + h[6] += carry[5] + h[5] -= carry[5] << 25 + carry[6] = h[6] >> 26 + h[7] += carry[6] + h[6] -= carry[6] << 26 + carry[7] = h[7] >> 25 + h[8] += carry[7] + h[7] -= carry[7] << 25 + carry[8] = h[8] >> 26 + h[9] += carry[8] + h[8] -= carry[8] << 26 + carry[9] = h[9] >> 25 + h[9] -= carry[9] << 25 + // h10 = carry9 + + // Goal: Output h[0]+...+2^255 h10-2^255 q, which is between 0 and 2^255-20. + // Have h[0]+...+2^230 h[9] between 0 and 2^255-1; + // evidently 2^255 h10-2^255 q = 0. + // Goal: Output h[0]+...+2^230 h[9]. + + s[0] = byte(h[0] >> 0) + s[1] = byte(h[0] >> 8) + s[2] = byte(h[0] >> 16) + s[3] = byte((h[0] >> 24) | (h[1] << 2)) + s[4] = byte(h[1] >> 6) + s[5] = byte(h[1] >> 14) + s[6] = byte((h[1] >> 22) | (h[2] << 3)) + s[7] = byte(h[2] >> 5) + s[8] = byte(h[2] >> 13) + s[9] = byte((h[2] >> 21) | (h[3] << 5)) + s[10] = byte(h[3] >> 3) + s[11] = byte(h[3] >> 11) + s[12] = byte((h[3] >> 19) | (h[4] << 6)) + s[13] = byte(h[4] >> 2) + s[14] = byte(h[4] >> 10) + s[15] = byte(h[4] >> 18) + s[16] = byte(h[5] >> 0) + s[17] = byte(h[5] >> 8) + s[18] = byte(h[5] >> 16) + s[19] = byte((h[5] >> 24) | (h[6] << 1)) + s[20] = byte(h[6] >> 7) + s[21] = byte(h[6] >> 15) + s[22] = byte((h[6] >> 23) | (h[7] << 3)) + s[23] = byte(h[7] >> 5) + s[24] = byte(h[7] >> 13) + s[25] = byte((h[7] >> 21) | (h[8] << 4)) + s[26] = byte(h[8] >> 4) + s[27] = byte(h[8] >> 12) + s[28] = byte((h[8] >> 20) | (h[9] << 6)) + s[29] = byte(h[9] >> 2) + s[30] = byte(h[9] >> 10) + s[31] = byte(h[9] >> 18) +} + +func FeIsNegative(f *FieldElement) byte { + var s [32]byte + FeToBytes(&s, f) + return s[0] & 1 +} + +func FeIsNonZero(f *FieldElement) int32 { + var s [32]byte + FeToBytes(&s, f) + var x uint8 + for _, b := range s { + x |= b + } + x |= x >> 4 + x |= x >> 2 + x |= x >> 1 + return int32(x & 1) +} + +// FeNeg sets h = -f +// +// Preconditions: +// |f| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. +// +// Postconditions: +// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. +func FeNeg(h, f *FieldElement) { + for i := range h { + h[i] = -f[i] + } +} + +// FeMul calculates h = f * g +// Can overlap h with f or g. +// +// Preconditions: +// |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. +// |g| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. +// +// Postconditions: +// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. +// +// Notes on implementation strategy: +// +// Using schoolbook multiplication. +// Karatsuba would save a little in some cost models. +// +// Most multiplications by 2 and 19 are 32-bit precomputations; +// cheaper than 64-bit postcomputations. +// +// There is one remaining multiplication by 19 in the carry chain; +// one *19 precomputation can be merged into this, +// but the resulting data flow is considerably less clean. +// +// There are 12 carries below. +// 10 of them are 2-way parallelizable and vectorizable. +// Can get away with 11 carries, but then data flow is much deeper. +// +// With tighter constraints on inputs can squeeze carries into int32. 
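+// Editorial note, not part of the upstream ref10 port: the g1_19..g9_19 and
+// f1_2..f9_2 precomputations below come from the reduction identity
+// 2^255 = 19 (mod 2^255-19). A product term whose limb indices sum to 10 or
+// more wraps around into a low limb multiplied by 19, with an extra factor
+// of 2 when both limbs sit at odd (25-bit) positions. For example, f1*g9
+// carries weight 2^(26+230) = 2^256 = 2*19 (mod p), which is why it enters
+// h0 as f1g9_38 = (2*f1)*(19*g9).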
+func FeMul(h, f, g *FieldElement) { + f0 := f[0] + f1 := f[1] + f2 := f[2] + f3 := f[3] + f4 := f[4] + f5 := f[5] + f6 := f[6] + f7 := f[7] + f8 := f[8] + f9 := f[9] + g0 := g[0] + g1 := g[1] + g2 := g[2] + g3 := g[3] + g4 := g[4] + g5 := g[5] + g6 := g[6] + g7 := g[7] + g8 := g[8] + g9 := g[9] + g1_19 := 19 * g1 /* 1.4*2^29 */ + g2_19 := 19 * g2 /* 1.4*2^30; still ok */ + g3_19 := 19 * g3 + g4_19 := 19 * g4 + g5_19 := 19 * g5 + g6_19 := 19 * g6 + g7_19 := 19 * g7 + g8_19 := 19 * g8 + g9_19 := 19 * g9 + f1_2 := 2 * f1 + f3_2 := 2 * f3 + f5_2 := 2 * f5 + f7_2 := 2 * f7 + f9_2 := 2 * f9 + f0g0 := int64(f0) * int64(g0) + f0g1 := int64(f0) * int64(g1) + f0g2 := int64(f0) * int64(g2) + f0g3 := int64(f0) * int64(g3) + f0g4 := int64(f0) * int64(g4) + f0g5 := int64(f0) * int64(g5) + f0g6 := int64(f0) * int64(g6) + f0g7 := int64(f0) * int64(g7) + f0g8 := int64(f0) * int64(g8) + f0g9 := int64(f0) * int64(g9) + f1g0 := int64(f1) * int64(g0) + f1g1_2 := int64(f1_2) * int64(g1) + f1g2 := int64(f1) * int64(g2) + f1g3_2 := int64(f1_2) * int64(g3) + f1g4 := int64(f1) * int64(g4) + f1g5_2 := int64(f1_2) * int64(g5) + f1g6 := int64(f1) * int64(g6) + f1g7_2 := int64(f1_2) * int64(g7) + f1g8 := int64(f1) * int64(g8) + f1g9_38 := int64(f1_2) * int64(g9_19) + f2g0 := int64(f2) * int64(g0) + f2g1 := int64(f2) * int64(g1) + f2g2 := int64(f2) * int64(g2) + f2g3 := int64(f2) * int64(g3) + f2g4 := int64(f2) * int64(g4) + f2g5 := int64(f2) * int64(g5) + f2g6 := int64(f2) * int64(g6) + f2g7 := int64(f2) * int64(g7) + f2g8_19 := int64(f2) * int64(g8_19) + f2g9_19 := int64(f2) * int64(g9_19) + f3g0 := int64(f3) * int64(g0) + f3g1_2 := int64(f3_2) * int64(g1) + f3g2 := int64(f3) * int64(g2) + f3g3_2 := int64(f3_2) * int64(g3) + f3g4 := int64(f3) * int64(g4) + f3g5_2 := int64(f3_2) * int64(g5) + f3g6 := int64(f3) * int64(g6) + f3g7_38 := int64(f3_2) * int64(g7_19) + f3g8_19 := int64(f3) * int64(g8_19) + f3g9_38 := int64(f3_2) * int64(g9_19) + f4g0 := int64(f4) * int64(g0) + f4g1 := int64(f4) * int64(g1) + f4g2 := int64(f4) * int64(g2) + f4g3 := int64(f4) * int64(g3) + f4g4 := int64(f4) * int64(g4) + f4g5 := int64(f4) * int64(g5) + f4g6_19 := int64(f4) * int64(g6_19) + f4g7_19 := int64(f4) * int64(g7_19) + f4g8_19 := int64(f4) * int64(g8_19) + f4g9_19 := int64(f4) * int64(g9_19) + f5g0 := int64(f5) * int64(g0) + f5g1_2 := int64(f5_2) * int64(g1) + f5g2 := int64(f5) * int64(g2) + f5g3_2 := int64(f5_2) * int64(g3) + f5g4 := int64(f5) * int64(g4) + f5g5_38 := int64(f5_2) * int64(g5_19) + f5g6_19 := int64(f5) * int64(g6_19) + f5g7_38 := int64(f5_2) * int64(g7_19) + f5g8_19 := int64(f5) * int64(g8_19) + f5g9_38 := int64(f5_2) * int64(g9_19) + f6g0 := int64(f6) * int64(g0) + f6g1 := int64(f6) * int64(g1) + f6g2 := int64(f6) * int64(g2) + f6g3 := int64(f6) * int64(g3) + f6g4_19 := int64(f6) * int64(g4_19) + f6g5_19 := int64(f6) * int64(g5_19) + f6g6_19 := int64(f6) * int64(g6_19) + f6g7_19 := int64(f6) * int64(g7_19) + f6g8_19 := int64(f6) * int64(g8_19) + f6g9_19 := int64(f6) * int64(g9_19) + f7g0 := int64(f7) * int64(g0) + f7g1_2 := int64(f7_2) * int64(g1) + f7g2 := int64(f7) * int64(g2) + f7g3_38 := int64(f7_2) * int64(g3_19) + f7g4_19 := int64(f7) * int64(g4_19) + f7g5_38 := int64(f7_2) * int64(g5_19) + f7g6_19 := int64(f7) * int64(g6_19) + f7g7_38 := int64(f7_2) * int64(g7_19) + f7g8_19 := int64(f7) * int64(g8_19) + f7g9_38 := int64(f7_2) * int64(g9_19) + f8g0 := int64(f8) * int64(g0) + f8g1 := int64(f8) * int64(g1) + f8g2_19 := int64(f8) * int64(g2_19) + f8g3_19 := int64(f8) * int64(g3_19) + f8g4_19 := int64(f8) * 
int64(g4_19) + f8g5_19 := int64(f8) * int64(g5_19) + f8g6_19 := int64(f8) * int64(g6_19) + f8g7_19 := int64(f8) * int64(g7_19) + f8g8_19 := int64(f8) * int64(g8_19) + f8g9_19 := int64(f8) * int64(g9_19) + f9g0 := int64(f9) * int64(g0) + f9g1_38 := int64(f9_2) * int64(g1_19) + f9g2_19 := int64(f9) * int64(g2_19) + f9g3_38 := int64(f9_2) * int64(g3_19) + f9g4_19 := int64(f9) * int64(g4_19) + f9g5_38 := int64(f9_2) * int64(g5_19) + f9g6_19 := int64(f9) * int64(g6_19) + f9g7_38 := int64(f9_2) * int64(g7_19) + f9g8_19 := int64(f9) * int64(g8_19) + f9g9_38 := int64(f9_2) * int64(g9_19) + h0 := f0g0 + f1g9_38 + f2g8_19 + f3g7_38 + f4g6_19 + f5g5_38 + f6g4_19 + f7g3_38 + f8g2_19 + f9g1_38 + h1 := f0g1 + f1g0 + f2g9_19 + f3g8_19 + f4g7_19 + f5g6_19 + f6g5_19 + f7g4_19 + f8g3_19 + f9g2_19 + h2 := f0g2 + f1g1_2 + f2g0 + f3g9_38 + f4g8_19 + f5g7_38 + f6g6_19 + f7g5_38 + f8g4_19 + f9g3_38 + h3 := f0g3 + f1g2 + f2g1 + f3g0 + f4g9_19 + f5g8_19 + f6g7_19 + f7g6_19 + f8g5_19 + f9g4_19 + h4 := f0g4 + f1g3_2 + f2g2 + f3g1_2 + f4g0 + f5g9_38 + f6g8_19 + f7g7_38 + f8g6_19 + f9g5_38 + h5 := f0g5 + f1g4 + f2g3 + f3g2 + f4g1 + f5g0 + f6g9_19 + f7g8_19 + f8g7_19 + f9g6_19 + h6 := f0g6 + f1g5_2 + f2g4 + f3g3_2 + f4g2 + f5g1_2 + f6g0 + f7g9_38 + f8g8_19 + f9g7_38 + h7 := f0g7 + f1g6 + f2g5 + f3g4 + f4g3 + f5g2 + f6g1 + f7g0 + f8g9_19 + f9g8_19 + h8 := f0g8 + f1g7_2 + f2g6 + f3g5_2 + f4g4 + f5g3_2 + f6g2 + f7g1_2 + f8g0 + f9g9_38 + h9 := f0g9 + f1g8 + f2g7 + f3g6 + f4g5 + f5g4 + f6g3 + f7g2 + f8g1 + f9g0 + var carry [10]int64 + + /* + |h0| <= (1.1*1.1*2^52*(1+19+19+19+19)+1.1*1.1*2^50*(38+38+38+38+38)) + i.e. |h0| <= 1.2*2^59; narrower ranges for h2, h4, h6, h8 + |h1| <= (1.1*1.1*2^51*(1+1+19+19+19+19+19+19+19+19)) + i.e. |h1| <= 1.5*2^58; narrower ranges for h3, h5, h7, h9 + */ + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + /* |h0| <= 2^25 */ + /* |h4| <= 2^25 */ + /* |h1| <= 1.51*2^58 */ + /* |h5| <= 1.51*2^58 */ + + carry[1] = (h1 + (1 << 24)) >> 25 + h2 += carry[1] + h1 -= carry[1] << 25 + carry[5] = (h5 + (1 << 24)) >> 25 + h6 += carry[5] + h5 -= carry[5] << 25 + /* |h1| <= 2^24; from now on fits into int32 */ + /* |h5| <= 2^24; from now on fits into int32 */ + /* |h2| <= 1.21*2^59 */ + /* |h6| <= 1.21*2^59 */ + + carry[2] = (h2 + (1 << 25)) >> 26 + h3 += carry[2] + h2 -= carry[2] << 26 + carry[6] = (h6 + (1 << 25)) >> 26 + h7 += carry[6] + h6 -= carry[6] << 26 + /* |h2| <= 2^25; from now on fits into int32 unchanged */ + /* |h6| <= 2^25; from now on fits into int32 unchanged */ + /* |h3| <= 1.51*2^58 */ + /* |h7| <= 1.51*2^58 */ + + carry[3] = (h3 + (1 << 24)) >> 25 + h4 += carry[3] + h3 -= carry[3] << 25 + carry[7] = (h7 + (1 << 24)) >> 25 + h8 += carry[7] + h7 -= carry[7] << 25 + /* |h3| <= 2^24; from now on fits into int32 unchanged */ + /* |h7| <= 2^24; from now on fits into int32 unchanged */ + /* |h4| <= 1.52*2^33 */ + /* |h8| <= 1.52*2^33 */ + + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + carry[8] = (h8 + (1 << 25)) >> 26 + h9 += carry[8] + h8 -= carry[8] << 26 + /* |h4| <= 2^25; from now on fits into int32 unchanged */ + /* |h8| <= 2^25; from now on fits into int32 unchanged */ + /* |h5| <= 1.01*2^24 */ + /* |h9| <= 1.51*2^58 */ + + carry[9] = (h9 + (1 << 24)) >> 25 + h0 += carry[9] * 19 + h9 -= carry[9] << 25 + /* |h9| <= 2^24; from now on fits into int32 unchanged */ + /* |h0| <= 1.8*2^37 */ + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] 
<< 26 + /* |h0| <= 2^25; from now on fits into int32 unchanged */ + /* |h1| <= 1.01*2^24 */ + + h[0] = int32(h0) + h[1] = int32(h1) + h[2] = int32(h2) + h[3] = int32(h3) + h[4] = int32(h4) + h[5] = int32(h5) + h[6] = int32(h6) + h[7] = int32(h7) + h[8] = int32(h8) + h[9] = int32(h9) +} + +// FeSquare calculates h = f*f. Can overlap h with f. +// +// Preconditions: +// |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. +// +// Postconditions: +// |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. +func FeSquare(h, f *FieldElement) { + f0 := f[0] + f1 := f[1] + f2 := f[2] + f3 := f[3] + f4 := f[4] + f5 := f[5] + f6 := f[6] + f7 := f[7] + f8 := f[8] + f9 := f[9] + f0_2 := 2 * f0 + f1_2 := 2 * f1 + f2_2 := 2 * f2 + f3_2 := 2 * f3 + f4_2 := 2 * f4 + f5_2 := 2 * f5 + f6_2 := 2 * f6 + f7_2 := 2 * f7 + f5_38 := 38 * f5 // 1.31*2^30 + f6_19 := 19 * f6 // 1.31*2^30 + f7_38 := 38 * f7 // 1.31*2^30 + f8_19 := 19 * f8 // 1.31*2^30 + f9_38 := 38 * f9 // 1.31*2^30 + f0f0 := int64(f0) * int64(f0) + f0f1_2 := int64(f0_2) * int64(f1) + f0f2_2 := int64(f0_2) * int64(f2) + f0f3_2 := int64(f0_2) * int64(f3) + f0f4_2 := int64(f0_2) * int64(f4) + f0f5_2 := int64(f0_2) * int64(f5) + f0f6_2 := int64(f0_2) * int64(f6) + f0f7_2 := int64(f0_2) * int64(f7) + f0f8_2 := int64(f0_2) * int64(f8) + f0f9_2 := int64(f0_2) * int64(f9) + f1f1_2 := int64(f1_2) * int64(f1) + f1f2_2 := int64(f1_2) * int64(f2) + f1f3_4 := int64(f1_2) * int64(f3_2) + f1f4_2 := int64(f1_2) * int64(f4) + f1f5_4 := int64(f1_2) * int64(f5_2) + f1f6_2 := int64(f1_2) * int64(f6) + f1f7_4 := int64(f1_2) * int64(f7_2) + f1f8_2 := int64(f1_2) * int64(f8) + f1f9_76 := int64(f1_2) * int64(f9_38) + f2f2 := int64(f2) * int64(f2) + f2f3_2 := int64(f2_2) * int64(f3) + f2f4_2 := int64(f2_2) * int64(f4) + f2f5_2 := int64(f2_2) * int64(f5) + f2f6_2 := int64(f2_2) * int64(f6) + f2f7_2 := int64(f2_2) * int64(f7) + f2f8_38 := int64(f2_2) * int64(f8_19) + f2f9_38 := int64(f2) * int64(f9_38) + f3f3_2 := int64(f3_2) * int64(f3) + f3f4_2 := int64(f3_2) * int64(f4) + f3f5_4 := int64(f3_2) * int64(f5_2) + f3f6_2 := int64(f3_2) * int64(f6) + f3f7_76 := int64(f3_2) * int64(f7_38) + f3f8_38 := int64(f3_2) * int64(f8_19) + f3f9_76 := int64(f3_2) * int64(f9_38) + f4f4 := int64(f4) * int64(f4) + f4f5_2 := int64(f4_2) * int64(f5) + f4f6_38 := int64(f4_2) * int64(f6_19) + f4f7_38 := int64(f4) * int64(f7_38) + f4f8_38 := int64(f4_2) * int64(f8_19) + f4f9_38 := int64(f4) * int64(f9_38) + f5f5_38 := int64(f5) * int64(f5_38) + f5f6_38 := int64(f5_2) * int64(f6_19) + f5f7_76 := int64(f5_2) * int64(f7_38) + f5f8_38 := int64(f5_2) * int64(f8_19) + f5f9_76 := int64(f5_2) * int64(f9_38) + f6f6_19 := int64(f6) * int64(f6_19) + f6f7_38 := int64(f6) * int64(f7_38) + f6f8_38 := int64(f6_2) * int64(f8_19) + f6f9_38 := int64(f6) * int64(f9_38) + f7f7_38 := int64(f7) * int64(f7_38) + f7f8_38 := int64(f7_2) * int64(f8_19) + f7f9_76 := int64(f7_2) * int64(f9_38) + f8f8_19 := int64(f8) * int64(f8_19) + f8f9_38 := int64(f8) * int64(f9_38) + f9f9_38 := int64(f9) * int64(f9_38) + h0 := f0f0 + f1f9_76 + f2f8_38 + f3f7_76 + f4f6_38 + f5f5_38 + h1 := f0f1_2 + f2f9_38 + f3f8_38 + f4f7_38 + f5f6_38 + h2 := f0f2_2 + f1f1_2 + f3f9_76 + f4f8_38 + f5f7_76 + f6f6_19 + h3 := f0f3_2 + f1f2_2 + f4f9_38 + f5f8_38 + f6f7_38 + h4 := f0f4_2 + f1f3_4 + f2f2 + f5f9_76 + f6f8_38 + f7f7_38 + h5 := f0f5_2 + f1f4_2 + f2f3_2 + f6f9_38 + f7f8_38 + h6 := f0f6_2 + f1f5_4 + f2f4_2 + f3f3_2 + f7f9_76 + f8f8_19 + h7 := f0f7_2 + f1f6_2 + f2f5_2 + f3f4_2 + f8f9_38 + h8 := f0f8_2 + f1f7_4 + f2f6_2 + f3f5_4 + f4f4 + f9f9_38 + 
h9 := f0f9_2 + f1f8_2 + f2f7_2 + f3f6_2 + f4f5_2 + var carry [10]int64 + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + + carry[1] = (h1 + (1 << 24)) >> 25 + h2 += carry[1] + h1 -= carry[1] << 25 + carry[5] = (h5 + (1 << 24)) >> 25 + h6 += carry[5] + h5 -= carry[5] << 25 + + carry[2] = (h2 + (1 << 25)) >> 26 + h3 += carry[2] + h2 -= carry[2] << 26 + carry[6] = (h6 + (1 << 25)) >> 26 + h7 += carry[6] + h6 -= carry[6] << 26 + + carry[3] = (h3 + (1 << 24)) >> 25 + h4 += carry[3] + h3 -= carry[3] << 25 + carry[7] = (h7 + (1 << 24)) >> 25 + h8 += carry[7] + h7 -= carry[7] << 25 + + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + carry[8] = (h8 + (1 << 25)) >> 26 + h9 += carry[8] + h8 -= carry[8] << 26 + + carry[9] = (h9 + (1 << 24)) >> 25 + h0 += carry[9] * 19 + h9 -= carry[9] << 25 + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + + h[0] = int32(h0) + h[1] = int32(h1) + h[2] = int32(h2) + h[3] = int32(h3) + h[4] = int32(h4) + h[5] = int32(h5) + h[6] = int32(h6) + h[7] = int32(h7) + h[8] = int32(h8) + h[9] = int32(h9) +} + +// FeSquare2 sets h = 2 * f * f +// +// Can overlap h with f. +// +// Preconditions: +// |f| bounded by 1.65*2^26,1.65*2^25,1.65*2^26,1.65*2^25,etc. +// +// Postconditions: +// |h| bounded by 1.01*2^25,1.01*2^24,1.01*2^25,1.01*2^24,etc. +// See fe_mul.c for discussion of implementation strategy. +func FeSquare2(h, f *FieldElement) { + f0 := f[0] + f1 := f[1] + f2 := f[2] + f3 := f[3] + f4 := f[4] + f5 := f[5] + f6 := f[6] + f7 := f[7] + f8 := f[8] + f9 := f[9] + f0_2 := 2 * f0 + f1_2 := 2 * f1 + f2_2 := 2 * f2 + f3_2 := 2 * f3 + f4_2 := 2 * f4 + f5_2 := 2 * f5 + f6_2 := 2 * f6 + f7_2 := 2 * f7 + f5_38 := 38 * f5 // 1.959375*2^30 + f6_19 := 19 * f6 // 1.959375*2^30 + f7_38 := 38 * f7 // 1.959375*2^30 + f8_19 := 19 * f8 // 1.959375*2^30 + f9_38 := 38 * f9 // 1.959375*2^30 + f0f0 := int64(f0) * int64(f0) + f0f1_2 := int64(f0_2) * int64(f1) + f0f2_2 := int64(f0_2) * int64(f2) + f0f3_2 := int64(f0_2) * int64(f3) + f0f4_2 := int64(f0_2) * int64(f4) + f0f5_2 := int64(f0_2) * int64(f5) + f0f6_2 := int64(f0_2) * int64(f6) + f0f7_2 := int64(f0_2) * int64(f7) + f0f8_2 := int64(f0_2) * int64(f8) + f0f9_2 := int64(f0_2) * int64(f9) + f1f1_2 := int64(f1_2) * int64(f1) + f1f2_2 := int64(f1_2) * int64(f2) + f1f3_4 := int64(f1_2) * int64(f3_2) + f1f4_2 := int64(f1_2) * int64(f4) + f1f5_4 := int64(f1_2) * int64(f5_2) + f1f6_2 := int64(f1_2) * int64(f6) + f1f7_4 := int64(f1_2) * int64(f7_2) + f1f8_2 := int64(f1_2) * int64(f8) + f1f9_76 := int64(f1_2) * int64(f9_38) + f2f2 := int64(f2) * int64(f2) + f2f3_2 := int64(f2_2) * int64(f3) + f2f4_2 := int64(f2_2) * int64(f4) + f2f5_2 := int64(f2_2) * int64(f5) + f2f6_2 := int64(f2_2) * int64(f6) + f2f7_2 := int64(f2_2) * int64(f7) + f2f8_38 := int64(f2_2) * int64(f8_19) + f2f9_38 := int64(f2) * int64(f9_38) + f3f3_2 := int64(f3_2) * int64(f3) + f3f4_2 := int64(f3_2) * int64(f4) + f3f5_4 := int64(f3_2) * int64(f5_2) + f3f6_2 := int64(f3_2) * int64(f6) + f3f7_76 := int64(f3_2) * int64(f7_38) + f3f8_38 := int64(f3_2) * int64(f8_19) + f3f9_76 := int64(f3_2) * int64(f9_38) + f4f4 := int64(f4) * int64(f4) + f4f5_2 := int64(f4_2) * int64(f5) + f4f6_38 := int64(f4_2) * int64(f6_19) + f4f7_38 := int64(f4) * int64(f7_38) + f4f8_38 := int64(f4_2) * int64(f8_19) + f4f9_38 := int64(f4) * int64(f9_38) + f5f5_38 := int64(f5) * int64(f5_38) + f5f6_38 := int64(f5_2) * int64(f6_19) + f5f7_76 := int64(f5_2) 
* int64(f7_38) + f5f8_38 := int64(f5_2) * int64(f8_19) + f5f9_76 := int64(f5_2) * int64(f9_38) + f6f6_19 := int64(f6) * int64(f6_19) + f6f7_38 := int64(f6) * int64(f7_38) + f6f8_38 := int64(f6_2) * int64(f8_19) + f6f9_38 := int64(f6) * int64(f9_38) + f7f7_38 := int64(f7) * int64(f7_38) + f7f8_38 := int64(f7_2) * int64(f8_19) + f7f9_76 := int64(f7_2) * int64(f9_38) + f8f8_19 := int64(f8) * int64(f8_19) + f8f9_38 := int64(f8) * int64(f9_38) + f9f9_38 := int64(f9) * int64(f9_38) + h0 := f0f0 + f1f9_76 + f2f8_38 + f3f7_76 + f4f6_38 + f5f5_38 + h1 := f0f1_2 + f2f9_38 + f3f8_38 + f4f7_38 + f5f6_38 + h2 := f0f2_2 + f1f1_2 + f3f9_76 + f4f8_38 + f5f7_76 + f6f6_19 + h3 := f0f3_2 + f1f2_2 + f4f9_38 + f5f8_38 + f6f7_38 + h4 := f0f4_2 + f1f3_4 + f2f2 + f5f9_76 + f6f8_38 + f7f7_38 + h5 := f0f5_2 + f1f4_2 + f2f3_2 + f6f9_38 + f7f8_38 + h6 := f0f6_2 + f1f5_4 + f2f4_2 + f3f3_2 + f7f9_76 + f8f8_19 + h7 := f0f7_2 + f1f6_2 + f2f5_2 + f3f4_2 + f8f9_38 + h8 := f0f8_2 + f1f7_4 + f2f6_2 + f3f5_4 + f4f4 + f9f9_38 + h9 := f0f9_2 + f1f8_2 + f2f7_2 + f3f6_2 + f4f5_2 + var carry [10]int64 + + h0 += h0 + h1 += h1 + h2 += h2 + h3 += h3 + h4 += h4 + h5 += h5 + h6 += h6 + h7 += h7 + h8 += h8 + h9 += h9 + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + + carry[1] = (h1 + (1 << 24)) >> 25 + h2 += carry[1] + h1 -= carry[1] << 25 + carry[5] = (h5 + (1 << 24)) >> 25 + h6 += carry[5] + h5 -= carry[5] << 25 + + carry[2] = (h2 + (1 << 25)) >> 26 + h3 += carry[2] + h2 -= carry[2] << 26 + carry[6] = (h6 + (1 << 25)) >> 26 + h7 += carry[6] + h6 -= carry[6] << 26 + + carry[3] = (h3 + (1 << 24)) >> 25 + h4 += carry[3] + h3 -= carry[3] << 25 + carry[7] = (h7 + (1 << 24)) >> 25 + h8 += carry[7] + h7 -= carry[7] << 25 + + carry[4] = (h4 + (1 << 25)) >> 26 + h5 += carry[4] + h4 -= carry[4] << 26 + carry[8] = (h8 + (1 << 25)) >> 26 + h9 += carry[8] + h8 -= carry[8] << 26 + + carry[9] = (h9 + (1 << 24)) >> 25 + h0 += carry[9] * 19 + h9 -= carry[9] << 25 + + carry[0] = (h0 + (1 << 25)) >> 26 + h1 += carry[0] + h0 -= carry[0] << 26 + + h[0] = int32(h0) + h[1] = int32(h1) + h[2] = int32(h2) + h[3] = int32(h3) + h[4] = int32(h4) + h[5] = int32(h5) + h[6] = int32(h6) + h[7] = int32(h7) + h[8] = int32(h8) + h[9] = int32(h9) +} + +func FeInvert(out, z *FieldElement) { + var t0, t1, t2, t3 FieldElement + var i int + + FeSquare(&t0, z) // 2^1 + FeSquare(&t1, &t0) // 2^2 + for i = 1; i < 2; i++ { // 2^3 + FeSquare(&t1, &t1) + } + FeMul(&t1, z, &t1) // 2^3 + 2^0 + FeMul(&t0, &t0, &t1) // 2^3 + 2^1 + 2^0 + FeSquare(&t2, &t0) // 2^4 + 2^2 + 2^1 + FeMul(&t1, &t1, &t2) // 2^4 + 2^3 + 2^2 + 2^1 + 2^0 + FeSquare(&t2, &t1) // 5,4,3,2,1 + for i = 1; i < 5; i++ { // 9,8,7,6,5 + FeSquare(&t2, &t2) + } + FeMul(&t1, &t2, &t1) // 9,8,7,6,5,4,3,2,1,0 + FeSquare(&t2, &t1) // 10..1 + for i = 1; i < 10; i++ { // 19..10 + FeSquare(&t2, &t2) + } + FeMul(&t2, &t2, &t1) // 19..0 + FeSquare(&t3, &t2) // 20..1 + for i = 1; i < 20; i++ { // 39..20 + FeSquare(&t3, &t3) + } + FeMul(&t2, &t3, &t2) // 39..0 + FeSquare(&t2, &t2) // 40..1 + for i = 1; i < 10; i++ { // 49..10 + FeSquare(&t2, &t2) + } + FeMul(&t1, &t2, &t1) // 49..0 + FeSquare(&t2, &t1) // 50..1 + for i = 1; i < 50; i++ { // 99..50 + FeSquare(&t2, &t2) + } + FeMul(&t2, &t2, &t1) // 99..0 + FeSquare(&t3, &t2) // 100..1 + for i = 1; i < 100; i++ { // 199..100 + FeSquare(&t3, &t3) + } + FeMul(&t2, &t3, &t2) // 199..0 + FeSquare(&t2, &t2) // 200..1 + for i = 1; i < 50; i++ { // 249..50 + FeSquare(&t2, &t2) + } + 
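// The multiplications and squarings that follow complete the addition chain
// for z^(2^255 - 21) = z^(p-2), where p = 2^255 - 19 is the field prime, so
// by Fermat's little theorem FeInvert produces the multiplicative inverse 1/z.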
FeMul(&t1, &t2, &t1) // 249..0 + FeSquare(&t1, &t1) // 250..1 + for i = 1; i < 5; i++ { // 254..5 + FeSquare(&t1, &t1) + } + FeMul(out, &t1, &t0) // 254..5,3,1,0 +} + +func fePow22523(out, z *FieldElement) { + var t0, t1, t2 FieldElement + var i int + + FeSquare(&t0, z) + for i = 1; i < 1; i++ { + FeSquare(&t0, &t0) + } + FeSquare(&t1, &t0) + for i = 1; i < 2; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t1, z, &t1) + FeMul(&t0, &t0, &t1) + FeSquare(&t0, &t0) + for i = 1; i < 1; i++ { + FeSquare(&t0, &t0) + } + FeMul(&t0, &t1, &t0) + FeSquare(&t1, &t0) + for i = 1; i < 5; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t0, &t1, &t0) + FeSquare(&t1, &t0) + for i = 1; i < 10; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t1, &t1, &t0) + FeSquare(&t2, &t1) + for i = 1; i < 20; i++ { + FeSquare(&t2, &t2) + } + FeMul(&t1, &t2, &t1) + FeSquare(&t1, &t1) + for i = 1; i < 10; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t0, &t1, &t0) + FeSquare(&t1, &t0) + for i = 1; i < 50; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t1, &t1, &t0) + FeSquare(&t2, &t1) + for i = 1; i < 100; i++ { + FeSquare(&t2, &t2) + } + FeMul(&t1, &t2, &t1) + FeSquare(&t1, &t1) + for i = 1; i < 50; i++ { + FeSquare(&t1, &t1) + } + FeMul(&t0, &t1, &t0) + FeSquare(&t0, &t0) + for i = 1; i < 2; i++ { + FeSquare(&t0, &t0) + } + FeMul(out, &t0, z) +} + +// Group elements are members of the elliptic curve -x^2 + y^2 = 1 + d * x^2 * +// y^2 where d = -121665/121666. +// +// Several representations are used: +// ProjectiveGroupElement: (X:Y:Z) satisfying x=X/Z, y=Y/Z +// ExtendedGroupElement: (X:Y:Z:T) satisfying x=X/Z, y=Y/Z, XY=ZT +// CompletedGroupElement: ((X:Z),(Y:T)) satisfying x=X/Z, y=Y/T +// PreComputedGroupElement: (y+x,y-x,2dxy) + +type ProjectiveGroupElement struct { + X, Y, Z FieldElement +} + +type ExtendedGroupElement struct { + X, Y, Z, T FieldElement +} + +type CompletedGroupElement struct { + X, Y, Z, T FieldElement +} + +type PreComputedGroupElement struct { + yPlusX, yMinusX, xy2d FieldElement +} + +type CachedGroupElement struct { + yPlusX, yMinusX, Z, T2d FieldElement +} + +func (p *ProjectiveGroupElement) Zero() { + FeZero(&p.X) + FeOne(&p.Y) + FeOne(&p.Z) +} + +func (p *ProjectiveGroupElement) Double(r *CompletedGroupElement) { + var t0 FieldElement + + FeSquare(&r.X, &p.X) + FeSquare(&r.Z, &p.Y) + FeSquare2(&r.T, &p.Z) + FeAdd(&r.Y, &p.X, &p.Y) + FeSquare(&t0, &r.Y) + FeAdd(&r.Y, &r.Z, &r.X) + FeSub(&r.Z, &r.Z, &r.X) + FeSub(&r.X, &t0, &r.Y) + FeSub(&r.T, &r.T, &r.Z) +} + +func (p *ProjectiveGroupElement) ToBytes(s *[32]byte) { + var recip, x, y FieldElement + + FeInvert(&recip, &p.Z) + FeMul(&x, &p.X, &recip) + FeMul(&y, &p.Y, &recip) + FeToBytes(s, &y) + s[31] ^= FeIsNegative(&x) << 7 +} + +func (p *ExtendedGroupElement) Zero() { + FeZero(&p.X) + FeOne(&p.Y) + FeOne(&p.Z) + FeZero(&p.T) +} + +func (p *ExtendedGroupElement) Double(r *CompletedGroupElement) { + var q ProjectiveGroupElement + p.ToProjective(&q) + q.Double(r) +} + +func (p *ExtendedGroupElement) ToCached(r *CachedGroupElement) { + FeAdd(&r.yPlusX, &p.Y, &p.X) + FeSub(&r.yMinusX, &p.Y, &p.X) + FeCopy(&r.Z, &p.Z) + FeMul(&r.T2d, &p.T, &d2) +} + +func (p *ExtendedGroupElement) ToProjective(r *ProjectiveGroupElement) { + FeCopy(&r.X, &p.X) + FeCopy(&r.Y, &p.Y) + FeCopy(&r.Z, &p.Z) +} + +func (p *ExtendedGroupElement) ToBytes(s *[32]byte) { + var recip, x, y FieldElement + + FeInvert(&recip, &p.Z) + FeMul(&x, &p.X, &recip) + FeMul(&y, &p.Y, &recip) + FeToBytes(s, &y) + s[31] ^= FeIsNegative(&x) << 7 +} + +func (p *ExtendedGroupElement) FromBytes(s *[32]byte) bool { + var u, v, 
v3, vxx, check FieldElement + + FeFromBytes(&p.Y, s) + FeOne(&p.Z) + FeSquare(&u, &p.Y) + FeMul(&v, &u, &d) + FeSub(&u, &u, &p.Z) // y = y^2-1 + FeAdd(&v, &v, &p.Z) // v = dy^2+1 + + FeSquare(&v3, &v) + FeMul(&v3, &v3, &v) // v3 = v^3 + FeSquare(&p.X, &v3) + FeMul(&p.X, &p.X, &v) + FeMul(&p.X, &p.X, &u) // x = uv^7 + + fePow22523(&p.X, &p.X) // x = (uv^7)^((q-5)/8) + FeMul(&p.X, &p.X, &v3) + FeMul(&p.X, &p.X, &u) // x = uv^3(uv^7)^((q-5)/8) + + var tmpX, tmp2 [32]byte + + FeSquare(&vxx, &p.X) + FeMul(&vxx, &vxx, &v) + FeSub(&check, &vxx, &u) // vx^2-u + if FeIsNonZero(&check) == 1 { + FeAdd(&check, &vxx, &u) // vx^2+u + if FeIsNonZero(&check) == 1 { + return false + } + FeMul(&p.X, &p.X, &SqrtM1) + + FeToBytes(&tmpX, &p.X) + for i, v := range tmpX { + tmp2[31-i] = v + } + } + + if FeIsNegative(&p.X) == (s[31] >> 7) { + FeNeg(&p.X, &p.X) + } + + FeMul(&p.T, &p.X, &p.Y) + return true +} + +func (p *CompletedGroupElement) ToProjective(r *ProjectiveGroupElement) { + FeMul(&r.X, &p.X, &p.T) + FeMul(&r.Y, &p.Y, &p.Z) + FeMul(&r.Z, &p.Z, &p.T) +} + +func (p *CompletedGroupElement) ToExtended(r *ExtendedGroupElement) { + FeMul(&r.X, &p.X, &p.T) + FeMul(&r.Y, &p.Y, &p.Z) + FeMul(&r.Z, &p.Z, &p.T) + FeMul(&r.T, &p.X, &p.Y) +} + +func (p *PreComputedGroupElement) Zero() { + FeOne(&p.yPlusX) + FeOne(&p.yMinusX) + FeZero(&p.xy2d) +} + +func geAdd(r *CompletedGroupElement, p *ExtendedGroupElement, q *CachedGroupElement) { + var t0 FieldElement + + FeAdd(&r.X, &p.Y, &p.X) + FeSub(&r.Y, &p.Y, &p.X) + FeMul(&r.Z, &r.X, &q.yPlusX) + FeMul(&r.Y, &r.Y, &q.yMinusX) + FeMul(&r.T, &q.T2d, &p.T) + FeMul(&r.X, &p.Z, &q.Z) + FeAdd(&t0, &r.X, &r.X) + FeSub(&r.X, &r.Z, &r.Y) + FeAdd(&r.Y, &r.Z, &r.Y) + FeAdd(&r.Z, &t0, &r.T) + FeSub(&r.T, &t0, &r.T) +} + +func geSub(r *CompletedGroupElement, p *ExtendedGroupElement, q *CachedGroupElement) { + var t0 FieldElement + + FeAdd(&r.X, &p.Y, &p.X) + FeSub(&r.Y, &p.Y, &p.X) + FeMul(&r.Z, &r.X, &q.yMinusX) + FeMul(&r.Y, &r.Y, &q.yPlusX) + FeMul(&r.T, &q.T2d, &p.T) + FeMul(&r.X, &p.Z, &q.Z) + FeAdd(&t0, &r.X, &r.X) + FeSub(&r.X, &r.Z, &r.Y) + FeAdd(&r.Y, &r.Z, &r.Y) + FeSub(&r.Z, &t0, &r.T) + FeAdd(&r.T, &t0, &r.T) +} + +func geMixedAdd(r *CompletedGroupElement, p *ExtendedGroupElement, q *PreComputedGroupElement) { + var t0 FieldElement + + FeAdd(&r.X, &p.Y, &p.X) + FeSub(&r.Y, &p.Y, &p.X) + FeMul(&r.Z, &r.X, &q.yPlusX) + FeMul(&r.Y, &r.Y, &q.yMinusX) + FeMul(&r.T, &q.xy2d, &p.T) + FeAdd(&t0, &p.Z, &p.Z) + FeSub(&r.X, &r.Z, &r.Y) + FeAdd(&r.Y, &r.Z, &r.Y) + FeAdd(&r.Z, &t0, &r.T) + FeSub(&r.T, &t0, &r.T) +} + +func geMixedSub(r *CompletedGroupElement, p *ExtendedGroupElement, q *PreComputedGroupElement) { + var t0 FieldElement + + FeAdd(&r.X, &p.Y, &p.X) + FeSub(&r.Y, &p.Y, &p.X) + FeMul(&r.Z, &r.X, &q.yMinusX) + FeMul(&r.Y, &r.Y, &q.yPlusX) + FeMul(&r.T, &q.xy2d, &p.T) + FeAdd(&t0, &p.Z, &p.Z) + FeSub(&r.X, &r.Z, &r.Y) + FeAdd(&r.Y, &r.Z, &r.Y) + FeSub(&r.Z, &t0, &r.T) + FeAdd(&r.T, &t0, &r.T) +} + +func slide(r *[256]int8, a *[32]byte) { + for i := range r { + r[i] = int8(1 & (a[i>>3] >> uint(i&7))) + } + + for i := range r { + if r[i] != 0 { + for b := 1; b <= 6 && i+b < 256; b++ { + if r[i+b] != 0 { + if r[i]+(r[i+b]<= -15 { + r[i] -= r[i+b] << uint(b) + for k := i + b; k < 256; k++ { + if r[k] == 0 { + r[k] = 1 + break + } + r[k] = 0 + } + } else { + break + } + } + } + } + } +} + +// GeDoubleScalarMultVartime sets r = a*A + b*B +// where a = a[0]+256*a[1]+...+256^31 a[31]. +// and b = b[0]+256*b[1]+...+256^31 b[31]. 
+// B is the Ed25519 base point (x,4/5) with x positive. +func GeDoubleScalarMultVartime(r *ProjectiveGroupElement, a *[32]byte, A *ExtendedGroupElement, b *[32]byte) { + var aSlide, bSlide [256]int8 + var Ai [8]CachedGroupElement // A,3A,5A,7A,9A,11A,13A,15A + var t CompletedGroupElement + var u, A2 ExtendedGroupElement + var i int + + slide(&aSlide, a) + slide(&bSlide, b) + + A.ToCached(&Ai[0]) + A.Double(&t) + t.ToExtended(&A2) + + for i := 0; i < 7; i++ { + geAdd(&t, &A2, &Ai[i]) + t.ToExtended(&u) + u.ToCached(&Ai[i+1]) + } + + r.Zero() + + for i = 255; i >= 0; i-- { + if aSlide[i] != 0 || bSlide[i] != 0 { + break + } + } + + for ; i >= 0; i-- { + r.Double(&t) + + if aSlide[i] > 0 { + t.ToExtended(&u) + geAdd(&t, &u, &Ai[aSlide[i]/2]) + } else if aSlide[i] < 0 { + t.ToExtended(&u) + geSub(&t, &u, &Ai[(-aSlide[i])/2]) + } + + if bSlide[i] > 0 { + t.ToExtended(&u) + geMixedAdd(&t, &u, &bi[bSlide[i]/2]) + } else if bSlide[i] < 0 { + t.ToExtended(&u) + geMixedSub(&t, &u, &bi[(-bSlide[i])/2]) + } + + t.ToProjective(r) + } +} + +// equal returns 1 if b == c and 0 otherwise. +func equal(b, c int32) int32 { + x := uint32(b ^ c) + x-- + return int32(x >> 31) +} + +// negative returns 1 if b < 0 and 0 otherwise. +func negative(b int32) int32 { + return (b >> 31) & 1 +} + +func PreComputedGroupElementCMove(t, u *PreComputedGroupElement, b int32) { + FeCMove(&t.yPlusX, &u.yPlusX, b) + FeCMove(&t.yMinusX, &u.yMinusX, b) + FeCMove(&t.xy2d, &u.xy2d, b) +} + +func selectPoint(t *PreComputedGroupElement, pos int32, b int32) { + var minusT PreComputedGroupElement + bNegative := negative(b) + bAbs := b - (((-bNegative) & b) << 1) + + t.Zero() + for i := int32(0); i < 8; i++ { + PreComputedGroupElementCMove(t, &base[pos][i], equal(bAbs, i+1)) + } + FeCopy(&minusT.yPlusX, &t.yMinusX) + FeCopy(&minusT.yMinusX, &t.yPlusX) + FeNeg(&minusT.xy2d, &t.xy2d) + PreComputedGroupElementCMove(t, &minusT, bNegative) +} + +// GeScalarMultBase computes h = a*B, where +// a = a[0]+256*a[1]+...+256^31 a[31] +// B is the Ed25519 base point (x,4/5) with x positive. +// +// Preconditions: +// a[31] <= 127 +func GeScalarMultBase(h *ExtendedGroupElement, a *[32]byte) { + var e [64]int8 + + for i, v := range a { + e[2*i] = int8(v & 15) + e[2*i+1] = int8((v >> 4) & 15) + } + + // each e[i] is between 0 and 15 and e[63] is between 0 and 7. + + carry := int8(0) + for i := 0; i < 63; i++ { + e[i] += carry + carry = (e[i] + 8) >> 4 + e[i] -= carry << 4 + } + e[63] += carry + // each e[i] is between -8 and 8. + + h.Zero() + var t PreComputedGroupElement + var r CompletedGroupElement + for i := int32(1); i < 64; i += 2 { + selectPoint(&t, i/2, int32(e[i])) + geMixedAdd(&r, h, &t) + r.ToExtended(h) + } + + var s ProjectiveGroupElement + + h.Double(&r) + r.ToProjective(&s) + s.Double(&r) + r.ToProjective(&s) + s.Double(&r) + r.ToProjective(&s) + s.Double(&r) + r.ToExtended(h) + + for i := int32(0); i < 64; i += 2 { + selectPoint(&t, i/2, int32(e[i])) + geMixedAdd(&r, h, &t) + r.ToExtended(h) + } +} + +// The scalars are GF(2^252 + 27742317777372353535851937790883648493). + +// Input: +// a[0]+256*a[1]+...+256^31*a[31] = a +// b[0]+256*b[1]+...+256^31*b[31] = b +// c[0]+256*c[1]+...+256^31*c[31] = c +// +// Output: +// s[0]+256*s[1]+...+256^31*s[31] = (ab+c) mod l +// where l = 2^252 + 27742317777372353535851937790883648493. 
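As a sanity check on the relation above, the following stand-alone sketch (illustrative only, not part of the vendored package) computes (a*b + c) mod l with math/big for small scalars; under the little-endian byte convention just described, ScMulAdd over the corresponding 32-byte arrays produces the same value.

package main

import (
	"fmt"
	"math/big"
)

func main() {
	// l = 2^252 + 27742317777372353535851937790883648493, written in decimal.
	l, _ := new(big.Int).SetString(
		"7237005577332262213973186563042994240857116359379907606001950938285454250989", 10)

	// The 32-byte arrays {2, 0, ...}, {3, 0, ...} and {5, 0, ...} encode the
	// integers 2, 3 and 5 under the a[0]+256*a[1]+... convention.
	a, b, c := big.NewInt(2), big.NewInt(3), big.NewInt(5)

	s := new(big.Int).Mul(a, b) // a*b
	s.Add(s, c)                 // a*b + c
	s.Mod(s, l)                 // (a*b + c) mod l
	fmt.Println(s)              // 11; ScMulAdd yields the same value, little-endian encoded
}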
+func ScMulAdd(s, a, b, c *[32]byte) { + a0 := 2097151 & load3(a[:]) + a1 := 2097151 & (load4(a[2:]) >> 5) + a2 := 2097151 & (load3(a[5:]) >> 2) + a3 := 2097151 & (load4(a[7:]) >> 7) + a4 := 2097151 & (load4(a[10:]) >> 4) + a5 := 2097151 & (load3(a[13:]) >> 1) + a6 := 2097151 & (load4(a[15:]) >> 6) + a7 := 2097151 & (load3(a[18:]) >> 3) + a8 := 2097151 & load3(a[21:]) + a9 := 2097151 & (load4(a[23:]) >> 5) + a10 := 2097151 & (load3(a[26:]) >> 2) + a11 := (load4(a[28:]) >> 7) + b0 := 2097151 & load3(b[:]) + b1 := 2097151 & (load4(b[2:]) >> 5) + b2 := 2097151 & (load3(b[5:]) >> 2) + b3 := 2097151 & (load4(b[7:]) >> 7) + b4 := 2097151 & (load4(b[10:]) >> 4) + b5 := 2097151 & (load3(b[13:]) >> 1) + b6 := 2097151 & (load4(b[15:]) >> 6) + b7 := 2097151 & (load3(b[18:]) >> 3) + b8 := 2097151 & load3(b[21:]) + b9 := 2097151 & (load4(b[23:]) >> 5) + b10 := 2097151 & (load3(b[26:]) >> 2) + b11 := (load4(b[28:]) >> 7) + c0 := 2097151 & load3(c[:]) + c1 := 2097151 & (load4(c[2:]) >> 5) + c2 := 2097151 & (load3(c[5:]) >> 2) + c3 := 2097151 & (load4(c[7:]) >> 7) + c4 := 2097151 & (load4(c[10:]) >> 4) + c5 := 2097151 & (load3(c[13:]) >> 1) + c6 := 2097151 & (load4(c[15:]) >> 6) + c7 := 2097151 & (load3(c[18:]) >> 3) + c8 := 2097151 & load3(c[21:]) + c9 := 2097151 & (load4(c[23:]) >> 5) + c10 := 2097151 & (load3(c[26:]) >> 2) + c11 := (load4(c[28:]) >> 7) + var carry [23]int64 + + s0 := c0 + a0*b0 + s1 := c1 + a0*b1 + a1*b0 + s2 := c2 + a0*b2 + a1*b1 + a2*b0 + s3 := c3 + a0*b3 + a1*b2 + a2*b1 + a3*b0 + s4 := c4 + a0*b4 + a1*b3 + a2*b2 + a3*b1 + a4*b0 + s5 := c5 + a0*b5 + a1*b4 + a2*b3 + a3*b2 + a4*b1 + a5*b0 + s6 := c6 + a0*b6 + a1*b5 + a2*b4 + a3*b3 + a4*b2 + a5*b1 + a6*b0 + s7 := c7 + a0*b7 + a1*b6 + a2*b5 + a3*b4 + a4*b3 + a5*b2 + a6*b1 + a7*b0 + s8 := c8 + a0*b8 + a1*b7 + a2*b6 + a3*b5 + a4*b4 + a5*b3 + a6*b2 + a7*b1 + a8*b0 + s9 := c9 + a0*b9 + a1*b8 + a2*b7 + a3*b6 + a4*b5 + a5*b4 + a6*b3 + a7*b2 + a8*b1 + a9*b0 + s10 := c10 + a0*b10 + a1*b9 + a2*b8 + a3*b7 + a4*b6 + a5*b5 + a6*b4 + a7*b3 + a8*b2 + a9*b1 + a10*b0 + s11 := c11 + a0*b11 + a1*b10 + a2*b9 + a3*b8 + a4*b7 + a5*b6 + a6*b5 + a7*b4 + a8*b3 + a9*b2 + a10*b1 + a11*b0 + s12 := a1*b11 + a2*b10 + a3*b9 + a4*b8 + a5*b7 + a6*b6 + a7*b5 + a8*b4 + a9*b3 + a10*b2 + a11*b1 + s13 := a2*b11 + a3*b10 + a4*b9 + a5*b8 + a6*b7 + a7*b6 + a8*b5 + a9*b4 + a10*b3 + a11*b2 + s14 := a3*b11 + a4*b10 + a5*b9 + a6*b8 + a7*b7 + a8*b6 + a9*b5 + a10*b4 + a11*b3 + s15 := a4*b11 + a5*b10 + a6*b9 + a7*b8 + a8*b7 + a9*b6 + a10*b5 + a11*b4 + s16 := a5*b11 + a6*b10 + a7*b9 + a8*b8 + a9*b7 + a10*b6 + a11*b5 + s17 := a6*b11 + a7*b10 + a8*b9 + a9*b8 + a10*b7 + a11*b6 + s18 := a7*b11 + a8*b10 + a9*b9 + a10*b8 + a11*b7 + s19 := a8*b11 + a9*b10 + a10*b9 + a11*b8 + s20 := a9*b11 + a10*b10 + a11*b9 + s21 := a10*b11 + a11*b10 + s22 := a11 * b11 + s23 := int64(0) + + carry[0] = (s0 + (1 << 20)) >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[2] = (s2 + (1 << 20)) >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[4] = (s4 + (1 << 20)) >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[6] = (s6 + (1 << 20)) >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[8] = (s8 + (1 << 20)) >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[10] = (s10 + (1 << 20)) >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + carry[12] = (s12 + (1 << 20)) >> 21 + s13 += carry[12] + s12 -= carry[12] << 21 + carry[14] = (s14 + (1 << 20)) >> 21 + s15 += carry[14] + s14 -= carry[14] << 21 + carry[16] = (s16 + (1 << 20)) >> 21 + s17 += carry[16] + s16 -= carry[16] << 21 + carry[18] = (s18 + (1 
<< 20)) >> 21 + s19 += carry[18] + s18 -= carry[18] << 21 + carry[20] = (s20 + (1 << 20)) >> 21 + s21 += carry[20] + s20 -= carry[20] << 21 + carry[22] = (s22 + (1 << 20)) >> 21 + s23 += carry[22] + s22 -= carry[22] << 21 + + carry[1] = (s1 + (1 << 20)) >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[3] = (s3 + (1 << 20)) >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[5] = (s5 + (1 << 20)) >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[7] = (s7 + (1 << 20)) >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[9] = (s9 + (1 << 20)) >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[11] = (s11 + (1 << 20)) >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + carry[13] = (s13 + (1 << 20)) >> 21 + s14 += carry[13] + s13 -= carry[13] << 21 + carry[15] = (s15 + (1 << 20)) >> 21 + s16 += carry[15] + s15 -= carry[15] << 21 + carry[17] = (s17 + (1 << 20)) >> 21 + s18 += carry[17] + s17 -= carry[17] << 21 + carry[19] = (s19 + (1 << 20)) >> 21 + s20 += carry[19] + s19 -= carry[19] << 21 + carry[21] = (s21 + (1 << 20)) >> 21 + s22 += carry[21] + s21 -= carry[21] << 21 + + s11 += s23 * 666643 + s12 += s23 * 470296 + s13 += s23 * 654183 + s14 -= s23 * 997805 + s15 += s23 * 136657 + s16 -= s23 * 683901 + s23 = 0 + + s10 += s22 * 666643 + s11 += s22 * 470296 + s12 += s22 * 654183 + s13 -= s22 * 997805 + s14 += s22 * 136657 + s15 -= s22 * 683901 + s22 = 0 + + s9 += s21 * 666643 + s10 += s21 * 470296 + s11 += s21 * 654183 + s12 -= s21 * 997805 + s13 += s21 * 136657 + s14 -= s21 * 683901 + s21 = 0 + + s8 += s20 * 666643 + s9 += s20 * 470296 + s10 += s20 * 654183 + s11 -= s20 * 997805 + s12 += s20 * 136657 + s13 -= s20 * 683901 + s20 = 0 + + s7 += s19 * 666643 + s8 += s19 * 470296 + s9 += s19 * 654183 + s10 -= s19 * 997805 + s11 += s19 * 136657 + s12 -= s19 * 683901 + s19 = 0 + + s6 += s18 * 666643 + s7 += s18 * 470296 + s8 += s18 * 654183 + s9 -= s18 * 997805 + s10 += s18 * 136657 + s11 -= s18 * 683901 + s18 = 0 + + carry[6] = (s6 + (1 << 20)) >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[8] = (s8 + (1 << 20)) >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[10] = (s10 + (1 << 20)) >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + carry[12] = (s12 + (1 << 20)) >> 21 + s13 += carry[12] + s12 -= carry[12] << 21 + carry[14] = (s14 + (1 << 20)) >> 21 + s15 += carry[14] + s14 -= carry[14] << 21 + carry[16] = (s16 + (1 << 20)) >> 21 + s17 += carry[16] + s16 -= carry[16] << 21 + + carry[7] = (s7 + (1 << 20)) >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[9] = (s9 + (1 << 20)) >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[11] = (s11 + (1 << 20)) >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + carry[13] = (s13 + (1 << 20)) >> 21 + s14 += carry[13] + s13 -= carry[13] << 21 + carry[15] = (s15 + (1 << 20)) >> 21 + s16 += carry[15] + s15 -= carry[15] << 21 + + s5 += s17 * 666643 + s6 += s17 * 470296 + s7 += s17 * 654183 + s8 -= s17 * 997805 + s9 += s17 * 136657 + s10 -= s17 * 683901 + s17 = 0 + + s4 += s16 * 666643 + s5 += s16 * 470296 + s6 += s16 * 654183 + s7 -= s16 * 997805 + s8 += s16 * 136657 + s9 -= s16 * 683901 + s16 = 0 + + s3 += s15 * 666643 + s4 += s15 * 470296 + s5 += s15 * 654183 + s6 -= s15 * 997805 + s7 += s15 * 136657 + s8 -= s15 * 683901 + s15 = 0 + + s2 += s14 * 666643 + s3 += s14 * 470296 + s4 += s14 * 654183 + s5 -= s14 * 997805 + s6 += s14 * 136657 + s7 -= s14 * 683901 + s14 = 0 + + s1 += s13 * 666643 + s2 += s13 * 470296 + s3 += s13 * 654183 + s4 -= s13 * 997805 + s5 += s13 * 136657 + s6 -= s13 * 683901 + s13 = 0 + + s0 += s12 * 666643 + s1 
+= s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = (s0 + (1 << 20)) >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[2] = (s2 + (1 << 20)) >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[4] = (s4 + (1 << 20)) >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[6] = (s6 + (1 << 20)) >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[8] = (s8 + (1 << 20)) >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[10] = (s10 + (1 << 20)) >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + + carry[1] = (s1 + (1 << 20)) >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[3] = (s3 + (1 << 20)) >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[5] = (s5 + (1 << 20)) >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[7] = (s7 + (1 << 20)) >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[9] = (s9 + (1 << 20)) >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[11] = (s11 + (1 << 20)) >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + + s0 += s12 * 666643 + s1 += s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = s0 >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[1] = s1 >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[2] = s2 >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[3] = s3 >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[4] = s4 >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[5] = s5 >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[6] = s6 >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[7] = s7 >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[8] = s8 >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[9] = s9 >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[10] = s10 >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + carry[11] = s11 >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + + s0 += s12 * 666643 + s1 += s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = s0 >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[1] = s1 >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[2] = s2 >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[3] = s3 >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[4] = s4 >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[5] = s5 >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[6] = s6 >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[7] = s7 >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[8] = s8 >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[9] = s9 >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[10] = s10 >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + + s[0] = byte(s0 >> 0) + s[1] = byte(s0 >> 8) + s[2] = byte((s0 >> 16) | (s1 << 5)) + s[3] = byte(s1 >> 3) + s[4] = byte(s1 >> 11) + s[5] = byte((s1 >> 19) | (s2 << 2)) + s[6] = byte(s2 >> 6) + s[7] = byte((s2 >> 14) | (s3 << 7)) + s[8] = byte(s3 >> 1) + s[9] = byte(s3 >> 9) + s[10] = byte((s3 >> 17) | (s4 << 4)) + s[11] = byte(s4 >> 4) + s[12] = byte(s4 >> 12) + s[13] = byte((s4 >> 20) | (s5 << 1)) + s[14] = byte(s5 >> 7) + s[15] = byte((s5 >> 15) | (s6 << 6)) + s[16] = byte(s6 >> 2) + s[17] = byte(s6 >> 10) + s[18] = byte((s6 >> 18) | (s7 << 3)) + s[19] = byte(s7 >> 5) + s[20] = byte(s7 >> 13) + s[21] = byte(s8 >> 0) + s[22] = byte(s8 >> 8) + s[23] = byte((s8 >> 16) | (s9 << 5)) + s[24] = byte(s9 >> 3) + s[25] = byte(s9 >> 11) + s[26] = byte((s9 >> 19) | (s10 << 2)) + s[27] 
= byte(s10 >> 6) + s[28] = byte((s10 >> 14) | (s11 << 7)) + s[29] = byte(s11 >> 1) + s[30] = byte(s11 >> 9) + s[31] = byte(s11 >> 17) +} + +// Input: +// s[0]+256*s[1]+...+256^63*s[63] = s +// +// Output: +// s[0]+256*s[1]+...+256^31*s[31] = s mod l +// where l = 2^252 + 27742317777372353535851937790883648493. +func ScReduce(out *[32]byte, s *[64]byte) { + s0 := 2097151 & load3(s[:]) + s1 := 2097151 & (load4(s[2:]) >> 5) + s2 := 2097151 & (load3(s[5:]) >> 2) + s3 := 2097151 & (load4(s[7:]) >> 7) + s4 := 2097151 & (load4(s[10:]) >> 4) + s5 := 2097151 & (load3(s[13:]) >> 1) + s6 := 2097151 & (load4(s[15:]) >> 6) + s7 := 2097151 & (load3(s[18:]) >> 3) + s8 := 2097151 & load3(s[21:]) + s9 := 2097151 & (load4(s[23:]) >> 5) + s10 := 2097151 & (load3(s[26:]) >> 2) + s11 := 2097151 & (load4(s[28:]) >> 7) + s12 := 2097151 & (load4(s[31:]) >> 4) + s13 := 2097151 & (load3(s[34:]) >> 1) + s14 := 2097151 & (load4(s[36:]) >> 6) + s15 := 2097151 & (load3(s[39:]) >> 3) + s16 := 2097151 & load3(s[42:]) + s17 := 2097151 & (load4(s[44:]) >> 5) + s18 := 2097151 & (load3(s[47:]) >> 2) + s19 := 2097151 & (load4(s[49:]) >> 7) + s20 := 2097151 & (load4(s[52:]) >> 4) + s21 := 2097151 & (load3(s[55:]) >> 1) + s22 := 2097151 & (load4(s[57:]) >> 6) + s23 := (load4(s[60:]) >> 3) + + s11 += s23 * 666643 + s12 += s23 * 470296 + s13 += s23 * 654183 + s14 -= s23 * 997805 + s15 += s23 * 136657 + s16 -= s23 * 683901 + s23 = 0 + + s10 += s22 * 666643 + s11 += s22 * 470296 + s12 += s22 * 654183 + s13 -= s22 * 997805 + s14 += s22 * 136657 + s15 -= s22 * 683901 + s22 = 0 + + s9 += s21 * 666643 + s10 += s21 * 470296 + s11 += s21 * 654183 + s12 -= s21 * 997805 + s13 += s21 * 136657 + s14 -= s21 * 683901 + s21 = 0 + + s8 += s20 * 666643 + s9 += s20 * 470296 + s10 += s20 * 654183 + s11 -= s20 * 997805 + s12 += s20 * 136657 + s13 -= s20 * 683901 + s20 = 0 + + s7 += s19 * 666643 + s8 += s19 * 470296 + s9 += s19 * 654183 + s10 -= s19 * 997805 + s11 += s19 * 136657 + s12 -= s19 * 683901 + s19 = 0 + + s6 += s18 * 666643 + s7 += s18 * 470296 + s8 += s18 * 654183 + s9 -= s18 * 997805 + s10 += s18 * 136657 + s11 -= s18 * 683901 + s18 = 0 + + var carry [17]int64 + + carry[6] = (s6 + (1 << 20)) >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[8] = (s8 + (1 << 20)) >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[10] = (s10 + (1 << 20)) >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + carry[12] = (s12 + (1 << 20)) >> 21 + s13 += carry[12] + s12 -= carry[12] << 21 + carry[14] = (s14 + (1 << 20)) >> 21 + s15 += carry[14] + s14 -= carry[14] << 21 + carry[16] = (s16 + (1 << 20)) >> 21 + s17 += carry[16] + s16 -= carry[16] << 21 + + carry[7] = (s7 + (1 << 20)) >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[9] = (s9 + (1 << 20)) >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[11] = (s11 + (1 << 20)) >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + carry[13] = (s13 + (1 << 20)) >> 21 + s14 += carry[13] + s13 -= carry[13] << 21 + carry[15] = (s15 + (1 << 20)) >> 21 + s16 += carry[15] + s15 -= carry[15] << 21 + + s5 += s17 * 666643 + s6 += s17 * 470296 + s7 += s17 * 654183 + s8 -= s17 * 997805 + s9 += s17 * 136657 + s10 -= s17 * 683901 + s17 = 0 + + s4 += s16 * 666643 + s5 += s16 * 470296 + s6 += s16 * 654183 + s7 -= s16 * 997805 + s8 += s16 * 136657 + s9 -= s16 * 683901 + s16 = 0 + + s3 += s15 * 666643 + s4 += s15 * 470296 + s5 += s15 * 654183 + s6 -= s15 * 997805 + s7 += s15 * 136657 + s8 -= s15 * 683901 + s15 = 0 + + s2 += s14 * 666643 + s3 += s14 * 470296 + s4 += s14 * 654183 + s5 -= s14 * 997805 + s6 += s14 * 136657 
+ s7 -= s14 * 683901 + s14 = 0 + + s1 += s13 * 666643 + s2 += s13 * 470296 + s3 += s13 * 654183 + s4 -= s13 * 997805 + s5 += s13 * 136657 + s6 -= s13 * 683901 + s13 = 0 + + s0 += s12 * 666643 + s1 += s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = (s0 + (1 << 20)) >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[2] = (s2 + (1 << 20)) >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[4] = (s4 + (1 << 20)) >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[6] = (s6 + (1 << 20)) >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[8] = (s8 + (1 << 20)) >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[10] = (s10 + (1 << 20)) >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + + carry[1] = (s1 + (1 << 20)) >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[3] = (s3 + (1 << 20)) >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[5] = (s5 + (1 << 20)) >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[7] = (s7 + (1 << 20)) >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[9] = (s9 + (1 << 20)) >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[11] = (s11 + (1 << 20)) >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + + s0 += s12 * 666643 + s1 += s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = s0 >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[1] = s1 >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[2] = s2 >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[3] = s3 >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[4] = s4 >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[5] = s5 >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[6] = s6 >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[7] = s7 >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[8] = s8 >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[9] = s9 >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[10] = s10 >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + carry[11] = s11 >> 21 + s12 += carry[11] + s11 -= carry[11] << 21 + + s0 += s12 * 666643 + s1 += s12 * 470296 + s2 += s12 * 654183 + s3 -= s12 * 997805 + s4 += s12 * 136657 + s5 -= s12 * 683901 + s12 = 0 + + carry[0] = s0 >> 21 + s1 += carry[0] + s0 -= carry[0] << 21 + carry[1] = s1 >> 21 + s2 += carry[1] + s1 -= carry[1] << 21 + carry[2] = s2 >> 21 + s3 += carry[2] + s2 -= carry[2] << 21 + carry[3] = s3 >> 21 + s4 += carry[3] + s3 -= carry[3] << 21 + carry[4] = s4 >> 21 + s5 += carry[4] + s4 -= carry[4] << 21 + carry[5] = s5 >> 21 + s6 += carry[5] + s5 -= carry[5] << 21 + carry[6] = s6 >> 21 + s7 += carry[6] + s6 -= carry[6] << 21 + carry[7] = s7 >> 21 + s8 += carry[7] + s7 -= carry[7] << 21 + carry[8] = s8 >> 21 + s9 += carry[8] + s8 -= carry[8] << 21 + carry[9] = s9 >> 21 + s10 += carry[9] + s9 -= carry[9] << 21 + carry[10] = s10 >> 21 + s11 += carry[10] + s10 -= carry[10] << 21 + + out[0] = byte(s0 >> 0) + out[1] = byte(s0 >> 8) + out[2] = byte((s0 >> 16) | (s1 << 5)) + out[3] = byte(s1 >> 3) + out[4] = byte(s1 >> 11) + out[5] = byte((s1 >> 19) | (s2 << 2)) + out[6] = byte(s2 >> 6) + out[7] = byte((s2 >> 14) | (s3 << 7)) + out[8] = byte(s3 >> 1) + out[9] = byte(s3 >> 9) + out[10] = byte((s3 >> 17) | (s4 << 4)) + out[11] = byte(s4 >> 4) + out[12] = byte(s4 >> 12) + out[13] = byte((s4 >> 20) | (s5 << 1)) + out[14] = byte(s5 >> 7) + out[15] = byte((s5 >> 15) | (s6 << 6)) + out[16] = byte(s6 >> 2) + out[17] = byte(s6 >> 10) + out[18] = byte((s6 >> 18) | (s7 << 
3)) + out[19] = byte(s7 >> 5) + out[20] = byte(s7 >> 13) + out[21] = byte(s8 >> 0) + out[22] = byte(s8 >> 8) + out[23] = byte((s8 >> 16) | (s9 << 5)) + out[24] = byte(s9 >> 3) + out[25] = byte(s9 >> 11) + out[26] = byte((s9 >> 19) | (s10 << 2)) + out[27] = byte(s10 >> 6) + out[28] = byte((s10 >> 14) | (s11 << 7)) + out[29] = byte(s11 >> 1) + out[30] = byte(s11 >> 9) + out[31] = byte(s11 >> 17) +} diff --git a/vendor/src/github.com/docker/notary/client/changelist/change.go b/vendor/src/github.com/docker/notary/client/changelist/change.go new file mode 100644 index 0000000000..77544dc661 --- /dev/null +++ b/vendor/src/github.com/docker/notary/client/changelist/change.go @@ -0,0 +1,47 @@ +package changelist + +// TufChange represents a change to a TUF repo +type TufChange struct { + // Abbreviated because Go doesn't permit a field and method of the same name + Actn int `json:"action"` + Role string `json:"role"` + ChangeType string `json:"type"` + ChangePath string `json:"path"` + Data []byte `json:"data"` +} + +// NewTufChange initializes a tufChange object +func NewTufChange(action int, role, changeType, changePath string, content []byte) *TufChange { + return &TufChange{ + Actn: action, + Role: role, + ChangeType: changeType, + ChangePath: changePath, + Data: content, + } +} + +// Action return c.Actn +func (c TufChange) Action() int { + return c.Actn +} + +// Scope returns c.Role +func (c TufChange) Scope() string { + return c.Role +} + +// Type returns c.ChangeType +func (c TufChange) Type() string { + return c.ChangeType +} + +// Path return c.ChangePath +func (c TufChange) Path() string { + return c.ChangePath +} + +// Content returns c.Data +func (c TufChange) Content() []byte { + return c.Data +} diff --git a/vendor/src/github.com/docker/notary/client/changelist/changelist.go b/vendor/src/github.com/docker/notary/client/changelist/changelist.go new file mode 100644 index 0000000000..aef4970119 --- /dev/null +++ b/vendor/src/github.com/docker/notary/client/changelist/changelist.go @@ -0,0 +1,29 @@ +package changelist + +// memChangeList implements a simple in memory change list. +type memChangelist struct { + changes []Change +} + +// List returns a list of Changes +func (cl memChangelist) List() []Change { + return cl.changes +} + +// Add adds a change to the in-memory change list +func (cl *memChangelist) Add(c Change) error { + cl.changes = append(cl.changes, c) + return nil +} + +// Clear empties the changelist file. +func (cl *memChangelist) Clear(archive string) error { + // appending to a nil list initializes it. 
+ cl.changes = nil + return nil +} + +// Close is a no-op in this in-memory change-list +func (cl *memChangelist) Close() error { + return nil +} diff --git a/vendor/src/github.com/docker/notary/client/changelist/files_changelist.go b/vendor/src/github.com/docker/notary/client/changelist/files_changelist.go new file mode 100644 index 0000000000..afee0cbcb9 --- /dev/null +++ b/vendor/src/github.com/docker/notary/client/changelist/files_changelist.go @@ -0,0 +1,114 @@ +package changelist + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "os" + "path" + "sort" + "time" + + "github.com/Sirupsen/logrus" + "github.com/docker/distribution/uuid" +) + +// FileChangelist stores all the changes as files +type FileChangelist struct { + dir string +} + +// NewFileChangelist is a convenience method for returning FileChangeLists +func NewFileChangelist(dir string) (*FileChangelist, error) { + logrus.Debug("Making dir path: ", dir) + err := os.MkdirAll(dir, 0700) + if err != nil { + return nil, err + } + return &FileChangelist{dir: dir}, nil +} + +// List returns a list of sorted changes +func (cl FileChangelist) List() []Change { + var changes []Change + dir, err := os.Open(cl.dir) + if err != nil { + return changes + } + defer dir.Close() + fileInfos, err := dir.Readdir(0) + if err != nil { + return changes + } + sort.Sort(fileChanges(fileInfos)) + for _, f := range fileInfos { + if f.IsDir() { + continue + } + raw, err := ioutil.ReadFile(path.Join(cl.dir, f.Name())) + if err != nil { + logrus.Warn(err.Error()) + continue + } + c := &TufChange{} + err = json.Unmarshal(raw, c) + if err != nil { + logrus.Warn(err.Error()) + continue + } + changes = append(changes, c) + } + return changes +} + +// Add adds a change to the file change list +func (cl FileChangelist) Add(c Change) error { + cJSON, err := json.Marshal(c) + if err != nil { + return err + } + filename := fmt.Sprintf("%020d_%s.change", time.Now().UnixNano(), uuid.Generate()) + return ioutil.WriteFile(path.Join(cl.dir, filename), cJSON, 0644) +} + +// Clear clears the change list +func (cl FileChangelist) Clear(archive string) error { + dir, err := os.Open(cl.dir) + if err != nil { + return err + } + defer dir.Close() + files, err := dir.Readdir(0) + if err != nil { + return err + } + for _, f := range files { + os.Remove(path.Join(cl.dir, f.Name())) + } + return nil +} + +// Close is a no-op +func (cl FileChangelist) Close() error { + // Nothing to do here + return nil +} + +type fileChanges []os.FileInfo + +// Len returns the length of a file change list +func (cs fileChanges) Len() int { + return len(cs) +} + +// Less compares the names of two different file changes +func (cs fileChanges) Less(i, j int) bool { + return cs[i].Name() < cs[j].Name() +} + +// Swap swaps the position of two file changes +func (cs fileChanges) Swap(i, j int) { + tmp := cs[i] + cs[i] = cs[j] + cs[j] = tmp +} diff --git a/vendor/src/github.com/docker/notary/client/changelist/interface.go b/vendor/src/github.com/docker/notary/client/changelist/interface.go new file mode 100644 index 0000000000..fd24b65c54 --- /dev/null +++ b/vendor/src/github.com/docker/notary/client/changelist/interface.go @@ -0,0 +1,59 @@ +package changelist + +// Changelist is the interface for all TUF change lists +type Changelist interface { + // List returns the ordered list of changes + // currently stored + List() []Change + + // Add change appends the provided change to + // the list of changes + Add(Change) error + + // Clear empties the current change list. 
+ // Archive may be provided as a directory path + // to save a copy of the changelist in that location + Clear(archive string) error + + // Close syncronizes any pending writes to the underlying + // storage and closes the file/connection + Close() error +} + +const ( + // ActionCreate represents a Create action + ActionCreate = iota + // ActionUpdate represents an Update action + ActionUpdate + // ActionDelete represents a Delete action + ActionDelete +) + +// Change is the interface for a TUF Change +type Change interface { + // "create","update", or "delete" + Action() int + + // Where the change should be made. + // For TUF this will be the role + Scope() string + + // The content type being affected. + // For TUF this will be "target", or "delegation". + // If the type is "delegation", the Scope will be + // used to determine if a root role is being updated + // or a target delegation. + Type() string + + // Path indicates the entry within a role to be affected by the + // change. For targets, this is simply the target's path, + // for delegations it's the delegated role name. + Path() string + + // Serialized content that the interpreter of a changelist + // can use to apply the change. + // For TUF this will be the serialized JSON that needs + // to be inserted or merged. In the case of a "delete" + // action, it will be nil. + Content() []byte +} diff --git a/vendor/src/github.com/docker/notary/client/client.go b/vendor/src/github.com/docker/notary/client/client.go new file mode 100644 index 0000000000..6c8e3a8aa9 --- /dev/null +++ b/vendor/src/github.com/docker/notary/client/client.go @@ -0,0 +1,569 @@ +package client + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/http" + "os" + "path/filepath" + + "github.com/Sirupsen/logrus" + "github.com/docker/notary/client/changelist" + "github.com/docker/notary/cryptoservice" + "github.com/docker/notary/keystoremanager" + "github.com/docker/notary/pkg/passphrase" + "github.com/docker/notary/trustmanager" + "github.com/endophage/gotuf" + tufclient "github.com/endophage/gotuf/client" + "github.com/endophage/gotuf/data" + tuferrors "github.com/endophage/gotuf/errors" + "github.com/endophage/gotuf/keys" + "github.com/endophage/gotuf/signed" + "github.com/endophage/gotuf/store" +) + +const maxSize = 5 << 20 + +func init() { + data.SetDefaultExpiryTimes( + map[string]int{ + "root": 3650, + "targets": 1095, + "snapshot": 1095, + }, + ) +} + +// ErrRepoNotInitialized is returned when trying to can publish on an uninitialized +// notary repository +type ErrRepoNotInitialized struct{} + +// ErrRepoNotInitialized is returned when trying to can publish on an uninitialized +// notary repository +func (err *ErrRepoNotInitialized) Error() string { + return "Repository has not been initialized" +} + +// ErrExpired is returned when the metadata for a role has expired +type ErrExpired struct { + signed.ErrExpired +} + +const ( + tufDir = "tuf" +) + +// ErrRepositoryNotExist gets returned when trying to make an action over a repository +/// that doesn't exist. +var ErrRepositoryNotExist = errors.New("repository does not exist") + +// NotaryRepository stores all the information needed to operate on a notary +// repository. 
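To make the changelist pieces above concrete, here is an illustrative sketch (not part of the vendored sources; the directory and target name are made up) showing a TufChange being queued in a FileChangelist and read back in order:

package main

import (
	"fmt"
	"log"

	"github.com/docker/notary/client/changelist"
)

func main() {
	// A FileChangelist persists one JSON file per pending change.
	cl, err := changelist.NewFileChangelist("/tmp/example-changelist")
	if err != nil {
		log.Fatal(err)
	}
	defer cl.Close()

	// Queue an "add target" action; Data would normally carry the serialized
	// data.FileMeta (length and hashes) of the target being added.
	c := changelist.NewTufChange(changelist.ActionCreate, "targets", "target",
		"latest", []byte(`{"length":0,"hashes":{}}`))
	if err := cl.Add(c); err != nil {
		log.Fatal(err)
	}

	// List returns changes sorted by file name (a nanosecond timestamp plus a
	// UUID), so they replay in the order they were queued.
	for _, ch := range cl.List() {
		fmt.Println(ch.Action(), ch.Scope(), ch.Type(), ch.Path())
	}
}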
+type NotaryRepository struct { + baseDir string + gun string + baseURL string + tufRepoPath string + fileStore store.MetadataStore + cryptoService signed.CryptoService + tufRepo *tuf.TufRepo + roundTrip http.RoundTripper + KeyStoreManager *keystoremanager.KeyStoreManager +} + +// Target represents a simplified version of the data TUF operates on, so external +// applications don't have to depend on tuf data types. +type Target struct { + Name string + Hashes data.Hashes + Length int64 +} + +// NewTarget is a helper method that returns a Target +func NewTarget(targetName string, targetPath string) (*Target, error) { + b, err := ioutil.ReadFile(targetPath) + if err != nil { + return nil, err + } + + meta, err := data.NewFileMeta(bytes.NewBuffer(b)) + if err != nil { + return nil, err + } + + return &Target{Name: targetName, Hashes: meta.Hashes, Length: meta.Length}, nil +} + +// NewNotaryRepository is a helper method that returns a new notary repository. +// It takes the base directory under where all the trust files will be stored +// (usually ~/.docker/trust/). +func NewNotaryRepository(baseDir, gun, baseURL string, rt http.RoundTripper, + passphraseRetriever passphrase.Retriever) (*NotaryRepository, error) { + + keyStoreManager, err := keystoremanager.NewKeyStoreManager(baseDir, passphraseRetriever) + if err != nil { + return nil, err + } + + cryptoService := cryptoservice.NewCryptoService(gun, keyStoreManager.NonRootKeyStore()) + + nRepo := &NotaryRepository{ + gun: gun, + baseDir: baseDir, + baseURL: baseURL, + tufRepoPath: filepath.Join(baseDir, tufDir, filepath.FromSlash(gun)), + cryptoService: cryptoService, + roundTrip: rt, + KeyStoreManager: keyStoreManager, + } + + fileStore, err := store.NewFilesystemStore( + nRepo.tufRepoPath, + "metadata", + "json", + "", + ) + if err != nil { + return nil, err + } + nRepo.fileStore = fileStore + + return nRepo, nil +} + +// Initialize creates a new repository by using rootKey as the root Key for the +// TUF repository. +func (r *NotaryRepository) Initialize(uCryptoService *cryptoservice.UnlockedCryptoService) error { + rootCert, err := uCryptoService.GenerateCertificate(r.gun) + if err != nil { + return err + } + r.KeyStoreManager.AddTrustedCert(rootCert) + + // The root key gets stored in the TUF metadata X509 encoded, linking + // the tuf root.json to our X509 PKI. + // If the key is RSA, we store it as type RSAx509, if it is ECDSA we store it + // as ECDSAx509 to allow the gotuf verifiers to correctly decode the + // key on verification of signatures. + var algorithmType data.KeyAlgorithm + algorithm := uCryptoService.PrivKey.Algorithm() + switch algorithm { + case data.RSAKey: + algorithmType = data.RSAx509Key + case data.ECDSAKey: + algorithmType = data.ECDSAx509Key + default: + return fmt.Errorf("invalid format for root key: %s", algorithm) + } + + // Generate a x509Key using the rootCert as the public key + rootKey := data.NewPublicKey(algorithmType, trustmanager.CertToPEM(rootCert)) + + // Creates a symlink between the certificate ID and the real public key it + // is associated with. This is used to be able to retrieve the root private key + // associated with a particular certificate + logrus.Debugf("Linking %s to %s.", rootKey.ID(), uCryptoService.ID()) + err = r.KeyStoreManager.RootKeyStore().Link(uCryptoService.ID()+"_root", rootKey.ID()+"_root") + if err != nil { + return err + } + + // All the timestamp keys are generated by the remote server. 
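// The notary server keeps the timestamp private key so it can re-sign the
// timestamp role on demand without client involvement; the client only
// retrieves the public half here and trusts it into the root metadata.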
+ remote, err := getRemoteStore(r.baseURL, r.gun, r.roundTrip) + rawTSKey, err := remote.GetKey("timestamp") + if err != nil { + return err + } + + parsedKey := &data.TUFKey{} + err = json.Unmarshal(rawTSKey, parsedKey) + if err != nil { + return err + } + + // Turn the JSON timestamp key from the remote server into a TUFKey + timestampKey := data.NewPublicKey(parsedKey.Algorithm(), parsedKey.Public()) + logrus.Debugf("got remote %s timestamp key with keyID: %s", parsedKey.Algorithm(), timestampKey.ID()) + + // This is currently hardcoding the targets and snapshots keys to ECDSA + // Targets and snapshot keys are always generated locally. + targetsKey, err := r.cryptoService.Create("targets", data.ECDSAKey) + if err != nil { + return err + } + snapshotKey, err := r.cryptoService.Create("snapshot", data.ECDSAKey) + if err != nil { + return err + } + + kdb := keys.NewDB() + + kdb.AddKey(rootKey) + kdb.AddKey(targetsKey) + kdb.AddKey(snapshotKey) + kdb.AddKey(timestampKey) + + err = initRoles(kdb, rootKey, targetsKey, snapshotKey, timestampKey) + if err != nil { + return err + } + + r.tufRepo = tuf.NewTufRepo(kdb, r.cryptoService) + + err = r.tufRepo.InitRoot(false) + if err != nil { + logrus.Debug("Error on InitRoot: ", err.Error()) + switch err.(type) { + case tuferrors.ErrInsufficientSignatures, trustmanager.ErrPasswordInvalid: + default: + return err + } + } + err = r.tufRepo.InitTargets() + if err != nil { + logrus.Debug("Error on InitTargets: ", err.Error()) + return err + } + err = r.tufRepo.InitSnapshot() + if err != nil { + logrus.Debug("Error on InitSnapshot: ", err.Error()) + return err + } + + return r.saveMetadata(uCryptoService.CryptoService) +} + +// AddTarget adds a new target to the repository, forcing a timestamps check from TUF +func (r *NotaryRepository) AddTarget(target *Target) error { + cl, err := changelist.NewFileChangelist(filepath.Join(r.tufRepoPath, "changelist")) + if err != nil { + return err + } + logrus.Debugf("Adding target \"%s\" with sha256 \"%x\" and size %d bytes.\n", target.Name, target.Hashes["sha256"], target.Length) + + meta := data.FileMeta{Length: target.Length, Hashes: target.Hashes} + metaJSON, err := json.Marshal(meta) + if err != nil { + return err + } + + c := changelist.NewTufChange(changelist.ActionCreate, "targets", "target", target.Name, metaJSON) + err = cl.Add(c) + if err != nil { + return err + } + return cl.Close() +} + +// ListTargets lists all targets for the current repository +func (r *NotaryRepository) ListTargets() ([]*Target, error) { + c, err := r.bootstrapClient() + if err != nil { + return nil, err + } + + err = c.Update() + if err != nil { + if err, ok := err.(signed.ErrExpired); ok { + return nil, ErrExpired{err} + } + return nil, err + } + + var targetList []*Target + for name, meta := range r.tufRepo.Targets["targets"].Signed.Targets { + target := &Target{Name: name, Hashes: meta.Hashes, Length: meta.Length} + targetList = append(targetList, target) + } + + return targetList, nil +} + +// GetTargetByName returns a target given a name +func (r *NotaryRepository) GetTargetByName(name string) (*Target, error) { + c, err := r.bootstrapClient() + if err != nil { + return nil, err + } + + err = c.Update() + if err != nil { + if err, ok := err.(signed.ErrExpired); ok { + return nil, ErrExpired{err} + } + return nil, err + } + + meta, err := c.TargetMeta(name) + if meta == nil { + return nil, fmt.Errorf("No trust data for %s", name) + } else if err != nil { + return nil, err + } + + return &Target{Name: name, Hashes: meta.Hashes, 
Length: meta.Length}, nil +} + +// Publish pushes the local changes in signed material to the remote notary-server +// Conceptually it performs an operation similar to a `git rebase` +func (r *NotaryRepository) Publish() error { + var updateRoot bool + var root *data.Signed + // attempt to initialize the repo from the remote store + c, err := r.bootstrapClient() + if err != nil { + if _, ok := err.(store.ErrMetaNotFound); ok { + // if the remote store return a 404 (translated into ErrMetaNotFound), + // the repo hasn't been initialized yet. Attempt to load it from disk. + err := r.bootstrapRepo() + if err != nil { + // Repo hasn't been initialized, It must be initialized before + // it can be published. Return an error and let caller determine + // what it wants to do. + logrus.Debug(err.Error()) + logrus.Debug("Repository not initialized during Publish") + return &ErrRepoNotInitialized{} + } + // We had local data but the server doesn't know about the repo yet, + // ensure we will push the initial root file + root, err = r.tufRepo.Root.ToSigned() + if err != nil { + return err + } + updateRoot = true + } else { + // The remote store returned an error other than 404. We're + // unable to determine if the repo has been initialized or not. + logrus.Error("Could not publish Repository: ", err.Error()) + return err + } + } else { + // If we were successfully able to bootstrap the client (which only pulls + // root.json), update it the rest of the tuf metadata in preparation for + // applying the changelist. + err = c.Update() + if err != nil { + if err, ok := err.(signed.ErrExpired); ok { + return ErrExpired{err} + } + return err + } + } + // load the changelist for this repo + changelistDir := filepath.Join(r.tufRepoPath, "changelist") + cl, err := changelist.NewFileChangelist(changelistDir) + if err != nil { + logrus.Debug("Error initializing changelist") + return err + } + // apply the changelist to the repo + err = applyChangelist(r.tufRepo, cl) + if err != nil { + logrus.Debug("Error applying changelist") + return err + } + + // check if our root file is nearing expiry. Resign if it is. 
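// nearExpiry (defined in helpers.go) reports whether the root expires within
// the next six months; if so, or if the root metadata is dirty, it is
// re-signed below with a fresh expiry from data.DefaultExpires("root").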
+ if nearExpiry(r.tufRepo.Root) || r.tufRepo.Root.Dirty { + if err != nil { + return err + } + rootKeyID := r.tufRepo.Root.Signed.Roles["root"].KeyIDs[0] + rootCryptoService, err := r.KeyStoreManager.GetRootCryptoService(rootKeyID) + if err != nil { + return err + } + root, err = r.tufRepo.SignRoot(data.DefaultExpires("root"), rootCryptoService.CryptoService) + if err != nil { + return err + } + updateRoot = true + } + // we will always resign targets and snapshots + targets, err := r.tufRepo.SignTargets("targets", data.DefaultExpires("targets"), nil) + if err != nil { + return err + } + snapshot, err := r.tufRepo.SignSnapshot(data.DefaultExpires("snapshot"), nil) + if err != nil { + return err + } + + remote, err := getRemoteStore(r.baseURL, r.gun, r.roundTrip) + if err != nil { + return err + } + + // ensure we can marshal all the json before sending anything to remote + targetsJSON, err := json.Marshal(targets) + if err != nil { + return err + } + snapshotJSON, err := json.Marshal(snapshot) + if err != nil { + return err + } + update := make(map[string][]byte) + // if we need to update the root, marshal it and push the update to remote + if updateRoot { + rootJSON, err := json.Marshal(root) + if err != nil { + return err + } + update["root"] = rootJSON + } + update["targets"] = targetsJSON + update["snapshot"] = snapshotJSON + err = remote.SetMultiMeta(update) + if err != nil { + return err + } + err = cl.Clear("") + if err != nil { + // This is not a critical problem when only a single host is pushing + // but will cause weird behaviour if changelist cleanup is failing + // and there are multiple hosts writing to the repo. + logrus.Warn("Unable to clear changelist. You may want to manually delete the folder ", changelistDir) + } + return nil +} + +func (r *NotaryRepository) bootstrapRepo() error { + kdb := keys.NewDB() + tufRepo := tuf.NewTufRepo(kdb, r.cryptoService) + + logrus.Debugf("Loading trusted collection.") + rootJSON, err := r.fileStore.GetMeta("root", 0) + if err != nil { + return err + } + root := &data.Signed{} + err = json.Unmarshal(rootJSON, root) + if err != nil { + return err + } + tufRepo.SetRoot(root) + targetsJSON, err := r.fileStore.GetMeta("targets", 0) + if err != nil { + return err + } + targets := &data.Signed{} + err = json.Unmarshal(targetsJSON, targets) + if err != nil { + return err + } + tufRepo.SetTargets("targets", targets) + snapshotJSON, err := r.fileStore.GetMeta("snapshot", 0) + if err != nil { + return err + } + snapshot := &data.Signed{} + err = json.Unmarshal(snapshotJSON, snapshot) + if err != nil { + return err + } + tufRepo.SetSnapshot(snapshot) + + r.tufRepo = tufRepo + + return nil +} + +func (r *NotaryRepository) saveMetadata(rootCryptoService signed.CryptoService) error { + logrus.Debugf("Saving changes to Trusted Collection.") + signedRoot, err := r.tufRepo.SignRoot(data.DefaultExpires("root"), rootCryptoService) + if err != nil { + return err + } + rootJSON, err := json.Marshal(signedRoot) + if err != nil { + return err + } + + targetsToSave := make(map[string][]byte) + for t := range r.tufRepo.Targets { + signedTargets, err := r.tufRepo.SignTargets(t, data.DefaultExpires("targets"), nil) + if err != nil { + return err + } + targetsJSON, err := json.Marshal(signedTargets) + if err != nil { + return err + } + targetsToSave[t] = targetsJSON + } + + signedSnapshot, err := r.tufRepo.SignSnapshot(data.DefaultExpires("snapshot"), nil) + if err != nil { + return err + } + snapshotJSON, err := json.Marshal(signedSnapshot) + if err != nil { + 
return err + } + + err = r.fileStore.SetMeta("root", rootJSON) + if err != nil { + return err + } + + for role, blob := range targetsToSave { + parentDir := filepath.Dir(role) + os.MkdirAll(parentDir, 0755) + r.fileStore.SetMeta(role, blob) + } + + return r.fileStore.SetMeta("snapshot", snapshotJSON) +} + +func (r *NotaryRepository) bootstrapClient() (*tufclient.Client, error) { + var rootJSON []byte + remote, err := getRemoteStore(r.baseURL, r.gun, r.roundTrip) + if err == nil { + // if remote store successfully set up, try and get root from remote + rootJSON, err = remote.GetMeta("root", maxSize) + } + + // if remote store couldn't be setup, or we failed to get a root from it + // load the root from cache (offline operation) + if err != nil { + if err, ok := err.(store.ErrMetaNotFound); ok { + // if the error was MetaNotFound then we successfully contacted + // the store and it doesn't know about the repo. + return nil, err + } + rootJSON, err = r.fileStore.GetMeta("root", maxSize) + if err != nil { + // if cache didn't return a root, we cannot proceed + return nil, store.ErrMetaNotFound{} + } + } + root := &data.Signed{} + err = json.Unmarshal(rootJSON, root) + if err != nil { + return nil, err + } + + err = r.KeyStoreManager.ValidateRoot(root, r.gun) + if err != nil { + return nil, err + } + + kdb := keys.NewDB() + r.tufRepo = tuf.NewTufRepo(kdb, r.cryptoService) + + err = r.tufRepo.SetRoot(root) + if err != nil { + return nil, err + } + + return tufclient.NewClient( + r.tufRepo, + remote, + kdb, + r.fileStore, + ), nil +} diff --git a/vendor/src/github.com/docker/notary/client/helpers.go b/vendor/src/github.com/docker/notary/client/helpers.go new file mode 100644 index 0000000000..003f73fa91 --- /dev/null +++ b/vendor/src/github.com/docker/notary/client/helpers.go @@ -0,0 +1,96 @@ +package client + +import ( + "encoding/json" + "net/http" + "time" + + "github.com/docker/notary/client/changelist" + "github.com/endophage/gotuf" + "github.com/endophage/gotuf/data" + "github.com/endophage/gotuf/keys" + "github.com/endophage/gotuf/store" +) + +// Use this to initialize remote HTTPStores from the config settings +func getRemoteStore(baseURL, gun string, rt http.RoundTripper) (store.RemoteStore, error) { + return store.NewHTTPStore( + baseURL+"/v2/"+gun+"/_trust/tuf/", + "", + "json", + "", + "key", + rt, + ) +} + +func applyChangelist(repo *tuf.TufRepo, cl changelist.Changelist) error { + changes := cl.List() + var err error + for _, c := range changes { + if c.Scope() == "targets" { + applyTargetsChange(repo, c) + } + if err != nil { + return err + } + } + return nil +} + +func applyTargetsChange(repo *tuf.TufRepo, c changelist.Change) error { + var err error + meta := &data.FileMeta{} + err = json.Unmarshal(c.Content(), meta) + if err != nil { + return nil + } + if c.Action() == changelist.ActionCreate { + files := data.Files{c.Path(): *meta} + _, err = repo.AddTargets("targets", files) + } else if c.Action() == changelist.ActionDelete { + err = repo.RemoveTargets("targets", c.Path()) + } + if err != nil { + return err + } + return nil +} + +func nearExpiry(r *data.SignedRoot) bool { + plus6mo := time.Now().AddDate(0, 6, 0) + return r.Signed.Expires.Before(plus6mo) +} + +func initRoles(kdb *keys.KeyDB, rootKey, targetsKey, snapshotKey, timestampKey data.PublicKey) error { + rootRole, err := data.NewRole("root", 1, []string{rootKey.ID()}, nil, nil) + if err != nil { + return err + } + targetsRole, err := data.NewRole("targets", 1, []string{targetsKey.ID()}, nil, nil) + if err != nil { + 
return err + } + snapshotRole, err := data.NewRole("snapshot", 1, []string{snapshotKey.ID()}, nil, nil) + if err != nil { + return err + } + timestampRole, err := data.NewRole("timestamp", 1, []string{timestampKey.ID()}, nil, nil) + if err != nil { + return err + } + + if err := kdb.AddRole(rootRole); err != nil { + return err + } + if err := kdb.AddRole(targetsRole); err != nil { + return err + } + if err := kdb.AddRole(snapshotRole); err != nil { + return err + } + if err := kdb.AddRole(timestampRole); err != nil { + return err + } + return nil +} diff --git a/vendor/src/github.com/docker/notary/cryptoservice/crypto_service.go b/vendor/src/github.com/docker/notary/cryptoservice/crypto_service.go new file mode 100644 index 0000000000..a30a3fcc05 --- /dev/null +++ b/vendor/src/github.com/docker/notary/cryptoservice/crypto_service.go @@ -0,0 +1,199 @@ +package cryptoservice + +import ( + "crypto" + "crypto/ecdsa" + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "crypto/x509" + "fmt" + "path/filepath" + + "github.com/Sirupsen/logrus" + "github.com/agl/ed25519" + "github.com/docker/notary/trustmanager" + "github.com/endophage/gotuf/data" +) + +const ( + rsaKeySize = 2048 // Used for snapshots and targets keys +) + +// CryptoService implements Sign and Create, holding a specific GUN and keystore to +// operate on +type CryptoService struct { + gun string + keyStore trustmanager.KeyStore +} + +// NewCryptoService returns an instance of CryptoService +func NewCryptoService(gun string, keyStore trustmanager.KeyStore) *CryptoService { + return &CryptoService{gun: gun, keyStore: keyStore} +} + +// Create is used to generate keys for targets, snapshots and timestamps +func (ccs *CryptoService) Create(role string, algorithm data.KeyAlgorithm) (data.PublicKey, error) { + var privKey data.PrivateKey + var err error + + switch algorithm { + case data.RSAKey: + privKey, err = trustmanager.GenerateRSAKey(rand.Reader, rsaKeySize) + if err != nil { + return nil, fmt.Errorf("failed to generate RSA key: %v", err) + } + case data.ECDSAKey: + privKey, err = trustmanager.GenerateECDSAKey(rand.Reader) + if err != nil { + return nil, fmt.Errorf("failed to generate EC key: %v", err) + } + case data.ED25519Key: + privKey, err = trustmanager.GenerateED25519Key(rand.Reader) + if err != nil { + return nil, fmt.Errorf("failed to generate ED25519 key: %v", err) + } + default: + return nil, fmt.Errorf("private key type not supported for key generation: %s", algorithm) + } + logrus.Debugf("generated new %s key for role: %s and keyID: %s", algorithm, role, privKey.ID()) + + // Store the private key into our keystore with the name being: /GUN/ID.key with an alias of role + err = ccs.keyStore.AddKey(filepath.Join(ccs.gun, privKey.ID()), role, privKey) + if err != nil { + return nil, fmt.Errorf("failed to add key to filestore: %v", err) + } + return data.PublicKeyFromPrivate(privKey), nil +} + +// GetKey returns a key by ID +func (ccs *CryptoService) GetKey(keyID string) data.PublicKey { + key, _, err := ccs.keyStore.GetKey(keyID) + if err != nil { + return nil + } + return data.PublicKeyFromPrivate(key) +} + +// RemoveKey deletes a key by ID +func (ccs *CryptoService) RemoveKey(keyID string) error { + return ccs.keyStore.RemoveKey(keyID) +} + +// Sign returns the signatures for the payload with a set of keyIDs. It ignores +// errors to sign and expects the called to validate if the number of returned +// signatures is adequate. 
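Create generates a fresh key for the requested algorithm and stores it under <GUN>/<ID>.key with the role as its alias. As a rough illustration of the ECDSA branch only, the sketch below generates a P-256 key with the standard library and derives a fingerprint from its DER-encoded public key; the real key ID derivation lives in gotuf's data package, so the SHA-256-of-DER fingerprint here is an assumption for display purposes, not the actual ID scheme.

package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/sha256"
	"crypto/x509"
	"fmt"
)

func main() {
	// Generate a P-256 key, roughly what the ECDSA branch of Create produces.
	priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}

	// DER-encode the public half; hashing it yields a stable fingerprint.
	// (Illustrative only: gotuf defines how real TUF key IDs are computed.)
	der, err := x509.MarshalPKIXPublicKey(&priv.PublicKey)
	if err != nil {
		panic(err)
	}
	id := sha256.Sum256(der)
	fmt.Printf("illustrative key fingerprint: %x\n", id[:8])
}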
+func (ccs *CryptoService) Sign(keyIDs []string, payload []byte) ([]data.Signature, error) { + signatures := make([]data.Signature, 0, len(keyIDs)) + for _, keyid := range keyIDs { + // ccs.gun will be empty if this is the root key + keyName := filepath.Join(ccs.gun, keyid) + + var privKey data.PrivateKey + var err error + + privKey, _, err = ccs.keyStore.GetKey(keyName) + if err != nil { + logrus.Debugf("error attempting to retrieve key ID: %s, %v", keyid, err) + return nil, err + } + + algorithm := privKey.Algorithm() + var sigAlgorithm data.SigAlgorithm + var sig []byte + + switch algorithm { + case data.RSAKey: + sig, err = rsaSign(privKey, payload) + sigAlgorithm = data.RSAPSSSignature + case data.ECDSAKey: + sig, err = ecdsaSign(privKey, payload) + sigAlgorithm = data.ECDSASignature + case data.ED25519Key: + // ED25519 does not operate on a SHA256 hash + sig, err = ed25519Sign(privKey, payload) + sigAlgorithm = data.EDDSASignature + } + if err != nil { + logrus.Debugf("ignoring error attempting to %s sign with keyID: %s, %v", algorithm, keyid, err) + return nil, err + } + + logrus.Debugf("appending %s signature with Key ID: %s", algorithm, keyid) + + // Append signatures to result array + signatures = append(signatures, data.Signature{ + KeyID: keyid, + Method: sigAlgorithm, + Signature: sig[:], + }) + } + + return signatures, nil +} + +func rsaSign(privKey data.PrivateKey, message []byte) ([]byte, error) { + if privKey.Algorithm() != data.RSAKey { + return nil, fmt.Errorf("private key type not supported: %s", privKey.Algorithm()) + } + + hashed := sha256.Sum256(message) + + // Create an rsa.PrivateKey out of the private key bytes + rsaPrivKey, err := x509.ParsePKCS1PrivateKey(privKey.Private()) + if err != nil { + return nil, err + } + + // Use the RSA key to RSASSA-PSS sign the data + sig, err := rsa.SignPSS(rand.Reader, rsaPrivKey, crypto.SHA256, hashed[:], &rsa.PSSOptions{SaltLength: rsa.PSSSaltLengthEqualsHash}) + if err != nil { + return nil, err + } + + return sig, nil +} + +func ecdsaSign(privKey data.PrivateKey, message []byte) ([]byte, error) { + if privKey.Algorithm() != data.ECDSAKey { + return nil, fmt.Errorf("private key type not supported: %s", privKey.Algorithm()) + } + + hashed := sha256.Sum256(message) + + // Create an ecdsa.PrivateKey out of the private key bytes + ecdsaPrivKey, err := x509.ParseECPrivateKey(privKey.Private()) + if err != nil { + return nil, err + } + + // Use the ECDSA key to sign the data + r, s, err := ecdsa.Sign(rand.Reader, ecdsaPrivKey, hashed[:]) + if err != nil { + return nil, err + } + + rBytes, sBytes := r.Bytes(), s.Bytes() + octetLength := (ecdsaPrivKey.Params().BitSize + 7) >> 3 + + // MUST include leading zeros in the output + rBuf := make([]byte, octetLength-len(rBytes), octetLength) + sBuf := make([]byte, octetLength-len(sBytes), octetLength) + + rBuf = append(rBuf, rBytes...) + sBuf = append(sBuf, sBytes...) 
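ecdsaSign emits the signature as r||s with each value left-padded to the curve's octet length, which is why the leading-zero padding above matters: without it the concatenation would not have a fixed size. A self-contained sketch of the same fixed-length encoding, round-tripped through the standard library's verifier (names are illustrative):

package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/sha256"
	"fmt"
	"math/big"
)

func main() {
	priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}
	digest := sha256.Sum256([]byte("payload"))

	r, s, err := ecdsa.Sign(rand.Reader, priv, digest[:])
	if err != nil {
		panic(err)
	}

	// Left-pad r and s to the curve's octet length so the concatenated
	// signature is always exactly 2*octetLength bytes long.
	octetLength := (priv.Params().BitSize + 7) >> 3
	sig := make([]byte, 2*octetLength)
	rBytes, sBytes := r.Bytes(), s.Bytes()
	copy(sig[octetLength-len(rBytes):octetLength], rBytes)
	copy(sig[2*octetLength-len(sBytes):], sBytes)

	// A verifier splits the signature at the midpoint and rebuilds r and s;
	// leading zeros are harmless to big.Int.SetBytes.
	r2 := new(big.Int).SetBytes(sig[:octetLength])
	s2 := new(big.Int).SetBytes(sig[octetLength:])
	fmt.Println("signature valid:", ecdsa.Verify(&priv.PublicKey, digest[:], r2, s2))
}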
+ + return append(rBuf, sBuf...), nil +} + +func ed25519Sign(privKey data.PrivateKey, message []byte) ([]byte, error) { + if privKey.Algorithm() != data.ED25519Key { + return nil, fmt.Errorf("private key type not supported: %s", privKey.Algorithm()) + } + + priv := [ed25519.PrivateKeySize]byte{} + copy(priv[:], privKey.Private()[ed25519.PublicKeySize:]) + sig := ed25519.Sign(&priv, message) + + return sig[:], nil +} diff --git a/vendor/src/github.com/docker/notary/cryptoservice/unlocked_crypto_service.go b/vendor/src/github.com/docker/notary/cryptoservice/unlocked_crypto_service.go new file mode 100644 index 0000000000..8da5d444f0 --- /dev/null +++ b/vendor/src/github.com/docker/notary/cryptoservice/unlocked_crypto_service.go @@ -0,0 +1,83 @@ +package cryptoservice + +import ( + "crypto" + "crypto/ecdsa" + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "fmt" + + "github.com/docker/notary/trustmanager" + "github.com/endophage/gotuf/data" + "github.com/endophage/gotuf/signed" +) + +// UnlockedCryptoService encapsulates a private key and a cryptoservice that +// uses that private key, providing convinience methods for generation of +// certificates. +type UnlockedCryptoService struct { + PrivKey data.PrivateKey + CryptoService signed.CryptoService +} + +// NewUnlockedCryptoService creates an UnlockedCryptoService instance +func NewUnlockedCryptoService(privKey data.PrivateKey, cryptoService signed.CryptoService) *UnlockedCryptoService { + return &UnlockedCryptoService{ + PrivKey: privKey, + CryptoService: cryptoService, + } +} + +// ID gets a consistent ID based on the PrivateKey bytes and algorithm type +func (ucs *UnlockedCryptoService) ID() string { + return ucs.PublicKey().ID() +} + +// PublicKey Returns the public key associated with the private key +func (ucs *UnlockedCryptoService) PublicKey() data.PublicKey { + return data.PublicKeyFromPrivate(ucs.PrivKey) +} + +// GenerateCertificate generates an X509 Certificate from a template, given a GUN +func (ucs *UnlockedCryptoService) GenerateCertificate(gun string) (*x509.Certificate, error) { + algorithm := ucs.PrivKey.Algorithm() + var publicKey crypto.PublicKey + var privateKey crypto.PrivateKey + var err error + switch algorithm { + case data.RSAKey: + var rsaPrivateKey *rsa.PrivateKey + rsaPrivateKey, err = x509.ParsePKCS1PrivateKey(ucs.PrivKey.Private()) + privateKey = rsaPrivateKey + publicKey = rsaPrivateKey.Public() + case data.ECDSAKey: + var ecdsaPrivateKey *ecdsa.PrivateKey + ecdsaPrivateKey, err = x509.ParseECPrivateKey(ucs.PrivKey.Private()) + privateKey = ecdsaPrivateKey + publicKey = ecdsaPrivateKey.Public() + default: + return nil, fmt.Errorf("only RSA or ECDSA keys are currently supported. 
Found: %s", algorithm) + } + if err != nil { + return nil, fmt.Errorf("failed to parse root key: %s (%v)", gun, err) + } + + template, err := trustmanager.NewCertificate(gun) + if err != nil { + return nil, fmt.Errorf("failed to create the certificate template for: %s (%v)", gun, err) + } + + derBytes, err := x509.CreateCertificate(rand.Reader, template, template, publicKey, privateKey) + if err != nil { + return nil, fmt.Errorf("failed to create the certificate for: %s (%v)", gun, err) + } + + // Encode the new certificate into PEM + cert, err := x509.ParseCertificate(derBytes) + if err != nil { + return nil, fmt.Errorf("failed to parse the certificate for key: %s (%v)", gun, err) + } + + return cert, nil +} diff --git a/vendor/src/github.com/docker/notary/keystoremanager/import_export.go b/vendor/src/github.com/docker/notary/keystoremanager/import_export.go new file mode 100644 index 0000000000..bd87244681 --- /dev/null +++ b/vendor/src/github.com/docker/notary/keystoremanager/import_export.go @@ -0,0 +1,304 @@ +package keystoremanager + +import ( + "archive/zip" + "crypto/x509" + "encoding/pem" + "errors" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" + + "github.com/Sirupsen/logrus" + "github.com/docker/notary/pkg/passphrase" + "github.com/docker/notary/trustmanager" +) + +var ( + // ErrNoValidPrivateKey is returned if a key being imported doesn't + // look like a private key + ErrNoValidPrivateKey = errors.New("no valid private key found") + + // ErrRootKeyNotEncrypted is returned if a root key being imported is + // unencrypted + ErrRootKeyNotEncrypted = errors.New("only encrypted root keys may be imported") + + // ErrNoKeysFoundForGUN is returned if no keys are found for the + // specified GUN during export + ErrNoKeysFoundForGUN = errors.New("no keys found for specified GUN") +) + +// ExportRootKey exports the specified root key to an io.Writer in PEM format. +// The key's existing encryption is preserved. +func (km *KeyStoreManager) ExportRootKey(dest io.Writer, keyID string) error { + pemBytes, err := km.rootKeyStore.Get(keyID + "_root") + if err != nil { + return err + } + + _, err = dest.Write(pemBytes) + return err +} + +// checkRootKeyIsEncrypted makes sure the root key is encrypted. We have +// internal assumptions that depend on this. +func checkRootKeyIsEncrypted(pemBytes []byte) error { + block, _ := pem.Decode(pemBytes) + if block == nil { + return ErrNoValidPrivateKey + } + + if !x509.IsEncryptedPEMBlock(block) { + return ErrRootKeyNotEncrypted + } + + return nil +} + +// ImportRootKey imports a root in PEM format key from an io.Reader +// The key's existing encryption is preserved. The keyID parameter is +// necessary because otherwise we'd need the passphrase to decrypt the key +// in order to compute the ID. 
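checkRootKeyIsEncrypted relies on pem.Decode and x509.IsEncryptedPEMBlock to refuse unencrypted root keys. The standalone sketch below reproduces that check against a freshly generated key, using x509.EncryptPEMBlock to build the encrypted block; checkEncrypted and the passphrase are illustrative, not part of the vendored package.

package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/x509"
	"encoding/pem"
	"errors"
	"fmt"
)

// checkEncrypted mirrors checkRootKeyIsEncrypted: decode the PEM block and
// reject anything that is not passphrase-protected.
func checkEncrypted(pemBytes []byte) error {
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		return errors.New("no valid private key found")
	}
	if !x509.IsEncryptedPEMBlock(block) {
		return errors.New("only encrypted root keys may be imported")
	}
	return nil
}

func main() {
	priv, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	der, _ := x509.MarshalECPrivateKey(priv)

	// Passphrase-protect the key material, as the root key store requires.
	block, err := x509.EncryptPEMBlock(rand.Reader, "EC PRIVATE KEY", der, []byte("passphrase"), x509.PEMCipherAES256)
	if err != nil {
		panic(err)
	}
	fmt.Println("encrypted key accepted:", checkEncrypted(pem.EncodeToMemory(block)) == nil)

	// An unencrypted key is rejected.
	plain := pem.EncodeToMemory(&pem.Block{Type: "EC PRIVATE KEY", Bytes: der})
	fmt.Println("plain key rejected:", checkEncrypted(plain) != nil)
}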
+func (km *KeyStoreManager) ImportRootKey(source io.Reader, keyID string) error { + pemBytes, err := ioutil.ReadAll(source) + if err != nil { + return err + } + + if err = checkRootKeyIsEncrypted(pemBytes); err != nil { + return err + } + + if err = km.rootKeyStore.Add(keyID+"_root", pemBytes); err != nil { + return err + } + + return err +} + +func moveKeys(oldKeyStore, newKeyStore *trustmanager.KeyFileStore) error { + // List all files but no symlinks + for _, f := range oldKeyStore.ListKeys() { + pemBytes, alias, err := oldKeyStore.GetKey(f) + if err != nil { + return err + } + + err = newKeyStore.AddKey(f, alias, pemBytes) + + if err != nil { + return err + } + } + + return nil +} + +func addKeysToArchive(zipWriter *zip.Writer, newKeyStore *trustmanager.KeyFileStore, subDir string) error { + // List all files but no symlinks + for _, relKeyPath := range newKeyStore.ListFiles(false) { + fullKeyPath := filepath.Join(newKeyStore.BaseDir(), relKeyPath) + + fi, err := os.Stat(fullKeyPath) + if err != nil { + return err + } + + infoHeader, err := zip.FileInfoHeader(fi) + if err != nil { + return err + } + + infoHeader.Name = filepath.Join(subDir, relKeyPath) + zipFileEntryWriter, err := zipWriter.CreateHeader(infoHeader) + if err != nil { + return err + } + + fileContents, err := ioutil.ReadFile(fullKeyPath) + if err != nil { + return err + } + if _, err = zipFileEntryWriter.Write(fileContents); err != nil { + return err + } + } + + return nil +} + +// ExportAllKeys exports all keys to an io.Writer in zip format. +// newPassphraseRetriever will be used to obtain passphrases to use to encrypt the existing keys. +func (km *KeyStoreManager) ExportAllKeys(dest io.Writer, newPassphraseRetriever passphrase.Retriever) error { + tempBaseDir, err := ioutil.TempDir("", "notary-key-export-") + defer os.RemoveAll(tempBaseDir) + + privNonRootKeysSubdir := filepath.Join(privDir, nonRootKeysSubdir) + privRootKeysSubdir := filepath.Join(privDir, rootKeysSubdir) + + // Create temporary keystores to use as a staging area + tempNonRootKeysPath := filepath.Join(tempBaseDir, privNonRootKeysSubdir) + tempNonRootKeyStore, err := trustmanager.NewKeyFileStore(tempNonRootKeysPath, newPassphraseRetriever) + if err != nil { + return err + } + + tempRootKeysPath := filepath.Join(tempBaseDir, privRootKeysSubdir) + tempRootKeyStore, err := trustmanager.NewKeyFileStore(tempRootKeysPath, newPassphraseRetriever) + if err != nil { + return err + } + + if err := moveKeys(km.rootKeyStore, tempRootKeyStore); err != nil { + return err + } + if err := moveKeys(km.nonRootKeyStore, tempNonRootKeyStore); err != nil { + return err + } + + zipWriter := zip.NewWriter(dest) + + if err := addKeysToArchive(zipWriter, tempRootKeyStore, privRootKeysSubdir); err != nil { + return err + } + if err := addKeysToArchive(zipWriter, tempNonRootKeyStore, privNonRootKeysSubdir); err != nil { + + return err + } + + zipWriter.Close() + + return nil +} + +// ImportKeysZip imports keys from a zip file provided as an io.ReaderAt. The +// keys in the root_keys directory are left encrypted, but the other keys are +// decrypted with the specified passphrase. 
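ExportAllKeys stages keys into temporary key stores and then writes them into a zip archive whose entry names carry the private/root_keys and private/tuf_keys prefixes that ImportKeysZip later matches on. A minimal sketch of that archive layout using only archive/zip and in-memory placeholder key material (the paths and contents are hypothetical):

package main

import (
	"archive/zip"
	"bytes"
	"fmt"
)

func main() {
	// Hypothetical key files, keyed by their path relative to the key store.
	keys := map[string][]byte{
		"docker.io/library/alpine/abc123_targets.key": []byte("-----BEGIN ...-----"),
		"def456_root.key":                             []byte("-----BEGIN ...-----"),
	}

	buf := new(bytes.Buffer)
	zw := zip.NewWriter(buf)
	for name, contents := range keys {
		// Prefix each entry with the subdirectory it should land in on import,
		// mirroring how addKeysToArchive prepends the private/tuf_keys subdir.
		w, err := zw.Create("private/tuf_keys/" + name)
		if err != nil {
			panic(err)
		}
		if _, err := w.Write(contents); err != nil {
			panic(err)
		}
	}
	if err := zw.Close(); err != nil {
		panic(err)
	}

	// Read the archive back, as ImportKeysZip would.
	zr, err := zip.NewReader(bytes.NewReader(buf.Bytes()), int64(buf.Len()))
	if err != nil {
		panic(err)
	}
	for _, f := range zr.File {
		fmt.Println("archived:", f.Name)
	}
}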
+func (km *KeyStoreManager) ImportKeysZip(zipReader zip.Reader) error { + // Temporarily store the keys in maps, so we can bail early if there's + // an error (for example, wrong passphrase), without leaving the key + // store in an inconsistent state + newRootKeys := make(map[string][]byte) + newNonRootKeys := make(map[string][]byte) + + // Note that using / as a separator is okay here - the zip package + // guarantees that the separator will be / + rootKeysPrefix := privDir + "/" + rootKeysSubdir + "/" + nonRootKeysPrefix := privDir + "/" + nonRootKeysSubdir + "/" + + // Iterate through the files in the archive. Don't add the keys + for _, f := range zipReader.File { + fNameTrimmed := strings.TrimSuffix(f.Name, filepath.Ext(f.Name)) + + rc, err := f.Open() + if err != nil { + return err + } + + fileBytes, err := ioutil.ReadAll(rc) + if err != nil { + return nil + } + + // Is this in the root_keys directory? + // Note that using / as a separator is okay here - the zip + // package guarantees that the separator will be / + if strings.HasPrefix(fNameTrimmed, rootKeysPrefix) { + if err = checkRootKeyIsEncrypted(fileBytes); err != nil { + rc.Close() + return err + } + // Root keys are preserved without decrypting + keyName := strings.TrimPrefix(fNameTrimmed, rootKeysPrefix) + newRootKeys[keyName] = fileBytes + } else if strings.HasPrefix(fNameTrimmed, nonRootKeysPrefix) { + // Nonroot keys are preserved without decrypting + keyName := strings.TrimPrefix(fNameTrimmed, nonRootKeysPrefix) + newNonRootKeys[keyName] = fileBytes + } else { + // This path inside the zip archive doesn't look like a + // root key, non-root key, or alias. To avoid adding a file + // to the filestore that we won't be able to use, skip + // this file in the import. + logrus.Warnf("skipping import of key with a path that doesn't begin with %s or %s: %s", rootKeysPrefix, nonRootKeysPrefix, f.Name) + rc.Close() + continue + } + + rc.Close() + } + + for keyName, pemBytes := range newRootKeys { + if err := km.rootKeyStore.Add(keyName, pemBytes); err != nil { + return err + } + } + + for keyName, pemBytes := range newNonRootKeys { + if err := km.nonRootKeyStore.Add(keyName, pemBytes); err != nil { + return err + } + } + + return nil +} + +func moveKeysByGUN(oldKeyStore, newKeyStore *trustmanager.KeyFileStore, gun string) error { + // List all files but no symlinks + for _, relKeyPath := range oldKeyStore.ListKeys() { + + // Skip keys that aren't associated with this GUN + if !strings.HasPrefix(relKeyPath, filepath.FromSlash(gun)) { + continue + } + + privKey, alias, err := oldKeyStore.GetKey(relKeyPath) + if err != nil { + return err + } + + err = newKeyStore.AddKey(relKeyPath, alias, privKey) + if err != nil { + return err + } + } + + return nil +} + +// ExportKeysByGUN exports all keys associated with a specified GUN to an +// io.Writer in zip format. passphraseRetriever is used to select new passphrases to use to +// encrypt the keys. 
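Because non-root keys are stored under GUN-prefixed paths, exporting by GUN reduces to the prefix match that moveKeysByGUN performs above. A tiny illustration of that filter (the GUN and file names are made up):

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	gun := "docker.io/library/alpine"
	keyPaths := []string{
		"docker.io/library/alpine/1a2b3c_targets.key",
		"docker.io/library/busybox/9f8e7d_targets.key",
	}
	for _, p := range keyPaths {
		// Keys live under GUN-prefixed paths, so export-by-GUN is a prefix match.
		if strings.HasPrefix(p, filepath.FromSlash(gun)) {
			fmt.Println("exporting:", p)
		}
	}
}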
+func (km *KeyStoreManager) ExportKeysByGUN(dest io.Writer, gun string, passphraseRetriever passphrase.Retriever) error { + tempBaseDir, err := ioutil.TempDir("", "notary-key-export-") + defer os.RemoveAll(tempBaseDir) + + privNonRootKeysSubdir := filepath.Join(privDir, nonRootKeysSubdir) + + // Create temporary keystore to use as a staging area + tempNonRootKeysPath := filepath.Join(tempBaseDir, privNonRootKeysSubdir) + tempNonRootKeyStore, err := trustmanager.NewKeyFileStore(tempNonRootKeysPath, passphraseRetriever) + if err != nil { + return err + } + + if err := moveKeysByGUN(km.nonRootKeyStore, tempNonRootKeyStore, gun); err != nil { + return err + } + + zipWriter := zip.NewWriter(dest) + + if len(tempNonRootKeyStore.ListKeys()) == 0 { + return ErrNoKeysFoundForGUN + } + + if err := addKeysToArchive(zipWriter, tempNonRootKeyStore, privNonRootKeysSubdir); err != nil { + return err + } + + zipWriter.Close() + + return nil +} diff --git a/vendor/src/github.com/docker/notary/keystoremanager/keystoremanager.go b/vendor/src/github.com/docker/notary/keystoremanager/keystoremanager.go new file mode 100644 index 0000000000..bf39d7faa5 --- /dev/null +++ b/vendor/src/github.com/docker/notary/keystoremanager/keystoremanager.go @@ -0,0 +1,426 @@ +package keystoremanager + +import ( + "crypto/rand" + "crypto/x509" + "errors" + "fmt" + "path/filepath" + "strings" + "time" + + "github.com/Sirupsen/logrus" + "github.com/docker/notary/cryptoservice" + "github.com/docker/notary/pkg/passphrase" + "github.com/docker/notary/trustmanager" + "github.com/endophage/gotuf/data" + "github.com/endophage/gotuf/signed" +) + +// KeyStoreManager is an abstraction around the root and non-root key stores, +// and related CA stores +type KeyStoreManager struct { + rootKeyStore *trustmanager.KeyFileStore + nonRootKeyStore *trustmanager.KeyFileStore + + trustedCAStore trustmanager.X509Store + trustedCertificateStore trustmanager.X509Store +} + +const ( + trustDir = "trusted_certificates" + privDir = "private" + rootKeysSubdir = "root_keys" + nonRootKeysSubdir = "tuf_keys" + rsaRootKeySize = 4096 // Used for new root keys +) + +// ErrValidationFail is returned when there is no valid trusted certificates +// being served inside of the roots.json +type ErrValidationFail struct { + Reason string +} + +// ErrValidationFail is returned when there is no valid trusted certificates +// being served inside of the roots.json +func (err ErrValidationFail) Error() string { + return fmt.Sprintf("could not validate the path to a trusted root: %s", err.Reason) +} + +// ErrRootRotationFail is returned when we fail to do a full root key rotation +// by either failing to add the new root certificate, or delete the old ones +type ErrRootRotationFail struct { + Reason string +} + +// ErrRootRotationFail is returned when we fail to do a full root key rotation +// by either failing to add the new root certificate, or delete the old ones +func (err ErrRootRotationFail) Error() string { + return fmt.Sprintf("could not rotate trust to a new trusted root: %s", err.Reason) +} + +// NewKeyStoreManager returns an initialized KeyStoreManager, or an error +// if it fails to create the KeyFileStores or load certificates +func NewKeyStoreManager(baseDir string, passphraseRetriever passphrase.Retriever) (*KeyStoreManager, error) { + nonRootKeysPath := filepath.Join(baseDir, privDir, nonRootKeysSubdir) + nonRootKeyStore, err := trustmanager.NewKeyFileStore(nonRootKeysPath, passphraseRetriever) + if err != nil { + return nil, err + } + + // Load the keystore 
that will hold all of our encrypted Root Private Keys + rootKeysPath := filepath.Join(baseDir, privDir, rootKeysSubdir) + rootKeyStore, err := trustmanager.NewKeyFileStore(rootKeysPath, passphraseRetriever) + if err != nil { + return nil, err + } + + trustPath := filepath.Join(baseDir, trustDir) + + // Load all CAs that aren't expired and don't use SHA1 + trustedCAStore, err := trustmanager.NewX509FilteredFileStore(trustPath, func(cert *x509.Certificate) bool { + return cert.IsCA && cert.BasicConstraintsValid && cert.SubjectKeyId != nil && + time.Now().Before(cert.NotAfter) && + cert.SignatureAlgorithm != x509.SHA1WithRSA && + cert.SignatureAlgorithm != x509.DSAWithSHA1 && + cert.SignatureAlgorithm != x509.ECDSAWithSHA1 + }) + if err != nil { + return nil, err + } + + // Load all individual (non-CA) certificates that aren't expired and don't use SHA1 + trustedCertificateStore, err := trustmanager.NewX509FilteredFileStore(trustPath, func(cert *x509.Certificate) bool { + return !cert.IsCA && + time.Now().Before(cert.NotAfter) && + cert.SignatureAlgorithm != x509.SHA1WithRSA && + cert.SignatureAlgorithm != x509.DSAWithSHA1 && + cert.SignatureAlgorithm != x509.ECDSAWithSHA1 + }) + if err != nil { + return nil, err + } + + return &KeyStoreManager{ + rootKeyStore: rootKeyStore, + nonRootKeyStore: nonRootKeyStore, + trustedCAStore: trustedCAStore, + trustedCertificateStore: trustedCertificateStore, + }, nil +} + +// RootKeyStore returns the root key store being managed by this +// KeyStoreManager +func (km *KeyStoreManager) RootKeyStore() *trustmanager.KeyFileStore { + return km.rootKeyStore +} + +// NonRootKeyStore returns the non-root key store being managed by this +// KeyStoreManager +func (km *KeyStoreManager) NonRootKeyStore() *trustmanager.KeyFileStore { + return km.nonRootKeyStore +} + +// TrustedCertificateStore returns the trusted certificate store being managed +// by this KeyStoreManager +func (km *KeyStoreManager) TrustedCertificateStore() trustmanager.X509Store { + return km.trustedCertificateStore +} + +// TrustedCAStore returns the CA store being managed by this KeyStoreManager +func (km *KeyStoreManager) TrustedCAStore() trustmanager.X509Store { + return km.trustedCAStore +} + +// AddTrustedCert adds a cert to the trusted certificate store (not the CA +// store) +func (km *KeyStoreManager) AddTrustedCert(cert *x509.Certificate) { + km.trustedCertificateStore.AddCert(cert) +} + +// AddTrustedCACert adds a cert to the trusted CA certificate store +func (km *KeyStoreManager) AddTrustedCACert(cert *x509.Certificate) { + km.trustedCAStore.AddCert(cert) +} + +// GenRootKey generates a new root key +func (km *KeyStoreManager) GenRootKey(algorithm string) (string, error) { + var err error + var privKey data.PrivateKey + + // We don't want external API callers to rely on internal TUF data types, so + // the API here should continue to receive a string algorithm, and ensure + // that it is downcased + switch data.KeyAlgorithm(strings.ToLower(algorithm)) { + case data.RSAKey: + privKey, err = trustmanager.GenerateRSAKey(rand.Reader, rsaRootKeySize) + case data.ECDSAKey: + privKey, err = trustmanager.GenerateECDSAKey(rand.Reader) + default: + return "", fmt.Errorf("only RSA or ECDSA keys are currently supported. 
Found: %s", algorithm) + + } + if err != nil { + return "", fmt.Errorf("failed to generate private key: %v", err) + } + + // Changing the root + km.rootKeyStore.AddKey(privKey.ID(), "root", privKey) + + return privKey.ID(), nil +} + +// GetRootCryptoService retrieves a root key and a cryptoservice to use with it +// TODO(mccauley): remove this as its no longer needed once we have key caching in the keystores +func (km *KeyStoreManager) GetRootCryptoService(rootKeyID string) (*cryptoservice.UnlockedCryptoService, error) { + privKey, _, err := km.rootKeyStore.GetKey(rootKeyID) + if err != nil { + return nil, fmt.Errorf("could not get decrypted root key with keyID: %s, %v", rootKeyID, err) + } + + cryptoService := cryptoservice.NewCryptoService("", km.rootKeyStore) + + return cryptoservice.NewUnlockedCryptoService(privKey, cryptoService), nil +} + +/* +ValidateRoot receives a new root, validates its correctness and attempts to +do root key rotation if needed. + +First we list the current trusted certificates we have for a particular GUN. If +that list is non-empty means that we've already seen this repository before, and +have a list of trusted certificates for it. In this case, we use this list of +certificates to attempt to validate this root file. + +If the previous validation suceeds, or in the case where we found no trusted +certificates for this particular GUN, we check the integrity of the root by +making sure that it is validated by itself. This means that we will attempt to +validate the root data with the certificates that are included in the root keys +themselves. + +If this last steps succeeds, we attempt to do root rotation, by ensuring that +we only trust the certificates that are present in the new root. + +This mechanism of operation is essentially Trust On First Use (TOFU): if we +have never seen a certificate for a particular CN, we trust it. If later we see +a different certificate for that certificate, we return an ErrValidationFailed error. + +Note that since we only allow trust data to be downloaded over an HTTPS channel +we are using the current public PKI to validate the first download of the certificate +adding an extra layer of security over the normal (SSH style) trust model. +We shall call this: TOFUS. +*/ +func (km *KeyStoreManager) ValidateRoot(root *data.Signed, gun string) error { + logrus.Debugf("entered ValidateRoot with dns: %s", gun) + signedRoot, err := data.RootFromSigned(root) + if err != nil { + return err + } + + // Retrieve all the leaf certificates in root for which the CN matches the GUN + allValidCerts, err := validRootLeafCerts(signedRoot, gun) + if err != nil { + logrus.Debugf("error retrieving valid leaf certificates for: %s, %v", gun, err) + return &ErrValidationFail{Reason: "unable to retrieve valid leaf certificates"} + } + + // Retrieve all the trusted certificates that match this gun + certsForCN, err := km.trustedCertificateStore.GetCertificatesByCN(gun) + if err != nil { + // If the error that we get back is different than ErrNoCertificatesFound + // we couldn't check if there are any certificates with this CN already + // trusted. 
Let's take the conservative approach and return a failed validation + if _, ok := err.(*trustmanager.ErrNoCertificatesFound); !ok { + logrus.Debugf("error retrieving trusted certificates for: %s, %v", gun, err) + return &ErrValidationFail{Reason: "unable to retrieve trusted certificates"} + } + } + + // If we have certificates that match this specific GUN, let's make sure to + // use them first to validate that this new root is valid. + if len(certsForCN) != 0 { + logrus.Debugf("found %d valid root certificates for %s", len(certsForCN), gun) + err = signed.VerifyRoot(root, 0, trustmanager.CertsToKeys(certsForCN)) + if err != nil { + logrus.Debugf("failed to verify TUF data for: %s, %v", gun, err) + return &ErrValidationFail{Reason: "failed to validate data with current trusted certificates"} + } + } else { + logrus.Debugf("found no currently valid root certificates for %s", gun) + } + + // Validate the integrity of the new root (does it have valid signatures) + err = signed.VerifyRoot(root, 0, trustmanager.CertsToKeys(allValidCerts)) + if err != nil { + logrus.Debugf("failed to verify TUF data for: %s, %v", gun, err) + return &ErrValidationFail{Reason: "failed to validate integrity of roots"} + } + + // Getting here means A) we had trusted certificates and both the + // old and new validated this root; or B) we had no trusted certificates but + // the new set of certificates has integrity (self-signed) + logrus.Debugf("entering root certificate rotation for: %s", gun) + + // Do root certificate rotation: we trust only the certs present in the new root + // First we add all the new certificates (even if they already exist) + for _, cert := range allValidCerts { + err := km.trustedCertificateStore.AddCert(cert) + if err != nil { + // If the error is already exists we don't fail the rotation + if _, ok := err.(*trustmanager.ErrCertExists); ok { + logrus.Debugf("ignoring certificate addition to: %s", gun) + continue + } + logrus.Debugf("error adding new trusted certificate for: %s, %v", gun, err) + } + } + + // Now we delete old certificates that aren't present in the new root + for certID, cert := range certsToRemove(certsForCN, allValidCerts) { + logrus.Debugf("removing certificate with certID: %s", certID) + err = km.trustedCertificateStore.RemoveCert(cert) + if err != nil { + logrus.Debugf("failed to remove trusted certificate with keyID: %s, %v", certID, err) + return &ErrRootRotationFail{Reason: "failed to rotate root keys"} + } + } + + logrus.Debugf("Root validation succeeded for %s", gun) + return nil +} + +// validRootLeafCerts returns a list of non-exipired, non-sha1 certificates whoose +// Common-Names match the provided GUN +func validRootLeafCerts(root *data.SignedRoot, gun string) ([]*x509.Certificate, error) { + // Get a list of all of the leaf certificates present in root + allLeafCerts, _ := parseAllCerts(root) + var validLeafCerts []*x509.Certificate + + // Go through every leaf certificate and check that the CN matches the gun + for _, cert := range allLeafCerts { + // Validate that this leaf certificate has a CN that matches the exact gun + if cert.Subject.CommonName != gun { + logrus.Debugf("error leaf certificate CN: %s doesn't match the given GUN: %s", cert.Subject.CommonName) + continue + } + // Make sure the certificate is not expired + if time.Now().After(cert.NotAfter) { + logrus.Debugf("error leaf certificate is expired") + continue + } + + // We don't allow root certificates that use SHA1 + if cert.SignatureAlgorithm == x509.SHA1WithRSA || + cert.SignatureAlgorithm 
== x509.DSAWithSHA1 || + cert.SignatureAlgorithm == x509.ECDSAWithSHA1 { + + logrus.Debugf("error certificate uses deprecated hashing algorithm (SHA1)") + continue + } + + validLeafCerts = append(validLeafCerts, cert) + } + + if len(validLeafCerts) < 1 { + logrus.Debugf("didn't find any valid leaf certificates for %s", gun) + return nil, errors.New("no valid leaf certificates found in any of the root keys") + } + + logrus.Debugf("found %d valid leaf certificates for %s", len(validLeafCerts), gun) + return validLeafCerts, nil +} + +// parseAllCerts returns two maps, one with all of the leafCertificates and one +// with all the intermediate certificates found in signedRoot +func parseAllCerts(signedRoot *data.SignedRoot) (map[string]*x509.Certificate, map[string][]*x509.Certificate) { + leafCerts := make(map[string]*x509.Certificate) + intCerts := make(map[string][]*x509.Certificate) + + // Before we loop through all root keys available, make sure any exist + rootRoles, ok := signedRoot.Signed.Roles["root"] + if !ok { + logrus.Debugf("tried to parse certificates from invalid root signed data") + return nil, nil + } + + logrus.Debugf("found the following root keys: %v", rootRoles.KeyIDs) + // Iterate over every keyID for the root role inside of roots.json + for _, keyID := range rootRoles.KeyIDs { + // check that the key exists in the signed root keys map + key, ok := signedRoot.Signed.Keys[keyID] + if !ok { + logrus.Debugf("error while getting data for keyID: %s", keyID) + continue + } + + // Decode all the x509 certificates that were bundled with this + // Specific root key + decodedCerts, err := trustmanager.LoadCertBundleFromPEM(key.Public()) + if err != nil { + logrus.Debugf("error while parsing root certificate with keyID: %s, %v", keyID, err) + continue + } + + // Get all non-CA certificates in the decoded certificates + leafCertList := trustmanager.GetLeafCerts(decodedCerts) + + // If we got no leaf certificates or we got more than one, fail + if len(leafCertList) != 1 { + logrus.Debugf("invalid chain due to leaf certificate missing or too many leaf certificates for keyID: %s", keyID) + continue + } + + // Get the ID of the leaf certificate + leafCert := leafCertList[0] + leafID, err := trustmanager.FingerprintCert(leafCert) + if err != nil { + logrus.Debugf("error while fingerprinting root certificate with keyID: %s, %v", keyID, err) + continue + } + + // Store the leaf cert in the map + leafCerts[leafID] = leafCert + + // Get all the remainder certificates marked as a CA to be used as intermediates + intermediateCerts := trustmanager.GetIntermediateCerts(decodedCerts) + intCerts[leafID] = intermediateCerts + } + + return leafCerts, intCerts +} + +// certsToRemove returns all the certifificates from oldCerts that aren't present +// in newCerts +func certsToRemove(oldCerts, newCerts []*x509.Certificate) map[string]*x509.Certificate { + certsToRemove := make(map[string]*x509.Certificate) + + // If no newCerts were provided + if len(newCerts) == 0 { + return certsToRemove + } + + // Populate a map with all the IDs from newCert + var newCertMap = make(map[string]struct{}) + for _, cert := range newCerts { + certID, err := trustmanager.FingerprintCert(cert) + if err != nil { + logrus.Debugf("error while fingerprinting root certificate with keyID: %s, %v", certID, err) + continue + } + newCertMap[certID] = struct{}{} + } + + // Iterate over all the old certificates and check to see if we should remove them + for _, cert := range oldCerts { + certID, err := 
trustmanager.FingerprintCert(cert) + if err != nil { + logrus.Debugf("error while fingerprinting root certificate with certID: %s, %v", certID, err) + continue + } + if _, ok := newCertMap[certID]; !ok { + certsToRemove[certID] = cert + } + } + + return certsToRemove +} diff --git a/vendor/src/github.com/docker/notary/pkg/passphrase/passphrase.go b/vendor/src/github.com/docker/notary/pkg/passphrase/passphrase.go new file mode 100644 index 0000000000..aae28170b4 --- /dev/null +++ b/vendor/src/github.com/docker/notary/pkg/passphrase/passphrase.go @@ -0,0 +1,148 @@ +// Package passphrase is a utility function for managing passphrase +// for TUF and Notary keys. +package passphrase + +import ( + "bufio" + "errors" + "fmt" + "io" + "os" + "strings" + + "path/filepath" + + "github.com/docker/docker/pkg/term" +) + +// Retriever is a callback function that should retrieve a passphrase +// for a given named key. If it should be treated as new passphrase (e.g. with +// confirmation), createNew will be true. Attempts is passed in so that implementers +// decide how many chances to give to a human, for example. +type Retriever func(keyName, alias string, createNew bool, attempts int) (passphrase string, giveup bool, err error) + +const ( + idBytesToDisplay = 5 + tufRootAlias = "root" + tufTargetsAlias = "targets" + tufSnapshotAlias = "snapshot" + tufRootKeyGenerationWarning = `You are about to create a new root signing key passphrase. This passphrase will be used to protect +the most sensitive key in your signing system. Please choose a long, complex passphrase and be careful +to keep the password and the key file itself secure and backed up. It is highly recommended that you use +a password manager to generate the passphrase and keep it safe. There will be no way to recover this key. +You can find the key in your config directory.` +) + +// PromptRetriever returns a new Retriever which will provide a prompt on stdin +// and stdout to retrieve a passphrase. The passphrase will be cached such that +// subsequent prompts will produce the same passphrase. +func PromptRetriever() Retriever { + return PromptRetrieverWithInOut(os.Stdin, os.Stdout) +} + +// PromptRetrieverWithInOut returns a new Retriever which will provide a +// prompt using the given in and out readers. The passphrase will be cached +// such that subsequent prompts will produce the same passphrase. +func PromptRetrieverWithInOut(in io.Reader, out io.Writer) Retriever { + userEnteredTargetsSnapshotsPass := false + targetsSnapshotsPass := "" + userEnteredRootsPass := false + rootsPass := "" + + return func(keyName string, alias string, createNew bool, numAttempts int) (string, bool, error) { + if alias == tufRootAlias && createNew && numAttempts == 0 { + fmt.Fprintln(out, tufRootKeyGenerationWarning) + } + if numAttempts > 0 { + if createNew { + fmt.Fprintln(out, "Passphrases do not match. Please retry.") + + } else { + fmt.Fprintln(out, "Passphrase incorrect. Please retry.") + } + } + + // First, check if we have a password cached for this alias. 
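The Retriever callback defined above returns three values: the passphrase, a flag telling the caller to give up, and an error; the interactive prompt implementation continues below. A non-interactive variant is often useful for tests or scripting. The following sketch is only an illustration of the callback contract, and fixedRetriever is not part of the vendored package:

package main

import "fmt"

// Retriever mirrors the callback signature of the passphrase package: it
// returns the passphrase, whether the caller should give up, and an error.
type Retriever func(keyName, alias string, createNew bool, attempts int) (string, bool, error)

// fixedRetriever always answers with the same passphrase and tells the
// caller to give up after three failed attempts.
func fixedRetriever(passphrase string) Retriever {
	return func(keyName, alias string, createNew bool, attempts int) (string, bool, error) {
		if attempts > 3 {
			return "", true, fmt.Errorf("wrong passphrase for %s key %s", alias, keyName)
		}
		return passphrase, false, nil
	}
}

func main() {
	r := fixedRetriever("correct horse battery staple")
	pass, giveup, err := r("abc123", "root", false, 0)
	fmt.Println(pass, giveup, err)
}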
+ if numAttempts == 0 { + if userEnteredTargetsSnapshotsPass && (alias == tufSnapshotAlias || alias == tufTargetsAlias) { + return targetsSnapshotsPass, false, nil + } + if userEnteredRootsPass && (alias == "root") { + return rootsPass, false, nil + } + } + + if numAttempts > 3 && !createNew { + return "", true, errors.New("Too many attempts") + } + + state, err := term.SaveState(0) + if err != nil { + return "", false, err + } + term.DisableEcho(0, state) + defer term.RestoreTerminal(0, state) + + stdin := bufio.NewReader(in) + + indexOfLastSeparator := strings.LastIndex(keyName, string(filepath.Separator)) + + if len(keyName) > indexOfLastSeparator+idBytesToDisplay+1 { + keyName = keyName[:indexOfLastSeparator+idBytesToDisplay+1] + } + + if createNew { + fmt.Fprintf(out, "Enter passphrase for new %s key with id %s: ", alias, keyName) + } else { + fmt.Fprintf(out, "Enter key passphrase for %s key with id %s: ", alias, keyName) + } + + passphrase, err := stdin.ReadBytes('\n') + fmt.Fprintln(out) + if err != nil { + return "", false, err + } + + retPass := strings.TrimSpace(string(passphrase)) + + if !createNew { + if alias == tufSnapshotAlias || alias == tufTargetsAlias { + userEnteredTargetsSnapshotsPass = true + targetsSnapshotsPass = retPass + } + if alias == tufRootAlias { + userEnteredRootsPass = true + rootsPass = retPass + } + return retPass, false, nil + } + + if len(retPass) < 8 { + fmt.Fprintln(out, "Please use a password manager to generate and store a good random passphrase.") + return "", false, errors.New("Passphrase too short") + } + + fmt.Fprintf(out, "Repeat passphrase for new %s key with id %s: ", alias, keyName) + confirmation, err := stdin.ReadBytes('\n') + fmt.Fprintln(out) + if err != nil { + return "", false, err + } + confirmationStr := strings.TrimSpace(string(confirmation)) + + if retPass != confirmationStr { + return "", false, errors.New("The entered passphrases do not match") + } + + if alias == tufSnapshotAlias || alias == tufTargetsAlias { + userEnteredTargetsSnapshotsPass = true + targetsSnapshotsPass = retPass + } + if alias == tufRootAlias { + userEnteredRootsPass = true + rootsPass = retPass + } + + return retPass, false, nil + } +} diff --git a/vendor/src/github.com/docker/notary/trustmanager/filestore.go b/vendor/src/github.com/docker/notary/trustmanager/filestore.go new file mode 100644 index 0000000000..d573888d6a --- /dev/null +++ b/vendor/src/github.com/docker/notary/trustmanager/filestore.go @@ -0,0 +1,293 @@ +package trustmanager + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "strings" + "sync" +) + +const ( + visible os.FileMode = 0755 + private os.FileMode = 0700 +) + +var ( + // ErrPathOutsideStore indicates that the returned path would be + // outside the store + ErrPathOutsideStore = errors.New("path outside file store") +) + +// LimitedFileStore implements the bare bones primitives (no symlinks or +// hierarchy) +type LimitedFileStore interface { + Add(fileName string, data []byte) error + Remove(fileName string) error + Get(fileName string) ([]byte, error) + ListFiles(symlinks bool) []string +} + +// FileStore is the interface for full-featured FileStores +type FileStore interface { + LimitedFileStore + + RemoveDir(directoryName string) error + GetPath(fileName string) (string, error) + ListDir(directoryName string, symlinks bool) []string + Link(src, dst string) error + BaseDir() string +} + +// SimpleFileStore implements FileStore +type SimpleFileStore struct { + baseDir string + fileExt string + perms os.FileMode 
+} + +// NewSimpleFileStore creates a directory with 755 permissions +func NewSimpleFileStore(baseDir string, fileExt string) (*SimpleFileStore, error) { + baseDir = filepath.Clean(baseDir) + + if err := CreateDirectory(baseDir); err != nil { + return nil, err + } + + return &SimpleFileStore{ + baseDir: baseDir, + fileExt: fileExt, + perms: visible, + }, nil +} + +// NewPrivateSimpleFileStore creates a directory with 700 permissions +func NewPrivateSimpleFileStore(baseDir string, fileExt string) (*SimpleFileStore, error) { + if err := CreatePrivateDirectory(baseDir); err != nil { + return nil, err + } + + return &SimpleFileStore{ + baseDir: baseDir, + fileExt: fileExt, + perms: private, + }, nil +} + +// Add writes data to a file with a given name +func (f *SimpleFileStore) Add(name string, data []byte) error { + filePath, err := f.GetPath(name) + if err != nil { + return err + } + createDirectory(filepath.Dir(filePath), f.perms) + return ioutil.WriteFile(filePath, data, f.perms) +} + +// Remove removes a file identified by name +func (f *SimpleFileStore) Remove(name string) error { + // Attempt to remove + filePath, err := f.GetPath(name) + if err != nil { + return err + } + return os.Remove(filePath) +} + +// RemoveDir removes the directory identified by name +func (f *SimpleFileStore) RemoveDir(name string) error { + dirPath := filepath.Join(f.baseDir, name) + + // Check to see if directory exists + fi, err := os.Stat(dirPath) + if err != nil { + return err + } + + // Check to see if it is a directory + if !fi.IsDir() { + return fmt.Errorf("directory not found: %s", name) + } + + return os.RemoveAll(dirPath) +} + +// Get returns the data given a file name +func (f *SimpleFileStore) Get(name string) ([]byte, error) { + filePath, err := f.GetPath(name) + if err != nil { + return nil, err + } + data, err := ioutil.ReadFile(filePath) + if err != nil { + return nil, err + } + + return data, nil +} + +// GetPath returns the full final path of a file with a given name +func (f *SimpleFileStore) GetPath(name string) (string, error) { + fileName := f.genFileName(name) + fullPath := filepath.Clean(filepath.Join(f.baseDir, fileName)) + + if !strings.HasPrefix(fullPath, f.baseDir) { + return "", ErrPathOutsideStore + } + return fullPath, nil +} + +// ListFiles lists all the files inside of a store +func (f *SimpleFileStore) ListFiles(symlinks bool) []string { + return f.list(f.baseDir, symlinks) +} + +// ListDir lists all the files inside of a directory identified by a name +func (f *SimpleFileStore) ListDir(name string, symlinks bool) []string { + fullPath := filepath.Join(f.baseDir, name) + return f.list(fullPath, symlinks) +} + +// list lists all the files in a directory given a full path. Ignores symlinks. +func (f *SimpleFileStore) list(path string, symlinks bool) []string { + files := make([]string, 0, 0) + filepath.Walk(path, func(fp string, fi os.FileInfo, err error) error { + // If there are errors, ignore this particular file + if err != nil { + return nil + } + // Ignore if it is a directory + if fi.IsDir() { + return nil + } + + // If this is a symlink, and symlinks is true, ignore it + if !symlinks && fi.Mode()&os.ModeSymlink == os.ModeSymlink { + return nil + } + + // Only allow matches that end with our certificate extension (e.g. *.crt) + matched, _ := filepath.Match("*"+f.fileExt, fi.Name()) + + if matched { + // Find the relative path for this file relative to the base path. 
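GetPath defends against path traversal by cleaning the joined path and refusing any result that escapes the store's base directory, returning ErrPathOutsideStore. A standalone sketch of that guard (getPath and the paths used are illustrative):

package main

import (
	"errors"
	"fmt"
	"path/filepath"
	"strings"
)

var errPathOutsideStore = errors.New("path outside file store")

// getPath mirrors SimpleFileStore.GetPath: append the extension, join with
// the base directory, clean the result, and refuse anything that escapes.
func getPath(baseDir, name, ext string) (string, error) {
	fileName := fmt.Sprintf("%s.%s", name, ext)
	fullPath := filepath.Clean(filepath.Join(baseDir, fileName))
	if !strings.HasPrefix(fullPath, baseDir) {
		return "", errPathOutsideStore
	}
	return fullPath, nil
}

func main() {
	fmt.Println(getPath("/tmp/store", "docker.io/library/alpine/abc", "key"))
	fmt.Println(getPath("/tmp/store", "../../etc/passwd", "key")) // rejected
}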
+ fp, err = filepath.Rel(path, fp) + if err != nil { + return err + } + files = append(files, fp) + } + return nil + }) + return files +} + +// genFileName returns the name using the right extension +func (f *SimpleFileStore) genFileName(name string) string { + return fmt.Sprintf("%s.%s", name, f.fileExt) +} + +// Link creates a symlink beetween the ID of the certificate used by a repository +// and the ID of the root key that is being used. +// We use full path for the source and local for the destination to use relative +// path for the symlink +func (f *SimpleFileStore) Link(oldname, newname string) error { + newnamePath, err := f.GetPath(newname) + if err != nil { + return err + } + + return os.Symlink(f.genFileName(oldname), newnamePath) +} + +// BaseDir returns the base directory of the filestore +func (f *SimpleFileStore) BaseDir() string { + return f.baseDir +} + +// CreateDirectory uses createDirectory to create a chmod 755 Directory +func CreateDirectory(dir string) error { + return createDirectory(dir, visible) +} + +// CreatePrivateDirectory uses createDirectory to create a chmod 700 Directory +func CreatePrivateDirectory(dir string) error { + return createDirectory(dir, private) +} + +// createDirectory receives a string of the path to a directory. +// It does not support passing files, so the caller has to remove +// the filename by doing filepath.Dir(full_path_to_file) +func createDirectory(dir string, perms os.FileMode) error { + // This prevents someone passing /path/to/dir and 'dir' not being created + // If two '//' exist, MkdirAll deals it with correctly + dir = dir + "/" + return os.MkdirAll(dir, perms) +} + +// MemoryFileStore is an implementation of LimitedFileStore that keeps +// the contents in memory. +type MemoryFileStore struct { + sync.Mutex + + files map[string][]byte +} + +// NewMemoryFileStore creates a MemoryFileStore +func NewMemoryFileStore() *MemoryFileStore { + return &MemoryFileStore{ + files: make(map[string][]byte), + } +} + +// ErrMemFileNotFound is returned for a nonexistent "file" in the memory file +// store +var ErrMemFileNotFound = errors.New("key not found in memory file store") + +// Add writes data to a file with a given name +func (f *MemoryFileStore) Add(name string, data []byte) error { + f.Lock() + defer f.Unlock() + + f.files[name] = data + return nil +} + +// Remove removes a file identified by name +func (f *MemoryFileStore) Remove(name string) error { + f.Lock() + defer f.Unlock() + + if _, present := f.files[name]; !present { + return ErrMemFileNotFound + } + delete(f.files, name) + + return nil +} + +// Get returns the data given a file name +func (f *MemoryFileStore) Get(name string) ([]byte, error) { + f.Lock() + defer f.Unlock() + + fileData, present := f.files[name] + if !present { + return nil, ErrMemFileNotFound + } + + return fileData, nil +} + +// ListFiles lists all the files inside of a store +func (f *MemoryFileStore) ListFiles(symlinks bool) []string { + var list []string + + for name := range f.files { + list = append(list, name) + } + + return list +} diff --git a/vendor/src/github.com/docker/notary/trustmanager/keyfilestore.go b/vendor/src/github.com/docker/notary/trustmanager/keyfilestore.go new file mode 100644 index 0000000000..b8d5fd175c --- /dev/null +++ b/vendor/src/github.com/docker/notary/trustmanager/keyfilestore.go @@ -0,0 +1,288 @@ +package trustmanager + +import ( + "path/filepath" + "strings" + "sync" + + "fmt" + + "github.com/docker/notary/pkg/passphrase" + "github.com/endophage/gotuf/data" +) + +const 
( + keyExtension = "key" +) + +// ErrAttemptsExceeded is returned when too many attempts have been made to decrypt a key +type ErrAttemptsExceeded struct{} + +// ErrAttemptsExceeded is returned when too many attempts have been made to decrypt a key +func (err ErrAttemptsExceeded) Error() string { + return "maximum number of passphrase attempts exceeded" +} + +// ErrPasswordInvalid is returned when signing fails. It could also mean the signing +// key file was corrupted, but we have no way to distinguish. +type ErrPasswordInvalid struct{} + +// ErrPasswordInvalid is returned when signing fails. It could also mean the signing +// key file was corrupted, but we have no way to distinguish. +func (err ErrPasswordInvalid) Error() string { + return "password invalid, operation has failed." +} + +// ErrKeyNotFound is returned when the keystore fails to retrieve a specific key. +type ErrKeyNotFound struct { + KeyID string +} + +// ErrKeyNotFound is returned when the keystore fails to retrieve a specific key. +func (err ErrKeyNotFound) Error() string { + return fmt.Sprintf("signing key not found: %s", err.KeyID) +} + +// KeyStore is a generic interface for private key storage +type KeyStore interface { + LimitedFileStore + + AddKey(name, alias string, privKey data.PrivateKey) error + GetKey(name string) (data.PrivateKey, string, error) + ListKeys() []string + RemoveKey(name string) error +} + +type cachedKey struct { + alias string + key data.PrivateKey +} + +// PassphraseRetriever is a callback function that should retrieve a passphrase +// for a given named key. If it should be treated as new passphrase (e.g. with +// confirmation), createNew will be true. Attempts is passed in so that implementers +// decide how many chances to give to a human, for example. +type PassphraseRetriever func(keyId, alias string, createNew bool, attempts int) (passphrase string, giveup bool, err error) + +// KeyFileStore persists and manages private keys on disk +type KeyFileStore struct { + sync.Mutex + SimpleFileStore + passphrase.Retriever + cachedKeys map[string]*cachedKey +} + +// KeyMemoryStore manages private keys in memory +type KeyMemoryStore struct { + sync.Mutex + MemoryFileStore + passphrase.Retriever + cachedKeys map[string]*cachedKey +} + +// NewKeyFileStore returns a new KeyFileStore creating a private directory to +// hold the keys. +func NewKeyFileStore(baseDir string, passphraseRetriever passphrase.Retriever) (*KeyFileStore, error) { + fileStore, err := NewPrivateSimpleFileStore(baseDir, keyExtension) + if err != nil { + return nil, err + } + cachedKeys := make(map[string]*cachedKey) + + return &KeyFileStore{SimpleFileStore: *fileStore, + Retriever: passphraseRetriever, + cachedKeys: cachedKeys}, nil +} + +// AddKey stores the contents of a PEM-encoded private key as a PEM block +func (s *KeyFileStore) AddKey(name, alias string, privKey data.PrivateKey) error { + s.Lock() + defer s.Unlock() + return addKey(s, s.Retriever, s.cachedKeys, name, alias, privKey) +} + +// GetKey returns the PrivateKey given a KeyID +func (s *KeyFileStore) GetKey(name string) (data.PrivateKey, string, error) { + s.Lock() + defer s.Unlock() + return getKey(s, s.Retriever, s.cachedKeys, name) +} + +// ListKeys returns a list of unique PublicKeys present on the KeyFileStore. 
+// There might be symlinks associating Certificate IDs to Public Keys, so this +// method only returns the IDs that aren't symlinks +func (s *KeyFileStore) ListKeys() []string { + return listKeys(s) +} + +// RemoveKey removes the key from the keyfilestore +func (s *KeyFileStore) RemoveKey(name string) error { + s.Lock() + defer s.Unlock() + return removeKey(s, s.cachedKeys, name) +} + +// NewKeyMemoryStore returns a new KeyMemoryStore which holds keys in memory +func NewKeyMemoryStore(passphraseRetriever passphrase.Retriever) *KeyMemoryStore { + memStore := NewMemoryFileStore() + cachedKeys := make(map[string]*cachedKey) + + return &KeyMemoryStore{MemoryFileStore: *memStore, + Retriever: passphraseRetriever, + cachedKeys: cachedKeys} +} + +// AddKey stores the contents of a PEM-encoded private key as a PEM block +func (s *KeyMemoryStore) AddKey(name, alias string, privKey data.PrivateKey) error { + s.Lock() + defer s.Unlock() + return addKey(s, s.Retriever, s.cachedKeys, name, alias, privKey) +} + +// GetKey returns the PrivateKey given a KeyID +func (s *KeyMemoryStore) GetKey(name string) (data.PrivateKey, string, error) { + s.Lock() + defer s.Unlock() + return getKey(s, s.Retriever, s.cachedKeys, name) +} + +// ListKeys returns a list of unique PublicKeys present on the KeyFileStore. +// There might be symlinks associating Certificate IDs to Public Keys, so this +// method only returns the IDs that aren't symlinks +func (s *KeyMemoryStore) ListKeys() []string { + return listKeys(s) +} + +// RemoveKey removes the key from the keystore +func (s *KeyMemoryStore) RemoveKey(name string) error { + s.Lock() + defer s.Unlock() + return removeKey(s, s.cachedKeys, name) +} + +func addKey(s LimitedFileStore, passphraseRetriever passphrase.Retriever, cachedKeys map[string]*cachedKey, name, alias string, privKey data.PrivateKey) error { + pemPrivKey, err := KeyToPEM(privKey) + if err != nil { + return err + } + + attempts := 0 + passphrase := "" + giveup := false + for { + passphrase, giveup, err = passphraseRetriever(name, alias, true, attempts) + if err != nil { + attempts++ + continue + } + if giveup { + return ErrAttemptsExceeded{} + } + if attempts > 10 { + return ErrAttemptsExceeded{} + } + break + } + + if passphrase != "" { + pemPrivKey, err = EncryptPrivateKey(privKey, passphrase) + if err != nil { + return err + } + } + + cachedKeys[name] = &cachedKey{alias: alias, key: privKey} + return s.Add(name+"_"+alias, pemPrivKey) +} + +func getKeyAlias(s LimitedFileStore, keyID string) (string, error) { + files := s.ListFiles(true) + name := strings.TrimSpace(strings.TrimSuffix(filepath.Base(keyID), filepath.Ext(keyID))) + + for _, file := range files { + filename := filepath.Base(file) + + if strings.HasPrefix(filename, name) { + aliasPlusDotKey := strings.TrimPrefix(filename, name+"_") + retVal := strings.TrimSuffix(aliasPlusDotKey, "."+keyExtension) + return retVal, nil + } + } + + return "", &ErrKeyNotFound{KeyID: keyID} +} + +// GetKey returns the PrivateKey given a KeyID +func getKey(s LimitedFileStore, passphraseRetriever passphrase.Retriever, cachedKeys map[string]*cachedKey, name string) (data.PrivateKey, string, error) { + cachedKeyEntry, ok := cachedKeys[name] + if ok { + return cachedKeyEntry.key, cachedKeyEntry.alias, nil + } + keyAlias, err := getKeyAlias(s, name) + if err != nil { + return nil, "", err + } + + keyBytes, err := s.Get(name + "_" + keyAlias) + if err != nil { + return nil, "", err + } + + var retErr error + // See if the key is encrypted. 
If its encrypted we'll fail to parse the private key + privKey, err := ParsePEMPrivateKey(keyBytes, "") + if err != nil { + // We need to decrypt the key, lets get a passphrase + for attempts := 0; ; attempts++ { + passphrase, giveup, err := passphraseRetriever(name, string(keyAlias), false, attempts) + // Check if the passphrase retriever got an error or if it is telling us to give up + if giveup || err != nil { + return nil, "", ErrPasswordInvalid{} + } + if attempts > 10 { + return nil, "", ErrAttemptsExceeded{} + } + + // Try to convert PEM encoded bytes back to a PrivateKey using the passphrase + privKey, err = ParsePEMPrivateKey(keyBytes, passphrase) + if err != nil { + retErr = ErrPasswordInvalid{} + } else { + // We managed to parse the PrivateKey. We've succeeded! + retErr = nil + break + } + } + } + if retErr != nil { + return nil, "", retErr + } + cachedKeys[name] = &cachedKey{alias: keyAlias, key: privKey} + return privKey, keyAlias, nil +} + +// ListKeys returns a list of unique PublicKeys present on the KeyFileStore. +// There might be symlinks associating Certificate IDs to Public Keys, so this +// method only returns the IDs that aren't symlinks +func listKeys(s LimitedFileStore) []string { + var keyIDList []string + + for _, f := range s.ListFiles(false) { + keyID := strings.TrimSpace(strings.TrimSuffix(f, filepath.Ext(f))) + keyID = keyID[:strings.LastIndex(keyID, "_")] + keyIDList = append(keyIDList, keyID) + } + return keyIDList +} + +// RemoveKey removes the key from the keyfilestore +func removeKey(s LimitedFileStore, cachedKeys map[string]*cachedKey, name string) error { + keyAlias, err := getKeyAlias(s, name) + if err != nil { + return err + } + + delete(cachedKeys, name) + + return s.Remove(name + "_" + keyAlias) +} diff --git a/vendor/src/github.com/docker/notary/trustmanager/x509filestore.go b/vendor/src/github.com/docker/notary/trustmanager/x509filestore.go new file mode 100644 index 0000000000..1df6233191 --- /dev/null +++ b/vendor/src/github.com/docker/notary/trustmanager/x509filestore.go @@ -0,0 +1,270 @@ +package trustmanager + +import ( + "crypto/x509" + "errors" + "os" + "path" + + "github.com/Sirupsen/logrus" +) + +// X509FileStore implements X509Store that persists on disk +type X509FileStore struct { + validate Validator + fileMap map[CertID]string + fingerprintMap map[CertID]*x509.Certificate + nameMap map[string][]CertID + fileStore FileStore +} + +// NewX509FileStore returns a new X509FileStore. +func NewX509FileStore(directory string) (*X509FileStore, error) { + validate := ValidatorFunc(func(cert *x509.Certificate) bool { return true }) + return newX509FileStore(directory, validate) +} + +// NewX509FilteredFileStore returns a new X509FileStore that validates certificates +// that are added. 
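As an aside (illustrative, not part of the vendored file): the validate argument is just a func(*x509.Certificate) bool, so callers can restrict what the store accepts. A sketch with a hypothetical directory path:

package example

import (
	"crypto/x509"
	"time"

	"github.com/docker/notary/trustmanager"
)

// newCAOnlyStore opens a filtered store that only accepts CA certificates that
// are currently within their validity window; anything else is rejected with
// ErrCertValidation at add time. The directory path is illustrative.
func newCAOnlyStore() (*trustmanager.X509FileStore, error) {
	onlyValidCAs := func(cert *x509.Certificate) bool {
		now := time.Now()
		return cert.IsCA && now.After(cert.NotBefore) && now.Before(cert.NotAfter)
	}
	return trustmanager.NewX509FilteredFileStore("/path/to/trusted_certs", onlyValidCAs)
}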
+func NewX509FilteredFileStore(directory string, validate func(*x509.Certificate) bool) (*X509FileStore, error) { + return newX509FileStore(directory, validate) +} + +func newX509FileStore(directory string, validate func(*x509.Certificate) bool) (*X509FileStore, error) { + fileStore, err := NewSimpleFileStore(directory, certExtension) + if err != nil { + return nil, err + } + + s := &X509FileStore{ + validate: ValidatorFunc(validate), + fileMap: make(map[CertID]string), + fingerprintMap: make(map[CertID]*x509.Certificate), + nameMap: make(map[string][]CertID), + fileStore: fileStore, + } + + err = loadCertsFromDir(s) + if err != nil { + return nil, err + } + + return s, nil +} + +// AddCert creates a filename for a given cert and adds a certificate with that name +func (s *X509FileStore) AddCert(cert *x509.Certificate) error { + if cert == nil { + return errors.New("adding nil Certificate to X509Store") + } + + // Check if this certificate meets our validation criteria + if !s.validate.Validate(cert) { + return &ErrCertValidation{} + } + // Attempt to write the certificate to the file + if err := s.addNamedCert(cert); err != nil { + return err + } + + return nil +} + +// addNamedCert allows adding a certificate while controlling the filename it gets +// stored under. If the file does not exist on disk, saves it. +func (s *X509FileStore) addNamedCert(cert *x509.Certificate) error { + fileName, certID, err := fileName(cert) + if err != nil { + return err + } + + logrus.Debug("Adding cert with certID: ", certID) + // Validate if we already added this certificate before + if _, ok := s.fingerprintMap[certID]; ok { + return &ErrCertExists{} + } + + // Convert certificate to PEM + certBytes := CertToPEM(cert) + + // Save the file to disk if not already there. + filePath, err := s.fileStore.GetPath(fileName) + if err != nil { + return err + } + if _, err := os.Stat(filePath); os.IsNotExist(err) { + if err := s.fileStore.Add(fileName, certBytes); err != nil { + return err + } + } else if err != nil { + return err + } + + // We wrote the certificate succcessfully, add it to our in-memory storage + s.fingerprintMap[certID] = cert + s.fileMap[certID] = fileName + + name := string(cert.Subject.CommonName) + s.nameMap[name] = append(s.nameMap[name], certID) + + return nil +} + +// RemoveCert removes a certificate from a X509FileStore. 
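A brief illustrative sketch (not part of the vendored file): certificates are indexed by a TUF-style fingerprint rather than by subject, and FingerprintCert, defined later in this vendor drop in x509utils.go, yields the same CertID the store uses internally, so a caller can re-derive the lookup key from a certificate it already holds.

package example

import (
	"crypto/x509"
	"fmt"

	"github.com/docker/notary/trustmanager"
)

// lookupByFingerprint re-derives the store's ID for a certificate and looks it
// up again. The store and certificate are assumed to come from elsewhere.
func lookupByFingerprint(store *trustmanager.X509FileStore, cert *x509.Certificate) error {
	certID, err := trustmanager.FingerprintCert(cert)
	if err != nil {
		return err
	}
	found, err := store.GetCertificateByCertID(certID)
	if err != nil {
		return err
	}
	fmt.Println("found certificate for", found.Subject.CommonName)
	return nil
}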
+func (s *X509FileStore) RemoveCert(cert *x509.Certificate) error { + if cert == nil { + return errors.New("removing nil Certificate from X509Store") + } + + certID, err := fingerprintCert(cert) + if err != nil { + return err + } + delete(s.fingerprintMap, certID) + filename := s.fileMap[certID] + delete(s.fileMap, certID) + + name := string(cert.Subject.CommonName) + + // Filter the fingerprint out of this name entry + fpList := s.nameMap[name] + newfpList := fpList[:0] + for _, x := range fpList { + if x != certID { + newfpList = append(newfpList, x) + } + } + + s.nameMap[name] = newfpList + + if err := s.fileStore.Remove(filename); err != nil { + return err + } + + return nil +} + +// RemoveAll removes all the certificates from the store +func (s *X509FileStore) RemoveAll() error { + for _, filename := range s.fileMap { + if err := s.fileStore.Remove(filename); err != nil { + return err + } + } + s.fileMap = make(map[CertID]string) + s.fingerprintMap = make(map[CertID]*x509.Certificate) + s.nameMap = make(map[string][]CertID) + + return nil +} + +// AddCertFromPEM adds the first certificate that it finds in the byte[], returning +// an error if no Certificates are found +func (s X509FileStore) AddCertFromPEM(pemBytes []byte) error { + cert, err := LoadCertFromPEM(pemBytes) + if err != nil { + return err + } + return s.AddCert(cert) +} + +// AddCertFromFile tries to adds a X509 certificate to the store given a filename +func (s *X509FileStore) AddCertFromFile(filename string) error { + cert, err := LoadCertFromFile(filename) + if err != nil { + return err + } + + return s.AddCert(cert) +} + +// GetCertificates returns an array with all of the current X509 Certificates. +func (s *X509FileStore) GetCertificates() []*x509.Certificate { + certs := make([]*x509.Certificate, len(s.fingerprintMap)) + i := 0 + for _, v := range s.fingerprintMap { + certs[i] = v + i++ + } + return certs +} + +// GetCertificatePool returns an x509 CertPool loaded with all the certificates +// in the store. +func (s *X509FileStore) GetCertificatePool() *x509.CertPool { + pool := x509.NewCertPool() + + for _, v := range s.fingerprintMap { + pool.AddCert(v) + } + return pool +} + +// GetCertificateByCertID returns the certificate that matches a certain certID +func (s *X509FileStore) GetCertificateByCertID(certID string) (*x509.Certificate, error) { + return s.getCertificateByCertID(CertID(certID)) +} + +// getCertificateByCertID returns the certificate that matches a certain certID +func (s *X509FileStore) getCertificateByCertID(certID CertID) (*x509.Certificate, error) { + // If it does not look like a hex encoded sha256 hash, error + if len(certID) != 64 { + return nil, errors.New("invalid Subject Key Identifier") + } + + // Check to see if this subject key identifier exists + if cert, ok := s.fingerprintMap[CertID(certID)]; ok { + return cert, nil + + } + return nil, &ErrNoCertificatesFound{query: string(certID)} +} + +// GetCertificatesByCN returns all the certificates that match a specific +// CommonName +func (s *X509FileStore) GetCertificatesByCN(cn string) ([]*x509.Certificate, error) { + var certs []*x509.Certificate + if ids, ok := s.nameMap[cn]; ok { + for _, v := range ids { + cert, err := s.getCertificateByCertID(v) + if err != nil { + // This error should never happen. 
This would mean that we have + // an inconsistent X509FileStore + return nil, &ErrBadCertificateStore{} + } + certs = append(certs, cert) + } + } + if len(certs) == 0 { + return nil, &ErrNoCertificatesFound{query: cn} + } + + return certs, nil +} + +// GetVerifyOptions returns VerifyOptions with the certificates within the KeyStore +// as part of the roots list. This never allows the use of system roots, returning +// an error if there are no root CAs. +func (s *X509FileStore) GetVerifyOptions(dnsName string) (x509.VerifyOptions, error) { + // If we have no Certificates loaded return error (we don't want to rever to using + // system CAs). + if len(s.fingerprintMap) == 0 { + return x509.VerifyOptions{}, errors.New("no root CAs available") + } + + opts := x509.VerifyOptions{ + DNSName: dnsName, + Roots: s.GetCertificatePool(), + } + + return opts, nil +} + +func fileName(cert *x509.Certificate) (string, CertID, error) { + certID, err := fingerprintCert(cert) + if err != nil { + return "", "", err + } + + return path.Join(cert.Subject.CommonName, string(certID)), certID, nil +} diff --git a/vendor/src/github.com/docker/notary/trustmanager/x509memstore.go b/vendor/src/github.com/docker/notary/trustmanager/x509memstore.go new file mode 100644 index 0000000000..55666c0976 --- /dev/null +++ b/vendor/src/github.com/docker/notary/trustmanager/x509memstore.go @@ -0,0 +1,203 @@ +package trustmanager + +import ( + "crypto/x509" + "errors" + + "github.com/Sirupsen/logrus" +) + +// X509MemStore implements X509Store as an in-memory object with no persistence +type X509MemStore struct { + validate Validator + fingerprintMap map[CertID]*x509.Certificate + nameMap map[string][]CertID +} + +// NewX509MemStore returns a new X509MemStore. +func NewX509MemStore() *X509MemStore { + validate := ValidatorFunc(func(cert *x509.Certificate) bool { return true }) + + return &X509MemStore{ + validate: validate, + fingerprintMap: make(map[CertID]*x509.Certificate), + nameMap: make(map[string][]CertID), + } +} + +// NewX509FilteredMemStore returns a new X509Memstore that validates certificates +// that are added. +func NewX509FilteredMemStore(validate func(*x509.Certificate) bool) *X509MemStore { + s := &X509MemStore{ + + validate: ValidatorFunc(validate), + fingerprintMap: make(map[CertID]*x509.Certificate), + nameMap: make(map[string][]CertID), + } + + return s +} + +// AddCert adds a certificate to the store +func (s *X509MemStore) AddCert(cert *x509.Certificate) error { + if cert == nil { + return errors.New("adding nil Certificate to X509Store") + } + + if !s.validate.Validate(cert) { + return &ErrCertValidation{} + } + + certID, err := fingerprintCert(cert) + if err != nil { + return err + } + + logrus.Debug("Adding cert with certID: ", certID) + + // In this store we overwrite the certificate if it already exists + s.fingerprintMap[certID] = cert + name := string(cert.RawSubject) + s.nameMap[name] = append(s.nameMap[name], certID) + + return nil +} + +// RemoveCert removes a certificate from a X509MemStore. 
+func (s *X509MemStore) RemoveCert(cert *x509.Certificate) error { + if cert == nil { + return errors.New("removing nil Certificate to X509Store") + } + + certID, err := fingerprintCert(cert) + if err != nil { + return err + } + delete(s.fingerprintMap, certID) + name := string(cert.RawSubject) + + // Filter the fingerprint out of this name entry + fpList := s.nameMap[name] + newfpList := fpList[:0] + for _, x := range fpList { + if x != certID { + newfpList = append(newfpList, x) + } + } + + s.nameMap[name] = newfpList + return nil +} + +// RemoveAll removes all the certificates from the store +func (s *X509MemStore) RemoveAll() error { + + for _, cert := range s.fingerprintMap { + if err := s.RemoveCert(cert); err != nil { + return err + } + } + + return nil +} + +// AddCertFromPEM adds a certificate to the store from a PEM blob +func (s *X509MemStore) AddCertFromPEM(pemBytes []byte) error { + cert, err := LoadCertFromPEM(pemBytes) + if err != nil { + return err + } + return s.AddCert(cert) +} + +// AddCertFromFile tries to adds a X509 certificate to the store given a filename +func (s *X509MemStore) AddCertFromFile(originFilname string) error { + cert, err := LoadCertFromFile(originFilname) + if err != nil { + return err + } + + return s.AddCert(cert) +} + +// GetCertificates returns an array with all of the current X509 Certificates. +func (s *X509MemStore) GetCertificates() []*x509.Certificate { + certs := make([]*x509.Certificate, len(s.fingerprintMap)) + i := 0 + for _, v := range s.fingerprintMap { + certs[i] = v + i++ + } + return certs +} + +// GetCertificatePool returns an x509 CertPool loaded with all the certificates +// in the store. +func (s *X509MemStore) GetCertificatePool() *x509.CertPool { + pool := x509.NewCertPool() + + for _, v := range s.fingerprintMap { + pool.AddCert(v) + } + return pool +} + +// GetCertificateByCertID returns the certificate that matches a certain certID +func (s *X509MemStore) GetCertificateByCertID(certID string) (*x509.Certificate, error) { + return s.getCertificateByCertID(CertID(certID)) +} + +// getCertificateByCertID returns the certificate that matches a certain certID or error +func (s *X509MemStore) getCertificateByCertID(certID CertID) (*x509.Certificate, error) { + // If it does not look like a hex encoded sha256 hash, error + if len(certID) != 64 { + return nil, errors.New("invalid Subject Key Identifier") + } + + // Check to see if this subject key identifier exists + if cert, ok := s.fingerprintMap[CertID(certID)]; ok { + return cert, nil + + } + return nil, &ErrNoCertificatesFound{query: string(certID)} +} + +// GetCertificatesByCN returns all the certificates that match a specific +// CommonName +func (s *X509MemStore) GetCertificatesByCN(cn string) ([]*x509.Certificate, error) { + var certs []*x509.Certificate + if ids, ok := s.nameMap[cn]; ok { + for _, v := range ids { + cert, err := s.getCertificateByCertID(v) + if err != nil { + // This error should never happen. This would mean that we have + // an inconsistent X509MemStore + return nil, err + } + certs = append(certs, cert) + } + } + if len(certs) == 0 { + return nil, &ErrNoCertificatesFound{query: cn} + } + + return certs, nil +} + +// GetVerifyOptions returns VerifyOptions with the certificates within the KeyStore +// as part of the roots list. This never allows the use of system roots, returning +// an error if there are no root CAs. 
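For orientation (illustrative sketch, not part of the vendored file): because GetVerifyOptions never falls back to the system roots, a caller typically seeds the store with its own CA material first. The file path and DNS name below are placeholders.

package example

import (
	"crypto/x509"

	"github.com/docker/notary/trustmanager"
)

// verifyLeaf builds VerifyOptions from an in-memory store seeded with a root CA
// and verifies a leaf certificate (obtained elsewhere) against it.
func verifyLeaf(leaf *x509.Certificate) error {
	memStore := trustmanager.NewX509MemStore()
	if err := memStore.AddCertFromFile("/path/to/root-ca.crt"); err != nil {
		return err
	}

	// Roots contains only the certificates added above; system CAs are never used.
	opts, err := memStore.GetVerifyOptions("docker.io/library/example")
	if err != nil {
		return err
	}

	_, err = leaf.Verify(opts)
	return err
}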
+func (s *X509MemStore) GetVerifyOptions(dnsName string) (x509.VerifyOptions, error) { + // If we have no Certificates loaded return error (we don't want to rever to using + // system CAs). + if len(s.fingerprintMap) == 0 { + return x509.VerifyOptions{}, errors.New("no root CAs available") + } + + opts := x509.VerifyOptions{ + DNSName: dnsName, + Roots: s.GetCertificatePool(), + } + + return opts, nil +} diff --git a/vendor/src/github.com/docker/notary/trustmanager/x509store.go b/vendor/src/github.com/docker/notary/trustmanager/x509store.go new file mode 100644 index 0000000000..3736ff632c --- /dev/null +++ b/vendor/src/github.com/docker/notary/trustmanager/x509store.go @@ -0,0 +1,144 @@ +package trustmanager + +import ( + "crypto/x509" + "errors" + "fmt" +) + +const certExtension string = "crt" + +// ErrNoCertificatesFound is returned when no certificates are found for a +// GetCertificatesBy* +type ErrNoCertificatesFound struct { + query string +} + +// ErrNoCertificatesFound is returned when no certificates are found for a +// GetCertificatesBy* +func (err ErrNoCertificatesFound) Error() string { + return fmt.Sprintf("error, no certificates found in the keystore match: %s", err.query) +} + +// ErrCertValidation is returned when a certificate doesn't pass the store specific +// validations +type ErrCertValidation struct { +} + +// ErrCertValidation is returned when a certificate doesn't pass the store specific +// validations +func (err ErrCertValidation) Error() string { + return fmt.Sprintf("store-specific certificate validations failed") +} + +// ErrCertExists is returned when a Certificate already exists in the key store +type ErrCertExists struct { +} + +// ErrCertExists is returned when a Certificate already exists in the key store +func (err ErrCertExists) Error() string { + return fmt.Sprintf("certificate already in the store") +} + +// ErrBadCertificateStore is returned when there is an internal inconsistency +// in our x509 store +type ErrBadCertificateStore struct { +} + +// ErrBadCertificateStore is returned when there is an internal inconsistency +// in our x509 store +func (err ErrBadCertificateStore) Error() string { + return fmt.Sprintf("inconsistent certificate store") +} + +// X509Store is the interface for all X509Stores +type X509Store interface { + AddCert(cert *x509.Certificate) error + AddCertFromPEM(pemCerts []byte) error + AddCertFromFile(filename string) error + RemoveCert(cert *x509.Certificate) error + RemoveAll() error + GetCertificateByCertID(certID string) (*x509.Certificate, error) + GetCertificatesByCN(cn string) ([]*x509.Certificate, error) + GetCertificates() []*x509.Certificate + GetCertificatePool() *x509.CertPool + GetVerifyOptions(dnsName string) (x509.VerifyOptions, error) +} + +// CertID represent the ID used to identify certificates +type CertID string + +// Validator is a convenience type to create validating function that filters +// certificates that get added to the store +type Validator interface { + Validate(cert *x509.Certificate) bool +} + +// ValidatorFunc is a convenience type to create functions that implement +// the Validator interface +type ValidatorFunc func(cert *x509.Certificate) bool + +// Validate implements the Validator interface to allow for any func() bool method +// to be passed as a Validator +func (vf ValidatorFunc) Validate(cert *x509.Certificate) bool { + return vf(cert) +} + +// Verify operates on an X509Store and validates the existence of a chain of trust +// between a leafCertificate and a CA present inside of the 
X509 Store. +// It requires at least two certificates in certList, a leaf Certificate and an +// intermediate CA certificate. +func Verify(s X509Store, dnsName string, certList []*x509.Certificate) error { + // If we have no Certificates loaded return error (we don't want to revert to using + // system CAs). + if len(s.GetCertificates()) == 0 { + return errors.New("no root CAs available") + } + + // At a minimum we should be provided a leaf cert and an intermediate. + if len(certList) < 2 { + return errors.New("certificate and at least one intermediate needed") + } + + // Get the VerifyOptions from the keystore for a base dnsName + opts, err := s.GetVerifyOptions(dnsName) + if err != nil { + return err + } + + // Create a Certificate Pool for our intermediate certificates + intPool := x509.NewCertPool() + var leafCert *x509.Certificate + + // Iterate through all the certificates + for _, c := range certList { + // If the cert is a CA, we add it to the intermediates pool. If not, we call + // it the leaf cert + if c.IsCA { + intPool.AddCert(c) + continue + } + // Certificate is not a CA, it must be our leaf certificate. + // If we already found one, bail with error + if leafCert != nil { + return errors.New("more than one leaf certificate found") + } + leafCert = c + } + + // We exited the loop with no leaf certificates + if leafCert == nil { + return errors.New("no leaf certificates found") + } + + // We have one leaf certificate and at least one intermediate. Lets add this + // Cert Pool as the Intermediates list on our VerifyOptions + opts.Intermediates = intPool + + // Finally, let's call Verify on our leafCert with our fully configured options + chains, err := leafCert.Verify(opts) + if len(chains) == 0 || err != nil { + return fmt.Errorf("certificate verification failed: %v", err) + } + return nil +} diff --git a/vendor/src/github.com/docker/notary/trustmanager/x509utils.go b/vendor/src/github.com/docker/notary/trustmanager/x509utils.go new file mode 100644 index 0000000000..396bd052e0 --- /dev/null +++ b/vendor/src/github.com/docker/notary/trustmanager/x509utils.go @@ -0,0 +1,497 @@ +package trustmanager + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "crypto/x509/pkix" + "encoding/pem" + "errors" + "fmt" + "io" + "io/ioutil" + "math/big" + "net/http" + "net/url" + "path/filepath" + "time" + + "github.com/Sirupsen/logrus" + "github.com/agl/ed25519" + "github.com/endophage/gotuf/data" +) + +// GetCertFromURL tries to get a X509 certificate given a HTTPS URL +func GetCertFromURL(urlStr string) (*x509.Certificate, error) { + url, err := url.Parse(urlStr) + if err != nil { + return nil, err + } + + // Check if we are adding via HTTPS + if url.Scheme != "https" { + return nil, errors.New("only HTTPS URLs allowed") + } + + // Download the certificate and write to directory + resp, err := http.Get(url.String()) + if err != nil { + return nil, err + } + + // Copy the content to certBytes + defer resp.Body.Close() + certBytes, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + // Try to extract the first valid PEM certificate from the bytes + cert, err := LoadCertFromPEM(certBytes) + if err != nil { + return nil, err + } + + return cert, nil +} + +// CertToPEM is an utility function returns a PEM encoded x509 Certificate +func CertToPEM(cert *x509.Certificate) []byte { + pemCert := pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: cert.Raw}) + + return pemCert +} + +// LoadCertFromPEM returns the first 
certificate found in a bunch of bytes or error +// if nothing is found. Taken from https://golang.org/src/crypto/x509/cert_pool.go#L85. +func LoadCertFromPEM(pemBytes []byte) (*x509.Certificate, error) { + for len(pemBytes) > 0 { + var block *pem.Block + block, pemBytes = pem.Decode(pemBytes) + if block == nil { + return nil, errors.New("no certificates found in PEM data") + } + if block.Type != "CERTIFICATE" || len(block.Headers) != 0 { + continue + } + + cert, err := x509.ParseCertificate(block.Bytes) + if err != nil { + continue + } + + return cert, nil + } + + return nil, errors.New("no certificates found in PEM data") +} + +// FingerprintCert returns a TUF compliant fingerprint for a X509 Certificate +func FingerprintCert(cert *x509.Certificate) (string, error) { + certID, err := fingerprintCert(cert) + if err != nil { + return "", err + } + + return string(certID), nil +} + +func fingerprintCert(cert *x509.Certificate) (CertID, error) { + block := pem.Block{Type: "CERTIFICATE", Bytes: cert.Raw} + pemdata := pem.EncodeToMemory(&block) + + var keyType data.KeyAlgorithm + switch cert.PublicKeyAlgorithm { + case x509.RSA: + keyType = data.RSAx509Key + case x509.ECDSA: + keyType = data.ECDSAx509Key + default: + return "", fmt.Errorf("got Unknown key type while fingerprinting certificate") + } + + // Create new TUF Key so we can compute the TUF-compliant CertID + tufKey := data.NewPublicKey(keyType, pemdata) + + return CertID(tufKey.ID()), nil +} + +// loadCertsFromDir receives a store AddCertFromFile for each certificate found +func loadCertsFromDir(s *X509FileStore) error { + certFiles := s.fileStore.ListFiles(true) + for _, f := range certFiles { + // ListFiles returns relative paths + fullPath := filepath.Join(s.fileStore.BaseDir(), f) + err := s.AddCertFromFile(fullPath) + if err != nil { + if _, ok := err.(*ErrCertValidation); ok { + logrus.Debugf("ignoring certificate, did not pass validation: %s", f) + continue + } + if _, ok := err.(*ErrCertExists); ok { + logrus.Debugf("ignoring certificate, already exists in the store: %s", f) + continue + } + + return err + } + } + return nil +} + +// LoadCertFromFile loads the first certificate from the file provided. The +// data is expected to be PEM Encoded and contain one of more certificates +// with PEM type "CERTIFICATE" +func LoadCertFromFile(filename string) (*x509.Certificate, error) { + certs, err := LoadCertBundleFromFile(filename) + if err != nil { + return nil, err + } + return certs[0], nil +} + +// LoadCertBundleFromFile loads certificates from the []byte provided. The +// data is expected to be PEM Encoded and contain one of more certificates +// with PEM type "CERTIFICATE" +func LoadCertBundleFromFile(filename string) ([]*x509.Certificate, error) { + b, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + + return LoadCertBundleFromPEM(b) +} + +// LoadCertBundleFromPEM loads certificates from the []byte provided. 
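An illustrative sketch (not part of the vendored file): the bundle helpers pair naturally with the package-level Verify function from x509store.go above, loading a leaf-plus-intermediates bundle and checking it against the CAs already held by a store. The path and DNS name are hypothetical.

package example

import (
	"fmt"

	"github.com/docker/notary/trustmanager"
)

// verifyBundle loads a PEM bundle and checks it against the given store.
func verifyBundle(store trustmanager.X509Store) error {
	bundle, err := trustmanager.LoadCertBundleFromFile("/path/to/chain.pem")
	if err != nil {
		return err
	}

	// Verify requires the store to hold at least one root CA and the bundle to
	// contain exactly one leaf plus at least one intermediate CA certificate.
	if err := trustmanager.Verify(store, "docker.io/library/example", bundle); err != nil {
		return fmt.Errorf("certificate chain did not verify: %v", err)
	}
	return nil
}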
The +// data is expected to be PEM Encoded and contain one of more certificates +// with PEM type "CERTIFICATE" +func LoadCertBundleFromPEM(pemBytes []byte) ([]*x509.Certificate, error) { + certificates := []*x509.Certificate{} + var block *pem.Block + block, pemBytes = pem.Decode(pemBytes) + for ; block != nil; block, pemBytes = pem.Decode(pemBytes) { + if block.Type == "CERTIFICATE" { + cert, err := x509.ParseCertificate(block.Bytes) + if err != nil { + return nil, err + } + certificates = append(certificates, cert) + } else { + return nil, fmt.Errorf("invalid pem block type: %s", block.Type) + } + } + + if len(certificates) == 0 { + return nil, fmt.Errorf("no valid certificates found") + } + + return certificates, nil +} + +// GetLeafCerts parses a list of x509 Certificates and returns all of them +// that aren't CA +func GetLeafCerts(certs []*x509.Certificate) []*x509.Certificate { + var leafCerts []*x509.Certificate + for _, cert := range certs { + if cert.IsCA { + continue + } + leafCerts = append(leafCerts, cert) + } + return leafCerts +} + +// GetIntermediateCerts parses a list of x509 Certificates and returns all of the +// ones marked as a CA, to be used as intermediates +func GetIntermediateCerts(certs []*x509.Certificate) (intCerts []*x509.Certificate) { + for _, cert := range certs { + if cert.IsCA { + intCerts = append(intCerts, cert) + } + } + return intCerts +} + +// ParsePEMPrivateKey returns a data.PrivateKey from a PEM encoded private key. It +// only supports RSA (PKCS#1) and attempts to decrypt using the passphrase, if encrypted. +func ParsePEMPrivateKey(pemBytes []byte, passphrase string) (data.PrivateKey, error) { + block, _ := pem.Decode(pemBytes) + if block == nil { + return nil, errors.New("no valid private key found") + } + + switch block.Type { + case "RSA PRIVATE KEY": + var privKeyBytes []byte + var err error + + if x509.IsEncryptedPEMBlock(block) { + privKeyBytes, err = x509.DecryptPEMBlock(block, []byte(passphrase)) + if err != nil { + return nil, errors.New("could not decrypt private key") + } + } else { + privKeyBytes = block.Bytes + } + + rsaPrivKey, err := x509.ParsePKCS1PrivateKey(privKeyBytes) + if err != nil { + return nil, fmt.Errorf("could not parse DER encoded key: %v", err) + } + + tufRSAPrivateKey, err := RSAToPrivateKey(rsaPrivKey) + if err != nil { + return nil, fmt.Errorf("could not convert rsa.PrivateKey to data.PrivateKey: %v", err) + } + + return tufRSAPrivateKey, nil + case "EC PRIVATE KEY": + var privKeyBytes []byte + var err error + + if x509.IsEncryptedPEMBlock(block) { + privKeyBytes, err = x509.DecryptPEMBlock(block, []byte(passphrase)) + if err != nil { + return nil, errors.New("could not decrypt private key") + } + } else { + privKeyBytes = block.Bytes + } + + ecdsaPrivKey, err := x509.ParseECPrivateKey(privKeyBytes) + if err != nil { + return nil, fmt.Errorf("could not parse DER encoded private key: %v", err) + } + + tufECDSAPrivateKey, err := ECDSAToPrivateKey(ecdsaPrivKey) + if err != nil { + return nil, fmt.Errorf("could not convert ecdsa.PrivateKey to data.PrivateKey: %v", err) + } + + return tufECDSAPrivateKey, nil + case "ED25519 PRIVATE KEY": + // We serialize ED25519 keys by concatenating the private key + // to the public key and encoding with PEM. See the + // ED25519ToPrivateKey function. 
+ var privKeyBytes []byte + var err error + + if x509.IsEncryptedPEMBlock(block) { + privKeyBytes, err = x509.DecryptPEMBlock(block, []byte(passphrase)) + if err != nil { + return nil, errors.New("could not decrypt private key") + } + } else { + privKeyBytes = block.Bytes + } + + tufECDSAPrivateKey, err := ED25519ToPrivateKey(privKeyBytes) + if err != nil { + return nil, fmt.Errorf("could not convert ecdsa.PrivateKey to data.PrivateKey: %v", err) + } + + return tufECDSAPrivateKey, nil + + default: + return nil, fmt.Errorf("unsupported key type %q", block.Type) + } +} + +// GenerateRSAKey generates an RSA private key and returns a TUF PrivateKey +func GenerateRSAKey(random io.Reader, bits int) (data.PrivateKey, error) { + rsaPrivKey, err := rsa.GenerateKey(random, bits) + if err != nil { + return nil, fmt.Errorf("could not generate private key: %v", err) + } + + tufPrivKey, err := RSAToPrivateKey(rsaPrivKey) + if err != nil { + return nil, err + } + + logrus.Debugf("generated RSA key with keyID: %s", tufPrivKey.ID()) + + return tufPrivKey, nil +} + +// RSAToPrivateKey converts an rsa.Private key to a TUF data.PrivateKey type +func RSAToPrivateKey(rsaPrivKey *rsa.PrivateKey) (data.PrivateKey, error) { + // Get a DER-encoded representation of the PublicKey + rsaPubBytes, err := x509.MarshalPKIXPublicKey(&rsaPrivKey.PublicKey) + if err != nil { + return nil, fmt.Errorf("failed to marshal public key: %v", err) + } + + // Get a DER-encoded representation of the PrivateKey + rsaPrivBytes := x509.MarshalPKCS1PrivateKey(rsaPrivKey) + + return data.NewPrivateKey(data.RSAKey, rsaPubBytes, rsaPrivBytes), nil +} + +// GenerateECDSAKey generates an ECDSA Private key and returns a TUF PrivateKey +func GenerateECDSAKey(random io.Reader) (data.PrivateKey, error) { + ecdsaPrivKey, err := ecdsa.GenerateKey(elliptic.P256(), random) + if err != nil { + return nil, err + } + + tufPrivKey, err := ECDSAToPrivateKey(ecdsaPrivKey) + if err != nil { + return nil, err + } + + logrus.Debugf("generated ECDSA key with keyID: %s", tufPrivKey.ID()) + + return tufPrivKey, nil +} + +// GenerateED25519Key generates an ED25519 private key and returns a TUF +// PrivateKey. 
The serialization format we use is just the public key bytes +// followed by the private key bytes +func GenerateED25519Key(random io.Reader) (data.PrivateKey, error) { + pub, priv, err := ed25519.GenerateKey(rand.Reader) + if err != nil { + return nil, err + } + + var serialized [ed25519.PublicKeySize + ed25519.PrivateKeySize]byte + copy(serialized[:], pub[:]) + copy(serialized[ed25519.PublicKeySize:], priv[:]) + + tufPrivKey, err := ED25519ToPrivateKey(serialized[:]) + if err != nil { + return nil, err + } + + logrus.Debugf("generated ED25519 key with keyID: %s", tufPrivKey.ID()) + + return tufPrivKey, nil +} + +// ECDSAToPrivateKey converts an rsa.Private key to a TUF data.PrivateKey type +func ECDSAToPrivateKey(ecdsaPrivKey *ecdsa.PrivateKey) (data.PrivateKey, error) { + // Get a DER-encoded representation of the PublicKey + ecdsaPubBytes, err := x509.MarshalPKIXPublicKey(&ecdsaPrivKey.PublicKey) + if err != nil { + return nil, fmt.Errorf("failed to marshal public key: %v", err) + } + + // Get a DER-encoded representation of the PrivateKey + ecdsaPrivKeyBytes, err := x509.MarshalECPrivateKey(ecdsaPrivKey) + if err != nil { + return nil, fmt.Errorf("failed to marshal private key: %v", err) + } + + return data.NewPrivateKey(data.ECDSAKey, ecdsaPubBytes, ecdsaPrivKeyBytes), nil +} + +// ED25519ToPrivateKey converts a serialized ED25519 key to a TUF +// data.PrivateKey type +func ED25519ToPrivateKey(privKeyBytes []byte) (data.PrivateKey, error) { + if len(privKeyBytes) != ed25519.PublicKeySize+ed25519.PrivateKeySize { + return nil, errors.New("malformed ed25519 private key") + } + + return data.NewPrivateKey(data.ED25519Key, privKeyBytes[:ed25519.PublicKeySize], privKeyBytes), nil +} + +func blockType(algorithm data.KeyAlgorithm) (string, error) { + switch algorithm { + case data.RSAKey: + return "RSA PRIVATE KEY", nil + case data.ECDSAKey: + return "EC PRIVATE KEY", nil + case data.ED25519Key: + return "ED25519 PRIVATE KEY", nil + default: + return "", fmt.Errorf("algorithm %s not supported", algorithm) + } +} + +// KeyToPEM returns a PEM encoded key from a Private Key +func KeyToPEM(privKey data.PrivateKey) ([]byte, error) { + blockType, err := blockType(privKey.Algorithm()) + if err != nil { + return nil, err + } + + return pem.EncodeToMemory(&pem.Block{Type: blockType, Bytes: privKey.Private()}), nil +} + +// EncryptPrivateKey returns an encrypted PEM key given a Privatekey +// and a passphrase +func EncryptPrivateKey(key data.PrivateKey, passphrase string) ([]byte, error) { + blockType, err := blockType(key.Algorithm()) + if err != nil { + return nil, err + } + + password := []byte(passphrase) + cipherType := x509.PEMCipherAES256 + + encryptedPEMBlock, err := x509.EncryptPEMBlock(rand.Reader, + blockType, + key.Private(), + password, + cipherType) + if err != nil { + return nil, err + } + + return pem.EncodeToMemory(encryptedPEMBlock), nil +} + +// CertsToKeys transforms each of the input certificates into it's corresponding +// PublicKey +func CertsToKeys(certs []*x509.Certificate) map[string]data.PublicKey { + keys := make(map[string]data.PublicKey) + for _, cert := range certs { + block := pem.Block{Type: "CERTIFICATE", Bytes: cert.Raw} + pemdata := pem.EncodeToMemory(&block) + + var keyType data.KeyAlgorithm + switch cert.PublicKeyAlgorithm { + case x509.RSA: + keyType = data.RSAx509Key + case x509.ECDSA: + keyType = data.ECDSAx509Key + default: + logrus.Debugf("unknown certificate type found, ignoring") + } + + // Create new the appropriate PublicKey + newKey := 
data.NewPublicKey(keyType, pemdata) + keys[newKey.ID()] = newKey + } + + return keys +} + +// NewCertificate returns an X509 Certificate following a template, given a GUN. +func NewCertificate(gun string) (*x509.Certificate, error) { + notBefore := time.Now() + // Certificates will expire in 10 years + notAfter := notBefore.Add(time.Hour * 24 * 365 * 10) + + serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128) + + serialNumber, err := rand.Int(rand.Reader, serialNumberLimit) + if err != nil { + return nil, fmt.Errorf("failed to generate new certificate: %v", err) + } + + return &x509.Certificate{ + SerialNumber: serialNumber, + Subject: pkix.Name{ + CommonName: gun, + }, + NotBefore: notBefore, + NotAfter: notAfter, + + KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature, + ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageCodeSigning}, + BasicConstraintsValid: true, + }, nil +} diff --git a/vendor/src/github.com/endophage/gotuf/.gitignore b/vendor/src/github.com/endophage/gotuf/.gitignore new file mode 100644 index 0000000000..2928b6ae98 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/.gitignore @@ -0,0 +1,3 @@ +/db/ +*.bkp +*.swp diff --git a/vendor/src/github.com/endophage/gotuf/.travis.yml b/vendor/src/github.com/endophage/gotuf/.travis.yml new file mode 100644 index 0000000000..b0822a6c99 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/.travis.yml @@ -0,0 +1,30 @@ +language: go +go: + - 1.4 + - tip + +sudo: false + +before_install: + - go get golang.org/x/tools/cmd/cover + +script: + - go test -race -cover ./... + +notifications: + irc: + channels: + - "chat.freenode.net#flynn" + use_notice: true + skip_join: true + on_success: change + on_failure: always + template: + - "%{repository}/%{branch} - %{commit}: %{message} %{build_url}" + email: + on_success: never + on_failure: always + +matrix: + allow_failures: + - go: tip diff --git a/vendor/src/github.com/endophage/gotuf/CONTRIBUTORS b/vendor/src/github.com/endophage/gotuf/CONTRIBUTORS new file mode 100644 index 0000000000..5f484889cb --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/CONTRIBUTORS @@ -0,0 +1,3 @@ +Aaron Lehmann (github: aaronlehmann) +Lewis Marshall (github: lmars) +Jonathan Rudenberg (github: titanous) diff --git a/vendor/src/github.com/endophage/gotuf/LICENSE b/vendor/src/github.com/endophage/gotuf/LICENSE new file mode 100644 index 0000000000..d92ae9ee14 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/LICENSE @@ -0,0 +1,30 @@ +Copyright (c) 2015, Docker Inc. +Copyright (c) 2014-2015 Prime Directive, Inc. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Prime Directive, Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/src/github.com/endophage/gotuf/MAINTAINERS b/vendor/src/github.com/endophage/gotuf/MAINTAINERS new file mode 100644 index 0000000000..d7dda3f4fc --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/MAINTAINERS @@ -0,0 +1 @@ +David Lawrence (github: endophage) diff --git a/vendor/src/github.com/endophage/gotuf/Makefile b/vendor/src/github.com/endophage/gotuf/Makefile new file mode 100644 index 0000000000..02c6d4d1b3 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/Makefile @@ -0,0 +1,34 @@ +# Set an output prefix, which is the local directory if not specified +PREFIX?=$(shell pwd) + +vet: + @echo "+ $@" + @go vet ./... + +fmt: + @echo "+ $@" + @test -z "$$(gofmt -s -l . | grep -v Godeps/_workspace/src/ | tee /dev/stderr)" || \ + echo "+ please format Go code with 'gofmt -s'" + +lint: + @echo "+ $@" + @test -z "$$(golint ./... | grep -v Godeps/_workspace/src/ | tee /dev/stderr)" + +build: + @echo "+ $@" + @go build -v ${GO_LDFLAGS} ./... + +test: + @echo "+ $@" + @go test -test.short ./... + +test-full: + @echo "+ $@" + @go test ./... + +binaries: ${PREFIX}/bin/registry ${PREFIX}/bin/registry-api-descriptor-template ${PREFIX}/bin/dist + @echo "+ $@" + +clean: + @echo "+ $@" + @rm -rf "${PREFIX}/bin/registry" "${PREFIX}/bin/registry-api-descriptor-template" diff --git a/vendor/src/github.com/endophage/gotuf/README.md b/vendor/src/github.com/endophage/gotuf/README.md new file mode 100644 index 0000000000..ac8d6d1132 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/README.md @@ -0,0 +1,36 @@ +# GOTUF + +This is still a work in progress but will shortly be a fully compliant +Go implementation of [The Update Framework (TUF)](http://theupdateframework.com/). + +## Where's the CLI + +This repository provides a library only. The [Notary project](https://github.com/docker/notary) +from Docker should be considered the official CLI to be used with this implementation of TUF. + +## TODOs: + +- [X] Add Targets to existing repo +- [X] Sign metadata files +- [X] Refactor TufRepo to take care of signing ~~and verification~~ +- [ ] Ensure consistent capitalization in naming (TUF\_\_\_ vs Tuf\_\_\_) +- [X] Make caching of metadata files smarter - PR #5 +- [ ] ~~Add configuration for CLI commands. Order of configuration priority from most to least: flags, config file, defaults~~ Notary should be the official CLI +- [X] Reasses organization of data types. Possibly consolidate a few things into the data package but break up package into a few more distinct files +- [ ] Comprehensive test cases +- [ ] Delete files no longer in use +- [ ] Fix up errors. Some have to be instantiated, others don't, the inconsistency is annoying. 
+- [X] Bump version numbers in meta files (could probably be done better) + +## Credits + +This implementation was originally forked from [flynn/go-tuf](https://github.com/flynn/go-tuf), +however in attempting to add delegations I found I was making such +significant changes that I could not maintain backwards compatibility +without the code becoming overly convoluted. + +Some features such as pluggable verifiers have alreayd been merged upstream to flynn/go-tuf +and we are in discussion with [titanous](https://github.com/titanous) about working to merge the 2 implementations. + +This implementation retains the same 3 Clause BSD license present on +the original flynn implementation. diff --git a/vendor/src/github.com/endophage/gotuf/client/client.go b/vendor/src/github.com/endophage/gotuf/client/client.go new file mode 100644 index 0000000000..7d7c63a369 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/client/client.go @@ -0,0 +1,538 @@ +package client + +import ( + "bytes" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "path" + "path/filepath" + "strings" + + "github.com/Sirupsen/logrus" + tuf "github.com/endophage/gotuf" + "github.com/endophage/gotuf/data" + "github.com/endophage/gotuf/keys" + "github.com/endophage/gotuf/signed" + "github.com/endophage/gotuf/store" + "github.com/endophage/gotuf/utils" +) + +const maxSize int64 = 5 << 20 + +type Client struct { + local *tuf.TufRepo + remote store.RemoteStore + keysDB *keys.KeyDB + cache store.MetadataStore +} + +func NewClient(local *tuf.TufRepo, remote store.RemoteStore, keysDB *keys.KeyDB, cache store.MetadataStore) *Client { + return &Client{ + local: local, + remote: remote, + keysDB: keysDB, + cache: cache, + } +} + +func (c *Client) Update() error { + // 1. Get timestamp + // a. If timestamp error (verification, expired, etc...) download new root and return to 1. + // 2. Check if local snapshot is up to date + // a. If out of date, get updated snapshot + // i. If snapshot error, download new root and return to 1. + // 3. Check if root correct against snapshot + // a. If incorrect, download new root and return to 1. + // 4. Iteratively download and search targets and delegations to find target meta + logrus.Debug("updating TUF client") + err := c.update() + if err != nil { + switch err.(type) { + case signed.ErrRoleThreshold, signed.ErrExpired, tuf.ErrLocalRootExpired: + logrus.Debug("retryable error occurred. Root will be downloaded and another update attempted") + if err := c.downloadRoot(); err != nil { + logrus.Errorf("client Update (Root):", err) + return err + } + default: + logrus.Error("an unexpected error occurred while updating TUF client") + return err + } + // If we error again, we now have the latest root and just want to fail + // out as there's no expectation the problem can be resolved automatically + logrus.Debug("retrying TUF client update") + return c.update() + } + return nil +} + +func (c *Client) update() error { + err := c.downloadTimestamp() + if err != nil { + logrus.Errorf("Client Update (Timestamp): %s", err.Error()) + return err + } + err = c.downloadSnapshot() + if err != nil { + logrus.Errorf("Client Update (Snapshot): %s", err.Error()) + return err + } + err = c.checkRoot() + if err != nil { + // In this instance the root has not expired base on time, but is + // expired based on the snapshot dictating a new root has been produced. 
+ logrus.Info(err.Error()) + return tuf.ErrLocalRootExpired{} + } + // will always need top level targets at a minimum + err = c.downloadTargets("targets") + if err != nil { + logrus.Errorf("Client Update (Targets): %s", err.Error()) + return err + } + return nil +} + +// checkRoot determines if the hash, and size are still those reported +// in the snapshot file. It will also check the expiry, however, if the +// hash and size in snapshot are unchanged but the root file has expired, +// there is little expectation that the situation can be remedied. +func (c Client) checkRoot() error { + role := data.RoleName("root") + size := c.local.Snapshot.Signed.Meta[role].Length + hashSha256 := c.local.Snapshot.Signed.Meta[role].Hashes["sha256"] + + raw, err := c.cache.GetMeta("root", size) + if err != nil { + return err + } + + hash := sha256.Sum256(raw) + if !bytes.Equal(hash[:], hashSha256) { + return fmt.Errorf("Cached root sha256 did not match snapshot root sha256") + } + return nil +} + +// downloadRoot is responsible for downloading the root.json +func (c *Client) downloadRoot() error { + role := data.RoleName("root") + size := maxSize + var expectedSha256 []byte = nil + if c.local.Snapshot != nil { + size = c.local.Snapshot.Signed.Meta[role].Length + expectedSha256 = c.local.Snapshot.Signed.Meta[role].Hashes["sha256"] + } + + // if we're bootstrapping we may not have a cached root, an + // error will result in the "previous root version" being + // interpreted as 0. + var download bool + var err error + var cachedRoot []byte = nil + old := &data.Signed{} + version := 0 + + if expectedSha256 != nil { + // can only trust cache if we have an expected sha256 to trust + cachedRoot, err = c.cache.GetMeta(role, size) + } + + if cachedRoot == nil || err != nil { + logrus.Debug("didn't find a cached root, must download") + download = true + } else { + hash := sha256.Sum256(cachedRoot) + if !bytes.Equal(hash[:], expectedSha256) { + logrus.Debug("cached root's hash didn't match expected, must download") + download = true + } + err := json.Unmarshal(cachedRoot, old) + if err == nil { + root, err := data.RootFromSigned(old) + if err == nil { + version = root.Signed.Version + } else { + logrus.Debug("couldn't parse Signed part of cached root, must download") + download = true + } + } else { + logrus.Debug("couldn't parse cached root, must download") + download = true + } + } + var s *data.Signed + var raw []byte + if download { + logrus.Debug("downloading new root") + raw, err = c.remote.GetMeta(role, size) + if err != nil { + return err + } + hash := sha256.Sum256(raw) + if expectedSha256 != nil && !bytes.Equal(hash[:], expectedSha256) { + // if we don't have an expected sha256, we're going to trust the root + // based purely on signature and expiry time validation + return fmt.Errorf("Remote root sha256 did not match snapshot root sha256: %#x vs. 
%#x", hash, []byte(expectedSha256)) + } + s = &data.Signed{} + err = json.Unmarshal(raw, s) + if err != nil { + return err + } + } else { + logrus.Debug("using cached root") + s = old + } + if err := c.verifyRoot(role, s, version); err != nil { + return err + } + if download { + logrus.Debug("caching downloaded root") + // Now that we have accepted new root, write it to cache + if err = c.cache.SetMeta(role, raw); err != nil { + logrus.Errorf("Failed to write root to local cache: %s", err.Error()) + } + } + return nil +} + +func (c Client) verifyRoot(role string, s *data.Signed, minVersion int) error { + // this will confirm that the root has been signed by the old root role + // as c.keysDB contains the root keys we bootstrapped with. + // Still need to determine if there has been a root key update and + // confirm signature with new root key + logrus.Debug("verifying root with existing keys") + err := signed.Verify(s, role, minVersion, c.keysDB) + if err != nil { + logrus.Debug("root did not verify with existing keys") + return err + } + + // This will cause keyDB to get updated, overwriting any keyIDs associated + // with the roles in root.json + logrus.Debug("updating known root roles and keys") + err = c.local.SetRoot(s) + if err != nil { + logrus.Error(err.Error()) + return err + } + // verify again now that the old keys have been replaced with the new keys. + // TODO(endophage): be more intelligent and only re-verify if we detect + // there has been a change in root keys + logrus.Debug("verifying root with updated keys") + err = signed.Verify(s, role, minVersion, c.keysDB) + if err != nil { + logrus.Debug("root did not verify with new keys") + return err + } + logrus.Debug("successfully verified root") + return nil +} + +// downloadTimestamp is responsible for downloading the timestamp.json +func (c *Client) downloadTimestamp() error { + logrus.Debug("downloadTimestamp") + role := data.RoleName("timestamp") + + // We may not have a cached timestamp if this is the first time + // we're interacting with the repo. This will result in the + // version being 0 + var download bool + old := &data.Signed{} + version := 0 + cachedTS, err := c.cache.GetMeta(role, maxSize) + if err == nil { + err := json.Unmarshal(cachedTS, old) + if err == nil { + ts, err := data.TimestampFromSigned(old) + if err == nil { + version = ts.Signed.Version + } + } else { + old = nil + } + } + // unlike root, targets and snapshot, always try and download timestamps + // from remote, only using the cache one if we couldn't reach remote. + logrus.Debug("Downloading timestamp") + raw, err := c.remote.GetMeta(role, maxSize) + var s *data.Signed + if err != nil || len(raw) == 0 { + if err, ok := err.(store.ErrMetaNotFound); ok { + return err + } + if old == nil { + if err == nil { + // couldn't retrieve data from server and don't have valid + // data in cache. 
+ return store.ErrMetaNotFound{} + } + return err + } + s = old + } else { + download = true + s = &data.Signed{} + err = json.Unmarshal(raw, s) + if err != nil { + return err + } + } + err = signed.Verify(s, role, version, c.keysDB) + if err != nil { + return err + } + logrus.Debug("successfully verified timestamp") + if download { + c.cache.SetMeta(role, raw) + } + c.local.SetTimestamp(s) + return nil +} + +// downloadSnapshot is responsible for downloading the snapshot.json +func (c *Client) downloadSnapshot() error { + logrus.Debug("downloadSnapshot") + role := data.RoleName("snapshot") + size := c.local.Timestamp.Signed.Meta[role].Length + expectedSha256, ok := c.local.Timestamp.Signed.Meta[role].Hashes["sha256"] + if !ok { + return fmt.Errorf("Sha256 is currently the only hash supported by this client. No Sha256 found for snapshot") + } + + var download bool + old := &data.Signed{} + version := 0 + raw, err := c.cache.GetMeta(role, size) + if raw == nil || err != nil { + logrus.Debug("no snapshot in cache, must download") + download = true + } else { + // file may have been tampered with on disk. Always check the hash! + genHash := sha256.Sum256(raw) + if !bytes.Equal(genHash[:], expectedSha256) { + logrus.Debug("hash of snapshot in cache did not match expected hash, must download") + download = true + } + err := json.Unmarshal(raw, old) + if err == nil { + snap, err := data.TimestampFromSigned(old) + if err == nil { + version = snap.Signed.Version + } else { + logrus.Debug("Could not parse Signed part of snapshot, must download") + download = true + } + } else { + logrus.Debug("Could not parse snapshot, must download") + download = true + } + } + var s *data.Signed + if download { + logrus.Debug("downloading new snapshot") + raw, err = c.remote.GetMeta(role, size) + if err != nil { + return err + } + genHash := sha256.Sum256(raw) + if !bytes.Equal(genHash[:], expectedSha256) { + return fmt.Errorf("Retrieved snapshot did not verify against hash in timestamp.") + } + s = &data.Signed{} + err = json.Unmarshal(raw, s) + if err != nil { + return err + } + } else { + logrus.Debug("using cached snapshot") + s = old + } + + err = signed.Verify(s, role, version, c.keysDB) + if err != nil { + return err + } + logrus.Debug("successfully verified snapshot") + c.local.SetSnapshot(s) + if download { + err = c.cache.SetMeta(role, raw) + if err != nil { + logrus.Errorf("Failed to write snapshot to local cache: %s", err.Error()) + } + } + return nil +} + +// downloadTargets is responsible for downloading any targets file +// including delegates roles. 
It will download the whole tree of +// delegated roles below the given one +func (c *Client) downloadTargets(role string) error { + role = data.RoleName(role) // this will really only do something for base targets role + snap := c.local.Snapshot.Signed + root := c.local.Root.Signed + r := c.keysDB.GetRole(role) + if r == nil { + return fmt.Errorf("Invalid role: %s", role) + } + keyIDs := r.KeyIDs + s, err := c.GetTargetsFile(role, keyIDs, snap.Meta, root.ConsistentSnapshot, r.Threshold) + if err != nil { + logrus.Error("Error getting targets file:", err) + return err + } + err = c.local.SetTargets(role, s) + if err != nil { + return err + } + + return nil +} + +func (c Client) GetTargetsFile(role string, keyIDs []string, snapshotMeta data.Files, consistent bool, threshold int) (*data.Signed, error) { + // require role exists in snapshots + roleMeta, ok := snapshotMeta[role] + if !ok { + return nil, fmt.Errorf("Snapshot does not contain target role") + } + expectedSha256, ok := snapshotMeta[role].Hashes["sha256"] + if !ok { + return nil, fmt.Errorf("Sha256 is currently the only hash supported by this client. No Sha256 found for targets role %s", role) + } + + // try to get meta file from content addressed cache + var download bool + old := &data.Signed{} + version := 0 + raw, err := c.cache.GetMeta(role, roleMeta.Length) + if err != nil || raw == nil { + logrus.Debugf("Couldn't not find cached %s, must download", role) + download = true + } else { + // file may have been tampered with on disk. Always check the hash! + genHash := sha256.Sum256(raw) + if !bytes.Equal(genHash[:], expectedSha256) { + download = true + } + err := json.Unmarshal(raw, old) + if err == nil { + targ, err := data.TargetsFromSigned(old) + if err == nil { + version = targ.Signed.Version + } else { + download = true + } + } else { + download = true + } + + } + + var s *data.Signed + if download { + rolePath, err := c.RoleTargetsPath(role, hex.EncodeToString(expectedSha256), consistent) + if err != nil { + return nil, err + } + raw, err = c.remote.GetMeta(rolePath, snapshotMeta[role].Length) + if err != nil { + return nil, err + } + s = &data.Signed{} + err = json.Unmarshal(raw, s) + if err != nil { + logrus.Error("Error unmarshalling targets file:", err) + return nil, err + } + } else { + logrus.Debug("using cached ", role) + s = old + } + + err = signed.Verify(s, role, version, c.keysDB) + if err != nil { + return nil, err + } + logrus.Debugf("successfully verified %s", role) + if download { + // if we error when setting meta, we should continue. + err = c.cache.SetMeta(role, raw) + if err != nil { + logrus.Errorf("Failed to write snapshot to local cache: %s", err.Error()) + } + } + return s, nil +} + +// RoleTargetsPath generates the appropriate filename for the targets file, +// based on whether the repo is marked as consistent. 
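Putting the pieces of this client together (illustrative sketch, not part of the vendored file): the typical flow is Update, then TargetMeta to resolve a path, then DownloadTarget to fetch and validate the blob. The repo, remote store, key DB and cache are assumed to have been constructed elsewhere.

package example

import (
	"fmt"
	"io"

	tuf "github.com/endophage/gotuf"
	"github.com/endophage/gotuf/client"
	"github.com/endophage/gotuf/keys"
	"github.com/endophage/gotuf/store"
)

// fetchTarget updates the TUF metadata, resolves targetPath, and streams the
// corresponding target into dst.
func fetchTarget(local *tuf.TufRepo, remote store.RemoteStore, keysDB *keys.KeyDB,
	cache store.MetadataStore, targetPath string, dst io.Writer) error {

	c := client.NewClient(local, remote, keysDB, cache)

	// Update pulls timestamp, snapshot, root and targets metadata, retrying once
	// with a freshly downloaded root on recoverable verification errors.
	if err := c.Update(); err != nil {
		return err
	}

	// TargetMeta walks the targets role and any delegations until it finds the path.
	meta, err := c.TargetMeta(targetPath)
	if err != nil {
		return err
	}
	if meta == nil {
		return fmt.Errorf("no TUF metadata found for %s", targetPath)
	}

	// DownloadTarget streams the blob into dst while checking its length and hashes.
	return c.DownloadTarget(dst, targetPath, meta)
}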
+func (c Client) RoleTargetsPath(role string, hashSha256 string, consistent bool) (string, error) { + if consistent { + dir := filepath.Dir(role) + if strings.Contains(role, "/") { + lastSlashIdx := strings.LastIndex(role, "/") + role = role[lastSlashIdx+1:] + } + role = path.Join( + dir, + fmt.Sprintf("%s.%s.json", hashSha256, role), + ) + } + return role, nil +} + +// TargetMeta ensures the repo is up to date, downloading the minimum +// necessary metadata files +func (c Client) TargetMeta(path string) (*data.FileMeta, error) { + c.Update() + var meta *data.FileMeta + + pathDigest := sha256.Sum256([]byte(path)) + pathHex := hex.EncodeToString(pathDigest[:]) + + // FIFO list of targets delegations to inspect for target + roles := []string{data.ValidRoles["targets"]} + var role string + for len(roles) > 0 { + // have to do these lines here because of order of execution in for statement + role = roles[0] + roles = roles[1:] + + // Download the target role file if necessary + err := c.downloadTargets(role) + if err != nil { + // as long as we find a valid target somewhere we're happy. + // continue and search other delegated roles if any + continue + } + + meta = c.local.TargetMeta(role, path) + if meta != nil { + // we found the target! + return meta, nil + } + delegations := c.local.TargetDelegations(role, path, pathHex) + for _, d := range delegations { + roles = append(roles, d.Name) + } + } + return meta, nil +} + +func (c Client) DownloadTarget(dst io.Writer, path string, meta *data.FileMeta) error { + reader, err := c.remote.GetTarget(path) + if err != nil { + return err + } + defer reader.Close() + r := io.TeeReader( + io.LimitReader(reader, meta.Length), + dst, + ) + err = utils.ValidateTarget(r, meta) + return err +} diff --git a/vendor/src/github.com/endophage/gotuf/client/errors.go b/vendor/src/github.com/endophage/gotuf/client/errors.go new file mode 100644 index 0000000000..92df3e2deb --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/client/errors.go @@ -0,0 +1,106 @@ +package client + +import ( + "errors" + "fmt" +) + +var ( + ErrNoRootKeys = errors.New("tuf: no root keys found in local meta store") + ErrInsufficientKeys = errors.New("tuf: insufficient keys to meet threshold") +) + +type ErrMissingRemoteMetadata struct { + Name string +} + +func (e ErrMissingRemoteMetadata) Error() string { + return fmt.Sprintf("tuf: missing remote metadata %s", e.Name) +} + +type ErrDownloadFailed struct { + File string + Err error +} + +func (e ErrDownloadFailed) Error() string { + return fmt.Sprintf("tuf: failed to download %s: %s", e.File, e.Err) +} + +type ErrDecodeFailed struct { + File string + Err error +} + +func (e ErrDecodeFailed) Error() string { + return fmt.Sprintf("tuf: failed to decode %s: %s", e.File, e.Err) +} + +func isDecodeFailedWithErr(err, expected error) bool { + e, ok := err.(ErrDecodeFailed) + if !ok { + return false + } + return e.Err == expected +} + +type ErrNotFound struct { + File string +} + +func (e ErrNotFound) Error() string { + return fmt.Sprintf("tuf: file not found: %s", e.File) +} + +func IsNotFound(err error) bool { + _, ok := err.(ErrNotFound) + return ok +} + +type ErrWrongSize struct { + File string + Actual int64 + Expected int64 +} + +func (e ErrWrongSize) Error() string { + return fmt.Sprintf("tuf: unexpected file size: %s (expected %d bytes, got %d bytes)", e.File, e.Expected, e.Actual) +} + +type ErrLatestSnapshot struct { + Version int +} + +func (e ErrLatestSnapshot) Error() string { + return fmt.Sprintf("tuf: the local snapshot version 
(%d) is the latest", e.Version) +} + +func IsLatestSnapshot(err error) bool { + _, ok := err.(ErrLatestSnapshot) + return ok +} + +type ErrUnknownTarget struct { + Name string +} + +func (e ErrUnknownTarget) Error() string { + return fmt.Sprintf("tuf: unknown target file: %s", e.Name) +} + +type ErrMetaTooLarge struct { + Name string + Size int64 +} + +func (e ErrMetaTooLarge) Error() string { + return fmt.Sprintf("tuf: %s size %d bytes greater than maximum", e.Name, e.Size) +} + +type ErrInvalidURL struct { + URL string +} + +func (e ErrInvalidURL) Error() string { + return fmt.Sprintf("tuf: invalid repository URL %s", e.URL) +} diff --git a/vendor/src/github.com/endophage/gotuf/data/keys.go b/vendor/src/github.com/endophage/gotuf/data/keys.go new file mode 100644 index 0000000000..118617ce24 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/data/keys.go @@ -0,0 +1,96 @@ +package data + +import ( + "crypto/sha256" + "encoding/hex" + + "github.com/Sirupsen/logrus" + cjson "github.com/tent/canonical-json-go" +) + +type Key interface { + ID() string + Algorithm() KeyAlgorithm + Public() []byte +} + +type PublicKey interface { + Key +} + +type PrivateKey interface { + Key + + Private() []byte +} + +type KeyPair struct { + Public []byte `json:"public"` + Private []byte `json:"private"` +} + +// TUFKey is the structure used for both public and private keys in TUF. +// Normally it would make sense to use a different structures for public and +// private keys, but that would change the key ID algorithm (since the canonical +// JSON would be different). This structure should normally be accessed through +// the PublicKey or PrivateKey interfaces. +type TUFKey struct { + id string `json:"-"` + Type KeyAlgorithm `json:"keytype"` + Value KeyPair `json:"keyval"` +} + +func NewPrivateKey(algorithm KeyAlgorithm, public, private []byte) *TUFKey { + return &TUFKey{ + Type: algorithm, + Value: KeyPair{ + Public: public, + Private: private, + }, + } +} + +func (k TUFKey) Algorithm() KeyAlgorithm { + return k.Type +} + +func (k *TUFKey) ID() string { + if k.id == "" { + pubK := NewPublicKey(k.Algorithm(), k.Public()) + data, err := cjson.Marshal(&pubK) + if err != nil { + logrus.Error("Error generating key ID:", err) + } + digest := sha256.Sum256(data) + k.id = hex.EncodeToString(digest[:]) + } + return k.id +} + +func (k TUFKey) Public() []byte { + return k.Value.Public +} + +func (k *TUFKey) Private() []byte { + return k.Value.Private +} + +func NewPublicKey(algorithm KeyAlgorithm, public []byte) PublicKey { + return &TUFKey{ + Type: algorithm, + Value: KeyPair{ + Public: public, + Private: nil, + }, + } +} + +func PublicKeyFromPrivate(pk PrivateKey) PublicKey { + return &TUFKey{ + Type: pk.Algorithm(), + Value: KeyPair{ + Public: pk.Public(), + Private: nil, + }, + } +} diff --git a/vendor/src/github.com/endophage/gotuf/data/roles.go b/vendor/src/github.com/endophage/gotuf/data/roles.go new file mode 100644 index 0000000000..d77529bb04 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/data/roles.go @@ -0,0 +1,117 @@ +package data + +import ( + "fmt" + "strings" + + "github.com/endophage/gotuf/errors" +) + +var ValidRoles = map[string]string{ + "root": "root", + "targets": "targets", + "snapshot": "snapshot", + "timestamp": "timestamp", +} + +func SetValidRoles(rs map[string]string) { + for k, v := range rs { + ValidRoles[strings.ToLower(k)] = strings.ToLower(v) + } +} + +func RoleName(role string) string { + if r, ok := ValidRoles[role]; ok { + return r + } + return role +} + +// ValidRole 
only determines the name is semantically +// correct. For target delegated roles, it does NOT check +// the the appropriate parent roles exist. +func ValidRole(name string) bool { + name = strings.ToLower(name) + if v, ok := ValidRoles[name]; ok { + return name == v + } + targetsBase := fmt.Sprintf("%s/", ValidRoles["targets"]) + if strings.HasPrefix(name, targetsBase) { + return true + } + for _, v := range ValidRoles { + if name == v { + return true + } + } + return false +} + +type RootRole struct { + KeyIDs []string `json:"keyids"` + Threshold int `json:"threshold"` +} +type Role struct { + RootRole + Name string `json:"name"` + Paths []string `json:"paths,omitempty"` + PathHashPrefixes []string `json:"path_hash_prefixes,omitempty"` +} + +func NewRole(name string, threshold int, keyIDs, paths, pathHashPrefixes []string) (*Role, error) { + if len(paths) > 0 && len(pathHashPrefixes) > 0 { + return nil, errors.ErrInvalidRole{} + } + if threshold < 1 { + return nil, errors.ErrInvalidRole{} + } + if !ValidRole(name) { + return nil, errors.ErrInvalidRole{} + } + return &Role{ + RootRole: RootRole{ + KeyIDs: keyIDs, + Threshold: threshold, + }, + Name: name, + Paths: paths, + PathHashPrefixes: pathHashPrefixes, + }, nil + +} + +func (r Role) IsValid() bool { + return !(len(r.Paths) > 0 && len(r.PathHashPrefixes) > 0) +} + +func (r Role) ValidKey(id string) bool { + for _, key := range r.KeyIDs { + if key == id { + return true + } + } + return false +} + +func (r Role) CheckPaths(path string) bool { + for _, p := range r.Paths { + if strings.HasPrefix(path, p) { + return true + } + } + return false +} + +func (r Role) CheckPrefixes(hash string) bool { + for _, p := range r.PathHashPrefixes { + if strings.HasPrefix(hash, p) { + return true + } + } + return false +} + +func (r Role) IsDelegation() bool { + targetsBase := fmt.Sprintf("%s/", ValidRoles["targets"]) + return strings.HasPrefix(r.Name, targetsBase) +} diff --git a/vendor/src/github.com/endophage/gotuf/data/root.go b/vendor/src/github.com/endophage/gotuf/data/root.go new file mode 100644 index 0000000000..b290322d44 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/data/root.go @@ -0,0 +1,90 @@ +package data + +import ( + "encoding/json" + "time" + + cjson "github.com/tent/canonical-json-go" +) + +type SignedRoot struct { + Signatures []Signature + Signed Root + Dirty bool +} + +type Root struct { + Type string `json:"_type"` + Version int `json:"version"` + Expires time.Time `json:"expires"` + // These keys are public keys. We use TUFKey instead of PublicKey to + // support direct JSON unmarshaling. + Keys map[string]*TUFKey `json:"keys"` + Roles map[string]*RootRole `json:"roles"` + ConsistentSnapshot bool `json:"consistent_snapshot"` +} + +func NewRoot(keys map[string]PublicKey, roles map[string]*RootRole, consistent bool) (*SignedRoot, error) { + signedRoot := &SignedRoot{ + Signatures: make([]Signature, 0), + Signed: Root{ + Type: TUFTypes["root"], + Version: 0, + Expires: DefaultExpires("root"), + Keys: make(map[string]*TUFKey), + Roles: roles, + ConsistentSnapshot: consistent, + }, + Dirty: true, + } + + // Convert PublicKeys to TUFKey structures + // The Signed.Keys map needs to have *TUFKey values, since this + // structure gets directly unmarshalled from JSON, and it's not + // possible to unmarshal into an interface type. But this function + // takes a map with PublicKey values to avoid exposing this ugliness. + // The loop below converts to the TUFKey type. 
+ for k, v := range keys { + signedRoot.Signed.Keys[k] = &TUFKey{ + Type: v.Algorithm(), + Value: KeyPair{ + Public: v.Public(), + Private: nil, + }, + } + } + + return signedRoot, nil +} + +func (r SignedRoot) ToSigned() (*Signed, error) { + s, err := cjson.Marshal(r.Signed) + if err != nil { + return nil, err + } + signed := json.RawMessage{} + err = signed.UnmarshalJSON(s) + if err != nil { + return nil, err + } + sigs := make([]Signature, len(r.Signatures)) + copy(sigs, r.Signatures) + return &Signed{ + Signatures: sigs, + Signed: signed, + }, nil +} + +func RootFromSigned(s *Signed) (*SignedRoot, error) { + r := Root{} + err := json.Unmarshal(s.Signed, &r) + if err != nil { + return nil, err + } + sigs := make([]Signature, len(s.Signatures)) + copy(sigs, s.Signatures) + return &SignedRoot{ + Signatures: sigs, + Signed: r, + }, nil +} diff --git a/vendor/src/github.com/endophage/gotuf/data/snapshot.go b/vendor/src/github.com/endophage/gotuf/data/snapshot.go new file mode 100644 index 0000000000..1a88915eae --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/data/snapshot.go @@ -0,0 +1,98 @@ +package data + +import ( + "bytes" + "encoding/json" + "time" + + "github.com/Sirupsen/logrus" + cjson "github.com/tent/canonical-json-go" +) + +type SignedSnapshot struct { + Signatures []Signature + Signed Snapshot + Dirty bool +} + +type Snapshot struct { + Type string `json:"_type"` + Version int `json:"version"` + Expires time.Time `json:"expires"` + Meta Files `json:"meta"` +} + +func NewSnapshot(root *Signed, targets *Signed) (*SignedSnapshot, error) { + logrus.Debug("generating new snapshot...") + targetsJSON, err := json.Marshal(targets) + if err != nil { + logrus.Debug("Error Marshalling Targets") + return nil, err + } + rootJSON, err := json.Marshal(root) + if err != nil { + logrus.Debug("Error Marshalling Root") + return nil, err + } + rootMeta, err := NewFileMeta(bytes.NewReader(rootJSON), "sha256") + if err != nil { + return nil, err + } + targetsMeta, err := NewFileMeta(bytes.NewReader(targetsJSON), "sha256") + if err != nil { + return nil, err + } + return &SignedSnapshot{ + Signatures: make([]Signature, 0), + Signed: Snapshot{ + Type: TUFTypes["snapshot"], + Version: 0, + Expires: DefaultExpires("snapshot"), + Meta: Files{ + ValidRoles["root"]: rootMeta, + ValidRoles["targets"]: targetsMeta, + }, + }, + }, nil +} + +func (sp *SignedSnapshot) hashForRole(role string) []byte { + return sp.Signed.Meta[role].Hashes["sha256"] +} + +func (sp SignedSnapshot) ToSigned() (*Signed, error) { + s, err := cjson.Marshal(sp.Signed) + if err != nil { + return nil, err + } + signed := json.RawMessage{} + err = signed.UnmarshalJSON(s) + if err != nil { + return nil, err + } + sigs := make([]Signature, len(sp.Signatures)) + copy(sigs, sp.Signatures) + return &Signed{ + Signatures: sigs, + Signed: signed, + }, nil +} + +func (sp *SignedSnapshot) AddMeta(role string, meta FileMeta) { + sp.Signed.Meta[role] = meta + sp.Dirty = true +} + +func SnapshotFromSigned(s *Signed) (*SignedSnapshot, error) { + sp := Snapshot{} + err := json.Unmarshal(s.Signed, &sp) + if err != nil { + return nil, err + } + sigs := make([]Signature, len(s.Signatures)) + copy(sigs, s.Signatures) + return &SignedSnapshot{ + Signatures: sigs, + Signed: sp, + }, nil +} diff --git a/vendor/src/github.com/endophage/gotuf/data/targets.go b/vendor/src/github.com/endophage/gotuf/data/targets.go new file mode 100644 index 0000000000..74c2c0b176 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/data/targets.go @@ -0,0 +1,118 @@ 
+package data + +import ( + "crypto/sha256" + "encoding/hex" + "encoding/json" + "time" + + cjson "github.com/tent/canonical-json-go" +) + +type SignedTargets struct { + Signatures []Signature + Signed Targets + Dirty bool +} + +type Targets struct { + Type string `json:"_type"` + Version int `json:"version"` + Expires time.Time `json:"expires"` + Targets Files `json:"targets"` + Delegations Delegations `json:"delegations,omitempty"` +} + +func NewTargets() *SignedTargets { + return &SignedTargets{ + Signatures: make([]Signature, 0), + Signed: Targets{ + Type: TUFTypes["targets"], + Version: 0, + Expires: DefaultExpires("targets"), + Targets: make(Files), + Delegations: *NewDelegations(), + }, + Dirty: true, + } +} + +// GetMeta attempts to find the targets entry for the path. It +// will return nil in the case of the target not being found. +func (t SignedTargets) GetMeta(path string) *FileMeta { + for p, meta := range t.Signed.Targets { + if p == path { + return &meta + } + } + return nil +} + +// GetDelegations filters the roles and associated keys that may be +// the signers for the given target path. If no appropriate roles +// can be found, it will simply return nil for the return values. +// The returned slice of Role will have order maintained relative +// to the role slice on Delegations per TUF spec proposal on using +// order to determine priority. +func (t SignedTargets) GetDelegations(path string) []*Role { + roles := make([]*Role, 0) + pathHashBytes := sha256.Sum256([]byte(path)) + pathHash := hex.EncodeToString(pathHashBytes[:]) + for _, r := range t.Signed.Delegations.Roles { + if !r.IsValid() { + // Role has both Paths and PathHashPrefixes. + continue + } + if r.CheckPaths(path) { + roles = append(roles, r) + continue + } + if r.CheckPrefixes(pathHash) { + roles = append(roles, r) + continue + } + //keysDB.AddRole(r) + } + return roles +} + +func (t *SignedTargets) AddTarget(path string, meta FileMeta) { + t.Signed.Targets[path] = meta + t.Dirty = true +} + +func (t *SignedTargets) AddDelegation(role *Role, keys []*PublicKey) error { + return nil +} + +func (t SignedTargets) ToSigned() (*Signed, error) { + s, err := cjson.Marshal(t.Signed) + if err != nil { + return nil, err + } + signed := json.RawMessage{} + err = signed.UnmarshalJSON(s) + if err != nil { + return nil, err + } + sigs := make([]Signature, len(t.Signatures)) + copy(sigs, t.Signatures) + return &Signed{ + Signatures: sigs, + Signed: signed, + }, nil +} + +func TargetsFromSigned(s *Signed) (*SignedTargets, error) { + t := Targets{} + err := json.Unmarshal(s.Signed, &t) + if err != nil { + return nil, err + } + sigs := make([]Signature, len(s.Signatures)) + copy(sigs, s.Signatures) + return &SignedTargets{ + Signatures: sigs, + Signed: t, + }, nil +} diff --git a/vendor/src/github.com/endophage/gotuf/data/timestamp.go b/vendor/src/github.com/endophage/gotuf/data/timestamp.go new file mode 100644 index 0000000000..3a26df1b3b --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/data/timestamp.go @@ -0,0 +1,76 @@ +package data + +import ( + "bytes" + "encoding/json" + "time" + + cjson "github.com/tent/canonical-json-go" +) + +type SignedTimestamp struct { + Signatures []Signature + Signed Timestamp + Dirty bool +} + +type Timestamp struct { + Type string `json:"_type"` + Version int `json:"version"` + Expires time.Time `json:"expires"` + Meta Files `json:"meta"` +} + +func NewTimestamp(snapshot *Signed) (*SignedTimestamp, error) { + snapshotJSON, err := json.Marshal(snapshot) + if err != nil { + return nil, 
err + } + snapshotMeta, err := NewFileMeta(bytes.NewReader(snapshotJSON), "sha256") + if err != nil { + return nil, err + } + return &SignedTimestamp{ + Signatures: make([]Signature, 0), + Signed: Timestamp{ + Type: TUFTypes["timestamp"], + Version: 0, + Expires: DefaultExpires("timestamp"), + Meta: Files{ + ValidRoles["snapshot"]: snapshotMeta, + }, + }, + }, nil +} + +func (ts SignedTimestamp) ToSigned() (*Signed, error) { + s, err := cjson.Marshal(ts.Signed) + if err != nil { + return nil, err + } + signed := json.RawMessage{} + err = signed.UnmarshalJSON(s) + if err != nil { + return nil, err + } + sigs := make([]Signature, len(ts.Signatures)) + copy(sigs, ts.Signatures) + return &Signed{ + Signatures: sigs, + Signed: signed, + }, nil +} + +func TimestampFromSigned(s *Signed) (*SignedTimestamp, error) { + ts := Timestamp{} + err := json.Unmarshal(s.Signed, &ts) + if err != nil { + return nil, err + } + sigs := make([]Signature, len(s.Signatures)) + copy(sigs, s.Signatures) + return &SignedTimestamp{ + Signatures: sigs, + Signed: ts, + }, nil +} diff --git a/vendor/src/github.com/endophage/gotuf/data/types.go b/vendor/src/github.com/endophage/gotuf/data/types.go new file mode 100644 index 0000000000..9d4667165c --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/data/types.go @@ -0,0 +1,177 @@ +package data + +import ( + "crypto/sha256" + "crypto/sha512" + "encoding/json" + "fmt" + "hash" + "io" + "io/ioutil" + "strings" + "time" + + "github.com/Sirupsen/logrus" +) + +type KeyAlgorithm string + +func (k KeyAlgorithm) String() string { + return string(k) +} + +type SigAlgorithm string + +func (k SigAlgorithm) String() string { + return string(k) +} + +const ( + defaultHashAlgorithm = "sha256" + + EDDSASignature SigAlgorithm = "eddsa" + RSAPSSSignature SigAlgorithm = "rsapss" + RSAPKCS1v15Signature SigAlgorithm = "rsapkcs1v15" + ECDSASignature SigAlgorithm = "ecdsa" + PyCryptoSignature SigAlgorithm = "pycrypto-pkcs#1 pss" + + ED25519Key KeyAlgorithm = "ed25519" + RSAKey KeyAlgorithm = "rsa" + RSAx509Key KeyAlgorithm = "rsa-x509" + ECDSAKey KeyAlgorithm = "ecdsa" + ECDSAx509Key KeyAlgorithm = "ecdsa-x509" +) + +var TUFTypes = map[string]string{ + "targets": "Targets", + "root": "Root", + "snapshot": "Snapshot", + "timestamp": "Timestamp", +} + +// SetTUFTypes allows one to override some or all of the default +// type names in TUF. +func SetTUFTypes(ts map[string]string) { + for k, v := range ts { + TUFTypes[k] = v + } +} + +// Checks if type is correct. +func ValidTUFType(t string) bool { + // most people will just use the defaults so have this optimal check + // first. Do comparison just in case there is some unknown vulnerability + // if a key and value in the map differ. + if v, ok := TUFTypes[t]; ok { + return t == v + } + // For people that feel the need to change the default type names. 
+ for _, v := range TUFTypes { + if t == v { + return true + } + } + return false +} + +type Signed struct { + Signed json.RawMessage `json:"signed"` + Signatures []Signature `json:"signatures"` +} + +type Signature struct { + KeyID string `json:"keyid"` + Method SigAlgorithm `json:"method"` + Signature []byte `json:"sig"` +} + +type Files map[string]FileMeta + +type Hashes map[string][]byte + +type FileMeta struct { + Length int64 `json:"length"` + Hashes Hashes `json:"hashes"` + Custom json.RawMessage `json:"custom,omitempty"` +} + +func NewFileMeta(r io.Reader, hashAlgorithms ...string) (FileMeta, error) { + if len(hashAlgorithms) == 0 { + hashAlgorithms = []string{defaultHashAlgorithm} + } + hashes := make(map[string]hash.Hash, len(hashAlgorithms)) + for _, hashAlgorithm := range hashAlgorithms { + var h hash.Hash + switch hashAlgorithm { + case "sha256": + h = sha256.New() + case "sha512": + h = sha512.New() + default: + return FileMeta{}, fmt.Errorf("Unknown Hash Algorithm: %s", hashAlgorithm) + } + hashes[hashAlgorithm] = h + r = io.TeeReader(r, h) + } + n, err := io.Copy(ioutil.Discard, r) + if err != nil { + return FileMeta{}, err + } + m := FileMeta{Length: n, Hashes: make(Hashes, len(hashes))} + for hashAlgorithm, h := range hashes { + m.Hashes[hashAlgorithm] = h.Sum(nil) + } + return m, nil +} + +type Delegations struct { + Keys map[string]PublicKey `json:"keys"` + Roles []*Role `json:"roles"` +} + +func NewDelegations() *Delegations { + return &Delegations{ + Keys: make(map[string]PublicKey), + Roles: make([]*Role, 0), + } +} + +// defines number of days in which something should expire +var defaultExpiryTimes = map[string]int{ + "root": 365, + "targets": 90, + "snapshot": 7, + "timestamp": 1, +} + +// SetDefaultExpiryTimes allows one to change the default expiries. 
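As a quick illustration of the override hook documented below — a sketch only; the three-day snapshot value is an arbitrary example, not a default of this package:

	// Shorten snapshot expiry; unknown role names are logged and skipped.
	data.SetDefaultExpiryTimes(map[string]int{"snapshot": 3})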
+func SetDefaultExpiryTimes(times map[string]int) { + for key, value := range times { + if _, ok := defaultExpiryTimes[key]; !ok { + logrus.Errorf("Attempted to set default expiry for an unknown role: %s", key) + continue + } + defaultExpiryTimes[key] = value + } +} + +func DefaultExpires(role string) time.Time { + var t time.Time + if t, ok := defaultExpiryTimes[role]; ok { + return time.Now().AddDate(0, 0, t) + } + return t.UTC().Round(time.Second) +} + +type unmarshalledSignature Signature + +func (s *Signature) UnmarshalJSON(data []byte) error { + uSignature := unmarshalledSignature{} + err := json.Unmarshal(data, &uSignature) + if err != nil { + return err + } + uSignature.Method = SigAlgorithm(strings.ToLower(string(uSignature.Method))) + *s = Signature(uSignature) + return nil +} diff --git a/vendor/src/github.com/endophage/gotuf/errors/errors.go b/vendor/src/github.com/endophage/gotuf/errors/errors.go new file mode 100644 index 0000000000..b5bb40a7a8 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/errors/errors.go @@ -0,0 +1,85 @@ +package errors + +import ( + "errors" + "fmt" + "time" +) + +var ErrInitNotAllowed = errors.New("tuf: repository already initialized") + +type ErrMissingMetadata struct { + Name string +} + +func (e ErrMissingMetadata) Error() string { + return fmt.Sprintf("tuf: missing metadata %s", e.Name) +} + +type ErrFileNotFound struct { + Path string +} + +func (e ErrFileNotFound) Error() string { + return fmt.Sprintf("tuf: file not found %s", e.Path) +} + +type ErrInsufficientKeys struct { + Name string +} + +func (e ErrInsufficientKeys) Error() string { + return fmt.Sprintf("tuf: insufficient keys to sign %s", e.Name) +} + +type ErrInsufficientSignatures struct { + Name string + Err error +} + +func (e ErrInsufficientSignatures) Error() string { + return fmt.Sprintf("tuf: insufficient signatures for %s: %s", e.Name, e.Err) +} + +type ErrInvalidRole struct { + Role string +} + +func (e ErrInvalidRole) Error() string { + return fmt.Sprintf("tuf: invalid role %s", e.Role) +} + +type ErrInvalidExpires struct { + Expires time.Time +} + +func (e ErrInvalidExpires) Error() string { + return fmt.Sprintf("tuf: invalid expires: %s", e.Expires) +} + +type ErrKeyNotFound struct { + Role string + KeyID string +} + +func (e ErrKeyNotFound) Error() string { + return fmt.Sprintf(`tuf: no key with id "%s" exists for the %s role`, e.KeyID, e.Role) +} + +type ErrNotEnoughKeys struct { + Role string + Keys int + Threshold int +} + +func (e ErrNotEnoughKeys) Error() string { + return fmt.Sprintf("tuf: %s role has insufficient keys for threshold (has %d keys, threshold is %d)", e.Role, e.Keys, e.Threshold) +} + +type ErrPassphraseRequired struct { + Role string +} + +func (e ErrPassphraseRequired) Error() string { + return fmt.Sprintf("tuf: a passphrase is required to access the encrypted %s keys file", e.Role) +} diff --git a/vendor/src/github.com/endophage/gotuf/keys/db.go b/vendor/src/github.com/endophage/gotuf/keys/db.go new file mode 100644 index 0000000000..9ac5c96e84 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/keys/db.go @@ -0,0 +1,60 @@ +package keys + +import ( + "errors" + + "github.com/endophage/gotuf/data" +) + +var ( + ErrWrongType = errors.New("tuf: invalid key type") + ErrExists = errors.New("tuf: key already in db") + ErrWrongID = errors.New("tuf: key id mismatch") + ErrInvalidKey = errors.New("tuf: invalid key") + ErrInvalidRole = errors.New("tuf: invalid role") + ErrInvalidKeyID = errors.New("tuf: invalid key id") + ErrInvalidThreshold = 
errors.New("tuf: invalid role threshold") +) + +type KeyDB struct { + roles map[string]*data.Role + keys map[string]data.PublicKey +} + +func NewDB() *KeyDB { + return &KeyDB{ + roles: make(map[string]*data.Role), + keys: make(map[string]data.PublicKey), + } +} + +func (db *KeyDB) AddKey(k data.PublicKey) { + db.keys[k.ID()] = k +} + +func (db *KeyDB) AddRole(r *data.Role) error { + if !data.ValidRole(r.Name) { + return ErrInvalidRole + } + if r.Threshold < 1 { + return ErrInvalidThreshold + } + + // validate all key ids are in the keys maps + for _, id := range r.KeyIDs { + if _, ok := db.keys[id]; !ok { + return ErrInvalidKeyID + } + } + + db.roles[r.Name] = r + return nil +} + +func (db *KeyDB) GetKey(id string) data.PublicKey { + return db.keys[id] +} + +func (db *KeyDB) GetRole(name string) *data.Role { + return db.roles[name] +} diff --git a/vendor/src/github.com/endophage/gotuf/signed/ed25519.go b/vendor/src/github.com/endophage/gotuf/signed/ed25519.go new file mode 100644 index 0000000000..ca85748686 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/signed/ed25519.go @@ -0,0 +1,75 @@ +package signed + +import ( + "crypto/rand" + "errors" + + "github.com/agl/ed25519" + "github.com/endophage/gotuf/data" +) + +// Ed25519 implements a simple in memory cryptosystem for ED25519 keys +type Ed25519 struct { + keys map[string]data.PrivateKey +} + +func NewEd25519() *Ed25519 { + return &Ed25519{ + make(map[string]data.PrivateKey), + } +} + +// addKey allows you to add a private key +func (e *Ed25519) addKey(k data.PrivateKey) { + e.keys[k.ID()] = k +} + +func (e *Ed25519) RemoveKey(keyID string) error { + delete(e.keys, keyID) + return nil +} + +func (e *Ed25519) Sign(keyIDs []string, toSign []byte) ([]data.Signature, error) { + signatures := make([]data.Signature, 0, len(keyIDs)) + for _, kID := range keyIDs { + priv := [ed25519.PrivateKeySize]byte{} + copy(priv[:], e.keys[kID].Private()) + sig := ed25519.Sign(&priv, toSign) + signatures = append(signatures, data.Signature{ + KeyID: kID, + Method: data.EDDSASignature, + Signature: sig[:], + }) + } + return signatures, nil + +} + +func (e *Ed25519) Create(role string, algorithm data.KeyAlgorithm) (data.PublicKey, error) { + if algorithm != data.ED25519Key { + return nil, errors.New("only ED25519 supported by this cryptoservice") + } + + pub, priv, err := ed25519.GenerateKey(rand.Reader) + if err != nil { + return nil, err + } + public := data.NewPublicKey(data.ED25519Key, pub[:]) + private := data.NewPrivateKey(data.ED25519Key, pub[:], priv[:]) + e.addKey(private) + return public, nil +} + +func (e *Ed25519) PublicKeys(keyIDs ...string) (map[string]data.PublicKey, error) { + k := make(map[string]data.PublicKey) + for _, kID := range keyIDs { + if key, ok := e.keys[kID]; ok { + k[kID] = data.PublicKeyFromPrivate(key) + } + } + return k, nil +} + +func (e *Ed25519) GetKey(keyID string) data.PublicKey { + return data.PublicKeyFromPrivate(e.keys[keyID]) +} diff --git a/vendor/src/github.com/endophage/gotuf/signed/errors.go b/vendor/src/github.com/endophage/gotuf/signed/errors.go new file mode 100644 index 0000000000..7aec7c7232 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/signed/errors.go @@ -0,0 +1,29 @@ +package signed + +import ( + "fmt" +) + +type ErrExpired struct { + Role string + Expired string +} + +func (e ErrExpired) Error() string { + return fmt.Sprintf("%s expired at %v", e.Role, e.Expired) +} + +type ErrLowVersion struct { + Actual int + Current int +} + +func (e ErrLowVersion) Error() string { + return 
fmt.Sprintf("version %d is lower than current version %d", e.Actual, e.Current) +} + +type ErrRoleThreshold struct{} + +func (e ErrRoleThreshold) Error() string { + return "valid signatures did not meet threshold" +} diff --git a/vendor/src/github.com/endophage/gotuf/signed/interface.go b/vendor/src/github.com/endophage/gotuf/signed/interface.go new file mode 100644 index 0000000000..7a46bdee26 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/signed/interface.go @@ -0,0 +1,44 @@ +package signed + +import ( + "github.com/endophage/gotuf/data" +) + +// SigningService defines the necessary functions to determine +// if a user is able to sign with a key, and to perform signing. +type SigningService interface { + // Sign takes a slice of keyIDs and a piece of data to sign + // and returns a slice of signatures and an error + Sign(keyIDs []string, data []byte) ([]data.Signature, error) +} + +// KeyService provides management of keys locally. It will never +// accept or provide private keys. Communication between the KeyService +// and a SigningService happen behind the Create function. +type KeyService interface { + // Create issues a new key pair and is responsible for loading + // the private key into the appropriate signing service. + // The role isn't currently used for anything, but it's here to support + // future features + Create(role string, algorithm data.KeyAlgorithm) (data.PublicKey, error) + + // GetKey retrieves the public key if present, otherwise it returns nil + GetKey(keyID string) data.PublicKey + + // RemoveKey deletes the specified key + RemoveKey(keyID string) error +} + +// CryptoService defines a unified Signing and Key Service as this +// will be most useful for most applications. +type CryptoService interface { + SigningService + KeyService +} + +// Verifier defines an interface for verfying signatures. An implementer +// of this interface should verify signatures for one and only one +// signing scheme. 
+type Verifier interface { + Verify(key data.PublicKey, sig []byte, msg []byte) error +} diff --git a/vendor/src/github.com/endophage/gotuf/signed/sign.go b/vendor/src/github.com/endophage/gotuf/signed/sign.go new file mode 100644 index 0000000000..8bab441009 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/signed/sign.go @@ -0,0 +1,43 @@ +package signed + +import ( + "fmt" + "github.com/Sirupsen/logrus" + "github.com/endophage/gotuf/data" + "github.com/endophage/gotuf/errors" + "strings" +) + +// Sign takes a data.Signed and a key, calculated and adds the signature +// to the data.Signed +func Sign(service CryptoService, s *data.Signed, keys ...data.PublicKey) error { + logrus.Debugf("sign called with %d keys", len(keys)) + signatures := make([]data.Signature, 0, len(s.Signatures)+1) + keyIDMemb := make(map[string]struct{}) + keyIDs := make([]string, 0, len(keys)) + + for _, key := range keys { + keyIDMemb[key.ID()] = struct{}{} + keyIDs = append(keyIDs, key.ID()) + } + logrus.Debugf("Generated list of signing IDs: %s", strings.Join(keyIDs, ", ")) + for _, sig := range s.Signatures { + if _, ok := keyIDMemb[sig.KeyID]; ok { + continue + } + signatures = append(signatures, sig) + } + newSigs, err := service.Sign(keyIDs, s.Signed) + if err != nil { + return err + } + if len(newSigs) < 1 { + return errors.ErrInsufficientSignatures{ + Name: fmt.Sprint("Cryptoservice failed to produce any signatures for keys with IDs: %s", strings.Join(keyIDs, ", ")), + Err: nil, + } + } + logrus.Debugf("appending %d new signatures", len(newSigs)) + s.Signatures = append(signatures, newSigs...) + return nil +} diff --git a/vendor/src/github.com/endophage/gotuf/signed/verifiers.go b/vendor/src/github.com/endophage/gotuf/signed/verifiers.go new file mode 100644 index 0000000000..fd919035dc --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/signed/verifiers.go @@ -0,0 +1,235 @@ +package signed + +import ( + "crypto" + "crypto/ecdsa" + "crypto/rsa" + "crypto/sha256" + "crypto/x509" + "encoding/pem" + "math/big" + "reflect" + + "github.com/Sirupsen/logrus" + "github.com/agl/ed25519" + "github.com/endophage/gotuf/data" +) + +// Verifiers serves as a map of all verifiers available on the system and +// can be injected into a verificationService. For testing and configuration +// purposes, it will not be used by default. +var Verifiers = map[data.SigAlgorithm]Verifier{ + data.RSAPSSSignature: RSAPSSVerifier{}, + data.RSAPKCS1v15Signature: RSAPKCS1v15Verifier{}, + data.PyCryptoSignature: RSAPyCryptoVerifier{}, + data.ECDSASignature: ECDSAVerifier{}, + data.EDDSASignature: Ed25519Verifier{}, +} + +// RegisterVerifier provides a convenience function for init() functions +// to register additional verifiers or replace existing ones. 
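For illustration, a hedged sketch of plugging a custom implementation into the hook described below. The nullVerifier type and the "null" method name are hypothetical, and the snippet assumes it sits alongside this package so the unqualified ErrInvalid and RegisterVerifier resolve:

	// nullVerifier is a hypothetical Verifier that rejects every signature;
	// it exists only to show the shape of a pluggable implementation.
	type nullVerifier struct{}

	func (nullVerifier) Verify(key data.PublicKey, sig []byte, msg []byte) error {
		return ErrInvalid
	}

	func init() {
		// Hypothetical registration of an extra signing method at start-up.
		RegisterVerifier(data.SigAlgorithm("null"), nullVerifier{})
	}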
+func RegisterVerifier(algorithm data.SigAlgorithm, v Verifier) { + curr, ok := Verifiers[algorithm] + if ok { + typOld := reflect.TypeOf(curr) + typNew := reflect.TypeOf(v) + logrus.Debugf( + "replacing already loaded verifier %s:%s with %s:%s", + typOld.PkgPath(), typOld.Name(), + typNew.PkgPath(), typNew.Name(), + ) + } else { + logrus.Debug("adding verifier for: ", algorithm) + } + Verifiers[algorithm] = v +} + +type Ed25519Verifier struct{} + +func (v Ed25519Verifier) Verify(key data.PublicKey, sig []byte, msg []byte) error { + var sigBytes [ed25519.SignatureSize]byte + if len(sig) != len(sigBytes) { + logrus.Infof("signature length is incorrect, must be %d, was %d.", ed25519.SignatureSize, len(sig)) + return ErrInvalid + } + copy(sigBytes[:], sig) + + var keyBytes [ed25519.PublicKeySize]byte + copy(keyBytes[:], key.Public()) + + if !ed25519.Verify(&keyBytes, msg, &sigBytes) { + logrus.Infof("failed ed25519 verification") + return ErrInvalid + } + return nil +} + +func verifyPSS(key interface{}, digest, sig []byte) error { + rsaPub, ok := key.(*rsa.PublicKey) + if !ok { + logrus.Infof("value was not an RSA public key") + return ErrInvalid + } + + opts := rsa.PSSOptions{SaltLength: sha256.Size, Hash: crypto.SHA256} + if err := rsa.VerifyPSS(rsaPub, crypto.SHA256, digest[:], sig, &opts); err != nil { + logrus.Infof("failed RSAPSS verification: %s", err) + return ErrInvalid + } + return nil +} + +func getRSAPubKey(key data.PublicKey) (crypto.PublicKey, error) { + algorithm := key.Algorithm() + var pubKey crypto.PublicKey + + switch algorithm { + case data.RSAx509Key: + pemCert, _ := pem.Decode([]byte(key.Public())) + if pemCert == nil { + logrus.Infof("failed to decode PEM-encoded x509 certificate") + return nil, ErrInvalid + } + cert, err := x509.ParseCertificate(pemCert.Bytes) + if err != nil { + logrus.Infof("failed to parse x509 certificate: %s\n", err) + return nil, ErrInvalid + } + pubKey = cert.PublicKey + case data.RSAKey: + var err error + pubKey, err = x509.ParsePKIXPublicKey(key.Public()) + if err != nil { + logrus.Infof("failed to parse public key: %s\n", err) + return nil, ErrInvalid + } + default: + logrus.Infof("invalid key type for RSAPSS verifier: %s", algorithm) + return nil, ErrInvalid + } + + return pubKey, nil +} + +// RSAPSSVerifier checks RSASSA-PSS signatures +type RSAPSSVerifier struct{} + +// Verify does the actual check. +func (v RSAPSSVerifier) Verify(key data.PublicKey, sig []byte, msg []byte) error { + pubKey, err := getRSAPubKey(key) + if err != nil { + return err + } + + digest := sha256.Sum256(msg) + + return verifyPSS(pubKey, digest[:], sig) +} + +// RSAPKCS1v15SVerifier checks RSA PKCS1v15 signatures +type RSAPKCS1v15Verifier struct{} + +func (v RSAPKCS1v15Verifier) Verify(key data.PublicKey, sig []byte, msg []byte) error { + pubKey, err := getRSAPubKey(key) + if err != nil { + return err + } + digest := sha256.Sum256(msg) + + rsaPub, ok := pubKey.(*rsa.PublicKey) + if !ok { + logrus.Infof("value was not an RSA public key") + return ErrInvalid + } + + if err = rsa.VerifyPKCS1v15(rsaPub, crypto.SHA256, digest[:], sig); err != nil { + logrus.Errorf("Failed verification: %s", err.Error()) + return ErrInvalid + } + return nil +} + +// RSAPSSVerifier checks RSASSA-PSS signatures +type RSAPyCryptoVerifier struct{} + +// Verify does the actual check. +// N.B. We have not been able to make this work in a way that is compatible +// with PyCrypto. 
+func (v RSAPyCryptoVerifier) Verify(key data.PublicKey, sig []byte, msg []byte) error { + digest := sha256.Sum256(msg) + + k, _ := pem.Decode([]byte(key.Public())) + if k == nil { + logrus.Infof("failed to decode PEM-encoded x509 certificate") + return ErrInvalid + } + + pub, err := x509.ParsePKIXPublicKey(k.Bytes) + if err != nil { + logrus.Infof("failed to parse public key: %s\n", err) + return ErrInvalid + } + + return verifyPSS(pub, digest[:], sig) +} + +// ECDSAVerifier checks ECDSA signatures, decoding the keyType appropriately +type ECDSAVerifier struct{} + +// Verify does the actual check. +func (v ECDSAVerifier) Verify(key data.PublicKey, sig []byte, msg []byte) error { + algorithm := key.Algorithm() + var pubKey crypto.PublicKey + + switch algorithm { + case data.ECDSAx509Key: + pemCert, _ := pem.Decode([]byte(key.Public())) + if pemCert == nil { + logrus.Infof("failed to decode PEM-encoded x509 certificate for keyID: %s", key.ID()) + logrus.Debugf("certificate bytes: %s", string(key.Public())) + return ErrInvalid + } + cert, err := x509.ParseCertificate(pemCert.Bytes) + if err != nil { + logrus.Infof("failed to parse x509 certificate: %s\n", err) + return ErrInvalid + } + pubKey = cert.PublicKey + case data.ECDSAKey: + var err error + pubKey, err = x509.ParsePKIXPublicKey(key.Public()) + if err != nil { + logrus.Infof("Failed to parse private key for keyID: %s, %s\n", key.ID(), err) + return ErrInvalid + } + default: + logrus.Infof("invalid key type for ECDSA verifier: %s", algorithm) + return ErrInvalid + } + + ecdsaPubKey, ok := pubKey.(*ecdsa.PublicKey) + if !ok { + logrus.Infof("value isn't an ECDSA public key") + return ErrInvalid + } + + sigLength := len(sig) + expectedOctetLength := 2 * ((ecdsaPubKey.Params().BitSize + 7) >> 3) + if sigLength != expectedOctetLength { + logrus.Infof("signature had an unexpected length") + return ErrInvalid + } + + rBytes, sBytes := sig[:sigLength/2], sig[sigLength/2:] + r := new(big.Int).SetBytes(rBytes) + s := new(big.Int).SetBytes(sBytes) + + digest := sha256.Sum256(msg) + + if !ecdsa.Verify(ecdsaPubKey, digest[:], r, s) { + logrus.Infof("failed ECDSA signature validation") + return ErrInvalid + } + + return nil +} diff --git a/vendor/src/github.com/endophage/gotuf/signed/verify.go b/vendor/src/github.com/endophage/gotuf/signed/verify.go new file mode 100644 index 0000000000..f6b6d91677 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/signed/verify.go @@ -0,0 +1,186 @@ +package signed + +import ( + "encoding/json" + "errors" + "strings" + "time" + + "github.com/Sirupsen/logrus" + "github.com/endophage/gotuf/data" + "github.com/endophage/gotuf/keys" + "github.com/tent/canonical-json-go" +) + +var ( + ErrMissingKey = errors.New("tuf: missing key") + ErrNoSignatures = errors.New("tuf: data has no signatures") + ErrInvalid = errors.New("tuf: signature verification failed") + ErrWrongMethod = errors.New("tuf: invalid signature type") + ErrUnknownRole = errors.New("tuf: unknown role") + ErrWrongType = errors.New("tuf: meta file has wrong type") +) + +type signedMeta struct { + Type string `json:"_type"` + Expires string `json:"expires"` + Version int `json:"version"` +} + +// VerifyRoot checks if a given root file is valid against a known set of keys. 
+// Threshold is always assumed to be 1 +func VerifyRoot(s *data.Signed, minVersion int, keys map[string]data.PublicKey) error { + if len(s.Signatures) == 0 { + return ErrNoSignatures + } + + var decoded map[string]interface{} + if err := json.Unmarshal(s.Signed, &decoded); err != nil { + return err + } + msg, err := cjson.Marshal(decoded) + if err != nil { + return err + } + + for _, sig := range s.Signatures { + // method lookup is consistent due to Unmarshal JSON doing lower case for us. + method := sig.Method + verifier, ok := Verifiers[method] + if !ok { + logrus.Debugf("continuing b/c signing method is not supported for verify root: %s\n", sig.Method) + continue + } + + key, ok := keys[sig.KeyID] + if !ok { + logrus.Debugf("continuing b/c signing key isn't present in keys: %s\n", sig.KeyID) + continue + } + + if err := verifier.Verify(key, sig.Signature, msg); err != nil { + logrus.Debugf("continuing b/c signature was invalid\n") + continue + } + // threshold of 1 so return on first success + return verifyMeta(s, "root", minVersion) + } + return ErrRoleThreshold{} +} + +func Verify(s *data.Signed, role string, minVersion int, db *keys.KeyDB) error { + if err := VerifySignatures(s, role, db); err != nil { + return err + } + return verifyMeta(s, role, minVersion) +} + +func verifyMeta(s *data.Signed, role string, minVersion int) error { + sm := &signedMeta{} + if err := json.Unmarshal(s.Signed, sm); err != nil { + return err + } + if !data.ValidTUFType(sm.Type) { + return ErrWrongType + } + if IsExpired(sm.Expires) { + logrus.Errorf("Metadata for %s expired", role) + return ErrExpired{Role: role, Expired: sm.Expires} + } + if sm.Version < minVersion { + return ErrLowVersion{sm.Version, minVersion} + } + + return nil +} + +var IsExpired = func(t string) bool { + ts, err := time.Parse(time.RFC3339, t) + if err != nil { + ts, err = time.Parse("2006-01-02 15:04:05 MST", t) + if err != nil { + return false + } + } + return ts.Sub(time.Now()) <= 0 +} + +func VerifySignatures(s *data.Signed, role string, db *keys.KeyDB) error { + if len(s.Signatures) == 0 { + return ErrNoSignatures + } + + roleData := db.GetRole(role) + if roleData == nil { + return ErrUnknownRole + } + + if roleData.Threshold < 1 { + return ErrRoleThreshold{} + } + logrus.Debugf("%s role has key IDs: %s", role, strings.Join(roleData.KeyIDs, ",")) + + var decoded map[string]interface{} + if err := json.Unmarshal(s.Signed, &decoded); err != nil { + return err + } + msg, err := cjson.Marshal(decoded) + if err != nil { + return err + } + + valid := make(map[string]struct{}) + for _, sig := range s.Signatures { + logrus.Debug("verifying signature for key ID: ", sig.KeyID) + if !roleData.ValidKey(sig.KeyID) { + logrus.Debugf("continuing b/c keyid was invalid: %s for roledata %s\n", sig.KeyID, roleData) + continue + } + key := db.GetKey(sig.KeyID) + if key == nil { + logrus.Debugf("continuing b/c keyid lookup was nil: %s\n", sig.KeyID) + continue + } + // method lookup is consistent due to Unmarshal JSON doing lower case for us. 
+ method := sig.Method + verifier, ok := Verifiers[method] + if !ok { + logrus.Debugf("continuing b/c signing method is not supported: %s\n", sig.Method) + continue + } + + if err := verifier.Verify(key, sig.Signature, msg); err != nil { + logrus.Debugf("continuing b/c signature was invalid\n") + continue + } + valid[sig.KeyID] = struct{}{} + + } + if len(valid) < roleData.Threshold { + return ErrRoleThreshold{} + } + + return nil +} + +func Unmarshal(b []byte, v interface{}, role string, minVersion int, db *keys.KeyDB) error { + s := &data.Signed{} + if err := json.Unmarshal(b, s); err != nil { + return err + } + if err := Verify(s, role, minVersion, db); err != nil { + return err + } + return json.Unmarshal(s.Signed, v) +} + +func UnmarshalTrusted(b []byte, v interface{}, role string, db *keys.KeyDB) error { + s := &data.Signed{} + if err := json.Unmarshal(b, s); err != nil { + return err + } + if err := VerifySignatures(s, role, db); err != nil { + return err + } + return json.Unmarshal(s.Signed, v) +} diff --git a/vendor/src/github.com/endophage/gotuf/store/dbstore.go b/vendor/src/github.com/endophage/gotuf/store/dbstore.go new file mode 100644 index 0000000000..f67f3729e0 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/store/dbstore.go @@ -0,0 +1,252 @@ +package store + +import ( + "database/sql" + "encoding/hex" + "encoding/json" + "fmt" + "io/ioutil" + "os" + "path" + + logrus "github.com/Sirupsen/logrus" + "github.com/endophage/gotuf/data" + "github.com/endophage/gotuf/utils" +) + +const ( + tufLoc string = "/tmp/tuf" + metadataSubDir string = "metadata" +) + +// implements LocalStore +type dbStore struct { + db sql.DB + imageName string +} + +// DBStore takes a database connection and the QDN of the image +func DBStore(db *sql.DB, imageName string) *dbStore { + store := dbStore{ + db: *db, + imageName: imageName, + } + + return &store +} + +// GetMeta loads existing TUF metadata files +func (dbs *dbStore) GetMeta(name string) ([]byte, error) { + data, err := dbs.readFile(name) + if err != nil { + return nil, err + } + return data, err +} + +// SetMeta writes individual TUF metadata files +func (dbs *dbStore) SetMeta(name string, meta []byte) error { + return dbs.writeFile(name, meta) +} + +// WalkStagedTargets walks all targets in scope +func (dbs *dbStore) WalkStagedTargets(paths []string, targetsFn targetsWalkFunc) error { + if len(paths) == 0 { + files := dbs.loadTargets("") + for path, meta := range files { + if err := targetsFn(path, meta); err != nil { + return err + } + } + return nil + } + + for _, path := range paths { + files := dbs.loadTargets(path) + meta, ok := files[path] + if !ok { + return fmt.Errorf("File Not Found") + } + if err := targetsFn(path, meta); err != nil { + return err + } + } + return nil +} + +// Commit writes a set of consistent (possibly) TUF metadata files +func (dbs *dbStore) Commit(metafiles map[string][]byte, consistent bool, hashes map[string]data.Hashes) error { + // TODO (endophage): write meta files to cache + return nil + +} + +// GetKeys returns private keys +func (dbs *dbStore) GetKeys(role string) ([]data.PrivateKey, error) { + keys := []data.PrivateKey{} + var r *sql.Rows + var err error + sql := "SELECT `key` FROM `keys` WHERE `role` = ? 
AND `namespace` = ?;" + tx, err := dbs.db.Begin() + defer tx.Rollback() + r, err = tx.Query(sql, role, dbs.imageName) + if err != nil { + return nil, err + } + defer r.Close() + for r.Next() { + var jsonStr string + key := new(data.TUFKey) + r.Scan(&jsonStr) + err := json.Unmarshal([]byte(jsonStr), key) + if err != nil { + return nil, err + } + keys = append(keys, key) + } + return keys, nil +} + +// SaveKey saves a new private key +func (dbs *dbStore) SaveKey(role string, key data.PrivateKey) error { + jsonBytes, err := json.Marshal(key) + if err != nil { + return fmt.Errorf("Could not JSON Marshal Key") + } + tx, err := dbs.db.Begin() + if err != nil { + logrus.Error(err) + return err + } + _, err = tx.Exec("INSERT INTO `keys` (`namespace`, `role`, `key`) VALUES (?,?,?);", dbs.imageName, role, string(jsonBytes)) + tx.Commit() + return err +} + +// Clean removes staged targets +func (dbs *dbStore) Clean() error { + // TODO (endophage): purge stale items from db? May just/also need a remove method + return nil +} + +// AddBlob adds an object to the store +func (dbs *dbStore) AddBlob(path string, meta data.FileMeta) { + path = utils.NormalizeTarget(path) + jsonbytes := []byte{} + if meta.Custom != nil { + jsonbytes, _ = meta.Custom.MarshalJSON() + } + + tx, err := dbs.db.Begin() + if err != nil { + logrus.Error(err) + return + } + _, err = tx.Exec("INSERT OR REPLACE INTO `filemeta` VALUES (?,?,?,?);", dbs.imageName, path, meta.Length, jsonbytes) + if err != nil { + logrus.Error(err) + } + tx.Commit() + dbs.addBlobHashes(path, meta.Hashes) +} + +func (dbs *dbStore) addBlobHashes(path string, hashes data.Hashes) { + tx, err := dbs.db.Begin() + if err != nil { + logrus.Error(err) + } + for alg, hash := range hashes { + _, err := tx.Exec("INSERT OR REPLACE INTO `filehashes` VALUES (?,?,?,?);", dbs.imageName, path, alg, hex.EncodeToString(hash)) + if err != nil { + logrus.Error(err) + } + } + tx.Commit() +} + +// RemoveBlob removes an object from the store +func (dbs *dbStore) RemoveBlob(path string) error { + tx, err := dbs.db.Begin() + if err != nil { + logrus.Error(err) + return err + } + _, err = tx.Exec("DELETE FROM `filemeta` WHERE `path`=? AND `namespace`=?", path, dbs.imageName) + if err == nil { + tx.Commit() + } else { + tx.Rollback() + } + return err +} + +func (dbs *dbStore) loadTargets(path string) map[string]data.FileMeta { + var err error + var r *sql.Rows + tx, err := dbs.db.Begin() + defer tx.Rollback() + files := make(map[string]data.FileMeta) + sql := "SELECT `filemeta`.`path`, `size`, `alg`, `hash`, `custom` FROM `filemeta` JOIN `filehashes` ON `filemeta`.`path` = `filehashes`.`path` AND `filemeta`.`namespace` = `filehashes`.`namespace` WHERE `filemeta`.`namespace`=?" 
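	// Editor's note: the query above joins `filemeta` and `filehashes`, so each row
	// carries a single (algorithm, hash) pair; the loop further down merges rows for
	// the same path into one FileMeta entry.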
+ if path != "" { + sql = fmt.Sprintf("%s %s", sql, "AND `filemeta`.`path`=?") + r, err = tx.Query(sql, dbs.imageName, path) + } else { + r, err = tx.Query(sql, dbs.imageName) + } + if err != nil { + return files + } + defer r.Close() + for r.Next() { + var absPath, alg, hash string + var size int64 + var custom []byte + r.Scan(&absPath, &size, &alg, &hash, &custom) + hashBytes, err := hex.DecodeString(hash) + if err != nil { + // We're going to skip items with unparseable hashes as they + // won't be valid in the targets + logrus.Debug("Hash was not stored in hex as expected") + continue + } + if file, ok := files[absPath]; ok { + file.Hashes[alg] = hashBytes + } else { + file = data.FileMeta{ + Length: size, + Hashes: data.Hashes{ + alg: hashBytes, + }, + } + if custom != nil { + file.Custom = json.RawMessage(custom) + } + files[absPath] = file + } + } + return files +} + +func (dbs *dbStore) writeFile(name string, content []byte) error { + jsonName := fmt.Sprintf("%s.json", name) + fullPath := path.Join(tufLoc, metadataSubDir, dbs.imageName, jsonName) + dirPath := path.Dir(fullPath) + err := os.MkdirAll(dirPath, 0744) + if err != nil { + logrus.Error("error creating directory path to TUF cache") + return err + } + + err = ioutil.WriteFile(fullPath, content, 0744) + if err != nil { + logrus.Error("Error writing file") + } + return err +} + +func (dbs *dbStore) readFile(name string) ([]byte, error) { + jsonName := fmt.Sprintf("%s.json", name) + fullPath := path.Join(tufLoc, metadataSubDir, dbs.imageName, jsonName) + content, err := ioutil.ReadFile(fullPath) + return content, err +} diff --git a/vendor/src/github.com/endophage/gotuf/store/errors.go b/vendor/src/github.com/endophage/gotuf/store/errors.go new file mode 100644 index 0000000000..0bc8272f96 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/store/errors.go @@ -0,0 +1,7 @@ +package store + +type ErrMetaNotFound struct{} + +func (err ErrMetaNotFound) Error() string { + return "no trust data available" +} diff --git a/vendor/src/github.com/endophage/gotuf/store/filestore.go b/vendor/src/github.com/endophage/gotuf/store/filestore.go new file mode 100644 index 0000000000..2ebd9b5bf6 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/store/filestore.go @@ -0,0 +1,67 @@ +package store + +import ( + "fmt" + "io/ioutil" + "os" + "path" + "path/filepath" +) + +func NewFilesystemStore(baseDir, metaSubDir, metaExtension, targetsSubDir string) (*filesystemStore, error) { + metaDir := path.Join(baseDir, metaSubDir) + targetsDir := path.Join(baseDir, targetsSubDir) + + // Make sure we can create the necessary dirs and they are writable + err := os.MkdirAll(metaDir, 0700) + if err != nil { + return nil, err + } + err = os.MkdirAll(targetsDir, 0700) + if err != nil { + return nil, err + } + + return &filesystemStore{ + baseDir: baseDir, + metaDir: metaDir, + metaExtension: metaExtension, + targetsDir: targetsDir, + }, nil +} + +type filesystemStore struct { + baseDir string + metaDir string + metaExtension string + targetsDir string +} + +func (f *filesystemStore) GetMeta(name string, size int64) ([]byte, error) { + fileName := fmt.Sprintf("%s.%s", name, f.metaExtension) + path := filepath.Join(f.metaDir, fileName) + meta, err := ioutil.ReadFile(path) + if err != nil { + return nil, err + } + return meta, nil +} + +func (f *filesystemStore) SetMultiMeta(metas map[string][]byte) error { + for role, blob := range metas { + err := f.SetMeta(role, blob) + if err != nil { + return err + } + } + return nil +} + +func (f 
*filesystemStore) SetMeta(name string, meta []byte) error { + fileName := fmt.Sprintf("%s.%s", name, f.metaExtension) + path := filepath.Join(f.metaDir, fileName) + if err := ioutil.WriteFile(path, meta, 0600); err != nil { + return err + } + return nil +} diff --git a/vendor/src/github.com/endophage/gotuf/store/httpstore.go b/vendor/src/github.com/endophage/gotuf/store/httpstore.go new file mode 100644 index 0000000000..1287a6d7ab --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/store/httpstore.go @@ -0,0 +1,213 @@ +package store + +import ( + "bytes" + "errors" + "fmt" + "io" + "io/ioutil" + "mime/multipart" + "net/http" + "net/url" + "path" + + "github.com/Sirupsen/logrus" +) + +type ErrServerUnavailable struct{} + +func (err ErrServerUnavailable) Error() string { + return "Unable to reach trust server at this time." +} + +type ErrShortRead struct{} + +func (err ErrShortRead) Error() string { + return "Trust server returned incompelete response." +} + +type ErrMaliciousServer struct{} + +func (err ErrMaliciousServer) Error() string { + return "Trust server returned a bad response." +} + +// HTTPStore manages pulling and pushing metadata from and to a remote +// service over HTTP. It assumes the URL structure of the remote service +// maps identically to the structure of the TUF repo: +// //(root|targets|snapshot|timestamp).json +// //foo.sh +// +// If consistent snapshots are disabled, it is advised that caching is not +// enabled. Simple set a cachePath (and ensure it's writeable) to enable +// caching. +type HTTPStore struct { + baseURL url.URL + metaPrefix string + metaExtension string + targetsPrefix string + keyExtension string + roundTrip http.RoundTripper +} + +func NewHTTPStore(baseURL, metaPrefix, metaExtension, targetsPrefix, keyExtension string, roundTrip http.RoundTripper) (*HTTPStore, error) { + base, err := url.Parse(baseURL) + if err != nil { + return nil, err + } + if !base.IsAbs() { + return nil, errors.New("HTTPStore requires an absolute baseURL") + } + return &HTTPStore{ + baseURL: *base, + metaPrefix: metaPrefix, + metaExtension: metaExtension, + targetsPrefix: targetsPrefix, + keyExtension: keyExtension, + roundTrip: roundTrip, + }, nil +} + +// GetMeta downloads the named meta file with the given size. A short body +// is acceptable because in the case of timestamp.json, the size is a cap, +// not an exact length. 
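To make the calling convention concrete, here is a rough sketch of fetching size-capped root metadata. The trust-server URL, the empty metadata prefix, the extensions and the 5 MiB cap are all assumed example values, not defaults of this library:

	package main

	import (
		"fmt"
		"net/http"

		"github.com/endophage/gotuf/store"
	)

	func main() {
		// Hypothetical trust server laid out as described above.
		remote, err := store.NewHTTPStore(
			"https://trust.example.com/myrepo/",
			"", "json", "targets", "key",
			http.DefaultTransport,
		)
		if err != nil {
			panic(err)
		}
		// The size argument is an upper bound; a shorter body is accepted.
		rawRoot, err := remote.GetMeta("root", 5*1024*1024)
		if err != nil {
			panic(err)
		}
		fmt.Printf("fetched %d bytes of root metadata\n", len(rawRoot))
	}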
+func (s HTTPStore) GetMeta(name string, size int64) ([]byte, error) { + url, err := s.buildMetaURL(name) + if err != nil { + return nil, err + } + req, err := http.NewRequest("GET", url.String(), nil) + if err != nil { + return nil, err + } + resp, err := s.roundTrip.RoundTrip(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + if resp.ContentLength > size { + return nil, ErrMaliciousServer{} + } + logrus.Debugf("%d when retrieving metadata for %s", resp.StatusCode, name) + if resp.StatusCode == http.StatusNotFound { + return nil, ErrMetaNotFound{} + } + b := io.LimitReader(resp.Body, size) + body, err := ioutil.ReadAll(b) + if resp.ContentLength > 0 && int64(len(body)) < resp.ContentLength { + return nil, ErrShortRead{} + } + + if err != nil { + return nil, err + } + return body, nil +} + +func (s HTTPStore) SetMeta(name string, blob []byte) error { + url, err := s.buildMetaURL("") + if err != nil { + return err + } + req, err := http.NewRequest("POST", url.String(), bytes.NewReader(blob)) + if err != nil { + return err + } + _, err = s.roundTrip.RoundTrip(req) + return err +} + +func (s HTTPStore) SetMultiMeta(metas map[string][]byte) error { + url, err := s.buildMetaURL("") + if err != nil { + return err + } + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + for role, blob := range metas { + part, err := writer.CreateFormFile("files", role) + _, err = io.Copy(part, bytes.NewBuffer(blob)) + if err != nil { + return err + } + } + err = writer.Close() + if err != nil { + return err + } + req, err := http.NewRequest("POST", url.String(), body) + req.Header.Set("Content-Type", writer.FormDataContentType()) + if err != nil { + return err + } + _, err = s.roundTrip.RoundTrip(req) + return err +} + +func (s HTTPStore) buildMetaURL(name string) (*url.URL, error) { + var filename string + if name != "" { + filename = fmt.Sprintf("%s.%s", name, s.metaExtension) + } + uri := path.Join(s.metaPrefix, filename) + return s.buildURL(uri) +} + +func (s HTTPStore) buildTargetsURL(name string) (*url.URL, error) { + uri := path.Join(s.targetsPrefix, name) + return s.buildURL(uri) +} + +func (s HTTPStore) buildKeyURL(name string) (*url.URL, error) { + filename := fmt.Sprintf("%s.%s", name, s.keyExtension) + uri := path.Join(s.metaPrefix, filename) + return s.buildURL(uri) +} + +func (s HTTPStore) buildURL(uri string) (*url.URL, error) { + sub, err := url.Parse(uri) + if err != nil { + return nil, err + } + return s.baseURL.ResolveReference(sub), nil +} + +// GetTarget returns a reader for the desired target or an error. +// N.B. The caller is responsible for closing the reader. 
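A minimal fragment showing the calling pattern that note implies; remote stands for any RemoteStore and dst for an io.Writer, both placeholders, with "io" assumed to be imported:

	rc, err := remote.GetTarget("foo.sh")
	if err != nil {
		return err
	}
	// The store hands back the raw body; closing it is on the caller.
	defer rc.Close()
	_, err = io.Copy(dst, rc)
	return err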
+func (s HTTPStore) GetTarget(path string) (io.ReadCloser, error) { + url, err := s.buildTargetsURL(path) + if err != nil { + return nil, err + } + logrus.Debug("Attempting to download target: ", url.String()) + req, err := http.NewRequest("GET", url.String(), nil) + if err != nil { + return nil, err + } + resp, err := s.roundTrip.RoundTrip(req) + if err != nil { + return nil, err + } + return resp.Body, nil +} + +func (s HTTPStore) GetKey(role string) ([]byte, error) { + url, err := s.buildKeyURL(role) + if err != nil { + return nil, err + } + req, err := http.NewRequest("GET", url.String(), nil) + if err != nil { + return nil, err + } + resp, err := s.roundTrip.RoundTrip(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, err + } + return body, nil +} diff --git a/vendor/src/github.com/endophage/gotuf/store/interfaces.go b/vendor/src/github.com/endophage/gotuf/store/interfaces.go new file mode 100644 index 0000000000..093f3851b3 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/store/interfaces.go @@ -0,0 +1,35 @@ +package store + +import ( + "io" + + "github.com/endophage/gotuf/data" +) + +type targetsWalkFunc func(path string, meta data.FileMeta) error + +type MetadataStore interface { + GetMeta(name string, size int64) ([]byte, error) + SetMeta(name string, blob []byte) error + SetMultiMeta(map[string][]byte) error +} + +type PublicKeyStore interface { + GetKey(role string) ([]byte, error) +} + +// [endophage] I'm of the opinion this should go away. +type TargetStore interface { + WalkStagedTargets(paths []string, targetsFn targetsWalkFunc) error +} + +type LocalStore interface { + MetadataStore + TargetStore +} + +type RemoteStore interface { + MetadataStore + PublicKeyStore + GetTarget(path string) (io.ReadCloser, error) +} diff --git a/vendor/src/github.com/endophage/gotuf/store/memorystore.go b/vendor/src/github.com/endophage/gotuf/store/memorystore.go new file mode 100644 index 0000000000..d32c9a4f3d --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/store/memorystore.go @@ -0,0 +1,89 @@ +package store + +import ( + "bytes" + "fmt" + "io" + + "github.com/endophage/gotuf/data" + "github.com/endophage/gotuf/errors" + "github.com/endophage/gotuf/utils" +) + +func NewMemoryStore(meta map[string][]byte, files map[string][]byte) *memoryStore { + if meta == nil { + meta = make(map[string][]byte) + } + if files == nil { + files = make(map[string][]byte) + } + return &memoryStore{ + meta: meta, + files: files, + keys: make(map[string][]data.PrivateKey), + } +} + +type memoryStore struct { + meta map[string][]byte + files map[string][]byte + keys map[string][]data.PrivateKey +} + +func (m *memoryStore) GetMeta(name string, size int64) ([]byte, error) { + return m.meta[name], nil +} + +func (m *memoryStore) SetMeta(name string, meta []byte) error { + m.meta[name] = meta + return nil +} + +func (m *memoryStore) SetMultiMeta(metas map[string][]byte) error { + for role, blob := range metas { + m.SetMeta(role, blob) + } + return nil +} + +func (m *memoryStore) GetTarget(path string) (io.ReadCloser, error) { + return &utils.NoopCloser{Reader: bytes.NewReader(m.files[path])}, nil +} + +func (m *memoryStore) WalkStagedTargets(paths []string, targetsFn targetsWalkFunc) error { + if len(paths) == 0 { + for path, dat := range m.files { + meta, err := data.NewFileMeta(bytes.NewReader(dat), "sha256") + if err != nil { + return err + } + if err = targetsFn(path, meta); err != nil { + return err + } 
+ } + return nil + } + + for _, path := range paths { + dat, ok := m.files[path] + if !ok { + return errors.ErrFileNotFound{path} + } + meta, err := data.NewFileMeta(bytes.NewReader(dat), "sha256") + if err != nil { + return err + } + if err = targetsFn(path, meta); err != nil { + return err + } + } + return nil +} + +func (m *memoryStore) Commit(map[string][]byte, bool, map[string]data.Hashes) error { + return nil +} + +func (m *memoryStore) GetKey(role string) ([]byte, error) { + return nil, fmt.Errorf("GetKey is not implemented for the memoryStore") +} diff --git a/vendor/src/github.com/endophage/gotuf/tuf.go b/vendor/src/github.com/endophage/gotuf/tuf.go new file mode 100644 index 0000000000..23d04ca4a0 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/tuf.go @@ -0,0 +1,575 @@ +// tuf defines the core TUF logic around manipulating a repo. +package tuf + +import ( + "bytes" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "path/filepath" + "strings" + "time" + + "github.com/Sirupsen/logrus" + "github.com/endophage/gotuf/data" + "github.com/endophage/gotuf/errors" + "github.com/endophage/gotuf/keys" + "github.com/endophage/gotuf/signed" + "github.com/endophage/gotuf/utils" +) + +type ErrSigVerifyFail struct{} + +func (e ErrSigVerifyFail) Error() string { + return "Error: Signature verification failed" +} + +type ErrMetaExpired struct{} + +func (e ErrMetaExpired) Error() string { + return "Error: Metadata has expired" +} + +type ErrLocalRootExpired struct{} + +func (e ErrLocalRootExpired) Error() string { + return "Error: Local Root Has Expired" +} + +type ErrNotLoaded struct { + role string +} + +func (err ErrNotLoaded) Error() string { + return fmt.Sprintf("%s role has not been loaded", err.role) +} + +// TufRepo is an in memory representation of the TUF Repo. +// It operates at the data.Signed level, accepting and producing +// data.Signed objects. Users of a TufRepo are responsible for +// fetching raw JSON and using the Set* functions to populate +// the TufRepo instance. +type TufRepo struct { + Root *data.SignedRoot + Targets map[string]*data.SignedTargets + Snapshot *data.SignedSnapshot + Timestamp *data.SignedTimestamp + keysDB *keys.KeyDB + cryptoService signed.CryptoService +} + +// NewTufRepo initializes a TufRepo instance with a keysDB and a signer. +// If the TufRepo will only be used for reading, the signer should be nil. 
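+//
+// A minimal read-side sketch (illustrative only; it assumes the caller has
+// already fetched and unmarshalled the *data.Signed metadata, and that
+// keys.NewDB is the KeyDB constructor):
+//
+//	repo := tuf.NewTufRepo(keys.NewDB(), nil)
+//	if err := repo.SetRoot(signedRoot); err != nil {
+//		// handle invalid root metadata
+//	}
+//	if err := repo.SetTargets("targets", signedTargets); err != nil {
+//		// handle invalid targets metadata
+//	}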
+func NewTufRepo(keysDB *keys.KeyDB, cryptoService signed.CryptoService) *TufRepo { + repo := &TufRepo{ + Targets: make(map[string]*data.SignedTargets), + keysDB: keysDB, + cryptoService: cryptoService, + } + return repo +} + +// AddBaseKeys is used to add keys to the role in root.json +func (tr *TufRepo) AddBaseKeys(role string, keys ...*data.TUFKey) error { + if tr.Root == nil { + return ErrNotLoaded{role: "root"} + } + for _, k := range keys { + // Store only the public portion + pubKey := *k + pubKey.Value.Private = nil + tr.Root.Signed.Keys[pubKey.ID()] = &pubKey + tr.keysDB.AddKey(&pubKey) + tr.Root.Signed.Roles[role].KeyIDs = append(tr.Root.Signed.Roles[role].KeyIDs, pubKey.ID()) + } + tr.Root.Dirty = true + return nil + +} + +// RemoveBaseKeys is used to remove keys from the roles in root.json +func (tr *TufRepo) RemoveBaseKeys(role string, keyIDs ...string) error { + if tr.Root == nil { + return ErrNotLoaded{role: "root"} + } + keep := make([]string, 0) + toDelete := make(map[string]struct{}) + // remove keys from specified role + for _, k := range keyIDs { + toDelete[k] = struct{}{} + for _, rk := range tr.Root.Signed.Roles[role].KeyIDs { + if k != rk { + keep = append(keep, rk) + } + } + } + tr.Root.Signed.Roles[role].KeyIDs = keep + + // determine which keys are no longer in use by any roles + for roleName, r := range tr.Root.Signed.Roles { + if roleName == role { + continue + } + for _, rk := range r.KeyIDs { + if _, ok := toDelete[rk]; ok { + delete(toDelete, rk) + } + } + } + + // remove keys no longer in use by any roles + for k := range toDelete { + delete(tr.Root.Signed.Keys, k) + } + tr.Root.Dirty = true + return nil +} + +// UpdateDelegations updates the appropriate delegations, either adding +// a new delegation or updating an existing one. If keys are +// provided, the IDs will be added to the role (if they do not exist +// there already), and the keys will be added to the targets file. +// The "before" argument specifies another role which this new role +// will be added in front of (i.e. higher priority) in the delegation list. +// An empty before string indicates to add the role to the end of the +// delegation list. +// A new, empty, targets file will be created for the new role. +func (tr *TufRepo) UpdateDelegations(role *data.Role, keys []data.Key, before string) error { + if !role.IsDelegation() || !role.IsValid() { + return errors.ErrInvalidRole{} + } + parent := filepath.Dir(role.Name) + p, ok := tr.Targets[parent] + if !ok { + return errors.ErrInvalidRole{} + } + for _, k := range keys { + key := data.NewPublicKey(k.Algorithm(), k.Public()) + if !utils.StrSliceContains(role.KeyIDs, key.ID()) { + role.KeyIDs = append(role.KeyIDs, key.ID()) + } + p.Signed.Delegations.Keys[key.ID()] = key + tr.keysDB.AddKey(key) + } + + i := -1 + for idx, r := range p.Signed.Delegations.Roles { + if r.Name == role.Name { + i = idx + break + } + } + if i >= 0 { + p.Signed.Delegations.Roles[i] = role + } else { + p.Signed.Delegations.Roles = append(p.Signed.Delegations.Roles, role) + } + p.Dirty = true + + roleTargets := data.NewTargets() // NewTargets always marked Dirty + tr.Targets[role.Name] = roleTargets + + tr.keysDB.AddRole(role) + + return nil +} + +// InitRepo creates the base files for a repo. It inspects data.ValidRoles and +// data.ValidTypes to determine what the role names and filenames should be. It +// also relies on the keysDB having already been populated with the keys and +// roles.
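+//
+// For example (an illustrative sketch; it assumes keysDB already holds the
+// keys and roles for root, targets, snapshot and timestamp):
+//
+//	repo := tuf.NewTufRepo(keysDB, cryptoService)
+//	if err := repo.InitRepo(false); err != nil {
+//		// handle error
+//	}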
+func (tr *TufRepo) InitRepo(consistent bool) error { + if err := tr.InitRoot(consistent); err != nil { + return err + } + if err := tr.InitTargets(); err != nil { + return err + } + if err := tr.InitSnapshot(); err != nil { + return err + } + return tr.InitTimestamp() +} + +func (tr *TufRepo) InitRoot(consistent bool) error { + rootRoles := make(map[string]*data.RootRole) + rootKeys := make(map[string]data.PublicKey) + for _, r := range data.ValidRoles { + role := tr.keysDB.GetRole(r) + if role == nil { + return errors.ErrInvalidRole{} + } + rootRoles[r] = &role.RootRole + for _, kid := range role.KeyIDs { + // don't need to check if GetKey returns nil, Key presence was + // checked by KeyDB when role was added. + key := tr.keysDB.GetKey(kid) + // Create new key object to doubly ensure private key is excluded + k := data.NewPublicKey(key.Algorithm(), key.Public()) + rootKeys[kid] = k + } + } + root, err := data.NewRoot(rootKeys, rootRoles, consistent) + if err != nil { + return err + } + tr.Root = root + return nil +} + +func (tr *TufRepo) InitTargets() error { + targets := data.NewTargets() + tr.Targets[data.ValidRoles["targets"]] = targets + return nil +} + +func (tr *TufRepo) InitSnapshot() error { + root, err := tr.Root.ToSigned() + if err != nil { + return err + } + targets, err := tr.Targets[data.ValidRoles["targets"]].ToSigned() + if err != nil { + return err + } + snapshot, err := data.NewSnapshot(root, targets) + if err != nil { + return err + } + tr.Snapshot = snapshot + return nil +} + +func (tr *TufRepo) InitTimestamp() error { + snap, err := tr.Snapshot.ToSigned() + if err != nil { + return err + } + timestamp, err := data.NewTimestamp(snap) + if err != nil { + return err + } + + tr.Timestamp = timestamp + return nil +} + +// SetRoot parses the Signed object into a SignedRoot object, sets +// the keys and roles in the KeyDB, and sets the TufRepo.Root field +// to the SignedRoot object. +func (tr *TufRepo) SetRoot(s *data.Signed) error { + r, err := data.RootFromSigned(s) + if err != nil { + return err + } + for _, key := range r.Signed.Keys { + logrus.Debug("Adding key ", key.ID()) + tr.keysDB.AddKey(key) + } + for roleName, role := range r.Signed.Roles { + logrus.Debugf("Adding role %s with keys %s", roleName, strings.Join(role.KeyIDs, ",")) + baseRole, err := data.NewRole( + roleName, + role.Threshold, + role.KeyIDs, + nil, + nil, + ) + if err != nil { + return err + } + err = tr.keysDB.AddRole(baseRole) + if err != nil { + return err + } + } + tr.Root = r + return nil +} + +// SetTimestamp parses the Signed object into a SignedTimestamp object +// and sets the TufRepo.Timestamp field. +func (tr *TufRepo) SetTimestamp(s *data.Signed) error { + ts, err := data.TimestampFromSigned(s) + if err != nil { + return err + } + tr.Timestamp = ts + return nil +} + +// SetSnapshot parses the Signed object into a SignedSnapshot object +// and sets the TufRepo.Snapshot field. +func (tr *TufRepo) SetSnapshot(s *data.Signed) error { + snap, err := data.SnapshotFromSigned(s) + if err != nil { + return err + } + + tr.Snapshot = snap + return nil +} + +// SetTargets parses the Signed object into a SignedTargets object, +// reads the delegated roles and keys into the KeyDB, and sets the +// SignedTargets object against the role in the TufRepo.Targets map.
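+// The role argument is the name the targets metadata is stored under, e.g.
+// "targets" for the top level file, or a delegated role name such as
+// "targets/releases".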
+func (tr *TufRepo) SetTargets(role string, s *data.Signed) error { + t, err := data.TargetsFromSigned(s) + if err != nil { + return err + } + for _, k := range t.Signed.Delegations.Keys { + tr.keysDB.AddKey(k) + } + for _, r := range t.Signed.Delegations.Roles { + tr.keysDB.AddRole(r) + } + tr.Targets[role] = t + return nil +} + +// TargetMeta returns the FileMeta entry for the given path in the +// targets file associated with the given role. This may be nil if +// the target isn't found in the targets file. +func (tr TufRepo) TargetMeta(role, path string) *data.FileMeta { + if t, ok := tr.Targets[role]; ok { + if m, ok := t.Signed.Targets[path]; ok { + return &m + } + } + return nil +} + +// TargetDelegations returns a slice of Roles that are valid publishers +// for the target path provided. +func (tr TufRepo) TargetDelegations(role, path, pathHex string) []*data.Role { + if pathHex == "" { + pathDigest := sha256.Sum256([]byte(path)) + pathHex = hex.EncodeToString(pathDigest[:]) + } + roles := make([]*data.Role, 0) + if t, ok := tr.Targets[role]; ok { + for _, r := range t.Signed.Delegations.Roles { + if r.CheckPrefixes(pathHex) || r.CheckPaths(path) { + roles = append(roles, r) + } + } + } + return roles +} + +// FindTarget attempts to find the target represented by the given +// path by starting at the top targets file and traversing +// appropriate delegations until the first entry is found or it +// runs out of locations to search. +// N.B. Multiple entries may exist in different delegated roles +// for the same target. Only the first one encountered is returned. +func (tr TufRepo) FindTarget(path string) *data.FileMeta { + pathDigest := sha256.Sum256([]byte(path)) + pathHex := hex.EncodeToString(pathDigest[:]) + + var walkTargets func(role string) *data.FileMeta + walkTargets = func(role string) *data.FileMeta { + if m := tr.TargetMeta(role, path); m != nil { + return m + } + // Depth first search of delegations based on order + // as presented in current targets file for role: + for _, r := range tr.TargetDelegations(role, path, pathHex) { + if m := walkTargets(r.Name); m != nil { + return m + } + } + return nil + } + + return walkTargets("targets") +} + +// AddTargets will attempt to add the given targets specifically to +// the directed role. If the user does not have the signing keys for the role +// the function will return an error and the full slice of targets. 
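+//
+// For example (an illustrative sketch; the target path and file contents are
+// hypothetical):
+//
+//	meta, err := data.NewFileMeta(bytes.NewReader(fileBytes), "sha256")
+//	if err != nil {
+//		// handle error
+//	}
+//	invalid, err := repo.AddTargets("targets", data.Files{"library/app": meta})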
+func (tr *TufRepo) AddTargets(role string, targets data.Files) (data.Files, error) { + t, ok := tr.Targets[role] + if !ok { + return targets, errors.ErrInvalidRole{role} + } + invalid := make(data.Files) + for path, target := range targets { + pathDigest := sha256.Sum256([]byte(path)) + pathHex := hex.EncodeToString(pathDigest[:]) + r := tr.keysDB.GetRole(role) + if role == data.ValidRoles["targets"] || (r.CheckPaths(path) || r.CheckPrefixes(pathHex)) { + t.Signed.Targets[path] = target + } else { + invalid[path] = target + } + } + t.Dirty = true + if len(invalid) > 0 { + return invalid, fmt.Errorf("Could not add all targets") + } + return nil, nil +} + +func (tr *TufRepo) RemoveTargets(role string, targets ...string) error { + t, ok := tr.Targets[role] + if !ok { + return errors.ErrInvalidRole{role} + } + + for _, path := range targets { + delete(t.Signed.Targets, path) + } + t.Dirty = true + return nil +} + +func (tr *TufRepo) UpdateSnapshot(role string, s *data.Signed) error { + jsonData, err := json.Marshal(s) + if err != nil { + return err + } + meta, err := data.NewFileMeta(bytes.NewReader(jsonData), "sha256") + if err != nil { + return err + } + tr.Snapshot.Signed.Meta[role] = meta + tr.Snapshot.Dirty = true + return nil +} + +func (tr *TufRepo) UpdateTimestamp(s *data.Signed) error { + jsonData, err := json.Marshal(s) + if err != nil { + return err + } + meta, err := data.NewFileMeta(bytes.NewReader(jsonData), "sha256") + if err != nil { + return err + } + tr.Timestamp.Signed.Meta["snapshot"] = meta + tr.Timestamp.Dirty = true + return nil +} + +func (tr *TufRepo) SignRoot(expires time.Time, cryptoService signed.CryptoService) (*data.Signed, error) { + logrus.Debug("signing root...") + tr.Root.Signed.Expires = expires + tr.Root.Signed.Version++ + root := tr.keysDB.GetRole(data.ValidRoles["root"]) + signed, err := tr.Root.ToSigned() + if err != nil { + return nil, err + } + signed, err = tr.sign(signed, *root, cryptoService) + if err != nil { + return nil, err + } + tr.Root.Signatures = signed.Signatures + return signed, nil +} + +func (tr *TufRepo) SignTargets(role string, expires time.Time, cryptoService signed.CryptoService) (*data.Signed, error) { + logrus.Debugf("sign targets called for role %s", role) + tr.Targets[role].Signed.Expires = expires + tr.Targets[role].Signed.Version++ + signed, err := tr.Targets[role].ToSigned() + if err != nil { + logrus.Debug("errored getting targets data.Signed object") + return nil, err + } + targets := tr.keysDB.GetRole(role) + signed, err = tr.sign(signed, *targets, cryptoService) + if err != nil { + logrus.Debug("errored signing ", role) + return nil, err + } + tr.Targets[role].Signatures = signed.Signatures + return signed, nil +} + +func (tr *TufRepo) SignSnapshot(expires time.Time, cryptoService signed.CryptoService) (*data.Signed, error) { + logrus.Debug("signing snapshot...") + signedRoot, err := tr.Root.ToSigned() + if err != nil { + return nil, err + } + err = tr.UpdateSnapshot("root", signedRoot) + if err != nil { + return nil, err + } + tr.Root.Dirty = false // root stays dirty until changes are captured in the snapshot + for role, targets := range tr.Targets { + signedTargets, err := targets.ToSigned() + if err != nil { + return nil, err + } + err = tr.UpdateSnapshot(role, signedTargets) + if err != nil { + return nil, err + } + } + tr.Snapshot.Signed.Expires = expires + tr.Snapshot.Signed.Version++ + signed, err := tr.Snapshot.ToSigned() + if err != nil { + return nil, err + } + snapshot := tr.keysDB.GetRole(data.ValidRoles["snapshot"]) +
signed, err = tr.sign(signed, *snapshot, cryptoService) + if err != nil { + return nil, err + } + tr.Snapshot.Signatures = signed.Signatures + return signed, nil +} + +func (tr *TufRepo) SignTimestamp(expires time.Time, cryptoService signed.CryptoService) (*data.Signed, error) { + logrus.Debug("SignTimestamp") + signedSnapshot, err := tr.Snapshot.ToSigned() + if err != nil { + return nil, err + } + err = tr.UpdateTimestamp(signedSnapshot) + if err != nil { + return nil, err + } + tr.Timestamp.Signed.Expires = expires + tr.Timestamp.Signed.Version++ + signed, err := tr.Timestamp.ToSigned() + if err != nil { + return nil, err + } + timestamp := tr.keysDB.GetRole(data.ValidRoles["timestamp"]) + signed, err = tr.sign(signed, *timestamp, cryptoService) + if err != nil { + return nil, err + } + tr.Timestamp.Signatures = signed.Signatures + tr.Snapshot.Dirty = false // snapshot is dirty until changes have been captured in timestamp + return signed, nil +} + +func (tr TufRepo) sign(signedData *data.Signed, role data.Role, cryptoService signed.CryptoService) (*data.Signed, error) { + ks := make([]data.PublicKey, 0, len(role.KeyIDs)) + for _, kid := range role.KeyIDs { + k := tr.keysDB.GetKey(kid) + if k == nil { + continue + } + ks = append(ks, k) + } + if len(ks) < 1 { + return nil, keys.ErrInvalidKey + } + if cryptoService == nil { + cryptoService = tr.cryptoService + } + err := signed.Sign(cryptoService, signedData, ks...) + if err != nil { + return nil, err + } + return signedData, nil +} diff --git a/vendor/src/github.com/endophage/gotuf/utils/util.go b/vendor/src/github.com/endophage/gotuf/utils/util.go new file mode 100644 index 0000000000..5184f799a4 --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/utils/util.go @@ -0,0 +1,82 @@ +package utils + +import ( + "crypto/hmac" + "encoding/hex" + "errors" + "fmt" + gopath "path" + "path/filepath" + + "github.com/endophage/gotuf/data" +) + +var ErrWrongLength = errors.New("wrong length") + +type ErrWrongHash struct { + Type string + Expected []byte + Actual []byte +} + +func (e ErrWrongHash) Error() string { + return fmt.Sprintf("wrong %s hash, expected %#x got %#x", e.Type, e.Expected, e.Actual) +} + +type ErrNoCommonHash struct { + Expected data.Hashes + Actual data.Hashes +} + +func (e ErrNoCommonHash) Error() string { + types := func(a data.Hashes) []string { + t := make([]string, 0, len(a)) + for typ := range a { + t = append(t, typ) + } + return t + } + return fmt.Sprintf("no common hash function, expected one of %s, got %s", types(e.Expected), types(e.Actual)) +} + +type ErrUnknownHashAlgorithm struct { + Name string +} + +func (e ErrUnknownHashAlgorithm) Error() string { + return fmt.Sprintf("unknown hash algorithm: %s", e.Name) +} + +type PassphraseFunc func(role string, confirm bool) ([]byte, error) + +func FileMetaEqual(actual data.FileMeta, expected data.FileMeta) error { + if actual.Length != expected.Length { + return ErrWrongLength + } + hashChecked := false + for typ, hash := range expected.Hashes { + if h, ok := actual.Hashes[typ]; ok { + hashChecked = true + if !hmac.Equal(h, hash) { + return ErrWrongHash{typ, hash, h} + } + } + } + if !hashChecked { + return ErrNoCommonHash{expected.Hashes, actual.Hashes} + } + return nil +} + +func NormalizeTarget(path string) string { + return gopath.Join("/", path) +} + +func HashedPaths(path string, hashes data.Hashes) []string { + paths := make([]string, 0, len(hashes)) + for _, hash := range hashes { + hashedPath := filepath.Join(filepath.Dir(path), 
hex.EncodeToString(hash)+"."+filepath.Base(path)) + paths = append(paths, hashedPath) + } + return paths +} diff --git a/vendor/src/github.com/endophage/gotuf/utils/utils.go b/vendor/src/github.com/endophage/gotuf/utils/utils.go new file mode 100644 index 0000000000..38d46b031a --- /dev/null +++ b/vendor/src/github.com/endophage/gotuf/utils/utils.go @@ -0,0 +1,80 @@ +package utils + +import ( + "bytes" + "crypto/sha256" + "crypto/tls" + "fmt" + "io" + "net/http" + "net/url" + "os" + "strings" + + "github.com/endophage/gotuf/data" +) + +func Download(url url.URL) (*http.Response, error) { + tr := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + client := &http.Client{Transport: tr} + return client.Get(url.String()) +} + +func Upload(url string, body io.Reader) (*http.Response, error) { + tr := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + client := &http.Client{Transport: tr} + return client.Post(url, "application/json", body) +} + +func ValidateTarget(r io.Reader, m *data.FileMeta) error { + h := sha256.New() + length, err := io.Copy(h, r) + if err != nil { + return err + } + if length != m.Length { + return fmt.Errorf("Size of downloaded target did not match targets entry.\nExpected: %d\nReceived: %d\n", m.Length, length) + } + hashDigest := h.Sum(nil) + if bytes.Compare(m.Hashes["sha256"], hashDigest[:]) != 0 { + return fmt.Errorf("Hash of downloaded target did not match targets entry.\nExpected: %x\nReceived: %x\n", m.Hashes["sha256"], hashDigest) + } + return nil +} + +func StrSliceContains(ss []string, s string) bool { + for _, v := range ss { + if v == s { + return true + } + } + return false +} + +func StrSliceContainsI(ss []string, s string) bool { + s = strings.ToLower(s) + for _, v := range ss { + v = strings.ToLower(v) + if v == s { + return true + } + } + return false +} + +// FileExists returns true if the file exists and could be statted. +func FileExists(path string) bool { + _, err := os.Stat(path) + return err == nil +} + +type NoopCloser struct { + io.Reader +} + +func (nc *NoopCloser) Close() error { + return nil +} diff --git a/vendor/src/github.com/tent/canonical-json-go/.travis.yml b/vendor/src/github.com/tent/canonical-json-go/.travis.yml new file mode 100644 index 0000000000..8d61700ec2 --- /dev/null +++ b/vendor/src/github.com/tent/canonical-json-go/.travis.yml @@ -0,0 +1,4 @@ +language: go +go: + - 1.1 + - tip diff --git a/vendor/src/github.com/tent/canonical-json-go/LICENSE b/vendor/src/github.com/tent/canonical-json-go/LICENSE new file mode 100644 index 0000000000..7448756763 --- /dev/null +++ b/vendor/src/github.com/tent/canonical-json-go/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission.
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/src/github.com/tent/canonical-json-go/encode.go b/vendor/src/github.com/tent/canonical-json-go/encode.go new file mode 100644 index 0000000000..6334c3ba6d --- /dev/null +++ b/vendor/src/github.com/tent/canonical-json-go/encode.go @@ -0,0 +1,620 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cjson + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "math" + "reflect" + "runtime" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +func Marshal(v interface{}) ([]byte, error) { + e := &encodeState{} + err := e.marshal(v) + if err != nil { + return nil, err + } + return e.Bytes(), nil +} + +// Marshaler is the interface implemented by objects that +// can marshal themselves into valid JSON. +type Marshaler interface { + MarshalJSON() ([]byte, error) +} + +// An UnsupportedTypeError is returned by Marshal when attempting +// to encode an unsupported value type. +type UnsupportedTypeError struct { + Type reflect.Type +} + +func (e *UnsupportedTypeError) Error() string { + return "json: unsupported type: " + e.Type.String() +} + +type UnsupportedValueError struct { + Value reflect.Value + Str string +} + +func (e *UnsupportedValueError) Error() string { + return "json: unsupported value: " + e.Str +} + +type InvalidUTF8Error struct { + S string +} + +func (e *InvalidUTF8Error) Error() string { + return "json: invalid UTF-8 in string: " + strconv.Quote(e.S) +} + +type MarshalerError struct { + Type reflect.Type + Err error +} + +func (e *MarshalerError) Error() string { + return "json: error calling MarshalJSON for type " + e.Type.String() + ": " + e.Err.Error() +} + +var hex = "0123456789abcdef" + +var numberType = reflect.TypeOf(Number("")) + +// A Number represents a JSON number literal. +type Number string + +// String returns the literal text of the number. +func (n Number) String() string { return string(n) } + +// Float64 returns the number as a float64. +func (n Number) Float64() (float64, error) { + return strconv.ParseFloat(string(n), 64) +} + +// Int64 returns the number as an int64. +func (n Number) Int64() (int64, error) { + return strconv.ParseInt(string(n), 10, 64) +} + +// An encodeState encodes JSON into a bytes.Buffer. 
+type encodeState struct { + bytes.Buffer // accumulated output + scratch [64]byte +} + +func (e *encodeState) marshal(v interface{}) (err error) { + defer func() { + if r := recover(); r != nil { + if _, ok := r.(runtime.Error); ok { + panic(r) + } + err = r.(error) + } + }() + e.reflectValue(reflect.ValueOf(v)) + return nil +} + +func (e *encodeState) error(err error) { + panic(err) +} + +var byteSliceType = reflect.TypeOf([]byte(nil)) + +func isEmptyValue(v reflect.Value) bool { + switch v.Kind() { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + } + return false +} + +func (e *encodeState) reflectValue(v reflect.Value) { + e.reflectValueQuoted(v, false) +} + +// reflectValueQuoted writes the value in v to the output. +// If quoted is true, the serialization is wrapped in a JSON string. +func (e *encodeState) reflectValueQuoted(v reflect.Value, quoted bool) { + if !v.IsValid() { + e.WriteString("null") + return + } + + m, ok := v.Interface().(Marshaler) + if !ok { + // T doesn't match the interface. Check against *T too. + if v.Kind() != reflect.Ptr && v.CanAddr() { + m, ok = v.Addr().Interface().(Marshaler) + if ok { + v = v.Addr() + } + } + } + if ok && (v.Kind() != reflect.Ptr || !v.IsNil()) { + b, err := m.MarshalJSON() + if err != nil { + e.error(&MarshalerError{v.Type(), err}) + } + + // canonicalize the json if it's an object + b = bytes.TrimSpace(b) + if len(b) > 0 && b[0] == '{' { + var temp interface{} + err = json.Unmarshal(b, &temp) + if err != nil { + e.error(&MarshalerError{v.Type(), err}) + } + b, err = Marshal(temp) + if err != nil { + e.error(&MarshalerError{v.Type(), err}) + } + } + e.Buffer.Write(b) + return + } + + writeString := (*encodeState).WriteString + if quoted { + writeString = (*encodeState).string + } + + switch v.Kind() { + case reflect.Bool: + x := v.Bool() + if x { + writeString(e, "true") + } else { + writeString(e, "false") + } + + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + b := strconv.AppendInt(e.scratch[:0], v.Int(), 10) + if quoted { + writeString(e, string(b)) + } else { + e.Write(b) + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + b := strconv.AppendUint(e.scratch[:0], v.Uint(), 10) + if quoted { + writeString(e, string(b)) + } else { + e.Write(b) + } + case reflect.Float32, reflect.Float64: + f := v.Float() + if math.IsInf(f, 0) || math.IsNaN(f) || math.Floor(f) != f { + e.error(&UnsupportedValueError{v, "floating point number"}) + } + b := strconv.AppendInt(e.scratch[:0], int64(f), 10) + if quoted { + writeString(e, string(b)) + } else { + e.Write(b) + } + case reflect.String: + if v.Type() == numberType { + numStr := v.String() + if numStr == "" { + numStr = "0" // Number's zero-val + } + e.WriteString(numStr) + break + } + if quoted { + sb, err := Marshal(v.String()) + if err != nil { + e.error(err) + } + e.string(string(sb)) + } else { + e.string(v.String()) + } + + case reflect.Struct: + e.WriteByte('{') + first := true + for _, f := range cachedTypeFields(v.Type()) { + fv := fieldByIndex(v, f.index) + if !fv.IsValid() || 
f.omitEmpty && isEmptyValue(fv) { + continue + } + if first { + first = false + } else { + e.WriteByte(',') + } + e.string(f.name) + e.WriteByte(':') + e.reflectValueQuoted(fv, f.quoted) + } + e.WriteByte('}') + + case reflect.Map: + if v.Type().Key().Kind() != reflect.String { + e.error(&UnsupportedTypeError{v.Type()}) + } + if v.IsNil() { + e.WriteString("null") + break + } + e.WriteByte('{') + var sv stringValues = v.MapKeys() + sort.Sort(sv) + for i, k := range sv { + if i > 0 { + e.WriteByte(',') + } + e.string(k.String()) + e.WriteByte(':') + e.reflectValue(v.MapIndex(k)) + } + e.WriteByte('}') + + case reflect.Slice: + if v.IsNil() { + e.WriteString("null") + break + } + if v.Type().Elem().Kind() == reflect.Uint8 { + // Byte slices get special treatment; arrays don't. + s := v.Bytes() + e.WriteByte('"') + if len(s) < 1024 { + // for small buffers, using Encode directly is much faster. + dst := make([]byte, base64.StdEncoding.EncodedLen(len(s))) + base64.StdEncoding.Encode(dst, s) + e.Write(dst) + } else { + // for large buffers, avoid unnecessary extra temporary + // buffer space. + enc := base64.NewEncoder(base64.StdEncoding, e) + enc.Write(s) + enc.Close() + } + e.WriteByte('"') + break + } + // Slices can be marshalled as nil, but otherwise are handled + // as arrays. + fallthrough + case reflect.Array: + e.WriteByte('[') + n := v.Len() + for i := 0; i < n; i++ { + if i > 0 { + e.WriteByte(',') + } + e.reflectValue(v.Index(i)) + } + e.WriteByte(']') + + case reflect.Interface, reflect.Ptr: + if v.IsNil() { + e.WriteString("null") + return + } + e.reflectValue(v.Elem()) + + default: + e.error(&UnsupportedTypeError{v.Type()}) + } + return +} + +func isValidTag(s string) bool { + if s == "" { + return false + } + for _, c := range s { + switch { + case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): + // Backslash and quote chars are reserved, but + // otherwise any punctuation chars are allowed + // in a tag name. + default: + if !unicode.IsLetter(c) && !unicode.IsDigit(c) { + return false + } + } + } + return true +} + +func fieldByIndex(v reflect.Value, index []int) reflect.Value { + for _, i := range index { + if v.Kind() == reflect.Ptr { + if v.IsNil() { + return reflect.Value{} + } + v = v.Elem() + } + v = v.Field(i) + } + return v +} + +// stringValues is a slice of reflect.Value holding *reflect.StringValue. +// It implements the methods to sort by string. +type stringValues []reflect.Value + +func (sv stringValues) Len() int { return len(sv) } +func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] } +func (sv stringValues) Less(i, j int) bool { return sv.get(i) < sv.get(j) } +func (sv stringValues) get(i int) string { return sv[i].String() } + +func (e *encodeState) string(s string) (int, error) { + len0 := e.Len() + e.WriteByte('"') + start := 0 + for i := 0; i < len(s); { + if b := s[i]; b < utf8.RuneSelf { + if b != '\\' && b != '"' { + i++ + continue + } + if start < i { + e.WriteString(s[start:i]) + } + switch b { + case '\\', '"': + e.WriteByte('\\') + e.WriteByte(b) + } + i++ + start = i + continue + } + c, size := utf8.DecodeRuneInString(s[i:]) + if c == utf8.RuneError && size == 1 { + e.error(&InvalidUTF8Error{s}) + } + i += size + } + if start < len(s) { + e.WriteString(s[start:]) + } + e.WriteByte('"') + return e.Len() - len0, nil +} + +// A field represents a single field found in a struct. 
+type field struct { + name string + tag bool + index []int + typ reflect.Type + omitEmpty bool + quoted bool +} + +// byName sorts field by name, breaking ties with depth, +// then breaking ties with "name came from json tag", then +// breaking ties with index sequence. +type byName []field + +func (x byName) Len() int { return len(x) } + +func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byName) Less(i, j int) bool { + if x[i].name != x[j].name { + return x[i].name < x[j].name + } + if len(x[i].index) != len(x[j].index) { + return len(x[i].index) < len(x[j].index) + } + if x[i].tag != x[j].tag { + return x[i].tag + } + return byIndex(x).Less(i, j) +} + +// byIndex sorts field by index sequence. +type byIndex []field + +func (x byIndex) Len() int { return len(x) } + +func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byIndex) Less(i, j int) bool { + for k, xik := range x[i].index { + if k >= len(x[j].index) { + return false + } + if xik != x[j].index[k] { + return xik < x[j].index[k] + } + } + return len(x[i].index) < len(x[j].index) +} + +// typeFields returns a list of fields that JSON should recognize for the given type. +// The algorithm is breadth-first search over the set of structs to include - the top struct +// and then any reachable anonymous structs. +func typeFields(t reflect.Type) []field { + // Anonymous fields to explore at the current level and the next. + current := []field{} + next := []field{{typ: t}} + + // Count of queued names for current level and the next. + count := map[reflect.Type]int{} + nextCount := map[reflect.Type]int{} + + // Types already visited at an earlier level. + visited := map[reflect.Type]bool{} + + // Fields found. + var fields []field + + for len(next) > 0 { + current, next = next, current[:0] + count, nextCount = nextCount, map[reflect.Type]int{} + + for _, f := range current { + if visited[f.typ] { + continue + } + visited[f.typ] = true + + // Scan f.typ for fields to include. + for i := 0; i < f.typ.NumField(); i++ { + sf := f.typ.Field(i) + if sf.PkgPath != "" { // unexported + continue + } + tag := sf.Tag.Get("json") + if tag == "-" { + continue + } + name, opts := parseTag(tag) + if !isValidTag(name) { + name = "" + } + index := make([]int, len(f.index)+1) + copy(index, f.index) + index[len(f.index)] = i + + ft := sf.Type + if ft.Name() == "" && ft.Kind() == reflect.Ptr { + // Follow pointer. + ft = ft.Elem() + } + + // Record found field and index sequence. + if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { + tagged := name != "" + if name == "" { + name = sf.Name + } + fields = append(fields, field{name, tagged, index, ft, + opts.Contains("omitempty"), opts.Contains("string")}) + if count[f.typ] > 1 { + // If there were multiple instances, add a second, + // so that the annihilation code will see a duplicate. + // It only cares about the distinction between 1 or 2, + // so don't bother generating any more copies. + fields = append(fields, fields[len(fields)-1]) + } + continue + } + + // Record new anonymous struct to explore in next round. + nextCount[ft]++ + if nextCount[ft] == 1 { + next = append(next, field{name: ft.Name(), index: index, typ: ft}) + } + } + } + } + + sort.Sort(byName(fields)) + + // Remove fields with annihilating name collisions + // and also fields shadowed by fields with explicit JSON tags. 
+ name := "" + out := fields[:0] + for _, f := range fields { + if f.name != name { + name = f.name + out = append(out, f) + continue + } + if n := len(out); n > 0 && out[n-1].name == name && (!out[n-1].tag || f.tag) { + out = out[:n-1] + } + } + fields = out + + return fields +} + +var fieldCache struct { + sync.RWMutex + m map[reflect.Type][]field +} + +// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. +func cachedTypeFields(t reflect.Type) []field { + fieldCache.RLock() + f := fieldCache.m[t] + fieldCache.RUnlock() + if f != nil { + return f + } + + // Compute fields without lock. + // Might duplicate effort but won't hold other computations back. + f = typeFields(t) + if f == nil { + f = []field{} + } + + fieldCache.Lock() + if fieldCache.m == nil { + fieldCache.m = map[reflect.Type][]field{} + } + fieldCache.m[t] = f + fieldCache.Unlock() + return f +} + +// tagOptions is the string following a comma in a struct field's "json" +// tag, or the empty string. It does not include the leading comma. +type tagOptions string + +// parseTag splits a struct field's json tag into its name and +// comma-separated options. +func parseTag(tag string) (string, tagOptions) { + if idx := strings.Index(tag, ","); idx != -1 { + return tag[:idx], tagOptions(tag[idx+1:]) + } + return tag, tagOptions("") +} + +// Contains returns whether checks that a comma-separated list of options +// contains a particular substr flag. substr must be surrounded by a +// string boundary or commas. +func (o tagOptions) Contains(optionName string) bool { + if len(o) == 0 { + return false + } + s := string(o) + for s != "" { + var next string + i := strings.Index(s, ",") + if i >= 0 { + s, next = s[:i], s[i+1:] + } + if s == optionName { + return true + } + s = next + } + return false +}