From 204d3b231867b01d31047f1ea8e0039ba3b738fc Mon Sep 17 00:00:00 2001
From: Roger Qiu
Date: Mon, 7 Feb 2022 20:18:50 +1100
Subject: [PATCH 01/10] Vault sharing, permissions and scanning:

- NodeId fixes for vaults tests
- Replaced `makeVaultIdPretty` with `encodeVaultId`
---
 benches/gitgc.ts | 104 ++
 benches/index.ts | 26 +
 benches/results/gitgc.chart.html | 116 ++
 benches/results/gitgc.json | 451 +++++
 benches/results/system.json | 39 +
 package-lock.json | 242 ++-
 package.json | 5 +-
 src/PolykeyAgent.ts | 27 +-
 src/agent/GRPCClientAgent.ts | 25 +-
 src/agent/service/index.ts | 8 +-
 src/agent/service/vaultsGitInfoGet.ts | 64 +-
 src/agent/service/vaultsGitPackGet.ts | 93 +-
 src/agent/service/vaultsPermissionsCheck.ts | 35 -
 src/agent/service/vaultsScan.ts | 61 +-
 src/bin/errors.ts | 19 -
 src/bin/secrets/CommandSecrets.ts | 2 +
 src/bin/secrets/CommandStat.ts | 87 +
 src/bin/utils/options.ts | 6 +
 src/bin/vaults/CommandClone.ts | 2 +-
 src/bin/vaults/CommandPermissions.ts | 182 +-
 src/bin/vaults/CommandPull.ts | 108 +-
 src/bin/vaults/CommandScan.ts | 164 +-
 src/bin/vaults/CommandShare.ts | 6 +-
 src/bin/vaults/CommandStat.ts | 98 -
 src/bin/vaults/CommandUnshare.ts | 5 +-
 src/bin/vaults/CommandVaults.ts | 10 +-
 src/bootstrap/utils.ts | 23 +-
 src/client/GRPCClientClient.ts | 37 +-
 src/client/service/index.ts | 12 +-
 src/client/service/vaultsClone.ts | 25 +-
 src/client/service/vaultsCreate.ts | 8 +-
 src/client/service/vaultsDelete.ts | 2 +-
 src/client/service/vaultsList.ts | 3 +-
 src/client/service/vaultsLog.ts | 16 +-
 src/client/service/vaultsPermissions.ts | 53 -
 src/client/service/vaultsPermissionsGet.ts | 63 +
 src/client/service/vaultsPull.ts | 52 +-
 src/client/service/vaultsRename.ts | 4 +-
 src/client/service/vaultsScan.ts | 30 +-
 src/client/service/vaultsSecretsDelete.ts | 8 +-
 src/client/service/vaultsSecretsEdit.ts | 8 +-
 src/client/service/vaultsSecretsGet.ts | 11 +-
 src/client/service/vaultsSecretsList.ts | 10 +-
 src/client/service/vaultsSecretsMkdir.ts | 10 +-
 src/client/service/vaultsSecretsNew.ts | 8 +-
 src/client/service/vaultsSecretsNewDir.ts | 8 +-
 src/client/service/vaultsSecretsRename.ts | 7 +-
 src/client/service/vaultsSecretsStat.ts | 50 +-
 ...vaultsPermissionsSet.ts => vaultsShare.ts} | 28 +-
 ...tsPermissionsUnset.ts => vaultsUnshare.ts} | 28 +-
 src/client/service/vaultsVersion.ts | 22 +-
 src/config.ts | 1 +
 src/git/GitRequest.ts | 87 -
 src/git/index.ts | 1 -
 src/git/utils.ts | 252 ++-
 src/keys/KeyManager.ts | 59 +-
 src/notifications/NotificationsManager.ts | 3 +-
 .../js/polykey/v1/agent_service_grpc_pb.d.ts | 49 +-
 .../js/polykey/v1/agent_service_grpc_pb.js | 54 +-
 .../js/polykey/v1/client_service_grpc_pb.d.ts | 134 +-
 .../js/polykey/v1/client_service_grpc_pb.js | 129 +-
 .../v1/permissions/permissions_pb.d.ts | 28 +
 .../polykey/v1/permissions/permissions_pb.js | 229 +++
 .../js/polykey/v1/secrets/secrets_pb.d.ts | 20 +
 src/proto/js/polykey/v1/secrets/secrets_pb.js | 152 ++
 src/proto/js/polykey/v1/vaults/vaults_pb.d.ts | 38 +
 src/proto/js/polykey/v1/vaults/vaults_pb.js | 307 ++-
 .../schemas/polykey/v1/agent_service.proto | 5 +-
 .../schemas/polykey/v1/client_service.proto | 10 +-
 .../polykey/v1/permissions/permissions.proto | 5 +
 .../schemas/polykey/v1/secrets/secrets.proto | 4 +
 .../schemas/polykey/v1/vaults/vaults.proto | 6 +
 src/sigchain/Sigchain.ts | 6 +-
 src/validation/utils.ts | 13 +-
 src/vaults/Vault.ts | 15 +
 src/vaults/VaultInternal.ts | 785 +++++---
 src/vaults/VaultManager.ts | 1122 ++++++-----
 src/vaults/VaultOps.ts | 88 +-
 src/vaults/errors.ts | 128 +-
 src/vaults/index.ts | 1 +
 src/vaults/types.ts | 256 +--
 src/vaults/utils.ts | 414 ++--
 test-git.ts | 337 ++++
 test-vaultinternal.ts | 34 +
 tests/acl/ACL.test.ts | 76 +-
 tests/agent/GRPCClientAgent.test.ts | 54 +-
 tests/agent/utils.ts | 14 +-
 tests/bin/secrets/secrets.test.ts | 153 +-
 tests/bin/vaults/vaults.test.ts | 821 ++++----
 tests/client/rpcVaults.test.ts | 423 +++--
 tests/git/utils.test.ts | 32 +-
 tests/nodes/NodeConnection.test.ts | 7 +-
 tests/nodes/NodeManager.test.ts | 1 -
 tests/notifications/utils.test.ts | 11 +-
 tests/vaults/VaultInternal.test.ts | 997 +++++-----
 tests/vaults/VaultManager.test.ts | 1660 +++++++----------
 tests/vaults/VaultOps.test.ts | 89 +-
 tests/vaults/old/Vault.test.ts.old | 565 ------
 tests/vaults/utils.test.ts | 127 +-
 99 files changed, 7038 insertions(+), 5275 deletions(-)
 create mode 100644 benches/gitgc.ts
 create mode 100644 benches/index.ts
 create mode 100644 benches/results/gitgc.chart.html
 create mode 100644 benches/results/gitgc.json
 create mode 100644 benches/results/system.json
 delete mode 100644 src/agent/service/vaultsPermissionsCheck.ts
 create mode 100644 src/bin/secrets/CommandStat.ts
 delete mode 100644 src/bin/vaults/CommandStat.ts
 delete mode 100644 src/client/service/vaultsPermissions.ts
 create mode 100644 src/client/service/vaultsPermissionsGet.ts
 rename src/client/service/{vaultsPermissionsSet.ts => vaultsShare.ts} (55%)
 rename src/client/service/{vaultsPermissionsUnset.ts => vaultsUnshare.ts} (55%)
 delete mode 100644 src/git/GitRequest.ts
 create mode 100644 src/vaults/Vault.ts
 create mode 100644 test-git.ts
 create mode 100644 test-vaultinternal.ts
 delete mode 100644 tests/vaults/old/Vault.test.ts.old

diff --git a/benches/gitgc.ts b/benches/gitgc.ts
new file mode 100644
index 000000000..3ab0f19fb
--- /dev/null
+++ b/benches/gitgc.ts
@@ -0,0 +1,104 @@
+import b from 'benny';
+import packageJson from '../package.json';
+
+async function main () {
+  let map = new Map();
+  let obj = {};
+  let arr = [];
+  let set = new Set();
+  const summary = await b.suite(
+    'gitgc',
+    b.add('map', async () => {
+      map = new Map();
+      return async () => {
+        for (let i = 0; i < 1000; i++) {
+          map.set(i, undefined);
+        }
+        for (let i = 0; i < 1000; i++) {
+          map.delete(i);
+        }
+        for (const i of map) {
+          // NOOP
+        }
+      }
+    }),
+    b.add('obj', async () => {
+      obj = {};
+      return async () => {
+        for (let i = 0; i < 1000; i++) {
+          obj[i] = undefined;
+        }
+        for (let i = 0; i < 1000; i++) {
+          delete obj[i];
+        }
+        for (const i in obj) {
+          // NOOP
+        }
+      };
+    }),
+    b.add('arr', async () => {
+      // you first have to count the number of objects
+      arr = [];
+      return async () => {
+        // you have to iterate for each object
+        // then for each value in length
+        for (let i = 0; i < 1000; i++) {
+          if (i === arr.length) {
+            // double the vector
+            arr.length = arr.length * 2 || 2;
+          }
+          arr[i] = { id: i, mark: false };
+          // arr.push({ id: i, mark: false});
+        }
+        // this has to iterate the length of the array
+        // but stop as soon as it reaches the end
+        // it gets complicated, but for a 5x improvement
+        // it could be interesting
+        for (let i = 0; i < 1000; i++) {
+          arr[i].mark = true;
+        }
+        for (let i = 0; i < 1000; i++) {
+          if (arr[i].mark === false) {
+            // NOOP
+          }
+        }
+      };
+    }),
+    b.add('set', async () => {
+      set = new Set();
+      return async () => {
+        for (let i = 0; i < 1000; i++) {
+          set.add(i);
+        }
+        for (let i = 0; i < 1000; i++) {
+          set.delete(i);
+        }
+        for (const i of set) {
+          // NOOP
+        }
+      };
+    }),
+    b.cycle(),
+    b.complete(),
+    b.save({
+      file: 'gitgc',
+      folder: 'benches/results',
+ version: packageJson.version, + details: true, + }), + b.save({ + file: 'gitgc', + folder: 'benches/results', + format: 'chart.html', + }), + ); + return summary; +} + +if (require.main === module) { + (async () => { + await main(); + })(); +} + +export default main; diff --git a/benches/index.ts b/benches/index.ts new file mode 100644 index 000000000..98a870855 --- /dev/null +++ b/benches/index.ts @@ -0,0 +1,26 @@ +#!/usr/bin/env node + +import fs from 'fs'; +import si from 'systeminformation'; +import gitgc from './gitgc'; + +async function main(): Promise { + await gitgc(); + const systemData = await si.get({ + cpu: '*', + osInfo: 'platform, distro, release, kernel, arch', + system: 'model, manufacturer', + }); + await fs.promises.writeFile( + 'benches/results/system.json', + JSON.stringify(systemData, null, 2), + ); +} + +if (require.main === module) { + (async () => { + await main(); + })(); +} + +export default main; diff --git a/benches/results/gitgc.chart.html b/benches/results/gitgc.chart.html new file mode 100644 index 000000000..31d69d540 --- /dev/null +++ b/benches/results/gitgc.chart.html @@ -0,0 +1,116 @@ + + + + + + + + gitgc + + + +
+ +
+ + + \ No newline at end of file diff --git a/benches/results/gitgc.json b/benches/results/gitgc.json new file mode 100644 index 000000000..634754e7a --- /dev/null +++ b/benches/results/gitgc.json @@ -0,0 +1,451 @@ +{ + "name": "gitgc", + "date": "2022-01-28T05:51:50.845Z", + "version": "1.0.0", + "results": [ + { + "name": "map", + "ops": 12413, + "margin": 1.36, + "options": { + "delay": 0.005, + "initCount": 1, + "minTime": 0.05, + "maxTime": 5, + "minSamples": 5 + }, + "samples": 72, + "promise": true, + "details": { + "min": 0.00007077850282485876, + "max": 0.0000894375988700565, + "mean": 0.00008055894499724271, + "median": 0.00008118181255079868, + "standardDeviation": 0.000004759840515182626, + "marginOfError": 0.0000010994670651796875, + "relativeMarginOfError": 1.3647982421037401, + "standardErrorOfMean": 5.609525842753508e-7, + "sampleVariance": 2.2656081729974e-11, + "sampleResults": [ + 0.00007077850282485876, + 0.00007097972765957447, + 0.00007161048068669528, + 0.0000716254878397711, + 0.00007233768005540166, + 0.0000729321731044349, + 0.00007298697838616715, + 0.00007363390634005764, + 0.00007377773631123919, + 0.00007389356259204713, + 0.00007414712103746398, + 0.00007531327286356821, + 0.00007556276671619614, + 0.0000756751266568483, + 0.00007613181859070464, + 0.00007620914542728636, + 0.00007621787256371814, + 0.00007629743778110945, + 0.0000764165616641902, + 0.00007689579341317365, + 0.00007736970958083831, + 0.00007843196101949026, + 0.00007898940057636888, + 0.0000795086251874063, + 0.00007993748065476191, + 0.00008000828276877761, + 0.00008004185326953748, + 0.00008018392344497607, + 0.0000803963149717514, + 0.0000804585292353823, + 0.0000804720243204578, + 0.00008054201594896332, + 0.00008075200576368876, + 0.00008080592907801419, + 0.0000811265326953748, + 0.00008114411911357341, + 0.00008121950598802395, + 0.00008135791412742382, + 0.00008165932904148783, + 0.00008186257121439281, + 0.00008221428571428571, + 0.00008229257421289355, + 0.00008232091754122938, + 0.00008232919340329836, + 0.00008243745606060607, + 0.00008273944976076555, + 0.00008290637031484259, + 0.0000829488474025974, + 0.00008305361812778602, + 0.0000830936871257485, + 0.00008318165817091454, + 0.00008323229346092504, + 0.00008338557780979827, + 0.0000835751552238806, + 0.00008360263112391931, + 0.00008438872362555721, + 0.00008480622978723404, + 0.0000848238645066274, + 0.00008495250954478708, + 0.00008502449279538904, + 0.00008513051939058171, + 0.00008533426129943503, + 0.00008582617596566523, + 0.00008582906389301635, + 0.00008592566987179488, + 0.00008616907035928144, + 0.0000862462365269461, + 0.00008650909585121602, + 0.00008851563922155689, + 0.0000889068603896104, + 0.00008941313193403299, + 0.0000894375988700565 + ] + }, + "completed": true, + "percentSlower": 84.62 + }, + { + "name": "obj", + "ops": 17311, + "margin": 0.6, + "options": { + "delay": 0.005, + "initCount": 1, + "minTime": 0.05, + "maxTime": 5, + "minSamples": 5 + }, + "samples": 85, + "promise": true, + "details": { + "min": 0.00005589308370535714, + "max": 0.00006599035208098987, + "mean": 0.00005776653152777129, + "median": 0.000057438973003374575, + "standardDeviation": 0.0000016170787282694694, + "marginOfError": 3.437777562752602e-7, + "relativeMarginOfError": 0.5951158000718614, + "standardErrorOfMean": 1.7539681442615316e-7, + "sampleVariance": 2.6149436134216042e-12, + "sampleResults": [ + 0.00005589308370535714, + 0.000055923575892857144, + 0.00005592791731843576, + 0.00005611031620111732, + 
0.00005615302346368715, + 0.000056256111731843576, + 0.000056266712849162016, + 0.000056274237723214286, + 0.00005633941899441341, + 0.000056344213169642855, + 0.000056347307086614175, + 0.00005641687626546681, + 0.000056452138357705285, + 0.00005646863892013498, + 0.000056488960937499994, + 0.000056507857142857145, + 0.000056584775028121486, + 0.000056634034870641165, + 0.000056758037120359955, + 0.000056766039370078735, + 0.00005678472440944882, + 0.000056785659167604044, + 0.000056791463442069747, + 0.000056791497187851516, + 0.00005684813948256468, + 0.00005693674353205849, + 0.00005693990502793296, + 0.00005702166815642458, + 0.00005703376602924635, + 0.00005704974578177728, + 0.00005707559842519685, + 0.00005707628683914511, + 0.00005710713160854893, + 0.000057117591676040494, + 0.0000571776366704162, + 0.000057179823397075365, + 0.00005726050393700788, + 0.00005726404274465692, + 0.000057280159730033745, + 0.00005732342633928571, + 0.00005734832960893855, + 0.00005739825586592179, + 0.000057438973003374575, + 0.000057496350456621005, + 0.000057506095890410956, + 0.00005751210348706412, + 0.00005752209832402235, + 0.000057529072625698327, + 0.00005759033858267717, + 0.00005759104836895388, + 0.000057611492688413946, + 0.000057639962053571425, + 0.0000576807120359955, + 0.000057696478065241844, + 0.00005770783914510686, + 0.00005777350055865922, + 0.000057786456692913384, + 0.0000578248203125, + 0.00005786656355455568, + 0.000057876658482142856, + 0.00005790582793296089, + 0.00005817434420697413, + 0.00005818454218222722, + 0.00005825720433789954, + 0.000058297696089385474, + 0.000058414106145251395, + 0.00005842866929133858, + 0.000058655880446927374, + 0.00005872889876265466, + 0.00005874819347581553, + 0.00005887123734533183, + 0.00005892923172103487, + 0.00005896419441340782, + 0.00005902182564679415, + 0.000059149572067039104, + 0.00005934727374301676, + 0.00005941950506186727, + 0.000059622240223463686, + 0.000059698642004773274, + 0.00006059778994413408, + 0.00006075867181926279, + 0.00006084922159730034, + 0.00006127136782902137, + 0.00006371275195530727, + 0.00006599035208098987 + ] + }, + "completed": true, + "percentSlower": 78.55 + }, + { + "name": "arr", + "ops": 80712, + "margin": 0.68, + "options": { + "delay": 0.005, + "initCount": 1, + "minTime": 0.05, + "maxTime": 5, + "minSamples": 5 + }, + "samples": 85, + "promise": true, + "details": { + "min": 0.000011858622632575757, + "max": 0.000013705367660984848, + "mean": 0.000012389732753812623, + "median": 0.000012287901041666666, + "standardDeviation": 3.959461486703419e-7, + "marginOfError": 8.417492371653915e-8, + "relativeMarginOfError": 0.6793925695502712, + "standardErrorOfMean": 4.2946389651295486e-8, + "sampleVariance": 1.5677335264687652e-13, + "sampleResults": [ + 0.000011858622632575757, + 0.000011860888020833333, + 0.00001186896425189394, + 0.000011887816051136364, + 0.000011913577414772726, + 0.000011929929214015153, + 0.000011943966145833332, + 0.000011944056107954545, + 0.000011970370028409091, + 0.00001197090625, + 0.000011985623579545454, + 0.000011992682528409092, + 0.000012006678977272726, + 0.000012013747395833333, + 0.00001203840790719697, + 0.00001204254237689394, + 0.000012044332859848485, + 0.000012057760416666668, + 0.000012065281486742426, + 0.000012066254261363636, + 0.000012071526988636364, + 0.000012076806818181818, + 0.00001208674502840909, + 0.000012090615767045454, + 0.000012091645657926388, + 0.000012098692234848485, + 0.000012099003077651515, + 0.000012105143702651516, + 
0.000012107508522727273, + 0.000012108986268939394, + 0.000012133881865530303, + 0.000012167275568181818, + 0.000012188720712051961, + 0.000012190344933712122, + 0.000012195850142045455, + 0.000012197638494318183, + 0.000012204208806818182, + 0.000012224388037928519, + 0.000012224616240530303, + 0.000012235240056818181, + 0.000012253570312499999, + 0.000012279734611742424, + 0.000012287901041666666, + 0.000012296046164772728, + 0.000012322409801136362, + 0.00001234451112689394, + 0.00001236041737689394, + 0.000012381963541666667, + 0.000012423882102272727, + 0.000012433329308712121, + 0.000012448884232954545, + 0.000012452242897727274, + 0.000012495593986742423, + 0.000012501668560606061, + 0.000012513556581439395, + 0.000012514464962121213, + 0.00001256769981060606, + 0.000012587941761363636, + 0.000012596262310606061, + 0.000012598890388257575, + 0.000012639550206661804, + 0.000012652286458333333, + 0.000012652606770833334, + 0.000012654231534090909, + 0.00001265561671401515, + 0.000012705125328868692, + 0.000012709302556818183, + 0.00001272277959280303, + 0.00001272655800189394, + 0.000012763128859713105, + 0.000012780962121212122, + 0.000012802757575757576, + 0.000012806080729166667, + 0.000012806569128787879, + 0.00001281809659090909, + 0.0000128334453125, + 0.00001283447940340909, + 0.0000128418802020688, + 0.000012932291556410873, + 0.000012976824100378788, + 0.000012983733428030303, + 0.000013109263967803029, + 0.000013405590681003583, + 0.000013586537878787878, + 0.000013705367660984848 + ] + }, + "completed": true, + "percentSlower": 0 + }, + { + "name": "set", + "ops": 16847, + "margin": 1.94, + "options": { + "delay": 0.005, + "initCount": 1, + "minTime": 0.05, + "maxTime": 5, + "minSamples": 5 + }, + "samples": 78, + "promise": true, + "details": { + "min": 0.000050886196138211386, + "max": 0.00006873873163265307, + "mean": 0.00005935926529388902, + "median": 0.0000615916637755102, + "standardDeviation": 0.000005193393503907441, + "marginOfError": 0.0000011525505979750247, + "relativeMarginOfError": 1.9416523979343772, + "standardErrorOfMean": 5.880360193750126e-7, + "sampleVariance": 2.6971336086428002e-11, + "sampleResults": [ + 0.000050886196138211386, + 0.0000509471698685541, + 0.000050978451971688576, + 0.0000517820306122449, + 0.00005211355284552845, + 0.000052252162244897964, + 0.000052254794715447155, + 0.00005231770408163265, + 0.00005256923983739838, + 0.00005259830894308943, + 0.00005267006326530612, + 0.00005284759150657229, + 0.000052991025510204085, + 0.00005299677142857143, + 0.00005348157448979592, + 0.000053637285714285715, + 0.000053777775510204086, + 0.00005405282551020408, + 0.00005419058469387755, + 0.00005443484183673469, + 0.00005446711734693878, + 0.000054547145918367346, + 0.000054657097959183674, + 0.00005476208265306122, + 0.00005487187142857143, + 0.00005500928265306123, + 0.0000550447081632653, + 0.00005606326224489796, + 0.00005639379387755102, + 0.000056477297959183674, + 0.00005653203367346939, + 0.00005679514285714286, + 0.000057151315306122454, + 0.00005743637346938775, + 0.000058363009183673465, + 0.00005867430102040816, + 0.00005892348571428572, + 0.00006139411632653062, + 0.0000615379193877551, + 0.00006164540816326531, + 0.00006168543265306122, + 0.00006187692653061224, + 0.00006195303979591837, + 0.00006221790447154472, + 0.0000623579756097561, + 0.00006259268469387755, + 0.00006264848469387755, + 0.0000626569387755102, + 0.00006270273979591837, + 0.0000627145581632653, + 0.00006296146916076846, + 0.00006315933571428571, + 
0.00006328053469387754, + 0.00006328582959183674, + 0.00006333463617886179, + 0.00006341867138523762, + 0.0000635722275510204, + 0.00006357350510204081, + 0.00006367220204081632, + 0.0000636918612244898, + 0.0000637768081632653, + 0.00006379471836734694, + 0.00006397991203235592, + 0.00006435207653061225, + 0.00006444884081632652, + 0.00006463790853658536, + 0.00006486812142857144, + 0.00006516360161779576, + 0.00006516925204081633, + 0.00006520715918367347, + 0.00006554987653061224, + 0.00006571110102040817, + 0.0000660327887755102, + 0.00006606507448979592, + 0.00006640331199186991, + 0.00006700257330637006, + 0.00006723716260162602, + 0.00006873873163265307 + ] + }, + "completed": true, + "percentSlower": 79.13 + } + ], + "fastest": { + "name": "arr", + "index": 2 + }, + "slowest": { + "name": "map", + "index": 0 + } +} \ No newline at end of file diff --git a/benches/results/system.json b/benches/results/system.json new file mode 100644 index 000000000..312b2e10f --- /dev/null +++ b/benches/results/system.json @@ -0,0 +1,39 @@ +{ + "cpu": { + "manufacturer": "AMD", + "brand": "Ryzen 7 2700X Eight-Core Processor", + "vendor": "AMD", + "family": "23", + "model": "8", + "stepping": "2", + "revision": "", + "voltage": "", + "speed": 3.7, + "speedMin": 2.2, + "speedMax": 3.7, + "governor": "ondemand", + "cores": 16, + "physicalCores": 8, + "processors": 1, + "socket": "", + "flags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq monitor ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb hw_pstate sme ssbd sev ibpb vmmcall sev_es fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt sha_ni xsaveopt xsavec xgetbv1 xsaves clzero irperf xsaveerptr arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif overflow_recov succor smca", + "virtualization": true, + "cache": { + "l1d": 262144, + "l1i": 524288, + "l2": 4194304, + "l3": 16777216 + } + }, + "osInfo": { + "platform": "linux", + "distro": "Matrix ML 1", + "release": "unknown", + "kernel": "5.10.81", + "arch": "x64" + }, + "system": { + "model": "System Product Name", + "manufacturer": "System manufacturer" + } +} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 37fe8ac1c..ae8c338e4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4,6 +4,48 @@ "lockfileVersion": 1, "requires": true, "dependencies": { + "@arrows/array": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@arrows/array/-/array-1.4.1.tgz", + "integrity": "sha512-MGYS8xi3c4tTy1ivhrVntFvufoNzje0PchjEz6G/SsWRgUKxL4tKwS6iPdO8vsaJYldagAeWMd5KRD0aX3Q39g==", + "dev": true, + "requires": { + "@arrows/composition": "^1.2.2" + } + }, + "@arrows/composition": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@arrows/composition/-/composition-1.2.2.tgz", + "integrity": "sha512-9fh1yHwrx32lundiB3SlZ/VwuStPB4QakPsSLrGJFH6rCXvdrd060ivAZ7/2vlqPnEjBkPRRXOcG1YOu19p2GQ==", + "dev": true + }, + "@arrows/dispatch": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@arrows/dispatch/-/dispatch-1.0.3.tgz", + "integrity": 
"sha512-v/HwvrFonitYZM2PmBlAlCqVqxrkIIoiEuy5bQgn0BdfvlL0ooSBzcPzTMrtzY8eYktPyYcHg8fLbSgyybXEqw==", + "dev": true, + "requires": { + "@arrows/composition": "^1.2.2" + } + }, + "@arrows/error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@arrows/error/-/error-1.0.2.tgz", + "integrity": "sha512-yvkiv1ay4Z3+Z6oQsUkedsQm5aFdyPpkBUQs8vejazU/RmANABx6bMMcBPPHI4aW43VPQmXFfBzr/4FExwWTEA==", + "dev": true + }, + "@arrows/multimethod": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@arrows/multimethod/-/multimethod-1.4.1.tgz", + "integrity": "sha512-AZnAay0dgPnCJxn3We5uKiB88VL+1ZIF2SjZohLj6vqY2UyvB/sKdDnFP+LZNVsTC5lcnGPmLlRRkAh4sXkXsQ==", + "dev": true, + "requires": { + "@arrows/array": "^1.4.1", + "@arrows/composition": "^1.2.2", + "@arrows/error": "^1.0.2", + "fast-deep-equal": "^3.1.3" + } + }, "@babel/code-frame": { "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.15.8.tgz", @@ -1593,10 +1635,11 @@ } }, "@matrixai/db": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@matrixai/db/-/db-1.1.2.tgz", - "integrity": "sha512-wkVEEAJZaWS5Kbg6T/LcI6lS8AdWqszp8L1Dxmk7vwr1ihIkoIVQNSQ+FQryaFpor2eqh/wJaOKjDUpcHo+hEg==", + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@matrixai/db/-/db-1.1.5.tgz", + "integrity": "sha512-zPpP/J1A3TLRaQKaGa5smualzjW4Rin4K48cpU5/9ThyXfpVBBp/mrkbDfjL/O5z6YTcuGVf2+yLck8tF8kVUw==", "requires": { + "@matrixai/async-init": "^1.6.0", "@matrixai/logger": "^2.0.1", "@matrixai/workers": "^1.2.3", "abstract-leveldown": "^7.0.0", @@ -1717,6 +1760,12 @@ "integrity": "sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA==", "dev": true }, + "@types/abstract-leveldown": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@types/abstract-leveldown/-/abstract-leveldown-7.2.0.tgz", + "integrity": "sha512-q5veSX6zjUy/DlDhR4Y4cU0k2Ar+DT2LUraP00T19WLmTO6Se1djepCCaqU6nQrwcJ5Hyo/CWqxTzrrFg8eqbQ==", + "dev": true + }, "@types/babel__core": { "version": "7.1.16", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.16.tgz", @@ -1828,6 +1877,23 @@ "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=", "dev": true }, + "@types/level-errors": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/level-errors/-/level-errors-3.0.0.tgz", + "integrity": "sha512-/lMtoq/Cf/2DVOm6zE6ORyOM+3ZVm/BvzEZVxUhf6bgh8ZHglXlBqxbxSlJeVp8FCbD3IVvk/VbsaNmDjrQvqQ==", + "dev": true + }, + "@types/levelup": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@types/levelup/-/levelup-5.1.0.tgz", + "integrity": "sha512-XagSD3VJFWjZWeQnG4mL53PFRPmb6E7dKXdJxexVw85ki82BWOp68N+R6M1t9OYsbmlY+2S0GZcZtVH3gGbeDw==", + "dev": true, + "requires": { + "@types/abstract-leveldown": "*", + "@types/level-errors": "*", + "@types/node": "*" + } + }, "@types/nexpect": { "version": "0.4.31", "resolved": "https://registry.npmjs.org/@types/nexpect/-/nexpect-0.4.31.tgz", @@ -2515,6 +2581,58 @@ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, + "benchmark": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz", + "integrity": "sha1-CfPeMckWQl1JjMLuVloOvzwqVik=", + "dev": true, + "requires": { + "lodash": "^4.17.4", + "platform": "^1.3.3" + } + }, + "benny": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/benny/-/benny-3.7.1.tgz", + 
"integrity": "sha512-USzYxODdVfOS7JuQq/L0naxB788dWCiUgUTxvN+WLPt/JfcDURNNj8kN/N+uK6PDvuR67/9/55cVKGPleFQINA==", + "dev": true, + "requires": { + "@arrows/composition": "^1.0.0", + "@arrows/dispatch": "^1.0.2", + "@arrows/multimethod": "^1.1.6", + "benchmark": "^2.1.4", + "common-tags": "^1.8.0", + "fs-extra": "^10.0.0", + "json2csv": "^5.0.6", + "kleur": "^4.1.4", + "log-update": "^4.0.0" + }, + "dependencies": { + "fs-extra": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", + "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", + "dev": true, + "requires": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + } + }, + "kleur": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.4.tgz", + "integrity": "sha512-8QADVssbrFjivHWQU7KkMgptGTl6WAcSdlbBPY4uNF+mWr6DGcKrvY2w4FQJoXch7+fKMjj0dRrL75vk3k23OA==", + "dev": true + }, + "universalify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", + "dev": true + } + } + }, "big-integer": { "version": "1.6.50", "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.50.tgz", @@ -2837,6 +2955,15 @@ "resolved": "https://registry.npmjs.org/clean-git-ref/-/clean-git-ref-2.0.1.tgz", "integrity": "sha512-bLSptAy2P0s6hU4PzuIMKmMJJSE6gLXGH1cntDu7bWJUksvuM+7ReOK61mozULErYvP6a15rnYl0zFDef+pyPw==" }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "requires": { + "restore-cursor": "^3.1.0" + } + }, "cliui": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", @@ -2905,6 +3032,12 @@ "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==" }, + "common-tags": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", + "integrity": "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", + "dev": true + }, "component-emitter": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", @@ -3338,14 +3471,15 @@ } }, "encryptedfs": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/encryptedfs/-/encryptedfs-3.2.1.tgz", - "integrity": "sha512-Rt8aFd32ZMXYkfZC9/H1wN+44zClCXXY/JKo4JtIVqPLyScsWv/hzmd5+ijh3vpCOs+YAWYIZSuhIiS4pGvkqA==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/encryptedfs/-/encryptedfs-3.4.3.tgz", + "integrity": "sha512-OQqsGw3eNrMdFpiYRX17nMq1NKKebaA0KXyM9IRY9aPOxpaeOwcdvWnOcvvO9wCxZFNxgy/A2SOZdxnhCe3paA==", "requires": { - "@matrixai/db": "^1.0.1", - "@matrixai/logger": "^2.0.1", - "@matrixai/workers": "^1.2.3", - "async-mutex": "^0.3.1", + "@matrixai/async-init": "^1.6.0", + "@matrixai/db": "^1.1.5", + "@matrixai/logger": "^2.1.0", + "@matrixai/workers": "^1.2.5", + "async-mutex": "^0.3.2", "errno": "^0.1.7", "lexicographic-integer": "^1.1.0", "node-forge": "^0.10.0", @@ -3354,16 +3488,6 @@ "threads": "^1.6.5", "ts-custom-error": "^3.2.0", "util-callbackify": "^1.0.0" - }, - 
"dependencies": { - "async-mutex": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/async-mutex/-/async-mutex-0.3.2.tgz", - "integrity": "sha512-HuTK7E7MT7jZEh1P9GtRW9+aTWiDWWi9InbZ5hjxrnRa39KS4BW04+xLBhYNS2aXhHUIKZSw3gj4Pn1pj+qGAA==", - "requires": { - "tslib": "^2.3.1" - } - } } }, "end-of-stream": { @@ -6389,6 +6513,25 @@ "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", "dev": true }, + "json2csv": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/json2csv/-/json2csv-5.0.6.tgz", + "integrity": "sha512-0/4Lv6IenJV0qj2oBdgPIAmFiKKnh8qh7bmLFJ+/ZZHLjSeiL3fKKGX3UryvKPbxFbhV+JcYo9KUC19GJ/Z/4A==", + "dev": true, + "requires": { + "commander": "^6.1.0", + "jsonparse": "^1.3.1", + "lodash.get": "^4.4.2" + }, + "dependencies": { + "commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "dev": true + } + } + }, "json5": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", @@ -6422,6 +6565,12 @@ } } }, + "jsonparse": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", + "integrity": "sha1-P02uSpH6wxX3EGL4UhzCOfE2YoA=", + "dev": true + }, "kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -6597,6 +6746,12 @@ "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=", "dev": true }, + "lodash.get": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", + "dev": true + }, "lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", @@ -6609,6 +6764,31 @@ "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", "dev": true }, + "log-update": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", + "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", + "dev": true, + "requires": { + "ansi-escapes": "^4.3.0", + "cli-cursor": "^3.1.0", + "slice-ansi": "^4.0.0", + "wrap-ansi": "^6.2.0" + }, + "dependencies": { + "wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + } + } + }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -7538,6 +7718,12 @@ } } }, + "platform": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz", + "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==", + "dev": true + }, "posix-character-classes": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", @@ -7913,6 +8099,16 @@ "bitset": "^5.0.3" } }, + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "requires": { + "onetime": "^5.1.0", + 
"signal-exit": "^3.0.2" + } + }, "ret": { "version": "0.1.15", "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", @@ -8815,6 +9011,12 @@ "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", "dev": true }, + "systeminformation": { + "version": "5.11.0", + "resolved": "https://registry.npmjs.org/systeminformation/-/systeminformation-5.11.0.tgz", + "integrity": "sha512-mI/5nFK7NUe9Qbmy65WoB5TlCWKAhP4kG0w6uR2mZM8Mpdi8b45b3hTIK3W5+kQYZnYFWeS9/O5nn5rdcSvqfA==", + "dev": true + }, "table": { "version": "6.7.2", "resolved": "https://registry.npmjs.org/table/-/table-6.7.2.tgz", diff --git a/package.json b/package.json index f370abaa0..cf62e9042 100644 --- a/package.json +++ b/package.json @@ -66,13 +66,14 @@ "lint": "eslint '{src,tests}/**/*.{js,ts}'", "lintfix": "eslint '{src,tests}/**/*.{js,ts}' --fix", "docs": "rm -r ./docs || true; typedoc --gitRevision master --tsconfig ./tsconfig.build.json --out ./docs src && touch ./docs/.nojekyll", + "bench": "rm -r ./benches/results || true; ts-node --require tsconfig-paths/register --compiler typescript-cached-transpile --transpile-only ./benches", "proto-generate": "scripts/proto-generate.sh", "polykey": "ts-node --require tsconfig-paths/register --compiler typescript-cached-transpile --transpile-only src/bin/polykey.ts" }, "dependencies": { "@grpc/grpc-js": "1.3.7", "@matrixai/async-init": "^1.6.0", - "@matrixai/db": "^1.1.2", + "@matrixai/db": "^1.1.5", "@matrixai/id": "^3.3.2", "@matrixai/logger": "^2.1.0", "@matrixai/workers": "^1.2.5", @@ -84,7 +85,7 @@ "commander": "^8.3.0", "cross-fetch": "^3.0.6", "cross-spawn": "^7.0.3", - "encryptedfs": "^3.2.0", + "encryptedfs": "^3.4.3", "fast-fuzzy": "^1.10.8", "fd-lock": "^1.2.0", "google-protobuf": "^3.14.0", diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts index 87c605c77..f3e4e347d 100644 --- a/src/PolykeyAgent.ts +++ b/src/PolykeyAgent.ts @@ -310,13 +310,25 @@ class PolykeyAgent { sigchain, logger: logger.getChild(Discovery.name), })); + notificationsManager = + notificationsManager ?? + (await NotificationsManager.createNotificationsManager({ + acl, + db, + nodeConnectionManager, + nodeManager, + keyManager, + logger: logger.getChild(NotificationsManager.name), + fresh, + })); vaultManager = vaultManager ?? (await VaultManager.createVaultManager({ - vaultsKey: keyManager.vaultKey, vaultsPath, keyManager, nodeConnectionManager, + nodeManager, + notificationsManager, gestaltGraph, acl, db, @@ -324,17 +336,6 @@ class PolykeyAgent { logger: logger.getChild(VaultManager.name), fresh, })); - notificationsManager = - notificationsManager ?? - (await NotificationsManager.createNotificationsManager({ - acl, - db, - nodeConnectionManager, - nodeManager, - keyManager, - logger: logger.getChild(NotificationsManager.name), - fresh, - })); sessionManager = sessionManager ?? 
(await SessionManager.createSessionManager({ @@ -561,6 +562,8 @@ class PolykeyAgent { sigchain: this.sigchain, nodeConnectionManager: this.nodeConnectionManager, notificationsManager: this.notificationsManager, + acl: this.acl, + gestaltGraph: this.gestaltGraph, }); const clientService = createClientService({ pkAgent: this, diff --git a/src/agent/GRPCClientAgent.ts b/src/agent/GRPCClientAgent.ts index 1289354c7..4190f66b6 100644 --- a/src/agent/GRPCClientAgent.ts +++ b/src/agent/GRPCClientAgent.ts @@ -99,18 +99,25 @@ class GRPCClientAgent extends GRPCClient { @ready(new agentErrors.ErrorAgentClientDestroyed()) public vaultsGitPackGet( ...args - ): ClientDuplexStream { - return this.client.vaultsGitPackGet(...args); + ): AsyncGeneratorDuplexStreamClient< + vaultsPB.PackChunk, + vaultsPB.PackChunk, + ClientDuplexStream + > { + return grpcUtils.promisifyDuplexStreamCall( + this.client, + this.client.vaultsGitPackGet, + )(...args); } @ready(new agentErrors.ErrorAgentClientDestroyed()) public vaultsScan( ...args ): AsyncGeneratorReadableStreamClient< - vaultsPB.Vault, - ClientReadableStream + vaultsPB.List, + ClientReadableStream > { - return grpcUtils.promisifyReadableStreamCall( + return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsScan, )(...args); @@ -156,14 +163,6 @@ class GRPCClientAgent extends GRPCClient { )(...args); } - @ready(new agentErrors.ErrorAgentClientDestroyed()) - public vaultsPermissionsCheck(...args) { - return grpcUtils.promisifyUnaryCall( - this.client, - this.client.vaultsPermissionsCheck, - )(...args); - } - @ready(new agentErrors.ErrorAgentClientDestroyed()) public nodesCrossSignClaim( ...args diff --git a/src/agent/service/index.ts b/src/agent/service/index.ts index f20a9fdb8..75bb6ee58 100644 --- a/src/agent/service/index.ts +++ b/src/agent/service/index.ts @@ -7,6 +7,8 @@ import type { } from '../../nodes'; import type { NotificationsManager } from '../../notifications'; import type { Sigchain } from '../../sigchain'; +import type { ACL } from '../../acl'; +import type { GestaltGraph } from '../../gestalts'; import type { IAgentServiceServer } from '../../proto/js/polykey/v1/agent_service_grpc_pb'; import echo from './echo'; import nodesChainDataGet from './nodesChainDataGet'; @@ -17,7 +19,6 @@ import nodesHolePunchMessageSend from './nodesHolePunchMessageSend'; import notificationsSend from './notificationsSend'; import vaultsGitInfoGet from './vaultsGitInfoGet'; import vaultsGitPackGet from './vaultsGitPackGet'; -import vaultsPermissionsCheck from './vaultsPermissionsCheck'; import vaultsScan from './vaultsScan'; import { AgentServiceService } from '../../proto/js/polykey/v1/agent_service_grpc_pb'; @@ -29,7 +30,9 @@ function createService(container: { nodeGraph: NodeGraph; notificationsManager: NotificationsManager; sigchain: Sigchain; -}) { + acl: ACL; + gestaltGraph: GestaltGraph; +}): IAgentServiceServer { const container_ = { ...container, }; @@ -43,7 +46,6 @@ function createService(container: { notificationsSend: notificationsSend(container_), vaultsGitInfoGet: vaultsGitInfoGet(container_), vaultsGitPackGet: vaultsGitPackGet(container_), - vaultsPermissionsCheck: vaultsPermissionsCheck(container_), vaultsScan: vaultsScan(container_), }; return service; diff --git a/src/agent/service/vaultsGitInfoGet.ts b/src/agent/service/vaultsGitInfoGet.ts index 8ee13efed..f5a9d8f41 100644 --- a/src/agent/service/vaultsGitInfoGet.ts +++ b/src/agent/service/vaultsGitInfoGet.ts @@ -1,36 +1,70 @@ import type { VaultName } from '../../vaults/types'; 
import type { VaultManager } from '../../vaults'; +import type { ACL } from '../../acl'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as validationUtils from '../../validation/utils'; -function vaultsGitInfoGet({ vaultManager }: { vaultManager: VaultManager }) { +function vaultsGitInfoGet({ + vaultManager, + acl, +}: { + vaultManager: VaultManager; + acl: ACL; +}) { return async ( - call: grpc.ServerWritableStream, + call: grpc.ServerWritableStream, ): Promise => { const genWritable = grpcUtils.generatorWritable(call); const request = call.request; - const vaultNameOrId = request.getNameOrId(); - let vaultId, vaultName; + const vaultMessage = request.getVault(); + if (vaultMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + const nodeMessage = request.getNode(); + if (nodeMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + let vaultName; + const vaultNameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); + vaultName = vaultNameOrId; + if (!vaultId) { + try { + vaultId = validationUtils.parseVaultId(vaultNameOrId); + vaultName = (await vaultManager.getVaultMeta(vaultId)).name; + } catch (err) { + await genWritable.throw(new vaultsErrors.ErrorVaultsVaultUndefined()); + return; + } + } + const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); + const actionType = request.getAction(); + const perms = await acl.getNodePerm(nodeId); + if (!perms) { + await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied()); + return; + } + const vaultPerms = perms.vaults[idUtils.toString(vaultId)]; try { - vaultId = vaultsUtils.makeVaultId(idUtils.fromString(vaultNameOrId)); - await vaultManager.openVault(vaultId); - vaultName = await vaultManager.getVaultName(vaultId); + if (vaultPerms[actionType] !== null) { + await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied()); + return; + } } catch (err) { - if (err instanceof vaultsErrors.ErrorVaultUndefined) { - vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); - await vaultManager.openVault(vaultId); - vaultName = vaultNameOrId; - } else { - throw err; + if (err instanceof TypeError) { + await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied()); + return; } } - // TODO: Check the permissions here const meta = new grpc.Metadata(); meta.set('vaultName', vaultName); - meta.set('vaultId', vaultsUtils.makeVaultIdPretty(vaultId)); + meta.set('vaultId', vaultsUtils.encodeVaultId(vaultId)); genWritable.stream.sendMetadata(meta); const response = new vaultsPB.PackChunk(); const responseGen = vaultManager.handleInfoRequest(vaultId); diff --git a/src/agent/service/vaultsGitPackGet.ts b/src/agent/service/vaultsGitPackGet.ts index 4fad805a1..72e158fae 100644 --- a/src/agent/service/vaultsGitPackGet.ts +++ b/src/agent/service/vaultsGitPackGet.ts @@ -1,8 +1,7 @@ import type * as grpc from '@grpc/grpc-js'; import type { VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; -import { promisify } from '../../utils'; -import { errors as grpcErrors } from '../../grpc'; +import { errors as grpcErrors, utils as grpcUtils } from '../../grpc'; import { utils as vaultsUtils, errors as 
vaultsErrors } from '../../vaults'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; @@ -10,60 +9,46 @@ function vaultsGitPackGet({ vaultManager }: { vaultManager: VaultManager }) { return async ( call: grpc.ServerDuplexStream, ) => { - const write = promisify(call.write).bind(call); - const clientBodyBuffers: Buffer[] = []; - call.on('data', (d) => { - clientBodyBuffers.push(d.getChunk_asU8()); - }); - - call.on('end', async () => { - const body = Buffer.concat(clientBodyBuffers); - const meta = call.metadata; - const vaultNameOrId = meta.get('vaultNameOrId').pop()!.toString(); - if (vaultNameOrId == null) { - throw new grpcErrors.ErrorGRPC('vault-name not in metadata.'); - } - let vaultId; - try { - vaultId = vaultsUtils.makeVaultId(vaultNameOrId); - await vaultManager.openVault(vaultId); - } catch (err) { - if ( - err instanceof vaultsErrors.ErrorVaultUndefined || - err instanceof SyntaxError - ) { - vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); - await vaultManager.openVault(vaultId); - } else { - throw err; - } - } - // TODO: Check the permissions here - const response = new vaultsPB.PackChunk(); - const [sideBand, progressStream] = await vaultManager.handlePackRequest( - vaultId, - Buffer.from(body), - ); - response.setChunk(Buffer.from('0008NAK\n')); - await write(response); - const responseBuffers: Buffer[] = []; - await new Promise((resolve, reject) => { - sideBand.on('data', async (data: Buffer) => { - responseBuffers.push(data); - }); - sideBand.on('end', async () => { - response.setChunk(Buffer.concat(responseBuffers)); - await write(response); - resolve(); - }); - sideBand.on('error', (err) => { - reject(err); - }); - progressStream.write(Buffer.from('0014progress is at 50%\n')); - progressStream.end(); + const genDuplex = grpcUtils.generatorDuplex(call); + const clientBodyBuffers: Uint8Array[] = []; + const clientRequest = (await genDuplex.read()).value; + clientBodyBuffers.push(clientRequest!.getChunk_asU8()); + const body = Buffer.concat(clientBodyBuffers); + const meta = call.metadata; + const vaultNameOrId = meta.get('vaultNameOrId').pop()!.toString(); + if (vaultNameOrId == null) { + throw new grpcErrors.ErrorGRPC('vault-name not in metadata'); + } + let vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); + vaultId = vaultId ?? 
vaultsUtils.decodeVaultId(vaultNameOrId); + if (vaultId == null) { + await genDuplex.throw(new vaultsErrors.ErrorVaultsVaultUndefined()); + return; + } + const response = new vaultsPB.PackChunk(); + const [sideBand, progressStream] = await vaultManager.handlePackRequest( + vaultId, + Buffer.from(body), + ); + response.setChunk(Buffer.from('0008NAK\n')); + await genDuplex.write(response); + const responseBuffers: Uint8Array[] = []; + await new Promise((resolve, reject) => { + sideBand.on('data', async (data: Uint8Array) => { + responseBuffers.push(data); + }); + sideBand.on('end', async () => { + response.setChunk(Buffer.concat(responseBuffers)); + await genDuplex.write(response); + resolve(); + }); + sideBand.on('error', (err) => { + reject(err); }); - call.end(); + progressStream.write(Buffer.from('0014progress is at 50%\n')); + progressStream.end(); }); + await genDuplex.next(null); }; } diff --git a/src/agent/service/vaultsPermissionsCheck.ts b/src/agent/service/vaultsPermissionsCheck.ts deleted file mode 100644 index 47c61d77e..000000000 --- a/src/agent/service/vaultsPermissionsCheck.ts +++ /dev/null @@ -1,35 +0,0 @@ -import type * as grpc from '@grpc/grpc-js'; -import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import { utils as grpcUtils } from '../../grpc'; - -function vaultsPermissionsCheck(_) { - return async ( - call: grpc.ServerUnaryCall< - vaultsPB.NodePermission, - vaultsPB.NodePermissionAllowed - >, - callback: grpc.sendUnaryData, - ): Promise => { - // Const response = new vaultsPB.NodePermissionAllowed(); - try { - // Const nodeId = makeNodeId(call.request.getNodeId()); - // const vaultId = makeVaultId(call.request.getVaultId()); - throw Error('Not Implemented'); - // FIXME: getVaultPermissions not implemented. 
- // const result = await vaultManager.getVaultPermissions(vaultId, nodeId); - // let result; - // if (result[nodeId] === undefined) { - // response.setPermission(false); - // } else if (result[nodeId]['pull'] === undefined) { - // response.setPermission(false); - // } else { - // response.setPermission(true); - // } - // callback(null, response); - } catch (e) { - callback(grpcUtils.fromError(e)); - } - }; -} - -export default vaultsPermissionsCheck; diff --git a/src/agent/service/vaultsScan.ts b/src/agent/service/vaultsScan.ts index f7e618664..ee473872d 100644 --- a/src/agent/service/vaultsScan.ts +++ b/src/agent/service/vaultsScan.ts @@ -1,29 +1,54 @@ import type * as grpc from '@grpc/grpc-js'; -import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import type { GestaltGraph } from '../../gestalts'; +import type { VaultManager } from '../../vaults'; import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import * as validationUtils from '@/validation/utils'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; import { utils as grpcUtils } from '../../grpc'; -function vaultsScan(_) { +function vaultsScan({ + vaultManager, + gestaltGraph, +}: { + vaultManager: VaultManager; + gestaltGraph: GestaltGraph; +}) { return async ( - call: grpc.ServerWritableStream, + call: grpc.ServerWritableStream, ): Promise => { const genWritable = grpcUtils.generatorWritable(call); - // Const response = new vaultsPB.Vault(); - // const id = makeNodeId(call.request.getNodeId()); + const response = new vaultsPB.List(); + const nodeId = validationUtils.parseNodeId(call.request.getNodeId()); + const perms = await gestaltGraph.getGestaltActionsByNode(nodeId); + if (!perms) { + await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied()); + return; + } + try { + if (perms['scan'] !== null) { + await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied()); + return; + } + } catch (err) { + if (err instanceof TypeError) { + await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied()); + return; + } + throw err; + } try { - throw Error('Not implemented'); - // FIXME: handleVaultNamesRequest doesn't exist. - // const listResponse = vaultManager.handleVaultNamesRequest(id); - // let listResponse; - // for await (const vault of listResponse) { - // if (vault !== null) { - // response.setNameOrId(vault); - // await genWritable.next(response); - // } else { - // await genWritable.next(null); - // } - // } - // await genWritable.next(null); + const listResponse = await vaultManager.listVaults(); + for (const vault of listResponse) { + if (vault !== null) { + response.setVaultName(vault[0]); + response.setVaultId(vaultsUtils.encodeVaultId(vault[1])); + await genWritable.next(response); + } else { + await genWritable.next(null); + } + } + await genWritable.next(null); } catch (err) { await genWritable.throw(err); } diff --git a/src/bin/errors.ts b/src/bin/errors.ts index 05cf5eff2..e3383163c 100644 --- a/src/bin/errors.ts +++ b/src/bin/errors.ts @@ -51,22 +51,6 @@ class ErrorCLIFileRead extends ErrorCLI { exitCode = sysexits.NOINPUT; } -class ErrorSecretPathFormat extends ErrorCLI { - description = "Secret name needs to be of format: ':'"; - exitCode = 64; -} - -class ErrorVaultNameAmbiguous extends ErrorCLI { - description = - 'There is more than 1 Vault with this name. 
Please specify a Vault ID'; - exitCode = 1; -} - -class ErrorSecretsUndefined extends ErrorCLI { - description = 'At least one secret must be specified as an argument'; - exitCode = 64; -} - class ErrorNodeFindFailed extends ErrorCLI { description = 'Failed to find the node in the DHT'; exitCode = 1; @@ -88,9 +72,6 @@ export { ErrorCLIPasswordFileRead, ErrorCLIRecoveryCodeFileRead, ErrorCLIFileRead, - ErrorSecretPathFormat, - ErrorVaultNameAmbiguous, - ErrorSecretsUndefined, ErrorNodeFindFailed, ErrorNodePingFailed, }; diff --git a/src/bin/secrets/CommandSecrets.ts b/src/bin/secrets/CommandSecrets.ts index 904592b93..0cf1c7661 100644 --- a/src/bin/secrets/CommandSecrets.ts +++ b/src/bin/secrets/CommandSecrets.ts @@ -8,6 +8,7 @@ import CommandList from './CommandList'; import CommandMkdir from './CommandMkdir'; import CommandRename from './CommandRename'; import CommandUpdate from './CommandUpdate'; +import commandStat from './CommandStat'; import CommandPolykey from '../CommandPolykey'; class CommandSecrets extends CommandPolykey { @@ -25,6 +26,7 @@ class CommandSecrets extends CommandPolykey { this.addCommand(new CommandMkdir(...args)); this.addCommand(new CommandRename(...args)); this.addCommand(new CommandUpdate(...args)); + this.addCommand(new commandStat(...args)); } } diff --git a/src/bin/secrets/CommandStat.ts b/src/bin/secrets/CommandStat.ts new file mode 100644 index 000000000..77d94cf6c --- /dev/null +++ b/src/bin/secrets/CommandStat.ts @@ -0,0 +1,87 @@ +import type { Stat } from 'encryptedfs'; +import type PolykeyClient from '../../PolykeyClient'; +import * as binProcessors from '../utils/processors'; +import * as parsers from '../utils/parsers'; +import * as binUtils from '../utils'; + +import CommandPolykey from '../CommandPolykey'; +import * as binOptions from '../utils/options'; + +class CommandStat extends CommandPolykey { + constructor(...args: ConstructorParameters) { + super(...args); + this.name('stat'); + this.description('Vaults Stat'); + this.argument( + '', + 'Path to where the secret, specified as :', + parsers.parseSecretPath, + ); + this.addOption(binOptions.nodeId); + this.addOption(binOptions.clientHost); + this.addOption(binOptions.clientPort); + this.action(async (secretPath, options) => { + const { default: PolykeyClient } = await import('../../PolykeyClient'); + const vaultsPB = await import( + '../../proto/js/polykey/v1/vaults/vaults_pb' + ); + const secretsPB = await import( + '../../proto/js/polykey/v1/secrets/secrets_pb' + ); + const clientOptions = await binProcessors.processClientOptions( + options.nodePath, + options.nodeId, + options.clientHost, + options.clientPort, + this.fs, + this.logger.getChild(binProcessors.processClientOptions.name), + ); + const meta = await binProcessors.processAuthentication( + options.passwordFile, + this.fs, + ); + let pkClient: PolykeyClient; + this.exitHandlers.handlers.push(async () => { + if (pkClient != null) await pkClient.stop(); + }); + try { + pkClient = await PolykeyClient.createPolykeyClient({ + nodePath: options.nodePath, + nodeId: clientOptions.nodeId, + host: clientOptions.clientHost, + port: clientOptions.clientPort, + logger: this.logger.getChild(PolykeyClient.name), + }); + + const secretMessage = new secretsPB.Secret(); + const vaultMessage = new vaultsPB.Vault(); + vaultMessage.setNameOrId(secretPath[0]); + secretMessage.setVault(vaultMessage); + secretMessage.setSecretName(secretPath[1]); + // Get the secret's stat. 
+ const response = await binUtils.retryAuthentication( + (auth) => pkClient.grpcClient.vaultsSecretsStat(secretMessage, auth), + meta, + ); + + const stat: Stat = JSON.parse(response.getJson()); + const data: string[] = [`Stats for "${secretPath[1]}"`]; + for (const key in stat) { + data.push(`${key}: ${stat[key]}`); + } + + // Print out the result. + process.stdout.write( + binUtils.outputFormatter({ + type: options.format === 'json' ? 'json' : 'list', + data, + }), + ); + } finally { + if (pkClient! != null) await pkClient.stop(); + } + }); + } +} + +export default CommandStat; diff --git a/src/bin/utils/options.ts b/src/bin/utils/options.ts index e7832b67c..b9238919b 100644 --- a/src/bin/utils/options.ts +++ b/src/bin/utils/options.ts @@ -155,6 +155,11 @@ const workers = new commander.Option( .argParser(binParsers.parseCoreCount) .default(undefined); +const pullVault = new commander.Option( + '-pv, --pull-vault ', + 'Name or Id of the vault to pull from', +); + export { nodePath, format, @@ -176,4 +181,5 @@ export { seedNodes, network, workers, + pullVault, }; diff --git a/src/bin/vaults/CommandClone.ts b/src/bin/vaults/CommandClone.ts index 55853a796..a91fb9680 100644 --- a/src/bin/vaults/CommandClone.ts +++ b/src/bin/vaults/CommandClone.ts @@ -11,7 +11,7 @@ class CommandClone extends CommandPolykey { super(...args); this.name('clone'); this.description('Clone a Vault from Another Node'); - this.argument('', 'Id of the vault to be cloned'); + this.argument('', 'Name or Id of the vault to be cloned'); this.argument( '', 'Id of the node to clone the vault from', diff --git a/src/bin/vaults/CommandPermissions.ts b/src/bin/vaults/CommandPermissions.ts index 96ab965f6..d9c667ac0 100644 --- a/src/bin/vaults/CommandPermissions.ts +++ b/src/bin/vaults/CommandPermissions.ts @@ -1,115 +1,83 @@ -// Import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -// import PolykeyClient from '../../PolykeyClient'; -// import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -// import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; -// import * as utils from '../../utils'; -// import * as binUtils from '../utils'; -// import * as grpcErrors from '../../grpc/errors'; +import type PolykeyClient from '../../PolykeyClient'; +import * as binProcessors from '../utils/processors'; +import * as binUtils from '../utils'; -// import CommandPolykey from '../CommandPolykey'; -// import * as binOptions from '../utils/options'; +import CommandPolykey from '../CommandPolykey'; +import * as binOptions from '../utils/options'; -// class CommandPermissions extends CommandPolykey { -// constructor(...args: ConstructorParameters) { -// super(...args); -// this.name('permissions'); -// this.description('Vaults Permissions'); -// this.arguments(' [nodeId]'); -// this.addOption(binOptions.nodeId); -// this.addOption(binOptions.clientHost); -// this.addOption(binOptions.clientPort); -// this.action(async (vaultName, nodeId, options) => { +class CommandPermissions extends CommandPolykey { + constructor(...args: ConstructorParameters) { + super(...args); + this.name('permissions'); + this.alias('perms'); + this.description('Sets the permissions of a vault for Node Ids'); + this.argument('', 'Name or ID of the vault'); + // This.argument('[nodeId]', '(optional) nodeId to check permission on'); + this.addOption(binOptions.nodeId); + this.addOption(binOptions.clientHost); + this.addOption(binOptions.clientPort); + this.action(async (vaultName, options) => { + const { default: PolykeyClient } = 
await import('../../PolykeyClient'); + const vaultsPB = await import( + '../../proto/js/polykey/v1/vaults/vaults_pb' + ); + const clientOptions = await binProcessors.processClientOptions( + options.nodePath, + options.nodeId, + options.clientHost, + options.clientPort, + this.fs, + this.logger.getChild(binProcessors.processClientOptions.name), + ); + const meta = await binProcessors.processAuthentication( + options.passwordFile, + this.fs, + ); + let pkClient: PolykeyClient; + this.exitHandlers.handlers.push(async () => { + if (pkClient != null) await pkClient.stop(); + }); -// }); -// } -// } + try { + pkClient = await PolykeyClient.createPolykeyClient({ + nodePath: options.nodePath, + nodeId: clientOptions.nodeId, + host: clientOptions.clientHost, + port: clientOptions.clientPort, + logger: this.logger.getChild(PolykeyClient.name), + }); -// export default CommandPermissions; + const vaultMessage = new vaultsPB.Vault(); + vaultMessage.setNameOrId(vaultName); -// OLD COMMAND -// const permissions = binUtils.createCommand('permissions', { -// description: { -// description: 'Sets the permissions of a vault for Node Ids', -// args: { -// vaultName: 'Name or ID of the vault', -// nodeId: '(optional) nodeId to check permission on', -// }, -// }, -// aliases: ['perms'], -// nodePath: true, -// verbose: true, -// format: true, -// }); -// permissions.arguments(' [nodeId]'); -// permissions.action(async (vaultName, nodeId, options) => { -// const clientConfig = {}; -// clientConfig['logger'] = new Logger('CLI Logger', LogLevel.WARN, [ -// new StreamHandler(), -// ]); -// if (options.verbose) { -// clientConfig['logger'].setLevel(LogLevel.DEBUG); -// } -// clientConfig['nodePath'] = options.nodePath -// ? options.nodePath -// : utils.getDefaultNodePath(); + await pkClient.start(); -// const client = await PolykeyClient.createPolykeyClient(clientConfig); + const data: Array = []; + await binUtils.retryAuthentication(async (auth) => { + const permissionStream = pkClient.grpcClient.vaultsPermissionsGet( + vaultMessage, + auth, + ); + for await (const permission of permissionStream) { + const nodeId = permission.getNode()?.getNodeId(); + const actions = permission.getActionsList().join(', '); + data.push(`${nodeId}: ${actions}`); + } + return true; + }, meta); -// const vaultMessage = new vaultsPB.Vault(); -// vaultMessage.setNameOrId(vaultName); + if (data.length === 0) data.push('No permissions were found'); + process.stdout.write( + binUtils.outputFormatter({ + type: options.format === 'json' ? 'json' : 'list', + data: data, + }), + ); + } finally { + if (pkClient! != null) await pkClient.stop(); + } + }); + } +} -// const nodeMessage = new nodesPB.Node(); -// nodeMessage.setNodeId(nodeId); - -// const getVaultMessage = new vaultsPB.PermGet(); -// getVaultMessage.setVault(vaultMessage); -// getVaultMessage.setNode(nodeMessage); - -// try { -// await client.start({}); -// const grpcClient = client.grpcClient; - -// const data: Array = []; -// const response = await binUtils.streamCallCARL( -// client, -// setupStreamCall( -// client, -// client.grpcClient.vaultPermissions, -// ), -// )(getVaultMessage); - -// for await (const perm of response.data) { -// data.push(`${perm.getNodeId()}:\t\t${perm.getAction()}`); -// } -// await response.refresh; - -// process.stdout.write( -// binUtils.outputFormatter({ -// type: options.format === 'json' ? 
'json' : 'list', -// data: data, -// }), -// ); -// } catch (err) { -// if (err instanceof grpcErrors.ErrorGRPCClientTimeout) { -// process.stderr.write(`${err.message}\n`); -// } else if (err instanceof grpcErrors.ErrorGRPCServerNotStarted) { -// process.stderr.write(`${err.message}\n`); -// } else { -// process.stderr.write( -// binUtils.outputFormatter({ -// type: 'error', -// description: err.description, -// message: err.message, -// }), -// ); -// } -// throw err; -// } finally { -// await client.stop(); -// options.nodePath = undefined; -// options.verbose = undefined; -// options.format = undefined; -// } -// }); - -// export default permissions; +export default CommandPermissions; diff --git a/src/bin/vaults/CommandPull.ts b/src/bin/vaults/CommandPull.ts index b7aacd3a8..928db7734 100644 --- a/src/bin/vaults/CommandPull.ts +++ b/src/bin/vaults/CommandPull.ts @@ -11,61 +11,73 @@ class CommandPull extends CommandPolykey { super(...args); this.name('pull'); this.description('Pull a Vault from Another Node'); + this.argument('', 'Name of the vault to be pulled into'); this.argument( - '', - 'Id of the node to pull the vault from', + '[targetNodeId]', + '(Optional) target node to pull from', binParsers.parseNodeId, ); - this.argument('', 'Name of the vault to be pulled'); + this.addOption(binOptions.pullVault); this.addOption(binOptions.nodeId); this.addOption(binOptions.clientHost); this.addOption(binOptions.clientPort); - this.action(async (nodeId: NodeId, vaultName, options) => { - const { default: PolykeyClient } = await import('../../PolykeyClient'); - const nodesUtils = await import('../../nodes/utils'); - const vaultsPB = await import( - '../../proto/js/polykey/v1/vaults/vaults_pb' - ); - const nodesPB = await import('../../proto/js/polykey/v1/nodes/nodes_pb'); - const clientOptions = await binProcessors.processClientOptions( - options.nodePath, - options.nodeId, - options.clientHost, - options.clientPort, - this.fs, - this.logger.getChild(binProcessors.processClientOptions.name), - ); - const meta = await binProcessors.processAuthentication( - options.passwordFile, - this.fs, - ); - let pkClient: PolykeyClient; - this.exitHandlers.handlers.push(async () => { - if (pkClient != null) await pkClient.stop(); - }); - try { - pkClient = await PolykeyClient.createPolykeyClient({ - nodePath: options.nodePath, - nodeId: clientOptions.nodeId, - host: clientOptions.clientHost, - port: clientOptions.clientPort, - logger: this.logger.getChild(PolykeyClient.name), - }); - const vaultMessage = new vaultsPB.Vault(); - const nodeMessage = new nodesPB.Node(); - const vaultPullMessage = new vaultsPB.Pull(); - vaultPullMessage.setVault(vaultMessage); - vaultPullMessage.setNode(nodeMessage); - nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId)); - vaultMessage.setNameOrId(vaultName); - await binUtils.retryAuthentication( - (auth) => pkClient.grpcClient.vaultsPull(vaultPullMessage, auth), - meta, + this.action( + async (vaultNameOrId, targetNodeId: NodeId | undefined, options) => { + const { default: PolykeyClient } = await import('../../PolykeyClient'); + const nodesUtils = await import('../../nodes/utils'); + const vaultsPB = await import( + '../../proto/js/polykey/v1/vaults/vaults_pb' + ); + const nodesPB = await import( + '../../proto/js/polykey/v1/nodes/nodes_pb' + ); + const clientOptions = await binProcessors.processClientOptions( + options.nodePath, + options.nodeId, + options.clientHost, + options.clientPort, + this.fs, + this.logger.getChild(binProcessors.processClientOptions.name), ); 
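
Each rewritten command in this patch repeats the same bootstrap shape: process client options, read authentication metadata, create a PolykeyClient, register an exit handler, run the call, and always stop the client. A minimal sketch of that shape follows; the `withClient` helper name is hypothetical and not part of this patch, but the processor and client calls mirror the ones used by the commands above.

import type { Metadata } from '@grpc/grpc-js';
import type Logger from '@matrixai/logger';
import type PolykeyClient from '../../PolykeyClient';
import * as binProcessors from '../utils/processors';

// Hypothetical helper distilling the per-command bootstrap pattern
async function withClient<T>(
  options: {
    nodePath?: string;
    nodeId?: string;
    clientHost?: string;
    clientPort?: number;
    passwordFile?: string;
  },
  fs: any, // The command's `this.fs` file system
  logger: Logger, // The command's `this.logger`
  f: (pkClient: PolykeyClient, meta: Metadata) => Promise<T>,
): Promise<T> {
  const { default: PolykeyClient } = await import('../../PolykeyClient');
  const clientOptions = await binProcessors.processClientOptions(
    options.nodePath,
    options.nodeId,
    options.clientHost,
    options.clientPort,
    fs,
    logger.getChild(binProcessors.processClientOptions.name),
  );
  const meta = await binProcessors.processAuthentication(
    options.passwordFile,
    fs,
  );
  let pkClient: PolykeyClient | undefined;
  try {
    pkClient = await PolykeyClient.createPolykeyClient({
      nodePath: options.nodePath,
      nodeId: clientOptions.nodeId,
      host: clientOptions.clientHost,
      port: clientOptions.clientPort,
      logger: logger.getChild(PolykeyClient.name),
    });
    return await f(pkClient, meta);
  } finally {
    // Mirrors the `finally { if (pkClient != null) await pkClient.stop(); }`
    // used by every command in this patch
    if (pkClient != null) await pkClient.stop();
  }
}
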
- } finally { - if (pkClient! != null) await pkClient.stop(); - } - }); + const meta = await binProcessors.processAuthentication( + options.passwordFile, + this.fs, + ); + let pkClient: PolykeyClient; + this.exitHandlers.handlers.push(async () => { + if (pkClient != null) await pkClient.stop(); + }); + try { + pkClient = await PolykeyClient.createPolykeyClient({ + nodePath: options.nodePath, + nodeId: clientOptions.nodeId, + host: clientOptions.clientHost, + port: clientOptions.clientPort, + logger: this.logger.getChild(PolykeyClient.name), + }); + const vaultMessage = new vaultsPB.Vault(); + const pullVaultMessage = new vaultsPB.Vault(); + const nodeMessage = new nodesPB.Node(); + const vaultPullMessage = new vaultsPB.Pull(); + vaultPullMessage.setVault(vaultMessage); + vaultMessage.setNameOrId(vaultNameOrId); + if (targetNodeId != null) { + nodeMessage.setNodeId(nodesUtils.encodeNodeId(targetNodeId)); + vaultPullMessage.setNode(nodeMessage); + } + if (options.pullVault) { + vaultPullMessage.setPullVault(pullVaultMessage); + pullVaultMessage.setNameOrId(options.pullVault); + } + await binUtils.retryAuthentication( + (auth) => pkClient.grpcClient.vaultsPull(vaultPullMessage, auth), + meta, + ); + } finally { + if (pkClient! != null) await pkClient.stop(); + } + }, + ); } } diff --git a/src/bin/vaults/CommandScan.ts b/src/bin/vaults/CommandScan.ts index e8887d35b..f7fadf348 100644 --- a/src/bin/vaults/CommandScan.ts +++ b/src/bin/vaults/CommandScan.ts @@ -1,109 +1,71 @@ -// Import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -// import PolykeyClient from '../../PolykeyClient'; -// import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; -// import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -// import * as utils from '../../utils'; -// import * as binUtils from '../utils'; -// import * as grpcErrors from '../../grpc/errors'; +import type { Metadata } from '@grpc/grpc-js'; -// import CommandPolykey from '../CommandPolykey'; -// import * as binOptions from '../utils/options'; +import CommandPolykey from '../CommandPolykey'; +import * as binUtils from '../utils'; +import * as binOptions from '../utils/options'; +import * as binProcessors from '../utils/processors'; -// class CommandScan extends CommandPolykey { -// constructor(...args: ConstructorParameters) { -// super(...args); -// this.name('scan'); -// this.description('Vaults Scan'); -// this.requiredOption( -// '-ni, --node-id ', -// '(required) Id of the node to be scanned', -// ); -// this.addOption(binOptions.nodeId); -// this.addOption(binOptions.clientHost); -// this.addOption(binOptions.clientPort); -// this.action(async (options) => { +class CommandScan extends CommandPolykey { + constructor(...args: ConstructorParameters) { + super(...args); + this.name('scan'); + this.description('Scans a node to reveal their vaults'); + this.argument('', 'Id of the node to scan'); + this.addOption(binOptions.nodeId); + this.addOption(binOptions.clientHost); + this.addOption(binOptions.clientPort); + this.action(async (nodeId, options) => { + const { default: PolykeyClient } = await import('../../PolykeyClient'); + const nodesPB = await import('../../proto/js/polykey/v1/nodes/nodes_pb'); -// }); -// } -// } + const clientOptions = await binProcessors.processClientOptions( + options.nodePath, + options.nodeId, + options.clientHost, + options.clientPort, + this.fs, + this.logger.getChild(binProcessors.processClientOptions.name), + ); + const client = await PolykeyClient.createPolykeyClient({ + 
nodeId: clientOptions.nodeId, + host: clientOptions.clientHost, + port: clientOptions.clientPort, + logger: this.logger.getChild(PolykeyClient.name), + }); -// export default CommandScan; + const meta = await binProcessors.processAuthentication( + options.passwordFile, + this.fs, + ); -// OLD COMMAND -// const commandScanVaults = binUtils.createCommand('scan', { -// description: 'Lists the vaults of another node', -// aliases: ['fetch'], -// nodePath: true, -// verbose: true, -// format: true, -// }); -// commandScanVaults.requiredOption( -// '-ni, --node-id ', -// '(required) Id of the node to be scanned', -// ); -// commandScanVaults.action(async (options) => { -// const clientConfig = {}; -// clientConfig['logger'] = new Logger('CLI Logger', LogLevel.WARN, [ -// new StreamHandler(), -// ]); -// if (options.verbose) { -// clientConfig['logger'].setLevel(LogLevel.DEBUG); -// } -// if (options.nodePath) { -// clientConfig['nodePath'] = options.nodePath; -// } -// clientConfig['nodePath'] = options.nodePath -// ? options.nodePath -// : utils.getDefaultNodePath(); + try { + const grpcClient = client.grpcClient; + const nodeMessage = new nodesPB.Node(); + nodeMessage.setNodeId(nodeId); -// const client = await PolykeyClient.createPolykeyClient(clientConfig); -// const nodeMessage = new nodesPB.Node(); -// nodeMessage.setNodeId(options.nodeId); + const data = await binUtils.retryAuthentication( + async (meta: Metadata) => { + const data: Array = []; + const stream = grpcClient.vaultsScan(nodeMessage, meta); + for await (const vault of stream) { + data.push(`${vault.getVaultName()}\t\t${vault.getVaultId()}`); + } + return data; + }, + meta, + ); -// try { -// await client.start({}); -// const grpcClient = client.grpcClient; + process.stdout.write( + binUtils.outputFormatter({ + type: options.format === 'json' ? 'json' : 'list', + data: data, + }), + ); + } finally { + await client.stop(); + } + }); + } +} -// const data: Array = []; -// const response = await binUtils.streamCallCARL( -// client, -// setupStreamCall( -// client, -// client.grpcClient.vaultsScan, -// ), -// )(nodeMessage); - -// for await (const vault of response.data) { -// data.push(`${vault.getVaultName()}`); -// } -// await response.refresh; -// process.stdout.write( -// binUtils.outputFormatter({ -// type: options.format === 'json' ? 
'json' : 'list', -// data: data, -// }), -// ); -// } catch (err) { -// if (err instanceof grpcErrors.ErrorGRPCClientTimeout) { -// process.stderr.write(`${err.message}\n`); -// } else if (err instanceof grpcErrors.ErrorGRPCServerNotStarted) { -// process.stderr.write(`${err.message}\n`); -// } else { -// process.stderr.write( -// binUtils.outputFormatter({ -// type: 'error', -// description: err.description, -// message: err.message, -// }), -// ); -// throw err; -// } -// } finally { -// await client.stop(); -// options.nodePath = undefined; -// options.verbose = undefined; -// options.format = undefined; -// } -// }); - -// export default commandScanVaults; +export default CommandScan; diff --git a/src/bin/vaults/CommandShare.ts b/src/bin/vaults/CommandShare.ts index e71e8de20..943d76468 100644 --- a/src/bin/vaults/CommandShare.ts +++ b/src/bin/vaults/CommandShare.ts @@ -59,11 +59,7 @@ class CommandShare extends CommandPolykey { vaultMessage.setNameOrId(vaultName); nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId)); await binUtils.retryAuthentication( - (auth) => - pkClient.grpcClient.vaultsPermissionsSet( - setVaultPermsMessage, - auth, - ), + (auth) => pkClient.grpcClient.vaultsShare(setVaultPermsMessage, auth), meta, ); } finally { diff --git a/src/bin/vaults/CommandStat.ts b/src/bin/vaults/CommandStat.ts deleted file mode 100644 index 1b44f3c5d..000000000 --- a/src/bin/vaults/CommandStat.ts +++ /dev/null @@ -1,98 +0,0 @@ -// Import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -// import PolykeyClient from '../../PolykeyClient'; -// import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -// import * as utils from '../../utils'; -// import * as binUtils from '../utils'; - -// import * as grpcErrors from '../../grpc/errors'; - -// import CommandPolykey from '../CommandPolykey'; -// import * as binOptions from '../utils/options'; - -// class CommandStat extends CommandPolykey { -// constructor(...args: ConstructorParameters) { -// super(...args); -// this.name('stat'); -// this.description('Vaults Stat'); -// this.requiredOption( -// '-vn, --vault-name ', -// '(required) Name of the vault to get stats from', -// ); -// this.addOption(binOptions.nodeId); -// this.addOption(binOptions.clientHost); -// this.addOption(binOptions.clientPort); -// this.action(async (options) => { - -// }); -// } -// } - -// export default CommandStat; - -// OLD COMMAND -// const stat = binUtils.createCommand('stat', { -// description: 'Gets stats of an existing vault', -// nodePath: true, -// verbose: true, -// format: true, -// }); -// stat.requiredOption( -// '-vn, --vault-name ', -// '(required) Name of the vault to get stats from', -// ); -// stat.action(async (options) => { -// const clientConfig = {}; -// clientConfig['logger'] = new Logger('CLI Logger', LogLevel.WARN, [ -// new StreamHandler(), -// ]); -// if (options.verbose) { -// clientConfig['logger'].setLevel(LogLevel.DEBUG); -// } -// clientConfig['nodePath'] = options.nodePath -// ? 
options.nodePath -// : utils.getDefaultNodePath(); - -// const client = await PolykeyClient.createPolykeyClient(clientConfig); -// const vaultMessage = new vaultsPB.Vault(); -// vaultMessage.setNameOrId(options.vaultName); - -// try { -// await client.start({}); -// const grpcClient = client.grpcClient; -// const responseMessage = await binUtils.unaryCallCARL( -// client, -// attemptUnaryCall(client, grpcClient.vaultsSecretsStat), -// )(vaultMessage); - -// process.stdout.write( -// binUtils.outputFormatter({ -// type: options.format === 'json' ? 'json' : 'list', -// data: [ -// `${vaultMessage.getNameOrId()}:\t\t${responseMessage.getStats()}`, -// ], -// }), -// ); -// } catch (err) { -// if (err instanceof grpcErrors.ErrorGRPCClientTimeout) { -// process.stderr.write(`${err.message}\n`); -// } else if (err instanceof grpcErrors.ErrorGRPCServerNotStarted) { -// process.stderr.write(`${err.message}\n`); -// } else { -// process.stderr.write( -// binUtils.outputFormatter({ -// type: 'error', -// description: err.description, -// message: err.message, -// }), -// ); -// throw err; -// } -// } finally { -// await client.stop(); -// options.nodePath = undefined; -// options.verbose = undefined; -// options.format = undefined; -// } -// }); - -// export default stat; diff --git a/src/bin/vaults/CommandUnshare.ts b/src/bin/vaults/CommandUnshare.ts index 097af67a5..23aa99d3d 100644 --- a/src/bin/vaults/CommandUnshare.ts +++ b/src/bin/vaults/CommandUnshare.ts @@ -60,10 +60,7 @@ class CommandUnshare extends CommandPolykey { nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId)); await binUtils.retryAuthentication( (auth) => - pkClient.grpcClient.vaultsPermissionsUnset( - unsetVaultPermsMessage, - auth, - ), + pkClient.grpcClient.vaultsUnshare(unsetVaultPermsMessage, auth), meta, ); } finally { diff --git a/src/bin/vaults/CommandVaults.ts b/src/bin/vaults/CommandVaults.ts index e7ba102f5..2c9a5d47c 100644 --- a/src/bin/vaults/CommandVaults.ts +++ b/src/bin/vaults/CommandVaults.ts @@ -3,12 +3,11 @@ import CommandCreate from './CommandCreate'; import CommandDelete from './CommandDelete'; import CommandList from './CommandList'; import CommandLog from './CommandLog'; -// Import CommandPermissions from './CommandPermissions'; +import CommandScan from './CommandScan'; +import CommandPermissions from './CommandPermissions'; import CommandPull from './CommandPull'; import CommandRename from './CommandRename'; -// Import CommandScan from './CommandScan'; import CommandShare from './CommandShare'; -// Import CommandStat from './CommandStat'; import CommandUnshare from './CommandUnshare'; import CommandVersion from './CommandVersion'; import CommandPolykey from '../CommandPolykey'; @@ -23,14 +22,13 @@ class CommandVaults extends CommandPolykey { this.addCommand(new CommandDelete(...args)); this.addCommand(new CommandList(...args)); this.addCommand(new CommandLog(...args)); - // This.addCommand(new CommandPermissions(...args)); + this.addCommand(new CommandPermissions(...args)); this.addCommand(new CommandPull(...args)); this.addCommand(new CommandRename(...args)); - // This.addCommand(new CommandScan(...args)); this.addCommand(new CommandShare(...args)); - // This.addCommand(new CommandStat(...args)); this.addCommand(new CommandUnshare(...args)); this.addCommand(new CommandVersion(...args)); + this.addCommand(new CommandScan(...args)); } } diff --git a/src/bootstrap/utils.ts b/src/bootstrap/utils.ts index feba00657..73deb176e 100644 --- a/src/bootstrap/utils.ts +++ b/src/bootstrap/utils.ts @@ -160,17 +160,6 
@@ async function bootstrapState({ sigchain, logger: logger.getChild(NodeManager.name), }); - const vaultManager = await VaultManager.createVaultManager({ - acl, - db, - gestaltGraph, - keyManager, - nodeConnectionManager, - vaultsKey: keyManager.vaultKey, - vaultsPath, - logger: logger.getChild(VaultManager.name), - fresh, - }); const notificationsManager = await NotificationsManager.createNotificationsManager({ acl, @@ -181,6 +170,18 @@ async function bootstrapState({ logger: logger.getChild(NotificationsManager.name), fresh, }); + const vaultManager = await VaultManager.createVaultManager({ + acl, + db, + gestaltGraph, + keyManager, + nodeConnectionManager, + vaultsPath, + nodeManager, + notificationsManager, + logger: logger.getChild(VaultManager.name), + fresh, + }); const sessionManager = await SessionManager.createSessionManager({ db, keyManager, diff --git a/src/client/GRPCClientClient.ts b/src/client/GRPCClientClient.ts index ae0de84f1..daa9ac536 100644 --- a/src/client/GRPCClientClient.ts +++ b/src/client/GRPCClientClient.ts @@ -186,31 +186,26 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsPermissionsSet(...args) { - return grpcUtils.promisifyUnaryCall( + public vaultsPermissionsGet(...args) { + return grpcUtils.promisifyReadableStreamCall( this.client, - this.client.vaultsPermissionsSet, + this.client.vaultsPermissionsGet, )(...args); } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsPermissionsUnset(...args) { + public vaultsShare(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.vaultsPermissionsUnset, + this.client.vaultsShare, )(...args); } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultPermissions( - ...args - ): AsyncGeneratorReadableStreamClient< - vaultsPB.Permission, - ClientReadableStream - > { - return grpcUtils.promisifyReadableStreamCall( + public vaultsUnshare(...args) { + return grpcUtils.promisifyUnaryCall( this.client, - this.client.vaultsPermissions, + this.client.vaultsUnshare, )(...args); } @@ -235,14 +230,6 @@ class GRPCClientClient extends GRPCClient { )(...args); } - @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsSecretsStat(...args) { - return grpcUtils.promisifyUnaryCall( - this.client, - this.client.vaultsSecretsStat, - )(...args); - } - @ready(new clientErrors.ErrorClientClientDestroyed()) public vaultsSecretsDelete(...args) { return grpcUtils.promisifyUnaryCall( @@ -267,6 +254,14 @@ class GRPCClientClient extends GRPCClient { )(...args); } + @ready(new clientErrors.ErrorClientClientDestroyed()) + public vaultsSecretsStat(...args) { + return grpcUtils.promisifyUnaryCall( + this.client, + this.client.vaultsSecretsStat, + )(...args); + } + @ready(new clientErrors.ErrorClientClientDestroyed()) public vaultsSecretsRename(...args) { return grpcUtils.promisifyUnaryCall( diff --git a/src/client/service/index.ts b/src/client/service/index.ts index 70864a2f3..75b022b3e 100644 --- a/src/client/service/index.ts +++ b/src/client/service/index.ts @@ -66,12 +66,12 @@ import vaultsCreate from './vaultsCreate'; import vaultsDelete from './vaultsDelete'; import vaultsList from './vaultsList'; import vaultsLog from './vaultsLog'; -import vaultsPermissions from './vaultsPermissions'; -import vaultsPermissionsSet from './vaultsPermissionsSet'; -import vaultsPermissionsUnset from './vaultsPermissionsUnset'; +import vaultsPermissionsGet from './vaultsPermissionsGet'; import vaultsPull from './vaultsPull'; import 
vaultsRename from './vaultsRename';
 import vaultsScan from './vaultsScan';
+import vaultsShare from './vaultsShare';
+import vaultsUnshare from './vaultsUnshare';
 import vaultsVersion from './vaultsVersion';
 import vaultsSecretsDelete from './vaultsSecretsDelete';
 import vaultsSecretsEdit from './vaultsSecretsEdit';
@@ -169,12 +169,12 @@ function createService({
     vaultsDelete: vaultsDelete(container),
     vaultsList: vaultsList(container),
     vaultsLog: vaultsLog(container),
-    vaultsPermissions: vaultsPermissions(container),
-    vaultsPermissionsSet: vaultsPermissionsSet(container),
-    vaultsPermissionsUnset: vaultsPermissionsUnset(container),
+    vaultsPermissionsGet: vaultsPermissionsGet(container),
     vaultsPull: vaultsPull(container),
     vaultsRename: vaultsRename(container),
     vaultsScan: vaultsScan(container),
+    vaultsShare: vaultsShare(container),
+    vaultsUnshare: vaultsUnshare(container),
     vaultsVersion: vaultsVersion(container),
     vaultsSecretsDelete: vaultsSecretsDelete(container),
     vaultsSecretsEdit: vaultsSecretsEdit(container),
diff --git a/src/client/service/vaultsClone.ts b/src/client/service/vaultsClone.ts
index d83cf771f..c7338b9a2 100644
--- a/src/client/service/vaultsClone.ts
+++ b/src/client/service/vaultsClone.ts
@@ -1,10 +1,20 @@
 import type { Authenticate } from '../types';
+import type { VaultManager } from '../../vaults';
 import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
 import * as grpc from '@grpc/grpc-js';
 import { utils as grpcUtils } from '../../grpc';
 import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
+import * as validationUtils from '../../validation/utils';
+import * as vaultsUtils from '../../vaults/utils';
+import * as vaultsErrors from '../../vaults/errors';
 
-function vaultsClone({ authenticate }: { authenticate: Authenticate }) {
+function vaultsClone({
+  authenticate,
+  vaultManager,
+}: {
+  authenticate: Authenticate;
+  vaultManager: VaultManager;
+}) {
   return async (
     call: grpc.ServerUnaryCall,
     callback: grpc.sendUnaryData,
@@ -25,13 +35,14 @@ function vaultsClone({ authenticate }: { authenticate: Authenticate }) {
         return;
       }
       // Vault id
-      // const vaultId = parseVaultInput(vaultMessage, vaultManager);
+      let vaultId;
+      const vaultNameOrId = vaultMessage.getNameOrId();
+      vaultId = await vaultManager.getVaultId(vaultNameOrId);
+      vaultId = vaultId ??
vaultsUtils.decodeVaultId(vaultNameOrId); + if (vaultId == null) throw new vaultsErrors.ErrorVaultsVaultUndefined(); // Node id - // const id = makeNodeId(nodeMessage.getNodeId()); - - throw Error('Not implemented'); - // FIXME, not fully implemented - // await vaultManager.cloneVault(vaultId, id); + const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); + await vaultManager.cloneVault(nodeId, vaultId); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsCreate.ts b/src/client/service/vaultsCreate.ts index bf1ddf33b..a199bd5b4 100644 --- a/src/client/service/vaultsCreate.ts +++ b/src/client/service/vaultsCreate.ts @@ -1,5 +1,5 @@ import type { Authenticate } from '../types'; -import type { Vault, VaultName } from '../../vaults/types'; +import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import type * as grpc from '@grpc/grpc-js'; import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; @@ -19,14 +19,14 @@ function vaultsCreate({ callback: grpc.sendUnaryData, ): Promise => { const response = new vaultsPB.Vault(); - let vault: Vault; + let vaultId: VaultId; try { const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - vault = await vaultManager.createVault( + vaultId = await vaultManager.createVault( call.request.getNameOrId() as VaultName, ); - response.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + response.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); callback(null, response); return; } catch (e) { diff --git a/src/client/service/vaultsDelete.ts b/src/client/service/vaultsDelete.ts index e8c80bb84..1fa8569c0 100644 --- a/src/client/service/vaultsDelete.ts +++ b/src/client/service/vaultsDelete.ts @@ -33,7 +33,7 @@ function vaultsDelete({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); await vaultManager.destroyVault(vaultId); response.setSuccess(true); callback(null, response); diff --git a/src/client/service/vaultsList.ts b/src/client/service/vaultsList.ts index b22723fa0..e0b919e50 100644 --- a/src/client/service/vaultsList.ts +++ b/src/client/service/vaultsList.ts @@ -23,12 +23,11 @@ function vaultsList({ try { const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaults = await vaultManager.listVaults(); for await (const [vaultName, vaultId] of vaults) { const vaultListMessage = new vaultsPB.List(); vaultListMessage.setVaultName(vaultName); - vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); + vaultListMessage.setVaultId(vaultsUtils.encodeVaultId(vaultId)); await genWritable.next(((_) => vaultListMessage)()); } await genWritable.next(null); diff --git a/src/client/service/vaultsLog.ts b/src/client/service/vaultsLog.ts index 3c06e172d..96b43e086 100644 --- a/src/client/service/vaultsLog.ts +++ b/src/client/service/vaultsLog.ts @@ -37,20 +37,20 @@ function vaultsLog({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); // 
Getting the log const depth = vaultsLogMessage.getLogDepth(); let commitId: string | undefined = vaultsLogMessage.getCommitId(); commitId = commitId ? commitId : undefined; - const log = await vault.log(depth, commitId); - + const log = await vaultManager.withVaults([vaultId], async (vault) => { + return await vault.log(commitId, depth); + }); const vaultsLogEntryMessage = new vaultsPB.LogEntry(); for (const entry of log) { - vaultsLogEntryMessage.setOid(entry.oid); - vaultsLogEntryMessage.setCommitter(entry.committer); - vaultsLogEntryMessage.setTimeStamp(entry.timeStamp); + vaultsLogEntryMessage.setOid(entry.commitId); + vaultsLogEntryMessage.setCommitter(entry.committer.name); + // FIXME: we can make this a google.protobuf.Timestamp field? + vaultsLogEntryMessage.setTimeStamp(entry.committer.timestamp.getTime()); vaultsLogEntryMessage.setMessage(entry.message); await genWritable.next(vaultsLogEntryMessage); } diff --git a/src/client/service/vaultsPermissions.ts b/src/client/service/vaultsPermissions.ts deleted file mode 100644 index 8fba60112..000000000 --- a/src/client/service/vaultsPermissions.ts +++ /dev/null @@ -1,53 +0,0 @@ -import type { Authenticate } from '../types'; -import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; - -function vaultsPermissions({ authenticate }: { authenticate: Authenticate }) { - return async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - // Const node = nodeMessage.getNodeId(); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - // let perms: Record; - throw Error('Not implemented'); - // FIXME - // if (isNodeId(node)) { - // Perms = await vaultManager.getVaultPermissions(id, node); - // } else { - // Perms = await vaultManager.getVaultPermissions(id); - // } - // const permissionMessage = new vaultsPB.Permission(); - // For (const nodeId in perms) { - // permissionMessage.setNodeId(nodeId); - // if (perms[nodeId]['pull'] !== undefined) { - // permissionMessage.setAction('pull'); - // } - // await genWritable.next(permissionMessage); - // } - await genWritable.next(null); - return; - } catch (e) { - await genWritable.throw(e); - return; - } - }; -} - -export default vaultsPermissions; diff --git a/src/client/service/vaultsPermissionsGet.ts b/src/client/service/vaultsPermissionsGet.ts new file mode 100644 index 000000000..916b80cc1 --- /dev/null +++ b/src/client/service/vaultsPermissionsGet.ts @@ -0,0 +1,63 @@ +import type { Authenticate } from '../types'; +import type { VaultManager } from '../../vaults'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import type * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { errors as vaultsErrors } from '../../vaults'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import * as permissionsPB from 
'../../proto/js/polykey/v1/permissions/permissions_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsPermissionsGet({ + authenticate, + vaultManager, +}: { + authenticate: Authenticate; + vaultManager: VaultManager; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const vaultMessage = call.request; + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Getting vaultId + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + + const permissionList = await vaultManager.getVaultPermission(vaultId); + const nodeActionsMessage = new permissionsPB.NodeActions(); + const nodeMessage = new nodesPB.Node(); + + // Constructing the message. + for (const nodeId in permissionList) { + nodeMessage.setNodeId(nodeId); + nodeActionsMessage.setNode(nodeMessage); + nodeActionsMessage.clearActionsList(); + for (const action in permissionList[nodeId]) { + nodeActionsMessage.addActions(action); + } + await genWritable.next(nodeActionsMessage); + } + + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsPermissionsGet; diff --git a/src/client/service/vaultsPull.ts b/src/client/service/vaultsPull.ts index a9dcf663a..81e79adc4 100644 --- a/src/client/service/vaultsPull.ts +++ b/src/client/service/vaultsPull.ts @@ -1,10 +1,27 @@ import type { Authenticate } from '../types'; +import type { VaultManager } from '../../vaults'; +import type { VaultId, VaultName } from '../../vaults/types'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { errors as vaultsErrors } from '../../vaults'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as validationUtils from '../../validation/utils'; -function vaultsPull({ authenticate }: { authenticate: Authenticate }) { +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsPull({ + authenticate, + vaultManager, +}: { + authenticate: Authenticate; + vaultManager: VaultManager; +}) { return async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, @@ -19,17 +36,34 @@ function vaultsPull({ authenticate }: { authenticate: Authenticate }) { callback({ code: grpc.status.NOT_FOUND }, null); return; } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + let nodeId; const nodeMessage = call.request.getNode(); if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; + nodeId = null; + } else { + nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); } - // Vault name - // const vaultId = await parseVaultInput(vaultMessage, vaultManager); - // Node id - // const id = makeNodeId(nodeMessage.getNodeId()); - - // Await vaultManager.pullVault(vaultId, id); + let pullVault; + const pullVaultMessage = call.request.getPullVault(); + if (pullVaultMessage == null) { + pullVault = null; + } else { + try { + pullVault = decodeVaultId(pullVaultMessage.getNameOrId()); + } catch (err) { + // Do nothing + } + if (!pullVault) pullVault = pullVaultMessage.getNameOrId(); + } + await vaultManager.pullVault({ + vaultId, + pullNodeId: nodeId, + pullVaultNameOrId: pullVault, + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsRename.ts b/src/client/service/vaultsRename.ts index 42e1aee97..b5c81a83b 100644 --- a/src/client/service/vaultsRename.ts +++ b/src/client/service/vaultsRename.ts @@ -36,10 +36,10 @@ function vaultsRename({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); const newName = call.request.getNewName() as VaultName; await vaultManager.renameVault(vaultId, newName); - response.setNameOrId(vaultsUtils.makeVaultIdPretty(vaultId)); + response.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); callback(null, response); return; } catch (e) { diff --git a/src/client/service/vaultsScan.ts b/src/client/service/vaultsScan.ts index 0845809ed..018bcda2b 100644 --- a/src/client/service/vaultsScan.ts +++ b/src/client/service/vaultsScan.ts @@ -1,9 +1,12 @@ import type { Authenticate } from '../types'; -import type { VaultManager } from '../../vaults'; +import type { NodeId } from '../../nodes/types'; import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import type * as grpc from '@grpc/grpc-js'; +import type { VaultManager } from '../../vaults'; import { utils as grpcUtils } from '../../grpc'; import { utils as vaultsUtils } from '../../vaults'; +import { validateSync, utils as validationUtils } from '../../validation'; +import { matchSync } from '../../utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsScan({ @@ -20,13 +23,28 @@ function vaultsScan({ try { const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaults = await vaultManager.listVaults(); - vaults.forEach(async (vaultId, vaultName) => { + const { + nodeId, + }: { + nodeId: NodeId; + } = validateSync( + (keyPath, value) => { + return 
matchSync(keyPath)( + [['nodeId'], () => validationUtils.parseNodeId(value)], + () => value, + ); + }, + { + nodeId: call.request.getNodeId(), + }, + ); + const list = await vaultManager.scanNodeVaults(nodeId); + for (const vault of list) { const vaultListMessage = new vaultsPB.List(); - vaultListMessage.setVaultName(vaultName); - vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); + vaultListMessage.setVaultName(vault[0]); + vaultListMessage.setVaultId(vaultsUtils.encodeVaultId(vault[1])); await genWritable.next(vaultListMessage); - }); + } await genWritable.next(null); return; } catch (e) { diff --git a/src/client/service/vaultsSecretsDelete.ts b/src/client/service/vaultsSecretsDelete.ts index 8c042e845..ec770a9de 100644 --- a/src/client/service/vaultsSecretsDelete.ts +++ b/src/client/service/vaultsSecretsDelete.ts @@ -29,7 +29,6 @@ function vaultsSecretsDelete({ const response = new utilsPB.StatusMessage(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); @@ -38,10 +37,11 @@ function vaultsSecretsDelete({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); const secretName = call.request.getSecretName(); - await vaultOps.deleteSecret(vault, secretName); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.deleteSecret(vault, secretName); + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsEdit.ts b/src/client/service/vaultsSecretsEdit.ts index 2142d5014..356c4143a 100644 --- a/src/client/service/vaultsSecretsEdit.ts +++ b/src/client/service/vaultsSecretsEdit.ts @@ -29,7 +29,6 @@ function vaultsSecretsEdit({ const response = new utilsPB.StatusMessage(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const secretMessage = call.request; if (secretMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); @@ -43,11 +42,12 @@ function vaultsSecretsEdit({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); const secretName = secretMessage.getSecretName(); const secretContent = Buffer.from(secretMessage.getSecretContent()); - await vaultOps.updateSecret(vault, secretName, secretContent); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.updateSecret(vault, secretName, secretContent); + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsGet.ts b/src/client/service/vaultsSecretsGet.ts index c3fd06cbc..7b5adadda 100644 --- a/src/client/service/vaultsSecretsGet.ts +++ b/src/client/service/vaultsSecretsGet.ts @@ -29,7 +29,6 @@ function vaultsSecretsGet({ const response = new secretsPB.Secret(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultMessage = call.request.getVault(); if 
(vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); @@ -38,10 +37,14 @@ function vaultsSecretsGet({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); const secretName = call.request.getSecretName(); - const secretContent = await vaultOps.getSecret(vault, secretName); + const secretContent = await vaultManager.withVaults( + [vaultId], + async (vault) => { + return await vaultOps.getSecret(vault, secretName); + }, + ); response.setSecretContent(secretContent); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsList.ts b/src/client/service/vaultsSecretsList.ts index 42eaea942..4eef962f7 100644 --- a/src/client/service/vaultsSecretsList.ts +++ b/src/client/service/vaultsSecretsList.ts @@ -32,9 +32,13 @@ function vaultsSecretsList({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secrets = await vaultOps.listSecrets(vault); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + const secrets = await vaultManager.withVaults( + [vaultId], + async (vault) => { + return await vaultOps.listSecrets(vault); + }, + ); let secretMessage: secretsPB.Secret; for (const secret of secrets) { secretMessage = new secretsPB.Secret(); diff --git a/src/client/service/vaultsSecretsMkdir.ts b/src/client/service/vaultsSecretsMkdir.ts index 5c51f0673..345aa01ea 100644 --- a/src/client/service/vaultsSecretsMkdir.ts +++ b/src/client/service/vaultsSecretsMkdir.ts @@ -29,7 +29,6 @@ function vaultsSecretsMkdir({ const response = new utilsPB.StatusMessage(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultMkdirMessge = call.request; const vaultMessage = vaultMkdirMessge.getVault(); if (vaultMessage == null) { @@ -39,10 +38,11 @@ function vaultsSecretsMkdir({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - await vaultOps.mkdir(vault, vaultMkdirMessge.getDirName(), { - recursive: vaultMkdirMessge.getRecursive(), + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.mkdir(vault, vaultMkdirMessge.getDirName(), { + recursive: vaultMkdirMessge.getRecursive(), + }); }); response.setSuccess(true); callback(null, response); diff --git a/src/client/service/vaultsSecretsNew.ts b/src/client/service/vaultsSecretsNew.ts index 2e3e7d18b..10bd355fe 100644 --- a/src/client/service/vaultsSecretsNew.ts +++ b/src/client/service/vaultsSecretsNew.ts @@ -29,7 +29,6 @@ function vaultsSecretsNew({ const response = new utilsPB.StatusMessage(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); @@ -38,11 +37,12 @@ function vaultsSecretsNew({ 
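
These secrets handlers all migrate from holding an open vault handle (`openVault`) to `VaultManager.withVaults`, which scopes the vault to a callback and releases it afterwards. A minimal sketch of the call shape, assuming an already-resolved `vaultId`, matching the usage in the handlers above:

import type { VaultId } from '../../vaults/types';
import type { VaultManager } from '../../vaults';
import { vaultOps } from '../../vaults';

// Sketch only: read a secret under withVaults; the `vault` handle is
// only valid inside the callback and is released (with any locks)
// once the callback resolves
async function readSecret(
  vaultManager: VaultManager,
  vaultId: VaultId,
  secretName: string,
): Promise<Buffer> {
  return await vaultManager.withVaults([vaultId], async (vault) => {
    return await vaultOps.getSecret(vault, secretName);
  });
}
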
const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); const secret = call.request.getSecretName(); const content = Buffer.from(call.request.getSecretContent()); - await vaultOps.addSecret(vault, secret, content); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, secret, content); + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsNewDir.ts b/src/client/service/vaultsSecretsNewDir.ts index 33d9b6968..60ff1b14c 100644 --- a/src/client/service/vaultsSecretsNewDir.ts +++ b/src/client/service/vaultsSecretsNewDir.ts @@ -32,7 +32,6 @@ function vaultsSecretsNewDir({ const response = new utilsPB.StatusMessage(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); @@ -41,10 +40,11 @@ function vaultsSecretsNewDir({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); const secretsPath = call.request.getSecretDirectory(); - await vaultOps.addSecretDirectory(vault, secretsPath, fs); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecretDirectory(vault, secretsPath, fs); + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsRename.ts b/src/client/service/vaultsSecretsRename.ts index 2fe81c7b5..dd04f7d3f 100644 --- a/src/client/service/vaultsSecretsRename.ts +++ b/src/client/service/vaultsSecretsRename.ts @@ -42,11 +42,12 @@ function vaultsSecretsRename({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); const oldSecret = secretMessage.getSecretName(); const newSecret = call.request.getNewName(); - await vaultOps.renameSecret(vault, oldSecret, newSecret); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.renameSecret(vault, oldSecret, newSecret); + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsStat.ts b/src/client/service/vaultsSecretsStat.ts index f7250cdad..7f498f1b2 100644 --- a/src/client/service/vaultsSecretsStat.ts +++ b/src/client/service/vaultsSecretsStat.ts @@ -1,25 +1,47 @@ -import type * as grpc from '@grpc/grpc-js'; +import type { VaultManager } from '../../vaults'; +import type { VaultId, VaultName } from '../../vaults/types'; import type { Authenticate } from '../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import 
{ vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; -function vaultsSecretsStat({ authenticate }: { authenticate: Authenticate }) { +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsStat({ + authenticate, + vaultManager, +}: { + authenticate: Authenticate; + vaultManager: VaultManager; +}) { return async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, ): Promise => { try { - const response = new vaultsPB.Stat(); + const response = new secretsPB.Stat(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - - // Const vaultMessage = call.request; - // Const id = await parseVaultInput(vaultMessage, vaultManager); - // const vault = await vaultManager.openVault(id); - // FIXME, reimplement this. - throw Error('Not Implemented'); - // Const stats = await vaultManager.vaultStats(id); - // response.setStats(JSON.stringify(stats));); + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + const secretName = call.request.getSecretName(); + const stat = await vaultManager.withVaults([vaultId], async (vault) => { + return await vaultOps.statSecret(vault, secretName); + }); + response.setJson(JSON.stringify(stat)); callback(null, response); return; } catch (e) { diff --git a/src/client/service/vaultsPermissionsSet.ts b/src/client/service/vaultsShare.ts similarity index 55% rename from src/client/service/vaultsPermissionsSet.ts rename to src/client/service/vaultsShare.ts index 05ddb055f..54e2ef4bc 100644 --- a/src/client/service/vaultsPermissionsSet.ts +++ b/src/client/service/vaultsShare.ts @@ -1,13 +1,26 @@ import type { Authenticate } from '../types'; +import type { VaultManager } from '../../vaults'; +import type { VaultId, VaultName } from '../../vaults/types'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import * as validationUtils from '@/validation/utils'; +import { errors as vaultsErrors } from '../../vaults'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function vaultsPermissionsSet({ +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsShare({ authenticate, + vaultManager, }: { authenticate: Authenticate; + vaultManager: VaultManager; }) { return async ( call: grpc.ServerUnaryCall, @@ -16,21 +29,22 @@ function vaultsPermissionsSet({ try { const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const nodeMessage = call.request.getNode(); if (nodeMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); return; } - // Const node = makeNodeId(nodeMessage.getNodeId()); + const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); return; } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - throw Error('Not Implemented'); - // Await vaultManager.setVaultPermissions(node, id); // FIXME + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + await vaultManager.shareVault(vaultId, nodeId); const response = new utilsPB.StatusMessage(); response.setSuccess(true); callback(null, response); @@ -42,4 +56,4 @@ function vaultsPermissionsSet({ }; } -export default vaultsPermissionsSet; +export default vaultsShare; diff --git a/src/client/service/vaultsPermissionsUnset.ts b/src/client/service/vaultsUnshare.ts similarity index 55% rename from src/client/service/vaultsPermissionsUnset.ts rename to src/client/service/vaultsUnshare.ts index fd2e8429f..83f028822 100644 --- a/src/client/service/vaultsPermissionsUnset.ts +++ b/src/client/service/vaultsUnshare.ts @@ -1,13 +1,26 @@ import type { Authenticate } from '../types'; +import type { VaultManager } from '../../vaults'; +import type { VaultId, VaultName } from '../../vaults/types'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { errors as vaultsErrors } from '../../vaults'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as validationUtils from '../../validation/utils'; -function vaultsPermissionsUnset({ +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsUnshare({ authenticate, + vaultManager, }: { authenticate: Authenticate; + vaultManager: VaultManager; }) { return async ( call: grpc.ServerUnaryCall, @@ -16,21 +29,22 @@ function vaultsPermissionsUnset({ try { const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const nodeMessage = call.request.getNode(); if (nodeMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); return; } - // Const node = makeNodeId(nodeMessage.getNodeId()); + const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); return; } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - throw Error('Not implemented'); - // Await vaultManager.unsetVaultPermissions(node, id); // FIXME + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + await vaultManager.unshareVault(vaultId, nodeId); const response = new utilsPB.StatusMessage(); response.setSuccess(true); callback(null, response); @@ -42,4 +56,4 @@ function vaultsPermissionsUnset({ }; } -export default vaultsPermissionsUnset; +export default vaultsUnshare; diff --git a/src/client/service/vaultsVersion.ts b/src/client/service/vaultsVersion.ts index 4533dca9e..53c7fae9f 100644 --- a/src/client/service/vaultsVersion.ts +++ b/src/client/service/vaultsVersion.ts @@ -29,9 +29,7 @@ function vaultsVersion({ // Checking session token const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultsVersionMessage = call.request; - // Getting vault ID const vaultMessage = vaultsVersionMessage.getVault(); if (vaultMessage == null) { @@ -41,22 +39,22 @@ function vaultsVersion({ const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - + if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); // Doing the deed - const vault = await vaultManager.openVault(vaultId); - const latestOid = (await vault.log())[0].oid; const versionId = vaultsVersionMessage.getVersionId(); - - await vault.version(versionId); - const currentVersionId = (await vault.log(0, versionId))[0]?.oid; - + const [latestOid, currentVersionId] = await vaultManager.withVaults( + [vaultId], + async (vault) => { + const latestOid = (await vault.log())[0].commitId; + await vault.version(versionId); + const currentVersionId = (await vault.log(versionId, 0))[0]?.commitId; + return [latestOid, currentVersionId]; + }, + ); // Checking if latest version ID. 
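
The same `decodeVaultId` helper and name-or-ID fallback are duplicated across vaultsPull, vaultsSecretsStat, vaultsShare, vaultsUnshare and vaultsVersion above. A consolidated sketch of that lookup follows; the `resolveVaultId` name is hypothetical and not part of this patch, but it relies only on the `getVaultId` and `fromMultibase` behaviour those handlers already use.

import type { VaultId, VaultName } from '../../vaults/types';
import type { VaultManager } from '../../vaults';
import { utils as idUtils } from '@matrixai/id';
import { errors as vaultsErrors } from '../../vaults';

// Hypothetical consolidation: try the human-readable vault name first,
// then fall back to decoding the string as a multibase-encoded VaultId
async function resolveVaultId(
  vaultManager: VaultManager,
  nameOrId: string,
): Promise<VaultId> {
  let vaultId = await vaultManager.getVaultId(nameOrId as VaultName);
  if (vaultId == null) {
    const decoded = idUtils.fromMultibase(nameOrId);
    if (decoded != null) vaultId = decoded as VaultId;
  }
  if (vaultId == null) throw new vaultsErrors.ErrorVaultsVaultUndefined();
  return vaultId;
}

Extracting this would remove five near-identical copies and keep the name-versus-ID precedence in one place.
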
const isLatestVersion = latestOid === currentVersionId; - // Creating message response.setIsLatestVersion(isLatestVersion); - // Sending message callback(null, response); return; diff --git a/src/config.ts b/src/config.ts index 09f88b66a..c8322e2f2 100644 --- a/src/config.ts +++ b/src/config.ts @@ -62,6 +62,7 @@ const config = { dbBase: 'db', keysBase: 'keys', vaultsBase: 'vaults', + efsBase: 'efs', tokenBase: 'token', keysConfig: { rootKeyPairBits: 4096, diff --git a/src/git/GitRequest.ts b/src/git/GitRequest.ts deleted file mode 100644 index 14f304d66..000000000 --- a/src/git/GitRequest.ts +++ /dev/null @@ -1,87 +0,0 @@ -/** - * Responsible for converting HTTP messages from isomorphic-git into requests and sending them to a specific node. - */ - -class GitRequest { - private requestInfo: ( - vaultNameOrId: string, - ) => AsyncIterableIterator; - private requestPack: ( - vaultNameOrId: string, - body: any, - ) => AsyncIterableIterator; - private requestVaultNames: () => Promise; - - constructor( - requestInfo: (vaultNameOrId: string) => AsyncIterableIterator, - requestPack: ( - vaultNameOrId: string, - body: Buffer, - ) => AsyncIterableIterator, - requestVaultNames: () => Promise, - ) { - this.requestInfo = requestInfo; - this.requestPack = requestPack; - this.requestVaultNames = requestVaultNames; - } - - /** - * The custom http request method to feed into isomorphic-git's [custom http object](https://isomorphic-git.org/docs/en/http) - * In the future this will need to be changed in order to handle the receive-pack command from isomorphic-git. This will be - * in the url passed into the request function and is needed for push functionality - */ - public async request({ - url, - method = 'GET', - headers = {}, - body = Buffer.from(''), - }) { - const u = new URL(url); - - // Parse request - if (method === 'GET') { - const match = u.pathname.match(/\/(.+)\/info\/refs$/); - if (!match || /\.\./.test(match[1])) { - throw new Error('Error'); - } - - const vaultNameOrId = match![1]; - const infoResponse = this.requestInfo(vaultNameOrId); - - return { - url: url, - method: method, - body: infoResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else if (method === 'POST') { - const match = u.pathname.match(/\/(.+)\/git-(.+)/); - if (!match || /\.\./.test(match[1])) { - throw new Error('Error'); - } - - const vaultNameOrId = match![1]; - - const packResponse = this.requestPack(vaultNameOrId, body[0]); - - return { - url: url, - method: method, - body: packResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else { - throw new Error('Method not supported'); - } - } - - public async scanVaults() { - return await this.requestVaultNames(); - } -} - -export default GitRequest; diff --git a/src/git/index.ts b/src/git/index.ts index dae0d1ba1..006019213 100644 --- a/src/git/index.ts +++ b/src/git/index.ts @@ -1,4 +1,3 @@ -export { default as GitRequest } from './GitRequest'; export * as utils from './utils'; export * as types from './types'; export * as errors from './errors'; diff --git a/src/git/utils.ts b/src/git/utils.ts index d565ddc15..d7d6b55e2 100644 --- a/src/git/utils.ts +++ b/src/git/utils.ts @@ -1,17 +1,17 @@ import type { - Refs, - SymRefs, Ack, + DeflatedObject, Identity, Pack, PackIndex, - DeflatedObject, - WrappedObject, RawObject, + Refs, + SymRefs, + WrappedObject, } from './types'; import type { - ReadCommitResult, CommitObject, + ReadCommitResult, TreeEntry, TreeObject, } from 'isomorphic-git'; @@ -22,8 +22,13 @@ import pako 
from 'pako'; import Hash from 'sha.js/sha1'; import { PassThrough } from 'readable-stream'; import createHash from 'sha.js'; -import * as gitErrors from './errors'; +import { errors as gitErrors } from './'; +import * as vaultsUtils from '../vaults/utils'; +/** + * List of paths to check for a specific ref. + * @param ref Reference string + */ const refpaths = (ref: string) => [ `${ref}`, `refs/${ref}`, @@ -121,6 +126,10 @@ function compareRefNames(refa: string, refb: string): number { return tmp; } +/** + * Parses the packed-refs file. + * @param text Contents of the packed-refs file. + */ function textToPackedRefs(text: string): Refs { const refs: Refs = {}; if (text) { @@ -152,14 +161,30 @@ function textToPackedRefs(text: string): Refs { return refs; } +/** + * Reads and parses the packed-refs file. + * @param fs Filesystem implementation + * @param gitdir Git '.git' directory + */ async function packedRefs(fs: EncryptedFS, gitdir: string): Promise { - const text = await fs.promises.readFile(path.join(gitdir, 'packed-refs'), { - encoding: 'utf8', - }); - const refs = textToPackedRefs(text.toString()); - return refs; + let text: string | Buffer = '# pack-refs with: peeled fully-peeled sorted'; + try { + text = await fs.promises.readFile(path.join(gitdir, 'packed-refs'), { + encoding: 'utf8', + }); + } catch (err) { + if (err.code !== 'ENOENT') throw err; + // If no file exists then ignore and return the default. + } + return textToPackedRefs(text!.toString()); } +/** + * Obtains a list of all refs by recursively reading the FS. + * @param fs Filesystem implementation + * @param gitdir Git '.git' directory + * @param filepath Path to start listing from. + */ async function listRefs( fs: EncryptedFS, gitdir: string, @@ -168,7 +193,7 @@ async function listRefs( const packedMap = packedRefs(fs, gitdir); let files: string[] = []; try { - for await (const file of readdirRecursively( + for await (const file of vaultsUtils.readdirRecursively( fs, path.join(gitdir, filepath), )) { @@ -194,33 +219,28 @@ async function listRefs( return files; } -async function* readdirRecursively( - efs: EncryptedFS, - dir: string, - dirs?: boolean, -) { - const dirents = await efs.readdir(dir); - let secretPath: string; - for (const dirent of dirents) { - const res = dirent.toString(); // Makes string | buffer a string. - secretPath = path.join(dir, res); - if ((await efs.stat(secretPath)).isDirectory() && dirent !== '.git') { - if (dirs === true) { - yield secretPath; - } - yield* readdirRecursively(efs, secretPath, dirs); - } else if ((await efs.stat(secretPath)).isFile()) { - yield secretPath; - } - } -} - -async function resolve( - fs: EncryptedFS, - gitdir: string, - ref: string, - depth?: number, -): Promise { +/** + * Resolves a ref to its SHA hash by walking the fs and packed refs. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param ref Ref we wish to resolve. + * @param depth How deep to search. + * @returns {String} The resolved SHA hash. + */ +async function resolve({ + fs, + dir = '.', + gitdir = '.git', + ref, + depth, +}: { + fs: EncryptedFS; + dir?: string; + gitdir?: string; + ref: string; + depth?: number; +}): Promise { if (depth !== undefined) { depth--; if (depth === -1) { @@ -230,7 +250,7 @@ async function resolve( // Is it a ref pointer?
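// A symbolic ref file contains a pointer of the form `ref: refs/heads/master`; strip the prefix and resolve the target recursively, with `depth` bounding the recursion.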
if (ref.startsWith('ref: ')) { ref = ref.slice('ref: '.length); - return resolve(fs, gitdir, ref, depth); + return resolve({ fs, dir, gitdir, ref, depth }); } // Is it a complete and valid SHA? if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) { @@ -248,24 +268,37 @@ async function resolve( await fs.promises.readFile(path.join(gitdir, ref), { encoding: 'utf8', }) - ).toString() || packedMap[ref].line; // FIXME: not sure what is going on here. + ).toString() || packedMap[ref].line; } catch (err) { if (err.code === 'ENOENT') { throw new gitErrors.ErrorGitUndefinedRefs(`Ref ${ref} cannot be found`); } } if (sha != null) { - return resolve(fs, gitdir, sha.trim(), depth); // FIXME: sha is string or config? + return resolve({ fs, dir, gitdir, ref: sha.trim(), depth }); } } throw new gitErrors.ErrorGitUndefinedRefs(`ref ${ref} corrupted`); } -async function uploadPack( - fs: EncryptedFS, - gitdir: string = '.git', +/** + * Obtains a list of all the refs in the repository and formats it. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param advertiseRefs Bool to specify if we want to advertise the refs. + */ +async function uploadPack({ + fs, + dir = '.', + gitdir = '.git', advertiseRefs = false, -): Promise | undefined> { +}: { + fs: EncryptedFS; + dir?: string; + gitdir?: string; + advertiseRefs: boolean; +}): Promise> { try { if (advertiseRefs) { const capabilities = ['side-band-64k']; @@ -274,16 +307,24 @@ async function uploadPack( const refs = {}; keys.unshift('HEAD'); for (const key of keys) { - refs[key] = await resolve(fs, gitdir, key); + refs[key] = await resolve({ fs, dir, gitdir, ref: key }); } const symrefs = {}; - symrefs['HEAD'] = await resolve(fs, gitdir, 'HEAD', 2); + symrefs['HEAD'] = await resolve({ + fs, + dir, + gitdir, + ref: 'HEAD', + depth: 2, + }); const write = { capabilities: capabilities, refs: refs, symrefs: symrefs, }; return writeRefsAdResponse(write); + } else { + return []; } } catch (err) { err.caller = 'git.uploadPack'; @@ -291,28 +332,41 @@ } } +/** + * Given a list of refs, works out the missing commits and sends them over as a stream. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param refs List of refs we want. + * @param depth How deep we want to search commits for. + * @param haves List of oids we already have; these can be excluded from the stream. + */ async function packObjects({ fs, + dir = '.', gitdir = '.git', refs, depth = undefined, haves = undefined, }: { fs: EncryptedFS; + dir: string; gitdir: string; refs: string[]; depth?: number; haves?: string[]; }): Promise { - const oids = new Set(); + const oids = new Set(); // List of oids for commits we wish to send. const shallows = new Set(); const unshallows = new Set(); - const acks: Ack[] = []; - haves = haves ? haves : []; + const acks: Ack[] = []; // A list of commits that were found but that the requester already has. + haves = haves ? haves : []; // The list of commits we already have. const since = undefined; + // For each desired ref. for (const ref of refs) { - const commits = await log({ fs, gitdir, ref, depth, since }); - const oldshallows: string[] = []; + // Obtain a list of the relevant commits + const commits = await log({ fs, dir, gitdir, ref, depth, since }); + const oldshallows: string[] = []; // Never actually updated here; its purpose is unclear.
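// Walk the commits for this ref: commits the requester already has (per `haves`) are acknowledged rather than packed, while the rest are collected into `oids` for sending.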
for (let i = 0; i < commits.length; i++) { const commit = commits[i]; if (haves.includes(commit.oid)) { @@ -334,18 +388,34 @@ } } } - const objects = await listObjects({ fs, gitdir, oids: Array.from(oids) }); + // Getting all of the oids within the trees of the desired oids. + const objects = await listObjects({ + fs, + dir, + gitdir, + oids: Array.from(oids), + }); const packstream = new PassThrough(); - await pack({ fs, gitdir, oids: [...objects], outputStream: packstream }); + // Packing, gzipping and returning a stream of all the desired data through packstream. + await pack({ fs, dir, gitdir, oids: [...objects], outputStream: packstream }); return { packstream, shallows, unshallows, acks }; } +/** + * Walks the git objects and returns a list of blobs, commits and trees. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param oids List of starting oids. + */ async function listObjects({ fs, + dir = '.', gitdir = '.git', oids, }: { fs: EncryptedFS; + dir: string; gitdir: string; oids: string[]; }): Promise> { @@ -358,7 +428,7 @@ async function listObjects({ // tell us which oids are Blobs and which are Trees. And we // do not need to recurse through commit parents. async function walk(oid: string): Promise { - const gitObject = await readObject({ fs, gitdir, oid }); + const gitObject = await readObject({ fs, dir, gitdir, oid }); if (gitObject.type === 'commit') { commits.add(oid); const commit = commitFrom(Buffer.from(gitObject.object)); @@ -454,8 +524,19 @@ function parseBuffer(buffer: Buffer): TreeObject { return _entries; } +/** + * Returns a commit log for a given ref. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param ref Ref we're getting the commit log for. + * @param depth How many commits to fetch. + * @param since Date to start from. + * @param signing Bool to specify signing. + */ async function log({ fs, + dir = '.', gitdir = '.git', ref = 'HEAD', depth, @@ -463,6 +544,7 @@ async function log({ signing = false, }: { fs: EncryptedFS; + dir: string; gitdir: string; ref: string; depth?: number; @@ -475,8 +557,8 @@ async function log({ // TODO: In the future, we may want to have an API where we return a // async iterator that emits commits. const commits: ReadCommitResult[] = []; - const oid = await resolve(fs, gitdir, ref); - const tips = [await logCommit(fs, gitdir, oid, signing)]; + const oid = await resolve({ fs, dir, gitdir, ref }); + const tips = [await logCommit({ fs, dir, gitdir, oid, signing })]; // eslint-disable-next-line while (true) { @@ -502,7 +584,13 @@ // Add the parents of this commit to the queue // Note: for the case of a commit with no parents, it will concat an empty array, having no net effect.
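// Each parent commit that is not already queued is looked up and pushed onto `tips`, so the walk continues down the history.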
for (const oid of commit.parent) { - const commitResult1 = await logCommit(fs, gitdir, oid, signing); + const commitResult1 = await logCommit({ + fs, + dir, + gitdir, + oid, + signing, + }); if (!tips.map((commit) => commit.oid).includes(commitResult1.oid)) { tips.push(commitResult1); } @@ -525,13 +613,20 @@ function compareAge(a: ReadCommitResult, b: ReadCommitResult): number { return a.commit.committer.timestamp - b.commit.committer.timestamp; } -async function logCommit( - fs: EncryptedFS, - gitdir: string, - oid: string, - signing: boolean, -): Promise { - const gitObject = await readObject({ fs, gitdir, oid }); +async function logCommit({ + fs, + dir = '.', + gitdir = '.git', + oid, + signing, +}: { + fs: EncryptedFS; + dir: string; + gitdir: string; + oid: string; + signing: boolean; +}): Promise { + const gitObject = await readObject({ fs, dir, gitdir, oid }); if (gitObject.type !== 'commit') { throw new gitErrors.ErrorGitUndefinedType( `Expected type to be commit, but instead found ${gitObject.type}`, @@ -734,12 +829,14 @@ function commitFrom(commit: string | Buffer): string { async function readObject({ fs, + dir, gitdir, oid, format, encoding, }: { fs: EncryptedFS; + dir: string; gitdir: string; oid: string; format?: 'parsed' | 'content'; @@ -747,12 +844,14 @@ }): Promise; async function readObject({ fs, + dir, gitdir, oid, format, encoding, }: { fs: EncryptedFS; + dir: string; gitdir: string; oid: string; format: 'deflated'; @@ -760,12 +859,14 @@ }): Promise; async function readObject({ fs, + dir, gitdir, oid, format, encoding, }: { fs: EncryptedFS; + dir: string; gitdir: string; oid: string; format: 'wrapped'; @@ -773,12 +874,14 @@ }): Promise; async function readObject({ fs, - gitdir, + dir = '.', + gitdir = '.git', oid, format = 'parsed', encoding, }: { fs: EncryptedFS; + dir: string; gitdir: string; oid: string; format?: 'wrapped' | 'parsed' | 'deflated' | 'content'; @@ -787,7 +890,8 @@ const _format = format === 'parsed' ? 'content' : format; // Curry the current read method so that the packfile un-deltification // process can acquire external ref-deltas. - const getExternalRefDelta = (oid: string) => readObject({ fs, gitdir, oid }); + const getExternalRefDelta = (oid: string) => + readObject({ fs, dir, gitdir, oid }); let result; // Empty tree - hard-coded so we can use it as a shorthand. // Note: I think the canonical git implementation must do this too because @@ -1191,13 +1295,23 @@ function unwrap(buffer: Buffer): { }; } +/** + * Packs the requested objects and sends them through the output stream. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param oids Desired oids to be sent. + * @param outputStream Data output stream.
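+ * The output follows the git packfile layout: a 'PACK' signature with version and object count, the deflated objects themselves, and a trailing SHA1 checksum over everything written.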
+ */ async function pack({ fs, + dir = '.', gitdir = '.git', oids, outputStream, }: { fs: EncryptedFS; + dir: string; gitdir: string; oids: string[]; outputStream: PassThrough; @@ -1250,7 +1364,7 @@ async function pack({ const paddedChunk = '0'.repeat(8 - unpaddedChunk.length) + unpaddedChunk; write(paddedChunk, 'hex'); for (const oid of oids) { - const { type, object } = await readObject({ fs, gitdir, oid }); + const { type, object } = await readObject({ fs, dir, gitdir, oid }); writeObject(object as Uint8Array, type); } // Write SHA1 checksum diff --git a/src/keys/KeyManager.ts b/src/keys/KeyManager.ts index dea504132..268dce9fe 100644 --- a/src/keys/KeyManager.ts +++ b/src/keys/KeyManager.ts @@ -12,7 +12,6 @@ import type { FileSystem } from '../types'; import type { NodeId } from '../nodes/types'; import type { PolykeyWorkerManagerInterface } from '../workers/types'; -import type { VaultKey } from '../vaults/types'; import path from 'path'; import { Buffer } from 'buffer'; import Logger from '@matrixai/logger'; @@ -33,35 +32,12 @@ interface KeyManager extends CreateDestroyStartStop {} new keysErrors.ErrorKeyManagerDestroyed(), ) class KeyManager { - public readonly keysPath: string; - public readonly rootPubPath: string; - public readonly rootKeyPath: string; - public readonly rootCertPath: string; - public readonly rootCertsPath: string; - public readonly dbKeyPath: string; - public readonly vaultKeyPath: string; - - protected fs: FileSystem; - protected logger: Logger; - protected rootKeyPairChange: RootKeyPairChange; - protected rootKeyPair: KeyPair; - protected recoveryCode: RecoveryCode | undefined; - protected _dbKey: Buffer; - protected _vaultKey: Buffer; - protected rootCert: Certificate; - protected workerManager?: PolykeyWorkerManagerInterface; - protected rootKeyPairBits: number; - protected rootCertDuration: number; - protected dbKeyBits: number; - protected vaultKeyBits: number; - static async createKeyManager({ keysPath, password, rootKeyPairBits = 4096, rootCertDuration = 31536000, dbKeyBits = 256, - vaultKeyBits = 256, rootKeyPairChange = async () => {}, fs = require('fs'), logger = new Logger(this.name), @@ -73,7 +49,6 @@ class KeyManager { rootKeyPairBits?: number; rootCertDuration?: number; dbKeyBits?: number; - vaultKeyBits?: number; rootKeyPairChange?: RootKeyPairChange; fs?: FileSystem; logger?: Logger; @@ -87,7 +62,6 @@ class KeyManager { rootCertDuration, rootKeyPairBits, dbKeyBits, - vaultKeyBits, rootKeyPairChange, fs, logger, @@ -101,12 +75,30 @@ class KeyManager { return keyManager; } + public readonly keysPath: string; + public readonly rootPubPath: string; + public readonly rootKeyPath: string; + public readonly rootCertPath: string; + public readonly rootCertsPath: string; + public readonly dbKeyPath: string; + + protected fs: FileSystem; + protected logger: Logger; + protected rootKeyPairChange: RootKeyPairChange; + protected rootKeyPair: KeyPair; + protected recoveryCode: RecoveryCode | undefined; + protected _dbKey: Buffer; + protected rootCert: Certificate; + protected workerManager?: PolykeyWorkerManagerInterface; + protected rootKeyPairBits: number; + protected rootCertDuration: number; + protected dbKeyBits: number; + constructor({ keysPath, rootKeyPairBits, rootCertDuration, dbKeyBits, - vaultKeyBits, rootKeyPairChange, fs, logger, @@ -115,7 +107,6 @@ class KeyManager { rootKeyPairBits: number; rootCertDuration: number; dbKeyBits: number; - vaultKeyBits: number; rootKeyPairChange: RootKeyPairChange; fs: FileSystem; logger: Logger; @@ -127,11 +118,9 
@@ class KeyManager { this.rootCertPath = path.join(keysPath, 'root.crt'); this.rootCertsPath = path.join(keysPath, 'root_certs'); this.dbKeyPath = path.join(keysPath, 'db.key'); - this.vaultKeyPath = path.join(keysPath, 'vault.key'); this.rootKeyPairBits = rootKeyPairBits; this.rootCertDuration = rootCertDuration; this.dbKeyBits = dbKeyBits; - this.vaultKeyBits = vaultKeyBits; this.rootKeyPairChange = rootKeyPairChange; this.fs = fs; } @@ -182,7 +171,6 @@ class KeyManager { this.recoveryCode = recoveryCode; this.rootCert = rootCert; this._dbKey = await this.setupKey(this.dbKeyPath, this.dbKeyBits); - this._vaultKey = await this.setupKey(this.vaultKeyPath, this.vaultKeyBits); this.logger.info(`Started ${this.constructor.name}`); } @@ -205,11 +193,6 @@ class KeyManager { return this._dbKey; } - @ready(new keysErrors.ErrorKeyManagerNotRunning()) - get vaultKey(): VaultKey { - return this._vaultKey as VaultKey; - } - @ready(new keysErrors.ErrorKeyManagerNotRunning()) public getRootKeyPair(): KeyPair { return keysUtils.keyPairCopy(this.rootKeyPair); @@ -410,7 +393,6 @@ class KeyManager { ): Promise { this.logger.info('Renewing root key pair'); const keysDbKeyPlain = await this.readKey(this.dbKeyPath); - const keysVaultKeyPlain = await this.readKey(this.vaultKeyPath); const recoveryCodeNew = keysUtils.generateRecoveryCode(); const rootKeyPair = await this.generateKeyPair(bits, recoveryCodeNew); const now = new Date(); @@ -445,7 +427,6 @@ class KeyManager { this.writeRootKeyPair(rootKeyPair, password), this.writeRootCert(rootCert), this.writeKey(keysDbKeyPlain, this.dbKeyPath, rootKeyPair), - this.writeKey(keysVaultKeyPlain, this.vaultKeyPath, rootKeyPair), ]); this.rootKeyPair = rootKeyPair; this.recoveryCode = recoveryCodeNew; @@ -476,7 +457,6 @@ class KeyManager { ): Promise { this.logger.info('Resetting root key pair'); const keysDbKeyPlain = await this.readKey(this.dbKeyPath); - const keysVaultKeyPlain = await this.readKey(this.vaultKeyPath); const recoveryCodeNew = keysUtils.generateRecoveryCode(); const rootKeyPair = await this.generateKeyPair(bits, recoveryCodeNew); const rootCert = keysUtils.generateCertificate( @@ -493,7 +473,6 @@ class KeyManager { this.writeRootKeyPair(rootKeyPair, password), this.writeRootCert(rootCert), this.writeKey(keysDbKeyPlain, this.dbKeyPath, rootKeyPair), - this.writeKey(keysVaultKeyPlain, this.vaultKeyPath, rootKeyPair), ]); this.rootKeyPair = rootKeyPair; this.recoveryCode = recoveryCodeNew; diff --git a/src/notifications/NotificationsManager.ts b/src/notifications/NotificationsManager.ts index 69766e41e..c17954dd3 100644 --- a/src/notifications/NotificationsManager.ts +++ b/src/notifications/NotificationsManager.ts @@ -144,7 +144,8 @@ class NotificationsManager { reverse: true, }); for await (const o of keyStream) { - latestId = IdInternal.fromBuffer(o); + // FIXME: really a buffer? 
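+ // The key stream yields `string | Buffer`; the cast assumes keys come back as raw `Buffer`s here.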
+ latestId = IdInternal.fromBuffer(o as Buffer); } this.notificationIdGenerator = createNotificationIdGenerator(latestId); this.logger.info(`Started ${this.constructor.name}`); diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts index ea1c11386..72db2fe5c 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts @@ -16,7 +16,6 @@ interface IAgentServiceService extends grpc.ServiceDefinition; responseDeserialize: grpc.deserialize; } -interface IAgentServiceService_IVaultsGitInfoGet extends grpc.MethodDefinition { +interface IAgentServiceService_IVaultsGitInfoGet extends grpc.MethodDefinition { path: "/polykey.v1.AgentService/VaultsGitInfoGet"; requestStream: false; responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } @@ -52,23 +51,14 @@ interface IAgentServiceService_IVaultsGitPackGet extends grpc.MethodDefinition

; responseDeserialize: grpc.deserialize; } -interface IAgentServiceService_IVaultsScan extends grpc.MethodDefinition { +interface IAgentServiceService_IVaultsScan extends grpc.MethodDefinition { path: "/polykey.v1.AgentService/VaultsScan"; requestStream: false; responseStream: true; requestSerialize: grpc.serialize; requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} -interface IAgentServiceService_IVaultsPermissionsCheck extends grpc.MethodDefinition { - path: "/polykey.v1.AgentService/VaultsPermissionsCheck"; - requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; } interface IAgentServiceService_INodesClosestLocalNodesGet extends grpc.MethodDefinition { path: "/polykey.v1.AgentService/NodesClosestLocalNodesGet"; @@ -129,10 +119,9 @@ export const AgentServiceService: IAgentServiceService; export interface IAgentServiceServer extends grpc.UntypedServiceImplementation { echo: grpc.handleUnaryCall; - vaultsGitInfoGet: grpc.handleServerStreamingCall; + vaultsGitInfoGet: grpc.handleServerStreamingCall; vaultsGitPackGet: grpc.handleBidiStreamingCall; - vaultsScan: grpc.handleServerStreamingCall; - vaultsPermissionsCheck: grpc.handleUnaryCall; + vaultsScan: grpc.handleServerStreamingCall; nodesClosestLocalNodesGet: grpc.handleUnaryCall; nodesClaimsGet: grpc.handleUnaryCall; nodesChainDataGet: grpc.handleUnaryCall; @@ -145,16 +134,13 @@ export interface IAgentServiceClient { echo(request: polykey_v1_utils_utils_pb.EchoMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; echo(request: polykey_v1_utils_utils_pb.EchoMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; echo(request: polykey_v1_utils_utils_pb.EchoMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; - vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; - vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.InfoRequest, options?: Partial): grpc.ClientReadableStream; + vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.InfoRequest, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; vaultsGitPackGet(): grpc.ClientDuplexStream; vaultsGitPackGet(options: Partial): grpc.ClientDuplexStream; vaultsGitPackGet(metadata: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: 
grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; + vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; @@ -180,15 +166,12 @@ export class AgentServiceClient extends grpc.Client implements IAgentServiceClie public echo(request: polykey_v1_utils_utils_pb.EchoMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; public echo(request: polykey_v1_utils_utils_pb.EchoMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; public echo(request: polykey_v1_utils_utils_pb.EchoMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; - public vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; - public vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.InfoRequest, options?: Partial): grpc.ClientReadableStream; + public vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.InfoRequest, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsGitPackGet(options?: Partial): grpc.ClientDuplexStream; public vaultsGitPackGet(metadata?: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, 
response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; + public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.js b/src/proto/js/polykey/v1/agent_service_grpc_pb.js index 782ed2f8e..5f6d9af0d 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.js @@ -117,26 +117,26 @@ function deserialize_polykey_v1_utils_EmptyMessage(buffer_arg) { return polykey_v1_utils_utils_pb.EmptyMessage.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_NodePermission(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.NodePermission)) { - throw new Error('Expected argument of type polykey.v1.vaults.NodePermission'); +function serialize_polykey_v1_vaults_InfoRequest(arg) { + if (!(arg instanceof polykey_v1_vaults_vaults_pb.InfoRequest)) { + throw new Error('Expected argument of type polykey.v1.vaults.InfoRequest'); } return Buffer.from(arg.serializeBinary()); } -function deserialize_polykey_v1_vaults_NodePermission(buffer_arg) { - return polykey_v1_vaults_vaults_pb.NodePermission.deserializeBinary(new Uint8Array(buffer_arg)); +function deserialize_polykey_v1_vaults_InfoRequest(buffer_arg) { + return polykey_v1_vaults_vaults_pb.InfoRequest.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_NodePermissionAllowed(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.NodePermissionAllowed)) { - throw new Error('Expected argument of type polykey.v1.vaults.NodePermissionAllowed'); +function serialize_polykey_v1_vaults_List(arg) { + if (!(arg instanceof polykey_v1_vaults_vaults_pb.List)) { + throw new Error('Expected argument of type polykey.v1.vaults.List'); } return Buffer.from(arg.serializeBinary()); } -function deserialize_polykey_v1_vaults_NodePermissionAllowed(buffer_arg) { - return polykey_v1_vaults_vaults_pb.NodePermissionAllowed.deserializeBinary(new Uint8Array(buffer_arg)); +function deserialize_polykey_v1_vaults_List(buffer_arg) { + return polykey_v1_vaults_vaults_pb.List.deserializeBinary(new Uint8Array(buffer_arg)); } function serialize_polykey_v1_vaults_PackChunk(arg) { @@ -150,17 +150,6 @@ function deserialize_polykey_v1_vaults_PackChunk(buffer_arg) { return polykey_v1_vaults_vaults_pb.PackChunk.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_Vault(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.Vault)) { - throw new Error('Expected argument of type polykey.v1.vaults.Vault'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_Vault(buffer_arg) { - 
return polykey_v1_vaults_vaults_pb.Vault.deserializeBinary(new Uint8Array(buffer_arg)); -} - var AgentServiceService = exports.AgentServiceService = { // Echo @@ -180,10 +169,10 @@ vaultsGitInfoGet: { path: '/polykey.v1.AgentService/VaultsGitInfoGet', requestStream: false, responseStream: true, - requestType: polykey_v1_vaults_vaults_pb.Vault, + requestType: polykey_v1_vaults_vaults_pb.InfoRequest, responseType: polykey_v1_vaults_vaults_pb.PackChunk, - requestSerialize: serialize_polykey_v1_vaults_Vault, - requestDeserialize: deserialize_polykey_v1_vaults_Vault, + requestSerialize: serialize_polykey_v1_vaults_InfoRequest, + requestDeserialize: deserialize_polykey_v1_vaults_InfoRequest, responseSerialize: serialize_polykey_v1_vaults_PackChunk, responseDeserialize: deserialize_polykey_v1_vaults_PackChunk, }, @@ -203,22 +192,11 @@ vaultsGitInfoGet: { requestStream: false, responseStream: true, requestType: polykey_v1_nodes_nodes_pb.Node, - responseType: polykey_v1_vaults_vaults_pb.Vault, + responseType: polykey_v1_vaults_vaults_pb.List, requestSerialize: serialize_polykey_v1_nodes_Node, requestDeserialize: deserialize_polykey_v1_nodes_Node, - responseSerialize: serialize_polykey_v1_vaults_Vault, - responseDeserialize: deserialize_polykey_v1_vaults_Vault, - }, - vaultsPermissionsCheck: { - path: '/polykey.v1.AgentService/VaultsPermissionsCheck', - requestStream: false, - responseStream: false, - requestType: polykey_v1_vaults_vaults_pb.NodePermission, - responseType: polykey_v1_vaults_vaults_pb.NodePermissionAllowed, - requestSerialize: serialize_polykey_v1_vaults_NodePermission, - requestDeserialize: deserialize_polykey_v1_vaults_NodePermission, - responseSerialize: serialize_polykey_v1_vaults_NodePermissionAllowed, - responseDeserialize: deserialize_polykey_v1_vaults_NodePermissionAllowed, + responseSerialize: serialize_polykey_v1_vaults_List, + responseDeserialize: deserialize_polykey_v1_vaults_List, }, // Nodes nodesClosestLocalNodesGet: { diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts index d558b7140..0dafbee89 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts @@ -43,21 +43,21 @@ interface IClientServiceService extends grpc.ServiceDefinition; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsScan extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsScan"; - requestStream: false; - responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} interface IClientServiceService_IVaultsSecretsList extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/VaultsSecretsList"; requestStream: false; @@ -328,15 +319,6 @@ interface IClientServiceService_IVaultsSecretsMkdir extends grpc.MethodDefinitio responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsSecretsStat extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsSecretsStat"; - requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} interface IClientServiceService_IVaultsSecretsDelete extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/VaultsSecretsDelete"; requestStream: false; @@ -391,8 
+373,26 @@ interface IClientServiceService_IVaultsSecretsNewDir extends grpc.MethodDefiniti responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsPermissionsSet extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsPermissionsSet"; +interface IClientServiceService_IvaultsSecretsStat extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/vaultsSecretsStat"; + requestStream: false; + responseStream: false; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} +interface IClientServiceService_IVaultsPermissionsGet extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsPermissionsGet"; + requestStream: false; + responseStream: true; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} +interface IClientServiceService_IVaultsShare extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsShare"; requestStream: false; responseStream: false; requestSerialize: grpc.serialize; @@ -400,8 +400,8 @@ interface IClientServiceService_IVaultsPermissionsSet extends grpc.MethodDefinit responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsPermissionsUnset extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsPermissionsUnset"; +interface IClientServiceService_IVaultsUnshare extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsUnshare"; requestStream: false; responseStream: false; requestSerialize: grpc.serialize; @@ -409,15 +409,6 @@ interface IClientServiceService_IVaultsPermissionsUnset extends grpc.MethodDefin responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsPermissions extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsPermissions"; - requestStream: false; - responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} interface IClientServiceService_IVaultsVersion extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/VaultsVersion"; requestStream: false; @@ -436,6 +427,15 @@ interface IClientServiceService_IVaultsLog extends grpc.MethodDefinition; responseDeserialize: grpc.deserialize; } +interface IClientServiceService_IVaultsScan extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsScan"; + requestStream: false; + responseStream: true; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} interface IClientServiceService_IIdentitiesAuthenticate extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/IdentitiesAuthenticate"; requestStream: false; @@ -689,21 +689,21 @@ export interface IClientServiceServer extends grpc.UntypedServiceImplementation vaultsDelete: grpc.handleUnaryCall; vaultsPull: grpc.handleUnaryCall; vaultsClone: grpc.handleUnaryCall; - vaultsScan: grpc.handleServerStreamingCall; vaultsSecretsList: grpc.handleServerStreamingCall; vaultsSecretsMkdir: grpc.handleUnaryCall; - vaultsSecretsStat: grpc.handleUnaryCall; vaultsSecretsDelete: grpc.handleUnaryCall; vaultsSecretsEdit: grpc.handleUnaryCall; vaultsSecretsGet: 
grpc.handleUnaryCall; vaultsSecretsRename: grpc.handleUnaryCall; vaultsSecretsNew: grpc.handleUnaryCall; vaultsSecretsNewDir: grpc.handleUnaryCall; - vaultsPermissionsSet: grpc.handleUnaryCall; - vaultsPermissionsUnset: grpc.handleUnaryCall; - vaultsPermissions: grpc.handleServerStreamingCall; + vaultsSecretsStat: grpc.handleUnaryCall; + vaultsPermissionsGet: grpc.handleServerStreamingCall; + vaultsShare: grpc.handleUnaryCall; + vaultsUnshare: grpc.handleUnaryCall; vaultsVersion: grpc.handleUnaryCall; vaultsLog: grpc.handleServerStreamingCall; + vaultsScan: grpc.handleServerStreamingCall; identitiesAuthenticate: grpc.handleServerStreamingCall; identitiesAuthenticatedGet: grpc.handleServerStreamingCall; identitiesTokenPut: grpc.handleUnaryCall; @@ -802,16 +802,11 @@ export interface IClientServiceClient { vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): 
grpc.ClientUnaryCall; vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; @@ -830,19 +825,24 @@ export interface IClientServiceClient { vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, options?: Partial): grpc.ClientReadableStream; - vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; + vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; + vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; + vaultsPermissionsGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; + vaultsPermissionsGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, 
response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, options?: Partial): grpc.ClientReadableStream; vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; + vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, options?: Partial): grpc.ClientReadableStream; identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; identitiesAuthenticatedGet(request: polykey_v1_identities_identities_pb.OptionalProvider, options?: Partial): grpc.ClientReadableStream; @@ -987,16 +987,11 @@ export class ClientServiceClient extends grpc.Client implements IClientServiceCl public vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - public vaultsScan(request: 
polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; public vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - public vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - public vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; public vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; @@ -1015,19 +1010,24 @@ export class ClientServiceClient extends grpc.Client implements IClientServiceCl public vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - 
public vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, options?: Partial): grpc.ClientReadableStream; - public vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; + public vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; + public vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; + public vaultsPermissionsGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; + public vaultsPermissionsGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, callback: (error: grpc.ServiceError | null, 
response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, options?: Partial): grpc.ClientReadableStream; public vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; + public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticatedGet(request: polykey_v1_identities_identities_pb.OptionalProvider, options?: Partial): grpc.ClientReadableStream; diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.js b/src/proto/js/polykey/v1/client_service_grpc_pb.js index 59bfc3d72..e845b72c2 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.js @@ -267,6 +267,17 @@ function deserialize_polykey_v1_permissions_Actions(buffer_arg) { return polykey_v1_permissions_permissions_pb.Actions.deserializeBinary(new Uint8Array(buffer_arg)); } +function serialize_polykey_v1_permissions_NodeActions(arg) { + if (!(arg instanceof polykey_v1_permissions_permissions_pb.NodeActions)) { + throw new Error('Expected argument of type polykey.v1.permissions.NodeActions'); + } + return Buffer.from(arg.serializeBinary()); +} + +function deserialize_polykey_v1_permissions_NodeActions(buffer_arg) { + return polykey_v1_permissions_permissions_pb.NodeActions.deserializeBinary(new Uint8Array(buffer_arg)); +} + function serialize_polykey_v1_secrets_Directory(arg) { if (!(arg instanceof polykey_v1_secrets_secrets_pb.Directory)) { throw new Error('Expected argument of type polykey.v1.secrets.Directory'); @@ -300,6 +311,17 @@ function deserialize_polykey_v1_secrets_Secret(buffer_arg) { return polykey_v1_secrets_secrets_pb.Secret.deserializeBinary(new Uint8Array(buffer_arg)); } +function serialize_polykey_v1_secrets_Stat(arg) { + if (!(arg instanceof polykey_v1_secrets_secrets_pb.Stat)) { + throw new Error('Expected argument of type polykey.v1.secrets.Stat'); + } + return Buffer.from(arg.serializeBinary()); +} + +function deserialize_polykey_v1_secrets_Stat(buffer_arg) { + return polykey_v1_secrets_secrets_pb.Stat.deserializeBinary(new Uint8Array(buffer_arg)); +} + function serialize_polykey_v1_sessions_Password(arg) { if (!(arg instanceof polykey_v1_sessions_sessions_pb.Password)) { throw new Error('Expected argument of type polykey.v1.sessions.Password'); @@ -388,17 +410,6 @@ function deserialize_polykey_v1_vaults_Mkdir(buffer_arg) { return polykey_v1_vaults_vaults_pb.Mkdir.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_PermGet(arg) { - 
if (!(arg instanceof polykey_v1_vaults_vaults_pb.PermGet)) { - throw new Error('Expected argument of type polykey.v1.vaults.PermGet'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_PermGet(buffer_arg) { - return polykey_v1_vaults_vaults_pb.PermGet.deserializeBinary(new Uint8Array(buffer_arg)); -} - function serialize_polykey_v1_vaults_PermSet(arg) { if (!(arg instanceof polykey_v1_vaults_vaults_pb.PermSet)) { throw new Error('Expected argument of type polykey.v1.vaults.PermSet'); @@ -421,17 +432,6 @@ function deserialize_polykey_v1_vaults_PermUnset(buffer_arg) { return polykey_v1_vaults_vaults_pb.PermUnset.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_Permission(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.Permission)) { - throw new Error('Expected argument of type polykey.v1.vaults.Permission'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_Permission(buffer_arg) { - return polykey_v1_vaults_vaults_pb.Permission.deserializeBinary(new Uint8Array(buffer_arg)); -} - function serialize_polykey_v1_vaults_Pull(arg) { if (!(arg instanceof polykey_v1_vaults_vaults_pb.Pull)) { throw new Error('Expected argument of type polykey.v1.vaults.Pull'); @@ -454,17 +454,6 @@ function deserialize_polykey_v1_vaults_Rename(buffer_arg) { return polykey_v1_vaults_vaults_pb.Rename.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_Stat(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.Stat)) { - throw new Error('Expected argument of type polykey.v1.vaults.Stat'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_Stat(buffer_arg) { - return polykey_v1_vaults_vaults_pb.Stat.deserializeBinary(new Uint8Array(buffer_arg)); -} - function serialize_polykey_v1_vaults_Vault(arg) { if (!(arg instanceof polykey_v1_vaults_vaults_pb.Vault)) { throw new Error('Expected argument of type polykey.v1.vaults.Vault'); @@ -768,17 +757,6 @@ vaultsList: { responseSerialize: serialize_polykey_v1_utils_StatusMessage, responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, }, - vaultsScan: { - path: '/polykey.v1.ClientService/VaultsScan', - requestStream: false, - responseStream: true, - requestType: polykey_v1_nodes_nodes_pb.Node, - responseType: polykey_v1_vaults_vaults_pb.List, - requestSerialize: serialize_polykey_v1_nodes_Node, - requestDeserialize: deserialize_polykey_v1_nodes_Node, - responseSerialize: serialize_polykey_v1_vaults_List, - responseDeserialize: deserialize_polykey_v1_vaults_List, - }, vaultsSecretsList: { path: '/polykey.v1.ClientService/VaultsSecretsList', requestStream: false, @@ -801,17 +779,6 @@ vaultsList: { responseSerialize: serialize_polykey_v1_utils_StatusMessage, responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, }, - vaultsSecretsStat: { - path: '/polykey.v1.ClientService/VaultsSecretsStat', - requestStream: false, - responseStream: false, - requestType: polykey_v1_vaults_vaults_pb.Vault, - responseType: polykey_v1_vaults_vaults_pb.Stat, - requestSerialize: serialize_polykey_v1_vaults_Vault, - requestDeserialize: deserialize_polykey_v1_vaults_Vault, - responseSerialize: serialize_polykey_v1_vaults_Stat, - responseDeserialize: deserialize_polykey_v1_vaults_Stat, - }, vaultsSecretsDelete: { path: '/polykey.v1.ClientService/VaultsSecretsDelete', requestStream: false, @@ -878,8 +845,30 @@ vaultsList: { responseSerialize: 
serialize_polykey_v1_utils_StatusMessage,
  responseDeserialize: deserialize_polykey_v1_utils_StatusMessage,
  },
-  vaultsPermissionsSet: {
-    path: '/polykey.v1.ClientService/VaultsPermissionsSet',
+  vaultsSecretsStat: {
+    path: '/polykey.v1.ClientService/VaultsSecretsStat',
+    requestStream: false,
+    responseStream: false,
+    requestType: polykey_v1_secrets_secrets_pb.Secret,
+    responseType: polykey_v1_secrets_secrets_pb.Stat,
+    requestSerialize: serialize_polykey_v1_secrets_Secret,
+    requestDeserialize: deserialize_polykey_v1_secrets_Secret,
+    responseSerialize: serialize_polykey_v1_secrets_Stat,
+    responseDeserialize: deserialize_polykey_v1_secrets_Stat,
+  },
+  vaultsPermissionsGet: {
+    path: '/polykey.v1.ClientService/VaultsPermissionsGet',
+    requestStream: false,
+    responseStream: true,
+    requestType: polykey_v1_vaults_vaults_pb.Vault,
+    responseType: polykey_v1_permissions_permissions_pb.NodeActions,
+    requestSerialize: serialize_polykey_v1_vaults_Vault,
+    requestDeserialize: deserialize_polykey_v1_vaults_Vault,
+    responseSerialize: serialize_polykey_v1_permissions_NodeActions,
+    responseDeserialize: deserialize_polykey_v1_permissions_NodeActions,
+  },
+  vaultsShare: {
+    path: '/polykey.v1.ClientService/VaultsShare',
   requestStream: false,
   responseStream: false,
   requestType: polykey_v1_vaults_vaults_pb.PermSet,
@@ -889,8 +878,8 @@ vaultsList: {
   responseSerialize: serialize_polykey_v1_utils_StatusMessage,
   responseDeserialize: deserialize_polykey_v1_utils_StatusMessage,
 },
-  vaultsPermissionsUnset: {
-    path: '/polykey.v1.ClientService/VaultsPermissionsUnset',
+  vaultsUnshare: {
+    path: '/polykey.v1.ClientService/VaultsUnshare',
   requestStream: false,
   responseStream: false,
   requestType: polykey_v1_vaults_vaults_pb.PermUnset,
@@ -900,17 +889,6 @@ vaultsList: {
   responseSerialize: serialize_polykey_v1_utils_StatusMessage,
   responseDeserialize: deserialize_polykey_v1_utils_StatusMessage,
 },
-  vaultsPermissions: {
-    path: '/polykey.v1.ClientService/VaultsPermissions',
-    requestStream: false,
-    responseStream: true,
-    requestType: polykey_v1_vaults_vaults_pb.PermGet,
-    responseType: polykey_v1_vaults_vaults_pb.Permission,
-    requestSerialize: serialize_polykey_v1_vaults_PermGet,
-    requestDeserialize: deserialize_polykey_v1_vaults_PermGet,
-    responseSerialize: serialize_polykey_v1_vaults_Permission,
-    responseDeserialize: deserialize_polykey_v1_vaults_Permission,
-  },
   vaultsVersion: {
     path: '/polykey.v1.ClientService/VaultsVersion',
     requestStream: false,
@@ -933,6 +911,17 @@ vaultsList: {
   responseSerialize: serialize_polykey_v1_vaults_LogEntry,
   responseDeserialize: deserialize_polykey_v1_vaults_LogEntry,
 },
+  vaultsScan: {
+    path: '/polykey.v1.ClientService/VaultsScan',
+    requestStream: false,
+    responseStream: true,
+    requestType: polykey_v1_nodes_nodes_pb.Node,
+    responseType: polykey_v1_vaults_vaults_pb.List,
+    requestSerialize: serialize_polykey_v1_nodes_Node,
+    requestDeserialize: deserialize_polykey_v1_nodes_Node,
+    responseSerialize: serialize_polykey_v1_vaults_List,
+    responseDeserialize: deserialize_polykey_v1_vaults_List,
+  },
   // Identities
   identitiesAuthenticate: {
     path: '/polykey.v1.ClientService/IdentitiesAuthenticate',
diff --git a/src/proto/js/polykey/v1/permissions/permissions_pb.d.ts b/src/proto/js/polykey/v1/permissions/permissions_pb.d.ts
index e4f06b338..60133c0c0 100644
--- a/src/proto/js/polykey/v1/permissions/permissions_pb.d.ts
+++ b/src/proto/js/polykey/v1/permissions/permissions_pb.d.ts
@@ -30,6 +30,34 @@ export namespace Actions {
     }
 }

+export class NodeActions extends
jspb.Message { + + hasNode(): boolean; + clearNode(): void; + getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; + setNode(value?: polykey_v1_nodes_nodes_pb.Node): NodeActions; + clearActionsList(): void; + getActionsList(): Array; + setActionsList(value: Array): NodeActions; + addActions(value: string, index?: number): string; + + serializeBinary(): Uint8Array; + toObject(includeInstance?: boolean): NodeActions.AsObject; + static toObject(includeInstance: boolean, msg: NodeActions): NodeActions.AsObject; + static extensions: {[key: number]: jspb.ExtensionFieldInfo}; + static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; + static serializeBinaryToWriter(message: NodeActions, writer: jspb.BinaryWriter): void; + static deserializeBinary(bytes: Uint8Array): NodeActions; + static deserializeBinaryFromReader(message: NodeActions, reader: jspb.BinaryReader): NodeActions; +} + +export namespace NodeActions { + export type AsObject = { + node?: polykey_v1_nodes_nodes_pb.Node.AsObject, + actionsList: Array, + } +} + export class ActionSet extends jspb.Message { hasNode(): boolean; diff --git a/src/proto/js/polykey/v1/permissions/permissions_pb.js b/src/proto/js/polykey/v1/permissions/permissions_pb.js index 29dd4ba20..53e129985 100644 --- a/src/proto/js/polykey/v1/permissions/permissions_pb.js +++ b/src/proto/js/polykey/v1/permissions/permissions_pb.js @@ -21,6 +21,7 @@ goog.object.extend(proto, polykey_v1_identities_identities_pb); goog.exportSymbol('proto.polykey.v1.permissions.ActionSet', null, global); goog.exportSymbol('proto.polykey.v1.permissions.ActionSet.NodeOrProviderCase', null, global); goog.exportSymbol('proto.polykey.v1.permissions.Actions', null, global); +goog.exportSymbol('proto.polykey.v1.permissions.NodeActions', null, global); /** * Generated by JsPbCodeGenerator. * @param {Array=} opt_data Optional initial data array, typically from a @@ -42,6 +43,27 @@ if (goog.DEBUG && !COMPILED) { */ proto.polykey.v1.permissions.Actions.displayName = 'proto.polykey.v1.permissions.Actions'; } +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.polykey.v1.permissions.NodeActions = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, proto.polykey.v1.permissions.NodeActions.repeatedFields_, null); +}; +goog.inherits(proto.polykey.v1.permissions.NodeActions, jspb.Message); +if (goog.DEBUG && !COMPILED) { + /** + * @public + * @override + */ + proto.polykey.v1.permissions.NodeActions.displayName = 'proto.polykey.v1.permissions.NodeActions'; +} /** * Generated by JsPbCodeGenerator. * @param {Array=} opt_data Optional initial data array, typically from a @@ -220,6 +242,213 @@ proto.polykey.v1.permissions.Actions.prototype.clearActionList = function() { +/** + * List of repeated fields within this message type. + * @private {!Array} + * @const + */ +proto.polykey.v1.permissions.NodeActions.repeatedFields_ = [2]; + + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * Optional fields that are not set will be set to undefined. 
+ * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * net/proto2/compiler/js/internal/generator.cc#kKeyword. + * @param {boolean=} opt_includeInstance Deprecated. whether to include the + * JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @return {!Object} + */ +proto.polykey.v1.permissions.NodeActions.prototype.toObject = function(opt_includeInstance) { + return proto.polykey.v1.permissions.NodeActions.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Deprecated. Whether to include + * the JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.polykey.v1.permissions.NodeActions} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.permissions.NodeActions.toObject = function(includeInstance, msg) { + var f, obj = { + node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f), + actionsList: (f = jspb.Message.getRepeatedField(msg, 2)) == null ? undefined : f + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.polykey.v1.permissions.NodeActions} + */ +proto.polykey.v1.permissions.NodeActions.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.polykey.v1.permissions.NodeActions; + return proto.polykey.v1.permissions.NodeActions.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.polykey.v1.permissions.NodeActions} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.polykey.v1.permissions.NodeActions} + */ +proto.polykey.v1.permissions.NodeActions.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = new polykey_v1_nodes_nodes_pb.Node; + reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); + msg.setNode(value); + break; + case 2: + var value = /** @type {string} */ (reader.readString()); + msg.addActions(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.polykey.v1.permissions.NodeActions.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.polykey.v1.permissions.NodeActions.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. 
+ * @param {!proto.polykey.v1.permissions.NodeActions} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.permissions.NodeActions.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getNode(); + if (f != null) { + writer.writeMessage( + 1, + f, + polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter + ); + } + f = message.getActionsList(); + if (f.length > 0) { + writer.writeRepeatedString( + 2, + f + ); + } +}; + + +/** + * optional polykey.v1.nodes.Node node = 1; + * @return {?proto.polykey.v1.nodes.Node} + */ +proto.polykey.v1.permissions.NodeActions.prototype.getNode = function() { + return /** @type{?proto.polykey.v1.nodes.Node} */ ( + jspb.Message.getWrapperField(this, polykey_v1_nodes_nodes_pb.Node, 1)); +}; + + +/** + * @param {?proto.polykey.v1.nodes.Node|undefined} value + * @return {!proto.polykey.v1.permissions.NodeActions} returns this +*/ +proto.polykey.v1.permissions.NodeActions.prototype.setNode = function(value) { + return jspb.Message.setWrapperField(this, 1, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.polykey.v1.permissions.NodeActions} returns this + */ +proto.polykey.v1.permissions.NodeActions.prototype.clearNode = function() { + return this.setNode(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {boolean} + */ +proto.polykey.v1.permissions.NodeActions.prototype.hasNode = function() { + return jspb.Message.getField(this, 1) != null; +}; + + +/** + * repeated string actions = 2; + * @return {!Array} + */ +proto.polykey.v1.permissions.NodeActions.prototype.getActionsList = function() { + return /** @type {!Array} */ (jspb.Message.getRepeatedField(this, 2)); +}; + + +/** + * @param {!Array} value + * @return {!proto.polykey.v1.permissions.NodeActions} returns this + */ +proto.polykey.v1.permissions.NodeActions.prototype.setActionsList = function(value) { + return jspb.Message.setField(this, 2, value || []); +}; + + +/** + * @param {string} value + * @param {number=} opt_index + * @return {!proto.polykey.v1.permissions.NodeActions} returns this + */ +proto.polykey.v1.permissions.NodeActions.prototype.addActions = function(value, opt_index) { + return jspb.Message.addToRepeatedField(this, 2, value, opt_index); +}; + + +/** + * Clears the list making it empty but non-null. + * @return {!proto.polykey.v1.permissions.NodeActions} returns this + */ +proto.polykey.v1.permissions.NodeActions.prototype.clearActionsList = function() { + return this.setActionsList([]); +}; + + + /** * Oneof group definitions for this message. Each group defines the field * numbers belonging to that group. 
When of these fields' value is set, all diff --git a/src/proto/js/polykey/v1/secrets/secrets_pb.d.ts b/src/proto/js/polykey/v1/secrets/secrets_pb.d.ts index 4fb4ca872..1e9d951d8 100644 --- a/src/proto/js/polykey/v1/secrets/secrets_pb.d.ts +++ b/src/proto/js/polykey/v1/secrets/secrets_pb.d.ts @@ -89,3 +89,23 @@ export namespace Directory { secretDirectory: string, } } + +export class Stat extends jspb.Message { + getJson(): string; + setJson(value: string): Stat; + + serializeBinary(): Uint8Array; + toObject(includeInstance?: boolean): Stat.AsObject; + static toObject(includeInstance: boolean, msg: Stat): Stat.AsObject; + static extensions: {[key: number]: jspb.ExtensionFieldInfo}; + static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; + static serializeBinaryToWriter(message: Stat, writer: jspb.BinaryWriter): void; + static deserializeBinary(bytes: Uint8Array): Stat; + static deserializeBinaryFromReader(message: Stat, reader: jspb.BinaryReader): Stat; +} + +export namespace Stat { + export type AsObject = { + json: string, + } +} diff --git a/src/proto/js/polykey/v1/secrets/secrets_pb.js b/src/proto/js/polykey/v1/secrets/secrets_pb.js index 58fec23fc..5008028d8 100644 --- a/src/proto/js/polykey/v1/secrets/secrets_pb.js +++ b/src/proto/js/polykey/v1/secrets/secrets_pb.js @@ -19,6 +19,7 @@ goog.object.extend(proto, polykey_v1_vaults_vaults_pb); goog.exportSymbol('proto.polykey.v1.secrets.Directory', null, global); goog.exportSymbol('proto.polykey.v1.secrets.Rename', null, global); goog.exportSymbol('proto.polykey.v1.secrets.Secret', null, global); +goog.exportSymbol('proto.polykey.v1.secrets.Stat', null, global); /** * Generated by JsPbCodeGenerator. * @param {Array=} opt_data Optional initial data array, typically from a @@ -82,6 +83,27 @@ if (goog.DEBUG && !COMPILED) { */ proto.polykey.v1.secrets.Directory.displayName = 'proto.polykey.v1.secrets.Directory'; } +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.polykey.v1.secrets.Stat = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, null, null); +}; +goog.inherits(proto.polykey.v1.secrets.Stat, jspb.Message); +if (goog.DEBUG && !COMPILED) { + /** + * @public + * @override + */ + proto.polykey.v1.secrets.Stat.displayName = 'proto.polykey.v1.secrets.Stat'; +} @@ -679,4 +701,134 @@ proto.polykey.v1.secrets.Directory.prototype.setSecretDirectory = function(value }; + + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * Optional fields that are not set will be set to undefined. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * net/proto2/compiler/js/internal/generator.cc#kKeyword. + * @param {boolean=} opt_includeInstance Deprecated. 
whether to include the + * JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @return {!Object} + */ +proto.polykey.v1.secrets.Stat.prototype.toObject = function(opt_includeInstance) { + return proto.polykey.v1.secrets.Stat.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Deprecated. Whether to include + * the JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.polykey.v1.secrets.Stat} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.secrets.Stat.toObject = function(includeInstance, msg) { + var f, obj = { + json: jspb.Message.getFieldWithDefault(msg, 1, "") + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.polykey.v1.secrets.Stat} + */ +proto.polykey.v1.secrets.Stat.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.polykey.v1.secrets.Stat; + return proto.polykey.v1.secrets.Stat.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.polykey.v1.secrets.Stat} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.polykey.v1.secrets.Stat} + */ +proto.polykey.v1.secrets.Stat.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = /** @type {string} */ (reader.readString()); + msg.setJson(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.polykey.v1.secrets.Stat.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.polykey.v1.secrets.Stat.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. 
+ * @param {!proto.polykey.v1.secrets.Stat} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.secrets.Stat.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getJson(); + if (f.length > 0) { + writer.writeString( + 1, + f + ); + } +}; + + +/** + * optional string json = 1; + * @return {string} + */ +proto.polykey.v1.secrets.Stat.prototype.getJson = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, "")); +}; + + +/** + * @param {string} value + * @return {!proto.polykey.v1.secrets.Stat} returns this + */ +proto.polykey.v1.secrets.Stat.prototype.setJson = function(value) { + return jspb.Message.setProto3StringField(this, 1, value); +}; + + goog.object.extend(exports, proto.polykey.v1.secrets); diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts index 072887bfe..9e1a08b0b 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts @@ -117,6 +117,11 @@ export class Pull extends jspb.Message { getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; setNode(value?: polykey_v1_nodes_nodes_pb.Node): Pull; + hasPullVault(): boolean; + clearPullVault(): void; + getPullVault(): Vault | undefined; + setPullVault(value?: Vault): Pull; + serializeBinary(): Uint8Array; toObject(includeInstance?: boolean): Pull.AsObject; static toObject(includeInstance: boolean, msg: Pull): Pull.AsObject; @@ -131,6 +136,7 @@ export namespace Pull { export type AsObject = { vault?: Vault.AsObject, node?: polykey_v1_nodes_nodes_pb.Node.AsObject, + pullVault?: Vault.AsObject, } } @@ -397,6 +403,38 @@ export namespace LogEntry { } } +export class InfoRequest extends jspb.Message { + + hasVault(): boolean; + clearVault(): void; + getVault(): Vault | undefined; + setVault(value?: Vault): InfoRequest; + + hasNode(): boolean; + clearNode(): void; + getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; + setNode(value?: polykey_v1_nodes_nodes_pb.Node): InfoRequest; + getAction(): string; + setAction(value: string): InfoRequest; + + serializeBinary(): Uint8Array; + toObject(includeInstance?: boolean): InfoRequest.AsObject; + static toObject(includeInstance: boolean, msg: InfoRequest): InfoRequest.AsObject; + static extensions: {[key: number]: jspb.ExtensionFieldInfo}; + static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; + static serializeBinaryToWriter(message: InfoRequest, writer: jspb.BinaryWriter): void; + static deserializeBinary(bytes: Uint8Array): InfoRequest; + static deserializeBinaryFromReader(message: InfoRequest, reader: jspb.BinaryReader): InfoRequest; +} + +export namespace InfoRequest { + export type AsObject = { + vault?: Vault.AsObject, + node?: polykey_v1_nodes_nodes_pb.Node.AsObject, + action: string, + } +} + export class PackChunk extends jspb.Message { getChunk(): Uint8Array | string; getChunk_asU8(): Uint8Array; diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.js b/src/proto/js/polykey/v1/vaults/vaults_pb.js index 6fbd3c4ac..2a78b7d18 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.js +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.js @@ -17,6 +17,7 @@ var global = Function('return this')(); var polykey_v1_nodes_nodes_pb = require('../../../polykey/v1/nodes/nodes_pb.js'); goog.object.extend(proto, polykey_v1_nodes_nodes_pb); goog.exportSymbol('proto.polykey.v1.vaults.Clone', null, global); 
+goog.exportSymbol('proto.polykey.v1.vaults.InfoRequest', null, global); goog.exportSymbol('proto.polykey.v1.vaults.List', null, global); goog.exportSymbol('proto.polykey.v1.vaults.Log', null, global); goog.exportSymbol('proto.polykey.v1.vaults.LogEntry', null, global); @@ -350,6 +351,27 @@ if (goog.DEBUG && !COMPILED) { */ proto.polykey.v1.vaults.LogEntry.displayName = 'proto.polykey.v1.vaults.LogEntry'; } +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.polykey.v1.vaults.InfoRequest = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, null, null); +}; +goog.inherits(proto.polykey.v1.vaults.InfoRequest, jspb.Message); +if (goog.DEBUG && !COMPILED) { + /** + * @public + * @override + */ + proto.polykey.v1.vaults.InfoRequest.displayName = 'proto.polykey.v1.vaults.InfoRequest'; +} /** * Generated by JsPbCodeGenerator. * @param {Array=} opt_data Optional initial data array, typically from a @@ -1149,7 +1171,8 @@ proto.polykey.v1.vaults.Pull.prototype.toObject = function(opt_includeInstance) proto.polykey.v1.vaults.Pull.toObject = function(includeInstance, msg) { var f, obj = { vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f) + node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f), + pullVault: (f = msg.getPullVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f) }; if (includeInstance) { @@ -1196,6 +1219,11 @@ proto.polykey.v1.vaults.Pull.deserializeBinaryFromReader = function(msg, reader) reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); msg.setNode(value); break; + case 3: + var value = new proto.polykey.v1.vaults.Vault; + reader.readMessage(value,proto.polykey.v1.vaults.Vault.deserializeBinaryFromReader); + msg.setPullVault(value); + break; default: reader.skipField(); break; @@ -1241,6 +1269,14 @@ proto.polykey.v1.vaults.Pull.serializeBinaryToWriter = function(message, writer) polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter ); } + f = message.getPullVault(); + if (f != null) { + writer.writeMessage( + 3, + f, + proto.polykey.v1.vaults.Vault.serializeBinaryToWriter + ); + } }; @@ -1318,6 +1354,43 @@ proto.polykey.v1.vaults.Pull.prototype.hasNode = function() { }; +/** + * optional Vault pull_vault = 3; + * @return {?proto.polykey.v1.vaults.Vault} + */ +proto.polykey.v1.vaults.Pull.prototype.getPullVault = function() { + return /** @type{?proto.polykey.v1.vaults.Vault} */ ( + jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 3)); +}; + + +/** + * @param {?proto.polykey.v1.vaults.Vault|undefined} value + * @return {!proto.polykey.v1.vaults.Pull} returns this +*/ +proto.polykey.v1.vaults.Pull.prototype.setPullVault = function(value) { + return jspb.Message.setWrapperField(this, 3, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.polykey.v1.vaults.Pull} returns this + */ +proto.polykey.v1.vaults.Pull.prototype.clearPullVault = function() { + return this.setPullVault(undefined); +}; + + +/** + * Returns whether this field is set. 
+ * @return {boolean} + */ +proto.polykey.v1.vaults.Pull.prototype.hasPullVault = function() { + return jspb.Message.getField(this, 3) != null; +}; + + @@ -3161,6 +3234,238 @@ proto.polykey.v1.vaults.LogEntry.prototype.setMessage = function(value) { +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * Optional fields that are not set will be set to undefined. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * net/proto2/compiler/js/internal/generator.cc#kKeyword. + * @param {boolean=} opt_includeInstance Deprecated. whether to include the + * JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @return {!Object} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.toObject = function(opt_includeInstance) { + return proto.polykey.v1.vaults.InfoRequest.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Deprecated. Whether to include + * the JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.polykey.v1.vaults.InfoRequest} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.vaults.InfoRequest.toObject = function(includeInstance, msg) { + var f, obj = { + vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), + node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f), + action: jspb.Message.getFieldWithDefault(msg, 3, "") + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.polykey.v1.vaults.InfoRequest} + */ +proto.polykey.v1.vaults.InfoRequest.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.polykey.v1.vaults.InfoRequest; + return proto.polykey.v1.vaults.InfoRequest.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.polykey.v1.vaults.InfoRequest} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.polykey.v1.vaults.InfoRequest} + */ +proto.polykey.v1.vaults.InfoRequest.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = new proto.polykey.v1.vaults.Vault; + reader.readMessage(value,proto.polykey.v1.vaults.Vault.deserializeBinaryFromReader); + msg.setVault(value); + break; + case 2: + var value = new polykey_v1_nodes_nodes_pb.Node; + reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); + msg.setNode(value); + break; + case 3: + var value = /** @type {string} */ (reader.readString()); + msg.setAction(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). 
+ * @return {!Uint8Array} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.polykey.v1.vaults.InfoRequest.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. + * @param {!proto.polykey.v1.vaults.InfoRequest} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.vaults.InfoRequest.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getVault(); + if (f != null) { + writer.writeMessage( + 1, + f, + proto.polykey.v1.vaults.Vault.serializeBinaryToWriter + ); + } + f = message.getNode(); + if (f != null) { + writer.writeMessage( + 2, + f, + polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter + ); + } + f = message.getAction(); + if (f.length > 0) { + writer.writeString( + 3, + f + ); + } +}; + + +/** + * optional Vault vault = 1; + * @return {?proto.polykey.v1.vaults.Vault} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.getVault = function() { + return /** @type{?proto.polykey.v1.vaults.Vault} */ ( + jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 1)); +}; + + +/** + * @param {?proto.polykey.v1.vaults.Vault|undefined} value + * @return {!proto.polykey.v1.vaults.InfoRequest} returns this +*/ +proto.polykey.v1.vaults.InfoRequest.prototype.setVault = function(value) { + return jspb.Message.setWrapperField(this, 1, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.polykey.v1.vaults.InfoRequest} returns this + */ +proto.polykey.v1.vaults.InfoRequest.prototype.clearVault = function() { + return this.setVault(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {boolean} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.hasVault = function() { + return jspb.Message.getField(this, 1) != null; +}; + + +/** + * optional polykey.v1.nodes.Node node = 2; + * @return {?proto.polykey.v1.nodes.Node} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.getNode = function() { + return /** @type{?proto.polykey.v1.nodes.Node} */ ( + jspb.Message.getWrapperField(this, polykey_v1_nodes_nodes_pb.Node, 2)); +}; + + +/** + * @param {?proto.polykey.v1.nodes.Node|undefined} value + * @return {!proto.polykey.v1.vaults.InfoRequest} returns this +*/ +proto.polykey.v1.vaults.InfoRequest.prototype.setNode = function(value) { + return jspb.Message.setWrapperField(this, 2, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.polykey.v1.vaults.InfoRequest} returns this + */ +proto.polykey.v1.vaults.InfoRequest.prototype.clearNode = function() { + return this.setNode(undefined); +}; + + +/** + * Returns whether this field is set. 
+ * @return {boolean} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.hasNode = function() { + return jspb.Message.getField(this, 2) != null; +}; + + +/** + * optional string action = 3; + * @return {string} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.getAction = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 3, "")); +}; + + +/** + * @param {string} value + * @return {!proto.polykey.v1.vaults.InfoRequest} returns this + */ +proto.polykey.v1.vaults.InfoRequest.prototype.setAction = function(value) { + return jspb.Message.setProto3StringField(this, 3, value); +}; + + + + + if (jspb.Message.GENERATE_TO_OBJECT) { /** * Creates an object representation of this proto. diff --git a/src/proto/schemas/polykey/v1/agent_service.proto b/src/proto/schemas/polykey/v1/agent_service.proto index 712ee5d8a..9e78598cd 100644 --- a/src/proto/schemas/polykey/v1/agent_service.proto +++ b/src/proto/schemas/polykey/v1/agent_service.proto @@ -13,10 +13,9 @@ service AgentService { rpc Echo(polykey.v1.utils.EchoMessage) returns (polykey.v1.utils.EchoMessage); // Vaults - rpc VaultsGitInfoGet (polykey.v1.vaults.Vault) returns (stream polykey.v1.vaults.PackChunk); + rpc VaultsGitInfoGet (polykey.v1.vaults.InfoRequest) returns (stream polykey.v1.vaults.PackChunk); rpc VaultsGitPackGet(stream polykey.v1.vaults.PackChunk) returns (stream polykey.v1.vaults.PackChunk); - rpc VaultsScan (polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.Vault); - rpc VaultsPermissionsCheck (polykey.v1.vaults.NodePermission) returns (polykey.v1.vaults.NodePermissionAllowed); + rpc VaultsScan (polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.List); // Nodes rpc NodesClosestLocalNodesGet (polykey.v1.nodes.Node) returns (polykey.v1.nodes.NodeTable); diff --git a/src/proto/schemas/polykey/v1/client_service.proto b/src/proto/schemas/polykey/v1/client_service.proto index eb5aa9ced..e50a8a474 100644 --- a/src/proto/schemas/polykey/v1/client_service.proto +++ b/src/proto/schemas/polykey/v1/client_service.proto @@ -46,21 +46,21 @@ service ClientService { rpc VaultsDelete(polykey.v1.vaults.Vault) returns (polykey.v1.utils.StatusMessage); rpc VaultsPull(polykey.v1.vaults.Pull) returns (polykey.v1.utils.StatusMessage); rpc VaultsClone(polykey.v1.vaults.Clone) returns (polykey.v1.utils.StatusMessage); - rpc VaultsScan(polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.List); rpc VaultsSecretsList(polykey.v1.vaults.Vault) returns (stream polykey.v1.secrets.Secret); rpc VaultsSecretsMkdir(polykey.v1.vaults.Mkdir) returns (polykey.v1.utils.StatusMessage); - rpc VaultsSecretsStat(polykey.v1.vaults.Vault) returns (polykey.v1.vaults.Stat); rpc VaultsSecretsDelete(polykey.v1.secrets.Secret) returns (polykey.v1.utils.StatusMessage); rpc VaultsSecretsEdit(polykey.v1.secrets.Secret) returns (polykey.v1.utils.StatusMessage); rpc VaultsSecretsGet(polykey.v1.secrets.Secret) returns (polykey.v1.secrets.Secret); rpc VaultsSecretsRename(polykey.v1.secrets.Rename) returns (polykey.v1.utils.StatusMessage); rpc VaultsSecretsNew(polykey.v1.secrets.Secret) returns (polykey.v1.utils.StatusMessage); rpc VaultsSecretsNewDir(polykey.v1.secrets.Directory) returns (polykey.v1.utils.StatusMessage); - rpc VaultsPermissionsSet(polykey.v1.vaults.PermSet) returns (polykey.v1.utils.StatusMessage); - rpc VaultsPermissionsUnset(polykey.v1.vaults.PermUnset) returns (polykey.v1.utils.StatusMessage); - rpc VaultsPermissions(polykey.v1.vaults.PermGet) returns (stream polykey.v1.vaults.Permission); + rpc 
VaultsSecretsStat(polykey.v1.secrets.Secret) returns (polykey.v1.secrets.Stat);
+    rpc VaultsPermissionsGet(polykey.v1.vaults.Vault) returns (stream polykey.v1.permissions.NodeActions);
+    rpc VaultsShare(polykey.v1.vaults.PermSet) returns (polykey.v1.utils.StatusMessage);
+    rpc VaultsUnshare(polykey.v1.vaults.PermUnset) returns (polykey.v1.utils.StatusMessage);
     rpc VaultsVersion(polykey.v1.vaults.Version) returns (polykey.v1.vaults.VersionResult);
     rpc VaultsLog(polykey.v1.vaults.Log) returns (stream polykey.v1.vaults.LogEntry);
+    rpc VaultsScan(polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.List);

     // Identities
     rpc IdentitiesAuthenticate(polykey.v1.identities.Provider) returns (stream polykey.v1.identities.AuthenticationProcess);
diff --git a/src/proto/schemas/polykey/v1/permissions/permissions.proto b/src/proto/schemas/polykey/v1/permissions/permissions.proto
index 65441b294..8285f92f0 100644
--- a/src/proto/schemas/polykey/v1/permissions/permissions.proto
+++ b/src/proto/schemas/polykey/v1/permissions/permissions.proto
@@ -9,6 +9,11 @@ message Actions {
   repeated string action = 1;
 }

+message NodeActions {
+  polykey.v1.nodes.Node node = 1;
+  repeated string actions = 2;
+}
+
 message ActionSet {
   oneof node_or_provider {
     polykey.v1.nodes.Node node = 1;
diff --git a/src/proto/schemas/polykey/v1/secrets/secrets.proto b/src/proto/schemas/polykey/v1/secrets/secrets.proto
index a8f5cc207..b13943466 100644
--- a/src/proto/schemas/polykey/v1/secrets/secrets.proto
+++ b/src/proto/schemas/polykey/v1/secrets/secrets.proto
@@ -19,3 +19,7 @@ message Directory {
   polykey.v1.vaults.Vault vault = 1;
   string secret_directory = 2;
 }
+
+message Stat {
+  string json = 1;
+}
diff --git a/src/proto/schemas/polykey/v1/vaults/vaults.proto b/src/proto/schemas/polykey/v1/vaults/vaults.proto
index efd2d45b0..478506e1f 100644
--- a/src/proto/schemas/polykey/v1/vaults/vaults.proto
+++ b/src/proto/schemas/polykey/v1/vaults/vaults.proto
@@ -32,6 +32,7 @@ message Mkdir {
 message Pull {
   Vault vault = 1;
   polykey.v1.nodes.Node node = 2;
+  Vault pull_vault = 3;
 }

 message Clone {
@@ -86,6 +87,11 @@ message LogEntry {
 }

 // Agent specific.
+message InfoRequest {
+  Vault vault = 1;
+  polykey.v1.nodes.Node node = 2;
+  string action = 3;
+}

 message PackChunk {
   bytes chunk = 1;
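For orientation on the schema changes above: vault permissions are now queried through the server-streaming VaultsPermissionsGet RPC, which yields one polykey.v1.permissions.NodeActions message per node that has been granted access. A minimal consumer sketch in TypeScript follows; it assumes a connected generated ClientServiceClient `client`, call metadata `meta`, an import alias `vaultsPB` for the generated vaults_pb module, and the Vault message's setNameOrId accessor (defined in vaults.proto but not shown in this hunk):

  const vaultMessage = new vaultsPB.Vault();
  vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId));
  // Server-streaming call: the generated client returns a readable stream
  const stream = client.vaultsPermissionsGet(vaultMessage, meta);
  for await (const nodeActions of stream) {
    // Each NodeActions message pairs a node with the vault actions granted to it
    console.log(nodeActions.getNode()?.getNodeId(), nodeActions.getActionsList());
  }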
diff --git a/src/sigchain/Sigchain.ts b/src/sigchain/Sigchain.ts
index 57f85ca06..a6d2e6f19 100644
--- a/src/sigchain/Sigchain.ts
+++ b/src/sigchain/Sigchain.ts
@@ -431,7 +431,8 @@ class Sigchain {
       const claimStream = this.sigchainClaimsDb.createKeyStream();
       let seq = 1;
       for await (const o of claimStream) {
-        map[seq] = IdInternal.fromBuffer(o);
+        // FIXME: really a buffer?
+        map[seq] = IdInternal.fromBuffer(o as Buffer);
         seq++;
       }
       return map;
@@ -458,7 +459,8 @@
         reverse: true,
       });
       for await (const o of keyStream) {
-        latestId = IdInternal.fromBuffer(o);
+        // FIXME: really a buffer?
+        latestId = IdInternal.fromBuffer(o as Buffer);
       }
       return latestId;
     });
diff --git a/src/validation/utils.ts b/src/validation/utils.ts
index 5c68e3d71..3ce13f258 100644
--- a/src/validation/utils.ts
+++ b/src/validation/utils.ts
@@ -9,7 +9,7 @@
 import type { NodeId, SeedNodes } from '../nodes/types';
 import type { ProviderId, IdentityId } from '../identities/types';
 import type { GestaltAction, GestaltId } from '../gestalts/types';
-import type { VaultAction } from '../vaults/types';
+import type { VaultAction, VaultId } from '../vaults/types';
 import type { Host, Hostname, Port } from '../network/types';
 import type { ClaimId } from '../claims/types';
 import * as validationErrors from './errors';
@@ -82,6 +82,16 @@ function parseClaimId(data: any): ClaimId {
   return data;
 }

+function parseVaultId(data: any): VaultId {
+  data = vaultsUtils.decodeVaultId(data);
+  if (data == null) {
+    throw new validationErrors.ErrorParse(
+      'Vault ID must be a multibase base58btc encoded string',
+    );
+  }
+  return data;
+}
+
 function parseGestaltAction(data: any): GestaltAction {
   if (!gestaltsUtils.isGestaltAction(data)) {
     throw new validationErrors.ErrorParse(
@@ -249,6 +259,7 @@
   parseNodeId,
   parseGestaltId,
   parseClaimId,
+  parseVaultId,
   parseGestaltAction,
   parseVaultAction,
   parseProviderId,
diff --git a/src/vaults/Vault.ts b/src/vaults/Vault.ts
new file mode 100644
index 000000000..8c3981c6c
--- /dev/null
+++ b/src/vaults/Vault.ts
@@ -0,0 +1,15 @@
+import type VaultInternal from './VaultInternal';
+
+interface Vault {
+  vaultDataDir: VaultInternal['vaultDataDir'];
+  vaultGitDir: VaultInternal['vaultGitDir'];
+  vaultId: VaultInternal['vaultId'];
+  writeF: VaultInternal['writeF'];
+  writeG: VaultInternal['writeG'];
+  readF: VaultInternal['readF'];
+  readG: VaultInternal['readG'];
+  log: VaultInternal['log'];
+  version: VaultInternal['version'];
+}
+
+export type { Vault };
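The new Vault type above projects a read/write subset of VaultInternal as the handle the rest of the codebase works with. A minimal usage sketch, assuming a `vault: Vault` obtained elsewhere (e.g. from VaultManager) and EncryptedFS's promise-style writeFile/readFile as used elsewhere in this patch:

  // Mutations run under the vault's internal lock
  await vault.writeF(async (efs) => {
    await efs.writeFile('secret-1', 'some secret content');
  });
  // Reads take the same lock and return the callback's result
  const content = await vault.readF(async (efs) => {
    return (await efs.readFile('secret-1', { encoding: 'utf8' })) as string;
  });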
diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts
index ff737fc11..45db2d7e3 100644
--- a/src/vaults/VaultInternal.ts
+++ b/src/vaults/VaultInternal.ts
@@ -1,358 +1,573 @@
+import type { ReadCommitResult } from 'isomorphic-git';
+import type { EncryptedFS } from 'encryptedfs';
+import type { DB, DBDomain, DBLevel } from '@matrixai/db';
 import type {
   VaultId,
+  VaultRef,
+  CommitId,
+  CommitLog,
   FileSystemReadable,
   FileSystemWritable,
-  CommitLog,
 } from './types';
-import type { MutexInterface } from 'async-mutex';
-
-import type { EncryptedFS } from 'encryptedfs';
 import type { KeyManager } from '../keys';
+import type { NodeId } from '../nodes/types';
+import type { ResourceAcquire } from '../utils';
 import path from 'path';
 import git from 'isomorphic-git';
 import { Mutex } from 'async-mutex';
 import Logger from '@matrixai/logger';
-import { CreateDestroy, ready } from '@matrixai/async-init/dist/CreateDestroy';
+import {
+  CreateDestroyStartStop,
+  ready,
+} from '@matrixai/async-init/dist/CreateDestroyStartStop';
 import * as vaultsUtils from './utils';
 import * as vaultsErrors from './errors';
-import { makeVaultIdPretty } from './utils';
-import { utils as nodesUtils } from '../nodes';
+import { withF, withG } from '../utils';

-const lastTag = 'last';
-
-interface VaultInternal extends CreateDestroy {}
-@CreateDestroy()
+interface VaultInternal extends CreateDestroyStartStop {}
+@CreateDestroyStartStop(
+  new vaultsErrors.ErrorVaultRunning(),
+  new vaultsErrors.ErrorVaultDestroyed(),
+)
 class VaultInternal {
-  public readonly baseDir: string;
-  public readonly gitDir: string;
-  public readonly vaultId: VaultId;
-
-  protected efsRoot: EncryptedFS;
-  protected efsVault: EncryptedFS;
-  protected logger: Logger;
-  protected lock: MutexInterface;
-  protected workingDir: string;
-  protected keyManager: KeyManager;
-
-  public static async create({
+  public static async createVaultInternal({
     vaultId,
+    db,
+    vaultsDb,
+    vaultsDbDomain,
     keyManager,
     efs,
+    remote = false,
     logger = new Logger(this.name),
     fresh = false,
   }: {
     vaultId: VaultId;
+    db: DB;
+    vaultsDb: DBLevel;
+    vaultsDbDomain: DBDomain;
     keyManager: KeyManager;
     efs: EncryptedFS;
+    remote?: boolean;
     logger?: Logger;
     fresh?: boolean;
-  }) {
-    logger.info(`Creating ${this.name}`);
-    if (fresh) {
-      try {
-        await efs.rmdir(makeVaultIdPretty(vaultId), { recursive: true });
-      } catch (err) {
-        if (err.code !== 'ENOENT') {
-          throw err;
-        }
-      }
-      await efs.mkdir(path.join(makeVaultIdPretty(vaultId), 'contents'), {
-        recursive: true,
-      });
-      const efsVault = await efs.chroot(
-        path.join(makeVaultIdPretty(vaultId), 'contents'),
-      );
-      await efsVault.start();
-      // Creating a new vault.
-      await git.init({
-        fs: efs,
-        dir: path.join(makeVaultIdPretty(vaultId), 'contents'),
-        gitdir: path.join(makeVaultIdPretty(vaultId), '.git'),
-      });
-      const workingDir = await git.commit({
-        fs: efs,
-        dir: path.join(makeVaultIdPretty(vaultId), 'contents'),
-        gitdir: path.join(makeVaultIdPretty(vaultId), '.git'),
-        author: {
-          name: makeVaultIdPretty(vaultId),
-        },
-        message: 'Initial Commit',
-      });
-      await efs.writeFile(
-        path.join(makeVaultIdPretty(vaultId), '.git', 'packed-refs'),
-        '# pack-refs with: peeled fully-peeled sorted',
-      );
-      await efs.writeFile(
-        path.join(makeVaultIdPretty(vaultId), '.git', 'workingDir'),
-        workingDir,
-      );
-      const vault = new VaultInternal({
-        vaultId,
-        keyManager,
-        efs,
-        efsVault,
-        workingDir,
-        logger,
-      });
-      logger.info(`Initialising vault at '${makeVaultIdPretty(vaultId)}'`);
-      return vault;
-    } else {
-      // Loading an existing vault.
- const efsVault = await efs.chroot( - path.join(makeVaultIdPretty(vaultId), 'contents'), - ); - await efsVault.start(); - const workingDir = (await efs.readFile( - path.join(makeVaultIdPretty(vaultId), '.git', 'workingDir'), - { - encoding: 'utf8', - }, - )) as string; - const vault = new VaultInternal({ - vaultId, - keyManager, - efs, - efsVault, - workingDir, - logger, - }); - logger.info(`Created ${this.name} at '${makeVaultIdPretty(vaultId)}'`); - return vault; - } + }): Promise { + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + logger.info(`Creating ${this.name} - ${vaultIdEncoded}`); + const vault = new VaultInternal({ + vaultId, + db, + vaultsDb, + vaultsDbDomain, + keyManager, + efs, + logger, + }); + await vault.start({ fresh }); + logger.info(`Created ${this.name} - ${vaultIdEncoded}`); + return vault; + } + + public static async cloneVaultInternal({ + vaultId, + db, + vaultsDb, + vaultsDbDomain, + keyManager, + efs, + logger = new Logger(this.name), + }: { + vaultId: VaultId; + db: DB; + vaultsDb: DBLevel; + vaultsDbDomain: DBDomain; + efs: EncryptedFS; + keyManager: KeyManager; + remote?: boolean; + logger?: Logger; + }): Promise { + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + logger.info(`Cloning ${this.name} - ${vaultIdEncoded}`); + // TODO: + // Perform the cloning operation to preseed state + // and also seed the remote state + const vault = new VaultInternal({ + vaultId, + db, + vaultsDb, + vaultsDbDomain, + keyManager, + efs, + logger, + }); + await vault.start(); + logger.info(`Cloned ${this.name} - ${vaultIdEncoded}`); + return vault; } + public readonly vaultId: VaultId; + public readonly vaultIdEncoded: string; + public readonly vaultDataDir: string; + public readonly vaultGitDir: string; + + protected logger: Logger; + protected db: DB; + protected vaultsDbDomain: DBDomain; + protected vaultsDb: DBLevel; + protected vaultDbDomain: DBDomain; + protected vaultDb: DBLevel; + protected keyManager: KeyManager; + protected efs: EncryptedFS; + protected efsVault: EncryptedFS; + protected remote: boolean; + protected _lock: Mutex = new Mutex(); + + public lock: ResourceAcquire = async () => { + const release = await this._lock.acquire(); + return [async () => release(), this._lock]; + }; + constructor({ vaultId, + db, + vaultsDbDomain, + vaultsDb, keyManager, efs, - efsVault, - workingDir, logger, }: { vaultId: VaultId; + db: DB; + vaultsDbDomain: DBDomain; + vaultsDb: DBLevel; keyManager: KeyManager; efs: EncryptedFS; - efsVault: EncryptedFS; - workingDir: string; logger: Logger; }) { - this.baseDir = path.join(makeVaultIdPretty(vaultId), 'contents'); - this.gitDir = path.join(makeVaultIdPretty(vaultId), '.git'); + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + this.logger = logger; this.vaultId = vaultId; + this.vaultIdEncoded = vaultIdEncoded; + this.vaultDataDir = path.join(vaultIdEncoded, 'data'); + this.vaultGitDir = path.join(vaultIdEncoded, '.git'); + this.db = db; + this.vaultsDbDomain = vaultsDbDomain; + this.vaultsDb = vaultsDb; this.keyManager = keyManager; - this.efsRoot = efs; + this.efs = efs; + } + + public async start({ + fresh = false, + }: { + fresh?: boolean; + } = {}): Promise { + this.logger.info( + `Starting ${this.constructor.name} - ${this.vaultIdEncoded}`, + ); + const vaultDbDomain = [...this.vaultsDbDomain, this.vaultIdEncoded]; + const vaultDb = await this.db.level(this.vaultIdEncoded, this.vaultsDb); + if (fresh) { + await vaultDb.clear(); + try { + await this.efs.rmdir(this.vaultIdEncoded, { + 
recursive: true, + }); + } catch (e) { + if (e.code !== 'ENOENT') { + throw e; + } + } + } + await this.efs.mkdir(this.vaultIdEncoded, { recursive: true }); + await this.efs.mkdir(this.vaultDataDir, { recursive: true }); + await this.efs.mkdir(this.vaultGitDir, { recursive: true }); + await this.setupMeta(); + await this.setupGit(); + const efsVault = await this.efs.chroot(this.vaultDataDir); + this.vaultDbDomain = vaultDbDomain; + this.vaultDb = vaultDb; this.efsVault = efsVault; - this.workingDir = workingDir; - this.logger = logger; - this.lock = new Mutex(); + this.logger.info( + `Started ${this.constructor.name} - ${this.vaultIdEncoded}`, + ); + } + + public async stop(): Promise { + this.logger.info( + `Stopping ${this.constructor.name} - ${this.vaultIdEncoded}`, + ); + this.logger.info( + `Stopped ${this.constructor.name} - ${this.vaultIdEncoded}`, + ); } public async destroy(): Promise { this.logger.info( - `Destroying ${this.constructor.name} at '${makeVaultIdPretty( - this.vaultId, - )}'`, + `Destroying ${this.constructor.name} - ${this.vaultIdEncoded}`, ); - const release = await this.lock.acquire(); - try { - await this.efsRoot.writeFile( - path.join(makeVaultIdPretty(this.vaultId), '.git', 'workingDirectory'), - this.workingDir, - ); - } finally { - release(); - } + const vaultDb = await this.db.level(this.vaultIdEncoded, this.vaultsDb); + await vaultDb.clear(); + await this.efs.rmdir(this.vaultIdEncoded, { + recursive: true, + }); this.logger.info( - `Destroyed ${this.constructor.name} at '${makeVaultIdPretty( - this.vaultId, - )}'`, + `Destroyed ${this.constructor.name} - ${this.vaultIdEncoded}`, ); } - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async commit( - f: (fs: FileSystemWritable) => Promise, - ): Promise { - const release = await this.lock.acquire(); - const message: string[] = []; + // Is remote? 
+ // well we don't just get remote + // we keep track of it + public async getRemote(): Promise<[NodeId, VaultId]> { + // Get the remote if exists + // if undefined you consider this to be not remote + // and therefore can proceed + // return Promise of [NodeId, VaultId] + throw Error('Not implemented'); + } + + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public async log( + ref: string | VaultRef = 'HEAD', + limit?: number, + ): Promise> { + if (!vaultsUtils.validateRef(ref)) { + throw new vaultsErrors.ErrorVaultReferenceInvalid(); + } + if (ref === vaultsUtils.tagLast) { + ref = vaultsUtils.canonicalBranch; + } + const commits = await git.log({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref, + depth: limit, + }); + return commits.map(({ oid, commit }: ReadCommitResult) => { + return { + commitId: oid as CommitId, + parent: commit.parent as Array, + author: { + name: commit.author.name, + timestamp: new Date(commit.author.timestamp * 1000), + }, + committer: { + name: commit.committer.name, + timestamp: new Date(commit.committer.timestamp * 1000), + }, + message: commit.message, + }; + }); + } + + /** + * Checks out the vault repository to specific commit ID or special tags + * This changes the working directory and updates the HEAD reference + */ + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public async version(ref: string | VaultRef = 'HEAD'): Promise { + if (!vaultsUtils.validateRef(ref)) { + throw new vaultsErrors.ErrorVaultReferenceInvalid(); + } + if (ref === vaultsUtils.tagLast) { + ref = vaultsUtils.canonicalBranch; + } try { await git.checkout({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - ref: this.workingDir, - }); - await f(this.efsVault); - const statusMatrix = await git.statusMatrix({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref, + force: true, }); - for (let file of statusMatrix) { - if (file[1] === file[2] && file[2] === file[3]) { - await git.resetIndex({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepath: file[0], - }); - file = ( - await git.statusMatrix({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepaths: [file[0]], - }) - ).pop()!; - if (file[1] === file[2] && file[2] === file[3]) continue; - } - if (file[2] !== file[3]) { - let status: 'added' | 'modified' | 'deleted'; - if (file[2] === 0) { - status = 'deleted'; - await git.remove({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepath: file[0], - }); - } else { - await git.add({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepath: file[0], - }); - if (file[1] === 1) { - status = 'modified'; - } else { - status = 'added'; - } - } - message.push(file[0] + ' ' + status); - } - } - if (message.length !== 0) { - this.workingDir = await git.commit({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - author: { - name: nodesUtils.encodeNodeId(this.keyManager.getNodeId()), - }, - message: message.toString(), - }); + } catch (e) { + if (e instanceof git.Errors.NotFoundError) { + throw new vaultsErrors.ErrorVaultReferenceMissing(); } - } finally { - const statusMatrix = await git.statusMatrix({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - }); - for await (const file of statusMatrix) { - if (file[2] === 0) { - await git.remove({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepath: file[0], - }); - } else { - await 
git.add({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepath: file[0], - }); - } - } - await git.checkout({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - ref: this.workingDir, - }); - release(); + throw e; } } - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async access<T>( - f: (fs: FileSystemReadable) => Promise<T>, - ): Promise<T> { - const release = await this.lock.acquire(); - try { + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public async readF<T>(f: (fs: FileSystemReadable) => Promise<T>): Promise<T> { + return withF([this.lock], async () => { return await f(this.efsVault); - } finally { - release(); - } + }); } - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async log(depth?: number, commit?: string): Promise<Array<CommitLog>> { - const commit_ = commit?.toLowerCase() === lastTag ? 'HEAD' : commit; - const log = await git.log({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - depth: depth, - ref: commit_, + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public readG<T, TReturn, TNext>( + g: (fs: FileSystemReadable) => AsyncGenerator<T, TReturn, TNext>, + ): AsyncGenerator<T, TReturn, TNext> { + return withG([this.lock], async function* () { + return yield* g(this.efsVault); }); - return log.map((readCommit) => { - return { - oid: readCommit.oid, - committer: readCommit.commit.committer.name, - timeStamp: readCommit.commit.committer.timestamp * 1000, // Needs to be in milliseconds for Date. - message: readCommit.commit.message, - }; + } + + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public async writeF( + f: (fs: FileSystemWritable) => Promise<void>, + ): Promise<void> { + return withF([this.lock], async () => { + await this.db.put(this.vaultsDbDomain, 'dirty', true); + // This should really be an internal property + // get whether this is remote, and the remote address + // if it is, we consider this repo an "attached repo" + // this vault is a "mirrored" vault + if (this.remote) { + // Mirrored vaults are immutable + throw new vaultsErrors.ErrorVaultImmutable(); + } + + // We have to chroot it + // and then remove it + // but is this done by itself?
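// For orientation, a hedged sketch of how callers are expected to drive
// readF/writeF (the secret name and content here are illustrative):
//
//   await vault.writeF(async (efs) => {
//     await efs.writeFile('secret-name', 'secret-content');
//   });
//   const content = await vault.readF(async (efs) => {
//     return (await efs.readFile('secret-name')) as Buffer;
//   });
//
// Each call runs under the vault lock, so writers serialize and the
// dirty flag above brackets exactly one mutation at a time.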
+ + await f(this.efsVault); + + await this.db.put(this.vaultsDbDomain, 'dirty', false); }); + + // Const message: string[] = []; + // try { + + // // If the version of the vault has been changed, checkout the working + // // directory to this point in history and discard any unlinked commits + // await git.checkout({ + // fs: this.efs, + // dir: this.vaultDataDir, + // gitdir: this.vaultGitDir, + // ref: this.workingDirIndex, + // }); + // + // // Efs/someVaultId/contents + // await f(this.efsVault); + // // Get the status of each file in the working directory + // // https://isomorphic-git.org/docs/en/statusMatrix + // const statusMatrix = await git.statusMatrix({ + // fs: this.efsRoot, + // dir: this.baseDir, + // gitdir: this.gitDir, + // }); + // for (let [ + // filePath, + // HEADStatus, + // workingDirStatus, + // stageStatus, + // ] of statusMatrix) { + // // Reset the index of files that are marked as 'unmodified' + // // The working directory, HEAD and staging area are all the same + // // https://github.com/MatrixAI/js-polykey/issues/260 + // if ( + // HEADStatus === workingDirStatus && + // workingDirStatus === stageStatus + // ) { + // await git.resetIndex({ + // fs: this.efsRoot, + // dir: this.baseDir, + // gitdir: this.gitDir, + // filepath: filePath, + // }); + // // Check if the file is still 'unmodified' and leave + // // it out of the commit if it is + // [filePath, HEADStatus, workingDirStatus, stageStatus] = ( + // await git.statusMatrix({ + // fs: this.efsRoot, + // dir: this.baseDir, + // gitdir: this.gitDir, + // filepaths: [filePath], + // }) + // ).pop()!; + // if ( + // HEADStatus === workingDirStatus && + // workingDirStatus === stageStatus + // ) + // continue; + // } + // // We want files in the working directory that are both different + // // from the head commit and the staged changes + // // If working directory and stage status are not equal then filepath has unstaged + // // changes in the working directory relative to both the HEAD and staging + // // area that need to be added + // // https://isomorphic-git.org/docs/en/statusMatrix + // if (workingDirStatus !== stageStatus) { + // let status: 'added' | 'modified' | 'deleted'; + // // If the working directory status is 0 then the file has + // // been deleted + // if (workingDirStatus === 0) { + // status = 'deleted'; + // await git.remove({ + // fs: this.efsRoot, + // dir: this.baseDir, + // gitdir: this.gitDir, + // filepath: filePath, + // }); + // } else { + // await git.add({ + // fs: this.efsRoot, + // dir: this.baseDir, + // gitdir: this.gitDir, + // filepath: filePath, + // }); + // // Check whether the file already exists inside the HEAD + // // commit and if it does then it is unmodified + // if (HEADStatus === 1) { + // status = 'modified'; + // } else { + // status = 'added'; + // } + // } + // message.push(filePath + ' ' + status); + // } + // } + // // Check if there were actual changes made to any files + // if (message.length !== 0) { + // this.logger.info( + // `Committing to Vault '${vaultsUtils.makeVaultIdPretty( + // this.vaultId, + // )}'`, + // ); + // this.workingDirIndex = await git.commit({ + // fs: this.efsRoot, + // dir: this.baseDir, + // gitdir: this.gitDir, + // author: { + // name: this.keyManager.getNodeId(), + // }, + // message: message.toString(), + // }); + // } + // } finally { + // // Check the status matrix for any unstaged file changes + // // which are considered dirty commits + // const statusMatrix = await git.statusMatrix({ + // fs: this.efsRoot, + // dir: 
this.baseDir, + // gitdir: this.gitDir, + // }); + // for await (const [filePath, _, workingDirStatus] of statusMatrix) { + // // For all files stage all changes, this is needed + // // so that we can check out all untracked files as well + // if (workingDirStatus === 0) { + // await git.remove({ + // fs: this.efsRoot, + // dir: this.baseDir, + // gitdir: this.gitDir, + // filepath: filePath, + // }); + // } else { + // await git.add({ + // fs: this.efsRoot, + // dir: this.baseDir, + // gitdir: this.gitDir, + // filepath: filePath, + // }); + // } + // } + // // Remove the staged dirty commits by checking out + // await git.checkout({ + // fs: this.efsRoot, + // dir: this.baseDir, + // gitdir: this.gitDir, + // ref: this.workingDirIndex, + // }); + // release(); + // } } - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async version(commit: string): Promise<void> { - // Checking for special tags. - const commit_ = commit.toLowerCase() === lastTag ? 'HEAD' : commit; - // TODO: add a tag for the start of the history so we can use that as the operator. + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public writeG<T, TReturn, TNext>( + g: (fs: FileSystemWritable) => AsyncGenerator<T, TReturn, TNext>, + ): AsyncGenerator<T, TReturn, TNext> { + return withG([this.lock], async function* () { + const result = yield* g(this.efsVault); + // At the end of the generator + // the mutation needs to be finished up + // just before returning - try { - await git.checkout({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - ref: commit_, - noUpdateHead: true, - }); - this.workingDir = commit_; - } catch (err) { - if (err.code === 'NotFoundError') { - throw new vaultsErrors.ErrorVaultCommitUndefined(); - } - throw err; + // Do what you need to do here, create the commit + return result; + }); + } + + /** + * Setup the vault metadata + */ + protected async setupMeta(): Promise<void> { + // Set up the vault metadata + // and make certain preparations + // the meta gets created first + // if the SoT (source of truth) is the database + // are we supposed to check this?
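// One plausible recovery step at this point (an assumption, not something
// implemented below): if `dirty` was left as true by a crashed write,
// force-checkout the working tree back to the last commit before the
// vault is used again.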
+ + if ((await this.db.get(this.vaultDbDomain, 'remote')) == null) { + await this.db.put(this.vaultDbDomain, 'remote', true); } + + // If this does not exist + // set up the default vaults db + await this.db.get(this.vaultsDbDomain, 'dirty'); + + // Remote: [NodeId, VaultId] | undefined + // dirty: boolean + // name: string } - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async readWorkingDirectory(): Promise<void> { - const workingDir = ( - await git.log({ - fs: this.efsRoot, - dir: path.join(vaultsUtils.makeVaultIdPretty(this.vaultId), 'contents'), - gitdir: path.join(vaultsUtils.makeVaultIdPretty(this.vaultId), '.git'), + /** + * TODO: review what happens when you are cloning, + * or whether you need to load a particular commit object ID here + */ + protected async setupGit(): Promise<CommitId> { + // Initialization is idempotent + // It works even with an existing git repository + await git.init({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + defaultBranch: vaultsUtils.canonicalBranch, + }); + let commitIdLatest: CommitId | undefined; + try { + const commits = await git.log({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranch, depth: 1, - }) - ).pop()!; - await this.efsRoot.writeFile( - path.join( - vaultsUtils.makeVaultIdPretty(this.vaultId), - '.git', - 'workingDir', - ), - workingDir.oid, - ); + }); + commitIdLatest = commits[0]?.oid as CommitId | undefined; + } catch (e) { + // Initialized repositories do not have any commits + // It complains that the `refs/heads/master` file does not exist + if (!(e instanceof git.Errors.NotFoundError)) { + throw e; + } + } + if (commitIdLatest == null) { + // All vault repositories start with an initial commit + commitIdLatest = (await git.commit({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + author: vaultsUtils.commitAuthor(this.keyManager.getNodeId()), + message: 'Initial Commit', + })) as CommitId; + } else { + // Force checkout to the latest commit + // This ensures that any uncommitted state is dropped + await git.checkout({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranch, + force: true, + }); + } + return commitIdLatest; } - - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async applySchema() {} } export default VaultInternal; diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 613603117..596863a13 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -1,46 +1,61 @@ -import type { DB, DBLevel } from '@matrixai/db'; -import type { - VaultId, - VaultName, - VaultMap, - VaultKey, - VaultList, - Vault, -} from './types'; +import type { MutexInterface } from 'async-mutex'; +import type { DB, DBDomain, DBLevel } from '@matrixai/db'; +import type { VaultId, VaultName, VaultActions } from './types'; +import type { Vault } from './Vault'; + import type { FileSystem } from '../types'; -import type { NodeId } from '../nodes/types'; import type { PolykeyWorkerManagerInterface } from '../workers/types'; +import type { NodeId } from '../nodes/types'; -import type { MutexInterface } from 'async-mutex'; -import type { POJO } from 'encryptedfs'; import type { KeyManager } from '../keys'; +import type { NodeConnectionManager, NodeManager } from '../nodes'; import type { GestaltGraph } from '../gestalts'; import type { ACL } from '../acl'; import type { NotificationsManager } from '../notifications'; -import type { NodeConnection, NodeConnectionManager } from
'../nodes'; -import type { GRPCClientAgent } from '../agent'; + import path from 'path'; -import Logger from '@matrixai/logger'; +import { PassThrough } from 'readable-stream'; import { Mutex } from 'async-mutex'; import git from 'isomorphic-git'; -import { PassThrough } from 'readable-stream'; -import * as grpc from '@grpc/grpc-js'; -import { EncryptedFS } from 'encryptedfs'; +import { EncryptedFS, errors as encryptedfsErrors } from 'encryptedfs'; +import Logger from '@matrixai/logger'; import { CreateDestroyStartStop, ready, } from '@matrixai/async-init/dist/CreateDestroyStartStop'; -import { utils as idUtils } from '@matrixai/id'; -import * as vaultsUtils from './utils'; -import * as vaultsErrors from './errors'; +import { IdInternal, utils as idUtils } from '@matrixai/id'; import VaultInternal from './VaultInternal'; -import { makeVaultId } from './utils'; -import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; -import * as utils from '../utils'; +import * as vaultsUtils from '../vaults/utils'; +import * as vaultsErrors from '../vaults/errors'; import * as gitUtils from '../git/utils'; import * as gitErrors from '../git/errors'; -import * as gestaltErrors from '../gestalts/errors'; import { utils as nodesUtils } from '../nodes'; +import { utils as keysUtils } from '../keys'; +import * as validationUtils from '../validation/utils'; +import config from '../config'; +import { mkdirExists } from '../utils'; +import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Object map pattern for each vault + */ +type VaultMap = Map< + VaultId, + { + vault?: VaultInternal; + lock: MutexInterface; + } +>; + +type VaultList = Map; + +// FIXME: this will be removed when moved into VaultInternal. +type VaultMetadata = { + name: VaultName; + workingDirectoryIndex: string; + remoteNode?: NodeId; + remoteVault?: string; +}; interface VaultManager extends CreateDestroyStartStop {} @CreateDestroyStartStop( @@ -48,150 +63,208 @@ interface VaultManager extends CreateDestroyStartStop {} new vaultsErrors.ErrorVaultManagerDestroyed(), ) class VaultManager { - public readonly vaultsPath: string; - - protected fs: FileSystem; - protected nodeConnectionManager: NodeConnectionManager; - protected gestaltGraph: GestaltGraph; - protected acl: ACL; - protected notificationsManager: NotificationsManager; - protected efs: EncryptedFS; - protected db: DB; - protected logger: Logger; - protected vaultsKey: VaultKey; - protected vaultsMap: VaultMap; - protected vaultsDbDomain: string; - protected vaultsNamesDbDomain: Array; - protected vaultsDb: DBLevel; - protected vaultsNamesDb: DBLevel; - protected keyManager: KeyManager; - static async createVaultManager({ vaultsPath, + db, + acl, keyManager, nodeConnectionManager, + nodeManager, gestaltGraph, - acl, - db, - vaultsKey, + notificationsManager, + keyBits = 256, fs = require('fs'), logger = new Logger(this.name), fresh = false, }: { vaultsPath: string; + db: DB; + acl: ACL; keyManager: KeyManager; nodeConnectionManager: NodeConnectionManager; + nodeManager: NodeManager; gestaltGraph: GestaltGraph; - acl: ACL; - db: DB; - vaultsKey: VaultKey; + notificationsManager: NotificationsManager; + keyBits?: 128 | 192 | 256; fs?: FileSystem; logger?: Logger; fresh?: boolean; }) { logger.info(`Creating ${this.name}`); + logger.info(`Setting vaults path to ${vaultsPath}`); const vaultManager = new VaultManager({ vaultsPath, + db, + acl, keyManager, nodeConnectionManager, + nodeManager, gestaltGraph, - acl, - db, - vaultsKey, + notificationsManager, + 
keyBits, fs, logger, }); - logger.info(`Created ${this.name}`); await vaultManager.start({ fresh }); + logger.info(`Created ${this.name}`); return vaultManager; } + public readonly vaultsPath: string; + public readonly efsPath: string; + public readonly keyBits: 128 | 192 | 256; + + protected fs: FileSystem; + protected logger: Logger; + protected db: DB; + protected acl: ACL; + protected keyManager: KeyManager; + // FIXME, add this to create and constructor + protected nodeConnectionManager: NodeConnectionManager; + protected nodeManager: NodeManager; + protected gestaltGraph: GestaltGraph; + protected notificationsManager: NotificationsManager; + protected vaultsDbDomain: DBDomain = [this.constructor.name]; + protected vaultsDb: DBLevel; + // VaultId -> VaultMetadata + protected vaultsMetaDbDomain: DBDomain = [this.vaultsDbDomain[0], 'meta']; + protected vaultsMetaDb: DBLevel; + protected vaultMap: VaultMap = new Map(); + protected vaultKey: Buffer; + protected efs: EncryptedFS; + constructor({ vaultsPath, + db, + acl, keyManager, nodeConnectionManager, + nodeManager, gestaltGraph, - acl, - db, - vaultsKey, + notificationsManager, + keyBits, fs, logger, }: { vaultsPath: string; + db: DB; + acl: ACL; keyManager: KeyManager; nodeConnectionManager: NodeConnectionManager; + nodeManager: NodeManager; gestaltGraph: GestaltGraph; - acl: ACL; - db: DB; - vaultsKey: VaultKey; + notificationsManager: NotificationsManager; + keyBits: 128 | 192 | 256; fs: FileSystem; logger: Logger; }) { + this.logger = logger; this.vaultsPath = vaultsPath; + this.efsPath = path.join(this.vaultsPath, config.defaults.efsBase); + this.db = db; + this.acl = acl; this.keyManager = keyManager; this.nodeConnectionManager = nodeConnectionManager; + this.nodeManager = nodeManager; this.gestaltGraph = gestaltGraph; - this.acl = acl; - this.db = db; - this.vaultsMap = new Map(); + this.notificationsManager = notificationsManager; + this.keyBits = keyBits; this.fs = fs; - this.vaultsKey = vaultsKey; - this.logger = logger; } public async start({ fresh = false, - }: { fresh?: boolean } = {}): Promise { + }: { + fresh?: boolean; + } = {}): Promise { try { this.logger.info(`Starting ${this.constructor.name}`); - this.vaultsDbDomain = 'VaultManager'; - this.vaultsDb = await this.db.level(this.vaultsDbDomain); - this.vaultsNamesDbDomain = [this.vaultsDbDomain, 'names']; - this.vaultsNamesDb = await this.db.level( - this.vaultsNamesDbDomain[1], + const vaultsDb = await this.db.level(this.vaultsDbDomain[0]); + const vaultsMetaDb = await this.db.level( + this.vaultsMetaDbDomain[1], this.vaultsDb, ); if (fresh) { - await this.vaultsDb.clear(); + await vaultsMetaDb.clear(); + await vaultsDb.clear(); await this.fs.promises.rm(this.vaultsPath, { force: true, recursive: true, }); - this.logger.info(`Removing vaults directory at '${this.vaultsPath}'`); } - await utils.mkdirExists(this.fs, this.vaultsPath); - this.efs = await EncryptedFS.createEncryptedFS({ - dbPath: this.vaultsPath, - dbKey: this.vaultsKey, - logger: this.logger, - }); - await this.efs.start(); + await mkdirExists(this.fs, this.vaultsPath); + const vaultKey = await this.setupKey(this.keyBits); + let efs; + try { + efs = await EncryptedFS.createEncryptedFS({ + dbPath: this.efsPath, + dbKey: vaultKey, + logger: this.logger.getChild('EncryptedFileSystem'), + }); + } catch (e) { + if (e instanceof encryptedfsErrors.ErrorEncryptedFSKey) { + throw new vaultsErrors.ErrorVaultManagerKey(); + } + throw new vaultsErrors.ErrorVaultManagerEFS(e.message, { + errno: e.errno, + syscall: 
e.syscall, + code: e.code, + path: e.path, + }); + } + this.vaultsDb = vaultsDb; + this.vaultsMetaDb = vaultsMetaDb; + this.vaultKey = vaultKey; + this.efs = efs; this.logger.info(`Started ${this.constructor.name}`); } catch (e) { this.logger.warn(`Failed Starting ${this.constructor.name}`); - await this.efs.stop(); + await this.efs?.stop(); throw e; } } public async stop(): Promise<void> { this.logger.info(`Stopping ${this.constructor.name}`); - // Destroying managed vaults. - for (const vault of this.vaultsMap.values()) { - await vault.vault?.destroy(); + + // Iterate over vaults in memory and destroy them, ensuring that + // the working directory commit state is saved + + for (const [vaultId, vaultAndLock] of this.vaultMap) { + // This locks each vault before destroying it, + // but since stop will already have blocked all the other methods, + // it effectively waits for all vault locks to be relinquished + // before attempting to do anything here + // if start and stop have their own lock, + // the same applies just by calling stop, + // in that it waits for stop to finish + + await this.transact(async () => { + // Maybe we should use stop instead; + // it would be clearer + // await vaultAndLock.vault?.stop(); + + await vaultAndLock.vault?.destroy(); + }, [vaultId]); } + + // Need to figure out if this id approach is a good idea + // the id should already be workable as a string + // unclear whether it also works as a Map key + await this.efs.stop(); + this.vaultMap = new Map(); this.logger.info(`Stopped ${this.constructor.name}`); } public async destroy(): Promise<void> { this.logger.info(`Destroying ${this.constructor.name}`); - // We want to remove any state for the vault manager. - // this includes clearing out all DB domains and destroying the EFS.
- const vaultsDb = await this.db.level(this.vaultsDbDomain); - await vaultsDb.clear(); await this.efs.destroy(); - this.logger.info(`Removing vaults directory at '${this.vaultsPath}'`); + // If the DB was stopped, the existing sublevel `this.vaultsDb` will not be valid + // Therefore we recreate the sublevel here + const vaultsDb = await this.db.level(this.vaultsDbDomain[0]); + await vaultsDb.clear(); await this.fs.promises.rm(this.vaultsPath, { force: true, recursive: true, @@ -207,31 +280,37 @@ class VaultManager { this.efs.unsetWorkerManager(); } - public async transaction<T>( - f: (vaultManager: VaultManager) => Promise<T>, - lock: MutexInterface, - ): Promise<T> { - const release = await lock.acquire(); - try { - return await f(this); - } finally { - release(); - } + // The with-locks mechanism + // can be generalised a bit + // we can address the locking mechanism in general + // with withF and withG + // this will become our generic way of locking anything + + // REPLACE THE FOLLOWING 3 functions + // replace this transact with our new withF and withG mechanisms + // all we need to do is create `ResourceAcquire` types in this domain + + /** + * By default will not lock anything + */ + public async transact<T>(f: () => Promise<T>, vaults: Array<VaultId> = []) { + // Will lock nothing by default + return await this.withLocks(f, vaults.map(this.getLock.bind(this))); } - protected async _transaction<T>( + protected async withLocks<T>( f: () => Promise<T>, - vaults: Array<VaultId> = [], + locks: Array<MutexInterface> = [], ): Promise<T> { const releases: Array<MutexInterface.Releaser> = []; - for (const vault of vaults) { - const lock = this.vaultsMap.get(idUtils.toString(vault)); - if (lock) releases.push(await lock.lock.acquire()); + for (const lock of locks) { + // Take the lock for each vault in memory and acquire it + releases.push(await lock.acquire()); } try { return await f(); } finally { - // Release them in the opposite order + // Release the vault locks in the opposite order releases.reverse(); for (const r of releases) { r(); @@ -239,23 +318,31 @@ class VaultManager { } } - @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async getVaultName(vaultId: VaultId): Promise<VaultName> { - const vaultMeta = await this.db.get( - this.vaultsNamesDbDomain, - idUtils.toBuffer(vaultId), - ); - if (vaultMeta == null) throw new vaultsErrors.ErrorVaultUndefined(); - return vaultMeta.name; + protected getLock(vaultId: VaultId): MutexInterface { + const vaultAndLock = this.vaultMap.get(vaultId); + if (vaultAndLock != null) return vaultAndLock.lock; + const lock = new Mutex(); + this.vaultMap.set(vaultId, { lock }); + return lock; } + /** + * Constructs a new vault instance with a given name and + * stores it in memory + */ + + // this should actually + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async createVault(vaultName: VaultName): Promise<Vault> { + public async createVault(vaultName: VaultName): Promise<VaultId> { const vaultId = await this.generateVaultId(); const lock = new Mutex(); - this.vaultsMap.set(idUtils.toString(vaultId), { lock }); - return await this._transaction(async () => { - await this.db.put(this.vaultsNamesDbDomain, idUtils.toBuffer(vaultId), { + this.vaultMap.set(vaultId, { lock }); + return await this.transact(async () => { + this.logger.info( + `Storing metadata for Vault ${vaultsUtils.encodeVaultId(vaultId)}`, + ); + await this.db.put(this.vaultsMetaDbDomain, idUtils.toBuffer(vaultId), { name: vaultName, }); const vault = await VaultInternal.create({ @@ -265,277 +352,290 @@ class VaultManager { logger:
this.logger.getChild(VaultInternal.name), fresh: true, }); - this.vaultsMap.set(idUtils.toString(vaultId), { lock, vault }); - return vault; + this.vaultMap.set(vaultId, { lock, vault }); + return vault.vaultId; }, [vaultId]); } + /** + * Retrieves the vault metadata associated with + * a given vault Id + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async getVaultMeta(vaultId: VaultId): Promise<VaultMetadata> { + const vaultMeta = await this.db.get<VaultMetadata>( + this.vaultsMetaDbDomain, + idUtils.toBuffer(vaultId), + ); + if (vaultMeta == null) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + return vaultMeta; + } + + /** + * Removes the metadata and EFS state of a vault using a + * given vault Id + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async destroyVault(vaultId: VaultId) { - await this._transaction(async () => { - const vaultName = await this.getVaultName(vaultId); - if (!vaultName) return; - await this.db.del(this.vaultsNamesDbDomain, idUtils.toBuffer(vaultId)); - this.vaultsMap.delete(idUtils.toString(vaultId)); - await this.efs.rmdir(vaultsUtils.makeVaultIdPretty(vaultId), { + this.logger.info(`Destroying Vault ${vaultsUtils.encodeVaultId(vaultId)}`); + await this.transact(async () => { + const vaultMeta = await this.getVaultMeta(vaultId); + if (!vaultMeta) return; + await this.db.del(this.vaultsMetaDbDomain, idUtils.toBuffer(vaultId)); + this.vaultMap.delete(vaultId); + await this.efs.rmdir(vaultsUtils.encodeVaultId(vaultId), { recursive: true, }); }, [vaultId]); } - @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async openVault(vaultId: VaultId): Promise<Vault> { - const vaultName = await this.getVaultName(vaultId); - if (!vaultName) throw new vaultsErrors.ErrorVaultUndefined(); - return await this.getVault(vaultId); - } + // /** + // * Constructs or returns the in-memory instance of a vault + // * from metadata using a given vault Id + // */ + // @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + // private async openVault(vaultId: VaultId): Promise<Vault> { + // const vaultMeta = await this.getVaultMeta(vaultId); + // if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + // return await this.getVault(vaultId); + // } + /** + * Writes the working directory commit state of a vault Id + * and removes the vault from memory + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async closeVault(vaultId: VaultId) { - const vaultName = await this.getVaultName(vaultId); - if (!vaultName) throw new vaultsErrors.ErrorVaultUndefined(); + const vaultMeta = await this.getVaultMeta(vaultId); + if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); const vault = await this.getVault(vaultId); + // Updating workingDirectoryIndex in the vault metadata. + vaultMeta.workingDirectoryIndex = vault.getworkingDirIndex(); + await this.db.put( + this.vaultsMetaDbDomain, + idUtils.toBuffer(vaultId), + vaultMeta, + ); await vault.destroy(); - this.vaultsMap.delete(idUtils.toString(vaultId)); + this.vaultMap.delete(vaultId); } + /** + * Lists the vault name and associated vault Id of all + * the vaults stored + */ + // FIXME: this will have to peek into the vaults metadata. + // This will be inside the vaultInternal now. Need to work this out.
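// Note on the implementation below: values coming out of
// createReadStream bypass the DB's typed get(), so they arrive still
// encrypted; hence the manual deserializeDecrypt on each streamed value.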
@ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async listVaults(): Promise<VaultList> { const vaults: VaultList = new Map(); - for await (const o of this.vaultsNamesDb.createReadStream({})) { + // Stream all the vault Id and associated metadata values + for await (const o of this.vaultsMetaDb.createReadStream({})) { const dbMeta = (o as any).value; const dbId = (o as any).key; - const vaultMeta = await this.db.deserializeDecrypt(dbMeta, false); - vaults.set(vaultMeta.name, makeVaultId(dbId)); + // Manually decrypt the vault metadata + const vaultMeta = await this.db.deserializeDecrypt<VaultMetadata>( + dbMeta, + false, + ); + vaults.set(vaultMeta.name, IdInternal.fromBuffer(dbId)); } return vaults; } + /** + * Changes the vault name metadata of a vault Id + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async renameVault( vaultId: VaultId, newVaultName: VaultName, ): Promise<void> { - await this._transaction(async () => { - const meta = await this.db.get( - this.vaultsNamesDbDomain, + this.logger.info(`Renaming Vault ${vaultsUtils.encodeVaultId(vaultId)}`); + await this.transact(async () => { + const meta = await this.db.get<VaultMetadata>( + this.vaultsMetaDbDomain, idUtils.toBuffer(vaultId), ); - if (!meta) throw new vaultsErrors.ErrorVaultUndefined(); + if (!meta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); meta.name = newVaultName; await this.db.put( - this.vaultsNamesDbDomain, + this.vaultsMetaDbDomain, idUtils.toBuffer(vaultId), meta, ); }, [vaultId]); } + /** + * Retrieves the vault Id associated with a vault name + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async getVaultId(vaultName: VaultName): Promise<VaultId | undefined> { - for await (const o of this.vaultsNamesDb.createReadStream({})) { + // Stream all the metadata and associated vault Id values + for await (const o of this.vaultsMetaDb.createReadStream({})) { const dbMeta = (o as any).value; const dbId = (o as any).key; - const vaultMeta = await this.db.deserializeDecrypt(dbMeta, false); + // Manually decrypt the vault metadata + const vaultMeta = await this.db.deserializeDecrypt<VaultMetadata>( + dbMeta, + false, + ); + // If the name metadata matches the given name, return the associated vault Id if (vaultName === vaultMeta.name) { - return makeVaultId(dbId); + return IdInternal.fromBuffer(dbId); } } } + /** + * Returns a dictionary of VaultActions for each node. + * @param vaultId + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async getVaultPermission( + vaultId: VaultId, + ): Promise<Record<NodeId, VaultActions>> { + const rawPermissions = await this.acl.getVaultPerm(vaultId); + const permissions: Record<NodeId, VaultActions> = {}; + // Getting the relevant information.
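// For example, a vault shared with a single node would yield a record of
// roughly this shape (the key is the node's ID):
//   { [nodeId]: { clone: null, pull: null } }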
+ for (const nodeId in rawPermissions) { + permissions[nodeId] = rawPermissions[nodeId].vaults[vaultId]; + } + return permissions; + } + + /** + * Sets clone, pull and scan permissions of a vault for a + * gestalt and send a notification to this gestalt + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async shareVault(vaultId: VaultId, nodeId: NodeId): Promise { - const vaultName = await this.getVaultName(vaultId); - if (!vaultName) throw new vaultsErrors.ErrorVaultUndefined(); + const vaultMeta = await this.getVaultMeta(vaultId); + if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + await this.transact(async () => { + await this.gestaltGraph._transaction(async () => { + await this.acl._transaction(async () => { + // Node Id permissions translated to other nodes in + // a gestalt by other domains + await this.gestaltGraph.setGestaltActionByNode(nodeId, 'scan'); + await this.acl.setVaultAction(vaultId, nodeId, 'pull'); + await this.acl.setVaultAction(vaultId, nodeId, 'clone'); + await this.notificationsManager.sendNotification(nodeId, { + type: 'VaultShare', + vaultId: vaultId.toString(), + vaultName: vaultMeta.name, + actions: { + clone: null, + pull: null, + }, + }); + }); + }); + }, [vaultId]); + } + + /** + * Unsets clone, pull and scan permissions of a vault for a + * gestalt + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async unshareVault(vaultId: VaultId, nodeId: NodeId): Promise { + const vaultMeta = await this.getVaultMeta(vaultId); + if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); return await this.gestaltGraph._transaction(async () => { return await this.acl._transaction(async () => { - const gestalt = await this.gestaltGraph.getGestaltByNode(nodeId); - if (gestalt == null) { - throw new gestaltErrors.ErrorGestaltsGraphNodeIdMissing(); - } - const nodes = gestalt.nodes; - for (const node in nodes) { - await this.acl.setNodeAction(nodeId, 'scan'); - await this.acl.setVaultAction( - vaultId, - nodesUtils.decodeNodeId(nodes[node].id)!, - 'pull', - ); - await this.acl.setVaultAction( - vaultId, - nodesUtils.decodeNodeId(nodes[node].id)!, - 'clone', - ); - } - await this.notificationsManager.sendNotification(nodeId, { - type: 'VaultShare', - vaultId: idUtils.toString(vaultId), - vaultName, - actions: { - clone: null, - pull: null, - }, - }); + await this.gestaltGraph.unsetGestaltActionByNode(nodeId, 'scan'); + await this.acl.unsetVaultAction(vaultId, nodeId, 'pull'); + await this.acl.unsetVaultAction(vaultId, nodeId, 'clone'); }); }); } + /** + * Clones the contents of a remote vault into a new local + * vault instance + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async cloneVault( nodeId: NodeId, vaultNameOrId: VaultId | VaultName, - ): Promise { - let vaultName, remoteVaultId; - return await this.nodeConnectionManager.withConnF( - nodeId, - async (connection) => { - const client = connection.getClient(); - const vaultId = await this.generateVaultId(); - const lock = new Mutex(); - this.vaultsMap.set(idUtils.toString(vaultId), { lock }); - return await this._transaction(async () => { - await this.efs.mkdir( - path.join(vaultsUtils.makeVaultIdPretty(vaultId), 'contents'), - { recursive: true }, - ); - const request = async ({ - url, - method = 'GET', - headers = {}, - body = [Buffer.from('')], - }: { - url: string; - method: string; - headers: POJO; - body: Buffer[]; - }) => { - if (method === 'GET') { - const infoResponse = { - async *[Symbol.iterator]() { - const request = new 
vaultsPB.Vault(); - if (typeof vaultNameOrId === 'string') { - request.setNameOrId(vaultNameOrId); - } else { - request.setNameOrId(idUtils.toString(vaultNameOrId)); - } - const response = client.vaultsGitInfoGet(request); - response.stream.on('metadata', async (meta) => { - vaultName = meta.get('vaultName').pop()!.toString(); - remoteVaultId = makeVaultId( - meta.get('vaultId').pop()!.toString(), - ); - }); - for await (const resp of response) { - yield resp.getChunk_asU8(); - } - }, - }; - return { - url: url, - method: method, - body: infoResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else if (method === 'POST') { - const packResponse = { - async *[Symbol.iterator]() { - const responseBuffers: Array = []; - const meta = new grpc.Metadata(); - if (typeof vaultNameOrId === 'string') { - meta.set('vaultNameOrId', vaultNameOrId); - } else { - meta.set( - 'vaultNameOrId', - vaultsUtils.makeVaultIdPretty(vaultNameOrId), - ); - } - const stream = client.vaultsGitPackGet(meta); - const write = utils.promisify(stream.write).bind(stream); - stream.on('data', (d) => { - responseBuffers.push(d.getChunk_asU8()); - }); - const chunk = new vaultsPB.PackChunk(); - chunk.setChunk(body[0]); - write(chunk); - stream.end(); - yield await new Promise((resolve) => { - stream.once('end', () => { - resolve(Buffer.concat(responseBuffers)); - }); - }); - }, - }; - return { - url: url, - method: method, - body: packResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else { - throw new Error('Method not supported'); - } - }; - await git.clone({ - fs: this.efs, - http: { request }, - dir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), 'contents'), - gitdir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), - url: 'http://', - singleBranch: true, - }); - await this.efs.writeFile( - path.join( - vaultsUtils.makeVaultIdPretty(vaultId), - '.git', - 'packed-refs', - ), - '# pack-refs with: peeled fully-peeled sorted', - ); - const workingDir = ( - await git.log({ - fs: this.efs, - dir: path.join( - vaultsUtils.makeVaultIdPretty(vaultId), - 'contents', - ), - gitdir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), - depth: 1, - }) - ).pop()!; - await this.efs.writeFile( - path.join( - vaultsUtils.makeVaultIdPretty(vaultId), - '.git', - 'workingDir', - ), - workingDir.oid, - ); - const vault = await VaultInternal.create({ - vaultId, - keyManager: this.keyManager, - efs: this.efs, - logger: this.logger.getChild(VaultInternal.name), - }); - this.vaultsMap.set(idUtils.toString(vaultId), { lock, vault }); - await this.db.put( - this.vaultsNamesDbDomain, - idUtils.toBuffer(vaultId), - { - name: vaultName, - defaultPullNode: nodeId, - defaultPullVault: idUtils.toBuffer(remoteVaultId), - }, - ); - return vault; - }, [vaultId]); - }, + ): Promise { + // This error flag will contain the error returned by the cloning grpc stream + let error; + // Let vaultName, remoteVaultId; + const thisNodeId = this.keyManager.getNodeId(); + const nodeConnection = await this.nodeManager.getConnectionToNode(nodeId); + const client = nodeConnection.getClient(); + const vaultId = await this.generateVaultId(); + const lock = new Mutex(); + this.vaultMap.set(vaultId, { lock }); + this.logger.info( + `Cloning Vault ${vaultsUtils.encodeVaultId(vaultId)} on Node ${nodeId}`, ); + return await this.transact(async () => { + // Make the directory where the .git files will be auto generated and + // where the contents will be cloned to ('contents' file) + await 
this.efs.mkdir( + path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + { recursive: true }, + ); + const [request, vaultName, remoteVaultId] = await vaultsUtils.request( + client, + thisNodeId, + vaultNameOrId, + ); + try { + await git.clone({ + fs: this.efs, + http: { request }, + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), + url: 'http://', + singleBranch: true, + }); + } catch (err) { + // If the error flag set and we have the generalised SmartHttpError from + // isomorphic git then we need to throw the polykey error + if (err instanceof git.Errors.SmartHttpError && error) { + throw error; + } + throw err; + } + const workingDirIndex = ( + await git.log({ + fs: this.efs, + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), + depth: 1, + }) + ).pop()!; + // Store the node and vault Id to be used as default remote values when pulling + await this.db.put(this.vaultsMetaDbDomain, idUtils.toBuffer(vaultId), { + name: vaultName, + workingDirectoryIndex: workingDirIndex.oid, + remoteNode: nodeId, + remoteVault: remoteVaultId.toString(), + } as VaultMetadata); + const vault = await VaultInternal.create({ + vaultId, + keyManager: this.keyManager, + efs: this.efs, + logger: this.logger.getChild(VaultInternal.name), + remote: true, + }); + this.vaultMap.set(vaultId, { lock, vault }); + this.logger.info( + `Cloned Vault ${vaultsUtils.encodeVaultId(vaultId)} on Node ${nodeId}`, + ); + return vault.vaultId; + }, [vaultId]); } + /** + * Pulls the contents of a remote vault into an existing vault + * instance + */ public async pullVault({ vaultId, pullNodeId, @@ -544,128 +644,57 @@ class VaultManager { vaultId: VaultId; pullNodeId?: NodeId; pullVaultNameOrId?: VaultId | VaultName; - }): Promise { - throw Error('Not fully implemented.'); - let metaChange = 0; - let vaultMeta, remoteVaultId; - return await this._transaction(async () => { - if (pullNodeId == null || pullVaultNameOrId == null) { - vaultMeta = await this.db.get( - this.vaultsNamesDbDomain, - idUtils.toBuffer(vaultId), + }): Promise { + return await this.transact(async () => { + // This error flag will contain the error returned by the cloning grpc stream + let error; + // Keeps track of whether the metadata needs changing to avoid unnecessary db ops + // 0 = no change, 1 = change with vault Id, 2 = change with vault name + let metaChange = 0; + const thisNodeId = this.keyManager.getNodeId(); + const vaultMeta = await this.db.get( + this.vaultsMetaDbDomain, + idUtils.toBuffer(vaultId), + ); + if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUnlinked(); + if (pullNodeId == null) { + pullNodeId = vaultMeta.remoteNode; + } else { + metaChange = 1; + vaultMeta.remoteNode = pullNodeId; + } + if (pullVaultNameOrId == null) { + pullVaultNameOrId = IdInternal.fromString( + vaultMeta.remoteVault!, ); - if (!vaultMeta) throw new vaultsErrors.ErrorVaultUnlinked(); - if (pullNodeId == null) { - pullNodeId = vaultMeta.defaultPullNode; + } else { + metaChange = 1; + if (typeof pullVaultNameOrId === 'string') { + metaChange = 2; } else { - metaChange = 1; - vaultMeta.defaultPullNode = pullNodeId; - } - if (pullVaultNameOrId == null) { - pullVaultNameOrId = makeVaultId( - idUtils.fromBuffer(Buffer.from(vaultMeta.defaultPullVault.data)), - ); - } else { - metaChange = 1; - if (typeof pullVaultNameOrId === 'string') { - metaChange = 2; - } else { - vaultMeta.defaultPullVault = 
idUtils.toBuffer(pullVaultNameOrId); - } + vaultMeta.remoteVault = pullVaultNameOrId.toString(); } } - // TODO: this will need a generator variant of nodeConnectionManager.withConnection() to fix. - // const nodeConnection = await this.nodeConnectionManager.getConnectionToNode( - // pullNodeId!, - // ); - let nodeConnection: NodeConnection; - const client = nodeConnection!.getClient(); - const request = async ({ - url, - method = 'GET', - headers = {}, - body = [Buffer.from('')], - }: { - url: string; - method: string; - headers: POJO; - body: Buffer[]; - }) => { - if (method === 'GET') { - const infoResponse = { - async *[Symbol.iterator]() { - const request = new vaultsPB.Vault(); - if (typeof pullVaultNameOrId === 'string') { - request.setNameOrId(pullVaultNameOrId); - } else { - request.setNameOrId(idUtils.toString(pullVaultNameOrId!)); - } - const response = client.vaultsGitInfoGet(request); - response.stream.on('metadata', async (meta) => { - remoteVaultId = makeVaultId( - meta.get('vaultId').pop()!.toString(), - ); - }); - for await (const resp of response) { - yield resp.getChunk_asU8(); - } - }, - }; - return { - url: url, - method: method, - body: infoResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else if (method === 'POST') { - const packResponse = { - async *[Symbol.iterator]() { - const responseBuffers: Array = []; - const meta = new grpc.Metadata(); - if (typeof pullVaultNameOrId === 'string') { - meta.set('vaultNameOrId', pullVaultNameOrId); - } else { - meta.set( - 'vaultNameOrId', - vaultsUtils.makeVaultIdPretty(pullVaultNameOrId), - ); - } - const stream = client.vaultsGitPackGet(meta); - const write = utils.promisify(stream.write).bind(stream); - stream.on('data', (d) => { - responseBuffers.push(d.getChunk_asU8()); - }); - const chunk = new vaultsPB.PackChunk(); - chunk.setChunk(body[0]); - write(chunk); - stream.end(); - yield await new Promise((resolve) => { - stream.once('end', () => { - resolve(Buffer.concat(responseBuffers)); - }); - }); - }, - }; - return { - url: url, - method: method, - body: packResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else { - throw new Error('Method not supported'); - } - }; + this.logger.info( + `Pulling Vault ${vaultsUtils.encodeVaultId( + vaultId, + )} from Node ${pullNodeId}`, + ); + const nodeConnection = await this.nodeManager.getConnectionToNode( + pullNodeId!, + ); + const client = nodeConnection.getClient(); + const [request,, remoteVaultId] = await vaultsUtils.request( + client, + thisNodeId, + pullVaultNameOrId!, + ); try { await git.pull({ fs: this.efs, http: { request }, - dir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), 'contents'), - gitdir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), url: `http://`, ref: 'HEAD', singleBranch: true, @@ -674,72 +703,80 @@ class VaultManager { }, }); } catch (err) { - if (err instanceof git.Errors.MergeNotSupportedError) { - throw new vaultsErrors.ErrorVaultMergeConflict( + // If the error flag set and we have the generalised SmartHttpError from + // isomorphic git then we need to throw the polykey error + if (err instanceof git.Errors.SmartHttpError && error) { + throw error; + } else if (err instanceof git.Errors.MergeNotSupportedError) { + throw new vaultsErrors.ErrorVaultsMergeConflict( 'Merge Conflicts are not supported yet', ); } throw err; } if (metaChange !== 0) { - 
if (metaChange === 2) vaultMeta.defaultPullVault = remoteVaultId; + if (metaChange === 2) vaultMeta.remoteVault = remoteVaultId; await this.db.put( - this.vaultsNamesDbDomain, + this.vaultsMetaDbDomain, idUtils.toBuffer(vaultId), vaultMeta, ); } const vault = await this.getVault(vaultId); - await vault.readWorkingDirectory(); - return vault; + // Store the working directory commit state in the '.git' directory + this.logger.info( + `Pulled Vault ${vaultsUtils.encodeVaultId( + vaultId, + )} from Node ${pullNodeId}`, + ); + return vault.vaultId; }, [vaultId]); } - protected async generateVaultId(): Promise<VaultId> { - let vaultId = vaultsUtils.generateVaultId(); - let i = 0; - while (await this.efs.exists(idUtils.toString(vaultId))) { - i++; - if (i > 50) { - throw new vaultsErrors.ErrorCreateVaultId( - 'Could not create a unique vaultId after 50 attempts', - ); - } - vaultId = vaultsUtils.generateVaultId(); - } - return vaultId; - } - + /** + * Handler for receiving http GET requests when being + * cloned or pulled from + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async *handleInfoRequest( vaultId: VaultId, ): AsyncGenerator<Buffer> { - const service = 'upload-pack'; + // Adherence to the git protocol yield Buffer.from( - gitUtils.createGitPacketLine('# service=git-' + service + '\n'), + gitUtils.createGitPacketLine('# service=git-upload-pack\n'), ); yield Buffer.from('0000'); - for (const buffer of (await gitUtils.uploadPack( - this.efs, - path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), - true, - )) ?? []) { + // Read the commit state of the vault + const uploadPack = await gitUtils.uploadPack({ + fs: this.efs, + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), + advertiseRefs: true, + }); + for (const buffer of uploadPack) { yield buffer; } } + /** + * Handler for receiving http POST requests when being + * cloned or pulled from + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async handlePackRequest( vaultId: VaultId, body: Buffer, - ): Promise { + ): Promise<[PassThrough, PassThrough]> { if (body.toString().slice(4, 8) === 'want') { + // Parse the request to get the wanted git object const wantedObjectId = body.toString().slice(9, 49); const packResult = await gitUtils.packObjects({ fs: this.efs, - gitdir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), refs: [wantedObjectId], }); + // Generate a contents and progress stream const readable = new PassThrough(); const progressStream = new PassThrough(); const sideBand = gitUtils.mux( @@ -761,31 +798,59 @@ class VaultManager { /** * Retrieves all the vaults for a peer's node */ - @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async scanVaults(targetNodeId: NodeId): Promise> { - // Create the handler for git to scan from - return this.nodeConnectionManager.withConnF( - targetNodeId, + public async scanNodeVaults( + nodeId: NodeId, + ): Promise<Array<[VaultName, VaultId]>> { + // Create a connection to another node + return await this.nodeConnectionManager.withConnF( + nodeId, async (connection) => { const client = connection.getClient(); - const gitRequest = await vaultsUtils.constructGitHandler( - client, - this.keyManager.getNodeId(), + const nodeIdMessage = new nodesPB.Node(); + nodeIdMessage.setNodeId( + nodesUtils.encodeNodeId(this.keyManager.getNodeId()), ); - return await gitRequest.scanVaults();
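// The replacement below drops the GitRequest indirection entirely and
// instead streams the vault listing over the agent's vaultsScan gRPC
// call, parsing each entry back into a VaultName / VaultId pair.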
+ const vaults: Array<[VaultName, VaultId]> = []; + const genReadable = client.vaultsScan(nodeIdMessage); + for await (const vault of genReadable) { + vaults.push([ + vault.getVaultName() as VaultName, + validationUtils.parseVaultId(vault.getVaultId()), + ]); + } + return vaults; + }, ); } + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + protected async generateVaultId(): Promise<VaultId> { + let vaultId = vaultsUtils.generateVaultId(); + let i = 0; + while (await this.efs.exists(idUtils.toString(vaultId))) { + i++; + if (i > 50) { + throw new vaultsErrors.ErrorVaultsCreateVaultId( + 'Could not create a unique vaultId after 50 attempts', + ); + } + vaultId = vaultsUtils.generateVaultId(); + } + return vaultId; + } + + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) protected async getVault(vaultId: VaultId): Promise<VaultInternal> { let vault: VaultInternal | undefined; let lock: MutexInterface; - let vaultAndLock = this.vaultsMap.get(idUtils.toString(vaultId)); + let vaultAndLock = this.vaultMap.get(vaultId); if (vaultAndLock != null) { ({ vault, lock } = vaultAndLock); + // Lock and vault exist if (vault != null) { return vault; } + // Only lock exists let release; try { release = await lock.acquire(); @@ -793,33 +858,57 @@ class VaultManager { if (vault != null) { return vault; } + const vaultMeta = await this.db.get<VaultMetadata>( + this.vaultsMetaDbDomain, + idUtils.toBuffer(vaultId), + ); + let remote; + if (vaultMeta) { + if (vaultMeta.remoteVault || vaultMeta.remoteNode) { + remote = true; + } + } vault = await VaultInternal.create({ vaultId, keyManager: this.keyManager, efs: this.efs, logger: this.logger.getChild(VaultInternal.name), + remote, }); vaultAndLock.vault = vault; - this.vaultsMap.set(idUtils.toString(vaultId), vaultAndLock); + this.vaultMap.set(vaultId, vaultAndLock); return vault; } finally { release(); } } else { + // Neither vault nor lock exists lock = new Mutex(); vaultAndLock = { lock }; - this.vaultsMap.set(idUtils.toString(vaultId), vaultAndLock); + this.vaultMap.set(vaultId, vaultAndLock); let release; try { release = await lock.acquire(); + const vaultMeta = await this.db.get<VaultMetadata>( + this.vaultsMetaDbDomain, + idUtils.toBuffer(vaultId), + ); + let remote; + if (vaultMeta) { + if (vaultMeta.remoteVault || vaultMeta.remoteNode) { + remote = true; + } + } vault = await VaultInternal.create({ vaultId, keyManager: this.keyManager, efs: this.efs, + workingDirIndex: vaultMeta?.workingDirectoryIndex, logger: this.logger.getChild(VaultInternal.name), + remote, }); vaultAndLock.vault = vault; - this.vaultsMap.set(idUtils.toString(vaultId), vaultAndLock); + this.vaultMap.set(vaultId, vaultAndLock); return vault; } finally { release(); @@ -827,14 +916,75 @@ class VaultManager { } } - protected async getLock(vaultId: VaultId): Promise<MutexInterface> { - const vaultLock = this.vaultsMap.get(idUtils.toString(vaultId)); - let lock = vaultLock?.lock; - if (!lock) { - lock = new Mutex(); - this.vaultsMap.set(idUtils.toString(vaultId), { lock }); + // This can also be replaced with generic withF and withG + + /** + * Takes a function and runs it with the listed vaults. Locking is handled automatically. + * @param vaultIds List of vault IDs for the vaults you wish to use. + * @param f Function you wish to run with the provided vaults. + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async withVaults<T>( + vaultIds: VaultId[], + f: (...args: Vault[]) => Promise<T>, + ): Promise<T> { + // Stages: + // 1. Obtain vaults, + // 2. Call function with vaults while locking the vaults. + // 3.
Catch any problems and perform cleanup in finally. + // 4. Return the result. + + const vaults = await Promise.all( + vaultIds.map(async (vaultId) => { + return await this.getVault(vaultId); + }), + ); + + // Obtaining locks. + const vaultLocks = vaultIds.map((vaultId) => { + return this.getLock(vaultId); + }); + + // Running the function with locking. + return await this.withLocks(() => { + return f(...vaults); + }, vaultLocks); + } + + protected async setupKey(bits: 128 | 192 | 256): Promise<Buffer> { + let key: Buffer | undefined; + key = await this.db.get(this.vaultsDbDomain, 'key', true); + // If the EFS already exists, but the key doesn't, then we have lost the key + if (key == null && (await this.existsEFS())) { + throw new vaultsErrors.ErrorVaultManagerKey(); + } + if (key != null) { + return key; + } + this.logger.info('Generating vaults key'); + key = await this.generateKey(bits); + await this.db.put(this.vaultsDbDomain, 'key', key, true); + return key; + } + + protected async generateKey(bits: 128 | 192 | 256): Promise<Buffer> { + return await keysUtils.generateKey(bits); + } + + protected async existsEFS(): Promise<boolean> { + try { + return (await this.fs.promises.readdir(this.efsPath)).length > 0; + } catch (e) { + if (e.code === 'ENOENT') { + return false; + } + throw new vaultsErrors.ErrorVaultManagerEFS(e.message, { + errno: e.errno, + syscall: e.syscall, + code: e.code, + path: e.path, + }); } - return lock; } } diff --git a/src/vaults/VaultOps.ts b/src/vaults/VaultOps.ts index a2d90921f..905947143 100644 --- a/src/vaults/VaultOps.ts +++ b/src/vaults/VaultOps.ts @@ -2,20 +2,13 @@ * Adds a secret to the vault */ import type Logger from '@matrixai/logger'; -import type { - FileOptions, - SecretContent, - SecretList, - SecretName, - Vault, -} from './types'; +import type { Vault } from './Vault'; import path from 'path'; import * as vaultsErrors from './errors'; import * as vaultsUtils from './utils'; // TODO: tests -// - add succeded +// - add succeeded // - secret exists // - secret with directory // Might just drop the return type. @@ -23,13 +16,13 @@ import * as vaultsUtils from './utils'; // - Add locking?
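// For orientation, a hedged sketch of how these operations compose with
// VaultManager.withVaults (vault and secret names are illustrative):
//
//   const vaultId = await vaultManager.createVault('my-vault' as VaultName);
//   await vaultManager.withVaults([vaultId], async (vault) => {
//     await addSecret(vault, 'api-key', 'secret-value');
//     const secret = await getSecret(vault, 'api-key'); // Buffer
//   });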
async function addSecret( vault: Vault, - secretName: SecretName, - content: SecretContent, + secretName: string, + content: Buffer | string, logger?: Logger, ): Promise<void> { - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { if (await efs.exists(secretName)) { - throw new vaultsErrors.ErrorSecretDefined( + throw new vaultsErrors.ErrorSecretsSecretDefined( `${secretName} already exists, try updating instead`, ); } @@ -56,14 +49,14 @@ async function addSecret( // - invalid name async function updateSecret( vault: Vault, - secretName: SecretName, - content: SecretContent, + secretName: string, + content: Buffer | string, logger?: Logger, ): Promise<void> { - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { // Throw error if secret does not exist if (!(await efs.exists(secretName))) { - throw new vaultsErrors.ErrorSecretUndefined( + throw new vaultsErrors.ErrorSecretsSecretUndefined( 'Secret does not exist, try adding it instead.', ); } @@ -83,15 +76,15 @@ async function updateSecret( // - invalid name async function renameSecret( vault: Vault, - currSecretName: SecretName, - newSecretName: SecretName, + currSecretName: string, + newSecretName: string, logger?: Logger, ): Promise<void> { - await vault.commit(async (efs) => { - await efs.rename(currSecretName, newSecretName); + await vault.writeF(async (efs) => { + await efs.rename(currSecretName, newSecretName); }); logger?.info( - `Renamed secret at ${currSecretName} to ${newSecretName} in vault ${vault.vaultId}`, + `Renamed secret at ${currSecretName} to ${newSecretName} in vault ${vault.vaultId}`, ); } @@ -102,17 +95,29 @@ async function renameSecret( // - read existing file // - try to read non-existent file // - read directory? -async function getSecret( - vault: Vault, - secretName: SecretName, -): Promise<SecretContent> { +async function getSecret(vault: Vault, secretName: string): Promise<Buffer> { try { - return await vault.access(async (efs) => { + return await vault.readF(async (efs) => { return (await efs.readFile(secretName)) as Buffer; }); } catch (err) { if (err.code === 'ENOENT') { - throw new vaultsErrors.ErrorSecretUndefined( + throw new vaultsErrors.ErrorSecretsSecretUndefined( + `Secret with name: ${secretName} does not exist`, + ); + } + throw err; + } +} + +async function statSecret(vault: Vault, secretName: string) { + try { + return await vault.readF(async (efs) => { + return await efs.stat(secretName); + }); + } catch (err) { + if (err.code === 'ENOENT') { + throw new vaultsErrors.ErrorSecretsSecretUndefined( `Secret with name: ${secretName} does not exist`, ); } @@ -129,11 +134,11 @@ async function getSecret( // - delete a full and empty directory with and without recursive.
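// A hedged usage sketch for the statSecret added above (assuming the
// EncryptedFS stat object exposes the usual isFile/isDirectory checks):
//
//   const stat = await statSecret(vault, 'api-key');
//   if (stat.isFile()) {
//     // treat it as a regular secret
//   }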
async function deleteSecret( vault: Vault, - secretName: SecretName, + secretName: string, fileOptions?: FileOptions, logger?: Logger, ): Promise { - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { if ((await efs.stat(secretName)).isDirectory()) { await efs.rmdir(secretName, fileOptions); logger?.info(`Deleted directory at '${secretName}'`); @@ -142,7 +147,7 @@ async function deleteSecret( await efs.unlink(secretName); logger?.info(`Deleted secret at '${secretName}'`); } else { - throw new vaultsErrors.ErrorSecretUndefined( + throw new vaultsErrors.ErrorSecretsSecretUndefined( `path '${secretName}' does not exist in vault`, ); } @@ -155,18 +160,18 @@ async function deleteSecret( */ async function mkdir( vault: Vault, - dirPath: SecretName, + dirPath: string, fileOptions?: FileOptions, logger?: Logger, ): Promise { const recursive = !!fileOptions?.recursive; - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { try { await efs.mkdir(dirPath, fileOptions); } catch (err) { if (err.code === 'ENOENT' && !recursive) { - throw new vaultsErrors.ErrorRecursive( + throw new vaultsErrors.ErrorVaultsRecursive( `Could not create directory '${dirPath}' without recursive option`, ); } @@ -184,13 +189,13 @@ async function mkdir( // - adding a file. async function addSecretDirectory( vault: Vault, - secretDirectory: SecretName, - fs: FileSystem, + secretDirectory: string, + fs = require('fs'), logger?: Logger, ): Promise { const absoluteDirPath = path.resolve(secretDirectory); - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { for await (const secretPath of vaultsUtils.readdirRecursively( fs, absoluteDirPath, @@ -237,10 +242,10 @@ async function addSecretDirectory( // TODO: tests // - read secrets. 
// - no secrets -async function listSecrets(vault: Vault): Promise { - return await vault.access(async (efs) => { - const secrets: SecretList = []; - for await (const secret of vaultsUtils.readdirRecursivelyEFS(efs, '.')) { +async function listSecrets(vault: Vault): Promise { + return await vault.readF(async (efs) => { + const secrets: string[] = []; + for await (const secret of vaultsUtils.readdirRecursively(efs)) { secrets.push(secret); } return secrets; @@ -252,6 +257,7 @@ export { updateSecret, renameSecret, getSecret, + statSecret, deleteSecret, mkdir, addSecretDirectory, diff --git a/src/vaults/errors.ts b/src/vaults/errors.ts index 69e4e3968..3bd7c17aa 100644 --- a/src/vaults/errors.ts +++ b/src/vaults/errors.ts @@ -1,81 +1,117 @@ -import { ErrorPolykey } from '../errors'; +import { ErrorPolykey, sysexits } from '../errors'; class ErrorVaults extends ErrorPolykey {} -class ErrorSecrets extends ErrorPolykey {} +class ErrorVaultManagerRunning extends ErrorVaults { + description = 'VaultManager is running'; + exitCode = sysexits.USAGE; +} + +class ErrorVaultManagerNotRunning extends ErrorVaults { + description = 'VaultManager is not running'; + exitCode = sysexits.USAGE; +} -class ErrorVaultManagerRunning extends ErrorVaults {} +class ErrorVaultManagerDestroyed extends ErrorVaults { + description = 'VaultManager is destroyed'; + exitCode = sysexits.USAGE; +} + +class ErrorVaultManagerKey extends ErrorVaults { + description = 'Vault key is invalid'; + exitCode = sysexits.CONFIG; +} -class ErrorVaultManagerNotRunning extends ErrorVaults {} +class ErrorVaultManagerEFS extends ErrorVaults { + description = 'EFS failed'; + exitCode = sysexits.UNAVAILABLE; +} -class ErrorVaultManagerDestroyed extends ErrorVaults {} +class ErrorVault extends ErrorVaults {} -class ErrorVaultUndefined extends ErrorVaults { - description: string = 'Vault does not exist'; - exitCode: number = 10; +class ErrorVaultRunning extends ErrorVault { + description = 'Vault is running'; + exitCode = sysexits.USAGE; } -class ErrorVaultDefined extends ErrorVaults {} +class ErrorVaultNotRunning extends ErrorVault { + description = 'Vault is not running'; + exitCode = sysexits.USAGE; +} -class ErrorRemoteVaultUndefined extends ErrorVaults {} +class ErrorVaultDestroyed extends ErrorVault { + description = 'Vault is destroyed'; + exitCode = sysexits.USAGE; +} -class ErrorVaultUninitialised extends ErrorVaults {} +class ErrorVaultReferenceInvalid extends ErrorVault { + description = 'Reference is invalid'; + exitCode = sysexits.USAGE; +} -class ErrorVaultNotStarted extends ErrorVaults {} +class ErrorVaultReferenceMissing extends ErrorVault { + description = 'Reference does not exist'; + exitCode = sysexits.USAGE; +} -class ErrorVaultDestroyed extends ErrorVaults {} +// Yes it is immutable +// But this is because you don't own the vault right now -class ErrorRecursive extends ErrorVaults {} +class ErrorVaultImmutable extends ErrorVaults { + description = 'Vault cannot be mutated'; + exitCode = sysexits.USAGE; +} -class ErrorVaultModified extends ErrorVaults {} +// --- these need to be reviewed -class ErrorMalformedVaultDBValue extends ErrorVaults {} +class ErrorVaultsVaultUndefined extends ErrorVaults { + description = 'Vault does not exist'; + exitCode = 10; +} -class ErrorVaultUnlinked extends ErrorVaults {} +class ErrorVaultsVaultDefined extends ErrorVaults {} -class ErrorCreateVaultId extends ErrorVaults {} +class ErrorVaultsRecursive extends ErrorVaults {} -class ErrorInvalidVaultId extends ErrorVaults {} // TODO: Assign a 
proper error code and message. +class ErrorVaultsVaultUnlinked extends ErrorVaults {} -class ErrorVaultMergeConflict extends ErrorVaults {} +class ErrorVaultsCreateVaultId extends ErrorVaults {} -class ErrorVaultCommitUndefined extends ErrorVaults { - description: string = 'Commit does not exist'; - exitCode: number = 10; -} +class ErrorVaultsInvalidVaultId extends ErrorVaults {} // TODO: Assign a proper error code and message. -class ErrorSecretUndefined extends ErrorSecrets {} +class ErrorVaultsMergeConflict extends ErrorVaults {} -class ErrorSecretDefined extends ErrorSecrets {} +class ErrorVaultsPermissionDenied extends ErrorVaults {} -class ErrorReadingSecret extends ErrorSecrets {} +class ErrorSecrets extends ErrorPolykey {} -class ErrorGitFile extends ErrorSecrets {} +class ErrorSecretsSecretUndefined extends ErrorSecrets {} -class ErrorVaultsInvalidVaultId extends ErrorVaults {} +class ErrorSecretsSecretDefined extends ErrorSecrets {} export { ErrorVaults, ErrorVaultManagerRunning, ErrorVaultManagerNotRunning, ErrorVaultManagerDestroyed, - ErrorVaultUndefined, - ErrorVaultDefined, - ErrorRemoteVaultUndefined, - ErrorVaultUninitialised, - ErrorVaultNotStarted, + ErrorVaultManagerKey, + ErrorVaultManagerEFS, + ErrorVault, + ErrorVaultRunning, + ErrorVaultNotRunning, ErrorVaultDestroyed, - ErrorRecursive, - ErrorVaultModified, - ErrorMalformedVaultDBValue, - ErrorVaultUnlinked, - ErrorCreateVaultId, - ErrorInvalidVaultId, - ErrorVaultMergeConflict, - ErrorVaultCommitUndefined, - ErrorSecretUndefined, - ErrorSecretDefined, - ErrorReadingSecret, - ErrorGitFile, + ErrorVaultReferenceInvalid, + ErrorVaultReferenceMissing, + ErrorVaultImmutable, + ErrorVaultsVaultUndefined, + ErrorVaultsVaultDefined, + ErrorVaultsRecursive, + ErrorVaultsVaultUnlinked, + ErrorVaultsCreateVaultId, ErrorVaultsInvalidVaultId, + ErrorVaultsMergeConflict, + ErrorVaultsPermissionDenied, + ErrorSecrets, + ErrorSecretsSecretUndefined, + ErrorSecretsSecretDefined, }; diff --git a/src/vaults/index.ts b/src/vaults/index.ts index 95188f863..84fc46769 100644 --- a/src/vaults/index.ts +++ b/src/vaults/index.ts @@ -1,5 +1,6 @@ export { default as VaultManager } from './VaultManager'; export { default as VaultInternal } from './VaultInternal'; +export type { Vault } from './Vault'; export * as utils from './utils'; export * as types from './types'; export * as errors from './errors'; diff --git a/src/vaults/types.ts b/src/vaults/types.ts index 559b89bd8..66e053ebf 100644 --- a/src/vaults/types.ts +++ b/src/vaults/types.ts @@ -1,173 +1,175 @@ -import type VaultInternal from './VaultInternal'; -import type { Opaque } from '../types'; -import type { NodeId } from '../nodes/types'; -import type { MutexInterface } from 'async-mutex'; +import type { Id } from '@matrixai/id'; +import type { EncryptedFS } from 'encryptedfs'; import type { Callback, Path } from 'encryptedfs/dist/types'; import type { FdIndex } from 'encryptedfs/dist/fd/types'; -import type { EncryptedFS } from 'encryptedfs'; -import type { Id, IdString } from '../GenericIdTypes'; +import type { Opaque } from '../types'; const vaultActions = ['clone', 'pull'] as const; +type VaultAction = typeof vaultActions[number]; + /** - * Randomly generated vault ID for each new vault + * Special tags that are managed by VaultInternal + * They are used to refer to specific commits + * These may or may not be implemented using Git tags */ -type VaultId = Opaque<'VaultId', Id>; - -type VaultIdPretty = Opaque<'VaultIdPretty', IdString>; - -type VaultName = Opaque<'VaultName', 
string>; - -type VaultKey = Opaque<'VaultKey', Buffer>; +const tagLast = 'last'; /** - * Actions relating to what is possible with vaults + * Tuple of static references */ -type VaultAction = typeof vaultActions[number]; +const refs = ['HEAD', tagLast] as const; -type VaultList = Map; - -type VaultMetadata = { - name: VaultName; - id: VaultId; - remoteNode: NodeId; - remoteVault: VaultId; -}; - -type SecretName = string; +type VaultId = Opaque<'VaultId', Id>; -type SecretList = string[]; +type VaultIdEncoded = Opaque<'VaultIdEncoded', string>; -type SecretContent = Buffer | string; +type VaultRef = typeof refs[number]; -type VaultMap = Map< - string, - { - vault?: VaultInternal; - lock: MutexInterface; - } ->; +type CommitId = Opaque<'CommitId', string>; -type FileOptions = { - recursive?: boolean; +type CommitLog = { + commitId: CommitId; + parent: Array; + author: { + name: string; + timestamp: Date; + }; + committer: { + name: string; + timestamp: Date; + }; + message: string; }; -type VaultActions = Partial>; - +/** + * Readonly-only interface for EncryptedFS + * Note that open flags type are not complete + * Combinations of the flags can be used as well + */ interface FileSystemReadable { - chdir: typeof EncryptedFS.prototype.chdir; - access: typeof EncryptedFS.prototype.access; - chmod: typeof EncryptedFS.prototype.chmod; - chown: typeof EncryptedFS.prototype.chown; - chownr: typeof EncryptedFS.prototype.chownr; - close: typeof EncryptedFS.prototype.close; - createReadStream: typeof EncryptedFS.prototype.createReadStream; - exists: typeof EncryptedFS.prototype.exists; - fchmod: typeof EncryptedFS.prototype.fchmod; - fchown: typeof EncryptedFS.prototype.fchown; - fstat: typeof EncryptedFS.prototype.fstat; - futimes: typeof EncryptedFS.prototype.futimes; - lchmod: typeof EncryptedFS.prototype.lchmod; - lchown: typeof EncryptedFS.prototype.lchown; - lseek: typeof EncryptedFS.prototype.lseek; - lstat: typeof EncryptedFS.prototype.lstat; + constants: EncryptedFS['constants']; + promises: FileSystemReadable; + access: EncryptedFS['access']; + close: EncryptedFS['close']; + createReadStream: EncryptedFS['createReadStream']; + exists: EncryptedFS['exists']; + fstat: EncryptedFS['fstat']; + lseek: EncryptedFS['lseek']; + lstat: EncryptedFS['lstat']; open( path: Path, - flags: 'r' | 'rs' | 'r+' | 'rs+', + flags: + | 'r' + | EncryptedFS['constants']['O_RDONLY'] + | EncryptedFS['constants']['O_DIRECTORY'] + | EncryptedFS['constants']['O_NOATIME'] + | EncryptedFS['constants']['O_DIRECT'] + | EncryptedFS['constants']['O_NONBLOCK'], mode?: number, ): Promise; open( path: Path, - flags: 'r' | 'rs' | 'r+' | 'rs+', + flags: + | 'r' + | EncryptedFS['constants']['O_RDONLY'] + | EncryptedFS['constants']['O_DIRECTORY'] + | EncryptedFS['constants']['O_NOATIME'] + | EncryptedFS['constants']['O_DIRECT'] + | EncryptedFS['constants']['O_NONBLOCK'], callback: Callback<[FdIndex]>, ): Promise; open( path: Path, - flags: 'r' | 'rs' | 'r+' | 'rs+', + flags: + | 'r' + | EncryptedFS['constants']['O_RDONLY'] + | EncryptedFS['constants']['O_DIRECTORY'] + | EncryptedFS['constants']['O_NOATIME'] + | EncryptedFS['constants']['O_DIRECT'] + | EncryptedFS['constants']['O_NONBLOCK'], mode: number, callback: Callback<[FdIndex]>, ): Promise; - read: typeof EncryptedFS.prototype.read; - readdir: typeof EncryptedFS.prototype.readdir; - readFile: typeof EncryptedFS.prototype.readFile; - readlink: typeof EncryptedFS.prototype.readlink; - realpath: typeof EncryptedFS.prototype.realpath; - stat: typeof EncryptedFS.prototype.stat; - 
utimes: typeof EncryptedFS.prototype.utimes; + read: EncryptedFS['read']; + readdir: EncryptedFS['readdir']; + readFile: EncryptedFS['readFile']; + readlink: EncryptedFS['readlink']; + realpath: EncryptedFS['realpath']; + stat: EncryptedFS['stat']; } +/** + * Readable & Writable interface for EncryptedFS + */ interface FileSystemWritable extends FileSystemReadable { - chdir: typeof EncryptedFS.prototype.chdir; - access: typeof EncryptedFS.prototype.access; - appendFile: typeof EncryptedFS.prototype.appendFile; - chmod: typeof EncryptedFS.prototype.chmod; - chown: typeof EncryptedFS.prototype.chown; - chownr: typeof EncryptedFS.prototype.chownr; - close: typeof EncryptedFS.prototype.close; - copyFile: typeof EncryptedFS.prototype.copyFile; - createWriteStream: typeof EncryptedFS.prototype.createWriteStream; - fallocate: typeof EncryptedFS.prototype.fallocate; - fchmod: typeof EncryptedFS.prototype.fchmod; - fchown: typeof EncryptedFS.prototype.fchown; - ftruncate: typeof EncryptedFS.prototype.ftruncate; - futimes: typeof EncryptedFS.prototype.futimes; - lchmod: typeof EncryptedFS.prototype.lchmod; - lchown: typeof EncryptedFS.prototype.lchown; - link: typeof EncryptedFS.prototype.link; - lseek: typeof EncryptedFS.prototype.lseek; - mkdir: typeof EncryptedFS.prototype.mkdir; - mkdtemp: typeof EncryptedFS.prototype.mkdtemp; - mknod: typeof EncryptedFS.prototype.mknod; - open: typeof EncryptedFS.prototype.open; - rename: typeof EncryptedFS.prototype.rename; - rmdir: typeof EncryptedFS.prototype.rmdir; - symlink: typeof EncryptedFS.prototype.symlink; - truncate: typeof EncryptedFS.prototype.truncate; - unlink: typeof EncryptedFS.prototype.unlink; - utimes: typeof EncryptedFS.prototype.utimes; - write: typeof EncryptedFS.prototype.write; - writeFile: typeof EncryptedFS.prototype.writeFile; + promises: FileSystemWritable; + appendFile: EncryptedFS['appendFile']; + chmod: EncryptedFS['chmod']; + chown: EncryptedFS['chown']; + chownr: EncryptedFS['chownr']; + copyFile: EncryptedFS['copyFile']; + createWriteStream: EncryptedFS['createWriteStream']; + fallocate: EncryptedFS['fallocate']; + fchmod: EncryptedFS['fchmod']; + fchown: EncryptedFS['fchown']; + fdatasync: EncryptedFS['fdatasync']; + fsync: EncryptedFS['fsync']; + ftruncate: EncryptedFS['ftruncate']; + futimes: EncryptedFS['futimes']; + lchmod: EncryptedFS['lchmod']; + lchown: EncryptedFS['lchown']; + link: EncryptedFS['link']; + mkdir: EncryptedFS['mkdir']; + mkdtemp: EncryptedFS['mkdtemp']; + mknod: EncryptedFS['mknod']; + open: EncryptedFS['open']; + rename: EncryptedFS['rename']; + rmdir: EncryptedFS['rmdir']; + symlink: EncryptedFS['symlink']; + truncate: EncryptedFS['truncate']; + unlink: EncryptedFS['unlink']; + utimes: EncryptedFS['utimes']; + write: EncryptedFS['write']; + writeFile: EncryptedFS['writeFile']; } -type CommitType = typeof VaultInternal.prototype.commit; -type AccessType = typeof VaultInternal.prototype.access; -type LogType = typeof VaultInternal.prototype.log; -type VersionType = typeof VaultInternal.prototype.version; -interface Vault { - baseDir: typeof VaultInternal.prototype.baseDir; - gitDir: typeof VaultInternal.prototype.gitDir; - vaultId: typeof VaultInternal.prototype.vaultId; - commit(...arg: Parameters): ReturnType; - access: AccessType; - log(...arg: Parameters): ReturnType; - version(...arg: Parameters): ReturnType; -} +type VaultName = string; // FIXME, placeholder, remove? 
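Switching from `typeof EncryptedFS.prototype.x` to indexed access types (`EncryptedFS['x']`) keeps these interfaces in lockstep with `EncryptedFS` while staying terse, and the readable/writable split is presumably what `readF` and `writeF` hand to their callbacks. A sketch of how the split constrains callers at compile time; the function names are illustrative:

import type { FileSystemReadable, FileSystemWritable } from './src/vaults/types';

// Only needs the read-only capability: can list and read, cannot mutate
async function countEntries(
  efs: FileSystemReadable,
  dir: string,
): Promise<number> {
  const entries = await efs.readdir(dir);
  return entries.length;
}

// Requires the writable capability; read methods come along via extension
async function touch(efs: FileSystemWritable, file: string): Promise<void> {
  await efs.writeFile(file, '');
  // Calling touch with a FileSystemReadable is a compile-time error,
  // since writeFile exists only on FileSystemWritable
}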
-type CommitLog = { - oid: string; - committer: string; - timeStamp: number; - message: string; -}; +// type VaultKey = Opaque<'VaultKey', Buffer>; + +// /** +// * Actions relating to what is possible with vaults +// */ +// type VaultAction = 'clone' | 'pull'; + +// type SecretName = string; + +// type SecretList = string[]; + +// type SecretContent = Buffer | string; + +// type FileOptions = { +// recursive?: boolean; +// }; + +// FIXME: temp placeholder +type VaultActions = Partial>; export { vaultActions }; export type { VaultId, - VaultIdPretty, + VaultIdEncoded, + VaultRef, VaultAction, - VaultKey, - VaultName, - VaultList, - VaultMap, - VaultMetadata, - VaultActions, - SecretName, - SecretList, - SecretContent, - FileOptions, + CommitId, + CommitLog, FileSystemReadable, FileSystemWritable, - Vault, - CommitLog, + // FIXME: placeholder types + VaultName, + VaultActions, }; + +export { tagLast, refs }; diff --git a/src/vaults/utils.ts b/src/vaults/utils.ts index b9987c04f..d712691f4 100644 --- a/src/vaults/utils.ts +++ b/src/vaults/utils.ts @@ -1,249 +1,210 @@ -import type { EncryptedFS } from 'encryptedfs'; import type { VaultId, - VaultKey, - VaultList, - VaultName, + VaultIdEncoded, + VaultRef, VaultAction, - FileSystemReadable, - VaultIdPretty, + CommitId, } from './types'; -import type { FileSystem } from '../types'; -import type { NodeId } from '../nodes/types'; +import type { FileSystem, POJO } from '../types'; import type { GRPCClientAgent } from '../agent'; +import type { NodeId } from '../nodes/types'; + import path from 'path'; -import { IdRandom } from '@matrixai/id'; +import { IdInternal, IdRandom, utils as idUtils } from '@matrixai/id'; import * as grpc from '@grpc/grpc-js'; -import { vaultActions } from './types'; -import * as vaultsErrors from './errors'; -import { GitRequest } from '../git'; -import { promisify } from '../utils'; +import { tagLast, refs, vaultActions } from './types'; +import * as nodesUtils from '../nodes/utils'; import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import * as keysUtils from '../keys/utils'; -import { isIdString, isId, makeIdString, makeId } from '../GenericIdTypes'; -import { utils as nodesUtils } from '../nodes'; - -async function generateVaultKey(bits: number = 256): Promise { - return (await keysUtils.generateKey(bits)) as VaultKey; -} - -function isVaultId(arg: any) { - return isId(arg); -} /** - * This will return arg as a valid VaultId or throw an error if it can't be converted. - * This will take a multibase string of the ID or the raw Buffer of the ID. 
- * @param arg - The variable we wish to convert - * @throws vaultsErrors.ErrorInvalidVaultId if the arg can't be converted into a VaultId - * @returns VaultId + * Vault history is designed for linear-history + * The canonical branch represents the one and only true timeline + * In the future, we can introduce non-linear history + * Where branches are automatically made when new timelines are created */ -function makeVaultId(arg: any): VaultId { - return makeId(arg); -} +const canonicalBranch = 'master'; -function isVaultIdPretty(arg: any): arg is VaultIdPretty { - return isIdString(arg); -} - -function makeVaultIdPretty(arg: any): VaultIdPretty { - return makeIdString(arg); -} +const vaultIdGenerator = new IdRandom(); -const randomIdGenerator = new IdRandom(); function generateVaultId(): VaultId { - return makeVaultId(randomIdGenerator.get()); + return vaultIdGenerator.get(); } -async function fileExists(fs: FileSystem, path: string): Promise { - try { - const fh = await fs.promises.open(path, 'r'); - await fh.close(); - } catch (err) { - if (err.code === 'ENOENT') { - return false; - } - } - return true; +function encodeVaultId(vaultId: VaultId): VaultIdEncoded { + return vaultId.toMultibase('base58btc') as VaultIdEncoded; } -async function* readdirRecursively(fs, dir: string) { - const dirents = await fs.promises.readdir(dir, { withFileTypes: true }); - for (const dirent of dirents) { - const res = path.resolve(dir, dirent.name); - if (dirent.isDirectory()) { - yield* readdirRecursively(fs, res); - } else if (dirent.isFile()) { - yield res; - } +function decodeVaultId(vaultIdEncoded: any): VaultId | undefined { + if (typeof vaultIdEncoded !== 'string') { + return; } -} - -async function* readdirRecursivelyEFS( - efs: FileSystemReadable, - dir: string, - dirs?: boolean, -) { - const dirents = await efs.readdir(dir); - let secretPath: string; - for (const dirent of dirents) { - const res = dirent.toString(); // Makes string | buffer a string. 
- secretPath = path.join(dir, res); - if ((await efs.stat(secretPath)).isDirectory() && dirent !== '.git') { - if (dirs === true) { - yield secretPath; - } - yield* readdirRecursivelyEFS(efs, secretPath, dirs); - } else if ((await efs.stat(secretPath)).isFile()) { - yield secretPath; - } - } -} - -async function* readdirRecursivelyEFS2( - fs: EncryptedFS, - dir: string, - dirs?: boolean, -): AsyncGenerator { - const dirents = await fs.readdir(dir); - let secretPath: string; - for (const dirent of dirents) { - const res = dirent.toString(); - secretPath = path.join(dir, res); - if (dirent !== '.git') { - try { - await fs.readdir(secretPath); - if (dirs === true) { - yield secretPath; - } - yield* readdirRecursivelyEFS2(fs, secretPath, dirs); - } catch (err) { - if (err.code === 'ENOTDIR') { - yield secretPath; - } - } - } + const vaultId = IdInternal.fromMultibase(vaultIdEncoded); + if (vaultId == null) { + return; } + return vaultId; } /** - * Searches a list of vaults for the given vault Id and associated name - * @throws If the vault Id does not exist + * Vault reference can be HEAD, any of the special tags or a commit ID */ -function searchVaultName(vaultList: VaultList, vaultId: VaultId): VaultName { - let vaultName: VaultName | undefined; - - // Search each element in the list of vaults - for (const elem in vaultList) { - // List is of form \t - const value = vaultList[elem].split('\t'); - if (value[1] === vaultId) { - vaultName = value[0]; - break; - } - } - if (vaultName == null) { - throw new vaultsErrors.ErrorRemoteVaultUndefined( - `${vaultId} does not exist on connected node`, - ); - } - return vaultName; +function validateRef(ref: any): ref is VaultRef { + return refs.includes(ref) || validateCommitId(ref); } /** - * Creates a GitRequest object from the desired node connection. - * @param client GRPC connection to desired node - * @param nodeId + * Commit ids are SHA1 hashes encoded as 40-character long lowercase hexadecimal strings */ -async function constructGitHandler( - client: GRPCClientAgent, - nodeId: NodeId, -): Promise { - const gitRequest = new GitRequest( - ((vaultNameOrId: string) => requestInfo(vaultNameOrId, client)).bind(this), - ((vaultNameOrId: string, body: Buffer) => - requestPack(vaultNameOrId, body, client)).bind(this), - (() => requestVaultNames(client, nodeId)).bind(this), - ); - return gitRequest; -} - -/** - * Requests remote info from the connected node for the named vault. 
- * @param vaultId ID of the desired vault - * @param client A connection object to the node - * @returns Async Generator of Uint8Arrays representing the Info Response - */ -async function* requestInfo( - vaultNameOrId: string, - client: GRPCClientAgent, -): AsyncGenerator { - const request = new vaultsPB.Vault(); - request.setNameOrId(vaultNameOrId); - const response = client.vaultsGitInfoGet(request); - for await (const resp of response) { - yield resp.getChunk_asU8(); - } -} - -/** - * Requests a pack from the connected node for the named vault - * @param vaultId ID of vault - * @param body contains the pack request - * @param client A connection object to the node - * @returns AsyncGenerator of Uint8Arrays representing the Pack Response - */ -async function* requestPack( - vaultNameOrId: string, - body: Buffer, - client: GRPCClientAgent, -): AsyncGenerator { - const responseBuffers: Array = []; - - const meta = new grpc.Metadata(); - // FIXME make it a VaultIdReadable - meta.set('vaultNameOrId', vaultNameOrId); - - const stream = client.vaultsGitPackGet(meta); - const write = promisify(stream.write).bind(stream); - - stream.on('data', (d) => { - responseBuffers.push(d.getChunk_asU8()); - }); - - const chunk = new vaultsPB.PackChunk(); - chunk.setChunk(body); - write(chunk); - stream.end(); - - yield await new Promise((resolve) => { - stream.once('end', () => { - resolve(Buffer.concat(responseBuffers)); - }); - }); -} - -/** - * Requests the vault names from the connected node. - * @param client A connection object to the node - * @param nodeId - */ -async function requestVaultNames( - client: GRPCClientAgent, - nodeId: NodeId, -): Promise { - const request = new nodesPB.Node(); - request.setNodeId(nodesUtils.encodeNodeId(nodeId)); - const vaultList = client.vaultsScan(request); - const data: string[] = []; - for await (const vault of vaultList) { - const vaultMessage = vault.getNameOrId(); - data.push(vaultMessage); - } - - return data; -} +function validateCommitId(commitId: any): commitId is CommitId { + return /^[a-f0-9]{40}$/.test(commitId); +} + +function commitAuthor(nodeId: NodeId): { name: string; email: string } { + return { + name: nodesUtils.encodeNodeId(nodeId), + email: '', + }; +} + +// Function isVaultId(arg: any) { +// return isId(arg); +// } +// /** +// * This will return arg as a valid VaultId or throw an error if it can't be converted. +// * This will take a multibase string of the ID or the raw Buffer of the ID. 
+// * @param arg - The variable we wish to convert +// * @throws vaultsErrors.ErrorInvalidVaultId if the arg can't be converted into a VaultId +// * @returns VaultId +// */ +// function makeVaultId(arg: any): VaultId { +// return makeId(arg); +// } +// function isVaultIdPretty(arg: any): arg is VaultIdPretty { +// return isIdString(arg); +// } +// function makeVaultIdPretty(arg: any): VaultIdPretty { +// return makeIdString(arg); +// } + +// async function fileExists(fs: FileSystem, path: string): Promise { +// try { +// const fh = await fs.promises.open(path, 'r'); +// await fh.close(); +// } catch (err) { +// if (err.code === 'ENOENT') { +// return false; +// } +// } +// return true; +// } + +// async function* readdirRecursively(fs, dir = '.') { +// const dirents = await fs.promises.readdir(dir); +// for (const dirent of dirents) { +// const res = path.join(dir, dirent.toString()); +// const stat = await fs.promises.stat(res); +// if (stat.isDirectory()) { +// yield* readdirRecursively(fs, res); +// } else if (stat.isFile()) { +// yield res; +// } +// } +// } + +// async function request( +// client: GRPCClientAgent, +// nodeId: NodeId, +// vaultNameOrId: VaultId | VaultName, +// ) { +// const requestMessage = new vaultsPB.InfoRequest(); +// const vaultMessage = new vaultsPB.Vault(); +// const nodeMessage = new nodesPB.Node(); +// nodeMessage.setNodeId(nodeId); +// requestMessage.setAction('clone'); +// if (typeof vaultNameOrId === 'string') { +// vaultMessage.setNameOrId(vaultNameOrId); +// } else { +// // To have consistency between GET and POST, send the user +// // readable form of the vault Id +// vaultMessage.setNameOrId(makeVaultIdPretty(vaultNameOrId)); +// } +// requestMessage.setVault(vaultMessage); +// requestMessage.setNode(nodeMessage); +// const response = client.vaultsGitInfoGet(requestMessage); +// let vaultName, remoteVaultId; +// response.stream.on('metadata', async (meta) => { +// // Receive the Id of the remote vault +// vaultName = meta.get('vaultName').pop(); +// if (vaultName) vaultName = vaultName.toString(); +// const vId = meta.get('vaultId').pop(); +// if (vId) remoteVaultId = makeVaultId(vId.toString()); +// }); +// // Collet the response buffers from the GET request +// const infoResponse: Uint8Array[] = []; +// for await (const resp of response) { +// infoResponse.push(resp.getChunk_asU8()); +// } +// const metadata = new grpc.Metadata(); +// if (typeof vaultNameOrId === 'string') { +// metadata.set('vaultNameOrId', vaultNameOrId); +// } else { +// // Metadata only accepts the user readable form of the vault Id +// // as the string form has illegal characters +// metadata.set('vaultNameOrId', makeVaultIdPretty(vaultNameOrId)); +// } +// return [ +// async function ({ +// url, +// method = 'GET', +// headers = {}, +// body = [Buffer.from('')], +// }: { +// url: string; +// method: string; +// headers: POJO; +// body: Buffer[]; +// }) { +// if (method === 'GET') { +// // Send back the GET request info response +// return { +// url: url, +// method: method, +// body: infoResponse, +// headers: headers, +// statusCode: 200, +// statusMessage: 'OK', +// }; +// } else if (method === 'POST') { +// const responseBuffers: Array = []; +// const stream = client.vaultsGitPackGet(metadata); +// const chunk = new vaultsPB.PackChunk(); +// // Body is usually an async generator but in the cases we are using, +// // only the first value is used +// chunk.setChunk(body[0]); +// // Tell the server what commit we need +// await stream.write(chunk); +// let packResponse = 
(await stream.read()).value; +// while (packResponse != null) { +// responseBuffers.push(packResponse.getChunk_asU8()); +// packResponse = (await stream.read()).value; +// } +// return { +// url: url, +// method: method, +// body: responseBuffers, +// headers: headers, +// statusCode: 200, +// statusMessage: 'OK', +// }; +// } else { +// throw new Error('Method not supported'); +// } +// }, +// vaultName, +// remoteVaultId, +// ]; +// } function isVaultAction(action: any): action is VaultAction { if (typeof action !== 'string') return false; @@ -251,17 +212,14 @@ function isVaultAction(action: any): action is VaultAction { } export { - isVaultId, - isVaultIdPretty, - makeVaultId, - makeVaultIdPretty, - generateVaultKey, + tagLast, + refs, + canonicalBranch, generateVaultId, - fileExists, - readdirRecursively, - readdirRecursivelyEFS, - readdirRecursivelyEFS2, - constructGitHandler, - searchVaultName, + encodeVaultId, + decodeVaultId, + validateRef, + validateCommitId, + commitAuthor, isVaultAction, }; diff --git a/test-git.ts b/test-git.ts new file mode 100644 index 000000000..cad3f9b0e --- /dev/null +++ b/test-git.ts @@ -0,0 +1,337 @@ +import fs from 'fs'; +import path from 'path'; +import git from 'isomorphic-git'; +import * as vaultsUtils from './src/vaults/utils'; + +/* + +I'm going to need to test out how to use the tags and the branches. +When we are pulling the repo, we can checkout to a given version in the commit hash. +We need to switch the HEAD. +We're going to do this in the real FS. So we can see this being done, one step at a time +*/ + + // // await git.checkout({ + // // fs, + // // dir: vaultDir, + // // gitdir: vaultGitDir, + // // ref: 'master' + // // }); + + // // We never change branches anyway + + // try { + // const commits = await git.log({ + // fs, + // dir: vaultDir, + // gitdir: vaultGitDir, + // depth: 1, + // ref: 'master', + // }); + + // console.log(commits); + + // // if the comits is meant to be empty array + + // } catch (e) { + // if (e instanceof git.Errors.NotFoundError) { + // console.log('OH NO!'); + + +async function main () { + + const vaultDataDir = './tmp/git/data'; + const vaultGitDir = './tmp/git/.git'; + + await fs.promises.rm('./tmp/git', { recursive: true, force: true }); + + await fs.promises.mkdir(vaultDataDir, { recursive: true }); + + await git.init({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + defaultBranch: 'master' + }); + + const firstCommit = await git.commit({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + author: { + name: 'this is the author', + email: '', + }, + message: 'Initial Commit', + ref: 'HEAD', + }); + + await git.writeRef({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + ref: 'refs/heads/master', + value: firstCommit, + force: true + }); + + console.log(firstCommit); + + console.log(vaultsUtils.validateCommitId(firstCommit.toUpperCase())); + + // what happens when you create .git inside? 
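The new utilities pin down the ID formats: `encodeVaultId`/`decodeVaultId` round-trip a `VaultId` through a base58btc multibase string, with `decodeVaultId` returning `undefined` on malformed input rather than throwing, and `validateCommitId` accepts only 40-character lowercase hexadecimal SHA1 strings, which is why the `toUpperCase()` probe logged above comes out `false`. A round-trip sketch based on the code above:

import * as vaultsUtils from './src/vaults/utils';

const vaultId = vaultsUtils.generateVaultId();
const encoded = vaultsUtils.encodeVaultId(vaultId); // base58btc multibase string
const decoded = vaultsUtils.decodeVaultId(encoded); // VaultId | undefined
if (decoded == null) throw new Error('unreachable: we just encoded it');
vaultsUtils.decodeVaultId('z@not@base58'); // undefined: invalid base58btc
vaultsUtils.decodeVaultId(42); // undefined: non-strings are rejected up front
vaultsUtils.validateCommitId('a'.repeat(40)); // true
vaultsUtils.validateCommitId('A'.repeat(40)); // false: uppercase is rejected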
+ + await git.checkout({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + ref: firstCommit.toUpperCase(), + }); + + + + + + await fs.promises.writeFile( + path.join(vaultDataDir, 'file'), + 'v2' + ); + + await git.add({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + filepath: 'file' + }); + + const secondCommit = await git.commit({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + author: { + name: 'this is the author', + email: '', + }, + message: 'Second Commit', + ref: 'HEAD', + }); + + await git.writeRef({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + ref: 'refs/heads/master', + value: secondCommit, + force: true + }); + + await fs.promises.writeFile( + path.join(vaultDataDir, 'file'), + 'v3' + ); + + await git.add({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + filepath: 'file' + }); + + // This is comparing against the HEAD commit + // the default ref is HEAD + const status = await git.statusMatrix({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + }); + + console.log(status); + + const thirdCommit = await git.commit({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + author: { + name: 'this is the author', + email: '', + }, + message: 'Third Commit', + ref: 'HEAD', + }); + + await git.writeRef({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + ref: 'refs/heads/master', + value: thirdCommit, + force: true + }); + + // we alaways use the master branch + // to find the log of canonical history + // or we find it from where we are in HEAD + const commits = await git.log({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + ref: 'master', + }); + + console.log(commits); + + // this changed to the second commit + // but the working tree isn't updated + // wtf? + + // This changes it to a detached commit + // But master still points to the original one + await git.checkout({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + ref: secondCommit + }); + + console.log('FROM HEAD', await git.log({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + })); + + // the branch always points to the tip, and is considered canonical + // we only change the branch point when we are making new commits + // when making new commits, we want to change the branch pointer + + console.log('FROM MASTER', await git.log({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + ref: 'master', + })); + + // this changes it to ref: refs/heads/master + // it also does a checkout of the working directory + // if we want to checkout to the end, the `HEAD` points to `master` + // that's fine too, it just means head is now attached + await git.checkout({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + // ref: 'master' + ref: secondCommit + // if this is thirdCommit, it's not the same + // the branch pointer doesn't get updated + }); + + // interestingly enough + // moving to the third commit keeps the head there + // if the head is kept there, and we add a new commit here + // what happens? 
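Every commit in this script follows the same two-step dance, and the observations at the end of the file justify it: commit against `HEAD`, which also works from a detached HEAD, then force-move the canonical branch ref to the new commit, since in a detached state neither pointer advances on its own. Distilled into a helper that uses only the isomorphic-git calls exercised here (the helper itself is a sketch, not part of the patch):

import fs from 'fs';
import git from 'isomorphic-git';

async function commitAndAdvance(
  dir: string,
  gitdir: string,
  message: string,
): Promise<string> {
  // Updates HEAD even when it is detached
  const commitId = await git.commit({
    fs,
    dir,
    gitdir,
    author: { name: 'author', email: '' },
    message,
    ref: 'HEAD',
  });
  // Full ref path required; bare 'master' does not move the branch
  await git.writeRef({
    fs,
    dir,
    gitdir,
    ref: 'refs/heads/master',
    value: commitId,
    force: true,
  });
  return commitId;
}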
+ + await fs.promises.writeFile( + path.join(vaultDataDir, 'file'), + 'v4' + ); + + await git.add({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + filepath: 'file' + }); + + // const currentCommit = await git.resolveRef({ + // fs, + // dir: vaultDataDir, + // gitdir: vaultGitDir, + // ref: 'HEAD' + // }); + + const fourthCommit = await git.commit({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + author: { + name: 'this is the author', + email: '', + }, + message: 'Fourth Commit', + ref: 'HEAD' + // ref: 'refs/heads/master', + // parent: [currentCommit] + }); + + await git.writeRef({ + fs, + dir: vaultDataDir, + gitdir: vaultGitDir, + // ref: 'HEAD', + ref: 'refs/heads/master', + value: fourthCommit, + // value: 'refs/heads/master', + // symbolic: true, + force: true + }); + + // if ref is HEAD, it moves the HEAD pointer on the commit + // if ref is master, it doesn't do anything... + // oh shit, refs/heads/master works, but not master + // the ref here has to be either HEAD or the full path + // like refs/heads/master + // if you don't pass anything, then it is assumed + + // undefined will update both HEAD and master + // refs/heads/master will update both HEAD and master + // HEAD will only update HEAD + + // ok so the issue is this + // if i am in detached head state + // by default NOTHING is updated, neither HEAD nor master + // if HEAD is passed in, then HEAD gets updated + // if refs/heads/master is passed in, then only the master branch is updated + // it makes sense that we would want to update both + // or at the very least update HEAD, then update the branch pointer in a different way + + + + + // console.log(fourthCommit); + + // console.log('FROM HEAD', await git.log({ + // fs, + // dir: vaultDataDir, + // gitdir: vaultGitDir, + // })); + + // console.log('FROM MASTER', await git.log({ + // fs, + // dir: vaultDataDir, + // gitdir: vaultGitDir, + // ref: 'master', + // })); + + // console.log('FROM FOURTH', await git.log({ + // fs, + // dir: vaultDataDir, + // gitdir: vaultGitDir, + // ref: fourthCommit, + // })); + + // await git.checkout({ + // fs, + // dir: vaultDataDir, + // gitdir: vaultGitDir, + // ref: fourthCommit + // }); + + + + // note the above is not transactional + // so we have to be aware of this and "clean" + // the state whenever we start using it + + +} + +main(); diff --git a/test-vaultinternal.ts b/test-vaultinternal.ts new file mode 100644 index 000000000..71cb73f3d --- /dev/null +++ b/test-vaultinternal.ts @@ -0,0 +1,34 @@ +import KeyManager from './src/keys/KeyManager'; +import VaultInternal from './src/vaults/VaultInternal'; +import * as vaultsUtils from './src/vaults/utils'; +import { EncryptedFS, utils as efsUtils } from 'encryptedfs'; + +async function main () { + const keyManager = await KeyManager.createKeyManager({ + keysPath: './tmp/keys', + password: 'abc123' + }); + + // this buffer needs to e + const [vaultKey] = await efsUtils.generateKeyFromPass('abc123', 'hello', 256); + + const efs = await EncryptedFS.createEncryptedFS({ + dbPath: './tmp/db', + dbKey: vaultKey + }); + + const vaultId = vaultsUtils.generateVaultId(); + const vault = await VaultInternal.createVaultInternal({ + vaultId, + keyManager, + efs + }); + + await vault.stop(); + + await efs.stop(); + await keyManager.stop(); + +} + +main(); diff --git a/tests/acl/ACL.test.ts b/tests/acl/ACL.test.ts index a6f8c46f6..82c01757c 100644 --- a/tests/acl/ACL.test.ts +++ b/tests/acl/ACL.test.ts @@ -51,10 +51,10 @@ describe(ACL.name, () => { }, }, }); - vaultId1 = 
vaultsUtils.makeVaultId(idUtils.fromString('vault1xxxxxxxxxx')); - vaultId2 = vaultsUtils.makeVaultId(idUtils.fromString('vault2xxxxxxxxxx')); - vaultId3 = vaultsUtils.makeVaultId(idUtils.fromString('vault3xxxxxxxxxx')); - vaultId4 = vaultsUtils.makeVaultId(idUtils.fromString('vault4xxxxxxxxxx')); + vaultId1 = vaultsUtils.generateVaultId(); + vaultId2 = vaultsUtils.generateVaultId(); + vaultId3 = vaultsUtils.generateVaultId(); + vaultId4 = vaultsUtils.generateVaultId(); }); afterEach(async () => { await db.stop(); @@ -144,7 +144,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { pull: null }, + [vaultId1]: { pull: null }, }, }); // Gestalt2 @@ -153,7 +153,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault2xxxxxxxxxx: { clone: null }, + [vaultId2]: { clone: null }, }, }); // Check g1 perm @@ -215,7 +215,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { pull: null }, + [vaultId1]: { pull: null }, }, }); await acl.unsetVaultAction(vaultId1, nodeIdG1First, 'pull'); @@ -227,7 +227,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: {}, + [vaultId1]: {}, }, }); await acl.setVaultAction(vaultId1, nodeIdG1First, 'pull'); @@ -237,13 +237,13 @@ describe(ACL.name, () => { expect(vaultPerm[nodeIdG1First].vaults[vaultId1]).toHaveProperty('clone'); const vaultPerms = await acl.getVaultPerms(); expect(vaultPerms).toEqual({ - vault1xxxxxxxxxx: { + [vaultId1]: { [nodeIdG1First]: { gestalt: { notify: null, }, vaults: { - vault1xxxxxxxxxx: { pull: null, clone: null }, + [vaultId1]: { pull: null, clone: null }, }, }, }, @@ -257,7 +257,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { pull: null }, + [vaultId1]: { pull: null }, }, }; await acl.setNodesPerm( @@ -288,7 +288,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null }, + [vaultId1]: { clone: null }, }, }); await acl.setVaultAction(vaultId1, nodeIdG1First, 'pull'); @@ -302,39 +302,39 @@ describe(ACL.name, () => { expect(vaultPerm1[nodeIdG1First].vaults[vaultId1]).toHaveProperty('pull'); const vaultPerms = await acl.getVaultPerms(); expect(vaultPerms).toMatchObject({ - vault1xxxxxxxxxx: { + [vaultId1]: { [nodeIdG1First]: { gestalt: { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null, pull: null }, - vault2xxxxxxxxxx: { clone: null, pull: null }, - vault3xxxxxxxxxx: { clone: null, pull: null }, + [vaultId1]: { clone: null, pull: null }, + [vaultId2]: { clone: null, pull: null }, + [vaultId3]: { clone: null, pull: null }, }, }, }, - vault2xxxxxxxxxx: { + [vaultId2]: { [nodeIdG1First]: { gestalt: { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null, pull: null }, - vault2xxxxxxxxxx: { clone: null, pull: null }, - vault3xxxxxxxxxx: { clone: null, pull: null }, + [vaultId1]: { clone: null, pull: null }, + [vaultId2]: { clone: null, pull: null }, + [vaultId3]: { clone: null, pull: null }, }, }, }, - vault3xxxxxxxxxx: { + [vaultId3]: { [nodeIdG1First]: { gestalt: { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null, pull: null }, - vault2xxxxxxxxxx: { clone: null, pull: null }, - vault3xxxxxxxxxx: { clone: null, pull: null }, + [vaultId1]: { clone: null, pull: null }, + [vaultId2]: { clone: null, pull: null }, + [vaultId3]: { clone: null, pull: null }, }, }, }, @@ -355,7 +355,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { pull: null }, + [vaultId1]: { pull: null }, }, }; await acl.setNodesPerm( @@ -395,8 +395,8 @@ 
describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null }, - vault2xxxxxxxxxx: { pull: null }, + [vaultId1]: { clone: null }, + [vaultId2]: { pull: null }, }, }, }); @@ -409,7 +409,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault2xxxxxxxxxx: { pull: null }, + [vaultId2]: { pull: null }, }, }, }); @@ -457,8 +457,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: {}, - vault4xxxxxxxxxx: { pull: null }, + [vaultId1]: {}, + [vaultId4]: { pull: null }, }, }, [nodeIdG1Fourth]: { @@ -466,8 +466,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: {}, - vault4xxxxxxxxxx: { pull: null }, + [vaultId1]: {}, + [vaultId4]: { pull: null }, }, }, [nodeIdG1Third]: { @@ -475,8 +475,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: {}, - vault4xxxxxxxxxx: { pull: null }, + [vaultId1]: {}, + [vaultId4]: { pull: null }, }, }, }); @@ -486,8 +486,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null }, - vault4xxxxxxxxxx: { clone: null }, + [vaultId1]: { clone: null }, + [vaultId4]: { clone: null }, }, }, [nodeIdG2Second]: { @@ -495,8 +495,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null }, - vault4xxxxxxxxxx: { clone: null }, + [vaultId1]: { clone: null }, + [vaultId4]: { clone: null }, }, }, }); diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index eca793c49..1a76ebab9 100644 --- a/tests/agent/GRPCClientAgent.test.ts +++ b/tests/agent/GRPCClientAgent.test.ts @@ -1,5 +1,4 @@ import type { TLSConfig } from '@/network/types'; -import type { NodeIdEncoded, NodeInfo } from '@/nodes/types'; import type * as grpc from '@grpc/grpc-js'; import fs from 'fs'; import path from 'path'; @@ -19,11 +18,8 @@ import GRPCClientAgent from '@/agent/GRPCClientAgent'; import VaultManager from '@/vaults/VaultManager'; import NotificationsManager from '@/notifications/NotificationsManager'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; -import * as nodesPB from '@/proto/js/polykey/v1/nodes/nodes_pb'; import * as agentErrors from '@/agent/errors'; import * as keysUtils from '@/keys/utils'; -import * as nodesUtils from '@/nodes/utils'; import * as testAgentUtils from './utils'; import * as testUtils from '../utils'; @@ -32,11 +28,6 @@ describe(GRPCClientAgent.name, () => { const logger = new Logger(`${GRPCClientAgent.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); - const node1: NodeInfo = { - id: 'v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug' as NodeIdEncoded, - chain: {}, - }; - const nodeId1 = nodesUtils.decodeNodeId(node1.id)!; let mockedGenerateKeyPair: jest.SpyInstance; let mockedGenerateDeterministicKeyPair: jest.SpyInstance; beforeAll(async () => { @@ -159,10 +150,11 @@ describe(GRPCClientAgent.name, () => { keyManager: keyManager, vaultsPath: vaultsPath, nodeConnectionManager: nodeConnectionManager, - vaultsKey: keyManager.vaultKey, + nodeManager: nodeManager, db: db, acl: acl, gestaltGraph: gestaltGraph, + notificationsManager: notificationsManager, fs: fs, logger: logger, }); @@ -174,6 +166,8 @@ describe(GRPCClientAgent.name, () => { sigchain, nodeGraph, notificationsManager, + acl, + gestaltGraph, }); client = await testAgentUtils.openTestAgentClient(port); }, global.defaultTimeout); @@ -208,46 +202,6 @@ describe(GRPCClientAgent.name, () => { const response = await 
client.echo(echoMessage); expect(response.getChallenge()).toBe('yes'); }); - test.skip('can check permissions', async () => { - // FIXME: permissions not implemented on vaults. - // const vault = await vaultManager.createVault('TestAgentVault' as VaultName); - await gestaltGraph.setNode(node1); - // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId); - // await vaultManager.unsetVaultPermissions('12345' as NodeId, vault.vaultId); - const vaultPermMessage = new vaultsPB.NodePermission(); - vaultPermMessage.setNodeId(nodesUtils.encodeNodeId(nodeId1)); - // VaultPermMessage.setVaultId(vault.vaultId); - const response = await client.vaultsPermissionsCheck(vaultPermMessage); - expect(response.getPermission()).toBeFalsy(); - // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId); - const response2 = await client.vaultsPermissionsCheck(vaultPermMessage); - expect(response2.getPermission()).toBeTruthy(); - // Await vaultManager.deleteVault(vault.vaultId); - }); - test.skip('can scan vaults', async () => { - // FIXME, permissions not implemented on vaults - // const vault = await vaultManager.createVault('TestAgentVault' as VaultName); - await gestaltGraph.setNode(node1); - const nodeIdMessage = new nodesPB.Node(); - nodeIdMessage.setNodeId(nodesUtils.encodeNodeId(nodeId1)); - const response = client.vaultsScan(nodeIdMessage); - const data: string[] = []; - for await (const resp of response) { - const chunk = resp.getNameOrId(); - data.push(Buffer.from(chunk).toString()); - } - expect(data).toStrictEqual([]); - fail(); - // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId); - // const response2 = client.vaultsScan(nodeIdMessage); - // Const data2: string[] = []; - // for await (const resp of response2) { - // Const chunk = resp.getNameOrId(); - // Data2.push(Buffer.from(chunk).toString()); - // } - // Expect(data2).toStrictEqual([`${vault.vaultName}\t${vault.vaultId}`]); - // await vaultManager.deleteVault(vault.vaultId); - }); test('Can connect over insecure connection.', async () => { const echoMessage = new utilsPB.EchoMessage(); echoMessage.setChallenge('yes'); diff --git a/tests/agent/utils.ts b/tests/agent/utils.ts index 6b91930dd..7dd702d91 100644 --- a/tests/agent/utils.ts +++ b/tests/agent/utils.ts @@ -6,6 +6,8 @@ import type { VaultManager } from '@/vaults'; import type { NodeGraph, NodeConnectionManager, NodeManager } from '@/nodes'; import type { Sigchain } from '@/sigchain'; import type { NotificationsManager } from '@/notifications'; +import type { ACL } from '@/acl'; +import type { GestaltGraph } from '@/gestalts'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import * as grpc from '@grpc/grpc-js'; import { promisify } from '@/utils'; @@ -24,6 +26,8 @@ async function openTestAgentServer({ nodeGraph, sigchain, notificationsManager, + acl, + gestaltGraph, }: { keyManager: KeyManager; vaultManager: VaultManager; @@ -32,15 +36,19 @@ async function openTestAgentServer({ nodeGraph: NodeGraph; sigchain: Sigchain; notificationsManager: NotificationsManager; + acl: ACL; + gestaltGraph: GestaltGraph; }) { const agentService: IAgentServiceServer = createAgentService({ keyManager, vaultManager, nodeManager, nodeGraph, - sigchain: sigchain, - notificationsManager: notificationsManager, - nodeConnectionManager: nodeConnectionManager, + sigchain, + notificationsManager, + nodeConnectionManager, + acl, + gestaltGraph, }); const server = new grpc.Server(); diff --git a/tests/bin/secrets/secrets.test.ts 
b/tests/bin/secrets/secrets.test.ts index c72aee00d..4efb4561c 100644 --- a/tests/bin/secrets/secrets.test.ts +++ b/tests/bin/secrets/secrets.test.ts @@ -56,7 +56,7 @@ describe('CLI secrets', () => { 'should create secrets', async () => { const vaultName = 'Vault1' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); const secretPath = path.join(dataDir, 'secret'); await fs.promises.writeFile(secretPath, 'this is a secret'); @@ -72,11 +72,13 @@ describe('CLI secrets', () => { const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual(['MySecret']); - expect( - (await vaultOps.getSecret(vault, 'MySecret')).toString(), - ).toStrictEqual('this is a secret'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual(['MySecret']); + expect( + (await vaultOps.getSecret(vault, 'MySecret')).toString(), + ).toStrictEqual('this is a secret'); + }); }, global.defaultTimeout * 2, ); @@ -84,28 +86,33 @@ describe('CLI secrets', () => { describe('commandDeleteSecret', () => { test('should delete secrets', async () => { const vaultName = 'Vault2' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); - - let list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual(['MySecret']); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual(['MySecret']); + }); command = ['secrets', 'delete', '-np', dataDir, `${vaultName}:MySecret`]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual([]); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual([]); + }); }); }); describe('commandGetSecret', () => { test('should retrieve secrets', async () => { const vaultName = 'Vault3' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + }); command = ['secrets', 'get', '-np', dataDir, `${vaultName}:MySecret`]; @@ -116,11 +123,13 @@ describe('CLI secrets', () => { describe('commandListSecrets', () => { test('should list secrets', async () => { const vaultName = 'Vault4' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await vaultOps.addSecret(vault, 'MySecret1', 'this is the secret 1'); - await vaultOps.addSecret(vault, 'MySecret2', 'this is the secret 2'); - await vaultOps.addSecret(vault, 'MySecret3', 'this is the secret 3'); + await 
polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret1', 'this is the secret 1'); + await vaultOps.addSecret(vault, 'MySecret2', 'this is the secret 2'); + await vaultOps.addSecret(vault, 'MySecret3', 'this is the secret 3'); + }); command = ['secrets', 'list', '-np', dataDir, vaultName]; @@ -131,7 +140,7 @@ describe('CLI secrets', () => { describe('commandNewDir', () => { test('should make a directory', async () => { const vaultName = 'Vault5' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); command = [ 'secrets', @@ -145,25 +154,33 @@ describe('CLI secrets', () => { const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - await vaultOps.addSecret(vault, 'dir1/MySecret1', 'this is the secret 1'); - await vaultOps.addSecret( - vault, - 'dir1/dir2/MySecret2', - 'this is the secret 2', - ); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret( + vault, + 'dir1/MySecret1', + 'this is the secret 1', + ); + await vaultOps.addSecret( + vault, + 'dir1/dir2/MySecret2', + 'this is the secret 2', + ); - const list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual( - ['dir1/MySecret1', 'dir1/dir2/MySecret2'].sort(), - ); + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual( + ['dir1/MySecret1', 'dir1/dir2/MySecret2'].sort(), + ); + }); }); }); describe('commandRenameSecret', () => { test('should rename secrets', async () => { const vaultName = 'Vault6' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + }); command = [ 'secrets', @@ -177,23 +194,26 @@ describe('CLI secrets', () => { const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual(['MyRenamedSecret']); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual(['MyRenamedSecret']); + }); }); }); describe('commandUpdateSecret', () => { test('should update secrets', async () => { const vaultName = 'Vault7' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); const secretPath = path.join(dataDir, 'secret'); await fs.promises.writeFile(secretPath, 'updated-content'); - await vaultOps.addSecret(vault, 'MySecret', 'original-content'); - - expect( - (await vaultOps.getSecret(vault, 'MySecret')).toString(), - ).toStrictEqual('original-content'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret', 'original-content'); + expect( + (await vaultOps.getSecret(vault, 'MySecret')).toString(), + ).toStrictEqual('original-content'); + }); command = [ 'secrets', @@ -207,17 +227,19 @@ describe('CLI secrets', () => { const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); - const 
list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual(['MySecret']); - expect( - (await vaultOps.getSecret(vault, 'MySecret')).toString(), - ).toStrictEqual('updated-content'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual(['MySecret']); + expect( + (await vaultOps.getSecret(vault, 'MySecret')).toString(), + ).toStrictEqual('updated-content'); + }); }); }); describe('commandNewDirSecret', () => { test('should add a directory of secrets', async () => { const vaultName = 'Vault8' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); const secretDir = path.join(dataDir, 'secrets'); await fs.promises.mkdir(secretDir); @@ -234,20 +256,43 @@ describe('CLI secrets', () => { 'this is the secret 3', ); - let list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual([]); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual([]); + }); command = ['secrets', 'dir', '-np', dataDir, secretDir, vaultName]; const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); - list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual([ - 'secrets/secret-1', - 'secrets/secret-2', - 'secrets/secret-3', - ]); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual([ + 'secrets/secret-1', + 'secrets/secret-2', + 'secrets/secret-3', + ]); + }); + }); + }); + describe('commandStat', () => { + test('should retrieve secrets', async () => { + const vaultName = 'Vault3' as VaultName; + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); + + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + }); + + command = ['secrets', 'stat', '-np', dataDir, `${vaultName}:MySecret`]; + + const result = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('nlink: 1'); + expect(result.stdout).toContain('blocks: 1'); + expect(result.stdout).toContain('blksize: 4096'); + expect(result.stdout).toContain('size: 18'); }); }); }); diff --git a/tests/bin/vaults/vaults.test.ts b/tests/bin/vaults/vaults.test.ts index db23e80fc..84772f2c3 100644 --- a/tests/bin/vaults/vaults.test.ts +++ b/tests/bin/vaults/vaults.test.ts @@ -1,17 +1,23 @@ -import type { NodeIdEncoded, NodeInfo } from '@/nodes/types'; -import type { Vault, VaultName } from '@/vaults/types'; +import type { NodeIdEncoded, NodeAddress, NodeInfo } from '@/nodes/types'; +import type { VaultId, VaultName } from '@/vaults/types'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; -import { makeVaultIdPretty } from '@/vaults/utils'; import { utils as nodesUtils } from '@/nodes'; -import * as keysUtils from '@/keys/utils'; +import { utils as vaultsUtils } from '@/vaults'; +import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; import * as testBinUtils from '../utils'; +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + 
generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); + /** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. + * This test file has been optimised to use only one instance of PolykeyAgent where possible. * Setting up the PolykeyAgent has been done in a beforeAll block. * Keep this in mind when adding or editing tests. * Any side effects need to be undone when the test has completed. @@ -33,16 +39,12 @@ describe('CLI vaults', () => { let vaultNumber: number; let vaultName: VaultName; - // Constants const nodeId1Encoded = 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0' as NodeIdEncoded; - const nodeId1 = nodesUtils.decodeNodeId(nodeId1Encoded)!; const nodeId2Encoded = 'vrcacp9vsb4ht25hds6s4lpp2abfaso0mptcfnh499n35vfcn2gkg' as NodeIdEncoded; - // Const nodeId2 = nodesUtils.decodeNodeId(nodeId2Encoded); const nodeId3Encoded = 'v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug' as NodeIdEncoded; - // Const nodeId3 = nodesUtils.decodeNodeId(nodeId3Encoded); const node1: NodeInfo = { id: nodeId1Encoded, @@ -63,16 +65,7 @@ describe('CLI vaults', () => { return `vault-${vaultNumber}` as VaultName; } - const mockedGenerateDeterministicKeyPair = jest.spyOn( - keysUtils, - 'generateDeterministicKeyPair', - ); - beforeAll(async () => { - mockedGenerateDeterministicKeyPair.mockImplementation((bits, _) => { - return keysUtils.generateKeyPair(bits); - }); - dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -203,271 +196,172 @@ describe('CLI vaults', () => { expect(namesList).not.toContain(vaultName); }); }); - describe.skip('commandVaultStats', () => { - test('should return the stats of a vault', async () => { - command = ['vaults', 'stat', '-np', dataDir, vaultName]; - await polykeyAgent.vaultManager.createVault(vaultName); - const id = polykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); + test( + 'should clone and pull a vault', + async () => { + const dataDir2 = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + const targetPolykeyAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath: dataDir2, + logger: logger, + }); + const vaultId = await targetPolykeyAgent.vaultManager.createVault( + vaultName, + ); + await targetPolykeyAgent.vaultManager.withVaults( + [vaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret 1', 'secret the first'); + }); + }, + ); + + await targetPolykeyAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(polykeyAgent.keyManager.getNodeId()), + chain: {}, + }); + const targetNodeId = targetPolykeyAgent.keyManager.getNodeId(); + const targetNodeIdEncoded = nodesUtils.encodeNodeId(targetNodeId); + await polykeyAgent.nodeManager.setNode(targetNodeId, { + host: targetPolykeyAgent.revProxy.getIngressHost(), + port: targetPolykeyAgent.revProxy.getIngressPort(), + }); + await targetPolykeyAgent.nodeManager.setNode( + polykeyAgent.keyManager.getNodeId(), + { + host: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }, + ); + await polykeyAgent.acl.setNodePerm(targetNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + + await targetPolykeyAgent.vaultManager.shareVault( + vaultId, + polykeyAgent.keyManager.getNodeId(), + ); - const result = await testBinUtils.pkStdio([...command], {}, dataDir); - expect(result.exitCode).toBe(0); - }); - }); - describe.skip('commandSetPermsVault', () => { - 
test('should share a vault', async () => { command = [ 'vaults', - 'share', + 'clone', '-np', dataDir, - vaultName, - nodesUtils.encodeNodeId(nodeId1), + vaultsUtils.encodeVaultId(vaultId), + targetNodeIdEncoded, ]; - await polykeyAgent.vaultManager.createVault(vaultName); - const id = await polykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); - const result = await testBinUtils.pkStdio([...command], {}, dataDir); + let result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - fail(); - // FIXME methods not implemented. - // const sharedNodes = await polykeyAgent.vaults.getVaultPermissions( - // id!, - // undefined, - // ); - // const sharedNodesString = JSON.stringify(sharedNodes); - // expect(sharedNodesString).toContain(node1.id); - // expect(sharedNodesString).not.toContain(node2.id); - }); - }); - describe.skip('commandUnsetPermsVault', () => { - test('should un-share a vault', async () => { + + const clonedVaultId = await polykeyAgent.vaultManager.getVaultId( + vaultName, + ); + + await polykeyAgent.vaultManager.withVaults( + [clonedVaultId!], + async (clonedVault) => { + const file = await clonedVault.readF(async (efs) => { + return await efs.readFile('secret 1', { encoding: 'utf8' }); + }); + expect(file).toBe('secret the first'); + }, + ); + + await polykeyAgent.vaultManager.destroyVault(clonedVaultId!); command = [ 'vaults', - 'unshare', + 'clone', '-np', dataDir, vaultName, - nodesUtils.encodeNodeId(nodeId1), + nodesUtils.encodeNodeId(targetNodeId), ]; - // Creating vault. - await polykeyAgent.vaultManager.createVault(vaultName); - const id = await polykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); - - // Init sharing. - fail(); - // FIXME methods not implemented. - // await polykeyAgent.vaults.setVaultPermissions(node1.id, id!); - // await polykeyAgent.vaults.setVaultPermissions(node2.id, id!); - // await polykeyAgent.vaults.setVaultPermissions(node3.id, id!); - - const result = await testBinUtils.pkStdio([...command]); + result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - // Const sharedNodes = await polykeyAgent.vaults.getVaultPermissions( - // id!, - // undefined, - // ); - // expect(sharedNodes[node1.id]['pull']).toBeUndefined(); - // expect(sharedNodes[node2.id]['pull']).toBeNull(); - // expect(sharedNodes[node3.id]['pull']).toBeNull(); - }); - }); - describe.skip('commandVaultPermissions', () => { - test('should get permissions of a vault', async () => { - command = ['vaults', 'perms', '-np', dataDir, vaultName]; - - await polykeyAgent.vaultManager.createVault(vaultName); - const id = await polykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); - fail(); - // FIXME methods not implemented. 
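+      // A rough sketch of how this scenario is exercised by the new API; the
+      // shareVault call and command shape are taken from the
+      // 'commandPermissions' tests added further down in this diff, so the
+      // vault and node names here are placeholders:
+      //   await polykeyAgent.vaultManager.shareVault(vaultId1, targetNodeId1);
+      //   const result = await testBinUtils.pkStdio(
+      //     ['vaults', 'permissions', 'vault1', '-np', dataDir],
+      //     { PK_PASSWORD: 'password' },
+      //     dataDir,
+      //   );
+      //   expect(result.stdout).toContain('pull');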
- // await polykeyAgent.vaults.setVaultPermissions(node1.id, vault.vaultId); - // await polykeyAgent.vaults.setVaultPermissions(node2.id, vault.vaultId); - // await polykeyAgent.vaults.setVaultPermissions(node3.id, vault.vaultId); + const secondClonedVaultId = (await polykeyAgent.vaultManager.getVaultId( + vaultName, + ))!; + await polykeyAgent.vaultManager.withVaults( + [secondClonedVaultId!], + async (secondClonedVault) => { + const file = await secondClonedVault.readF(async (efs) => { + return await efs.readFile('secret 1', { encoding: 'utf8' }); + }); + expect(file).toBe('secret the first'); + }, + ); - // await polykeyAgent.vaults.unsetVaultPermissions(node2.id, vault.vaultId); + await targetPolykeyAgent.vaultManager.withVaults( + [vaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret 2', 'secret the second'); + }); + }, + ); - const result = await testBinUtils.pkStdio([...command]); + command = ['vaults', 'pull', '-np', dataDir, vaultName]; + result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - }); - }); - describe.skip('commandPullVault', () => { - test( - 'should clone a vault', - async () => { - const dataDir2 = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - const targetPolykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: dataDir2, - logger: logger, - }); - const vault = await targetPolykeyAgent.vaultManager.createVault( - vaultName, - ); - const id = await targetPolykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); - await targetPolykeyAgent.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(polykeyAgent.keyManager.getNodeId()), - chain: {}, - }); - fail(); - // FIXME methods not implemented. 
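+          // The permission step this FIXME was waiting on is now
+          // vaultManager.shareVault, and cloning is driven through the CLI,
+          // as in the merged 'should clone and pull a vault' test above:
+          //   await targetPolykeyAgent.vaultManager.shareVault(
+          //     vaultId,
+          //     polykeyAgent.keyManager.getNodeId(),
+          //   );
+          //   await testBinUtils.pkStdio(
+          //     ['vaults', 'clone', '-np', dataDir,
+          //      vaultsUtils.encodeVaultId(vaultId), targetNodeIdEncoded],
+          //     {},
+          //     dataDir,
+          //   );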
- // await targetPolykeyAgent.vaults.setVaultPermissions( - // polykeyAgent.nodes.getNodeId(), - // vault.vaultId, - // ); - - const targetNodeId = targetPolykeyAgent.keyManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); - const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); - await polykeyAgent.nodeGraph.setNode(targetNodeId, { - host: targetHost, - port: targetPort, - }); - // Client agent: Start sending hole-punching packets to the target - await polykeyAgent.nodeConnectionManager.withConnF( - targetNodeId, - async () => {}, - ); - const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); - const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); - // Server agent: start sending hole-punching packets back to the 'client' - // agent (in order to establish a connection) - await targetPolykeyAgent.nodeConnectionManager.holePunchReverse( - clientEgressHost, - clientEgressPort, - ); + await polykeyAgent.vaultManager.withVaults( + [secondClonedVaultId!], + async (secondClonedVault) => { + const file = await secondClonedVault.readF(async (efs) => { + return await efs.readFile('secret 2', { encoding: 'utf8' }); + }); + expect(file).toBe('secret the second'); + }, + ); - command = [ - 'vaults', - 'clone', - '-np', - dataDir, - '-ni', - nodesUtils.encodeNodeId(targetNodeId), - '-vi', - makeVaultIdPretty(vault.vaultId), - ]; - - // Vault does not exist on the source PolykeyAgent so the pull command throws an error which - // caught, the error is checked and if it is ErrorVaultUndefined, then the Agent attempts a - // clone instead - const result = await testBinUtils.pkStdio([...command]); - expect(result.exitCode).toBe(0); - - // Const list = (await polykeyAgent.vaults.listVaults()).map( - // (vault) => vault, - // ); - // expect(JSON.stringify(list)).toContain(vaultName); - - await targetPolykeyAgent.stop(); - await targetPolykeyAgent.destroy(); - await fs.promises.rm(dataDir2, { - force: true, - recursive: true, - }); - }, - global.defaultTimeout * 2, - ); - test( - 'should pull a vault', - async () => { - const dataDir2 = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - const targetPolykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: dataDir2, - logger: logger, - }); - await targetPolykeyAgent.vaultManager.createVault(vaultName); + command = [ + 'vaults', + 'pull', + '-np', + dataDir, + '-pv', + 'InvalidName', + vaultsUtils.encodeVaultId(secondClonedVaultId), + targetNodeIdEncoded, + ]; + result = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result.exitCode).toBe(10); + expect(result.stderr).toContain('ErrorVaultsVaultUndefined'); - const id = await targetPolykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); + command = [ + 'vaults', + 'pull', + '-np', + dataDir, + '-pv', + vaultName, + vaultsUtils.encodeVaultId(secondClonedVaultId), + 'InvalidNodeId', + ]; + result = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result.exitCode).toBe(1); + expect(result.stderr).toContain('ErrorInvalidId'); - await targetPolykeyAgent.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(polykeyAgent.keyManager.getNodeId()), - chain: {}, - }); - fail(); - // FIXME methods not implemented. 
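+          // Once a vault has been shared and cloned, the pull in the merged
+          // test above reduces to a single CLI call, roughly:
+          //   await testBinUtils.pkStdio(
+          //     ['vaults', 'pull', '-np', dataDir, vaultName],
+          //     {},
+          //     dataDir,
+          //   );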
- // await targetPolykeyAgent.vaults.setVaultPermissions( - // polykeyAgent.nodes.getNodeId(), - // vault.vaultId, - // ); - - const targetNodeId = targetPolykeyAgent.keyManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); - const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); - await polykeyAgent.nodeGraph.setNode(targetNodeId, { - host: targetHost, - port: targetPort, - }); - // Client agent: Start sending hole-punching packets to the target - await polykeyAgent.nodeConnectionManager.withConnF( - targetNodeId, - async () => {}, - ); - const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); - const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); - // Server agent: start sending hole-punching packets back to the 'client' - // agent (in order to establish a connection) - await targetPolykeyAgent.nodeConnectionManager.holePunchReverse( - clientEgressHost, - clientEgressPort, - ); - // Await polykeyAgent.vaults.cloneVault(vault.vaultId, targetNodeId); - - // await vault.addSecret('MySecret', 'This secret will be pulled'); - - // const list = (await polykeyAgent.vaults.listVaults()).map( - // (vault) => vault, - // ); - // const filteredList = list.filter((value) => { - // return value.name === vaultName; - // }); - // expect(filteredList.length).toBe(1); - // const clonedVault = await polykeyAgent.vaults.getVault( - // filteredList[0].id, - // ); - // await expect(clonedVault.listSecrets()).resolves.toStrictEqual([]); - - command = [ - 'vaults', - 'pull', - '-np', - dataDir, - '-vn', - vaultName, - '-ni', - nodesUtils.encodeNodeId(targetNodeId), - ]; - - const result = await testBinUtils.pkStdio([...command]); - expect(result.exitCode).toBe(0); - - // Await expect(clonedVault.listSecrets()).resolves.toStrictEqual([ - // 'MySecret', - // ]); - // await expect(clonedVault.getSecret('MySecret')).resolves.toStrictEqual( - // 'This secret will be pulled', - // ); - - await targetPolykeyAgent.stop(); - await targetPolykeyAgent.destroy(); - await fs.promises.rm(dataDir2, { recursive: true }); - }, - global.defaultTimeout * 2, - ); - }); - describe.skip('commandScanVault', () => { - test('should scan a node for vaults', async () => { + await targetPolykeyAgent.stop(); + await targetPolykeyAgent.destroy(); + await fs.promises.rm(dataDir2, { + force: true, + recursive: true, + }); + }, + global.defaultTimeout * 3, + ); + test( + 'share and unshare vaults', + async () => { const dataDir2 = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -476,6 +370,17 @@ describe('CLI vaults', () => { nodePath: dataDir2, logger: logger, }); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret 1', 'secret'); + }); + }); + + await polykeyAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetPolykeyAgent.keyManager.getNodeId()), + chain: {}, + }); const targetNodeId = targetPolykeyAgent.keyManager.getNodeId(); const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); @@ -484,99 +389,139 @@ describe('CLI vaults', () => { host: targetHost, port: targetPort, }); - // Client agent: Start sending hole-punching packets to the target - await polykeyAgent.nodeConnectionManager.withConnF( - targetNodeId, - async () => {}, + + await targetPolykeyAgent.nodeManager.setNode( + polykeyAgent.keyManager.getNodeId(), + { + host: polykeyAgent.revProxy.getIngressHost(), + 
port: polykeyAgent.revProxy.getIngressPort(), + }, ); - const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); - const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); - // Server agent: start sending hole-punching packets back to the 'client' - // agent (in order to establish a connection) - await targetPolykeyAgent.nodeConnectionManager.holePunchReverse( - clientEgressHost, - clientEgressPort, + await targetPolykeyAgent.acl.setNodePerm( + polykeyAgent.keyManager.getNodeId(), + { + gestalt: { + notify: null, + }, + vaults: {}, + }, ); - await targetPolykeyAgent.vaultManager.createVault( - `${vaultName}-Vault1` as VaultName, - ); - await targetPolykeyAgent.vaultManager.createVault( - `${vaultName}-Vault2` as VaultName, + await expect(() => + targetPolykeyAgent.vaultManager.cloneVault( + polykeyAgent.keyManager.getNodeId(), + vaultId, + ), + ).rejects.toThrow(); + + command = [ + 'vaults', + 'share', + '-np', + dataDir, + vaultName, + nodesUtils.encodeNodeId(targetNodeId), + ]; + let result = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result.exitCode).toBe(0); + + const clonedVaultId = await targetPolykeyAgent.vaultManager.cloneVault( + polykeyAgent.keyManager.getNodeId(), + vaultId, ); - await targetPolykeyAgent.vaultManager.createVault( - `${vaultName}-Vault3` as VaultName, + await targetPolykeyAgent.vaultManager.withVaults( + [clonedVaultId], + async (clonedVault) => { + const file = await clonedVault.readF(async (efs) => { + return await efs.readFile('secret 1', { encoding: 'utf8' }); + }); + expect(file).toBe('secret'); + }, ); - const targetVaults = ( - await targetPolykeyAgent.vaultManager.listVaults() - ).keys(); - const namesList: string[] = []; - for await (const name of targetVaults) { - namesList.push(name); - } - expect(namesList.length).toBe(3); - command = [ 'vaults', - 'scan', + 'unshare', '-np', dataDir, - '-ni', + vaultsUtils.encodeVaultId(vaultId), nodesUtils.encodeNodeId(targetNodeId), ]; - const result = await testBinUtils.pkStdio([...command]); + + result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); + await expect(() => + targetPolykeyAgent.vaultManager.cloneVault( + polykeyAgent.keyManager.getNodeId(), + vaultId, + ), + ).rejects.toThrow(); + await targetPolykeyAgent.stop(); await targetPolykeyAgent.destroy(); - await fs.promises.rmdir(dataDir2, { recursive: true }); - }); - }); + await fs.promises.rm(dataDir2, { recursive: true }); + }, + global.defaultTimeout * 2, + ); describe('commandVaultVersion', () => { test('should switch the version of a vault', async () => { - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); const id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); const secret1 = { name: 'Secret-1', content: 'Secret-1-content' }; const secret2 = { name: 'Secret-1', content: 'Secret-2-content' }; - await vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - }); - const ver1Oid = (await vault.log(1))[0].oid; - - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - }); + const ver1Oid = await polykeyAgent.vaultManager.withVaults( + [vaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + }); + const ver1Oid = (await vault.log(undefined, 1))[0].commitId; + + await vault.writeF(async (efs) => { + await 
efs.writeFile(secret2.name, secret2.content); + }); + return ver1Oid; + }, + ); const command = ['vaults', 'version', '-np', dataDir, vaultName, ver1Oid]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const fileContents = await vault.access(async (efs) => { - return (await efs.readFile(secret1.name)).toString(); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const fileContents = await vault.readF(async (efs) => { + return (await efs.readFile(secret1.name)).toString(); + }); + expect(fileContents).toStrictEqual(secret1.content); }); - expect(fileContents).toStrictEqual(secret1.content); }); test('should switch the version of a vault to the latest version', async () => { - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); const id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); const secret1 = { name: 'Secret-1', content: 'Secret-1-content' }; const secret2 = { name: 'Secret-1', content: 'Secret-2-content' }; - await vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - }); - const ver1Oid = (await vault.log(1))[0].oid; - - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - }); + const ver1Oid = await polykeyAgent.vaultManager.withVaults( + [vaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + }); + const ver1Oid = (await vault.log(undefined, 1))[0].commitId; + + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + }); + return ver1Oid; + }, + ); const command = ['vaults', 'version', '-np', dataDir, vaultName, ver1Oid]; @@ -605,7 +550,7 @@ describe('CLI vaults', () => { const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(10); - expect(result.stderr).toContain('ErrorVaultCommitUndefined'); + expect(result.stderr).toContain('ErrorVaultsWriteFUndefined'); }); test('should throw an error if the vault is not found', async () => { const command = [ @@ -619,59 +564,61 @@ describe('CLI vaults', () => { const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(10); - expect(result.stderr).toContain('ErrorVaultUndefined'); + expect(result.stderr).toContain('ErrorVaultsVaultUndefined'); }); }); describe('commandVaultLog', () => { const secret1 = { name: 'secret1', content: 'Secret-1-content' }; const secret2 = { name: 'secret2', content: 'Secret-2-content' }; - let vault: Vault; - let commit1Oid: string; - let commit2Oid: string; - let commit3Oid: string; + let vaultId: VaultId; + let writeF1Oid: string; + let writeF2Oid: string; + let writeF3Oid: string; beforeEach(async () => { - vault = await polykeyAgent.vaultManager.createVault(vaultName); + vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - }); - commit1Oid = (await vault.log(0))[0].oid; + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + }); + writeF1Oid = (await vault.log(undefined, 0))[0].commitId; - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - }); - commit2Oid = (await vault.log(0))[0].oid; + await 
vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + }); + writeF2Oid = (await vault.log(undefined, 0))[0].commitId; - await vault.commit(async (efs) => { - await efs.unlink(secret2.name); + await vault.writeF(async (efs) => { + await efs.unlink(secret2.name); + }); + writeF3Oid = (await vault.log(undefined, 0))[0].commitId; }); - commit3Oid = (await vault.log(0))[0].oid; }); afterEach(async () => { - await polykeyAgent.vaultManager.destroyVault(vault.vaultId); + await polykeyAgent.vaultManager.destroyVault(vaultId); }); - test('Should get all commits', async () => { + test('Should get all writeFs', async () => { const command = ['vaults', 'log', '-np', dataDir, vaultName]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); - expect(result.stdout).toContain(commit1Oid); - expect(result.stdout).toContain(commit2Oid); - expect(result.stdout).toContain(commit3Oid); + expect(result.stdout).toContain(writeF1Oid); + expect(result.stdout).toContain(writeF2Oid); + expect(result.stdout).toContain(writeF3Oid); }); test('should get a part of the log', async () => { const command = ['vaults', 'log', '-np', dataDir, '-d', '2', vaultName]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); - expect(result.stdout).not.toContain(commit1Oid); - expect(result.stdout).toContain(commit2Oid); - expect(result.stdout).toContain(commit3Oid); + expect(result.stdout).not.toContain(writeF1Oid); + expect(result.stdout).toContain(writeF2Oid); + expect(result.stdout).toContain(writeF3Oid); }); - test('should get a specific commit', async () => { + test('should get a specific writeF', async () => { const command = [ 'vaults', 'log', @@ -681,15 +628,199 @@ describe('CLI vaults', () => { '1', vaultName, '-ci', - commit2Oid, + writeF2Oid, ]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); - expect(result.stdout).not.toContain(commit1Oid); - expect(result.stdout).toContain(commit2Oid); - expect(result.stdout).not.toContain(commit3Oid); + expect(result.stdout).not.toContain(writeF1Oid); + expect(result.stdout).toContain(writeF2Oid); + expect(result.stdout).not.toContain(writeF3Oid); }); test.todo('test formatting of the output'); }); + describe('commandScanNode', () => { + test( + 'should return the vaults names and ids of the remote vault', + async () => { + let remoteOnline: PolykeyAgent | undefined; + try { + remoteOnline = await PolykeyAgent.createPolykeyAgent({ + password, + logger, + nodePath: path.join(dataDir, 'remoteOnline'), + }); + const remoteOnlineNodeId = remoteOnline.keyManager.getNodeId(); + const remoteOnlineNodeIdEncoded = + nodesUtils.encodeNodeId(remoteOnlineNodeId); + await polykeyAgent.nodeManager.setNode(remoteOnlineNodeId, { + host: remoteOnline.revProxy.getIngressHost(), + port: remoteOnline.revProxy.getIngressPort(), + } as NodeAddress); + + await remoteOnline.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(polykeyAgent.keyManager.getNodeId()), + chain: {}, + }); + + await remoteOnline.gestaltGraph.setGestaltActionByNode( + polykeyAgent.keyManager.getNodeId(), + 'scan', + ); + + const vault1Id = await remoteOnline.vaultManager.createVault( + 'Vault1' as VaultName, + ); + const vault2Id = await remoteOnline.vaultManager.createVault( + 'Vault2' as VaultName, + ); + const vault3Id = await remoteOnline.vaultManager.createVault( + 'Vault3' as VaultName, + ); + const commands = [ + 'vaults', + 'scan', + 
remoteOnlineNodeIdEncoded, + '-np', + dataDir, + ]; + const result = await testBinUtils.pkStdio( + commands, + { PK_PASSWORD: 'password' }, + dataDir, + ); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain( + `Vault1\t\t${vaultsUtils.encodeVaultId(vault1Id)}`, + ); + expect(result.stdout).toContain( + `Vault2\t\t${vaultsUtils.encodeVaultId(vault2Id)}`, + ); + expect(result.stdout).toContain( + `Vault3\t\t${vaultsUtils.encodeVaultId(vault3Id)}`, + ); + } finally { + await remoteOnline?.stop(); + await remoteOnline?.destroy(); + } + }, + global.defaultTimeout * 2, + ); + }); + describe('commandPermissions', () => { + test('Should return nodeIds and their permissions', async () => { + let remoteKeynode1: PolykeyAgent | undefined; + let remoteKeynode2: PolykeyAgent | undefined; + try { + // A ridiculous amount of setup. + const vaultId1 = await polykeyAgent.vaultManager.createVault( + 'vault1' as VaultName, + ); + const vaultId2 = await polykeyAgent.vaultManager.createVault( + 'vault2' as VaultName, + ); + + remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 1'), + nodePath: path.join(dataDir, 'remoteKeynode1'), + }); + remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 2'), + nodePath: path.join(dataDir, 'remoteKeynode2'), + }); + + const targetNodeId1 = remoteKeynode1.keyManager.getNodeId(); + const targetNodeId2 = remoteKeynode2.keyManager.getNodeId(); + await polykeyAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId1), + chain: {}, + }); + await polykeyAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId2), + chain: {}, + }); + await polykeyAgent.nodeManager.setNode(targetNodeId1, { + host: remoteKeynode1.revProxy.getIngressHost(), + port: remoteKeynode1.revProxy.getIngressPort(), + }); + await polykeyAgent.nodeManager.setNode(targetNodeId2, { + host: remoteKeynode2.revProxy.getIngressHost(), + port: remoteKeynode2.revProxy.getIngressPort(), + }); + + await remoteKeynode1.nodeManager.setNode( + polykeyAgent.keyManager.getNodeId(), + { + host: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }, + ); + await remoteKeynode2.nodeManager.setNode( + polykeyAgent.keyManager.getNodeId(), + { + host: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }, + ); + await remoteKeynode1.acl.setNodePerm( + polykeyAgent.keyManager.getNodeId(), + { + gestalt: { + notify: null, + }, + vaults: {}, + }, + ); + await remoteKeynode2.acl.setNodePerm( + polykeyAgent.keyManager.getNodeId(), + { + gestalt: { + notify: null, + }, + vaults: {}, + }, + ); + + await polykeyAgent.vaultManager.shareVault(vaultId1, targetNodeId1); + await polykeyAgent.vaultManager.shareVault(vaultId1, targetNodeId2); + await polykeyAgent.vaultManager.shareVault(vaultId2, targetNodeId1); + + const vaultMessage = new vaultsPB.Vault(); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId1)); + + // Now we call and test the command + const command1 = ['vaults', 'permissions', 'vault1', '-np', dataDir]; + const result1 = await testBinUtils.pkStdio( + command1, + { PK_PASSWORD: 'password' }, + dataDir, + ); + expect(result1.exitCode).toBe(0); + expect(result1.stdout).toContain(remoteKeynode1.keyManager.getNodeId()); + expect(result1.stdout).toContain(remoteKeynode2.keyManager.getNodeId()); + expect(result1.stdout).toContain('pull'); + expect(result1.stdout).toContain('clone'); + + // 
And the other vault + const command2 = ['vaults', 'permissions', 'vault2', '-np', dataDir]; + const result2 = await testBinUtils.pkStdio( + command2, + { PK_PASSWORD: 'password' }, + dataDir, + ); + expect(result2.exitCode).toBe(0); + expect(result2.stdout).toContain(targetNodeId1); + expect(result2.stdout).not.toContain(targetNodeId2); + expect(result2.stdout).toContain('pull'); + expect(result2.stdout).toContain('clone'); + } finally { + await remoteKeynode1?.stop(); + await remoteKeynode1?.destroy(); + await remoteKeynode2?.stop(); + await remoteKeynode2?.destroy(); + } + }); + }); }); diff --git a/tests/client/rpcVaults.test.ts b/tests/client/rpcVaults.test.ts index 5b051f25f..727080a57 100644 --- a/tests/client/rpcVaults.test.ts +++ b/tests/client/rpcVaults.test.ts @@ -1,7 +1,9 @@ import type * as grpc from '@grpc/grpc-js'; import type { VaultManager } from '@/vaults'; -import type { Vault, VaultName } from '@/vaults/types'; +import type { VaultId, VaultName } from '@/vaults/types'; import type { ClientServiceClient } from '@/proto/js/polykey/v1/client_service_grpc_pb'; +import type { Stat } from 'encryptedfs'; +import type * as permissionsPB from '@/proto/js/polykey/v1/permissions/permissions_pb'; import os from 'os'; import path from 'path'; import fs from 'fs'; @@ -16,7 +18,8 @@ import * as grpcUtils from '@/grpc/utils'; import * as vaultErrors from '@/vaults/errors'; import * as vaultsUtils from '@/vaults/utils'; import { vaultOps } from '@/vaults'; -import * as testUtils from './utils'; +import * as nodesUtils from '@/nodes/utils'; +import * as clientUtils from './utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -77,16 +80,16 @@ describe('Vaults client service', () => { vaultManager = pkAgent.vaultManager; - [server, port] = await testUtils.openTestClientServer({ + [server, port] = await clientUtils.openTestClientServer({ pkAgent, secure: false, }); - client = await testUtils.openSimpleClientClient(port); + client = await clientUtils.openSimpleClientClient(port); }, global.polykeyStartupTimeout); afterAll(async () => { - await testUtils.closeTestClientServer(server); - testUtils.closeSimpleClientClient(client); + await clientUtils.closeTestClientServer(server); + clientUtils.closeSimpleClientClient(client); await pkAgent.stop(); await pkAgent.destroy(); @@ -99,7 +102,7 @@ describe('Vaults client service', () => { }); beforeEach(async () => { const sessionToken = await pkAgent.sessionManager.createToken(); - callCredentials = testUtils.createCallCredentials(sessionToken); + callCredentials = clientUtils.createCallCredentials(sessionToken); }); afterEach(async () => { const aliveVaults = await vaultManager.listVaults(); @@ -135,9 +138,7 @@ describe('Vaults client service', () => { const vaultId = await createVault(vaultMessage, callCredentials); const vaultNames = await vaultManager.listVaults(); expect(vaultNames.get(vaultList[0])).toBeTruthy(); - expect(vaultNames.get(vaultList[0])).toStrictEqual( - vaultsUtils.makeVaultId(vaultId.getNameOrId()), - ); + expect(vaultNames.get(vaultList[0])).toStrictEqual(vaultId.getNameOrId()); }); test('should delete vaults', async () => { const deleteVault = grpcUtils.promisifyUnaryCall( @@ -162,21 +163,21 @@ describe('Vaults client service', () => { client, client.vaultsRename, ); - const vault = await vaultManager.createVault(vaultList[0]); + const vaultId1 = await vaultManager.createVault(vaultList[0]); const vaultRenameMessage = new vaultsPB.Rename(); const vaultMessage = new vaultsPB.Vault(); - 
vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId1)); vaultRenameMessage.setVault(vaultMessage); vaultRenameMessage.setNewName(vaultList[1]); - const vaultId = await renameVault(vaultRenameMessage, callCredentials); - expect(vaultsUtils.makeVaultId(vaultId.getNameOrId())).toStrictEqual( - vault.vaultId, + const vaultId2 = await renameVault(vaultRenameMessage, callCredentials); + expect(vaultsUtils.decodeVaultId(vaultId2.getNameOrId())).toStrictEqual( + vaultId1, ); const renamedVaultId = await vaultManager.getVaultId(vaultList[1]); - expect(renamedVaultId).toEqual(vault.vaultId); + expect(renamedVaultId).toEqual(vaultId1); }); describe('Version', () => { const secretVer1 = { @@ -187,11 +188,11 @@ describe('Vaults client service', () => { name: secretList[0], content: 'Secret-1-content-ver2', }; - let vault: Vault; + let vaultId: VaultId; let vaultsVersion; beforeEach(async () => { - vault = await vaultManager.createVault(vaultList[0]); + vaultId = await vaultManager.createVault(vaultList[0]); vaultsVersion = grpcUtils.promisifyUnaryCall( client, client.vaultsVersion, @@ -200,13 +201,20 @@ describe('Vaults client service', () => { test('should switch a vault to a version', async () => { // Commit some history - await vault.commit(async (efs) => { - await efs.writeFile(secretVer1.name, secretVer1.content); - }); - const ver1Oid = (await vault.log())[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile(secretVer2.name, secretVer2.content); - }); + const ver1Oid = await vaultManager.withVaults( + [vaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secretVer1.name, secretVer1.content); + }); + const ver1Oid = (await vault.log())[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile(secretVer2.name, secretVer2.content); + }); + return ver1Oid; + }, + ); + // Revert the version const vaultMessage = new vaultsPB.Vault(); vaultMessage.setNameOrId(vaultList[0]); @@ -221,22 +229,25 @@ describe('Vaults client service', () => { ); expect(version.getIsLatestVersion()).toBeFalsy(); // Read old history - await vault.access(async (efs) => { - expect( - (await efs.readFile(secretVer1.name)).toString(), - ).toStrictEqual(secretVer1.content); + + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.readF(async (efs) => { + expect( + (await efs.readFile(secretVer1.name)).toString(), + ).toStrictEqual(secretVer1.content); + }); }); }); test('should fail to find a non existent version', async () => { // Revert the version const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); const vaultVersionMessage = new vaultsPB.Version(); vaultVersionMessage.setVault(vaultMessage); vaultVersionMessage.setVersionId('invalidOid'); const version = vaultsVersion(vaultVersionMessage, callCredentials); await expect(version).rejects.toThrow( - vaultErrors.ErrorVaultCommitUndefined, + vaultErrors.ErrorVaultReferenceMissing, ); }); }); @@ -244,7 +255,7 @@ describe('Vaults client service', () => { let vaultLog; const secret1 = { name: secretList[0], content: 'Secret-1-content' }; const secret2 = { name: secretList[1], content: 'Secret-2-content' }; - let vault: Vault; + let vaultId: VaultId; let commit1Oid: string; let commit2Oid: string; let commit3Oid: string; @@ -254,22 +265,24 @@ describe('Vaults client service', () => { client, 
client.vaultsLog, ); - vault = await vaultManager.createVault(vaultList[0]); - - await vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - }); - commit1Oid = (await vault.log(0))[0].oid; - - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); + vaultId = await vaultManager.createVault(vaultList[0]); + + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + }); + commit1Oid = (await vault.log(undefined, 0))[0].commitId; + + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + }); + commit2Oid = (await vault.log(undefined, 0))[0].commitId; + + await vault.writeF(async (efs) => { + await efs.unlink(secret2.name); + }); + commit3Oid = (await vault.log(undefined, 0))[0].commitId; }); - commit2Oid = (await vault.log(0))[0].oid; - - await vault.commit(async (efs) => { - await efs.unlink(secret2.name); - }); - commit3Oid = (await vault.log(0))[0].oid; }); test('should get the full log', async () => { @@ -325,6 +338,109 @@ describe('Vaults client service', () => { expect(logMessages[0].getOid()).toEqual(commit2Oid); }); }); + test('should get vault permissions', async () => { + const vaultsPermissionsGet = + grpcUtils.promisifyReadableStreamCall( + client, + client.vaultsPermissionsGet, + ); + + let remoteKeynode1: PolykeyAgent | undefined; + let remoteKeynode2: PolykeyAgent | undefined; + try { + remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 1'), + nodePath: path.join(dataDir, 'remoteKeynode1'), + }); + remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 2'), + nodePath: path.join(dataDir, 'remoteKeynode2'), + }); + + const targetNodeId1 = remoteKeynode1.keyManager.getNodeId(); + const targetNodeId2 = remoteKeynode2.keyManager.getNodeId(); + const pkAgentNodeId = pkAgent.keyManager.getNodeId(); + await pkAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId1), + chain: {}, + }); + await pkAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId2), + chain: {}, + }); + + await pkAgent.nodeManager.setNode(targetNodeId1, { + host: remoteKeynode1.revProxy.getIngressHost(), + port: remoteKeynode1.revProxy.getIngressPort(), + }); + await pkAgent.nodeManager.setNode(targetNodeId2, { + host: remoteKeynode2.revProxy.getIngressHost(), + port: remoteKeynode2.revProxy.getIngressPort(), + }); + + await remoteKeynode1.nodeManager.setNode( + pkAgent.keyManager.getNodeId(), + { + host: pkAgent.revProxy.getIngressHost(), + port: pkAgent.revProxy.getIngressPort(), + }, + ); + await remoteKeynode2.nodeManager.setNode(targetNodeId2, { + host: pkAgent.revProxy.getIngressHost(), + port: pkAgent.revProxy.getIngressPort(), + }); + await remoteKeynode1.acl.setNodePerm(pkAgentNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + await remoteKeynode2.acl.setNodePerm(pkAgentNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + + const vaultId1 = await vaultManager.createVault(vaultList[0]); + const vaultId2 = await vaultManager.createVault(vaultList[1]); + + await vaultManager.shareVault(vaultId1, targetNodeId1); + await vaultManager.shareVault(vaultId1, targetNodeId2); + await vaultManager.shareVault(vaultId2, targetNodeId1); + + const vaultMessage = new vaultsPB.Vault(); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId1)); + + 
const permissionsStream = vaultsPermissionsGet( + vaultMessage, + callCredentials, + ); + const list: Record[] = []; + for await (const permission of permissionsStream) { + expect(permission.getActionsList()).toEqual(['pull', 'clone']); + list.push(permission.toObject()); + } + expect(list).toHaveLength(2); + + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId2)); + const permissionStream2 = vaultsPermissionsGet( + vaultMessage, + callCredentials, + ); + for await (const permission of permissionStream2) { + expect(permission.getActionsList()).toEqual(['pull', 'clone']); + const node = permission.getNode(); + const nodeId = node?.getNodeId(); + expect(nodeId).toEqual(targetNodeId1); + } + } finally { + await remoteKeynode1?.stop(); + await remoteKeynode2?.stop(); + } + }); }); describe('Secrets', () => { test('should make a directory in a vault', async () => { @@ -333,17 +449,19 @@ describe('Vaults client service', () => { client.vaultsSecretsMkdir, ); - const vault = await vaultManager.createVault(vaultList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); const dirPath = 'dir/dir1/dir2'; const vaultMkdirMessage = new vaultsPB.Mkdir(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); vaultMkdirMessage.setVault(vaultMessage); vaultMkdirMessage.setDirName(dirPath); vaultMkdirMessage.setRecursive(true); await mkdirVault(vaultMkdirMessage, callCredentials); - await vault.access(async (efs) => { - expect(await efs.exists(dirPath)).toBeTruthy(); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.readF(async (efs) => { + expect(await efs.exists(dirPath)).toBeTruthy(); + }); }); }); test('should list secrets in a vault', async () => { @@ -353,14 +471,17 @@ describe('Vaults client service', () => { client.vaultsSecretsList, ); - const vault = await vaultManager.createVault(vaultList[0]); - await vault.commit(async (efs) => { - for (const secretName of secretList) { - await efs.writeFile(secretName, secretName); - } + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + for (const secretName of secretList) { + await efs.writeFile(secretName, secretName); + } + }); }); + const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); const secretsStream = listSecretsVault(vaultMessage, callCredentials); const names: Array = []; for await (const secret of secretsStream) { @@ -375,22 +496,27 @@ describe('Vaults client service', () => { client.vaultsSecretsDelete, ); - const vault = await vaultManager.createVault(vaultList[0]); - await vault.commit(async (efs) => { - for (const secretName of secretList) { - await efs.writeFile(secretName, secretName); - } + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + for (const secretName of secretList) { + await efs.writeFile(secretName, secretName); + } + }); }); + const secretMessage = new secretsPB.Secret(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretMessage.setVault(vaultMessage); 
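+        // Secret messages nest a Vault message; the nameOrId field takes the
+        // encoded VaultId here (vaultsUtils.encodeVaultId), though a plain
+        // vault name is also accepted by this field.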
secretMessage.setSecretName(secretList[0]); await deleteSecretVault(secretMessage, callCredentials); - const secrets = await vault.access(async (efs) => { - return await efs.readdir('.', { encoding: 'utf8' }); + await vaultManager.withVaults([vaultId], async (vault) => { + const secrets = await vault.readF(async (efs) => { + return await efs.readdir('.', { encoding: 'utf8' }); + }); + expect(secrets.sort()).toEqual(secretList.slice(1).sort()); }); - expect(secrets.sort()).toEqual(secretList.slice(1).sort()); }); test('should edit secrets in a vault', async () => { const editSecretVault = @@ -398,21 +524,26 @@ describe('Vaults client service', () => { client, client.vaultsSecretsEdit, ); - const vault = await vaultManager.createVault(vaultList[0]); - await vault.commit(async (efs) => { - await efs.writeFile(secretList[0], secretList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secretList[0], secretList[0]); + }); }); const secretMessage = new secretsPB.Secret(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretMessage.setVault(vaultMessage); secretMessage.setSecretName(secretList[0]); secretMessage.setSecretContent(Buffer.from('content-change')); await editSecretVault(secretMessage, callCredentials); - await vault.access(async (efs) => { - expect((await efs.readFile(secretList[0])).toString()).toStrictEqual( - 'content-change', - ); + + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.readF(async (efs) => { + expect((await efs.readFile(secretList[0])).toString()).toStrictEqual( + 'content-change', + ); + }); }); }); test('should get secrets in a vault', async () => { @@ -420,13 +551,15 @@ describe('Vaults client service', () => { client, client.vaultsSecretsGet, ); - const vault = await vaultManager.createVault(vaultList[0]); - await vault.commit(async (efs) => { - await efs.writeFile(secretList[0], secretList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secretList[0], secretList[0]); + }); }); const secretMessage = new secretsPB.Secret(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretMessage.setVault(vaultMessage); secretMessage.setSecretName(secretList[0]); const secret = await getSecretVault(secretMessage, callCredentials); @@ -439,24 +572,29 @@ describe('Vaults client service', () => { client, client.vaultsSecretsRename, ); - const vault = await vaultManager.createVault(vaultList[0]); - await vault.commit(async (efs) => { - await efs.writeFile(secretList[0], secretList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secretList[0], secretList[0]); + }); }); const secretRenameMessage = new secretsPB.Rename(); const vaultMessage = new vaultsPB.Vault(); const secretMessage = new secretsPB.Secret(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); 
secretMessage.setSecretName(secretList[0]); secretMessage.setVault(vaultMessage); secretRenameMessage.setNewName(secretList[1]); secretRenameMessage.setOldSecret(secretMessage); await renameSecretVault(secretRenameMessage, callCredentials); - const secrets = await vault.access(async (efs) => { - return await efs.readdir('.'); + + await vaultManager.withVaults([vaultId], async (vault) => { + const secrets = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(secrets.sort()).toEqual(secretList.splice(1, 1).sort()); }); - expect(secrets.sort()).toEqual(secretList.splice(1, 1).sort()); }); test('should add secrets in a vault', async () => { const newSecretVault = @@ -465,20 +603,23 @@ describe('Vaults client service', () => { client.vaultsSecretsNew, ); - const vault = await vaultManager.createVault(vaultList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); const secretMessage = new secretsPB.Secret(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretMessage.setVault(vaultMessage); secretMessage.setSecretName(secretList[0]); secretMessage.setSecretContent(Buffer.from(secretList[0])); await newSecretVault(secretMessage, callCredentials); - const secret = await vault.access(async (efs) => { - return await efs.readFile(secretList[0], { encoding: 'utf8' }); + await vaultManager.withVaults([vaultId], async (vault) => { + const secret = await vault.readF(async (efs) => { + return await efs.readFile(secretList[0], { encoding: 'utf8' }); + }); + expect(secret).toBe(secretList[0]); }); - expect(secret).toBe(secretList[0]); }); - test.only('should add a directory of secrets in a vault', async () => { + test('should add a directory of secrets in a vault', async () => { const newDirSecretVault = grpcUtils.promisifyUnaryCall( client, @@ -492,82 +633,42 @@ describe('Vaults client service', () => { // Write secret to file await fs.promises.writeFile(secretFile, secret); } - const vault = await vaultManager.createVault(vaultList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); const secretDirectoryMessage = new secretsPB.Directory(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretDirectoryMessage.setVault(vaultMessage); secretDirectoryMessage.setSecretDirectory(secretDir); await newDirSecretVault(secretDirectoryMessage, callCredentials); - const secrets = await vaultOps.listSecrets(vault); - expect(secrets.sort()).toEqual( - secretList.map((secret) => path.join('secretDir', secret)).sort(), + await vaultManager.withVaults([vaultId], async (vault) => { + const secrets = await vaultOps.listSecrets(vault); + expect(secrets.sort()).toEqual( + secretList.map((secret) => path.join('secretDir', secret)).sort(), + ); + }); + }); + test('should stat a file', async () => { + const getSecretStat = grpcUtils.promisifyUnaryCall( + client, + client.vaultsSecretsStat, ); + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secretList[0], secretList[0]); + }); + }); + const secretMessage = new secretsPB.Secret(); + const vaultMessage = new vaultsPB.Vault(); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); + 
secretMessage.setVault(vaultMessage); + secretMessage.setSecretName(secretList[0]); + const result = await getSecretStat(secretMessage, callCredentials); + const stat: Stat = JSON.parse(result.getJson()); + expect(stat.size).toBe(7); + expect(stat.blksize).toBe(4096); + expect(stat.blocks).toBe(1); + expect(stat.nlink).toBe(1); }); - // TODO: Permissions not supported yet. - // test.skip('should add permissions to a vault', async () => { - // fail('Functionality not fully implemented'); - // const vaultName = 'vault1' as VaultName; - // const vaultsSetPerms = - // grpcUtils.promisifyUnaryCall( - // client, - // client.vaultsPermissionsSet, - // ); - - // // Creating a vault - // await vaultManager.createVault(vaultName); - - // // Creating a gestalts state - // await createGestaltState(); - - // const setVaultPermMessage = new vaultsPB.PermSet(); - // const nodeMessage = new nodesPB.Node(); - // const vaultMessage = new vaultsPB.Vault(); - // nodeMessage.setNodeId(node2.id); - // vaultMessage.setNameOrId(vaultName); - // setVaultPermMessage.setVault(vaultMessage); - // setVaultPermMessage.setNode(nodeMessage); - // await vaultsSetPerms(setVaultPermMessage, callCredentials); - - // // FIXME: this is not implemented yet. - // const result = 'Not implemented'; //Await vaultManager.getVaultPermissions(vaultId); - // const stringResult = JSON.stringify(result); - // expect(stringResult).toContain(node2.id); - // expect(stringResult).toContain('pull'); - // }); - // test.skip('should remove permissions to a vault', async () => { - // const vaultName = 'vault1' as VaultName; - // const vaultsUnsetPerms = - // grpcUtils.promisifyUnaryCall( - // client, - // client.vaultsPermissionsUnset, - // ); - - // // Creating a vault. - // const vault = await vaultManager.createVault(vaultName); - // const vaults = await vaultManager.listVaults(); - // const vaultId = vault.vaultId; - - // // Creating a gestalts state - // await createGestaltState(); - // fail('Functionality not fully implemented'); - // // FIXME: not implemented yet - // // await vaultManager.setVaultPermissions(node2.id, vaultId); - - // const unsetVaultPermMessage = new vaultsPB.PermUnset(); - // const nodeMessage = new nodesPB.Node(); - // const vaultMessage = new vaultsPB.Vault(); - // nodeMessage.setNodeId(node2.id); - // vaultMessage.setNameOrId(vaults[0].name); - // unsetVaultPermMessage.setVault(vaultMessage); - // unsetVaultPermMessage.setNode(nodeMessage); - // await vaultsUnsetPerms(unsetVaultPermMessage, callCredentials); - - // // FIXME: not implemented yet - // // const result = await vaultManager.getVaultPermissions(vaultId); - // // const stringResult = JSON.stringify(result); - // // expect(stringResult).toContain(node2.id); - // // expect(stringResult.includes('pull')).toBeFalsy(); - // }); }); }); diff --git a/tests/git/utils.test.ts b/tests/git/utils.test.ts index 357070cd6..3f4b6cf9b 100644 --- a/tests/git/utils.test.ts +++ b/tests/git/utils.test.ts @@ -69,7 +69,11 @@ describe('Git utils', () => { }); describe('list refs', () => { test('on master', async () => { - const refs = await gitUtils.listRefs(efs, '.git', 'refs/heads'); + const refs = await gitUtils.listRefs( + efs, + '.git', + path.join('refs', 'heads'), + ); expect(refs).toEqual(['master']); }); }); @@ -85,11 +89,10 @@ describe('Git utils', () => { expect(gitEncodedString.equals(Buffer.from('0004'))).toBe(true); }); test('an upload pack', async () => { - const uploadPackBuffers = (await gitUtils.uploadPack( - efs, - '.git', - true, - )) as Buffer[]; + const 
uploadPackBuffers = (await gitUtils.uploadPack({ + fs: efs, + advertiseRefs: true, + })) as Buffer[]; const uploadPack = Buffer.concat(uploadPackBuffers); expect(uploadPack.toString('utf8')).toBe( `007d${firstCommit.oid} HEAD\0side-band-64k symref=HEAD:refs/heads/master agent=git/isomorphic-git@1.8.1 @@ -100,20 +103,23 @@ describe('Git utils', () => { }); describe('resolve refs', () => { test('to a commit oid', async () => { - const ref = await gitUtils.resolve(efs, '.git', commits[0].oid); + const ref = await gitUtils.resolve({ + fs: efs, + ref: commits[0].oid, + }); expect(ref).toBe(firstCommit.oid); }); test('to HEAD', async () => { - const ref = await gitUtils.resolve(efs, '.git', 'HEAD'); + const ref = await gitUtils.resolve({ fs: efs, ref: 'HEAD' }); expect(ref).toBe(firstCommit.oid); }); test('to HEAD including depth', async () => { - const ref = await gitUtils.resolve(efs, '.git', 'HEAD', 2); + const ref = await gitUtils.resolve({ fs: efs, ref: 'HEAD', depth: 2 }); expect(ref).toBe('refs/heads/master'); }); test('to non-existant refs', async () => { await expect(() => - gitUtils.resolve(efs, '.git', 'this-is-not-a-ref'), + gitUtils.resolve({ fs: efs, ref: 'this-is-not-a-ref' }), ).rejects.toThrow(gitErrors.ErrorGitUndefinedRefs); }); }); @@ -122,6 +128,7 @@ describe('Git utils', () => { await expect(() => gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', }), @@ -130,6 +137,7 @@ describe('Git utils', () => { test('parsed', async () => { const ref = await gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: firstCommit.oid, }); @@ -139,6 +147,7 @@ describe('Git utils', () => { test('content', async () => { const ref = await gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: firstCommit.oid, format: 'content', @@ -163,6 +172,7 @@ describe('Git utils', () => { test('wrapped', async () => { const ref = await gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: firstCommit.oid, format: 'wrapped', @@ -187,6 +197,7 @@ describe('Git utils', () => { test('deflated', async () => { const ref = await gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: firstCommit.oid, format: 'deflated', @@ -209,6 +220,7 @@ describe('Git utils', () => { ); const ref = await gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: firstCommit.oid, format: 'deflated', diff --git a/tests/nodes/NodeConnection.test.ts b/tests/nodes/NodeConnection.test.ts index 9d50ae94a..c5e06abad 100644 --- a/tests/nodes/NodeConnection.test.ts +++ b/tests/nodes/NodeConnection.test.ts @@ -253,8 +253,9 @@ describe(`${NodeConnection.name} test`, () => { serverVaultManager = await VaultManager.createVaultManager({ keyManager: serverKeyManager, vaultsPath: serverVaultsPath, - nodeConnectionManager: serverNodeConnectionManager, - vaultsKey: serverKeyManager.vaultKey, + nodeConnectionManager: dummyNodeConnectionManager, + nodeManager: serverNodeManager, + notificationsManager: serverNotificationsManager, db: serverDb, acl: serverACL, gestaltGraph: serverGestaltGraph, @@ -279,6 +280,8 @@ describe(`${NodeConnection.name} test`, () => { nodeGraph: serverNodeGraph, sigchain: serverSigchain, notificationsManager: serverNotificationsManager, + acl: serverACL, + gestaltGraph: serverGestaltGraph, }); agentServer = new GRPCServer({ logger: logger, diff --git a/tests/nodes/NodeManager.test.ts b/tests/nodes/NodeManager.test.ts index 11680548b..cbca0e52e 100644 --- a/tests/nodes/NodeManager.test.ts +++ b/tests/nodes/NodeManager.test.ts @@ 
-129,7 +129,6 @@ describe(`${NodeManager.name} test`, () => { recursive: true, }); }); - test( 'pings node', async () => { diff --git a/tests/notifications/utils.test.ts b/tests/notifications/utils.test.ts index a57e393bf..4f4d18b0b 100644 --- a/tests/notifications/utils.test.ts +++ b/tests/notifications/utils.test.ts @@ -14,9 +14,8 @@ import * as testUtils from '../utils'; describe('Notifications utils', () => { const nodeId = testUtils.generateRandomNodeId(); const nodeIdEncoded = nodesUtils.encodeNodeId(nodeId); - const vaultId = vaultsUtils - .makeVaultId(IdInternal.fromString('vaultIdxxxxxxxxx')) - .toString(); + const vaultId = vaultUtils.generateVaultId(); + const vaultIdEncoded = vaultUtils.encodeVaultId(vaultId); test('generates notification ids', async () => { const generator = notificationsUtils.createNotificationIdGenerator(); @@ -62,7 +61,7 @@ describe('Notifications utils', () => { const vaultShareNotification: Notification = { data: { type: 'VaultShare', - vaultId: vaultId, + vaultId: vaultIdEncoded, vaultName: 'vaultName' as VaultName, actions: { clone: null, @@ -143,7 +142,7 @@ describe('Notifications utils', () => { const vaultShareNotification: Notification = { data: { type: 'VaultShare', - vaultId: vaultId, + vaultId: vaultIdEncoded, vaultName: 'vaultName' as VaultName, actions: { clone: null, @@ -237,7 +236,7 @@ describe('Notifications utils', () => { const vaultShareNotification: Notification = { data: { type: 'VaultShare', - vaultId: vaultId, + vaultId: vaultIdEncoded, vaultName: 'vaultName' as VaultName, actions: { clone: null, diff --git a/tests/vaults/VaultInternal.test.ts b/tests/vaults/VaultInternal.test.ts index defec75f8..cb721f348 100644 --- a/tests/vaults/VaultInternal.test.ts +++ b/tests/vaults/VaultInternal.test.ts @@ -1,606 +1,595 @@ -import type { Vault, VaultId, VaultKey } from '@/vaults/types'; +import type { VaultId } from '@/vaults/types'; +import type { Vault } from '@/vaults/Vault'; +import type { KeyManager } from '@/keys'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { EncryptedFS } from 'encryptedfs'; import { VaultInternal } from '@/vaults'; - -import { generateVaultId, generateVaultKey } from '@/vaults/utils'; +import { generateVaultId } from '@/vaults/utils'; import * as vaultsErrors from '@/vaults/errors'; import { sleep } from '@/utils'; -import { KeyManager, utils as keysUtils } from '@/keys'; -import * as testUtils from '../utils'; +import { utils as keysUtils } from '@/keys'; +import * as testsUtils from '../utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); describe('VaultInternal', () => { let dataDir: string; let dbPath: string; let vault: VaultInternal; - let dbKey: VaultKey; + let dbKey: Buffer; let vaultId: VaultId; let efs: EncryptedFS; const logger = new Logger('Vault', LogLevel.WARN, [new StreamHandler()]); - let keyManager: KeyManager; - let mockedGenerateKeyPair: jest.SpyInstance; - let mockedGenerateDeterministicKeyPair: jest.SpyInstance; - beforeEach(async () => { - const globalKeyPair = await testUtils.setupGlobalKeypair(); - mockedGenerateKeyPair = jest - .spyOn(keysUtils, 'generateKeyPair') - .mockResolvedValue(globalKeyPair); - mockedGenerateDeterministicKeyPair = jest - .spyOn(keysUtils, 'generateDeterministicKeyPair') - .mockResolvedValue(globalKeyPair); + const fakeKeyManager = { + getNodeId: () 
=> { + return testsUtils.generateRandomNodeId(); + }, + } as KeyManager; + const secret1 = { name: 'secret-1', content: 'secret-content-1' }; + const secret2 = { name: 'secret-2', content: 'secret-content-2' }; + beforeAll(async () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - dbKey = await generateVaultKey(); + dbKey = await keysUtils.generateKey(); dbPath = path.join(dataDir, 'db'); await fs.promises.mkdir(dbPath); - vaultId = generateVaultId(); efs = await EncryptedFS.createEncryptedFS({ dbPath, dbKey, logger, }); await efs.start(); - const keysPath = path.join(dataDir, 'KEYS'); - keyManager = await KeyManager.createKeyManager({ - keysPath, - password: 'password', - logger: logger, - }); + }); + + beforeEach(async () => { + vaultId = generateVaultId(); vault = await VaultInternal.create({ vaultId, - keyManager, + keyManager: fakeKeyManager, efs, logger, fresh: true, }); }); - afterEach(async () => { - mockedGenerateKeyPair.mockRestore(); - mockedGenerateDeterministicKeyPair.mockRestore(); - await vault.destroy(); + afterAll(async () => { await efs.stop(); await efs.destroy(); - await keyManager.stop(); - await keyManager.destroy(); await fs.promises.rm(dataDir, { force: true, recursive: true, }); }); - test('is type correct', async () => { - expect(vault).toBeInstanceOf(VaultInternal); - }); - - describe('version', () => { - test('can change to the current commit', async () => { - let commit = (await vault.log(1))[0]; - await vault.version(commit.oid); - const files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual([]); - await vault.commit(async (efs) => { - await efs.writeFile('test', 'testdata'); - }); - commit = (await vault.log(1))[0]; - await vault.version(commit.oid); - const file = await vault.access(async (efs) => { - return await efs.readFile('test', { encoding: 'utf8' }); - }); - expect(file).toBe('testdata'); - }); - test('can change commits and preserve the log with no intermediate vault mutation', async () => { - const initCommit = (await vault.log(1))[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('test1', 'testdata1'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test2', 'testdata2'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test3', 'testdata3'); - }); - await vault.version(initCommit); - const endCommit = (await vault.log(1))[0].oid; - let files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual([]); - await vault.version(endCommit); - files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual(['test1', 'test2', 'test3']); - }); - test( - 'does not allow changing to an unrecognised commit', - async () => { - await expect(() => vault.version('unrecognisedcommit')).rejects.toThrow( - vaultsErrors.ErrorVaultCommitUndefined, - ); - await vault.commit(async (efs) => { - await efs.writeFile('test1', 'testdata1'); - }); - const secondCommit = (await vault.log(1))[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('test2', 'testdata2'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test3', 'testdata3'); - }); - const fourthCommit = (await vault.log(1))[0].oid; - await vault.version(secondCommit); - await vault.commit(async (efs) => { - const fd = await efs.open('test3', 'w'); - await efs.write(fd, 
'testdata6', 3, 6); - await efs.close(fd); - }); - await vault.version(fourthCommit); - await vault.commit(async (efs) => { - await efs.writeFile('test4', 'testdata4'); - }); - }, - global.defaultTimeout * 2, - ); - test('can change to the HEAD commit', async () => { - const initCommit = (await vault.log(1))[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('test1', 'testdata1'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test2', 'testdata2'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test3', 'testdata3'); - }); - await vault.version(initCommit); - await vault.version('HEAD'); - let files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual(['test1', 'test2', 'test3']); - await vault.version(initCommit); - await vault.version('last'); - files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual(['test1', 'test2', 'test3']); - }); - test('adjusts HEAD after vault mutation, discarding forward and preserving backwards history', async () => { - const initCommit = (await vault.log(1))[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('test1', 'testdata1'); - }); - const secondCommit = (await vault.log(1))[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('test2', 'testdata2'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test3', 'testdata3'); - }); - await vault.version(secondCommit); - await vault.commit(async (efs) => { - await efs.writeFile('test4', 'testdata4'); - }); - let files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual(['test1', 'test4']); - await vault.version(initCommit); - files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual([]); - }); - }); - test('VaultInternal readiness', async () => { await vault.destroy(); await expect(async () => { await vault.log(); }).rejects.toThrow(vaultsErrors.ErrorVaultDestroyed); - await expect(async () => { - await vault.readWorkingDirectory(); - }).rejects.toThrow(vaultsErrors.ErrorVaultDestroyed); + }); + test('is type correct', async () => { + expect(vault).toBeInstanceOf(VaultInternal); }); test('creating state on disk', async () => { expect(await fs.promises.readdir(dataDir)).toContain('db'); }); - test('Accessing a change', async () => { - await vault.commit(async (efs) => { + test('accessing a change', async () => { + await vault.writeF(async (efs) => { await efs.writeFile('secret-1', 'secret-content'); }); - await vault.access(async (efs) => { + await vault.readF(async (efs) => { expect(await efs.readdir('.')).toContain('secret-1'); expect((await efs.readFile('secret-1')).toString()).toStrictEqual( 'secret-content', ); }); }); - test('Vault maintains data across VaultInternal instances', async () => { - await vault.commit(async (efs) => { + test('maintains data across VaultInternal instances', async () => { + await vault.writeF(async (efs) => { await efs.writeFile('secret-1', 'secret-content'); }); await vault.destroy(); vault = await VaultInternal.create({ vaultId, - keyManager, + keyManager: fakeKeyManager, efs, logger, fresh: false, }); - await vault.access(async (efs) => { + await vault.readF(async (efs) => { expect((await efs.readFile('secret-1')).toString()).toStrictEqual( 'secret-content', ); }); }); - describe('Writing operations', () => { - const secret1 = { name: 'secret-1', content: 'secret-content-1' }; - 
const secret2 = { name: 'secret-2', content: 'secret-content-2' }; - test('Write operation allowed', async () => { - await vault.commit(async (efs) => { - await efs.writeFile('secret-1', 'secret-content'); - }); - }); - test('Concurrent write operations prevented', async () => { - await Promise.all([ - vault.commit(async (efs) => { - await efs.writeFile('secret-1', 'secret-content-1'); - }), - vault.commit(async (efs) => { - await efs.writeFile('secret-2', 'secret-content-2'); - }), - vault.commit(async (efs) => { - await efs.writeFile('secret-3', 'secret-content-3'); - }), - ]); - - await vault.access(async (efs) => { - const directory = await efs.readdir('.'); - expect(directory).toContain('secret-1'); - expect(directory).toContain('secret-2'); - expect(directory).toContain('secret-3'); - }); - const log = await vault.log(); - expect(log.length).toEqual(4); - }); - test('Write locks read', async () => { - await vault.commit(async (efs) => { - await efs.writeFile('secret-1', 'secret-content'); - }); - - await Promise.all([ - vault.commit(async (efs) => { - await efs.writeFile('secret-1', 'SUPER-DUPER-SECRET-CONTENT'); - }), - vault.access(async (efs) => { - expect((await efs.readFile('secret-1')).toString()).toEqual( - 'SUPER-DUPER-SECRET-CONTENT', - ); - }), - ]); - }); - test('Commit added if mutation in write', async () => { - const commit = (await vault.log())[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('secret-1', 'secret-content'); - }); - const log = await vault.log(); - expect(log).toHaveLength(2); - expect(log[0].message).toContain('secret-1'); - expect(log[0].oid).not.toStrictEqual(commit); - }); - test('No commit added if no mutation in write', async () => { - const commit = (await vault.log())[0].oid; - await vault.commit(async (_efs) => {}); - const log = await vault.log(); - expect(log).toHaveLength(1); - expect(log[0].message).not.toContain('secret-1'); - expect(log[0].oid).toStrictEqual(commit); - }); - test('Commit message contains all actions made in the commit', async () => { - // Adding - await vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await efs.writeFile(secret2.name, secret2.content); - }); - let log = await vault.log(); - expect(log[0].message).toContain(`${secret1.name} added`); - expect(log[0].message).toContain(`${secret2.name} added`); - // Checking contents - await vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - expect((await efs.readFile(secret2.name)).toString()).toEqual( - secret2.content, - ); - }); - - // Modifying - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, `${secret2.content} new content`); - }); - log = await vault.log(); - expect(log[0].message).toContain(`${secret2.name} modified`); - // Checking changes - await vault.access(async (efs) => { - expect((await efs.readFile(secret2.name)).toString()).toEqual( - `${secret2.content} new content`, + test('can change to the current commit', async () => { + let commit = (await vault.log(undefined, 1))[0]; + await vault.version(commit.commitId); + const files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual([]); + await vault.writeF(async (efs) => { + await efs.writeFile('test', 'testdata'); + }); + commit = (await vault.log(undefined, 1))[0]; + await vault.version(commit.commitId); + const file = await vault.readF(async (efs) => { + return await efs.readFile('test', { encoding: 'utf8' }); + 
}); + expect(file).toBe('testdata'); + }); + test('can change commits and preserve the log with no intermediate vault mutation', async () => { + const initCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test1', 'testdata1'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test2', 'testdata2'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test3', 'testdata3'); + }); + await vault.version(initCommit); + const endCommit = (await vault.log(undefined, 1))[0].commitId; + let files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual([]); + await vault.version(endCommit); + files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual(['test1', 'test2', 'test3']); + }); + test('does not allow changing to an unrecognised commit', async () => { + await expect(() => vault.version('unrecognisedcommit')).rejects.toThrow( + vaultsErrors.ErrorVaultReferenceMissing, + ); + await vault.writeF(async (efs) => { + await efs.writeFile('test1', 'testdata1'); + }); + const secondCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test2', 'testdata2'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test3', 'testdata3'); + }); + const fourthCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.version(secondCommit); + await vault.writeF(async (efs) => { + const fd = await efs.open('test3', 'w'); + await efs.write(fd, 'testdata6', 3, 6); + await efs.close(fd); + }); + await vault.version(fourthCommit); + await vault.writeF(async (efs) => { + await efs.writeFile('test4', 'testdata4'); + }); + }); + test('can change to the HEAD commit', async () => { + const initCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test1', 'testdata1'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test2', 'testdata2'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test3', 'testdata3'); + }); + await vault.version(initCommit); + await vault.version('HEAD'); + let files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual(['test1', 'test2', 'test3']); + await vault.version(initCommit); + await vault.version('last'); + files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual(['test1', 'test2', 'test3']); + }); + test('adjusts HEAD after vault mutation, discarding forward and preserving backwards history', async () => { + const initCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test1', 'testdata1'); + }); + const secondCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test2', 'testdata2'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test3', 'testdata3'); + }); + await vault.version(secondCommit); + await vault.writeF(async (efs) => { + await efs.writeFile('test4', 'testdata4'); + }); + let files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual(['test1', 'test4']); + await vault.version(initCommit); + files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual([]); + }); + test('write operation 
allowed', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret-content'); + }); + }); + test('concurrent write operations prevented', async () => { + await Promise.all([ + vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret-content-1'); + }), + vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret-content-2'); + }), + vault.writeF(async (efs) => { + await efs.writeFile('secret-3', 'secret-content-3'); + }), + ]); + + await vault.readF(async (efs) => { + const directory = await efs.readdir('.'); + expect(directory).toContain('secret-1'); + expect(directory).toContain('secret-2'); + expect(directory).toContain('secret-3'); + }); + const log = await vault.log(); + expect(log.length).toEqual(4); + }); + test('write locks read', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret-content'); + }); + + await Promise.all([ + vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'SUPER-DUPER-SECRET-CONTENT'); + }), + vault.readF(async (efs) => { + expect((await efs.readFile('secret-1')).toString()).toEqual( + 'SUPER-DUPER-SECRET-CONTENT', ); - }); - - // Moving and removing - await vault.commit(async (efs) => { - await efs.rename(secret1.name, `${secret1.name}-new`); - await efs.unlink(secret2.name); - }); - // Checking changes. - await vault.access(async (efs) => { - expect(await efs.exists(secret1.name)).toBeFalsy(); - expect(await efs.exists(`${secret1.name}-new`)).toBeTruthy(); - expect(await efs.exists(secret2.name)).toBeFalsy(); - }); - - log = await vault.log(); - expect(log[0].message).toContain(`${secret1.name}-new added`); - expect(log[0].message).toContain(`${secret1.name} deleted`); - expect(log[0].message).toContain(`${secret2.name} deleted`); - }); - test('No mutation to vault when part of a commit operation fails', async () => { - // Failing commit operation - await expect(() => - vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await efs.rename('notValid', 'randomName'); // Throws - }), - ).rejects.toThrow(); - - // Make sure secret1 wasn't written when the above commit failed. - await vault.access(async (efs) => { - expect(await efs.readdir('.')).not.toContain(secret1.name); - }); - - // No new commit. - expect(await vault.log()).toHaveLength(1); - - // Succeeding commit operation. - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - }); - - // Secret 1 shouldn't exist while secret2 exists. - await vault.access(async (efs) => { - const directory = await efs.readdir('.'); - expect(directory).not.toContain(secret1.name); // - expect(directory).toContain(secret2.name); - }); - - // Has a new commit. - expect(await vault.log()).toHaveLength(2); - }); - test('Locking occurs when making a commit.', async () => { - // We want to check if the locking is happening. so we need a way to see if an operation is being blocked. - - let resolveDelay; - const delayPromise = new Promise((resolve, _reject) => { - resolveDelay = resolve; - }); - let firstCommitResolved = false; - let firstCommitResolveTime; - - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); - - const commit1 = vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await delayPromise; // Hold the lock hostage. 
- firstCommitResolved = true; - firstCommitResolveTime = Date.now(); - }); - - // Now that we are holding the lock hostage, - // @ts-ignore - expect(vault.lock.isLocked()).toBeTruthy(); - // We want to check if any action resolves before the lock is released. - - let secondCommitResolved = false; - let secondCommitResolveTime; - const commit2 = vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - secondCommitResolved = true; - await sleep(2); - secondCommitResolveTime = Date.now(); - }); - - // Give plenty of time for a commit to resolve. - await sleep(200); - - // Now we want to check for the expected conditions. - // 1. Both commist have not completed. - // commit 1 is holding the lock. - expect(firstCommitResolved).toBeFalsy(); - expect(secondCommitResolved).toBeFalsy(); - - // 2. We release the hostage so both should resolve. - await sleep(200); - resolveDelay(); - await commit1; - await commit2; - expect(firstCommitResolved).toBeTruthy(); - expect(secondCommitResolved).toBeTruthy(); - expect(secondCommitResolveTime).toBeGreaterThan(firstCommitResolveTime); - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); - - // Commit order should be commit2 -> commit1 -> init - const log = await vault.log(); - expect(log[0].message).toContain(secret2.name); - expect(log[1].message).toContain(secret1.name); + }), + ]); + }); + test('commit added if mutation in write', async () => { + const commit = (await vault.log())[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret-content'); }); + const log = await vault.log(); + expect(log).toHaveLength(2); + expect(log[0].message).toContain('secret-1'); + expect(log[0].commitId).not.toStrictEqual(commit); + }); + test('no commit added if no mutation in write', async () => { + const commit = (await vault.log())[0].commitId; + await vault.writeF(async (_efs) => {}); + const log = await vault.log(); + expect(log).toHaveLength(1); + expect(log[0].message).not.toContain('secret-1'); + expect(log[0].commitId).toStrictEqual(commit); }); - describe('Reading operations', () => { - const secret1 = { name: 'secret-1', content: 'secret-content-1' }; - const secret2 = { name: 'secret-2', content: 'secret-content-2' }; + test('commit message contains all actions made in the commit', async () => { + // Adding + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); + }); + let log = await vault.log(); + expect(log[0].message).toContain(`${secret1.name} added`); + expect(log[0].message).toContain(`${secret2.name} added`); + // Checking contents + await vault.readF(async (efs) => { + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); + expect((await efs.readFile(secret2.name)).toString()).toEqual( + secret2.content, + ); + }); - beforeEach(async () => { - await vault.commit(async (efs) => { + // Modifying + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, `${secret2.content} new content`); + }); + log = await vault.log(); + expect(log[0].message).toContain(`${secret2.name} modified`); + // Checking changes + await vault.readF(async (efs) => { + expect((await efs.readFile(secret2.name)).toString()).toEqual( + `${secret2.content} new content`, + ); + }); + + // Moving and removing + await vault.writeF(async (efs) => { + await efs.rename(secret1.name, `${secret1.name}-new`); + await efs.unlink(secret2.name); + }); + // Checking changes. 
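+ // A rename is expected to surface in the vault log as an add of the new name plus a delete of the old name.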
+ await vault.readF(async (efs) => { + expect(await efs.exists(secret1.name)).toBeFalsy(); + expect(await efs.exists(`${secret1.name}-new`)).toBeTruthy(); + expect(await efs.exists(secret2.name)).toBeFalsy(); + }); + + log = await vault.log(); + expect(log[0].message).toContain(`${secret1.name}-new added`); + expect(log[0].message).toContain(`${secret1.name} deleted`); + expect(log[0].message).toContain(`${secret2.name} deleted`); + }); + test('no mutation to vault when part of a commit operation fails', async () => { + // Failing commit operation + await expect(() => + vault.writeF(async (efs) => { await efs.writeFile(secret1.name, secret1.content); + await efs.rename('notValid', 'randomName'); // Throws + }), + ).rejects.toThrow(); + + // Make sure secret1 wasn't written when the above commit failed. + await vault.readF(async (efs) => { + expect(await efs.readdir('.')).not.toContain(secret1.name); }); + + // No new commit. + expect(await vault.log()).toHaveLength(1); + + // Succeeding commit operation. + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + }); + + // Secret 1 shouldn't exist while secret2 exists. + await vault.readF(async (efs) => { + const directory = await efs.readdir('.'); + expect(directory).not.toContain(secret1.name); + expect(directory).toContain(secret2.name); + }); + + // Has a new commit. + expect(await vault.log()).toHaveLength(2); + }); + test('locking occurs when making a commit.', async () => { + // We want to check that locking is happening, so we need a way to see if an operation is being blocked. + + let resolveDelay; + const delayPromise = new Promise((resolve, _reject) => { + resolveDelay = resolve; + }); + let firstCommitResolved = false; + let firstCommitResolveTime; + + // @ts-ignore + expect(vault.lock.isLocked()).toBeFalsy(); + + const commit1 = vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await delayPromise; // Hold the lock hostage. + firstCommitResolved = true; + firstCommitResolveTime = Date.now(); + }); + + // Now that we are holding the lock hostage, + // @ts-ignore + expect(vault.lock.isLocked()).toBeTruthy(); + // We want to check if any action resolves before the lock is released. + + let secondCommitResolved = false; + let secondCommitResolveTime; + const commit2 = vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + secondCommitResolved = true; + await sleep(2); + secondCommitResolveTime = Date.now(); + }); + + // Give plenty of time for a commit to resolve. + await sleep(200); + + // Now we want to check for the expected conditions. + // 1. Both commits have not completed. + // commit 1 is holding the lock. + expect(firstCommitResolved).toBeFalsy(); + expect(secondCommitResolved).toBeFalsy(); + + // 2. We release the hostage so both should resolve.
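+ // Once released, commit2 can only acquire the lock after commit1 finishes, so it must resolve later.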
+ await sleep(200); + resolveDelay(); + await commit1; + await commit2; + expect(firstCommitResolved).toBeTruthy(); + expect(secondCommitResolved).toBeTruthy(); + expect(secondCommitResolveTime).toBeGreaterThan(firstCommitResolveTime); + // @ts-ignore + expect(vault.lock.isLocked()).toBeFalsy(); + + // Commit order should be commit2 -> commit1 -> init + const log = await vault.log(); + expect(log[0].message).toContain(secret2.name); + expect(log[1].message).toContain(secret1.name); + }); + test('read operation allowed', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); + }); + await vault.readF(async (efs) => { + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); + }); + }); + test('concurrent read operations allowed', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); + }); + await vault.readF(async (efs) => { + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); + expect((await efs.readFile(secret2.name)).toString()).toEqual( + secret2.content, + ); + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); }); - test('Concurrent read operations allowed', async () => { - await vault.access(async (efs) => { + + await Promise.all([ + vault.readF(async (efs) => { expect((await efs.readFile(secret1.name)).toString()).toEqual( secret1.content, ); + }), + vault.readF(async (efs) => { expect((await efs.readFile(secret2.name)).toString()).toEqual( secret2.content, ); + }), + vault.readF(async (efs) => { expect((await efs.readFile(secret1.name)).toString()).toEqual( secret1.content, ); - }); - - await Promise.all([ - vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - }), - vault.access(async (efs) => { - expect((await efs.readFile(secret2.name)).toString()).toEqual( - secret2.content, - ); - }), - vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - }), - ]); - }); - test('Read locks write', async () => { - await Promise.all([ - vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - }), - vault.commit(async (efs) => { - await efs.writeFile(secret1.name, 'NEW-CONTENT'); - }), - vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - 'NEW-CONTENT', - ); - }), - ]); - }); - test('No commit after read', async () => { - const commit = (await vault.log())[0].oid; - await vault.access(async (efs) => { + }), + ]); + }); + test('read locks write', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); + }); + await Promise.all([ + vault.readF(async (efs) => { expect((await efs.readFile(secret1.name)).toString()).toEqual( secret1.content, ); - }); - const log = await vault.log(); - expect(log).toHaveLength(2); - expect(log[0].oid).toStrictEqual(commit); - }); - test('Locking occurs when making an access.', async () => { - // We want to check if the locking is happening. so we need a way to see if an operation is being blocked. 
- let resolveDelay; - const delayPromise = new Promise((resolve, _reject) => { - resolveDelay = resolve; - }); - let firstCommitResolved = false; - let firstCommitResolveTime; - - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); - - const commit1 = vault.access(async (efs) => { - await efs.readFile(secret1.name); - await delayPromise; // Hold the lock hostage. - firstCommitResolved = true; - firstCommitResolveTime = Date.now(); - }); - - // Now that we are holding the lock hostage, - // we want to check if any action resolves before the lock is released. - // @ts-ignore - expect(vault.lock.isLocked()).toBeTruthy(); - - let secondCommitResolved = false; - let secondCommitResolveTime; - const commit2 = vault.access(async (efs) => { - await efs.readFile(secret2.name); - secondCommitResolved = true; - await sleep(10); - secondCommitResolveTime = Date.now(); - }); - - // Give plenty of time for a commit to resolve. - await sleep(200); - - // Now we want to check for the expected conditions. - // 1. Both commist have not completed. - // commit 1 is holding the lock. - expect(firstCommitResolved).toBeFalsy(); - expect(secondCommitResolved).toBeFalsy(); - - // 2. We release the hostage so both should resolve. - await sleep(200); - resolveDelay(); - await commit1; - await commit2; - expect(firstCommitResolved).toBeTruthy(); - expect(secondCommitResolved).toBeTruthy(); - expect(secondCommitResolveTime).toBeGreaterThan(firstCommitResolveTime); - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); + }), + vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, 'NEW-CONTENT'); + }), + vault.readF(async (efs) => { + expect((await efs.readFile(secret1.name)).toString()).toEqual( + 'NEW-CONTENT', + ); + }), + ]); + }); + test('no commit after read', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); + }); + const commit = (await vault.log())[0].commitId; + await vault.readF(async (efs) => { + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); + }); + const log = await vault.log(); + expect(log).toHaveLength(2); + expect(log[0].commitId).toStrictEqual(commit); + }); + test('locking occurs when making an access.', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); + }); + // We want to check that locking is happening, so we need a way to see if an operation is being blocked. + let resolveDelay; + const delayPromise = new Promise((resolve, _reject) => { + resolveDelay = resolve; + }); + let firstCommitResolved = false; + let firstCommitResolveTime; + + // @ts-ignore + expect(vault.lock.isLocked()).toBeFalsy(); + + const commit1 = vault.readF(async (efs) => { + await efs.readFile(secret1.name); + await delayPromise; // Hold the lock hostage. + firstCommitResolved = true; + firstCommitResolveTime = Date.now(); }); + + // Now that we are holding the lock hostage, + // we want to check if any action resolves before the lock is released. + // @ts-ignore + expect(vault.lock.isLocked()).toBeTruthy(); + + let secondCommitResolved = false; + let secondCommitResolveTime; + const commit2 = vault.readF(async (efs) => { + await efs.readFile(secret2.name); + secondCommitResolved = true; + await sleep(10); + secondCommitResolveTime = Date.now(); + }); + + // Give plenty of time for a commit to resolve.
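+ // If reads were not serialised behind the same lock, commit2 would already have resolved by this point.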
+ await sleep(200); + + // Now we want to check for the expected conditions. + // 1. Both commits have not completed. + // commit 1 is holding the lock. + expect(firstCommitResolved).toBeFalsy(); + expect(secondCommitResolved).toBeFalsy(); + + // 2. We release the hostage so both should resolve. + await sleep(200); + resolveDelay(); + await commit1; + await commit2; + expect(firstCommitResolved).toBeTruthy(); + expect(secondCommitResolved).toBeTruthy(); + expect(secondCommitResolveTime).toBeGreaterThan(firstCommitResolveTime); + // @ts-ignore + expect(vault.lock.isLocked()).toBeFalsy(); }); - test('Vault only exposes limited commands of VaultInternal', async () => { + test('only exposes limited commands of VaultInternal', async () => { // Converting a vault to the interface const vaultInterface = vault as Vault; // Using the available functions. - await vaultInterface.commit(async (efs) => { + await vaultInterface.writeF(async (efs) => { await efs.writeFile('test', 'testContent'); }); - await vaultInterface.access(async (efs) => { + await vaultInterface.readF(async (efs) => { const content = (await efs.readFile('test')).toString(); expect(content).toStrictEqual('testContent'); }); - expect(vaultInterface.baseDir).toBeTruthy(); - expect(vaultInterface.gitDir).toBeTruthy(); + expect(vaultInterface.vaultDataDir).toBeTruthy(); + expect(vaultInterface.vaultGitDir).toBeTruthy(); expect(vaultInterface.vaultId).toBeTruthy(); - expect(vaultInterface.commit).toBeTruthy(); - expect(vaultInterface.access).toBeTruthy(); + expect(vaultInterface.writeF).toBeTruthy(); + expect(vaultInterface.writeG).toBeTruthy(); + expect(vaultInterface.readF).toBeTruthy(); + expect(vaultInterface.readG).toBeTruthy(); expect(vaultInterface.log).toBeTruthy(); expect(vaultInterface.version).toBeTruthy(); @@ -608,4 +597,26 @@ describe('VaultInternal', () => { const vaultNormal = vaultInterface as VaultInternal; expect(vaultNormal.destroy).toBeTruthy(); // This exists again.
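+ // Casting back to VaultInternal re-exposes lifecycle methods such as destroy that the Vault interface hides.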
}); + test('cannot commit when the remote field is set', async () => { + await vault.destroy(); + vault = await VaultInternal.create({ + vaultId, + keyManager: fakeKeyManager, + efs, + logger, + remote: true, + fresh: true, + }); + const commit = (await vault.log(undefined, 1))[0]; + await vault.version(commit.commitId); + const files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual([]); + await expect( + vault.writeF(async (efs) => { + await efs.writeFile('test', 'testdata'); + }), + ).rejects.toThrow(vaultsErrors.ErrorVaultImmutable); + }); }); diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index 8235ad70e..ca0b5f89f 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -1,294 +1,269 @@ -import type { NodeId, NodeAddress, NodeInfo } from '@/nodes/types'; -import type { Host, Port, TLSConfig } from '@/network/types'; -import type { VaultId, VaultKey, VaultName } from '@/vaults/types'; -import type { ChainData } from '@/sigchain/types'; -import type { IAgentServiceServer } from '@/proto/js/polykey/v1/agent_service_grpc_pb'; +import type { NodeId, NodeIdEncoded } from '@/nodes/types'; +import type { VaultId, VaultName } from '@/vaults/types'; +import type { GestaltGraph } from '@/gestalts'; +import type { ACL } from '@/acl'; +import type { NotificationsManager } from '@/notifications'; +import type { VaultInternal } from '@/vaults'; +import type { KeyManager } from '@/keys'; +import type { NodeConnectionManager, NodeManager } from '@/nodes'; +import type { NodeAddress } from '@/nodes/types'; +import fs from 'fs'; import os from 'os'; import path from 'path'; -import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import { IdInternal, utils as idUtils } from '@matrixai/id'; import { DB } from '@matrixai/db'; -import { utils as idUtils } from '@matrixai/id'; - -import { KeyManager, utils as keysUtils } from '@/keys'; -import { NodeConnectionManager, NodeGraph, NodeManager } from '@/nodes'; -import { Sigchain } from '@/sigchain'; +import { utils as keysUtils } from '@/keys'; +import { PolykeyAgent } from '@'; import { VaultManager, vaultOps } from '@/vaults'; -import { ACL } from '@/acl'; -import { GestaltGraph } from '@/gestalts'; -import { ForwardProxy, ReverseProxy } from '@/network'; -import GRPCServer from '@/grpc/GRPCServer'; -import { AgentServiceService, createAgentService } from '@/agent'; -import { NotificationsManager } from '@/notifications'; - import { errors as vaultErrors } from '@/vaults'; -import { utils as vaultUtils } from '@/vaults'; import { utils as nodesUtils } from '@/nodes'; -import * as testUtils from '../utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); describe('VaultManager', () => { - const password = 'password'; const logger = new Logger('VaultManager Test', LogLevel.WARN, [ new StreamHandler(), ]); - const nonExistantVaultId = idUtils.fromString('DoesNotExist') as VaultId; - let dataDir: string; - let vaultsPath: string; - let vaultsKey: VaultKey; - let keyManager: KeyManager; - let db: DB; - let acl: ACL; + const nonExistentVaultId = IdInternal.fromString('DoesNotExistxxxx'); + const password = 'password'; let gestaltGraph: GestaltGraph; - let nodeGraph: NodeGraph; - let nodeConnectionManager: NodeConnectionManager; - let nodeManager: NodeManager; let vaultManager: VaultManager; 
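+ // These tests drive a real VaultManager inside a local keynode, with two remote keynodes available for sharing, cloning and pulling.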
- let sigchain: Sigchain; + let keyManager: KeyManager; + let remoteVaultId: VaultId; - // FIXME, try not to do this, they can all have the localhost, - // but use the generated port when the server is started. - const sourceHost = '127.0.0.1' as Host; - const sourcePort = 11112 as Port; - const targetHost = '127.0.0.2' as Host; - const targetPort = 11113 as Port; - const altHost = '127.0.0.3' as Host; - const altPort = 11114 as Port; - const altHostIn = '127.0.0.4' as Host; - const altPortIn = 11115 as Port; + let localKeynodeId: NodeId; + let localKeynodeIdEncoded: NodeIdEncoded; + let remoteKeynode1Id: NodeId; + let remoteKeynode1IdEncoded: NodeIdEncoded; + let remoteKeynode2Id: NodeId; + let remoteKeynode2IdEncoded: NodeIdEncoded; - let fwdProxy: ForwardProxy; - let revProxy: ReverseProxy; - let altRevProxy: ReverseProxy; + const secretNames = ['Secret1', 'Secret2', 'Secret3', 'Secret4']; const vaultName = 'TestVault' as VaultName; const secondVaultName = 'SecondTestVault' as VaultName; const thirdVaultName = 'ThirdTestVault' as VaultName; - let mockedGenerateKeyPair: jest.SpyInstance; - let mockedGenerateDeterministicKeyPair: jest.SpyInstance; + let localKeynode: PolykeyAgent; + let remoteKeynode1: PolykeyAgent, remoteKeynode2: PolykeyAgent; - beforeAll(async () => { - const globalKeyPair = await testUtils.setupGlobalKeypair(); - mockedGenerateKeyPair = jest - .spyOn(keysUtils, 'generateKeyPair') - .mockResolvedValue(globalKeyPair); - mockedGenerateDeterministicKeyPair = jest - .spyOn(keysUtils, 'generateDeterministicKeyPair') - .mockResolvedValue(globalKeyPair); + let allDataDir: string; - fwdProxy = new ForwardProxy({ - authToken: 'abc', - logger: logger, - }); - revProxy = new ReverseProxy({ - logger: logger, - }); - altRevProxy = new ReverseProxy({ - logger: logger, - }); - }); - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp( + beforeAll(async () => { + // Creating agents. 
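+ // One local keynode and two remote keynodes are created, each with its own node path under a shared temporary directory.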
+ allDataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); - const keysPath = path.join(dataDir, 'keys'); - const dbPath = path.join(dataDir, 'db'); - vaultsPath = path.join(dataDir, 'vaults'); - vaultsKey = await vaultUtils.generateVaultKey(); - keyManager = await KeyManager.createKeyManager({ + localKeynode = await PolykeyAgent.createPolykeyAgent({ password, - keysPath: keysPath, - logger: logger, + logger: logger.getChild('Local Keynode'), + nodePath: path.join(allDataDir, 'localKeynode'), }); + gestaltGraph = localKeynode.gestaltGraph; + vaultManager = localKeynode.vaultManager; + keyManager = localKeynode.keyManager; + localKeynodeId = localKeynode.keyManager.getNodeId(); + localKeynodeIdEncoded = nodesUtils.encodeNodeId(localKeynodeId); - await fwdProxy.start({ - tlsConfig: { - keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, - certChainPem: await keyManager.getRootCertChainPem(), - }, - egressHost: sourceHost, - egressPort: sourcePort, + remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 1'), + nodePath: path.join(allDataDir, 'remoteKeynode1'), }); - - db = await DB.createDB({ - dbPath: dbPath, - logger: logger, - crypto: { - key: keyManager.dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, - }, - }, + remoteKeynode1Id = remoteKeynode1.keyManager.getNodeId(); + remoteKeynode1IdEncoded = nodesUtils.encodeNodeId(remoteKeynode1Id); + remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 2'), + nodePath: path.join(allDataDir, 'remoteKeynode2'), }); + remoteKeynode2Id = remoteKeynode2.keyManager.getNodeId(); + remoteKeynode2IdEncoded = nodesUtils.encodeNodeId(remoteKeynode2Id); - sigchain = await Sigchain.createSigchain({ - keyManager: keyManager, - db: db, - logger: logger, + // Adding details to each agent. 
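+ // Each keynode records the ingress host and port of the other two so they can connect to one another.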
+ await localKeynode.nodeManager.setNode(remoteKeynode1Id, { + host: remoteKeynode1.revProxy.getIngressHost(), + port: remoteKeynode1.revProxy.getIngressPort(), }); - nodeGraph = await NodeGraph.createNodeGraph({ - db: db, - keyManager: keyManager, - logger: logger, + await localKeynode.nodeManager.setNode(remoteKeynode2Id, { + host: remoteKeynode2.revProxy.getIngressHost(), + port: remoteKeynode2.revProxy.getIngressPort(), }); - nodeConnectionManager = new NodeConnectionManager({ - keyManager, - nodeGraph, - fwdProxy: fwdProxy, - revProxy: revProxy, - logger: logger, + await remoteKeynode1.nodeManager.setNode(localKeynodeId, { + host: localKeynode.revProxy.getIngressHost(), + port: localKeynode.revProxy.getIngressPort(), }); - await nodeConnectionManager.start(); - nodeManager = new NodeManager({ - db: db, - sigchain: sigchain, - keyManager: keyManager, - nodeGraph: nodeGraph, - nodeConnectionManager: nodeConnectionManager, - logger: logger, + await remoteKeynode1.nodeManager.setNode(remoteKeynode2Id, { + host: remoteKeynode2.revProxy.getIngressHost(), + port: remoteKeynode2.revProxy.getIngressPort(), }); - - acl = await ACL.createACL({ - db: db, - logger: logger, + await remoteKeynode2.nodeManager.setNode(localKeynodeId, { + host: localKeynode.revProxy.getIngressHost(), + port: localKeynode.revProxy.getIngressPort(), }); - - gestaltGraph = await GestaltGraph.createGestaltGraph({ - db: db, - acl: acl, - logger: logger, + await remoteKeynode2.nodeManager.setNode(remoteKeynode1Id, { + host: remoteKeynode1.revProxy.getIngressHost(), + port: remoteKeynode1.revProxy.getIngressPort(), }); - vaultManager = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath, - vaultsKey, - nodeConnectionManager, - db, - acl: acl, - gestaltGraph: gestaltGraph, - fs, - logger: logger, - fresh: true, + await gestaltGraph.setNode({ + id: remoteKeynode1IdEncoded, + chain: {}, + }); + await gestaltGraph.setNode({ + id: remoteKeynode2IdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setNode({ + id: localKeynodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setNode({ + id: remoteKeynode2IdEncoded, + chain: {}, + }); + await remoteKeynode2.gestaltGraph.setNode({ + id: localKeynodeIdEncoded, + chain: {}, }); + await remoteKeynode2.gestaltGraph.setNode({ + id: remoteKeynode1IdEncoded, + chain: {}, + }); + + remoteVaultId = await remoteKeynode1.vaultManager.createVault(vaultName); + await remoteKeynode1.vaultManager.shareVault(remoteVaultId, localKeynodeId); + await remoteKeynode1.vaultManager.shareVault( + remoteVaultId, + remoteKeynode2Id, + ); + + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + for (const secret of secretNames.slice(0, 2)) { + await vaultOps.addSecret(remoteVault, secret, 'success?'); + } + }, + ); }); + afterEach(async () => { - await vaultManager.stop(); - await gestaltGraph.stop(); - await acl.stop(); - await db.stop(); - await nodeConnectionManager.stop(); - await nodeGraph.stop(); - await keyManager.stop(); - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); + for (const [, vaultId] of await vaultManager.listVaults()) { + await vaultManager.destroyVault(vaultId); + } + for (const [, vaultId] of await remoteKeynode2.vaultManager.listVaults()) { + await remoteKeynode2.vaultManager.destroyVault(vaultId); + } }); + afterAll(async () => { - mockedGenerateKeyPair.mockRestore(); - mockedGenerateDeterministicKeyPair.mockRestore(); - await fwdProxy.stop(); + await 
remoteKeynode2.stop(); + await remoteKeynode2.destroy(); + await remoteKeynode1.stop(); + await remoteKeynode1.destroy(); + await localKeynode.stop(); + await localKeynode.destroy(); + await fs.promises.rm(allDataDir, { + recursive: true, + force: true, + }); }); test('VaultManager readiness', async () => { - await expect(vaultManager.destroy()).rejects.toThrow( + const dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + const db = await DB.createDB({ + dbPath: path.join(dataDir, 'DB'), + crypto: { + key: await keysUtils.generateKey(), + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, + logger: logger.getChild(DB.name), + }); + const vaultManagerReadiness = await VaultManager.createVaultManager({ + vaultsPath: path.join(dataDir, 'VAULTS'), + keyManager: {} as KeyManager, + nodeManager: {} as NodeManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + + await expect(vaultManagerReadiness.destroy()).rejects.toThrow( vaultErrors.ErrorVaultManagerRunning, ); // Should be a noop - await vaultManager.start(); - await vaultManager.stop(); - await vaultManager.destroy(); - await expect(vaultManager.start()).rejects.toThrow( + await vaultManagerReadiness.start(); + await vaultManagerReadiness.stop(); + await vaultManagerReadiness.destroy(); + await expect(vaultManagerReadiness.start()).rejects.toThrow( vaultErrors.ErrorVaultManagerDestroyed, ); await expect(async () => { - await vaultManager.listVaults(); + await vaultManagerReadiness.listVaults(); }).rejects.toThrow(vaultErrors.ErrorVaultManagerNotRunning); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); }); test('is type correct', () => { expect(vaultManager).toBeInstanceOf(VaultManager); }); - test( - 'can create many vaults and open a vault', - async () => { - const vault = await vaultManager.createVault(vaultName); - const theVault = await vaultManager.openVault(vault.vaultId); - expect(vault).toBe(theVault); - await expect(() => - vaultManager.openVault(nonExistantVaultId), - ).rejects.toThrow(vaultErrors.ErrorVaultUndefined); - const vaultNames = [ - 'Vault1', - 'Vault2', - 'Vault3', - 'Vault4', - 'Vault5', - 'Vault6', - 'Vault7', - 'Vault8', - 'Vault9', - 'Vault10', - 'Vault11', - 'Vault12', - 'Vault13', - 'Vault14', - 'Vault15', - 'Vault16', - 'Vault17', - 'Vault18', - 'Vault19', - 'Vault20', - ]; - for (const vaultName of vaultNames) { - await vaultManager.createVault(vaultName as VaultName); - } - expect((await vaultManager.listVaults()).size).toEqual( - vaultNames.length + 1, - ); - }, - global.defaultTimeout * 2, - ); - test('can open the same vault twice and perform mutations', async () => { - const vault = await vaultManager.createVault(vaultName); - const vaultCopyOne = await vaultManager.openVault(vault.vaultId); - const vaultCopyTwo = await vaultManager.openVault(vault.vaultId); - expect(vaultCopyOne).toBe(vaultCopyTwo); - await vaultCopyOne.commit(async (efs) => { - await efs.writeFile('test', 'test'); - }); - const read = await vaultCopyTwo.access(async (efs) => { - return (await efs.readFile('test', { encoding: 'utf8' })) as string; - }); - expect(read).toBe('test'); + test('can create many vaults and open a vault', async () => { + const vaultNames = [ + 'Vault1', + 'Vault2', + 'Vault3', + 'Vault4', + 'Vault5', + 'Vault6', + 'Vault7', + 'Vault8', + 
'Vault9', + 'Vault10', + 'Vault11', + 'Vault12', + 'Vault13', + 'Vault14', + 'Vault15', + ]; + for (const vaultName of vaultNames) { + await vaultManager.createVault(vaultName as VaultName); + } + expect((await vaultManager.listVaults()).size).toEqual(vaultNames.length); }); test('can rename a vault', async () => { - const vault = await vaultManager.createVault(vaultName); - await vaultManager.renameVault(vault.vaultId, secondVaultName as VaultName); - await expect(vaultManager.openVault(vault.vaultId)).resolves.toBe(vault); + const vaultId = await vaultManager.createVault(vaultName); + await vaultManager.renameVault(vaultId, secondVaultName); await expect(vaultManager.getVaultId(vaultName)).resolves.toBeUndefined(); await expect( vaultManager.getVaultId(secondVaultName), - ).resolves.toStrictEqual(vault.vaultId); + ).resolves.toStrictEqual(vaultId); await expect(() => - vaultManager.renameVault(nonExistantVaultId, 'DNE' as VaultName), - ).rejects.toThrow(vaultErrors.ErrorVaultUndefined); + vaultManager.renameVault(nonExistentVaultId, 'DNE' as VaultName), + ).rejects.toThrow(vaultErrors.ErrorVaultsVaultUndefined); }); test('can delete a vault', async () => { - const firstVault = await vaultManager.createVault(vaultName); - const secondVault = await vaultManager.createVault(secondVaultName); - await vaultManager.destroyVault(secondVault.vaultId); - await expect(vaultManager.openVault(firstVault.vaultId)).resolves.toBe( - firstVault, - ); - await expect(() => - vaultManager.openVault(secondVault.vaultId), - ).rejects.toThrow(vaultErrors.ErrorVaultUndefined); + const secondVaultId = await vaultManager.createVault(secondVaultName); + await vaultManager.destroyVault(secondVaultId); }); test('can list vaults', async () => { - const firstVault = await vaultManager.createVault(vaultName); - const secondVault = await vaultManager.createVault(secondVaultName); + const firstVaultId = await vaultManager.createVault(vaultName); + const secondVaultId = await vaultManager.createVault(secondVaultName); const vaultNames: Array = []; const vaultIds: Array = []; const vaultList = await vaultManager.listVaults(); @@ -298,843 +273,516 @@ describe('VaultManager', () => { }); expect(vaultNames.sort()).toEqual([vaultName, secondVaultName].sort()); expect(vaultIds.sort()).toEqual( - [firstVault.vaultId.toString(), secondVault.vaultId.toString()].sort(), + [firstVaultId.toString(), secondVaultId.toString()].sort(), ); }); - test( - 'able to read and load existing metadata', - async () => { - const vaultNames = [ - 'Vault1', - 'Vault2', - 'Vault3', - 'Vault4', - 'Vault5', - 'Vault6', - 'Vault7', - 'Vault8', - 'Vault9', - 'Vault10', - ]; - for (const vaultName of vaultNames) { - await vaultManager.createVault(vaultName as VaultName); - } - const vaults = await vaultManager.listVaults(); - const vaultId = vaults.get('Vault1' as VaultName) as VaultId; - expect(vaultId).not.toBeUndefined(); - const vault = await vaultManager.openVault(vaultId); - expect(vault).toBeTruthy(); - await vaultManager.stop(); - await db.stop(); - await db.start(); - vaultManager = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath, - vaultsKey, - nodeConnectionManager, - gestaltGraph, - acl, - db, - logger, - }); - const restartedVaultNames: Array = []; - const vaultList = await vaultManager.listVaults(); - vaultList.forEach((_, vaultName) => { - restartedVaultNames.push(vaultName); - }); - expect(restartedVaultNames.sort()).toEqual(vaultNames.sort()); - }, - global.defaultTimeout * 2, - ); - test.skip('cannot 
concurrently create the same vault', async () => { + test('able to read and load existing metadata', async () => { + const vaultNames = [ + 'Vault1', + 'Vault2', + 'Vault3', + 'Vault4', + 'Vault5', + 'Vault6', + 'Vault7', + 'Vault8', + 'Vault9', + 'Vault10', + ]; + for (const vaultName of vaultNames) { + await vaultManager.createVault(vaultName as VaultName); + } + const vaults = await vaultManager.listVaults(); + const vaultId = vaults.get('Vault1' as VaultName) as VaultId; + expect(vaultId).not.toBeUndefined(); + await vaultManager.stop(); + await vaultManager.start(); + const restartedVaultNames: Array = []; + const vaultList = await vaultManager.listVaults(); + vaultList.forEach((_, vaultName) => { + restartedVaultNames.push(vaultName); + }); + expect(restartedVaultNames.sort()).toEqual(vaultNames.sort()); + }); + test.skip('cannot concurrently create vaults with the same name', async () => { const vaults = Promise.all([ vaultManager.createVault(vaultName), vaultManager.createVault(vaultName), ]); - await expect(() => vaults).rejects.toThrow(vaultErrors.ErrorVaultDefined); + await expect(() => vaults).rejects.toThrow( + vaultErrors.ErrorVaultsVaultDefined, + ); }); test('can concurrently rename the same vault', async () => { - const vault = await vaultManager.createVault(vaultName); + const vaultId = await vaultManager.createVault(vaultName); await Promise.all([ - vaultManager.renameVault(vault.vaultId, secondVaultName), - vaultManager.renameVault(vault.vaultId, thirdVaultName), + vaultManager.renameVault(vaultId, secondVaultName), + vaultManager.renameVault(vaultId, thirdVaultName), ]); - await expect(vaultManager.getVaultName(vault.vaultId)).resolves.toBe( - thirdVaultName, - ); + const vaultNameTest = (await vaultManager.getVaultMeta(vaultId)).name; + expect(vaultNameTest).toBe(thirdVaultName); }); test('can concurrently open and rename the same vault', async () => { - const vault = await vaultManager.createVault(vaultName); + const vaultId = await vaultManager.createVault(vaultName); await Promise.all([ - vaultManager.renameVault(vault.vaultId, secondVaultName), - vaultManager.openVault(vault.vaultId), + vaultManager.renameVault(vaultId, secondVaultName), + vaultManager.withVaults([vaultId], async (vault) => vault.vaultId), ]); - await expect(vaultManager.getVaultName(vault.vaultId)).resolves.toBe( - secondVaultName, - ); + const vaultNameTest = (await vaultManager.getVaultMeta(vaultId)).name; + expect(vaultNameTest).toBe(secondVaultName); }); test('can save the commit state of a vault', async () => { - const vault = await vaultManager.createVault(vaultName); - await vault.commit(async (efs) => { - await efs.writeFile('test', 'test'); + const vaultId = await vaultManager.createVault(vaultName); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('test', 'test'); + }); }); - await vaultManager.closeVault(vault.vaultId); + await vaultManager.stop(); - vaultManager = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath, - vaultsKey, - nodeConnectionManager, - db, - acl: acl, - gestaltGraph: gestaltGraph, - fs, - logger, + await vaultManager.start(); + + const read = await vaultManager.withVaults( + [vaultId], + async (vaultLoaded) => { + return await vaultLoaded.readF(async (efs) => { + return await efs.readFile('test', { encoding: 'utf8' }); + }); + }, + ); + expect(read).toBe('test'); + }); + test('able to recover metadata after complex operations', async () => { + const vaultNames = 
['Vault1', 'Vault2', 'Vault3', 'Vault4', 'Vault5']; + const alteredVaultNames = [ + 'Vault1', + 'Vault2', + 'Vault3', + 'Vault6', + 'Vault10', + ]; + for (const vaultName of vaultNames) { + await vaultManager.createVault(vaultName as VaultName); + } + const v5 = await vaultManager.getVaultId('Vault5' as VaultName); + expect(v5).not.toBeUndefined(); + await vaultManager.destroyVault(v5!); + const v4 = await vaultManager.getVaultId('Vault4' as VaultName); + expect(v4).toBeTruthy(); + await vaultManager.renameVault(v4!, 'Vault10' as VaultName); + const v6 = await vaultManager.createVault('Vault6' as VaultName); + + await vaultManager.withVaults([v6], async (vault6) => { + await vault6.writeF(async (efs) => { + await efs.writeFile('reloaded', 'reload'); + }); + }); + + const vn: Array<VaultName> = []; + (await vaultManager.listVaults()).forEach((_, vaultName) => + vn.push(vaultName), + ); + expect(vn.sort()).toEqual(alteredVaultNames.sort()); + await vaultManager.stop(); + await vaultManager.start(); + await vaultManager.createVault('Vault7' as VaultName); + + const v10 = await vaultManager.getVaultId('Vault10' as VaultName); + expect(v10).not.toBeUndefined(); + alteredVaultNames.push('Vault7'); + expect((await vaultManager.listVaults()).size).toEqual( + alteredVaultNames.length, + ); + const vnAltered: Array<VaultName> = []; + (await vaultManager.listVaults()).forEach((_, vaultName) => + vnAltered.push(vaultName), + ); + expect(vnAltered.sort()).toEqual(alteredVaultNames.sort()); + const file = await vaultManager.withVaults([v6], async (reloadedVault) => { + return await reloadedVault.readF(async (efs) => { + return await efs.readFile('reloaded', { encoding: 'utf8' }); + }); }); - const vaultLoaded = await vaultManager.openVault(vault.vaultId); - const read = await vaultLoaded.access(async (efs) => { - return await efs.readFile('test', { encoding: 'utf8' }); + + expect(file).toBe('reload'); + }); + test('clone vaults from a remote keynode using a vault name', async () => { + await expect(() => + vaultManager.cloneVault( + remoteKeynode1.keyManager.getNodeId(), + 'not-existing' as VaultName, + ), + ).rejects.toThrow(vaultErrors.ErrorVaultsVaultUndefined); + await vaultManager.cloneVault( + remoteKeynode1.keyManager.getNodeId(), + vaultName, + ); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + const [file, secretsList] = await vaultManager.withVaults( + [vaultId], + async (vaultClone) => { + const file = await vaultClone.readF(async (efs) => { + return await efs.readFile(secretNames[0], { encoding: 'utf8' }); + }); + const secretsList = (await vaultOps.listSecrets(vaultClone)).sort(); + return [file, secretsList]; + }, + ); + expect(file).toBe('success?'); + expect(secretsList).toStrictEqual(secretNames.slice(0, 2).sort()); + }, 100000); + test('clone and pull vaults using a vault id', async () => { + const vaultId = await vaultManager.cloneVault( + remoteKeynode1.keyManager.getNodeId(), + remoteVaultId, + ); + await vaultManager.withVaults([vaultId], async (vaultClone) => { + const file = await vaultClone.readF(async (efs) => { + return await efs.readFile(secretNames[0], { encoding: 'utf8' }); + }); + expect(file).toBe('success?'); + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + secretNames.slice(0, 2).sort(), + ); + }); + + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + for (const secret of secretNames.slice(2)) { + await vaultOps.addSecret(remoteVault, secret, 'second 
success?'); + } + }, + ); + + await vaultManager.pullVault({ vaultId }); + + await vaultManager.withVaults([vaultId], async (vaultClone) => { + const file = await vaultClone.readF(async (efs) => { + return await efs.readFile(secretNames[2], { encoding: 'utf8' }); + }); + expect(file).toBe('second success?'); + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + secretNames.sort(), + ); + }); + + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + for (const secret of secretNames.slice(2)) { + await vaultOps.deleteSecret(remoteVault, secret); + } + }, + ); + }); + test('reject cloning and pulling when permissions are not set', async () => { + await remoteKeynode1.vaultManager.unshareVault( + remoteVaultId, + localKeynodeId, + ); + await expect(() => + vaultManager.cloneVault(remoteKeynode1Id, remoteVaultId), + ).rejects.toThrow(vaultErrors.ErrorVaultsPermissionDenied); + expect((await vaultManager.listVaults()).size).toBe(0); + await remoteKeynode1.vaultManager.shareVault(remoteVaultId, localKeynodeId); + const clonedVaultId = await vaultManager.cloneVault( + remoteKeynode1Id, + remoteVaultId, + ); + await vaultManager.withVaults([clonedVaultId], async (clonedVault) => { + const file = await clonedVault.readF(async (efs) => { + return await efs.readFile(secretNames[0], { encoding: 'utf8' }); + }); + expect(file).toBe('success?'); + }); + + await remoteKeynode1.vaultManager.unshareVault( + remoteVaultId, + localKeynodeId, + ); + await expect(() => + vaultManager.pullVault({ vaultId: clonedVaultId }), + ).rejects.toThrow(vaultErrors.ErrorVaultsPermissionDenied); + + await vaultManager.withVaults([clonedVaultId], async (clonedVault) => { + await expect(vaultOps.listSecrets(clonedVault)).resolves.toStrictEqual( + secretNames.slice(0, 2), + ); + }); + + await remoteKeynode1.vaultManager.shareVault(remoteVaultId, localKeynodeId); + }); + test('throw when trying to commit to a cloned vault', async () => { + const clonedVaultId = await vaultManager.cloneVault( + remoteKeynode1Id, + remoteVaultId, + ); + await vaultManager.withVaults([clonedVaultId], async (clonedVault) => { + await expect( + vaultOps.renameSecret(clonedVault, secretNames[0], secretNames[2]), + ).rejects.toThrow(vaultErrors.ErrorVaultImmutable); }); - expect(read).toBe('test'); }); test( - 'able to recover metadata after complex operations', + 'clone and pull from other cloned vaults', async () => { - const vaultNames = [ - 'Vault1', - 'Vault2', - 'Vault3', - 'Vault4', - 'Vault5', - 'Vault6', - 'Vault7', - 'Vault8', - 'Vault9', - 'Vault10', - ]; - const alteredVaultNames = [ - 'Vault1', - 'Vault2', - 'Vault3', - 'Vault4', - 'Vault6', - 'Vault7', - 'Vault8', - 'Vault10', - 'ThirdImpact', - 'Cake', - ]; - for (const vaultName of vaultNames) { - await vaultManager.createVault(vaultName as VaultName); - } - const v10 = await vaultManager.getVaultId('Vault10' as VaultName); - expect(v10).not.toBeUndefined(); - await vaultManager.destroyVault(v10!); - const v5 = await vaultManager.getVaultId('Vault5' as VaultName); - expect(v5).not.toBeUndefined(); - await vaultManager.destroyVault(v5!); - const v9 = await vaultManager.getVaultId('Vault9' as VaultName); - expect(v9).toBeTruthy(); - await vaultManager.renameVault(v9!, 'Vault10' as VaultName); - await vaultManager.createVault('ThirdImpact' as VaultName); - await vaultManager.createVault('Cake' as VaultName); - const vn: Array<VaultName> = []; - (await vaultManager.listVaults()).forEach((_, vaultName) => - vn.push(vaultName), + const 
clonedVaultRemote2Id = await remoteKeynode2.vaultManager.cloneVault( + remoteKeynode1Id, + remoteVaultId, ); - expect(vn.sort()).toEqual(alteredVaultNames.sort()); - await vaultManager.stop(); - await db.stop(); - - await db.start(); - const vaultManagerReloaded = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath, - vaultsKey, - nodeConnectionManager, - db, - acl: acl, - gestaltGraph: gestaltGraph, - fs, - logger, + await localKeynode.acl.setNodePerm(remoteKeynode2Id, { + gestalt: { + notify: null, + }, + vaults: {}, }); - await vaultManagerReloaded.createVault('Pumpkin' as VaultName); - const v102 = await vaultManagerReloaded.getVaultId( - 'Vault10' as VaultName, + await remoteKeynode2.vaultManager.shareVault( + clonedVaultRemote2Id, + localKeynodeId, + ); + const notification = ( + await localKeynode.notificationsManager.readNotifications() + ).pop(); + expect(notification?.data['type']).toBe('VaultShare'); + expect(notification?.data['vaultId']).toBe( + idUtils.toString(clonedVaultRemote2Id), ); - expect(v102).not.toBeUndefined(); - alteredVaultNames.push('Pumpkin'); - expect((await vaultManagerReloaded.listVaults()).size).toEqual( - alteredVaultNames.length, + expect(notification?.data['vaultName']).toBe(vaultName); + expect(notification?.data['actions']['clone']).toBeNull(); + expect(notification?.data['actions']['pull']).toBeNull(); + await vaultManager.cloneVault(remoteKeynode2Id, clonedVaultRemote2Id); + const vaultIdClone = await vaultManager.getVaultId(vaultName); + expect(vaultIdClone).not.toBeUndefined(); + await vaultManager.withVaults([vaultIdClone!], async (vaultClone) => { + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + secretNames.slice(0, 2).sort(), + ); + }); + + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + for (const secret of secretNames.slice(2)) { + await vaultOps.addSecret(remoteVault, secret, 'success?'); + } + }, ); - const vnAltered: Array<VaultName> = []; - (await vaultManagerReloaded.listVaults()).forEach((_, vaultName) => - vnAltered.push(vaultName), + + await vaultManager.pullVault({ + vaultId: vaultIdClone!, + pullNodeId: remoteKeynode1Id, + pullVaultNameOrId: remoteVaultId, + }); + await vaultManager.withVaults([vaultIdClone!], async (vaultClone) => { + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + secretNames.sort(), + ); + }); + + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + for (const secret of secretNames.slice(2)) { + await vaultOps.deleteSecret(remoteVault, secret); + } + }, ); - expect(vnAltered.sort()).toEqual(alteredVaultNames.sort()); - await vaultManagerReloaded.stop(); }, global.defaultTimeout * 2, ); - // Test('able to update the default node repo to pull from', async () => { - // await vaultManager.start({}); - // const vault1 = await vaultManager.createVault('MyTestVault'); - // const vault2 = await vaultManager.createVault('MyOtherTestVault'); - // const noNode = await vaultManager.getDefaultNode(vault1.vaultId); - // expect(noNode).toBeUndefined(); - // await vaultManager.setDefaultNode(vault1.vaultId, 'abc' as NodeId); - // const node = await vaultManager.getDefaultNode(vault1.vaultId); - // const noNode2 = await vaultManager.getDefaultNode(vault2.vaultId); - // expect(node).toBe('abc'); - // expect(noNode2).toBeUndefined(); - // await vaultManager.stop(); - // }); - // test('checking gestalt permissions for vaults', async () => { - // const node1: NodeInfo = { - // id: 
'123' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const node2: NodeInfo = { - // id: '345' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const node3: NodeInfo = { - // id: '678' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const node4: NodeInfo = { - // id: '890' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const id1: IdentityInfo = { - // providerId: 'github.com' as ProviderId, - // identityId: 'abc' as IdentityId, - // claims: { - // nodes: {}, - // } as ChainData, - // }; - // const id2: IdentityInfo = { - // providerId: 'github.com' as ProviderId, - // identityId: 'def' as IdentityId, - // claims: { - // nodes: {}, - // } as ChainData, - // }; - // - // await gestaltGraph.setNode(node1); - // await gestaltGraph.setNode(node2); - // await gestaltGraph.setNode(node3); - // await gestaltGraph.setNode(node4); - // await gestaltGraph.setIdentity(id1); - // await gestaltGraph.setIdentity(id2); - // await gestaltGraph.linkNodeAndNode(node1, node2); - // await gestaltGraph.linkNodeAndIdentity(node1, id1); - // await gestaltGraph.linkNodeAndIdentity(node4, id2); - // - // await vaultManager.start({}); - // const vault = await vaultManager.createVault('Test'); - // await vaultManager.setVaultPermissions('123' as NodeId, vault.vaultId); - // let record = await vaultManager.getVaultPermissions(vault.vaultId); - // expect(record).not.toBeUndefined(); - // expect(record['123']['pull']).toBeNull(); - // expect(record['345']['pull']).toBeNull(); - // expect(record['678']).toBeUndefined(); - // expect(record['890']).toBeUndefined(); - // - // await vaultManager.unsetVaultPermissions('345' as NodeId, vault.vaultId); - // record = await vaultManager.getVaultPermissions(vault.vaultId); - // expect(record).not.toBeUndefined(); - // expect(record['123']['pull']).toBeUndefined(); - // expect(record['345']['pull']).toBeUndefined(); - // - // await gestaltGraph.unlinkNodeAndNode(node1.id, node2.id); - // await vaultManager.setVaultPermissions('345' as NodeId, vault.vaultId); - // record = await vaultManager.getVaultPermissions(vault.vaultId); - // expect(record).not.toBeUndefined(); - // expect(record['123']['pull']).toBeUndefined(); - // expect(record['345']['pull']).toBeNull(); - // - // await vaultManager.stop(); - // }); - // /* TESTING TODO: - // * Changing the default node to pull from - // */ - describe('interacting with another node to', () => { - let targetDataDir: string, altDataDir: string; - let targetKeyManager: KeyManager, altKeyManager: KeyManager; - let targetFwdProxy: ForwardProxy; - let targetDb: DB, altDb: DB; - let targetACL: ACL, altACL: ACL; - let targetGestaltGraph: GestaltGraph, altGestaltGraph: GestaltGraph; - let targetNodeGraph: NodeGraph, altNodeGraph: NodeGraph; - let targetNodeConnectionManager: NodeConnectionManager, - altNodeConnectionManager: NodeConnectionManager; - let targetNodeManager: NodeManager, altNodeManager: NodeManager; - let targetVaultManager: VaultManager, altVaultManager: VaultManager; - let targetSigchain: Sigchain, altSigchain: Sigchain; - let targetNotificationsManager: NotificationsManager, - altNotificationsManager: NotificationsManager; - - let targetNodeId: NodeId, altNodeId: NodeId; - let revTLSConfig: TLSConfig, altRevTLSConfig: TLSConfig; + // Irrelevant for the moment as cloned vaults are immutable but will + // be useful in the future + test.skip('manage pulling from different remotes', async () => { + const 
clonedVaultRemote2Id = await remoteKeynode2.vaultManager.cloneVault( + remoteKeynode1Id, + remoteVaultId, + ); - let targetAgentService: IAgentServiceServer, - altAgentService: IAgentServiceServer; - let targetAgentServer: GRPCServer, altAgentServer: GRPCServer; + await remoteKeynode2.vaultManager.shareVault( + clonedVaultRemote2Id, + localKeynodeId, + ); - let node: NodeInfo; + const vaultCloneId = await vaultManager.cloneVault( + remoteKeynode2Id, + clonedVaultRemote2Id, + ); - let altFwdProxy: ForwardProxy; + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + await vaultOps.addSecret(remoteVault, secretNames[2], 'success?'); + }, + ); + await vaultManager.pullVault({ + vaultId: vaultCloneId, + pullNodeId: remoteKeynode1Id, + pullVaultNameOrId: vaultName, + }); - beforeAll(async () => { - altFwdProxy = new ForwardProxy({ - authToken: 'abc', - logger: logger, - }); + await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + secretNames.slice(0, 3).sort(), + ); }); - beforeEach(async () => { - targetDataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), + await remoteKeynode2.vaultManager.withVaults( + [clonedVaultRemote2Id], + async (clonedVaultRemote2) => { + await vaultOps.addSecret( + clonedVaultRemote2, + secretNames[3], + 'second success?', + ); + }, + ); + await vaultManager.pullVault({ vaultId: vaultCloneId }); + + await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + secretNames.sort(), ); - targetKeyManager = await KeyManager.createKeyManager({ - password, - keysPath: path.join(targetDataDir, 'keys'), - fs: fs, - logger: logger, - }); - targetNodeId = targetKeyManager.getNodeId(); - revTLSConfig = { - keyPrivatePem: targetKeyManager.getRootKeyPairPem().privateKey, - certChainPem: await targetKeyManager.getRootCertChainPem(), - }; - node = { - id: nodesUtils.encodeNodeId(keyManager.getNodeId()), - chain: { nodes: {}, identities: {} } as ChainData, - }; - targetFwdProxy = new ForwardProxy({ - authToken: '', - logger: logger, - }); - targetDb = await DB.createDB({ - dbPath: path.join(targetDataDir, 'db'), - logger: logger, - crypto: { - key: keyManager.dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, - }, - }, - }); - targetSigchain = await Sigchain.createSigchain({ - keyManager: targetKeyManager, - db: targetDb, - logger: logger, - }); - targetNodeGraph = await NodeGraph.createNodeGraph({ - db: targetDb, - keyManager: targetKeyManager, - logger: logger, - }); - targetNodeConnectionManager = new NodeConnectionManager({ - keyManager: targetKeyManager, - nodeGraph: targetNodeGraph, - fwdProxy: targetFwdProxy, - revProxy: revProxy, - logger: logger, - }); - targetNodeManager = new NodeManager({ - db: targetDb, - sigchain: targetSigchain, - keyManager: targetKeyManager, - nodeGraph: nodeGraph, - nodeConnectionManager: targetNodeConnectionManager, - logger: logger, - }); - targetACL = await ACL.createACL({ - db: targetDb, - logger: logger, - }); - targetNotificationsManager = - await NotificationsManager.createNotificationsManager({ - acl: targetACL, - db: targetDb, - nodeConnectionManager: targetNodeConnectionManager, - nodeManager: targetNodeManager, - keyManager: targetKeyManager, - messageCap: 5, - logger: logger, - }); - targetGestaltGraph = await GestaltGraph.createGestaltGraph({ - db: targetDb, - 
acl: targetACL, - logger: logger, - }); - await targetGestaltGraph.setNode(node); - const targetVaultKey = await vaultUtils.generateVaultKey(); - targetVaultManager = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath: path.join(targetDataDir, 'vaults'), - vaultsKey: targetVaultKey, - nodeConnectionManager: targetNodeConnectionManager, - db: targetDb, - acl: targetACL, - gestaltGraph: targetGestaltGraph, - logger: logger, - fresh: true, + }); + }); + test('Do actions on a vault using `withVault`', async () => { + const vault1 = await vaultManager.createVault('testVault1' as VaultName); + const vault2 = await vaultManager.createVault('testVault2' as VaultName); + const vaults = [vault1, vault2]; + + await vaultManager.withVaults(vaults, async (vault1, vault2) => { + expect(vault1.vaultId).toEqual(vaults[0]); + expect(vault2.vaultId).toEqual(vaults[1]); + await vault1.writeF(async (fs) => { + await fs.writeFile('test', 'test1'); }); - targetAgentService = createAgentService({ - keyManager: targetKeyManager, - vaultManager: targetVaultManager, - nodeManager: targetNodeManager, - nodeGraph: targetNodeGraph, - sigchain: targetSigchain, - notificationsManager: targetNotificationsManager, - nodeConnectionManager, + await vault2.writeF(async (fs) => { + await fs.writeFile('test', 'test2'); }); - targetAgentServer = new GRPCServer({ - logger: logger, + }); + + await vaultManager.withVaults(vaults, async (vault1, vault2) => { + const a = await vault1.readF((fs) => { + return fs.readFile('test'); }); - await targetAgentServer.start({ - services: [[AgentServiceService, targetAgentService]], - host: targetHost, + const b = await vault2.readF((fs) => { + return fs.readFile('test'); }); - altDataDir = await fs.promises.mkdtemp( + expect(a.toString()).toEqual('test1'); + expect(b.toString()).toEqual('test2'); + }); + }); + // FIXME: remove? not relevant anymore? 
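The `withVaults` test above shows the access pattern the reworked API uses throughout: vault handles are only valid inside the callback passed to `vaultManager.withVaults`. A minimal usage sketch follows; the `copySecret` helper is illustrative only, not part of this changeset, and it assumes `VaultManager` is re-exported from '@/vaults':

import type { VaultId } from '@/vaults/types';
import type { VaultManager } from '@/vaults';

// Illustrative helper: copy a secret between two vaults. Both handles are
// acquired through withVaults, so they remain valid for the whole callback.
async function copySecret(
  vaultManager: VaultManager,
  sourceId: VaultId,
  targetId: VaultId,
): Promise<void> {
  await vaultManager.withVaults(
    [sourceId, targetId],
    async (source, target) => {
      const data = await source.readF(async (efs) => {
        return await efs.readFile('secret', { encoding: 'utf8' });
      });
      await target.writeF(async (efs) => {
        await efs.writeFile('secret-copy', data);
      });
    },
  );
}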
+ test.skip('WorkingDirIndex is maintained across certain actions', async () => { + const vaultId = await vaultManager.createVault('testVault1' as VaultName); + const oid2 = await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (fs) => { + await fs.writeFile('test1', 'test1'); + }); + await vault.writeF(async (fs) => { + await fs.writeFile('test2', 'test2'); + }); + const oid2 = (await vault.log(undefined, 1)).pop()!.commitId; + await vault.writeF(async (fs) => { + await fs.writeFile('test3', 'test3'); + }); + await vault.version(oid2); + return oid2; + }); + await vaultManager.closeVault(vaultId); + await vaultManager.withVaults([vaultId], async (vault) => { + const vaultInternal = vault as VaultInternal; + const currentOid = ''; // FIXME: vaultInternal.getworkingDirIndex(); + await vault.readF(async (fs) => { + expect(await fs.readdir('.')).toEqual(['test1', 'test2']); + }); + expect(currentOid).toStrictEqual(oid2); + }); + }); + describe('Scanning nodes', () => { + let server: PolykeyAgent; + let serverNodeId: NodeId; + let serverNodeAddress: NodeAddress; + let allDataDir: string; + + beforeAll(async () => { + allDataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); - altKeyManager = await KeyManager.createKeyManager({ + server = await PolykeyAgent.createPolykeyAgent({ password, - keysPath: path.join(altDataDir, 'keys'), - fs: fs, - logger: logger, - }); - altNodeId = altKeyManager.getNodeId(); - await targetGestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(altNodeId), - chain: {}, + logger, + nodePath: path.join(allDataDir, 'server'), }); - altRevTLSConfig = { - keyPrivatePem: altKeyManager.getRootKeyPairPem().privateKey, - certChainPem: await altKeyManager.getRootCertChainPem(), + serverNodeId = server.keyManager.getNodeId(); + serverNodeAddress = { + host: server.revProxy.getIngressHost(), + port: server.revProxy.getIngressPort(), }; - await altFwdProxy.start({ - tlsConfig: { - keyPrivatePem: altKeyManager.getRootKeyPairPem().privateKey, - certChainPem: await altKeyManager.getRootCertChainPem(), - }, - egressHost: altHost, - egressPort: altPort, - }); - altDb = await DB.createDB({ - dbPath: path.join(altDataDir, 'db'), - logger: logger, - crypto: { - key: keyManager.dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, - }, - }, - }); - altSigchain = await Sigchain.createSigchain({ - keyManager: altKeyManager, - db: altDb, - logger: logger, - }); - altNodeGraph = await NodeGraph.createNodeGraph({ - db: altDb, - keyManager: altKeyManager, - logger: logger, - }); - altNodeConnectionManager = new NodeConnectionManager({ - keyManager: altKeyManager, - nodeGraph: altNodeGraph, - fwdProxy: altFwdProxy, - revProxy: altRevProxy, - logger: logger, - }); - await altNodeConnectionManager.start(); - altNodeManager = new NodeManager({ - db: altDb, - sigchain: altSigchain, - keyManager: altKeyManager, - nodeGraph: nodeGraph, - nodeConnectionManager: altNodeConnectionManager, - logger: logger, - }); - altACL = await ACL.createACL({ - db: altDb, - logger: logger, - }); - altNotificationsManager = - await NotificationsManager.createNotificationsManager({ - acl: altACL, - db: altDb, - nodeConnectionManager: altNodeConnectionManager, - nodeManager: altNodeManager, - keyManager: altKeyManager, - messageCap: 5, - logger: logger, - }); - altGestaltGraph = await GestaltGraph.createGestaltGraph({ - db: altDb, - acl: altACL, - logger: logger, - }); - await altGestaltGraph.setNode(node); - const altVaultKey = 
await vaultUtils.generateVaultKey(); - altVaultManager = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath: path.join(altDataDir, 'vaults'), - vaultsKey: altVaultKey, - nodeConnectionManager: altNodeConnectionManager, - db: altDb, - acl: altACL, - gestaltGraph: altGestaltGraph, - logger: logger, - }); - altAgentService = createAgentService({ - keyManager: altKeyManager, - vaultManager: altVaultManager, - nodeManager: altNodeManager, - nodeGraph: altNodeGraph, - sigchain: altSigchain, - notificationsManager: altNotificationsManager, - nodeConnectionManager, - }); - altAgentServer = new GRPCServer({ - logger: logger, - }); - await altAgentServer.start({ - services: [[AgentServiceService, altAgentService]], - host: altHostIn, - }); + }, global.polykeyStartupTimeout * 2); - await revProxy.start({ - serverHost: targetHost, - serverPort: targetAgentServer.getPort(), - ingressHost: targetHost, - ingressPort: targetPort, - tlsConfig: revTLSConfig, + afterAll(async () => { + await server.stop(); + await server.destroy(); + await fs.promises.rm(allDataDir, { force: true, recursive: true }); + }); + test('scans the targets vaults', async () => { + await localKeynode.nodeGraph.setNode(serverNodeId, serverNodeAddress); + await server.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(keyManager.getNodeId()), + chain: {}, }); + await server.gestaltGraph.setGestaltActionByNode( + keyManager.getNodeId(), + 'scan', + ); - await altRevProxy.start({ - serverHost: altHostIn, - serverPort: altAgentServer.getPort(), - ingressHost: altHostIn, - ingressPort: altPortIn, - tlsConfig: altRevTLSConfig, - }); - }, global.polykeyStartupTimeout * 2); + const vaultName1 = 'vn1' as VaultName; + const vaultName2 = 'vn2' as VaultName; + const vaultName3 = 'vn3' as VaultName; + const v1Id = await server.vaultManager.createVault(vaultName1); + const v2Id = await server.vaultManager.createVault(vaultName2); + const v3Id = await server.vaultManager.createVault(vaultName3); - afterEach(async () => { - await revProxy.closeConnection(altHost, altPort); - await revProxy.closeConnection(sourceHost, sourcePort); - await altRevProxy.closeConnection(sourceHost, sourcePort); - await fwdProxy.closeConnection( - fwdProxy.getEgressHost(), - fwdProxy.getEgressPort(), - ); - await altFwdProxy.closeConnection( - altFwdProxy.getEgressHost(), - altFwdProxy.getEgressPort(), + const vaultList: Array<[VaultName, VaultId]> = []; + + vaultList.push([vaultName1, v1Id]); + vaultList.push([vaultName2, v2Id]); + vaultList.push([vaultName3, v3Id]); + + const vaults = await vaultManager.scanNodeVaults(serverNodeId); + expect(vaults.sort()).toStrictEqual(vaultList.sort()); + + await server.gestaltGraph.unsetGestaltActionByNode( + keyManager.getNodeId(), + 'scan', ); - await revProxy.stop(); - await altRevProxy.stop(); - await targetAgentServer.stop(); - await targetVaultManager.stop(); - await targetGestaltGraph.stop(); - await targetNotificationsManager.stop(); - await targetACL.stop(); - await targetDb.stop(); - await targetNodeConnectionManager.stop(); - await targetNodeGraph.stop(); - await targetKeyManager.stop(); - await fs.promises.rm(targetDataDir, { - force: true, - recursive: true, - }); - await altAgentServer.stop(); - await altGestaltGraph.stop(); - await altVaultManager.stop(); - await altNotificationsManager.stop(); - await altACL.stop(); - await altDb.stop(); - await altNodeConnectionManager.stop(); - await altNodeGraph.stop(); - await altKeyManager.stop(); - await fs.promises.rm(altDataDir, { - force: true, - 
recursive: true, - }); }); + test('fails to scan the targets vaults without permission', async () => { + await localKeynode.nodeGraph.setNode(serverNodeId, serverNodeAddress); + await server.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(keyManager.getNodeId()), + chain: {}, + }); - afterAll(async () => { - await altFwdProxy.stop(); - }); + const vaultName1 = 'vn1' as VaultName; + const vaultName2 = 'vn2' as VaultName; + const vaultName3 = 'vn3' as VaultName; + const v1Id = await server.vaultManager.createVault(vaultName1); + const v2Id = await server.vaultManager.createVault(vaultName2); + const v3Id = await server.vaultManager.createVault(vaultName3); - test( - 'clone and pull vaults', - async () => { - // Await vaultManager.createVault(vaultName); - // await vaultManager.createVault('MyFirstVault copy'); - const vault = await targetVaultManager.createVault(vaultName); - // Await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - const names: string[] = []; - for (let i = 0; i < 1; i++) { - const name = 'secret ' + i.toString(); - names.push(name); - const content = 'Success?'; - await vaultOps.addSecret(vault, name, content); - } - await nodeManager.setNode(targetNodeId, { - host: targetHost, - port: targetPort, - } as NodeAddress); - await nodeConnectionManager.withConnF(targetNodeId, async () => {}); - await revProxy.openConnection(sourceHost, sourcePort); - await vaultManager.cloneVault(targetNodeId, vault.vaultId); - const vaultId = await vaultManager.getVaultId(vaultName); - const vaultClone = await vaultManager.openVault(vaultId!); - let file = await vaultClone.access(async (efs) => { - return await efs.readFile('secret 0', { encoding: 'utf8' }); - }); - expect(file).toBe('Success?'); - // Expect(vaultsList[2].name).toStrictEqual('MyFirstVault copy copy'); - // await expect( - // vaultManager.getDefaultNode(vaultsList[2].id), - // ).resolves.toBe(targetNodeId); - // const clonedVault = await vaultManager.getVault(vaultsList[2].id); - // expect(await clonedVault.getSecret('secret 9')).toStrictEqual( - // 'Success?', - // ); - // expect((await clonedVault.listSecrets()).sort()).toStrictEqual( - // names.sort(), - // ); - for (let i = 1; i < 2; i++) { - const name = 'secret ' + i.toString(); - names.push(name); - const content = 'Second Success?'; - await vaultOps.addSecret(vault, name, content); - } - await vaultManager.pullVault({ vaultId: vaultClone.vaultId }); - file = await vaultClone.access(async (efs) => { - return await efs.readFile('secret 1', { encoding: 'utf8' }); - }); - expect(file).toBe('Second Success?'); - // Expect((await clonedVault.listSecrets()).sort()).toStrictEqual( - // names.sort(), - // ); - // expect(await clonedVault.getSecret('secret 19')).toStrictEqual( - // 'Second Success?', - // ); - }, - global.defaultTimeout * 2, - ); - // TODO: what is this? do we need it? 
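The commented-out permission tests below predate this changeset and target the removed `setVaultPermissions`/`unsetVaultPermissions` API. The replacement flow, as exercised by the live tests earlier in this file, is `shareVault`/`unshareVault`; roughly (the identifiers here are placeholders):

// Owner side: sharing grants the clone/pull vault actions to the target node
// and sends it a VaultShare notification; unsharing revokes them, after which
// cloneVault/pullVault from that node reject with ErrorVaultsPermissionDenied.
await vaultManager.shareVault(vaultId, remoteNodeId);
// ... the remote node may now clone and pull this vault ...
await vaultManager.unshareVault(vaultId, remoteNodeId);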
- // Test( - // 'reject clone and pull ops when permissions are not set', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await vault.addSecret('MyFirstSecret', 'Success?'); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await expect(() => - // vaultManager.cloneVault(vault.vaultId, targetNodeId), - // ).rejects.toThrow(gitErrors.ErrorGitPermissionDenied); - // const vaultsList = await vaultManager.listVaults(); - // expect(vaultsList).toStrictEqual([]); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // const vaultList = await vaultManager.listVaults(); - // await targetVaultManager.unsetVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // vault.addSecret('MySecondSecret', 'SecondSuccess?'); - // await expect(() => - // vaultManager.pullVault(vaultList[0].id, targetNodeId), - // ).rejects.toThrow(gitErrors.ErrorGitPermissionDenied); - // const list = await vaultManager.listVaults(); - // const clonedVault = await vaultManager.getVault(list[0].id); - // expect((await clonedVault.listSecrets()).sort()).toStrictEqual( - // ['MyFirstSecret'].sort(), - // ); - // await vaultManager.stop(); - // }, - // global.defaultTimeout * 2, - // ); - // test( - // 'handle vault conflicts', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // const names: string[] = []; - // for (let i = 0; i < 10; i++) { - // const name = 'secret ' + i.toString(); - // names.push(name); - // const content = 'Success?'; - // await vault.addSecret(name, content); - // } - // await vault.mkdir('dir', { recursive: true }); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // const vaultList = await vaultManager.listVaults(); - // const clonedVault = await vaultManager.getVault(vaultList[0].id); - // await clonedVault.renameSecret('secret 9', 'secret 10'); - // await vault.renameSecret('secret 9', 'causing merge conflict'); - // await expect(() => - // vaultManager.pullVault(clonedVault.vaultId), - // ).rejects.toThrow(vaultErrors.ErrorVaultMergeConflict); - // }, - // global.defaultTimeout * 2, - // ); - // test( - // 'clone and pull from a default node', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await targetVaultManager.setVaultPermissions( - // altNodeManager.getNodeId(), - // vault.vaultId, - // ); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // const names: string[] = []; - // for (let i = 0; i < 10; i++) { - // const name = 'secret ' + i.toString(); - // names.push(name); - // const content = 'Success?'; - // await vault.addSecret(name, content); - // } - // await altNodeManager.setNode(targetNodeId, { - // ip: 
targetHost, - // port: targetPort, - // } as NodeAddress); - // await altNodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(altHost, altPort); - // await altVaultManager.cloneVault(vault.vaultId, targetNodeId); - // const altVaultsList = await altVaultManager.listVaults(); - // expect(altVaultsList[0].name).toStrictEqual('MyFirstVault'); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // await altVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // altVaultsList[0].id, - // ); - // const vaultsList = await vaultManager.listVaults(); - // expect(vaultsList[0].name).toStrictEqual('MyFirstVault'); - // const clonedVault = await vaultManager.getVault(vaultsList[0].id); - // const altClonedVault = await altVaultManager.getVault( - // altVaultsList[0].id, - // ); - // await altClonedVault.updateSecret('secret 9', 'this is new'); - // await nodeManager.setNode(altNodeId, { - // ip: altHostIn, - // port: altPortIn, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(altNodeId); - // await altRevProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.pullVault(clonedVault.vaultId, altNodeId); - // expect(await clonedVault.getSecret('secret 9')).toStrictEqual( - // 'this is new', - // ); - // await altClonedVault.addSecret('secret 10', 'default pull?'); - // await vaultManager.pullVault(clonedVault.vaultId); - // expect(await clonedVault.getSecret('secret 10')).toStrictEqual( - // 'default pull?', - // ); - // }, - // global.defaultTimeout * 2, - // ); - // test( - // 'clone and pull within a system of 3 nodes', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await targetVaultManager.setVaultPermissions( - // altNodeManager.getNodeId(), - // vault.vaultId, - // ); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // const names: string[] = []; - // for (let i = 0; i < 10; i++) { - // const name = 'secret ' + i.toString(); - // names.push(name); - // const content = 'Success?'; - // await vault.addSecret(name, content); - // } - // await altNodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await altNodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(altHost, altPort); - // await altVaultManager.cloneVault(vault.vaultId, targetNodeId); - // const altVaultsList = await altVaultManager.listVaults(); - // expect(altVaultsList[0].name).toStrictEqual('MyFirstVault'); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // await altVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // altVaultsList[0].id, - // ); - // const vaultsList = await vaultManager.listVaults(); - // expect(vaultsList[0].name).toStrictEqual('MyFirstVault'); - // const clonedVault = await vaultManager.getVault(vaultsList[0].id); - // const altClonedVault = await altVaultManager.getVault( - // altVaultsList[0].id, - // ); - // 
await altClonedVault.updateSecret('secret 9', 'this is new'); - // await nodeManager.setNode(altNodeId, { - // ip: altHostIn, - // port: altPortIn, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(altNodeId); - // await altRevProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.pullVault(clonedVault.vaultId, altNodeId); - // expect(await clonedVault.getSecret('secret 9')).toStrictEqual( - // 'this is new', - // ); - // }, - // global.defaultTimeout * 2, - // ); + const vaultList: Array<[VaultName, VaultId]> = []; + + vaultList.push([vaultName1, v1Id]); + vaultList.push([vaultName2, v2Id]); + vaultList.push([vaultName3, v3Id]); + + await expect(() => + vaultManager.scanNodeVaults(serverNodeId), + ).rejects.toThrow(vaultErrors.ErrorVaultsPermissionDenied); + }); }); }); diff --git a/tests/vaults/VaultOps.test.ts b/tests/vaults/VaultOps.test.ts index b07c08029..8c6907bf9 100644 --- a/tests/vaults/VaultOps.test.ts +++ b/tests/vaults/VaultOps.test.ts @@ -1,4 +1,6 @@ -import type { Vault, VaultId } from '@/vaults/types'; +import type { VaultId } from '@/vaults/types'; +import type { Vault } from '@/vaults/Vault'; +import type { KeyManager } from '@/keys'; import fs from 'fs'; import path from 'path'; import os from 'os'; @@ -7,26 +9,23 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { utils as idUtils } from '@matrixai/id'; import * as errors from '@/vaults/errors'; import { VaultInternal, vaultOps } from '@/vaults'; -import { KeyManager } from '@/keys'; -import { generateVaultId } from '@/vaults/utils'; +import * as vaultsUtils from '@/vaults/utils'; import * as keysUtils from '@/keys/utils'; import * as testUtils from '../utils'; describe('VaultOps', () => { - const password = 'password'; const logger = new Logger('VaultOps', LogLevel.WARN, [new StreamHandler()]); - // Const probeLogger = new Logger('vaultOpsProbe', LogLevel.INFO, [ - // new StreamHandler(), - // ]); let dataDir: string; - - let keyManager: KeyManager; let baseEfs: EncryptedFS; - let vaultId: VaultId; let vaultInternal: VaultInternal; let vault: Vault; + const dummyKeyManager = { + getNodeId: () => { + return testUtils.generateRandomNodeId(); + }, + } as KeyManager; let mockedGenerateKeyPair: jest.SpyInstance; let mockedGenerateDeterministicKeyPair: jest.SpyInstance; @@ -43,17 +42,10 @@ describe('VaultOps', () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); - const keysPath = path.join(dataDir, 'keys'); - - keyManager = await KeyManager.createKeyManager({ - keysPath, - password, - logger, - }); - const dbPath = path.join(dataDir, 'db'); + const dbKey = await keysUtils.generateKey(); baseEfs = await EncryptedFS.createEncryptedFS({ - dbKey: keyManager.dbKey, + dbKey, dbPath, logger, }); @@ -65,8 +57,6 @@ describe('VaultOps', () => { mockedGenerateDeterministicKeyPair.mockRestore(); await baseEfs.stop(); await baseEfs.destroy(); - await keyManager.stop(); - await keyManager.destroy(); await fs.promises.rm(dataDir, { force: true, recursive: true, @@ -74,12 +64,12 @@ describe('VaultOps', () => { }); beforeEach(async () => { - vaultId = generateVaultId(); + vaultId = vaultsUtils.generateVaultId(); await baseEfs.mkdir(path.join(idUtils.toString(vaultId), 'contents'), { recursive: true, }); vaultInternal = await VaultInternal.create({ - keyManager: keyManager, + keyManager: dummyKeyManager, vaultId, efs: baseEfs, logger: logger.getChild(VaultInternal.name), @@ -87,16 +77,10 @@ describe('VaultOps', () => { }); vault = vaultInternal as 
Vault; }); - afterEach(async () => { - await vaultInternal.destroy(); - }); test('adding a secret', async () => { - // Await vault.access(async efs => { - // console.log(await efs.readdir('.')); - // }) await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); - const dir = await vault.access(async (efs) => { + const dir = await vault.readF(async (efs) => { return await efs.readdir('.'); }); expect(dir).toContain('secret-1'); @@ -107,7 +91,7 @@ describe('VaultOps', () => { expect(secret.toString()).toBe('secret-content'); await expect(() => vaultOps.getSecret(vault, 'doesnotexist'), - ).rejects.toThrow(errors.ErrorSecretUndefined); + ).rejects.toThrow(errors.ErrorSecretsSecretUndefined); }); test('able to make directories', async () => { await vaultOps.mkdir(vault, 'dir-1', { recursive: true }); @@ -120,7 +104,7 @@ describe('VaultOps', () => { path.join('dir-3', 'dir-4', 'secret-1'), 'secret-content', ); - await vault.access(async (efs) => { + await vault.readF(async (efs) => { const dir = await efs.readdir('.'); expect(dir).toContain('dir-1'); expect(dir).toContain('dir-2'); @@ -143,9 +127,9 @@ describe('VaultOps', () => { (await vaultOps.getSecret(vault, name)).toString(), ).toStrictEqual(content); - await expect( - vault.access((efs) => efs.readdir('.')), - ).resolves.toContain(name); + await expect(vault.readF((efs) => efs.readdir('.'))).resolves.toContain( + name, + ); } }, global.defaultTimeout * 4, @@ -207,14 +191,14 @@ describe('VaultOps', () => { await expect(() => vaultOps.deleteSecret(vault, 'dir-1')).rejects.toThrow(); await vaultOps.deleteSecret(vault, path.join('dir-1', 'secret-2')); await vaultOps.deleteSecret(vault, 'dir-1'); - await expect( - vault.access((efs) => efs.readdir('.')), - ).resolves.not.toContain('secret-1'); + await expect(vault.readF((efs) => efs.readdir('.'))).resolves.not.toContain( + 'secret-1', + ); }); test('deleting a secret within a directory', async () => { await expect(() => vaultOps.mkdir(vault, path.join('dir-1', 'dir-2')), - ).rejects.toThrow(errors.ErrorRecursive); + ).rejects.toThrow(errors.ErrorVaultsRecursive); await vaultOps.mkdir(vault, path.join('dir-1', 'dir-2'), { recursive: true, }); @@ -223,16 +207,11 @@ describe('VaultOps', () => { path.join('dir-1', 'dir-2', 'secret-1'), 'secret-content', ); - await vaultOps.deleteSecret( - vault, - path.join('dir-1', 'dir-2'), - { - recursive: true, - }, - logger, - ); + await vaultOps.deleteSecret(vault, path.join('dir-1', 'dir-2'), { + recursive: true, + }); await expect( - vault.access((efs) => efs.readdir('dir-1')), + vault.readF((efs) => efs.readdir('dir-1')), ).resolves.not.toContain('dir-2'); }); test( @@ -247,7 +226,7 @@ describe('VaultOps', () => { ).toStrictEqual(content); await vaultOps.deleteSecret(vault, name, { recursive: true }); await expect( - vault.access((efs) => efs.readdir('.')), + vault.readF((efs) => efs.readdir('.')), ).resolves.not.toContain(name); } }, @@ -256,7 +235,7 @@ describe('VaultOps', () => { test('renaming a secret', async () => { await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); await vaultOps.renameSecret(vault, 'secret-1', 'secret-change'); - const dir = vault.access((efs) => efs.readdir('.')); + const dir = vault.readF((efs) => efs.readdir('.')); await expect(dir).resolves.not.toContain('secret-1'); await expect(dir).resolves.toContain('secret-change'); }); @@ -273,9 +252,9 @@ describe('VaultOps', () => { path.join(dirPath, 'secret-1'), path.join(dirPath, 'secret-change'), ); - await expect( - vault.access((efs) => efs.readdir(dirPath)), - 
).resolves.toContain(`secret-change`); + await expect(vault.readF((efs) => efs.readdir(dirPath))).resolves.toContain( + `secret-change`, + ); }); test('listing secrets', async () => { await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); @@ -384,7 +363,7 @@ describe('VaultOps', () => { await vaultOps.addSecretDirectory(vault, secretDir, fs); await expect( - vault.access((efs) => efs.readdir(secretDirName)), + vault.readF((efs) => efs.readdir(secretDirName)), ).resolves.toContain('secret'); await fs.promises.rm(secretDir, { @@ -516,7 +495,7 @@ describe('VaultOps', () => { for (let j = 0; j < 8; j++) { await expect( - vault.access((efs) => efs.readdir(secretDirName)), + vault.readF((efs) => efs.readdir(secretDirName)), ).resolves.toContain('secret ' + j.toString()); } expect( diff --git a/tests/vaults/old/Vault.test.ts.old b/tests/vaults/old/Vault.test.ts.old deleted file mode 100644 index 96202daa5..000000000 --- a/tests/vaults/old/Vault.test.ts.old +++ /dev/null @@ -1,565 +0,0 @@ -import type { NodeId } from '@/nodes/types'; -import type { VaultId, VaultIdRaw, VaultKey, VaultName } from "@/vaults/types"; - -import os from 'os'; -import path from 'path'; -import fs from 'fs'; -import git from 'isomorphic-git'; -import Vault from '@/vaults/old/Vault'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; - -import { generateVaultId, generateVaultKey } from '@/vaults/utils'; -import { getRandomBytes } from '@/keys/utils'; -import { EncryptedFS } from 'encryptedfs'; -import * as errors from '@/vaults/errors'; -import * as utils from '@/utils'; - -describe.skip('Vault is', () => { - let dataDir: string; - let vault: Vault; - let key: VaultKey; - let vaultId: VaultId; - let efsDir: string; - const logger = new Logger('Vault', LogLevel.WARN, [new StreamHandler()]); - const name = 'vault-1' as VaultName; - - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - key = await generateVaultKey(); - vaultId = generateVaultId(); - efsDir = path.join(dataDir, vaultId); - await fs.promises.mkdir(efsDir); - vault = new Vault({ - vaultId: vaultId, - vaultName: name, - baseDir: efsDir, - fs: fs, - logger: logger, - }); - }); - - afterEach(async () => { - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - }); - - test('type correct', async () => { - expect(vault).toBeInstanceOf(Vault); - }); - test('creating the vault directory', async () => { - await vault.start({ key }); - await expect(fs.promises.readdir(dataDir)).resolves.toContain(vaultId); - }); - test('able to destroy an empty vault', async () => { - await vault.start({ key }); - await expect(fs.promises.readdir(dataDir)).resolves.toContain(vaultId); - await vault.stop(); - await expect(fs.promises.readdir(dataDir)).resolves.not.toContain(vaultId); - }); - test('adding a secret', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('secret-1.data'); - await vault.stop(); - }); - test('adding a secret and getting it', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - const secret = await vault.getSecret('secret-1'); - expect(secret).toBe('secret-content'); - await expect(() => vault.getSecret('doesnotexist')).rejects.toThrow( - errors.ErrorSecretUndefined, - ); - await vault.stop(); - }); - test('able to make directories', async () => { - await vault.start({ key 
}); - await vault.mkdir('dir-1', { recursive: true }); - await vault.mkdir('dir-2', { recursive: true }); - await vault.mkdir(path.join('dir-3', 'dir-4'), { recursive: true }); - await vault.addSecret( - path.join('dir-3', 'dir-4', 'secret-1'), - 'secret-content', - ); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('dir-1.data'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('dir-2.data'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId, 'dir-3.data')), - ).resolves.toContain('dir-4.data'); - await expect( - fs.promises.readdir( - path.join(dataDir, vaultId, 'dir-3.data', 'dir-4.data'), - ), - ).resolves.toContain('secret-1.data'); - await vault.stop(); - }); - test('adding and committing a secret 10 times', async () => { - await vault.start({ key }); - for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); - const content = 'secret-content'; - await vault.addSecret(name, content); - await expect(vault.getSecret(name)).resolves.toStrictEqual(content); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain(`${name}.data`); - } - await vault.stop(); - }); - test('updating secret content', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.updateSecret('secret-1', 'secret-content-change'); - await expect(vault.getSecret('secret-1')).resolves.toStrictEqual( - 'secret-content-change', - ); - await vault.stop(); - }); - test('updating secret content within a directory', async () => { - await vault.start({ key }); - await vault.mkdir(path.join('dir-1', 'dir-2'), { recursive: true }); - await vault.addSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content', - ); - await vault.updateSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content-change', - ); - await expect( - vault.getSecret(path.join('dir-1', 'dir-2', 'secret-1')), - ).resolves.toStrictEqual('secret-content-change'); - await vault.stop(); - }); - test('updating a secret 10 times', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - for (let i = 0; i < 10; i++) { - const content = 'secret-content'; - await vault.updateSecret('secret-1', content); - await expect(vault.getSecret('secret-1')).resolves.toStrictEqual(content); - } - await vault.stop(); - }); - test('deleting a secret', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.mkdir('dir-1'); - await vault.deleteSecret('secret-1'); - await expect(() => vault.deleteSecret('dir-1')).rejects.toThrow( - errors.ErrorRecursive, - ); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.not.toContain('secret-1.data'); - await vault.stop(); - }); - test('deleting a secret within a directory', async () => { - await vault.start({ key }); - await expect(() => vault.mkdir(path.join('dir-1', 'dir-2'))).rejects.toThrow( - errors.ErrorRecursive, - ); - await vault.mkdir(path.join('dir-1', 'dir-2'), { recursive: true }); - await vault.addSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content', - ); - await vault.deleteSecret(path.join('dir-1', 'dir-2'), { recursive: true }); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId, 'dir-1.data')), - ).resolves.not.toContain('dir2-1.data'); - await vault.stop(); - }); - test('deleting a secret 10 times', async () => { - await vault.start({ key }); - 
for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); - const content = 'secret-content'; - await vault.addSecret(name, content); - await expect(vault.getSecret(name)).resolves.toStrictEqual(content); - await vault.deleteSecret(name, { recursive: true }); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.not.toContain(`${name}.data`); - } - await vault.stop(); - }); - test('renaming a vault', async () => { - await vault.start({ key }); - await vault.renameVault('vault-change' as VaultName); - expect(vault.vaultName).toEqual('vault-change'); - await vault.stop(); - }); - test('renaming a secret', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.renameSecret('secret-1', 'secret-change'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.not.toContain('secret-1.data'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('secret-change.data'); - await vault.stop(); - }); - test('renaming a secret within a directory', async () => { - await vault.start({ key }); - await vault.mkdir(path.join('dir-1', 'dir-2'), { recursive: true }); - await vault.addSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content', - ); - await vault.renameSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - path.join('dir-1', 'dir-2', 'secret-change'), - ); - await expect( - fs.promises.readdir( - path.join(dataDir, vaultId, 'dir-1.data', 'dir-2.data'), - ), - ).resolves.toContain(`secret-change.data`); - await vault.stop(); - }); - test('listing secrets', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.addSecret('secret-2', 'secret-content'); - await vault.mkdir(path.join('dir1', 'dir2'), { recursive: true }); - await vault.addSecret( - path.join('dir1', 'dir2', 'secret-3'), - 'secret-content', - ); - expect((await vault.listSecrets()).sort()).toStrictEqual( - ['secret-1', 'secret-2', 'dir1/dir2/secret-3'].sort(), - ); - await vault.stop(); - }); - test('listing secret directories', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); - const content = await getRandomBytes(5); - await fs.promises.writeFile(path.join(secretDir, name), content); - } - await vault.start({ key }); - await vault.addSecretDirectory(secretDir); - expect(await vault.listSecrets()).toStrictEqual([ - path.join(secretDirName, `secret 0`), - path.join(secretDirName, `secret 1`), - path.join(secretDirName, `secret 2`), - path.join(secretDirName, `secret 3`), - path.join(secretDirName, `secret 4`), - path.join(secretDirName, `secret 5`), - path.join(secretDirName, `secret 6`), - path.join(secretDirName, `secret 7`), - path.join(secretDirName, `secret 8`), - path.join(secretDirName, `secret 9`), - ]); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('adding hidden files and directories', async () => { - await vault.start({ key }); - await vault.addSecret('.hiddenSecret', 'hidden_contents'); - await vault.mkdir('.hiddenDir', { recursive: true }); - await vault.addSecret('.hiddenDir/.hiddenInSecret', 'hidden_inside'); - const list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - ['.hiddenSecret', '.hiddenDir/.hiddenInSecret'].sort(), - 
); - await vault.stop(); - }); - test('updating and deleting hidden files and directories', async () => { - await vault.start({ key }); - await vault.addSecret('.hiddenSecret', 'hidden_contents'); - await vault.mkdir('.hiddenDir', { recursive: true }); - await vault.addSecret('.hiddenDir/.hiddenInSecret', 'hidden_inside'); - await vault.updateSecret('.hiddenSecret', 'change_contents'); - await vault.updateSecret('.hiddenDir/.hiddenInSecret', 'change_inside'); - await vault.renameSecret('.hiddenSecret', '.hidingSecret'); - await vault.renameSecret('.hiddenDir', '.hidingDir'); - let list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - ['.hidingSecret', '.hidingDir/.hiddenInSecret'].sort(), - ); - await expect(vault.getSecret('.hidingSecret')).resolves.toStrictEqual( - 'change_contents', - ); - await expect( - vault.getSecret('.hidingDir/.hiddenInSecret'), - ).resolves.toStrictEqual('change_inside'); - await vault.deleteSecret('.hidingSecret', { recursive: true }); - await vault.deleteSecret('.hidingDir', { recursive: true }); - list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual([].sort()); - await vault.stop(); - }); - test( - 'adding and committing a secret 100 times on efs', - async () => { - const efs = await EncryptedFS.createEncryptedFS({ - dbKey: await getRandomBytes(32), - dbPath: dataDir, - }); - const exists = utils.promisify(efs.exists).bind(efs); - const mkdir = utils.promisify(efs.mkdir).bind(efs); - const writeFile = utils.promisify(efs.writeFile).bind(efs); - const vaultId = vault.vaultId; - await mkdir(path.join(dataDir, vaultId), { - recursive: true, - }); - await git.init({ - fs: efs, - dir: path.join(dataDir, vaultId), - }); - await git.commit({ - fs: efs, - dir: path.join(dataDir, vaultId), - author: { - name: vaultId, - }, - message: 'Initial Commit', - }); - await writeFile( - path.join(path.join(dataDir, vaultId), '.git', 'packed-refs'), - '# pack-refs with: peeled fully-peeled sorted', - ); - for (let i = 0; i < 100; i++) { - const name = 'secret ' + i.toString(); - const content = await getRandomBytes(5); - const writePath = path.join(dataDir, vaultId, name); - await writeFile(writePath, content, {}); - await git.add({ - fs: efs, - dir: path.join(dataDir, vaultId), - filepath: name, - }); - await git.commit({ - fs: efs, - dir: path.join(dataDir, vaultId), - author: { - name: vaultId, - }, - message: `Add secret: ${name}`, - }); - - await expect(exists(path.join(dataDir, vaultId, name))).resolves.toBe( - true, - ); - } - }, - global.defaultTimeout * 2, - ); - test('adding a directory of 1 secret', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - const name = 'secret'; - const content = await getRandomBytes(5); - await fs.promises.writeFile(path.join(secretDir, name), content); - await vault.start({ key }); - await vault.addSecretDirectory(path.join(secretDir)); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId, `${secretDirName}.data`)), - ).resolves.toContain('secret.data'); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('getting the stats of a vault', async () => { - await vault.start({ key }); - const stats = await vault.stats(); - expect(stats).toBeInstanceOf(fs.Stats); - await vault.stop(); - }); - test('adding a directory with subdirectories and files', async () => { - const secretDir = await fs.promises.mkdtemp( - 
path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - await fs.promises.mkdir(path.join(secretDir, 'dir1')); - await fs.promises.mkdir(path.join(secretDir, 'dir1', 'dir2')); - await fs.promises.mkdir(path.join(secretDir, 'dir3')); - - await fs.promises.writeFile(path.join(secretDir, 'secret1'), 'secret1'); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'secret2'), - 'secret2', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'dir2', 'secret3'), - 'secret3', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret4'), - 'secret4', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret5'), - 'secret5', - ); - await vault.start({ key }); - await vault.addSecretDirectory(path.join(secretDir)); - const list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - [ - path.join(secretDirName, 'secret1'), - path.join(secretDirName, 'dir1', 'secret2'), - path.join(secretDirName, 'dir1', 'dir2', 'secret3'), - path.join(secretDirName, 'dir3', 'secret4'), - path.join(secretDirName, 'dir3', 'secret5'), - ].sort(), - ); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('testing the errors handling of adding secret directories', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - await fs.promises.mkdir(path.join(secretDir, 'dir1')); - await fs.promises.mkdir(path.join(secretDir, 'dir1', 'dir2')); - await fs.promises.mkdir(path.join(secretDir, 'dir3')); - await fs.promises.writeFile(path.join(secretDir, 'secret1'), 'secret1'); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'secret2'), - 'secret2', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'dir2', 'secret3'), - 'secret3', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret4'), - 'secret4', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret5'), - 'secret5', - ); - await vault.start({ key }); - await vault.mkdir(secretDirName, { recursive: true }); - await vault.addSecret( - path.join(secretDirName, 'secret1'), - 'blocking-secret', - ); - await vault.addSecretDirectory(secretDir); - const list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - [ - path.join(secretDirName, 'secret1'), - path.join(secretDirName, 'dir1', 'secret2'), - path.join(secretDirName, 'dir1', 'dir2', 'secret3'), - path.join(secretDirName, 'dir3', 'secret4'), - path.join(secretDirName, 'dir3', 'secret5'), - ].sort(), - ); - await vault.start({ key }); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('adding a directory of 100 secrets with some secrets already existing', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - for (let i = 0; i < 50; i++) { - const name = 'secret ' + i.toString(); - const content = 'this is secret ' + i.toString(); - await fs.promises.writeFile( - path.join(secretDir, name), - Buffer.from(content), - ); - } - await vault.start({ key }); - await vault.mkdir(secretDirName, { recursive: false }); - await vault.addSecret( - path.join(secretDirName, 'secret 8'), - 'secret-content', - ); - await vault.addSecret( - path.join(secretDirName, 'secret 9'), - 'secret-content', - ); - await 
vault.addSecretDirectory(secretDir); - - for (let j = 0; j < 8; j++) { - await expect( - fs.promises.readdir( - path.join(dataDir, vaultId, `${secretDirName}.data`), - ), - ).resolves.toContain('secret ' + j.toString() + '.data'); - } - await expect( - vault.getSecret(path.join(secretDirName, 'secret 8')), - ).resolves.toStrictEqual('this is secret 8'); - await expect( - vault.getSecret(path.join(secretDirName, 'secret 9')), - ).resolves.toStrictEqual('this is secret 9'); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('able to persist data across multiple vault objects', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('secret-1.data'); - const vault2 = new Vault({ - vaultId: vaultId, - vaultName: name, - baseDir: efsDir, - fs: fs, - logger: logger, - }); - await vault2.start({ key }); - const content = await vault2.getSecret('secret-1'); - expect(content).toBe('secret-content'); - await vault2.stop(); - }); - // Test('able to erase dirty commits on start up', async () => { - // await vault.start({ key }); - // await vault.addSecret('secret-1', 'secret-content'); - // await vault.mkdir('dir-1', { recursive: true }); - // await vault.addSecret('dir-1/secret-1', 'secret-content'); - // await vault.start({ key }); - // await fs.promises.writeFile(path.join(dataDir, `${vault.vaultId}:nodeID`), 'dirty-commit'); - // const vault2 = new Vault({ - // vaultId: vaultId, - // vaultName: name, - // baseDir: efsDir, - // fs: fs, - // logger: logger, - // }); - // await vault2.start({ key }); - // await vault2.stop(); - // }); -}); diff --git a/tests/vaults/utils.test.ts b/tests/vaults/utils.test.ts index 6cb6d6280..d41cec6c8 100644 --- a/tests/vaults/utils.test.ts +++ b/tests/vaults/utils.test.ts @@ -5,9 +5,8 @@ import { EncryptedFS } from 'encryptedfs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { IdRandom } from '@matrixai/id'; -import * as utils from '@/utils'; import * as vaultsUtils from '@/vaults/utils'; -import { isVaultId } from '@/vaults/utils'; +import * as keysUtils from '@/keys/utils'; describe('Vaults utils', () => { const logger = new Logger('Vaults utils tests', LogLevel.WARN, [ @@ -29,115 +28,57 @@ describe('Vaults utils', () => { }); test('VaultId type guard works', async () => { - // Const nodeId = makeNodeId('A'.repeat(44)); const vaultId = vaultsUtils.generateVaultId(); - expect(isVaultId(vaultId)).toBeTruthy(); + expect(vaultsUtils.decodeVaultId(vaultId)).toBeTruthy(); }); - // TODO: this may be fully removed later. 
check if splitting is needed for vaultIds - // test('vaultIds can be split', async () => { - // const nodeId = 'alkjsddfjknacqqquiry32741834id'; - // const id = vaultsUtils.generateVaultId(); - // expect(id).toContain(nodeId); - // const vaultId = vaultsUtils.splitVaultId(id); - // expect(vaultId).not.toContain(nodeId); - // }); - test.skip('EFS can be read recursively', async () => { - const key = await vaultsUtils.generateVaultKey(); + test('EFS can be read recursively', async () => { + const key = await keysUtils.generateKey(256); const efs = await EncryptedFS.createEncryptedFS({ dbKey: key, dbPath: dataDir, logger, }); - const mkdir = utils.promisify(efs.mkdir).bind(efs); - const writeFile = utils.promisify(efs.writeFile).bind(efs); - await mkdir('dir', { recursive: true }); - await mkdir('dir/dir2/dir3', { recursive: true }); - await writeFile('dir/file', 'content'); + await efs.promises.mkdir(path.join('dir', 'dir2', 'dir3'), { + recursive: true, + }); + const filePath1 = path.join('dir', 'file'); + await efs.promises.writeFile(filePath1, 'content'); let files: string[] = []; - for await (const file of vaultsUtils.readdirRecursivelyEFS( - efs, - '', - false, - )) { + for await (const file of vaultsUtils.readdirRecursively(efs, './')) { files.push(file); } - expect(files.sort()).toStrictEqual(['dir/file'].sort()); + expect(files).toStrictEqual([filePath1]); files = []; - for await (const file of vaultsUtils.readdirRecursivelyEFS(efs, '', true)) { + const filePath2 = path.join('dir', 'dir2', 'dir3', 'file'); + await efs.promises.writeFile(filePath2, 'content'); + for await (const file of vaultsUtils.readdirRecursively(efs)) { files.push(file); } - expect(files.sort()).toStrictEqual( - ['dir', 'dir/dir2', 'dir/dir2/dir3', 'dir/file'].sort(), - ); + expect(files.sort()).toStrictEqual([filePath1, filePath2].sort()); }); - // Test('a persisted EFS object can be read recursively', async () => { - // const key = await vaultsUtils.generateVaultKey(); - // const efs = new EncryptedFS(key, fs, dataDir); - // const mkdir = utils.promisify(efs.mkdir).bind(efs); - // const writeFile = utils.promisify(efs.writeFile).bind(efs); - // await mkdir('dir', { recursive: true }); - // await mkdir('dir/dir2/dir3', { recursive: true }); - // await writeFile('dir/file', 'content'); - // const efs2 = new EncryptedFS(key, fs, dataDir); - // let files: string[] = []; - // for await (const file of vaultsUtils.readdirRecursivelyEFS( - // efs2, - // '', - // false, - // )) { - // files.push(file); - // } - // expect(files.sort()).toStrictEqual(['dir/file'].sort()); - // files = []; - // for await (const file of vaultsUtils.readdirRecursivelyEFS( - // efs2, - // '', - // true, - // )) { - // files.push(file); - // } - // expect(files.sort()).toStrictEqual( - // ['dir', 'dir/dir2', 'dir/dir2/dir3', 'dir/file'].sort(), - // ); - // }); - test.skip('can search for a vault name', async () => { - // Const vaultList = ['a\tb', 'b\ta', '', 'c\tc', 'e\tf']; - fail(); - // FIXME secret methods not implemented. 
-    // expect(vaultsUtils.searchVaultName(vaultList, 'b' as VaultId)).toEqual('a');
-    // expect(vaultsUtils.searchVaultName(vaultList, 'a' as VaultId)).toEqual('b');
-    // expect(vaultsUtils.searchVaultName(vaultList, 'c' as VaultId)).toEqual('c');
-    // expect(vaultsUtils.searchVaultName(vaultList, 'f' as VaultId)).toEqual('e');
-    // expect(() =>
-    //   vaultsUtils.searchVaultName(vaultList, 'd' as VaultId),
-    // ).toThrow(vaultsErrors.ErrorRemoteVaultUndefined);
+  test('fs can be read recursively', async () => {
+    await fs.promises.mkdir(path.join(dataDir, 'dir'), { recursive: true });
+    await fs.promises.mkdir(path.join(dataDir, 'dir', 'dir2', 'dir3'), {
+      recursive: true,
+    });
+    const filePath1 = path.join(dataDir, 'dir', 'file');
+    await fs.promises.writeFile(filePath1, 'content');
+    let files: string[] = [];
+    for await (const file of vaultsUtils.readdirRecursively(fs, dataDir)) {
+      files.push(file);
+    }
+    expect(files).toStrictEqual([filePath1]);
+    files = [];
+    const filePath2 = path.join(dataDir, 'dir', 'dir2', 'dir3', 'file');
+    await fs.promises.writeFile(filePath2, 'content');
+    for await (const file of vaultsUtils.readdirRecursively(fs, dataDir)) {
+      files.push(file);
+    }
+    expect(files.sort()).toStrictEqual([filePath1, filePath2].sort());
   });
   test('makeVaultId converts a buffer', async () => {
     const randomIdGen = new IdRandom();
     Buffer.from(randomIdGen.get());
   });
 });
-
-// Test('vaultIds are alphanumeric', async () => {
-//   const id1 = utils.generateVaultId('abc');
-//
-//   expect(isAlphaNumeric(id1)).toBe(true);
-// });
-//
-// function isAlphaNumeric(str) {
-//   let code, i, len;
-//
-//   for (i = 0, len = str.length; i < len; i++) {
-//     code = str.charCodeAt(i);
-//     if (
-//       !(code > 47 && code < 58) && // numeric (0-9)
-//       !(code > 64 && code < 91) && // upper alpha (A-Z)
-//       !(code > 96 && code < 123)
-//     ) {
-//       // lower alpha (a-z)
-//       return false;
-//     }
-//   }
-//   return true;
-// }

From 202c444b08850a1b3a3720be11c6f61d2f3f069b Mon Sep 17 00:00:00 2001
From: Brian Botha
Date: Wed, 23 Feb 2022 16:01:41 +1100
Subject: [PATCH 02/10] Fixes #342 - added `connectionInfoGetter` utility

---
 src/PolykeyAgent.ts                 |   1 +
 src/agent/errors.ts                 |   8 +-
 src/agent/index.ts                  |   2 +
 src/agent/service/echo.ts           |   4 +-
 src/agent/service/index.ts          |   5 +
 src/agent/types.ts                  |   8 ++
 src/agent/utils.ts                  |  18 ++++
 tests/agent/GRPCClientAgent.test.ts | 139 ++++++++++++++++++++++++++--
 tests/agent/utils.ts                |  16 +++-
 tests/nodes/NodeConnection.test.ts  |   1 +
 10 files changed, 188 insertions(+), 14 deletions(-)
 create mode 100644 src/agent/types.ts
 create mode 100644 src/agent/utils.ts

diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts
index f3e4e347d..b22c2e795 100644
--- a/src/PolykeyAgent.ts
+++ b/src/PolykeyAgent.ts
@@ -564,6 +564,7 @@ class PolykeyAgent {
       notificationsManager: this.notificationsManager,
       acl: this.acl,
       gestaltGraph: this.gestaltGraph,
+      revProxy: this.revProxy,
     });
     const clientService = createClientService({
       pkAgent: this,
diff --git a/src/agent/errors.ts b/src/agent/errors.ts
index b92f3b306..e4db4293c 100644
--- a/src/agent/errors.ts
+++ b/src/agent/errors.ts
@@ -1,4 +1,4 @@
-import { ErrorPolykey } from '../errors';
+import { ErrorPolykey, sysexits } from '../errors';
 
 class ErrorAgent extends ErrorPolykey {}
 
@@ -8,9 +8,15 @@ class ErrorAgentClientNotStarted extends ErrorAgent {}
 
 class ErrorAgentClientDestroyed extends ErrorAgent {}
 
+class ErrorConnectionInfoMissing extends ErrorAgent {
+  description = 'Connection info was not available for this connection';
+  exitCode = sysexits.UNAVAILABLE;
+}
+
 export {
   ErrorAgent,
ErrorAgentClientNotStarted, ErrorAgentRunning, ErrorAgentClientDestroyed, + ErrorConnectionInfoMissing, }; diff --git a/src/agent/index.ts b/src/agent/index.ts index f45d230fe..4e55eb824 100644 --- a/src/agent/index.ts +++ b/src/agent/index.ts @@ -1,3 +1,5 @@ export { default as createAgentService, AgentServiceService } from './service'; export { default as GRPCClientAgent } from './GRPCClientAgent'; export * as errors from './errors'; +export * as types from './types'; +export * as utils from './utils'; diff --git a/src/agent/service/echo.ts b/src/agent/service/echo.ts index 45b8f0279..b99923bbb 100644 --- a/src/agent/service/echo.ts +++ b/src/agent/service/echo.ts @@ -1,11 +1,13 @@ import type * as grpc from '@grpc/grpc-js'; +import type { ConnectionInfoGet } from 'agent/types'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function echo(_) { +function echo({ connectionInfoGet }: { connectionInfoGet: ConnectionInfoGet }) { return async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, ): Promise => { + connectionInfoGet(call); const response = new utilsPB.EchoMessage(); response.setChallenge(call.request.getChallenge()); callback(null, response); diff --git a/src/agent/service/index.ts b/src/agent/service/index.ts index 75bb6ee58..d7427d601 100644 --- a/src/agent/service/index.ts +++ b/src/agent/service/index.ts @@ -10,6 +10,7 @@ import type { Sigchain } from '../../sigchain'; import type { ACL } from '../../acl'; import type { GestaltGraph } from '../../gestalts'; import type { IAgentServiceServer } from '../../proto/js/polykey/v1/agent_service_grpc_pb'; +import type ReverseProxy from '../../network/ReverseProxy'; import echo from './echo'; import nodesChainDataGet from './nodesChainDataGet'; import nodesClaimsGet from './nodesClaimsGet'; @@ -21,6 +22,7 @@ import vaultsGitInfoGet from './vaultsGitInfoGet'; import vaultsGitPackGet from './vaultsGitPackGet'; import vaultsScan from './vaultsScan'; import { AgentServiceService } from '../../proto/js/polykey/v1/agent_service_grpc_pb'; +import * as agentUtils from '../utils'; function createService(container: { keyManager: KeyManager; @@ -32,9 +34,12 @@ function createService(container: { sigchain: Sigchain; acl: ACL; gestaltGraph: GestaltGraph; + revProxy: ReverseProxy; }): IAgentServiceServer { + const connectionInfoGet = agentUtils.connectionInfoGetter(container.revProxy); const container_ = { ...container, + connectionInfoGet: connectionInfoGet, }; const service: IAgentServiceServer = { echo: echo(container_), diff --git a/src/agent/types.ts b/src/agent/types.ts new file mode 100644 index 000000000..ced17bbf1 --- /dev/null +++ b/src/agent/types.ts @@ -0,0 +1,8 @@ +import type { ConnectionInfo } from 'network/types'; +import type { ServerSurfaceCall } from '@grpc/grpc-js/build/src/server-call'; + +type ConnectionInfoGet = ( + call: ServerSurfaceCall, +) => ConnectionInfo | undefined; + +export type { ConnectionInfoGet }; diff --git a/src/agent/utils.ts b/src/agent/utils.ts new file mode 100644 index 000000000..6d6b6fdd2 --- /dev/null +++ b/src/agent/utils.ts @@ -0,0 +1,18 @@ +import type { Host, Port } from 'network/types'; +import type ReverseProxy from 'network/ReverseProxy'; +import type { ConnectionInfoGet } from './types'; +import type { ServerSurfaceCall } from '@grpc/grpc-js/build/src/server-call'; + +function connectionInfoGetter(revProxy: ReverseProxy): ConnectionInfoGet { + return (call: ServerSurfaceCall) => { + let urlString = call.getPeer(); + if (!/^.*:\/\//.test(urlString)) urlString = 
'pk://' + urlString; + const url = new URL(urlString); + return revProxy.getConnectionInfoByProxy( + url.hostname as Host, + parseInt(url.port) as Port, + ); + }; +} + +export { connectionInfoGetter }; diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index 1a76ebab9..e1b4d06d9 100644 --- a/tests/agent/GRPCClientAgent.test.ts +++ b/tests/agent/GRPCClientAgent.test.ts @@ -1,5 +1,6 @@ -import type { TLSConfig } from '@/network/types'; +import type { Host, Port, TLSConfig } from '@/network/types'; import type * as grpc from '@grpc/grpc-js'; +import type { NodeId } from '@/nodes/types'; import fs from 'fs'; import path from 'path'; import os from 'os'; @@ -21,26 +22,20 @@ import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import * as agentErrors from '@/agent/errors'; import * as keysUtils from '@/keys/utils'; import * as testAgentUtils from './utils'; -import * as testUtils from '../utils'; describe(GRPCClientAgent.name, () => { + const host = '127.0.0.1' as Host; const password = 'password'; const logger = new Logger(`${GRPCClientAgent.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); - let mockedGenerateKeyPair: jest.SpyInstance; let mockedGenerateDeterministicKeyPair: jest.SpyInstance; beforeAll(async () => { - const globalKeyPair = await testUtils.setupGlobalKeypair(); - mockedGenerateKeyPair = jest - .spyOn(keysUtils, 'generateKeyPair') - .mockResolvedValue(globalKeyPair); mockedGenerateDeterministicKeyPair = jest .spyOn(keysUtils, 'generateDeterministicKeyPair') - .mockResolvedValue(globalKeyPair); + .mockImplementation((bits, _) => keysUtils.generateKeyPair(bits)); }); afterAll(async () => { - mockedGenerateKeyPair.mockRestore(); mockedGenerateDeterministicKeyPair.mockRestore(); }); let client: GRPCClientAgent; @@ -85,6 +80,8 @@ describe(GRPCClientAgent.name, () => { }); await fwdProxy.start({ tlsConfig, + egressHost: host, + proxyHost: host, }); revProxy = new ReverseProxy({ logger: logger, @@ -168,12 +165,20 @@ describe(GRPCClientAgent.name, () => { notificationsManager, acl, gestaltGraph, + revProxy, + }); + await revProxy.start({ + ingressHost: host, + serverHost: host, + serverPort: port as Port, + tlsConfig: tlsConfig, }); client = await testAgentUtils.openTestAgentClient(port); }, global.defaultTimeout); afterEach(async () => { await testAgentUtils.closeTestAgentClient(client); await testAgentUtils.closeTestAgentServer(server); + await revProxy.stop(); await vaultManager.stop(); await notificationsManager.stop(); await sigchain.stop(); @@ -210,4 +215,120 @@ describe(GRPCClientAgent.name, () => { expect(response.getChallenge()).toBe('yes'); expect(client.secured).toBeFalsy(); }); + describe('With connection through proxies', () => { + const logger = new Logger(`${GRPCClientAgent.name} test`, LogLevel.WARN, [ + new StreamHandler(), + ]); + const localHost = '127.0.0.1' as Host; + + let clientWithProxies1: GRPCClientAgent; + let clientFwdProxy1: ForwardProxy; + let clientKeyManager1: KeyManager; + let nodeId1: NodeId; + + let clientWithProxies2: GRPCClientAgent; + let clientFwdProxy2: ForwardProxy; + let clientKeyManager2: KeyManager; + let nodeId2: NodeId; + + beforeEach(async () => { + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + // Setting up clients + clientFwdProxy1 = new ForwardProxy({ + authToken: 'auth', + logger, + }); + clientKeyManager1 = await KeyManager.createKeyManager({ + keysPath: path.join(dataDir, 'clientKeys1'), + password: 'password', + logger, + }); + nodeId1 = 
clientKeyManager1.getNodeId(); + await clientFwdProxy1.start({ + tlsConfig: { + keyPrivatePem: clientKeyManager1.getRootKeyPairPem().privateKey, + certChainPem: await clientKeyManager1.getRootCertChainPem(), + }, + egressHost: localHost, + proxyHost: localHost, + }); + clientWithProxies1 = await GRPCClientAgent.createGRPCClientAgent({ + host: localHost, + nodeId: keyManager.getNodeId(), + port: revProxy.getIngressPort(), + proxyConfig: { + host: clientFwdProxy1.getProxyHost(), + port: clientFwdProxy1.getProxyPort(), + authToken: clientFwdProxy1.authToken, + }, + timeout: 5000, + logger, + }); + + clientFwdProxy2 = new ForwardProxy({ + authToken: 'auth', + logger, + }); + clientKeyManager2 = await KeyManager.createKeyManager({ + keysPath: path.join(dataDir, 'clientKeys2'), + password: 'password', + logger, + }); + nodeId2 = clientKeyManager2.getNodeId(); + await clientFwdProxy2.start({ + tlsConfig: { + keyPrivatePem: clientKeyManager2.getRootKeyPairPem().privateKey, + certChainPem: await clientKeyManager2.getRootCertChainPem(), + }, + egressHost: localHost, + proxyHost: localHost, + }); + clientWithProxies2 = await GRPCClientAgent.createGRPCClientAgent({ + host: '127.0.0.1' as Host, + logger, + nodeId: keyManager.getNodeId(), + port: revProxy.getIngressPort(), + proxyConfig: { + host: clientFwdProxy2.getProxyHost(), + port: clientFwdProxy2.getProxyPort(), + authToken: clientFwdProxy2.authToken, + }, + timeout: 5000, + }); + }, 26000); + afterEach(async () => { + await testAgentUtils.closeTestAgentClient(clientWithProxies1); + await clientFwdProxy1.stop(); + await clientKeyManager1.stop(); + await testAgentUtils.closeTestAgentClient(clientWithProxies2); + await clientFwdProxy2.stop(); + await clientKeyManager2.stop(); + }, 25000); + test('connectionInfoGetter returns correct information for each connection', async () => { + // We can't directly spy on the connectionInfoGetter result + // but we can check that it called `getConnectionInfoByProxy` properly + const getConnectionInfoByProxySpy = jest.spyOn( + ReverseProxy.prototype, + 'getConnectionInfoByProxy', + ); + await clientWithProxies1.echo(new utilsPB.EchoMessage()); + await clientWithProxies2.echo(new utilsPB.EchoMessage()); + // It should've returned the expected information + const returnedInfo1 = getConnectionInfoByProxySpy.mock.results[0].value; + expect(returnedInfo1.ingressPort).toEqual(revProxy.getIngressPort()); + expect(returnedInfo1.ingressHost).toEqual(localHost); + expect(returnedInfo1.egressPort).toEqual(clientFwdProxy1.getEgressPort()); + expect(returnedInfo1.egressHost).toEqual(localHost); + expect(returnedInfo1.nodeId).toStrictEqual(nodeId1); + // Checking second call + const returnedInfo2 = getConnectionInfoByProxySpy.mock.results[1].value; + expect(returnedInfo2.ingressPort).toEqual(revProxy.getIngressPort()); + expect(returnedInfo2.ingressHost).toEqual(localHost); + expect(returnedInfo2.egressPort).toEqual(clientFwdProxy2.getEgressPort()); + expect(returnedInfo2.egressHost).toEqual(localHost); + expect(returnedInfo2.nodeId).toStrictEqual(nodeId2); + }); + }); }); diff --git a/tests/agent/utils.ts b/tests/agent/utils.ts index 7dd702d91..6bfda2465 100644 --- a/tests/agent/utils.ts +++ b/tests/agent/utils.ts @@ -1,4 +1,4 @@ -import type { Host, Port } from '@/network/types'; +import type { Host, Port, ProxyConfig } from '@/network/types'; import type { IAgentServiceServer } from '@/proto/js/polykey/v1/agent_service_grpc_pb'; import type { KeyManager } from '@/keys'; @@ -8,6 +8,8 @@ import type { Sigchain } from 
'@/sigchain'; import type { NotificationsManager } from '@/notifications'; import type { ACL } from '@/acl'; import type { GestaltGraph } from '@/gestalts'; +import type { NodeId } from 'nodes/types'; +import type { ReverseProxy } from 'network/index'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import * as grpc from '@grpc/grpc-js'; import { promisify } from '@/utils'; @@ -28,6 +30,7 @@ async function openTestAgentServer({ notificationsManager, acl, gestaltGraph, + revProxy, }: { keyManager: KeyManager; vaultManager: VaultManager; @@ -38,6 +41,7 @@ async function openTestAgentServer({ notificationsManager: NotificationsManager; acl: ACL; gestaltGraph: GestaltGraph; + revProxy: ReverseProxy; }) { const agentService: IAgentServiceServer = createAgentService({ keyManager, @@ -49,6 +53,7 @@ async function openTestAgentServer({ nodeConnectionManager, acl, gestaltGraph, + revProxy, }); const server = new grpc.Server(); @@ -67,16 +72,21 @@ async function closeTestAgentServer(server) { await tryShutdown(); } -async function openTestAgentClient(port: number): Promise { +async function openTestAgentClient( + port: number, + nodeId?: NodeId, + proxyConfig?: ProxyConfig, +): Promise { const logger = new Logger('AgentClientTest', LogLevel.WARN, [ new StreamHandler(), ]); const agentClient = await GRPCClientAgent.createGRPCClientAgent({ - nodeId: testUtils.generateRandomNodeId(), + nodeId: nodeId ?? testUtils.generateRandomNodeId(), host: '127.0.0.1' as Host, port: port as Port, logger: logger, destroyCallback: async () => {}, + proxyConfig, timeout: 30000, }); return agentClient; diff --git a/tests/nodes/NodeConnection.test.ts b/tests/nodes/NodeConnection.test.ts index c5e06abad..6b9829036 100644 --- a/tests/nodes/NodeConnection.test.ts +++ b/tests/nodes/NodeConnection.test.ts @@ -282,6 +282,7 @@ describe(`${NodeConnection.name} test`, () => { notificationsManager: serverNotificationsManager, acl: serverACL, gestaltGraph: serverGestaltGraph, + revProxy: serverRevProxy, }); agentServer = new GRPCServer({ logger: logger, From 6c1a4ce3e08bb64dafe30e9f156f9eaf43a6aca8 Mon Sep 17 00:00:00 2001 From: Brian Botha Date: Wed, 16 Feb 2022 18:27:46 +1100 Subject: [PATCH 03/10] Fixes #305 --- src/PolykeyAgent.ts | 1 - src/acl/types.ts | 2 +- src/agent/service/vaultsGitInfoGet.ts | 56 +- src/agent/service/vaultsGitPackGet.ts | 48 +- src/agent/service/vaultsScan.ts | 2 +- src/bootstrap/utils.ts | 1 - src/client/service/vaultsClone.ts | 6 +- src/client/service/vaultsShare.ts | 2 +- src/proto/js/polykey/v1/vaults/vaults_pb.d.ts | 6 - src/proto/js/polykey/v1/vaults/vaults_pb.js | 51 - .../schemas/polykey/v1/vaults/vaults.proto | 1 - src/sigchain/Sigchain.ts | 2 +- src/vaults/VaultInternal.ts | 492 +++- src/vaults/VaultManager.ts | 896 +++--- src/vaults/VaultOps.ts | 5 + src/vaults/errors.ts | 19 +- src/vaults/types.ts | 3 +- src/vaults/utils.ts | 183 +- tests/acl/ACL.test.ts | 1 - tests/agent/GRPCClientAgent.test.ts | 1 - tests/bin/vaults/vaults.test.ts | 415 ++- tests/nodes/NodeConnection.test.ts | 1 - tests/notifications/utils.test.ts | 3 +- tests/vaults/VaultInternal.test.ts | 545 +++- tests/vaults/VaultManager.test.ts | 2432 ++++++++++++----- tests/vaults/VaultOps.test.ts | 56 +- tests/vaults/utils.test.ts | 4 - 27 files changed, 3395 insertions(+), 1839 deletions(-) diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts index b22c2e795..82c3402be 100644 --- a/src/PolykeyAgent.ts +++ b/src/PolykeyAgent.ts @@ -327,7 +327,6 @@ class PolykeyAgent { vaultsPath, keyManager, 
nodeConnectionManager, - nodeManager, notificationsManager, gestaltGraph, acl, diff --git a/src/acl/types.ts b/src/acl/types.ts index 92ae07d13..7770edd7d 100644 --- a/src/acl/types.ts +++ b/src/acl/types.ts @@ -8,7 +8,7 @@ type PermissionIdString = Opaque<'PermissionIdString', string>; type Permission = { gestalt: GestaltActions; - vaults: Record; // FIXME: the string union on VaultId is to prevent some false errors. + vaults: Record; }; type GestaltActions = Partial>; diff --git a/src/agent/service/vaultsGitInfoGet.ts b/src/agent/service/vaultsGitInfoGet.ts index f5a9d8f41..c20f74a94 100644 --- a/src/agent/service/vaultsGitInfoGet.ts +++ b/src/agent/service/vaultsGitInfoGet.ts @@ -1,19 +1,23 @@ import type { VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import type { ACL } from '../../acl'; +import type { ConnectionInfoGet } from '../../agent/types'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as validationUtils from '../../validation/utils'; +import * as nodesUtils from '../../nodes/utils'; +import * as agentErrors from '../errors'; function vaultsGitInfoGet({ vaultManager, acl, + connectionInfoGet, }: { vaultManager: VaultManager; acl: ACL; + connectionInfoGet: ConnectionInfoGet; }) { return async ( call: grpc.ServerWritableStream, @@ -25,11 +29,6 @@ function vaultsGitInfoGet({ await genWritable.throw({ code: grpc.status.NOT_FOUND }); return; } - const nodeMessage = request.getNode(); - if (nodeMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } let vaultName; const vaultNameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); @@ -37,30 +36,41 @@ function vaultsGitInfoGet({ if (!vaultId) { try { vaultId = validationUtils.parseVaultId(vaultNameOrId); - vaultName = (await vaultManager.getVaultMeta(vaultId)).name; + vaultName = (await vaultManager.getVaultMeta(vaultId))?.vaultName; } catch (err) { await genWritable.throw(new vaultsErrors.ErrorVaultsVaultUndefined()); return; } } - const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); - const actionType = request.getAction(); - const perms = await acl.getNodePerm(nodeId); - if (!perms) { - await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied()); + // Getting the NodeId from the ReverseProxy connection info + const connectionInfo = connectionInfoGet(call); + // If this is getting run the connection exists + // It SHOULD exist here + if (connectionInfo == null) { + throw new agentErrors.ErrorConnectionInfoMissing(); + } + const nodeId = connectionInfo.nodeId; + const nodeIdEncoded = nodesUtils.encodeNodeId(nodeId); + const actionType = validationUtils.parseVaultAction(request.getAction()); + const permissions = await acl.getNodePerm(nodeId); + if (permissions == null) { + await genWritable.throw( + new vaultsErrors.ErrorVaultsPermissionDenied( + `No permissions found for ${nodeIdEncoded}`, + ), + ); return; } - const vaultPerms = perms.vaults[idUtils.toString(vaultId)]; - try { - if (vaultPerms[actionType] !== null) { - await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied()); - return; - } - } catch (err) { - if (err instanceof TypeError) { - await genWritable.throw(new 
vaultsErrors.ErrorVaultsPermissionDenied());
-        return;
-      }
-    }
+    const vaultPerms = permissions.vaults[vaultId];
+    if (vaultPerms?.[actionType] !== null) {
+      await genWritable.throw(
+        new vaultsErrors.ErrorVaultsPermissionDenied(
+          `${nodeIdEncoded} does not have permission to ${actionType} from vault ${vaultsUtils.encodeVaultId(
+            vaultId,
+          )}`,
+        ),
+      );
+      return;
+    }
     const meta = new grpc.Metadata();
     meta.set('vaultName', vaultName);
diff --git a/src/agent/service/vaultsGitPackGet.ts b/src/agent/service/vaultsGitPackGet.ts
index 72e158fae..061b40e9f 100644
--- a/src/agent/service/vaultsGitPackGet.ts
+++ b/src/agent/service/vaultsGitPackGet.ts
@@ -1,11 +1,24 @@
 import type * as grpc from '@grpc/grpc-js';
 import type { VaultName } from '../../vaults/types';
 import type { VaultManager } from '../../vaults';
+import type { ConnectionInfoGet } from '../../agent/types';
+import type ACL from '../../acl/ACL';
+import * as nodesUtils from '../../nodes/utils';
 import { errors as grpcErrors, utils as grpcUtils } from '../../grpc';
 import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults';
 import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
+import * as validationUtils from '../../validation/utils';
+import * as agentErrors from '../errors';
 
-function vaultsGitPackGet({ vaultManager }: { vaultManager: VaultManager }) {
+function vaultsGitPackGet({
+  vaultManager,
+  acl,
+  connectionInfoGet,
+}: {
+  vaultManager: VaultManager;
+  acl: ACL;
+  connectionInfoGet: ConnectionInfoGet;
+}) {
   return async (
     call: grpc.ServerDuplexStream,
   ) => {
@@ -15,6 +28,16 @@ function vaultsGitPackGet({ vaultManager }: { vaultManager: VaultManager }) {
     clientBodyBuffers.push(clientRequest!.getChunk_asU8());
     const body = Buffer.concat(clientBodyBuffers);
     const meta = call.metadata;
+    // Getting the NodeId from the ReverseProxy connection info
+    const connectionInfo = connectionInfoGet(call);
+    // If this is getting run the connection exists
+    // It SHOULD exist here
+    if (connectionInfo == null) {
+      throw new agentErrors.ErrorConnectionInfoMissing();
+    }
+    const nodeId = connectionInfo.nodeId;
+    const nodeIdEncoded = nodesUtils.encodeNodeId(nodeId);
+    // Getting vaultId
     const vaultNameOrId = meta.get('vaultNameOrId').pop()!.toString();
     if (vaultNameOrId == null) {
       throw new grpcErrors.ErrorGRPC('vault-name not in metadata');
     }
     let vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName);
     vaultId = vaultId ??
vaultsUtils.decodeVaultId(vaultNameOrId); if (vaultId == null) { - await genDuplex.throw(new vaultsErrors.ErrorVaultsVaultUndefined()); + await genDuplex.throw( + // Throwing permission error to hide information about vaults existence + new vaultsErrors.ErrorVaultsPermissionDenied( + `No permissions found for ${nodeIdEncoded}`, + ), + ); + return; + } + // Checking permissions + const permissions = await acl.getNodePerm(nodeId); + const vaultPerms = permissions?.vaults[vaultId]; + const actionType = validationUtils.parseVaultAction( + meta.get('vaultAction').pop(), + ); + if (vaultPerms?.[actionType] !== null) { + await genDuplex.throw( + new vaultsErrors.ErrorVaultsPermissionDenied( + `${nodeIdEncoded} does not have permission to ${actionType} from vault ${vaultsUtils.encodeVaultId( + vaultId, + )}`, + ), + ); return; } const response = new vaultsPB.PackChunk(); diff --git a/src/agent/service/vaultsScan.ts b/src/agent/service/vaultsScan.ts index ee473872d..682863af0 100644 --- a/src/agent/service/vaultsScan.ts +++ b/src/agent/service/vaultsScan.ts @@ -2,7 +2,7 @@ import type * as grpc from '@grpc/grpc-js'; import type { GestaltGraph } from '../../gestalts'; import type { VaultManager } from '../../vaults'; import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; -import * as validationUtils from '@/validation/utils'; +import * as validationUtils from '../../validation/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; import { utils as grpcUtils } from '../../grpc'; diff --git a/src/bootstrap/utils.ts b/src/bootstrap/utils.ts index 73deb176e..fc855bb02 100644 --- a/src/bootstrap/utils.ts +++ b/src/bootstrap/utils.ts @@ -177,7 +177,6 @@ async function bootstrapState({ keyManager, nodeConnectionManager, vaultsPath, - nodeManager, notificationsManager, logger: logger.getChild(VaultManager.name), fresh, diff --git a/src/client/service/vaultsClone.ts b/src/client/service/vaultsClone.ts index c7338b9a2..ebed0b039 100644 --- a/src/client/service/vaultsClone.ts +++ b/src/client/service/vaultsClone.ts @@ -6,7 +6,6 @@ import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as validationUtils from '../../validation/utils'; import * as vaultsUtils from '../../vaults/utils'; -import * as vaultsErrors from '../../vaults/errors'; function vaultsClone({ authenticate, @@ -37,9 +36,8 @@ function vaultsClone({ // Vault id let vaultId; const vaultNameOrId = vaultMessage.getNameOrId(); - vaultId = vaultManager.getVaultId(vaultNameOrId) - vaultId = vaultId ?? vaultsUtils.decodeVaultId(vaultNameOrId); - if (vaultId == null) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultsUtils.decodeVaultId(vaultNameOrId); + vaultId = vaultId ?? 
vaultNameOrId; // Node id const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); await vaultManager.cloneVault(nodeId, vaultId); diff --git a/src/client/service/vaultsShare.ts b/src/client/service/vaultsShare.ts index 54e2ef4bc..9d2bbf85e 100644 --- a/src/client/service/vaultsShare.ts +++ b/src/client/service/vaultsShare.ts @@ -4,7 +4,7 @@ import type { VaultId, VaultName } from '../../vaults/types'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; -import * as validationUtils from '@/validation/utils'; +import * as validationUtils from '../../validation/utils'; import { errors as vaultsErrors } from '../../vaults'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts index 9e1a08b0b..4b0ab311c 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts @@ -409,11 +409,6 @@ export class InfoRequest extends jspb.Message { clearVault(): void; getVault(): Vault | undefined; setVault(value?: Vault): InfoRequest; - - hasNode(): boolean; - clearNode(): void; - getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; - setNode(value?: polykey_v1_nodes_nodes_pb.Node): InfoRequest; getAction(): string; setAction(value: string): InfoRequest; @@ -430,7 +425,6 @@ export class InfoRequest extends jspb.Message { export namespace InfoRequest { export type AsObject = { vault?: Vault.AsObject, - node?: polykey_v1_nodes_nodes_pb.Node.AsObject, action: string, } } diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.js b/src/proto/js/polykey/v1/vaults/vaults_pb.js index 2a78b7d18..aebbcbb33 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.js +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.js @@ -3264,7 +3264,6 @@ proto.polykey.v1.vaults.InfoRequest.prototype.toObject = function(opt_includeIns proto.polykey.v1.vaults.InfoRequest.toObject = function(includeInstance, msg) { var f, obj = { vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f), action: jspb.Message.getFieldWithDefault(msg, 3, "") }; @@ -3307,11 +3306,6 @@ proto.polykey.v1.vaults.InfoRequest.deserializeBinaryFromReader = function(msg, reader.readMessage(value,proto.polykey.v1.vaults.Vault.deserializeBinaryFromReader); msg.setVault(value); break; - case 2: - var value = new polykey_v1_nodes_nodes_pb.Node; - reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); - msg.setNode(value); - break; case 3: var value = /** @type {string} */ (reader.readString()); msg.setAction(value); @@ -3353,14 +3347,6 @@ proto.polykey.v1.vaults.InfoRequest.serializeBinaryToWriter = function(message, proto.polykey.v1.vaults.Vault.serializeBinaryToWriter ); } - f = message.getNode(); - if (f != null) { - writer.writeMessage( - 2, - f, - polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter - ); - } f = message.getAction(); if (f.length > 0) { writer.writeString( @@ -3408,43 +3394,6 @@ proto.polykey.v1.vaults.InfoRequest.prototype.hasVault = function() { }; -/** - * optional polykey.v1.nodes.Node node = 2; - * @return {?proto.polykey.v1.nodes.Node} - */ -proto.polykey.v1.vaults.InfoRequest.prototype.getNode = function() { - return /** 
@type{?proto.polykey.v1.nodes.Node} */ ( - jspb.Message.getWrapperField(this, polykey_v1_nodes_nodes_pb.Node, 2)); -}; - - -/** - * @param {?proto.polykey.v1.nodes.Node|undefined} value - * @return {!proto.polykey.v1.vaults.InfoRequest} returns this -*/ -proto.polykey.v1.vaults.InfoRequest.prototype.setNode = function(value) { - return jspb.Message.setWrapperField(this, 2, value); -}; - - -/** - * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.InfoRequest} returns this - */ -proto.polykey.v1.vaults.InfoRequest.prototype.clearNode = function() { - return this.setNode(undefined); -}; - - -/** - * Returns whether this field is set. - * @return {boolean} - */ -proto.polykey.v1.vaults.InfoRequest.prototype.hasNode = function() { - return jspb.Message.getField(this, 2) != null; -}; - - /** * optional string action = 3; * @return {string} diff --git a/src/proto/schemas/polykey/v1/vaults/vaults.proto b/src/proto/schemas/polykey/v1/vaults/vaults.proto index 478506e1f..0c8ca8143 100644 --- a/src/proto/schemas/polykey/v1/vaults/vaults.proto +++ b/src/proto/schemas/polykey/v1/vaults/vaults.proto @@ -89,7 +89,6 @@ message LogEntry { // Agent specific. message InfoRequest { Vault vault = 1; - polykey.v1.nodes.Node node = 2; string action = 3; } diff --git a/src/sigchain/Sigchain.ts b/src/sigchain/Sigchain.ts index a6d2e6f19..fdbcd2940 100644 --- a/src/sigchain/Sigchain.ts +++ b/src/sigchain/Sigchain.ts @@ -440,7 +440,7 @@ class Sigchain { @ready(new sigchainErrors.ErrorSigchainNotRunning()) public async clearDB() { - this.sigchainDb.clear(); + await this.sigchainDb.clear(); await this._transaction(async () => { await this.db.put( diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts index 45db2d7e3..05d80d26a 100644 --- a/src/vaults/VaultInternal.ts +++ b/src/vaults/VaultInternal.ts @@ -2,27 +2,44 @@ import type { ReadCommitResult } from 'isomorphic-git'; import type { EncryptedFS } from 'encryptedfs'; import type { DB, DBDomain, DBLevel } from '@matrixai/db'; import type { - VaultId, - VaultRef, CommitId, CommitLog, FileSystemReadable, FileSystemWritable, + VaultAction, + VaultId, + VaultIdEncoded, + VaultName, + VaultRef, } from './types'; -import type { KeyManager } from '../keys'; -import type { NodeId } from '../nodes/types'; -import type { ResourceAcquire } from '../utils'; +import type KeyManager from '../keys/KeyManager'; +import type { NodeId, NodeIdEncoded } from '../nodes/types'; +import type NodeConnectionManager from '../nodes/NodeConnectionManager'; +import type { ResourceAcquire } from '../utils/context'; +import type GRPCClientAgent from '../agent/GRPCClientAgent'; +import type { POJO } from '../types'; import path from 'path'; import git from 'isomorphic-git'; -import { Mutex } from 'async-mutex'; +import * as grpc from '@grpc/grpc-js'; import Logger from '@matrixai/logger'; import { CreateDestroyStartStop, ready, } from '@matrixai/async-init/dist/CreateDestroyStartStop'; -import * as vaultsUtils from './utils'; import * as vaultsErrors from './errors'; -import { withF, withG } from '../utils'; +import * as vaultsUtils from './utils'; +import * as nodesUtils from '../nodes/utils'; +import * as validationUtils from '../validation/utils'; +import { withF, withG } from '../utils/context'; +import { RWLock } from '../utils/locks'; +import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; +import { never } from '../utils/utils'; + +// TODO: this might be temp? 
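// The RemoteInfo type exported just below records where a cloned vault came
// from, in encoded string form so the value can be persisted directly into
// the vault's metadata sublevel. A minimal sketch of such a value, with
// hypothetical placeholder identifiers:
//
//   const remote: RemoteInfo = {
//     // Hypothetical NodeIdEncoded of the node the vault was cloned from
//     remoteNode: 'vh9oqa8pr8404j5cft9tkb9p5older1oadgu9hnbm07s8km0tmk50',
//     // Hypothetical VaultIdEncoded of the source vault on that node
//     remoteVault: 'zVUuUsTDdnkGAe2Ed9BWWu6',
//   };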
+export type RemoteInfo = { + remoteNode: NodeIdEncoded; + remoteVault: VaultIdEncoded; +}; interface VaultInternal extends CreateDestroyStartStop {} @CreateDestroyStartStop( @@ -32,22 +49,22 @@ interface VaultInternal extends CreateDestroyStartStop {} class VaultInternal { public static async createVaultInternal({ vaultId, + vaultName, db, vaultsDb, vaultsDbDomain, keyManager, efs, - remote = false, logger = new Logger(this.name), fresh = false, }: { vaultId: VaultId; + vaultName?: VaultName; db: DB; vaultsDb: DBLevel; vaultsDbDomain: DBDomain; keyManager: KeyManager; efs: EncryptedFS; - remote?: boolean; logger?: Logger; fresh?: boolean; }): Promise { @@ -62,34 +79,36 @@ class VaultInternal { efs, logger, }); - await vault.start({ fresh }); + await vault.start({ fresh, vaultName }); logger.info(`Created ${this.name} - ${vaultIdEncoded}`); return vault; } public static async cloneVaultInternal({ + targetNodeId, + targetVaultNameOrId, vaultId, db, vaultsDb, vaultsDbDomain, keyManager, + nodeConnectionManager, efs, logger = new Logger(this.name), }: { + targetNodeId: NodeId; + targetVaultNameOrId: VaultId | VaultName; vaultId: VaultId; db: DB; vaultsDb: DBLevel; vaultsDbDomain: DBDomain; efs: EncryptedFS; keyManager: KeyManager; - remote?: boolean; + nodeConnectionManager: NodeConnectionManager; logger?: Logger; }): Promise { const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); logger.info(`Cloning ${this.name} - ${vaultIdEncoded}`); - // TODO: - // Perform the cloning operation to preseed state - // and also seed the remote state const vault = new VaultInternal({ vaultId, db, @@ -99,11 +118,63 @@ class VaultInternal { efs, logger, }); - await vault.start(); + // This error flag will contain the error returned by the cloning grpc stream + let error; + // Make the directory where the .git files will be auto generated and + // where the contents will be cloned to ('contents' file) + await efs.mkdir(vault.vaultDataDir, { recursive: true }); + let vaultName: VaultName; + let remoteVaultId: VaultId; + let remote: RemoteInfo; + try { + [vaultName, remoteVaultId] = await nodeConnectionManager.withConnF( + targetNodeId, + async (connection) => { + const client = connection.getClient(); + const [request, vaultName, remoteVaultId] = await vault.request( + client, + targetVaultNameOrId, + 'clone', + ); + await git.clone({ + fs: efs, + http: { request }, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + url: 'http://', + singleBranch: true, + }); + return [vaultName, remoteVaultId]; + }, + ); + remote = { + remoteNode: nodesUtils.encodeNodeId(targetNodeId), + remoteVault: vaultsUtils.encodeVaultId(remoteVaultId), + }; + } catch (e) { + // If the error flag set and we have the generalised SmartHttpError from + // isomorphic git then we need to throw the polykey error + if (e instanceof git.Errors.SmartHttpError && error) { + throw error; + } + throw e; + } + + await vault.start({ vaultName }); + // Setting the remote in the metadata + await vault.db.put( + vault.vaultMetadataDbDomain, + VaultInternal.remoteKey, + remote, + ); logger.info(`Cloned ${this.name} - ${vaultIdEncoded}`); return vault; } + static dirtyKey = 'dirty'; + static remoteKey = 'remote'; + static nameKey = 'key'; + public readonly vaultId: VaultId; public readonly vaultIdEncoded: string; public readonly vaultDataDir: string; @@ -113,17 +184,22 @@ class VaultInternal { protected db: DB; protected vaultsDbDomain: DBDomain; protected vaultsDb: DBLevel; - protected vaultDbDomain: DBDomain; - protected vaultDb: DBLevel; + 
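// The fields below replace the old per-vault vaultDb handles with an explicit
// per-vault metadata sublevel. Using the static keys defined above (dirtyKey,
// remoteKey, nameKey) and the domains set up in start(), the resulting
// keyspace is roughly the following sketch (the tree layout is illustrative;
// the names and keys are taken from this diff):
//
//   VaultManager/               <- vaultsDbDomain
//     names/                    <- VaultId -> VaultName mappings
//     <vaultIdEncoded>/         <- vaultMetadataDbDomain for one vault
//       'dirty'  -> boolean     (mutation in progress)
//       'remote' -> RemoteInfo  (present only for cloned vaults)
//       'key'    -> VaultName   (this vault's name)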
protected vaultMetadataDbDomain: DBDomain; + protected vaultMetadataDb: DBLevel; protected keyManager: KeyManager; + protected vaultsNamesDomain: DBDomain; protected efs: EncryptedFS; protected efsVault: EncryptedFS; - protected remote: boolean; - protected _lock: Mutex = new Mutex(); + protected lock: RWLock = new RWLock(); - public lock: ResourceAcquire = async () => { - const release = await this._lock.acquire(); - return [async () => release(), this._lock]; + public readLock: ResourceAcquire = async () => { + const release = await this.lock.acquireRead(); + return [async () => release()]; + }; + + public writeLock: ResourceAcquire = async () => { + const release = await this.lock.acquireWrite(); + return [async () => release()]; }; constructor({ @@ -156,18 +232,31 @@ class VaultInternal { this.efs = efs; } + /** + * + * @param fresh Clears all state before starting + * @param vaultName Name of the vault, Only used when creating a new vault + */ public async start({ fresh = false, + vaultName, }: { fresh?: boolean; + vaultName?: VaultName; } = {}): Promise { this.logger.info( `Starting ${this.constructor.name} - ${this.vaultIdEncoded}`, ); - const vaultDbDomain = [...this.vaultsDbDomain, this.vaultIdEncoded]; - const vaultDb = await this.db.level(this.vaultIdEncoded, this.vaultsDb); + this.vaultMetadataDbDomain = [...this.vaultsDbDomain, this.vaultIdEncoded]; + this.vaultsNamesDomain = [...this.vaultsDbDomain, 'names']; + this.vaultMetadataDb = await this.db.level( + this.vaultIdEncoded, + this.vaultsDb, + ); + // Let's backup any metadata. + if (fresh) { - await vaultDb.clear(); + await this.vaultMetadataDb.clear(); try { await this.efs.rmdir(this.vaultIdEncoded, { recursive: true, @@ -178,20 +267,27 @@ class VaultInternal { } } } - await this.efs.mkdir(this.vaultIdEncoded, { recursive: true }); - await this.efs.mkdir(this.vaultDataDir, { recursive: true }); - await this.efs.mkdir(this.vaultGitDir, { recursive: true }); - await this.setupMeta(); + await this.mkdirExists(this.vaultIdEncoded); + await this.mkdirExists(this.vaultDataDir); + await this.mkdirExists(this.vaultGitDir); + await this.setupMeta({ vaultName }); await this.setupGit(); - const efsVault = await this.efs.chroot(this.vaultDataDir); - this.vaultDbDomain = vaultDbDomain; - this.vaultDb = vaultDb; - this.efsVault = efsVault; + this.efsVault = await this.efs.chroot(this.vaultDataDir); this.logger.info( `Started ${this.constructor.name} - ${this.vaultIdEncoded}`, ); } + private async mkdirExists(directory: string) { + try { + await this.efs.mkdir(directory, { recursive: true }); + } catch (e) { + if (e.code !== 'EEXIST') { + throw e; + } + } + } + public async stop(): Promise { this.logger.info( `Stopping ${this.constructor.name} - ${this.vaultIdEncoded}`, @@ -207,25 +303,19 @@ class VaultInternal { ); const vaultDb = await this.db.level(this.vaultIdEncoded, this.vaultsDb); await vaultDb.clear(); - await this.efs.rmdir(this.vaultIdEncoded, { - recursive: true, - }); + try { + await this.efs.rmdir(this.vaultIdEncoded, { + recursive: true, + }); + } catch (e) { + if (e.code !== 'ENOENT') throw e; + // Otherwise ignore + } this.logger.info( `Destroyed ${this.constructor.name} - ${this.vaultIdEncoded}`, ); } - // Is remote? 
- // well we don't just get remote - // we keep track of it - public async getRemote(): Promise<[NodeId, VaultId]> { - // Get the remote if exists - // if undefined you consider this to be not remote - // and therefore can proceed - // return Promise of [NodeId, VaultId] - throw Error('Not implemented'); - } - @ready(new vaultsErrors.ErrorVaultNotRunning()) public async log( ref: string | VaultRef = 'HEAD', @@ -291,7 +381,7 @@ class VaultInternal { @ready(new vaultsErrors.ErrorVaultNotRunning()) public async readF(f: (fs: FileSystemReadable) => Promise): Promise { - return withF([this.lock], async () => { + return withF([this.readLock], async () => { return await f(this.efsVault); }); } @@ -300,8 +390,9 @@ class VaultInternal { public readG( g: (fs: FileSystemReadable) => AsyncGenerator, ): AsyncGenerator { - return withG([this.lock], async function* () { - return yield* g(this.efsVault); + const efsVault = this.efsVault; + return withG([this.readLock], async function* () { + return yield* g(efsVault); }); } @@ -309,29 +400,40 @@ class VaultInternal { public async writeF( f: (fs: FileSystemWritable) => Promise, ): Promise { - return withF([this.lock], async () => { - await this.db.put(this.vaultsDbDomain, 'dirty', true); - // This should really be an internal property - // get whether this is remote, and the remote address - // if it is, we consider this repo an "attached repo" - // this vault is a "mirrored" vault - if (this.remote) { - // Mirrored vaults are immutable - throw new vaultsErrors.ErrorVaultImmutable(); - } + // This should really be an internal property + // get whether this is remote, and the remote address + // if it is, we consider this repo an "attached repo" + // this vault is a "mirrored" vault + if ( + (await this.db.get( + this.vaultMetadataDbDomain, + VaultInternal.remoteKey, + )) != null + ) { + // Mirrored vaults are immutable + throw new vaultsErrors.ErrorVaultRemoteDefined(); + } + return withF([this.writeLock], async () => { + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + true, + ); // We have to chroot it // and then remove it - // but this is done byitself? - + // but this is done by itself? 
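// The dirty flag written above brackets the mutation below: 'dirty' goes true
// before the user-supplied f() runs against the vault's EncryptedFS and is
// cleared only after it returns, so a crash mid-write leaves 'dirty' === true
// for a later start() to detect. A minimal sketch of the protocol (names from
// this diff; any crash-recovery action is an assumption, not shown here):
//
//   await db.put(vaultMetadataDbDomain, VaultInternal.dirtyKey, true);
//   await f(efsVault); // arbitrary writes to the vault contents
//   await db.put(vaultMetadataDbDomain, VaultInternal.dirtyKey, false);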
await f(this.efsVault); - - await this.db.put(this.vaultsDbDomain, 'dirty', false); + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + false, + ); }); // Const message: string[] = []; // try { - + // // // If the version of the vault has been changed, checkout the working // // directory to this point in history and discard any unlinked commits // await git.checkout({ @@ -480,40 +582,172 @@ class VaultInternal { public writeG( g: (fs: FileSystemWritable) => AsyncGenerator, ): AsyncGenerator { - return withG([this.lock], async function* () { - const result = yield* g(this.efsVault); - // At the end of the geneartor + const efsVault = this.efsVault; + const db = this.db; + const vaultDbDomain = this.vaultMetadataDbDomain; + return withG([this.writeLock], async function* () { + if ((await db.get(vaultDbDomain, VaultInternal.remoteKey)) != null) { + // Mirrored vaults are immutable + throw new vaultsErrors.ErrorVaultRemoteDefined(); + } + await db.put(vaultDbDomain, VaultInternal.dirtyKey, true); + const result = yield* g(efsVault); + // At the end of the generator // you need to do this // but just before // you need to finish it up // DO what you need to do here, create the commit + await db.put(vaultDbDomain, VaultInternal.dirtyKey, false); return result; }); } + // TODO: this needs to respect the write lock since we are writing to the EFS + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public async pullVault({ + nodeConnectionManager, + pullNodeId, + pullVaultNameOrId, + }: { + nodeConnectionManager: NodeConnectionManager; + pullNodeId?: NodeId; + pullVaultNameOrId?: VaultId | VaultName; + }) { + // This error flag will contain the error returned by the cloning grpc stream + let error; + // Keeps track of whether the metadata needs changing to avoid unnecessary db ops + // 0 = no change, 1 = change with vault Id, 2 = change with vault name + let metaChange = 0; + const remoteInfo = await this.db.get( + this.vaultMetadataDbDomain, + VaultInternal.remoteKey, + ); + if (remoteInfo == null) throw new vaultsErrors.ErrorVaultRemoteUndefined(); + + if (pullNodeId == null) { + pullNodeId = nodesUtils.decodeNodeId(remoteInfo.remoteNode)!; + } else { + metaChange = 1; + remoteInfo.remoteNode = nodesUtils.encodeNodeId(pullNodeId); + } + if (pullVaultNameOrId == null) { + pullVaultNameOrId = vaultsUtils.decodeVaultId(remoteInfo.remoteVault!)!; + } else { + metaChange = 1; + if (typeof pullVaultNameOrId === 'string') { + metaChange = 2; + } else { + remoteInfo.remoteVault = vaultsUtils.encodeVaultId(pullVaultNameOrId); + } + } + this.logger.info( + `Pulling Vault ${vaultsUtils.encodeVaultId( + this.vaultId, + )} from Node ${pullNodeId}`, + ); + let remoteVaultId: VaultId; + try { + remoteVaultId = await nodeConnectionManager.withConnF( + pullNodeId!, + async (connection) => { + const client = connection.getClient(); + const [request, , remoteVaultId] = await this.request( + client, + pullVaultNameOrId!, + 'pull', + ); + await withF([this.writeLock], async () => { + await git.pull({ + fs: this.efs, + http: { request }, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + url: `http://`, + ref: 'HEAD', + singleBranch: true, + author: { + name: nodesUtils.encodeNodeId(pullNodeId!), + }, + }); + }); + return remoteVaultId; + }, + ); + } catch (err) { + // If the error flag set and we have the generalised SmartHttpError from + // isomorphic git then we need to throw the polykey error + if (err instanceof git.Errors.SmartHttpError && error) { + throw error; + } else if (err 
instanceof git.Errors.MergeNotSupportedError) { + throw new vaultsErrors.ErrorVaultsMergeConflict(); + } + throw err; + } + if (metaChange !== 0) { + if (metaChange === 2) { + remoteInfo.remoteVault = vaultsUtils.encodeVaultId(remoteVaultId); + } + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.remoteKey, + remoteInfo, + ); + } + this.logger.info( + `Pulled Vault ${vaultsUtils.encodeVaultId( + this.vaultId, + )} from Node ${pullNodeId}`, + ); + } + /** * Setup the vault metadata */ - protected async setupMeta(): Promise { + protected async setupMeta({ + vaultName, + }: { + vaultName?: VaultName; + }): Promise { // Setup the vault metadata - // setup metadata // and you need to make certain preparations // the meta gets created first // if the SoT is the database - // are we suposed to check this? - - if ((await this.db.get(this.vaultDbDomain, 'remote')) == null) { - await this.db.put(this.vaultDbDomain, 'remote', true); - } + // are we supposed to check this? // If this is not existing // setup default vaults db - await this.db.get(this.vaultsDbDomain, 'dirty'); + if ( + (await this.db.get( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + )) == null + ) { + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + true, + ); + } + + // Set up vault Name + if ( + (await this.db.get( + this.vaultMetadataDbDomain, + VaultInternal.nameKey, + )) == null && + vaultName != null + ) { + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.nameKey, + vaultName, + ); + } // Remote: [NodeId, VaultId] | undefined // dirty: boolean - // name: string + // name: string | undefined } /** @@ -554,7 +788,17 @@ class VaultInternal { gitdir: this.vaultGitDir, author: vaultsUtils.commitAuthor(this.keyManager.getNodeId()), message: 'Initial Commit', + ref: 'HEAD', })) as CommitId; + // Update master ref + await git.writeRef({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranchRef, + value: commitIdLatest, + force: true, + }); } else { // Force checkout out to the latest commit // This ensures that any uncommitted state is dropped @@ -568,6 +812,98 @@ class VaultInternal { } return commitIdLatest; } + + protected async request( + client: GRPCClientAgent, + vaultNameOrId: VaultId | VaultName, + vaultAction: VaultAction, + ): Promise { + const requestMessage = new vaultsPB.InfoRequest(); + const vaultMessage = new vaultsPB.Vault(); + requestMessage.setAction(vaultAction); + if (typeof vaultNameOrId === 'string') { + vaultMessage.setNameOrId(vaultNameOrId); + } else { + // To have consistency between GET and POST, send the user + // readable form of the vault Id + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultNameOrId)); + } + requestMessage.setVault(vaultMessage); + const response = client.vaultsGitInfoGet(requestMessage); + let vaultName, remoteVaultId; + response.stream.on('metadata', async (meta) => { + // Receive the Id of the remote vault + vaultName = meta.get('vaultName').pop(); + if (vaultName) vaultName = vaultName.toString(); + const vId = meta.get('vaultId').pop(); + if (vId) remoteVaultId = validationUtils.parseVaultId(vId.toString()); + }); + // Collect the response buffers from the GET request + const infoResponse: Uint8Array[] = []; + for await (const resp of response) { + infoResponse.push(resp.getChunk_asU8()); + } + const metadata = new grpc.Metadata(); + metadata.set('vaultAction', vaultAction); + if (typeof vaultNameOrId === 'string') { + metadata.set('vaultNameOrId', 
vaultNameOrId); + } else { + // Metadata only accepts the user readable form of the vault Id + // as the string form has illegal characters + metadata.set('vaultNameOrId', vaultsUtils.encodeVaultId(vaultNameOrId)); + } + return [ + async function ({ + url, + method = 'GET', + headers = {}, + body = [Buffer.from('')], + }: { + url: string; + method: string; + headers: POJO; + body: Buffer[]; + }) { + if (method === 'GET') { + // Send back the GET request info response + return { + url: url, + method: method, + body: infoResponse, + headers: headers, + statusCode: 200, + statusMessage: 'OK', + }; + } else if (method === 'POST') { + const responseBuffers: Array = []; + const stream = client.vaultsGitPackGet(metadata); + const chunk = new vaultsPB.PackChunk(); + // Body is usually an async generator but in the cases we are using, + // only the first value is used + chunk.setChunk(body[0]); + // Tell the server what commit we need + await stream.write(chunk); + let packResponse = (await stream.read()).value; + while (packResponse != null) { + responseBuffers.push(packResponse.getChunk_asU8()); + packResponse = (await stream.read()).value; + } + return { + url: url, + method: method, + body: responseBuffers, + headers: headers, + statusCode: 200, + statusMessage: 'OK', + }; + } else { + never(); + } + }, + vaultName, + remoteVaultId, + ]; + } } export default VaultInternal; diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 596863a13..482e62498 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -1,60 +1,62 @@ -import type { MutexInterface } from 'async-mutex'; import type { DB, DBDomain, DBLevel } from '@matrixai/db'; -import type { VaultId, VaultName, VaultActions } from './types'; +import type { + VaultId, + VaultName, + VaultActions, + VaultIdString, + VaultIdEncoded, +} from './types'; import type { Vault } from './Vault'; - import type { FileSystem } from '../types'; import type { PolykeyWorkerManagerInterface } from '../workers/types'; import type { NodeId } from '../nodes/types'; - -import type { KeyManager } from '../keys'; -import type { NodeConnectionManager, NodeManager } from '../nodes'; -import type { GestaltGraph } from '../gestalts'; -import type { ACL } from '../acl'; -import type { NotificationsManager } from '../notifications'; - +import type KeyManager from '../keys/KeyManager'; +import type NodeConnectionManager from '../nodes/NodeConnectionManager'; +import type GestaltGraph from '../gestalts/GestaltGraph'; +import type NotificationsManager from '../notifications/NotificationsManager'; +import type ACL from '../acl/ACL'; + +import type { RemoteInfo } from './VaultInternal'; +import type { ResourceAcquire } from '../utils/context'; +import type { VaultAction } from './types'; import path from 'path'; import { PassThrough } from 'readable-stream'; -import { Mutex } from 'async-mutex'; -import git from 'isomorphic-git'; -import { EncryptedFS, errors as encryptedfsErrors } from 'encryptedfs'; +import { EncryptedFS, errors as encryptedFsErrors } from 'encryptedfs'; import Logger from '@matrixai/logger'; import { CreateDestroyStartStop, ready, } from '@matrixai/async-init/dist/CreateDestroyStartStop'; -import { IdInternal, utils as idUtils } from '@matrixai/id'; +import { IdInternal } from '@matrixai/id'; import VaultInternal from './VaultInternal'; import * as vaultsUtils from '../vaults/utils'; import * as vaultsErrors from '../vaults/errors'; import * as gitUtils from '../git/utils'; import * as gitErrors from '../git/errors'; 
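+// A rough sketch of the resource-acquisition contract behind the withF/withG
+// helpers imported just below; the real definitions live in src/utils/context
+// and may differ in detail:
+//   type ResourceAcquire<T = void> = () =>
+//     Promise<[release: () => Promise<void>, resource?: T]>;
+//   declare function withF<T>(
+//     acquires: Array<ResourceAcquire<unknown>>,
+//     f: (...resources: Array<unknown>) => Promise<T>,
+//   ): Promise<T>;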
-import { utils as nodesUtils } from '../nodes'; -import { utils as keysUtils } from '../keys'; -import * as validationUtils from '../validation/utils'; +import * as nodesUtils from '../nodes/utils'; +import * as keysUtils from '../keys/utils'; import config from '../config'; -import { mkdirExists } from '../utils'; -import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; +import { mkdirExists } from '../utils/utils'; +import { RWLock } from '../utils/locks'; +import { withF, withG } from '../utils/context'; +import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; /** * Object map pattern for each vault */ type VaultMap = Map< - VaultId, + VaultIdString, { vault?: VaultInternal; - lock: MutexInterface; + lock: RWLock; } >; type VaultList = Map; - -// FIXME: this will be removed when moved into VaultInternal. type VaultMetadata = { - name: VaultName; - workingDirectoryIndex: string; - remoteNode?: NodeId; - remoteVault?: string; + dirty: boolean; + vaultName: VaultName; + remoteInfo?: RemoteInfo; }; interface VaultManager extends CreateDestroyStartStop {} @@ -69,7 +71,6 @@ class VaultManager { acl, keyManager, nodeConnectionManager, - nodeManager, gestaltGraph, notificationsManager, keyBits = 256, @@ -82,7 +83,6 @@ class VaultManager { acl: ACL; keyManager: KeyManager; nodeConnectionManager: NodeConnectionManager; - nodeManager: NodeManager; gestaltGraph: GestaltGraph; notificationsManager: NotificationsManager; keyBits?: 128 | 192 | 256; @@ -98,7 +98,6 @@ class VaultManager { acl, keyManager, nodeConnectionManager, - nodeManager, gestaltGraph, notificationsManager, keyBits, @@ -119,16 +118,15 @@ class VaultManager { protected db: DB; protected acl: ACL; protected keyManager: KeyManager; - // FIXME, add this to create and constructor protected nodeConnectionManager: NodeConnectionManager; - protected nodeManager: NodeManager; protected gestaltGraph: GestaltGraph; protected notificationsManager: NotificationsManager; protected vaultsDbDomain: DBDomain = [this.constructor.name]; protected vaultsDb: DBLevel; + protected vaultsNamesDbDomain: DBDomain = [...this.vaultsDbDomain, 'names']; + protected vaultsNamesDb: DBLevel; + protected vaultsNamesLock: RWLock = new RWLock(); // VaultId -> VaultMetadata - protected vaultsMetaDbDomain: DBDomain = [this.vaultsDbDomain[0], 'meta']; - protected vaultsMetaDb: DBLevel; protected vaultMap: VaultMap = new Map(); protected vaultKey: Buffer; protected efs: EncryptedFS; @@ -139,7 +137,6 @@ class VaultManager { acl, keyManager, nodeConnectionManager, - nodeManager, gestaltGraph, notificationsManager, keyBits, @@ -151,7 +148,6 @@ class VaultManager { acl: ACL; keyManager: KeyManager; nodeConnectionManager: NodeConnectionManager; - nodeManager: NodeManager; gestaltGraph: GestaltGraph; notificationsManager: NotificationsManager; keyBits: 128 | 192 | 256; @@ -165,7 +161,6 @@ class VaultManager { this.acl = acl; this.keyManager = keyManager; this.nodeConnectionManager = nodeConnectionManager; - this.nodeManager = nodeManager; this.gestaltGraph = gestaltGraph; this.notificationsManager = notificationsManager; this.keyBits = keyBits; @@ -180,12 +175,11 @@ class VaultManager { try { this.logger.info(`Starting ${this.constructor.name}`); const vaultsDb = await this.db.level(this.vaultsDbDomain[0]); - const vaultsMetaDb = await this.db.level( - this.vaultsMetaDbDomain[1], - this.vaultsDb, + const vaultsNamesDb = await this.db.level( + this.vaultsNamesDbDomain[1], + vaultsDb, ); if (fresh) { - await vaultsMetaDb.clear(); await vaultsDb.clear(); await 
this.fs.promises.rm(this.vaultsPath, { force: true, @@ -202,7 +196,7 @@ class VaultManager { logger: this.logger.getChild('EncryptedFileSystem'), }); } catch (e) { - if (e instanceof encryptedfsErrors.ErrorEncryptedFSKey) { + if (e instanceof encryptedFsErrors.ErrorEncryptedFSKey) { throw new vaultsErrors.ErrorVaultManagerKey(); } throw new vaultsErrors.ErrorVaultManagerEFS(e.message, { @@ -213,7 +207,7 @@ class VaultManager { }); } this.vaultsDb = vaultsDb; - this.vaultsMetaDb = vaultsMetaDb; + this.vaultsNamesDb = vaultsNamesDb; this.vaultKey = vaultKey; this.efs = efs; this.logger.info(`Started ${this.constructor.name}`); @@ -230,29 +224,14 @@ class VaultManager { // Iterate over vaults in memory and destroy them, ensuring that // the working directory commit state is saved - for (const [vaultId, vaultAndLock] of this.vaultMap) { - // This is locking each vault... before it tries to do this - // but if we are calling stop now - // we will have blocked all the other methods - // so in this sense, it actually waits for all vault locks to be relinquished - // before attempting to do anything here - // now if start stop has their own lock - // this this applies already just be calling stop - // in that it waits for stop to finish - - await this.transact(async () => { - // Think about it, maybe we should use stop instead - // it will be clearer!! - // await vaultAndLock.vault?.stop(); - - await vaultAndLock.vault?.destroy(); - }, [vaultId]); + for (const [vaultIdString, vaultAndLock] of this.vaultMap) { + const vaultId = IdInternal.fromString(vaultIdString); + await withF([this.getWriteLock(vaultId)], async () => { + await vaultAndLock.vault?.stop(); + }); + this.vaultMap.delete(vaultIdString); } - // Need to figure out if this id thing is a good idea - // the id should already be workable as a string - // i forgot if it also works under map - await this.efs.stop(); this.vaultMap = new Map(); this.logger.info(`Stopped ${this.constructor.name}`); @@ -264,7 +243,9 @@ class VaultManager { // If the DB was stopped, the existing sublevel `this.vaultsDb` will not be valid // Therefore we recreate the sublevel here const vaultsDb = await this.db.level(this.vaultsDbDomain[0]); + // Clearing all vaults db data await vaultsDb.clear(); + // Is it necessary to remove the vaults domain? 
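+ // (Clearing the vaults db sublevel above removes the encrypted key/value
+ // state; the recursive rm below then deletes the on-disk files under
+ // `vaultsPath`, presumably including the EncryptedFS state, so `destroy()`
+ // leaves no persisted vault data behind.)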
await this.fs.promises.rm(this.vaultsPath, { force: true, recursive: true, @@ -280,50 +261,29 @@ class VaultManager { this.efs.unsetWorkerManager(); } - // The with locks thing - // can be generalised a bit - // we can address the with locking mechanism in general - // with withF and withG - // this will become our generic of way locking anything - - // REPLACE THE FOLLOWING 3 functions - // replace this transact with our new withF and withG mechanisms - // all we need to do is create `ResourceAcquire` types in this domain - - /** - * By default will not lock anything - */ - public async transact(f: () => Promise, vaults: Array = []) { - // Will lock nothing by default - return await this.withLocks(f, vaults.map(this.getLock.bind(this))); + protected getLock(vaultId: VaultId): RWLock { + const vaultIdString = vaultId.toString() as VaultIdString; + const vaultAndLock = this.vaultMap.get(vaultIdString); + if (vaultAndLock != null) return vaultAndLock.lock; + const lock = new RWLock(); + this.vaultMap.set(vaultIdString, { lock }); + return lock; } - protected async withLocks( - f: () => Promise, - locks: Array = [], - ): Promise { - const releases: Array = []; - for (const lock of locks) { - // Take the lock for each vault in memory and acquire it - releases.push(await lock.acquire()); - } - try { - return await f(); - } finally { - // Release the vault locks in the opposite order - releases.reverse(); - for (const r of releases) { - r(); - } - } + protected getReadLock(vaultId: VaultId): ResourceAcquire { + const lock = this.getLock(vaultId); + return async () => { + const release = await lock.acquireRead(); + return [async () => release()]; + }; } - protected getLock(vaultId: VaultId): MutexInterface { - const vaultAndLock = this.vaultMap.get(vaultId); - if (vaultAndLock != null) return vaultAndLock.lock; - const lock = new Mutex(); - this.vaultMap.set(vaultId, { lock }); - return lock; + protected getWriteLock(vaultId: VaultId): ResourceAcquire { + const lock = this.getLock(vaultId); + return async () => { + const release = await lock.acquireWrite(); + return [async () => release()]; + }; } /** @@ -335,26 +295,45 @@ class VaultManager { @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async createVault(vaultName: VaultName): Promise { + // Adding vault to name map const vaultId = await this.generateVaultId(); - const lock = new Mutex(); - this.vaultMap.set(vaultId, { lock }); - return await this.transact(async () => { - this.logger.info( - `Storing metadata for Vault ${vaultsUtils.encodeVaultId(vaultId)}`, + await this.vaultsNamesLock.withWrite(async () => { + const vaultIdBuffer = await this.db.get( + this.vaultsNamesDbDomain, + vaultName, + true, ); - await this.db.put(this.vaultsMetaDbDomain, idUtils.toBuffer(vaultId), { - name: vaultName, - }); - const vault = await VaultInternal.create({ + // Check if the vault name already exists; + if (vaultIdBuffer != null) { + throw new vaultsErrors.ErrorVaultsVaultDefined(); + } + await this.db.put( + this.vaultsNamesDbDomain, + vaultName, + vaultId.toBuffer(), + true, + ); + }); + const lock = new RWLock(); + const vaultIdString = vaultId.toString() as VaultIdString; + this.vaultMap.set(vaultIdString, { lock }); + return await withF([this.getWriteLock(vaultId)], async () => { + // Creating vault + const vault = await VaultInternal.createVaultInternal({ vaultId, + vaultName, keyManager: this.keyManager, efs: this.efs, logger: this.logger.getChild(VaultInternal.name), + db: this.db, + vaultsDb: this.vaultsDb, + vaultsDbDomain: 
this.vaultsDbDomain, fresh: true, }); - this.vaultMap.set(vaultId, { lock, vault }); + // Adding vault to object map + this.vaultMap.set(vaultIdString, { lock, vault }); return vault.vaultId; - }, [vaultId]); + }); } /** @@ -362,13 +341,33 @@ class VaultManager { * and parses it to return the associated vault name */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async getVaultMeta(vaultId: VaultId): Promise { - const vaultMeta = await this.db.get( - this.vaultsMetaDbDomain, - idUtils.toBuffer(vaultId), + public async getVaultMeta( + vaultId: VaultId, + ): Promise { + // First check if the metadata exists + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + const vaultDbDomain = [...this.vaultsDbDomain, vaultIdEncoded]; + const vaultDb = await this.db.level(vaultIdEncoded, this.vaultsDb); + // Return if metadata has no data + if ((await this.db.count(vaultDb)) === 0) return; + // Obtain the metadata; + const dirty = (await this.db.get( + vaultDbDomain, + VaultInternal.dirtyKey, + ))!; + const vaultName = (await this.db.get( + vaultDbDomain, + VaultInternal.nameKey, + ))!; + const remoteInfo = await this.db.get( + vaultDbDomain, + VaultInternal.remoteKey, ); - if (vaultMeta == null) throw new vaultsErrors.ErrorVaultsVaultUndefined(); - return vaultMeta; + return { + dirty, + vaultName, + remoteInfo, + }; } /** @@ -377,68 +376,54 @@ class VaultManager { */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async destroyVault(vaultId: VaultId) { + const vaultMeta = await this.getVaultMeta(vaultId); + if (vaultMeta == null) return; + const vaultName = vaultMeta.vaultName; this.logger.info(`Destroying Vault ${vaultsUtils.encodeVaultId(vaultId)}`); - await this.transact(async () => { - const vaultMeta = await this.getVaultMeta(vaultId); - if (!vaultMeta) return; - await this.db.del(this.vaultsMetaDbDomain, idUtils.toBuffer(vaultId)); - this.vaultMap.delete(vaultId); - await this.efs.rmdir(vaultsUtils.encodeVaultId(vaultId), { - recursive: true, + const vaultIdString = vaultId.toString() as VaultIdString; + await withF([this.getWriteLock(vaultId)], async () => { + const vault = await this.getVault(vaultId); + // Destroying vault state and metadata + await vault.stop(); + await vault.destroy(); + // Removing from map + this.vaultMap.delete(vaultIdString); + // Removing name->id mapping + await this.vaultsNamesLock.withWrite(async () => { + await this.db.del(this.vaultsNamesDbDomain, vaultName); }); - }, [vaultId]); + }); + this.logger.info(`Destroyed Vault ${vaultsUtils.encodeVaultId(vaultId)}`); } - // /** - // * Constructs or returns the in-memory instance of a vault - // * from metadata using a given vault Id - // */ - // @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - // private async openVault(vaultId: VaultId): Promise { - // const vaultMeta = await this.getVaultMeta(vaultId); - // if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); - // return await this.getVault(vaultId); - // } - /** - * Writes the working directory commit state of a vault Id - * and removes the vault from memory + * Removes vault from the vault map */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async closeVault(vaultId: VaultId) { - const vaultMeta = await this.getVaultMeta(vaultId); - if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); - const vault = await this.getVault(vaultId); - // Updating workingDirectoryIndex in the vault metadata. 
- vaultMeta.workingDirectoryIndex = vault.getworkingDirIndex(); - await this.db.put( - this.vaultsMetaDbDomain, - idUtils.toBuffer(vaultId), - vaultMeta, - ); - await vault.destroy(); - this.vaultMap.delete(vaultId); + if ((await this.getVaultName(vaultId)) == null) { + throw new vaultsErrors.ErrorVaultsVaultUndefined(); + } + const vaultIdString = vaultId.toString() as VaultIdString; + await withF([this.getWriteLock(vaultId)], async () => { + const vault = await this.getVault(vaultId); + await vault.stop(); + this.vaultMap.delete(vaultIdString); + }); } /** * Lists the vault name and associated vault Id of all * the vaults stored */ - // FIXME: this will have to peek into the vaults metadata. - // This will be inside the vaultInternal now. Need to work this out. @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async listVaults(): Promise { const vaults: VaultList = new Map(); - // Stream all the vault Id and associated metadata values - for await (const o of this.vaultsMetaDb.createReadStream({})) { - const dbMeta = (o as any).value; - const dbId = (o as any).key; - // Manually decrypt the vault metadata - const vaultMeta = await this.db.deserializeDecrypt( - dbMeta, - false, - ); - vaults.set(vaultMeta.name, IdInternal.fromBuffer(dbId)); + // Stream of vaultName VaultId key value pairs + for await (const vaultNameBuffer of this.vaultsNamesDb.createKeyStream()) { + const vaultName = vaultNameBuffer.toString() as VaultName; + const vaultId = (await this.getVaultId(vaultName))!; + vaults.set(vaultName, vaultId); } return vaults; } @@ -451,20 +436,35 @@ class VaultManager { vaultId: VaultId, newVaultName: VaultName, ): Promise { - this.logger.info(`Renaming Vault ${vaultsUtils.encodeVaultId(vaultId)}`); - await this.transact(async () => { - const meta = await this.db.get( - this.vaultsMetaDbDomain, - idUtils.toBuffer(vaultId), - ); - if (!meta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); - meta.name = newVaultName; - await this.db.put( - this.vaultsMetaDbDomain, - idUtils.toBuffer(vaultId), - meta, - ); - }, [vaultId]); + await withF([this.getWriteLock(vaultId)], async () => { + this.logger.info(`Renaming Vault ${vaultsUtils.encodeVaultId(vaultId)}`); + // Checking if new name exists + if (await this.getVaultId(newVaultName)) { + throw new vaultsErrors.ErrorVaultsVaultDefined(); + } + // Checking if vault exists + const vaultMetadata = await this.getVaultMeta(vaultId); + if (vaultMetadata == null) { + throw new vaultsErrors.ErrorVaultsVaultUndefined(); + } + const oldVaultName = vaultMetadata.vaultName; + // Updating metadata with new name; + const vaultDbDomain = [ + ...this.vaultsDbDomain, + vaultsUtils.encodeVaultId(vaultId), + ]; + await this.db.put(vaultDbDomain, VaultInternal.nameKey, newVaultName); + // Updating name->id map + await this.vaultsNamesLock.withWrite(async () => { + await this.db.del(this.vaultsNamesDbDomain, oldVaultName); + await this.db.put( + this.vaultsNamesDbDomain, + newVaultName, + vaultId.toBuffer(), + true, + ); + }); + }); } /** @@ -472,24 +472,28 @@ class VaultManager { */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async getVaultId(vaultName: VaultName): Promise { - // Stream all the metadata and associated vault Id values - for await (const o of this.vaultsMetaDb.createReadStream({})) { - const dbMeta = (o as any).value; - const dbId = (o as any).key; - // Manually decrypt the vault metadata - const vaultMeta = await this.db.deserializeDecrypt( - dbMeta, - false, + return await this.vaultsNamesLock.withWrite(async () => 
{ + const vaultIdBuffer = await this.db.get( + this.vaultsNamesDbDomain, + vaultName, + true, ); - // If the name metadata matches the given name, return the associated vault Id - if (vaultName === vaultMeta.name) { - return IdInternal.fromBuffer(dbId); - } - } + if (vaultIdBuffer == null) return; + return IdInternal.fromBuffer(vaultIdBuffer); + }); + } + + /** + * Retrieves the vault name associated with a vault Id + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async getVaultName(vaultId: VaultId): Promise { + const metadata = await this.getVaultMeta(vaultId); + return metadata?.vaultName; } /** - * Returns a dictionary of VaultActions for each node. + * Returns a dictionary of VaultActions for each node * @param vaultId */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) @@ -498,7 +502,7 @@ class VaultManager { ): Promise> { const rawPermissions = await this.acl.getVaultPerm(vaultId); const permissions: Record = {}; - // Getting the relevant information. + // Getting the relevant information for (const nodeId in rawPermissions) { permissions[nodeId] = rawPermissions[nodeId].vaults[vaultId]; } @@ -513,26 +517,20 @@ class VaultManager { public async shareVault(vaultId: VaultId, nodeId: NodeId): Promise { const vaultMeta = await this.getVaultMeta(vaultId); if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); - await this.transact(async () => { - await this.gestaltGraph._transaction(async () => { - await this.acl._transaction(async () => { - // Node Id permissions translated to other nodes in - // a gestalt by other domains - await this.gestaltGraph.setGestaltActionByNode(nodeId, 'scan'); - await this.acl.setVaultAction(vaultId, nodeId, 'pull'); - await this.acl.setVaultAction(vaultId, nodeId, 'clone'); - await this.notificationsManager.sendNotification(nodeId, { - type: 'VaultShare', - vaultId: vaultId.toString(), - vaultName: vaultMeta.name, - actions: { - clone: null, - pull: null, - }, - }); - }); - }); - }, [vaultId]); + // Node Id permissions translated to other nodes in + // a gestalt by other domains + await this.gestaltGraph.setGestaltActionByNode(nodeId, 'scan'); + await this.acl.setVaultAction(vaultId, nodeId, 'pull'); + await this.acl.setVaultAction(vaultId, nodeId, 'clone'); + await this.notificationsManager.sendNotification(nodeId, { + type: 'VaultShare', + vaultId: vaultsUtils.encodeVaultId(vaultId), + vaultName: vaultMeta.vaultName, + actions: { + clone: null, + pull: null, + }, + }); } /** @@ -543,13 +541,9 @@ class VaultManager { public async unshareVault(vaultId: VaultId, nodeId: NodeId): Promise { const vaultMeta = await this.getVaultMeta(vaultId); if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); - return await this.gestaltGraph._transaction(async () => { - return await this.acl._transaction(async () => { - await this.gestaltGraph.unsetGestaltActionByNode(nodeId, 'scan'); - await this.acl.unsetVaultAction(vaultId, nodeId, 'pull'); - await this.acl.unsetVaultAction(vaultId, nodeId, 'clone'); - }); - }); + await this.gestaltGraph.unsetGestaltActionByNode(nodeId, 'scan'); + await this.acl.unsetVaultAction(vaultId, nodeId, 'pull'); + await this.acl.unsetVaultAction(vaultId, nodeId, 'clone'); } /** @@ -561,75 +555,64 @@ class VaultManager { nodeId: NodeId, vaultNameOrId: VaultId | VaultName, ): Promise { - // This error flag will contain the error returned by the cloning grpc stream - let error; - // Let vaultName, remoteVaultId; - const thisNodeId = this.keyManager.getNodeId(); - const nodeConnection = await 
this.nodeManager.getConnectionToNode(nodeId); - const client = nodeConnection.getClient(); const vaultId = await this.generateVaultId(); - const lock = new Mutex(); - this.vaultMap.set(vaultId, { lock }); + const lock = new RWLock(); + const vaultIdString = vaultId.toString() as VaultIdString; + this.vaultMap.set(vaultIdString, { lock }); this.logger.info( `Cloning Vault ${vaultsUtils.encodeVaultId(vaultId)} on Node ${nodeId}`, ); - return await this.transact(async () => { - // Make the directory where the .git files will be auto generated and - // where the contents will be cloned to ('contents' file) - await this.efs.mkdir( - path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), - { recursive: true }, - ); - const [request, vaultName, remoteVaultId] = await vaultsUtils.request( - client, - thisNodeId, - vaultNameOrId, - ); - try { - await git.clone({ - fs: this.efs, - http: { request }, - dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), - gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), - url: 'http://', - singleBranch: true, - }); - } catch (err) { - // If the error flag set and we have the generalised SmartHttpError from - // isomorphic git then we need to throw the polykey error - if (err instanceof git.Errors.SmartHttpError && error) { - throw error; - } - throw err; - } - const workingDirIndex = ( - await git.log({ - fs: this.efs, - dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), - gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), - depth: 1, - }) - ).pop()!; - // Store the node and vault Id to be used as default remote values when pulling - await this.db.put(this.vaultsMetaDbDomain, idUtils.toBuffer(vaultId), { - name: vaultName, - workingDirectoryIndex: workingDirIndex.oid, - remoteNode: nodeId, - remoteVault: remoteVaultId.toString(), - } as VaultMetadata); - const vault = await VaultInternal.create({ + return await withF([this.getWriteLock(vaultId)], async () => { + const vault = await VaultInternal.cloneVaultInternal({ + targetNodeId: nodeId, + targetVaultNameOrId: vaultNameOrId, vaultId, + db: this.db, + nodeConnectionManager: this.nodeConnectionManager, + vaultsDb: this.vaultsDb, + vaultsDbDomain: this.vaultsDbDomain, keyManager: this.keyManager, efs: this.efs, logger: this.logger.getChild(VaultInternal.name), - remote: true, }); - this.vaultMap.set(vaultId, { lock, vault }); + this.vaultMap.set(vaultIdString, { lock, vault }); + const vaultMetadata = (await this.getVaultMeta(vaultId))!; + const baseVaultName = vaultMetadata.vaultName; + // Need to check if the name is taken, 50 attempts + let newVaultName = baseVaultName; + let attempts = 1; + while (true) { + const existingVaultId = await this.db.get( + this.vaultsNamesDbDomain, + newVaultName, + ); + if (existingVaultId == null) break; + newVaultName = `${baseVaultName}-${attempts}`; + if (attempts >= 50) { + throw new vaultsErrors.ErrorVaultsNameConflict( + `Too many copies of ${baseVaultName}`, + ); + } + attempts++; + } + // Set the vaultName -> vaultId mapping + await this.db.put( + this.vaultsNamesDbDomain, + newVaultName, + vaultId.toBuffer(), + true, + ); + // Update vault metadata + await this.db.put( + [...this.vaultsDbDomain, vaultsUtils.encodeVaultId(vaultId)], + VaultInternal.nameKey, + newVaultName, + ); this.logger.info( `Cloned Vault ${vaultsUtils.encodeVaultId(vaultId)} on Node ${nodeId}`, ); return vault.vaultId; - }, [vaultId]); + }); } /** @@ -644,93 +627,16 @@ class VaultManager { vaultId: VaultId; pullNodeId?: NodeId; pullVaultNameOrId?: 
VaultId | VaultName; - }): Promise { - return await this.transact(async () => { - // This error flag will contain the error returned by the cloning grpc stream - let error; - // Keeps track of whether the metadata needs changing to avoid unnecessary db ops - // 0 = no change, 1 = change with vault Id, 2 = change with vault name - let metaChange = 0; - const thisNodeId = this.keyManager.getNodeId(); - const vaultMeta = await this.db.get( - this.vaultsMetaDbDomain, - idUtils.toBuffer(vaultId), - ); - if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUnlinked(); - if (pullNodeId == null) { - pullNodeId = vaultMeta.remoteNode; - } else { - metaChange = 1; - vaultMeta.remoteNode = pullNodeId; - } - if (pullVaultNameOrId == null) { - pullVaultNameOrId = IdInternal.fromString( - vaultMeta.remoteVault!, - ); - } else { - metaChange = 1; - if (typeof pullVaultNameOrId === 'string') { - metaChange = 2; - } else { - vaultMeta.remoteVault = pullVaultNameOrId.toString(); - } - } - this.logger.info( - `Pulling Vault ${vaultsUtils.encodeVaultId( - vaultId, - )} from Node ${pullNodeId}`, - ); - const nodeConnection = await this.nodeManager.getConnectionToNode( - pullNodeId!, - ); - const client = nodeConnection.getClient(); - const [request,, remoteVaultId] = await vaultsUtils.request( - client, - thisNodeId, - pullVaultNameOrId!, - ); - try { - await git.pull({ - fs: this.efs, - http: { request }, - dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), - gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), - url: `http://`, - ref: 'HEAD', - singleBranch: true, - author: { - name: nodesUtils.encodeNodeId(pullNodeId!), - }, - }); - } catch (err) { - // If the error flag set and we have the generalised SmartHttpError from - // isomorphic git then we need to throw the polykey error - if (err instanceof git.Errors.SmartHttpError && error) { - throw error; - } else if (err instanceof git.Errors.MergeNotSupportedError) { - throw new vaultsErrors.ErrorVaultsMergeConflict( - 'Merge Conflicts are not supported yet', - ); - } - throw err; - } - if (metaChange !== 0) { - if (metaChange === 2) vaultMeta.remoteVault = remoteVaultId; - await this.db.put( - this.vaultsMetaDbDomain, - idUtils.toBuffer(vaultId), - vaultMeta, - ); - } + }): Promise { + if ((await this.getVaultName(vaultId)) == null) return; + await withF([this.getWriteLock(vaultId)], async () => { const vault = await this.getVault(vaultId); - // Store the working directory commit state in the '.git' directory - this.logger.info( - `Pulled Vault ${vaultsUtils.encodeVaultId( - vaultId, - )} from Node ${pullNodeId}`, - ); - return vault.vaultId; - }, [vaultId]); + await vault.pullVault({ + nodeConnectionManager: this.nodeConnectionManager, + pullNodeId, + pullVaultNameOrId, + }); + }); } /** @@ -738,24 +644,29 @@ class VaultManager { * cloned or pulled from */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async *handleInfoRequest( - vaultId: VaultId, - ): AsyncGenerator { - // Adehrance to git protocol - yield Buffer.from( - gitUtils.createGitPacketLine('# service=git-upload-pack\n'), + public async *handleInfoRequest(vaultId: VaultId): AsyncGenerator { + const efs = this.efs; + const vault = await this.getVault(vaultId); + return yield* withG( + [this.getReadLock(vaultId), vault.readLock], + async function* (): AsyncGenerator { + // Adherence to git protocol + yield Buffer.from( + gitUtils.createGitPacketLine('# service=git-upload-pack\n'), + ); + yield Buffer.from('0000'); + // Read the commit state of the vault + 
const uploadPack = await gitUtils.uploadPack({ + fs: efs, + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), + advertiseRefs: true, + }); + for (const buffer of uploadPack) { + yield buffer; + } + }, ); - yield Buffer.from('0000'); - // Read the commit state of the vault - const uploadPack = await gitUtils.uploadPack({ - fs: this.efs, - dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), - gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), - advertiseRefs: true, - }); - for (const buffer of uploadPack) { - yield buffer; - } } /** @@ -767,67 +678,118 @@ class VaultManager { vaultId: VaultId, body: Buffer, ): Promise<[PassThrough, PassThrough]> { - if (body.toString().slice(4, 8) === 'want') { - // Parse the request to get the wanted git object - const wantedObjectId = body.toString().slice(9, 49); - const packResult = await gitUtils.packObjects({ - fs: this.efs, - dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), - gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), - refs: [wantedObjectId], - }); - // Generate a contents and progress stream - const readable = new PassThrough(); - const progressStream = new PassThrough(); - const sideBand = gitUtils.mux( - 'side-band-64', - readable, - packResult.packstream, - progressStream, - ); - return [sideBand, progressStream]; - } else { - throw new gitErrors.ErrorGitUnimplementedMethod( - `Request of type '${body - .toString() - .slice(4, 8)}' not valid, expected 'want'`, - ); - } + const vault = await this.getVault(vaultId); + return await withF( + [this.getReadLock(vaultId), vault.readLock], + async () => { + if (body.toString().slice(4, 8) === 'want') { + // Parse the request to get the wanted git object + const wantedObjectId = body.toString().slice(9, 49); + const packResult = await gitUtils.packObjects({ + fs: this.efs, + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), + refs: [wantedObjectId], + }); + // Generate a contents and progress stream + const readable = new PassThrough(); + const progressStream = new PassThrough(); + const sideBand = gitUtils.mux( + 'side-band-64', + readable, + packResult.packstream, + progressStream, + ); + return [sideBand, progressStream]; + } else { + throw new gitErrors.ErrorGitUnimplementedMethod( + `Request of type '${body + .toString() + .slice(4, 8)}' not valid, expected 'want'`, + ); + } + }, + ); } /** * Retrieves all the vaults for a peers node */ - public async scanNodeVaults( - nodeId: NodeId, - ): Promise> { + public async *scanVaults(targetNodeId: NodeId): AsyncGenerator<{ + vaultName: VaultName; + vaultIdEncoded: VaultIdEncoded; + vaultPermissions: VaultAction[]; + }> { // Create a connection to another node - return await this.nodeConnectionManager.withConnF( - nodeId, - async (connection) => { + return yield* this.nodeConnectionManager.withConnG( + targetNodeId, + async function* (connection): AsyncGenerator<{ + vaultName: VaultName; + vaultIdEncoded: VaultIdEncoded; + vaultPermissions: VaultAction[]; + }> { const client = connection.getClient(); - const nodeIdMessage = new nodesPB.Node(); - nodeIdMessage.setNodeId( - nodesUtils.encodeNodeId(this.keyManager.getNodeId()), - ); - const vaults: Array<[VaultName, VaultId]> = []; - const genReadable = client.vaultsScan(nodeIdMessage); + const genReadable = client.vaultsScan(new utilsPB.EmptyMessage()); for await (const vault of genReadable) { - 
vaults.push([ - vault.getVaultName() as VaultName, - validationUtils.parseVaultId(vault.getVaultId()), - ]); + const vaultName = vault.getVaultName() as VaultName; + const vaultIdEncoded = vault.getVaultId() as VaultIdEncoded; + const vaultPermissions = + vault.getVaultPermissionsList() as VaultAction[]; + yield { vaultName, vaultIdEncoded, vaultPermissions }; } - return vaults; }, ); } + /** + * Returns all the shared vaults for a NodeId. + */ + public async *handleScanVaults( + nodeId: NodeId, + ): AsyncGenerator<{ + vaultId: VaultId; + vaultName: VaultName; + vaultPermissions: VaultAction[]; + }> { + // Checking permission + const nodeIdEncoded = nodesUtils.encodeNodeId(nodeId); + const permissions = await this.acl.getNodePerm(nodeId); + if (permissions == null) { + throw new vaultsErrors.ErrorVaultsPermissionDenied( + `No permissions found for ${nodeIdEncoded}`, + ); + } + if (permissions.gestalt.scan === undefined) { + throw new vaultsErrors.ErrorVaultsPermissionDenied( + `Scanning is not allowed for ${nodeIdEncoded}`, + ); + } + + // Getting the list of vaults + const vaults = permissions.vaults; + for (const vaultIdString of Object.keys(vaults)) { + // Getting vault permissions + const vaultId = IdInternal.fromString(vaultIdString); + const vaultPermissions = Object.keys( + vaults[vaultIdString], + ) as VaultAction[]; + // Getting the vault name + const metadata = await this.getVaultMeta(vaultId); + const vaultName = metadata!.vaultName; + const element = { + vaultId, + vaultName, + vaultPermissions, + }; + yield element; + } + } + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) protected async generateVaultId(): Promise { let vaultId = vaultsUtils.generateVaultId(); let i = 0; - while (await this.efs.exists(idUtils.toString(vaultId))) { + while (await this.efs.exists(vaultsUtils.encodeVaultId(vaultId))) { i++; if (i > 50) { throw new vaultsErrors.ErrorVaultsCreateVaultId( @@ -842,8 +804,9 @@ class VaultManager { @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) protected async getVault(vaultId: VaultId): Promise { let vault: VaultInternal | undefined; - let lock: MutexInterface; - let vaultAndLock = this.vaultMap.get(vaultId); + let lock: RWLock; + const vaultIdString = vaultId.toString() as VaultIdString; + let vaultAndLock = this.vaultMap.get(vaultIdString); if (vaultAndLock != null) { ({ vault, lock } = vaultAndLock); // Lock and vault exist @@ -853,62 +816,57 @@ class VaultManager { // Only lock exists let release; try { - release = await lock.acquire(); - ({ vault, lock } = vaultAndLock); + release = await lock.acquireWrite(); + ({ vault } = vaultAndLock); if (vault != null) { return vault; } - const vaultMeta = await this.db.get( - this.vaultsMetaDbDomain, - idUtils.toBuffer(vaultId), - ); - let remote; - if (vaultMeta) { - if (vaultMeta.remoteVault || vaultMeta.remoteNode) { - remote = true; - } + // Only create if the vault state already exists + if ((await this.getVaultMeta(vaultId)) == null) { + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault ${vaultsUtils.encodeVaultId(vaultId)} doesn't exist`, + ); } - vault = await VaultInternal.create({ + vault = await VaultInternal.createVaultInternal({ vaultId, keyManager: this.keyManager, efs: this.efs, logger: this.logger.getChild(VaultInternal.name), - remote, + db: this.db, + vaultsDb: this.vaultsDb, + vaultsDbDomain: this.vaultsDbDomain, }); vaultAndLock.vault = vault; - this.vaultMap.set(vaultId, vaultAndLock); + this.vaultMap.set(vaultIdString, vaultAndLock); return vault; } finally { release(); } } 
else { // Neither vault nor lock exists - lock = new Mutex(); + lock = new RWLock(); vaultAndLock = { lock }; - this.vaultMap.set(vaultId, vaultAndLock); + this.vaultMap.set(vaultIdString, vaultAndLock); let release; try { - release = await lock.acquire(); - const vaultMeta = await this.db.get( - this.vaultsMetaDbDomain, - idUtils.toBuffer(vaultId), - ); - let remote; - if (vaultMeta) { - if (vaultMeta.remoteVault || vaultMeta.remoteNode) { - remote = true; - } + release = await lock.acquireWrite(); + // Only create if the vault state already exists + if ((await this.getVaultMeta(vaultId)) == null) { + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault ${vaultsUtils.encodeVaultId(vaultId)} doesn't exist`, + ); } - vault = await VaultInternal.create({ + vault = await VaultInternal.createVaultInternal({ vaultId, keyManager: this.keyManager, efs: this.efs, - workingDirIndex: vaultMeta?.workingDirectoryIndex, + db: this.db, + vaultsDb: this.vaultsDb, + vaultsDbDomain: this.vaultsDbDomain, logger: this.logger.getChild(VaultInternal.name), - remote, }); vaultAndLock.vault = vault; - this.vaultMap.set(vaultId, vaultAndLock); + this.vaultMap.set(vaultIdString, vaultAndLock); return vault; } finally { release(); @@ -919,9 +877,9 @@ class VaultManager { // THIS can also be replaced with generic withF and withG /** - * Takes a function and runs it with the listed vaults. locking is handled automatically. - * @param vaultIds List of vault ID for vaults you wish to use. - * @param f Function you wish to run with the provided vaults. + * Takes a function and runs it with the listed vaults. Locking is handled automatically + * @param vaultIds List of vault IDs for vaults you wish to use + * @param f Function you wish to run with the provided vaults */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async withVaults( @@ -929,10 +887,10 @@ class VaultManager { f: (...args: Vault[]) => Promise, ): Promise { // Stages: - // 1. Obtain vaults, - // 2. Call function with vaults while locking the vaults. - // 3. Catch any problems and preform clean up in finally. - // 4. return result. + // 1. Obtain vaults + // 2. Call function with vaults while locking the vaults + // 3. Catch any problems and perform cleanup in finally + // 4. Return the result const vaults = await Promise.all( vaultIds.map(async (vaultId) => { @@ -940,15 +898,15 @@ class VaultManager { }), ); - // Obtaining locks. + // Obtaining locks const vaultLocks = vaultIds.map((vaultId) => { - return this.getLock(vaultId); + return this.getReadLock(vaultId); }); - // Running the function with locking. - return await this.withLocks(() => { + // Running the function with locking + return await withF(vaultLocks, () => { return f(...vaults); - }, vaultLocks); + }); } protected async setupKey(bits: 128 | 192 | 256): Promise { diff --git a/src/vaults/VaultOps.ts b/src/vaults/VaultOps.ts index 905947143..f9bef210a 100644 --- a/src/vaults/VaultOps.ts +++ b/src/vaults/VaultOps.ts @@ -7,6 +7,11 @@ import path from 'path'; import * as vaultsErrors from './errors'; import * as vaultsUtils from './utils'; +// TODO: remove? 
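+// A usage sketch for VaultManager.withVaults above; the argument names are
+// assumed, and the readF/writeF calls mirror how the tests in this patch
+// use them:
+//   await vaultManager.withVaults([vaultId1, vaultId2], async (v1, v2) => {
+//     const secret = await v1.readF(async (efs) =>
+//       efs.readFile('secret 1', { encoding: 'utf8' }),
+//     );
+//     await v2.writeF(async (efs) => {
+//       await efs.writeFile('secret 1', secret);
+//     });
+//   });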
+type FileOptions = { + recursive?: boolean; +}; + // TODO: tests // - add succeeded // - secret exists diff --git a/src/vaults/errors.ts b/src/vaults/errors.ts index 3bd7c17aa..a01793ff1 100644 --- a/src/vaults/errors.ts +++ b/src/vaults/errors.ts @@ -54,11 +54,13 @@ class ErrorVaultReferenceMissing extends ErrorVault { exitCode = sysexits.USAGE; } -// Yes it is immutable -// But this is because you don't own the vault right now +class ErrorVaultRemoteDefined extends ErrorVaults { + description = 'Vault is a clone of a remote vault and can not be mutated'; + exitCode = sysexits.USAGE; +} -class ErrorVaultImmutable extends ErrorVaults { - description = 'Vault cannot be mutated'; +class ErrorVaultRemoteUndefined extends ErrorVaults { + description = 'Vault has no remote set and can not be pulled'; exitCode = sysexits.USAGE; } @@ -83,6 +85,11 @@ class ErrorVaultsMergeConflict extends ErrorVaults {} class ErrorVaultsPermissionDenied extends ErrorVaults {} +class ErrorVaultsNameConflict extends ErrorVaults { + description = 'Unique name could not be created'; + exitCode = sysexits.UNAVAILABLE; +} + class ErrorSecrets extends ErrorPolykey {} class ErrorSecretsSecretUndefined extends ErrorSecrets {} @@ -102,7 +109,8 @@ export { ErrorVaultDestroyed, ErrorVaultReferenceInvalid, ErrorVaultReferenceMissing, - ErrorVaultImmutable, + ErrorVaultRemoteDefined, + ErrorVaultRemoteUndefined, ErrorVaultsVaultUndefined, ErrorVaultsVaultDefined, ErrorVaultsRecursive, @@ -111,6 +119,7 @@ export { ErrorVaultsInvalidVaultId, ErrorVaultsMergeConflict, ErrorVaultsPermissionDenied, + ErrorVaultsNameConflict, ErrorSecrets, ErrorSecretsSecretUndefined, ErrorSecretsSecretDefined, diff --git a/src/vaults/types.ts b/src/vaults/types.ts index 66e053ebf..8635f526a 100644 --- a/src/vaults/types.ts +++ b/src/vaults/types.ts @@ -21,8 +21,8 @@ const tagLast = 'last'; const refs = ['HEAD', tagLast] as const; type VaultId = Opaque<'VaultId', Id>; - type VaultIdEncoded = Opaque<'VaultIdEncoded', string>; +type VaultIdString = Opaque<'VaultIdString', string>; type VaultRef = typeof refs[number]; @@ -161,6 +161,7 @@ export { vaultActions }; export type { VaultId, VaultIdEncoded, + VaultIdString, VaultRef, VaultAction, CommitId, diff --git a/src/vaults/utils.ts b/src/vaults/utils.ts index d712691f4..5758f91e9 100644 --- a/src/vaults/utils.ts +++ b/src/vaults/utils.ts @@ -5,17 +5,13 @@ import type { VaultAction, CommitId, } from './types'; -import type { FileSystem, POJO } from '../types'; -import type { GRPCClientAgent } from '../agent'; import type { NodeId } from '../nodes/types'; +import type { EncryptedFS } from 'encryptedfs'; import path from 'path'; -import { IdInternal, IdRandom, utils as idUtils } from '@matrixai/id'; -import * as grpc from '@grpc/grpc-js'; +import { IdInternal, IdRandom } from '@matrixai/id'; import { tagLast, refs, vaultActions } from './types'; import * as nodesUtils from '../nodes/utils'; -import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; -import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; /** * Vault history is designed for linear-history @@ -24,6 +20,7 @@ import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; * Where branches are automatically made when new timelines are created */ const canonicalBranch = 'master'; +const canonicalBranchRef = 'refs/heads/' + canonicalBranch; const vaultIdGenerator = new IdRandom(); @@ -36,13 +33,11 @@ function encodeVaultId(vaultId: VaultId): VaultIdEncoded { } function decodeVaultId(vaultIdEncoded: any): VaultId | 
undefined { - if (typeof vaultIdEncoded !== 'string') { - return; - } + if (typeof vaultIdEncoded !== 'string') return; const vaultId = IdInternal.fromMultibase(vaultIdEncoded); - if (vaultId == null) { - return; - } + if (vaultId == null) return; + // All VaultIds are 16 bytes long + if (vaultId.length !== 16) return; return vaultId; } @@ -67,154 +62,40 @@ function commitAuthor(nodeId: NodeId): { name: string; email: string } { }; } -// Function isVaultId(arg: any) { -// return isId(arg); -// } -// /** -// * This will return arg as a valid VaultId or throw an error if it can't be converted. -// * This will take a multibase string of the ID or the raw Buffer of the ID. -// * @param arg - The variable we wish to convert -// * @throws vaultsErrors.ErrorInvalidVaultId if the arg can't be converted into a VaultId -// * @returns VaultId -// */ -// function makeVaultId(arg: any): VaultId { -// return makeId(arg); -// } -// function isVaultIdPretty(arg: any): arg is VaultIdPretty { -// return isIdString(arg); -// } -// function makeVaultIdPretty(arg: any): VaultIdPretty { -// return makeIdString(arg); -// } - -// async function fileExists(fs: FileSystem, path: string): Promise { -// try { -// const fh = await fs.promises.open(path, 'r'); -// await fh.close(); -// } catch (err) { -// if (err.code === 'ENOENT') { -// return false; -// } -// } -// return true; -// } - -// async function* readdirRecursively(fs, dir = '.') { -// const dirents = await fs.promises.readdir(dir); -// for (const dirent of dirents) { -// const res = path.join(dir, dirent.toString()); -// const stat = await fs.promises.stat(res); -// if (stat.isDirectory()) { -// yield* readdirRecursively(fs, res); -// } else if (stat.isFile()) { -// yield res; -// } -// } -// } - -// async function request( -// client: GRPCClientAgent, -// nodeId: NodeId, -// vaultNameOrId: VaultId | VaultName, -// ) { -// const requestMessage = new vaultsPB.InfoRequest(); -// const vaultMessage = new vaultsPB.Vault(); -// const nodeMessage = new nodesPB.Node(); -// nodeMessage.setNodeId(nodeId); -// requestMessage.setAction('clone'); -// if (typeof vaultNameOrId === 'string') { -// vaultMessage.setNameOrId(vaultNameOrId); -// } else { -// // To have consistency between GET and POST, send the user -// // readable form of the vault Id -// vaultMessage.setNameOrId(makeVaultIdPretty(vaultNameOrId)); -// } -// requestMessage.setVault(vaultMessage); -// requestMessage.setNode(nodeMessage); -// const response = client.vaultsGitInfoGet(requestMessage); -// let vaultName, remoteVaultId; -// response.stream.on('metadata', async (meta) => { -// // Receive the Id of the remote vault -// vaultName = meta.get('vaultName').pop(); -// if (vaultName) vaultName = vaultName.toString(); -// const vId = meta.get('vaultId').pop(); -// if (vId) remoteVaultId = makeVaultId(vId.toString()); -// }); -// // Collet the response buffers from the GET request -// const infoResponse: Uint8Array[] = []; -// for await (const resp of response) { -// infoResponse.push(resp.getChunk_asU8()); -// } -// const metadata = new grpc.Metadata(); -// if (typeof vaultNameOrId === 'string') { -// metadata.set('vaultNameOrId', vaultNameOrId); -// } else { -// // Metadata only accepts the user readable form of the vault Id -// // as the string form has illegal characters -// metadata.set('vaultNameOrId', makeVaultIdPretty(vaultNameOrId)); -// } -// return [ -// async function ({ -// url, -// method = 'GET', -// headers = {}, -// body = [Buffer.from('')], -// }: { -// url: string; -// method: string; -// 
headers: POJO; -// body: Buffer[]; -// }) { -// if (method === 'GET') { -// // Send back the GET request info response -// return { -// url: url, -// method: method, -// body: infoResponse, -// headers: headers, -// statusCode: 200, -// statusMessage: 'OK', -// }; -// } else if (method === 'POST') { -// const responseBuffers: Array = []; -// const stream = client.vaultsGitPackGet(metadata); -// const chunk = new vaultsPB.PackChunk(); -// // Body is usually an async generator but in the cases we are using, -// // only the first value is used -// chunk.setChunk(body[0]); -// // Tell the server what commit we need -// await stream.write(chunk); -// let packResponse = (await stream.read()).value; -// while (packResponse != null) { -// responseBuffers.push(packResponse.getChunk_asU8()); -// packResponse = (await stream.read()).value; -// } -// return { -// url: url, -// method: method, -// body: responseBuffers, -// headers: headers, -// statusCode: 200, -// statusMessage: 'OK', -// }; -// } else { -// throw new Error('Method not supported'); -// } -// }, -// vaultName, -// remoteVaultId, -// ]; -// } +async function* readdirRecursively(fs, dir = '.') { + const dirents = await fs.promises.readdir(dir); + for (const dirent of dirents) { + const res = path.join(dir, dirent.toString()); + const stat = await fs.promises.stat(res); + if (stat.isDirectory()) { + yield* readdirRecursively(fs, res); + } else if (stat.isFile()) { + yield res; + } + } +} function isVaultAction(action: any): action is VaultAction { if (typeof action !== 'string') return false; return (vaultActions as Readonly>).includes(action); } +async function deleteObject(fs: EncryptedFS, gitdir: string, ref: string) { + const bucket = ref.slice(0, 2); + const shortref = ref.slice(2); + const objectPath = path.join(gitdir, 'objects', bucket, shortref); + try { + await fs.unlink(objectPath); + } catch (e) { + if (e.code !== 'ENOENT') throw e; + } +} + export { tagLast, refs, canonicalBranch, + canonicalBranchRef, generateVaultId, encodeVaultId, decodeVaultId, @@ -222,4 +103,6 @@ export { validateCommitId, commitAuthor, isVaultAction, + readdirRecursively, + deleteObject, }; diff --git a/tests/acl/ACL.test.ts b/tests/acl/ACL.test.ts index 82c01757c..a75819f2f 100644 --- a/tests/acl/ACL.test.ts +++ b/tests/acl/ACL.test.ts @@ -7,7 +7,6 @@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; -import { utils as idUtils } from '@matrixai/id'; import { ACL, errors as aclErrors } from '@/acl'; import { utils as keysUtils } from '@/keys'; import { utils as vaultsUtils } from '@/vaults'; diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index e1b4d06d9..a1cb598ff 100644 --- a/tests/agent/GRPCClientAgent.test.ts +++ b/tests/agent/GRPCClientAgent.test.ts @@ -147,7 +147,6 @@ describe(GRPCClientAgent.name, () => { keyManager: keyManager, vaultsPath: vaultsPath, nodeConnectionManager: nodeConnectionManager, - nodeManager: nodeManager, db: db, acl: acl, gestaltGraph: gestaltGraph, diff --git a/tests/bin/vaults/vaults.test.ts b/tests/bin/vaults/vaults.test.ts index 84772f2c3..108486abf 100644 --- a/tests/bin/vaults/vaults.test.ts +++ b/tests/bin/vaults/vaults.test.ts @@ -7,8 +7,10 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import { utils as nodesUtils } from '@/nodes'; import { utils as vaultsUtils } from '@/vaults'; -import * as vaultsPB from 
'@/proto/js/polykey/v1/vaults/vaults_pb'; +import sysexits from '@/utils/sysexits'; +import { NotificationsManager } from '@/notifications'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -154,7 +156,7 @@ describe('CLI vaults', () => { command = [ 'vaults', 'rename', - 'InvalidVaultId', // Vault does not exist + 'z4iAXFwgHGeyUrdC5CiCNU4', // Vault does not exist 'RenamedVault', '-np', dataDir, @@ -165,7 +167,7 @@ describe('CLI vaults', () => { const result = await testBinUtils.pkStdio([...command], {}, dataDir); // Exit code of the exception - expect(result.exitCode).toBe(10); + expect(result.exitCode).toBe(sysexits.USAGE); const list = (await polykeyAgent.vaultManager.listVaults()).keys(); const namesList: string[] = []; @@ -243,10 +245,13 @@ describe('CLI vaults', () => { vaults: {}, }); - await targetPolykeyAgent.vaultManager.shareVault( - vaultId, - polykeyAgent.keyManager.getNodeId(), + const nodeId = polykeyAgent.keyManager.getNodeId(); + await targetPolykeyAgent.gestaltGraph.setGestaltActionByNode( + nodeId, + 'scan', ); + await targetPolykeyAgent.acl.setVaultAction(vaultId, nodeId, 'clone'); + await targetPolykeyAgent.acl.setVaultAction(vaultId, nodeId, 'pull'); command = [ 'vaults', @@ -333,7 +338,7 @@ describe('CLI vaults', () => { targetNodeIdEncoded, ]; result = await testBinUtils.pkStdio([...command], {}, dataDir); - expect(result.exitCode).toBe(10); + expect(result.exitCode).toBe(sysexits.USAGE); expect(result.stderr).toContain('ErrorVaultsVaultUndefined'); command = [ @@ -347,8 +352,7 @@ describe('CLI vaults', () => { 'InvalidNodeId', ]; result = await testBinUtils.pkStdio([...command], {}, dataDir); - expect(result.exitCode).toBe(1); - expect(result.stderr).toContain('ErrorInvalidId'); + expect(result.exitCode).toBe(sysexits.USAGE); await targetPolykeyAgent.stop(); await targetPolykeyAgent.destroy(); @@ -359,111 +363,159 @@ describe('CLI vaults', () => { }, global.defaultTimeout * 3, ); - test( - 'share and unshare vaults', - async () => { - const dataDir2 = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), + describe('commandShare', () => { + test('Should share a vault', async () => { + const mockedSendNotification = jest.spyOn( + NotificationsManager.prototype, + 'sendNotification', ); - const targetPolykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: dataDir2, - logger: logger, - }); - const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { - await vault.writeF(async (efs) => { - await efs.writeFile('secret 1', 'secret'); + try { + // We don't want to actually send a notification + mockedSendNotification.mockImplementation(async (_) => {}); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + const targetNodeId = testUtils.generateRandomNodeId(); + const targetNodeIdEncoded = nodesUtils.encodeNodeId(targetNodeId); + await polykeyAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId), + chain: {}, }); - }); - + expect( + (await polykeyAgent.acl.getNodePerm(targetNodeId))?.vaults[vaultId], + ).toBeUndefined(); + + command = [ + 'vaults', + 'share', + '-np', + dataDir, + vaultIdEncoded, + targetNodeIdEncoded, + ]; + const result = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result.exitCode).toBe(0); + + // 
Check permission + const permissions1 = (await polykeyAgent.acl.getNodePerm(targetNodeId)) + ?.vaults[vaultId]; + expect(permissions1).toBeDefined(); + expect(permissions1.pull).toBeDefined(); + expect(permissions1.clone).toBeDefined(); + } finally { + mockedSendNotification.mockRestore(); + } + }); + }); + describe('commandUnshare', () => { + test('Should unshare a vault', async () => { + const vaultId1 = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId2 = await polykeyAgent.vaultManager.createVault( + vaultName + '1', + ); + const vaultIdEncoded1 = vaultsUtils.encodeVaultId(vaultId1); + const vaultIdEncoded2 = vaultsUtils.encodeVaultId(vaultId2); + const targetNodeId = testUtils.generateRandomNodeId(); + const targetNodeIdEncoded = nodesUtils.encodeNodeId(targetNodeId); await polykeyAgent.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(targetPolykeyAgent.keyManager.getNodeId()), + id: nodesUtils.encodeNodeId(targetNodeId), chain: {}, }); - const targetNodeId = targetPolykeyAgent.keyManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); - const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); - await polykeyAgent.nodeManager.setNode(targetNodeId, { - host: targetHost, - port: targetPort, - }); - - await targetPolykeyAgent.nodeManager.setNode( - polykeyAgent.keyManager.getNodeId(), - { - host: polykeyAgent.revProxy.getIngressHost(), - port: polykeyAgent.revProxy.getIngressPort(), - }, - ); - await targetPolykeyAgent.acl.setNodePerm( - polykeyAgent.keyManager.getNodeId(), - { - gestalt: { - notify: null, - }, - vaults: {}, - }, + // Creating permissions + await polykeyAgent.gestaltGraph.setGestaltActionByNode( + targetNodeId, + 'scan', ); - - await expect(() => - targetPolykeyAgent.vaultManager.cloneVault( - polykeyAgent.keyManager.getNodeId(), - vaultId, - ), - ).rejects.toThrow(); + await polykeyAgent.acl.setVaultAction(vaultId1, targetNodeId, 'clone'); + await polykeyAgent.acl.setVaultAction(vaultId1, targetNodeId, 'pull'); + await polykeyAgent.acl.setVaultAction(vaultId2, targetNodeId, 'clone'); + await polykeyAgent.acl.setVaultAction(vaultId2, targetNodeId, 'pull'); command = [ 'vaults', - 'share', + 'unshare', '-np', dataDir, - vaultName, - nodesUtils.encodeNodeId(targetNodeId), + vaultIdEncoded1, + targetNodeIdEncoded, ]; - let result = await testBinUtils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const clonedVaultId = await targetPolykeyAgent.vaultManager.cloneVault( - polykeyAgent.keyManager.getNodeId(), - vaultId, - ); - await targetPolykeyAgent.vaultManager.withVaults( - [clonedVaultId], - async (clonedVault) => { - const file = await clonedVault.readF(async (efs) => { - return await efs.readFile('secret 1', { encoding: 'utf8' }); - }); - expect(file).toBe('secret'); - }, - ); + // Check permission + const permissions = (await polykeyAgent.acl.getNodePerm(targetNodeId)) + ?.vaults[vaultId1]; + expect(permissions).toBeDefined(); + expect(permissions.pull).toBeUndefined(); + expect(permissions.clone).toBeUndefined(); + + expect( + (await polykeyAgent.acl.getNodePerm(targetNodeId))?.gestalt['scan'], + ).toBeDefined(); command = [ 'vaults', 'unshare', '-np', dataDir, - vaultsUtils.encodeVaultId(vaultId), - nodesUtils.encodeNodeId(targetNodeId), + vaultIdEncoded2, + targetNodeIdEncoded, ]; + const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result2.exitCode).toBe(0); - result = await 
testBinUtils.pkStdio([...command], {}, dataDir);
-      expect(result.exitCode).toBe(0);
+      // Check permission
+      const permissions2 = (await polykeyAgent.acl.getNodePerm(targetNodeId))
+        ?.vaults[vaultId2];
+      expect(permissions2).toBeDefined();
+      expect(permissions2.pull).toBeUndefined();
+      expect(permissions2.clone).toBeUndefined();
+
+      // And the scan permission should be removed
+      expect(
+        (await polykeyAgent.acl.getNodePerm(targetNodeId))?.gestalt['scan'],
+      ).toBeUndefined();
+    });
+  });
+  describe('commandPermissions', () => {
+    test('Should get the permissions of a vault', async () => {
+      const vaultId1 = await polykeyAgent.vaultManager.createVault(vaultName);
+      const vaultId2 = await polykeyAgent.vaultManager.createVault(
+        vaultName + '1',
+      );
+      const vaultIdEncoded1 = vaultsUtils.encodeVaultId(vaultId1);
+      const vaultIdEncoded2 = vaultsUtils.encodeVaultId(vaultId2);
+      const targetNodeId = testUtils.generateRandomNodeId();
+      const targetNodeIdEncoded = nodesUtils.encodeNodeId(targetNodeId);
+      await polykeyAgent.gestaltGraph.setNode({
+        id: nodesUtils.encodeNodeId(targetNodeId),
+        chain: {},
+      });
+
+      // Creating permissions
+      await polykeyAgent.gestaltGraph.setGestaltActionByNode(
+        targetNodeId,
+        'scan',
+      );
+      await polykeyAgent.acl.setVaultAction(vaultId1, targetNodeId, 'clone');
+      await polykeyAgent.acl.setVaultAction(vaultId1, targetNodeId, 'pull');
+      await polykeyAgent.acl.setVaultAction(vaultId2, targetNodeId, 'pull');
+
+      command = ['vaults', 'permissions', '-np', dataDir, vaultIdEncoded1];
+      const result = await testBinUtils.pkStdio([...command], {}, dataDir);
+      expect(result.exitCode).toBe(0);
+      expect(result.stdout).toContain(targetNodeIdEncoded);
+      expect(result.stdout).toContain('clone');
+      expect(result.stdout).toContain('pull');
+
+      command = ['vaults', 'permissions', '-np', dataDir, vaultIdEncoded2];
+      const result2 = await testBinUtils.pkStdio([...command], {}, dataDir);
+      expect(result2.exitCode).toBe(0);
+      expect(result2.stdout).toContain(targetNodeIdEncoded);
+      expect(result2.stdout).not.toContain('clone');
+      expect(result2.stdout).toContain('pull');
+    });
+  });
   describe('commandVaultVersion', () => {
     test('should switch the version of a vault', async () => {
       const vaultId = await polykeyAgent.vaultManager.createVault(vaultName);
@@ -548,9 +600,9 @@ describe('CLI vaults', () => {
       ];
 
       const result = await testBinUtils.pkStdio([...command], {}, dataDir);
-      expect(result.exitCode).toBe(10);
+      expect(result.exitCode).toBe(sysexits.USAGE);
 
-      expect(result.stderr).toContain('ErrorVaultsWriteFUndefined');
+      expect(result.stderr).toContain('ErrorVaultReferenceInvalid');
     });
     test('should throw an error if the vault is not found', async () => {
       const command = [
         'vaults',
         'version',
         '-np',
         dataDir,
-        'A' + vaultName,
+        'zLnM7puKobbh4YXEz66StAq',
         'NOT_A_VALID_CHECKOUT_ID',
       ];
 
       const result = await testBinUtils.pkStdio([...command], {}, dataDir);
-      expect(result.exitCode).toBe(10);
+      expect(result.exitCode).toBe(sysexits.USAGE);
       expect(result.stderr).toContain('ErrorVaultsVaultUndefined');
     });
   });
@@ -663,6 +715,45 @@ describe('CLI vaults', () => {
         chain: {},
       });
 
+      const commands1 = [
+        'vaults',
+        'scan',
+        remoteOnlineNodeIdEncoded,
+
'-np', + dataDir, + ]; + const result1 = await testBinUtils.pkStdio( + commands1, + { PK_PASSWORD: 'password' }, + dataDir, + ); + expect(result1.exitCode).toEqual(sysexits.NOPERM); + expect(result1.stderr).toContain( + 'ErrorVaultsPermissionDenied: Permission was denied - Scanning is not allowed for', + ); + + await remoteOnline.gestaltGraph.setGestaltActionByNode( + polykeyAgent.keyManager.getNodeId(), + 'notify', + ); + + const commands2 = [ + 'vaults', + 'scan', + remoteOnlineNodeIdEncoded, + '-np', + dataDir, + ]; + const result2 = await testBinUtils.pkStdio( + commands2, + { PK_PASSWORD: 'password' }, + dataDir, + ); + expect(result2.exitCode).toEqual(sysexits.NOPERM); + expect(result2.stderr).toContain( + 'ErrorVaultsPermissionDenied: Permission was denied - Scanning is not allowed for', + ); + await remoteOnline.gestaltGraph.setGestaltActionByNode( polykeyAgent.keyManager.getNodeId(), 'scan', @@ -677,26 +768,30 @@ describe('CLI vaults', () => { const vault3Id = await remoteOnline.vaultManager.createVault( 'Vault3' as VaultName, ); - const commands = [ + const nodeId = polykeyAgent.keyManager.getNodeId(); + await remoteOnline.acl.setVaultAction(vault1Id, nodeId, 'clone'); + await remoteOnline.acl.setVaultAction(vault2Id, nodeId, 'pull'); + await remoteOnline.acl.setVaultAction(vault2Id, nodeId, 'clone'); + const commands3 = [ 'vaults', 'scan', remoteOnlineNodeIdEncoded, '-np', dataDir, ]; - const result = await testBinUtils.pkStdio( - commands, + const result3 = await testBinUtils.pkStdio( + commands3, { PK_PASSWORD: 'password' }, dataDir, ); - expect(result.exitCode).toBe(0); - expect(result.stdout).toContain( - `Vault1\t\t${vaultsUtils.encodeVaultId(vault1Id)}`, + expect(result3.exitCode).toBe(0); + expect(result3.stdout).toContain( + `Vault1\t\t${vaultsUtils.encodeVaultId(vault1Id)}\t\tclone`, ); - expect(result.stdout).toContain( - `Vault2\t\t${vaultsUtils.encodeVaultId(vault2Id)}`, + expect(result3.stdout).toContain( + `Vault2\t\t${vaultsUtils.encodeVaultId(vault2Id)}\t\tpull,clone`, ); - expect(result.stdout).toContain( + expect(result3.stdout).not.toContain( `Vault3\t\t${vaultsUtils.encodeVaultId(vault3Id)}`, ); } finally { @@ -707,120 +802,4 @@ describe('CLI vaults', () => { global.defaultTimeout * 2, ); }); - describe('commandPermissions', () => { - test('Should return nodeIds and their permissions', async () => { - let remoteKeynode1: PolykeyAgent | undefined; - let remoteKeynode2: PolykeyAgent | undefined; - try { - // A ridiculous amount of setup. 
- const vaultId1 = await polykeyAgent.vaultManager.createVault( - 'vault1' as VaultName, - ); - const vaultId2 = await polykeyAgent.vaultManager.createVault( - 'vault2' as VaultName, - ); - - remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ - password, - logger: logger.getChild('Remote Keynode 1'), - nodePath: path.join(dataDir, 'remoteKeynode1'), - }); - remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({ - password, - logger: logger.getChild('Remote Keynode 2'), - nodePath: path.join(dataDir, 'remoteKeynode2'), - }); - - const targetNodeId1 = remoteKeynode1.keyManager.getNodeId(); - const targetNodeId2 = remoteKeynode2.keyManager.getNodeId(); - await polykeyAgent.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(targetNodeId1), - chain: {}, - }); - await polykeyAgent.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(targetNodeId2), - chain: {}, - }); - await polykeyAgent.nodeManager.setNode(targetNodeId1, { - host: remoteKeynode1.revProxy.getIngressHost(), - port: remoteKeynode1.revProxy.getIngressPort(), - }); - await polykeyAgent.nodeManager.setNode(targetNodeId2, { - host: remoteKeynode2.revProxy.getIngressHost(), - port: remoteKeynode2.revProxy.getIngressPort(), - }); - - await remoteKeynode1.nodeManager.setNode( - polykeyAgent.keyManager.getNodeId(), - { - host: polykeyAgent.revProxy.getIngressHost(), - port: polykeyAgent.revProxy.getIngressPort(), - }, - ); - await remoteKeynode2.nodeManager.setNode( - polykeyAgent.keyManager.getNodeId(), - { - host: polykeyAgent.revProxy.getIngressHost(), - port: polykeyAgent.revProxy.getIngressPort(), - }, - ); - await remoteKeynode1.acl.setNodePerm( - polykeyAgent.keyManager.getNodeId(), - { - gestalt: { - notify: null, - }, - vaults: {}, - }, - ); - await remoteKeynode2.acl.setNodePerm( - polykeyAgent.keyManager.getNodeId(), - { - gestalt: { - notify: null, - }, - vaults: {}, - }, - ); - - await polykeyAgent.vaultManager.shareVault(vaultId1, targetNodeId1); - await polykeyAgent.vaultManager.shareVault(vaultId1, targetNodeId2); - await polykeyAgent.vaultManager.shareVault(vaultId2, targetNodeId1); - - const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId1)); - - // Now we call and test the command - const command1 = ['vaults', 'permissions', 'vault1', '-np', dataDir]; - const result1 = await testBinUtils.pkStdio( - command1, - { PK_PASSWORD: 'password' }, - dataDir, - ); - expect(result1.exitCode).toBe(0); - expect(result1.stdout).toContain(remoteKeynode1.keyManager.getNodeId()); - expect(result1.stdout).toContain(remoteKeynode2.keyManager.getNodeId()); - expect(result1.stdout).toContain('pull'); - expect(result1.stdout).toContain('clone'); - - // And the other vault - const command2 = ['vaults', 'permissions', 'vault2', '-np', dataDir]; - const result2 = await testBinUtils.pkStdio( - command2, - { PK_PASSWORD: 'password' }, - dataDir, - ); - expect(result2.exitCode).toBe(0); - expect(result2.stdout).toContain(targetNodeId1); - expect(result2.stdout).not.toContain(targetNodeId2); - expect(result2.stdout).toContain('pull'); - expect(result2.stdout).toContain('clone'); - } finally { - await remoteKeynode1?.stop(); - await remoteKeynode1?.destroy(); - await remoteKeynode2?.stop(); - await remoteKeynode2?.destroy(); - } - }); - }); }); diff --git a/tests/nodes/NodeConnection.test.ts b/tests/nodes/NodeConnection.test.ts index 6b9829036..16dee5e65 100644 --- a/tests/nodes/NodeConnection.test.ts +++ b/tests/nodes/NodeConnection.test.ts @@ -254,7 +254,6 @@ 
describe(`${NodeConnection.name} test`, () => { keyManager: serverKeyManager, vaultsPath: serverVaultsPath, nodeConnectionManager: dummyNodeConnectionManager, - nodeManager: serverNodeManager, notificationsManager: serverNotificationsManager, db: serverDb, acl: serverACL, diff --git a/tests/notifications/utils.test.ts b/tests/notifications/utils.test.ts index 4f4d18b0b..8f85d4642 100644 --- a/tests/notifications/utils.test.ts +++ b/tests/notifications/utils.test.ts @@ -2,8 +2,7 @@ import type { Notification, NotificationData } from '@/notifications/types'; import type { VaultActions, VaultName } from '@/vaults/types'; import { createPublicKey } from 'crypto'; import { EmbeddedJWK, jwtVerify, exportJWK } from 'jose'; -import { IdInternal } from '@matrixai/id'; -import { sleep } from '@/utils'; + import * as keysUtils from '@/keys/utils'; import * as notificationsUtils from '@/notifications/utils'; import * as notificationsErrors from '@/notifications/errors'; diff --git a/tests/vaults/VaultInternal.test.ts b/tests/vaults/VaultInternal.test.ts index cb721f348..aa3ce2bb8 100644 --- a/tests/vaults/VaultInternal.test.ts +++ b/tests/vaults/VaultInternal.test.ts @@ -1,16 +1,19 @@ import type { VaultId } from '@/vaults/types'; import type { Vault } from '@/vaults/Vault'; import type { KeyManager } from '@/keys'; +import type { DBDomain, DBLevel } from '@matrixai/db'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { EncryptedFS } from 'encryptedfs'; +import { DB } from '@matrixai/db'; import { VaultInternal } from '@/vaults'; import { generateVaultId } from '@/vaults/utils'; import * as vaultsErrors from '@/vaults/errors'; import { sleep } from '@/utils'; import { utils as keysUtils } from '@/keys'; +import * as vaultsUtils from '@/vaults/utils'; import * as testsUtils from '../utils'; jest.mock('@/keys/utils', () => ({ @@ -20,14 +23,19 @@ jest.mock('@/keys/utils', () => ({ })); describe('VaultInternal', () => { + const logger = new Logger('Vault', LogLevel.WARN, [new StreamHandler()]); + let dataDir: string; - let dbPath: string; + let efsDbPath: string; let vault: VaultInternal; let dbKey: Buffer; let vaultId: VaultId; let efs: EncryptedFS; - const logger = new Logger('Vault', LogLevel.WARN, [new StreamHandler()]); + + let db: DB; + let vaultsDb: DBLevel; + let vaultsDbDomain: DBDomain; const fakeKeyManager = { getNodeId: () => { @@ -37,33 +45,54 @@ describe('VaultInternal', () => { const secret1 = { name: 'secret-1', content: 'secret-content-1' }; const secret2 = { name: 'secret-2', content: 'secret-content-2' }; - beforeAll(async () => { + beforeEach(async () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); dbKey = await keysUtils.generateKey(); - dbPath = path.join(dataDir, 'db'); - await fs.promises.mkdir(dbPath); + efsDbPath = path.join(dataDir, 'efsDb'); + await fs.promises.mkdir(efsDbPath); efs = await EncryptedFS.createEncryptedFS({ - dbPath, + dbPath: efsDbPath, dbKey, logger, }); await efs.start(); - }); - beforeEach(async () => { + db = await DB.createDB({ + crypto: { + key: await keysUtils.generateKey(), + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, + dbPath: path.join(dataDir, 'db'), + fs: fs, + logger: logger, + }); + vaultsDbDomain = ['vaults']; + vaultsDb = await db.level(vaultsDbDomain[0]); + vaultId = generateVaultId(); - vault = await VaultInternal.create({ + vault = await 
VaultInternal.createVaultInternal({ vaultId, keyManager: fakeKeyManager, efs, logger, fresh: true, + db, + vaultsDb, + vaultsDbDomain, + vaultName: 'testVault', }); }); - afterAll(async () => { + afterEach(async () => { + await vault.stop(); + await vault.destroy(); + await db.stop(); + await db.destroy(); await efs.stop(); await efs.destroy(); await fs.promises.rm(dataDir, { @@ -73,9 +102,13 @@ describe('VaultInternal', () => { }); test('VaultInternal readiness', async () => { - await vault.destroy(); + await vault.stop(); await expect(async () => { await vault.log(); + }).rejects.toThrow(vaultsErrors.ErrorVaultNotRunning); + await vault.destroy(); + await expect(async () => { + await vault.start(); }).rejects.toThrow(vaultsErrors.ErrorVaultDestroyed); }); test('is type correct', async () => { @@ -99,13 +132,17 @@ describe('VaultInternal', () => { await vault.writeF(async (efs) => { await efs.writeFile('secret-1', 'secret-content'); }); - await vault.destroy(); - vault = await VaultInternal.create({ + await vault.stop(); + vault = await VaultInternal.createVaultInternal({ vaultId, keyManager: fakeKeyManager, efs, logger, fresh: false, + db, + vaultName: 'testVault2', + vaultsDb, + vaultsDbDomain, }); await vault.readF(async (efs) => { expect((await efs.readFile('secret-1')).toString()).toStrictEqual( @@ -155,7 +192,7 @@ describe('VaultInternal', () => { }); test('does not allow changing to an unrecognised commit', async () => { await expect(() => vault.version('unrecognisedcommit')).rejects.toThrow( - vaultsErrors.ErrorVaultReferenceMissing, + vaultsErrors.ErrorVaultReferenceInvalid, ); await vault.writeF(async (efs) => { await efs.writeFile('test1', 'testdata1'); @@ -256,22 +293,6 @@ describe('VaultInternal', () => { const log = await vault.log(); expect(log.length).toEqual(4); }); - test('write locks read', async () => { - await vault.writeF(async (efs) => { - await efs.writeFile('secret-1', 'secret-content'); - }); - - await Promise.all([ - vault.writeF(async (efs) => { - await efs.writeFile('secret-1', 'SUPER-DUPER-SECRET-CONTENT'); - }), - vault.readF(async (efs) => { - expect((await efs.readFile('secret-1')).toString()).toEqual( - 'SUPER-DUPER-SECRET-CONTENT', - ); - }), - ]); - }); test('commit added if mutation in write', async () => { const commit = (await vault.log())[0].commitId; await vault.writeF(async (efs) => { @@ -371,65 +392,6 @@ describe('VaultInternal', () => { // Has a new commit. expect(await vault.log()).toHaveLength(2); }); - test('locking occurs when making a commit.', async () => { - // We want to check if the locking is happening. so we need a way to see if an operation is being blocked. - - let resolveDelay; - const delayPromise = new Promise((resolve, _reject) => { - resolveDelay = resolve; - }); - let firstCommitResolved = false; - let firstCommitResolveTime; - - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); - - const commit1 = vault.writeF(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await delayPromise; // Hold the lock hostage. - firstCommitResolved = true; - firstCommitResolveTime = Date.now(); - }); - - // Now that we are holding the lock hostage, - // @ts-ignore - expect(vault.lock.isLocked()).toBeTruthy(); - // We want to check if any action resolves before the lock is released. 
-
-    let secondCommitResolved = false;
-    let secondCommitResolveTime;
-    const commit2 = vault.writeF(async (efs) => {
-      await efs.writeFile(secret2.name, secret2.content);
-      secondCommitResolved = true;
-      await sleep(2);
-      secondCommitResolveTime = Date.now();
-    });
-
-    // Give plenty of time for a commit to resolve.
-    await sleep(200);
-
-    // Now we want to check for the expected conditions.
-    // 1. Both commist have not completed.
-    // commit 1 is holding the lock.
-    expect(firstCommitResolved).toBeFalsy();
-    expect(secondCommitResolved).toBeFalsy();
-
-    // 2. We release the hostage so both should resolve.
-    await sleep(200);
-    resolveDelay();
-    await commit1;
-    await commit2;
-    expect(firstCommitResolved).toBeTruthy();
-    expect(secondCommitResolved).toBeTruthy();
-    expect(secondCommitResolveTime).toBeGreaterThan(firstCommitResolveTime);
-    // @ts-ignore
-    expect(vault.lock.isLocked()).toBeFalsy();
-
-    // Commit order should be commit2 -> commit1 -> init
-    const log = await vault.log();
-    expect(log[0].message).toContain(secret2.name);
-    expect(log[1].message).toContain(secret1.name);
-  });
   test('read operation allowed', async () => {
     await vault.writeF(async (efs) => {
       await efs.writeFile(secret1.name, secret1.content);
@@ -476,6 +438,86 @@ describe('VaultInternal', () => {
       }),
     ]);
   });
+  test('no commit after read', async () => {
+    await vault.writeF(async (efs) => {
+      await efs.writeFile(secret1.name, secret1.content);
+      await efs.writeFile(secret2.name, secret2.content);
+    });
+    const commit = (await vault.log())[0].commitId;
+    await vault.readF(async (efs) => {
+      expect((await efs.readFile(secret1.name)).toString()).toEqual(
+        secret1.content,
+      );
+    });
+    const log = await vault.log();
+    expect(log).toHaveLength(2);
+    expect(log[0].commitId).toStrictEqual(commit);
+  });
+  test('only exposes limited commands of VaultInternal', async () => {
+    // Converting a vault to the interface
+    const vaultInterface = vault as Vault;
+
+    // Using the available functions.
+    await vaultInterface.writeF(async (efs) => {
+      await efs.writeFile('test', 'testContent');
+    });
+
+    await vaultInterface.readF(async (efs) => {
+      const content = (await efs.readFile('test')).toString();
+      expect(content).toStrictEqual('testContent');
+    });
+
+    expect(vaultInterface.vaultDataDir).toBeTruthy();
+    expect(vaultInterface.vaultGitDir).toBeTruthy();
+    expect(vaultInterface.vaultId).toBeTruthy();
+    expect(vaultInterface.writeF).toBeTruthy();
+    expect(vaultInterface.writeG).toBeTruthy();
+    expect(vaultInterface.readF).toBeTruthy();
+    expect(vaultInterface.readG).toBeTruthy();
+    expect(vaultInterface.log).toBeTruthy();
+    expect(vaultInterface.version).toBeTruthy();
+
+    // Can we convert back?
+    const vaultNormal = vaultInterface as VaultInternal;
+    expect(vaultNormal.destroy).toBeTruthy(); // This exists again.
+  });
+  test('cannot commit when the remote field is set', async () => {
+    // Write remote metadata
+    await db.put(
+      [...vaultsDbDomain, vaultsUtils.encodeVaultId(vaultId)],
+      VaultInternal.remoteKey,
+      { remoteNode: '', remoteVault: '' },
+    );
+    const commit = (await vault.log(undefined, 1))[0];
+    await vault.version(commit.commitId);
+    const files = await vault.readF(async (efs) => {
+      return await efs.readdir('.');
+    });
+    expect(files).toEqual([]);
+    await expect(
+      vault.writeF(async (efs) => {
+        await efs.writeFile('test', 'testdata');
+      }),
+    ).rejects.toThrow(vaultsErrors.ErrorVaultRemoteDefined);
+  });
+  // Old locking tests
+  // TODO: review and remove?
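+  // A minimal sketch of the read-write semantics that both these older tests
+  // and the granular lock tests further down exercise, assuming `vault.lock`
+  // is an RWLock exposing acquireRead/acquireWrite:
+  //
+  //   const release = await lock.acquireWrite(); // excludes all other holders
+  //   try {
+  //     // mutate the EFS and commit
+  //   } finally {
+  //     release();
+  //   }
+  //
+  // Readers can share the lock; a writer waits for every reader to release.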
+  test('write locks read', async () => {
+    await vault.writeF(async (efs) => {
+      await efs.writeFile('secret-1', 'secret-content');
+    });
+
+    await Promise.all([
+      vault.writeF(async (efs) => {
+        await efs.writeFile('secret-1', 'SUPER-DUPER-SECRET-CONTENT');
+      }),
+      vault.readF(async (efs) => {
+        expect((await efs.readFile('secret-1')).toString()).toEqual(
+          'SUPER-DUPER-SECRET-CONTENT',
+        );
+      }),
+    ]);
+  });
   test('read locks write', async () => {
     await vault.writeF(async (efs) => {
       await efs.writeFile(secret1.name, secret1.content);
@@ -497,20 +539,64 @@ describe('VaultInternal', () => {
       }),
     ]);
   });
-  test('no commit after read', async () => {
-    await vault.writeF(async (efs) => {
+  test('locking occurs when making a commit.', async () => {
+    // We want to check if the locking is happening, so we need a way to see if an operation is being blocked.
+
+    let resolveDelay;
+    const delayPromise = new Promise((resolve, _reject) => {
+      resolveDelay = resolve;
+    });
+    let firstCommitResolved = false;
+    let firstCommitResolveTime;
+
+    // @ts-ignore
+    expect(vault.lock.isLocked()).toBeFalsy();
+
+    const commit1 = vault.writeF(async (efs) => {
       await efs.writeFile(secret1.name, secret1.content);
-      await efs.writeFile(secret2.name, secret2.content);
+      await delayPromise; // Hold the lock hostage.
+      firstCommitResolved = true;
+      firstCommitResolveTime = Date.now();
     });
-    const commit = (await vault.log())[0].commitId;
-    await vault.readF(async (efs) => {
-      expect((await efs.readFile(secret1.name)).toString()).toEqual(
-        secret1.content,
-      );
+
+    // Now that we are holding the lock hostage,
+    // @ts-ignore
+    expect(vault.lock.isLocked()).toBeTruthy();
+    // We want to check if any action resolves before the lock is released.
+
+    let secondCommitResolved = false;
+    let secondCommitResolveTime;
+    const commit2 = vault.writeF(async (efs) => {
+      await efs.writeFile(secret2.name, secret2.content);
+      secondCommitResolved = true;
+      await sleep(2);
+      secondCommitResolveTime = Date.now();
     });
+
+    // Give plenty of time for a commit to resolve.
+    await sleep(200);
+
+    // Now we want to check for the expected conditions.
+    // 1. Both commits have not completed.
+    // commit 1 is holding the lock.
+    expect(firstCommitResolved).toBeFalsy();
+    expect(secondCommitResolved).toBeFalsy();
+
+    // 2. We release the hostage so both should resolve.
+    await sleep(200);
+    resolveDelay();
+    await commit1;
+    await commit2;
+    expect(firstCommitResolved).toBeTruthy();
+    expect(secondCommitResolved).toBeTruthy();
+    expect(secondCommitResolveTime).toBeGreaterThan(firstCommitResolveTime);
+    // @ts-ignore
+    expect(vault.lock.isLocked()).toBeFalsy();
+
+    // Commit order should be commit2 -> commit1 -> init
     const log = await vault.log();
-    expect(log).toHaveLength(2);
-    expect(log[0].commitId).toStrictEqual(commit);
+    expect(log[0].message).toContain(secret2.name);
+    expect(log[1].message).toContain(secret1.name);
   });
   test('locking occurs when making an access.', async () => {
     await vault.writeF(async (efs) => {
@@ -569,54 +655,223 @@ describe('VaultInternal', () => {
     // @ts-ignore
     expect(vault.lock.isLocked()).toBeFalsy();
   });
-  test('only exposes limited commands of VaultInternal', async () => {
-    // Converting a vault to the interface
-    const vaultInterface = vault as Vault;
-
-    // Using the avaliable functions.
-    await vaultInterface.writeF(async (efs) => {
-      await efs.writeFile('test', 'testContent');
-    });
-
-    await vaultInterface.readF(async (efs) => {
-      const content = (await efs.readFile('test')).toString();
-      expect(content).toStrictEqual('testContent');
-    });
-
-    expect(vaultInterface.vaultDataDir).toBeTruthy();
-    expect(vaultInterface.vaultGitDir).toBeTruthy();
-    expect(vaultInterface.vaultId).toBeTruthy();
-    expect(vaultInterface.writeF).toBeTruthy();
-    expect(vaultInterface.writeG).toBeTruthy();
-    expect(vaultInterface.readF).toBeTruthy();
-    expect(vaultInterface.readG).toBeTruthy();
-    expect(vaultInterface.log).toBeTruthy();
-    expect(vaultInterface.version).toBeTruthy();
+  // Locking tests
+  const waitDelay = 200;
+  const runGen = async (gen) => {
+    for await (const _ of gen) {
+      // Do nothing
+    }
+  };
+  test('writeF respects read and write locking', async () => {
+    // @ts-ignore: kidnap lock
+    const lock = vault.lock;
+    // Hold a write lock
+    const releaseWrite = await lock.acquireWrite();
+
+    let finished = false;
+    const writeP = vault.writeF(async () => {
+      finished = true;
+    });
+    await sleep(waitDelay);
+    expect(finished).toBe(false);
+    releaseWrite();
+    await writeP;
+    expect(finished).toBe(true);
+
+    const releaseRead = await lock.acquireRead();
+    finished = false;
+    const writeP2 = vault.writeF(async () => {
+      finished = true;
+    });
+    await sleep(waitDelay);
+    releaseRead();
+    await writeP2;
+    expect(finished).toBe(true);
+  });
+  test('writeG respects read and write locking', async () => {
+    // @ts-ignore: kidnap lock
+    const lock = vault.lock;
+    // Hold a write lock
+    const releaseWrite = await lock.acquireWrite();
+
+    let finished = false;
+    const writeGen = vault.writeG(async function* () {
+      yield;
+      finished = true;
+      yield;
+    });
+    const runP = runGen(writeGen);
+    await sleep(waitDelay);
+    expect(finished).toBe(false);
+    releaseWrite();
+    await runP;
+    expect(finished).toBe(true);
+
+    const releaseRead = await lock.acquireRead();
+    finished = false;
+    const writeGen2 = vault.writeG(async function* () {
+      yield;
+      finished = true;
+      yield;
+    });
+    const runP2 = runGen(writeGen2);
+    await sleep(waitDelay);
+    releaseRead();
+    await runP2;
+    expect(finished).toBe(true);
+  });
+  test('readF respects write locking', async () => {
+    // @ts-ignore: kidnap lock
+    const lock = vault.lock;
+    // Hold a write lock
+    const releaseWrite = await lock.acquireWrite();
+
+    let finished = false;
+    const readP = vault.readF(async () => {
+      finished = true;
+    });
+    await sleep(waitDelay);
+    expect(finished).toBe(false);
+    releaseWrite();
+    await readP;
+    expect(finished).toBe(true);
+  });
+  test('readG respects write locking', async () => {
+    // @ts-ignore: kidnap lock
+    const lock = vault.lock;
+    // Hold a write lock
+    const releaseWrite = await lock.acquireWrite();
+    let finished = false;
+    const readGen = vault.readG(async function* () {
+      yield;
+      finished = true;
+      yield;
+    });
+    const runP = runGen(readGen);
+    await sleep(waitDelay);
+    expect(finished).toBe(false);
+    releaseWrite();
+    await runP;
+    expect(finished).toBe(true);
+  });
+  test('readF allows concurrent reads', async () => {
+    // @ts-ignore: kidnap lock
+    const lock = vault.lock;
+    // Hold a read lock
+    const releaseRead = await lock.acquireRead();
+    const finished: boolean[] = [];
+    const doThing = async () => {
+      finished.push(true);
+    };
+    await Promise.all([
+      vault.readF(doThing),
+      vault.readF(doThing),
+      vault.readF(doThing),
+      vault.readF(doThing),
+    ]);
+    expect(finished.length).toBe(4);
+    releaseRead();
+  });
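+  // Note: readG/writeG return async generators, so the underlying lock is
+  // only released once the generator has been fully consumed -- hence the
+  // `runGen` helper above, which drives a generator to completion. A minimal
+  // usage sketch in the same pattern as these tests:
+  //
+  //   await runGen(vault.readG(async function* () {
+  //     yield; // the lock is held across every yield
+  //   }));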
+  test('readG allows concurrent reads', async () => {
+    // @ts-ignore: kidnap lock
+    const lock = vault.lock;
+    // Hold a read lock
+    const releaseRead = await lock.acquireRead();
+    const finished: boolean[] = [];
+    const doThing = async function* () {
+      yield;
+      finished.push(true);
+      yield;
+    };
+    await Promise.all([
+      runGen(vault.readG(doThing)),
+      runGen(vault.readG(doThing)),
+      runGen(vault.readG(doThing)),
+      runGen(vault.readG(doThing)),
+    ]);
+    expect(finished.length).toBe(4);
+    releaseRead();
+  });
+  test.todo('pullVault respects write locking');
+  // Lifecycle
+  test('can create with CreateVaultInternal', async () => {
+    let vault1: VaultInternal | undefined;
+    try {
+      const vaultId1 = vaultsUtils.generateVaultId();
+      vault1 = await VaultInternal.createVaultInternal({
+        db,
+        efs,
+        keyManager: fakeKeyManager,
+        vaultId: vaultId1,
+        vaultsDb,
+        vaultsDbDomain,
+        logger,
+      });
+      // Data exists for vault now.
+      expect(await efs.readdir('.')).toContain(
+        vaultsUtils.encodeVaultId(vaultId1),
+      );
+    } finally {
+      await vault1?.stop();
+      await vault1?.destroy();
+    }
+  });
+  test('can create an existing vault with CreateVaultInternal', async () => {
+    let vault1: VaultInternal | undefined;
+    let vault2: VaultInternal | undefined;
+    try {
+      const vaultId1 = vaultsUtils.generateVaultId();
+      vault1 = await VaultInternal.createVaultInternal({
+        db,
+        efs,
+        keyManager: fakeKeyManager,
+        vaultId: vaultId1,
+        vaultsDb,
+        vaultsDbDomain,
+        logger,
+      });
+      // Data exists for vault now.
+      expect(await efs.readdir('.')).toContain(
+        vaultsUtils.encodeVaultId(vaultId1),
+      );
      await vault1.stop();
+      // Data persists
+      expect(await efs.readdir('.')).toContain(
+        vaultsUtils.encodeVaultId(vaultId1),
+      );
-
-    // Can we convert back?
-    const vaultNormal = vaultInterface as VaultInternal;
-    expect(vaultNormal.destroy).toBeTruthy(); // This exists again.
+ // Re-opening the vault + vault2 = await VaultInternal.createVaultInternal({ + db, + efs, + keyManager: fakeKeyManager, + vaultId: vaultId1, + vaultsDb, + vaultsDbDomain, + logger, + }); + + // Data still exists and no new data was created + expect(await efs.readdir('.')).toContain( + vaultsUtils.encodeVaultId(vaultId1), + ); + expect(await efs.readdir('.')).toHaveLength(2); + } finally { + await vault1?.stop(); + await vault1?.destroy(); + await vault2?.stop(); + await vault2?.destroy(); + } }); - test('cannot commit when the remote field is set', async () => { + test.todo('can create with CloneVaultInternal'); + test('stop is idempotent', async () => { + // Should complete with no errors + await vault.stop(); + await vault.stop(); + }); + test('destroy is idempotent', async () => { + await vault.stop(); + await vault.destroy(); await vault.destroy(); - vault = await VaultInternal.create({ - vaultId, - keyManager: fakeKeyManager, - efs, - logger, - remote: true, - fresh: true, - }); - const commit = (await vault.log(undefined, 1))[0]; - await vault.version(commit.commitId); - const files = await vault.readF(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual([]); - await expect( - vault.writeF(async (efs) => { - await efs.writeFile('test', 'testdata'); - }), - ).rejects.toThrow(vaultsErrors.ErrorVaultImmutable); }); }); diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index ca0b5f89f..b75a1ed3d 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -1,29 +1,43 @@ import type { NodeId, NodeIdEncoded } from '@/nodes/types'; -import type { VaultId, VaultName } from '@/vaults/types'; -import type { GestaltGraph } from '@/gestalts'; -import type { ACL } from '@/acl'; -import type { NotificationsManager } from '@/notifications'; -import type { VaultInternal } from '@/vaults'; -import type { KeyManager } from '@/keys'; -import type { NodeConnectionManager, NodeManager } from '@/nodes'; -import type { NodeAddress } from '@/nodes/types'; +import type { + VaultAction, + VaultId, + VaultIdString, + VaultName, +} from '@/vaults/types'; +import type NotificationsManager from '@/notifications/NotificationsManager'; +import type ReverseProxy from '@/network/ReverseProxy'; +import type { Host, Port, TLSConfig } from '@/network/types'; import fs from 'fs'; import os from 'os'; import path from 'path'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { IdInternal, utils as idUtils } from '@matrixai/id'; +import { IdInternal } from '@matrixai/id'; import { DB } from '@matrixai/db'; -import { utils as keysUtils } from '@/keys'; -import { PolykeyAgent } from '@'; -import { VaultManager, vaultOps } from '@/vaults'; -import { errors as vaultErrors } from '@/vaults'; -import { utils as nodesUtils } from '@/nodes'; - -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import { destroyed } from '@matrixai/async-init'; +import { running } from '@matrixai/async-init/dist/utils'; +import git from 'isomorphic-git'; +import ACL from '@/acl/ACL'; +import GestaltGraph from '@/gestalts/GestaltGraph'; +import NodeConnectionManager from '@/nodes/NodeConnectionManager'; +import KeyManager from '@/keys/KeyManager'; +import PolykeyAgent from '@/PolykeyAgent'; +import VaultManager from '@/vaults/VaultManager'; +import * as vaultsErrors from '@/vaults/errors'; +import NodeGraph from 
'@/nodes/NodeGraph';
+import * as nodesUtils from '@/nodes/utils';
+import ForwardProxy from '@/network/ForwardProxy';
+import * as vaultsUtils from '@/vaults/utils';
+import * as keysUtils from '@/keys/utils';
+import { sleep } from '@/utils';
+import { VaultInternal } from '@/vaults';
+import * as testsUtils from '../utils';
+
+const mockedGenerateDeterministicKeyPair = jest
+  .spyOn(keysUtils, 'generateDeterministicKeyPair')
+  .mockImplementation((bits, _) => {
+    return keysUtils.generateKeyPair(bits);
+  });
 
 describe('VaultManager', () => {
   const logger = new Logger('VaultManager Test', LogLevel.WARN, [
@@ -31,13 +45,8 @@ describe('VaultManager', () => {
   ]);
   const nonExistentVaultId = IdInternal.fromString('DoesNotExistxxxx');
   const password = 'password';
-  let gestaltGraph: GestaltGraph;
-  let vaultManager: VaultManager;
-  let keyManager: KeyManager;
   let remoteVaultId: VaultId;
-  let localKeynodeId: NodeId;
-  let localKeynodeIdEncoded: NodeIdEncoded;
   let remoteKeynode1Id: NodeId;
   let remoteKeynode1IdEncoded: NodeIdEncoded;
   let remoteKeynode2Id: NodeId;
@@ -49,151 +58,43 @@ describe('VaultManager', () => {
   const secondVaultName = 'SecondTestVault' as VaultName;
   const thirdVaultName = 'ThirdTestVault' as VaultName;
 
-  let localKeynode: PolykeyAgent;
-  let remoteKeynode1: PolykeyAgent, remoteKeynode2: PolykeyAgent;
+  let dataDir: string;
+  let vaultsPath: string;
+  let db: DB;
 
-  let allDataDir: string;
+  // We only ever use this to get a NodeId, so there is no need to create a
+  // whole KeyManager
+  const nodeId = testsUtils.generateRandomNodeId();
+  const dummyKeyManager = {
+    getNodeId: () => nodeId,
+  } as KeyManager;
 
-  beforeAll(async () => {
-    // Creating agents.
-    allDataDir = await fs.promises.mkdtemp(
+  beforeEach(async () => {
+    mockedGenerateDeterministicKeyPair.mockImplementation((bits, _) => {
+      return keysUtils.generateKeyPair(bits);
    });
+    dataDir = await fs.promises.mkdtemp(
      path.join(os.tmpdir(), 'polykey-test-'),
    );
-    localKeynode = await PolykeyAgent.createPolykeyAgent({
-      password,
-      logger: logger.getChild('Local Keynode'),
-      nodePath: path.join(allDataDir, 'localKeynode'),
-    });
-    gestaltGraph = localKeynode.gestaltGraph;
-    vaultManager = localKeynode.vaultManager;
-    keyManager = localKeynode.keyManager;
-    localKeynodeId = localKeynode.keyManager.getNodeId();
-    localKeynodeIdEncoded = nodesUtils.encodeNodeId(localKeynodeId);
-
-    remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({
-      password,
-      logger: logger.getChild('Remote Keynode 1'),
-      nodePath: path.join(allDataDir, 'remoteKeynode1'),
-    });
-    remoteKeynode1Id = remoteKeynode1.keyManager.getNodeId();
-    remoteKeynode1IdEncoded = nodesUtils.encodeNodeId(remoteKeynode1Id);
-    remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({
-      password,
-      logger: logger.getChild('Remote Keynode 2'),
-      nodePath: path.join(allDataDir, 'remoteKeynode2'),
-    });
-    remoteKeynode2Id = remoteKeynode2.keyManager.getNodeId();
-    remoteKeynode2IdEncoded = nodesUtils.encodeNodeId(remoteKeynode2Id);
-
-    // Adding details to each agent.
- await localKeynode.nodeManager.setNode(remoteKeynode1Id, { - host: remoteKeynode1.revProxy.getIngressHost(), - port: remoteKeynode1.revProxy.getIngressPort(), - }); - await localKeynode.nodeManager.setNode(remoteKeynode2Id, { - host: remoteKeynode2.revProxy.getIngressHost(), - port: remoteKeynode2.revProxy.getIngressPort(), - }); - await remoteKeynode1.nodeManager.setNode(localKeynodeId, { - host: localKeynode.revProxy.getIngressHost(), - port: localKeynode.revProxy.getIngressPort(), - }); - await remoteKeynode1.nodeManager.setNode(remoteKeynode2Id, { - host: remoteKeynode2.revProxy.getIngressHost(), - port: remoteKeynode2.revProxy.getIngressPort(), - }); - await remoteKeynode2.nodeManager.setNode(localKeynodeId, { - host: localKeynode.revProxy.getIngressHost(), - port: localKeynode.revProxy.getIngressPort(), - }); - await remoteKeynode2.nodeManager.setNode(remoteKeynode1Id, { - host: remoteKeynode1.revProxy.getIngressHost(), - port: remoteKeynode1.revProxy.getIngressPort(), - }); - - await gestaltGraph.setNode({ - id: remoteKeynode1IdEncoded, - chain: {}, - }); - await gestaltGraph.setNode({ - id: remoteKeynode2IdEncoded, - chain: {}, - }); - await remoteKeynode1.gestaltGraph.setNode({ - id: localKeynodeIdEncoded, - chain: {}, - }); - await remoteKeynode1.gestaltGraph.setNode({ - id: remoteKeynode2IdEncoded, - chain: {}, - }); - await remoteKeynode2.gestaltGraph.setNode({ - id: localKeynodeIdEncoded, - chain: {}, - }); - await remoteKeynode2.gestaltGraph.setNode({ - id: remoteKeynode1IdEncoded, - chain: {}, - }); - - remoteVaultId = await remoteKeynode1.vaultManager.createVault(vaultName); - await remoteKeynode1.vaultManager.shareVault(remoteVaultId, localKeynodeId); - await remoteKeynode1.vaultManager.shareVault( - remoteVaultId, - remoteKeynode2Id, - ); - - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (remoteVault) => { - for (const secret of secretNames.slice(0, 2)) { - await vaultOps.addSecret(remoteVault, secret, 'success?'); - } - }, - ); + vaultsPath = path.join(dataDir, 'VAULTS'); + db = await DB.createDB({ + dbPath: path.join(dataDir, 'DB'), + logger: logger.getChild(DB.name), + }); }); afterEach(async () => { - for (const [, vaultId] of await vaultManager.listVaults()) { - await vaultManager.destroyVault(vaultId); - } - for (const [, vaultId] of await remoteKeynode2.vaultManager.listVaults()) { - await remoteKeynode2.vaultManager.destroyVault(vaultId); - } - }); - - afterAll(async () => { - await remoteKeynode2.stop(); - await remoteKeynode2.destroy(); - await remoteKeynode1.stop(); - await remoteKeynode1.destroy(); - await localKeynode.stop(); - await localKeynode.destroy(); - await fs.promises.rm(allDataDir, { - recursive: true, + await db.stop(); + await db.destroy(); + await fs.promises.rm(dataDir, { force: true, + recursive: true, }); }); test('VaultManager readiness', async () => { - const dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - const db = await DB.createDB({ - dbPath: path.join(dataDir, 'DB'), - crypto: { - key: await keysUtils.generateKey(), - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, - }, - }, - logger: logger.getChild(DB.name), - }); - const vaultManagerReadiness = await VaultManager.createVaultManager({ - vaultsPath: path.join(dataDir, 'VAULTS'), - keyManager: {} as KeyManager, - nodeManager: {} as NodeManager, + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, gestaltGraph: {} as GestaltGraph, 
nodeConnectionManager: {} as NodeConnectionManager, acl: {} as ACL, @@ -201,588 +102,1817 @@ describe('VaultManager', () => { db, logger: logger.getChild(VaultManager.name), }); - - await expect(vaultManagerReadiness.destroy()).rejects.toThrow( - vaultErrors.ErrorVaultManagerRunning, - ); - // Should be a noop - await vaultManagerReadiness.start(); - await vaultManagerReadiness.stop(); - await vaultManagerReadiness.destroy(); - await expect(vaultManagerReadiness.start()).rejects.toThrow( - vaultErrors.ErrorVaultManagerDestroyed, - ); - await expect(async () => { - await vaultManagerReadiness.listVaults(); - }).rejects.toThrow(vaultErrors.ErrorVaultManagerNotRunning); - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); + try { + await expect(vaultManager.destroy()).rejects.toThrow( + vaultsErrors.ErrorVaultManagerRunning, + ); + // Should be a noop + await vaultManager.start(); + await vaultManager.stop(); + await vaultManager.destroy(); + await expect(vaultManager.start()).rejects.toThrow( + vaultsErrors.ErrorVaultManagerDestroyed, + ); + await expect(async () => { + await vaultManager.listVaults(); + }).rejects.toThrow(vaultsErrors.ErrorVaultManagerNotRunning); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); - test('is type correct', () => { - expect(vaultManager).toBeInstanceOf(VaultManager); + test('is type correct', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + expect(vaultManager).toBeInstanceOf(VaultManager); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can create many vaults and open a vault', async () => { - const vaultNames = [ - 'Vault1', - 'Vault2', - 'Vault3', - 'Vault4', - 'Vault5', - 'Vault6', - 'Vault7', - 'Vault8', - 'Vault9', - 'Vault10', - 'Vault11', - 'Vault12', - 'Vault13', - 'Vault14', - 'Vault15', - ]; - for (const vaultName of vaultNames) { - await vaultManager.createVault(vaultName as VaultName); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultNames = [ + 'Vault1', + 'Vault2', + 'Vault3', + 'Vault4', + 'Vault5', + 'Vault6', + 'Vault7', + 'Vault8', + 'Vault9', + 'Vault10', + 'Vault11', + 'Vault12', + 'Vault13', + 'Vault14', + 'Vault15', + ]; + for (const vaultName of vaultNames) { + await vaultManager.createVault(vaultName as VaultName); + } + expect((await vaultManager.listVaults()).size).toEqual(vaultNames.length); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); } - expect((await vaultManager.listVaults()).size).toEqual(vaultNames.length); }); test('can rename a vault', async () => { - const vaultId = await vaultManager.createVault(vaultName); - await vaultManager.renameVault(vaultId, secondVaultName); - await expect(vaultManager.getVaultId(vaultName)).resolves.toBeUndefined(); - await expect( - vaultManager.getVaultId(secondVaultName), - ).resolves.toStrictEqual(vaultId); - await expect(() => - vaultManager.renameVault(nonExistentVaultId, 
'DNE' as VaultName),
-    ).rejects.toThrow(vaultErrors.ErrorVaultsVaultUndefined);
+    const vaultManager = await VaultManager.createVaultManager({
+      vaultsPath,
+      keyManager: dummyKeyManager,
+      gestaltGraph: {} as GestaltGraph,
+      nodeConnectionManager: {} as NodeConnectionManager,
+      acl: {} as ACL,
+      notificationsManager: {} as NotificationsManager,
+      db,
+      logger: logger.getChild(VaultManager.name),
+    });
+    try {
+      const vaultId = await vaultManager.createVault(vaultName);
+      // We can rename the vault here
+      await vaultManager.renameVault(vaultId, secondVaultName);
+      await expect(vaultManager.getVaultId(vaultName)).resolves.toBeUndefined();
+      await expect(
+        vaultManager.getVaultId(secondVaultName),
+      ).resolves.toStrictEqual(vaultId);
+      // Can't rename a non-existent vault
+      await expect(() =>
+        vaultManager.renameVault(nonExistentVaultId, 'DNE' as VaultName),
+      ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultUndefined);
+      await vaultManager.createVault(thirdVaultName);
+      // Can't rename a vault to a name that already exists
+      await expect(
+        vaultManager.renameVault(vaultId, thirdVaultName),
+      ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultDefined);
+    } finally {
+      await vaultManager?.stop();
+      await vaultManager?.destroy();
+    }
   });
   test('can delete a vault', async () => {
-    const secondVaultId = await vaultManager.createVault(secondVaultName);
-    await vaultManager.destroyVault(secondVaultId);
+    const vaultManager = await VaultManager.createVaultManager({
+      vaultsPath,
+      keyManager: dummyKeyManager,
+      gestaltGraph: {} as GestaltGraph,
+      nodeConnectionManager: {} as NodeConnectionManager,
+      acl: {} as ACL,
+      notificationsManager: {} as NotificationsManager,
+      db,
+      logger: logger.getChild(VaultManager.name),
    });
+    try {
+      expect((await vaultManager.listVaults()).size).toBe(0);
+      const secondVaultId = await vaultManager.createVault(secondVaultName);
+      // @ts-ignore: protected method
+      const vault = await vaultManager.getVault(secondVaultId);
+      await vaultManager.destroyVault(secondVaultId);
+      // The mapping should be gone.
+ expect((await vaultManager.listVaults()).size).toBe(0); + // The vault should be destroyed + expect(vault[destroyed]).toBe(true); + // Metadata should be gone + expect(await vaultManager.getVaultMeta(secondVaultId)).toBeUndefined(); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can list vaults', async () => { - const firstVaultId = await vaultManager.createVault(vaultName); - const secondVaultId = await vaultManager.createVault(secondVaultName); - const vaultNames: Array = []; - const vaultIds: Array = []; - const vaultList = await vaultManager.listVaults(); - vaultList.forEach((vaultId, vaultName) => { - vaultNames.push(vaultName); - vaultIds.push(vaultId.toString()); - }); - expect(vaultNames.sort()).toEqual([vaultName, secondVaultName].sort()); - expect(vaultIds.sort()).toEqual( - [firstVaultId.toString(), secondVaultId.toString()].sort(), - ); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const firstVaultId = await vaultManager.createVault(vaultName); + const secondVaultId = await vaultManager.createVault(secondVaultName); + const vaultNames: Array = []; + const vaultIds: Array = []; + const vaultList = await vaultManager.listVaults(); + vaultList.forEach((vaultId, vaultName) => { + vaultNames.push(vaultName); + vaultIds.push(vaultId.toString()); + }); + expect(vaultNames.sort()).toEqual([vaultName, secondVaultName].sort()); + expect(vaultIds.sort()).toEqual( + [firstVaultId.toString(), secondVaultId.toString()].sort(), + ); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('able to read and load existing metadata', async () => { - const vaultNames = [ - 'Vault1', - 'Vault2', - 'Vault3', - 'Vault4', - 'Vault5', - 'Vault6', - 'Vault7', - 'Vault8', - 'Vault9', - 'Vault10', - ]; - for (const vaultName of vaultNames) { - await vaultManager.createVault(vaultName as VaultName); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultNames = [ + 'Vault1', + 'Vault2', + 'Vault3', + 'Vault4', + 'Vault5', + 'Vault6', + 'Vault7', + 'Vault8', + 'Vault9', + 'Vault10', + ]; + for (const vaultName of vaultNames) { + await vaultManager.createVault(vaultName as VaultName); + } + const vaults = await vaultManager.listVaults(); + const vaultId = vaults.get('Vault1' as VaultName) as VaultId; + expect(vaultId).not.toBeUndefined(); + await vaultManager.stop(); + await vaultManager.start(); + const restartedVaultNames: Array = []; + const vaultList = await vaultManager.listVaults(); + vaultList.forEach((_, vaultName) => { + restartedVaultNames.push(vaultName); + }); + expect(restartedVaultNames.sort()).toEqual(vaultNames.sort()); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); } - const vaults = await vaultManager.listVaults(); - const vaultId = vaults.get('Vault1' as VaultName) as VaultId; - expect(vaultId).not.toBeUndefined(); - await vaultManager.stop(); - await vaultManager.start(); - const restartedVaultNames: Array = 
[]; - const vaultList = await vaultManager.listVaults(); - vaultList.forEach((_, vaultName) => { - restartedVaultNames.push(vaultName); - }); - expect(restartedVaultNames.sort()).toEqual(vaultNames.sort()); }); test.skip('cannot concurrently create vaults with the same name', async () => { - const vaults = Promise.all([ - vaultManager.createVault(vaultName), - vaultManager.createVault(vaultName), - ]); - await expect(() => vaults).rejects.toThrow( - vaultErrors.ErrorVaultsVaultDefined, - ); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaults = Promise.all([ + vaultManager.createVault(vaultName), + vaultManager.createVault(vaultName), + ]); + await expect(() => vaults).rejects.toThrow( + vaultsErrors.ErrorVaultsVaultDefined, + ); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can concurrently rename the same vault', async () => { - const vaultId = await vaultManager.createVault(vaultName); - await Promise.all([ - vaultManager.renameVault(vaultId, secondVaultName), - vaultManager.renameVault(vaultId, thirdVaultName), - ]); - const vaultNameTest = (await vaultManager.getVaultMeta(vaultId)).name; - expect(vaultNameTest).toBe(thirdVaultName); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = await vaultManager.createVault(vaultName); + await Promise.all([ + vaultManager.renameVault(vaultId, secondVaultName), + vaultManager.renameVault(vaultId, thirdVaultName), + ]); + const vaultNameTest = (await vaultManager.getVaultMeta(vaultId)) + ?.vaultName; + expect(vaultNameTest).toBe(thirdVaultName); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can concurrently open and rename the same vault', async () => { - const vaultId = await vaultManager.createVault(vaultName); - await Promise.all([ - vaultManager.renameVault(vaultId, secondVaultName), - vaultManager.withVaults([vaultId], async (vault) => vault.vaultId), - ]); - const vaultNameTest = (await vaultManager.getVaultMeta(vaultId)).name; - expect(vaultNameTest).toBe(secondVaultName); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = await vaultManager.createVault(vaultName); + await Promise.all([ + vaultManager.renameVault(vaultId, secondVaultName), + vaultManager.withVaults([vaultId], async (vault) => vault.vaultId), + ]); + const vaultNameTest = (await vaultManager.getVaultMeta(vaultId)) + ?.vaultName; + expect(vaultNameTest).toBe(secondVaultName); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can save the commit state of a vault', async () => { - const vaultId = await vaultManager.createVault(vaultName); - await 
vaultManager.withVaults([vaultId], async (vault) => { - await vault.writeF(async (efs) => { - await efs.writeFile('test', 'test'); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = await vaultManager.createVault(vaultName); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('test', 'test'); + }); }); + + await vaultManager.stop(); + await vaultManager.start(); + + const read = await vaultManager.withVaults( + [vaultId], + async (vaultLoaded) => { + return await vaultLoaded.readF(async (efs) => { + return await efs.readFile('test', { encoding: 'utf8' }); + }); + }, + ); + expect(read).toBe('test'); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('Do actions on a vault using `withVault`', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); + try { + const vault1 = await vaultManager.createVault('testVault1' as VaultName); + const vault2 = await vaultManager.createVault('testVault2' as VaultName); + const vaults = [vault1, vault2]; - await vaultManager.stop(); - await vaultManager.start(); + await vaultManager.withVaults(vaults, async (vault1, vault2) => { + expect(vault1.vaultId).toEqual(vaults[0]); + expect(vault2.vaultId).toEqual(vaults[1]); + await vault1.writeF(async (fs) => { + await fs.writeFile('test', 'test1'); + }); + await vault2.writeF(async (fs) => { + await fs.writeFile('test', 'test2'); + }); + }); - const read = await vaultManager.withVaults( - [vaultId], - async (vaultLoaded) => { - return await vaultLoaded.readF(async (efs) => { - return await efs.readFile('test', { encoding: 'utf8' }); + await vaultManager.withVaults(vaults, async (vault1, vault2) => { + const a = await vault1.readF((fs) => { + return fs.readFile('test'); }); - }, - ); - expect(read).toBe('test'); - }); - test('able to recover metadata after complex operations', async () => { - const vaultNames = ['Vault1', 'Vault2', 'Vault3', 'Vault4', 'Vault5']; - const alteredVaultNames = [ - 'Vault1', - 'Vault2', - 'Vault3', - 'Vault6', - 'Vault10', - ]; - for (const vaultName of vaultNames) { - await vaultManager.createVault(vaultName as VaultName); + const b = await vault2.readF((fs) => { + return fs.readFile('test'); + }); + + expect(a.toString()).toEqual('test1'); + expect(b.toString()).toEqual('test2'); + }); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); } - const v5 = await vaultManager.getVaultId('Vault5' as VaultName); - expect(v5).not.toBeUndefined(); - await vaultManager.destroyVault(v5!); - const v4 = await vaultManager.getVaultId('Vault4' as VaultName); - expect(v4).toBeTruthy(); - await vaultManager.renameVault(v4!, 'Vault10' as VaultName); - const v6 = await vaultManager.createVault('Vault6' as VaultName); + }); + describe('With remote agents', () => { + let allDataDir: string; + let keyManager: KeyManager; + let fwdProxy: ForwardProxy; + let nodeGraph: NodeGraph; + let 
nodeConnectionManager: NodeConnectionManager; + let remoteKeynode1: PolykeyAgent, remoteKeynode2: PolykeyAgent; + let localNodeId: NodeId; + let localNodeIdEncoded: NodeIdEncoded; - await vaultManager.withVaults([v6], async (vault6) => { - await vault6.writeF(async (efs) => { - await efs.writeFile('reloaded', 'reload'); + beforeAll(async () => { + // Creating agents. + allDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + + remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 1'), + nodePath: path.join(allDataDir, 'remoteKeynode1'), }); - }); + remoteKeynode1Id = remoteKeynode1.keyManager.getNodeId(); + remoteKeynode1IdEncoded = nodesUtils.encodeNodeId(remoteKeynode1Id); + remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 2'), + nodePath: path.join(allDataDir, 'remoteKeynode2'), + }); + remoteKeynode2Id = remoteKeynode2.keyManager.getNodeId(); + remoteKeynode2IdEncoded = nodesUtils.encodeNodeId(remoteKeynode2Id); - const vn: Array = []; - (await vaultManager.listVaults()).forEach((_, vaultName) => - vn.push(vaultName), - ); - expect(vn.sort()).toEqual(alteredVaultNames.sort()); - await vaultManager.stop(); - await vaultManager.start(); - await vaultManager.createVault('Vault7' as VaultName); - - const v10 = await vaultManager.getVaultId('Vault10' as VaultName); - expect(v10).not.toBeUndefined(); - alteredVaultNames.push('Vault7'); - expect((await vaultManager.listVaults()).size).toEqual( - alteredVaultNames.length, - ); - const vnAltered: Array = []; - (await vaultManager.listVaults()).forEach((_, vaultName) => - vnAltered.push(vaultName), - ); - expect(vnAltered.sort()).toEqual(alteredVaultNames.sort()); - const file = await vaultManager.withVaults([v6], async (reloadedVault) => { - return await reloadedVault.readF(async (efs) => { - return await efs.readFile('reloaded', { encoding: 'utf8' }); + // Adding details to each agent. 
+ await remoteKeynode1.nodeGraph.setNode(remoteKeynode2Id, { + host: remoteKeynode2.revProxy.getIngressHost(), + port: remoteKeynode2.revProxy.getIngressPort(), + }); + await remoteKeynode2.nodeGraph.setNode(remoteKeynode1Id, { + host: remoteKeynode1.revProxy.getIngressHost(), + port: remoteKeynode1.revProxy.getIngressPort(), }); - }); - expect(file).toBe('reload'); - }); - test('clone vaults from a remote keynode using a vault name', async () => { - await expect(() => - vaultManager.cloneVault( - remoteKeynode1.keyManager.getNodeId(), - 'not-existing' as VaultName, - ), - ).rejects.toThrow(vaultErrors.ErrorVaultsVaultUndefined); - await vaultManager.cloneVault( - remoteKeynode1.keyManager.getNodeId(), - vaultName, - ); - const vaultId = await vaultManager.getVaultId(vaultName); - if (vaultId === undefined) fail('VaultId is not found.'); - const [file, secretsList] = await vaultManager.withVaults( - [vaultId], - async (vaultClone) => { - const file = await vaultClone.readF(async (efs) => { - return await efs.readFile(secretNames[0], { encoding: 'utf8' }); - }); - const secretsList = (await vaultOps.listSecrets(vaultClone)).sort(); - return [file, secretsList]; - }, - ); - expect(file).toBe('success?'); - expect(secretsList).toStrictEqual(secretNames.slice(0, 2).sort()); - }, 100000); - test('clone and pull vaults using a vault id', async () => { - const vaultId = await vaultManager.cloneVault( - remoteKeynode1.keyManager.getNodeId(), - remoteVaultId, - ); - await vaultManager.withVaults([vaultId], async (vaultClone) => { - const file = await vaultClone.readF(async (efs) => { - return await efs.readFile(secretNames[0], { encoding: 'utf8' }); + await remoteKeynode1.gestaltGraph.setNode({ + id: remoteKeynode2IdEncoded, + chain: {}, + }); + await remoteKeynode2.gestaltGraph.setNode({ + id: remoteKeynode1IdEncoded, + chain: {}, + }); + }); + afterAll(async () => { + await remoteKeynode2.stop(); + await remoteKeynode2.destroy(); + await remoteKeynode1.stop(); + await remoteKeynode1.destroy(); + await fs.promises.rm(allDataDir, { + recursive: true, + force: true, }); - expect(file).toBe('success?'); - expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( - secretNames.slice(0, 2).sort(), - ); }); + beforeEach(async () => { + remoteVaultId = await remoteKeynode1.vaultManager.createVault(vaultName); - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (remoteVault) => { - for (const secret of secretNames.slice(2)) { - await vaultOps.addSecret(remoteVault, secret, 'second success?'); - } - }, - ); + await remoteKeynode1.gestaltGraph.stop(); + await remoteKeynode1.gestaltGraph.start({ fresh: true }); + await remoteKeynode1.acl.stop(); + await remoteKeynode1.acl.start({ fresh: true }); - await vaultManager.pullVault({ vaultId }); + nodeGraph = await NodeGraph.createNodeGraph({ + db, + keyManager: dummyKeyManager, + logger, + }); + fwdProxy = new ForwardProxy({ + authToken: 'auth', + logger, + }); - await vaultManager.withVaults([vaultId], async (vaultClone) => { - const file = await vaultClone.readF(async (efs) => { - return await efs.readFile(secretNames[2], { encoding: 'utf8' }); + keyManager = await KeyManager.createKeyManager({ + keysPath: path.join(allDataDir, 'allKeyManager'), + password: 'password', + logger, }); - expect(file).toBe('second success?'); - expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( - secretNames.sort(), - ); - }); + localNodeId = keyManager.getNodeId(); + localNodeIdEncoded = nodesUtils.encodeNodeId(localNodeId); 
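Nearly every test in this describe block repeats the same three-layer grant before a clone or pull can succeed: register the caller's gestalt on the remote, give that gestalt the 'scan' action, then give the specific vault the 'clone' and 'pull' actions. A sketch of the pattern as one hypothetical helper (the individual calls are the ones the tests use verbatim; the helper itself is not in the patch):

// Hypothetical consolidation of the permission boilerplate repeated below.
// All three layers must agree before a remote vault operation is allowed.
async function grantVaultAccess(
  remote: PolykeyAgent,
  nodeId: NodeId,
  vaultId: VaultId,
): Promise<void> {
  // The remote must know the caller's gestalt before actions can be set
  await remote.gestaltGraph.setNode({
    id: nodesUtils.encodeNodeId(nodeId),
    chain: {},
  });
  // Gestalt-level permission to enumerate vaults
  await remote.gestaltGraph.setGestaltActionByNode(nodeId, 'scan');
  // Vault-level permissions for the concrete operations
  await remote.acl.setVaultAction(vaultId, nodeId, 'clone');
  await remote.acl.setVaultAction(vaultId, nodeId, 'pull');
}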
- await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (remoteVault) => { - for (const secret of secretNames.slice(2)) { - await vaultOps.deleteSecret(remoteVault, secret); - } - }, - ); - }); - test('reject cloning and pulling when permissions are not set', async () => { - await remoteKeynode1.vaultManager.unshareVault( - remoteVaultId, - localKeynodeId, - ); - await expect(() => - vaultManager.cloneVault(remoteKeynode1Id, remoteVaultId), - ).rejects.toThrow(vaultErrors.ErrorVaultsPermissionDenied); - expect((await vaultManager.listVaults()).size).toBe(0); - await remoteKeynode1.vaultManager.shareVault(remoteVaultId, localKeynodeId); - const clonedVaultId = await vaultManager.cloneVault( - remoteKeynode1Id, - remoteVaultId, - ); - await vaultManager.withVaults([clonedVaultId], async (clonedVault) => { - const file = await clonedVault.readF(async (efs) => { - return await efs.readFile(secretNames[0], { encoding: 'utf8' }); + const tlsConfig: TLSConfig = { + keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, + certChainPem: await keyManager.getRootCertChainPem(), + }; + + await fwdProxy.start({ tlsConfig }); + const dummyRevProxy = { + getIngressHost: () => 'localhost' as Host, + getIngressPort: () => 0 as Port, + } as ReverseProxy; + + nodeConnectionManager = new NodeConnectionManager({ + keyManager, + nodeGraph, + fwdProxy, + revProxy: dummyRevProxy, + logger, + }); + await nodeConnectionManager.start(); + + await nodeGraph.setNode(remoteKeynode1Id, { + host: remoteKeynode1.revProxy.getIngressHost(), + port: remoteKeynode1.revProxy.getIngressPort(), + }); + await nodeGraph.setNode(remoteKeynode2Id, { + host: remoteKeynode2.revProxy.getIngressHost(), + port: remoteKeynode2.revProxy.getIngressPort(), }); - expect(file).toBe('success?'); + }); + afterEach(async () => { + await remoteKeynode1.vaultManager.destroyVault(remoteVaultId); + await nodeConnectionManager.stop(); + await fwdProxy.stop(); + await nodeGraph.stop(); + await nodeGraph.destroy(); + await keyManager.stop(); + await keyManager.destroy(); }); - await remoteKeynode1.vaultManager.unshareVault( - remoteVaultId, - localKeynodeId, - ); - await expect(() => - vaultManager.pullVault({ vaultId: clonedVaultId }), - ).rejects.toThrow(vaultErrors.ErrorVaultsPermissionDenied); + test('clone vaults from a remote keynode using a vault name', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); - await vaultManager.withVaults([clonedVaultId], async (clonedVault) => { - await expect(vaultOps.listSecrets(clonedVault)).resolves.toStrictEqual( - secretNames.slice(0, 2), - ); - }); + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); - await 
remoteKeynode1.vaultManager.shareVault(remoteVaultId, localKeynodeId); - }); - test('throw when trying to commit to a cloned vault', async () => { - const clonedVaultId = await vaultManager.cloneVault( - remoteKeynode1Id, - remoteVaultId, - ); - await vaultManager.withVaults([clonedVaultId], async (clonedVault) => { - await expect( - vaultOps.renameSecret(clonedVault, secretNames[0], secretNames[2]), - ).rejects.toThrow(vaultErrors.ErrorVaultImmutable); + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + const [file, secretsList] = await vaultManager.withVaults( + [vaultId], + async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + return [file, secretsList]; + }); + }, + ); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); - }); - test( - 'clone and pull from other cloned vaults', - async () => { - const clonedVaultRemote2Id = await remoteKeynode2.vaultManager.cloneVault( - remoteKeynode1Id, - remoteVaultId, - ); - await localKeynode.acl.setNodePerm(remoteKeynode2Id, { - gestalt: { - notify: null, - }, - vaults: {}, + test('clone vaults from a remote keynode using a vault name with no history', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - await remoteKeynode2.vaultManager.shareVault( - clonedVaultRemote2Id, - localKeynodeId, - ); - const notification = ( - await localKeynode.notificationsManager.readNotifications() - ).pop(); - expect(notification?.data['type']).toBe('VaultShare'); - expect(notification?.data['vaultId']).toBe( - idUtils.toString(clonedVaultRemote2Id), - ); - expect(notification?.data['vaultName']).toBe(vaultName); - expect(notification?.data['actions']['clone']).toBeNull(); - expect(notification?.data['actions']['pull']).toBeNull(); - await vaultManager.cloneVault(remoteKeynode2Id, clonedVaultRemote2Id); - const vaultIdClone = await vaultManager.getVaultId(vaultName); - expect(vaultIdClone).not.toBeUndefined(); - await vaultManager.withVaults([vaultIdClone!], async (vaultClone) => { - expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( - secretNames.slice(0, 2).sort(), - ); - }); - - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (remoteVault) => { - for (const secret of secretNames.slice(2)) { - await vaultOps.addSecret(remoteVault, secret, 'success?'); - } - }, - ); + try { + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); - await vaultManager.pullVault({ - vaultId: vaultIdClone!, - pullNodeId: remoteKeynode1Id, - pullVaultNameOrId: remoteVaultId, + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const 
vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('fails to clone non existing vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - await vaultManager.withVaults([vaultIdClone!], async (vaultClone) => { - expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( - secretNames.sort(), + try { + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await expect(() => + vaultManager.cloneVault( + remoteKeynode1Id, + 'not-existing' as VaultName, + ), + ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultUndefined); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('clone and pull vaults using a vault id', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); + try { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (remoteVault) => { - for (const secret of secretNames.slice(2)) { - await vaultOps.deleteSecret(remoteVault, secret); - } - }, - ); - }, - global.defaultTimeout * 2, - ); - // Irrelevant for the moment as cloned vaults are immutable but will - // be useful in the future - test.skip('manage pulling from different remotes', async () => { - const clonedVaultRemote2Id = await remoteKeynode2.vaultManager.cloneVault( - remoteKeynode1Id, - remoteVaultId, - ); + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); - await remoteKeynode2.vaultManager.shareVault( - clonedVaultRemote2Id, - localKeynodeId, - ); + await vaultManager.cloneVault(remoteKeynode1Id, remoteVaultId); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + const [file, secretsList] = await vaultManager.withVaults( + [vaultId], + async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + return [file, secretsList]; + }); + }, + ); + expect(file).toBe('secret1'); + 
expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('should reject cloning when permissions are not set', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // Should reject with no permissions set + await expect(() => + vaultManager.cloneVault(remoteKeynode1Id, remoteVaultId), + ).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + // No new vault created + expect((await vaultManager.listVaults()).size).toBe(0); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('should reject Pulling when permissions are not set', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); - const vaultCloneId = await vaultManager.cloneVault( - remoteKeynode2Id, - clonedVaultRemote2Id, - ); + const clonedVaultId = await vaultManager.cloneVault( + remoteKeynode1Id, + remoteVaultId, + ); - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (remoteVault) => { - await vaultOps.addSecret(remoteVault, secretNames[2], 'success?'); - }, - ); - await vaultManager.pullVault({ - vaultId: vaultCloneId, - pullNodeId: remoteKeynode1Id, - pullVaultNameOrId: vaultName, + await expect(() => + vaultManager.pullVault({ vaultId: clonedVaultId }), + ).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); + test('can pull a cloned vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + }); + }, + ); - await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { - expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( - secretNames.slice(0, 3).sort(), - ); - }); + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await 
vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + await vaultManager.withVaults([vaultId], async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).not.toContain('secret-2'); + }); + }); - await remoteKeynode2.vaultManager.withVaults( - [clonedVaultRemote2Id], - async (clonedVaultRemote2) => { - await vaultOps.addSecret( - clonedVaultRemote2, - secretNames[3], - 'second success?', + // Creating new history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret2'); + }); + }, ); - }, - ); - await vaultManager.pullVault({ vaultId: vaultCloneId }); - await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { - expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( - secretNames.sort(), - ); - }); - }); - test('Do actions on a vault using `withVault`', async () => { - const vault1 = await vaultManager.createVault('testVault1' as VaultName); - const vault2 = await vaultManager.createVault('testVault2' as VaultName); - const vaults = [vault1, vault2]; + // Pulling vault + await vaultManager.pullVault({ + vaultId: vaultId, + }); - await vaultManager.withVaults(vaults, async (vault1, vault2) => { - expect(vault1.vaultId).toEqual(vaults[0]); - expect(vault2.vaultId).toEqual(vaults[1]); - await vault1.writeF(async (fs) => { - await fs.writeFile('test', 'test1'); + // Should have new data + await vaultManager.withVaults([vaultId], async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); + }); + }); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('manage pulling from different remotes', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - await vault2.writeF(async (fs) => { - await fs.writeFile('test', 'test2'); + try { + // Initial history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + await remoteVault.writeF(async (efs) => { + await efs.writeFile(secretNames[0], 'success?'); + await efs.writeFile(secretNames[1], 'success?'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await remoteKeynode1.gestaltGraph.setNode({ + id: remoteKeynode2IdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + remoteKeynode2Id, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + remoteKeynode2Id, + 
'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + remoteKeynode2Id, + 'pull', + ); + + const clonedVaultRemote2Id = + await remoteKeynode2.vaultManager.cloneVault( + remoteKeynode1Id, + remoteVaultId, + ); + + await remoteKeynode2.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode2.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode2.acl.setVaultAction( + clonedVaultRemote2Id, + localNodeId, + 'clone', + ); + await remoteKeynode2.acl.setVaultAction( + clonedVaultRemote2Id, + localNodeId, + 'pull', + ); + const vaultCloneId = await vaultManager.cloneVault( + remoteKeynode2Id, + clonedVaultRemote2Id, + ); + + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + await remoteVault.writeF(async (efs) => { + await efs.writeFile(secretNames[2], 'success?'); + }); + }, + ); + await vaultManager.pullVault({ + vaultId: vaultCloneId, + pullNodeId: remoteKeynode1Id, + pullVaultNameOrId: vaultName, + }); + await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { + await vaultClone.readF(async (efs) => { + expect((await efs.readdir('.')).sort()).toStrictEqual( + secretNames.slice(0, 3).sort(), + ); + }); + }); + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + await remoteVault.writeF(async (efs) => { + await efs.writeFile(secretNames[3], 'second success?'); + }); + }, + ); + await vaultManager.pullVault({ vaultId: vaultCloneId }); + await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { + await vaultClone.readF(async (efs) => { + expect((await efs.readdir('.')).sort()).toStrictEqual( + secretNames.sort(), + ); + }); + }); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('able to recover metadata after complex operations', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); + try { + const vaultNames = ['Vault1', 'Vault2', 'Vault3', 'Vault4', 'Vault5']; + const alteredVaultNames = [ + 'Vault1', + 'Vault2', + 'Vault3', + 'Vault6', + 'Vault10', + ]; + for (const vaultName of vaultNames) { + await vaultManager.createVault(vaultName as VaultName); + } + const v5 = await vaultManager.getVaultId('Vault5' as VaultName); + expect(v5).not.toBeUndefined(); + await vaultManager.destroyVault(v5!); + const v4 = await vaultManager.getVaultId('Vault4' as VaultName); + expect(v4).toBeTruthy(); + await vaultManager.renameVault(v4!, 'Vault10' as VaultName); + const v6 = await vaultManager.createVault('Vault6' as VaultName); + + await vaultManager.withVaults([v6], async (vault6) => { + await vault6.writeF(async (efs) => { + await efs.writeFile('reloaded', 'reload'); + }); + }); + + const vn: Array<VaultName> = []; + (await vaultManager.listVaults()).forEach((_, vaultName) => + vn.push(vaultName), + ); + expect(vn.sort()).toEqual(alteredVaultNames.sort()); + await vaultManager.stop(); + await vaultManager.start(); + await vaultManager.createVault('Vault7' as VaultName); + + const v10 = await vaultManager.getVaultId('Vault10' as VaultName); + expect(v10).not.toBeUndefined(); + alteredVaultNames.push('Vault7'); + expect((await vaultManager.listVaults()).size).toEqual( + alteredVaultNames.length, + ); + const vnAltered: Array<VaultName> =
[]; + (await vaultManager.listVaults()).forEach((_, vaultName) => + vnAltered.push(vaultName), + ); + expect(vnAltered.sort()).toEqual(alteredVaultNames.sort()); + const file = await vaultManager.withVaults( + [v6], + async (reloadedVault) => { + return await reloadedVault.readF(async (efs) => { + return await efs.readFile('reloaded', { encoding: 'utf8' }); + }); + }, + ); + + expect(file).toBe('reload'); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); + test('throw when trying to commit to a cloned vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); - await vaultManager.withVaults(vaults, async (vault1, vault2) => { - const a = await vault1.readF((fs) => { - return fs.readFile('test'); + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + await vaultManager.withVaults([vaultId], async (vaultClone) => { + await expect( + vaultClone.writeF(async (efs) => { + await efs.writeFile('secret-3', 'secret3'); + }), + ).rejects.toThrow(vaultsErrors.ErrorVaultRemoteDefined); + }); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test("test pulling a vault that isn't remote", async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - const b = await vault2.readF((fs) => { - return fs.readFile('test'); + try { + // Creating some state at the remote + const vaultId = await vaultManager.createVault('testVault1'); + await expect(vaultManager.pullVault({ vaultId })).rejects.toThrow( + vaultsErrors.ErrorVaultRemoteUndefined, + ); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('pullVault respects locking', async () => { + // This should respect the VaultManager read lock + // and the VaultInternal write lock + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); + const pullVaultMock = jest.spyOn(VaultInternal.prototype, 'pullVault'); + const gitPullMock = jest.spyOn(git, 'pull'); + try { + // Creating some state at the remote + await 
remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + + // Creating new history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); - expect(a.toString()).toEqual('test1'); - expect(b.toString()).toEqual('test2'); + // @ts-ignore: kidnap vaultManager map and grabbing lock + const vaultsMap = vaultManager.vaultMap; + const vaultAndLock = vaultsMap.get(vaultId.toString() as VaultIdString); + const lock = vaultAndLock!.lock; + const releaseWrite = await lock.acquireWrite(); + + // Pulling vault respects VaultManager write lock + const pullP = vaultManager.pullVault({ + vaultId: vaultId, + }); + await sleep(200); + expect(pullVaultMock).not.toHaveBeenCalled(); + await releaseWrite(); + await pullP; + expect(pullVaultMock).toHaveBeenCalled(); + pullVaultMock.mockClear(); + + // Creating new history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-3', 'secret3'); + }); + }, + ); + + // Respects VaultInternal write lock + const vault = vaultAndLock!.vault!; + // @ts-ignore: kidnap vault lock + const vaultLock = vault.lock; + const releaseVaultWrite = await vaultLock.acquireWrite(); + // Pulling vault respects VaultManager write lock + gitPullMock.mockClear(); + const pullP2 = vaultManager.pullVault({ + vaultId: vaultId, + }); + await sleep(200); + expect(gitPullMock).not.toHaveBeenCalled(); + await releaseVaultWrite(); + await pullP2; + expect(gitPullMock).toHaveBeenCalled(); + } finally { + pullVaultMock.mockRestore(); + gitPullMock.mockRestore(); + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); }); - // FIXME: remove? not relevant anymore? - test.skip('WorkingDirIndex is maintained across certain actions', async () => { - const vaultId = await vaultManager.createVault('testVault1' as VaultName); - const oid2 = await vaultManager.withVaults([vaultId], async (vault) => { - await vault.writeF(async (fs) => { - await fs.writeFile('test1', 'test1'); - }); - await vault.writeF(async (fs) => { - await fs.writeFile('test2', 'test2'); + test('handleScanVaults should list all vaults with permissions', async () => { + // 1. we need to set up state. 
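handleScanVaults is the server-side half of vault scanning: an async generator that, per the assertions in the test below, yields one { vaultId, vaultName, vaultPermissions } record per vault the requesting node may see, and rejects with ErrorVaultsPermissionDenied when that node's gestalt lacks the 'scan' action. A minimal consumption sketch (names come from the test; the string-keyed accumulator type is an assumption):

// Sketch: collect what nodeId1 is allowed to see. The item shape is taken
// from the assertions below; keying the accumulator by the stringified
// vault id is an assumption, not part of the patch.
const visible: Record<string, [string, Array<string>]> = {};
for await (const v of vaultManager.handleScanVaults(nodeId1)) {
  visible[v.vaultId.toString()] = [v.vaultName, v.vaultPermissions];
}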
+ const acl = await ACL.createACL({ + db, + logger, + }); + const gestaltGraph = await GestaltGraph.createGestaltGraph({ + db, + acl, + logger, + }); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + nodeConnectionManager: {} as NodeConnectionManager, + acl, + gestaltGraph, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // Setting up state. + const nodeId1 = testsUtils.generateRandomNodeId(); + const nodeId2 = testsUtils.generateRandomNodeId(); + await gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(nodeId1), + chain: {}, }); - const oid2 = (await vault.log(undefined, 1)).pop()!.commitId; - await vault.writeF(async (fs) => { - await fs.writeFile('test3', 'test3'); + await gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(nodeId2), + chain: {}, }); - await vault.version(oid2); - return oid2; + await gestaltGraph.setGestaltActionByNode(nodeId1, 'scan'); + + const vault1 = await vaultManager.createVault('testVault1' as VaultName); + const vault2 = await vaultManager.createVault('testVault2' as VaultName); + const vault3 = await vaultManager.createVault('testVault3' as VaultName); + + // Setting permissions + await acl.setVaultAction(vault1, nodeId1, 'clone'); + await acl.setVaultAction(vault1, nodeId1, 'pull'); + await acl.setVaultAction(vault2, nodeId1, 'clone'); + // No permissions for vault3 + + // scanning vaults + const gen = vaultManager.handleScanVaults(nodeId1); + const vaults: Record = {}; + for await (const vault of gen) { + vaults[vault.vaultId] = [vault.vaultName, vault.vaultPermissions]; + } + expect(vaults[vault1]).toEqual(['testVault1', ['clone', 'pull']]); + expect(vaults[vault2]).toEqual(['testVault2', ['clone']]); + expect(vaults[vault3]).toBeUndefined(); + + // Should throw due to no permission + await expect(async () => { + for await (const _ of vaultManager.handleScanVaults(nodeId2)) { + // Should throw + } + }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + // Should throw due to lack of scan permission + await gestaltGraph.setGestaltActionByNode(nodeId2, 'notify'); + await expect(async () => { + for await (const _ of vaultManager.handleScanVaults(nodeId2)) { + // Should throw + } + }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + } finally { + await vaultManager.stop(); + await vaultManager.destroy(); + await gestaltGraph.stop(); + await gestaltGraph.destroy(); + await acl.stop(); + await acl.destroy(); + } + }); + test('ScanVaults should get all vaults with permissions from remote node', async () => { + // 1. we need to set up state. 
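The next test exercises the client-side half over a real connection: a local VaultManager, wired to a live remote agent through a NodeConnectionManager, streams the remote's listing via scanVaults. The field names (vaultIdEncoded, vaultName, vaultPermissions) are read straight off the test's assertions; a sketch of a typical caller:

// Sketch: print what a remote node will let us clone or pull.
// Field names are taken from this test; error handling is elided.
for await (const item of vaultManager.scanVaults(targetNodeId)) {
  logger.info(
    `${item.vaultName} (${item.vaultIdEncoded}): ${item.vaultPermissions.join(', ')}`,
  );
}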
+ const remoteAgent = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(dataDir, 'remoteNode'), + logger, }); - await vaultManager.closeVault(vaultId); - await vaultManager.withVaults([vaultId], async (vault) => { - const vaultInternal = vault as VaultInternal; - const currentOid = ''; // FIXME: vaultInternal.getworkingDirIndex(); - await vault.readF(async (fs) => { - expect(await fs.readdir('.')).toEqual(['test1', 'test2']); - }); - expect(currentOid).toStrictEqual(oid2); + const acl = await ACL.createACL({ + db, + logger, }); - }); - describe('Scanning nodes', () => { - let server: PolykeyAgent; - let serverNodeId: NodeId; - let serverNodeAddress: NodeAddress; - let allDataDir: string; + const gestaltGraph = await GestaltGraph.createGestaltGraph({ + db, + acl, + logger, + }); + const nodeGraph = await NodeGraph.createNodeGraph({ + db, + keyManager: dummyKeyManager, + logger, + }); + const fwdProxy = new ForwardProxy({ + authToken: 'auth', + logger, + }); + const keyManager = await KeyManager.createKeyManager({ + keysPath: path.join(dataDir, 'keys'), + password: 'password', + logger, + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, + certChainPem: await keyManager.getRootCertChainPem(), + }, + }); + const nodeConnectionManager = new NodeConnectionManager({ + keyManager, + logger, + nodeGraph, + fwdProxy, + revProxy: {} as ReverseProxy, + connConnectTime: 1000, + }); + await nodeConnectionManager.start(); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager, + nodeConnectionManager, + acl, + gestaltGraph, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // Setting up state. 
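One pattern worth noting in the setup just above, and throughout this file: collaborators that the exercised code path never calls are satisfied with bare casts ({} as ReverseProxy, {} as NotificationsManager), keeping each test focused on a single seam. Where a member is actually touched, a one-method object is enough, as in this stub taken from the beforeEach earlier in the file:

// The cast-stub pattern: only the members the code under test actually
// touches need to exist; everything else is an empty cast.
const dummyRevProxy = {
  getIngressHost: () => 'localhost' as Host,
  getIngressPort: () => 0 as Port,
} as ReverseProxy;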
+ const targetNodeId = remoteAgent.keyManager.getNodeId(); + const nodeId1 = keyManager.getNodeId(); - beforeAll(async () => { - allDataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - server = await PolykeyAgent.createPolykeyAgent({ - password, - logger, - nodePath: path.join(allDataDir, 'server'), + // Letting nodeGraph know where the remote agent is + await nodeGraph.setNode(targetNodeId, { + host: 'localhost' as Host, + port: remoteAgent.revProxy.getIngressPort(), }); - serverNodeId = server.keyManager.getNodeId(); - serverNodeAddress = { - host: server.revProxy.getIngressHost(), - port: server.revProxy.getIngressPort(), - }; - }, global.polykeyStartupTimeout * 2); - afterAll(async () => { - await server.stop(); - await server.destroy(); - await fs.promises.rm(allDataDir, { force: true, recursive: true }); - }); - test('scans the targets vaults', async () => { - await localKeynode.nodeGraph.setNode(serverNodeId, serverNodeAddress); - await server.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(keyManager.getNodeId()), + await remoteAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(nodeId1), chain: {}, }); - await server.gestaltGraph.setGestaltActionByNode( - keyManager.getNodeId(), - 'scan', + + const vault1 = await remoteAgent.vaultManager.createVault( + 'testVault1' as VaultName, + ); + const vault2 = await remoteAgent.vaultManager.createVault( + 'testVault2' as VaultName, + ); + const vault3 = await remoteAgent.vaultManager.createVault( + 'testVault3' as VaultName, ); - const vaultName1 = 'vn1' as VaultName; - const vaultName2 = 'vn2' as VaultName; - const vaultName3 = 'vn3' as VaultName; - const v1Id = await server.vaultManager.createVault(vaultName1); - const v2Id = await server.vaultManager.createVault(vaultName2); - const v3Id = await server.vaultManager.createVault(vaultName3); + // Scanning vaults - const vaultList: Array<[VaultName, VaultId]> = []; + // Should throw due to no permission + await expect(async () => { + for await (const _ of vaultManager.scanVaults(targetNodeId)) { + // Should throw + } + }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + // Should throw due to lack of scan permission + await remoteAgent.gestaltGraph.setGestaltActionByNode(nodeId1, 'notify'); + await expect(async () => { + for await (const _ of vaultManager.scanVaults(targetNodeId)) { + // Should throw + } + }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); - vaultList.push([vaultName1, v1Id]); - vaultList.push([vaultName2, v2Id]); - vaultList.push([vaultName3, v3Id]); + // Setting permissions + await remoteAgent.gestaltGraph.setGestaltActionByNode(nodeId1, 'scan'); + await remoteAgent.acl.setVaultAction(vault1, nodeId1, 'clone'); + await remoteAgent.acl.setVaultAction(vault1, nodeId1, 'pull'); + await remoteAgent.acl.setVaultAction(vault2, nodeId1, 'clone'); + // No permissions for vault3 - const vaults = await vaultManager.scanNodeVaults(serverNodeId); - expect(vaults.sort()).toStrictEqual(vaultList.sort()); + const gen = vaultManager.scanVaults(targetNodeId); + const vaults: Record = {}; + for await (const vault of gen) { + vaults[vault.vaultIdEncoded] = [ + vault.vaultName, + vault.vaultPermissions, + ]; + } - await server.gestaltGraph.unsetGestaltActionByNode( - keyManager.getNodeId(), - 'scan', - ); + expect(vaults[vaultsUtils.encodeVaultId(vault1)]).toEqual([ + 'testVault1', + ['clone', 'pull'], + ]); + expect(vaults[vaultsUtils.encodeVaultId(vault2)]).toEqual([ + 'testVault2', + ['clone'], + ]); + 
expect(vaults[vaultsUtils.encodeVaultId(vault3)]).toBeUndefined(); + } finally { + await vaultManager.stop(); + await vaultManager.destroy(); + await nodeConnectionManager.stop(); + await fwdProxy.stop(); + await nodeGraph.stop(); + await nodeGraph.destroy(); + await gestaltGraph.stop(); + await gestaltGraph.destroy(); + await acl.stop(); + await acl.destroy(); + await remoteAgent.stop(); + await remoteAgent.destroy(); + } + }); + test('stopping respects locks', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - test('fails to scan the targets vaults without permission', async () => { - await localKeynode.nodeGraph.setNode(serverNodeId, serverNodeAddress); - await server.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(keyManager.getNodeId()), - chain: {}, + try { + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + // Create the vault + const vaultId = await vaultManager.createVault('vaultName'); + // Getting and holding the lock + const vaultAndLock = vaultMap.get(vaultId.toString() as VaultIdString)!; + const lock = vaultAndLock.lock; + const vault = vaultAndLock.vault!; + const release = await lock.acquireWrite(); + // Try to destroy + const closeP = vaultManager.closeVault(vaultId); + await sleep(1000); + // Shouldn't be closed + expect(vault[running]).toBe(true); + expect( + vaultMap.get(vaultId.toString() as VaultIdString)!.vault, + ).toBeDefined(); + // Release the lock + release(); + await closeP; + expect(vault[running]).toBe(false); + expect(vaultMap.get(vaultId.toString() as VaultIdString)).toBeUndefined(); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('destroying respects locks', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + // Create the vault + const vaultId = await vaultManager.createVault('vaultName'); + // Getting and holding the lock + const vaultAndLock = vaultMap.get(vaultId.toString() as VaultIdString)!; + const lock = vaultAndLock.lock; + const vault = vaultAndLock.vault!; + const release = await lock.acquireWrite(); + // Try to destroy + const destroyP = vaultManager.destroyVault(vaultId); + await sleep(1000); + // Shouldn't be destroyed + expect(vault[destroyed]).toBe(false); + expect( + vaultMap.get(vaultId.toString() as VaultIdString)!.vault, + ).toBeDefined(); + // Release the lock + release(); + await destroyP; + expect(vault[destroyed]).toBe(true); + expect(vaultMap.get(vaultId.toString() as VaultIdString)).toBeUndefined(); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('withVault respects locks', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + 
db, + logger: logger.getChild(VaultManager.name), + }); + try { + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + // Create the vault + const vaultId = await vaultManager.createVault('vaultName'); + // Getting and holding the lock + const vaultAndLock = vaultMap.get(vaultId.toString() as VaultIdString)!; + const lock = vaultAndLock.lock; + const release = await lock.acquireWrite(); + // Try to use vault + let finished = false; + const withP = vaultManager.withVaults([vaultId], async () => { + finished = true; }); + await sleep(1000); + // Shouldn't be destroyed + expect(finished).toBe(false); + // Release the lock + release(); + await withP; + expect(finished).toBe(true); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('Creation adds a vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + await vaultManager.createVault(vaultName); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(1); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('Concurrently creating vault with same name only creates 1 vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + await expect( + Promise.all([ + vaultManager.createVault(vaultName), + vaultManager.createVault(vaultName), + ]), + ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultDefined); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(1); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('vaults persist', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = await vaultManager.createVault(vaultName); + await vaultManager.closeVault(vaultId); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(0); - const vaultName1 = 'vn1' as VaultName; - const vaultName2 = 'vn2' as VaultName; - const vaultName3 = 'vn3' as VaultName; - const v1Id = await server.vaultManager.createVault(vaultName1); - const v2Id = await server.vaultManager.createVault(vaultName2); - const v3Id = await server.vaultManager.createVault(vaultName3); - - const vaultList: Array<[VaultName, VaultId]> = []; + // @ts-ignore: protected method + const vault1 = await vaultManager.getVault(vaultId); + expect(vaultMap.size).toBe(1); - vaultList.push([vaultName1, v1Id]); - vaultList.push([vaultName2, v2Id]); - vaultList.push([vaultName3, v3Id]); + // @ts-ignore: protected method + const vault2 = await vaultManager.getVault(vaultId); + expect(vaultMap.size).toBe(1); + 
expect(vault1).toEqual(vault2); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('vaults can be removed from map', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = await vaultManager.createVault(vaultName); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(1); + // @ts-ignore: protected method + const vault1 = await vaultManager.getVault(vaultId); + await vaultManager.closeVault(vaultId); + expect(vaultMap.size).toBe(0); + // @ts-ignore: protected method + const vault2 = await vaultManager.getVault(vaultId); + expect(vault1).not.toEqual(vault2); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('stopping vaultManager empties map and stops all vaults', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId1 = await vaultManager.createVault('vault1'); + const vaultId2 = await vaultManager.createVault('vault2'); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(2); + // @ts-ignore: protected method + const vault1 = await vaultManager.getVault(vaultId1); + // @ts-ignore: protected method + const vault2 = await vaultManager.getVault(vaultId2); + await vaultManager.stop(); + expect(vaultMap.size).toBe(0); + expect(vault1[running]).toBe(false); + expect(vault2[running]).toBe(false); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('destroying vaultManager destroys all data', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + let vaultManager2: VaultManager | undefined; + try { + const vaultId = await vaultManager.createVault('vault1'); + await vaultManager.stop(); + await vaultManager.destroy(); + // Vaults DB should be empty + const vaultsDb = await db.level(VaultManager.constructor.name); + expect(await db.count(vaultsDb)).toBe(0); + vaultManager2 = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); - await expect(() => - vaultManager.scanNodeVaults(serverNodeId), - ).rejects.toThrow(vaultErrors.ErrorVaultsPermissionDenied); + // @ts-ignore: protected method + await expect(vaultManager2.getVault(vaultId)).rejects.toThrow( + vaultsErrors.ErrorVaultsVaultUndefined, + ); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + await vaultManager2?.stop(); + await 
vaultManager2?.destroy(); + } + }); + test("withVaults should throw if vaultId doesn't exist", async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = vaultsUtils.generateVaultId(); + await expect( + vaultManager.withVaults([vaultId], async () => { + // Do nothing + }), + ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultUndefined); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('generateVaultId handles vault conflicts', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); + const generateVaultIdMock = jest.spyOn(vaultsUtils, 'generateVaultId'); + try { + // Generate 100 ids + const vaultIds: VaultId[] = []; + for (let i = 0; i < 100; i++) { + vaultIds.push( + // @ts-ignore: protected method + vaultsUtils.encodeVaultId(await vaultManager.generateVaultId()), + ); + } + const duplicates = vaultIds.filter( + (item, index) => vaultIds.indexOf(item) !== index, + ); + expect(duplicates.length).toBe(0); + + const vaultId = await vaultManager.createVault('testvault'); + // Now only returns duplicates + generateVaultIdMock.mockReturnValue(vaultId); + const asd = async () => { + for (let i = 0; i < 100; i++) { + // @ts-ignore: protected method + await vaultManager.generateVaultId(); + } + }; + await expect(async () => { + return await asd(); + }).rejects.toThrow(vaultsErrors.ErrorVaultsCreateVaultId); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); }); diff --git a/tests/vaults/VaultOps.test.ts b/tests/vaults/VaultOps.test.ts index 8c6907bf9..3437657f5 100644 --- a/tests/vaults/VaultOps.test.ts +++ b/tests/vaults/VaultOps.test.ts @@ -1,12 +1,13 @@ import type { VaultId } from '@/vaults/types'; import type { Vault } from '@/vaults/Vault'; -import type { KeyManager } from '@/keys'; +import type KeyManager from '@/keys/KeyManager'; +import type { DBDomain, DBLevel } from '@matrixai/db'; import fs from 'fs'; import path from 'path'; import os from 'os'; import { EncryptedFS } from 'encryptedfs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { utils as idUtils } from '@matrixai/id'; +import { DB } from '@matrixai/db'; import * as errors from '@/vaults/errors'; import { VaultInternal, vaultOps } from '@/vaults'; import * as vaultsUtils from '@/vaults/utils'; @@ -21,6 +22,9 @@ describe('VaultOps', () => { let vaultId: VaultId; let vaultInternal: VaultInternal; let vault: Vault; + let db: DB; + let vaultsDb: DBLevel; + let vaultsDbDomain: DBDomain; const dummyKeyManager = { getNodeId: () => { return testUtils.generateRandomNodeId(); @@ -30,7 +34,7 @@ describe('VaultOps', () => { let mockedGenerateKeyPair: jest.SpyInstance; let mockedGenerateDeterministicKeyPair: jest.SpyInstance; - beforeAll(async () => { + beforeEach(async () => { const globalKeyPair = await testUtils.setupGlobalKeypair(); mockedGenerateKeyPair = jest .spyOn(keysUtils, 'generateKeyPair') @@ -42,7 +46,7 @@ describe('VaultOps', () => { dataDir = 
await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); - const dbPath = path.join(dataDir, 'db'); + const dbPath = path.join(dataDir, 'efsDb'); const dbKey = await keysUtils.generateKey(); baseEfs = await EncryptedFS.createEncryptedFS({ dbKey, @@ -50,9 +54,36 @@ describe('VaultOps', () => { logger, }); await baseEfs.start(); + + vaultId = vaultsUtils.generateVaultId(); + await baseEfs.mkdir( + path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + { + recursive: true, + }, + ); + db = await DB.createDB({ dbPath: path.join(dataDir, 'db'), logger }); + vaultsDbDomain = ['vaults']; + vaultsDb = await db.level(vaultsDbDomain[0]); + vaultInternal = await VaultInternal.createVaultInternal({ + keyManager: dummyKeyManager, + vaultId, + efs: baseEfs, + logger: logger.getChild(VaultInternal.name), + fresh: true, + db, + vaultsDbDomain, + vaultsDb, + vaultName: 'VaultName', + }); + vault = vaultInternal as Vault; }); - afterAll(async () => { + afterEach(async () => { + await vaultInternal.stop(); + await vaultInternal.destroy(); + await db.stop(); + await db.destroy(); mockedGenerateKeyPair.mockRestore(); mockedGenerateDeterministicKeyPair.mockRestore(); await baseEfs.stop(); @@ -63,21 +94,6 @@ describe('VaultOps', () => { }); }); - beforeEach(async () => { - vaultId = vaultsUtils.generateVaultId(); - await baseEfs.mkdir(path.join(idUtils.toString(vaultId), 'contents'), { - recursive: true, - }); - vaultInternal = await VaultInternal.create({ - keyManager: dummyKeyManager, - vaultId, - efs: baseEfs, - logger: logger.getChild(VaultInternal.name), - fresh: true, - }); - vault = vaultInternal as Vault; - }); - test('adding a secret', async () => { await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); const dir = await vault.readF(async (efs) => { diff --git a/tests/vaults/utils.test.ts b/tests/vaults/utils.test.ts index d41cec6c8..85a866f88 100644 --- a/tests/vaults/utils.test.ts +++ b/tests/vaults/utils.test.ts @@ -27,10 +27,6 @@ describe('Vaults utils', () => { }); }); - test('VaultId type guard works', async () => { - const vaultId = vaultsUtils.generateVaultId(); - expect(vaultsUtils.decodeVaultId(vaultId)).toBeTruthy(); - }); test('EFS can be read recursively', async () => { const key = await keysUtils.generateKey(256); const efs = await EncryptedFS.createEncryptedFS({ From 3a5a34bc150c11f0f922b36160072bb847c45aba Mon Sep 17 00:00:00 2001 From: Brian Botha Date: Mon, 21 Feb 2022 16:16:01 +1100 Subject: [PATCH 04/10] Fixes #337 - `VaultId` usage, - updated validation - removal of `GenericIdTypes.ts` --- package-lock.json | 210 ------------------ src/GenericIdTypes.ts | 119 ---------- src/acl/ACL.ts | 3 +- src/agent/service/vaultsGitInfoGet.ts | 2 +- src/client/service/vaultsClone.ts | 5 +- src/client/service/vaultsDelete.ts | 14 +- src/client/service/vaultsLog.ts | 14 +- src/client/service/vaultsPermissionsGet.ts | 14 +- src/client/service/vaultsPull.ts | 23 +- src/client/service/vaultsRename.ts | 15 +- src/client/service/vaultsSecretsDelete.ts | 15 +- src/client/service/vaultsSecretsEdit.ts | 15 +- src/client/service/vaultsSecretsGet.ts | 15 +- src/client/service/vaultsSecretsList.ts | 15 +- src/client/service/vaultsSecretsMkdir.ts | 15 +- src/client/service/vaultsSecretsNew.ts | 15 +- src/client/service/vaultsSecretsNewDir.ts | 15 +- src/client/service/vaultsSecretsRename.ts | 15 +- src/client/service/vaultsSecretsStat.ts | 15 +- src/client/service/vaultsShare.ts | 13 +- src/client/service/vaultsUnshare.ts | 13 +- src/client/service/vaultsVersion.ts | 14 +- 
src/discovery/types.ts | 2 +- src/discovery/utils.ts | 12 +- src/notifications/types.ts | 6 +- tests/GenericIdTypes.test.ts | 66 ------ tests/client/rpcVaults.test.ts | 4 +- .../client/service/notificationsRead.test.ts | 4 +- .../NotificationsManager.test.ts | 6 +- tests/notifications/utils.test.ts | 9 +- 30 files changed, 86 insertions(+), 617 deletions(-) delete mode 100644 src/GenericIdTypes.ts delete mode 100644 tests/GenericIdTypes.test.ts diff --git a/package-lock.json b/package-lock.json index ae8c338e4..eca4f5a4d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4,48 +4,6 @@ "lockfileVersion": 1, "requires": true, "dependencies": { - "@arrows/array": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@arrows/array/-/array-1.4.1.tgz", - "integrity": "sha512-MGYS8xi3c4tTy1ivhrVntFvufoNzje0PchjEz6G/SsWRgUKxL4tKwS6iPdO8vsaJYldagAeWMd5KRD0aX3Q39g==", - "dev": true, - "requires": { - "@arrows/composition": "^1.2.2" - } - }, - "@arrows/composition": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@arrows/composition/-/composition-1.2.2.tgz", - "integrity": "sha512-9fh1yHwrx32lundiB3SlZ/VwuStPB4QakPsSLrGJFH6rCXvdrd060ivAZ7/2vlqPnEjBkPRRXOcG1YOu19p2GQ==", - "dev": true - }, - "@arrows/dispatch": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@arrows/dispatch/-/dispatch-1.0.3.tgz", - "integrity": "sha512-v/HwvrFonitYZM2PmBlAlCqVqxrkIIoiEuy5bQgn0BdfvlL0ooSBzcPzTMrtzY8eYktPyYcHg8fLbSgyybXEqw==", - "dev": true, - "requires": { - "@arrows/composition": "^1.2.2" - } - }, - "@arrows/error": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@arrows/error/-/error-1.0.2.tgz", - "integrity": "sha512-yvkiv1ay4Z3+Z6oQsUkedsQm5aFdyPpkBUQs8vejazU/RmANABx6bMMcBPPHI4aW43VPQmXFfBzr/4FExwWTEA==", - "dev": true - }, - "@arrows/multimethod": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@arrows/multimethod/-/multimethod-1.4.1.tgz", - "integrity": "sha512-AZnAay0dgPnCJxn3We5uKiB88VL+1ZIF2SjZohLj6vqY2UyvB/sKdDnFP+LZNVsTC5lcnGPmLlRRkAh4sXkXsQ==", - "dev": true, - "requires": { - "@arrows/array": "^1.4.1", - "@arrows/composition": "^1.2.2", - "@arrows/error": "^1.0.2", - "fast-deep-equal": "^3.1.3" - } - }, "@babel/code-frame": { "version": "7.15.8", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.15.8.tgz", @@ -1760,12 +1718,6 @@ "integrity": "sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA==", "dev": true }, - "@types/abstract-leveldown": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@types/abstract-leveldown/-/abstract-leveldown-7.2.0.tgz", - "integrity": "sha512-q5veSX6zjUy/DlDhR4Y4cU0k2Ar+DT2LUraP00T19WLmTO6Se1djepCCaqU6nQrwcJ5Hyo/CWqxTzrrFg8eqbQ==", - "dev": true - }, "@types/babel__core": { "version": "7.1.16", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.16.tgz", @@ -1877,23 +1829,6 @@ "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=", "dev": true }, - "@types/level-errors": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/level-errors/-/level-errors-3.0.0.tgz", - "integrity": "sha512-/lMtoq/Cf/2DVOm6zE6ORyOM+3ZVm/BvzEZVxUhf6bgh8ZHglXlBqxbxSlJeVp8FCbD3IVvk/VbsaNmDjrQvqQ==", - "dev": true - }, - "@types/levelup": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@types/levelup/-/levelup-5.1.0.tgz", - "integrity": "sha512-XagSD3VJFWjZWeQnG4mL53PFRPmb6E7dKXdJxexVw85ki82BWOp68N+R6M1t9OYsbmlY+2S0GZcZtVH3gGbeDw==", - "dev": true, - "requires": { - 
"@types/abstract-leveldown": "*", - "@types/level-errors": "*", - "@types/node": "*" - } - }, "@types/nexpect": { "version": "0.4.31", "resolved": "https://registry.npmjs.org/@types/nexpect/-/nexpect-0.4.31.tgz", @@ -2581,58 +2516,6 @@ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, - "benchmark": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz", - "integrity": "sha1-CfPeMckWQl1JjMLuVloOvzwqVik=", - "dev": true, - "requires": { - "lodash": "^4.17.4", - "platform": "^1.3.3" - } - }, - "benny": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/benny/-/benny-3.7.1.tgz", - "integrity": "sha512-USzYxODdVfOS7JuQq/L0naxB788dWCiUgUTxvN+WLPt/JfcDURNNj8kN/N+uK6PDvuR67/9/55cVKGPleFQINA==", - "dev": true, - "requires": { - "@arrows/composition": "^1.0.0", - "@arrows/dispatch": "^1.0.2", - "@arrows/multimethod": "^1.1.6", - "benchmark": "^2.1.4", - "common-tags": "^1.8.0", - "fs-extra": "^10.0.0", - "json2csv": "^5.0.6", - "kleur": "^4.1.4", - "log-update": "^4.0.0" - }, - "dependencies": { - "fs-extra": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", - "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", - "dev": true, - "requires": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - } - }, - "kleur": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.4.tgz", - "integrity": "sha512-8QADVssbrFjivHWQU7KkMgptGTl6WAcSdlbBPY4uNF+mWr6DGcKrvY2w4FQJoXch7+fKMjj0dRrL75vk3k23OA==", - "dev": true - }, - "universalify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", - "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", - "dev": true - } - } - }, "big-integer": { "version": "1.6.50", "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.50.tgz", @@ -2955,15 +2838,6 @@ "resolved": "https://registry.npmjs.org/clean-git-ref/-/clean-git-ref-2.0.1.tgz", "integrity": "sha512-bLSptAy2P0s6hU4PzuIMKmMJJSE6gLXGH1cntDu7bWJUksvuM+7ReOK61mozULErYvP6a15rnYl0zFDef+pyPw==" }, - "cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", - "dev": true, - "requires": { - "restore-cursor": "^3.1.0" - } - }, "cliui": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", @@ -3032,12 +2906,6 @@ "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==" }, - "common-tags": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", - "integrity": "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", - "dev": true - }, "component-emitter": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", @@ -6513,25 +6381,6 @@ "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", "dev": true }, - "json2csv": { - "version": "5.0.6", - "resolved": 
"https://registry.npmjs.org/json2csv/-/json2csv-5.0.6.tgz", - "integrity": "sha512-0/4Lv6IenJV0qj2oBdgPIAmFiKKnh8qh7bmLFJ+/ZZHLjSeiL3fKKGX3UryvKPbxFbhV+JcYo9KUC19GJ/Z/4A==", - "dev": true, - "requires": { - "commander": "^6.1.0", - "jsonparse": "^1.3.1", - "lodash.get": "^4.4.2" - }, - "dependencies": { - "commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", - "dev": true - } - } - }, "json5": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", @@ -6565,12 +6414,6 @@ } } }, - "jsonparse": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", - "integrity": "sha1-P02uSpH6wxX3EGL4UhzCOfE2YoA=", - "dev": true - }, "kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -6746,12 +6589,6 @@ "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=", "dev": true }, - "lodash.get": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", - "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", - "dev": true - }, "lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", @@ -6764,31 +6601,6 @@ "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", "dev": true }, - "log-update": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", - "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", - "dev": true, - "requires": { - "ansi-escapes": "^4.3.0", - "cli-cursor": "^3.1.0", - "slice-ansi": "^4.0.0", - "wrap-ansi": "^6.2.0" - }, - "dependencies": { - "wrap-ansi": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", - "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - } - } - }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -7718,12 +7530,6 @@ } } }, - "platform": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz", - "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==", - "dev": true - }, "posix-character-classes": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", @@ -8099,16 +7905,6 @@ "bitset": "^5.0.3" } }, - "restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", - "dev": true, - "requires": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - } - }, "ret": { "version": "0.1.15", "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", @@ -9011,12 +8807,6 @@ "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", "dev": true }, - "systeminformation": { - "version": "5.11.0", - "resolved": "https://registry.npmjs.org/systeminformation/-/systeminformation-5.11.0.tgz", - "integrity": 
"sha512-mI/5nFK7NUe9Qbmy65WoB5TlCWKAhP4kG0w6uR2mZM8Mpdi8b45b3hTIK3W5+kQYZnYFWeS9/O5nn5rdcSvqfA==", - "dev": true - }, "table": { "version": "6.7.2", "resolved": "https://registry.npmjs.org/table/-/table-6.7.2.tgz", diff --git a/src/GenericIdTypes.ts b/src/GenericIdTypes.ts deleted file mode 100644 index e4d2db430..000000000 --- a/src/GenericIdTypes.ts +++ /dev/null @@ -1,119 +0,0 @@ -import type { Codec } from 'multiformats/bases/base'; -import type { Id as InternalId } from '@matrixai/id/dist/Id'; -import { utils as idUtils } from '@matrixai/id'; -import { bases } from 'multiformats/basics'; -import { ErrorInvalidId } from './errors'; - -type MultibaseFormats = keyof typeof bases; -// / This is the internal form of the Id. -export type Id = InternalId; -// / This is the user readable string form of the Id. -export type IdString = string; -// This is the number of bytes a valid Id has -const idValidByteLength = 16; - -// Type guards for generic RandomId types. -function isId(arg: any): arg is T { - if (!(arg instanceof Uint8Array)) return false; - return arg.length === idValidByteLength; -} - -/** - * This will return arg as a valid VaultId or throw an error if it can't be converted. - * This will take a multibase string of the ID or the raw Buffer of the ID. - * @param arg - The variable we wish to convert - * @throws vaultErrors.ErrorInvalidVaultId if the arg can't be converted into a VaultId - * @returns VaultIdRaw - */ -function makeId(arg: any): T { - let id = arg; - // Checking and converting a string - if (typeof arg === 'string') { - // Covert the string to the Buffer form. - try { - id = idUtils.fromMultibase(arg); - if (id == null) throw new ErrorInvalidId(); - } catch (err) { - throw new ErrorInvalidId(); - } - } - - // If its a buffer we convert it to a Id. - if (arg instanceof Buffer) id = idUtils.fromBuffer(id); - - // Checking if valid buffer. - if (isId(id)) return id; - throw new ErrorInvalidId(); -} - -function isIdString( - arg: any, - validByteLength: number = idValidByteLength, -): arg is T { - if (typeof arg !== 'string') return false; - const id = fromMultibase(arg); - if (id == null) return false; - return id.length === validByteLength; -} - -function makeIdString( - arg: any, - validByteLength: number = idValidByteLength, - format: MultibaseFormats = 'base58btc', -): T { - const id = arg; - if (id instanceof Uint8Array) { - if (id.length !== validByteLength) throw new ErrorInvalidId(); - return toMultibase(arg, format) as T; - } - if (isIdString(id, validByteLength)) return id; - throw new ErrorInvalidId(); -} - -function idToString(id: Id): IdString { - return id.toString(); -} - -function stringToId(idString: IdString): Id { - return idUtils.fromString(idString)!; -} - -// Multibase helper functions. 
-const basesByPrefix: Record<string, Codec<string, string>> = {};
-for (const k in bases) {
-  const codec = bases[k];
-  basesByPrefix[codec.prefix] = codec;
-}
-
-/**
- * Encodes an multibase ID string
- */
-function toMultibase(id: Uint8Array, format: MultibaseFormats): string {
-  const codec = bases[format];
-  return codec.encode(id);
-}
-
-/**
- * Decodes a multibase encoded ID
- * Do not use this for generic multibase strings
- */
-function fromMultibase(idString: string): Uint8Array | undefined {
-  const prefix = idString[0];
-  const codec = basesByPrefix[prefix];
-  if (codec == null) {
-    return;
-  }
-  const buffer = codec.decode(idString);
-  return new Uint8Array(buffer);
-}
-
-export {
-  isId,
-  makeId,
-  isIdString,
-  makeIdString,
-  idToString,
-  stringToId,
-  toMultibase,
-  fromMultibase,
-};
diff --git a/src/acl/ACL.ts b/src/acl/ACL.ts
index f1f551059..358663d51 100644
--- a/src/acl/ACL.ts
+++ b/src/acl/ACL.ts
@@ -201,7 +201,8 @@ class ACL {
     const vaultPerms: Record> = {};
     const ops: Array = [];
     for await (const o of this.aclVaultsDb.createReadStream()) {
-      const vaultId = (o as any).key as VaultId;
+      const vaultIdBuffer = (o as any).key as Buffer;
+      const vaultId = IdInternal.fromBuffer(vaultIdBuffer);
       const data = (o as any).value as Buffer;
       const nodeIds = await this.db.deserializeDecrypt>(
         data,
diff --git a/src/agent/service/vaultsGitInfoGet.ts b/src/agent/service/vaultsGitInfoGet.ts
index c20f74a94..5269fccf5 100644
--- a/src/agent/service/vaultsGitInfoGet.ts
+++ b/src/agent/service/vaultsGitInfoGet.ts
@@ -33,7 +33,7 @@ function vaultsGitInfoGet({
     const vaultNameOrId = vaultMessage.getNameOrId();
     let vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName);
     vaultName = vaultNameOrId;
-    if (!vaultId) {
+    if (vaultId == null) {
       try {
         vaultId = validationUtils.parseVaultId(vaultNameOrId);
         vaultName = (await vaultManager.getVaultMeta(vaultId))?.vaultName;
diff --git a/src/client/service/vaultsClone.ts b/src/client/service/vaultsClone.ts
index ebed0b039..ba55bb397 100644
--- a/src/client/service/vaultsClone.ts
+++ b/src/client/service/vaultsClone.ts
@@ -5,7 +5,6 @@ import * as grpc from '@grpc/grpc-js';
 import { utils as grpcUtils } from '../../grpc';
 import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
 import * as validationUtils from '../../validation/utils';
-import * as vaultsUtils from '../../vaults/utils';
 
 function vaultsClone({
   authenticate,
@@ -36,8 +35,8 @@ function vaultsClone({
       // Vault id
       let vaultId;
       const vaultNameOrId = vaultMessage.getNameOrId();
-      vaultId = vaultsUtils.decodeVaultId(vaultNameOrId);
-      vaultId = vaultId ?? vaultNameOrId;
+      vaultId = await vaultManager.getVaultId(vaultNameOrId);
+      vaultId = vaultId ?? validationUtils.parseVaultId(vaultNameOrId);
       // Node id
       const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId());
       await vaultManager.cloneVault(nodeId, vaultId);
diff --git a/src/client/service/vaultsDelete.ts b/src/client/service/vaultsDelete.ts
index 1fa8569c0..34aef3810 100644
--- a/src/client/service/vaultsDelete.ts
+++ b/src/client/service/vaultsDelete.ts
@@ -1,18 +1,11 @@
 import type { Authenticate } from '../types';
-import type { VaultId, VaultName } from '../../vaults/types';
+import type { VaultName } from '../../vaults/types';
 import type { VaultManager } from '../../vaults';
 import type * as grpc from '@grpc/grpc-js';
 import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
-import { utils as idUtils } from '@matrixai/id';
 import { utils as grpcUtils } from '../../grpc';
-import { errors as vaultsErrors } from '../../vaults';
 import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
-
-function decodeVaultId(input: string): VaultId | undefined {
-  return idUtils.fromMultibase(input)
-    ? (idUtils.fromMultibase(input) as VaultId)
-    : undefined;
-}
+import * as validationUtils from '../../validation/utils';
 
 function vaultsDelete({
   vaultManager,
@@ -32,8 +25,7 @@ function vaultsDelete({
       call.sendMetadata(metadata);
       const nameOrId = vaultMessage.getNameOrId();
       let vaultId = await vaultManager.getVaultId(nameOrId as VaultName);
-      if (!vaultId) vaultId = decodeVaultId(nameOrId);
-      if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined();
+      vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId);
       await vaultManager.destroyVault(vaultId);
       response.setSuccess(true);
       callback(null, response);
diff --git a/src/client/service/vaultsLog.ts b/src/client/service/vaultsLog.ts
index 96b43e086..c5303a148 100644
--- a/src/client/service/vaultsLog.ts
+++ b/src/client/service/vaultsLog.ts
@@ -1,17 +1,10 @@
 import type { Authenticate } from '../types';
-import type { VaultId, VaultName } from '../../vaults/types';
+import type { VaultName } from '../../vaults/types';
 import type { VaultManager } from '../../vaults';
 import * as grpc from '@grpc/grpc-js';
-import { utils as idUtils } from '@matrixai/id';
 import { utils as grpcUtils } from '../../grpc';
-import { errors as vaultsErrors } from '../../vaults';
 import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
-
-function decodeVaultId(input: string): VaultId | undefined {
-  return idUtils.fromMultibase(input)
-    ? (idUtils.fromMultibase(input) as VaultId)
-    : undefined;
-}
+import * as validationUtils from '../../validation/utils';
 
 function vaultsLog({
   vaultManager,
@@ -36,8 +29,7 @@ function vaultsLog({
     }
       const nameOrId = vaultMessage.getNameOrId();
       let vaultId = await vaultManager.getVaultId(nameOrId as VaultName);
-      if (!vaultId) vaultId = decodeVaultId(nameOrId);
-      if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined();
+      vaultId = vaultId ??
validationUtils.parseVaultId(nameOrId); // Getting the log const depth = vaultsLogMessage.getLogDepth(); let commitId: string | undefined = vaultsLogMessage.getCommitId(); diff --git a/src/client/service/vaultsPermissionsGet.ts b/src/client/service/vaultsPermissionsGet.ts index 916b80cc1..a1d2e69c4 100644 --- a/src/client/service/vaultsPermissionsGet.ts +++ b/src/client/service/vaultsPermissionsGet.ts @@ -1,19 +1,12 @@ import type { Authenticate } from '../types'; import type { VaultManager } from '../../vaults'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import type * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { errors as vaultsErrors } from '../../vaults'; import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; - -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} +import * as validationUtils from '../../validation/utils'; function vaultsPermissionsGet({ authenticate, @@ -33,8 +26,7 @@ function vaultsPermissionsGet({ // Getting vaultId const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); const permissionList = await vaultManager.getVaultPermission(vaultId); const nodeActionsMessage = new permissionsPB.NodeActions(); diff --git a/src/client/service/vaultsPull.ts b/src/client/service/vaultsPull.ts index 81e79adc4..6384cc661 100644 --- a/src/client/service/vaultsPull.ts +++ b/src/client/service/vaultsPull.ts @@ -1,19 +1,12 @@ import type { Authenticate } from '../types'; import type { VaultManager } from '../../vaults'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { errors as vaultsErrors } from '../../vaults'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as validationUtils from '../../validation/utils'; - -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} +import * as vaultsUtils from '../../vaults/utils'; function vaultsPull({ authenticate, @@ -38,8 +31,7 @@ function vaultsPull({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId);
       let nodeId;
       const nodeMessage = call.request.getNode();
       if (nodeMessage == null) {
@@ -52,12 +44,8 @@ function vaultsPull({
       if (pullVaultMessage == null) {
         pullVault = null;
       } else {
-        try {
-          pullVault = decodeVaultId(pullVaultMessage.getNameOrId());
-        } catch (err) {
-          // Do nothing
-        }
-        if (!pullVault) pullVault = pullVaultMessage.getNameOrId();
+        pullVault = vaultsUtils.decodeVaultId(pullVaultMessage.getNameOrId());
+        pullVault = pullVault ?? pullVaultMessage.getNameOrId();
       }
       await vaultManager.pullVault({
         vaultId,
diff --git a/src/client/service/vaultsRename.ts b/src/client/service/vaultsRename.ts
index b5c81a83b..d89cef630 100644
--- a/src/client/service/vaultsRename.ts
+++ b/src/client/service/vaultsRename.ts
@@ -1,18 +1,12 @@
 import type { Authenticate } from '../types';
-import type { VaultId, VaultName } from '../../vaults/types';
+import type { VaultName } from '../../vaults/types';
 import type { VaultManager } from '../../vaults';
 import * as grpc from '@grpc/grpc-js';
-import { utils as idUtils } from '@matrixai/id';
+import * as validationUtils from '../../validation/utils';
 import { utils as grpcUtils } from '../../grpc';
-import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults';
+import { utils as vaultsUtils } from '../../vaults';
 import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
 
-function decodeVaultId(input: string): VaultId | undefined {
-  return idUtils.fromMultibase(input)
-    ? (idUtils.fromMultibase(input) as VaultId)
-    : undefined;
-}
-
 function vaultsRename({
   vaultManager,
   authenticate,
@@ -35,8 +29,7 @@ function vaultsRename({
     }
       const nameOrId = vaultMessage.getNameOrId();
       let vaultId = await vaultManager.getVaultId(nameOrId as VaultName);
-      if (!vaultId) vaultId = decodeVaultId(nameOrId);
-      if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined();
+      vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId);
       const newName = call.request.getNewName() as VaultName;
       await vaultManager.renameVault(vaultId, newName);
       response.setNameOrId(vaultsUtils.encodeVaultId(vaultId));
diff --git a/src/client/service/vaultsSecretsDelete.ts b/src/client/service/vaultsSecretsDelete.ts
index ec770a9de..5de6f64ee 100644
--- a/src/client/service/vaultsSecretsDelete.ts
+++ b/src/client/service/vaultsSecretsDelete.ts
@@ -1,19 +1,13 @@
 import type { Authenticate } from '../types';
-import type { VaultId, VaultName } from '../../vaults/types';
+import type { VaultName } from '../../vaults/types';
 import type { VaultManager } from '../../vaults';
 import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb';
 import * as grpc from '@grpc/grpc-js';
-import { utils as idUtils } from '@matrixai/id';
+import * as validationUtils from '../../validation/utils';
 import { utils as grpcUtils } from '../../grpc';
-import { vaultOps, errors as vaultsErrors } from '../../vaults';
+import { vaultOps } from '../../vaults';
 import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
 
-function decodeVaultId(input: string): VaultId | undefined {
-  return idUtils.fromMultibase(input)
-    ?
(idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsDelete({ vaultManager, authenticate, @@ -36,8 +30,7 @@ function vaultsSecretsDelete({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); const secretName = call.request.getSecretName(); await vaultManager.withVaults([vaultId], async (vault) => { await vaultOps.deleteSecret(vault, secretName); diff --git a/src/client/service/vaultsSecretsEdit.ts b/src/client/service/vaultsSecretsEdit.ts index 356c4143a..876804ef1 100644 --- a/src/client/service/vaultsSecretsEdit.ts +++ b/src/client/service/vaultsSecretsEdit.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; +import * as validationUtils from '../../validation/utils'; import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import { vaultOps } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsEdit({ vaultManager, authenticate, @@ -41,8 +35,7 @@ function vaultsSecretsEdit({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); const secretName = secretMessage.getSecretName(); const secretContent = Buffer.from(secretMessage.getSecretContent()); await vaultManager.withVaults([vaultId], async (vault) => { diff --git a/src/client/service/vaultsSecretsGet.ts b/src/client/service/vaultsSecretsGet.ts index 7b5adadda..4d4c0356b 100644 --- a/src/client/service/vaultsSecretsGet.ts +++ b/src/client/service/vaultsSecretsGet.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; +import * as validationUtils from '../../validation/utils'; import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import { vaultOps } from '../../vaults'; import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? 
(idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsGet({ vaultManager, authenticate, @@ -36,8 +30,7 @@ function vaultsSecretsGet({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); const secretName = call.request.getSecretName(); const secretContent = await vaultManager.withVaults( [vaultId], diff --git a/src/client/service/vaultsSecretsList.ts b/src/client/service/vaultsSecretsList.ts index 4eef962f7..e8954a4e1 100644 --- a/src/client/service/vaultsSecretsList.ts +++ b/src/client/service/vaultsSecretsList.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import type * as grpc from '@grpc/grpc-js'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import { utils as idUtils } from '@matrixai/id'; +import * as validationUtils from '../../validation/utils'; import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import { vaultOps } from '../../vaults'; import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsList({ vaultManager, authenticate, @@ -31,8 +25,7 @@ function vaultsSecretsList({ const vaultMessage = call.request; const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); const secrets = await vaultManager.withVaults( [vaultId], async (vault) => { diff --git a/src/client/service/vaultsSecretsMkdir.ts b/src/client/service/vaultsSecretsMkdir.ts index 345aa01ea..68c2f12ee 100644 --- a/src/client/service/vaultsSecretsMkdir.ts +++ b/src/client/service/vaultsSecretsMkdir.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; +import * as validationUtils from '../../validation/utils'; import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import { vaultOps } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? 
(idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsMkdir({ vaultManager, authenticate, @@ -37,8 +31,7 @@ function vaultsSecretsMkdir({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); await vaultManager.withVaults([vaultId], async (vault) => { await vaultOps.mkdir(vault, vaultMkdirMessge.getDirName(), { recursive: vaultMkdirMessge.getRecursive(), diff --git a/src/client/service/vaultsSecretsNew.ts b/src/client/service/vaultsSecretsNew.ts index 10bd355fe..b8160233a 100644 --- a/src/client/service/vaultsSecretsNew.ts +++ b/src/client/service/vaultsSecretsNew.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; +import * as validationUtils from '../../validation/utils'; import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import { vaultOps } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsNew({ vaultManager, authenticate, @@ -36,8 +30,7 @@ function vaultsSecretsNew({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); const secret = call.request.getSecretName(); const content = Buffer.from(call.request.getSecretContent()); await vaultManager.withVaults([vaultId], async (vault) => { diff --git a/src/client/service/vaultsSecretsNewDir.ts b/src/client/service/vaultsSecretsNewDir.ts index 60ff1b14c..0702dafeb 100644 --- a/src/client/service/vaultsSecretsNewDir.ts +++ b/src/client/service/vaultsSecretsNewDir.ts @@ -1,20 +1,14 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import type { FileSystem } from '../../types'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; +import * as validationUtils from '../../validation/utils'; import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import { vaultOps } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? 
(idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsNewDir({ vaultManager, authenticate, @@ -39,8 +33,7 @@ function vaultsSecretsNewDir({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); const secretsPath = call.request.getSecretDirectory(); await vaultManager.withVaults([vaultId], async (vault) => { await vaultOps.addSecretDirectory(vault, secretsPath, fs); diff --git a/src/client/service/vaultsSecretsRename.ts b/src/client/service/vaultsSecretsRename.ts index dd04f7d3f..46c842261 100644 --- a/src/client/service/vaultsSecretsRename.ts +++ b/src/client/service/vaultsSecretsRename.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; +import * as validationUtils from '../../validation/utils'; import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import { vaultOps } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsRename({ vaultManager, authenticate, @@ -41,8 +35,7 @@ function vaultsSecretsRename({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); const oldSecret = secretMessage.getSecretName(); const newSecret = call.request.getNewName(); await vaultManager.withVaults([vaultId], async (vault) => { diff --git a/src/client/service/vaultsSecretsStat.ts b/src/client/service/vaultsSecretsStat.ts index 7f498f1b2..2d7d68ea8 100644 --- a/src/client/service/vaultsSecretsStat.ts +++ b/src/client/service/vaultsSecretsStat.ts @@ -1,18 +1,12 @@ import type { VaultManager } from '../../vaults'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type { Authenticate } from '../types'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; +import * as validationUtils from '../../validation/utils'; import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import { vaultOps } from '../../vaults'; import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? 
(idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsStat({ authenticate, vaultManager, @@ -35,8 +29,7 @@ function vaultsSecretsStat({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); const secretName = call.request.getSecretName(); const stat = await vaultManager.withVaults([vaultId], async (vault) => { return await vaultOps.statSecret(vault, secretName); diff --git a/src/client/service/vaultsShare.ts b/src/client/service/vaultsShare.ts index 9d2bbf85e..c43d6b6b2 100644 --- a/src/client/service/vaultsShare.ts +++ b/src/client/service/vaultsShare.ts @@ -1,20 +1,12 @@ import type { Authenticate } from '../types'; import type { VaultManager } from '../../vaults'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; import * as validationUtils from '../../validation/utils'; -import { errors as vaultsErrors } from '../../vaults'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsShare({ authenticate, vaultManager, @@ -42,8 +34,7 @@ function vaultsShare({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); await vaultManager.shareVault(vaultId, nodeId); const response = new utilsPB.StatusMessage(); response.setSuccess(true); diff --git a/src/client/service/vaultsUnshare.ts b/src/client/service/vaultsUnshare.ts index 83f028822..bdc2b1d01 100644 --- a/src/client/service/vaultsUnshare.ts +++ b/src/client/service/vaultsUnshare.ts @@ -1,20 +1,12 @@ import type { Authenticate } from '../types'; import type { VaultManager } from '../../vaults'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { errors as vaultsErrors } from '../../vaults'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as validationUtils from '../../validation/utils'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsUnshare({ authenticate, vaultManager, @@ -42,8 +34,7 @@ function vaultsUnshare({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); await vaultManager.unshareVault(vaultId, nodeId); const response = new utilsPB.StatusMessage(); response.setSuccess(true); diff --git a/src/client/service/vaultsVersion.ts b/src/client/service/vaultsVersion.ts index 53c7fae9f..c5da80880 100644 --- a/src/client/service/vaultsVersion.ts +++ b/src/client/service/vaultsVersion.ts @@ -1,18 +1,11 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; +import * as validationUtils from '../../validation/utils'; import { utils as grpcUtils } from '../../grpc'; -import { errors as vaultsErrors } from '../../vaults'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsVersion({ vaultManager, authenticate, @@ -38,8 +31,7 @@ function vaultsVersion({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); // Doing the deed const versionId = vaultsVersionMessage.getVersionId(); const [latestOid, currentVersionId] = await vaultManager.withVaults( diff --git a/src/discovery/types.ts b/src/discovery/types.ts index 9c32ed947..c91021c7e 100644 --- a/src/discovery/types.ts +++ b/src/discovery/types.ts @@ -1,5 +1,5 @@ import type { Opaque } from '../types'; -import type { Id } from '../GenericIdTypes'; +import type { Id } from '@matrixai/id'; /** * Used to preserve order in the Discovery Queue. 
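Note on the handlers above: every one of these client service handlers now resolves its vault with the same two-step fallback, first treating the input as a vault name and then as an encoded vault id. A minimal sketch of that shared logic as a hypothetical helper (the patch itself inlines these two lines in each handler rather than extracting them):

import type { VaultId, VaultName } from '../../vaults/types';
import type { VaultManager } from '../../vaults';
import * as validationUtils from '../../validation/utils';

// Hypothetical helper; illustrates the nameOrId resolution inlined above
async function resolveVaultId(
  vaultManager: VaultManager,
  nameOrId: string,
): Promise<VaultId> {
  // Try the input as a vault name first
  let vaultId = await vaultManager.getVaultId(nameOrId as VaultName);
  // Fall back to parsing it as an encoded vault id;
  // parseVaultId throws a validation error if it is neither
  vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId);
  return vaultId;
}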
diff --git a/src/discovery/utils.ts b/src/discovery/utils.ts index b8a9f9808..b0c774a63 100644 --- a/src/discovery/utils.ts +++ b/src/discovery/utils.ts @@ -1,19 +1,13 @@ import type { DiscoveryQueueId, DiscoveryQueueIdGenerator } from './types'; import { IdSortable } from '@matrixai/id'; -import { makeId } from '../GenericIdTypes'; - -function makeDiscoveryQueueId(arg: any) { - return makeId(arg); -} function createDiscoveryQueueIdGenerator( lastId?: DiscoveryQueueId, ): DiscoveryQueueIdGenerator { - const idSortableGenerator = new IdSortable({ + const idSortableGenerator = new IdSortable({ lastId, }); - return (): DiscoveryQueueId => - makeDiscoveryQueueId(idSortableGenerator.get()); + return (): DiscoveryQueueId => idSortableGenerator.get(); } -export { makeDiscoveryQueueId, createDiscoveryQueueIdGenerator }; +export { createDiscoveryQueueIdGenerator }; diff --git a/src/notifications/types.ts b/src/notifications/types.ts index 428000a3a..a9d9b6dd4 100644 --- a/src/notifications/types.ts +++ b/src/notifications/types.ts @@ -1,7 +1,7 @@ +import type { Id } from '@matrixai/id'; import type { Opaque } from '../types'; import type { NodeIdEncoded } from '../nodes/types'; -import type { VaultName, VaultActions } from '../vaults/types'; -import type { Id, IdString } from '../GenericIdTypes'; +import type { VaultName, VaultActions, VaultIdEncoded } from '../vaults/types'; type NotificationId = Opaque<'NotificationId', Id>; @@ -12,7 +12,7 @@ type GestaltInvite = { }; type VaultShare = { type: 'VaultShare'; - vaultId: IdString; + vaultId: VaultIdEncoded; vaultName: VaultName; actions: VaultActions; }; diff --git a/tests/GenericIdTypes.test.ts b/tests/GenericIdTypes.test.ts deleted file mode 100644 index fa3fa6d42..000000000 --- a/tests/GenericIdTypes.test.ts +++ /dev/null @@ -1,66 +0,0 @@ -import type { Id } from '@matrixai/id/dist/Id'; -import type { IdString } from '@/GenericIdTypes'; -import type { Opaque } from '@/types'; -import { utils as idUtils } from '@matrixai/id'; -import { makeIdString, makeId } from '@/GenericIdTypes'; -import { ErrorInvalidId } from '@/errors'; - -describe('GenericID Type utility functions', () => { - type TestRawType = Opaque<'testRawType', Id>; - type TestType = Opaque<'testType', IdString>; - - const validString = 'zUGWu8zn6VSa6dYrty8DJdm'; - const invalidString = 'notAValidString'; - const validBuffer = Buffer.alloc(16); - const invalidBuffer = Buffer.alloc(20); - const validTestRawType = idUtils.fromString( - 'Vaultxxxxxxxxxxx', - ) as TestRawType; - - // Testing generation. - // test('can generate a Id', async () => { - // const idGen = new IdRandom(); - // const id = idGen.get(); - // console.log(id.toString()); - // console.log(Buffer.from(id).toString()); - // }); - // // Testing conversions. 
- // test('random tests', () => { - // const idGen = new IdRandom(); - // const id = idGen.get(); - // const idString = id.toString(); - // console.log(idString); - // - // const testString = 'vault1xxxxxxxxxx'; - // console.log(idUtils.fromString(testString)) - // console.log(idUtils.toString(idUtils.fromString(testString)!)) - // }); - - test('makeId converts a buffer', () => { - expect(() => makeId(validTestRawType)).not.toThrow(); - }); - test('makeId converts a buffer', () => { - expect(() => makeId(validBuffer)).not.toThrow(); - }); - test('makeId converts a string', () => { - expect(() => makeId(validString)).not.toThrow(); - }); - test('makeId throws error for invalid buffer.', () => { - expect(() => makeId(invalidBuffer)).toThrow(ErrorInvalidId); - }); - test('makeId throws error for invalid string.', () => { - expect(() => makeId(invalidString)).toThrow(ErrorInvalidId); - }); - test('makeIdString converts a Buffer.', () => { - expect(() => makeIdString(validBuffer)).not.toThrow(); - }); - test('makeIdString converts a string.', () => { - expect(() => makeIdString(validString)).not.toThrow(); - }); - test('makeIdString throws error for invalid buffer.', () => { - expect(() => makeIdString(invalidBuffer)).toThrow(ErrorInvalidId); - }); - test('makeIdString throws error for invalid buffer.', () => { - expect(() => makeIdString(invalidString)).toThrow(ErrorInvalidId); - }); -}); diff --git a/tests/client/rpcVaults.test.ts b/tests/client/rpcVaults.test.ts index 727080a57..75d0c4517 100644 --- a/tests/client/rpcVaults.test.ts +++ b/tests/client/rpcVaults.test.ts @@ -138,7 +138,9 @@ describe('Vaults client service', () => { const vaultId = await createVault(vaultMessage, callCredentials); const vaultNames = await vaultManager.listVaults(); expect(vaultNames.get(vaultList[0])).toBeTruthy(); - expect(vaultNames.get(vaultList[0])).toStrictEqual(vaultId.getNameOrId()); + expect( + vaultsUtils.encodeVaultId(vaultNames.get(vaultList[0])!), + ).toStrictEqual(vaultId.getNameOrId()); }); test('should delete vaults', async () => { const deleteVault = grpcUtils.promisifyUnaryCall( diff --git a/tests/client/service/notificationsRead.test.ts b/tests/client/service/notificationsRead.test.ts index c3882bc8a..c5442b973 100644 --- a/tests/client/service/notificationsRead.test.ts +++ b/tests/client/service/notificationsRead.test.ts @@ -1,5 +1,5 @@ import type { Host, Port } from '@/network/types'; -import type { VaultName } from '@/vaults/types'; +import type { VaultIdEncoded, VaultName } from '@/vaults/types'; import fs from 'fs'; import path from 'path'; import os from 'os'; @@ -106,7 +106,7 @@ describe('notificationsRead', () => { { data: { type: 'VaultShare', - vaultId: 'vault', + vaultId: 'vault' as VaultIdEncoded, vaultName: 'vault' as VaultName, actions: { clone: null, diff --git a/tests/notifications/NotificationsManager.test.ts b/tests/notifications/NotificationsManager.test.ts index d52aa0968..8153178a0 100644 --- a/tests/notifications/NotificationsManager.test.ts +++ b/tests/notifications/NotificationsManager.test.ts @@ -212,7 +212,7 @@ describe('NotificationsManager', () => { }; const vaultNotification: NotificationData = { type: 'VaultShare', - vaultId: vaultsUtils.generateVaultId().toString(), + vaultId: vaultsUtils.encodeVaultId(vaultsUtils.generateVaultId()), vaultName: 'vaultName' as VaultName, actions: { clone: null, @@ -276,7 +276,7 @@ describe('NotificationsManager', () => { }; const vaultNotification: NotificationData = { type: 'VaultShare', - vaultId: 
vaultsUtils.generateVaultId().toString(), + vaultId: vaultsUtils.encodeVaultId(vaultsUtils.generateVaultId()), vaultName: 'vaultName' as VaultName, actions: { clone: null, @@ -341,7 +341,7 @@ describe('NotificationsManager', () => { const notification3: Notification = { data: { type: 'VaultShare', - vaultId: vaultsUtils.generateVaultId().toString(), + vaultId: vaultsUtils.encodeVaultId(vaultsUtils.generateVaultId()), vaultName: 'vaultName' as VaultName, actions: { clone: null, diff --git a/tests/notifications/utils.test.ts b/tests/notifications/utils.test.ts index 8f85d4642..5a3b8a617 100644 --- a/tests/notifications/utils.test.ts +++ b/tests/notifications/utils.test.ts @@ -13,8 +13,8 @@ import * as testUtils from '../utils'; describe('Notifications utils', () => { const nodeId = testUtils.generateRandomNodeId(); const nodeIdEncoded = nodesUtils.encodeNodeId(nodeId); - const vaultId = vaultUtils.generateVaultId(); - const vaultIdEncoded = vaultUtils.encodeVaultId(vaultId); + const vaultId = vaultsUtils.generateVaultId(); + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); test('generates notification ids', async () => { const generator = notificationsUtils.createNotificationIdGenerator(); @@ -37,7 +37,6 @@ describe('Notifications utils', () => { currentId = generator(); expect(Buffer.compare(lastId, currentId)).toBeTruthy(); lastId = currentId; - await sleep(10); } }); @@ -110,7 +109,7 @@ describe('Notifications utils', () => { result = await jwtVerify(signedVaultShareNotification, EmbeddedJWK, {}); expect(result.payload.data).toEqual({ type: 'VaultShare', - vaultId: vaultId, + vaultId: vaultIdEncoded, vaultName: 'vaultName', actions: { clone: null, @@ -195,7 +194,7 @@ describe('Notifications utils', () => { ); expect(decodedVaultShareNotification.data).toEqual({ type: 'VaultShare', - vaultId: vaultId, + vaultId: vaultIdEncoded, vaultName: 'vaultName', actions: { clone: null, From 96c752e08b321b67c655e719db7c50d6f4de6580 Mon Sep 17 00:00:00 2001 From: Brian Botha Date: Mon, 21 Feb 2022 17:22:55 +1100 Subject: [PATCH 05/10] Fixes #340 - selective imports - error descriptions updated - Using google `Timestamp` message in `LogEntry` message - fixing comments --- src/agent/service/vaultsGitInfoGet.ts | 9 +- src/agent/service/vaultsGitPackGet.ts | 8 +- src/agent/service/vaultsScan.ts | 70 ++- src/bin/vaults/CommandLog.ts | 4 +- .../service/gestaltsActionsGetByIdentity.ts | 2 +- .../service/gestaltsActionsGetByNode.ts | 2 +- src/client/service/vaultsClone.ts | 4 +- src/client/service/vaultsCreate.ts | 6 +- src/client/service/vaultsDelete.ts | 4 +- src/client/service/vaultsList.ts | 6 +- src/client/service/vaultsLog.ts | 12 +- src/client/service/vaultsPull.ts | 4 +- src/client/service/vaultsRename.ts | 6 +- src/client/service/vaultsScan.ts | 22 +- src/client/service/vaultsSecretsDelete.ts | 6 +- src/client/service/vaultsSecretsEdit.ts | 6 +- src/client/service/vaultsSecretsGet.ts | 6 +- src/client/service/vaultsSecretsList.ts | 6 +- src/client/service/vaultsSecretsMkdir.ts | 6 +- src/client/service/vaultsSecretsNew.ts | 6 +- src/client/service/vaultsSecretsNewDir.ts | 6 +- src/client/service/vaultsSecretsRename.ts | 6 +- src/client/service/vaultsSecretsStat.ts | 6 +- src/client/service/vaultsVersion.ts | 6 +- src/notifications/NotificationsManager.ts | 3 +- src/proto/js/polykey/v1/vaults/vaults_pb.d.ts | 10 +- src/proto/js/polykey/v1/vaults/vaults_pb.js | 45 +- .../schemas/polykey/v1/vaults/vaults.proto | 3 +- src/sigchain/Sigchain.ts | 2 - src/utils/sysexits.ts | 64 +++ 
src/vaults/VaultInternal.ts | 481 +++++++++++------- src/vaults/VaultOps.ts | 11 +- src/vaults/errors.ts | 45 +- src/vaults/types.ts | 20 +- src/vaults/utils.ts | 2 +- test-git.ts | 337 ------------ test-vaultinternal.ts | 34 -- tests/bin/secrets/secrets.test.ts | 2 +- tests/bin/vaults/vaults.test.ts | 6 +- tests/client/rpcVaults.test.ts | 8 +- tests/vaults/VaultInternal.test.ts | 423 ++++++++------- tests/vaults/VaultManager.test.ts | 19 +- tests/vaults/VaultOps.test.ts | 3 +- tests/vaults/utils.test.ts | 16 +- 44 files changed, 836 insertions(+), 917 deletions(-) delete mode 100644 test-git.ts delete mode 100644 test-vaultinternal.ts diff --git a/src/agent/service/vaultsGitInfoGet.ts b/src/agent/service/vaultsGitInfoGet.ts index 5269fccf5..ca906c642 100644 --- a/src/agent/service/vaultsGitInfoGet.ts +++ b/src/agent/service/vaultsGitInfoGet.ts @@ -1,10 +1,11 @@ import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; -import type { ACL } from '../../acl'; +import type VaultManager from '../../vaults/VaultManager'; +import type ACL from '../../acl/ACL'; import type { ConnectionInfoGet } from '../../agent/types'; import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; -import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultsUtils from '../../vaults/utils'; +import * as vaultsErrors from '../../vaults/errors'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as validationUtils from '../../validation/utils'; import * as nodesUtils from '../../nodes/utils'; diff --git a/src/agent/service/vaultsGitPackGet.ts b/src/agent/service/vaultsGitPackGet.ts index 061b40e9f..0a180b4ff 100644 --- a/src/agent/service/vaultsGitPackGet.ts +++ b/src/agent/service/vaultsGitPackGet.ts @@ -1,11 +1,13 @@ import type * as grpc from '@grpc/grpc-js'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type { ConnectionInfoGet } from '../../agent/types'; import type ACL from '../../acl/ACL'; import * as nodesUtils from '../../nodes/utils'; -import { errors as grpcErrors, utils as grpcUtils } from '../../grpc'; -import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; +import * as grpcErrors from '../../grpc/errors'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultsErrors from '../../vaults/errors'; +import * as vaultsUtils from '../../vaults/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as validationUtils from '../../validation/utils'; import * as agentErrors from '../errors'; diff --git a/src/agent/service/vaultsScan.ts b/src/agent/service/vaultsScan.ts index 682863af0..11b73d21f 100644 --- a/src/agent/service/vaultsScan.ts +++ b/src/agent/service/vaultsScan.ts @@ -1,56 +1,50 @@ import type * as grpc from '@grpc/grpc-js'; -import type { GestaltGraph } from '../../gestalts'; -import type { VaultManager } from '../../vaults'; -import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; -import * as validationUtils from '../../validation/utils'; +import type VaultManager from '../../vaults/VaultManager'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import type { ConnectionInfoGet } from '../../agent/types'; +import type ACL from '../../acl/ACL'; +import * as agentErrors from 
'../../agent/errors';
 import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
-import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults';
-import { utils as grpcUtils } from '../../grpc';
+import * as vaultsUtils from '../../vaults/utils';
+import * as grpcUtils from '../../grpc/utils';
 
 function vaultsScan({
   vaultManager,
-  gestaltGraph,
+  acl,
+  connectionInfoGet,
 }: {
   vaultManager: VaultManager;
-  gestaltGraph: GestaltGraph;
+  acl: ACL;
+  connectionInfoGet: ConnectionInfoGet;
 }) {
   return async (
-    call: grpc.ServerWritableStream<nodesPB.Node, vaultsPB.List>,
+    call: grpc.ServerWritableStream<utilsPB.EmptyMessage, vaultsPB.List>,
   ): Promise<void> => {
     const genWritable = grpcUtils.generatorWritable(call);
-    const response = new vaultsPB.List();
-    const nodeId = validationUtils.parseNodeId(call.request.getNodeId());
-    const perms = await gestaltGraph.getGestaltActionsByNode(nodeId);
-    if (!perms) {
-      await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied());
-      return;
+    const listMessage = new vaultsPB.List();
+    // Getting the NodeId from the ReverseProxy connection info
+    const connectionInfo = connectionInfoGet(call);
+    // The connection info must exist if this handler is running,
+    // since the call arrived over that very connection
+    if (connectionInfo == null) {
+      throw new agentErrors.ErrorConnectionInfoMissing();
     }
+    const nodeId = connectionInfo.nodeId;
     try {
-      if (perms['scan'] !== null) {
-        await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied());
-        return;
-      }
-    } catch (err) {
-      if (err instanceof TypeError) {
-        await genWritable.throw(new vaultsErrors.ErrorVaultsPermissionDenied());
-        return;
-      }
-      throw err;
-    }
-    try {
-      const listResponse = await vaultManager.listVaults();
-      for (const vault of listResponse) {
-        if (vault !== null) {
-          response.setVaultName(vault[0]);
-          response.setVaultId(vaultsUtils.encodeVaultId(vault[1]));
-          await genWritable.next(response);
-        } else {
-          await genWritable.next(null);
-        }
+      const listResponse = vaultManager.handleScanVaults(nodeId, acl);
+      for await (const {
+        vaultId,
+        vaultName,
+        vaultPermissions,
+      } of listResponse) {
+        listMessage.setVaultId(vaultsUtils.encodeVaultId(vaultId));
+        listMessage.setVaultName(vaultName);
+        listMessage.setVaultPermissionsList(vaultPermissions);
+        await genWritable.next(listMessage);
       }
       await genWritable.next(null);
-    } catch (err) {
-      await genWritable.throw(err);
+    } catch (e) {
+      await genWritable.throw(e);
     }
   };
 }
diff --git a/src/bin/vaults/CommandLog.ts b/src/bin/vaults/CommandLog.ts
index 12e1f6a3f..01a0c4839 100644
--- a/src/bin/vaults/CommandLog.ts
+++ b/src/bin/vaults/CommandLog.ts
@@ -63,8 +63,8 @@ class CommandLog extends CommandPolykey {
         meta,
       );
       for await (const commit of stream) {
-        const timeStamp = commit.getTimeStamp();
-        const date = new Date(timeStamp);
+        const timestamp = commit.getTimeStamp();
+        const date = timestamp!.toDate();
         data.push(`commit ${commit.getOid()}`);
         data.push(`committer ${commit.getCommitter()}`);
         data.push(`Date: ${date.toDateString()}`);
diff --git a/src/client/service/gestaltsActionsGetByIdentity.ts b/src/client/service/gestaltsActionsGetByIdentity.ts
index 1ee46b1fd..c4df02f2c 100644
--- a/src/client/service/gestaltsActionsGetByIdentity.ts
+++ b/src/client/service/gestaltsActionsGetByIdentity.ts
@@ -44,7 +44,7 @@ function gestaltsActionsGetByIdentity({
       identityId,
     );
     if (result == null) {
-      // Node doesn't exist, so no permissions. might throw error instead TBD.
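The agent-side vaultsScan above now derives the caller's NodeId from the ReverseProxy connection rather than trusting a client-supplied one, and delegates permission filtering to VaultManager.handleScanVaults. A minimal sketch of the generator side of that contract (the ACL surface used here, getNodePerm and the shape of its record, is an assumption for illustration, not the actual ACL API):

public async *handleScanVaults(
  nodeId: NodeId,
  acl: ACL,
): AsyncGenerator<{
  vaultId: VaultId;
  vaultName: VaultName;
  vaultPermissions: string[];
}> {
  // Assumed ACL call: all permissions granted to the scanning node
  const permissions = await acl.getNodePerm(nodeId);
  if (permissions == null || !('scan' in permissions.gestalt)) {
    throw new vaultsErrors.ErrorVaultsPermissionDenied();
  }
  for (const [vaultName, vaultId] of await this.listVaults()) {
    // Assumed keying of the per-vault permission record
    const vaultPerm = permissions.vaults[vaultId.toString()];
    if (vaultPerm == null) continue; // Vault not shared with this node
    yield {
      vaultId,
      vaultName,
      vaultPermissions: Object.keys(vaultPerm),
    };
  }
}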
+ // Node doesn't exist, so no permissions response.setActionList([]); } else { // Contains permission diff --git a/src/client/service/gestaltsActionsGetByNode.ts b/src/client/service/gestaltsActionsGetByNode.ts index 3d1f6b1c5..f4bcd4d5a 100644 --- a/src/client/service/gestaltsActionsGetByNode.ts +++ b/src/client/service/gestaltsActionsGetByNode.ts @@ -36,7 +36,7 @@ function gestaltsActionsGetByNode({ ); const result = await gestaltGraph.getGestaltActionsByNode(nodeId); if (result == null) { - // Node doesn't exist, so no permissions. might throw error instead TBD. + // Node doesn't exist, so no permissions response.setActionList([]); } else { // Contains permission diff --git a/src/client/service/vaultsClone.ts b/src/client/service/vaultsClone.ts index ba55bb397..c7c650d55 100644 --- a/src/client/service/vaultsClone.ts +++ b/src/client/service/vaultsClone.ts @@ -1,8 +1,8 @@ import type { Authenticate } from '../types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; +import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as validationUtils from '../../validation/utils'; diff --git a/src/client/service/vaultsCreate.ts b/src/client/service/vaultsCreate.ts index a199bd5b4..363e4a200 100644 --- a/src/client/service/vaultsCreate.ts +++ b/src/client/service/vaultsCreate.ts @@ -1,10 +1,10 @@ import type { Authenticate } from '../types'; import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as grpc from '@grpc/grpc-js'; import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import { utils as grpcUtils } from '../../grpc'; -import { utils as vaultsUtils } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultsUtils from '../../vaults/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsCreate({ diff --git a/src/client/service/vaultsDelete.ts b/src/client/service/vaultsDelete.ts index 34aef3810..d2f029c4a 100644 --- a/src/client/service/vaultsDelete.ts +++ b/src/client/service/vaultsDelete.ts @@ -1,9 +1,9 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as grpc from '@grpc/grpc-js'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import { utils as grpcUtils } from '../../grpc'; +import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as validationUtils from '../../validation/utils'; diff --git a/src/client/service/vaultsList.ts b/src/client/service/vaultsList.ts index e0b919e50..d81902976 100644 --- a/src/client/service/vaultsList.ts +++ b/src/client/service/vaultsList.ts @@ -1,9 +1,9 @@ import type { Authenticate } from '../types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as grpc from '@grpc/grpc-js'; import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import { utils as grpcUtils } from '../../grpc'; 
-import { utils as vaultsUtils } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultsUtils from '../../vaults/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsList({ diff --git a/src/client/service/vaultsLog.ts b/src/client/service/vaultsLog.ts index c5303a148..99056911a 100644 --- a/src/client/service/vaultsLog.ts +++ b/src/client/service/vaultsLog.ts @@ -1,8 +1,9 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; +import { Timestamp } from 'google-protobuf/google/protobuf/timestamp_pb'; +import * as grpcUtils from '../../grpc/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as validationUtils from '../../validation/utils'; @@ -20,7 +21,7 @@ function vaultsLog({ try { const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - // Getting the vault. + // Getting the vault const vaultsLogMessage = call.request; const vaultMessage = vaultsLogMessage.getVault(); if (vaultMessage == null) { @@ -41,8 +42,9 @@ function vaultsLog({ for (const entry of log) { vaultsLogEntryMessage.setOid(entry.commitId); vaultsLogEntryMessage.setCommitter(entry.committer.name); - // FIXME: we can make this a google.protobuf.Timestamp field? - vaultsLogEntryMessage.setTimeStamp(entry.committer.timestamp.getTime()); + const timestampMessage = new Timestamp(); + timestampMessage.fromDate(entry.committer.timestamp); + vaultsLogEntryMessage.setTimeStamp(timestampMessage); vaultsLogEntryMessage.setMessage(entry.message); await genWritable.next(vaultsLogEntryMessage); } diff --git a/src/client/service/vaultsPull.ts b/src/client/service/vaultsPull.ts index 6384cc661..8c18e1a29 100644 --- a/src/client/service/vaultsPull.ts +++ b/src/client/service/vaultsPull.ts @@ -1,9 +1,9 @@ import type { Authenticate } from '../types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type { VaultName } from '../../vaults/types'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; +import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as validationUtils from '../../validation/utils'; import * as vaultsUtils from '../../vaults/utils'; diff --git a/src/client/service/vaultsRename.ts b/src/client/service/vaultsRename.ts index d89cef630..506162989 100644 --- a/src/client/service/vaultsRename.ts +++ b/src/client/service/vaultsRename.ts @@ -1,10 +1,10 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import * as grpc from '@grpc/grpc-js'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import { utils as vaultsUtils } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultsUtils from '../../vaults/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsRename({ diff --git a/src/client/service/vaultsScan.ts 
b/src/client/service/vaultsScan.ts index 018bcda2b..3d8d73a7e 100644 --- a/src/client/service/vaultsScan.ts +++ b/src/client/service/vaultsScan.ts @@ -2,10 +2,10 @@ import type { Authenticate } from '../types'; import type { NodeId } from '../../nodes/types'; import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import type * as grpc from '@grpc/grpc-js'; -import type { VaultManager } from '../../vaults'; -import { utils as grpcUtils } from '../../grpc'; -import { utils as vaultsUtils } from '../../vaults'; -import { validateSync, utils as validationUtils } from '../../validation'; +import type VaultManager from '../../vaults/VaultManager'; +import * as grpcUtils from '../../grpc/utils'; +import { validateSync } from '../../validation'; +import * as validationUtils from '../../validation/utils'; import { matchSync } from '../../utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; @@ -38,11 +38,15 @@ function vaultsScan({ nodeId: call.request.getNodeId(), }, ); - const list = await vaultManager.scanNodeVaults(nodeId); - for (const vault of list) { - const vaultListMessage = new vaultsPB.List(); - vaultListMessage.setVaultName(vault[0]); - vaultListMessage.setVaultId(vaultsUtils.encodeVaultId(vault[1])); + const vaultListMessage = new vaultsPB.List(); + for await (const { + vaultIdEncoded, + vaultName, + vaultPermissions, + } of vaultManager.scanVaults(nodeId)) { + vaultListMessage.setVaultName(vaultName); + vaultListMessage.setVaultId(vaultIdEncoded); + vaultListMessage.setVaultPermissionsList(vaultPermissions); await genWritable.next(vaultListMessage); } await genWritable.next(null); diff --git a/src/client/service/vaultsSecretsDelete.ts b/src/client/service/vaultsSecretsDelete.ts index 5de6f64ee..07a56a92d 100644 --- a/src/client/service/vaultsSecretsDelete.ts +++ b/src/client/service/vaultsSecretsDelete.ts @@ -1,11 +1,11 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function vaultsSecretsDelete({ diff --git a/src/client/service/vaultsSecretsEdit.ts b/src/client/service/vaultsSecretsEdit.ts index 876804ef1..8f45362b2 100644 --- a/src/client/service/vaultsSecretsEdit.ts +++ b/src/client/service/vaultsSecretsEdit.ts @@ -1,11 +1,11 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function vaultsSecretsEdit({ diff --git a/src/client/service/vaultsSecretsGet.ts 
b/src/client/service/vaultsSecretsGet.ts index 4d4c0356b..fa836e1b0 100644 --- a/src/client/service/vaultsSecretsGet.ts +++ b/src/client/service/vaultsSecretsGet.ts @@ -1,11 +1,11 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as grpc from '@grpc/grpc-js'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; function vaultsSecretsGet({ diff --git a/src/client/service/vaultsSecretsList.ts b/src/client/service/vaultsSecretsList.ts index e8954a4e1..db2a1cc36 100644 --- a/src/client/service/vaultsSecretsList.ts +++ b/src/client/service/vaultsSecretsList.ts @@ -1,11 +1,11 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as grpc from '@grpc/grpc-js'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; function vaultsSecretsList({ diff --git a/src/client/service/vaultsSecretsMkdir.ts b/src/client/service/vaultsSecretsMkdir.ts index 68c2f12ee..fca32d4f9 100644 --- a/src/client/service/vaultsSecretsMkdir.ts +++ b/src/client/service/vaultsSecretsMkdir.ts @@ -1,11 +1,11 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function vaultsSecretsMkdir({ diff --git a/src/client/service/vaultsSecretsNew.ts b/src/client/service/vaultsSecretsNew.ts index b8160233a..3c22baa7a 100644 --- a/src/client/service/vaultsSecretsNew.ts +++ b/src/client/service/vaultsSecretsNew.ts @@ -1,11 +1,11 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from 
'../../proto/js/polykey/v1/utils/utils_pb'; function vaultsSecretsNew({ diff --git a/src/client/service/vaultsSecretsNewDir.ts b/src/client/service/vaultsSecretsNewDir.ts index 0702dafeb..31a075e01 100644 --- a/src/client/service/vaultsSecretsNewDir.ts +++ b/src/client/service/vaultsSecretsNewDir.ts @@ -1,12 +1,12 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type { FileSystem } from '../../types'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function vaultsSecretsNewDir({ diff --git a/src/client/service/vaultsSecretsRename.ts b/src/client/service/vaultsSecretsRename.ts index 46c842261..7de527519 100644 --- a/src/client/service/vaultsSecretsRename.ts +++ b/src/client/service/vaultsSecretsRename.ts @@ -1,11 +1,11 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function vaultsSecretsRename({ diff --git a/src/client/service/vaultsSecretsStat.ts b/src/client/service/vaultsSecretsStat.ts index 2d7d68ea8..e657d4009 100644 --- a/src/client/service/vaultsSecretsStat.ts +++ b/src/client/service/vaultsSecretsStat.ts @@ -1,10 +1,10 @@ -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type { VaultName } from '../../vaults/types'; import type { Authenticate } from '../types'; import * as grpc from '@grpc/grpc-js'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; function vaultsSecretsStat({ diff --git a/src/client/service/vaultsVersion.ts b/src/client/service/vaultsVersion.ts index c5da80880..4338966da 100644 --- a/src/client/service/vaultsVersion.ts +++ b/src/client/service/vaultsVersion.ts @@ -1,9 +1,9 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import * as grpc from '@grpc/grpc-js'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; +import * as grpcUtils from '../../grpc/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsVersion({ @@ -43,7 +43,7 
@@ function vaultsVersion({ return [latestOid, currentVersionId]; }, ); - // Checking if latest version ID. + // Checking if latest version ID const isLatestVersion = latestOid === currentVersionId; // Creating message response.setIsLatestVersion(isLatestVersion); diff --git a/src/notifications/NotificationsManager.ts b/src/notifications/NotificationsManager.ts index c17954dd3..44974ae67 100644 --- a/src/notifications/NotificationsManager.ts +++ b/src/notifications/NotificationsManager.ts @@ -137,14 +137,13 @@ class NotificationsManager { this.notificationsDb = notificationsDb; this.notificationsMessagesDb = notificationsMessagesDb; - // Getting latest ID and creating ID generator FIXME, does this need to be a transaction? + // Getting latest ID and creating ID generator let latestId: NotificationId | undefined; const keyStream = this.notificationsMessagesDb.createKeyStream({ limit: 1, reverse: true, }); for await (const o of keyStream) { - // FIXME: really a buffer? latestId = IdInternal.fromBuffer(o as Buffer); } this.notificationIdGenerator = createNotificationIdGenerator(latestId); diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts index 4b0ab311c..ce03ed70b 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts @@ -6,6 +6,7 @@ import * as jspb from "google-protobuf"; import * as polykey_v1_nodes_nodes_pb from "../../../polykey/v1/nodes/nodes_pb"; +import * as google_protobuf_timestamp_pb from "google-protobuf/google/protobuf/timestamp_pb"; export class Vault extends jspb.Message { getNameOrId(): string; @@ -379,8 +380,11 @@ export class LogEntry extends jspb.Message { setOid(value: string): LogEntry; getCommitter(): string; setCommitter(value: string): LogEntry; - getTimeStamp(): number; - setTimeStamp(value: number): LogEntry; + + hasTimeStamp(): boolean; + clearTimeStamp(): void; + getTimeStamp(): google_protobuf_timestamp_pb.Timestamp | undefined; + setTimeStamp(value?: google_protobuf_timestamp_pb.Timestamp): LogEntry; getMessage(): string; setMessage(value: string): LogEntry; @@ -398,7 +402,7 @@ export namespace LogEntry { export type AsObject = { oid: string, committer: string, - timeStamp: number, + timeStamp?: google_protobuf_timestamp_pb.Timestamp.AsObject, message: string, } } diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.js b/src/proto/js/polykey/v1/vaults/vaults_pb.js index aebbcbb33..da87622fd 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.js +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.js @@ -16,6 +16,8 @@ var global = Function('return this')(); var polykey_v1_nodes_nodes_pb = require('../../../polykey/v1/nodes/nodes_pb.js'); goog.object.extend(proto, polykey_v1_nodes_nodes_pb); +var google_protobuf_timestamp_pb = require('google-protobuf/google/protobuf/timestamp_pb.js'); +goog.object.extend(proto, google_protobuf_timestamp_pb); goog.exportSymbol('proto.polykey.v1.vaults.Clone', null, global); goog.exportSymbol('proto.polykey.v1.vaults.InfoRequest', null, global); goog.exportSymbol('proto.polykey.v1.vaults.List', null, global); @@ -3045,7 +3047,7 @@ proto.polykey.v1.vaults.LogEntry.toObject = function(includeInstance, msg) { var f, obj = { oid: jspb.Message.getFieldWithDefault(msg, 1, ""), committer: jspb.Message.getFieldWithDefault(msg, 2, ""), - timeStamp: jspb.Message.getFieldWithDefault(msg, 4, 0), + timeStamp: (f = msg.getTimeStamp()) && google_protobuf_timestamp_pb.Timestamp.toObject(includeInstance, f), message: 
jspb.Message.getFieldWithDefault(msg, 3, "") }; @@ -3092,7 +3094,8 @@ proto.polykey.v1.vaults.LogEntry.deserializeBinaryFromReader = function(msg, rea msg.setCommitter(value); break; case 4: - var value = /** @type {number} */ (reader.readUint64()); + var value = new google_protobuf_timestamp_pb.Timestamp; + reader.readMessage(value,google_protobuf_timestamp_pb.Timestamp.deserializeBinaryFromReader); msg.setTimeStamp(value); break; case 3: @@ -3143,10 +3146,11 @@ proto.polykey.v1.vaults.LogEntry.serializeBinaryToWriter = function(message, wri ); } f = message.getTimeStamp(); - if (f !== 0) { - writer.writeUint64( + if (f != null) { + writer.writeMessage( 4, - f + f, + google_protobuf_timestamp_pb.Timestamp.serializeBinaryToWriter ); } f = message.getMessage(); @@ -3196,20 +3200,39 @@ proto.polykey.v1.vaults.LogEntry.prototype.setCommitter = function(value) { /** - * optional uint64 time_stamp = 4; - * @return {number} + * optional google.protobuf.Timestamp time_stamp = 4; + * @return {?proto.google.protobuf.Timestamp} */ proto.polykey.v1.vaults.LogEntry.prototype.getTimeStamp = function() { - return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 4, 0)); + return /** @type{?proto.google.protobuf.Timestamp} */ ( + jspb.Message.getWrapperField(this, google_protobuf_timestamp_pb.Timestamp, 4)); }; /** - * @param {number} value + * @param {?proto.google.protobuf.Timestamp|undefined} value * @return {!proto.polykey.v1.vaults.LogEntry} returns this - */ +*/ proto.polykey.v1.vaults.LogEntry.prototype.setTimeStamp = function(value) { - return jspb.Message.setProto3IntField(this, 4, value); + return jspb.Message.setWrapperField(this, 4, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.polykey.v1.vaults.LogEntry} returns this + */ +proto.polykey.v1.vaults.LogEntry.prototype.clearTimeStamp = function() { + return this.setTimeStamp(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {boolean} + */ +proto.polykey.v1.vaults.LogEntry.prototype.hasTimeStamp = function() { + return jspb.Message.getField(this, 4) != null; }; diff --git a/src/proto/schemas/polykey/v1/vaults/vaults.proto b/src/proto/schemas/polykey/v1/vaults/vaults.proto index 0c8ca8143..309cef87a 100644 --- a/src/proto/schemas/polykey/v1/vaults/vaults.proto +++ b/src/proto/schemas/polykey/v1/vaults/vaults.proto @@ -1,6 +1,7 @@ syntax = "proto3"; import "polykey/v1/nodes/nodes.proto"; +import "google/protobuf/timestamp.proto"; package polykey.v1.vaults; @@ -82,7 +83,7 @@ message Log { message LogEntry { string oid = 1; string committer = 2; - uint64 time_stamp = 4; + google.protobuf.Timestamp time_stamp = 4; string message = 3; } diff --git a/src/sigchain/Sigchain.ts b/src/sigchain/Sigchain.ts index fdbcd2940..fd4beaa35 100644 --- a/src/sigchain/Sigchain.ts +++ b/src/sigchain/Sigchain.ts @@ -431,7 +431,6 @@ class Sigchain { const claimStream = this.sigchainClaimsDb.createKeyStream(); let seq = 1; for await (const o of claimStream) { - // FIXME: really a buffer? map[seq] = IdInternal.fromBuffer(o as Buffer); seq++; } @@ -459,7 +458,6 @@ class Sigchain { reverse: true, }); for await (const o of keyStream) { - // FIXME: really a buffer? 
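+      // Keys from createKeyStream arrive as raw Buffers; with limit: 1
+      // and reverse: true the stream yields only the lexicographically
+      // greatest key, which for the sortable ids used here is the most
+      // recently generated one, hence the IdInternal.fromBuffer decode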
latestId = IdInternal.fromBuffer(o as Buffer);
     }
     return latestId;
diff --git a/src/utils/sysexits.ts b/src/utils/sysexits.ts
index d48e6dacf..935c1810e 100644
--- a/src/utils/sysexits.ts
+++ b/src/utils/sysexits.ts
@@ -2,20 +2,84 @@
 const sysexits = Object.freeze({
   OK: 0,
   GENERAL: 1,
   // Sysexit standard starts at 64 to avoid conflicts
+  /**
+   * The command was used incorrectly, e.g., with the wrong number of arguments,
+   * a bad flag, a bad syntax in a parameter, or whatever.
+   */
   USAGE: 64,
+  /**
+   * The input data was incorrect in some way. This should only be used for
+   * user's data and not system files.
+   */
   DATAERR: 65,
+  /**
+   * An input file (not a system file) did not exist or was not readable.
+   * This could also include errors like "No message" to a mailer
+   * (if it cared to catch it).
+   */
   NOINPUT: 66,
+  /**
+   * The user specified did not exist. This might be used for mail addresses
+   * or remote logins.
+   */
   NOUSER: 67,
+  /**
+   * The host specified did not exist. This is used in mail addresses or
+   * network requests.
+   */
   NOHOST: 68,
+  /**
+   * A service is unavailable. This can occur if a support program or file
+   * does not exist. This can also be used as a catchall message when
+   * something you wanted to do does not work, but you do not know why.
+   */
   UNAVAILABLE: 69,
+  /**
+   * An internal software error has been detected. This should be limited to
+   * non-operating system related errors as far as possible.
+   */
   SOFTWARE: 70,
+  /**
+   * An operating system error has been detected. This is intended to be used
+   * for such things as "cannot fork", "cannot create pipe", or the like.
+   * It includes things like getuid returning a user that does not exist in
+   * the passwd file.
+   */
   OSERR: 71,
+  /**
+   * Some system file (e.g., /etc/passwd, /var/run/utx.active, etc.)
+   * does not exist, cannot be opened, or has some sort of error
+   * (e.g., syntax error).
+   */
   OSFILE: 72,
+  /**
+   * A (user specified) output file cannot be created.
+   */
   CANTCREAT: 73,
+  /**
+   * An error occurred while doing I/O on some file.
+   */
   IOERR: 74,
+  /**
+   * Temporary failure, indicating something that is not really an error.
+   * In sendmail, this means that a mailer (e.g.) could not create a connection,
+   * and the request should be reattempted later.
+   */
   TEMPFAIL: 75,
+  /**
+   * The remote system returned something that was "not possible" during a
+   * protocol exchange.
+   */
   PROTOCOL: 76,
+  /**
+   * You did not have sufficient permission to perform the operation. This is
+   * not intended for file system problems, which should use EX_NOINPUT or
+   * EX_CANTCREAT, but rather for higher level permissions.
+   */
   NOPERM: 77,
+  /**
+   * Something was found in an unconfigured or misconfigured state.
+   */
   CONFIG: 78,
   CANNOT_EXEC: 126,
   COMMAND_NOT_FOUND: 127,
diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts
index 05d80d26a..63611b1a8 100644
--- a/src/vaults/VaultInternal.ts
+++ b/src/vaults/VaultInternal.ts
@@ -28,6 +28,7 @@ import {
 } from '@matrixai/async-init/dist/CreateDestroyStartStop';
 import * as vaultsErrors from './errors';
 import * as vaultsUtils from './utils';
+import { tagLast } from './types';
 import * as nodesUtils from '../nodes/utils';
 import * as validationUtils from '../validation/utils';
 import { withF, withG } from '../utils/context';
@@ -35,7 +36,6 @@ import { RWLock } from '../utils/locks';
 import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb';
 import { never } from '../utils/utils';
 
-// TODO: this might be temp?
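The sysexits documentation above gives each exit code its BSD sysexits(3) meaning; Polykey error classes now carry one of these codes as their exitCode. A rough sketch of how a CLI handler can map a thrown error to a process exit code (the helper below is illustrative only, not the actual bin error handling):

import sysexits from './src/utils/sysexits';
import { ErrorPolykey } from './src/errors';

function exitCodeFor(e: unknown): number {
  if (e instanceof ErrorPolykey) {
    // Errors carry their own sysexit, e.g.
    // ErrorVaultsPermissionDenied uses sysexits.NOPERM (77)
    return e.exitCode;
  }
  // Anything unrecognised is an internal software error
  return sysexits.SOFTWARE;
}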
 export type RemoteInfo = {
   remoteNode: NodeIdEncoded;
   remoteVault: VaultIdEncoded;
@@ -253,7 +253,7 @@ class VaultInternal {
       this.vaultIdEncoded,
       this.vaultsDb,
     );
-    // Let's backup any metadata.
+    // Let's backup any metadata
 
     if (fresh) {
       await this.vaultMetadataDb.clear();
@@ -356,7 +356,7 @@ class VaultInternal {
    * This changes the working directory and updates the HEAD reference
    */
   @ready(new vaultsErrors.ErrorVaultNotRunning())
-  public async version(ref: string | VaultRef = 'HEAD'): Promise<void> {
+  public async version(ref: string | VaultRef = tagLast): Promise<void> {
     if (!vaultsUtils.validateRef(ref)) {
       throw new vaultsErrors.ErrorVaultReferenceInvalid();
     }
@@ -372,7 +372,10 @@ class VaultInternal {
         force: true,
       });
     } catch (e) {
-      if (e instanceof git.Errors.NotFoundError) {
+      if (
+        e instanceof git.Errors.NotFoundError ||
+        e instanceof git.Errors.CommitNotFetchedError
+      ) {
         throw new vaultsErrors.ErrorVaultReferenceMissing();
       }
       throw e;
@@ -419,163 +422,21 @@ class VaultInternal {
         VaultInternal.dirtyKey,
         true,
       );
-
-      // We have to chroot it
-      // and then remove it
-      // but this is done by itself?
-      await f(this.efsVault);
+      try {
+        await f(this.efsVault);
+        // After doing mutation we need to commit the new history
+        await this.createCommit();
+      } catch (e) {
+        // Error implies dirty state
+        await this.cleanWorkingDirectory();
+        throw e;
+      }
       await this.db.put(
         this.vaultMetadataDbDomain,
         VaultInternal.dirtyKey,
         false,
       );
     });
-
-    // Const message: string[] = [];
-    // try {
-    //
-    //   // If the version of the vault has been changed, checkout the working
-    //   // directory to this point in history and discard any unlinked commits
-    //   await git.checkout({
-    //     fs: this.efs,
-    //     dir: this.vaultDataDir,
-    //     gitdir: this.vaultGitDir,
-    //     ref: this.workingDirIndex,
-    //   });
-    //
-    //   // Efs/someVaultId/contents
-    //   await f(this.efsVault);
-    //   // Get the status of each file in the working directory
-    //   // https://isomorphic-git.org/docs/en/statusMatrix
-    //   const statusMatrix = await git.statusMatrix({
-    //     fs: this.efsRoot,
-    //     dir: this.baseDir,
-    //     gitdir: this.gitDir,
-    //   });
-    //   for (let [
-    //     filePath,
-    //     HEADStatus,
-    //     workingDirStatus,
-    //     stageStatus,
-    //   ] of statusMatrix) {
-    //     // Reset the index of files that are marked as 'unmodified'
-    //     // The working directory, HEAD and staging area are all the same
-    //     // https://github.com/MatrixAI/js-polykey/issues/260
-    //     if (
-    //       HEADStatus === workingDirStatus &&
-    //       workingDirStatus === stageStatus
-    //     ) {
-    //       await git.resetIndex({
-    //         fs: this.efsRoot,
-    //         dir: this.baseDir,
-    //         gitdir: this.gitDir,
-    //         filepath: filePath,
-    //       });
-    //       // Check if the file is still 'unmodified' and leave
-    //       // it out of the commit if it is
-    //       [filePath, HEADStatus, workingDirStatus, stageStatus] = (
-    //         await git.statusMatrix({
-    //           fs: this.efsRoot,
-    //           dir: this.baseDir,
-    //           gitdir: this.gitDir,
-    //           filepaths: [filePath],
-    //         })
-    //       ).pop()!;
-    //       if (
-    //         HEADStatus === workingDirStatus &&
-    //         workingDirStatus === stageStatus
-    //       )
-    //         continue;
-    //     }
-    //     // We want files in the working directory that are both different
-    //     // from the head commit and the staged changes
-    //     // If working directory and stage status are not equal then filepath has unstaged
-    //     // changes in the working directory relative to both the HEAD and staging
-    //     // area that need to be added
-    //     // https://isomorphic-git.org/docs/en/statusMatrix
-    //     if (workingDirStatus !== stageStatus) {
-    //       let status: 'added' | 'modified' |
'deleted'; - // // If the working directory status is 0 then the file has - // // been deleted - // if (workingDirStatus === 0) { - // status = 'deleted'; - // await git.remove({ - // fs: this.efsRoot, - // dir: this.baseDir, - // gitdir: this.gitDir, - // filepath: filePath, - // }); - // } else { - // await git.add({ - // fs: this.efsRoot, - // dir: this.baseDir, - // gitdir: this.gitDir, - // filepath: filePath, - // }); - // // Check whether the file already exists inside the HEAD - // // commit and if it does then it is unmodified - // if (HEADStatus === 1) { - // status = 'modified'; - // } else { - // status = 'added'; - // } - // } - // message.push(filePath + ' ' + status); - // } - // } - // // Check if there were actual changes made to any files - // if (message.length !== 0) { - // this.logger.info( - // `Committing to Vault '${vaultsUtils.makeVaultIdPretty( - // this.vaultId, - // )}'`, - // ); - // this.workingDirIndex = await git.commit({ - // fs: this.efsRoot, - // dir: this.baseDir, - // gitdir: this.gitDir, - // author: { - // name: this.keyManager.getNodeId(), - // }, - // message: message.toString(), - // }); - // } - // } finally { - // // Check the status matrix for any unstaged file changes - // // which are considered dirty commits - // const statusMatrix = await git.statusMatrix({ - // fs: this.efsRoot, - // dir: this.baseDir, - // gitdir: this.gitDir, - // }); - // for await (const [filePath, _, workingDirStatus] of statusMatrix) { - // // For all files stage all changes, this is needed - // // so that we can check out all untracked files as well - // if (workingDirStatus === 0) { - // await git.remove({ - // fs: this.efsRoot, - // dir: this.baseDir, - // gitdir: this.gitDir, - // filepath: filePath, - // }); - // } else { - // await git.add({ - // fs: this.efsRoot, - // dir: this.baseDir, - // gitdir: this.gitDir, - // filepath: filePath, - // }); - // } - // } - // // Remove the staged dirty commits by checking out - // await git.checkout({ - // fs: this.efsRoot, - // dir: this.baseDir, - // gitdir: this.gitDir, - // ref: this.workingDirIndex, - // }); - // release(); - // } } @ready(new vaultsErrors.ErrorVaultNotRunning()) @@ -585,25 +446,35 @@ class VaultInternal { const efsVault = this.efsVault; const db = this.db; const vaultDbDomain = this.vaultMetadataDbDomain; + const createCommit = () => this.createCommit(); + const cleanWorkingDirectory = () => this.cleanWorkingDirectory(); return withG([this.writeLock], async function* () { if ((await db.get(vaultDbDomain, VaultInternal.remoteKey)) != null) { // Mirrored vaults are immutable throw new vaultsErrors.ErrorVaultRemoteDefined(); } await db.put(vaultDbDomain, VaultInternal.dirtyKey, true); - const result = yield* g(efsVault); - // At the end of the generator - // you need to do this - // but just before - // you need to finish it up - // DO what you need to do here, create the commit + let result; + // Do what you need to do here, create the commit + try { + result = yield* g(efsVault); + // At the end of the generator + // you need to do this + // but just before + // you need to finish it up + // After doing mutation we need to commit the new history + await createCommit(); + } catch (e) { + // Error implies dirty state + await cleanWorkingDirectory(); + throw e; + } await db.put(vaultDbDomain, VaultInternal.dirtyKey, false); return result; }); } - // TODO: this needs to respect the write lock since we are writing to the EFS @ready(new vaultsErrors.ErrorVaultNotRunning()) public async pullVault({ 
nodeConnectionManager, @@ -726,7 +597,7 @@ class VaultInternal { await this.db.put( this.vaultMetadataDbDomain, VaultInternal.dirtyKey, - true, + false, ); } @@ -750,10 +621,6 @@ class VaultInternal { // name: string | undefined } - /** - * TODO: review what happens when you are cloning - * Or you need to load a particular commit object ID here - */ protected async setupGit(): Promise { // Initialization is idempotent // It works even with an existing git repository @@ -800,15 +667,26 @@ class VaultInternal { force: true, }); } else { - // Force checkout out to the latest commit - // This ensures that any uncommitted state is dropped - await git.checkout({ - fs: this.efs, - dir: this.vaultDataDir, - gitdir: this.vaultGitDir, - ref: vaultsUtils.canonicalBranch, - force: true, - }); + // Checking for dirty + if ( + (await this.db.get( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + )) === true + ) { + // Force checkout out to the latest commit + // This ensures that any uncommitted state is dropped + await this.cleanWorkingDirectory(); + // Do global GC operation + await this.garbageCollectGitObjects(); + + // Setting dirty back to false + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + false, + ); + } } return commitIdLatest; } @@ -904,6 +782,257 @@ class VaultInternal { remoteVaultId, ]; } + + /** + * Creates a commit while moving the canonicalBranch reference + */ + protected async createCommit() { + // Checking if commit is appending or branching + const headRef = await git.resolveRef({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: 'HEAD', + }); + const masterRef = await git.resolveRef({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranchRef, + }); + const nodeIdEncoded = nodesUtils.encodeNodeId(this.keyManager.getNodeId()); + // Staging changes and creating commit message + const message: string[] = []; + // Get the status of each file in the working directory + // https://isomorphic-git.org/docs/en/statusMatrix + const statusMatrix = await git.statusMatrix({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + }); + for (let [ + filePath, + HEADStatus, + workingDirStatus, + stageStatus, + ] of statusMatrix) { + // Reset the index of files that are marked as 'unmodified' + // The working directory, HEAD and staging area are all the same + // https://github.com/MatrixAI/js-polykey/issues/260 + if (HEADStatus === workingDirStatus && workingDirStatus === stageStatus) { + await git.resetIndex({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: filePath, + }); + // Check if the file is still 'unmodified' and leave + // it out of the commit if it is + [filePath, HEADStatus, workingDirStatus, stageStatus] = ( + await git.statusMatrix({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepaths: [filePath], + }) + ).pop()!; + if ( + HEADStatus === workingDirStatus && + workingDirStatus === stageStatus + ) { + continue; + } + } + // We want files in the working directory that are both different + // from the head commit and the staged changes + // If working directory and stage status are not equal then filepath has un-staged + // changes in the working directory relative to both the HEAD and staging + // area that need to be added + // https://isomorphic-git.org/docs/en/statusMatrix + if (workingDirStatus !== stageStatus) { + let status: 'added' | 'modified' | 'deleted'; + // If the working directory 
status is 0 then the file has + // been deleted + if (workingDirStatus === 0) { + status = 'deleted'; + await git.remove({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: filePath, + }); + } else { + await git.add({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: filePath, + }); + // Check whether the file already exists inside the HEAD + // commit and if it does then it is unmodified + if (HEADStatus === 1) { + status = 'modified'; + } else { + status = 'added'; + } + } + message.push(`${filePath} ${status}`); + } + } + // Skip commit if no changes were made + if (message.length !== 0) { + // Creating commit + const commitRef = await git.commit({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + author: { + name: nodeIdEncoded, + }, + message: message.toString(), + ref: 'HEAD', + }); + // Updating branch pointer + await git.writeRef({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranchRef, + value: commitRef, + force: true, + }); + // We clean old history if a commit was made on previous version + if (headRef !== masterRef) { + // Delete old commits following chain from masterRef -> headRef + let currentRef = masterRef; + while (currentRef !== headRef) { + // Read commit info + const commit = await git.readCommit({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + oid: currentRef, + }); + // Delete commit + await vaultsUtils.deleteObject( + this.efs, + this.vaultGitDir, + commit.oid, + ); + // Getting new ref + const nextRef = commit.commit.parent.pop(); + if (nextRef == null) break; + currentRef = nextRef; + } + } + } + } + + /** + * Cleans the git working directory by checking out the canonicalBranch + */ + protected async cleanWorkingDirectory() { + // Check the status matrix for any un-staged file changes + // which are considered dirty commits + const statusMatrix = await git.statusMatrix({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + }); + for await (const [filePath, , workingDirStatus] of statusMatrix) { + // For all files stage all changes, this is needed + // so that we can check out all untracked files as well + if (workingDirStatus === 0) { + await git.remove({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: filePath, + }); + } else { + await git.add({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: filePath, + }); + } + } + // Remove the staged dirty commits by checking out + await git.checkout({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranchRef, + force: true, + }); + } + + /** + * Deletes any git objects that can't be reached from the canonicalBranch + */ + protected async garbageCollectGitObjects() { + // To garbage collect the git objects, + // we need to walk all objects connected to the master branch + // and delete the object files that are not touched by this walk + const touchedOids = {}; + const masterRef = await git.resolveRef({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranch, + }); + const queuedOids: string[] = [masterRef]; + while (queuedOids.length > 0) { + const currentOid = queuedOids.shift()!; + if (touchedOids[currentOid] === null) continue; + const result = await git.readObject({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + oid: currentOid, + }); + 
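+      // Mark phase: every oid reachable from the canonical branch is
+      // recorded in touchedOids. Commits enqueue their parents and their
+      // root tree for further walking; tree entries are marked but not
+      // walked further. Whatever remains untouched after the walk is
+      // unreachable and gets swept from the objects directory below.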
touchedOids[result.oid] = result.type; + if (result.format !== 'parsed') continue; + switch (result.type) { + case 'commit': + { + const object = result.object; + queuedOids.push(...object.parent); + queuedOids.push(object.tree); + } + break; + case 'tree': + { + const object = result.object; + for (const item of object) { + touchedOids[item.oid] = item.type; + } + } + break; + default: { + never(); + } + } + } + // Walking all objects + const objectPath = path.join(this.vaultGitDir, 'objects'); + const buckets = (await this.efs.readdir(objectPath)).filter((item) => { + return item !== 'info' && item !== 'pack'; + }); + for (const bucket of buckets) { + const bucketPath = path.join(objectPath, bucket.toString()); + const oids = await this.efs.readdir(bucketPath); + for (const shortOid of oids) { + const oidPath = path.join(bucketPath, shortOid.toString()); + const oid = bucket.toString() + shortOid.toString(); + if (touchedOids[oid] === undefined) { + // Removing unused objects + await this.efs.unlink(oidPath); + } + } + } + } } export default VaultInternal; diff --git a/src/vaults/VaultOps.ts b/src/vaults/VaultOps.ts index f9bef210a..703f09752 100644 --- a/src/vaults/VaultOps.ts +++ b/src/vaults/VaultOps.ts @@ -7,7 +7,6 @@ import path from 'path'; import * as vaultsErrors from './errors'; import * as vaultsUtils from './utils'; -// TODO: remove? type FileOptions = { recursive?: boolean; }; @@ -16,8 +15,8 @@ type FileOptions = { // - add succeeded // - secret exists // - secret with directory -// Might just drop the return type. -// I don't see a case where it would be false without an error. +// Might just drop the return type +// I don't see a case where it would be false without an error // - Add locking? async function addSecret( vault: Vault, @@ -136,7 +135,7 @@ async function statSecret(vault: Vault, secretName: string) { // TODO: tests // - delete a secret // - Secret doesn't exist -// - delete a full and empty directory with and without recursive. +// - delete a full and empty directory with and without recursive async function deleteSecret( vault: Vault, secretName: string, @@ -191,7 +190,7 @@ async function mkdir( // TODO: tests // - adding existing directory // - adding non-existent directory -// - adding a file. +// - adding a file async function addSecretDirectory( vault: Vault, secretDirectory: string, @@ -245,7 +244,7 @@ async function addSecretDirectory( * Retrieves a list of the secrets in a vault */ // TODO: tests -// - read secrets. 
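The VaultOps helpers above wrap vault.readF/writeF so callers never handle the EncryptedFS directly. A usage sketch (the secret names and contents are illustrative; updateSecret is assumed to be the companion edit helper in this module):

// Inside a VaultManager.withVaults callback, where `vault` is a Vault:
await vaultOps.addSecret(vault, 'MySecret', 'secret contents');
const names = await vaultOps.listSecrets(vault); // ['MySecret']
await vaultOps.updateSecret(vault, 'MySecret', 'updated contents');
await vaultOps.deleteSecret(vault, 'MySecret');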
+// - read secrets
 // - no secrets
 async function listSecrets(vault: Vault): Promise<string[]> {
   return await vault.readF(async (efs) => {
diff --git a/src/vaults/errors.ts b/src/vaults/errors.ts
index a01793ff1..43e877caf 100644
--- a/src/vaults/errors.ts
+++ b/src/vaults/errors.ts
@@ -64,26 +64,35 @@ class ErrorVaultRemoteUndefined extends ErrorVaults {
   exitCode = sysexits.USAGE;
 }
 
-// --- these need to be reviewed
-
 class ErrorVaultsVaultUndefined extends ErrorVaults {
   description = 'Vault does not exist';
-  exitCode = 10;
+  exitCode = sysexits.USAGE;
 }
 
-class ErrorVaultsVaultDefined extends ErrorVaults {}
-
-class ErrorVaultsRecursive extends ErrorVaults {}
-
-class ErrorVaultsVaultUnlinked extends ErrorVaults {}
+class ErrorVaultsVaultDefined extends ErrorVaults {
+  description = 'Vault already exists';
+  exitCode = sysexits.USAGE;
+}
 
-class ErrorVaultsCreateVaultId extends ErrorVaults {}
+class ErrorVaultsRecursive extends ErrorVaults {
+  description = 'Recursive option was not set';
+  exitCode = sysexits.USAGE;
+}
 
-class ErrorVaultsInvalidVaultId extends ErrorVaults {} // TODO: Assign a proper error code and message.
+class ErrorVaultsCreateVaultId extends ErrorVaults {
+  description = 'Failed to create unique VaultId';
+  exitCode = sysexits.SOFTWARE;
+}
 
-class ErrorVaultsMergeConflict extends ErrorVaults {}
+class ErrorVaultsMergeConflict extends ErrorVaults {
+  description = 'Merge Conflicts are not supported yet';
+  exitCode = sysexits.SOFTWARE;
+}
 
-class ErrorVaultsPermissionDenied extends ErrorVaults {}
+class ErrorVaultsPermissionDenied extends ErrorVaults {
+  description = 'Permission was denied';
+  exitCode = sysexits.NOPERM;
+}
 
 class ErrorVaultsNameConflict extends ErrorVaults {
   description = 'Unique name could not be created';
@@ -92,9 +101,15 @@
 class ErrorSecrets extends ErrorPolykey {}
 
-class ErrorSecretsSecretUndefined extends ErrorSecrets {}
+class ErrorSecretsSecretUndefined extends ErrorSecrets {
+  description = 'Secret does not exist';
+  exitCode = sysexits.USAGE;
+}
 
-class ErrorSecretsSecretDefined extends ErrorSecrets {}
+class ErrorSecretsSecretDefined extends ErrorSecrets {
+  description = 'Secret already exists';
+  exitCode = sysexits.USAGE;
+}
 
 export {
   ErrorVaults,
@@ -114,9 +129,7 @@ export {
   ErrorVaultsVaultUndefined,
   ErrorVaultsVaultDefined,
   ErrorVaultsRecursive,
-  ErrorVaultsVaultUnlinked,
   ErrorVaultsCreateVaultId,
-  ErrorVaultsInvalidVaultId,
   ErrorVaultsMergeConflict,
   ErrorVaultsPermissionDenied,
   ErrorVaultsNameConflict,
diff --git a/src/vaults/types.ts b/src/vaults/types.ts
index 8635f526a..12c3bc386 100644
--- a/src/vaults/types.ts
+++ b/src/vaults/types.ts
@@ -134,24 +134,7 @@ interface FileSystemWritable extends FileSystemReadable {
   writeFile: EncryptedFS['writeFile'];
 }
 
-type VaultName = string; // FIXME, placeholder, remove?
-
-// type VaultKey = Opaque<'VaultKey', Buffer>;
-
-// /**
-//  * Actions relating to what is possible with vaults
-//  */
-// type VaultAction = 'clone' | 'pull';
-
-// type SecretName = string;
-
-// type SecretList = string[];
-
-// type SecretContent = Buffer | string;
-
-// type FileOptions = {
-//   recursive?: boolean;
-// };
+type VaultName = string; // FIXME: temp placeholder
 
 type VaultActions = Partial<Record<VaultAction, null>>;
@@ -168,7 +151,6 @@ export type {
   CommitLog,
   FileSystemReadable,
   FileSystemWritable,
-  // FIXME: placeholder types
   VaultName,
   VaultActions,
 };
diff --git a/src/vaults/utils.ts b/src/vaults/utils.ts
index 5758f91e9..92103e879 100644
--- a/src/vaults/utils.ts
+++ b/src/vaults/utils.ts
@@ -7,9 +7,9 @@ import type {
 } from './types';
 import type { NodeId } from '../nodes/types';
+import { IdInternal, IdRandom } from '@matrixai/id';
 import type { EncryptedFS } from 'encryptedfs';
 import path from 'path';
-import { IdInternal, IdRandom } from '@matrixai/id';
 import { tagLast, refs, vaultActions } from './types';
 import * as nodesUtils from '../nodes/utils';
diff --git a/test-git.ts b/test-git.ts
deleted file mode 100644
index cad3f9b0e..000000000
--- a/test-git.ts
+++ /dev/null
@@ -1,337 +0,0 @@
-import fs from 'fs';
-import path from 'path';
-import git from 'isomorphic-git';
-import * as vaultsUtils from './src/vaults/utils';
-
-/*
-
-I'm going to need to test out how to use the tags and the branches.
-When we are pulling the repo, we can checkout to a given version in the commit hash.
-We need to switch the HEAD.
-We're going to do this in the real FS. So we can see this being done, one step at a time
-*/
-
-  // // await git.checkout({
-  // //   fs,
-  // //   dir: vaultDir,
-  // //   gitdir: vaultGitDir,
-  // //   ref: 'master'
-  // // });
-
-  // // We never change branches anyway
-
-  // try {
-  //   const commits = await git.log({
-  //     fs,
-  //     dir: vaultDir,
-  //     gitdir: vaultGitDir,
-  //     depth: 1,
-  //     ref: 'master',
-  //   });
-
-  //   console.log(commits);
-
-  //   // if the comits is meant to be empty array
-
-  // } catch (e) {
-  //   if (e instanceof git.Errors.NotFoundError) {
-  //     console.log('OH NO!');
-
-
-async function main () {
-
-  const vaultDataDir = './tmp/git/data';
-  const vaultGitDir = './tmp/git/.git';
-
-  await fs.promises.rm('./tmp/git', { recursive: true, force: true });
-
-  await fs.promises.mkdir(vaultDataDir, { recursive: true });
-
-  await git.init({
-    fs,
-    dir: vaultDataDir,
-    gitdir: vaultGitDir,
-    defaultBranch: 'master'
-  });
-
-  const firstCommit = await git.commit({
-    fs,
-    dir: vaultDataDir,
-    gitdir: vaultGitDir,
-    author: {
-      name: 'this is the author',
-      email: '',
-    },
-    message: 'Initial Commit',
-    ref: 'HEAD',
-  });
-
-  await git.writeRef({
-    fs,
-    dir: vaultDataDir,
-    gitdir: vaultGitDir,
-    ref: 'refs/heads/master',
-    value: firstCommit,
-    force: true
-  });
-
-  console.log(firstCommit);
-
-  console.log(vaultsUtils.validateCommitId(firstCommit.toUpperCase()));
-
-  // what happens when you create .git inside?
- - await git.checkout({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - ref: firstCommit.toUpperCase(), - }); - - - - - - await fs.promises.writeFile( - path.join(vaultDataDir, 'file'), - 'v2' - ); - - await git.add({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - filepath: 'file' - }); - - const secondCommit = await git.commit({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - author: { - name: 'this is the author', - email: '', - }, - message: 'Second Commit', - ref: 'HEAD', - }); - - await git.writeRef({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - ref: 'refs/heads/master', - value: secondCommit, - force: true - }); - - await fs.promises.writeFile( - path.join(vaultDataDir, 'file'), - 'v3' - ); - - await git.add({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - filepath: 'file' - }); - - // This is comparing against the HEAD commit - // the default ref is HEAD - const status = await git.statusMatrix({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - }); - - console.log(status); - - const thirdCommit = await git.commit({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - author: { - name: 'this is the author', - email: '', - }, - message: 'Third Commit', - ref: 'HEAD', - }); - - await git.writeRef({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - ref: 'refs/heads/master', - value: thirdCommit, - force: true - }); - - // we alaways use the master branch - // to find the log of canonical history - // or we find it from where we are in HEAD - const commits = await git.log({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - ref: 'master', - }); - - console.log(commits); - - // this changed to the second commit - // but the working tree isn't updated - // wtf? - - // This changes it to a detached commit - // But master still points to the original one - await git.checkout({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - ref: secondCommit - }); - - console.log('FROM HEAD', await git.log({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - })); - - // the branch always points to the tip, and is considered canonical - // we only change the branch point when we are making new commits - // when making new commits, we want to change the branch pointer - - console.log('FROM MASTER', await git.log({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - ref: 'master', - })); - - // this changes it to ref: refs/heads/master - // it also does a checkout of the working directory - // if we want to checkout to the end, the `HEAD` points to `master` - // that's fine too, it just means head is now attached - await git.checkout({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - // ref: 'master' - ref: secondCommit - // if this is thirdCommit, it's not the same - // the branch pointer doesn't get updated - }); - - // interestingly enough - // moving to the third commit keeps the head there - // if the head is kept there, and we add a new commit here - // what happens? 
- - await fs.promises.writeFile( - path.join(vaultDataDir, 'file'), - 'v4' - ); - - await git.add({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - filepath: 'file' - }); - - // const currentCommit = await git.resolveRef({ - // fs, - // dir: vaultDataDir, - // gitdir: vaultGitDir, - // ref: 'HEAD' - // }); - - const fourthCommit = await git.commit({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - author: { - name: 'this is the author', - email: '', - }, - message: 'Fourth Commit', - ref: 'HEAD' - // ref: 'refs/heads/master', - // parent: [currentCommit] - }); - - await git.writeRef({ - fs, - dir: vaultDataDir, - gitdir: vaultGitDir, - // ref: 'HEAD', - ref: 'refs/heads/master', - value: fourthCommit, - // value: 'refs/heads/master', - // symbolic: true, - force: true - }); - - // if ref is HEAD, it moves the HEAD pointer on the commit - // if ref is master, it doesn't do anything... - // oh shit, refs/heads/master works, but not master - // the ref here has to be either HEAD or the full path - // like refs/heads/master - // if you don't pass anything, then it is assumed - - // undefined will update both HEAD and master - // refs/heads/master will update both HEAD and master - // HEAD will only update HEAD - - // ok so the issue is this - // if i am in detached head state - // by default NOTHING is updated, neither HEAD nor master - // if HEAD is passed in, then HEAD gets updated - // if refs/heads/master is passed in, then only the master branch is updated - // it makes sense that we would want to update both - // or at the very least update HEAD, then update the branch pointer in a different way - - - - - // console.log(fourthCommit); - - // console.log('FROM HEAD', await git.log({ - // fs, - // dir: vaultDataDir, - // gitdir: vaultGitDir, - // })); - - // console.log('FROM MASTER', await git.log({ - // fs, - // dir: vaultDataDir, - // gitdir: vaultGitDir, - // ref: 'master', - // })); - - // console.log('FROM FOURTH', await git.log({ - // fs, - // dir: vaultDataDir, - // gitdir: vaultGitDir, - // ref: fourthCommit, - // })); - - // await git.checkout({ - // fs, - // dir: vaultDataDir, - // gitdir: vaultGitDir, - // ref: fourthCommit - // }); - - - - // note the above is not transactional - // so we have to be aware of this and "clean" - // the state whenever we start using it - - -} - -main(); diff --git a/test-vaultinternal.ts b/test-vaultinternal.ts deleted file mode 100644 index 71cb73f3d..000000000 --- a/test-vaultinternal.ts +++ /dev/null @@ -1,34 +0,0 @@ -import KeyManager from './src/keys/KeyManager'; -import VaultInternal from './src/vaults/VaultInternal'; -import * as vaultsUtils from './src/vaults/utils'; -import { EncryptedFS, utils as efsUtils } from 'encryptedfs'; - -async function main () { - const keyManager = await KeyManager.createKeyManager({ - keysPath: './tmp/keys', - password: 'abc123' - }); - - // this buffer needs to e - const [vaultKey] = await efsUtils.generateKeyFromPass('abc123', 'hello', 256); - - const efs = await EncryptedFS.createEncryptedFS({ - dbPath: './tmp/db', - dbKey: vaultKey - }); - - const vaultId = vaultsUtils.generateVaultId(); - const vault = await VaultInternal.createVaultInternal({ - vaultId, - keyManager, - efs - }); - - await vault.stop(); - - await efs.stop(); - await keyManager.stop(); - -} - -main(); diff --git a/tests/bin/secrets/secrets.test.ts b/tests/bin/secrets/secrets.test.ts index 4efb4561c..aeee174d9 100644 --- a/tests/bin/secrets/secrets.test.ts +++ b/tests/bin/secrets/secrets.test.ts @@ -278,7 +278,7 @@ describe('CLI 
secrets', () => { }); describe('commandStat', () => { test('should retrieve secrets', async () => { - const vaultName = 'Vault3' as VaultName; + const vaultName = 'Vault9'; const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { diff --git a/tests/bin/vaults/vaults.test.ts b/tests/bin/vaults/vaults.test.ts index 108486abf..6a5c1a974 100644 --- a/tests/bin/vaults/vaults.test.ts +++ b/tests/bin/vaults/vaults.test.ts @@ -5,10 +5,10 @@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; -import { utils as nodesUtils } from '@/nodes'; -import { utils as vaultsUtils } from '@/vaults'; +import * as nodesUtils from '@/nodes/utils'; +import * as vaultsUtils from '@/vaults/utils'; import sysexits from '@/utils/sysexits'; -import { NotificationsManager } from '@/notifications'; +import NotificationsManager from '@/notifications/NotificationsManager'; import * as testBinUtils from '../utils'; import * as testUtils from '../../utils'; diff --git a/tests/client/rpcVaults.test.ts b/tests/client/rpcVaults.test.ts index 75d0c4517..1cb03c41e 100644 --- a/tests/client/rpcVaults.test.ts +++ b/tests/client/rpcVaults.test.ts @@ -1,5 +1,5 @@ import type * as grpc from '@grpc/grpc-js'; -import type { VaultManager } from '@/vaults'; +import type VaultManager from '@/vaults/VaultManager'; import type { VaultId, VaultName } from '@/vaults/types'; import type { ClientServiceClient } from '@/proto/js/polykey/v1/client_service_grpc_pb'; import type { Stat } from 'encryptedfs'; @@ -12,12 +12,12 @@ import { PolykeyAgent } from '@'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; import * as secretsPB from '@/proto/js/polykey/v1/secrets/secrets_pb'; -import { KeyManager } from '@/keys'; -import { ForwardProxy } from '@/network'; +import KeyManager from '@/keys/KeyManager'; +import ForwardProxy from '@/network/ForwardProxy'; import * as grpcUtils from '@/grpc/utils'; import * as vaultErrors from '@/vaults/errors'; import * as vaultsUtils from '@/vaults/utils'; -import { vaultOps } from '@/vaults'; +import * as vaultOps from '@/vaults/VaultOps'; import * as nodesUtils from '@/nodes/utils'; import * as clientUtils from './utils'; diff --git a/tests/vaults/VaultInternal.test.ts b/tests/vaults/VaultInternal.test.ts index aa3ce2bb8..34f03d70c 100644 --- a/tests/vaults/VaultInternal.test.ts +++ b/tests/vaults/VaultInternal.test.ts @@ -1,18 +1,19 @@ import type { VaultId } from '@/vaults/types'; import type { Vault } from '@/vaults/Vault'; -import type { KeyManager } from '@/keys'; +import type KeyManager from '@/keys/KeyManager'; import type { DBDomain, DBLevel } from '@matrixai/db'; import os from 'os'; import path from 'path'; import fs from 'fs'; +import { DB } from '@matrixai/db'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { EncryptedFS } from 'encryptedfs'; -import { DB } from '@matrixai/db'; -import { VaultInternal } from '@/vaults'; -import { generateVaultId } from '@/vaults/utils'; +import git from 'isomorphic-git'; +import { tagLast } from '@/vaults/types'; +import VaultInternal from '@/vaults/VaultInternal'; import * as vaultsErrors from '@/vaults/errors'; import { sleep } from '@/utils'; -import { utils as keysUtils } from '@/keys'; +import * as keysUtils from '@/keys/utils'; import * as vaultsUtils from 
'@/vaults/utils'; import * as testsUtils from '../utils'; @@ -44,6 +45,13 @@ describe('VaultInternal', () => { } as KeyManager; const secret1 = { name: 'secret-1', content: 'secret-content-1' }; const secret2 = { name: 'secret-2', content: 'secret-content-2' }; + const secret3 = { name: 'secret-3', content: 'secret-content-3' }; + + const runGen = async (gen) => { + for await (const _ of gen) { + // Do nothing + } + }; beforeEach(async () => { dataDir = await fs.promises.mkdtemp( @@ -74,7 +82,7 @@ describe('VaultInternal', () => { vaultsDbDomain = ['vaults']; vaultsDb = await db.level(vaultsDbDomain[0]); - vaultId = generateVaultId(); + vaultId = vaultsUtils.generateVaultId(); vault = await VaultInternal.createVaultInternal({ vaultId, keyManager: fakeKeyManager, @@ -132,6 +140,9 @@ describe('VaultInternal', () => { await vault.writeF(async (efs) => { await efs.writeFile('secret-1', 'secret-content'); }); + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret-content'); + }); await vault.stop(); vault = await VaultInternal.createVaultInternal({ vaultId, @@ -140,7 +151,7 @@ describe('VaultInternal', () => { logger, fresh: false, db, - vaultName: 'testVault2', + vaultName: 'testVault1', vaultsDb, vaultsDbDomain, }); @@ -150,6 +161,7 @@ describe('VaultInternal', () => { ); }); }); + // Mutation and history test('can change to the current commit', async () => { let commit = (await vault.log(undefined, 1))[0]; await vault.version(commit.commitId); @@ -178,8 +190,8 @@ describe('VaultInternal', () => { await vault.writeF(async (efs) => { await efs.writeFile('test3', 'testdata3'); }); - await vault.version(initCommit); const endCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.version(initCommit); let files = await vault.readF(async (efs) => { return await efs.readdir('.'); }); @@ -211,12 +223,11 @@ describe('VaultInternal', () => { await efs.write(fd, 'testdata6', 3, 6); await efs.close(fd); }); - await vault.version(fourthCommit); - await vault.writeF(async (efs) => { - await efs.writeFile('test4', 'testdata4'); - }); + await expect(vault.version(fourthCommit)).rejects.toThrow( + vaultsErrors.ErrorVaultReferenceMissing, + ); }); - test('can change to the HEAD commit', async () => { + test('can change to the latest commit', async () => { const initCommit = (await vault.log(undefined, 1))[0].commitId; await vault.writeF(async (efs) => { await efs.writeFile('test1', 'testdata1'); @@ -228,7 +239,7 @@ describe('VaultInternal', () => { await efs.writeFile('test3', 'testdata3'); }); await vault.version(initCommit); - await vault.version('HEAD'); + await vault.version(tagLast); let files = await vault.readF(async (efs) => { return await efs.readdir('.'); }); @@ -271,6 +282,17 @@ describe('VaultInternal', () => { await efs.writeFile('secret-1', 'secret-content'); }); }); + test('read operation allowed', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); + }); + await vault.readF(async (efs) => { + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); + }); + }); test('concurrent write operations prevented', async () => { await Promise.all([ vault.writeF(async (efs) => { @@ -293,14 +315,13 @@ describe('VaultInternal', () => { const log = await vault.log(); expect(log.length).toEqual(4); }); - test('commit added if mutation in write', async () => { + test('commit added if mutation in writeF', async () => { const commit = 
(await vault.log())[0].commitId; await vault.writeF(async (efs) => { await efs.writeFile('secret-1', 'secret-content'); }); const log = await vault.log(); expect(log).toHaveLength(2); - expect(log[0].message).toContain('secret-1'); expect(log[0].commitId).not.toStrictEqual(commit); }); test('no commit added if no mutation in write', async () => { @@ -348,7 +369,7 @@ describe('VaultInternal', () => { await efs.rename(secret1.name, `${secret1.name}-new`); await efs.unlink(secret2.name); }); - // Checking changes. + // Checking changes await vault.readF(async (efs) => { expect(await efs.exists(secret1.name)).toBeFalsy(); expect(await efs.exists(`${secret1.name}-new`)).toBeTruthy(); @@ -369,40 +390,29 @@ describe('VaultInternal', () => { }), ).rejects.toThrow(); - // Make sure secret1 wasn't written when the above commit failed. + // Make sure secret1 wasn't written when the above commit failed await vault.readF(async (efs) => { expect(await efs.readdir('.')).not.toContain(secret1.name); }); - // No new commit. + // No new commit expect(await vault.log()).toHaveLength(1); - // Succeeding commit operation. + // Succeeding commit operation await vault.writeF(async (efs) => { await efs.writeFile(secret2.name, secret2.content); }); - // Secret 1 shouldn't exist while secret2 exists. + // Secret 1 shouldn't exist while secret2 exists await vault.readF(async (efs) => { const directory = await efs.readdir('.'); expect(directory).not.toContain(secret1.name); // expect(directory).toContain(secret2.name); }); - // Has a new commit. + // Has a new commit expect(await vault.log()).toHaveLength(2); }); - test('read operation allowed', async () => { - await vault.writeF(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await efs.writeFile(secret2.name, secret2.content); - }); - await vault.readF(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - }); - }); test('concurrent read operations allowed', async () => { await vault.writeF(async (efs) => { await efs.writeFile(secret1.name, secret1.content); @@ -457,7 +467,7 @@ describe('VaultInternal', () => { // Converting a vault to the interface const vaultInterface = vault as Vault; - // Using the avaliable functions. + // Using the avaliable functions await vaultInterface.writeF(async (efs) => { await efs.writeFile('test', 'testContent'); }); @@ -479,7 +489,7 @@ describe('VaultInternal', () => { // Can we convert back? const vaultNormal = vaultInterface as VaultInternal; - expect(vaultNormal.destroy).toBeTruthy(); // This exists again. + expect(vaultNormal.destroy).toBeTruthy(); // This exists again }); test('cannot commit when the remote field is set', async () => { // Write remote metadata @@ -500,168 +510,227 @@ describe('VaultInternal', () => { }), ).rejects.toThrow(vaultsErrors.ErrorVaultRemoteDefined); }); - // Old locking tests - // TODO: review and remove? 
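// The old locking tests removed below all rely on the same trick: a
// deferred promise lets the test hold the vault lock open until it chooses
// to release it. A minimal standalone sketch of that pattern (`deferred`
// is a hypothetical helper, not Polykey code):
function deferred(): { promise: Promise<void>; resolve: () => void } {
  let resolve!: () => void;
  const promise = new Promise<void>((res) => {
    resolve = res;
  });
  return { promise, resolve };
}
// Usage: a writeF callback awaits `promise`, keeping the lock "hostage",
// while the test asserts that concurrent operations stay blocked, and then
// calls `resolve()` to let everything complete.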
- test('write locks read', async () => { + test('cannot checkout old commits after branching commit', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile('test1', 'testdata1'); + }); + const secondCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test2', 'testdata2'); + }); + const thirdCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test3', 'testdata3'); + }); + const fourthCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.version(secondCommit); + await vault.writeF(async (efs) => { + await efs.writeFile('test4', 'testdata4'); + }); + await expect(() => { + return vault.version(thirdCommit); + }).rejects.toThrow(); + await expect(() => { + return vault.version(fourthCommit); + }).rejects.toThrow(); + }); + test('can recover from dirty state', async () => { await vault.writeF(async (efs) => { await efs.writeFile('secret-1', 'secret-content'); }); + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret-content'); + }); + // Write files to the working directory + // @ts-ignore: kidnap vault EFS + const vaultEFS = vault.efsVault; + await vaultEFS.writeFile('dirty', 'dirtyData'); + await vaultEFS.writeFile('secret-1', 'dirtyData'); + // Setting dirty flag true + const vaultMetadataDbDomain = [ + ...vaultsDbDomain, + vaultsUtils.encodeVaultId(vaultId), + ]; + await db.put(vaultMetadataDbDomain, VaultInternal.dirtyKey, true); + + // Restarting vault + await vault.stop(); + await vault.start({}); - await Promise.all([ - vault.writeF(async (efs) => { - await efs.writeFile('secret-1', 'SUPER-DUPER-SECRET-CONTENT'); - }), - vault.readF(async (efs) => { - expect((await efs.readFile('secret-1')).toString()).toEqual( - 'SUPER-DUPER-SECRET-CONTENT', - ); - }), - ]); + // Checking if working directory was cleaned + // and head was moved to latest commit + await vault.readF(async (efs) => { + const files = await efs.readdir('.'); + expect(files).toContain('secret-1'); + expect((await efs.readFile('secret-1')).toString()).toEqual( + 'secret-content', + ); + expect(files).toContain('secret-2'); + expect(files).not.toContain('dirty'); + }); }); - test('read locks write', async () => { + test('clean errant commits recovering from dirty state', async () => { await vault.writeF(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await efs.writeFile(secret2.name, secret2.content); + await efs.writeFile('secret-1', 'secret-content'); }); - await Promise.all([ - vault.readF(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - }), - vault.writeF(async (efs) => { - await efs.writeFile(secret1.name, 'NEW-CONTENT'); - }), - vault.readF(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - 'NEW-CONTENT', - ); - }), - ]); - }); - test('locking occurs when making a commit.', async () => { - // We want to check if the locking is happening. so we need a way to see if an operation is being blocked. 
- - let resolveDelay; - const delayPromise = new Promise((resolve, _reject) => { - resolveDelay = resolve; + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret-content'); }); - let firstCommitResolved = false; - let firstCommitResolveTime; - - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); - - const commit1 = vault.writeF(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await delayPromise; // Hold the lock hostage. - firstCommitResolved = true; - firstCommitResolveTime = Date.now(); + // Creating out of history commits + // @ts-ignore: kidnap vault EFS + const vaultEFS = vault.efs; + const log = await vault.log(); + const ref = log[1].commitId; + await efs.writeFile(path.join(vault.vaultDataDir, 'newfile1'), 'hello'); + const newRef1 = await git.commit({ + fs: vaultEFS, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + author: { + name: 'test', + email: 'test', + }, + message: 'test', + ref: ref, + }); + await efs.writeFile(path.join(vault.vaultDataDir, 'newfile2'), 'world'); + const newRef2 = await git.commit({ + fs: vaultEFS, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + author: { + name: 'test', + email: 'test', + }, + message: 'test', + ref: newRef1, }); - // Now that we are holding the lock hostage, - // @ts-ignore - expect(vault.lock.isLocked()).toBeTruthy(); - // We want to check if any action resolves before the lock is released. + // Setting dirty flag true + const vaultMetadataDbDomain = [ + ...vaultsDbDomain, + vaultsUtils.encodeVaultId(vaultId), + ]; + await db.put(vaultMetadataDbDomain, VaultInternal.dirtyKey, true); - let secondCommitResolved = false; - let secondCommitResolveTime; - const commit2 = vault.writeF(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - secondCommitResolved = true; - await sleep(2); - secondCommitResolveTime = Date.now(); - }); - - // Give plenty of time for a commit to resolve. - await sleep(200); - - // Now we want to check for the expected conditions. - // 1. Both commist have not completed. - // commit 1 is holding the lock. - expect(firstCommitResolved).toBeFalsy(); - expect(secondCommitResolved).toBeFalsy(); - - // 2. We release the hostage so both should resolve. 
- await sleep(200); - resolveDelay(); - await commit1; - await commit2; - expect(firstCommitResolved).toBeTruthy(); - expect(secondCommitResolved).toBeTruthy(); - expect(secondCommitResolveTime).toBeGreaterThan(firstCommitResolveTime); - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); + // Restarting vault + await vault.stop(); + await vault.start({}); - // Commit order should be commit2 -> commit1 -> init + // Checking if errant commits were cleaned up + await expect(vault.version(newRef1)).rejects.toThrow(); + await expect(vault.version(newRef2)).rejects.toThrow(); + }); + test('commit added if mutation in writeG', async () => { + const commit = (await vault.log())[0].commitId; + const gen = vault.writeG(async function* (efs): AsyncGenerator { + yield await efs.writeFile('secret-1', 'secret-content'); + }); + for await (const _ of gen) { + // Do nothing + } const log = await vault.log(); - expect(log[0].message).toContain(secret2.name); - expect(log[1].message).toContain(secret1.name); + expect(log).toHaveLength(2); + expect(log[0].commitId).not.toStrictEqual(commit); + }); + test('no commit added if no mutation in writeG', async () => { + const commit = (await vault.log())[0].commitId; + const gen = vault.writeG(async function* (_efs): AsyncGenerator {}); + for await (const _ of gen) { + // Do nothing + } + const log = await vault.log(); + expect(log).toHaveLength(1); + expect(log[0].message).not.toContain('secret-1'); + expect(log[0].commitId).toStrictEqual(commit); }); - test('locking occurs when making an access.', async () => { + test('no mutation to vault when part of a commit operation fails in writeG', async () => { + const gen = vault.writeG(async function* (efs): AsyncGenerator { + yield await efs.writeFile(secret1.name, secret1.content); + yield await efs.rename('notValid', 'randomName'); // Throws + }); + // Failing commit operation + await expect(() => runGen(gen)).rejects.toThrow(); + + // Make sure secret1 wasn't written when the above commit failed + await vault.readF(async (efs) => { + expect(await efs.readdir('.')).not.toContain(secret1.name); + }); + // No new commit + expect(await vault.log()).toHaveLength(1); + }); + test('no commit after readG', async () => { await vault.writeF(async (efs) => { await efs.writeFile(secret1.name, secret1.content); await efs.writeFile(secret2.name, secret2.content); }); - // We want to check if the locking is happening. so we need a way to see if an operation is being blocked. - let resolveDelay; - const delayPromise = new Promise((resolve, _reject) => { - resolveDelay = resolve; + const commit = (await vault.log())[0].commitId; + const gen = await vault.readG(async function* (efs): AsyncGenerator { + yield expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); }); - let firstCommitResolved = false; - let firstCommitResolveTime; - - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); - - const commit1 = vault.readF(async (efs) => { - await efs.readFile(secret1.name); - await delayPromise; // Hold the lock hostage. - firstCommitResolved = true; - firstCommitResolveTime = Date.now(); + await runGen(gen); + const log = await vault.log(); + expect(log).toHaveLength(2); + expect(log[0].commitId).toStrictEqual(commit); + }); + test('garbage collection', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); }); - - // Now that we are holding the lock hostage, - // we want to check if any action resolves before the lock is released. 
- // @ts-ignore - expect(vault.lock.isLocked()).toBeTruthy(); - - let secondCommitResolved = false; - let secondCommitResolveTime; - const commit2 = vault.readF(async (efs) => { - await efs.readFile(secret2.name); - secondCommitResolved = true; - await sleep(10); - secondCommitResolveTime = Date.now(); - }); - - // Give plenty of time for a commit to resolve. - await sleep(200); - - // Now we want to check for the expected conditions. - // 1. Both commist have not completed. - // commit 1 is holding the lock. - expect(firstCommitResolved).toBeFalsy(); - expect(secondCommitResolved).toBeFalsy(); - - // 2. We release the hostage so both should resolve. - await sleep(200); - resolveDelay(); - await commit1; - await commit2; - expect(firstCommitResolved).toBeTruthy(); - expect(secondCommitResolved).toBeTruthy(); - expect(secondCommitResolveTime).toBeGreaterThan(firstCommitResolveTime); + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + }); + await vault.writeF(async (efs) => { + await efs.writeFile(secret3.name, secret3.content); + }); + // @ts-ignore: kidnap efs + const vaultEfs = vault.efs; + // @ts-ignore: kidnap efs + const vaultEfsData = vault.efsVault; + const quickCommit = async (ref: string, secret: string) => { + await vaultEfsData.writeFile(secret, secret); + await git.add({ + fs: vaultEfs, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + filepath: secret, + }); + return await git.commit({ + fs: vaultEfs, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + author: { + name: 'test', + email: 'test', + }, + message: 'test', + ref: ref, + }); + }; + const log = await vault.log(); + let num = 5; + const refs: string[] = []; + for (const logElement of log) { + refs.push(await quickCommit(logElement.commitId, `secret-${num++}`)); + } // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); + await vault.garbageCollectGitObjects(); + + for (const ref of refs) { + await expect( + git.checkout({ + fs: vaultEfs, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + ref, + }), + ).rejects.toThrow(git.Errors.CommitNotFetchedError); + } }); // Locking tests const waitDelay = 200; - const runGen = async (gen) => { - for await (const _ of gen) { - // Do nothing - } - }; test('writeF respects read and write locking', async () => { // @ts-ignore: kidnap lock const lock = vault.lock; @@ -792,8 +861,7 @@ describe('VaultInternal', () => { expect(finished.length).toBe(4); releaseRead(); }); - test.todo('pullVault respects write locking'); - // Life- cycle + // Life-cycle test('can create with CreateVaultInternal', async () => { let vault1: VaultInternal | undefined; try { @@ -807,7 +875,7 @@ describe('VaultInternal', () => { vaultsDbDomain, logger, }); - // Data exists for vault now. + // Data exists for vault now expect(await efs.readdir('.')).toContain( vaultsUtils.encodeVaultId(vaultId1), ); @@ -830,7 +898,7 @@ describe('VaultInternal', () => { vaultsDbDomain, logger, }); - // Data exists for vault now. 
+ // Data exists for vault now expect(await efs.readdir('.')).toContain( vaultsUtils.encodeVaultId(vaultId1), ); @@ -863,7 +931,6 @@ describe('VaultInternal', () => { await vault2?.destroy(); } }); - test.todo('can create with CloneVaultInternal'); test('stop is idempotent', async () => { // Should complete with no errors await vault.stop(); diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index b75a1ed3d..a4d519654 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -14,8 +14,7 @@ import path from 'path'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { IdInternal } from '@matrixai/id'; import { DB } from '@matrixai/db'; -import { destroyed } from '@matrixai/async-init'; -import { running } from '@matrixai/async-init/dist/utils'; +import { destroyed, running } from '@matrixai/async-init'; import git from 'isomorphic-git'; import ACL from '@/acl/ACL'; import GestaltGraph from '@/gestalts/GestaltGraph'; @@ -30,7 +29,7 @@ import ForwardProxy from '@/network/ForwardProxy'; import * as vaultsUtils from '@/vaults/utils'; import * as keysUtils from '@/keys/utils'; import { sleep } from '@/utils'; -import { VaultInternal } from '@/vaults'; +import VaultInternal from '@/vaults/VaultInternal'; import * as testsUtils from '../utils'; const mockedGenerateDeterministicKeyPair = jest @@ -227,7 +226,7 @@ describe('VaultManager', () => { // @ts-ignore: protected method const vault = await vaultManager.getVault(secondVaultId); await vaultManager.destroyVault(secondVaultId); - // The mapping should be gone. + // The mapping should be gone expect((await vaultManager.listVaults()).size).toBe(0); // The vault should be destroyed expect(vault[destroyed]).toBe(true); @@ -475,7 +474,7 @@ describe('VaultManager', () => { let localNodeIdEncoded: NodeIdEncoded; beforeAll(async () => { - // Creating agents. + // Creating agents allDataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -495,7 +494,7 @@ describe('VaultManager', () => { remoteKeynode2Id = remoteKeynode2.keyManager.getNodeId(); remoteKeynode2IdEncoded = nodesUtils.encodeNodeId(remoteKeynode2Id); - // Adding details to each agent. + // Adding details to each agent await remoteKeynode1.nodeGraph.setNode(remoteKeynode2Id, { host: remoteKeynode2.revProxy.getIngressHost(), port: remoteKeynode2.revProxy.getIngressPort(), @@ -1344,7 +1343,7 @@ describe('VaultManager', () => { }); }); test('handleScanVaults should list all vaults with permissions', async () => { - // 1. we need to set up state. + // 1. we need to set up state const acl = await ACL.createACL({ db, logger, @@ -1365,7 +1364,7 @@ describe('VaultManager', () => { logger: logger.getChild(VaultManager.name), }); try { - // Setting up state. + // Setting up state const nodeId1 = testsUtils.generateRandomNodeId(); const nodeId2 = testsUtils.generateRandomNodeId(); await gestaltGraph.setNode({ @@ -1421,7 +1420,7 @@ describe('VaultManager', () => { } }); test('ScanVaults should get all vaults with permissions from remote node', async () => { - // 1. we need to set up state. + // 1. we need to set up state const remoteAgent = await PolykeyAgent.createPolykeyAgent({ password: 'password', nodePath: path.join(dataDir, 'remoteNode'), @@ -1476,7 +1475,7 @@ describe('VaultManager', () => { logger: logger.getChild(VaultManager.name), }); try { - // Setting up state. 
+ // Setting up state const targetNodeId = remoteAgent.keyManager.getNodeId(); const nodeId1 = keyManager.getNodeId(); diff --git a/tests/vaults/VaultOps.test.ts b/tests/vaults/VaultOps.test.ts index 3437657f5..e376eb306 100644 --- a/tests/vaults/VaultOps.test.ts +++ b/tests/vaults/VaultOps.test.ts @@ -9,7 +9,8 @@ import { EncryptedFS } from 'encryptedfs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; import * as errors from '@/vaults/errors'; -import { VaultInternal, vaultOps } from '@/vaults'; +import VaultInternal from '@/vaults/VaultInternal'; +import * as vaultOps from '@/vaults/VaultOps'; import * as vaultsUtils from '@/vaults/utils'; import * as keysUtils from '@/keys/utils'; import * as testUtils from '../utils'; diff --git a/tests/vaults/utils.test.ts b/tests/vaults/utils.test.ts index 85a866f88..a2333467b 100644 --- a/tests/vaults/utils.test.ts +++ b/tests/vaults/utils.test.ts @@ -1,3 +1,4 @@ +import type { VaultId } from '@/vaults/types'; import fs from 'fs'; import os from 'os'; import path from 'path'; @@ -52,7 +53,6 @@ describe('Vaults utils', () => { } expect(files.sort()).toStrictEqual([filePath1, filePath2].sort()); }); - test('fs can be read recursively', async () => { await fs.promises.mkdir(path.join(dataDir, 'dir'), { recursive: true }); await fs.promises.mkdir(path.join(dataDir, 'dir', 'dir2', 'dir3'), { @@ -73,8 +73,16 @@ describe('Vaults utils', () => { } expect(files.sort()).toStrictEqual([filePath1, filePath2].sort()); }); - test('makeVaultId converts a buffer', async () => { - const randomIdGen = new IdRandom(); - Buffer.from(randomIdGen.get()); + test('decodeNodeId does not throw an error', async () => { + const randomIdGen = new IdRandom(); + const vaultId = randomIdGen.get(); + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + + expect(vaultsUtils.decodeVaultId(vaultIdEncoded)).toBeDefined(); + expect(vaultsUtils.decodeVaultId('invalidVaultIdEncoded')).toBeUndefined(); + expect( + vaultsUtils.decodeVaultId('zF4VfF3uRhSqgxTOOLONGxTRdVKauV9'), + ).toBeUndefined(); + expect(vaultsUtils.decodeVaultId('zF4VfxTOOSHORTxTV9')).toBeUndefined(); }); }); From 409aeea937beb3a4fddfd903776a4b63a6ef718f Mon Sep 17 00:00:00 2001 From: Brian Botha Date: Tue, 22 Feb 2022 18:58:37 +1100 Subject: [PATCH 06/10] Fixes #339 - Vaults scanning --- src/bin/vaults/CommandScan.ts | 8 +- .../js/polykey/v1/agent_service_grpc_pb.d.ts | 16 +-- .../js/polykey/v1/agent_service_grpc_pb.js | 6 +- src/proto/js/polykey/v1/vaults/vaults_pb.d.ts | 5 + src/proto/js/polykey/v1/vaults/vaults_pb.js | 60 ++++++++- .../schemas/polykey/v1/agent_service.proto | 2 +- .../schemas/polykey/v1/vaults/vaults.proto | 1 + tests/agent/GRPCClientAgent.test.ts | 6 +- tests/bin/vaults/vaults.test.ts | 117 ++++++++++++++++++ tests/vaults/VaultManager.test.ts | 8 +- 10 files changed, 206 insertions(+), 23 deletions(-) diff --git a/src/bin/vaults/CommandScan.ts b/src/bin/vaults/CommandScan.ts index f7fadf348..8477156ed 100644 --- a/src/bin/vaults/CommandScan.ts +++ b/src/bin/vaults/CommandScan.ts @@ -9,7 +9,7 @@ class CommandScan extends CommandPolykey { constructor(...args: ConstructorParameters) { super(...args); this.name('scan'); - this.description('Scans a node to reveal their vaults'); + this.description('Scans a node to reveal their shared vaults'); this.argument('', 'Id of the node to scan'); this.addOption(binOptions.nodeId); this.addOption(binOptions.clientHost); @@ -27,6 +27,7 @@ class CommandScan extends CommandPolykey { 
this.logger.getChild(binProcessors.processClientOptions.name), ); const client = await PolykeyClient.createPolykeyClient({ + nodePath: options.nodePath, nodeId: clientOptions.nodeId, host: clientOptions.clientHost, port: clientOptions.clientPort, @@ -48,7 +49,10 @@ class CommandScan extends CommandPolykey { const data: Array = []; const stream = grpcClient.vaultsScan(nodeMessage, meta); for await (const vault of stream) { - data.push(`${vault.getVaultName()}\t\t${vault.getVaultId()}`); + const vaultName = vault.getVaultName(); + const vaultIdEncoded = vault.getVaultId(); + const permissions = vault.getVaultPermissionsList().join(','); + data.push(`${vaultName}\t\t${vaultIdEncoded}\t\t${permissions}`); } return data; }, diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts index 72db2fe5c..068ddd535 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts @@ -51,12 +51,12 @@ interface IAgentServiceService_IVaultsGitPackGet extends grpc.MethodDefinition
{ path: "/polykey.v1.AgentService/VaultsGitPackGet"; requestStream: true; responseStream: true; requestSerialize: grpc.serialize; requestDeserialize: grpc.deserialize; responseSerialize: grpc.serialize
; responseDeserialize: grpc.deserialize; } -interface IAgentServiceService_IVaultsScan extends grpc.MethodDefinition { +interface IAgentServiceService_IVaultsScan extends grpc.MethodDefinition { path: "/polykey.v1.AgentService/VaultsScan"; requestStream: false; responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } @@ -121,7 +121,7 @@ export interface IAgentServiceServer extends grpc.UntypedServiceImplementation { echo: grpc.handleUnaryCall; vaultsGitInfoGet: grpc.handleServerStreamingCall; vaultsGitPackGet: grpc.handleBidiStreamingCall; - vaultsScan: grpc.handleServerStreamingCall; + vaultsScan: grpc.handleServerStreamingCall; nodesClosestLocalNodesGet: grpc.handleUnaryCall; nodesClaimsGet: grpc.handleUnaryCall; nodesChainDataGet: grpc.handleUnaryCall; @@ -139,8 +139,8 @@ export interface IAgentServiceClient { vaultsGitPackGet(): grpc.ClientDuplexStream; vaultsGitPackGet(options: Partial): grpc.ClientDuplexStream; vaultsGitPackGet(metadata: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsScan(request: polykey_v1_utils_utils_pb.EmptyMessage, options?: Partial): grpc.ClientReadableStream; + vaultsScan(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; @@ -170,8 +170,8 @@ export class AgentServiceClient extends grpc.Client implements IAgentServiceClie public vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.InfoRequest, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsGitPackGet(options?: Partial): grpc.ClientDuplexStream; public vaultsGitPackGet(metadata?: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsScan(request: polykey_v1_utils_utils_pb.EmptyMessage, options?: Partial): grpc.ClientReadableStream; + public vaultsScan(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: 
grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.js b/src/proto/js/polykey/v1/agent_service_grpc_pb.js index 5f6d9af0d..387b87b83 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.js @@ -191,10 +191,10 @@ vaultsGitInfoGet: { path: '/polykey.v1.AgentService/VaultsScan', requestStream: false, responseStream: true, - requestType: polykey_v1_nodes_nodes_pb.Node, + requestType: polykey_v1_utils_utils_pb.EmptyMessage, responseType: polykey_v1_vaults_vaults_pb.List, - requestSerialize: serialize_polykey_v1_nodes_Node, - requestDeserialize: deserialize_polykey_v1_nodes_Node, + requestSerialize: serialize_polykey_v1_utils_EmptyMessage, + requestDeserialize: deserialize_polykey_v1_utils_EmptyMessage, responseSerialize: serialize_polykey_v1_vaults_List, responseDeserialize: deserialize_polykey_v1_vaults_List, }, diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts index ce03ed70b..8cdea111b 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts @@ -33,6 +33,10 @@ export class List extends jspb.Message { setVaultName(value: string): List; getVaultId(): string; setVaultId(value: string): List; + clearVaultPermissionsList(): void; + getVaultPermissionsList(): Array; + setVaultPermissionsList(value: Array): List; + addVaultPermissions(value: string, index?: number): string; serializeBinary(): Uint8Array; toObject(includeInstance?: boolean): List.AsObject; @@ -48,6 +52,7 @@ export namespace List { export type AsObject = { vaultName: string, vaultId: string, + vaultPermissionsList: Array, } } diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.js b/src/proto/js/polykey/v1/vaults/vaults_pb.js index da87622fd..31d273126 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.js +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.js @@ -70,7 +70,7 @@ if (goog.DEBUG && !COMPILED) { * @constructor */ proto.polykey.v1.vaults.List = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); + jspb.Message.initialize(this, opt_data, 0, -1, proto.polykey.v1.vaults.List.repeatedFields_, null); }; goog.inherits(proto.polykey.v1.vaults.List, jspb.Message); if (goog.DEBUG && !COMPILED) { @@ -589,6 +589,13 @@ proto.polykey.v1.vaults.Vault.prototype.setNameOrId = function(value) { +/** + * List of repeated fields within this message type. + * @private {!Array} + * @const + */ +proto.polykey.v1.vaults.List.repeatedFields_ = [3]; + if (jspb.Message.GENERATE_TO_OBJECT) { @@ -621,7 +628,8 @@ proto.polykey.v1.vaults.List.prototype.toObject = function(opt_includeInstance) proto.polykey.v1.vaults.List.toObject = function(includeInstance, msg) { var f, obj = { vaultName: jspb.Message.getFieldWithDefault(msg, 1, ""), - vaultId: jspb.Message.getFieldWithDefault(msg, 2, "") + vaultId: jspb.Message.getFieldWithDefault(msg, 2, ""), + vaultPermissionsList: (f = jspb.Message.getRepeatedField(msg, 3)) == null ? 
undefined : f }; if (includeInstance) { @@ -666,6 +674,10 @@ proto.polykey.v1.vaults.List.deserializeBinaryFromReader = function(msg, reader) var value = /** @type {string} */ (reader.readString()); msg.setVaultId(value); break; + case 3: + var value = /** @type {string} */ (reader.readString()); + msg.addVaultPermissions(value); + break; default: reader.skipField(); break; @@ -709,6 +721,13 @@ proto.polykey.v1.vaults.List.serializeBinaryToWriter = function(message, writer) f ); } + f = message.getVaultPermissionsList(); + if (f.length > 0) { + writer.writeRepeatedString( + 3, + f + ); + } }; @@ -748,6 +767,43 @@ proto.polykey.v1.vaults.List.prototype.setVaultId = function(value) { }; +/** + * repeated string vault_permissions = 3; + * @return {!Array} + */ +proto.polykey.v1.vaults.List.prototype.getVaultPermissionsList = function() { + return /** @type {!Array} */ (jspb.Message.getRepeatedField(this, 3)); +}; + + +/** + * @param {!Array} value + * @return {!proto.polykey.v1.vaults.List} returns this + */ +proto.polykey.v1.vaults.List.prototype.setVaultPermissionsList = function(value) { + return jspb.Message.setField(this, 3, value || []); +}; + + +/** + * @param {string} value + * @param {number=} opt_index + * @return {!proto.polykey.v1.vaults.List} returns this + */ +proto.polykey.v1.vaults.List.prototype.addVaultPermissions = function(value, opt_index) { + return jspb.Message.addToRepeatedField(this, 3, value, opt_index); +}; + + +/** + * Clears the list making it empty but non-null. + * @return {!proto.polykey.v1.vaults.List} returns this + */ +proto.polykey.v1.vaults.List.prototype.clearVaultPermissionsList = function() { + return this.setVaultPermissionsList([]); +}; + + diff --git a/src/proto/schemas/polykey/v1/agent_service.proto b/src/proto/schemas/polykey/v1/agent_service.proto index 9e78598cd..a4c824360 100644 --- a/src/proto/schemas/polykey/v1/agent_service.proto +++ b/src/proto/schemas/polykey/v1/agent_service.proto @@ -15,7 +15,7 @@ service AgentService { // Vaults rpc VaultsGitInfoGet (polykey.v1.vaults.InfoRequest) returns (stream polykey.v1.vaults.PackChunk); rpc VaultsGitPackGet(stream polykey.v1.vaults.PackChunk) returns (stream polykey.v1.vaults.PackChunk); - rpc VaultsScan (polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.List); + rpc VaultsScan (polykey.v1.utils.EmptyMessage) returns (stream polykey.v1.vaults.List); // Nodes rpc NodesClosestLocalNodesGet (polykey.v1.nodes.Node) returns (polykey.v1.nodes.NodeTable); diff --git a/src/proto/schemas/polykey/v1/vaults/vaults.proto b/src/proto/schemas/polykey/v1/vaults/vaults.proto index 309cef87a..662a77d42 100644 --- a/src/proto/schemas/polykey/v1/vaults/vaults.proto +++ b/src/proto/schemas/polykey/v1/vaults/vaults.proto @@ -17,6 +17,7 @@ message Vault { message List { string vault_name = 1; string vault_id = 2; + repeated string vault_permissions = 3; } message Rename { diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index a1cb598ff..408775ab8 100644 --- a/tests/agent/GRPCClientAgent.test.ts +++ b/tests/agent/GRPCClientAgent.test.ts @@ -285,7 +285,7 @@ describe(GRPCClientAgent.name, () => { proxyHost: localHost, }); clientWithProxies2 = await GRPCClientAgent.createGRPCClientAgent({ - host: '127.0.0.1' as Host, + host: localHost, logger, nodeId: keyManager.getNodeId(), port: revProxy.getIngressPort(), @@ -296,7 +296,7 @@ describe(GRPCClientAgent.name, () => { }, timeout: 5000, }); - }, 26000); + }); afterEach(async () => { await 
testAgentUtils.closeTestAgentClient(clientWithProxies1); await clientFwdProxy1.stop(); @@ -304,7 +304,7 @@ describe(GRPCClientAgent.name, () => { await testAgentUtils.closeTestAgentClient(clientWithProxies2); await clientFwdProxy2.stop(); await clientKeyManager2.stop(); - }, 25000); + }); test('connectionInfoGetter returns correct information for each connection', async () => { // We can't directly spy on the connectionInfoGetter result // but we can check that it called `getConnectionInfoByProxy` properly diff --git a/tests/bin/vaults/vaults.test.ts b/tests/bin/vaults/vaults.test.ts index 6a5c1a974..c50b78815 100644 --- a/tests/bin/vaults/vaults.test.ts +++ b/tests/bin/vaults/vaults.test.ts @@ -8,6 +8,7 @@ import PolykeyAgent from '@/PolykeyAgent'; import * as nodesUtils from '@/nodes/utils'; import * as vaultsUtils from '@/vaults/utils'; import sysexits from '@/utils/sysexits'; +import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; import NotificationsManager from '@/notifications/NotificationsManager'; import * as testBinUtils from '../utils'; import * as testUtils from '../../utils'; @@ -802,4 +803,120 @@ describe('CLI vaults', () => { global.defaultTimeout * 2, ); }); + describe('commandPermissions', () => { + test('Should return nodeIds and their permissions', async () => { + let remoteKeynode1: PolykeyAgent | undefined; + let remoteKeynode2: PolykeyAgent | undefined; + try { + // A ridiculous amount of setup. + const vaultId1 = await polykeyAgent.vaultManager.createVault( + 'vault1' as VaultName, + ); + const vaultId2 = await polykeyAgent.vaultManager.createVault( + 'vault2' as VaultName, + ); + + remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 1'), + nodePath: path.join(dataDir, 'remoteKeynode1'), + }); + remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 2'), + nodePath: path.join(dataDir, 'remoteKeynode2'), + }); + + const targetNodeId1 = remoteKeynode1.keyManager.getNodeId(); + const targetNodeId2 = remoteKeynode2.keyManager.getNodeId(); + await polykeyAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId1), + chain: {}, + }); + await polykeyAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId2), + chain: {}, + }); + await polykeyAgent.nodeManager.setNode(targetNodeId1, { + host: remoteKeynode1.revProxy.getIngressHost(), + port: remoteKeynode1.revProxy.getIngressPort(), + }); + await polykeyAgent.nodeManager.setNode(targetNodeId2, { + host: remoteKeynode2.revProxy.getIngressHost(), + port: remoteKeynode2.revProxy.getIngressPort(), + }); + + await remoteKeynode1.nodeManager.setNode( + polykeyAgent.keyManager.getNodeId(), + { + host: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }, + ); + await remoteKeynode2.nodeManager.setNode( + polykeyAgent.keyManager.getNodeId(), + { + host: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }, + ); + await remoteKeynode1.acl.setNodePerm( + polykeyAgent.keyManager.getNodeId(), + { + gestalt: { + notify: null, + }, + vaults: {}, + }, + ); + await remoteKeynode2.acl.setNodePerm( + polykeyAgent.keyManager.getNodeId(), + { + gestalt: { + notify: null, + }, + vaults: {}, + }, + ); + + await polykeyAgent.vaultManager.shareVault(vaultId1, targetNodeId1); + await polykeyAgent.vaultManager.shareVault(vaultId1, targetNodeId2); + await polykeyAgent.vaultManager.shareVault(vaultId2, targetNodeId1); + + 
const vaultMessage = new vaultsPB.Vault(); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId1)); + + // Now we call and test the command + const command1 = ['vaults', 'permissions', 'vault1', '-np', dataDir]; + const result1 = await testBinUtils.pkStdio( + command1, + { PK_PASSWORD: 'password' }, + dataDir, + ); + expect(result1.exitCode).toBe(0); + expect(result1.stdout).toContain(remoteKeynode1.keyManager.getNodeId()); + expect(result1.stdout).toContain(remoteKeynode2.keyManager.getNodeId()); + expect(result1.stdout).toContain('pull'); + expect(result1.stdout).toContain('clone'); + + // And the other vault + const command2 = ['vaults', 'permissions', 'vault2', '-np', dataDir]; + const result2 = await testBinUtils.pkStdio( + command2, + { PK_PASSWORD: 'password' }, + dataDir, + ); + expect(result2.exitCode).toBe(0); + expect(result2.stdout).toContain(targetNodeId1); + expect(result2.stdout).not.toContain(targetNodeId2); + expect(result2.stdout).toContain('pull'); + expect(result2.stdout).toContain('clone'); + } finally { + await remoteKeynode1?.stop(); + await remoteKeynode1?.destroy(); + await remoteKeynode2?.stop(); + await remoteKeynode2?.destroy(); + } + }); + }); }); diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index a4d519654..ac0f5dba4 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -463,7 +463,7 @@ describe('VaultManager', () => { await vaultManager?.destroy(); } }); - describe('With remote agents', () => { + describe.skip('With remote agents', () => { let allDataDir: string; let keyManager: KeyManager; let fwdProxy: ForwardProxy; @@ -1388,7 +1388,7 @@ describe('VaultManager', () => { // No permissions for vault3 // scanning vaults - const gen = vaultManager.handleScanVaults(nodeId1); + const gen = vaultManager.handleScanVaults(nodeId1, acl); const vaults: Record = {}; for await (const vault of gen) { vaults[vault.vaultId] = [vault.vaultName, vault.vaultPermissions]; @@ -1399,14 +1399,14 @@ describe('VaultManager', () => { // Should throw due to no permission await expect(async () => { - for await (const _ of vaultManager.handleScanVaults(nodeId2)) { + for await (const _ of vaultManager.handleScanVaults(nodeId2, acl)) { // Should throw } }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); // Should throw due to lack of scan permission await gestaltGraph.setGestaltActionByNode(nodeId2, 'notify'); await expect(async () => { - for await (const _ of vaultManager.handleScanVaults(nodeId2)) { + for await (const _ of vaultManager.handleScanVaults(nodeId2, acl)) { // Should throw } }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); From 10cbcce4624746836bd158c062006fbd8d9cf637 Mon Sep 17 00:00:00 2001 From: Brian Botha Date: Mon, 28 Feb 2022 17:25:41 +1100 Subject: [PATCH 07/10] Fixes #259 replacing `un/shareVault` with `vaultsPermissionSet`, `vaultsPermissionGet` and `vaultsPermissionUnset`. 
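A minimal sketch of the replacement client API, distilled from the
CommandShare, CommandUnshare and CommandPermissions changes below
(`pkClient`, an `auth` metadata object as supplied by
`binUtils.retryAuthentication`, `vaultName` and an encoded node id
`nodeIdEncoded` are assumed to be in scope):

    const vaultMessage = new vaultsPB.Vault();
    vaultMessage.setNameOrId(vaultName);
    const nodeMessage = new nodesPB.Node();
    nodeMessage.setNodeId(nodeIdEncoded);
    const permissionsMessage = new vaultsPB.Permissions();
    permissionsMessage.setVault(vaultMessage);
    permissionsMessage.setNode(nodeMessage);
    permissionsMessage.setVaultPermissionsList(['pull', 'clone']);
    // Grant pull and clone permissions on the vault to the node
    await pkClient.grpcClient.vaultsPermissionSet(permissionsMessage, auth);
    // Stream back each node's permissions for the vault
    const stream = pkClient.grpcClient.vaultsPermissionGet(vaultMessage, auth);
    for await (const permission of stream) {
      const nodeId = permission.getNode()?.getNodeId();
      const actions = permission.getVaultPermissionsList().join(', ');
      // e.g. `${nodeId}: ${actions}`
    }
    // Revoke the permissions again
    await pkClient.grpcClient.vaultsPermissionUnset(permissionsMessage, auth);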
--- src/PolykeyAgent.ts | 1 + src/acl/types.ts | 4 +- src/agent/service/vaultsGitInfoGet.ts | 2 +- src/agent/service/vaultsScan.ts | 5 +- src/bin/vaults/CommandPermissions.ts | 5 +- src/bin/vaults/CommandShare.ts | 15 +- src/bin/vaults/CommandUnshare.ts | 14 +- src/client/GRPCClientClient.ts | 14 +- src/client/service/index.ts | 14 +- src/client/service/vaultsPermissionGet.ts | 67 ++ src/client/service/vaultsPermissionSet.ts | 82 ++ src/client/service/vaultsPermissionUnset.ts | 81 ++ src/client/service/vaultsPermissionsGet.ts | 55 -- src/client/service/vaultsShare.ts | 50 -- src/client/service/vaultsUnshare.ts | 50 -- .../js/polykey/v1/client_service_grpc_pb.d.ts | 98 +-- .../js/polykey/v1/client_service_grpc_pb.js | 98 +-- src/proto/js/polykey/v1/vaults/vaults_pb.d.ts | 104 +-- src/proto/js/polykey/v1/vaults/vaults_pb.js | 712 ++---------------- .../schemas/polykey/v1/client_service.proto | 6 +- .../schemas/polykey/v1/vaults/vaults.proto | 18 +- src/vaults/types.ts | 1 - tests/bin/vaults/vaults.test.ts | 117 --- tests/client/rpcVaults.test.ts | 50 +- tests/client/utils.ts | 1 + 25 files changed, 479 insertions(+), 1185 deletions(-) create mode 100644 src/client/service/vaultsPermissionGet.ts create mode 100644 src/client/service/vaultsPermissionSet.ts create mode 100644 src/client/service/vaultsPermissionUnset.ts delete mode 100644 src/client/service/vaultsPermissionsGet.ts delete mode 100644 src/client/service/vaultsShare.ts delete mode 100644 src/client/service/vaultsUnshare.ts diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts index 82c3402be..5329b8747 100644 --- a/src/PolykeyAgent.ts +++ b/src/PolykeyAgent.ts @@ -578,6 +578,7 @@ class PolykeyAgent { sessionManager: this.sessionManager, vaultManager: this.vaultManager, sigchain: this.sigchain, + acl: this.acl, grpcServerClient: this.grpcServerClient, grpcServerAgent: this.grpcServerAgent, fwdProxy: this.fwdProxy, diff --git a/src/acl/types.ts b/src/acl/types.ts index 7770edd7d..d5e0362e3 100644 --- a/src/acl/types.ts +++ b/src/acl/types.ts @@ -1,6 +1,6 @@ import type { Opaque } from '../types'; import type { GestaltAction } from '../gestalts/types'; -import type { VaultActions, VaultId } from '../vaults/types'; +import type { VaultActions, VaultIdString } from '../vaults/types'; import type { Id } from '@matrixai/id'; type PermissionId = Opaque<'PermissionId', Id>; @@ -8,7 +8,7 @@ type PermissionIdString = Opaque<'PermissionIdString', string>; type Permission = { gestalt: GestaltActions; - vaults: Record; + vaults: Record; }; type GestaltActions = Partial>; diff --git a/src/agent/service/vaultsGitInfoGet.ts b/src/agent/service/vaultsGitInfoGet.ts index ca906c642..6391b3f7b 100644 --- a/src/agent/service/vaultsGitInfoGet.ts +++ b/src/agent/service/vaultsGitInfoGet.ts @@ -63,7 +63,7 @@ function vaultsGitInfoGet({ return; } const vaultPerms = permissions.vaults[vaultId]; - if (vaultPerms[actionType] !== null) { + if (vaultPerms?.[actionType] !== null) { await genWritable.throw( new vaultsErrors.ErrorVaultsPermissionDenied( `${nodeIdEncoded} does not have permission to ${actionType} from vault ${vaultsUtils.encodeVaultId( diff --git a/src/agent/service/vaultsScan.ts b/src/agent/service/vaultsScan.ts index 11b73d21f..cb4447b03 100644 --- a/src/agent/service/vaultsScan.ts +++ b/src/agent/service/vaultsScan.ts @@ -2,7 +2,6 @@ import type * as grpc from '@grpc/grpc-js'; import type VaultManager from '../../vaults/VaultManager'; import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import type { ConnectionInfoGet } from 
'../../agent/types'; -import type ACL from '../../acl/ACL'; import * as agentErrors from '../../agent/errors'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as vaultsUtils from '../../vaults/utils'; @@ -10,11 +9,9 @@ import * as grpcUtils from '../../grpc/utils'; function vaultsScan({ vaultManager, - acl, connectionInfoGet, }: { vaultManager: VaultManager; - acl: ACL; connectionInfoGet: ConnectionInfoGet; }) { return async ( @@ -31,7 +28,7 @@ function vaultsScan({ } const nodeId = connectionInfo.nodeId; try { - const listResponse = vaultManager.handleScanVaults(nodeId, acl); + const listResponse = vaultManager.handleScanVaults(nodeId); for await (const { vaultId, vaultName, diff --git a/src/bin/vaults/CommandPermissions.ts b/src/bin/vaults/CommandPermissions.ts index d9c667ac0..d45117249 100644 --- a/src/bin/vaults/CommandPermissions.ts +++ b/src/bin/vaults/CommandPermissions.ts @@ -12,7 +12,6 @@ class CommandPermissions extends CommandPolykey { this.alias('perms'); this.description('Sets the permissions of a vault for Node Ids'); this.argument('', 'Name or ID of the vault'); - // This.argument('[nodeId]', '(optional) nodeId to check permission on'); this.addOption(binOptions.nodeId); this.addOption(binOptions.clientHost); this.addOption(binOptions.clientPort); @@ -54,13 +53,13 @@ class CommandPermissions extends CommandPolykey { const data: Array = []; await binUtils.retryAuthentication(async (auth) => { - const permissionStream = pkClient.grpcClient.vaultsPermissionsGet( + const permissionStream = pkClient.grpcClient.vaultsPermissionGet( vaultMessage, auth, ); for await (const permission of permissionStream) { const nodeId = permission.getNode()?.getNodeId(); - const actions = permission.getActionsList().join(', '); + const actions = permission.getVaultPermissionsList().join(', '); data.push(`${nodeId}: ${actions}`); } return true; diff --git a/src/bin/vaults/CommandShare.ts b/src/bin/vaults/CommandShare.ts index 943d76468..a37658c0b 100644 --- a/src/bin/vaults/CommandShare.ts +++ b/src/bin/vaults/CommandShare.ts @@ -52,14 +52,19 @@ class CommandShare extends CommandPolykey { logger: this.logger.getChild(PolykeyClient.name), }); const vaultMessage = new vaultsPB.Vault(); - const nodeMessage = new nodesPB.Node(); - const setVaultPermsMessage = new vaultsPB.PermSet(); - setVaultPermsMessage.setVault(vaultMessage); - setVaultPermsMessage.setNode(nodeMessage); vaultMessage.setNameOrId(vaultName); + const nodeMessage = new nodesPB.Node(); nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId)); + const vaultsPermissionsList = new vaultsPB.Permissions(); + vaultsPermissionsList.setVault(vaultMessage); + vaultsPermissionsList.setNode(nodeMessage); + vaultsPermissionsList.setVaultPermissionsList(['pull', 'clone']); await binUtils.retryAuthentication( - (auth) => pkClient.grpcClient.vaultsShare(setVaultPermsMessage, auth), + (auth) => + pkClient.grpcClient.vaultsPermissionSet( + vaultsPermissionsList, + auth, + ), meta, ); } finally { diff --git a/src/bin/vaults/CommandUnshare.ts b/src/bin/vaults/CommandUnshare.ts index 23aa99d3d..5a189632d 100644 --- a/src/bin/vaults/CommandUnshare.ts +++ b/src/bin/vaults/CommandUnshare.ts @@ -51,16 +51,20 @@ class CommandUnshare extends CommandPolykey { port: clientOptions.clientPort, logger: this.logger.getChild(PolykeyClient.name), }); - const unsetVaultPermsMessage = new vaultsPB.PermUnset(); + const vaultsPermissionsMessage = new vaultsPB.Permissions(); const vaultMessage = new vaultsPB.Vault(); - const nodeMessage = new 
nodesPB.Node(); - unsetVaultPermsMessage.setVault(vaultMessage); - unsetVaultPermsMessage.setNode(nodeMessage); vaultMessage.setNameOrId(vaultName); + const nodeMessage = new nodesPB.Node(); nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId)); + vaultsPermissionsMessage.setVault(vaultMessage); + vaultsPermissionsMessage.setNode(nodeMessage); + vaultsPermissionsMessage.setVaultPermissionsList(['clone', 'pull']); await binUtils.retryAuthentication( (auth) => - pkClient.grpcClient.vaultsUnshare(unsetVaultPermsMessage, auth), + pkClient.grpcClient.vaultsPermissionUnset( + vaultsPermissionsMessage, + auth, + ), meta, ); } finally { diff --git a/src/client/GRPCClientClient.ts b/src/client/GRPCClientClient.ts index daa9ac536..3b07305ea 100644 --- a/src/client/GRPCClientClient.ts +++ b/src/client/GRPCClientClient.ts @@ -186,26 +186,26 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsPermissionsGet(...args) { - return grpcUtils.promisifyReadableStreamCall( + public vaultsPermissionGet(...args) { + return grpcUtils.promisifyReadableStreamCall( this.client, - this.client.vaultsPermissionsGet, + this.client.vaultsPermissionGet, )(...args); } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsShare(...args) { + public vaultsPermissionSet(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.vaultsShare, + this.client.vaultsPermissionSet, )(...args); } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsUnshare(...args) { + public vaultsPermissionUnset(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.vaultsUnshare, + this.client.vaultsPermissionUnset, )(...args); } diff --git a/src/client/service/index.ts b/src/client/service/index.ts index 75b022b3e..50e282fbb 100644 --- a/src/client/service/index.ts +++ b/src/client/service/index.ts @@ -13,6 +13,7 @@ import type { NotificationsManager } from '../../notifications'; import type { Discovery } from '../../discovery'; import type { Sigchain } from '../../sigchain'; import type { GRPCServer } from '../../grpc'; +import type ACL from '../../acl/ACL'; import type ForwardProxy from '../../network/ForwardProxy'; import type ReverseProxy from '../../network/ReverseProxy'; import type { IClientServiceServer } from '../../proto/js/polykey/v1/client_service_grpc_pb'; @@ -66,12 +67,12 @@ import vaultsCreate from './vaultsCreate'; import vaultsDelete from './vaultsDelete'; import vaultsList from './vaultsList'; import vaultsLog from './vaultsLog'; -import vaultsPermissionsGet from './vaultsPermissionsGet'; +import vaultsPermissionGet from './vaultsPermissionGet'; +import vaultsPermissionSet from './vaultsPermissionSet'; +import vaultsPermissionUnset from './vaultsPermissionUnset'; import vaultsPull from './vaultsPull'; import vaultsRename from './vaultsRename'; import vaultsScan from './vaultsScan'; -import vaultsShare from './vaultsShare'; -import vaultsUnshare from './vaultsUnshare'; import vaultsVersion from './vaultsVersion'; import vaultsSecretsDelete from './vaultsSecretsDelete'; import vaultsSecretsEdit from './vaultsSecretsEdit'; @@ -104,6 +105,7 @@ function createService({ notificationsManager: NotificationsManager; discovery: Discovery; sigchain: Sigchain; + acl: ACL; grpcServerClient: GRPCServer; grpcServerAgent: GRPCServer; fwdProxy: ForwardProxy; @@ -169,12 +171,12 @@ function createService({ vaultsDelete: vaultsDelete(container), vaultsList: vaultsList(container), vaultsLog: vaultsLog(container), - 
vaultsPermissionsGet: vaultsPermissionsGet(container),
+    vaultsPermissionSet: vaultsPermissionSet(container),
+    vaultsPermissionUnset: vaultsPermissionUnset(container),
+    vaultsPermissionGet: vaultsPermissionGet(container),
     vaultsPull: vaultsPull(container),
     vaultsRename: vaultsRename(container),
     vaultsScan: vaultsScan(container),
-    vaultsShare: vaultsShare(container),
-    vaultsUnshare: vaultsUnshare(container),
     vaultsVersion: vaultsVersion(container),
     vaultsSecretsDelete: vaultsSecretsDelete(container),
     vaultsSecretsEdit: vaultsSecretsEdit(container),
diff --git a/src/client/service/vaultsPermissionGet.ts b/src/client/service/vaultsPermissionGet.ts
new file mode 100644
index 000000000..cb901e27b
--- /dev/null
+++ b/src/client/service/vaultsPermissionGet.ts
@@ -0,0 +1,67 @@
+import type { Authenticate } from '../types';
+import type { VaultManager } from '../../vaults';
+import type { VaultName } from '../../vaults/types';
+import type * as grpc from '@grpc/grpc-js';
+import type { VaultActions } from '../../vaults/types';
+import type ACL from '../../acl/ACL';
+import type { NodeId, NodeIdEncoded } from 'nodes/types';
+import { IdInternal } from '@matrixai/id';
+import { utils as grpcUtils } from '../../grpc';
+import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb';
+import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
+import * as validationUtils from '../../validation/utils';
+import * as nodesUtils from '../../nodes/utils';
+
+function vaultsPermissionGet({
+  authenticate,
+  vaultManager,
+  acl,
+}: {
+  authenticate: Authenticate;
+  vaultManager: VaultManager;
+  acl: ACL;
+}) {
+  return async (
+    call: grpc.ServerWritableStream<vaultsPB.Vault, vaultsPB.Permissions>,
+  ): Promise<void> => {
+    const genWritable = grpcUtils.generatorWritable(call);
+    try {
+      const vaultMessage = call.request;
+      const metadata = await authenticate(call.metadata);
+      call.sendMetadata(metadata);
+      // Getting vaultId
+      const nameOrId = vaultMessage.getNameOrId();
+      let vaultId = await vaultManager.getVaultId(nameOrId as VaultName);
+      vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId);
+
+      // Getting permissions
+      const rawPermissions = await acl.getVaultPerm(vaultId);
+      const permissionList: Record<NodeIdEncoded, VaultActions> = {};
+      // Getting the relevant information.
+      for (const nodeId in rawPermissions) {
+        permissionList[nodeId] = rawPermissions[nodeId].vaults[vaultId];
+      }
+
+      const vaultPermissionsMessage = new vaultsPB.Permissions();
+      vaultPermissionsMessage.setVault(vaultMessage);
+      const nodeMessage = new nodesPB.Node();
+
+      // Constructing the message.
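+      // One Permissions message is written to the stream per node below:
+      // the node ID is decoded from its record key, re-encoded for the wire,
+      // and that node's granted actions are attached as the repeated
+      // vault_permissions field before each write.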
+      for (const nodeIdString in permissionList) {
+        const nodeId = IdInternal.fromString<NodeId>(nodeIdString);
+        nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId));
+        vaultPermissionsMessage.setNode(nodeMessage);
+        const actions = Object.keys(permissionList[nodeIdString]);
+        vaultPermissionsMessage.setVaultPermissionsList(actions);
+        await genWritable.next(vaultPermissionsMessage);
+      }
+      await genWritable.next(null);
+      return;
+    } catch (err) {
+      await genWritable.throw(err);
+      return;
+    }
+  };
+}
+
+export default vaultsPermissionGet;
diff --git a/src/client/service/vaultsPermissionSet.ts b/src/client/service/vaultsPermissionSet.ts
new file mode 100644
index 000000000..8faa4f710
--- /dev/null
+++ b/src/client/service/vaultsPermissionSet.ts
@@ -0,0 +1,82 @@
+import type { Authenticate } from '../types';
+import type { VaultName } from '../../vaults/types';
+import type { VaultManager } from '../../vaults';
+import type GestaltGraph from '../../gestalts/GestaltGraph';
+import type ACL from '../../acl/ACL';
+import type NotificationsManager from '../../notifications/NotificationsManager';
+import type { VaultActions } from '../../vaults/types';
+import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
+import * as grpc from '@grpc/grpc-js';
+import * as vaultsUtils from '../../vaults/utils';
+import * as vaultsErrors from '../../vaults/errors';
+import * as validationUtils from '../../validation/utils';
+import { utils as grpcUtils } from '../../grpc';
+import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
+
+function vaultsPermissionSet({
+  vaultManager,
+  authenticate,
+  gestaltGraph,
+  acl,
+  notificationsManager,
+}: {
+  vaultManager: VaultManager;
+  authenticate: Authenticate;
+  gestaltGraph: GestaltGraph;
+  acl: ACL;
+  notificationsManager: NotificationsManager;
+}) {
+  return async (
+    call: grpc.ServerUnaryCall<vaultsPB.Permissions, utilsPB.StatusMessage>,
+    callback: grpc.sendUnaryData<utilsPB.StatusMessage>,
+  ): Promise<void> => {
+    try {
+      // Checking session token
+      const metadata = await authenticate(call.metadata);
+      call.sendMetadata(metadata);
+      const vaultsPermissionsMessage = call.request;
+      const vaultMessage = vaultsPermissionsMessage.getVault();
+      const nodeMessage = vaultsPermissionsMessage.getNode();
+      if (vaultMessage == null || nodeMessage == null) {
+        callback({ code: grpc.status.NOT_FOUND }, null);
+        return;
+      }
+      // Parsing VaultId
+      const nameOrId = vaultMessage.getNameOrId();
+      let vaultId = await vaultManager.getVaultId(nameOrId as VaultName);
+      vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId);
+      // Parsing NodeId
+      const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId());
+      // Parsing actions
+      const actions = vaultsPermissionsMessage
+        .getVaultPermissionsList()
+        .map((vaultAction) => validationUtils.parseVaultAction(vaultAction));
+      // Checking if vault exists
+      const vaultMeta = await vaultManager.getVaultMeta(vaultId);
+      if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined();
+      // Setting permissions
+      const actionsSet: VaultActions = {};
+      await gestaltGraph.setGestaltActionByNode(nodeId, 'scan');
+      for (const action of actions) {
+        await acl.setVaultAction(vaultId, nodeId, action);
+        actionsSet[action] = null;
+      }
+      // Sending notification
+      await notificationsManager.sendNotification(nodeId, {
+        type: 'VaultShare',
+        vaultId: vaultsUtils.encodeVaultId(vaultId),
+        vaultName: vaultMeta.vaultName,
+        actions: actionsSet,
+      });
+      // Formatting response
+      const response = new utilsPB.StatusMessage().setSuccess(true);
+      callback(null, response);
+      return;
+    } catch (e) {
+      callback(grpcUtils.fromError(e));
+      return;
+    }
+  };
+}
+
+export default vaultsPermissionSet;
diff --git a/src/client/service/vaultsPermissionUnset.ts b/src/client/service/vaultsPermissionUnset.ts
new file mode 100644
index 000000000..4217a0edc
--- /dev/null
+++ b/src/client/service/vaultsPermissionUnset.ts
@@ -0,0 +1,81 @@
+import type { Authenticate } from '../types';
+import type { VaultName } from '../../vaults/types';
+import type { VaultManager } from '../../vaults';
+import type GestaltGraph from '../../gestalts/GestaltGraph';
+import type ACL from '../../acl/ACL';
+import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
+import * as grpc from '@grpc/grpc-js';
+import * as vaultsErrors from '../../vaults/errors';
+import * as validationUtils from '../../validation/utils';
+import { utils as grpcUtils } from '../../grpc';
+import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
+
+function vaultsPermissionUnset({
+  vaultManager,
+  authenticate,
+  gestaltGraph,
+  acl,
+}: {
+  vaultManager: VaultManager;
+  authenticate: Authenticate;
+  gestaltGraph: GestaltGraph;
+  acl: ACL;
+}) {
+  return async (
+    call: grpc.ServerUnaryCall<vaultsPB.Permissions, utilsPB.StatusMessage>,
+    callback: grpc.sendUnaryData<utilsPB.StatusMessage>,
+  ): Promise<void> => {
+    try {
+      // Checking session token
+      const metadata = await authenticate(call.metadata);
+      call.sendMetadata(metadata);
+      const vaultsPermissionsMessage = call.request;
+      const vaultMessage = vaultsPermissionsMessage.getVault();
+      const nodeMessage = vaultsPermissionsMessage.getNode();
+      if (vaultMessage == null || nodeMessage == null) {
+        callback({ code: grpc.status.NOT_FOUND }, null);
+        return;
+      }
+      // Parsing VaultId
+      const nameOrId = vaultMessage.getNameOrId();
+      let vaultId = await vaultManager.getVaultId(nameOrId as VaultName);
+      vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId);
+      // Parsing NodeId
+      const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId());
+      // Parsing actions
+      const actions = vaultsPermissionsMessage
+        .getVaultPermissionsList()
+        .map((vaultAction) => validationUtils.parseVaultAction(vaultAction));
+      // Checking if vault exists
+      const vaultMeta = await vaultManager.getVaultMeta(vaultId);
+      if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined();
+      // Unsetting permissions
+      await gestaltGraph.setGestaltActionByNode(nodeId, 'scan');
+      for (const action of actions) {
+        await acl.unsetVaultAction(vaultId, nodeId, action);
+      }
+      // We need to check if there are still shared vaults.
+      const nodePermissions = await acl.getNodePerm(nodeId);
+      // Remove scan permissions if no more shared vaults
+      if (nodePermissions != null) {
+        // Counting total number of permissions
+        const totalPermissions = Object.keys(nodePermissions.vaults)
+          .map((key) => Object.keys(nodePermissions.vaults[key]).length)
+          .reduce((prev, current) => current + prev, 0);
+        // If no permissions are left then we remove the scan permission
+        if (totalPermissions === 0) {
+          await gestaltGraph.unsetGestaltActionByNode(nodeId, 'scan');
+        }
+      }
+      // Formatting response
+      const response = new utilsPB.StatusMessage().setSuccess(true);
+      callback(null, response);
+      return;
+    } catch (e) {
+      callback(grpcUtils.fromError(e));
+      return;
+    }
+  };
}
+
+export default vaultsPermissionUnset;
diff --git a/src/client/service/vaultsPermissionsGet.ts b/src/client/service/vaultsPermissionsGet.ts
deleted file mode 100644
index a1d2e69c4..000000000
--- a/src/client/service/vaultsPermissionsGet.ts
+++ /dev/null
@@ -1,55 +0,0 @@
-import type { Authenticate } from '../types';
-import type { VaultManager } from '../../vaults';
-import type { VaultName } from '../../vaults/types';
-import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
-import type * as grpc from '@grpc/grpc-js';
-import { utils as grpcUtils } from '../../grpc';
-import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb';
-import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb';
-import * as validationUtils from '../../validation/utils';
-
-function vaultsPermissionsGet({
-  authenticate,
-  vaultManager,
-}: {
-  authenticate: Authenticate;
-  vaultManager: VaultManager;
-}) {
-  return async (
-    call: grpc.ServerWritableStream<vaultsPB.Vault, permissionsPB.NodeActions>,
-  ): Promise<void> => {
-    const genWritable = grpcUtils.generatorWritable(call);
-    try {
-      const vaultMessage = call.request;
-      const metadata = await authenticate(call.metadata);
-      call.sendMetadata(metadata);
-      // Getting vaultId
-      const nameOrId = vaultMessage.getNameOrId();
-      let vaultId = await vaultManager.getVaultId(nameOrId as VaultName);
-      vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId);
-
-      const permissionList = await vaultManager.getVaultPermission(vaultId);
-      const nodeActionsMessage = new permissionsPB.NodeActions();
-      const nodeMessage = new nodesPB.Node();
-
-      // Constructing the message.
- for (const nodeId in permissionList) { - nodeMessage.setNodeId(nodeId); - nodeActionsMessage.setNode(nodeMessage); - nodeActionsMessage.clearActionsList(); - for (const action in permissionList[nodeId]) { - nodeActionsMessage.addActions(action); - } - await genWritable.next(nodeActionsMessage); - } - - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }; -} - -export default vaultsPermissionsGet; diff --git a/src/client/service/vaultsShare.ts b/src/client/service/vaultsShare.ts deleted file mode 100644 index c43d6b6b2..000000000 --- a/src/client/service/vaultsShare.ts +++ /dev/null @@ -1,50 +0,0 @@ -import type { Authenticate } from '../types'; -import type { VaultManager } from '../../vaults'; -import type { VaultName } from '../../vaults/types'; -import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import * as grpc from '@grpc/grpc-js'; -import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; - -function vaultsShare({ - authenticate, - vaultManager, -}: { - authenticate: Authenticate; - vaultManager: VaultManager; -}) { - return async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); - await vaultManager.shareVault(vaultId, nodeId); - const response = new utilsPB.StatusMessage(); - response.setSuccess(true); - callback(null, response); - return; - } catch (e) { - callback(grpcUtils.fromError(e)); - return; - } - }; -} - -export default vaultsShare; diff --git a/src/client/service/vaultsUnshare.ts b/src/client/service/vaultsUnshare.ts deleted file mode 100644 index bdc2b1d01..000000000 --- a/src/client/service/vaultsUnshare.ts +++ /dev/null @@ -1,50 +0,0 @@ -import type { Authenticate } from '../types'; -import type { VaultManager } from '../../vaults'; -import type { VaultName } from '../../vaults/types'; -import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as validationUtils from '../../validation/utils'; - -function vaultsUnshare({ - authenticate, - vaultManager, -}: { - authenticate: Authenticate; - vaultManager: VaultManager; -}) { - return async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); - await vaultManager.unshareVault(vaultId, nodeId); - const response = new utilsPB.StatusMessage(); - response.setSuccess(true); - callback(null, response); - return; - } catch (e) { - callback(grpcUtils.fromError(e)); - return; - } - }; -} - -export default vaultsUnshare; diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts index 0dafbee89..42258e4bc 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts @@ -52,12 +52,12 @@ interface IClientServiceService extends grpc.ServiceDefinition; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsPermissionsGet extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsPermissionsGet"; - requestStream: false; - responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} -interface IClientServiceService_IVaultsShare extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsShare"; - requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} -interface IClientServiceService_IVaultsUnshare extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsUnshare"; - requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} interface IClientServiceService_IVaultsVersion extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/VaultsVersion"; requestStream: false; @@ -436,6 +409,33 @@ interface IClientServiceService_IVaultsScan extends grpc.MethodDefinition; responseDeserialize: grpc.deserialize; } +interface IClientServiceService_IVaultsPermissionSet extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsPermissionSet"; + requestStream: false; + responseStream: false; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} +interface IClientServiceService_IVaultsPermissionUnset extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsPermissionUnset"; + requestStream: false; + responseStream: false; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} +interface IClientServiceService_IVaultsPermissionGet extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsPermissionGet"; + requestStream: false; + responseStream: true; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} interface IClientServiceService_IIdentitiesAuthenticate extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/IdentitiesAuthenticate"; requestStream: false; @@ -698,12 +698,12 @@ export interface IClientServiceServer extends grpc.UntypedServiceImplementation vaultsSecretsNew: grpc.handleUnaryCall; vaultsSecretsNewDir: grpc.handleUnaryCall; vaultsSecretsStat: grpc.handleUnaryCall; - vaultsPermissionsGet: grpc.handleServerStreamingCall; - vaultsShare: grpc.handleUnaryCall; - 
vaultsUnshare: grpc.handleUnaryCall; vaultsVersion: grpc.handleUnaryCall; vaultsLog: grpc.handleServerStreamingCall; vaultsScan: grpc.handleServerStreamingCall; + vaultsPermissionSet: grpc.handleUnaryCall; + vaultsPermissionUnset: grpc.handleUnaryCall; + vaultsPermissionGet: grpc.handleServerStreamingCall; identitiesAuthenticate: grpc.handleServerStreamingCall; identitiesAuthenticatedGet: grpc.handleServerStreamingCall; identitiesTokenPut: grpc.handleUnaryCall; @@ -828,14 +828,6 @@ export interface IClientServiceClient { vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; - vaultsPermissionsGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; - vaultsPermissionsGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; @@ -843,6 +835,14 @@ export interface IClientServiceClient { vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, 
options?: Partial): grpc.ClientReadableStream; vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; + vaultsPermissionGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, options?: Partial): grpc.ClientReadableStream; identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; identitiesAuthenticatedGet(request: polykey_v1_identities_identities_pb.OptionalProvider, options?: Partial): grpc.ClientReadableStream; @@ -1013,14 +1013,6 @@ export class ClientServiceClient extends grpc.Client implements IClientServiceCl public vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; public vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; public vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; - public vaultsPermissionsGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; - public vaultsPermissionsGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - public vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: 
polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; @@ -1028,6 +1020,14 @@ export class ClientServiceClient extends grpc.Client implements IClientServiceCl public vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionGet(request: polykey_v1_vaults_vaults_pb.Vault, 
options?: Partial): grpc.ClientReadableStream; + public vaultsPermissionGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticatedGet(request: polykey_v1_identities_identities_pb.OptionalProvider, options?: Partial): grpc.ClientReadableStream; diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.js b/src/proto/js/polykey/v1/client_service_grpc_pb.js index e845b72c2..6b6e4bf1b 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.js @@ -267,17 +267,6 @@ function deserialize_polykey_v1_permissions_Actions(buffer_arg) { return polykey_v1_permissions_permissions_pb.Actions.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_permissions_NodeActions(arg) { - if (!(arg instanceof polykey_v1_permissions_permissions_pb.NodeActions)) { - throw new Error('Expected argument of type polykey.v1.permissions.NodeActions'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_permissions_NodeActions(buffer_arg) { - return polykey_v1_permissions_permissions_pb.NodeActions.deserializeBinary(new Uint8Array(buffer_arg)); -} - function serialize_polykey_v1_secrets_Directory(arg) { if (!(arg instanceof polykey_v1_secrets_secrets_pb.Directory)) { throw new Error('Expected argument of type polykey.v1.secrets.Directory'); @@ -410,26 +399,15 @@ function deserialize_polykey_v1_vaults_Mkdir(buffer_arg) { return polykey_v1_vaults_vaults_pb.Mkdir.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_PermSet(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.PermSet)) { - throw new Error('Expected argument of type polykey.v1.vaults.PermSet'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_PermSet(buffer_arg) { - return polykey_v1_vaults_vaults_pb.PermSet.deserializeBinary(new Uint8Array(buffer_arg)); -} - -function serialize_polykey_v1_vaults_PermUnset(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.PermUnset)) { - throw new Error('Expected argument of type polykey.v1.vaults.PermUnset'); +function serialize_polykey_v1_vaults_Permissions(arg) { + if (!(arg instanceof polykey_v1_vaults_vaults_pb.Permissions)) { + throw new Error('Expected argument of type polykey.v1.vaults.Permissions'); } return Buffer.from(arg.serializeBinary()); } -function deserialize_polykey_v1_vaults_PermUnset(buffer_arg) { - return polykey_v1_vaults_vaults_pb.PermUnset.deserializeBinary(new Uint8Array(buffer_arg)); +function deserialize_polykey_v1_vaults_Permissions(buffer_arg) { + return polykey_v1_vaults_vaults_pb.Permissions.deserializeBinary(new Uint8Array(buffer_arg)); } function serialize_polykey_v1_vaults_Pull(arg) { @@ -856,39 +834,6 @@ vaultsList: { responseSerialize: serialize_polykey_v1_secrets_Stat, responseDeserialize: deserialize_polykey_v1_secrets_Stat, }, - vaultsPermissionsGet: { - path: '/polykey.v1.ClientService/VaultsPermissionsGet', - requestStream: false, - responseStream: true, - requestType: polykey_v1_vaults_vaults_pb.Vault, - responseType: polykey_v1_permissions_permissions_pb.NodeActions, - requestSerialize: 
serialize_polykey_v1_vaults_Vault, - requestDeserialize: deserialize_polykey_v1_vaults_Vault, - responseSerialize: serialize_polykey_v1_permissions_NodeActions, - responseDeserialize: deserialize_polykey_v1_permissions_NodeActions, - }, - vaultsShare: { - path: '/polykey.v1.ClientService/VaultsShare', - requestStream: false, - responseStream: false, - requestType: polykey_v1_vaults_vaults_pb.PermSet, - responseType: polykey_v1_utils_utils_pb.StatusMessage, - requestSerialize: serialize_polykey_v1_vaults_PermSet, - requestDeserialize: deserialize_polykey_v1_vaults_PermSet, - responseSerialize: serialize_polykey_v1_utils_StatusMessage, - responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, - }, - vaultsUnshare: { - path: '/polykey.v1.ClientService/VaultsUnshare', - requestStream: false, - responseStream: false, - requestType: polykey_v1_vaults_vaults_pb.PermUnset, - responseType: polykey_v1_utils_utils_pb.StatusMessage, - requestSerialize: serialize_polykey_v1_vaults_PermUnset, - requestDeserialize: deserialize_polykey_v1_vaults_PermUnset, - responseSerialize: serialize_polykey_v1_utils_StatusMessage, - responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, - }, vaultsVersion: { path: '/polykey.v1.ClientService/VaultsVersion', requestStream: false, @@ -922,6 +867,39 @@ vaultsList: { responseSerialize: serialize_polykey_v1_vaults_List, responseDeserialize: deserialize_polykey_v1_vaults_List, }, + vaultsPermissionSet: { + path: '/polykey.v1.ClientService/VaultsPermissionSet', + requestStream: false, + responseStream: false, + requestType: polykey_v1_vaults_vaults_pb.Permissions, + responseType: polykey_v1_utils_utils_pb.StatusMessage, + requestSerialize: serialize_polykey_v1_vaults_Permissions, + requestDeserialize: deserialize_polykey_v1_vaults_Permissions, + responseSerialize: serialize_polykey_v1_utils_StatusMessage, + responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, + }, + vaultsPermissionUnset: { + path: '/polykey.v1.ClientService/VaultsPermissionUnset', + requestStream: false, + responseStream: false, + requestType: polykey_v1_vaults_vaults_pb.Permissions, + responseType: polykey_v1_utils_utils_pb.StatusMessage, + requestSerialize: serialize_polykey_v1_vaults_Permissions, + requestDeserialize: deserialize_polykey_v1_vaults_Permissions, + responseSerialize: serialize_polykey_v1_utils_StatusMessage, + responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, + }, + vaultsPermissionGet: { + path: '/polykey.v1.ClientService/VaultsPermissionGet', + requestStream: false, + responseStream: true, + requestType: polykey_v1_vaults_vaults_pb.Vault, + responseType: polykey_v1_vaults_vaults_pb.Permissions, + requestSerialize: serialize_polykey_v1_vaults_Vault, + requestDeserialize: deserialize_polykey_v1_vaults_Vault, + responseSerialize: serialize_polykey_v1_vaults_Permissions, + responseDeserialize: deserialize_polykey_v1_vaults_Permissions, + }, // Identities identitiesAuthenticate: { path: '/polykey.v1.ClientService/IdentitiesAuthenticate', diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts index 8cdea111b..63e148525 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts @@ -195,113 +195,37 @@ export namespace Stat { } } -export class PermSet extends jspb.Message { +export class Permissions extends jspb.Message { hasVault(): boolean; clearVault(): void; getVault(): Vault | undefined; - setVault(value?: Vault): PermSet; + setVault(value?: 
Vault): Permissions; hasNode(): boolean; clearNode(): void; getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; - setNode(value?: polykey_v1_nodes_nodes_pb.Node): PermSet; - - serializeBinary(): Uint8Array; - toObject(includeInstance?: boolean): PermSet.AsObject; - static toObject(includeInstance: boolean, msg: PermSet): PermSet.AsObject; - static extensions: {[key: number]: jspb.ExtensionFieldInfo}; - static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; - static serializeBinaryToWriter(message: PermSet, writer: jspb.BinaryWriter): void; - static deserializeBinary(bytes: Uint8Array): PermSet; - static deserializeBinaryFromReader(message: PermSet, reader: jspb.BinaryReader): PermSet; -} - -export namespace PermSet { - export type AsObject = { - vault?: Vault.AsObject, - node?: polykey_v1_nodes_nodes_pb.Node.AsObject, - } -} - -export class PermUnset extends jspb.Message { - - hasVault(): boolean; - clearVault(): void; - getVault(): Vault | undefined; - setVault(value?: Vault): PermUnset; - - hasNode(): boolean; - clearNode(): void; - getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; - setNode(value?: polykey_v1_nodes_nodes_pb.Node): PermUnset; - - serializeBinary(): Uint8Array; - toObject(includeInstance?: boolean): PermUnset.AsObject; - static toObject(includeInstance: boolean, msg: PermUnset): PermUnset.AsObject; - static extensions: {[key: number]: jspb.ExtensionFieldInfo}; - static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; - static serializeBinaryToWriter(message: PermUnset, writer: jspb.BinaryWriter): void; - static deserializeBinary(bytes: Uint8Array): PermUnset; - static deserializeBinaryFromReader(message: PermUnset, reader: jspb.BinaryReader): PermUnset; -} - -export namespace PermUnset { - export type AsObject = { - vault?: Vault.AsObject, - node?: polykey_v1_nodes_nodes_pb.Node.AsObject, - } -} - -export class PermGet extends jspb.Message { - - hasVault(): boolean; - clearVault(): void; - getVault(): Vault | undefined; - setVault(value?: Vault): PermGet; - - hasNode(): boolean; - clearNode(): void; - getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; - setNode(value?: polykey_v1_nodes_nodes_pb.Node): PermGet; + setNode(value?: polykey_v1_nodes_nodes_pb.Node): Permissions; + clearVaultPermissionsList(): void; + getVaultPermissionsList(): Array; + setVaultPermissionsList(value: Array): Permissions; + addVaultPermissions(value: string, index?: number): string; serializeBinary(): Uint8Array; - toObject(includeInstance?: boolean): PermGet.AsObject; - static toObject(includeInstance: boolean, msg: PermGet): PermGet.AsObject; + toObject(includeInstance?: boolean): Permissions.AsObject; + static toObject(includeInstance: boolean, msg: Permissions): Permissions.AsObject; static extensions: {[key: number]: jspb.ExtensionFieldInfo}; static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; - static serializeBinaryToWriter(message: PermGet, writer: jspb.BinaryWriter): void; - static deserializeBinary(bytes: Uint8Array): PermGet; - static deserializeBinaryFromReader(message: PermGet, reader: jspb.BinaryReader): PermGet; + static serializeBinaryToWriter(message: Permissions, writer: jspb.BinaryWriter): void; + static deserializeBinary(bytes: Uint8Array): Permissions; + static deserializeBinaryFromReader(message: Permissions, reader: jspb.BinaryReader): Permissions; } -export namespace PermGet { +export namespace Permissions { export type AsObject = { vault?: Vault.AsObject, node?: polykey_v1_nodes_nodes_pb.Node.AsObject, - 
} -} - -export class Permission extends jspb.Message { - getNodeId(): string; - setNodeId(value: string): Permission; - getAction(): string; - setAction(value: string): Permission; - - serializeBinary(): Uint8Array; - toObject(includeInstance?: boolean): Permission.AsObject; - static toObject(includeInstance: boolean, msg: Permission): Permission.AsObject; - static extensions: {[key: number]: jspb.ExtensionFieldInfo}; - static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; - static serializeBinaryToWriter(message: Permission, writer: jspb.BinaryWriter): void; - static deserializeBinary(bytes: Uint8Array): Permission; - static deserializeBinaryFromReader(message: Permission, reader: jspb.BinaryReader): Permission; -} - -export namespace Permission { - export type AsObject = { - nodeId: string, - action: string, + vaultPermissionsList: Array, } } diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.js b/src/proto/js/polykey/v1/vaults/vaults_pb.js index 31d273126..6b793dc63 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.js +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.js @@ -28,10 +28,7 @@ goog.exportSymbol('proto.polykey.v1.vaults.NodePermission', null, global); goog.exportSymbol('proto.polykey.v1.vaults.NodePermissionAllowed', null, global); goog.exportSymbol('proto.polykey.v1.vaults.PackChunk', null, global); goog.exportSymbol('proto.polykey.v1.vaults.PackRequest', null, global); -goog.exportSymbol('proto.polykey.v1.vaults.PermGet', null, global); -goog.exportSymbol('proto.polykey.v1.vaults.PermSet', null, global); -goog.exportSymbol('proto.polykey.v1.vaults.PermUnset', null, global); -goog.exportSymbol('proto.polykey.v1.vaults.Permission', null, global); +goog.exportSymbol('proto.polykey.v1.vaults.Permissions', null, global); goog.exportSymbol('proto.polykey.v1.vaults.Pull', null, global); goog.exportSymbol('proto.polykey.v1.vaults.Rename', null, global); goog.exportSymbol('proto.polykey.v1.vaults.Stat', null, global); @@ -195,79 +192,16 @@ if (goog.DEBUG && !COMPILED) { * @extends {jspb.Message} * @constructor */ -proto.polykey.v1.vaults.PermSet = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); -}; -goog.inherits(proto.polykey.v1.vaults.PermSet, jspb.Message); -if (goog.DEBUG && !COMPILED) { - /** - * @public - * @override - */ - proto.polykey.v1.vaults.PermSet.displayName = 'proto.polykey.v1.vaults.PermSet'; -} -/** - * Generated by JsPbCodeGenerator. - * @param {Array=} opt_data Optional initial data array, typically from a - * server response, or constructed directly in Javascript. The array is used - * in place and becomes part of the constructed object. It is not cloned. - * If no data is provided, the constructed object will be empty, but still - * valid. - * @extends {jspb.Message} - * @constructor - */ -proto.polykey.v1.vaults.PermUnset = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); -}; -goog.inherits(proto.polykey.v1.vaults.PermUnset, jspb.Message); -if (goog.DEBUG && !COMPILED) { - /** - * @public - * @override - */ - proto.polykey.v1.vaults.PermUnset.displayName = 'proto.polykey.v1.vaults.PermUnset'; -} -/** - * Generated by JsPbCodeGenerator. - * @param {Array=} opt_data Optional initial data array, typically from a - * server response, or constructed directly in Javascript. The array is used - * in place and becomes part of the constructed object. It is not cloned. - * If no data is provided, the constructed object will be empty, but still - * valid. 
- * @extends {jspb.Message} - * @constructor - */ -proto.polykey.v1.vaults.PermGet = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); -}; -goog.inherits(proto.polykey.v1.vaults.PermGet, jspb.Message); -if (goog.DEBUG && !COMPILED) { - /** - * @public - * @override - */ - proto.polykey.v1.vaults.PermGet.displayName = 'proto.polykey.v1.vaults.PermGet'; -} -/** - * Generated by JsPbCodeGenerator. - * @param {Array=} opt_data Optional initial data array, typically from a - * server response, or constructed directly in Javascript. The array is used - * in place and becomes part of the constructed object. It is not cloned. - * If no data is provided, the constructed object will be empty, but still - * valid. - * @extends {jspb.Message} - * @constructor - */ -proto.polykey.v1.vaults.Permission = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); +proto.polykey.v1.vaults.Permissions = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, proto.polykey.v1.vaults.Permissions.repeatedFields_, null); }; -goog.inherits(proto.polykey.v1.vaults.Permission, jspb.Message); +goog.inherits(proto.polykey.v1.vaults.Permissions, jspb.Message); if (goog.DEBUG && !COMPILED) { /** * @public * @override */ - proto.polykey.v1.vaults.Permission.displayName = 'proto.polykey.v1.vaults.Permission'; + proto.polykey.v1.vaults.Permissions.displayName = 'proto.polykey.v1.vaults.Permissions'; } /** * Generated by JsPbCodeGenerator. @@ -1782,6 +1716,13 @@ proto.polykey.v1.vaults.Stat.prototype.setStats = function(value) { +/** + * List of repeated fields within this message type. + * @private {!Array} + * @const + */ +proto.polykey.v1.vaults.Permissions.repeatedFields_ = [3]; + if (jspb.Message.GENERATE_TO_OBJECT) { @@ -1797,8 +1738,8 @@ if (jspb.Message.GENERATE_TO_OBJECT) { * http://goto/soy-param-migration * @return {!Object} */ -proto.polykey.v1.vaults.PermSet.prototype.toObject = function(opt_includeInstance) { - return proto.polykey.v1.vaults.PermSet.toObject(opt_includeInstance, this); +proto.polykey.v1.vaults.Permissions.prototype.toObject = function(opt_includeInstance) { + return proto.polykey.v1.vaults.Permissions.toObject(opt_includeInstance, this); }; @@ -1807,14 +1748,15 @@ proto.polykey.v1.vaults.PermSet.prototype.toObject = function(opt_includeInstanc * @param {boolean|undefined} includeInstance Deprecated. Whether to include * the JSPB instance for transitional soy proto support: * http://goto/soy-param-migration - * @param {!proto.polykey.v1.vaults.PermSet} msg The msg instance to transform. + * @param {!proto.polykey.v1.vaults.Permissions} msg The msg instance to transform. * @return {!Object} * @suppress {unusedLocalVariables} f is only used for nested messages */ -proto.polykey.v1.vaults.PermSet.toObject = function(includeInstance, msg) { +proto.polykey.v1.vaults.Permissions.toObject = function(includeInstance, msg) { var f, obj = { vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f) + node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f), + vaultPermissionsList: (f = jspb.Message.getRepeatedField(msg, 3)) == null ? undefined : f }; if (includeInstance) { @@ -1828,23 +1770,23 @@ proto.polykey.v1.vaults.PermSet.toObject = function(includeInstance, msg) { /** * Deserializes binary data (in protobuf wire format). 
* @param {jspb.ByteSource} bytes The bytes to deserialize. - * @return {!proto.polykey.v1.vaults.PermSet} + * @return {!proto.polykey.v1.vaults.Permissions} */ -proto.polykey.v1.vaults.PermSet.deserializeBinary = function(bytes) { +proto.polykey.v1.vaults.Permissions.deserializeBinary = function(bytes) { var reader = new jspb.BinaryReader(bytes); - var msg = new proto.polykey.v1.vaults.PermSet; - return proto.polykey.v1.vaults.PermSet.deserializeBinaryFromReader(msg, reader); + var msg = new proto.polykey.v1.vaults.Permissions; + return proto.polykey.v1.vaults.Permissions.deserializeBinaryFromReader(msg, reader); }; /** * Deserializes binary data (in protobuf wire format) from the * given reader into the given message object. - * @param {!proto.polykey.v1.vaults.PermSet} msg The message object to deserialize into. + * @param {!proto.polykey.v1.vaults.Permissions} msg The message object to deserialize into. * @param {!jspb.BinaryReader} reader The BinaryReader to use. - * @return {!proto.polykey.v1.vaults.PermSet} + * @return {!proto.polykey.v1.vaults.Permissions} */ -proto.polykey.v1.vaults.PermSet.deserializeBinaryFromReader = function(msg, reader) { +proto.polykey.v1.vaults.Permissions.deserializeBinaryFromReader = function(msg, reader) { while (reader.nextField()) { if (reader.isEndGroup()) { break; @@ -1861,6 +1803,10 @@ proto.polykey.v1.vaults.PermSet.deserializeBinaryFromReader = function(msg, read reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); msg.setNode(value); break; + case 3: + var value = /** @type {string} */ (reader.readString()); + msg.addVaultPermissions(value); + break; default: reader.skipField(); break; @@ -1874,9 +1820,9 @@ proto.polykey.v1.vaults.PermSet.deserializeBinaryFromReader = function(msg, read * Serializes the message to binary data (in protobuf wire format). * @return {!Uint8Array} */ -proto.polykey.v1.vaults.PermSet.prototype.serializeBinary = function() { +proto.polykey.v1.vaults.Permissions.prototype.serializeBinary = function() { var writer = new jspb.BinaryWriter(); - proto.polykey.v1.vaults.PermSet.serializeBinaryToWriter(this, writer); + proto.polykey.v1.vaults.Permissions.serializeBinaryToWriter(this, writer); return writer.getResultBuffer(); }; @@ -1884,11 +1830,11 @@ proto.polykey.v1.vaults.PermSet.prototype.serializeBinary = function() { /** * Serializes the given message to binary data (in protobuf wire * format), writing to the given BinaryWriter. 
- * @param {!proto.polykey.v1.vaults.PermSet} message + * @param {!proto.polykey.v1.vaults.Permissions} message * @param {!jspb.BinaryWriter} writer * @suppress {unusedLocalVariables} f is only used for nested messages */ -proto.polykey.v1.vaults.PermSet.serializeBinaryToWriter = function(message, writer) { +proto.polykey.v1.vaults.Permissions.serializeBinaryToWriter = function(message, writer) { var f = undefined; f = message.getVault(); if (f != null) { @@ -1906,6 +1852,13 @@ proto.polykey.v1.vaults.PermSet.serializeBinaryToWriter = function(message, writ polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter ); } + f = message.getVaultPermissionsList(); + if (f.length > 0) { + writer.writeRepeatedString( + 3, + f + ); + } }; @@ -1913,7 +1866,7 @@ proto.polykey.v1.vaults.PermSet.serializeBinaryToWriter = function(message, writ * optional Vault vault = 1; * @return {?proto.polykey.v1.vaults.Vault} */ -proto.polykey.v1.vaults.PermSet.prototype.getVault = function() { +proto.polykey.v1.vaults.Permissions.prototype.getVault = function() { return /** @type{?proto.polykey.v1.vaults.Vault} */ ( jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 1)); }; @@ -1921,18 +1874,18 @@ proto.polykey.v1.vaults.PermSet.prototype.getVault = function() { /** * @param {?proto.polykey.v1.vaults.Vault|undefined} value - * @return {!proto.polykey.v1.vaults.PermSet} returns this + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermSet.prototype.setVault = function(value) { +proto.polykey.v1.vaults.Permissions.prototype.setVault = function(value) { return jspb.Message.setWrapperField(this, 1, value); }; /** * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.PermSet} returns this + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermSet.prototype.clearVault = function() { +proto.polykey.v1.vaults.Permissions.prototype.clearVault = function() { return this.setVault(undefined); }; @@ -1941,7 +1894,7 @@ proto.polykey.v1.vaults.PermSet.prototype.clearVault = function() { * Returns whether this field is set. * @return {boolean} */ -proto.polykey.v1.vaults.PermSet.prototype.hasVault = function() { +proto.polykey.v1.vaults.Permissions.prototype.hasVault = function() { return jspb.Message.getField(this, 1) != null; }; @@ -1950,7 +1903,7 @@ proto.polykey.v1.vaults.PermSet.prototype.hasVault = function() { * optional polykey.v1.nodes.Node node = 2; * @return {?proto.polykey.v1.nodes.Node} */ -proto.polykey.v1.vaults.PermSet.prototype.getNode = function() { +proto.polykey.v1.vaults.Permissions.prototype.getNode = function() { return /** @type{?proto.polykey.v1.nodes.Node} */ ( jspb.Message.getWrapperField(this, polykey_v1_nodes_nodes_pb.Node, 2)); }; @@ -1958,18 +1911,18 @@ proto.polykey.v1.vaults.PermSet.prototype.getNode = function() { /** * @param {?proto.polykey.v1.nodes.Node|undefined} value - * @return {!proto.polykey.v1.vaults.PermSet} returns this + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermSet.prototype.setNode = function(value) { +proto.polykey.v1.vaults.Permissions.prototype.setNode = function(value) { return jspb.Message.setWrapperField(this, 2, value); }; /** * Clears the message field making it undefined. 
- * @return {!proto.polykey.v1.vaults.PermSet} returns this + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermSet.prototype.clearNode = function() { +proto.polykey.v1.vaults.Permissions.prototype.clearNode = function() { return this.setNode(undefined); }; @@ -1978,572 +1931,45 @@ proto.polykey.v1.vaults.PermSet.prototype.clearNode = function() { * Returns whether this field is set. * @return {boolean} */ -proto.polykey.v1.vaults.PermSet.prototype.hasNode = function() { +proto.polykey.v1.vaults.Permissions.prototype.hasNode = function() { return jspb.Message.getField(this, 2) != null; }; - - - -if (jspb.Message.GENERATE_TO_OBJECT) { -/** - * Creates an object representation of this proto. - * Field names that are reserved in JavaScript and will be renamed to pb_name. - * Optional fields that are not set will be set to undefined. - * To access a reserved field use, foo.pb_, eg, foo.pb_default. - * For the list of reserved names please see: - * net/proto2/compiler/js/internal/generator.cc#kKeyword. - * @param {boolean=} opt_includeInstance Deprecated. whether to include the - * JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @return {!Object} - */ -proto.polykey.v1.vaults.PermUnset.prototype.toObject = function(opt_includeInstance) { - return proto.polykey.v1.vaults.PermUnset.toObject(opt_includeInstance, this); -}; - - -/** - * Static version of the {@see toObject} method. - * @param {boolean|undefined} includeInstance Deprecated. Whether to include - * the JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @param {!proto.polykey.v1.vaults.PermUnset} msg The msg instance to transform. - * @return {!Object} - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.PermUnset.toObject = function(includeInstance, msg) { - var f, obj = { - vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f) - }; - - if (includeInstance) { - obj.$jspbMessageInstance = msg; - } - return obj; -}; -} - - -/** - * Deserializes binary data (in protobuf wire format). - * @param {jspb.ByteSource} bytes The bytes to deserialize. - * @return {!proto.polykey.v1.vaults.PermUnset} - */ -proto.polykey.v1.vaults.PermUnset.deserializeBinary = function(bytes) { - var reader = new jspb.BinaryReader(bytes); - var msg = new proto.polykey.v1.vaults.PermUnset; - return proto.polykey.v1.vaults.PermUnset.deserializeBinaryFromReader(msg, reader); -}; - - -/** - * Deserializes binary data (in protobuf wire format) from the - * given reader into the given message object. - * @param {!proto.polykey.v1.vaults.PermUnset} msg The message object to deserialize into. - * @param {!jspb.BinaryReader} reader The BinaryReader to use. 
- * @return {!proto.polykey.v1.vaults.PermUnset} - */ -proto.polykey.v1.vaults.PermUnset.deserializeBinaryFromReader = function(msg, reader) { - while (reader.nextField()) { - if (reader.isEndGroup()) { - break; - } - var field = reader.getFieldNumber(); - switch (field) { - case 1: - var value = new proto.polykey.v1.vaults.Vault; - reader.readMessage(value,proto.polykey.v1.vaults.Vault.deserializeBinaryFromReader); - msg.setVault(value); - break; - case 2: - var value = new polykey_v1_nodes_nodes_pb.Node; - reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); - msg.setNode(value); - break; - default: - reader.skipField(); - break; - } - } - return msg; -}; - - -/** - * Serializes the message to binary data (in protobuf wire format). - * @return {!Uint8Array} - */ -proto.polykey.v1.vaults.PermUnset.prototype.serializeBinary = function() { - var writer = new jspb.BinaryWriter(); - proto.polykey.v1.vaults.PermUnset.serializeBinaryToWriter(this, writer); - return writer.getResultBuffer(); -}; - - -/** - * Serializes the given message to binary data (in protobuf wire - * format), writing to the given BinaryWriter. - * @param {!proto.polykey.v1.vaults.PermUnset} message - * @param {!jspb.BinaryWriter} writer - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.PermUnset.serializeBinaryToWriter = function(message, writer) { - var f = undefined; - f = message.getVault(); - if (f != null) { - writer.writeMessage( - 1, - f, - proto.polykey.v1.vaults.Vault.serializeBinaryToWriter - ); - } - f = message.getNode(); - if (f != null) { - writer.writeMessage( - 2, - f, - polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter - ); - } -}; - - -/** - * optional Vault vault = 1; - * @return {?proto.polykey.v1.vaults.Vault} - */ -proto.polykey.v1.vaults.PermUnset.prototype.getVault = function() { - return /** @type{?proto.polykey.v1.vaults.Vault} */ ( - jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 1)); -}; - - -/** - * @param {?proto.polykey.v1.vaults.Vault|undefined} value - * @return {!proto.polykey.v1.vaults.PermUnset} returns this -*/ -proto.polykey.v1.vaults.PermUnset.prototype.setVault = function(value) { - return jspb.Message.setWrapperField(this, 1, value); -}; - - -/** - * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.PermUnset} returns this - */ -proto.polykey.v1.vaults.PermUnset.prototype.clearVault = function() { - return this.setVault(undefined); -}; - - /** - * Returns whether this field is set. 
- * @return {boolean} + * repeated string vault_permissions = 3; + * @return {!Array} */ -proto.polykey.v1.vaults.PermUnset.prototype.hasVault = function() { - return jspb.Message.getField(this, 1) != null; +proto.polykey.v1.vaults.Permissions.prototype.getVaultPermissionsList = function() { + return /** @type {!Array} */ (jspb.Message.getRepeatedField(this, 3)); }; /** - * optional polykey.v1.nodes.Node node = 2; - * @return {?proto.polykey.v1.nodes.Node} + * @param {!Array} value + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermUnset.prototype.getNode = function() { - return /** @type{?proto.polykey.v1.nodes.Node} */ ( - jspb.Message.getWrapperField(this, polykey_v1_nodes_nodes_pb.Node, 2)); -}; - - -/** - * @param {?proto.polykey.v1.nodes.Node|undefined} value - * @return {!proto.polykey.v1.vaults.PermUnset} returns this -*/ -proto.polykey.v1.vaults.PermUnset.prototype.setNode = function(value) { - return jspb.Message.setWrapperField(this, 2, value); +proto.polykey.v1.vaults.Permissions.prototype.setVaultPermissionsList = function(value) { + return jspb.Message.setField(this, 3, value || []); }; /** - * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.PermUnset} returns this + * @param {string} value + * @param {number=} opt_index + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermUnset.prototype.clearNode = function() { - return this.setNode(undefined); +proto.polykey.v1.vaults.Permissions.prototype.addVaultPermissions = function(value, opt_index) { + return jspb.Message.addToRepeatedField(this, 3, value, opt_index); }; /** - * Returns whether this field is set. - * @return {boolean} + * Clears the list making it empty but non-null. + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermUnset.prototype.hasNode = function() { - return jspb.Message.getField(this, 2) != null; -}; - - - - - -if (jspb.Message.GENERATE_TO_OBJECT) { -/** - * Creates an object representation of this proto. - * Field names that are reserved in JavaScript and will be renamed to pb_name. - * Optional fields that are not set will be set to undefined. - * To access a reserved field use, foo.pb_, eg, foo.pb_default. - * For the list of reserved names please see: - * net/proto2/compiler/js/internal/generator.cc#kKeyword. - * @param {boolean=} opt_includeInstance Deprecated. whether to include the - * JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @return {!Object} - */ -proto.polykey.v1.vaults.PermGet.prototype.toObject = function(opt_includeInstance) { - return proto.polykey.v1.vaults.PermGet.toObject(opt_includeInstance, this); -}; - - -/** - * Static version of the {@see toObject} method. - * @param {boolean|undefined} includeInstance Deprecated. Whether to include - * the JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @param {!proto.polykey.v1.vaults.PermGet} msg The msg instance to transform. 
- * @return {!Object} - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.PermGet.toObject = function(includeInstance, msg) { - var f, obj = { - vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f) - }; - - if (includeInstance) { - obj.$jspbMessageInstance = msg; - } - return obj; -}; -} - - -/** - * Deserializes binary data (in protobuf wire format). - * @param {jspb.ByteSource} bytes The bytes to deserialize. - * @return {!proto.polykey.v1.vaults.PermGet} - */ -proto.polykey.v1.vaults.PermGet.deserializeBinary = function(bytes) { - var reader = new jspb.BinaryReader(bytes); - var msg = new proto.polykey.v1.vaults.PermGet; - return proto.polykey.v1.vaults.PermGet.deserializeBinaryFromReader(msg, reader); -}; - - -/** - * Deserializes binary data (in protobuf wire format) from the - * given reader into the given message object. - * @param {!proto.polykey.v1.vaults.PermGet} msg The message object to deserialize into. - * @param {!jspb.BinaryReader} reader The BinaryReader to use. - * @return {!proto.polykey.v1.vaults.PermGet} - */ -proto.polykey.v1.vaults.PermGet.deserializeBinaryFromReader = function(msg, reader) { - while (reader.nextField()) { - if (reader.isEndGroup()) { - break; - } - var field = reader.getFieldNumber(); - switch (field) { - case 1: - var value = new proto.polykey.v1.vaults.Vault; - reader.readMessage(value,proto.polykey.v1.vaults.Vault.deserializeBinaryFromReader); - msg.setVault(value); - break; - case 2: - var value = new polykey_v1_nodes_nodes_pb.Node; - reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); - msg.setNode(value); - break; - default: - reader.skipField(); - break; - } - } - return msg; -}; - - -/** - * Serializes the message to binary data (in protobuf wire format). - * @return {!Uint8Array} - */ -proto.polykey.v1.vaults.PermGet.prototype.serializeBinary = function() { - var writer = new jspb.BinaryWriter(); - proto.polykey.v1.vaults.PermGet.serializeBinaryToWriter(this, writer); - return writer.getResultBuffer(); -}; - - -/** - * Serializes the given message to binary data (in protobuf wire - * format), writing to the given BinaryWriter. - * @param {!proto.polykey.v1.vaults.PermGet} message - * @param {!jspb.BinaryWriter} writer - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.PermGet.serializeBinaryToWriter = function(message, writer) { - var f = undefined; - f = message.getVault(); - if (f != null) { - writer.writeMessage( - 1, - f, - proto.polykey.v1.vaults.Vault.serializeBinaryToWriter - ); - } - f = message.getNode(); - if (f != null) { - writer.writeMessage( - 2, - f, - polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter - ); - } -}; - - -/** - * optional Vault vault = 1; - * @return {?proto.polykey.v1.vaults.Vault} - */ -proto.polykey.v1.vaults.PermGet.prototype.getVault = function() { - return /** @type{?proto.polykey.v1.vaults.Vault} */ ( - jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 1)); -}; - - -/** - * @param {?proto.polykey.v1.vaults.Vault|undefined} value - * @return {!proto.polykey.v1.vaults.PermGet} returns this -*/ -proto.polykey.v1.vaults.PermGet.prototype.setVault = function(value) { - return jspb.Message.setWrapperField(this, 1, value); -}; - - -/** - * Clears the message field making it undefined. 
- * @return {!proto.polykey.v1.vaults.PermGet} returns this - */ -proto.polykey.v1.vaults.PermGet.prototype.clearVault = function() { - return this.setVault(undefined); -}; - - -/** - * Returns whether this field is set. - * @return {boolean} - */ -proto.polykey.v1.vaults.PermGet.prototype.hasVault = function() { - return jspb.Message.getField(this, 1) != null; -}; - - -/** - * optional polykey.v1.nodes.Node node = 2; - * @return {?proto.polykey.v1.nodes.Node} - */ -proto.polykey.v1.vaults.PermGet.prototype.getNode = function() { - return /** @type{?proto.polykey.v1.nodes.Node} */ ( - jspb.Message.getWrapperField(this, polykey_v1_nodes_nodes_pb.Node, 2)); -}; - - -/** - * @param {?proto.polykey.v1.nodes.Node|undefined} value - * @return {!proto.polykey.v1.vaults.PermGet} returns this -*/ -proto.polykey.v1.vaults.PermGet.prototype.setNode = function(value) { - return jspb.Message.setWrapperField(this, 2, value); -}; - - -/** - * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.PermGet} returns this - */ -proto.polykey.v1.vaults.PermGet.prototype.clearNode = function() { - return this.setNode(undefined); -}; - - -/** - * Returns whether this field is set. - * @return {boolean} - */ -proto.polykey.v1.vaults.PermGet.prototype.hasNode = function() { - return jspb.Message.getField(this, 2) != null; -}; - - - - - -if (jspb.Message.GENERATE_TO_OBJECT) { -/** - * Creates an object representation of this proto. - * Field names that are reserved in JavaScript and will be renamed to pb_name. - * Optional fields that are not set will be set to undefined. - * To access a reserved field use, foo.pb_, eg, foo.pb_default. - * For the list of reserved names please see: - * net/proto2/compiler/js/internal/generator.cc#kKeyword. - * @param {boolean=} opt_includeInstance Deprecated. whether to include the - * JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @return {!Object} - */ -proto.polykey.v1.vaults.Permission.prototype.toObject = function(opt_includeInstance) { - return proto.polykey.v1.vaults.Permission.toObject(opt_includeInstance, this); -}; - - -/** - * Static version of the {@see toObject} method. - * @param {boolean|undefined} includeInstance Deprecated. Whether to include - * the JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @param {!proto.polykey.v1.vaults.Permission} msg The msg instance to transform. - * @return {!Object} - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.Permission.toObject = function(includeInstance, msg) { - var f, obj = { - nodeId: jspb.Message.getFieldWithDefault(msg, 1, ""), - action: jspb.Message.getFieldWithDefault(msg, 2, "") - }; - - if (includeInstance) { - obj.$jspbMessageInstance = msg; - } - return obj; -}; -} - - -/** - * Deserializes binary data (in protobuf wire format). - * @param {jspb.ByteSource} bytes The bytes to deserialize. - * @return {!proto.polykey.v1.vaults.Permission} - */ -proto.polykey.v1.vaults.Permission.deserializeBinary = function(bytes) { - var reader = new jspb.BinaryReader(bytes); - var msg = new proto.polykey.v1.vaults.Permission; - return proto.polykey.v1.vaults.Permission.deserializeBinaryFromReader(msg, reader); -}; - - -/** - * Deserializes binary data (in protobuf wire format) from the - * given reader into the given message object. - * @param {!proto.polykey.v1.vaults.Permission} msg The message object to deserialize into. 
- * @param {!jspb.BinaryReader} reader The BinaryReader to use. - * @return {!proto.polykey.v1.vaults.Permission} - */ -proto.polykey.v1.vaults.Permission.deserializeBinaryFromReader = function(msg, reader) { - while (reader.nextField()) { - if (reader.isEndGroup()) { - break; - } - var field = reader.getFieldNumber(); - switch (field) { - case 1: - var value = /** @type {string} */ (reader.readString()); - msg.setNodeId(value); - break; - case 2: - var value = /** @type {string} */ (reader.readString()); - msg.setAction(value); - break; - default: - reader.skipField(); - break; - } - } - return msg; -}; - - -/** - * Serializes the message to binary data (in protobuf wire format). - * @return {!Uint8Array} - */ -proto.polykey.v1.vaults.Permission.prototype.serializeBinary = function() { - var writer = new jspb.BinaryWriter(); - proto.polykey.v1.vaults.Permission.serializeBinaryToWriter(this, writer); - return writer.getResultBuffer(); -}; - - -/** - * Serializes the given message to binary data (in protobuf wire - * format), writing to the given BinaryWriter. - * @param {!proto.polykey.v1.vaults.Permission} message - * @param {!jspb.BinaryWriter} writer - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.Permission.serializeBinaryToWriter = function(message, writer) { - var f = undefined; - f = message.getNodeId(); - if (f.length > 0) { - writer.writeString( - 1, - f - ); - } - f = message.getAction(); - if (f.length > 0) { - writer.writeString( - 2, - f - ); - } -}; - - -/** - * optional string node_id = 1; - * @return {string} - */ -proto.polykey.v1.vaults.Permission.prototype.getNodeId = function() { - return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, "")); -}; - - -/** - * @param {string} value - * @return {!proto.polykey.v1.vaults.Permission} returns this - */ -proto.polykey.v1.vaults.Permission.prototype.setNodeId = function(value) { - return jspb.Message.setProto3StringField(this, 1, value); -}; - - -/** - * optional string action = 2; - * @return {string} - */ -proto.polykey.v1.vaults.Permission.prototype.getAction = function() { - return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, "")); -}; - - -/** - * @param {string} value - * @return {!proto.polykey.v1.vaults.Permission} returns this - */ -proto.polykey.v1.vaults.Permission.prototype.setAction = function(value) { - return jspb.Message.setProto3StringField(this, 2, value); +proto.polykey.v1.vaults.Permissions.prototype.clearVaultPermissionsList = function() { + return this.setVaultPermissionsList([]); }; diff --git a/src/proto/schemas/polykey/v1/client_service.proto b/src/proto/schemas/polykey/v1/client_service.proto index e50a8a474..57788c678 100644 --- a/src/proto/schemas/polykey/v1/client_service.proto +++ b/src/proto/schemas/polykey/v1/client_service.proto @@ -55,9 +55,9 @@ service ClientService { rpc VaultsSecretsNew(polykey.v1.secrets.Secret) returns (polykey.v1.utils.StatusMessage); rpc VaultsSecretsNewDir(polykey.v1.secrets.Directory) returns (polykey.v1.utils.StatusMessage); rpc vaultsSecretsStat(polykey.v1.secrets.Secret) returns (polykey.v1.secrets.Stat); - rpc VaultsPermissionsGet(polykey.v1.vaults.Vault) returns (stream polykey.v1.permissions.NodeActions); - rpc VaultsShare(polykey.v1.vaults.PermSet) returns (polykey.v1.utils.StatusMessage); - rpc VaultsUnshare(polykey.v1.vaults.PermUnset) returns (polykey.v1.utils.StatusMessage); + rpc VaultsPermissionGet(polykey.v1.vaults.Vault) returns (stream 
polykey.v1.vaults.Permissions); + rpc VaultsPermissionSet(polykey.v1.vaults.Permissions) returns (polykey.v1.utils.StatusMessage); + rpc VaultsPermissionUnset(polykey.v1.vaults.Permissions) returns (polykey.v1.utils.StatusMessage); rpc VaultsVersion(polykey.v1.vaults.Version) returns (polykey.v1.vaults.VersionResult); rpc VaultsLog(polykey.v1.vaults.Log) returns (stream polykey.v1.vaults.LogEntry); rpc VaultsScan(polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.List); diff --git a/src/proto/schemas/polykey/v1/vaults/vaults.proto b/src/proto/schemas/polykey/v1/vaults/vaults.proto index 662a77d42..3ea6926af 100644 --- a/src/proto/schemas/polykey/v1/vaults/vaults.proto +++ b/src/proto/schemas/polykey/v1/vaults/vaults.proto @@ -46,24 +46,10 @@ message Stat { string stats = 1; } -message PermSet { +message Permissions { Vault vault = 1; polykey.v1.nodes.Node node = 2; -} - -message PermUnset { - Vault vault = 1; - polykey.v1.nodes.Node node = 2; -} - -message PermGet { - Vault vault = 1; - polykey.v1.nodes.Node node = 2; -} - -message Permission { - string node_id = 1; - string action = 2; + repeated string vault_permissions = 3; } message Version { diff --git a/src/vaults/types.ts b/src/vaults/types.ts index 12c3bc386..f4eee7644 100644 --- a/src/vaults/types.ts +++ b/src/vaults/types.ts @@ -136,7 +136,6 @@ interface FileSystemWritable extends FileSystemReadable { type VaultName = string; -// FIXME: temp placeholder type VaultActions = Partial>; export { vaultActions }; diff --git a/tests/bin/vaults/vaults.test.ts b/tests/bin/vaults/vaults.test.ts index c50b78815..6a5c1a974 100644 --- a/tests/bin/vaults/vaults.test.ts +++ b/tests/bin/vaults/vaults.test.ts @@ -8,7 +8,6 @@ import PolykeyAgent from '@/PolykeyAgent'; import * as nodesUtils from '@/nodes/utils'; import * as vaultsUtils from '@/vaults/utils'; import sysexits from '@/utils/sysexits'; -import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; import NotificationsManager from '@/notifications/NotificationsManager'; import * as testBinUtils from '../utils'; import * as testUtils from '../../utils'; @@ -803,120 +802,4 @@ describe('CLI vaults', () => { global.defaultTimeout * 2, ); }); - describe('commandPermissions', () => { - test('Should return nodeIds and their permissions', async () => { - let remoteKeynode1: PolykeyAgent | undefined; - let remoteKeynode2: PolykeyAgent | undefined; - try { - // A ridiculous amount of setup. 
- const vaultId1 = await polykeyAgent.vaultManager.createVault( - 'vault1' as VaultName, - ); - const vaultId2 = await polykeyAgent.vaultManager.createVault( - 'vault2' as VaultName, - ); - - remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ - password, - logger: logger.getChild('Remote Keynode 1'), - nodePath: path.join(dataDir, 'remoteKeynode1'), - }); - remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({ - password, - logger: logger.getChild('Remote Keynode 2'), - nodePath: path.join(dataDir, 'remoteKeynode2'), - }); - - const targetNodeId1 = remoteKeynode1.keyManager.getNodeId(); - const targetNodeId2 = remoteKeynode2.keyManager.getNodeId(); - await polykeyAgent.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(targetNodeId1), - chain: {}, - }); - await polykeyAgent.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(targetNodeId2), - chain: {}, - }); - await polykeyAgent.nodeManager.setNode(targetNodeId1, { - host: remoteKeynode1.revProxy.getIngressHost(), - port: remoteKeynode1.revProxy.getIngressPort(), - }); - await polykeyAgent.nodeManager.setNode(targetNodeId2, { - host: remoteKeynode2.revProxy.getIngressHost(), - port: remoteKeynode2.revProxy.getIngressPort(), - }); - - await remoteKeynode1.nodeManager.setNode( - polykeyAgent.keyManager.getNodeId(), - { - host: polykeyAgent.revProxy.getIngressHost(), - port: polykeyAgent.revProxy.getIngressPort(), - }, - ); - await remoteKeynode2.nodeManager.setNode( - polykeyAgent.keyManager.getNodeId(), - { - host: polykeyAgent.revProxy.getIngressHost(), - port: polykeyAgent.revProxy.getIngressPort(), - }, - ); - await remoteKeynode1.acl.setNodePerm( - polykeyAgent.keyManager.getNodeId(), - { - gestalt: { - notify: null, - }, - vaults: {}, - }, - ); - await remoteKeynode2.acl.setNodePerm( - polykeyAgent.keyManager.getNodeId(), - { - gestalt: { - notify: null, - }, - vaults: {}, - }, - ); - - await polykeyAgent.vaultManager.shareVault(vaultId1, targetNodeId1); - await polykeyAgent.vaultManager.shareVault(vaultId1, targetNodeId2); - await polykeyAgent.vaultManager.shareVault(vaultId2, targetNodeId1); - - const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId1)); - - // Now we call and test the command - const command1 = ['vaults', 'permissions', 'vault1', '-np', dataDir]; - const result1 = await testBinUtils.pkStdio( - command1, - { PK_PASSWORD: 'password' }, - dataDir, - ); - expect(result1.exitCode).toBe(0); - expect(result1.stdout).toContain(remoteKeynode1.keyManager.getNodeId()); - expect(result1.stdout).toContain(remoteKeynode2.keyManager.getNodeId()); - expect(result1.stdout).toContain('pull'); - expect(result1.stdout).toContain('clone'); - - // And the other vault - const command2 = ['vaults', 'permissions', 'vault2', '-np', dataDir]; - const result2 = await testBinUtils.pkStdio( - command2, - { PK_PASSWORD: 'password' }, - dataDir, - ); - expect(result2.exitCode).toBe(0); - expect(result2.stdout).toContain(targetNodeId1); - expect(result2.stdout).not.toContain(targetNodeId2); - expect(result2.stdout).toContain('pull'); - expect(result2.stdout).toContain('clone'); - } finally { - await remoteKeynode1?.stop(); - await remoteKeynode1?.destroy(); - await remoteKeynode2?.stop(); - await remoteKeynode2?.destroy(); - } - }); - }); }); diff --git a/tests/client/rpcVaults.test.ts b/tests/client/rpcVaults.test.ts index 1cb03c41e..09f44b410 100644 --- a/tests/client/rpcVaults.test.ts +++ b/tests/client/rpcVaults.test.ts @@ -3,7 +3,6 @@ import type VaultManager from 
'@/vaults/VaultManager'; import type { VaultId, VaultName } from '@/vaults/types'; import type { ClientServiceClient } from '@/proto/js/polykey/v1/client_service_grpc_pb'; import type { Stat } from 'encryptedfs'; -import type * as permissionsPB from '@/proto/js/polykey/v1/permissions/permissions_pb'; import os from 'os'; import path from 'path'; import fs from 'fs'; @@ -342,9 +341,9 @@ describe('Vaults client service', () => { }); test('should get vault permissions', async () => { const vaultsPermissionsGet = - grpcUtils.promisifyReadableStreamCall( + grpcUtils.promisifyReadableStreamCall( client, - client.vaultsPermissionsGet, + client.vaultsPermissionGet, ); let remoteKeynode1: PolykeyAgent | undefined; @@ -360,7 +359,6 @@ describe('Vaults client service', () => { logger: logger.getChild('Remote Keynode 2'), nodePath: path.join(dataDir, 'remoteKeynode2'), }); - const targetNodeId1 = remoteKeynode1.keyManager.getNodeId(); const targetNodeId2 = remoteKeynode2.keyManager.getNodeId(); const pkAgentNodeId = pkAgent.keyManager.getNodeId(); @@ -382,14 +380,11 @@ describe('Vaults client service', () => { port: remoteKeynode2.revProxy.getIngressPort(), }); - await remoteKeynode1.nodeManager.setNode( - pkAgent.keyManager.getNodeId(), - { - host: pkAgent.revProxy.getIngressHost(), - port: pkAgent.revProxy.getIngressPort(), - }, - ); - await remoteKeynode2.nodeManager.setNode(targetNodeId2, { + await remoteKeynode1.nodeManager.setNode(pkAgentNodeId, { + host: pkAgent.revProxy.getIngressHost(), + port: pkAgent.revProxy.getIngressPort(), + }); + await remoteKeynode2.nodeManager.setNode(pkAgentNodeId, { host: pkAgent.revProxy.getIngressHost(), port: pkAgent.revProxy.getIngressPort(), }); @@ -409,9 +404,24 @@ describe('Vaults client service', () => { const vaultId1 = await vaultManager.createVault(vaultList[0]); const vaultId2 = await vaultManager.createVault(vaultList[1]); - await vaultManager.shareVault(vaultId1, targetNodeId1); - await vaultManager.shareVault(vaultId1, targetNodeId2); - await vaultManager.shareVault(vaultId2, targetNodeId1); + await pkAgent.gestaltGraph.setGestaltActionByNode( + targetNodeId1, + 'scan', + ); + await pkAgent.acl.setVaultAction(vaultId1, targetNodeId1, 'clone'); + await pkAgent.acl.setVaultAction(vaultId1, targetNodeId1, 'pull'); + await pkAgent.gestaltGraph.setGestaltActionByNode( + targetNodeId2, + 'scan', + ); + await pkAgent.acl.setVaultAction(vaultId1, targetNodeId2, 'clone'); + await pkAgent.acl.setVaultAction(vaultId1, targetNodeId2, 'pull'); + await pkAgent.gestaltGraph.setGestaltActionByNode( + targetNodeId1, + 'scan', + ); + await pkAgent.acl.setVaultAction(vaultId2, targetNodeId1, 'clone'); + await pkAgent.acl.setVaultAction(vaultId2, targetNodeId1, 'pull'); const vaultMessage = new vaultsPB.Vault(); vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId1)); @@ -422,7 +432,9 @@ describe('Vaults client service', () => { ); const list: Record[] = []; for await (const permission of permissionsStream) { - expect(permission.getActionsList()).toEqual(['pull', 'clone']); + const permissionsList = permission.getVaultPermissionsList(); + expect(permissionsList).toContain('pull'); + expect(permissionsList).toContain('clone'); list.push(permission.toObject()); } expect(list).toHaveLength(2); @@ -433,10 +445,12 @@ describe('Vaults client service', () => { callCredentials, ); for await (const permission of permissionStream2) { - expect(permission.getActionsList()).toEqual(['pull', 'clone']); + const permissionsList = permission.getVaultPermissionsList(); + 
expect(permissionsList).toContain('pull');
+        expect(permissionsList).toContain('clone');
         const node = permission.getNode();
         const nodeId = node?.getNodeId();
-        expect(nodeId).toEqual(targetNodeId1);
+        expect(nodeId).toEqual(nodesUtils.encodeNodeId(targetNodeId1));
       }
     } finally {
       await remoteKeynode1?.stop();
diff --git a/tests/client/utils.ts b/tests/client/utils.ts
index 7c55b5c2e..71d81d943 100644
--- a/tests/client/utils.ts
+++ b/tests/client/utils.ts
@@ -36,6 +36,7 @@ async function openTestClientServer({
     notificationsManager: pkAgent.notificationsManager,
     discovery: pkAgent.discovery,
     sigchain: pkAgent.sigchain,
+    acl: pkAgent.acl,
     fwdProxy: pkAgent.fwdProxy,
     revProxy: pkAgent.revProxy,
     grpcServerClient: pkAgent.grpcServerClient,

From 205ce256355e55543be15c4aad9caf9a42c19fe3 Mon Sep 17 00:00:00 2001
From: Brian Botha
Date: Thu, 3 Mar 2022 13:25:43 +1100
Subject: [PATCH 08/10] Fixes #335

- prototyping committing history
- prototyping committing, EOD
- Linear and branching committing implemented
- recovery from dirty state and expanding tests
- added global garbage collection for git objects

---
 src/vaults/utils.ts               | 2 +-
 tests/vaults/VaultManager.test.ts | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/vaults/utils.ts b/src/vaults/utils.ts
index 92103e879..5758f91e9 100644
--- a/src/vaults/utils.ts
+++ b/src/vaults/utils.ts
@@ -7,9 +7,9 @@ import type {
 } from './types';
 import type { NodeId } from '../nodes/types';
-import { IdInternal, IdRandom } from '@matrixai/id';
 import type { EncryptedFS } from 'encryptedfs';
 import path from 'path';
+import { IdInternal, IdRandom } from '@matrixai/id';
 import { tagLast, refs, vaultActions } from './types';
 import * as nodesUtils from '../nodes/utils';
diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts
index ac0f5dba4..e942b013e 100644
--- a/tests/vaults/VaultManager.test.ts
+++ b/tests/vaults/VaultManager.test.ts
@@ -310,7 +310,7 @@ describe('VaultManager', () => {
       await vaultManager?.destroy();
     }
   });
-  test.skip('cannot concurrently create vaults with the same name', async () => {
+  test('cannot concurrently create vaults with the same name', async () => {
     const vaultManager = await VaultManager.createVaultManager({
       vaultsPath,
       keyManager: dummyKeyManager,

From 4f454904719da6b3fd8154383b657bf3f2946632 Mon Sep 17 00:00:00 2001
From: Brian Botha
Date: Tue, 8 Mar 2022 16:36:19 +1100
Subject: [PATCH 09/10] Fixes #336

---
 tests/client/rpcVaults.test.ts | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/tests/client/rpcVaults.test.ts b/tests/client/rpcVaults.test.ts
index 09f44b410..34192efde 100644
--- a/tests/client/rpcVaults.test.ts
+++ b/tests/client/rpcVaults.test.ts
@@ -248,6 +248,14 @@ describe('Vaults client service', () => {
     vaultVersionMessage.setVersionId('invalidOid');
     const version = vaultsVersion(vaultVersionMessage, callCredentials);
     await expect(version).rejects.toThrow(
+      vaultErrors.ErrorVaultReferenceInvalid,
+    );
+
+    vaultVersionMessage.setVersionId(
+      '7660aa9a2fee90e875c2d19e5deefe882ca1d4d9',
+    );
+    const version2 = vaultsVersion(vaultVersionMessage, callCredentials);
+    await expect(version2).rejects.toThrow(
       vaultErrors.ErrorVaultReferenceMissing,
     );
   });

From 08f6c001a105204ba0abafa29a679150412cfdfb Mon Sep 17 00:00:00 2001
From: Brian Botha
Date: Wed, 9 Mar 2022 19:30:29 +1100
Subject: [PATCH 10/10] General fixes

---
 src/client/service/vaultsClone.ts           | 5 +++--
 src/client/service/vaultsPermissionGet.ts   | 8 ++++----
 src/client/service/vaultsPermissionSet.ts   | 4
++-- src/client/service/vaultsPermissionUnset.ts | 6 +++--- src/vaults/VaultManager.ts | 4 +--- tests/vaults/VaultManager.test.ts | 8 ++++---- 6 files changed, 17 insertions(+), 18 deletions(-) diff --git a/src/client/service/vaultsClone.ts b/src/client/service/vaultsClone.ts index c7c650d55..a35d70e7f 100644 --- a/src/client/service/vaultsClone.ts +++ b/src/client/service/vaultsClone.ts @@ -5,6 +5,7 @@ import * as grpc from '@grpc/grpc-js'; import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as validationUtils from '../../validation/utils'; +import * as vaultsUtils from '../../vaults/utils'; function vaultsClone({ authenticate, @@ -35,8 +36,8 @@ function vaultsClone({ // Vault id let vaultId; const vaultNameOrId = vaultMessage.getNameOrId(); - vaultId = vaultManager.getVaultId(vaultNameOrId); - vaultId = vaultId ?? validationUtils.parseVaultId(vaultId); + vaultId = vaultsUtils.decodeVaultId(vaultNameOrId); + vaultId = vaultId ?? vaultNameOrId; // Node id const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); await vaultManager.cloneVault(nodeId, vaultId); diff --git a/src/client/service/vaultsPermissionGet.ts b/src/client/service/vaultsPermissionGet.ts index cb901e27b..23780000e 100644 --- a/src/client/service/vaultsPermissionGet.ts +++ b/src/client/service/vaultsPermissionGet.ts @@ -1,12 +1,12 @@ import type { Authenticate } from '../types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type { VaultName } from '../../vaults/types'; import type * as grpc from '@grpc/grpc-js'; import type { VaultActions } from '../../vaults/types'; import type ACL from '../../acl/ACL'; import type { NodeId, NodeIdEncoded } from 'nodes/types'; import { IdInternal } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; +import * as grpcUtils from '../../grpc/utils'; import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as validationUtils from '../../validation/utils'; @@ -37,7 +37,7 @@ function vaultsPermissionGet({ // Getting permissions const rawPermissions = await acl.getVaultPerm(vaultId); const permissionList: Record = {}; - // Getting the relevant information. + // Getting the relevant information for (const nodeId in rawPermissions) { permissionList[nodeId] = rawPermissions[nodeId].vaults[vaultId]; } @@ -46,7 +46,7 @@ function vaultsPermissionGet({ vaultPermissionsMessage.setVault(vaultMessage); const nodeMessage = new nodesPB.Node(); - // Constructing the message. 
+ // Constructing the message for (const nodeIdString in permissionList) { const nodeId = IdInternal.fromString(nodeIdString); nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId)); diff --git a/src/client/service/vaultsPermissionSet.ts b/src/client/service/vaultsPermissionSet.ts index 8faa4f710..6b4768ee8 100644 --- a/src/client/service/vaultsPermissionSet.ts +++ b/src/client/service/vaultsPermissionSet.ts @@ -1,6 +1,6 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type GestaltGraph from '../../gestalts/GestaltGraph'; import type ACL from '../../acl/ACL'; import type NotificationsManager from '../../notifications/NotificationsManager'; @@ -10,7 +10,7 @@ import * as grpc from '@grpc/grpc-js'; import * as vaultsUtils from '../../vaults/utils'; import * as vaultsErrors from '../../vaults/errors'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; +import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function vaultsPermissionSet({ diff --git a/src/client/service/vaultsPermissionUnset.ts b/src/client/service/vaultsPermissionUnset.ts index 4217a0edc..d16d81d98 100644 --- a/src/client/service/vaultsPermissionUnset.ts +++ b/src/client/service/vaultsPermissionUnset.ts @@ -1,13 +1,13 @@ import type { Authenticate } from '../types'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type GestaltGraph from '../../gestalts/GestaltGraph'; import type ACL from '../../acl/ACL'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; import * as vaultsErrors from '../../vaults/errors'; import * as validationUtils from '../../validation/utils'; -import { utils as grpcUtils } from '../../grpc'; +import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function vaultsPermissionUnset({ @@ -54,7 +54,7 @@ function vaultsPermissionUnset({ for (const action of actions) { await acl.unsetVaultAction(vaultId, nodeId, action); } - // We need to check if there are still shared vaults. + // We need to check if there are still shared vaults const nodePermissions = await acl.getNodePerm(nodeId); // Remove scan permissions if no more shared vaults if (nodePermissions != null) { diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 482e62498..ed65478f5 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -744,9 +744,7 @@ class VaultManager { /** * Returns all the shared vaults for a NodeId. 
*/ - public async *handleScanVaults( - nodeId: NodeId, - ): AsyncGenerator<{ + public async *handleScanVaults(nodeId: NodeId): AsyncGenerator<{ vaultId: VaultId; vaultName: VaultName; vaultPermissions: VaultAction[]; diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index e942b013e..fe5cd97b8 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -463,7 +463,7 @@ describe('VaultManager', () => { await vaultManager?.destroy(); } }); - describe.skip('With remote agents', () => { + describe('With remote agents', () => { let allDataDir: string; let keyManager: KeyManager; let fwdProxy: ForwardProxy; @@ -1388,7 +1388,7 @@ describe('VaultManager', () => { // No permissions for vault3 // scanning vaults - const gen = vaultManager.handleScanVaults(nodeId1, acl); + const gen = vaultManager.handleScanVaults(nodeId1); const vaults: Record = {}; for await (const vault of gen) { vaults[vault.vaultId] = [vault.vaultName, vault.vaultPermissions]; @@ -1399,14 +1399,14 @@ describe('VaultManager', () => { // Should throw due to no permission await expect(async () => { - for await (const _ of vaultManager.handleScanVaults(nodeId2, acl)) { + for await (const _ of vaultManager.handleScanVaults(nodeId2)) { // Should throw } }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); // Should throw due to lack of scan permission await gestaltGraph.setGestaltActionByNode(nodeId2, 'notify'); await expect(async () => { - for await (const _ of vaultManager.handleScanVaults(nodeId2, acl)) { + for await (const _ of vaultManager.handleScanVaults(nodeId2)) { // Should throw } }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied);
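For reference, a minimal sketch of consuming the renamed vaultsPermissionGet stream from the client side, mirroring the tests above. The `@/` path aliases, the `grpcUtils.promisifyReadableStreamCall` helper, and the generated `Permissions` accessors are taken from this patch set; the function name `listVaultPermissions` and the loosely typed `meta` call-credentials argument are illustrative placeholders, not part of the patches:

import type { ClientServiceClient } from '@/proto/js/polykey/v1/client_service_grpc_pb';
import type { VaultId } from '@/vaults/types';
import * as grpcUtils from '@/grpc/utils';
import * as vaultsUtils from '@/vaults/utils';
import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb';

// Collects the streamed Permissions messages for one vault into a map of
// encoded NodeId -> actions list (e.g. ['clone', 'pull']).
async function listVaultPermissions(
  client: ClientServiceClient,
  vaultId: VaultId,
  meta: any, // Hypothetical stand-in for the session call credentials
): Promise<Record<string, Array<string>>> {
  const vaultsPermissionGet = grpcUtils.promisifyReadableStreamCall<vaultsPB.Permissions>(
    client,
    client.vaultsPermissionGet,
  );
  const vaultMessage = new vaultsPB.Vault();
  vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId));
  const permissions: Record<string, Array<string>> = {};
  // Each streamed message carries the vault, one node, and that node's
  // vault_permissions entries (field 3 of the consolidated Permissions message)
  for await (const permission of vaultsPermissionGet(vaultMessage, meta)) {
    const nodeId = permission.getNode()?.getNodeId();
    if (nodeId != null) {
      permissions[nodeId] = permission.getVaultPermissionsList();
    }
  }
  return permissions;
}

The shape returned here matches what the agent-side handleScanVaults generator yields per vault, which is why the tests above can assert 'pull' and 'clone' directly against getVaultPermissionsList().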