Use react-query for server state management (#2133)
* Use react-query for server state management
* Refactor usage of react-query
parent 3cd0c2897a
commit 70656b7fc0

36 changed files with 438 additions and 609 deletions
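The pattern applied throughout this commit: server state that was previously kept in Redux slices, loaded by dispatching thunks from useEffect and re-polled with the custom useInterval hook, is now fetched through react-query hooks (useBrokers, useClusterStats, useBrokersLogDirs), with polling handled by refetchInterval and loading states by Suspense. A minimal sketch of that pattern, assuming a plain fetch call where the real code uses the generated API clients re-exported from lib/api.ts:

import React, { Suspense } from 'react';
import { QueryClient, QueryClientProvider, useQuery } from 'react-query';

// Illustrative fetcher; in the commit the generated clients from lib/api.ts
// (e.g. clustersApiClient.getClusterStats) play this role.
const fetchClusterStats = (clusterName: string) =>
  fetch(`/api/clusters/${clusterName}/stats`).then((res) => res.json());

// Server state lives in the query cache, keyed per cluster, and is re-polled
// every 5 seconds, replacing the Redux thunk + useInterval pair.
const useClusterStats = (clusterName: string) =>
  useQuery(
    ['clusterStats', clusterName],
    () => fetchClusterStats(clusterName),
    { suspense: true, refetchInterval: 5000 }
  );

const Stats: React.FC<{ clusterName: string }> = ({ clusterName }) => {
  const { data } = useClusterStats(clusterName);
  return <pre>{JSON.stringify(data, null, 2)}</pre>;
};

// A single QueryClient at the application root (index.tsx); tests create a
// fresh instance per run (lib/testHelpers.tsx).
const queryClient = new QueryClient();

export const App: React.FC = () => (
  <QueryClientProvider client={queryClient}>
    <Suspense fallback={<span>Loading...</span>}>
      <Stats clusterName="local" />
    </Suspense>
  </QueryClientProvider>
);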
kafka-ui-react-app/package-lock.json (generated, 177 changed lines)

@@ -33,6 +33,7 @@
         "react-dom": "^18.1.0",
         "react-hook-form": "7.6.9",
         "react-multi-select-component": "^4.0.6",
+        "react-query": "^3.39.1",
         "react-redux": "^7.2.6",
         "react-router-dom": "^6.3.0",
         "redux": "^4.1.1",
@@ -10512,6 +10513,14 @@
         "node": ">= 8.0.0"
       }
     },
+    "node_modules/big-integer": {
+      "version": "1.6.51",
+      "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz",
+      "integrity": "sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==",
+      "engines": {
+        "node": ">=0.6"
+      }
+    },
     "node_modules/big.js": {
       "version": "5.2.2",
       "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz",
@@ -10617,6 +10626,21 @@
         "node": ">=8"
       }
     },
+    "node_modules/broadcast-channel": {
+      "version": "3.7.0",
+      "resolved": "https://registry.npmjs.org/broadcast-channel/-/broadcast-channel-3.7.0.tgz",
+      "integrity": "sha512-cIAKJXAxGJceNZGTZSBzMxzyOn72cVgPnKx4dc6LRjQgbaJUQqhy5rzL3zbMxkMWsGKkv2hSFkPRMEXfoMZ2Mg==",
+      "dependencies": {
+        "@babel/runtime": "^7.7.2",
+        "detect-node": "^2.1.0",
+        "js-sha3": "0.8.0",
+        "microseconds": "0.2.0",
+        "nano-time": "1.0.0",
+        "oblivious-set": "1.0.0",
+        "rimraf": "3.0.2",
+        "unload": "2.2.0"
+      }
+    },
     "node_modules/browser-process-hrtime": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz",
@@ -12170,8 +12194,7 @@
     "node_modules/detect-node": {
       "version": "2.1.0",
       "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz",
-      "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==",
-      "dev": true
+      "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g=="
     },
     "node_modules/detect-port-alt": {
       "version": "1.1.6",
@@ -19028,6 +19051,11 @@
         "url": "https://github.com/chalk/supports-color?sponsor=1"
       }
     },
+    "node_modules/js-sha3": {
+      "version": "0.8.0",
+      "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz",
+      "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q=="
+    },
     "node_modules/js-tokens": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
@@ -19699,6 +19727,15 @@
         "tmpl": "1.0.x"
       }
     },
+    "node_modules/match-sorter": {
+      "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/match-sorter/-/match-sorter-6.3.1.tgz",
+      "integrity": "sha512-mxybbo3pPNuA+ZuCUhm5bwNkXrJTbsk5VWbR5wiwz/GC6LIiegBGn2w3O08UG/jdbYLinw51fSQ5xNU1U3MgBw==",
+      "dependencies": {
+        "@babel/runtime": "^7.12.5",
+        "remove-accents": "0.4.2"
+      }
+    },
     "node_modules/mdn-data": {
       "version": "2.0.4",
       "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.4.tgz",
@@ -19769,6 +19806,11 @@
         "node": ">=8.6"
       }
     },
+    "node_modules/microseconds": {
+      "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/microseconds/-/microseconds-0.2.0.tgz",
+      "integrity": "sha512-n7DHHMjR1avBbSpsTBj6fmMGh2AGrifVV4e+WYc3Q9lO+xnSZ3NyhcBND3vzzatt05LFhoKFRxrIyklmLlUtyA=="
+    },
     "node_modules/mime": {
       "version": "1.6.0",
       "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
@@ -19928,6 +19970,14 @@
       "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==",
       "dev": true
     },
+    "node_modules/nano-time": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/nano-time/-/nano-time-1.0.0.tgz",
+      "integrity": "sha512-flnngywOoQ0lLQOTRNexn2gGSNuM9bKj9RZAWSzhQ+UJYaAFG9bac4DW9VHjUAzrOaIcajHybCTHe/bkvozQqA==",
+      "dependencies": {
+        "big-integer": "^1.6.16"
+      }
+    },
     "node_modules/nanoclone": {
       "version": "0.2.1",
       "resolved": "https://registry.npmjs.org/nanoclone/-/nanoclone-0.2.1.tgz",
@@ -21380,6 +21430,11 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/oblivious-set": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/oblivious-set/-/oblivious-set-1.0.0.tgz",
+      "integrity": "sha512-z+pI07qxo4c2CulUHCDf9lcqDlMSo72N/4rLUpRXf6fu+q8vjt8y0xS+Tlf8NTJDdTXHbdeO1n3MlbctwEoXZw=="
+    },
     "node_modules/obuf": {
       "version": "1.1.2",
       "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz",
@@ -24123,6 +24178,31 @@
         "react": "^16.8.0 || ^17"
       }
     },
+    "node_modules/react-query": {
+      "version": "3.39.1",
+      "resolved": "https://registry.npmjs.org/react-query/-/react-query-3.39.1.tgz",
+      "integrity": "sha512-qYKT1bavdDiQZbngWZyPotlBVzcBjDYEJg5RQLBa++5Ix5jjfbEYJmHSZRZD+USVHUSvl/ey9Hu+QfF1QAK80A==",
+      "dependencies": {
+        "@babel/runtime": "^7.5.5",
+        "broadcast-channel": "^3.4.1",
+        "match-sorter": "^6.0.2"
+      },
+      "funding": {
+        "type": "github",
+        "url": "https://github.com/sponsors/tannerlinsley"
+      },
+      "peerDependencies": {
+        "react": "^16.8.0 || ^17.0.0 || ^18.0.0"
+      },
+      "peerDependenciesMeta": {
+        "react-dom": {
+          "optional": true
+        },
+        "react-native": {
+          "optional": true
+        }
+      }
+    },
     "node_modules/react-redux": {
       "version": "7.2.6",
       "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-7.2.6.tgz",
@@ -26803,6 +26883,11 @@
         "node": ">= 0.10"
       }
     },
+    "node_modules/remove-accents": {
+      "version": "0.4.2",
+      "resolved": "https://registry.npmjs.org/remove-accents/-/remove-accents-0.4.2.tgz",
+      "integrity": "sha1-CkPTqq4egNuRngeuJUsoXZ4ce7U="
+    },
     "node_modules/renderkid": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz",
@@ -28980,6 +29065,15 @@
         "node": ">= 10.0.0"
       }
     },
+    "node_modules/unload": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/unload/-/unload-2.2.0.tgz",
+      "integrity": "sha512-B60uB5TNBLtN6/LsgAf3udH9saB5p7gqJwcFfbOEZ8BcBHnGwCf6G/TGiEqkRAxX7zAFIUtzdrXQSdL3Q/wqNA==",
+      "dependencies": {
+        "@babel/runtime": "^7.6.2",
+        "detect-node": "^2.0.4"
+      }
+    },
     "node_modules/unpipe": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
@@ -37665,6 +37759,11 @@
        "tryer": "^1.0.1"
      }
    },
+    "big-integer": {
+      "version": "1.6.51",
+      "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz",
+      "integrity": "sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg=="
+    },
    "big.js": {
      "version": "5.2.2",
      "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz",
@@ -37756,6 +37855,21 @@
        "fill-range": "^7.0.1"
      }
    },
+    "broadcast-channel": {
+      "version": "3.7.0",
+      "resolved": "https://registry.npmjs.org/broadcast-channel/-/broadcast-channel-3.7.0.tgz",
+      "integrity": "sha512-cIAKJXAxGJceNZGTZSBzMxzyOn72cVgPnKx4dc6LRjQgbaJUQqhy5rzL3zbMxkMWsGKkv2hSFkPRMEXfoMZ2Mg==",
+      "requires": {
+        "@babel/runtime": "^7.7.2",
+        "detect-node": "^2.1.0",
+        "js-sha3": "0.8.0",
+        "microseconds": "0.2.0",
+        "nano-time": "1.0.0",
+        "oblivious-set": "1.0.0",
+        "rimraf": "3.0.2",
+        "unload": "2.2.0"
+      }
+    },
    "browser-process-hrtime": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz",
@@ -38900,8 +39014,7 @@
    "detect-node": {
      "version": "2.1.0",
      "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz",
-      "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==",
-      "dev": true
+      "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g=="
    },
    "detect-port-alt": {
      "version": "1.1.6",
@@ -44027,6 +44140,11 @@
        }
      }
    },
+    "js-sha3": {
+      "version": "0.8.0",
+      "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz",
+      "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q=="
+    },
    "js-tokens": {
      "version": "4.0.0",
      "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
@@ -44554,6 +44672,15 @@
        "tmpl": "1.0.x"
      }
    },
+    "match-sorter": {
+      "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/match-sorter/-/match-sorter-6.3.1.tgz",
+      "integrity": "sha512-mxybbo3pPNuA+ZuCUhm5bwNkXrJTbsk5VWbR5wiwz/GC6LIiegBGn2w3O08UG/jdbYLinw51fSQ5xNU1U3MgBw==",
+      "requires": {
+        "@babel/runtime": "^7.12.5",
+        "remove-accents": "0.4.2"
+      }
+    },
    "mdn-data": {
      "version": "2.0.4",
      "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.4.tgz",
@@ -44609,6 +44736,11 @@
        "picomatch": "^2.2.3"
      }
    },
+    "microseconds": {
+      "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/microseconds/-/microseconds-0.2.0.tgz",
+      "integrity": "sha512-n7DHHMjR1avBbSpsTBj6fmMGh2AGrifVV4e+WYc3Q9lO+xnSZ3NyhcBND3vzzatt05LFhoKFRxrIyklmLlUtyA=="
+    },
    "mime": {
      "version": "1.6.0",
      "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
@@ -44723,6 +44855,14 @@
      "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==",
      "dev": true
    },
+    "nano-time": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/nano-time/-/nano-time-1.0.0.tgz",
+      "integrity": "sha512-flnngywOoQ0lLQOTRNexn2gGSNuM9bKj9RZAWSzhQ+UJYaAFG9bac4DW9VHjUAzrOaIcajHybCTHe/bkvozQqA==",
+      "requires": {
+        "big-integer": "^1.6.16"
+      }
+    },
    "nanoclone": {
      "version": "0.2.1",
      "resolved": "https://registry.npmjs.org/nanoclone/-/nanoclone-0.2.1.tgz",
@@ -45756,6 +45896,11 @@
        }
      }
    },
+    "oblivious-set": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/oblivious-set/-/oblivious-set-1.0.0.tgz",
+      "integrity": "sha512-z+pI07qxo4c2CulUHCDf9lcqDlMSo72N/4rLUpRXf6fu+q8vjt8y0xS+Tlf8NTJDdTXHbdeO1n3MlbctwEoXZw=="
+    },
    "obuf": {
      "version": "1.1.2",
      "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz",
@@ -47729,6 +47874,16 @@
        "warning": "^4.0.2"
      }
    },
+    "react-query": {
+      "version": "3.39.1",
+      "resolved": "https://registry.npmjs.org/react-query/-/react-query-3.39.1.tgz",
+      "integrity": "sha512-qYKT1bavdDiQZbngWZyPotlBVzcBjDYEJg5RQLBa++5Ix5jjfbEYJmHSZRZD+USVHUSvl/ey9Hu+QfF1QAK80A==",
+      "requires": {
+        "@babel/runtime": "^7.5.5",
+        "broadcast-channel": "^3.4.1",
+        "match-sorter": "^6.0.2"
+      }
+    },
    "react-redux": {
      "version": "7.2.6",
      "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-7.2.6.tgz",
@@ -49725,6 +49880,11 @@
      "integrity": "sha1-VNvzd+UUQKypCkzSdGANP/LYiKk=",
      "dev": true
    },
+    "remove-accents": {
+      "version": "0.4.2",
+      "resolved": "https://registry.npmjs.org/remove-accents/-/remove-accents-0.4.2.tgz",
+      "integrity": "sha1-CkPTqq4egNuRngeuJUsoXZ4ce7U="
+    },
    "renderkid": {
      "version": "3.0.0",
      "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz",
@@ -51313,6 +51473,15 @@
      "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==",
      "dev": true
    },
+    "unload": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/unload/-/unload-2.2.0.tgz",
+      "integrity": "sha512-B60uB5TNBLtN6/LsgAf3udH9saB5p7gqJwcFfbOEZ8BcBHnGwCf6G/TGiEqkRAxX7zAFIUtzdrXQSdL3Q/wqNA==",
+      "requires": {
+        "@babel/runtime": "^7.6.2",
+        "detect-node": "^2.0.4"
+      }
+    },
    "unpipe": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
kafka-ui-react-app/package.json

@@ -29,6 +29,7 @@
     "react-dom": "^18.1.0",
     "react-hook-form": "7.6.9",
     "react-multi-select-component": "^4.0.6",
+    "react-query": "^3.39.1",
     "react-redux": "^7.2.6",
     "react-router-dom": "^6.3.0",
     "redux": "^4.1.1",
Broker.tsx

@@ -1,15 +1,6 @@
-import React, { useState } from 'react';
-import useInterval from 'lib/hooks/useInterval';
+import React from 'react';
 import PageHeading from 'components/common/PageHeading/PageHeading';
-import { BrokersApi, Configuration } from 'generated-sources';
-import { BASE_PARAMS } from 'lib/constants';
 import * as Metrics from 'components/common/Metrics';
-import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
-import {
-  fetchBrokers,
-  fetchClusterStats,
-  selectStats,
-} from 'redux/reducers/brokers/brokersSlice';
 import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
 import useAppParams from 'lib/hooks/useAppParams';
 import { translateLogdir } from 'components/Brokers/utils/translateLogdir';
@@ -17,9 +8,9 @@ import { SmartTable } from 'components/common/SmartTable/SmartTable';
 import { TableColumn } from 'components/common/SmartTable/TableColumn';
 import { useTableState } from 'lib/hooks/useTableState';
 import { ClusterBrokerParam } from 'lib/paths';
-
-const apiClientConf = new Configuration(BASE_PARAMS);
-export const brokersApiClient = new BrokersApi(apiClientConf);
+import useClusterStats from 'lib/hooks/useClusterStats';
+import useBrokers from 'lib/hooks/useBrokers';
+import useBrokersLogDirs from 'lib/hooks/useBrokersLogDirs';
 
 export interface BrokerLogdirState {
   name: string;
@@ -29,41 +20,24 @@ export interface BrokerLogdirState {
 }
 
 const Broker: React.FC = () => {
-  const dispatch = useAppDispatch();
   const { clusterName, brokerId } = useAppParams<ClusterBrokerParam>();
 
-  const [logdirs, setLogdirs] = useState<BrokerLogdirState[]>([]);
-  const { diskUsage, items } = useAppSelector(selectStats);
-
-  React.useEffect(() => {
-    brokersApiClient
-      .getAllBrokersLogdirs({
-        clusterName,
-        broker: [Number(brokerId)],
-      })
-      .then((res) => {
-        if (res && res[0]) {
-          setLogdirs([translateLogdir(res[0])]);
-        }
-      });
-    dispatch(fetchClusterStats(clusterName));
-    dispatch(fetchBrokers(clusterName));
-  }, [clusterName, brokerId, dispatch]);
-
-  const tableState = useTableState<BrokerLogdirState, string>(logdirs, {
-    idSelector: (logdir) => logdir.name,
+  const { data: clusterStats } = useClusterStats(clusterName);
+  const { data: brokers } = useBrokers(clusterName);
+  const { data: logDirs } = useBrokersLogDirs(clusterName, Number(brokerId));
+
+  const preparedRows = logDirs?.map(translateLogdir) || [];
+  const tableState = useTableState<BrokerLogdirState, string>(preparedRows, {
+    idSelector: ({ name }) => name,
     totalPages: 0,
   });
 
-  const brokerItem = items?.find((item) => item.id === Number(brokerId));
-  const brokerDiskUsage = diskUsage?.find(
+  if (!clusterStats) return null;
+
+  const brokerItem = brokers?.find(({ id }) => id === Number(brokerId));
+  const brokerDiskUsage = clusterStats.diskUsage?.find(
     (item) => item.brokerId === Number(brokerId)
   );
 
-  useInterval(() => {
-    fetchClusterStats(clusterName);
-    fetchBrokers(clusterName);
-  }, 5000);
-
   return (
     <>
       <PageHeading text={`Broker ${brokerId}`} />
Broker.spec.tsx

@@ -3,88 +3,65 @@ import { render, WithRoute } from 'lib/testHelpers';
 import { screen, waitFor } from '@testing-library/dom';
 import { clusterBrokerPath } from 'lib/paths';
 import fetchMock from 'fetch-mock';
-import { clusterStatsPayloadBroker } from 'redux/reducers/brokers/__test__/fixtures';
 import { act } from '@testing-library/react';
 import Broker from 'components/Brokers/Broker/Broker';
-import { BrokersLogdirs } from 'generated-sources';
+import {
+  clusterStatsPayload,
+  brokerLogDirsPayload,
+  brokersPayload,
+} from 'components/Brokers/__test__/fixtures';
+
+const clusterName = 'local';
+const brokerId = 1;
+const fetchStatsUrl = `/api/clusters/${clusterName}/stats`;
+const fetchBrokersUrl = `/api/clusters/${clusterName}/brokers`;
+const fetchLogDirsUrl = `/api/clusters/${clusterName}/brokers/logdirs`;
 
 describe('Broker Component', () => {
-  afterEach(() => fetchMock.reset());
-
-  const clusterName = 'local';
-  const brokerId = 1;
-
-  const renderComponent = () =>
-    render(
-      <WithRoute path={clusterBrokerPath()}>
-        <Broker />
-      </WithRoute>,
-      {
-        initialEntries: [clusterBrokerPath(clusterName, brokerId)],
-      }
-    );
-
-  describe('Broker', () => {
-    const fetchBrokerMockUrl = `/api/clusters/${clusterName}/brokers/logdirs?broker=${brokerId}`;
-
-    const actRender = async (
-      mockData: BrokersLogdirs[] = clusterStatsPayloadBroker
-    ) => {
-      const fetchBrokerMock = fetchMock.getOnce(fetchBrokerMockUrl, mockData);
-
-      await act(() => {
-        renderComponent();
-      });
-      await waitFor(() => expect(fetchBrokerMock.called()).toBeTruthy());
-    };
-
-    it('renders', async () => {
-      await actRender();
-
-      expect(screen.getByRole('table')).toBeInTheDocument();
-      const rows = screen.getAllByRole('row');
-      expect(rows.length).toEqual(2);
-    });
-
-    it('show warning when broker not found', async () => {
-      await actRender([]);
-
-      expect(
-        screen.getByText('Log dir data not available')
-      ).toBeInTheDocument();
-    });
-
-    it('show broker found', async () => {
-      await actRender();
-      const topicCount = screen.getByText(
-        clusterStatsPayloadBroker[0].topics?.length || 0
-      );
-      const partitionsCount = screen.getByText(
-        clusterStatsPayloadBroker[0].topics?.reduce(
-          (previousValue, currentValue) =>
-            previousValue + (currentValue.partitions?.length || 0),
-          0
-        ) || 0
-      );
-      expect(topicCount).toBeInTheDocument();
-      expect(partitionsCount).toBeInTheDocument();
-    });
-
-    it('show 0s when broker has not topics', async () => {
-      await actRender([{ ...clusterStatsPayloadBroker[0], topics: undefined }]);
-
-      expect(screen.getAllByText(0).length).toEqual(2);
-    });
-
-    it('show - when broker has not name', async () => {
-      await actRender([{ ...clusterStatsPayloadBroker[0], name: undefined }]);
-
-      expect(screen.getByText('-')).toBeInTheDocument();
-    });
-
-    it('show - when broker has not error', async () => {
-      await actRender([{ ...clusterStatsPayloadBroker[0], error: undefined }]);
-      expect(screen.getByText('-')).toBeInTheDocument();
-    });
+  afterEach(() => {
+    fetchMock.reset();
+  });
+
+  const renderComponent = async () => {
+    const fetchStatsMock = fetchMock.get(fetchStatsUrl, clusterStatsPayload);
+    const fetchBrokersMock = fetchMock.get(fetchBrokersUrl, brokersPayload);
+    await act(() => {
+      render(
+        <WithRoute path={clusterBrokerPath()}>
+          <Broker />
+        </WithRoute>,
+        {
+          initialEntries: [clusterBrokerPath(clusterName, brokerId)],
+        }
+      );
+    });
+    await waitFor(() => expect(fetchStatsMock.called()).toBeTruthy());
+    expect(fetchBrokersMock.called()).toBeTruthy();
+  };
+
+  it('shows warning when server returns empty logDirs response', async () => {
+    const fetchLogDirsMock = fetchMock.getOnce(fetchLogDirsUrl, [], {
+      query: { broker: brokerId },
+    });
+    await renderComponent();
+    await waitFor(() => expect(fetchLogDirsMock.called()).toBeTruthy());
+    expect(screen.getByText('Log dir data not available')).toBeInTheDocument();
+  });
+
+  it('shows broker found', async () => {
+    const fetchLogDirsMock = fetchMock.getOnce(
+      fetchLogDirsUrl,
+      brokerLogDirsPayload,
+      {
+        query: { broker: brokerId },
+      }
+    );
+
+    await renderComponent();
+    await waitFor(() => expect(fetchLogDirsMock.called()).toBeTruthy());
+    const topicCount = screen.getByText(3);
+    const partitionsCount = screen.getByText(4);
+    expect(topicCount).toBeInTheDocument();
+    expect(partitionsCount).toBeInTheDocument();
   });
 });
Brokers.tsx

@@ -1,7 +1,7 @@
 import React from 'react';
 import { Route, Routes } from 'react-router-dom';
 import { getNonExactPath, RouteParams } from 'lib/paths';
-import BrokersList from 'components/Brokers/List/BrokersList';
+import BrokersList from 'components/Brokers/BrokersList/BrokersList';
 import Broker from 'components/Brokers/Broker/Broker';
 import { BreadcrumbRoute } from 'components/common/Breadcrumb/Breadcrumb.route';
 
BrokersList.tsx

@@ -1,23 +1,22 @@
 import React from 'react';
 import { ClusterName } from 'redux/interfaces';
-import useInterval from 'lib/hooks/useInterval';
 import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
 import { NavLink } from 'react-router-dom';
 import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell';
 import { Table } from 'components/common/table/Table/Table.styled';
 import PageHeading from 'components/common/PageHeading/PageHeading';
 import * as Metrics from 'components/common/Metrics';
-import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
-import {
-  fetchBrokers,
-  fetchClusterStats,
-  selectStats,
-} from 'redux/reducers/brokers/brokersSlice';
 import useAppParams from 'lib/hooks/useAppParams';
+import useBrokers from 'lib/hooks/useBrokers';
+import useClusterStats from 'lib/hooks/useClusterStats';
 
 const BrokersList: React.FC = () => {
-  const dispatch = useAppDispatch();
   const { clusterName } = useAppParams<{ clusterName: ClusterName }>();
+  const { data: clusterStats } = useClusterStats(clusterName);
+  const { data: brokers } = useBrokers(clusterName);
+
+  if (!clusterStats) return null;
 
   const {
     brokerCount,
     activeControllers,
@@ -28,21 +27,12 @@ const BrokersList: React.FC = () => {
     underReplicatedPartitionCount,
     diskUsage,
     version,
-    items,
-  } = useAppSelector(selectStats);
+  } = clusterStats;
 
   const replicas = (inSyncReplicasCount ?? 0) + (outOfSyncReplicasCount ?? 0);
   const areAllInSync = inSyncReplicasCount && replicas === inSyncReplicasCount;
   const partitionIsOffline = offlinePartitionCount && offlinePartitionCount > 0;
-  React.useEffect(() => {
-    dispatch(fetchClusterStats(clusterName));
-    dispatch(fetchBrokers(clusterName));
-  }, [clusterName, dispatch]);
-
-  useInterval(() => {
-    fetchClusterStats(clusterName);
-    fetchBrokers(clusterName);
-  }, 5000);
-
   return (
     <>
       <PageHeading text="Broker" />
@@ -123,7 +113,7 @@ const BrokersList: React.FC = () => {
             {diskUsage &&
               diskUsage.length !== 0 &&
               diskUsage.map(({ brokerId, segmentSize, segmentCount }) => {
-                const brokerItem = items?.find((item) => item.id === brokerId);
+                const brokerItem = brokers?.find(({ id }) => id === brokerId);
                 return (
                   <tr key={brokerId}>
                     <td>
BrokersList.spec.tsx

@@ -3,9 +3,12 @@ import { render, WithRoute } from 'lib/testHelpers';
 import { screen, waitFor } from '@testing-library/dom';
 import { clusterBrokersPath } from 'lib/paths';
 import fetchMock from 'fetch-mock';
-import { clusterStatsPayload } from 'redux/reducers/brokers/__test__/fixtures';
 import { act } from '@testing-library/react';
-import BrokersList from 'components/Brokers/List/BrokersList';
+import BrokersList from 'components/Brokers/BrokersList/BrokersList';
+import {
+  brokersPayload,
+  clusterStatsPayload,
+} from 'components/Brokers/__test__/fixtures';
 
 describe('BrokersList Component', () => {
   afterEach(() => fetchMock.reset());
@@ -30,17 +33,14 @@ describe('BrokersList Component', () => {
   const fetchStatsUrl = `/api/clusters/${clusterName}/stats`;
 
   beforeEach(() => {
-    fetchBrokersMock = fetchMock.getOnce(
+    fetchBrokersMock = fetchMock.get(
       `/api/clusters/${clusterName}/brokers`,
-      clusterStatsPayload
+      brokersPayload
     );
   });
 
   it('renders', async () => {
-    const fetchStatsMock = fetchMock.getOnce(
-      fetchStatsUrl,
-      clusterStatsPayload
-    );
+    const fetchStatsMock = fetchMock.get(fetchStatsUrl, clusterStatsPayload);
     await act(() => {
       renderComponent();
     });
Brokers.spec.tsx

@@ -7,7 +7,7 @@ import Brokers from 'components/Brokers/Brokers';
 const brokersList = 'brokersList';
 const broker = 'brokers';
 
-jest.mock('components/Brokers/List/BrokersList', () => () => (
+jest.mock('components/Brokers/BrokersList/BrokersList', () => () => (
   <div>{brokersList}</div>
 ));
 jest.mock('components/Brokers/Broker/Broker', () => () => <div>{broker}</div>);
@@ -15,11 +15,10 @@ jest.mock('components/Brokers/Broker/Broker', () => () => <div>{broker}</div>);
 describe('Brokers Component', () => {
   const clusterName = 'clusterName';
   const brokerId = '1';
-  const renderComponent = (path?: string) => {
-    return render(<Brokers />, {
+  const renderComponent = (path?: string) =>
+    render(<Brokers />, {
       initialEntries: path ? [path] : undefined,
     });
-  };
 
   it('renders BrokersList', () => {
     renderComponent();
fixtures.ts (Brokers test fixtures)

@@ -52,7 +52,7 @@ export const updatedBrokersReducerState = {
   version: '2.2.1',
 };
 
-const partitions = {
+const partition = {
   broker: 2,
   offsetLag: 0,
   partition: 2,
@@ -60,17 +60,17 @@
 };
 const topics = {
   name: '_confluent-ksql-devquery_CTAS_NUMBER_OF_TESTS_59-Aggregate-Aggregate-Materialize-changelog',
-  partitions: [partitions],
+  partitions: [partition],
 };
 
-export const clusterStatsPayloadBroker: BrokersLogdirs[] = [
+export const brokerLogDirsPayload: BrokersLogdirs[] = [
   {
     error: 'NONE',
     name: '/opt/kafka/data-0/logs',
     topics: [
       {
         ...topics,
-        partitions: [partitions, partitions, partitions],
+        partitions: [partition, partition, partition],
       },
       topics,
       {
Cluster.tsx

@@ -1,4 +1,4 @@
-import React from 'react';
+import React, { Suspense } from 'react';
 import { useSelector } from 'react-redux';
 import { Routes, Navigate, Route, Outlet } from 'react-router-dom';
 import useAppParams from 'lib/hooks/useAppParams';
@@ -22,12 +22,14 @@ import Topics from 'components/Topics/Topics';
 import Schemas from 'components/Schemas/Schemas';
 import Connect from 'components/Connect/Connect';
 import ClusterContext from 'components/contexts/ClusterContext';
-import Brokers from 'components/Brokers/Brokers';
 import ConsumersGroups from 'components/ConsumerGroups/ConsumerGroups';
 import KsqlDb from 'components/KsqlDb/KsqlDb';
 import Breadcrumb from 'components/common/Breadcrumb/Breadcrumb';
 import { BreadcrumbRoute } from 'components/common/Breadcrumb/Breadcrumb.route';
 import { BreadcrumbProvider } from 'components/common/Breadcrumb/Breadcrumb.provider';
+import PageLoader from 'components/common/PageLoader/PageLoader';
+
+const Brokers = React.lazy(() => import('components/Brokers/Brokers'));
 
 const Cluster: React.FC = () => {
   const { clusterName } = useAppParams<ClusterNameRoute>();
@@ -63,79 +65,81 @@ const Cluster: React.FC = () => {
(the Route declarations inside <Routes> are unchanged in this hunk apart from being re-indented under the new Suspense wrapper)
   return (
     <BreadcrumbProvider>
       <Breadcrumb />
-      <ClusterContext.Provider value={contextValue}>
-        <Routes>
+      <Suspense fallback={<PageLoader />}>
+        <ClusterContext.Provider value={contextValue}>
+          <Routes>
            <Route
              path={getNonExactPath(clusterBrokerRelativePath)}
              element={
                <BreadcrumbRoute>
                  <Brokers />
                </BreadcrumbRoute>
              }
            />
            <Route
              path={getNonExactPath(clusterTopicsRelativePath)}
              element={
                <BreadcrumbRoute>
                  <Topics />
                </BreadcrumbRoute>
              }
            />
            <Route
              path={getNonExactPath(clusterConsumerGroupsRelativePath)}
              element={
                <BreadcrumbRoute>
                  <ConsumersGroups />
                </BreadcrumbRoute>
              }
            />
            {hasSchemaRegistryConfigured && (
              <Route
                path={getNonExactPath(clusterSchemasRelativePath)}
                element={
                  <BreadcrumbRoute>
                    <Schemas />
                  </BreadcrumbRoute>
                }
              />
            )}
            {hasKafkaConnectConfigured && (
              <Route
                path={getNonExactPath(clusterConnectsRelativePath)}
                element={
                  <BreadcrumbRoute>
                    <Connect />
                  </BreadcrumbRoute>
                }
              />
            )}
            {hasKafkaConnectConfigured && (
              <Route
                path={getNonExactPath(clusterConnectorsRelativePath)}
                element={
                  <BreadcrumbRoute>
                    <Connect />
                  </BreadcrumbRoute>
                }
              />
            )}
            {hasKsqlDbConfigured && (
              <Route
                path={getNonExactPath(clusterKsqlDbRelativePath)}
                element={
                  <BreadcrumbRoute>
                    <KsqlDb />
                  </BreadcrumbRoute>
                }
              />
            )}
            <Route
              path="/"
              element={<Navigate to={clusterBrokerRelativePath} replace />}
            />
-        </Routes>
-        <Outlet />
-      </ClusterContext.Provider>
+          </Routes>
+          <Outlet />
+        </ClusterContext.Provider>
+      </Suspense>
     </BreadcrumbProvider>
   );
 };
Cluster.spec.tsx

@@ -15,6 +15,7 @@ import {
   clusterSchemasPath,
   clusterTopicsPath,
 } from 'lib/paths';
+import { act } from 'react-dom/test-utils';
 
 const CLusterCompText = {
   Topics: 'Topics',
@@ -45,16 +46,17 @@ jest.mock('components/KsqlDb/KsqlDb', () => () => (
 ));
 
 describe('Cluster', () => {
-  const renderComponent = (pathname: string) =>
+  const renderComponent = (pathname: string) => {
     render(
       <WithRoute path={`${clusterPath()}/*`}>
         <Cluster />
       </WithRoute>,
       { initialEntries: [pathname], store }
     );
+  };
 
-  it('renders Brokers', () => {
-    renderComponent(clusterBrokersPath('second'));
+  it('renders Brokers', async () => {
+    await act(() => renderComponent(clusterBrokersPath('second')));
     expect(screen.getByText(CLusterCompText.Brokers)).toBeInTheDocument();
   });
   it('renders Topics', () => {
Schemas Details.tsx

@@ -21,7 +21,6 @@ import {
   fetchSchemaVersions,
   getAreSchemaLatestFulfilled,
   getAreSchemaVersionsFulfilled,
-  schemasApiClient,
   SCHEMAS_VERSIONS_FETCH_ACTION,
   SCHEMA_LATEST_FETCH_ACTION,
   selectAllSchemaVersions,
@@ -32,6 +31,7 @@ import { getResponse } from 'lib/errorHandling';
 import { resetLoaderById } from 'redux/reducers/loader/loaderSlice';
 import { TableTitle } from 'components/common/table/TableTitle/TableTitle.styled';
 import useAppParams from 'lib/hooks/useAppParams';
+import { schemasApiClient } from 'lib/api';
 
 import LatestVersionItem from './LatestVersion/LatestVersionItem';
 import SchemaVersion from './SchemaVersion/SchemaVersion';
Schemas Edit.tsx

@@ -16,7 +16,6 @@ import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
 import useAppParams from 'lib/hooks/useAppParams';
 import {
   schemaAdded,
-  schemasApiClient,
   fetchLatestSchema,
   getSchemaLatest,
   SCHEMA_LATEST_FETCH_ACTION,
@@ -27,6 +26,7 @@ import { serverErrorAlertAdded } from 'redux/reducers/alerts/alertsSlice';
 import { getResponse } from 'lib/errorHandling';
 import PageLoader from 'components/common/PageLoader/PageLoader';
 import { resetLoaderById } from 'redux/reducers/loader/loaderSlice';
+import { schemasApiClient } from 'lib/api';
 
 import * as S from './Edit.styled';
 
GlobalSchemaSelector.tsx

@@ -8,11 +8,9 @@ import usePagination from 'lib/hooks/usePagination';
 import useSearch from 'lib/hooks/useSearch';
 import useAppParams from 'lib/hooks/useAppParams';
 import { serverErrorAlertAdded } from 'redux/reducers/alerts/alertsSlice';
-import {
-  fetchSchemas,
-  schemasApiClient,
-} from 'redux/reducers/schemas/schemasSlice';
+import { fetchSchemas } from 'redux/reducers/schemas/schemasSlice';
 import { ClusterNameRoute } from 'lib/paths';
+import { schemasApiClient } from 'lib/api';
 
 import * as S from './GlobalSchemaSelector.styled';
 
Schemas New.tsx

@@ -13,14 +13,12 @@ import Select, { SelectOption } from 'components/common/Select/Select';
 import { Button } from 'components/common/Button/Button';
 import { Textarea } from 'components/common/Textbox/Textarea.styled';
 import PageHeading from 'components/common/PageHeading/PageHeading';
-import {
-  schemaAdded,
-  schemasApiClient,
-} from 'redux/reducers/schemas/schemasSlice';
+import { schemaAdded } from 'redux/reducers/schemas/schemasSlice';
 import { useAppDispatch } from 'lib/hooks/redux';
 import useAppParams from 'lib/hooks/useAppParams';
 import { serverErrorAlertAdded } from 'redux/reducers/alerts/alertsSlice';
 import { getResponse } from 'lib/errorHandling';
+import { schemasApiClient } from 'lib/api';
 
 import * as S from './New.styled';
 
SendMessage.tsx

@@ -6,7 +6,6 @@ import {
   RouteParamsClusterTopic,
 } from 'lib/paths';
 import jsf from 'json-schema-faker';
-import { messagesApiClient } from 'redux/reducers/topicMessages/topicMessagesSlice';
 import {
   fetchTopicMessageSchema,
   fetchTopicDetails,
@@ -25,6 +24,7 @@ import {
 import Select, { SelectOption } from 'components/common/Select/Select';
 import useAppParams from 'lib/hooks/useAppParams';
 import Heading from 'components/common/heading/Heading.styled';
+import { messagesApiClient } from 'lib/api';
 
 import validateMessage from './validateMessage';
 import * as S from './SendMessage.styled';
src/index.tsx

@@ -2,12 +2,15 @@ import React from 'react';
 import { createRoot } from 'react-dom/client';
 import { BrowserRouter } from 'react-router-dom';
 import { Provider } from 'react-redux';
+import { QueryClient, QueryClientProvider } from 'react-query';
 import * as serviceWorker from 'serviceWorker';
 import App from 'components/App';
 import { store } from 'redux/store';
 import 'theme/index.scss';
 import 'lib/constants';
+
+const queryClient = new QueryClient();
 
 const container =
   document.getElementById('root') || document.createElement('div');
 const root = createRoot(container);
@@ -15,7 +18,9 @@ const root = createRoot(container);
 root.render(
   <Provider store={store}>
     <BrowserRouter basename={window.basePath || '/'}>
-      <App />
+      <QueryClientProvider client={queryClient}>
+        <App />
+      </QueryClientProvider>
     </BrowserRouter>
   </Provider>
 );
kafka-ui-react-app/src/lib/api.ts (new file, 23 lines)

@@ -0,0 +1,23 @@
+import {
+  BrokersApi,
+  ClustersApi,
+  Configuration,
+  ConsumerGroupsApi,
+  KafkaConnectApi,
+  KsqlApi,
+  MessagesApi,
+  SchemasApi,
+  TopicsApi,
+} from 'generated-sources';
+import { BASE_PARAMS } from 'lib/constants';
+
+const apiClientConf = new Configuration(BASE_PARAMS);
+
+export const brokersApiClient = new BrokersApi(apiClientConf);
+export const clustersApiClient = new ClustersApi(apiClientConf);
+export const kafkaConnectApiClient = new KafkaConnectApi(apiClientConf);
+export const consumerGroupsApiClient = new ConsumerGroupsApi(apiClientConf);
+export const ksqlDbApiClient = new KsqlApi(apiClientConf);
+export const topicsApiClient = new TopicsApi(apiClientConf);
+export const messagesApiClient = new MessagesApi(apiClientConf);
+export const schemasApiClient = new SchemasApi(apiClientConf);
kafka-ui-react-app/src/lib/hooks/useBrokers.tsx (new file, 11 lines)

@@ -0,0 +1,11 @@
+import { brokersApiClient } from 'lib/api';
+import { useQuery } from 'react-query';
+import { ClusterName } from 'redux/interfaces';
+
+export default function useBrokers(clusterName: ClusterName) {
+  return useQuery(
+    ['brokers', clusterName],
+    () => brokersApiClient.getBrokers({ clusterName }),
+    { suspense: true, refetchInterval: 5000 }
+  );
+}
kafka-ui-react-app/src/lib/hooks/useBrokersLogDirs.tsx (new file, 18 lines)

@@ -0,0 +1,18 @@
+import { brokersApiClient } from 'lib/api';
+import { useQuery } from 'react-query';
+import { ClusterName } from 'redux/interfaces';
+
+export default function useBrokersLogDirs(
+  clusterName: ClusterName,
+  brokerId: number
+) {
+  return useQuery(
+    ['logDirs', clusterName, brokerId],
+    () =>
+      brokersApiClient.getAllBrokersLogdirs({
+        clusterName,
+        broker: [brokerId],
+      }),
+    { suspense: true, refetchInterval: 5000 }
+  );
+}
kafka-ui-react-app/src/lib/hooks/useClusterStats.tsx (new file, 11 lines)

@@ -0,0 +1,11 @@
+import { clustersApiClient } from 'lib/api';
+import { useQuery } from 'react-query';
+import { ClusterName } from 'redux/interfaces';
+
+export default function useClusterStats(clusterName: ClusterName) {
+  return useQuery(
+    ['clusterStats', clusterName],
+    () => clustersApiClient.getClusterStats({ clusterName }),
+    { suspense: true, refetchInterval: 5000 }
+  );
+}
@ -1,25 +0,0 @@
|
||||||
import React from 'react';
|
|
||||||
|
|
||||||
type Callback = () => void;
|
|
||||||
|
|
||||||
const useInterval = (callback: Callback, delay: number) => {
|
|
||||||
const savedCallback = React.useRef<Callback>();
|
|
||||||
|
|
||||||
React.useEffect(() => {
|
|
||||||
savedCallback.current = callback;
|
|
||||||
}, [callback]);
|
|
||||||
|
|
||||||
// eslint-disable-next-line consistent-return
|
|
||||||
React.useEffect(() => {
|
|
||||||
const tick = () => {
|
|
||||||
if (savedCallback.current) savedCallback.current();
|
|
||||||
};
|
|
||||||
|
|
||||||
if (delay !== null) {
|
|
||||||
const id = setInterval(tick, delay);
|
|
||||||
return () => clearInterval(id);
|
|
||||||
}
|
|
||||||
}, [delay]);
|
|
||||||
};
|
|
||||||
|
|
||||||
export default useInterval;
|
|
@@ -14,6 +14,7 @@ import { RootState } from 'redux/interfaces';
 import { configureStore } from '@reduxjs/toolkit';
 import rootReducer from 'redux/reducers';
 import mockStoreCreator from 'redux/store/configureStore/mockStoreCreator';
+import { QueryClient, QueryClientProvider } from 'react-query';
 
 interface CustomRenderOptions extends Omit<RenderOptions, 'wrapper'> {
   preloadedState?: Partial<RootState>;
@@ -57,6 +58,10 @@ const customRender = (
   ...renderOptions
 }: CustomRenderOptions = {}
 ) => {
+  // use new QueryClient instance for each test run to avoid issues with cache
+  const queryClient = new QueryClient({
+    defaultOptions: { queries: { retry: false } },
+  });
   // overrides @testing-library/react render.
   const AllTheProviders: React.FC<PropsWithChildren<unknown>> = ({
     children,
@@ -64,9 +69,11 @@ const customRender = (
   return (
     <ThemeProvider theme={theme}>
       <Provider store={store}>
-        <MemoryRouter initialEntries={initialEntries}>
-          {children}
-        </MemoryRouter>
+        <QueryClientProvider client={queryClient}>
+          <MemoryRouter initialEntries={initialEntries}>
+            {children}
+          </MemoryRouter>
+        </QueryClientProvider>
       </Provider>
     </ThemeProvider>
   );
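With the provider in place and retries disabled, a test can render a component that uses one of the suspense hooks and simply await the resulting markup. This sketch assumes the custom render above is exported as render from lib/testHelpers and that jest-dom matchers are configured; the mocked payload and the tiny test component are illustrations only:

import React from 'react';
import { screen } from '@testing-library/react';
import fetchMock from 'fetch-mock-jest';
import { render } from 'lib/testHelpers'; // assumed export name of customRender
import useBrokers from 'lib/hooks/useBrokers';

// Minimal component that reads the suspense query result.
const BrokerCount: React.FC = () => {
  const { data } = useBrokers('local');
  return <span>{data?.length} brokers</span>;
};

it('renders data fetched through a suspense query', async () => {
  fetchMock.getOnce('/api/clusters/local/brokers', [{ id: 1 }, { id: 2 }]);
  render(
    <React.Suspense fallback={<span>loading</span>}>
      <BrokerCount />
    </React.Suspense>
  );
  expect(await screen.findByText('2 brokers')).toBeInTheDocument();
});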
@@ -1,113 +0,0 @@
import fetchMock from 'fetch-mock-jest';
import reducer, {
  initialState,
  fetchBrokers,
  fetchClusterStats,
} from 'redux/reducers/brokers/brokersSlice';
import mockStoreCreator from 'redux/store/configureStore/mockStoreCreator';

import {
  brokersPayload,
  clusterStatsPayload,
  initialBrokersReducerState,
  updatedBrokersReducerState,
} from './fixtures';

const store = mockStoreCreator;
const clusterName = 'test-sluster-name';

describe('Brokers slice', () => {
  describe('reducer', () => {
    it('returns the initial state', () => {
      expect(reducer(undefined, { type: fetchBrokers.pending })).toEqual(
        initialState
      );
    });
    it('reacts on fetchBrokers.fullfiled and returns payload', () => {
      expect(
        reducer(initialState, {
          type: fetchBrokers.fulfilled,
          payload: brokersPayload,
        })
      ).toEqual({
        ...initialState,
        items: brokersPayload,
      });
    });
    it('reacts on fetchClusterStats.fullfiled and returns payload', () => {
      expect(
        reducer(initialBrokersReducerState, {
          type: fetchClusterStats.fulfilled,
          payload: clusterStatsPayload,
        })
      ).toEqual(updatedBrokersReducerState);
    });
  });

  describe('thunks', () => {
    afterEach(() => {
      fetchMock.restore();
      store.clearActions();
    });

    describe('fetchBrokers', () => {
      it('creates fetchBrokers.fulfilled when broker are fetched', async () => {
        fetchMock.getOnce(
          `/api/clusters/${clusterName}/brokers`,
          brokersPayload
        );
        await store.dispatch(fetchBrokers(clusterName));
        expect(
          store.getActions().map(({ type, payload }) => ({ type, payload }))
        ).toEqual([
          { type: fetchBrokers.pending.type },
          {
            type: fetchBrokers.fulfilled.type,
            payload: brokersPayload,
          },
        ]);
      });

      it('creates fetchBrokers.rejected when fetched clusters', async () => {
        fetchMock.getOnce(`/api/clusters/${clusterName}/brokers`, 422);
        await store.dispatch(fetchBrokers(clusterName));
        expect(
          store.getActions().map(({ type, payload }) => ({ type, payload }))
        ).toEqual([
          { type: fetchBrokers.pending.type },
          { type: fetchBrokers.rejected.type },
        ]);
      });
    });

    describe('fetchClusterStats', () => {
      it('creates fetchClusterStats.fulfilled when broker are fetched', async () => {
        fetchMock.getOnce(
          `/api/clusters/${clusterName}/stats`,
          clusterStatsPayload
        );
        await store.dispatch(fetchClusterStats(clusterName));
        expect(
          store.getActions().map(({ type, payload }) => ({ type, payload }))
        ).toEqual([
          { type: fetchClusterStats.pending.type },
          {
            type: fetchClusterStats.fulfilled.type,
            payload: clusterStatsPayload,
          },
        ]);
      });

      it('creates fetchClusterStats.rejected when fetched clusters', async () => {
        fetchMock.getOnce(`/api/clusters/${clusterName}/stats`, 422);
        await store.dispatch(fetchClusterStats(clusterName));
        expect(
          store.getActions().map(({ type, payload }) => ({ type, payload }))
        ).toEqual([
          { type: fetchClusterStats.pending.type },
          { type: fetchClusterStats.rejected.type },
        ]);
      });
    });
  });
});
@@ -1,83 +0,0 @@
import { store } from 'redux/store';
import * as selectors from 'redux/reducers/brokers/selectors';
import {
  fetchBrokers,
  fetchClusterStats,
} from 'redux/reducers/brokers/brokersSlice';

import { brokersPayload, updatedBrokersReducerState } from './fixtures';

const { dispatch, getState } = store;

describe('Brokers selectors', () => {
  describe('Initial State', () => {
    it('returns broker count', () => {
      expect(selectors.getBrokerCount(getState())).toEqual(0);
    });
    it('returns active controllers', () => {
      expect(selectors.getActiveControllers(getState())).toEqual(0);
    });
    it('returns online partition count', () => {
      expect(selectors.getOnlinePartitionCount(getState())).toEqual(0);
    });
    it('returns offline partition count', () => {
      expect(selectors.getOfflinePartitionCount(getState())).toEqual(0);
    });
    it('returns in sync replicas count', () => {
      expect(selectors.getInSyncReplicasCount(getState())).toEqual(0);
    });
    it('returns out of sync replicas count', () => {
      expect(selectors.getOutOfSyncReplicasCount(getState())).toEqual(0);
    });
    it('returns under replicated partition count', () => {
      expect(selectors.getUnderReplicatedPartitionCount(getState())).toEqual(0);
    });
    it('returns disk usage', () => {
      expect(selectors.getDiskUsage(getState())).toEqual([]);
    });
    it('returns version', () => {
      expect(selectors.getVersion(getState())).toBeUndefined();
    });
  });

  describe('state', () => {
    beforeAll(() => {
      dispatch({ type: fetchBrokers.fulfilled.type, payload: brokersPayload });
      dispatch({
        type: fetchClusterStats.fulfilled.type,
        payload: updatedBrokersReducerState,
      });
    });

    it('returns broker count', () => {
      expect(selectors.getBrokerCount(getState())).toEqual(2);
    });
    it('returns active controllers', () => {
      expect(selectors.getActiveControllers(getState())).toEqual(1);
    });
    it('returns online partition count', () => {
      expect(selectors.getOnlinePartitionCount(getState())).toEqual(138);
    });
    it('returns offline partition count', () => {
      expect(selectors.getOfflinePartitionCount(getState())).toEqual(0);
    });
    it('returns in sync replicas count', () => {
      expect(selectors.getInSyncReplicasCount(getState())).toEqual(239);
    });
    it('returns out of sync replicas count', () => {
      expect(selectors.getOutOfSyncReplicasCount(getState())).toEqual(0);
    });
    it('returns under replicated partition count', () => {
      expect(selectors.getUnderReplicatedPartitionCount(getState())).toEqual(0);
    });
    it('returns disk usage', () => {
      expect(selectors.getDiskUsage(getState())).toEqual([
        { brokerId: 0, segmentSize: 334567, segmentCount: 245 },
        { brokerId: 1, segmentSize: 12345678, segmentCount: 121 },
      ]);
    });
    it('returns version', () => {
      expect(selectors.getVersion(getState())).toEqual('2.2.1');
    });
  });
});
@@ -1,52 +0,0 @@
import { BrokersApi, ClustersApi, Configuration } from 'generated-sources';
import { BrokersState, ClusterName, RootState } from 'redux/interfaces';
import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
import { BASE_PARAMS } from 'lib/constants';

const apiClientConf = new Configuration(BASE_PARAMS);
export const brokersApiClient = new BrokersApi(apiClientConf);
export const clustersApiClient = new ClustersApi(apiClientConf);

export const fetchBrokers = createAsyncThunk(
  'brokers/fetchBrokers',
  (clusterName: ClusterName) => brokersApiClient.getBrokers({ clusterName })
);

export const fetchClusterStats = createAsyncThunk(
  'brokers/fetchClusterStats',
  (clusterName: ClusterName) =>
    clustersApiClient.getClusterStats({ clusterName })
);

export const initialState: BrokersState = {
  items: [],
  brokerCount: 0,
  activeControllers: 0,
  onlinePartitionCount: 0,
  offlinePartitionCount: 0,
  inSyncReplicasCount: 0,
  outOfSyncReplicasCount: 0,
  underReplicatedPartitionCount: 0,
  diskUsage: [],
};

export const brokersSlice = createSlice({
  name: 'brokers',
  initialState,
  reducers: {},
  extraReducers: (builder) => {
    builder.addCase(fetchBrokers.pending, () => initialState);
    builder.addCase(fetchBrokers.fulfilled, (state, { payload }) => ({
      ...state,
      items: payload,
    }));
    builder.addCase(fetchClusterStats.fulfilled, (state, { payload }) => ({
      ...state,
      ...payload,
    }));
  },
});

export const selectStats = (state: RootState) => state.brokers;

export default brokersSlice.reducer;
@@ -1,43 +0,0 @@
import { createSelector } from '@reduxjs/toolkit';
import { RootState, BrokersState } from 'redux/interfaces';

const brokersState = ({ brokers }: RootState): BrokersState => brokers;

export const getBrokerCount = createSelector(
  brokersState,
  ({ brokerCount }) => brokerCount
);
export const getActiveControllers = createSelector(
  brokersState,
  ({ activeControllers }) => activeControllers
);
export const getOnlinePartitionCount = createSelector(
  brokersState,
  ({ onlinePartitionCount }) => onlinePartitionCount
);
export const getOfflinePartitionCount = createSelector(
  brokersState,
  ({ offlinePartitionCount }) => offlinePartitionCount
);
export const getInSyncReplicasCount = createSelector(
  brokersState,
  ({ inSyncReplicasCount }) => inSyncReplicasCount
);
export const getOutOfSyncReplicasCount = createSelector(
  brokersState,
  ({ outOfSyncReplicasCount }) => outOfSyncReplicasCount
);
export const getUnderReplicatedPartitionCount = createSelector(
  brokersState,
  ({ underReplicatedPartitionCount }) => underReplicatedPartitionCount
);

export const getDiskUsage = createSelector(
  brokersState,
  ({ diskUsage }) => diskUsage
);

export const getVersion = createSelector(
  brokersState,
  ({ version }) => version
);
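The values these deleted Redux selectors exposed (broker count, partition counts, disk usage, version) now come straight from the query results. A hypothetical hook-level sketch of the same derivation; the stats field names are assumed to mirror the ClusterStats payload the old slice spread into its state:

import useBrokers from 'lib/hooks/useBrokers';
import useClusterStats from 'lib/hooks/useClusterStats';
import { ClusterName } from 'redux/interfaces';

// Hypothetical convenience hook combining both suspense queries.
export function useBrokersOverview(clusterName: ClusterName) {
  const { data: brokers } = useBrokers(clusterName);
  const { data: stats } = useClusterStats(clusterName);
  return {
    brokerCount: brokers?.length ?? 0,
    activeControllers: stats?.activeControllers ?? 0,
    diskUsage: stats?.diskUsage ?? [],
    version: stats?.version,
  };
}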
@@ -3,20 +3,12 @@ import {
   createSlice,
   createSelector,
 } from '@reduxjs/toolkit';
-import {
-  ClustersApi,
-  Configuration,
-  Cluster,
-  ServerStatus,
-  ClusterFeaturesEnum,
-} from 'generated-sources';
-import { BASE_PARAMS, AsyncRequestStatus } from 'lib/constants';
+import { Cluster, ServerStatus, ClusterFeaturesEnum } from 'generated-sources';
+import { clustersApiClient } from 'lib/api';
+import { AsyncRequestStatus } from 'lib/constants';
 import { RootState } from 'redux/interfaces';
 import { createFetchingSelector } from 'redux/reducers/loader/selectors';
 
-const apiClientConf = new Configuration(BASE_PARAMS);
-export const clustersApiClient = new ClustersApi(apiClientConf);
-
 export const fetchClusters = createAsyncThunk(
   'clusters/fetchClusters',
   async () => {
@@ -1,18 +1,16 @@
 import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
 import {
-  Configuration,
   Connect,
   Connector,
   ConnectorAction,
   ConnectorState,
   ConnectorTaskStatus,
   FullConnectorInfo,
-  KafkaConnectApi,
   NewConnector,
   Task,
   TaskId,
 } from 'generated-sources';
-import { BASE_PARAMS } from 'lib/constants';
+import { kafkaConnectApiClient } from 'lib/api';
 import { getResponse } from 'lib/errorHandling';
 import {
   ClusterName,
@@ -24,9 +22,6 @@ import {
 } from 'redux/interfaces';
 import { showSuccessAlert } from 'redux/reducers/alerts/alertsSlice';
 
-const apiClientConf = new Configuration(BASE_PARAMS);
-export const kafkaConnectApiClient = new KafkaConnectApi(apiClientConf);
-
 export const fetchConnects = createAsyncThunk<
   { connects: Connect[] },
   ClusterName
@@ -6,14 +6,12 @@ import {
   PayloadAction,
 } from '@reduxjs/toolkit';
 import {
-  Configuration,
   ConsumerGroupDetails,
   ConsumerGroupOrdering,
-  ConsumerGroupsApi,
   ConsumerGroupsPageResponse,
   SortOrder,
 } from 'generated-sources';
-import { BASE_PARAMS, AsyncRequestStatus } from 'lib/constants';
+import { AsyncRequestStatus } from 'lib/constants';
 import { getResponse } from 'lib/errorHandling';
 import {
   ClusterName,
@@ -23,9 +21,7 @@ import {
 } from 'redux/interfaces';
 import { createFetchingSelector } from 'redux/reducers/loader/selectors';
 import { EntityState } from '@reduxjs/toolkit/src/entities/models';
-
-const apiClientConf = new Configuration(BASE_PARAMS);
-export const api = new ConsumerGroupsApi(apiClientConf);
+import { consumerGroupsApiClient } from 'lib/api';
 
 export const fetchConsumerGroupsPaged = createAsyncThunk<
   ConsumerGroupsPageResponse,
@@ -44,7 +40,7 @@ export const fetchConsumerGroupsPaged = createAsyncThunk<
   { rejectWithValue }
 ) => {
   try {
-    const response = await api.getConsumerGroupsPageRaw({
+    const response = await consumerGroupsApiClient.getConsumerGroupsPageRaw({
       clusterName,
       orderBy,
       sortOrder,
@@ -66,7 +62,7 @@ export const fetchConsumerGroupDetails = createAsyncThunk<
   'consumerGroups/fetchConsumerGroupDetails',
   async ({ clusterName, consumerGroupID }, { rejectWithValue }) => {
     try {
-      return await api.getConsumerGroup({
+      return await consumerGroupsApiClient.getConsumerGroup({
         clusterName,
         id: consumerGroupID,
       });
@@ -83,7 +79,7 @@ export const deleteConsumerGroup = createAsyncThunk<
   'consumerGroups/deleteConsumerGroup',
   async ({ clusterName, consumerGroupID }, { rejectWithValue }) => {
     try {
-      await api.deleteConsumerGroup({
+      await consumerGroupsApiClient.deleteConsumerGroup({
         clusterName,
         id: consumerGroupID,
       });
@@ -105,7 +101,7 @@ export const resetConsumerGroupOffsets = createAsyncThunk<
   { rejectWithValue }
 ) => {
   try {
-    await api.resetConsumerGroupOffsets({
+    await consumerGroupsApiClient.resetConsumerGroupOffsets({
       clusterName,
       id: consumerGroupID,
       consumerGroupOffsetsReset: {
|
||||||
import { combineReducers } from '@reduxjs/toolkit';
|
import { combineReducers } from '@reduxjs/toolkit';
|
||||||
import clusters from 'redux/reducers/clusters/clustersSlice';
|
import clusters from 'redux/reducers/clusters/clustersSlice';
|
||||||
import loader from 'redux/reducers/loader/loaderSlice';
|
import loader from 'redux/reducers/loader/loaderSlice';
|
||||||
import brokers from 'redux/reducers/brokers/brokersSlice';
|
|
||||||
import alerts from 'redux/reducers/alerts/alertsSlice';
|
import alerts from 'redux/reducers/alerts/alertsSlice';
|
||||||
import schemas from 'redux/reducers/schemas/schemasSlice';
|
import schemas from 'redux/reducers/schemas/schemasSlice';
|
||||||
import connect from 'redux/reducers/connect/connectSlice';
|
import connect from 'redux/reducers/connect/connectSlice';
|
||||||
|
@ -16,7 +15,6 @@ export default combineReducers({
|
||||||
topics,
|
topics,
|
||||||
topicMessages,
|
topicMessages,
|
||||||
clusters,
|
clusters,
|
||||||
brokers,
|
|
||||||
consumerGroups,
|
consumerGroups,
|
||||||
schemas,
|
schemas,
|
||||||
connect,
|
connect,
|
||||||
|
|
|
@ -1,16 +1,8 @@
|
||||||
import { KsqlState } from 'redux/interfaces/ksqlDb';
|
import { KsqlState } from 'redux/interfaces/ksqlDb';
|
||||||
import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
|
import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
|
||||||
import { BASE_PARAMS } from 'lib/constants';
|
import { ExecuteKsqlRequest, Table as KsqlTable } from 'generated-sources';
|
||||||
import {
|
|
||||||
Configuration,
|
|
||||||
ExecuteKsqlRequest,
|
|
||||||
KsqlApi,
|
|
||||||
Table as KsqlTable,
|
|
||||||
} from 'generated-sources';
|
|
||||||
import { ClusterName } from 'redux/interfaces';
|
import { ClusterName } from 'redux/interfaces';
|
||||||
|
import { ksqlDbApiClient } from 'lib/api';
|
||||||
const apiClientConf = new Configuration(BASE_PARAMS);
|
|
||||||
export const ksqlDbApiClient = new KsqlApi(apiClientConf);
|
|
||||||
|
|
||||||
export const transformKsqlResponse = (
|
export const transformKsqlResponse = (
|
||||||
rawTable: Required<KsqlTable>
|
rawTable: Required<KsqlTable>
|
||||||
|
|
|
@ -4,13 +4,9 @@ import {
|
||||||
UnknownAsyncThunkPendingAction,
|
UnknownAsyncThunkPendingAction,
|
||||||
UnknownAsyncThunkRejectedAction,
|
UnknownAsyncThunkRejectedAction,
|
||||||
} from '@reduxjs/toolkit/dist/matchers';
|
} from '@reduxjs/toolkit/dist/matchers';
|
||||||
import { ClustersApi, Configuration } from 'generated-sources';
|
import { AsyncRequestStatus } from 'lib/constants';
|
||||||
import { BASE_PARAMS, AsyncRequestStatus } from 'lib/constants';
|
|
||||||
import { LoaderSliceState } from 'redux/interfaces';
|
import { LoaderSliceState } from 'redux/interfaces';
|
||||||
|
|
||||||
const apiClientConf = new Configuration(BASE_PARAMS);
|
|
||||||
export const clustersApiClient = new ClustersApi(apiClientConf);
|
|
||||||
|
|
||||||
export const initialState: LoaderSliceState = {};
|
export const initialState: LoaderSliceState = {};
|
||||||
|
|
||||||
export const loaderSlice = createSlice({
|
export const loaderSlice = createSlice({
|
||||||
|
|
|
@ -5,21 +5,17 @@ import {
|
||||||
createSlice,
|
createSlice,
|
||||||
} from '@reduxjs/toolkit';
|
} from '@reduxjs/toolkit';
|
||||||
import {
|
import {
|
||||||
Configuration,
|
|
||||||
SchemasApi,
|
|
||||||
SchemaSubject,
|
SchemaSubject,
|
||||||
SchemaSubjectsResponse,
|
SchemaSubjectsResponse,
|
||||||
GetSchemasRequest,
|
GetSchemasRequest,
|
||||||
GetLatestSchemaRequest,
|
GetLatestSchemaRequest,
|
||||||
} from 'generated-sources';
|
} from 'generated-sources';
|
||||||
import { BASE_PARAMS, AsyncRequestStatus } from 'lib/constants';
|
import { schemasApiClient } from 'lib/api';
|
||||||
|
import { AsyncRequestStatus } from 'lib/constants';
|
||||||
import { getResponse } from 'lib/errorHandling';
|
import { getResponse } from 'lib/errorHandling';
|
||||||
import { ClusterName, RootState } from 'redux/interfaces';
|
import { ClusterName, RootState } from 'redux/interfaces';
|
||||||
import { createFetchingSelector } from 'redux/reducers/loader/selectors';
|
import { createFetchingSelector } from 'redux/reducers/loader/selectors';
|
||||||
|
|
||||||
const apiClientConf = new Configuration(BASE_PARAMS);
|
|
||||||
export const schemasApiClient = new SchemasApi(apiClientConf);
|
|
||||||
|
|
||||||
export const SCHEMA_LATEST_FETCH_ACTION = 'schemas/latest/fetch';
|
export const SCHEMA_LATEST_FETCH_ACTION = 'schemas/latest/fetch';
|
||||||
export const fetchLatestSchema = createAsyncThunk<
|
export const fetchLatestSchema = createAsyncThunk<
|
||||||
SchemaSubject,
|
SchemaSubject,
|
||||||
|
|
|
@ -1,13 +1,10 @@
|
||||||
import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
|
import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
|
||||||
import { TopicMessagesState, ClusterName, TopicName } from 'redux/interfaces';
|
import { TopicMessagesState, ClusterName, TopicName } from 'redux/interfaces';
|
||||||
import { TopicMessage, Configuration, MessagesApi } from 'generated-sources';
|
import { TopicMessage } from 'generated-sources';
|
||||||
import { BASE_PARAMS } from 'lib/constants';
|
|
||||||
import { getResponse } from 'lib/errorHandling';
|
import { getResponse } from 'lib/errorHandling';
|
||||||
import { showSuccessAlert } from 'redux/reducers/alerts/alertsSlice';
|
import { showSuccessAlert } from 'redux/reducers/alerts/alertsSlice';
|
||||||
import { fetchTopicDetails } from 'redux/reducers/topics/topicsSlice';
|
import { fetchTopicDetails } from 'redux/reducers/topics/topicsSlice';
|
||||||
|
import { messagesApiClient } from 'lib/api';
|
||||||
const apiClientConf = new Configuration(BASE_PARAMS);
|
|
||||||
export const messagesApiClient = new MessagesApi(apiClientConf);
|
|
||||||
|
|
||||||
export const clearTopicMessages = createAsyncThunk<
|
export const clearTopicMessages = createAsyncThunk<
|
||||||
undefined,
|
undefined,
|
||||||
|
|
|
@ -1,9 +1,6 @@
|
||||||
import { v4 } from 'uuid';
|
import { v4 } from 'uuid';
|
||||||
import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
|
import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
|
||||||
import {
|
import {
|
||||||
Configuration,
|
|
||||||
TopicsApi,
|
|
||||||
ConsumerGroupsApi,
|
|
||||||
TopicsResponse,
|
TopicsResponse,
|
||||||
TopicDetails,
|
TopicDetails,
|
||||||
GetTopicsRequest,
|
GetTopicsRequest,
|
||||||
|
@ -18,7 +15,6 @@ import {
|
||||||
RecreateTopicRequest,
|
RecreateTopicRequest,
|
||||||
SortOrder,
|
SortOrder,
|
||||||
TopicColumnsToSort,
|
TopicColumnsToSort,
|
||||||
MessagesApi,
|
|
||||||
GetTopicSchemaRequest,
|
GetTopicSchemaRequest,
|
||||||
TopicMessageSchema,
|
TopicMessageSchema,
|
||||||
} from 'generated-sources';
|
} from 'generated-sources';
|
||||||
|
@ -30,15 +26,14 @@ import {
|
||||||
TopicFormDataRaw,
|
TopicFormDataRaw,
|
||||||
ClusterName,
|
ClusterName,
|
||||||
} from 'redux/interfaces';
|
} from 'redux/interfaces';
|
||||||
import { BASE_PARAMS } from 'lib/constants';
|
|
||||||
import { getResponse } from 'lib/errorHandling';
|
import { getResponse } from 'lib/errorHandling';
|
||||||
import { clearTopicMessages } from 'redux/reducers/topicMessages/topicMessagesSlice';
|
import { clearTopicMessages } from 'redux/reducers/topicMessages/topicMessagesSlice';
|
||||||
import { showSuccessAlert } from 'redux/reducers/alerts/alertsSlice';
|
import { showSuccessAlert } from 'redux/reducers/alerts/alertsSlice';
|
||||||
|
import {
|
||||||
const apiClientConf = new Configuration(BASE_PARAMS);
|
consumerGroupsApiClient,
|
||||||
const topicsApiClient = new TopicsApi(apiClientConf);
|
messagesApiClient,
|
||||||
const topicConsumerGroupsApiClient = new ConsumerGroupsApi(apiClientConf);
|
topicsApiClient,
|
||||||
const messagesApiClient = new MessagesApi(apiClientConf);
|
} from 'lib/api';
|
||||||
|
|
||||||
export const fetchTopicsList = createAsyncThunk<
|
export const fetchTopicsList = createAsyncThunk<
|
||||||
TopicsResponse,
|
TopicsResponse,
|
||||||
|
@ -143,8 +138,9 @@ export const fetchTopicConsumerGroups = createAsyncThunk<
|
||||||
>('topic/fetchTopicConsumerGroups', async (payload, { rejectWithValue }) => {
|
>('topic/fetchTopicConsumerGroups', async (payload, { rejectWithValue }) => {
|
||||||
try {
|
try {
|
||||||
const { topicName } = payload;
|
const { topicName } = payload;
|
||||||
const consumerGroups =
|
const consumerGroups = await consumerGroupsApiClient.getTopicConsumerGroups(
|
||||||
await topicConsumerGroupsApiClient.getTopicConsumerGroups(payload);
|
payload
|
||||||
|
);
|
||||||
|
|
||||||
return { consumerGroups, topicName };
|
return { consumerGroups, topicName };
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
|
|