Merge branch 'master' of github.com:provectus/kafka-ui into ISSUE_754_acl

 Conflicts:
	kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
This commit is contained in:
iliax 2023-04-11 12:09:47 +04:00
commit a58c2055f5
182 changed files with 6467 additions and 5149 deletions

View file

@ -0,0 +1,36 @@
{
"name": "Java",
"image": "mcr.microsoft.com/devcontainers/java:0-17",
"features": {
"ghcr.io/devcontainers/features/java:1": {
"version": "none",
"installMaven": "true",
"installGradle": "false"
},
"ghcr.io/devcontainers/features/docker-in-docker:2": {}
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "java -version",
"customizations": {
"vscode": {
"extensions" : [
"vscjava.vscode-java-pack",
"vscjava.vscode-maven",
"vscjava.vscode-java-debug",
"EditorConfig.EditorConfig",
"ms-azuretools.vscode-docker",
"antfu.vite",
"ms-kubernetes-tools.vscode-kubernetes-tools",
"github.vscode-pull-request-github"
]
}
}
}

View file

@ -36,7 +36,7 @@ jobs:
- name: Pull with Docker
id: pull_chrome
run: |
docker pull selenium/standalone-chrome:103.0
docker pull selenoid/vnc_chrome:103.0
- name: Set up JDK
uses: actions/setup-java@v3
with:
@ -52,6 +52,7 @@ jobs:
id: compose_app
# use the following command until #819 is fixed
run: |
docker-compose -f kafka-ui-e2e-checks/docker/selenoid-git.yaml up -d
docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
- name: Run test suite
run: |
@ -78,7 +79,7 @@ jobs:
uses: Sibz/github-status-action@v1.1.6
with:
authToken: ${{secrets.GITHUB_TOKEN}}
context: "Test report"
context: "Click Details button to open Allure report"
state: "success"
sha: ${{ github.sha }}
target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}

View file

@ -15,20 +15,20 @@ jobs:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Configure AWS credentials for Kafka-UI account
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-central-1
- name: Set the values
- name: Set up environment
id: set_env_values
run: |
cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
- name: pull docker
- name: Pull with Docker
id: pull_chrome
run: |
docker pull selenium/standalone-chrome:103.0
docker pull selenoid/vnc_chrome:103.0
- name: Set up JDK
uses: actions/setup-java@v3
with:
@ -40,12 +40,13 @@ jobs:
run: |
./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
- name: compose app
- name: Compose with Docker
id: compose_app
# use the following command until #819 is fixed
run: |
docker-compose -f kafka-ui-e2e-checks/docker/selenoid-git.yaml up -d
docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
- name: e2e run
- name: Run test suite
run: |
./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
./mvnw -B -V -ntp -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -f 'kafka-ui-e2e-checks' test -Pprod
@ -65,7 +66,7 @@ jobs:
AWS_S3_BUCKET: 'kafkaui-allure-reports'
AWS_REGION: 'eu-central-1'
SOURCE_DIR: 'allure-history/allure-results'
- name: Post the link to allure report
- name: Deploy report to Amazon S3
if: always()
uses: Sibz/github-status-action@v1.1.6
with:

View file

@ -23,7 +23,7 @@ jobs:
- name: Pull with Docker
id: pull_chrome
run: |
docker pull selenium/standalone-chrome:103.0
docker pull selenoid/vnc_chrome:103.0
- name: Set up JDK
uses: actions/setup-java@v3
with:
@ -39,6 +39,7 @@ jobs:
id: compose_app
# use the following command until #819 is fixed
run: |
docker-compose -f kafka-ui-e2e-checks/docker/selenoid-git.yaml up -d
docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
- name: Run test suite
run: |
@ -65,7 +66,7 @@ jobs:
uses: Sibz/github-status-action@v1.1.6
with:
authToken: ${{secrets.GITHUB_TOKEN}}
context: "Test report"
context: "Click Details button to open Allure report"
state: "success"
sha: ${{ github.sha }}
target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}

View file

@ -0,0 +1,333 @@
<?xml version="1.0"?>
<!DOCTYPE module PUBLIC
"-//Checkstyle//DTD Checkstyle Configuration 1.3//EN"
"https://checkstyle.org/dtds/configuration_1_3.dtd">
<!--
Checkstyle configuration that checks the Google coding conventions from Google Java Style
that can be found at https://google.github.io/styleguide/javaguide.html
Checkstyle is very configurable. Be sure to read the documentation at
http://checkstyle.org (or in your downloaded distribution).
To completely disable a check, just comment it out or delete it from the file.
To suppress certain violations please review suppression filters.
Authors: Max Vetrenko, Ruslan Diachenko, Roman Ivanov.
-->
<module name = "Checker">
<property name="charset" value="UTF-8"/>
<property name="severity" value="warning"/>
<property name="fileExtensions" value="java, properties, xml"/>
<!-- Excludes all 'module-info.java' files -->
<!-- See https://checkstyle.org/config_filefilters.html -->
<module name="BeforeExecutionExclusionFileFilter">
<property name="fileNamePattern" value="module\-info\.java$"/>
</module>
<!-- https://checkstyle.org/config_filters.html#SuppressionFilter -->
<module name="SuppressionFilter">
<property name="file" value="${org.checkstyle.google.suppressionfilter.config}"
default="checkstyle-suppressions.xml" />
<property name="optional" value="true"/>
</module>
<!-- Checks for whitespace -->
<!-- See http://checkstyle.org/config_whitespace.html -->
<module name="FileTabCharacter">
<property name="eachLine" value="true"/>
</module>
<module name="LineLength">
<property name="fileExtensions" value="java"/>
<property name="max" value="120"/>
<property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://"/>
</module>
<module name="TreeWalker">
<module name="OuterTypeFilename"/>
<module name="IllegalTokenText">
<property name="tokens" value="STRING_LITERAL, CHAR_LITERAL"/>
<property name="format"
value="\\u00(09|0(a|A)|0(c|C)|0(d|D)|22|27|5(C|c))|\\(0(10|11|12|14|15|42|47)|134)"/>
<property name="message"
value="Consider using special escape sequence instead of octal value or Unicode escaped value."/>
</module>
<module name="AvoidEscapedUnicodeCharacters">
<property name="allowEscapesForControlCharacters" value="true"/>
<property name="allowByTailComment" value="true"/>
<property name="allowNonPrintableEscapes" value="true"/>
</module>
<module name="AvoidStarImport"/>
<module name="OneTopLevelClass"/>
<module name="NoLineWrap">
<property name="tokens" value="PACKAGE_DEF, IMPORT, STATIC_IMPORT"/>
</module>
<module name="EmptyBlock">
<property name="option" value="TEXT"/>
<property name="tokens"
value="LITERAL_TRY, LITERAL_FINALLY, LITERAL_IF, LITERAL_ELSE, LITERAL_SWITCH"/>
</module>
<module name="NeedBraces">
<property name="tokens"
value="LITERAL_DO, LITERAL_ELSE, LITERAL_FOR, LITERAL_IF, LITERAL_WHILE"/>
</module>
<module name="LeftCurly">
<property name="tokens"
value="ANNOTATION_DEF, CLASS_DEF, CTOR_DEF, ENUM_CONSTANT_DEF, ENUM_DEF,
INTERFACE_DEF, LAMBDA, LITERAL_CASE, LITERAL_CATCH, LITERAL_DEFAULT,
LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF,
LITERAL_SWITCH, LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE, METHOD_DEF,
OBJBLOCK, STATIC_INIT"/>
</module>
<module name="RightCurly">
<property name="id" value="RightCurlySame"/>
<property name="tokens"
value="LITERAL_TRY, LITERAL_CATCH, LITERAL_FINALLY, LITERAL_IF, LITERAL_ELSE,
LITERAL_DO"/>
</module>
<module name="RightCurly">
<property name="id" value="RightCurlyAlone"/>
<property name="option" value="alone"/>
<property name="tokens"
value="CLASS_DEF, METHOD_DEF, CTOR_DEF, LITERAL_FOR, LITERAL_WHILE, STATIC_INIT,
INSTANCE_INIT, ANNOTATION_DEF, ENUM_DEF"/>
</module>
<module name="SuppressionXpathSingleFilter">
<!-- suppression is required until https://github.com/checkstyle/checkstyle/issues/7541 is resolved -->
<property name="id" value="RightCurlyAlone"/>
<property name="query" value="//RCURLY[parent::SLIST[count(./*)=1]
or preceding-sibling::*[last()][self::LCURLY]]"/>
</module>
<module name="WhitespaceAfter">
<property name="tokens"
value="COMMA, SEMI, TYPECAST, LITERAL_IF, LITERAL_ELSE,
LITERAL_WHILE, LITERAL_DO, LITERAL_FOR, DO_WHILE"/>
</module>
<module name="WhitespaceAround">
<property name="allowEmptyConstructors" value="true"/>
<property name="allowEmptyLambdas" value="true"/>
<property name="allowEmptyMethods" value="true"/>
<property name="allowEmptyTypes" value="true"/>
<property name="allowEmptyLoops" value="true"/>
<property name="tokens"
value="ASSIGN, BAND, BAND_ASSIGN, BOR, BOR_ASSIGN, BSR, BSR_ASSIGN, BXOR,
BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN, DO_WHILE, EQUAL, GE, GT, LAMBDA, LAND,
LCURLY, LE, LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY,
LITERAL_FOR, LITERAL_IF, LITERAL_RETURN, LITERAL_SWITCH, LITERAL_SYNCHRONIZED,
LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS, MINUS_ASSIGN, MOD, MOD_ASSIGN,
NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION, RCURLY, SL, SLIST, SL_ASSIGN, SR,
SR_ASSIGN, STAR, STAR_ASSIGN, LITERAL_ASSERT, TYPE_EXTENSION_AND"/>
<message key="ws.notFollowed"
value="WhitespaceAround: ''{0}'' is not followed by whitespace. Empty blocks may only be represented as '{}' when not part of a multi-block statement (4.1.3)"/>
<message key="ws.notPreceded"
value="WhitespaceAround: ''{0}'' is not preceded with whitespace."/>
</module>
<module name="OneStatementPerLine"/>
<!-- <module name="MultipleVariableDeclarations"/>-->
<module name="ArrayTypeStyle"/>
<module name="MissingSwitchDefault"/>
<module name="FallThrough"/>
<module name="UpperEll"/>
<module name="ModifierOrder"/>
<module name="EmptyLineSeparator">
<property name="tokens"
value="PACKAGE_DEF, IMPORT, STATIC_IMPORT, CLASS_DEF, INTERFACE_DEF, ENUM_DEF,
STATIC_INIT, INSTANCE_INIT, METHOD_DEF, CTOR_DEF, VARIABLE_DEF"/>
<property name="allowNoEmptyLineBetweenFields" value="true"/>
</module>
<module name="SeparatorWrap">
<property name="id" value="SeparatorWrapDot"/>
<property name="tokens" value="DOT"/>
<property name="option" value="nl"/>
</module>
<module name="SeparatorWrap">
<property name="id" value="SeparatorWrapComma"/>
<property name="tokens" value="COMMA"/>
<property name="option" value="EOL"/>
</module>
<module name="SeparatorWrap">
<!-- ELLIPSIS is EOL until https://github.com/google/styleguide/issues/258 -->
<property name="id" value="SeparatorWrapEllipsis"/>
<property name="tokens" value="ELLIPSIS"/>
<property name="option" value="EOL"/>
</module>
<module name="SeparatorWrap">
<!-- ARRAY_DECLARATOR is EOL until https://github.com/google/styleguide/issues/259 -->
<property name="id" value="SeparatorWrapArrayDeclarator"/>
<property name="tokens" value="ARRAY_DECLARATOR"/>
<property name="option" value="EOL"/>
</module>
<module name="SeparatorWrap">
<property name="id" value="SeparatorWrapMethodRef"/>
<property name="tokens" value="METHOD_REF"/>
<property name="option" value="nl"/>
</module>
<module name="PackageName">
<property name="format" value="^[a-z]+(\.[a-z][a-z0-9]*)*$"/>
<message key="name.invalidPattern"
value="Package name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="TypeName">
<property name="tokens" value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, ANNOTATION_DEF"/>
<message key="name.invalidPattern"
value="Type name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="MemberName">
<property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9]*$"/>
<message key="name.invalidPattern"
value="Member name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="ParameterName">
<property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
<message key="name.invalidPattern"
value="Parameter name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="LambdaParameterName">
<property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
<message key="name.invalidPattern"
value="Lambda parameter name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="CatchParameterName">
<property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
<message key="name.invalidPattern"
value="Catch parameter name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="LocalVariableName">
<property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
<message key="name.invalidPattern"
value="Local variable name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="ClassTypeParameterName">
<property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/>
<message key="name.invalidPattern"
value="Class type name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="MethodTypeParameterName">
<property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/>
<message key="name.invalidPattern"
value="Method type name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="InterfaceTypeParameterName">
<property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/>
<message key="name.invalidPattern"
value="Interface type name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="NoFinalizer"/>
<module name="GenericWhitespace">
<message key="ws.followed"
value="GenericWhitespace ''{0}'' is followed by whitespace."/>
<message key="ws.preceded"
value="GenericWhitespace ''{0}'' is preceded with whitespace."/>
<message key="ws.illegalFollow"
value="GenericWhitespace ''{0}'' should followed by whitespace."/>
<message key="ws.notPreceded"
value="GenericWhitespace ''{0}'' is not preceded with whitespace."/>
</module>
<module name="Indentation">
<property name="basicOffset" value="2"/>
<property name="braceAdjustment" value="0"/>
<property name="caseIndent" value="2"/>
<property name="throwsIndent" value="4"/>
<property name="lineWrappingIndentation" value="4"/>
<property name="arrayInitIndent" value="2"/>
</module>
<module name="AbbreviationAsWordInName">
<property name="ignoreFinal" value="false"/>
<property name="allowedAbbreviationLength" value="1"/>
<property name="tokens"
value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, ANNOTATION_DEF, ANNOTATION_FIELD_DEF,
PARAMETER_DEF, VARIABLE_DEF, METHOD_DEF"/>
</module>
<module name="OverloadMethodsDeclarationOrder"/>
<!-- <module name="VariableDeclarationUsageDistance"/>-->
<module name="CustomImportOrder">
<property name="sortImportsInGroupAlphabetically" value="true"/>
<property name="separateLineBetweenGroups" value="true"/>
<property name="customImportOrderRules" value="STATIC###THIRD_PARTY_PACKAGE"/>
<property name="tokens" value="IMPORT, STATIC_IMPORT, PACKAGE_DEF"/>
</module>
<module name="MethodParamPad">
<property name="tokens"
value="CTOR_DEF, LITERAL_NEW, METHOD_CALL, METHOD_DEF,
SUPER_CTOR_CALL, ENUM_CONSTANT_DEF"/>
</module>
<module name="NoWhitespaceBefore">
<property name="tokens"
value="COMMA, SEMI, POST_INC, POST_DEC, DOT, ELLIPSIS,
LABELED_STAT, METHOD_REF"/>
<property name="allowLineBreaks" value="true"/>
</module>
<module name="ParenPad">
<property name="tokens"
value="ANNOTATION, ANNOTATION_FIELD_DEF, CTOR_CALL, CTOR_DEF, DOT, ENUM_CONSTANT_DEF,
EXPR, LITERAL_CATCH, LITERAL_DO, LITERAL_FOR, LITERAL_IF, LITERAL_NEW,
LITERAL_SWITCH, LITERAL_SYNCHRONIZED, LITERAL_WHILE, METHOD_CALL,
METHOD_DEF, QUESTION, RESOURCE_SPECIFICATION, SUPER_CTOR_CALL, LAMBDA"/>
</module>
<module name="OperatorWrap">
<property name="option" value="NL"/>
<property name="tokens"
value="BAND, BOR, BSR, BXOR, DIV, EQUAL, GE, GT, LAND, LE, LITERAL_INSTANCEOF, LOR,
LT, MINUS, MOD, NOT_EQUAL, PLUS, QUESTION, SL, SR, STAR, METHOD_REF "/>
</module>
<module name="AnnotationLocation">
<property name="id" value="AnnotationLocationMostCases"/>
<property name="tokens"
value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF"/>
</module>
<module name="AnnotationLocation">
<property name="id" value="AnnotationLocationVariables"/>
<property name="tokens" value="VARIABLE_DEF"/>
<property name="allowSamelineMultipleAnnotations" value="true"/>
</module>
<module name="NonEmptyAtclauseDescription"/>
<module name="InvalidJavadocPosition"/>
<module name="JavadocTagContinuationIndentation"/>
<module name="SummaryJavadoc">
<property name="forbiddenSummaryFragments"
value="^@return the *|^This method returns |^A [{]@code [a-zA-Z0-9]+[}]( is a )"/>
</module>
<module name="JavadocParagraph"/>
<module name="AtclauseOrder">
<property name="tagOrder" value="@param, @return, @throws, @deprecated"/>
<property name="target"
value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF, VARIABLE_DEF"/>
</module>
<module name="JavadocMethod">
<property name="accessModifiers" value="public"/>
<property name="allowMissingParamTags" value="true"/>
<property name="allowMissingReturnTag" value="true"/>
<property name="allowedAnnotations" value="Override, Test"/>
<property name="tokens" value="METHOD_DEF, CTOR_DEF, ANNOTATION_FIELD_DEF"/>
</module>
<!-- <module name="MissingJavadocMethod">-->
<!-- <property name="scope" value="public"/>-->
<!-- <property name="minLineCount" value="2"/>-->
<!-- <property name="allowedAnnotations" value="Override, Test"/>-->
<!-- <property name="tokens" value="METHOD_DEF, CTOR_DEF, ANNOTATION_FIELD_DEF"/>-->
<!-- </module>-->
<module name="MethodName">
<property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9_]*$"/>
<message key="name.invalidPattern"
value="Method name ''{0}'' must match pattern ''{1}''."/>
</module>
<module name="SingleLineJavadoc">
<property name="ignoreInlineTags" value="false"/>
</module>
<module name="EmptyCatchBlock">
<property name="exceptionVariableName" value="ignored"/>
</module>
<module name="CommentsIndentation">
<property name="tokens" value="SINGLE_LINE_COMMENT, BLOCK_COMMENT_BEGIN"/>
</module>
<!-- https://checkstyle.org/config_filters.html#SuppressionXpathFilter -->
<module name="SuppressionXpathFilter">
<property name="file" value="${org.checkstyle.google.suppressionxpathfilter.config}"
default="checkstyle-xpath-suppressions.xml" />
<property name="optional" value="true"/>
</module>
</module>
</module>

View file

@ -318,7 +318,7 @@
<property name="ignoreInlineTags" value="false"/>
</module>
<module name="EmptyCatchBlock">
<property name="exceptionVariableName" value="expected"/>
<property name="exceptionVariableName" value="ignored"/>
</module>
<module name="CommentsIndentation">
<property name="tokens" value="SINGLE_LINE_COMMENT, BLOCK_COMMENT_BEGIN"/>

View file

@ -1,4 +1,5 @@
FROM azul/zulu-openjdk-alpine:17-jre
#FROM azul/zulu-openjdk-alpine:17-jre-headless
FROM azul/zulu-openjdk-alpine@sha256:a36679ac0d28cb835e2a8c00e1e0d95509c6c51c5081c7782b85edb1f37a771a
RUN apk add --no-cache gcompat # need to make snappy codec work
RUN addgroup -S kafkaui && adduser -S kafkaui -G kafkaui

View file

@ -6,7 +6,13 @@ import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.connect.ApiClient;
import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
import com.provectus.kafka.ui.connect.model.Connector;
import com.provectus.kafka.ui.connect.model.ConnectorPlugin;
import com.provectus.kafka.ui.connect.model.ConnectorPluginConfigValidationResponse;
import com.provectus.kafka.ui.connect.model.ConnectorStatus;
import com.provectus.kafka.ui.connect.model.ConnectorTask;
import com.provectus.kafka.ui.connect.model.ConnectorTopics;
import com.provectus.kafka.ui.connect.model.NewConnector;
import com.provectus.kafka.ui.connect.model.TaskStatus;
import com.provectus.kafka.ui.exception.KafkaConnectConflictReponseException;
import com.provectus.kafka.ui.exception.ValidationException;
import com.provectus.kafka.ui.util.WebClientConfigurator;
@ -15,11 +21,7 @@ import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.util.MultiValueMap;
import org.springframework.http.ResponseEntity;
import org.springframework.util.unit.DataSize;
import org.springframework.web.client.RestClientException;
import org.springframework.web.reactive.function.client.WebClient;
@ -79,6 +81,176 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
);
}
@Override
public Mono<ResponseEntity<Connector>> createConnectorWithHttpInfo(NewConnector newConnector)
throws WebClientResponseException {
return withRetryOnConflict(super.createConnectorWithHttpInfo(newConnector));
}
@Override
public Mono<Void> deleteConnector(String connectorName) throws WebClientResponseException {
return withRetryOnConflict(super.deleteConnector(connectorName));
}
@Override
public Mono<ResponseEntity<Void>> deleteConnectorWithHttpInfo(String connectorName)
throws WebClientResponseException {
return withRetryOnConflict(super.deleteConnectorWithHttpInfo(connectorName));
}
@Override
public Mono<Connector> getConnector(String connectorName) throws WebClientResponseException {
return withRetryOnConflict(super.getConnector(connectorName));
}
@Override
public Mono<ResponseEntity<Connector>> getConnectorWithHttpInfo(String connectorName)
throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorWithHttpInfo(connectorName));
}
@Override
public Mono<Map<String, Object>> getConnectorConfig(String connectorName) throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorConfig(connectorName));
}
@Override
public Mono<ResponseEntity<Map<String, Object>>> getConnectorConfigWithHttpInfo(String connectorName)
throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorConfigWithHttpInfo(connectorName));
}
@Override
public Flux<ConnectorPlugin> getConnectorPlugins() throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorPlugins());
}
@Override
public Mono<ResponseEntity<List<ConnectorPlugin>>> getConnectorPluginsWithHttpInfo()
throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorPluginsWithHttpInfo());
}
@Override
public Mono<ConnectorStatus> getConnectorStatus(String connectorName) throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorStatus(connectorName));
}
@Override
public Mono<ResponseEntity<ConnectorStatus>> getConnectorStatusWithHttpInfo(String connectorName)
throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorStatusWithHttpInfo(connectorName));
}
@Override
public Mono<TaskStatus> getConnectorTaskStatus(String connectorName, Integer taskId)
throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorTaskStatus(connectorName, taskId));
}
@Override
public Mono<ResponseEntity<TaskStatus>> getConnectorTaskStatusWithHttpInfo(String connectorName, Integer taskId)
throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorTaskStatusWithHttpInfo(connectorName, taskId));
}
@Override
public Flux<ConnectorTask> getConnectorTasks(String connectorName) throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorTasks(connectorName));
}
@Override
public Mono<ResponseEntity<List<ConnectorTask>>> getConnectorTasksWithHttpInfo(String connectorName)
throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorTasksWithHttpInfo(connectorName));
}
@Override
public Mono<Map<String, ConnectorTopics>> getConnectorTopics(String connectorName) throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorTopics(connectorName));
}
@Override
public Mono<ResponseEntity<Map<String, ConnectorTopics>>> getConnectorTopicsWithHttpInfo(String connectorName)
throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorTopicsWithHttpInfo(connectorName));
}
@Override
public Flux<String> getConnectors(String search) throws WebClientResponseException {
return withRetryOnConflict(super.getConnectors(search));
}
@Override
public Mono<ResponseEntity<List<String>>> getConnectorsWithHttpInfo(String search) throws WebClientResponseException {
return withRetryOnConflict(super.getConnectorsWithHttpInfo(search));
}
@Override
public Mono<Void> pauseConnector(String connectorName) throws WebClientResponseException {
return withRetryOnConflict(super.pauseConnector(connectorName));
}
@Override
public Mono<ResponseEntity<Void>> pauseConnectorWithHttpInfo(String connectorName) throws WebClientResponseException {
return withRetryOnConflict(super.pauseConnectorWithHttpInfo(connectorName));
}
@Override
public Mono<Void> restartConnector(String connectorName, Boolean includeTasks, Boolean onlyFailed)
throws WebClientResponseException {
return withRetryOnConflict(super.restartConnector(connectorName, includeTasks, onlyFailed));
}
@Override
public Mono<ResponseEntity<Void>> restartConnectorWithHttpInfo(String connectorName, Boolean includeTasks,
Boolean onlyFailed) throws WebClientResponseException {
return withRetryOnConflict(super.restartConnectorWithHttpInfo(connectorName, includeTasks, onlyFailed));
}
@Override
public Mono<Void> restartConnectorTask(String connectorName, Integer taskId) throws WebClientResponseException {
return withRetryOnConflict(super.restartConnectorTask(connectorName, taskId));
}
@Override
public Mono<ResponseEntity<Void>> restartConnectorTaskWithHttpInfo(String connectorName, Integer taskId)
throws WebClientResponseException {
return withRetryOnConflict(super.restartConnectorTaskWithHttpInfo(connectorName, taskId));
}
@Override
public Mono<Void> resumeConnector(String connectorName) throws WebClientResponseException {
return super.resumeConnector(connectorName);
}
@Override
public Mono<ResponseEntity<Void>> resumeConnectorWithHttpInfo(String connectorName)
throws WebClientResponseException {
return withRetryOnConflict(super.resumeConnectorWithHttpInfo(connectorName));
}
@Override
public Mono<ResponseEntity<Connector>> setConnectorConfigWithHttpInfo(String connectorName,
Map<String, Object> requestBody)
throws WebClientResponseException {
return withRetryOnConflict(super.setConnectorConfigWithHttpInfo(connectorName, requestBody));
}
@Override
public Mono<ConnectorPluginConfigValidationResponse> validateConnectorPluginConfig(String pluginName,
Map<String, Object> requestBody)
throws WebClientResponseException {
return withRetryOnConflict(super.validateConnectorPluginConfig(pluginName, requestBody));
}
@Override
public Mono<ResponseEntity<ConnectorPluginConfigValidationResponse>> validateConnectorPluginConfigWithHttpInfo(
String pluginName, Map<String, Object> requestBody) throws WebClientResponseException {
return withRetryOnConflict(super.validateConnectorPluginConfigWithHttpInfo(pluginName, requestBody));
}
private static class RetryingApiClient extends ApiClient {
public RetryingApiClient(ConnectCluster config,
@ -108,35 +280,5 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
.configureBufferSize(maxBuffSize)
.build();
}
@Override
public <T> Mono<T> invokeAPI(String path, HttpMethod method, Map<String, Object> pathParams,
MultiValueMap<String, String> queryParams, Object body,
HttpHeaders headerParams,
MultiValueMap<String, String> cookieParams,
MultiValueMap<String, Object> formParams, List<MediaType> accept,
MediaType contentType, String[] authNames,
ParameterizedTypeReference<T> returnType)
throws RestClientException {
return withRetryOnConflict(
super.invokeAPI(path, method, pathParams, queryParams, body, headerParams, cookieParams,
formParams, accept, contentType, authNames, returnType)
);
}
@Override
public <T> Flux<T> invokeFluxAPI(String path, HttpMethod method, Map<String, Object> pathParams,
MultiValueMap<String, String> queryParams, Object body,
HttpHeaders headerParams,
MultiValueMap<String, String> cookieParams,
MultiValueMap<String, Object> formParams,
List<MediaType> accept, MediaType contentType,
String[] authNames, ParameterizedTypeReference<T> returnType)
throws RestClientException {
return withRetryOnConflict(
super.invokeFluxAPI(path, method, pathParams, queryParams, body, headerParams,
cookieParams, formParams, accept, contentType, authNames, returnType)
);
}
}
}

View file

@ -1,6 +1,7 @@
package com.provectus.kafka.ui.config;
import com.provectus.kafka.ui.model.MetricsConfig;
import jakarta.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@ -8,7 +9,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;

View file

@ -1,9 +1,9 @@
package com.provectus.kafka.ui.config.auth;
import jakarta.annotation.PostConstruct;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.annotation.PostConstruct;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.util.Assert;

View file

@ -13,12 +13,12 @@ import com.provectus.kafka.ui.model.ClusterConfigValidationDTO;
import com.provectus.kafka.ui.model.RestartRequestDTO;
import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.service.ApplicationInfoService;
import com.provectus.kafka.ui.service.KafkaClusterFactory;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import com.provectus.kafka.ui.util.ApplicationRestarter;
import com.provectus.kafka.ui.util.DynamicConfigOperations;
import com.provectus.kafka.ui.util.DynamicConfigOperations.PropertiesStructure;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
@ -53,18 +53,11 @@ public class ApplicationConfigController implements ApplicationConfigApi {
private final DynamicConfigOperations dynamicConfigOperations;
private final ApplicationRestarter restarter;
private final KafkaClusterFactory kafkaClusterFactory;
private final ApplicationInfoService applicationInfoService;
@Override
public Mono<ResponseEntity<ApplicationInfoDTO>> getApplicationInfo(ServerWebExchange exchange) {
return Mono.just(
new ApplicationInfoDTO()
.enabledFeatures(
dynamicConfigOperations.dynamicConfigEnabled()
? List.of(ApplicationInfoDTO.EnabledFeaturesEnum.DYNAMIC_CONFIG)
: List.of()
)
).map(ResponseEntity::ok);
return Mono.just(applicationInfoService.getApplicationInfo()).map(ResponseEntity::ok);
}
@Override

View file

@ -149,10 +149,9 @@ public class KafkaConnectController extends AbstractController implements KafkaC
}
@Override
public Mono<ResponseEntity<ConnectorDTO>> setConnectorConfig(String clusterName,
String connectName,
public Mono<ResponseEntity<ConnectorDTO>> setConnectorConfig(String clusterName, String connectName,
String connectorName,
@Valid Mono<Object> requestBody,
Mono<Map<String, Object>> requestBody,
ServerWebExchange exchange) {
Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
@ -164,8 +163,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
return validateAccess.then(
kafkaConnectService
.setConnectorConfig(getCluster(clusterName), connectName, connectorName, requestBody)
.map(ResponseEntity::ok)
);
.map(ResponseEntity::ok));
}
@Override
@ -242,7 +240,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
@Override
public Mono<ResponseEntity<ConnectorPluginConfigValidationResponseDTO>> validateConnectorPluginConfig(
String clusterName, String connectName, String pluginName, @Valid Mono<Object> requestBody,
String clusterName, String connectName, String pluginName, @Valid Mono<Map<String, Object>> requestBody,
ServerWebExchange exchange) {
return kafkaConnectService
.validateConnectorPluginConfig(

View file

@ -1,9 +1,6 @@
package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.TopicMessageDTO;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.model.TopicMessagePhaseDTO;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import java.time.Duration;
import java.time.Instant;
import org.apache.kafka.clients.consumer.Consumer;
@ -14,13 +11,12 @@ import reactor.core.publisher.FluxSink;
public abstract class AbstractEmitter {
private final ConsumerRecordDeserializer recordDeserializer;
private final ConsumingStats consumingStats = new ConsumingStats();
private final MessagesProcessing messagesProcessing;
private final PollingThrottler throttler;
protected final PollingSettings pollingSettings;
protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer, PollingSettings pollingSettings) {
this.recordDeserializer = recordDeserializer;
protected AbstractEmitter(MessagesProcessing messagesProcessing, PollingSettings pollingSettings) {
this.messagesProcessing = messagesProcessing;
this.pollingSettings = pollingSettings;
this.throttler = pollingSettings.getPollingThrottler();
}
@ -40,39 +36,27 @@ public abstract class AbstractEmitter {
return records;
}
protected boolean sendLimitReached() {
return messagesProcessing.limitReached();
}
protected void sendMessage(FluxSink<TopicMessageEventDTO> sink,
ConsumerRecord<Bytes, Bytes> msg) {
final TopicMessageDTO topicMessage = recordDeserializer.deserialize(msg);
sink.next(
new TopicMessageEventDTO()
.type(TopicMessageEventDTO.TypeEnum.MESSAGE)
.message(topicMessage)
);
messagesProcessing.sendMsg(sink, msg);
}
protected void sendPhase(FluxSink<TopicMessageEventDTO> sink, String name) {
sink.next(
new TopicMessageEventDTO()
.type(TopicMessageEventDTO.TypeEnum.PHASE)
.phase(new TopicMessagePhaseDTO().name(name))
);
messagesProcessing.sendPhase(sink, name);
}
protected int sendConsuming(FluxSink<TopicMessageEventDTO> sink,
ConsumerRecords<Bytes, Bytes> records,
long elapsed) {
return consumingStats.sendConsumingEvt(sink, records, elapsed, getFilterApplyErrors(sink));
return messagesProcessing.sentConsumingInfo(sink, records, elapsed);
}
protected void sendFinishStatsAndCompleteSink(FluxSink<TopicMessageEventDTO> sink) {
consumingStats.sendFinishEvent(sink, getFilterApplyErrors(sink));
messagesProcessing.sendFinishEvent(sink);
sink.complete();
}
protected Number getFilterApplyErrors(FluxSink<?> sink) {
return sink.contextView()
.<MessageFilterStats>getOrEmpty(MessageFilterStats.class)
.<Number>map(MessageFilterStats::getFilterApplyErrors)
.orElse(0);
}
}

View file

@ -2,7 +2,6 @@ package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@ -31,9 +30,9 @@ public class BackwardRecordEmitter
Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
ConsumerPosition consumerPosition,
int messagesPerPage,
ConsumerRecordDeserializer recordDeserializer,
MessagesProcessing messagesProcessing,
PollingSettings pollingSettings) {
super(recordDeserializer, pollingSettings);
super(messagesProcessing, pollingSettings);
this.consumerPosition = consumerPosition;
this.messagesPerPage = messagesPerPage;
this.consumerSupplier = consumerSupplier;
@ -52,7 +51,7 @@ public class BackwardRecordEmitter
int msgsToPollPerPartition = (int) Math.ceil((double) messagesPerPage / readUntilOffsets.size());
log.debug("'Until' offsets for polling: {}", readUntilOffsets);
while (!sink.isCancelled() && !readUntilOffsets.isEmpty()) {
while (!sink.isCancelled() && !readUntilOffsets.isEmpty() && !sendLimitReached()) {
new TreeMap<>(readUntilOffsets).forEach((tp, readToOffset) -> {
if (sink.isCancelled()) {
return; //fast return in case of sink cancellation
@ -61,8 +60,6 @@ public class BackwardRecordEmitter
long readFromOffset = Math.max(beginOffset, readToOffset - msgsToPollPerPartition);
partitionPollIteration(tp, readFromOffset, readToOffset, consumer, sink)
.stream()
.filter(r -> !sink.isCancelled())
.forEach(r -> sendMessage(sink, r));
if (beginOffset == readFromOffset) {
@ -106,6 +103,7 @@ public class BackwardRecordEmitter
EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
while (!sink.isCancelled()
&& !sendLimitReached()
&& recordsToSend.size() < desiredMsgsToPoll
&& !emptyPolls.noDataEmptyPollsReached()) {
var polledRecords = poll(sink, consumer, pollingSettings.getPartitionPollTimeout());

View file

@ -19,7 +19,7 @@ class ConsumingStats {
int sendConsumingEvt(FluxSink<TopicMessageEventDTO> sink,
ConsumerRecords<Bytes, Bytes> polledRecords,
long elapsed,
Number filterApplyErrors) {
int filterApplyErrors) {
int polledBytes = ConsumerRecordsUtil.calculatePolledSize(polledRecords);
bytes += polledBytes;
this.records += polledRecords.count();
@ -32,7 +32,7 @@ class ConsumingStats {
return polledBytes;
}
void sendFinishEvent(FluxSink<TopicMessageEventDTO> sink, Number filterApplyErrors) {
void sendFinishEvent(FluxSink<TopicMessageEventDTO> sink, int filterApplyErrors) {
sink.next(
new TopicMessageEventDTO()
.type(TopicMessageEventDTO.TypeEnum.DONE)
@ -41,12 +41,12 @@ class ConsumingStats {
}
private TopicMessageConsumingDTO createConsumingStats(FluxSink<TopicMessageEventDTO> sink,
Number filterApplyErrors) {
int filterApplyErrors) {
return new TopicMessageConsumingDTO()
.bytesConsumed(this.bytes)
.elapsedMs(this.elapsed)
.isCancelled(sink.isCancelled())
.filterApplyErrors(filterApplyErrors.intValue())
.filterApplyErrors(filterApplyErrors)
.messagesConsumed(this.records);
}
}

View file

@ -2,7 +2,6 @@ package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import java.util.function.Supplier;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
@ -23,9 +22,9 @@ public class ForwardRecordEmitter
public ForwardRecordEmitter(
Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
ConsumerPosition position,
ConsumerRecordDeserializer recordDeserializer,
MessagesProcessing messagesProcessing,
PollingSettings pollingSettings) {
super(recordDeserializer, pollingSettings);
super(messagesProcessing, pollingSettings);
this.position = position;
this.consumerSupplier = consumerSupplier;
}
@ -40,6 +39,7 @@ public class ForwardRecordEmitter
EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
while (!sink.isCancelled()
&& !sendLimitReached()
&& !seekOperations.assignedPartitionsFullyPolled()
&& !emptyPolls.noDataEmptyPollsReached()) {
@ -50,11 +50,7 @@ public class ForwardRecordEmitter
log.debug("{} records polled", records.count());
for (ConsumerRecord<Bytes, Bytes> msg : records) {
if (!sink.isCancelled()) {
sendMessage(sink, msg);
} else {
break;
}
}
}
sendFinishStatsAndCompleteSink(sink);

View file

@ -1,16 +0,0 @@
package com.provectus.kafka.ui.emitter;
import java.util.concurrent.atomic.AtomicLong;
import lombok.AccessLevel;
import lombok.Getter;
public class MessageFilterStats {
@Getter(AccessLevel.PACKAGE)
private final AtomicLong filterApplyErrors = new AtomicLong();
public final void incrementApplyErrors() {
filterApplyErrors.incrementAndGet();
}
}

View file

@ -0,0 +1,82 @@
package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.TopicMessageDTO;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.model.TopicMessagePhaseDTO;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import java.util.function.Predicate;
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.utils.Bytes;
import reactor.core.publisher.FluxSink;

/**
 * Stateful pipeline step used by record emitters: deserializes polled Kafka records,
 * applies a message filter, enforces an optional send limit, and publishes
 * MESSAGE / PHASE / stats events to a {@link FluxSink}.
 * Mutable counters make this class NOT thread-safe — it is expected to be owned by
 * a single emitter instance.
 */
@Slf4j
public class MessagesProcessing {

  // Accumulates bytes/records/elapsed-time totals reported via consuming-stats events.
  private final ConsumingStats consumingStats = new ConsumingStats();
  // Number of messages actually emitted to the sink (i.e. that passed the filter).
  private long sentMessages = 0;
  // How many times the filter predicate threw; surfaced to clients in stats events.
  private int filterApplyErrors = 0;

  private final ConsumerRecordDeserializer deserializer;
  private final Predicate<TopicMessageDTO> filter;
  // Maximum number of messages to emit; null means unlimited (used for tailing mode).
  private final @Nullable Integer limit;

  public MessagesProcessing(ConsumerRecordDeserializer deserializer,
                            Predicate<TopicMessageDTO> filter,
                            @Nullable Integer limit) {
    this.deserializer = deserializer;
    this.filter = filter;
    this.limit = limit;
  }

  /** Returns true once the configured limit (if any) of sent messages has been reached. */
  boolean limitReached() {
    return limit != null && sentMessages >= limit;
  }

  /**
   * Deserializes the record, applies the filter and, if it matches, emits a MESSAGE
   * event and increments the sent counter. A throwing filter is counted as an apply
   * error and logged at trace level only — polling continues. No-op when the sink is
   * cancelled or the limit has been reached.
   */
  void sendMsg(FluxSink<TopicMessageEventDTO> sink, ConsumerRecord<Bytes, Bytes> rec) {
    if (!sink.isCancelled() && !limitReached()) {
      TopicMessageDTO topicMessage = deserializer.deserialize(rec);
      try {
        if (filter.test(topicMessage)) {
          sink.next(
              new TopicMessageEventDTO()
                  .type(TopicMessageEventDTO.TypeEnum.MESSAGE)
                  .message(topicMessage)
          );
          sentMessages++;
        }
      } catch (Exception e) {
        filterApplyErrors++;
        log.trace("Error applying filter for message {}", topicMessage);
      }
    }
  }

  /**
   * Emits a consuming-stats event for the given poll and returns the polled byte
   * count as reported by the stats collector; returns 0 when the sink is cancelled.
   */
  int sentConsumingInfo(FluxSink<TopicMessageEventDTO> sink,
                        ConsumerRecords<Bytes, Bytes> polledRecords,
                        long elapsed) {
    if (!sink.isCancelled()) {
      return consumingStats.sendConsumingEvt(sink, polledRecords, elapsed, filterApplyErrors);
    }
    return 0;
  }

  /** Emits the terminal DONE event with accumulated stats, unless the sink is cancelled. */
  void sendFinishEvent(FluxSink<TopicMessageEventDTO> sink) {
    if (!sink.isCancelled()) {
      consumingStats.sendFinishEvent(sink, filterApplyErrors);
    }
  }

  /** Emits a PHASE event carrying the given phase name, unless the sink is cancelled. */
  void sendPhase(FluxSink<TopicMessageEventDTO> sink, String name) {
    if (!sink.isCancelled()) {
      sink.next(
          new TopicMessageEventDTO()
              .type(TopicMessageEventDTO.TypeEnum.PHASE)
              .phase(new TopicMessagePhaseDTO().name(name))
      );
    }
  }
}

View file

@ -2,7 +2,6 @@ package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import java.util.HashMap;
import java.util.function.Supplier;
import lombok.extern.slf4j.Slf4j;
@ -20,9 +19,9 @@ public class TailingEmitter extends AbstractEmitter
public TailingEmitter(Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
ConsumerPosition consumerPosition,
ConsumerRecordDeserializer recordDeserializer,
MessagesProcessing messagesProcessing,
PollingSettings pollingSettings) {
super(recordDeserializer, pollingSettings);
super(messagesProcessing, pollingSettings);
this.consumerSupplier = consumerSupplier;
this.consumerPosition = consumerPosition;
}

View file

@ -134,7 +134,7 @@ public class GlobalErrorWebExceptionHandler extends AbstractErrorWebExceptionHan
.timestamp(currentTimestamp())
.stackTrace(Throwables.getStackTraceAsString(exception));
return ServerResponse
.status(exception.getStatus())
.status(exception.getStatusCode())
.contentType(MediaType.APPLICATION_JSON)
.bodyValue(response);
}

View file

@ -1,5 +1,6 @@
package com.provectus.kafka.ui.model.rbac;
import static com.provectus.kafka.ui.model.rbac.Resource.APPLICATIONCONFIG;
import static com.provectus.kafka.ui.model.rbac.Resource.CLUSTERCONFIG;
import static com.provectus.kafka.ui.model.rbac.Resource.KSQL;
@ -26,6 +27,8 @@ import org.springframework.util.Assert;
@EqualsAndHashCode
public class Permission {
private static final List<Resource> RBAC_ACTION_EXEMPT_LIST = List.of(KSQL, CLUSTERCONFIG, APPLICATIONCONFIG);
Resource resource;
List<String> actions;
@ -51,7 +54,7 @@ public class Permission {
public void validate() {
Assert.notNull(resource, "resource cannot be null");
if (!List.of(KSQL, CLUSTERCONFIG).contains(this.resource)) {
if (!RBAC_ACTION_EXEMPT_LIST.contains(this.resource)) {
Assert.notNull(value, "permission value can't be empty for resource " + resource);
}
}

View file

@ -0,0 +1,76 @@
package com.provectus.kafka.ui.service;
import static com.provectus.kafka.ui.model.ApplicationInfoDTO.EnabledFeaturesEnum;
import com.provectus.kafka.ui.model.ApplicationInfoBuildDTO;
import com.provectus.kafka.ui.model.ApplicationInfoDTO;
import com.provectus.kafka.ui.model.ApplicationInfoLatestReleaseDTO;
import com.provectus.kafka.ui.util.DynamicConfigOperations;
import com.provectus.kafka.ui.util.GithubReleaseInfo;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.info.BuildProperties;
import org.springframework.boot.info.GitProperties;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

/**
 * Aggregates application metadata for the UI: build info (version, commit, build time),
 * the latest GitHub release (cached and refreshed on a schedule), and the set of
 * currently enabled features.
 */
@Service
public class ApplicationInfoService {

  // Cached latest-release info; refreshed by the scheduled updateGithubReleaseInfo().
  private final GithubReleaseInfo githubReleaseInfo = new GithubReleaseInfo();
  private final DynamicConfigOperations dynamicConfigOperations;
  private final BuildProperties buildProperties;
  private final GitProperties gitProperties;

  public ApplicationInfoService(DynamicConfigOperations dynamicConfigOperations,
                                // build/git properties beans may be absent (e.g. local dev
                                // runs without generated build-info) — fall back to empties
                                @Autowired(required = false) BuildProperties buildProperties,
                                @Autowired(required = false) GitProperties gitProperties) {
    this.dynamicConfigOperations = dynamicConfigOperations;
    this.buildProperties = Optional.ofNullable(buildProperties).orElse(new BuildProperties(new Properties()));
    this.gitProperties = Optional.ofNullable(gitProperties).orElse(new GitProperties(new Properties()));
  }

  /** Builds the full application-info DTO from cached release info and build properties. */
  public ApplicationInfoDTO getApplicationInfo() {
    var releaseInfo = githubReleaseInfo.get();
    return new ApplicationInfoDTO()
        .build(getBuildInfo(releaseInfo))
        .enabledFeatures(getEnabledFeatures())
        .latestRelease(convert(releaseInfo));
  }

  // Maps the raw GitHub release record onto the API DTO; fields may be null when the
  // release info has not been fetched successfully yet.
  private ApplicationInfoLatestReleaseDTO convert(GithubReleaseInfo.GithubReleaseDto releaseInfo) {
    return new ApplicationInfoLatestReleaseDTO()
        .htmlUrl(releaseInfo.html_url())
        .publishedAt(releaseInfo.published_at())
        .versionTag(releaseInfo.tag_name());
  }

  // Assembles build metadata; isLatestRelease compares the running version against the
  // latest GitHub release tag.
  private ApplicationInfoBuildDTO getBuildInfo(GithubReleaseInfo.GithubReleaseDto release) {
    return new ApplicationInfoBuildDTO()
        .isLatestRelease(release.tag_name() != null && release.tag_name().equals(buildProperties.getVersion()))
        .commitId(gitProperties.getShortCommitId())
        .version(buildProperties.getVersion())
        // build time can be absent when BuildProperties fell back to empty Properties
        .buildTime(buildProperties.getTime() != null
            ? DateTimeFormatter.ISO_INSTANT.format(buildProperties.getTime()) : null);
  }

  // Currently the only feature flag exposed is dynamic config editing.
  private List<EnabledFeaturesEnum> getEnabledFeatures() {
    var enabledFeatures = new ArrayList<EnabledFeaturesEnum>();
    if (dynamicConfigOperations.dynamicConfigEnabled()) {
      enabledFeatures.add(EnabledFeaturesEnum.DYNAMIC_CONFIG);
    }
    return enabledFeatures;
  }

  // updating on startup and every hour
  @Scheduled(fixedRateString = "${github-release-info-update-rate:3600000}")
  public void updateGithubReleaseInfo() {
    githubReleaseInfo.refresh().block();
  }
}

View file

@ -1,38 +1,58 @@
package com.provectus.kafka.ui.service;
import static java.util.regex.Pattern.CASE_INSENSITIVE;
import com.google.common.collect.ImmutableList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.actuate.endpoint.Sanitizer;
import org.springframework.stereotype.Component;
@Component
class KafkaConfigSanitizer extends Sanitizer {
private static final List<String> DEFAULT_PATTERNS_TO_SANITIZE = Arrays.asList(
class KafkaConfigSanitizer {
private static final String SANITIZED_VALUE = "******";
private static final String[] REGEX_PARTS = {"*", "$", "^", "+"};
private static final List<String> DEFAULT_PATTERNS_TO_SANITIZE = ImmutableList.<String>builder()
.addAll(kafkaConfigKeysToSanitize())
.add(
"basic.auth.user.info", /* For Schema Registry credentials */
"password", "secret", "token", "key", ".*credentials.*", /* General credential patterns */
"aws.access.*", "aws.secret.*", "aws.session.*" /* AWS-related credential patterns */
);
)
.build();
private final List<Pattern> sanitizeKeysPatterns;
KafkaConfigSanitizer(
@Value("${kafka.config.sanitizer.enabled:true}") boolean enabled,
@Value("${kafka.config.sanitizer.patterns:}") List<String> patternsToSanitize
) {
if (!enabled) {
setKeysToSanitize();
} else {
var keysToSanitize = new HashSet<>(
patternsToSanitize.isEmpty() ? DEFAULT_PATTERNS_TO_SANITIZE : patternsToSanitize);
keysToSanitize.addAll(kafkaConfigKeysToSanitize());
setKeysToSanitize(keysToSanitize.toArray(new String[] {}));
this.sanitizeKeysPatterns = enabled
? compile(patternsToSanitize.isEmpty() ? DEFAULT_PATTERNS_TO_SANITIZE : patternsToSanitize)
: List.of();
}
private static List<Pattern> compile(Collection<String> patternStrings) {
return patternStrings.stream()
.map(p -> isRegex(p)
? Pattern.compile(p, CASE_INSENSITIVE)
: Pattern.compile(".*" + p + "$", CASE_INSENSITIVE))
.toList();
}
private static boolean isRegex(String str) {
return Arrays.stream(REGEX_PARTS).anyMatch(str::contains);
}
private static Set<String> kafkaConfigKeysToSanitize() {
@ -45,4 +65,17 @@ class KafkaConfigSanitizer extends Sanitizer {
.collect(Collectors.toSet());
}
public Object sanitize(String key, Object value) {
if (value == null) {
return null;
}
for (Pattern pattern : sanitizeKeysPatterns) {
if (pattern.matcher(key).matches()) {
return SANITIZED_VALUE;
}
}
return value;
}
}

View file

@ -225,11 +225,11 @@ public class KafkaConnectService {
}
public Mono<ConnectorDTO> setConnectorConfig(KafkaCluster cluster, String connectName,
String connectorName, Mono<Object> requestBody) {
String connectorName, Mono<Map<String, Object>> requestBody) {
return api(cluster, connectName)
.mono(c ->
requestBody
.flatMap(body -> c.setConnectorConfig(connectorName, (Map<String, Object>) body))
.flatMap(body -> c.setConnectorConfig(connectorName, body))
.map(kafkaConnectMapper::fromClient));
}
@ -298,12 +298,12 @@ public class KafkaConnectService {
}
public Mono<ConnectorPluginConfigValidationResponseDTO> validateConnectorPluginConfig(
KafkaCluster cluster, String connectName, String pluginName, Mono<Object> requestBody) {
KafkaCluster cluster, String connectName, String pluginName, Mono<Map<String, Object>> requestBody) {
return api(cluster, connectName)
.mono(client ->
requestBody
.flatMap(body ->
client.validateConnectorPluginConfig(pluginName, (Map<String, Object>) body))
client.validateConnectorPluginConfig(pluginName, body))
.map(kafkaConnectMapper::fromClient)
);
}

View file

@ -3,9 +3,8 @@ package com.provectus.kafka.ui.service;
import com.google.common.util.concurrent.RateLimiter;
import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
import com.provectus.kafka.ui.emitter.MessageFilterStats;
import com.provectus.kafka.ui.emitter.MessageFilters;
import com.provectus.kafka.ui.emitter.ResultSizeLimiter;
import com.provectus.kafka.ui.emitter.MessagesProcessing;
import com.provectus.kafka.ui.emitter.TailingEmitter;
import com.provectus.kafka.ui.exception.TopicNotFoundException;
import com.provectus.kafka.ui.exception.ValidationException;
@ -14,9 +13,9 @@ import com.provectus.kafka.ui.model.CreateTopicMessageDTO;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.MessageFilterTypeDTO;
import com.provectus.kafka.ui.model.SeekDirectionDTO;
import com.provectus.kafka.ui.model.TopicMessageDTO;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.serde.api.Serde;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
import com.provectus.kafka.ui.util.SslPropertiesUtil;
import java.util.List;
@ -162,13 +161,18 @@ public class MessagesService {
@Nullable String valueSerde) {
java.util.function.Consumer<? super FluxSink<TopicMessageEventDTO>> emitter;
ConsumerRecordDeserializer recordDeserializer =
deserializationService.deserializerFor(cluster, topic, keySerde, valueSerde);
var processing = new MessagesProcessing(
deserializationService.deserializerFor(cluster, topic, keySerde, valueSerde),
getMsgFilter(query, filterQueryType),
seekDirection == SeekDirectionDTO.TAILING ? null : limit
);
if (seekDirection.equals(SeekDirectionDTO.FORWARD)) {
emitter = new ForwardRecordEmitter(
() -> consumerGroupService.createConsumer(cluster),
consumerPosition,
recordDeserializer,
processing,
cluster.getPollingSettings()
);
} else if (seekDirection.equals(SeekDirectionDTO.BACKWARD)) {
@ -176,33 +180,22 @@ public class MessagesService {
() -> consumerGroupService.createConsumer(cluster),
consumerPosition,
limit,
recordDeserializer,
processing,
cluster.getPollingSettings()
);
} else {
emitter = new TailingEmitter(
() -> consumerGroupService.createConsumer(cluster),
consumerPosition,
recordDeserializer,
processing,
cluster.getPollingSettings()
);
}
MessageFilterStats filterStats = new MessageFilterStats();
return Flux.create(emitter)
.contextWrite(ctx -> ctx.put(MessageFilterStats.class, filterStats))
.filter(getMsgFilter(query, filterQueryType, filterStats))
.map(getDataMasker(cluster, topic))
.takeWhile(createTakeWhilePredicate(seekDirection, limit))
.map(throttleUiPublish(seekDirection));
}
private Predicate<TopicMessageEventDTO> createTakeWhilePredicate(
SeekDirectionDTO seekDirection, int limit) {
return seekDirection == SeekDirectionDTO.TAILING
? evt -> true // no limit for tailing
: new ResultSizeLimiter(limit);
}
private UnaryOperator<TopicMessageEventDTO> getDataMasker(KafkaCluster cluster, String topicName) {
var keyMasker = cluster.getMasking().getMaskingFunction(topicName, Serde.Target.KEY);
var valMasker = cluster.getMasking().getMaskingFunction(topicName, Serde.Target.VALUE);
@ -217,26 +210,12 @@ public class MessagesService {
};
}
private Predicate<TopicMessageEventDTO> getMsgFilter(String query,
MessageFilterTypeDTO filterQueryType,
MessageFilterStats filterStats) {
private Predicate<TopicMessageDTO> getMsgFilter(String query,
MessageFilterTypeDTO filterQueryType) {
if (StringUtils.isEmpty(query)) {
return evt -> true;
}
var messageFilter = MessageFilters.createMsgFilter(query, filterQueryType);
return evt -> {
// we only apply filter for message events
if (evt.getType() == TopicMessageEventDTO.TypeEnum.MESSAGE) {
try {
return messageFilter.test(evt.getMessage());
} catch (Exception e) {
filterStats.incrementApplyErrors();
log.trace("Error applying filter '{}' for message {}", query, evt.getMessage());
return false;
}
}
return true;
};
return MessageFilters.createMsgFilter(query, filterQueryType);
}
private <T> UnaryOperator<T> throttleUiPublish(SeekDirectionDTO seekDirection) {

View file

@ -4,6 +4,7 @@ import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
import static org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Iterables;
@ -514,6 +515,14 @@ public class ReactiveAdminClient implements Closeable {
.flatMap(parts -> listOffsetsUnsafe(parts, offsetSpec));
}
/**
* List offset for the specified topics, skipping no-leader partitions.
*/
public Mono<Map<TopicPartition, Long>> listOffsets(Collection<TopicDescription> topicDescriptions,
OffsetSpec offsetSpec) {
return listOffsetsUnsafe(filterPartitionsWithLeaderCheck(topicDescriptions, p -> true, false), offsetSpec);
}
private Mono<Collection<TopicPartition>> filterPartitionsWithLeaderCheck(Collection<TopicPartition> partitions,
boolean failOnUnknownLeader) {
var targetTopics = partitions.stream().map(TopicPartition::topic).collect(Collectors.toSet());
@ -523,34 +532,44 @@ public class ReactiveAdminClient implements Closeable {
descriptions.values(), partitions::contains, failOnUnknownLeader));
}
private Set<TopicPartition> filterPartitionsWithLeaderCheck(Collection<TopicDescription> topicDescriptions,
@VisibleForTesting
static Set<TopicPartition> filterPartitionsWithLeaderCheck(Collection<TopicDescription> topicDescriptions,
Predicate<TopicPartition> partitionPredicate,
boolean failOnUnknownLeader) {
var goodPartitions = new HashSet<TopicPartition>();
for (TopicDescription description : topicDescriptions) {
var goodTopicPartitions = new ArrayList<TopicPartition>();
for (TopicPartitionInfo partitionInfo : description.partitions()) {
TopicPartition topicPartition = new TopicPartition(description.name(), partitionInfo.partition());
if (!partitionPredicate.test(topicPartition)) {
continue;
}
if (partitionInfo.leader() != null) {
goodPartitions.add(topicPartition);
} else if (failOnUnknownLeader) {
if (partitionInfo.leader() == null) {
if (failOnUnknownLeader) {
throw new ValidationException(String.format("Topic partition %s has no leader", topicPartition));
} else {
// if ANY of topic partitions has no leader - we have to skip all topic partitions
goodTopicPartitions.clear();
break;
}
}
if (partitionPredicate.test(topicPartition)) {
goodTopicPartitions.add(topicPartition);
}
}
goodPartitions.addAll(goodTopicPartitions);
}
return goodPartitions;
}
// 1. NOTE(!): should only apply for partitions with existing leader,
// 1. NOTE(!): should only apply for partitions from topics where all partitions have leaders,
// otherwise AdminClient will try to fetch topic metadata, fail and retry infinitely (until timeout)
// 2. NOTE(!): Skips partitions that were not initialized yet
// (UnknownTopicOrPartitionException thrown, ex. after topic creation)
// 3. TODO: check if it is a bug that AdminClient never throws LeaderNotAvailableException and just retrying instead
@KafkaClientInternalsDependant
public Mono<Map<TopicPartition, Long>> listOffsetsUnsafe(Collection<TopicPartition> partitions,
OffsetSpec offsetSpec) {
@VisibleForTesting
Mono<Map<TopicPartition, Long>> listOffsetsUnsafe(Collection<TopicPartition> partitions, OffsetSpec offsetSpec) {
if (partitions.isEmpty()) {
return Mono.just(Map.of());
}
Function<Collection<TopicPartition>, Mono<Map<TopicPartition, Long>>> call =
parts -> {

View file

@ -3,6 +3,7 @@ package com.provectus.kafka.ui.service;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
import com.google.common.collect.Sets;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.TopicMetadataException;
import com.provectus.kafka.ui.exception.TopicNotFoundException;
@ -136,22 +137,14 @@ public class TopicsService {
}
private Mono<InternalPartitionsOffsets> getPartitionOffsets(Map<String, TopicDescription>
descriptions,
descriptionsMap,
ReactiveAdminClient ac) {
var topicPartitions = descriptions.values().stream()
.flatMap(desc ->
desc.partitions().stream()
// list offsets should only be applied to partitions with existing leader
// (see ReactiveAdminClient.listOffsetsUnsafe(..) docs)
.filter(tp -> tp.leader() != null)
.map(p -> new TopicPartition(desc.name(), p.partition())))
.collect(toList());
return ac.listOffsetsUnsafe(topicPartitions, OffsetSpec.earliest())
.zipWith(ac.listOffsetsUnsafe(topicPartitions, OffsetSpec.latest()),
var descriptions = descriptionsMap.values();
return ac.listOffsets(descriptions, OffsetSpec.earliest())
.zipWith(ac.listOffsets(descriptions, OffsetSpec.latest()),
(earliest, latest) ->
topicPartitions.stream()
.filter(tp -> earliest.containsKey(tp) && latest.containsKey(tp))
Sets.intersection(earliest.keySet(), latest.keySet())
.stream()
.map(tp ->
Map.entry(tp,
new InternalPartitionsOffsets.Offsets(

View file

@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service.analyze;
import com.provectus.kafka.ui.model.TopicAnalysisSizeStatsDTO;
import com.provectus.kafka.ui.model.TopicAnalysisStatsDTO;
import com.provectus.kafka.ui.model.TopicAnalysisStatsHourlyMsgCountsDTO;
import com.provectus.kafka.ui.model.TopicAnalysisStatsHourlyMsgCountsInnerDTO;
import java.time.Duration;
import java.time.Instant;
import java.util.Comparator;
@ -78,10 +78,10 @@ class TopicAnalysisStats {
}
}
List<TopicAnalysisStatsHourlyMsgCountsDTO> toDto() {
List<TopicAnalysisStatsHourlyMsgCountsInnerDTO> toDto() {
return hourlyStats.entrySet().stream()
.sorted(Comparator.comparingLong(Map.Entry::getKey))
.map(e -> new TopicAnalysisStatsHourlyMsgCountsDTO()
.map(e -> new TopicAnalysisStatsHourlyMsgCountsInnerDTO()
.hourStart(e.getKey())
.count(e.getValue()))
.collect(Collectors.toList());

View file

@ -52,7 +52,10 @@ public class KsqlApiClient {
boolean error;
public Optional<JsonNode> getColumnValue(List<JsonNode> row, String column) {
return Optional.ofNullable(row.get(columnNames.indexOf(column)));
int colIdx = columnNames.indexOf(column);
return colIdx >= 0
? Optional.ofNullable(row.get(colIdx))
: Optional.empty();
}
}

View file

@ -89,7 +89,14 @@ public class KsqlServiceV2 {
.name(resp.getColumnValue(row, "name").map(JsonNode::asText).orElse(null))
.topic(resp.getColumnValue(row, "topic").map(JsonNode::asText).orElse(null))
.keyFormat(resp.getColumnValue(row, "keyFormat").map(JsonNode::asText).orElse(null))
.valueFormat(resp.getColumnValue(row, "valueFormat").map(JsonNode::asText).orElse(null)))
.valueFormat(
// for old versions (<0.13) "format" column is filled,
// for new version "keyFormat" & "valueFormat" columns should be filled
resp.getColumnValue(row, "valueFormat")
.or(() -> resp.getColumnValue(row, "format"))
.map(JsonNode::asText)
.orElse(null))
)
.collect(Collectors.toList()));
});
}

View file

@ -21,6 +21,7 @@ import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.LdapAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
import jakarta.annotation.PostConstruct;
import java.util.Collections;
import java.util.List;
import java.util.Set;
@ -28,7 +29,6 @@ import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;

View file

@ -1,9 +1,9 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import com.nimbusds.jose.shaded.json.JSONArray;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
@ -44,7 +44,7 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
.map(Role::getName)
.collect(Collectors.toSet());
JSONArray groups = principal.getAttribute(COGNITO_GROUPS_ATTRIBUTE_NAME);
List<String> groups = principal.getAttribute(COGNITO_GROUPS_ATTRIBUTE_NAME);
if (groups == null) {
log.debug("Cognito groups param is not present");
return Mono.just(groupsByUsername);
@ -56,9 +56,8 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
.stream()
.filter(s -> s.getProvider().equals(Provider.OAUTH_COGNITO))
.filter(s -> s.getType().equals("group"))
.anyMatch(subject -> Stream.of(groups.toArray())
.anyMatch(subject -> Stream.of(groups)
.map(Object::toString)
.distinct()
.anyMatch(cognitoGroup -> cognitoGroup.equals(subject.getValue()))
))
.map(Role::getName)

View file

@ -0,0 +1,53 @@
package com.provectus.kafka.ui.util;
import com.google.common.annotations.VisibleForTesting;
import java.time.Duration;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;
@Slf4j
public class GithubReleaseInfo {
private static final String GITHUB_LATEST_RELEASE_RETRIEVAL_URL =
"https://api.github.com/repos/provectus/kafka-ui/releases/latest";
private static final Duration GITHUB_API_MAX_WAIT_TIME = Duration.ofSeconds(2);
public record GithubReleaseDto(String html_url, String tag_name, String published_at) {
static GithubReleaseDto empty() {
return new GithubReleaseDto(null, null, null);
}
}
private volatile GithubReleaseDto release = GithubReleaseDto.empty();
private final Mono<Void> refreshMono;
public GithubReleaseInfo() {
this(GITHUB_LATEST_RELEASE_RETRIEVAL_URL);
}
@VisibleForTesting
GithubReleaseInfo(String url) {
this.refreshMono = WebClient.create()
.get()
.uri(url)
.exchangeToMono(resp -> resp.bodyToMono(GithubReleaseDto.class))
.timeout(GITHUB_API_MAX_WAIT_TIME)
.doOnError(th -> log.trace("Error getting latest github release info", th))
.onErrorResume(th -> true, th -> Mono.just(GithubReleaseDto.empty()))
.doOnNext(release -> this.release = release)
.then();
}
public GithubReleaseDto get() {
return release;
}
public Mono<Void> refresh() {
return refreshMono;
}
}

View file

@ -16,7 +16,7 @@ import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.util.SocketUtils;
import org.springframework.test.util.TestSocketUtils;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.containers.Network;
import org.testcontainers.utility.DockerImageName;
@ -61,7 +61,7 @@ public abstract class AbstractIntegrationTest {
System.setProperty("kafka.clusters.0.bootstrapServers", kafka.getBootstrapServers());
// List unavailable hosts to verify failover
System.setProperty("kafka.clusters.0.schemaRegistry", String.format("http://localhost:%1$s,http://localhost:%1$s,%2$s",
SocketUtils.findAvailableTcpPort(), schemaRegistry.getUrl()));
TestSocketUtils.findAvailableTcpPort(), schemaRegistry.getUrl()));
System.setProperty("kafka.clusters.0.kafkaConnect.0.name", "kafka-connect");
System.setProperty("kafka.clusters.0.kafkaConnect.0.userName", "kafka-connect");
System.setProperty("kafka.clusters.0.kafkaConnect.0.password", "kafka-connect");

View file

@ -5,13 +5,12 @@ import static org.assertj.core.api.Assertions.assertThat;
import java.util.Arrays;
import java.util.Collections;
import org.junit.jupiter.api.Test;
import org.springframework.boot.actuate.endpoint.Sanitizer;
class KafkaConfigSanitizerTest {
@Test
void doNothingIfEnabledPropertySetToFalse() {
final Sanitizer sanitizer = new KafkaConfigSanitizer(false, Collections.emptyList());
final var sanitizer = new KafkaConfigSanitizer(false, Collections.emptyList());
assertThat(sanitizer.sanitize("password", "secret")).isEqualTo("secret");
assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("secret");
assertThat(sanitizer.sanitize("database.password", "secret")).isEqualTo("secret");
@ -19,7 +18,7 @@ class KafkaConfigSanitizerTest {
@Test
void obfuscateCredentials() {
final Sanitizer sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList());
final var sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList());
assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("******");
assertThat(sanitizer.sanitize("consumer.sasl.jaas.config", "secret")).isEqualTo("******");
assertThat(sanitizer.sanitize("producer.sasl.jaas.config", "secret")).isEqualTo("******");
@ -37,7 +36,7 @@ class KafkaConfigSanitizerTest {
@Test
void notObfuscateNormalConfigs() {
final Sanitizer sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList());
final var sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList());
assertThat(sanitizer.sanitize("security.protocol", "SASL_SSL")).isEqualTo("SASL_SSL");
final String[] bootstrapServer = new String[] {"test1:9092", "test2:9092"};
assertThat(sanitizer.sanitize("bootstrap.servers", bootstrapServer)).isEqualTo(bootstrapServer);
@ -45,7 +44,7 @@ class KafkaConfigSanitizerTest {
@Test
void obfuscateCredentialsWithDefinedPatterns() {
final Sanitizer sanitizer = new KafkaConfigSanitizer(true, Arrays.asList("kafka.ui", ".*test.*"));
final var sanitizer = new KafkaConfigSanitizer(true, Arrays.asList("kafka.ui", ".*test.*"));
assertThat(sanitizer.sanitize("consumer.kafka.ui", "secret")).isEqualTo("******");
assertThat(sanitizer.sanitize("this.is.test.credentials", "secret")).isEqualTo("******");
assertThat(sanitizer.sanitize("this.is.not.credential", "not.credential"))

View file

@ -4,8 +4,11 @@ import static com.provectus.kafka.ui.service.ReactiveAdminClient.toMonoWithExcep
import static java.util.Objects.requireNonNull;
import static org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.ThrowableAssert.ThrowingCallable;
import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.exception.ValidationException;
import com.provectus.kafka.ui.producer.KafkaTestProducer;
import java.time.Duration;
import java.util.ArrayList;
@ -22,16 +25,20 @@ import org.apache.kafka.clients.admin.Config;
import org.apache.kafka.clients.admin.ConfigEntry;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.admin.OffsetSpec;
import org.apache.kafka.clients.admin.TopicDescription;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.TopicPartitionInfo;
import org.apache.kafka.common.config.ConfigResource;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.internals.KafkaFutureImpl;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.assertj.core.api.ThrowableAssert;
import org.junit.function.ThrowingRunnable;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@ -133,6 +140,56 @@ class ReactiveAdminClientTest extends AbstractIntegrationTest {
.verifyComplete();
}
@Test
void filterPartitionsWithLeaderCheckSkipsPartitionsFromTopicWhereSomePartitionsHaveNoLeader() {
  // With failOnNoLeader=false the helper should:
  //  - drop ALL partitions of a topic when ANY of its partitions has no leader,
  //  - drop topics rejected by the predicate,
  //  - keep every partition of fully-healthy topics.
  var filteredPartitions = ReactiveAdminClient.filterPartitionsWithLeaderCheck(
      List.of(
          // contains partitions with no leader
          new TopicDescription("noLeaderTopic", false,
              List.of(
                  new TopicPartitionInfo(0, new Node(1, "n1", 9092), List.of(), List.of()),
                  // partition 1 has a null leader -> the whole topic must be skipped
                  new TopicPartitionInfo(1, null, List.of(), List.of()))),
          // should be skipped by predicate
          new TopicDescription("skippingByPredicate", false,
              List.of(
                  new TopicPartitionInfo(0, new Node(1, "n1", 9092), List.of(), List.of()))),
          // good topic
          new TopicDescription("good", false,
              List.of(
                  new TopicPartitionInfo(0, new Node(1, "n1", 9092), List.of(), List.of()),
                  new TopicPartitionInfo(1, new Node(2, "n2", 9092), List.of(), List.of()))
          )),
      p -> !p.topic().equals("skippingByPredicate"),
      false
  );
  // Only the healthy, predicate-accepted topic survives, with both its partitions.
  assertThat(filteredPartitions)
      .containsExactlyInAnyOrder(
          new TopicPartition("good", 0),
          new TopicPartition("good", 1)
      );
}
@Test
void filterPartitionsWithLeaderCheckThrowExceptionIfThereIsSomePartitionsWithoutLeaderAndFlagSet() {
  // With failOnNoLeader=true a leaderless partition must abort the whole call
  // with a ValidationException instead of being silently filtered out.
  ThrowingCallable call = () -> ReactiveAdminClient.filterPartitionsWithLeaderCheck(
      List.of(
          // contains partitions with no leader
          new TopicDescription("t1", false,
              List.of(
                  new TopicPartitionInfo(0, new Node(1, "n1", 9092), List.of(), List.of()),
                  // partition 1 has a null leader -> should trigger the failure
                  new TopicPartitionInfo(1, null, List.of(), List.of()))),
          new TopicDescription("t2", false,
              List.of(
                  new TopicPartitionInfo(0, new Node(1, "n1", 9092), List.of(), List.of()))
          )),
      p -> true,
      // setting failOnNoLeader flag
      true
  );
  assertThatThrownBy(call).isInstanceOf(ValidationException.class);
}
@Test
void testListOffsetsUnsafe() {
String topic = UUID.randomUUID().toString();

View file

@ -9,6 +9,7 @@ import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
import com.provectus.kafka.ui.emitter.MessagesProcessing;
import com.provectus.kafka.ui.emitter.PollingSettings;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
@ -106,12 +107,16 @@ class RecordEmitterTest extends AbstractIntegrationTest {
);
}
private MessagesProcessing createMessagesProcessing() {
return new MessagesProcessing(RECORD_DESERIALIZER, msg -> true, null);
}
@Test
void pollNothingOnEmptyTopic() {
var forwardEmitter = new ForwardRecordEmitter(
this::createConsumer,
new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
RECORD_DESERIALIZER,
createMessagesProcessing(),
PollingSettings.createDefault()
);
@ -119,7 +124,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
this::createConsumer,
new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
100,
RECORD_DESERIALIZER,
createMessagesProcessing(),
PollingSettings.createDefault()
);
@ -141,7 +146,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
var forwardEmitter = new ForwardRecordEmitter(
this::createConsumer,
new ConsumerPosition(BEGINNING, TOPIC, null),
RECORD_DESERIALIZER,
createMessagesProcessing(),
PollingSettings.createDefault()
);
@ -149,7 +154,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
this::createConsumer,
new ConsumerPosition(LATEST, TOPIC, null),
PARTITIONS * MSGS_PER_PARTITION,
RECORD_DESERIALIZER,
createMessagesProcessing(),
PollingSettings.createDefault()
);
@ -170,7 +175,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
var forwardEmitter = new ForwardRecordEmitter(
this::createConsumer,
new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
RECORD_DESERIALIZER,
createMessagesProcessing(),
PollingSettings.createDefault()
);
@ -178,7 +183,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
this::createConsumer,
new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
PARTITIONS * MSGS_PER_PARTITION,
RECORD_DESERIALIZER,
createMessagesProcessing(),
PollingSettings.createDefault()
);
@ -215,7 +220,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
var forwardEmitter = new ForwardRecordEmitter(
this::createConsumer,
new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
RECORD_DESERIALIZER,
createMessagesProcessing(),
PollingSettings.createDefault()
);
@ -223,7 +228,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
this::createConsumer,
new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
PARTITIONS * MSGS_PER_PARTITION,
RECORD_DESERIALIZER,
createMessagesProcessing(),
PollingSettings.createDefault()
);
@ -254,7 +259,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
this::createConsumer,
new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
numMessages,
RECORD_DESERIALIZER,
createMessagesProcessing(),
PollingSettings.createDefault()
);
@ -280,7 +285,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
this::createConsumer,
new ConsumerPosition(OFFSET, TOPIC, offsets),
100,
RECORD_DESERIALIZER,
createMessagesProcessing(),
PollingSettings.createDefault()
);

View file

@ -15,7 +15,6 @@ import java.util.concurrent.CopyOnWriteArraySet;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.springframework.util.unit.DataSize;
import org.testcontainers.utility.DockerImageName;
class KsqlServiceV2Test extends AbstractIntegrationTest {
@ -27,8 +26,6 @@ class KsqlServiceV2Test extends AbstractIntegrationTest {
private static final Set<String> STREAMS_TO_DELETE = new CopyOnWriteArraySet<>();
private static final Set<String> TABLES_TO_DELETE = new CopyOnWriteArraySet<>();
private static final DataSize maxBuffSize = DataSize.ofMegabytes(20);
@BeforeAll
static void init() {
KSQL_DB.start();

View file

@ -0,0 +1,54 @@
package com.provectus.kafka.ui.util;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import java.time.Duration;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import reactor.test.StepVerifier;
class GithubReleaseInfoTest {

  // Fake GitHub API endpoint; the test enqueues canned responses on it.
  private final MockWebServer server = new MockWebServer();

  @BeforeEach
  void startMockServer() throws IOException {
    server.start();
  }

  @AfterEach
  void stopMockServer() throws IOException {
    server.close();
  }

  @Test
  void test() {
    // Canned "latest release" payload; unknown properties must be ignored on binding.
    server.enqueue(new MockResponse()
        .addHeader("content-type: application/json")
        .setBody("""
            {
              "published_at": "2023-03-09T16:11:31Z",
              "tag_name": "v0.6.0",
              "html_url": "https://github.com/provectus/kafka-ui/releases/tag/v0.6.0",
              "some_unused_prop": "ololo"
            }
            """));
    var releaseUrl = server.url("repos/provectus/kafka-ui/releases/latest").toString();

    var infoHolder = new GithubReleaseInfo(releaseUrl);
    // Block until the refresh round-trip against the mock server completes.
    infoHolder.refresh().block();

    var info = infoHolder.get();
    assertThat(info.html_url())
        .isEqualTo("https://github.com/provectus/kafka-ui/releases/tag/v0.6.0");
    assertThat(info.published_at())
        .isEqualTo("2023-03-09T16:11:31Z");
    assertThat(info.tag_name())
        .isEqualTo("v0.6.0");
  }

}

View file

@ -27,20 +27,24 @@
<artifactId>spring-boot-starter-validation</artifactId>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-annotations</artifactId>
<version>${swagger-annotations.version}</version>
<groupId>io.swagger.core.v3</groupId>
<artifactId>swagger-integration-jakarta</artifactId>
<version>2.2.8</version>
</dependency>
<dependency>
<groupId>org.openapitools</groupId>
<artifactId>jackson-databind-nullable</artifactId>
<version>${jackson-databind-nullable.version}</version>
<version>0.2.4</version>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
<version>3.0.2</version>
<scope>provided</scope>
<groupId>jakarta.annotation</groupId>
<artifactId>jakarta.annotation-api</artifactId>
<version>2.1.1</version>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<version>1.3.2</version>
</dependency>
</dependencies>
@ -71,6 +75,7 @@
<library>webclient</library>
<useBeanValidation>true</useBeanValidation>
<dateLibrary>java8</dateLibrary>
<useJakartaEe>true</useJakartaEe>
</configOptions>
</configuration>
</execution>
@ -80,8 +85,7 @@
<goal>generate</goal>
</goals>
<configuration>
<inputSpec>${project.basedir}/src/main/resources/swagger/kafka-ui-api.yaml
</inputSpec>
<inputSpec>${project.basedir}/src/main/resources/swagger/kafka-ui-api.yaml</inputSpec>
<output>${project.build.directory}/generated-sources/api</output>
<generatorName>spring</generatorName>
<modelNameSuffix>DTO</modelNameSuffix>
@ -89,14 +93,12 @@
<modelPackage>com.provectus.kafka.ui.model</modelPackage>
<apiPackage>com.provectus.kafka.ui.api</apiPackage>
<sourceFolder>kafka-ui-contract</sourceFolder>
<reactive>true</reactive>
<interfaceOnly>true</interfaceOnly>
<skipDefaultInterface>true</skipDefaultInterface>
<useBeanValidation>true</useBeanValidation>
<useTags>true</useTags>
<useSpringBoot3>true</useSpringBoot3>
<dateLibrary>java8</dateLibrary>
</configOptions>
<typeMappings>
@ -116,15 +118,13 @@
<generatorName>java</generatorName>
<generateApiTests>false</generateApiTests>
<generateModelTests>false</generateModelTests>
<configOptions>
<modelPackage>com.provectus.kafka.ui.connect.model</modelPackage>
<apiPackage>com.provectus.kafka.ui.connect.api</apiPackage>
<sourceFolder>kafka-connect-client</sourceFolder>
<asyncNative>true</asyncNative>
<library>webclient</library>
<useJakartaEe>true</useJakartaEe>
<useBeanValidation>true</useBeanValidation>
<dateLibrary>java8</dateLibrary>
</configOptions>
@ -142,15 +142,13 @@
<generatorName>java</generatorName>
<generateApiTests>false</generateApiTests>
<generateModelTests>false</generateModelTests>
<configOptions>
<modelPackage>com.provectus.kafka.ui.sr.model</modelPackage>
<apiPackage>com.provectus.kafka.ui.sr.api</apiPackage>
<sourceFolder>kafka-sr-client</sourceFolder>
<asyncNative>true</asyncNative>
<library>webclient</library>
<useJakartaEe>true</useJakartaEe>
<useBeanValidation>true</useBeanValidation>
<dateLibrary>java8</dateLibrary>
</configOptions>

View file

@ -2021,6 +2021,26 @@ components:
type: string
enum:
- DYNAMIC_CONFIG
build:
type: object
properties:
commitId:
type: string
version:
type: string
buildTime:
type: string
isLatestRelease:
type: boolean
latestRelease:
type: object
properties:
versionTag:
type: string
publishedAt:
type: string
htmlUrl:
type: string
Cluster:
type: object
@ -2493,6 +2513,10 @@ components:
- UNKNOWN
ConsumerGroup:
discriminator:
propertyName: inherit
mapping:
details: "#/components/schemas/ConsumerGroupDetails"
type: object
properties:
groupId:

View file

@ -27,7 +27,7 @@ This repository is for E2E UI automation.
```
git clone https://github.com/provectus/kafka-ui.git
cd kafka-ui-e2e-checks
docker pull selenoid/vnc:chrome_86.0
docker pull selenoid/vnc_chrome:103.0
```
### How to run checks
@ -36,6 +36,7 @@ docker pull selenoid/vnc:chrome_86.0
```
cd kafka-ui
docker-compose -f kafka-ui-e2e-checks/docker/selenoid-local.yaml up -d
docker-compose -f documentation/compose/e2e-tests.yaml up -d
```
@ -51,6 +52,14 @@ docker-compose -f documentation/compose/e2e-tests.yaml up -d
-Dbrowser=local
```
Expected Location of Chrome
```
Linux: /usr/bin/google-chrome
Mac: /Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome
Windows XP: %HOMEPATH%\Local Settings\Application Data\Google\Chrome\Application\chrome.exe
Windows Vista and newer: C:\Users\%USERNAME%\AppData\Local\Google\Chrome\Application\chrome.exe
```
### Qase integration
Found instruction for Qase.io integration (for internal use only) at `kafka-ui-e2e-checks/QASE.md`

View file

@ -1,17 +1,19 @@
---
version: '3'
services:
selenoid:
network_mode: bridge
image: aerokube/selenoid:1.10.7
volumes:
- "../selenoid/config:/etc/selenoid"
- "/var/run/docker.sock:/var/run/docker.sock"
- "../selenoid/video:/video"
- "../selenoid/video:/opt/selenoid/video"
- "../selenoid/logs:/opt/selenoid/logs"
environment:
- OVERRIDE_VIDEO_OUTPUT_DIR=video
command: [ "-conf", "/etc/selenoid/browsers.json", "-video-output-dir", "/opt/selenoid/video", "-log-output-dir", "/opt/selenoid/logs" ]
- OVERRIDE_VIDEO_OUTPUT_DIR=../selenoid/video
command: [ "-conf", "/etc/selenoid/browsersGit.json", "-video-output-dir", "/opt/selenoid/video", "-log-output-dir", "/opt/selenoid/logs" ]
ports:
- "4444:4444"
@ -22,10 +24,10 @@ services:
- selenoid
ports:
- "8081:8080"
command: [ "--selenoid-uri", "http://localhost:4444" ]
command: [ "--selenoid-uri", "http://selenoid:4444" ]
selenoid-chrome:
network_mode: bridge
image: selenoid/vnc:chrome_96.0
image: selenoid/vnc_chrome:103.0
extra_hosts:
- "host.docker.internal:host-gateway"

View file

@ -0,0 +1,33 @@
---
version: '3'
services:
  # Selenoid hub for local e2e runs: starts browser containers via the host Docker daemon.
  selenoid:
    network_mode: bridge
    image: aerokube/selenoid:1.10.7
    volumes:
      - "../selenoid/config:/etc/selenoid"
      # Docker socket is mounted so selenoid can launch sibling browser containers.
      - "/var/run/docker.sock:/var/run/docker.sock"
      - "../selenoid/video:/opt/selenoid/video"
      - "../selenoid/logs:/opt/selenoid/logs"
    environment:
      - OVERRIDE_VIDEO_OUTPUT_DIR=../selenoid/video
    # browsersLocal.json describes the browser images available to this local setup.
    command: [ "-conf", "/etc/selenoid/browsersLocal.json", "-video-output-dir", "/opt/selenoid/video", "-log-output-dir", "/opt/selenoid/logs" ]
    ports:
      - "4444:4444"
  # Web UI for observing selenoid sessions (exposed on http://localhost:8081).
  selenoid-ui:
    network_mode: bridge
    image: aerokube/selenoid-ui:latest-release
    links:
      - selenoid
    ports:
      - "8081:8080"
    command: [ "--selenoid-uri", "http://selenoid:4444" ]
  # Chrome + VNC browser image; NOTE(review): presumably listed so compose pulls the
  # image selenoid launches for sessions — confirm against browsersLocal.json.
  selenoid-chrome:
    network_mode: bridge
    image: selenoid/vnc_chrome:103.0
    extra_hosts:
      - "host.docker.internal:host-gateway"
View file

@ -17,15 +17,14 @@
<testcontainers.version>1.17.6</testcontainers.version>
<httpcomponents.version>5.2.1</httpcomponents.version>
<selenium.version>4.8.1</selenium.version>
<selenide.version>6.11.2</selenide.version>
<selenide.version>6.12.3</selenide.version>
<testng.version>7.7.0</testng.version>
<allure.version>2.21.0</allure.version>
<qase.io.version>3.0.3</qase.io.version>
<qase.io.version>3.0.4</qase.io.version>
<aspectj.version>1.9.9.1</aspectj.version>
<assertj.version>3.24.2</assertj.version>
<hamcrest.version>2.2</hamcrest.version>
<slf4j.version>1.7.36</slf4j.version>
<dotenv.version>2.3.1</dotenv.version>
<slf4j.version>2.0.5</slf4j.version>
<kafka.version>3.3.1</kafka.version>
</properties>
@ -122,6 +121,11 @@
<artifactId>selenium</artifactId>
<version>${testcontainers.version}</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>${org.projectlombok.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents.core5</groupId>
<artifactId>httpcore5</artifactId>
@ -132,6 +136,11 @@
<artifactId>httpclient5</artifactId>
<version>${httpcomponents.version}</version>
</dependency>
<dependency>
<groupId>org.seleniumhq.selenium</groupId>
<artifactId>selenium-http-jdk-client</artifactId>
<version>${selenium.version}</version>
</dependency>
<dependency>
<groupId>org.seleniumhq.selenium</groupId>
<artifactId>selenium-http</artifactId>
@ -187,16 +196,6 @@
<artifactId>slf4j-simple</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>${org.projectlombok.version}</version>
</dependency>
<dependency>
<groupId>io.github.cdimascio</groupId>
<artifactId>dotenv-java</artifactId>
<version>${dotenv.version}</version>
</dependency>
<dependency>
<groupId>com.provectus</groupId>
<artifactId>kafka-ui-contract</artifactId>
@ -265,6 +264,37 @@
<artifactId>allure-maven</artifactId>
<version>2.10.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>3.1.2</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>10.3.1</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>checkstyle</id>
<phase>validate</phase>
<goals>
<goal>check</goal>
</goals>
<configuration>
<violationSeverity>warning</violationSeverity>
<failOnViolation>true</failOnViolation>
<failsOnError>true</failsOnError>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<configLocation>file:${basedir}/../etc/checkstyle/checkstyle-e2e.xml</configLocation>
<headerLocation>file:${basedir}/../etc/checkstyle/apache-header.txt</headerLocation>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>

View file

@ -0,0 +1,15 @@
{
"chrome": {
"default": "103.0",
"versions": {
"103.0": {
"image": "selenoid/vnc_chrome:103.0",
"hosts": [
"host.docker.internal:172.17.0.1"
],
"port": "4444",
"path": "/"
}
}
}
}

View file

@ -1,9 +1,9 @@
{
"chrome": {
"default": "96.0",
"default": "103.0",
"versions": {
"96.0": {
"image": "selenoid/vnc_chrome:96.0",
"103.0": {
"image": "selenoid/vnc_chrome:103.0",
"port": "4444",
"path": "/"
}

View file

@ -1,11 +1,11 @@
package com.provectus.kafka.ui.models;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
import com.provectus.kafka.ui.api.model.SchemaType;
import lombok.Data;
import lombok.experimental.Accessors;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
@Data
@Accessors(chain = true)
public class Schema {
@ -28,6 +28,7 @@ public class Schema {
public static Schema createSchemaProtobuf() {
return new Schema().setName("schema_protobuf-" + randomAlphabetic(5))
.setType(SchemaType.PROTOBUF)
.setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_protobuf_value.txt");
.setValuePath(
System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_protobuf_value.txt");
}
}

View file

@ -11,7 +11,7 @@ import lombok.experimental.Accessors;
@Accessors(chain = true)
public class Topic {
private String name, timeToRetainData, maxMessageBytes, messageKey, messageContent, customParameterValue;
private String name, timeToRetainData, maxMessageBytes, messageKey, messageValue, customParameterValue;
private int numberOfPartitions;
private CustomParameterType customParameterType;
private CleanupPolicyValue cleanupPolicyValue;

View file

@ -1,26 +1,26 @@
package com.provectus.kafka.ui.pages;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.codeborne.selenide.WebDriverRunner;
import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
import com.provectus.kafka.ui.utilities.WebUtils;
import java.time.Duration;
import lombok.extern.slf4j.Slf4j;
import org.openqa.selenium.Keys;
import org.openqa.selenium.interactions.Actions;
import java.time.Duration;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
@Slf4j
public abstract class BasePage extends WebUtils {
protected SelenideElement loadingSpinner = $x("//div[@role='progressbar']");
protected SelenideElement submitBtn = $x("//button[@type='submit']");
protected SelenideElement tableGrid = $x("//table");
protected SelenideElement searchFld = $x("//input[@type='text'][contains(@id, ':r')]");
protected SelenideElement dotMenuBtn = $x("//button[@aria-label='Dropdown Toggle']");
protected SelenideElement alertHeader = $x("//div[@role='alert']//div[@role='heading']");
protected SelenideElement alertMessage = $x("//div[@role='alert']//div[@role='contentinfo']");
@ -37,13 +37,20 @@ public abstract class BasePage extends WebUtils {
protected String pageTitleFromHeader = "//h1[text()='%s']";
protected String pagePathFromHeader = "//a[text()='%s']/../h1";
protected void waitUntilSpinnerDisappear() {
protected void waitUntilSpinnerDisappear(int... timeoutInSeconds) {
log.debug("\nwaitUntilSpinnerDisappear");
if (isVisible(loadingSpinner)) {
if (isVisible(loadingSpinner, timeoutInSeconds)) {
loadingSpinner.shouldBe(Condition.disappear, Duration.ofSeconds(60));
}
}
protected void searchItem(String tag) {
log.debug("\nsearchItem: {}", tag);
sendKeysAfterClear(searchFld, tag);
searchFld.pressEnter().shouldHave(Condition.value(tag));
waitUntilSpinnerDisappear(1);
}
protected SelenideElement getPageTitleFromHeader(MenuItem menuItem) {
return $x(String.format(pageTitleFromHeader, menuItem.getPageTitle()));
}

View file

@ -1,17 +1,16 @@
package com.provectus.kafka.ui.pages.brokers;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
public class BrokersConfigTab extends BasePage {
protected List<SelenideElement> editBtn = $$x("//button[@aria-label='editAction']");

View file

@ -1,19 +1,18 @@
package com.provectus.kafka.ui.pages.brokers;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import org.openqa.selenium.By;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$x;
import org.openqa.selenium.By;
public class BrokersDetails extends BasePage {

View file

@ -1,19 +1,18 @@
package com.provectus.kafka.ui.pages.brokers;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.BROKERS;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.BROKERS;
public class BrokersList extends BasePage {
@Step

View file

@ -1,12 +1,12 @@
package com.provectus.kafka.ui.pages.connectors;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
public class ConnectorCreateForm extends BasePage {
protected SelenideElement nameField = $x("//input[@name='name']");

View file

@ -1,12 +1,12 @@
package com.provectus.kafka.ui.pages.connectors;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
public class ConnectorDetails extends BasePage {
protected SelenideElement deleteBtn = $x("//li/div[contains(text(),'Delete')]");

View file

@ -1,13 +1,13 @@
package com.provectus.kafka.ui.pages.connectors;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KAFKA_CONNECT;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KAFKA_CONNECT;
public class KafkaConnectList extends BasePage {

View file

@ -1,11 +1,11 @@
package com.provectus.kafka.ui.pages.consumers;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
public class ConsumersDetails extends BasePage {
protected String consumerIdHeaderLocator = "//h1[contains(text(),'%s')]";

View file

@ -1,11 +1,11 @@
package com.provectus.kafka.ui.pages.consumers;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.CONSUMERS;
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.CONSUMERS;
public class ConsumersList extends BasePage {
@Step

View file

@ -1,139 +0,0 @@
package com.provectus.kafka.ui.pages.ksqlDb;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlMenuTabs;
import io.qameta.allure.Step;
import org.openqa.selenium.By;
import java.util.ArrayList;
import java.util.List;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
/**
 * Page object for the KSQL DB list screen: exposes the "Execute KSQL Request"
 * action, the Tables/Streams tabs, and typed accessors for rows of either grid.
 */
public class KsqlDbList extends BasePage {
  // Button that opens the KSQL query form
  protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
  // Navigation tabs switching between the Tables and Streams grids
  protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
  protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
  /**
   * Waits for the loading spinner to disappear and the KSQL DB page title to be visible.
   *
   * @return this page object for fluent chaining
   */
  @Step
  public KsqlDbList waitUntilScreenReady() {
    waitUntilSpinnerDisappear();
    getPageTitleFromHeader(KSQL_DB).shouldBe(Condition.visible);
    return this;
  }
  /**
   * Clicks the "Execute KSQL Request" button (JavaScript click).
   *
   * @return this page object for fluent chaining
   */
  @Step
  public KsqlDbList clickExecuteKsqlRequestBtn() {
    clickByJavaScript(executeKsqlBtn);
    return this;
  }
  /**
   * Opens the given details tab by its link text and waits for the grid to load.
   *
   * @param menu tab to open
   * @return this page object for fluent chaining
   */
  @Step
  public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
    $(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
    waitUntilSpinnerDisappear();
    return this;
  }
  // Wraps every visible grid row into a KsqlTablesGridItem; requires at least one row.
  private List<KsqlDbList.KsqlTablesGridItem> initTablesItems() {
    List<KsqlDbList.KsqlTablesGridItem> gridItemList = new ArrayList<>();
    gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
        .forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
    return gridItemList;
  }
  /**
   * Finds a Tables-grid row by its exact table name.
   *
   * @param tableName exact name shown in the first column
   * @return the matching row wrapper
   * @throws java.util.NoSuchElementException if no row matches
   */
  @Step
  public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) {
    return initTablesItems().stream()
        .filter(e -> e.getTableName().equals(tableName))
        .findFirst().orElseThrow();
  }
  // Wraps every visible grid row into a KsqlStreamsGridItem; requires at least one row.
  private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
    List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
    gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
        .forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
    return gridItemList;
  }
  /**
   * Finds a Streams-grid row by its exact stream name.
   *
   * @param streamName exact name shown in the first column
   * @return the matching row wrapper
   * @throws java.util.NoSuchElementException if no row matches
   */
  @Step
  public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
    return initStreamsItems().stream()
        .filter(e -> e.getStreamName().equals(streamName))
        .findFirst().orElseThrow();
  }
  /** Row wrapper for the Tables grid; cells are read by positional td index. */
  public static class KsqlTablesGridItem extends BasePage {
    private final SelenideElement element;
    public KsqlTablesGridItem(SelenideElement element) {
      this.element = element;
    }
    /** @return trimmed text of column 1 (table name) */
    @Step
    public String getTableName() {
      return element.$x("./td[1]").getText().trim();
    }
    /** @return trimmed text of column 2 (topic name) */
    @Step
    public String getTopicName() {
      return element.$x("./td[2]").getText().trim();
    }
    /** @return trimmed text of column 3 (key format) */
    @Step
    public String getKeyFormat() {
      return element.$x("./td[3]").getText().trim();
    }
    /** @return trimmed text of column 4 (value format) */
    @Step
    public String getValueFormat() {
      return element.$x("./td[4]").getText().trim();
    }
    /** @return trimmed text of column 5 (windowed flag) */
    @Step
    public String getIsWindowed() {
      return element.$x("./td[5]").getText().trim();
    }
  }
  /** Row wrapper for the Streams grid; cells are read by positional td index. */
  public static class KsqlStreamsGridItem extends BasePage {
    private final SelenideElement element;
    public KsqlStreamsGridItem(SelenideElement element) {
      this.element = element;
    }
    /** @return trimmed text of column 1 (stream name) */
    @Step
    public String getStreamName() {
      return element.$x("./td[1]").getText().trim();
    }
    /** @return trimmed text of column 2 (topic name) */
    @Step
    public String getTopicName() {
      return element.$x("./td[2]").getText().trim();
    }
    /** @return trimmed text of column 3 (key format) */
    @Step
    public String getKeyFormat() {
      return element.$x("./td[3]").getText().trim();
    }
    /** @return trimmed text of column 4 (value format) */
    @Step
    public String getValueFormat() {
      return element.$x("./td[4]").getText().trim();
    }
    /** @return trimmed text of column 5 (windowed flag) */
    @Step
    public String getIsWindowed() {
      return element.$x("./td[5]").getText().trim();
    }
  }
}

View file

@ -1,154 +0,0 @@
package com.provectus.kafka.ui.pages.ksqlDb;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
/**
 * Page object for the KSQL query editor: query input, execution controls,
 * stream properties, and the results grid.
 */
public class KsqlQueryForm extends BasePage {
  protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
  protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
  protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
  protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
  protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
  // Rendered (read-only) ace editor content vs. the hidden textarea that accepts keyboard input
  protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
  protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
  protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
  protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
  protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
  /** Waits until the spinner is gone and the Execute button is visible. */
  @Step
  public KsqlQueryForm waitUntilScreenReady() {
    waitUntilSpinnerDisappear();
    executeBtn.shouldBe(Condition.visible);
    return this;
  }
  /** Clears the query editor via the Clear button (JavaScript click). */
  @Step
  public KsqlQueryForm clickClearBtn() {
    clickByJavaScript(clearBtn);
    return this;
  }
  /**
   * Executes the current query. A streaming query (containing "EMIT CHANGES;")
   * never completes, so only the spinner's appearance is awaited for it;
   * otherwise the spinner is awaited to disappear.
   */
  @Step
  public KsqlQueryForm clickExecuteBtn() {
    clickByActions(executeBtn);
    if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
      loadingSpinner.shouldBe(Condition.visible);
    } else {
      waitUntilSpinnerDisappear();
    }
    return this;
  }
  /** Stops a running query and waits for loading to finish. */
  @Step
  public KsqlQueryForm clickStopQueryBtn() {
    clickByActions(stopQueryBtn);
    waitUntilSpinnerDisappear();
    return this;
  }
  /** Clears the results grid and waits for loading to finish. */
  @Step
  public KsqlQueryForm clickClearResultsBtn() {
    clickByActions(clearResultsBtn);
    waitUntilSpinnerDisappear();
    return this;
  }
  /** Adds a stream-property key/value row (JavaScript click). */
  @Step
  public KsqlQueryForm clickAddStreamProperty() {
    clickByJavaScript(addStreamPropertyBtn);
    return this;
  }
  /** Focuses the editor, then types the query into the hidden ace textarea. */
  @Step
  public KsqlQueryForm setQuery(String query) {
    queryAreaValue.shouldBe(Condition.visible).click();
    queryArea.setValue(query);
    return this;
  }
  /**
   * Finds a results row whose name column matches, ignoring case.
   *
   * @param name expected value of the name column
   * @return the matching row wrapper
   * @throws java.util.NoSuchElementException if no row matches
   */
  @Step
  public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
    return initItems().stream()
        .filter(e -> e.getName().equalsIgnoreCase(name))
        .findFirst().orElseThrow();
  }
  /** @return true if the results grid has at least one row; false on any lookup failure */
  @Step
  public boolean areResultsVisible() {
    boolean visible = false;
    try {
      visible = initItems().size() > 0;
    } catch (Throwable ignored) {
    }
    return visible;
  }
  // Wraps every result row into a KsqlResponseGridItem; requires at least one row.
  private List<KsqlQueryForm.KsqlResponseGridItem> initItems() {
    List<KsqlQueryForm.KsqlResponseGridItem> gridItemList = new ArrayList<>();
    ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
        .forEach(item -> gridItemList.add(new KsqlQueryForm.KsqlResponseGridItem(item)));
    return gridItemList;
  }
  /** Row wrapper for the query results grid; cells are read by positional td index. */
  public static class KsqlResponseGridItem extends BasePage {
    private final SelenideElement element;
    private KsqlResponseGridItem(SelenideElement element) {
      this.element = element;
    }
    /** @return trimmed text of column 1 (type) */
    @Step
    public String getType() {
      return element.$x("./td[1]").getText().trim();
    }
    /** @return trimmed text of column 2 (name); the cell is scrolled into view first */
    @Step
    public String getName() {
      return element.$x("./td[2]").scrollTo().getText().trim();
    }
    /** @return true if the name cell becomes visible within 500 ms, false otherwise */
    @Step
    public boolean isVisible() {
      boolean isVisible = false;
      try {
        element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
        isVisible = true;
      } catch (Throwable ignored) {
      }
      return isVisible;
    }
    /** @return trimmed text of column 3 (topic) */
    @Step
    public String getTopic() {
      return element.$x("./td[3]").getText().trim();
    }
    /** @return trimmed text of column 4 (key format) */
    @Step
    public String getKeyFormat() {
      return element.$x("./td[4]").getText().trim();
    }
    /** @return trimmed text of column 5 (value format) */
    @Step
    public String getValueFormat() {
      return element.$x("./td[5]").getText().trim();
    }
    /** @return trimmed text of column 6 (windowed flag) */
    @Step
    public String getIsWindowed() {
      return element.$x("./td[6]").getText().trim();
    }
  }
}

View file

@ -1,17 +0,0 @@
package com.provectus.kafka.ui.pages.ksqlDb.enums;
/**
 * Detail tabs on the KSQL DB list page; toString() yields the link text used for lookup.
 */
public enum KsqlMenuTabs {
  // NOTE(review): the Tables tab link elsewhere is labelled 'Tables' — confirm 'Table' is intended
  TABLES("Table"),
  STREAMS("Streams");
  // Visible link text of the tab
  private final String value;
  KsqlMenuTabs(String value) {
    this.value = value;
  }
  public String toString() {
    return value;
  }
}

View file

@ -1,19 +0,0 @@
package com.provectus.kafka.ui.pages.ksqlDb.enums;
/**
 * Canned KSQL statements used by the e2e tests; SELECT_ALL_FROM expects the
 * table/stream name to be substituted via String.format.
 */
public enum KsqlQueryConfig {
  SHOW_TABLES("show tables;"),
  SHOW_STREAMS("show streams;"),
  SELECT_ALL_FROM("SELECT * FROM %s\n" +
      "EMIT CHANGES;");
  // Raw KSQL statement text
  private final String query;
  KsqlQueryConfig(String query) {
    this.query = query;
  }
  public String getQuery() {
    return query;
  }
}

View file

@ -0,0 +1,138 @@
package com.provectus.kafka.ui.pages.ksqldb;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs;
import io.qameta.allure.Step;
import java.util.ArrayList;
import java.util.List;
import org.openqa.selenium.By;
/**
 * Page object for the KSQL DB list screen. Provides the "Execute KSQL Request"
 * action, tab navigation, and typed row accessors for the Tables and Streams grids.
 */
public class KsqlDbList extends BasePage {

  protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
  protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
  protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");

  /**
   * Waits for the loading spinner to vanish and the KSQL DB page title to appear.
   *
   * @return this page object for fluent chaining
   */
  @Step
  public KsqlDbList waitUntilScreenReady() {
    waitUntilSpinnerDisappear();
    getPageTitleFromHeader(KSQL_DB).shouldBe(Condition.visible);
    return this;
  }

  /**
   * Opens the query form by clicking "Execute KSQL Request" (JavaScript click).
   *
   * @return this page object for fluent chaining
   */
  @Step
  public KsqlDbList clickExecuteKsqlRequestBtn() {
    clickByJavaScript(executeKsqlBtn);
    return this;
  }

  /**
   * Switches to the requested details tab by its link text and waits for loading.
   *
   * @param menu the tab to open
   * @return this page object for fluent chaining
   */
  @Step
  public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
    $(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
    waitUntilSpinnerDisappear();
    return this;
  }

  // Collects every visible grid row as a Tables-row wrapper; the grid must be non-empty.
  private List<KsqlTablesGridItem> initTablesItems() {
    List<KsqlTablesGridItem> rows = new ArrayList<>();
    for (SelenideElement row : gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))) {
      rows.add(new KsqlTablesGridItem(row));
    }
    return rows;
  }

  /**
   * Looks up a Tables-grid row by exact table name.
   *
   * @param tableName name shown in the first column
   * @return the matching row wrapper
   * @throws java.util.NoSuchElementException if no row matches
   */
  @Step
  public KsqlTablesGridItem getTableByName(String tableName) {
    return initTablesItems().stream()
        .filter(row -> row.getTableName().equals(tableName))
        .findFirst()
        .orElseThrow();
  }

  // Collects every visible grid row as a Streams-row wrapper; the grid must be non-empty.
  private List<KsqlStreamsGridItem> initStreamsItems() {
    List<KsqlStreamsGridItem> rows = new ArrayList<>();
    for (SelenideElement row : gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))) {
      rows.add(new KsqlStreamsGridItem(row));
    }
    return rows;
  }

  /**
   * Looks up a Streams-grid row by exact stream name.
   *
   * @param streamName name shown in the first column
   * @return the matching row wrapper
   * @throws java.util.NoSuchElementException if no row matches
   */
  @Step
  public KsqlStreamsGridItem getStreamByName(String streamName) {
    return initStreamsItems().stream()
        .filter(row -> row.getStreamName().equals(streamName))
        .findFirst()
        .orElseThrow();
  }

  /** Wrapper around one row of the Tables grid; cells are addressed by td position. */
  public static class KsqlTablesGridItem extends BasePage {

    private final SelenideElement row;

    public KsqlTablesGridItem(SelenideElement element) {
      this.row = element;
    }

    // Reads and trims the text of the idx-th cell of this row.
    private String cell(int idx) {
      return row.$x("./td[" + idx + "]").getText().trim();
    }

    /** @return table name (column 1) */
    @Step
    public String getTableName() {
      return cell(1);
    }

    /** @return topic name (column 2) */
    @Step
    public String getTopicName() {
      return cell(2);
    }

    /** @return key format (column 3) */
    @Step
    public String getKeyFormat() {
      return cell(3);
    }

    /** @return value format (column 4) */
    @Step
    public String getValueFormat() {
      return cell(4);
    }

    /** @return windowed flag (column 5) */
    @Step
    public String getIsWindowed() {
      return cell(5);
    }
  }

  /** Wrapper around one row of the Streams grid; cells are addressed by td position. */
  public static class KsqlStreamsGridItem extends BasePage {

    private final SelenideElement row;

    public KsqlStreamsGridItem(SelenideElement element) {
      this.row = element;
    }

    // Reads and trims the text of the idx-th cell of this row.
    private String cell(int idx) {
      return row.$x("./td[" + idx + "]").getText().trim();
    }

    /** @return stream name (column 1) */
    @Step
    public String getStreamName() {
      return cell(1);
    }

    /** @return topic name (column 2) */
    @Step
    public String getTopicName() {
      return cell(2);
    }

    /** @return key format (column 3) */
    @Step
    public String getKeyFormat() {
      return cell(3);
    }

    /** @return value format (column 4) */
    @Step
    public String getValueFormat() {
      return cell(4);
    }

    /** @return windowed flag (column 5) */
    @Step
    public String getIsWindowed() {
      return cell(5);
    }
  }
}

View file

@ -0,0 +1,153 @@
package com.provectus.kafka.ui.pages.ksqldb;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
/**
 * Page object for the KSQL query editor: query input, execution controls,
 * stream properties, and the result grid.
 */
public class KsqlQueryForm extends BasePage {

  protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
  protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
  protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
  protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
  protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
  protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
  protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
  protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
  protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
  protected ElementsCollection valueField = $$x("//input[@aria-label='value']");

  /** Waits until the spinner is gone and the Execute button is shown. */
  @Step
  public KsqlQueryForm waitUntilScreenReady() {
    waitUntilSpinnerDisappear();
    executeBtn.shouldBe(Condition.visible);
    return this;
  }

  /** Clears the query editor via the Clear button (JavaScript click). */
  @Step
  public KsqlQueryForm clickClearBtn() {
    clickByJavaScript(clearBtn);
    return this;
  }

  /**
   * Runs the current query. A streaming query (one containing "EMIT CHANGES;")
   * never completes, so only the spinner's appearance is awaited for it;
   * otherwise the spinner is awaited to disappear.
   */
  @Step
  public KsqlQueryForm clickExecuteBtn() {
    clickByActions(executeBtn);
    boolean isStreamingQuery = queryAreaValue.getText().contains("EMIT CHANGES;");
    if (isStreamingQuery) {
      loadingSpinner.shouldBe(Condition.visible);
    } else {
      waitUntilSpinnerDisappear();
    }
    return this;
  }

  /** Stops a running query and waits for loading to finish. */
  @Step
  public KsqlQueryForm clickStopQueryBtn() {
    clickByActions(stopQueryBtn);
    waitUntilSpinnerDisappear();
    return this;
  }

  /** Clears the result grid and waits for loading to finish. */
  @Step
  public KsqlQueryForm clickClearResultsBtn() {
    clickByActions(clearResultsBtn);
    waitUntilSpinnerDisappear();
    return this;
  }

  /** Adds a stream-property key/value row (JavaScript click). */
  @Step
  public KsqlQueryForm clickAddStreamProperty() {
    clickByJavaScript(addStreamPropertyBtn);
    return this;
  }

  /** Focuses the editor, then types the query into the hidden ace textarea. */
  @Step
  public KsqlQueryForm setQuery(String query) {
    queryAreaValue.shouldBe(Condition.visible).click();
    queryArea.setValue(query);
    return this;
  }

  /**
   * Finds the result row whose name column matches, ignoring case.
   *
   * @param name expected value of the name column
   * @return the matching row wrapper
   * @throws java.util.NoSuchElementException if no row matches
   */
  @Step
  public KsqlResponseGridItem getTableByName(String name) {
    return initItems().stream()
        .filter(row -> row.getName().equalsIgnoreCase(name))
        .findFirst()
        .orElseThrow();
  }

  /** @return true when the result grid contains at least one row; false on any lookup failure */
  @Step
  public boolean areResultsVisible() {
    try {
      return !initItems().isEmpty();
    } catch (Throwable ignored) {
      return false;
    }
  }

  // Wraps each result row element; the grid must contain at least one row.
  private List<KsqlResponseGridItem> initItems() {
    List<KsqlResponseGridItem> rows = new ArrayList<>();
    for (SelenideElement row : ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))) {
      rows.add(new KsqlResponseGridItem(row));
    }
    return rows;
  }

  /** Wrapper around one row of the query result grid; cells are addressed by td position. */
  public static class KsqlResponseGridItem extends BasePage {

    private final SelenideElement row;

    private KsqlResponseGridItem(SelenideElement element) {
      this.row = element;
    }

    // Reads and trims the text of the idx-th cell of this row.
    private String cell(int idx) {
      return row.$x("./td[" + idx + "]").getText().trim();
    }

    /** @return object type (column 1) */
    @Step
    public String getType() {
      return cell(1);
    }

    /** @return name (column 2); the cell is scrolled into view before reading */
    @Step
    public String getName() {
      return row.$x("./td[2]").scrollTo().getText().trim();
    }

    /** @return true if the name cell becomes visible within 500 ms, false otherwise */
    @Step
    public boolean isVisible() {
      try {
        row.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
        return true;
      } catch (Throwable ignored) {
        return false;
      }
    }

    /** @return topic (column 3) */
    @Step
    public String getTopic() {
      return cell(3);
    }

    /** @return key format (column 4) */
    @Step
    public String getKeyFormat() {
      return cell(4);
    }

    /** @return value format (column 5) */
    @Step
    public String getValueFormat() {
      return cell(5);
    }

    /** @return windowed flag (column 6) */
    @Step
    public String getIsWindowed() {
      return cell(6);
    }
  }
}

View file

@ -0,0 +1,17 @@
package com.provectus.kafka.ui.pages.ksqldb.enums;
/**
 * Detail tabs on the KSQL DB list page. {@link #toString()} returns the link
 * text that page objects use to locate the tab.
 */
public enum KsqlMenuTabs {

  // NOTE(review): the Tables tab link elsewhere is labelled 'Tables' — confirm 'Table' is intended
  TABLES("Table"),
  STREAMS("Streams");

  // Visible link text of the tab
  private final String value;

  KsqlMenuTabs(String value) {
    this.value = value;
  }

  @Override
  public String toString() {
    return value;
  }
}

View file

@ -0,0 +1,18 @@
package com.provectus.kafka.ui.pages.ksqldb.enums;
/**
 * Canned KSQL statements used by the e2e tests. SELECT_ALL_FROM contains a
 * "%s" placeholder for the table/stream name (substituted via String.format).
 */
public enum KsqlQueryConfig {

  SHOW_TABLES("show tables;"),
  SHOW_STREAMS("show streams;"),
  SELECT_ALL_FROM("SELECT * FROM %s\nEMIT CHANGES;");

  // Raw KSQL statement text
  private final String query;

  KsqlQueryConfig(String query) {
    this.query = query;
  }

  /** @return the raw KSQL statement for this entry */
  public String getQuery() {
    return query;
  }
}

View file

@ -1,4 +1,4 @@
package com.provectus.kafka.ui.pages.ksqlDb.models;
package com.provectus.kafka.ui.pages.ksqldb.models;
import lombok.Data;
import lombok.experimental.Accessors;

View file

@ -1,4 +1,4 @@
package com.provectus.kafka.ui.pages.ksqlDb.models;
package com.provectus.kafka.ui.pages.ksqldb.models;
import lombok.Data;
import lombok.experimental.Accessors;

View file

@ -1,19 +1,18 @@
package com.provectus.kafka.ui.pages.panels;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
import com.provectus.kafka.ui.pages.BasePage;
import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
import io.qameta.allure.Step;
import java.time.Duration;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
public class NaviSideBar extends BasePage {
protected SelenideElement dashboardMenuItem = $x("//a[@title='Dashboard']");

View file

@ -1,13 +1,12 @@
package com.provectus.kafka.ui.pages.panels;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import java.util.Arrays;
import java.util.List;
import static com.codeborne.selenide.Selenide.$x;
public class TopPanel extends BasePage {
protected SelenideElement kafkaLogo = $x("//a[contains(text(),'UI for Apache Kafka')]");

View file

@ -1,5 +1,10 @@
package com.provectus.kafka.ui.pages.schemas;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
import static org.openqa.selenium.By.id;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.codeborne.selenide.WebDriverRunner;
@ -7,15 +12,11 @@ import com.provectus.kafka.ui.api.model.CompatibilityLevel;
import com.provectus.kafka.ui.api.model.SchemaType;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import org.openqa.selenium.Keys;
import org.openqa.selenium.interactions.Actions;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.codeborne.selenide.Selenide.*;
import static org.openqa.selenium.By.id;
import org.openqa.selenium.Keys;
import org.openqa.selenium.interactions.Actions;
public class SchemaCreateForm extends BasePage {
@ -29,7 +30,8 @@ public class SchemaCreateForm extends BasePage {
protected SelenideElement latestSchemaTextArea = $x("//div[@id='latestSchema']");
protected SelenideElement leftVersionDdl = $(id("left-select"));
protected SelenideElement rightVersionDdl = $(id("right-select"));
protected List<SelenideElement> visibleMarkers = $$x("//div[@class='ace_scroller']//div[contains(@class,'codeMarker')]");
protected List<SelenideElement> visibleMarkers =
$$x("//div[@class='ace_scroller']//div[contains(@class,'codeMarker')]");
protected List<SelenideElement> elementsCompareVersionDdl = $$x("//ul[@role='listbox']/ul/li");
protected String ddlElementLocator = "//li[@value='%s']";

View file

@ -1,12 +1,12 @@
package com.provectus.kafka.ui.pages.schemas;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
public class SchemaDetails extends BasePage {
protected SelenideElement actualVersionTextArea = $x("//div[@id='schema']");

View file

@ -1,13 +1,13 @@
package com.provectus.kafka.ui.pages.schemas;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.SCHEMA_REGISTRY;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.SCHEMA_REGISTRY;
public class SchemaRegistryList extends BasePage {
protected SelenideElement createSchemaBtn = $x("//button[contains(text(),'Create Schema')]");

View file

@ -1,19 +1,18 @@
package com.provectus.kafka.ui.pages.topics;
import static com.codeborne.selenide.Selenide.$x;
import static com.codeborne.selenide.Selenide.refresh;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import java.util.Arrays;
import static com.codeborne.selenide.Selenide.$x;
import static com.codeborne.selenide.Selenide.refresh;
public class ProduceMessagePanel extends BasePage {
protected SelenideElement keyTextArea = $x("//div[@id='key']/textarea");
protected SelenideElement contentTextArea = $x("//div[@id='content']/textarea");
protected SelenideElement valueTextArea = $x("//div[@id='content']/textarea");
protected SelenideElement headersTextArea = $x("//div[@id='headers']/textarea");
protected SelenideElement submitBtn = headersTextArea.$x("../../../..//button[@type='submit']");
protected SelenideElement partitionDdl = $x("//ul[@name='partition']");
@ -35,14 +34,14 @@ public class ProduceMessagePanel extends BasePage {
}
@Step
public ProduceMessagePanel setContentFiled(String value) {
clearByKeyboard(contentTextArea);
contentTextArea.setValue(value);
public ProduceMessagePanel setValueFiled(String value) {
clearByKeyboard(valueTextArea);
valueTextArea.setValue(value);
return this;
}
@Step
public ProduceMessagePanel setHeaderFiled(String value) {
public ProduceMessagePanel setHeadersFld(String value) {
headersTextArea.setValue(value);
return this;
}

View file

@ -1,6 +1,15 @@
package com.provectus.kafka.ui.pages.topics;
import com.codeborne.selenide.*;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$$;
import static com.codeborne.selenide.Selenide.$x;
import static org.openqa.selenium.By.id;
import com.codeborne.selenide.ClickOptions;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import com.provectus.kafka.ui.pages.topics.enums.CleanupPolicyValue;
import com.provectus.kafka.ui.pages.topics.enums.CustomParameterType;
@ -8,13 +17,11 @@ import com.provectus.kafka.ui.pages.topics.enums.MaxSizeOnDisk;
import com.provectus.kafka.ui.pages.topics.enums.TimeToRetain;
import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.*;
public class TopicCreateEditForm extends BasePage {
protected SelenideElement timeToRetainField = $x("//input[@id='timeToRetain']");
protected SelenideElement partitionsField = $x("//input[@name='partitions']");
protected SelenideElement nameField = $x("//input[@name='name']");
protected SelenideElement nameField = $(id("topicFormName"));
protected SelenideElement maxMessageBytesField = $x("//input[@name='maxMessageBytes']");
protected SelenideElement minInSyncReplicasField = $x("//input[@name='minInSyncReplicas']");
protected SelenideElement cleanUpPolicyDdl = $x("//ul[@id='topicFormCleanupPolicy']");
@ -49,10 +56,7 @@ public class TopicCreateEditForm extends BasePage {
@Step
public TopicCreateEditForm setTopicName(String topicName) {
nameField.shouldBe(Condition.enabled).clear();
if (topicName != null) {
nameField.sendKeys(topicName);
}
sendKeysAfterClear(nameField, topicName);
return this;
}
@ -75,9 +79,9 @@ public class TopicCreateEditForm extends BasePage {
}
@Step
public TopicCreateEditForm setMaxSizeOnDiskInGB(MaxSizeOnDisk MaxSizeOnDisk) {
public TopicCreateEditForm setMaxSizeOnDiskInGB(MaxSizeOnDisk maxSizeOnDisk) {
maxSizeOnDiscDdl.shouldBe(Condition.visible).click();
$x(String.format(ddlElementLocator, MaxSizeOnDisk.getOptionValue())).shouldBe(Condition.visible).click();
$x(String.format(ddlElementLocator, maxSizeOnDisk.getOptionValue())).shouldBe(Condition.visible).click();
return this;
}
@ -143,7 +147,7 @@ public class TopicCreateEditForm extends BasePage {
}
@Step
public TopicCreateEditForm clickCreateTopicBtn() {
public TopicCreateEditForm clickSaveTopicBtn() {
clickSubmitBtn();
return this;
}
@ -153,20 +157,20 @@ public class TopicCreateEditForm extends BasePage {
String customParameterValue) {
ElementsCollection customParametersElements =
$$("ul[role=listbox][name^=customParams][name$=name]");
KafkaUISelectElement kafkaUISelectElement = null;
KafkaUiSelectElement kafkaUiSelectElement = null;
if (customParametersElements.size() == 1) {
if ("Select".equals(customParametersElements.first().getText())) {
kafkaUISelectElement = new KafkaUISelectElement(customParametersElements.first());
kafkaUiSelectElement = new KafkaUiSelectElement(customParametersElements.first());
}
} else {
$$("button")
.find(Condition.exactText("Add Custom Parameter"))
.click();
customParametersElements = $$("ul[role=listbox][name^=customParams][name$=name]");
kafkaUISelectElement = new KafkaUISelectElement(customParametersElements.last());
kafkaUiSelectElement = new KafkaUiSelectElement(customParametersElements.last());
}
if (kafkaUISelectElement != null) {
kafkaUISelectElement.selectByVisibleText(customParameterName);
if (kafkaUiSelectElement != null) {
kafkaUiSelectElement.selectByVisibleText(customParameterName);
}
$(String.format("input[name=\"customParams.%d.value\"]", customParametersElements.size() - 1))
.setValue(customParameterValue);
@ -188,7 +192,7 @@ public class TopicCreateEditForm extends BasePage {
@Step
public String getCleanupPolicy() {
return new KafkaUISelectElement("cleanupPolicy").getCurrentValue();
return new KafkaUiSelectElement("cleanupPolicy").getCurrentValue();
}
@Step
@ -198,7 +202,7 @@ public class TopicCreateEditForm extends BasePage {
@Step
public String getMaxSizeOnDisk() {
return new KafkaUISelectElement("retentionBytes").getCurrentValue();
return new KafkaUiSelectElement("retentionBytes").getCurrentValue();
}
@Step
@ -229,27 +233,27 @@ public class TopicCreateEditForm extends BasePage {
private TopicCreateEditForm selectFromDropDownByOptionValue(String dropDownElementName,
String optionValue) {
KafkaUISelectElement select = new KafkaUISelectElement(dropDownElementName);
KafkaUiSelectElement select = new KafkaUiSelectElement(dropDownElementName);
select.selectByOptionValue(optionValue);
return this;
}
private TopicCreateEditForm selectFromDropDownByVisibleText(String dropDownElementName,
String visibleText) {
KafkaUISelectElement select = new KafkaUISelectElement(dropDownElementName);
KafkaUiSelectElement select = new KafkaUiSelectElement(dropDownElementName);
select.selectByVisibleText(visibleText);
return this;
}
private static class KafkaUISelectElement {
private static class KafkaUiSelectElement {
private final SelenideElement selectElement;
public KafkaUISelectElement(String selectElementName) {
public KafkaUiSelectElement(String selectElementName) {
this.selectElement = $("ul[role=listbox][name=" + selectElementName + "]");
}
public KafkaUISelectElement(SelenideElement selectElement) {
public KafkaUiSelectElement(SelenideElement selectElement) {
this.selectElement = selectElement;
}

View file

@ -1,23 +1,29 @@
package com.provectus.kafka.ui.pages.topics;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
import static com.codeborne.selenide.Selenide.sleep;
import static com.provectus.kafka.ui.pages.topics.TopicDetails.TopicMenu.OVERVIEW;
import static org.testcontainers.shaded.org.apache.commons.lang3.RandomUtils.nextInt;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import org.openqa.selenium.By;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.YearMonth;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.util.*;
import static com.codeborne.selenide.Selenide.*;
import static org.testcontainers.shaded.org.apache.commons.lang3.RandomUtils.nextInt;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
public class TopicDetails extends BasePage {
@ -26,7 +32,7 @@ public class TopicDetails extends BasePage {
protected SelenideElement messageAmountCell = $x("//tbody/tr/td[5]");
protected SelenideElement overviewTab = $x("//a[contains(text(),'Overview')]");
protected SelenideElement messagesTab = $x("//a[contains(text(),'Messages')]");
protected SelenideElement seekTypeDdl = $x("//ul[@id='selectSeekType']/li");
protected SelenideElement seekTypeDdl = $x("//ul[@id='selectSeekType']//li");
protected SelenideElement seekTypeField = $x("//label[text()='Seek Type']//..//div/input");
protected SelenideElement addFiltersBtn = $x("//button[text()='Add Filters']");
protected SelenideElement savedFiltersLink = $x("//div[text()='Saved Filters']");
@ -50,6 +56,7 @@ public class TopicDetails extends BasePage {
protected SelenideElement previousMonthButton = $x("//button[@aria-label='Previous Month']");
protected SelenideElement nextMonthButton = $x("//button[@aria-label='Next Month']");
protected SelenideElement calendarTimeFld = $x("//input[@placeholder='Time']");
protected String detailsTabLtr = "//nav//a[contains(text(),'%s')]";
protected String dayCellLtr = "//div[@role='option'][contains(text(),'%d')]";
protected String seekFilterDdlLocator = "//ul[@id='selectSeekType']/ul/li[text()='%s']";
protected String savedFilterNameLocator = "//div[@role='savedFilter']/div[contains(text(),'%s')]";
@ -61,13 +68,13 @@ public class TopicDetails extends BasePage {
@Step
public TopicDetails waitUntilScreenReady() {
waitUntilSpinnerDisappear();
overviewTab.shouldBe(Condition.visible);
$x(String.format(detailsTabLtr, OVERVIEW)).shouldBe(Condition.visible);
return this;
}
@Step
public TopicDetails openDetailsTab(TopicMenu menu) {
$(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
$x(String.format(detailsTabLtr, menu.toString())).shouldBe(Condition.enabled).click();
waitUntilSpinnerDisappear();
return this;
}
@ -105,6 +112,13 @@ public class TopicDetails extends BasePage {
return this;
}
@Step
public boolean isClearMessagesMenuEnabled() {
return !Objects.requireNonNull(clearMessagesBtn.shouldBe(Condition.visible)
.$x("./..").getAttribute("class"))
.contains("disabled");
}
@Step
public TopicDetails clickRecreateTopicMenu() {
recreateTopicBtn.shouldBe(Condition.visible).click();
@ -263,7 +277,8 @@ public class TopicDetails extends BasePage {
}
public List<SelenideElement> getAllAddFilterModalVisibleElements() {
return Arrays.asList(savedFiltersLink, displayNameInputAddFilterMdl, addFilterBtnAddFilterMdl, cancelBtnAddFilterMdl);
return Arrays.asList(savedFiltersLink, displayNameInputAddFilterMdl, addFilterBtnAddFilterMdl,
cancelBtnAddFilterMdl);
}
public List<SelenideElement> getAllAddFilterModalEnabledElements() {
@ -280,16 +295,6 @@ public class TopicDetails extends BasePage {
return this;
}
@Step
public boolean isKeyMessageVisible(String keyMessage) {
return keyMessage.equals($("td[title]").getText());
}
@Step
public boolean isContentMessageVisible(String contentMessage) {
return contentMessage.matches(contentMessageTab.getText().trim());
}
private void selectYear(int expectedYear) {
while (getActualCalendarDate().getYear() > expectedYear) {
clickByJavaScript(previousMonthButton);
@ -366,6 +371,13 @@ public class TopicDetails extends BasePage {
.findFirst().orElseThrow();
}
@Step
public TopicDetails.MessageGridItem getMessageByKey(String key) {
return initItems().stream()
.filter(e -> e.getKey().equals(key))
.findFirst().orElseThrow();
}
@Step
public List<MessageGridItem> getAllMessages() {
return initItems();
@ -435,7 +447,7 @@ public class TopicDetails extends BasePage {
@Step
public String getValue() {
return element.$x("./td[6]/span/p").getText().trim();
return element.$x("./td[6]").getAttribute("title");
}
@Step

View file

@ -1,16 +1,15 @@
package com.provectus.kafka.ui.pages.topics;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import java.util.ArrayList;
import java.util.List;
import static com.codeborne.selenide.Selenide.$x;
public class TopicSettingsTab extends BasePage {
protected SelenideElement defaultValueColumnHeaderLocator = $x("//div[text() = 'Default Value']");

View file

@ -1,21 +1,20 @@
package com.provectus.kafka.ui.pages.topics;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.TOPICS;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.TOPICS;
public class TopicsList extends BasePage {
protected SelenideElement addTopicBtn = $x("//button[normalize-space(text()) ='Add a Topic']");
@ -23,7 +22,8 @@ public class TopicsList extends BasePage {
protected SelenideElement showInternalRadioBtn = $x("//input[@name='ShowInternalTopics']");
protected SelenideElement deleteSelectedTopicsBtn = $x("//button[text()='Delete selected topics']");
protected SelenideElement copySelectedTopicBtn = $x("//button[text()='Copy selected topic']");
protected SelenideElement purgeMessagesOfSelectedTopicsBtn = $x("//button[text()='Purge messages of selected topics']");
protected SelenideElement purgeMessagesOfSelectedTopicsBtn =
$x("//button[text()='Purge messages of selected topics']");
protected SelenideElement clearMessagesBtn = $x("//ul[contains(@class ,'open')]//div[text()='Clear Messages']");
protected SelenideElement recreateTopicBtn = $x("//ul[contains(@class ,'open')]//div[text()='Recreate Topic']");
protected SelenideElement removeTopicBtn = $x("//ul[contains(@class ,'open')]//div[text()='Remove Topic']");
@ -54,7 +54,17 @@ public class TopicsList extends BasePage {
@Step
public TopicsList setShowInternalRadioButton(boolean select) {
selectElement(showInternalRadioBtn, select);
if (select) {
if (!showInternalRadioBtn.isSelected()) {
clickByJavaScript(showInternalRadioBtn);
waitUntilSpinnerDisappear(1);
}
} else {
if (showInternalRadioBtn.isSelected()) {
clickByJavaScript(showInternalRadioBtn);
waitUntilSpinnerDisappear(1);
}
}
return this;
}
@ -134,7 +144,8 @@ public class TopicsList extends BasePage {
}
private List<SelenideElement> getVisibleColumnHeaders() {
return Stream.of("Replication Factor", "Number of messages", "Topic Name", "Partitions", "Out of sync replicas", "Size")
return Stream.of("Replication Factor", "Number of messages", "Topic Name", "Partitions", "Out of sync replicas",
"Size")
.map(name -> $x(String.format(columnHeaderLocator, name)))
.collect(Collectors.toList());
}
@ -169,10 +180,17 @@ public class TopicsList extends BasePage {
@Step
public TopicGridItem getTopicItem(String name) {
return initGridItems().stream()
TopicGridItem topicGridItem = initGridItems().stream()
.filter(e -> e.getName().equals(name))
.findFirst().orElse(null);
if (topicGridItem == null) {
searchItem(name);
topicGridItem = initGridItems().stream()
.filter(e -> e.getName().equals(name))
.findFirst().orElseThrow();
}
return topicGridItem;
}
@Step
public TopicGridItem getAnyNonInternalTopic() {

View file

@ -1,27 +1,36 @@
package com.provectus.kafka.ui.services;
import static com.codeborne.selenide.Selenide.sleep;
import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.provectus.kafka.ui.api.ApiClient;
import com.provectus.kafka.ui.api.api.*;
import com.provectus.kafka.ui.api.model.*;
import com.provectus.kafka.ui.api.api.KafkaConnectApi;
import com.provectus.kafka.ui.api.api.KsqlApi;
import com.provectus.kafka.ui.api.api.MessagesApi;
import com.provectus.kafka.ui.api.api.SchemasApi;
import com.provectus.kafka.ui.api.api.TopicsApi;
import com.provectus.kafka.ui.api.model.CreateTopicMessage;
import com.provectus.kafka.ui.api.model.KsqlCommandV2;
import com.provectus.kafka.ui.api.model.KsqlCommandV2Response;
import com.provectus.kafka.ui.api.model.KsqlResponse;
import com.provectus.kafka.ui.api.model.NewConnector;
import com.provectus.kafka.ui.api.model.NewSchemaSubject;
import com.provectus.kafka.ui.api.model.TopicCreation;
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Schema;
import com.provectus.kafka.ui.models.Topic;
import com.provectus.kafka.ui.pages.ksqlDb.models.Stream;
import com.provectus.kafka.ui.pages.ksqlDb.models.Table;
import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
import com.provectus.kafka.ui.pages.ksqldb.models.Table;
import com.provectus.kafka.ui.settings.BaseSource;
import io.qameta.allure.Step;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.reactive.function.client.WebClientResponseException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static com.codeborne.selenide.Selenide.sleep;
import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.reactive.function.client.WebClientResponseException;
@Slf4j
@ -29,27 +38,27 @@ public class ApiService extends BaseSource {
@SneakyThrows
private TopicsApi topicApi() {
return new TopicsApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
return new TopicsApi(new ApiClient().setBasePath(BASE_API_URL));
}
@SneakyThrows
private SchemasApi schemaApi() {
return new SchemasApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
return new SchemasApi(new ApiClient().setBasePath(BASE_API_URL));
}
@SneakyThrows
private KafkaConnectApi connectorApi() {
return new KafkaConnectApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
return new KafkaConnectApi(new ApiClient().setBasePath(BASE_API_URL));
}
@SneakyThrows
private MessagesApi messageApi() {
return new MessagesApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
return new MessagesApi(new ApiClient().setBasePath(BASE_API_URL));
}
@SneakyThrows
private KsqlApi ksqlApi() {
return new KsqlApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
return new KsqlApi(new ApiClient().setBasePath(BASE_API_URL));
}
@SneakyThrows
@ -76,7 +85,7 @@ public class ApiService extends BaseSource {
private void deleteTopic(String clusterName, String topicName) {
try {
topicApi().deleteTopic(clusterName, topicName).block();
} catch (WebClientResponseException ignore) {
} catch (WebClientResponseException ignored) {
}
}
@ -109,7 +118,7 @@ public class ApiService extends BaseSource {
private void deleteSchema(String clusterName, String schemaName) {
try {
schemaApi().deleteSchema(clusterName, schemaName).block();
} catch (WebClientResponseException ignore) {
} catch (WebClientResponseException ignored) {
}
}
@ -123,7 +132,7 @@ public class ApiService extends BaseSource {
private void deleteConnector(String clusterName, String connectName, String connectorName) {
try {
connectorApi().deleteConnector(clusterName, connectName, connectorName).block();
} catch (WebClientResponseException ignore) {
} catch (WebClientResponseException ignored) {
}
}
@ -176,7 +185,7 @@ public class ApiService extends BaseSource {
createMessage.setKeySerde("String");
createMessage.setValueSerde("String");
createMessage.setKey(topic.getMessageKey());
createMessage.setContent(topic.getMessageContent());
createMessage.setContent(topic.getMessageValue());
try {
messageApi().sendTopicMessages(clusterName, topic.getName(), createMessage).block();
} catch (WebClientResponseException ex) {
@ -249,17 +258,18 @@ public class ApiService extends BaseSource {
String streamName = stream.getName();
KsqlCommandV2Response pipeIdInsert = ksqlApi()
.executeKsql(CLUSTER_NAME, new KsqlCommandV2()
.ksql("INSERT INTO " + streamName + " (profileId, latitude, longitude) VALUES ('c2309eec', 37.7877, -122.4205);"
+ "INSERT INTO " + streamName +
" (profileId, latitude, longitude) VALUES ('18f4ea86', 37.3903, -122.0643); "
+ "INSERT INTO " + streamName +
" (profileId, latitude, longitude) VALUES ('4ab5cbad', 37.3952, -122.0813); "
+ "INSERT INTO " + streamName +
" (profileId, latitude, longitude) VALUES ('8b6eae59', 37.3944, -122.0813); "
+ "INSERT INTO " + streamName +
" (profileId, latitude, longitude) VALUES ('4a7c7b41', 37.4049, -122.0822); "
+ "INSERT INTO " + streamName +
" (profileId, latitude, longitude) VALUES ('4ddad000', 37.7857, -122.4011);"))
.ksql("INSERT INTO " + streamName
+ " (profileId, latitude, longitude) VALUES ('c2309eec', 37.7877, -122.4205);"
+ "INSERT INTO " + streamName
+ " (profileId, latitude, longitude) VALUES ('18f4ea86', 37.3903, -122.0643); "
+ "INSERT INTO " + streamName
+ " (profileId, latitude, longitude) VALUES ('4ab5cbad', 37.3952, -122.0813); "
+ "INSERT INTO " + streamName
+ " (profileId, latitude, longitude) VALUES ('8b6eae59', 37.3944, -122.0813); "
+ "INSERT INTO " + streamName
+ " (profileId, latitude, longitude) VALUES ('4a7c7b41', 37.4049, -122.0822); "
+ "INSERT INTO " + streamName
+ " (profileId, latitude, longitude) VALUES ('4ddad000', 37.7857, -122.4011);"))
.block();
assert pipeIdInsert != null;
List<KsqlResponse> responseListInsert = ksqlApi()

View file

@ -1,16 +1,23 @@
package com.provectus.kafka.ui.settings;
import static com.provectus.kafka.ui.variables.Browser.LOCAL;
import com.provectus.kafka.ui.settings.configs.Config;
import org.aeonbits.owner.ConfigFactory;
public abstract class BaseSource {
public static final String BASE_CONTAINER_URL = "http://host.testcontainers.internal:8080";
public static final String BASE_LOCAL_URL = "http://localhost:8080";
public static final String CLUSTER_NAME = "local";
public static final String CONNECT_NAME = "first";
private static final String LOCAL_HOST = "localhost";
public static final String REMOTE_URL = String.format("http://%s:4444/wd/hub", LOCAL_HOST);
public static final String BASE_API_URL = String.format("http://%s:8080", LOCAL_HOST);
private static Config config;
public static final String BROWSER = config().browser();
public static final String BASE_HOST = BROWSER.equals(LOCAL)
? LOCAL_HOST
: "host.docker.internal";
public static final String BASE_UI_URL = String.format("http://%s:8080", BASE_HOST);
public static final String SUITE_NAME = config().suite();
private static Config config() {

View file

@ -1,10 +1,10 @@
package com.provectus.kafka.ui.settings.configs;
import org.aeonbits.owner.Config;
import static com.provectus.kafka.ui.variables.Browser.CONTAINER;
import static com.provectus.kafka.ui.variables.Suite.CUSTOM;
import org.aeonbits.owner.Config;
public interface Profiles extends Config {
@Key("browser")

View file

@ -1,68 +0,0 @@
package com.provectus.kafka.ui.settings.drivers;
import static com.codeborne.selenide.Selenide.clearBrowserCookies;
import static com.codeborne.selenide.Selenide.clearBrowserLocalStorage;
import static com.codeborne.selenide.Selenide.open;
import static com.codeborne.selenide.Selenide.refresh;
import com.codeborne.selenide.Configuration;
import com.codeborne.selenide.WebDriverRunner;
import com.codeborne.selenide.logevents.SelenideLogger;
import io.qameta.allure.Step;
import io.qameta.allure.selenide.AllureSelenide;
import org.openqa.selenium.chrome.ChromeOptions;
public abstract class LocalWebDriver {
private static org.openqa.selenium.WebDriver getWebDriver() {
try {
return WebDriverRunner.getWebDriver();
} catch (IllegalStateException ex) {
Configuration.headless = false;
Configuration.browser = "chrome";
Configuration.browserSize = "1920x1080";
/**screenshots and savePageSource config is needed for local debug
* optionally can be set as 'false' to not duplicate Allure report
*/
Configuration.screenshots = true;
Configuration.savePageSource = true;
Configuration.pageLoadTimeout = 120000;
Configuration.browserCapabilities = new ChromeOptions()
.addArguments("--remote-allow-origins=*")
.addArguments("--lang=en_US");
open();
return WebDriverRunner.getWebDriver();
}
}
@Step
public static void openUrl(String url) {
if (!getWebDriver().getCurrentUrl().equals(url)) {
getWebDriver().get(url);
}
}
@Step
public static void browserInit() {
getWebDriver();
}
@Step
public static void browserClear() {
clearBrowserLocalStorage();
clearBrowserCookies();
refresh();
}
@Step
public static void browserQuit() {
getWebDriver().quit();
}
@Step
public static void loggerSetup() {
SelenideLogger.addListener("AllureSelenide", new AllureSelenide()
.screenshots(true)
.savePageSource(false));
}
}

View file

@ -0,0 +1,101 @@
package com.provectus.kafka.ui.settings.drivers;
import static com.codeborne.selenide.Selenide.clearBrowserCookies;
import static com.codeborne.selenide.Selenide.clearBrowserLocalStorage;
import static com.codeborne.selenide.Selenide.refresh;
import static com.provectus.kafka.ui.settings.BaseSource.BROWSER;
import static com.provectus.kafka.ui.settings.BaseSource.REMOTE_URL;
import static com.provectus.kafka.ui.variables.Browser.CONTAINER;
import static com.provectus.kafka.ui.variables.Browser.LOCAL;
import com.codeborne.selenide.Configuration;
import com.codeborne.selenide.Selenide;
import com.codeborne.selenide.WebDriverRunner;
import com.codeborne.selenide.logevents.SelenideLogger;
import io.qameta.allure.Step;
import io.qameta.allure.selenide.AllureSelenide;
import lombok.extern.slf4j.Slf4j;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.remote.DesiredCapabilities;
@Slf4j
public abstract class WebDriver {

  /**
   * Applies the global Selenide configuration for a Chrome session and routes it either to a
   * locally installed browser or to a remote hub, depending on the "browser" run property.
   * Mutates static {@link Configuration} state, so it must run before the first Selenide call.
   */
  @Step
  public static void browserSetup() {
    Configuration.headless = false;
    Configuration.browser = "chrome";
    Configuration.browserSize = "1920x1080";
    // Keep failure screenshots; skip page-source dumps (also disabled in the Allure listener below).
    Configuration.screenshots = true;
    Configuration.savePageSource = false;
    Configuration.pageLoadTimeout = 120000;
    ChromeOptions options = new ChromeOptions()
        .addArguments("--no-sandbox")
        .addArguments("--verbose")
        .addArguments("--remote-allow-origins=*")
        .addArguments("--disable-dev-shm-usage")
        .addArguments("--disable-gpu")
        .addArguments("--lang=en_US");
    switch (BROWSER) {
      // Local run: drive the Chrome installed on this machine with the options above.
      case (LOCAL) -> Configuration.browserCapabilities = options;
      // Containerized run: point Selenide at the remote hub; enableVNC allows live debugging.
      case (CONTAINER) -> {
        Configuration.remote = REMOTE_URL;
        Configuration.remoteConnectionTimeout = 180000;
        DesiredCapabilities capabilities = new DesiredCapabilities();
        capabilities.setCapability("enableVNC", true);
        capabilities.setCapability("enableVideo", false);
        Configuration.browserCapabilities = capabilities.merge(options);
      }
      default -> throw new IllegalStateException("Unexpected value: " + BROWSER);
    }
  }

  /**
   * Returns the WebDriver bound to the current thread, lazily configuring and opening a
   * browser on first use (Selenide throws IllegalStateException when none is open yet).
   */
  private static org.openqa.selenium.WebDriver getWebDriver() {
    try {
      return WebDriverRunner.getWebDriver();
    } catch (IllegalStateException ex) {
      browserSetup();
      Selenide.open();
      return WebDriverRunner.getWebDriver();
    }
  }

  /** Navigates to the given URL, skipping the request if the browser is already on it. */
  @Step
  public static void openUrl(String url) {
    org.openqa.selenium.WebDriver driver = getWebDriver();
    if (!driver.getCurrentUrl().equals(url)) {
      driver.get(url);
    }
  }

  /** Eagerly starts the browser so later steps find an open session. */
  @Step
  public static void browserInit() {
    getWebDriver();
  }

  /** Clears local storage and cookies, then reloads the page to reset UI state between tests. */
  @Step
  public static void browserClear() {
    clearBrowserLocalStorage();
    clearBrowserCookies();
    refresh();
  }

  /** Quits the current browser if one is open; safe to call when no session exists. */
  @Step
  public static void browserQuit() {
    org.openqa.selenium.WebDriver driver = null;
    try {
      driver = WebDriverRunner.getWebDriver();
    } catch (Throwable ignored) {
      // No driver bound to this thread - nothing to quit.
    }
    if (driver != null) {
      driver.quit();
    }
  }

  /** Registers the Allure listener that attaches screenshots (but not page sources) to steps. */
  @Step
  public static void loggerSetup() {
    SelenideLogger.addListener("AllureSelenide", new AllureSelenide()
        .screenshots(true)
        .savePageSource(false));
  }
}

View file

@ -1,23 +1,27 @@
package com.provectus.kafka.ui.settings.listeners;
import static java.nio.file.Files.newInputStream;
import com.codeborne.selenide.Screenshots;
import io.qameta.allure.Allure;
import io.qameta.allure.testng.AllureTestNg;
import java.io.File;
import java.io.IOException;
import lombok.extern.slf4j.Slf4j;
import org.testng.ITestListener;
import org.testng.ITestResult;
import java.io.File;
import java.io.IOException;
import java.util.Objects;
import static java.nio.file.Files.newInputStream;
@Slf4j
public class AllureListener extends AllureTestNg implements ITestListener {
private void takeScreenshot() {
File screenshot = Screenshots.takeScreenShotAsFile();
try {
Allure.addAttachment(Objects.requireNonNull(screenshot).getName(), newInputStream(screenshot.toPath()));
if (screenshot != null) {
Allure.addAttachment(screenshot.getName(), newInputStream(screenshot.toPath()));
} else {
log.warn("Unable to take screenshot");
}
} catch (IOException e) {
throw new RuntimeException(e);
}

View file

@ -9,29 +9,29 @@ public class LoggerListener extends TestListenerAdapter {
@Override
public void onTestStart(final ITestResult testResult) {
log.info(String.format("\n------------------------------------------------------------------------ " +
"\nTEST STARTED: %s.%s \n------------------------------------------------------------------------ \n",
log.info(String.format("\n------------------------------------------------------------------------ "
+ "\nTEST STARTED: %s.%s \n------------------------------------------------------------------------ \n",
testResult.getInstanceName(), testResult.getName()));
}
@Override
public void onTestSuccess(final ITestResult testResult) {
log.info(String.format("\n------------------------------------------------------------------------ " +
"\nTEST PASSED: %s.%s \n------------------------------------------------------------------------ \n",
log.info(String.format("\n------------------------------------------------------------------------ "
+ "\nTEST PASSED: %s.%s \n------------------------------------------------------------------------ \n",
testResult.getInstanceName(), testResult.getName()));
}
@Override
public void onTestFailure(final ITestResult testResult) {
log.info(String.format("\n------------------------------------------------------------------------ " +
"\nTEST FAILED: %s.%s \n------------------------------------------------------------------------ \n",
log.info(String.format("\n------------------------------------------------------------------------ "
+ "\nTEST FAILED: %s.%s \n------------------------------------------------------------------------ \n",
testResult.getInstanceName(), testResult.getName()));
}
@Override
public void onTestSkipped(final ITestResult testResult) {
log.info(String.format("\n------------------------------------------------------------------------ " +
"\nTEST SKIPPED: %s.%s \n------------------------------------------------------------------------ \n",
log.info(String.format("\n------------------------------------------------------------------------ "
+ "\nTEST SKIPPED: %s.%s \n------------------------------------------------------------------------ \n",
testResult.getInstanceName(), testResult.getName()));
}
}

View file

@ -1,14 +1,28 @@
package com.provectus.kafka.ui.settings.listeners;
import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Status;
import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
import static io.qase.api.utils.IntegrationUtils.getCaseTitle;
import com.provectus.kafka.ui.utilities.qase.annotations.Automation;
import com.provectus.kafka.ui.utilities.qase.annotations.Status;
import com.provectus.kafka.ui.utilities.qase.annotations.Suite;
import io.qase.api.QaseClient;
import io.qase.api.StepStorage;
import io.qase.api.annotation.QaseId;
import io.qase.client.ApiClient;
import io.qase.client.api.CasesApi;
import io.qase.client.model.*;
import io.qase.client.model.GetCasesFiltersParameter;
import io.qase.client.model.ResultCreateStepsInner;
import io.qase.client.model.TestCase;
import io.qase.client.model.TestCaseCreate;
import io.qase.client.model.TestCaseCreateStepsInner;
import io.qase.client.model.TestCaseListResponse;
import io.qase.client.model.TestCaseListResponseAllOfResult;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.testng.Assert;
@ -16,11 +30,6 @@ import org.testng.ITestListener;
import org.testng.ITestResult;
import org.testng.TestListenerAdapter;
import java.lang.reflect.Method;
import java.util.*;
import static io.qase.api.utils.IntegrationUtils.getCaseTitle;
@Slf4j
public class QaseCreateListener extends TestListenerAdapter implements ITestListener {
@ -33,14 +42,16 @@ public class QaseCreateListener extends TestListenerAdapter implements ITestList
}
private static int getStatus(Method method) {
if (method.isAnnotationPresent(Status.class))
if (method.isAnnotationPresent(Status.class)) {
return method.getDeclaredAnnotation(Status.class).status().getValue();
}
return 1;
}
private static int getAutomation(Method method) {
if (method.isAnnotationPresent(Automation.class))
if (method.isAnnotationPresent(Automation.class)) {
return method.getDeclaredAnnotation(Automation.class).state().getValue();
}
return 0;
}
@ -75,8 +86,8 @@ public class QaseCreateListener extends TestListenerAdapter implements ITestList
for (Map.Entry<Long, String> map : cases.entrySet()) {
if (map.getValue().matches(title)) {
long id = map.getKey();
log.warn(String.format("Test case with @QaseTitle='%s' already exists with @QaseId=%d. " +
"Please verify @QaseTitle annotation", title, id));
log.warn(String.format("Test case with @QaseTitle='%s' already exists with @QaseId=%d. "
+ "Please verify @QaseTitle annotation", title, id));
return true;
}
}
@ -115,9 +126,11 @@ public class QaseCreateListener extends TestListenerAdapter implements ITestList
newCase).getResult()).getId();
log.info(String.format("New test case '%s' was created with @QaseId=%d", title, id));
}
} else
} else {
log.warn("To create new test case in Qase.io please add @QaseTitle annotation");
} else
}
} else {
log.warn("To create new test case in Qase.io please remove @QaseId annotation");
}
}
}

View file

@ -1,5 +1,12 @@
package com.provectus.kafka.ui.settings.listeners;
import static io.qase.api.utils.IntegrationUtils.getCaseId;
import static io.qase.api.utils.IntegrationUtils.getCaseTitle;
import static io.qase.api.utils.IntegrationUtils.getStacktrace;
import static io.qase.client.model.ResultCreate.StatusEnum.FAILED;
import static io.qase.client.model.ResultCreate.StatusEnum.PASSED;
import static io.qase.client.model.ResultCreate.StatusEnum.SKIPPED;
import io.qase.api.StepStorage;
import io.qase.api.config.QaseConfig;
import io.qase.api.services.QaseTestCaseListener;
@ -7,6 +14,9 @@ import io.qase.client.model.ResultCreate;
import io.qase.client.model.ResultCreateCase;
import io.qase.client.model.ResultCreateStepsInner;
import io.qase.testng.guice.module.TestNgModule;
import java.lang.reflect.Method;
import java.util.LinkedList;
import java.util.Optional;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@ -15,13 +25,6 @@ import org.testng.ITestListener;
import org.testng.ITestResult;
import org.testng.TestListenerAdapter;
import java.lang.reflect.Method;
import java.util.LinkedList;
import java.util.Optional;
import static io.qase.api.utils.IntegrationUtils.*;
import static io.qase.client.model.ResultCreate.StatusEnum.*;
@Slf4j
public class QaseResultListener extends TestListenerAdapter implements ITestListener {

View file

@ -1,11 +1,10 @@
package com.provectus.kafka.ui.utilities;
import org.testcontainers.shaded.org.apache.commons.io.IOUtils;
import static org.apache.kafka.common.utils.Utils.readFileAsString;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import static org.apache.kafka.common.utils.Utils.readFileAsString;
import org.testcontainers.shaded.org.apache.commons.io.IOUtils;
public class FileUtils {

View file

@ -1,10 +1,9 @@
package com.provectus.kafka.ui.utilities;
import lombok.extern.slf4j.Slf4j;
import static com.codeborne.selenide.Selenide.sleep;
import java.time.LocalTime;
import static com.codeborne.selenide.Selenide.sleep;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class TimeUtils {

View file

@ -1,17 +1,30 @@
package com.provectus.kafka.ui.utilities;
import static com.codeborne.selenide.Selenide.executeJavaScript;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.codeborne.selenide.WebDriverRunner;
import java.time.Duration;
import lombok.extern.slf4j.Slf4j;
import org.openqa.selenium.Keys;
import org.openqa.selenium.interactions.Actions;
import static com.codeborne.selenide.Selenide.executeJavaScript;
@Slf4j
public class WebUtils {
public static int getTimeout(int... timeoutInSeconds) {
return (timeoutInSeconds != null && timeoutInSeconds.length > 0) ? timeoutInSeconds[0] : 4;
}
public static void sendKeysAfterClear(SelenideElement element, String keys) {
log.debug("\nsendKeysAfterClear: {} \nsend keys '{}'", element.getSearchCriteria(), keys);
element.shouldBe(Condition.enabled).clear();
if (keys != null) {
element.sendKeys(keys);
}
}
public static void clickByActions(SelenideElement element) {
log.debug("\nclickByActions: {}", element.getSearchCriteria());
element.shouldBe(Condition.enabled);
@ -43,11 +56,12 @@ public class WebUtils {
field.sendKeys(Keys.chord(Keys.CONTROL + "a"), Keys.DELETE);
}
public static boolean isVisible(SelenideElement element) {
public static boolean isVisible(SelenideElement element, int... timeoutInSeconds) {
log.debug("\nisVisible: {}", element.getSearchCriteria());
boolean isVisible = false;
try {
element.shouldBe(Condition.visible);
element.shouldBe(Condition.visible,
Duration.ofSeconds(getTimeout(timeoutInSeconds)));
isVisible = true;
} catch (Throwable e) {
log.debug("{} is not visible", element.getSearchCriteria());
@ -55,11 +69,12 @@ public class WebUtils {
return isVisible;
}
public static boolean isEnabled(SelenideElement element) {
public static boolean isEnabled(SelenideElement element, int... timeoutInSeconds) {
log.debug("\nisEnabled: {}", element.getSearchCriteria());
boolean isEnabled = false;
try {
element.shouldBe(Condition.enabled);
element.shouldBe(Condition.enabled,
Duration.ofSeconds(getTimeout(timeoutInSeconds)));
isEnabled = true;
} catch (Throwable e) {
log.debug("{} is not enabled", element.getSearchCriteria());
@ -67,11 +82,12 @@ public class WebUtils {
return isEnabled;
}
public static boolean isSelected(SelenideElement element) {
public static boolean isSelected(SelenideElement element, int... timeoutInSeconds) {
log.debug("\nisSelected: {}", element.getSearchCriteria());
boolean isSelected = false;
try {
element.shouldBe(Condition.selected);
element.shouldBe(Condition.selected,
Duration.ofSeconds(getTimeout(timeoutInSeconds)));
isSelected = true;
} catch (Throwable e) {
log.debug("{} is not selected", element.getSearchCriteria());
@ -81,9 +97,13 @@ public class WebUtils {
public static boolean selectElement(SelenideElement element, boolean select) {
if (select) {
if (!element.isSelected()) clickByJavaScript(element);
if (!element.isSelected()) {
clickByJavaScript(element);
}
} else {
if (element.isSelected()) clickByJavaScript(element);
if (element.isSelected()) {
clickByJavaScript(element);
}
}
return true;
}

View file

@ -0,0 +1,33 @@
package com.provectus.kafka.ui.utilities.qase;
import static com.provectus.kafka.ui.settings.BaseSource.SUITE_NAME;
import static com.provectus.kafka.ui.variables.Suite.MANUAL;
import static org.apache.commons.lang3.BooleanUtils.FALSE;
import static org.apache.commons.lang3.BooleanUtils.TRUE;
import static org.apache.commons.lang3.StringUtils.isEmpty;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class QaseSetup {

  /**
   * Enables or disables Qase.io reporting based on the QASEIO_API_TOKEN system property.
   * When a token is present, configures the Qase client (project, bulk mode, run name)
   * via system properties; otherwise switches the integration off.
   */
  public static void qaseIntegrationSetup() {
    String token = System.getProperty("QASEIO_API_TOKEN");
    if (!isEmpty(token)) {
      log.warn("Integration with Qase is enabled. Find this run at https://app.qase.io/run/KAFKAUI.");
      System.setProperty("QASE_ENABLE", TRUE);
      System.setProperty("QASE_PROJECT_CODE", "KAFKAUI");
      System.setProperty("QASE_API_TOKEN", token);
      System.setProperty("QASE_USE_BULK", TRUE);
      System.setProperty("QASE_RUN_NAME", buildRunName());
    } else {
      log.warn("Integration with Qase is disabled due to run config or token wasn't defined.");
      System.setProperty("QASE_ENABLE", FALSE);
    }
  }

  // Builds "<dd.MM.yyyy HH:mm>: [Automation ]<SUITE> suite" from the current UTC time;
  // the "Automation " prefix is omitted for the manual suite.
  private static String buildRunName() {
    String automation = SUITE_NAME.equalsIgnoreCase(MANUAL) ? "" : "Automation ";
    String timestamp = DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm")
        .format(OffsetDateTime.now(ZoneOffset.UTC));
    return timestamp + ": " + automation + SUITE_NAME.toUpperCase() + " suite";
  }
}

View file

@ -1,7 +1,6 @@
package com.provectus.kafka.ui.utilities.qaseUtils.annotations;
import com.provectus.kafka.ui.utilities.qaseUtils.enums.State;
package com.provectus.kafka.ui.utilities.qase.annotations;
import com.provectus.kafka.ui.utilities.qase.enums.State;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

View file

@ -1,4 +1,4 @@
package com.provectus.kafka.ui.utilities.qaseUtils.annotations;
package com.provectus.kafka.ui.utilities.qase.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@ -9,5 +9,5 @@ import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
public @interface Status {
com.provectus.kafka.ui.utilities.qaseUtils.enums.Status status();
com.provectus.kafka.ui.utilities.qase.enums.Status status();
}

View file

@ -1,4 +1,4 @@
package com.provectus.kafka.ui.utilities.qaseUtils.annotations;
package com.provectus.kafka.ui.utilities.qase.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

View file

@ -0,0 +1,18 @@
package com.provectus.kafka.ui.utilities.qase.enums;
/**
 * Automation state of a test case as reported to Qase.io; each constant carries the
 * numeric code the Qase client expects.
 */
public enum State {

  NOT_AUTOMATED(0),
  TO_BE_AUTOMATED(1),
  AUTOMATED(2);

  // Numeric code sent to the Qase API for this state.
  private final int code;

  State(int code) {
    this.code = code;
  }

  /** Returns the numeric Qase code for this state. */
  public int getValue() {
    return code;
  }
}

View file

@ -0,0 +1,18 @@
package com.provectus.kafka.ui.utilities.qase.enums;
/**
 * Lifecycle status of a test case as reported to Qase.io; each constant carries the
 * numeric code the Qase client expects.
 */
public enum Status {

  ACTUAL(0),
  DRAFT(1),
  DEPRECATED(2);

  // Numeric code sent to the Qase API for this status.
  private final int code;

  Status(int code) {
    this.code = code;
  }

  /** Returns the numeric Qase code for this status. */
  public int getValue() {
    return code;
  }
}

View file

@ -1,34 +0,0 @@
package com.provectus.kafka.ui.utilities.qaseUtils;
import lombok.extern.slf4j.Slf4j;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import static com.provectus.kafka.ui.settings.BaseSource.SUITE_NAME;
import static com.provectus.kafka.ui.variables.Suite.MANUAL;
import static org.apache.commons.lang3.BooleanUtils.FALSE;
import static org.apache.commons.lang3.BooleanUtils.TRUE;
import static org.apache.commons.lang3.StringUtils.isEmpty;
@Slf4j
public class QaseSetup {
public static void qaseIntegrationSetup() {
String qaseApiToken = System.getProperty("QASEIO_API_TOKEN");
if (isEmpty(qaseApiToken)) {
log.warn("Integration with Qase is disabled due to run config or token wasn't defined.");
System.setProperty("QASE_ENABLE", FALSE);
} else {
log.warn("Integration with Qase is enabled. Find this run at https://app.qase.io/run/KAFKAUI.");
String automation = SUITE_NAME.equalsIgnoreCase(MANUAL) ? "" : "Automation ";
System.setProperty("QASE_ENABLE", TRUE);
System.setProperty("QASE_PROJECT_CODE", "KAFKAUI");
System.setProperty("QASE_API_TOKEN", qaseApiToken);
System.setProperty("QASE_USE_BULK", TRUE);
System.setProperty("QASE_RUN_NAME", DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm")
.format(OffsetDateTime.now(ZoneOffset.UTC)) + ": " + automation + SUITE_NAME.toUpperCase() + " suite");
}
}
}

View file

@ -1,18 +0,0 @@
package com.provectus.kafka.ui.utilities.qaseUtils.enums;
public enum State {
NOT_AUTOMATED(0),
TO_BE_AUTOMATED(1),
AUTOMATED(2);
private final int value;
State(int value) {
this.value = value;
}
public int getValue() {
return value;
}
}

View file

@ -1,18 +0,0 @@
package com.provectus.kafka.ui.utilities.qaseUtils.enums;
public enum Status {
ACTUAL(0),
DRAFT(1),
DEPRECATED(2);
private final int value;
Status(int value) {
this.value = value;
}
public int getValue() {
return value;
}
}

View file

@ -3,7 +3,7 @@ package com.provectus.kafka.ui.variables;
public interface Url {
String BROKERS_LIST_URL = "http://%s:8080/ui/clusters/local/brokers";
String TOPICS_LIST_URL = "http://%s:8080/ui/clusters/local/all-topics?perPage=25";
String TOPICS_LIST_URL = "http://%s:8080/ui/clusters/local/all-topics";
String CONSUMERS_LIST_URL = "http://%s:8080/ui/clusters/local/consumer-groups";
String SCHEMA_REGISTRY_LIST_URL = "http://%s:8080/ui/clusters/local/schemas";
String KAFKA_CONNECT_LIST_URL = "http://%s:8080/ui/clusters/local/connectors";

Some files were not shown because too many files have changed in this diff Show more