diff --git a/.github/actions/build-alpha-release-notes/action.yml b/.github/actions/build-alpha-release-notes/action.yml new file mode 100644 index 000000000..6c664f482 --- /dev/null +++ b/.github/actions/build-alpha-release-notes/action.yml @@ -0,0 +1,39 @@ +name: build-alpha-release-notes +description: build-alpha-release-notes +inputs: + version: + required: true + description: The alpha version + latest-tag: + description: The latest tag or empty +runs: + using: composite + steps: + - name: Generate alpha release notes + if: inputs.latest-tag != '' + shell: bash + run: | + { + echo "# Mokapi Release ${{ inputs.version }}" + echo + echo "⚠️ Automatically generated alpha build." + echo + echo "- Alpha version: ${{ inputs.version }}" + echo "- Commit: $(git rev-parse --short HEAD)" + echo "- Built at: $(date -u +"%Y-%m-%d %H:%M:%S UTC")" + echo + echo "## Changes since ${{ inputs.latest-tag }}" + echo + git log "${{ inputs.latest-tag }}..HEAD" \ + --oneline \ + --no-merges | + grep -v -E 'github-actions\[bot\]|dependabot\[bot\]|test|Bump' | + sed 's/^[a-f0-9]\{7,40\} //' | # remove SHA prefix + awk '!seen[$0]++' | + sed 's/^/- /' + } > docs/release.md + - name: Upload UI artifact + uses: actions/upload-artifact@v4 + with: + name: release-notes + path: docs/release.md diff --git a/.github/actions/build-cli-flags-doc/action.yaml b/.github/actions/build-cli-flags-doc/action.yaml new file mode 100644 index 000000000..eb0df5230 --- /dev/null +++ b/.github/actions/build-cli-flags-doc/action.yaml @@ -0,0 +1,18 @@ +name: "Build Mokapi CLI Flags doc" +runs: + using: composite + steps: + - name: Create directory and file + shell: bash + run: | + mkdir -p webui/dist + touch webui/dist/.tmp + - name: Build CLI doc + shell: bash + run: go run ./cmd/internal/gen-cli-docs/main.go + - name: Add CLI Flags nav entry + working-directory: ./docs + shell: bash + run: | + jq '.Configuration.items.Static.items["CLI Flags"] = "configuration/static/mokapi.md"' ./config.json > ./tmp.json + mv ./tmp.json ./config.json \ No newline at end of file diff --git a/.github/actions/build-mokapi-image/action.yml b/.github/actions/build-mokapi-image/action.yml index 656d7599c..87b725ae0 100644 --- a/.github/actions/build-mokapi-image/action.yml +++ b/.github/actions/build-mokapi-image/action.yml @@ -1,5 +1,4 @@ name: "Build Mokapi image" -description: "Builds and deploys the React frontend to AWS S3" inputs: image-name: required: true @@ -10,27 +9,20 @@ inputs: version: required: true description: "The version of mokapi to build" - type: string + latest-tag: + required: true + description: "The latest tag" runs: using: "composite" steps: - uses: actions/setup-go@v5 with: go-version: 1.25.5 - - name: Create directory and file - run: | - mkdir -p webui/dist - touch webui/dist/.tmp - shell: bash - - name: Build CLI doc - run: go run ./cmd/internal/gen-cli-docs/main.go - shell: bash - - name: Add CLI Flags nav entry - run: | - jq '.Configuration.items.Static.items["CLI Flags"] = "configuration/static/mokapi.md"' ./config.json > ./tmp.json - mv ./tmp.json ./config.json - working-directory: ./docs - shell: bash + - uses: ./.github/actions/build-cli-flags-doc + - uses: ./.github/actions/build-alpha-release-notes + with: + version: ${{ inputs.version }} + latest-tag: ${{ inputs.latest-tag }} - name: Build docker alpha image run: docker build --build-arg VERSION=${{ inputs.version }} -t ${{ inputs.image-name }} -t mokapi/mokapi:alpha -f ./images/alpha.Dockerfile . 
shell: bash diff --git a/.github/actions/build-release-notes/action.yml b/.github/actions/build-release-notes/action.yml new file mode 100644 index 000000000..11dc14557 --- /dev/null +++ b/.github/actions/build-release-notes/action.yml @@ -0,0 +1,16 @@ +name: build-release-notes +description: build-release-notes +runs: + using: composite + steps: + - name: Write release notes + shell: bash + run: | + VERSION="${{ github.event.release.tag_name }}" + BODY="${{ github.event.release.body }}" + printf '# Mokapi %s\n\n%s\n' "${VERSION}" "${BODY}" > docs/release.md + - name: Upload release notes artifact + uses: actions/upload-artifact@v4 + with: + name: release-notes + path: docs/release.md diff --git a/.github/workflows/alpha.yml b/.github/workflows/alpha.yml index 7166c8ed5..1d2a3aff6 100644 --- a/.github/workflows/alpha.yml +++ b/.github/workflows/alpha.yml @@ -16,11 +16,15 @@ jobs: outputs: version: ${{ steps.get_version.outputs.version }} image-name: ${{ steps.get_version.outputs.image-name }} + latest-tag: ${{ steps.get_version.outputs.latest_tag }} steps: - name: Check out code uses: actions/checkout@v4 with: fetch-depth: 0 # Ensure all tags are fetched + - name: Fetch main + shell: bash + run: git fetch origin main --tags --force - name: get version id: get_version shell: bash @@ -40,8 +44,9 @@ jobs: NEW_VERSION="$MAJOR.$MINOR.$PATCH" echo "New version: $NEW_VERSION" - echo "version=$NEW_VERSION" >> $GITHUB_OUTPUT + echo "version=$NEW_VERSION-alpha" >> $GITHUB_OUTPUT echo "image-name=mokapi/mokapi:$NEW_VERSION-alpha" >> $GITHUB_OUTPUT + echo "latest_tag=$LATEST_TAG" >> $GITHUB_OUTPUT build-alpha: needs: setup @@ -52,6 +57,7 @@ jobs: artifact-test-report: playwright-report report-publish-path: reports/${{ github.ref_name }} version: ${{ needs.setup.outputs.version }} + latest-tag: ${{ needs.setup.outputs.latest-tag }} publish-image: name: Publish docker image diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d134255d9..e889dea64 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -10,8 +10,37 @@ permissions: contents: write jobs: - release-linux: - name: Release + build-dashboard: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: actions/setup-go@v5 + with: + go-version: 1.25.5 + - uses: ./.github/actions/build-cli-flags-doc + - uses: ./.github/actions/build-release-notes + - uses: actions/setup-node@v4 + with: + node-version: 23 + registry-url: 'https://registry.npmjs.org' + - run: npm install + working-directory: ./webui + - name: Install taskfile + run: sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b ~/.local/bin + - name: Build + run: task build-vue-app + env: + CGO_ENABLED: 0 + - name: Upload UI artifact + uses: actions/upload-artifact@v4 + with: + name: dashboard + path: webui/dist + + release-unix: + needs: build-dashboard runs-on: ubuntu-latest env: DOCKER_CLI_EXPERIMENTAL: "enabled" steps: - uses: actions/checkout@v4 with: fetch-depth: 0 + - name: Download dashboard + uses: actions/download-artifact@v4 + with: + name: dashboard + path: webui/dist - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -31,17 +65,6 @@ jobs: - uses: actions/setup-go@v5 with: go-version: 1.25.5 - - uses: actions/setup-node@v4 - with: - node-version: 23 - registry-url: 'https://registry.npmjs.org' - - run: sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b ~/.local/bin - - run: npm install - working-directory: ./webui - - name: Build
- run: task build-vue-app - env: - CGO_ENABLED: 0 - name: Run GoReleaser uses: goreleaser/goreleaser-action@v6 with: @@ -50,17 +73,19 @@ env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} TAP_GITHUB_TOKEN: ${{ secrets.TAP_GITHUB_TOKEN }} - - name: Publish npm - run: task publish-npm-package VERSION=${GITHUB_REF##*/v} - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} release-windows: + needs: build-dashboard runs-on: windows-latest steps: - uses: actions/checkout@v4 with: fetch-depth: 0 + - name: Download dashboard + uses: actions/download-artifact@v4 + with: + name: dashboard + path: webui/dist - uses: actions/setup-go@v5 with: go-version: 1.25.5 - uses: actions/setup-node@v4 with: node-version: 23 registry-url: 'https://registry.npmjs.org' - run: choco install go-task - - run: npm install - working-directory: ./webui - - name: Build - run: task build-vue-app - env: - CGO_ENABLED: 0 - name: Install goversioninfo run: go install github.com/josephspurrier/goversioninfo/cmd/goversioninfo@v1.4.0 - name: Generate Windows metadata (.syso) @@ -107,3 +126,24 @@ env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} CHOCOLATEY_API_KEY: ${{ secrets.CHOCOLATEY_API_KEY }} + + release-npm: + needs: build-dashboard + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 23 + registry-url: 'https://registry.npmjs.org' + - name: Download dashboard + uses: actions/download-artifact@v4 + with: + name: dashboard + path: webui/dist + - name: Install taskfile + run: sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b ~/.local/bin + - name: Publish + run: task publish-npm-package VERSION=${GITHUB_REF##*/v} + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 771c6338c..22f84d24e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -18,6 +18,9 @@ on: required: true description: "The version of mokapi to build" type: string + latest-tag: + description: "The latest tag" + type: string jobs: build: @@ -26,11 +29,17 @@ steps: - name: Check out code uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + - name: Fetch main + run: git fetch origin main --tags --force - uses: ./.github/actions/build-mokapi-image with: image-name: ${{ inputs.image-name }} artifact-name: ${{ inputs.image-artifact-name }} version: ${{ inputs.version }} + latest-tag: ${{ inputs.latest-tag }} test: name: Run UI tests @@ -55,9 +64,7 @@ steps: - name: Check out code uses: actions/checkout@v4 - with: - path: src - - uses: ./src/.github/actions/publish-test-report + - uses: ./.github/actions/publish-test-report with: artifact-test-report: ${{ inputs.artifact-test-report }} test-report-path: ${{ inputs.report-publish-path }} \ No newline at end of file diff --git a/acceptance/mail_test.go b/acceptance/mail_test.go index f53723e27..8895c87a3 100644 --- a/acceptance/mail_test.go +++ b/acceptance/mail_test.go @@ -36,6 +36,8 @@ func (suite *MailSuite) SetupSuite() { } func (suite *MailSuite) TestSendMail() { + time.Sleep(2 * time.Second) + ca := cert.DefaultRootCert() err := smtptest.SendMail("from@foo.bar", diff --git a/acceptance/petstore_test.go b/acceptance/petstore_test.go index 241bc14ed..6b9733194 100644 --- a/acceptance/petstore_test.go +++ b/acceptance/petstore_test.go @@ -57,10 +57,12 @@ func (suite *PetStoreSuite) TestApi() { "description": "", "servers": []interface{}{ map[string]interface{}{ - "description": "", "host": "127.0.0.1:19092", "name": "broker", "protocol": "kafka", + "title": "", + "summary": "", + "description": "", }, },
@@ -120,8 +122,8 @@ func (suite *PetStoreSuite) TestApi() { }, "name": "petstore.order-event", "partitions": []interface{}{ - map[string]interface{}{"id": float64(0), "leader": map[string]interface{}{"addr": "127.0.0.1:19092", "name": "broker"}, "offset": float64(1), "segments": float64(1), "startOffset": float64(0)}, - map[string]interface{}{"id": float64(1), "leader": map[string]interface{}{"addr": "127.0.0.1:19092", "name": "broker"}, "offset": float64(0), "segments": float64(0), "startOffset": float64(0)}, + map[string]interface{}{"id": float64(0), "offset": float64(1), "segments": float64(1), "startOffset": float64(0)}, + map[string]interface{}{"id": float64(1), "offset": float64(0), "segments": float64(0), "startOffset": float64(0)}, }, }}, } @@ -172,7 +174,7 @@ func (suite *PetStoreSuite) TestJsHttpHandler() { // test http metrics try.GetRequest(suite.T(), fmt.Sprintf("http://127.0.0.1:%d/api/metrics/http?path=/pet/{petId}", suite.cfg.Api.Port), nil, - try.BodyContains(`http_requests_total{service=\"Swagger Petstore\",endpoint=\"/pet/{petId}\"}","value":4}`), + try.BodyContains(`http_requests_total{service=\"Swagger Petstore\",endpoint=\"/pet/{petId}\",method=\"GET\"}","value":4}`), ) } diff --git a/api/handler_events_test.go b/api/handler_events_test.go index 9eb99196e..e05f8d85c 100644 --- a/api/handler_events_test.go +++ b/api/handler_events_test.go @@ -274,7 +274,7 @@ func TestHandler_KafkaEvents(t *testing.T) { fn: func(t *testing.T, h http.Handler, sm *events.StoreManager) { sm.SetStore(1, events.NewTraits().WithNamespace("kafka")) - err := sm.Push(&store.KafkaLog{ + err := sm.Push(&store.KafkaMessageLog{ Offset: 123, Key: store.LogValue{}, Message: store.LogValue{}, @@ -283,6 +283,8 @@ func TestHandler_KafkaEvents(t *testing.T) { ProducerId: 3, ProducerEpoch: 1, SequenceNumber: 2, + ClientId: "foo", + ScriptFile: "123-123", }, events.NewTraits().WithNamespace("kafka")) require.NoError(t, err) @@ -315,6 +317,8 @@ func TestHandler_KafkaEvents(t *testing.T) { "producerId": float64(3), "schemaId": float64(0), "sequenceNumber": float64(2), + "clientId": "foo", + "script": "123-123", }, m[0]["data"]) })) diff --git a/api/handler_http_test.go b/api/handler_http_test.go index 844929963..756af9fcb 100644 --- a/api/handler_http_test.go +++ b/api/handler_http_test.go @@ -362,18 +362,18 @@ func TestHandler_Http_Metrics(t *testing.T) { name: "service list with metric", app: runtimetest.NewHttpApp(openapitest.NewConfig("3.0.0", openapitest.WithInfo("foo", "", ""))), requestUrl: "http://foo.api/api/services", - responseBody: `{"name":"foo","type":"http","metrics":[{"name":"http_requests_total{service=\"foo\",endpoint=\"bar\"}","value":1}]}`, + responseBody: `{"name":"foo","type":"http","metrics":[{"name":"http_requests_total{service=\"foo\",endpoint=\"bar\",method=\"GET\"}","value":1}]}`, addMetrics: func(monitor *monitor.Monitor) { - monitor.Http.RequestCounter.WithLabel("foo", "bar").Add(1) + monitor.Http.RequestCounter.WithLabel("foo", "bar", http.MethodGet).Add(1) }, }, { name: "specific with metric", app: runtimetest.NewHttpApp(openapitest.NewConfig("3.0.0", openapitest.WithInfo("foo", "", ""))), requestUrl: "http://foo.api/api/services/http/foo", - responseBody: `"metrics":[{"name":"http_requests_total{service=\"foo\",endpoint=\"bar\"}","value":1}]`, + responseBody: `"metrics":[{"name":"http_requests_total{service=\"foo\",endpoint=\"bar\",method=\"POST\"}","value":1}]`, addMetrics: func(monitor *monitor.Monitor) { - monitor.Http.RequestCounter.WithLabel("foo", "bar").Add(1) + 
monitor.Http.RequestCounter.WithLabel("foo", "bar", http.MethodPost).Add(1) }, }, } diff --git a/api/handler_kafka.go b/api/handler_kafka.go index 55dd2b80a..4904b6082 100644 --- a/api/handler_kafka.go +++ b/api/handler_kafka.go @@ -40,33 +40,52 @@ type kafkaInfo struct { Groups []group `json:"groups,omitempty"` Metrics []metrics.Metric `json:"metrics,omitempty"` Configs []config `json:"configs,omitempty"` + Clients []client `json:"clients,omitempty"` } type kafkaServer struct { - Name string `json:"name"` - Host string `json:"host"` - Protocol string `json:"protocol"` - Description string `json:"description"` - Tags []kafkaServerTag `json:"tags,omitempty"` + Name string `json:"name"` + Host string `json:"host"` + Protocol string `json:"protocol"` + Title string `json:"title"` + Summary string `json:"summary"` + Description string `json:"description"` + Configs map[string]any `json:"configs,omitempty"` + Tags []kafkaTag `json:"tags,omitempty"` } -type kafkaServerTag struct { +type kafkaTag struct { Name string `json:"name"` Description string `json:"description"` } type group struct { Name string `json:"name"` + Generation int `json:"generation"` Members []member `json:"members"` - Coordinator string `json:"coordinator"` Leader string `json:"leader"` State string `json:"state"` AssignmentStrategy string `json:"protocol"` Topics []string `json:"topics"` } +type client struct { + ClientId string `json:"clientId"` + Address string `json:"address"` + BrokerAddress string `json:"brokerAddress"` + ClientSoftwareName string `json:"clientSoftwareName"` + ClientSoftwareVersion string `json:"clientSoftwareVersion"` + Groups []clientGroupMember `json:"groups"` +} + +type clientGroupMember struct { + MemberId string `json:"memberId"` + Group string `json:"group"` +} + type member struct { Name string `json:"name"` + ClientId string `json:"clientId"` Addr string `json:"addr"` ClientSoftwareName string `json:"clientSoftwareName"` ClientSoftwareVersion string `json:"clientSoftwareVersion"` @@ -80,19 +99,14 @@ type topic struct { Partitions []partition `json:"partitions"` Messages map[string]messageConfig `json:"messages,omitempty"` Bindings bindings `json:"bindings,omitempty"` + Tags []kafkaTag `json:"tags,omitempty"` } type partition struct { - Id int `json:"id"` - StartOffset int64 `json:"startOffset"` - Offset int64 `json:"offset"` - Leader broker `json:"leader"` - Segments int `json:"segments"` -} - -type broker struct { - Name string `json:"name"` - Addr string `json:"addr"` + Id int `json:"id"` + StartOffset int64 `json:"startOffset"` + Offset int64 `json:"offset"` + Segments int `json:"segments"` } type messageConfig struct { @@ -406,7 +420,9 @@ func getKafka(info *runtime.KafkaInfo) kafkaInfo { } } - for name, s := range info.Servers { + for it := info.Servers.Iter(); it.Next(); { + name := it.Key() + s := it.Value() if s == nil || s.Value == nil { continue } @@ -414,15 +430,18 @@ func getKafka(info *runtime.KafkaInfo) kafkaInfo { ks := kafkaServer{ Name: name, Host: s.Value.Host, + Title: s.Value.Title, + Summary: s.Value.Summary, Description: s.Value.Description, Protocol: s.Value.Protocol, + Configs: s.Value.Bindings.Kafka.Configs(), } for _, r := range s.Value.Tags { if r.Value == nil { continue } t := r.Value - ks.Tags = append(ks.Tags, kafkaServerTag{ + ks.Tags = append(ks.Tags, kafkaTag{ Name: t.Name, Description: t.Description, }) @@ -444,6 +463,24 @@ func getKafka(info *runtime.KafkaInfo) kafkaInfo { k.Configs = getConfigs(info.Configs()) + for _, ctx := range info.Store.Clients() { + c 
:= client{ + ClientId: ctx.ClientId, + Address: ctx.Addr, + BrokerAddress: ctx.ServerAddress, + ClientSoftwareName: ctx.ClientSoftwareName, + ClientSoftwareVersion: ctx.ClientSoftwareVersion, + } + for groupName, memberId := range ctx.Member { + c.Groups = append(c.Groups, clientGroupMember{ + MemberId: memberId, + Group: groupName, + }) + } + + k.Clients = append(k.Clients, c) + } + return k } @@ -546,6 +583,16 @@ func newTopic(t *store.Topic, ch *asyncapi3.Channel, cfg *asyncapi3.Config) topi result.Messages[messageId] = m } + for _, tRef := range ch.Tags { + if tRef.Value == nil { + continue + } + result.Tags = append(result.Tags, kafkaTag{ + Name: tRef.Value.Name, + Description: tRef.Value.Description, + }) + } + return result } @@ -562,17 +609,18 @@ func getPartitions(t *store.Topic) []partition { func newGroup(g *store.Group) group { grp := group{ - Name: g.Name, - State: g.State.String(), - Coordinator: g.Coordinator.Addr(), + Name: g.Name, + State: g.State.String(), } if g.Generation != nil { + grp.Generation = g.Generation.Id grp.Leader = g.Generation.LeaderId grp.AssignmentStrategy = g.Generation.Protocol for id, m := range g.Generation.Members { grp.Members = append(grp.Members, member{ Name: id, + ClientId: m.Client.ClientId, Addr: m.Client.Addr, ClientSoftwareName: m.Client.ClientSoftwareName, ClientSoftwareVersion: m.Client.ClientSoftwareVersion, @@ -583,6 +631,8 @@ func newGroup(g *store.Group) group { sort.Slice(grp.Members, func(i, j int) bool { return strings.Compare(grp.Members[i].Name, grp.Members[j].Name) < 0 }) + } else { + grp.Generation = -1 } for topicName := range g.Commits { grp.Topics = append(grp.Topics, topicName) @@ -599,21 +649,10 @@ func newPartition(p *store.Partition) partition { Id: p.Index, StartOffset: p.StartOffset(), Offset: p.Offset(), - Leader: newBroker(p.Leader), Segments: len(p.Segments), } } -func newBroker(b *store.Broker) broker { - if b == nil { - return broker{} - } - return broker{ - Name: b.Name, - Addr: b.Addr(), - } -} - func getKafkaClusters(app *runtime.App) []cluster { var clusters []cluster for _, k := range app.Kafka.List() { diff --git a/api/handler_kafka_test.go b/api/handler_kafka_test.go index 1bd2de160..2a5a071c4 100644 --- a/api/handler_kafka_test.go +++ b/api/handler_kafka_test.go @@ -104,7 +104,7 @@ func TestHandler_Kafka(t *testing.T) { return app }, requestUrl: "http://foo.api/api/services/kafka/foo", - responseBody: `{"name":"foo","description":"bar","version":"1.0","configs":[{"id":"64613435-3062-6462-3033-316532633233","url":"file://foo.yml","provider":"test","time":"2023-12-27T13:01:30Z"}]}`, + responseBody: `{"name":"foo","description":"bar","version":"1.0","servers":[{"name":"mokapi","host":":9092","protocol":"kafka","title":"Mokapi Default Broker","summary":"Automatically added broker because no servers are defined in the AsyncAPI spec","description":""}],"configs":[{"id":"64613435-3062-6462-3033-316532633233","url":"file://foo.yml","provider":"test","time":"2023-12-27T13:01:30Z"}]}`, }, { name: "get specific with contact", @@ -132,7 +132,7 @@ func TestHandler_Kafka(t *testing.T) { })) }, requestUrl: "http://foo.api/api/services/kafka/foo", - responseBody: `{"name":"foo","description":"bar","version":"1.0","servers":[{"name":"foo","host":"foo.bar","protocol":"kafka","description":"bar"}]}`, + responseBody: `{"name":"foo","description":"bar","version":"1.0","servers":[{"name":"foo","host":"foo.bar","protocol":"kafka","title":"","summary":"","description":"bar"}]}`, }, { name: "server with tags", @@ -151,7 +151,7 @@ 
func TestHandler_Kafka(t *testing.T) { })) }, requestUrl: "http://foo.api/api/services/kafka/foo", - responseBody: `{"name":"foo","description":"bar","version":"1.0","servers":[{"name":"foo","host":"foo.bar","protocol":"kafka","description":"bar","tags":[{"name":"env:test","description":"This environment is for running internal tests"}]}]}`, + responseBody: `{"name":"foo","description":"bar","version":"1.0","servers":[{"name":"foo","host":"foo.bar","protocol":"kafka","title":"","summary":"","description":"bar","tags":[{"name":"env:test","description":"This environment is for running internal tests"}]}]}`, }, { name: "get specific with topic", @@ -166,7 +166,7 @@ func TestHandler_Kafka(t *testing.T) { ), ), ) - s := store.New(c, enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(c, enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) return runtimetest.NewApp(runtimetest.WithKafkaInfo("foo", &runtime.KafkaInfo{ Config: c, @@ -174,7 +174,26 @@ func TestHandler_Kafka(t *testing.T) { })) }, requestUrl: "http://foo.api/api/services/kafka/foo", - responseBody: `{"name":"foo","description":"bar","version":"1.0","topics":[{"name":"foo","description":"bar","partitions":[{"id":0,"startOffset":0,"offset":0,"leader":{"name":"","addr":""},"segments":0}],"messages":{"foo":{"name":"foo","payload":{"schema":{"type":"string"}},"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}]}`, + responseBody: `{"name":"foo","description":"bar","version":"1.0","topics":[{"name":"foo","description":"bar","partitions":[{"id":0,"startOffset":0,"offset":0,"segments":0}],"messages":{"foo":{"name":"foo","payload":{"schema":{"type":"string"}},"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}]}`, + }, + { + name: "get specific with topic with tag", + app: func() *runtime.App { + c := asyncapi3test.NewConfig( + asyncapi3test.WithInfo("foo", "bar", "1.0"), + asyncapi3test.WithChannel("foo", + asyncapi3test.WithChannelTag("env:test", "bar"), + ), + ) + s := store.New(c, enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) + + return runtimetest.NewApp(runtimetest.WithKafkaInfo("foo", &runtime.KafkaInfo{ + Config: c, + Store: s, + })) + }, + requestUrl: "http://foo.api/api/services/kafka/foo", + responseBody: `{"name":"foo","description":"bar","version":"1.0","topics":[{"name":"foo","description":"","partitions":[{"id":0,"startOffset":0,"offset":0,"segments":0}],"bindings":{"partitions":1,"valueSchemaValidation":true},"tags":[{"name":"env:test","description":"bar"}]}]}`, }, { name: "get specific with topic and multi schema format", @@ -189,7 +208,7 @@ func TestHandler_Kafka(t *testing.T) { ), ), ) - s := store.New(c, enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(c, enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) return runtimetest.NewApp(runtimetest.WithKafkaInfo("foo", &runtime.KafkaInfo{ Config: c, @@ -197,7 +216,7 @@ func TestHandler_Kafka(t *testing.T) { })) }, requestUrl: "http://foo.api/api/services/kafka/foo", - responseBody: `{"name":"foo","description":"bar","version":"1.0","topics":[{"name":"foo","description":"bar","partitions":[{"id":0,"startOffset":0,"offset":0,"leader":{"name":"","addr":""},"segments":0}],"messages":{"foo":{"name":"foo","payload":{"format":"foo","schema":{"type":"string"}},"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}]}`, + responseBody: 
`{"name":"foo","description":"bar","version":"1.0","topics":[{"name":"foo","description":"bar","partitions":[{"id":0,"startOffset":0,"offset":0,"segments":0}],"messages":{"foo":{"name":"foo","payload":{"format":"foo","schema":{"type":"string"}},"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}]}`, }, { name: "get specific with group", @@ -222,7 +241,26 @@ func TestHandler_Kafka(t *testing.T) { return app }, requestUrl: "http://foo.api/api/services/kafka/foo", - responseBody: `{"name":"foo","description":"bar","version":"1.0","servers":[{"name":"foo","host":"foo.bar","protocol":"kafka","description":""}],"groups":[{"name":"foo","members":null,"coordinator":"foo.bar:9092","leader":"","state":"PreparingRebalance","protocol":"range","topics":null}]}`, + responseBody: `{"name":"foo","description":"bar","version":"1.0","servers":[{"name":"foo","host":"foo.bar","protocol":"kafka","title":"","summary":"","description":""}],"groups":[{"name":"foo","generation":3,"members":null,"leader":"","state":"PreparingRebalance","protocol":"range","topics":null}]}`, + }, + { + name: "get specific with group no generation", + app: func() *runtime.App { + app := runtime.New(&static.Config{}) + app.Kafka.Set("foo", getKafkaInfoWithGroup(asyncapi3test.NewConfig( + asyncapi3test.WithInfo("foo", "bar", "1.0"), + asyncapi3test.WithServer("foo", "kafka", "foo.bar"), + ), + &store.Group{ + Name: "foo", + State: store.PreparingRebalance, + Commits: nil, + }, + )) + return app + }, + requestUrl: "http://foo.api/api/services/kafka/foo", + responseBody: `{"name":"foo","description":"bar","version":"1.0","servers":[{"name":"foo","host":"foo.bar","protocol":"kafka","title":"","summary":"","description":""}],"groups":[{"name":"foo","generation":-1,"members":null,"leader":"","state":"PreparingRebalance","protocol":"","topics":null}]}`, }, { name: "get specific with group containing members", @@ -278,7 +316,7 @@ func TestHandler_Kafka(t *testing.T) { return app }, requestUrl: "http://foo.api/api/services/kafka/foo", - responseBody: `{"name":"foo","description":"bar","version":"1.0","servers":[{"name":"foo","host":"foo.bar","protocol":"kafka","description":""}],"groups":[{"name":"foo","members":[{"name":"m1","addr":"192.168.0.100","clientSoftwareName":"mokapi","clientSoftwareVersion":"1.0","heartbeat":"2024-04-22T15:04:05+07:00","partitions":{"topic":[1,2,5]}},{"name":"m2","addr":"192.168.0.200","clientSoftwareName":"mokapi","clientSoftwareVersion":"1.0","heartbeat":"2024-04-22T15:04:10+07:00","partitions":{"topic":[3,4,6]}}],"coordinator":"foo.bar:9092","leader":"m1","state":"PreparingRebalance","protocol":"range","topics":null}]}`, + responseBody: `{"name":"foo","description":"bar","version":"1.0","servers":[{"name":"foo","host":"foo.bar","protocol":"kafka","title":"","summary":"","description":""}],"groups":[{"name":"foo","generation":3,"members":[{"name":"m1","clientId":"client1","addr":"192.168.0.100","clientSoftwareName":"mokapi","clientSoftwareVersion":"1.0","heartbeat":"2024-04-22T15:04:05+07:00","partitions":{"topic":[1,2,5]}},{"name":"m2","clientId":"client2","addr":"192.168.0.200","clientSoftwareName":"mokapi","clientSoftwareVersion":"1.0","heartbeat":"2024-04-22T15:04:10+07:00","partitions":{"topic":[3,4,6]}}],"leader":"m1","state":"PreparingRebalance","protocol":"range","topics":null}]}`, }, { name: "get specific with topic and openapi schema", @@ -297,7 +335,7 @@ func TestHandler_Kafka(t *testing.T) { return app }, requestUrl: "http://foo.api/api/services/kafka/foo", - 
responseBody: `{"name":"foo","description":"bar","version":"1.0","topics":[{"name":"foo","description":"bar","partitions":[{"id":0,"startOffset":0,"offset":0,"leader":{"name":"","addr":""},"segments":0}],"messages":{"foo":{"name":"foo","payload":{"format":"foo","schema":{"type":"string"}},"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}]}`, + responseBody: `{"name":"foo","description":"bar","version":"1.0","topics":[{"name":"foo","description":"bar","partitions":[{"id":0,"startOffset":0,"offset":0,"segments":0}],"messages":{"foo":{"name":"foo","payload":{"format":"foo","schema":{"type":"string"}},"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}]}`, }, } @@ -379,7 +417,7 @@ func TestHandler_KafkaAPI(t *testing.T) { h, try.HasStatusCode(200), try.HasHeader("Content-Type", "application/json"), - try.HasBody(`[{"name":"topic-1","description":"foobar","partitions":[{"id":0,"startOffset":0,"offset":0,"leader":{"name":"broker-1","addr":"localhost:9092"},"segments":0}],"messages":{"foo":{"name":"foo","payload":null,"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}]`), + try.HasBody(`[{"name":"topic-1","description":"foobar","partitions":[{"id":0,"startOffset":0,"offset":0,"segments":0}],"messages":{"foo":{"name":"foo","payload":null,"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}]`), ) }, }, @@ -409,7 +447,7 @@ func TestHandler_KafkaAPI(t *testing.T) { h, try.HasStatusCode(200), try.HasHeader("Content-Type", "application/json"), - try.HasBody(`{"name":"topic-1","description":"foobar","partitions":[{"id":0,"startOffset":0,"offset":0,"leader":{"name":"broker-1","addr":"localhost:9092"},"segments":0}],"messages":{"foo":{"name":"foo","payload":null,"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}`), + try.HasBody(`{"name":"topic-1","description":"foobar","partitions":[{"id":0,"startOffset":0,"offset":0,"segments":0}],"messages":{"foo":{"name":"foo","payload":null,"contentType":"application/json"}},"bindings":{"partitions":1,"valueSchemaValidation":true}}`), ) }, }, @@ -611,7 +649,7 @@ func TestHandler_KafkaAPI(t *testing.T) { "", h, try.HasStatusCode(http.StatusOK), - try.HasBody(`[{"id":0,"startOffset":0,"offset":0,"leader":{"name":"broker-1","addr":"localhost:9092"},"segments":0}]`), + try.HasBody(`[{"id":0,"startOffset":0,"offset":0,"segments":0}]`), ) }, }, @@ -640,7 +678,7 @@ func TestHandler_KafkaAPI(t *testing.T) { "", h, try.HasStatusCode(http.StatusOK), - try.HasBody(`{"id":0,"startOffset":0,"offset":0,"leader":{"name":"broker-1","addr":"localhost:9092"},"segments":0}`), + try.HasBody(`{"id":0,"startOffset":0,"offset":0,"segments":0}`), ) }, }, @@ -1061,14 +1099,13 @@ func TestHandler_Kafka_Metrics(t *testing.T) { func getKafkaInfo(config *asyncapi3.Config) *runtime.KafkaInfo { return &runtime.KafkaInfo{ Config: config, - Store: store.New(config, enginetest.NewEngine(), &eventstest.Handler{}), + Store: store.New(config, enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()), } } func getKafkaInfoWithGroup(config *asyncapi3.Config, group *store.Group) *runtime.KafkaInfo { - s := store.New(config, enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(config, enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) g := s.GetOrCreateGroup(group.Name, 0) - group.Coordinator, _ = s.Broker(0) *g = *group return &runtime.KafkaInfo{ Config: config, diff 
--git a/config/dynamic/asyncApi/convert.go b/config/dynamic/asyncApi/convert.go index b9c9b27ac..3b6b8d667 100644 --- a/config/dynamic/asyncApi/convert.go +++ b/config/dynamic/asyncApi/convert.go @@ -4,6 +4,7 @@ import ( "fmt" "mokapi/config/dynamic" "mokapi/providers/asyncapi3" + "mokapi/sortedmap" "net/url" "path" "strings" @@ -316,15 +317,15 @@ func convertServers(cfg *asyncapi3.Config, servers map[string]*ServerRef) { return } if cfg.Servers == nil { - cfg.Servers = map[string]*asyncapi3.ServerRef{} + cfg.Servers = &sortedmap.LinkedHashMap[string, *asyncapi3.ServerRef]{} } for name, orig := range servers { if len(orig.Ref) > 0 { - cfg.Servers[name] = &asyncapi3.ServerRef{Reference: dynamic.Reference{Ref: orig.Ref}} + cfg.Servers.Set(name, &asyncapi3.ServerRef{Reference: dynamic.Reference{Ref: orig.Ref}}) } if orig.Value != nil { - cfg.Servers[name] = convertServer(orig.Value) + cfg.Servers.Set(name, convertServer(orig.Value)) } } } diff --git a/config/dynamic/asyncApi/convert_test.go b/config/dynamic/asyncApi/convert_test.go index 9c295600e..7fce80b25 100644 --- a/config/dynamic/asyncApi/convert_test.go +++ b/config/dynamic/asyncApi/convert_test.go @@ -1,21 +1,25 @@ -package asyncApi +package asyncApi_test import ( - "github.com/stretchr/testify/require" - "gopkg.in/yaml.v3" "mokapi/config/dynamic" + "mokapi/config/dynamic/asyncApi" + "mokapi/config/dynamic/asyncApi/asyncapitest" "mokapi/config/dynamic/dynamictest" + "mokapi/providers/asyncapi3" "mokapi/schema/json/schema" "os" "strings" "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" ) func TestConfig_Convert(t *testing.T) { b, err := os.ReadFile("./test/streetlight-kafka-2.0.yaml") require.NoError(t, err) - var cfg *Config + var cfg *asyncApi.Config err = yaml.Unmarshal(b, &cfg) require.NoError(t, err) @@ -36,10 +40,10 @@ func TestConfig_Convert(t *testing.T) { require.Equal(t, "https://www.apache.org/licenses/LICENSE-2.0", cfg3.Info.License.Url) // Server - require.Len(t, cfg3.Servers, 1) - require.Equal(t, "test.mosquitto.org:{port}", cfg3.Servers["production"].Value.Host) - require.Equal(t, "kafka", cfg3.Servers["production"].Value.Protocol) - require.Equal(t, "Test broker", cfg3.Servers["production"].Value.Description) + require.Equal(t, cfg3.Servers.Len(), 1) + require.Equal(t, "test.mosquitto.org:{port}", cfg3.Servers.Lookup("production").Value.Host) + require.Equal(t, "kafka", cfg3.Servers.Lookup("production").Value.Protocol) + require.Equal(t, "Test broker", cfg3.Servers.Lookup("production").Value.Description) // Channel channel := cfg3.Channels["smartylighting/streetlights/1/0/event/{streetlightId}/lighting/measured"].Value @@ -75,6 +79,32 @@ func TestConfig_Convert(t *testing.T) { require.Equal(t, "Inform about environmental lighting conditions of a particular streetlight.", op.Summary) } +func TestServer_Convert(t *testing.T) { + testcases := []struct { + name string + cfg *asyncApi.Config + test func(t *testing.T, config *asyncapi3.Config, err error) + }{ + { + name: "server with url:port", + cfg: asyncapitest.NewConfig(asyncapitest.WithServer("foo", "kafka", "mokapi-service:9092")), + test: func(t *testing.T, config *asyncapi3.Config, err error) { + require.NoError(t, err) + require.Equal(t, config.Servers.Len(), 1) + require.Equal(t, "mokapi-service:9092", config.Servers.Lookup("foo").Value.Host) + require.Equal(t, "kafka", config.Servers.Lookup("foo").Value.Protocol) + }, + }, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + c, err := tc.cfg.Convert() + tc.test(t, 
c, err) + }) + } +} + func TestConfig_ConvertNoOperationId(t *testing.T) { s := ` asyncapi: '2.6.0' @@ -94,7 +124,7 @@ channels: schema: type: string ` - var old *Config + var old *asyncApi.Config err := yaml.Unmarshal([]byte(s), &old) require.NoError(t, err) cfg, err := old.Convert() diff --git a/config/dynamic/provider/git/git.go b/config/dynamic/provider/git/git.go index 39c3f8e00..6474d7d90 100644 --- a/config/dynamic/provider/git/git.go +++ b/config/dynamic/provider/git/git.go @@ -4,12 +4,6 @@ import ( "context" "errors" "fmt" - "github.com/go-git/go-git/v5" - "github.com/go-git/go-git/v5/config" - "github.com/go-git/go-git/v5/plumbing" - "github.com/go-git/go-git/v5/plumbing/transport/client" - "github.com/go-git/go-git/v5/plumbing/transport/http" - log "github.com/sirupsen/logrus" "mokapi/config/dynamic" "mokapi/config/dynamic/provider/file" "mokapi/config/static" @@ -19,6 +13,13 @@ import ( "path/filepath" "strings" "time" + + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/config" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/transport/client" + "github.com/go-git/go-git/v5/plumbing/transport/http" + log "github.com/sirupsen/logrus" ) type repository struct { @@ -110,7 +111,7 @@ func (p *Provider) Start(ch chan dynamic.ConfigEvent, pool *safe.Pool) error { pool.Go(func(ctx context.Context) { err := p.initRepository(r, ch, pool) if err != nil { - log.Errorf("init git repository failed: %v", err) + log.Errorf("init git repository '%v' failed: %v", r.url, err) } }) } @@ -128,9 +129,16 @@ func (p *Provider) initRepository(r *repository, ch chan dynamic.ConfigEvent, po return err } - r.repo, err = git.PlainClone(r.localPath, false, &git.CloneOptions{ - URL: r.repoUrl, - }) + options := &git.CloneOptions{ + URL: r.repoUrl, + Depth: 1, + SingleBranch: true, + } + if r.ref != "" { + options.ReferenceName = plumbing.NewBranchReferenceName(r.ref) + } + + r.repo, err = git.PlainClone(r.localPath, false, options) if err != nil { return fmt.Errorf("unable to clone git %q: %v", r.repoUrl, err) } @@ -140,31 +148,7 @@ func (p *Provider) initRepository(r *repository, ch chan dynamic.ConfigEvent, po return fmt.Errorf("unable to get git worktree: %v", err.Error()) } - h, err := r.repo.Head() - if err != nil { - return fmt.Errorf("unable to get git head: %v", err.Error()) - } - - r.pullOptions = &git.PullOptions{SingleBranch: true} - if len(r.ref) > 0 { - ref := plumbing.NewBranchReferenceName(r.ref) - - if h.Name() != ref { - r.pullOptions.ReferenceName = ref - err = r.repo.Fetch(&git.FetchOptions{RefSpecs: []config.RefSpec{"refs/*:refs/*", "HEAD:refs/heads/HEAD"}}) - if errors.Is(err, git.ErrForceNeeded) { - err = r.repo.Fetch(&git.FetchOptions{RefSpecs: []config.RefSpec{"+refs/*:refs/*", "HEAD:refs/heads/HEAD"}}) - } - if err != nil { - return fmt.Errorf("git fetch error %v: %v", r.url, err.Error()) - } - err = r.wt.Checkout(&git.CheckoutOptions{Branch: ref}) - if err != nil && !errors.Is(err, git.NoErrAlreadyUpToDate) { - return fmt.Errorf("git checkout error %v: %v", r.url, err.Error()) - } - } - } - + r.pullOptions = &git.PullOptions{SingleBranch: true, Depth: 1} ref, err := r.repo.Head() if err != nil { return fmt.Errorf("unable to get git head: %w", err) diff --git a/config/dynamic/provider/git/github.go b/config/dynamic/provider/git/github.go index d007571aa..2287dfe07 100644 --- a/config/dynamic/provider/git/github.go +++ b/config/dynamic/provider/git/github.go @@ -3,8 +3,9 @@ package git import ( "encoding/base64" "fmt" - 
"github.com/bradleyfalzon/ghinstallation/v2" "net/http" + + "github.com/bradleyfalzon/ghinstallation/v2" ) type githubTransport struct { @@ -16,6 +17,9 @@ func addGitHubAuth(t *transport, r *repository) error { if err != nil { return err } + if len(key) == 0 { + return fmt.Errorf("private key is empty") + } transToken, err := ghinstallation.New(http.DefaultTransport, r.config.Auth.GitHub.AppId, r.config.Auth.GitHub.InstallationId, key) if err != nil { diff --git a/docs/configuration/dynamic/file.md b/docs/configuration/dynamic/file.md index 92df3b0bd..bb23f302b 100644 --- a/docs/configuration/dynamic/file.md +++ b/docs/configuration/dynamic/file.md @@ -22,7 +22,7 @@ MOKAPI_PROVIDERS_FILE_DIRECTORY=/path/to/dynamic/config ``` ``` box=tip -You also can use CLI JSON or shorthand syntax to, see [CLI](/docs/configuration/static/cli.md) +You also can use CLI JSON or shorthand syntax to, see [CLI](/docs/configuration/static/cli-usage.md) ``` ## Provider Configuration @@ -67,7 +67,7 @@ MOKAPI_PROVIDERS_FILE_DIRECTORY=/foobar ``` ``` box=tip -You can define multiple file names or directory using CLI JSON or shorthand syntax, see [CLI](/docs/configuration/static/cli.md) +You can define multiple file names or directory using CLI JSON or shorthand syntax, see [CLI](/docs/configuration/static/cli-usage.md) ``` ``` box=tip diff --git a/docs/configuration/dynamic/http.md b/docs/configuration/dynamic/http.md index 13d6855a9..6ef9dafb9 100644 --- a/docs/configuration/dynamic/http.md +++ b/docs/configuration/dynamic/http.md @@ -29,7 +29,7 @@ A list of all options that can be used with the HTTP provider, refer to the [reference page](/docs/configuration/reference.md). ``` box=tip -You can also use CLI JSON or shorthand syntax, see [CLI](/docs/configuration/static/cli.md) +You can also use CLI JSON or shorthand syntax, see [CLI](/docs/configuration/static/cli-usage.md) ``` ``` box=tip diff --git a/docs/guides/kafka/overview.md b/docs/guides/kafka/overview.md index f8d9cf328..dbe572b76 100644 --- a/docs/guides/kafka/overview.md +++ b/docs/guides/kafka/overview.md @@ -2,23 +2,116 @@ title: Mock Kafka Topics for Testing and Development description: Mock Kafka Topics Using AsyncAPI Specification for Seamless Testing and Development --- -# Mocking a Kafka Topic with Mokapi -Mokapi simplifies the process of mocking Kafka topics, enabling you to efficiently test and debug your event-driven applications without the need for a live Kafka environment. Whether you’re validating message schemas, simulating producer-consumer workflows, or testing error-handling scenarios, Mokapi offers a flexible and developer-friendly solution. +# Mocking Kafka with AsyncAPI -Designed to integrate seamlessly into your projects, Mokapi allows you to mock Kafka brokers and topics using your AsyncAPI specifications. This ensures that your Kafka topics are simulated according to their contract, making it easier to prototype, test, and validate your systems with accuracy. Developers can also simulate Kafka producers and consumers with custom Mokapi Scripts, providing full control over message flows and test scenarios. +Mokapi transforms your AsyncAPI Specification into a functional Kafka mock server. +Mokapi focuses on mocking Kafka topics and message flow rather than simulating Kafka’s internal cluster mechanics. +It eliminates the overhead of managing local clusters while ensuring your producers and +consumers strictly adhere to their API contracts. 
-## Key Features
-- **Mock Kafka Topics:**
-  Use an AsyncAPI specification to mock Kafka topics by contract, ensuring consistency and adherence to expected behavior.
-- **Schema Validation:**
-  Mokapi validates message against the schemas to ensure compatibility and compliance between services, reducing runtime issues.
+```yaml tab=3.0
+asyncapi: 3.0.0
+info:
+  title: User Service
+  version: 1.0.0
+channels:
+  userSignedUp:
+    address: 'users.signedup'
+    messages:
+      userEvent:
+        payload:
+          type: object
+          properties:
+            id: { type: string, format: uuid }
+            email: { type: string, format: email }
+```
-- **Realistic Test Data:**
-  Import message payloads or generate representative data to test edge cases, workflows, and error conditions.
+```yaml tab=2.6
+asyncapi: 2.6.0
+info:
+  title: User Service
+  version: 1.0.0
-- **Streamlined Setup:**
-  Avoid the complexity of setting up and maintaining a full Kafka environment, simplifying your development and testing process.
+channels:
+  users.signedup:
+    publish:
+      message:
+        name: userEvent
+        payload:
+          type: object
+          properties:
+            id: { type: string, format: uuid }
+            email: { type: string, format: email }
+```
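With the mock running, any standard Kafka client can consume the generated events. The following sketch is illustrative only and not part of this patch; it condenses the kafkajs consumer this PR adds under examples/kafka/asyncapi-3/, and the client id, group id, and broker address are assumptions:

```javascript
// Illustrative sketch, mirroring examples/kafka/asyncapi-3/consumer.js from this PR.
// Assumes Mokapi is serving the spec above on localhost:9092.
import { Kafka } from 'kafkajs';

const kafka = new Kafka({ clientId: 'demo-consumer', brokers: ['localhost:9092'] });
const consumer = kafka.consumer({ groupId: 'demo-group' });

await consumer.connect();
await consumer.subscribe({ topic: 'users.signedup', fromBeginning: true });
await consumer.run({
    // Each payload is validated against (and can be generated from) the schema above.
    eachMessage: async ({ message }) => {
        console.log(JSON.parse(message.value.toString()));
    }
});
```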

+```box=tip
+Ready to dive in? Head over to the Kafka [Quick Start Guide](/docs/guides/kafka/quick-start.md) and run your first
+Kafka mock in seconds.
+```
## Why Use Mokapi for Kafka?
-By leveraging Mokapi, you can enhance your development workflow and reduce dependencies on external systems, while ensuring the reliability and robustness of your Kafka-based applications. The ability to simulate real-world scenarios and validate messages against contracts reduces the risk of issues in production, accelerates development, and supports a smooth CI/CD pipeline.
-## Start Mocking Today!
-Refer to [Quick Start](/docs/guides/kafka/quick-start.md) to learn how to create your first Kafka topic mock with Mokapi and streamline the process of testing and debugging event-driven architectures. Mokapi empowers you to build more reliable, scalable, and efficient systems, all while reducing the operational overhead of managing Kafka in your development environment.
\ No newline at end of file
+Testing event-driven architectures with a real Kafka instance is often heavy and non-deterministic. Mokapi provides a
+lightweight, stable broker designed specifically for local development and CI/CD pipelines.
+

+- **Zero Infrastructure Overhead:**
+  Skip the complexity of Kafka cluster setup (ZooKeeper or KRaft). Mokapi is a single binary/container that works out of the box.
+- **Contract-First Validation:**
+  Every message is validated against your schemas in real-time. Catch breaking changes before they even reach your codebase.
+- **Reproducible Test Suites:**
+  Operating entirely in-memory, Mokapi ensures every test run starts with a clean slate, with no leftover offsets or orphaned topics.
+
+## Supported Standards
+
+Mokapi integrates seamlessly into the existing ecosystem, supporting modern industry standards:
+

+- **AsyncAPI Specifications:**
+  Full support for both Version 2.x and Version 3.0.
+- **Schema Formats:**
+  Built-in validation for JSON Schema and Avro.
+- **Kafka Protocol:**
+  Compatible with standard Kafka clients (Java, Go, Python, .NET, etc.).
+
+## Key Features
+
+### 1. Automated Topic Provisioning
+
+Based on your AsyncAPI definition, Mokapi automatically initializes the required channels (topics). No manual admin commands or startup scripts are required.
+
+### 2. Dynamic Data Generation
+
+If a consumer requests data but no producer is active, Mokapi can generate realistic mock data based on your schema.
+This allows you to test consumers in complete isolation.
+
+### 3. Error & Latency Simulation
+
+Use Mokapi Scripts to simulate edge cases that are difficult to trigger in a real environment (see the sketch after this list):
+
+- Inject network latency or jitter.
+- Simulate broker-specific Kafka Error Codes.
+- Create stateful mock behavior using JavaScript.
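As a sketch of that scripting hook (illustrative, not part of this patch): `produce` appears in this PR's examples/kafka/asyncapi-3/producer.js, while `every` is assumed here to be Mokapi's documented scheduler helper; the interval and log text are made up for the example:

```javascript
// Sketch only: stateful, scheduled production with a Mokapi script.
// `produce` is used in examples/kafka/asyncapi-3/producer.js in this PR;
// `every` is assumed from Mokapi's documented JavaScript API.
import { every } from 'mokapi';
import { produce } from 'mokapi/kafka';

let count = 0;

export default function () {
    // Publish a schema-generated event every 5 seconds and keep state
    // across invocations via the enclosing scope.
    every('5s', function () {
        count++;
        produce({ topic: 'users.signedup' });
        console.log(`generated signup event #${count}`);
    });
}
```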

+## Architectural Design
+
+To ensure speed and determinism, Mokapi simulates Kafka’s application behavior rather than its cluster administration:
+
+- **Single Stable Broker:**
+  Focuses on message flow rather than leader election, partition replication, or broker coordination.
+- **Ephemeral by Design:**
+  Data is kept in-memory to provide lightning-fast feedback loops during development.
+- **Deterministic Broker Address Resolution:**
+  Mokapi resolves the advertised broker address based on the listener port.
+  If multiple AsyncAPI servers share the same port, the first matching server
+  definition is used. AsyncAPI servers are treated as environment-specific
+  configurations rather than simultaneously addressable brokers.
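Giving each server its own port keeps that resolution unambiguous. A hypothetical AsyncAPI 3.0 fragment (server names and ports are illustrative, not from this patch):

```yaml
# Hypothetical fragment: one listener port per server, so the advertised
# broker address resolves unambiguously for each environment.
servers:
  dev:
    host: 'localhost:9092'
    protocol: kafka
  test:
    host: 'localhost:9093'
    protocol: kafka
```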

+ +Kafka clients do not transmit the DNS name used to establish a connection. +When multiple servers share the same listener port, it is therefore not +possible to determine which server was used at runtime. Mokapi follows +Kafka’s networking model and applies a deterministic resolution strategy +instead of guessing. + +``` box=tip title=Recommendation +If you define multiple AsyncAPI servers, use different ports for each server +or use Mokapi’s [patching](/docs/configuration/patching.md) mechanism. +``` + +## Next Steps + +- [Quick Start Guide](/docs/guides/kafka/quick-start.md): Learn how to run Mokapi and load your first AsyncAPI file. +- [Mokapi CLI](/docs/configuration/static/cli-usage.md): Detailed command-line options and runtime configuration. \ No newline at end of file diff --git a/engine/common/host.go index e3dca7fdb..e1df87c2e 100644 --- a/engine/common/host.go +++ b/engine/common/host.go @@ -64,11 +64,13 @@ type KafkaClient interface { } type KafkaProduceArgs struct { - Cluster string - Topic string - Messages []KafkaMessage - Timeout int - Retry KafkaProduceRetry + Cluster string + Topic string + Messages []KafkaMessage + Timeout int + Retry KafkaProduceRetry + ClientId string + ScriptFile string } type KafkaMessage struct { diff --git a/engine/host.go index e14638963..6b8d21d93 100644 --- a/engine/host.go +++ b/engine/host.go @@ -88,14 +88,13 @@ func (sh *scriptHost) RunEvent(event string, args ...interface{}) []*common.Acti log.Errorf("unable to execute event handler: %v", err) action.Error = &common.Error{Message: err.Error()} } else if !b && logs == len(action.Logs) { - //sh.Unlock() continue } else { log.WithField("handler", action).Debug("processed event handler") } - action.Duration = time.Now().Sub(start).Milliseconds() action.Parameters = getDeepCopy(args) + action.Duration = time.Now().Sub(start).Milliseconds() result = append(result, action) } return result } diff --git a/engine/kafka.go index 90f759076..71961a027 100644 --- a/engine/kafka.go +++ b/engine/kafka.go @@ -39,6 +39,9 @@ func (c *KafkaClient) Produce(args *common.KafkaProduceArgs) (*common.KafkaProdu } client := store.NewClient(k.Store, c.app.Monitor.Kafka) + client.ClientId = args.ClientId + client.ScriptFile = args.ScriptFile + var produced []common.KafkaMessageResult for _, m := range args.Messages { value := m.Data @@ -274,7 +277,11 @@ func selectMessage(value any, topic *asyncapi3.Channel, cfg *asyncapi3.Config) ( } if noOperationDefined { - return nil, fmt.Errorf("no 'send' or 'receive' operation defined in specification") + for _, msg := range topic.Messages { + if validationErr = valueMatchMessagePayload(value, msg.Value); validationErr == nil { + return msg.Value, nil + } + } } if value != nil { @@ -287,9 +294,12 @@ func selectMessage(value any, topic *asyncapi3.Channel, cfg *asyncapi3.Config) ( value = string(b) } } - return nil, fmt.Errorf("no matching message configuration found for the given value: %v\nhint:\n%w\n", value, validationErr) + if validationErr != nil { + return nil, fmt.Errorf("no matching message configuration found for the given value: %v\nhint:\n%w\n", value, validationErr) + } + return nil, nil } - return nil, fmt.Errorf("no message ") + return nil, fmt.Errorf("channel defines no message schema; define a message payload in the channel or provide an explicit message") } func valueMatchMessagePayload(value any, msg *asyncapi3.Message) error { diff --git a/engine/kafka_test.go index
198d1f70b..37fa7f2ac 100644 --- a/engine/kafka_test.go +++ b/engine/kafka_test.go @@ -591,7 +591,33 @@ func TestKafkaClient(t *testing.T) { } `)) - require.EqualError(t, err, "failed to produce message to Kafka topic 'foo': no 'send' or 'receive' operation defined in specification at mokapi/js/kafka.(*Module).Produce-fm (native)") + require.NoError(t, err) + b, errCode := app.Kafka.Get("foo").Store.Topic("foo").Partition(0).Read(0, 1000) + require.Equal(t, kafka.None, errCode) + require.NotNil(t, b) + require.Equal(t, "gbrmarxhk", kafka.BytesToString(b.Records[0].Key)) + require.Equal(t, `"foo"`, kafka.BytesToString(b.Records[0].Value)) + }, + }, + { + name: "channel does not define a message produce without defining a message", + cfg: func() *asyncapi3.Config { + ch := asyncapi3test.NewChannel() + + return asyncapi3test.NewConfig( + asyncapi3test.WithInfo("foo", "", ""), + asyncapi3test.AddChannel("foo", ch), + ) + }, + test: func(t *testing.T, e *engine.Engine, app *runtime.App) { + err := e.AddScript(newScript("test.js", ` + import { produce } from 'mokapi/kafka' + export default function() { + produce({ topic: 'foo' }) + } + `)) + + require.EqualError(t, err, "failed to produce message to Kafka topic 'foo': channel defines no message schema; define a message payload in the channel or provide an explicit message at mokapi/js/kafka.(*Module).Produce-fm (native)") }, }, { diff --git a/examples/kafka/asyncapi-2/api.yaml b/examples/kafka/asyncapi-2/api.yaml new file mode 100644 index 000000000..412994a1c --- /dev/null +++ b/examples/kafka/asyncapi-2/api.yaml @@ -0,0 +1,15 @@ +asyncapi: 2.0.0 +info: + title: User Service + version: 1.0.0 + +channels: + users.signedup: + publish: + message: + name: userEvent + payload: + type: object + properties: + id: { type: string, format: uuid } + email: { type: string, format: email } \ No newline at end of file diff --git a/examples/kafka/asyncapi.yaml b/examples/kafka/asyncapi-26/asyncapi.yaml similarity index 100% rename from examples/kafka/asyncapi.yaml rename to examples/kafka/asyncapi-26/asyncapi.yaml diff --git a/examples/kafka/orders.js b/examples/kafka/asyncapi-26/orders.js similarity index 100% rename from examples/kafka/orders.js rename to examples/kafka/asyncapi-26/orders.js diff --git a/examples/kafka/user.signup.js b/examples/kafka/asyncapi-26/user.signup.js similarity index 100% rename from examples/kafka/user.signup.js rename to examples/kafka/asyncapi-26/user.signup.js diff --git a/examples/kafka/asyncapi-3/api.yaml b/examples/kafka/asyncapi-3/api.yaml new file mode 100644 index 000000000..317a6cff2 --- /dev/null +++ b/examples/kafka/asyncapi-3/api.yaml @@ -0,0 +1,14 @@ +asyncapi: 3.0.0 +info: + title: User Service + version: 1.0.0 +channels: + userSignedUp: + address: 'users.signedup' + messages: + userEvent: + payload: + type: object + properties: + id: { type: string, format: uuid } + email: { type: string, format: email } \ No newline at end of file diff --git a/examples/kafka/asyncapi-3/consumer.js b/examples/kafka/asyncapi-3/consumer.js new file mode 100644 index 000000000..9c39c15ae --- /dev/null +++ b/examples/kafka/asyncapi-3/consumer.js @@ -0,0 +1,17 @@ +import { Kafka } from 'kafkajs'; + +const consumerClient = new Kafka({ + clientId: 'consumer-1', + brokers: ['localhost:9092'] +}); + +const consumer = consumerClient.consumer({ groupId: 'group-1' }); +await consumer.connect(); +await consumer.subscribe({ topic: 'users.signedup', fromBeginning: true }); + +await consumer.run({ + eachMessage: async ({ message }) => { + const 
value = JSON.parse(message.value.toString()); + console.log('Received command:', value); + } +}); \ No newline at end of file diff --git a/examples/kafka/asyncapi-3/package-lock.json b/examples/kafka/asyncapi-3/package-lock.json new file mode 100644 index 000000000..1b131b733 --- /dev/null +++ b/examples/kafka/asyncapi-3/package-lock.json @@ -0,0 +1,21 @@ +{ + "name": "asyncapi-3", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "kafkajs": "^2.2.4" + } + }, + "node_modules/kafkajs": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/kafkajs/-/kafkajs-2.2.4.tgz", + "integrity": "sha512-j/YeapB1vfPT2iOIUn/vxdyKEuhuY2PxMBvf5JWux6iSaukAccrMtXEY/Lb7OvavDhOWME589bpLrEdnVHjfjA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + } + } +} diff --git a/examples/kafka/asyncapi-3/package.json b/examples/kafka/asyncapi-3/package.json new file mode 100644 index 000000000..6579aae94 --- /dev/null +++ b/examples/kafka/asyncapi-3/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "kafkajs": "^2.2.4" + } +} diff --git a/examples/kafka/asyncapi-3/producer.js b/examples/kafka/asyncapi-3/producer.js new file mode 100644 index 000000000..ad7f455a7 --- /dev/null +++ b/examples/kafka/asyncapi-3/producer.js @@ -0,0 +1,5 @@ +import { produce } from 'mokapi/kafka'; + +export default function() { + produce({ topic: 'users.signedup' }); +} \ No newline at end of file diff --git a/examples/kafka/asyncapi-3/readme.md b/examples/kafka/asyncapi-3/readme.md new file mode 100644 index 000000000..af73cdb05 --- /dev/null +++ b/examples/kafka/asyncapi-3/readme.md @@ -0,0 +1,24 @@ +# AsyncAPI 3.0 – Mokapi Example + +This example demonstrates how to mock a Kafka topic using an **AsyncAPI 3.0** specification and **Mokapi**, +and how to produce and consume messages locally. + +## Run the example + +### 1. Install dependencies + +```bash +npm install +``` + +### 2. Start Mokapi with the AsyncAPI spec and producer script + +```bash +mokapi ./api.yaml ./producer.js & +``` + +### 3. Run the consumer + +```bash +node consumer.js +``` \ No newline at end of file diff --git a/examples/mokapi/kafka.js b/examples/mokapi/kafka.js index 61d7f7070..f1f48a525 100644 --- a/examples/mokapi/kafka.js +++ b/examples/mokapi/kafka.js @@ -93,7 +93,11 @@ export let clusters = [ name: 'Broker', host: 'localhost:9092', tags: [{name: 'env:test', description: 'This environment is for running internal tests'}], - description: 'Dashwood contempt on mr unlocked resolved provided of of. Stanhill wondered it it welcomed oh. Hundred no prudent he however smiling at an offence.
If earnestly extremity he he propriety something admitting convinced ye.', + configs: { + 'log.retention.bytes': 1024, + 'log.retention': 5000, + } } ], topics: [ @@ -261,6 +265,7 @@ export let events = [ traits: { namespace: 'kafka', name: 'Kafka World', + type: 'message', topic: 'mokapi.shop.products' }, time: '2023-02-13T09:49:25.482366+01:00', @@ -296,6 +301,7 @@ export let events = [ traits: { namespace: 'kafka', name: 'Kafka World', + type: 'message', topic: 'mokapi.shop.products' }, time: '2023-02-13T09:49:25.482366+01:00', @@ -331,6 +337,7 @@ export let events = [ traits: { namespace: 'kafka', name: 'Kafka World', + type: 'message', topic: 'mokapi.shop.avro' }, time: '2025-02-13T09:49:25.482366+01:00', diff --git a/examples/mokapi/kafka.yml b/examples/mokapi/kafka.yml index 274983c4a..ecd4d7ce9 100644 --- a/examples/mokapi/kafka.yml +++ b/examples/mokapi/kafka.yml @@ -312,6 +312,11 @@ components: type: string description: type: string + tags: + $ref: '#/components/schemas/Tags' + configs: + additionalProperties: + type: ['number', 'string'] topics: type: array items: @@ -339,6 +344,8 @@ components: $ref: '#/components/schemas/MessageConfig' bindings: $ref: '#/components/schemas/TopicBindings' + tags: + $ref: '#/components/schemas/Tags' Partition: type: object properties: @@ -357,6 +364,8 @@ components: properties: name: type: string + generation: + type: integer members: type: array items: @@ -461,3 +470,12 @@ components: type: integer error: type: string + Tags: + type: array + items: + type: object + properties: + name: + type: string + description: + type: string diff --git a/examples/mokapi/metrics.js b/examples/mokapi/metrics.js index 312b3cb70..6959147dc 100644 --- a/examples/mokapi/metrics.js +++ b/examples/mokapi/metrics.js @@ -15,27 +15,27 @@ export let metrics = [ value: 1 }, { - name: 'http_requests_total{service="Swagger Petstore",endpoint="/pet"}"', + name: 'http_requests_total{service="Swagger Petstore",endpoint="/pet",method="POST"}"', value: 2 }, { - name: 'http_requests_errors_total{service="Swagger Petstore",endpoint="/pet"}"', + name: 'http_requests_errors_total{service="Swagger Petstore",endpoint="/pet",method="POST"}"', value: 1 }, { - name: 'http_requests_total{service="Swagger Petstore",endpoint="/pet/findByStatus"}"', + name: 'http_requests_total{service="Swagger Petstore",endpoint="/pet/findByStatus",method="GET"}"', value: 2 }, { - name: 'http_requests_errors_total{service="Swagger Petstore",endpoint="/pet/findByStatus"}"', + name: 'http_requests_errors_total{service="Swagger Petstore",endpoint="/pet/findByStatus",method="GET"}"', value: 0 }, { - name: 'http_request_timestamp{service="Swagger Petstore",endpoint="/pet"}"', + name: 'http_request_timestamp{service="Swagger Petstore",endpoint="/pet",method="POST"}"', value: 1652235690 }, { - name: 'http_request_timestamp{service="Swagger Petstore",endpoint="/pet/findByStatus"}"', + name: 'http_request_timestamp{service="Swagger Petstore",endpoint="/pet/findByStatus",method="GET"}"', value: 1652237690 }, { diff --git a/go.mod b/go.mod index 5dd15dfd5..e57cad3d8 100644 --- a/go.mod +++ b/go.mod @@ -5,7 +5,7 @@ go 1.25.5 require ( github.com/Masterminds/sprig v2.22.0+incompatible github.com/blevesearch/bleve/v2 v2.5.7 - github.com/blevesearch/bleve_index_api v1.3.0 + github.com/blevesearch/bleve_index_api v1.3.1 github.com/bradleyfalzon/ghinstallation/v2 v2.17.0 github.com/brianvoe/gofakeit/v6 v6.28.0 github.com/dop251/goja v0.0.0-20250309171923-bcd7cc6bf64c diff --git a/go.sum b/go.sum index 
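The dashboard metrics above now carry a `method` label in addition to `service` and `endpoint`, so GET and POST traffic to the same path become distinct series. A hedged sketch of how such series identifiers are composed (a hypothetical helper for illustration, not Mokapi's metrics API):

```go
package main

import (
	"fmt"
	"sort"
)

// metricName renders a Prometheus-style series identifier.
func metricName(name string, labels map[string]string) string {
	keys := make([]string, 0, len(labels))
	for k := range labels {
		keys = append(keys, k)
	}
	sort.Strings(keys) // deterministic label order
	s := name + "{"
	for i, k := range keys {
		if i > 0 {
			s += ","
		}
		s += fmt.Sprintf("%s=%q", k, labels[k])
	}
	return s + "}"
}

func main() {
	// Without the method label, GET and POST on /pet would collapse into one series.
	fmt.Println(metricName("http_requests_total", map[string]string{
		"service": "Swagger Petstore", "endpoint": "/pet", "method": "POST",
	}))
}
```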
53c7de3ab..7ecb3f6ff 100644 --- a/go.sum +++ b/go.sum @@ -24,8 +24,8 @@ github.com/bits-and-blooms/bitset v1.22.0 h1:Tquv9S8+SGaS3EhyA+up3FXzmkhxPGjQQCk github.com/bits-and-blooms/bitset v1.22.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= github.com/blevesearch/bleve/v2 v2.5.7 h1:2d9YrL5zrX5EBBW++GOaEKjE+NPWeZGaX77IM26m1Z8= github.com/blevesearch/bleve/v2 v2.5.7/go.mod h1:yj0NlS7ocGC4VOSAedqDDMktdh2935v2CSWOCDMHdSA= -github.com/blevesearch/bleve_index_api v1.3.0 h1:DsMpWVjFNlBw9/6pyWf59XoqcAkhHj3H0UWiQsavb6E= -github.com/blevesearch/bleve_index_api v1.3.0/go.mod h1:xvd48t5XMeeioWQ5/jZvgLrV98flT2rdvEJ3l/ki4Ko= +github.com/blevesearch/bleve_index_api v1.3.1 h1:LdH3CQgBbIZ5UI/5Pykz87e0jfeQtVnrdZ2WUBrHHwU= +github.com/blevesearch/bleve_index_api v1.3.1/go.mod h1:xvd48t5XMeeioWQ5/jZvgLrV98flT2rdvEJ3l/ki4Ko= github.com/blevesearch/geo v0.2.4 h1:ECIGQhw+QALCZaDcogRTNSJYQXRtC8/m8IKiA706cqk= github.com/blevesearch/geo v0.2.4/go.mod h1:K56Q33AzXt2YExVHGObtmRSFYZKYGv0JEN5mdacJJR8= github.com/blevesearch/go-faiss v1.0.26 h1:4dRLolFgjPyjkaXwff4NfbZFdE/dfywbzDqporeQvXI= diff --git a/imap/idle_test.go b/imap/idle_test.go index f527b2a0b..5e0376806 100644 --- a/imap/idle_test.go +++ b/imap/idle_test.go @@ -178,7 +178,7 @@ func TestIdle(t *testing.T) { require.NoError(t, err) require.Equal(t, "+ idling", res) - time.Sleep(500 * time.Millisecond) + time.Sleep(2 * time.Second) res, err = c.ReadLine() require.NoError(t, err) diff --git a/js/kafka/kafka.go b/js/kafka/kafka.go index 33ce982fd..ac4b587ef 100644 --- a/js/kafka/kafka.go +++ b/js/kafka/kafka.go @@ -2,6 +2,7 @@ package kafka import ( "fmt" + "mokapi/config/dynamic" "mokapi/engine/common" "mokapi/js/eventloop" "mokapi/js/util" @@ -92,12 +93,17 @@ func (m *Module) ProduceAsync(v goja.Value) interface{} { } func (m *Module) mapParams(args goja.Value) (*common.KafkaProduceArgs, error) { - opt := &common.KafkaProduceArgs{Retry: common.KafkaProduceRetry{ - MaxRetryTime: 3 * time.Minute, - InitialRetryTime: 500 * time.Millisecond, - Retries: 10, - Factor: 2, - }} + file := getFile(m.rt) + opt := &common.KafkaProduceArgs{ + ClientId: "mokapi-script", + ScriptFile: file.Info.Key(), + Retry: common.KafkaProduceRetry{ + MaxRetryTime: 3 * time.Minute, + InitialRetryTime: 500 * time.Millisecond, + Retries: 10, + Factor: 2, + }, + } if args != nil && !goja.IsUndefined(args) && !goja.IsNull(args) { params := args.ToObject(m.rt) @@ -265,3 +271,7 @@ func (m *Module) mapParams(args goja.Value) (*common.KafkaProduceArgs, error) { func (m *Module) warnDeprecatedAttribute(name string) { m.host.Warn(fmt.Sprintf("DEPRECATED: '%v' should not be used anymore: check https://mokapi.io/docs/javascript-api/mokapi-kafka/produceargs for more info in %v", name, m.host.Name())) } + +func getFile(vm *goja.Runtime) *dynamic.Config { + return vm.Get("mokapi/internal").(*goja.Object).Get("file").Export().(*dynamic.Config) +} diff --git a/js/kafka/kafka_test.go b/js/kafka/kafka_test.go index 12c6b698f..44245d685 100644 --- a/js/kafka/kafka_test.go +++ b/js/kafka/kafka_test.go @@ -1,9 +1,8 @@ package kafka_test import ( - "github.com/dop251/goja" - r "github.com/stretchr/testify/require" "mokapi/config/dynamic" + "mokapi/config/dynamic/dynamictest" "mokapi/engine/common" "mokapi/engine/enginetest" "mokapi/js" @@ -11,6 +10,9 @@ import ( "mokapi/js/kafka" "mokapi/js/require" "testing" + + "github.com/dop251/goja" + r "github.com/stretchr/testify/require" ) func TestKafka(t *testing.T) { @@ -22,6 +24,8 @@ func TestKafka(t *testing.T) { name: "produce no parameter", test: func(t 
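`getFile` above works because the engine registers a shared `mokapi/internal` object on every goja runtime before any script runs. A standalone sketch of that stash-and-retrieve pattern (with a simplified stand-in for `*dynamic.Config`):

```go
package main

import (
	"fmt"

	"github.com/dop251/goja"
)

// fileInfo stands in for Mokapi's *dynamic.Config; illustration only.
type fileInfo struct{ Key string }

func main() {
	vm := goja.New()

	// What EnableInternal does: stash engine-owned values under a single
	// well-known global that native modules can read back later.
	o := vm.NewObject()
	_ = o.Set("file", &fileInfo{Key: "test.js"})
	if err := vm.Set("mokapi/internal", o); err != nil {
		panic(err)
	}

	// What getFile does: the reverse lookup plus type assertions. This
	// panics if EnableInternal was never called, which is why the updated
	// tests seed the runtime with a config info first.
	file := vm.Get("mokapi/internal").(*goja.Object).Get("file").Export().(*fileInfo)
	fmt.Println(file.Key) // test.js
}
```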
*testing.T, vm *goja.Runtime, host *enginetest.Host) { host.KafkaClientTest = &enginetest.KafkaClient{ProduceFunc: func(args *common.KafkaProduceArgs) (*common.KafkaProduceResult, error) { + r.Equal(t, "mokapi-script", args.ClientId) + r.Equal(t, "64613435-3062-6462-3033-316532633233", args.ScriptFile) return &common.KafkaProduceResult{}, nil }} @@ -81,7 +85,7 @@ func TestKafka(t *testing.T) { t.Run(tc.name, func(t *testing.T) { vm := goja.New() host := &enginetest.Host{} - js.EnableInternal(vm, host, &eventloop.EventLoop{}, &dynamic.Config{}) + js.EnableInternal(vm, host, &eventloop.EventLoop{}, &dynamic.Config{Info: dynamictest.NewConfigInfo()}) req, err := require.NewRegistry() r.NoError(t, err) req.Enable(vm) diff --git a/js/script.go b/js/script.go index a26504457..77be59fdd 100644 --- a/js/script.go +++ b/js/script.go @@ -64,6 +64,9 @@ func NewScript(opts ...Option) (*Script, error) { func (s *Script) Run() error { if err := s.ensureRuntime(); err != nil { + if isClosingError(err) { + return nil + } return err } @@ -76,6 +79,9 @@ func (s *Script) Run() error { ) return nil } + if isClosingError(err) { + return nil + } return err } @@ -178,7 +184,10 @@ func EnableInternal(vm *goja.Runtime, host engine.Host, loop *eventloop.EventLoo _ = o.Set("host", host) _ = o.Set("loop", loop) _ = o.Set("file", file) - _ = vm.Set("mokapi/internal", o) + err := vm.Set("mokapi/internal", o) + if err != nil { + log.Errorf("js: internal error: %s", err) + } } func (s *Script) processObject(v goja.Value) { @@ -261,3 +270,13 @@ func RegisterNativeModules(registry *require.Registry) { registry.RegisterNativeModule("mokapi/ldap", ldap.Require) registry.RegisterNativeModule("mokapi/encoding", encoding.Require) } + +func isClosingError(err error) bool { + var ie *goja.InterruptedError + if errors.As(err, &ie) { + if strings.HasSuffix(ie.String(), "closing") { + return true + } + } + return false +} diff --git a/kafka/client_context.go b/kafka/client_context.go index 2bd0f95ab..98322dc16 100644 --- a/kafka/client_context.go +++ b/kafka/client_context.go @@ -28,6 +28,7 @@ type ClientContext struct { Member map[string]string Close func() AllowAutoTopicCreation bool + ServerAddress string } func (c *ClientContext) AddGroup(groupName, memberId string) { @@ -50,10 +51,10 @@ func (c *ClientContext) GetOrCreateMemberId(groupName string) string { return memberId } -func ClientFromContext(req *Request) *ClientContext { - return req.Context.Value(clientKey).(*ClientContext) +func ClientFromContext(ctx context.Context) *ClientContext { + return ctx.Value(clientKey).(*ClientContext) } -func NewClientContext(ctx context.Context, addr string) context.Context { - return context.WithValue(ctx, clientKey, &ClientContext{Addr: addr, AllowAutoTopicCreation: true, Heartbeat: time.Now()}) +func NewClientContext(ctx context.Context, addr, serverAddress string) context.Context { + return context.WithValue(ctx, clientKey, &ClientContext{Addr: addr, ServerAddress: serverAddress, AllowAutoTopicCreation: true, Heartbeat: time.Now()}) } diff --git a/kafka/kafkatest/kafkatest.go b/kafka/kafkatest/kafkatest.go index cd8e4ade1..63ee1d5d6 100644 --- a/kafka/kafkatest/kafkatest.go +++ b/kafka/kafkatest/kafkatest.go @@ -23,15 +23,16 @@ import ( func NewRequest(clientId string, version int16, msg kafka.Message) *kafka.Request { r := &kafka.Request{ + Host: "127.0.0.1:9092", Header: &kafka.Header{ ApiKey: getApiKey(msg), ApiVersion: version, ClientId: clientId, }, Message: msg, - Context: kafka.NewClientContext(context.Background(), 
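The `ClientContext` changes above replace the `*kafka.Request` accessor with one that takes a plain `context.Context`, and record the server's local address alongside the client's. The underlying idiom is an unexported context key; a condensed sketch with only the two address fields relevant here (the real helper asserts the value unconditionally, this one uses a comma-ok assertion):

```go
package main

import (
	"context"
	"fmt"
)

type clientCtxKey struct{}

// ClientContext carries per-connection state; trimmed for illustration.
type ClientContext struct {
	Addr          string // remote address of the client
	ServerAddress string // local address the client connected to
}

// NewClientContext mirrors the new signature: both addresses are captured
// when the connection is tracked.
func NewClientContext(ctx context.Context, addr, serverAddress string) context.Context {
	return context.WithValue(ctx, clientCtxKey{}, &ClientContext{Addr: addr, ServerAddress: serverAddress})
}

// ClientFromContext can now be called anywhere a context flows, not only
// where a *kafka.Request wrapper is available.
func ClientFromContext(ctx context.Context) *ClientContext {
	c, _ := ctx.Value(clientCtxKey{}).(*ClientContext)
	return c
}

func main() {
	ctx := NewClientContext(context.Background(), "127.0.0.1:42424", "127.0.0.1:9092")
	fmt.Println(ClientFromContext(ctx).ServerAddress) // 127.0.0.1:9092
}
```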
"127.0.0.1:42424"), + Context: kafka.NewClientContext(context.Background(), "127.0.0.1:42424", "127.0.0.1:9092"), } - ctx := kafka.ClientFromContext(r) + ctx := kafka.ClientFromContext(r.Context) ctx.ClientId = clientId return r } @@ -50,7 +51,7 @@ func getApiKey(msg kafka.Message) kafka.ApiKey { case *fetch.Request, *fetch.Response: return kafka.Fetch case *offset.Request, *offset.Response: - return kafka.Offset + return kafka.ListOffsets case *metaData.Request, *metaData.Response: return kafka.Metadata case *offsetCommit.Request, *offsetCommit.Response: diff --git a/kafka/offset/offsets.go b/kafka/offset/offsets.go index d3c0ec7a9..aa2d72f37 100644 --- a/kafka/offset/offsets.go +++ b/kafka/offset/offsets.go @@ -7,7 +7,7 @@ import ( func init() { kafka.Register( kafka.ApiReg{ - ApiKey: kafka.Offset, + ApiKey: kafka.ListOffsets, MinVersion: 0, MaxVersion: 8}, &Request{}, diff --git a/kafka/offset/offsets_test.go b/kafka/offset/offsets_test.go index 5b608ed73..86e9c3c95 100644 --- a/kafka/offset/offsets_test.go +++ b/kafka/offset/offsets_test.go @@ -12,7 +12,7 @@ import ( ) func TestInit(t *testing.T) { - reg := kafka.ApiTypes[kafka.Offset] + reg := kafka.ApiTypes[kafka.ListOffsets] require.Equal(t, int16(0), reg.MinVersion) require.Equal(t, int16(8), reg.MaxVersion) } @@ -70,13 +70,13 @@ func TestRequest(t *testing.T) { }) expected := new(bytes.Buffer) // header - _ = binary.Write(expected, binary.BigEndian, int32(43)) // length - _ = binary.Write(expected, binary.BigEndian, int16(kafka.Offset)) // ApiKey - _ = binary.Write(expected, binary.BigEndian, int16(8)) // ApiVersion - _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId - _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length - _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId - _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + _ = binary.Write(expected, binary.BigEndian, int32(43)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.ListOffsets)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(8)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer // message _ = binary.Write(expected, binary.BigEndian, int32(1)) // ReplicaId _ = binary.Write(expected, binary.BigEndian, int8(0)) // IsolationLevel diff --git a/kafka/protocol.go b/kafka/protocol.go index 2f5453920..440378762 100644 --- a/kafka/protocol.go +++ b/kafka/protocol.go @@ -13,7 +13,7 @@ type ApiKey int16 const ( Produce ApiKey = 0 Fetch ApiKey = 1 - Offset ApiKey = 2 + ListOffsets ApiKey = 2 Metadata ApiKey = 3 OffsetCommit ApiKey = 8 OffsetFetch ApiKey = 9 @@ -30,7 +30,7 @@ const ( var apitext = map[ApiKey]string{ Produce: "Produce", Fetch: "Fetch", - Offset: "Offset", + ListOffsets: "ListOffsets", Metadata: "Metadata", OffsetCommit: "OffsetCommit", OffsetFetch: "OffsetFetch", diff --git a/kafka/response_test.go b/kafka/response_test.go index e8384d7cc..64e690d08 100644 --- a/kafka/response_test.go +++ b/kafka/response_test.go @@ -3,10 +3,11 @@ package kafka_test import ( "bufio" "bytes" - "github.com/stretchr/testify/require" "mokapi/kafka" "mokapi/kafka/offset" "testing" + + "github.com/stretchr/testify/require" ) func TestResponse_Write(t *testing.T) { @@ -28,7 +29,7 @@ func TestResponse_Write(t 
*testing.T) { res := &kafka.Response{ Header: &kafka.Header{ - ApiKey: kafka.Offset, + ApiKey: kafka.ListOffsets, ApiVersion: int16(0), CorrelationId: int32(0), }, diff --git a/kafka/server.go b/kafka/server.go index 960c47a69..da575b55a 100644 --- a/kafka/server.go +++ b/kafka/server.go @@ -159,6 +159,10 @@ func (s *Server) closeConn(conn net.Conn) { } ctx.Done() _ = conn.Close() + client := ClientFromContext(ctx) + if client != nil && client.Close != nil { + client.Close() + } delete(s.activeConn, conn) } @@ -169,7 +173,7 @@ func (s *Server) trackConn(conn net.Conn) context.Context { if s.activeConn == nil { s.activeConn = make(map[net.Conn]context.Context) } - ctx := NewClientContext(context.Background(), conn.RemoteAddr().String()) + ctx := NewClientContext(context.Background(), conn.RemoteAddr().String(), conn.LocalAddr().String()) s.activeConn[conn] = ctx return ctx @@ -183,7 +187,7 @@ func (s *Server) getCloseChan() chan bool { } func (s *Server) handleMessage(rw ResponseWriter, req *Request) { - client := ClientFromContext(req) + client := ClientFromContext(req.Context) client.Heartbeat = time.Now() client.ClientId = req.Header.ClientId diff --git a/pkg/cli/bind.go b/pkg/cli/bind.go index 98e8fc802..95b322181 100644 --- a/pkg/cli/bind.go +++ b/pkg/cli/bind.go @@ -23,6 +23,10 @@ type bindContext struct { func (f *flagConfigBinder) Decode(flags *FlagSet, element interface{}) error { return flags.Visit(func(flag *Flag) error { + if !flag.Value.IsSet() { + return nil + } + paths := ParsePath(flag.Name) v := flag.Value.Value() ctx := &bindContext{path: flag.Name, paths: paths, value: v, element: reflect.ValueOf(element)} @@ -179,40 +183,53 @@ func (f *flagConfigBinder) setArray(ctx *bindContext) error { } return f.setValue(ctx.Next(ctx.element.Index(index))) - } else { - var values []string - if arr, ok := ctx.value.([]string); ok { - if arr == nil { - return nil - } - values = arr - } else if s, ok := ctx.value.(string); ok { - values = []string{s} + } + + var values []string + if arr, ok := ctx.value.([]string); ok { + if arr == nil { + return nil } + values = arr + } else if s, ok := ctx.value.(string); ok { + values = []string{s} + } - if len(values) == 1 { - values = splitArrayItems(values[0]) + if len(values) == 1 { + values = splitArrayItems(values[0]) + } + + arr := ctx.element.Interface() + _ = arr + + for index, v := range values { + var ptr reflect.Value + if index < ctx.element.Len() { + item := ctx.element.Index(index) + ptr = reflect.New(ctx.element.Type().Elem()) + ptr.Elem().Set(item) + } else { + ptr = reflect.New(ctx.element.Type().Elem()) } - if len(values) > 0 { - // reset slice; remove default values - ctx.element.Set(reflect.MakeSlice(ctx.element.Type(), 0, len(values))) + ctxItem := &bindContext{ + paths: ctx.paths, + element: ptr, + value: v, + } + if err := f.setValue(ctxItem); err != nil { + return err } - for _, v := range values { - ptr := reflect.New(ctx.element.Type().Elem()) - ctxItem := &bindContext{ - paths: ctx.paths, - element: ptr, - value: v, - } - if err := f.setValue(ctxItem); err != nil { - return err - } + if index < ctx.element.Len() { + ctx.element.Index(index).Set(ptr.Elem()) + } else { ctx.element.Set(reflect.Append(ctx.element, ptr.Elem())) } } + arr = ctx.element.Interface() + return nil } @@ -429,7 +446,16 @@ func (f *flagConfigBinder) setJson(element reflect.Value, i interface{}) error { i = int64(o) element.Set(reflect.ValueOf(i)) case int64, string, bool: - element.Set(reflect.ValueOf(i)) + v := reflect.ValueOf(i) + t := element.Type() 
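The new `flag.Value.IsSet()` guard at the top of `Decode` means flags that were never explicitly set no longer overwrite values that arrived from the config file, which is also why the config file is no longer mapped back onto flags. A toy illustration of the effect (simplified types; the real binder resolves targets via reflection):

```go
package main

import "fmt"

// Toy stand-in for a flag with set-tracking.
type flagValue struct {
	v     string
	isSet bool
}

func (f *flagValue) Set(v string) { f.v = v; f.isSet = true }

func main() {
	// Values already decoded from a config file.
	cfg := map[string]string{"log-level": "info", "port": "8080"}

	flags := map[string]*flagValue{
		"log-level": {v: "warn"}, // registered default, never set by the user
		"port":      {},          // explicitly set below
	}
	flags["port"].Set("9090")

	// Before this change the default "warn" would have clobbered the file's
	// "info"; skipping unset flags keeps file values intact.
	for name, f := range flags {
		if !f.isSet {
			continue
		}
		cfg[name] = f.v
	}
	fmt.Println(cfg["log-level"], cfg["port"]) // info 9090
}
```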
+ if v.Type().AssignableTo(t) { + element.Set(v) + return nil + } else if v.Type().ConvertibleTo(t) { + element.Set(v.Convert(t)) + return nil + } + return fmt.Errorf("value %v cannot be set", i) case []interface{}: // reset array element.Set(reflect.MakeSlice(element.Type(), 0, len(o))) @@ -443,6 +469,13 @@ func (f *flagConfigBinder) setJson(element reflect.Value, i interface{}) error { } case map[string]interface{}: for k, v := range o { + if element.Kind() == reflect.Ptr { + if element.IsNil() { + // allocate a new struct of the pointer's element type + element.Set(reflect.New(element.Type().Elem())) + } + element = element.Elem() + } field := element.FieldByNameFunc(func(f string) bool { return strings.ToLower(f) == strings.ToLower(k) }) if field.IsValid() { err := f.setJson(field, v) diff --git a/pkg/cli/command.go b/pkg/cli/command.go index f0b3b74a4..1f39c60d0 100644 --- a/pkg/cli/command.go +++ b/pkg/cli/command.go @@ -72,9 +72,7 @@ func (c *Command) Execute() error { if err != nil { return err } - } - if cmd.Config != nil { b := flagConfigBinder{} err = b.Decode(cmd.Flags(), cmd.Config) if err != nil { @@ -84,9 +82,8 @@ if cmd.Run != nil { return cmd.Run(cmd, positional) - } else { - return fmt.Errorf("no command run specified") } + return fmt.Errorf("no command run specified") } func (c *Command) ExecuteWithContext(ctx context.Context) error { diff --git a/pkg/cli/command_test.go b/pkg/cli/command_test.go index 3ff506d44..71395eec6 100644 --- a/pkg/cli/command_test.go +++ b/pkg/cli/command_test.go @@ -105,7 +105,7 @@ func TestCommand(t *testing.T) { { name: "--skip-prefix default", cmd: func() *cli.Command { - c := &cli.Command{Config: &config{}} + c := &cli.Command{Config: &config{SkipPrefix: []string{"_"}}} c.Flags().StringSlice("skip-prefix", []string{"_"}, false, cli.FlagDoc{}) return c }, diff --git a/pkg/cli/dynamic.go b/pkg/cli/dynamic.go index 7d5f887ea..4049bfe92 100644 --- a/pkg/cli/dynamic.go +++ b/pkg/cli/dynamic.go @@ -81,7 +81,8 @@ func (fs *FlagSet) DynamicStringSlice(name string, explode bool, doc FlagDoc) *F } func convertToPattern(s string) *regexp.Regexp { - pattern := regexIndex.ReplaceAllString(s, "\\[[0-9]+]") + // index is either [0] (new style) or -0- (old style; env vars use _0_, which the parser normalizes to dashes). 
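The scalar branch above distinguishes assignability from convertibility: YAML and JSON decoders hand numbers over as `int64`, which cannot be assigned to an `int` field directly but can be converted. A self-contained sketch of that logic:

```go
package main

import (
	"fmt"
	"reflect"
)

// setScalar mirrors the new setJson behaviour for scalar values: assign when
// possible, convert when the types differ but are compatible, and fail
// loudly otherwise instead of panicking inside reflect.
func setScalar(element reflect.Value, i interface{}) error {
	v := reflect.ValueOf(i)
	t := element.Type()
	if v.Type().AssignableTo(t) {
		element.Set(v)
		return nil
	}
	if v.Type().ConvertibleTo(t) {
		element.Set(v.Convert(t))
		return nil
	}
	return fmt.Errorf("value %v cannot be set", i)
}

func main() {
	var port int
	// int64 is not assignable to int, but it is convertible.
	if err := setScalar(reflect.ValueOf(&port).Elem(), int64(8080)); err != nil {
		panic(err)
	}
	fmt.Println(port) // 8080
}
```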
+ pattern := regexIndex.ReplaceAllString(s, "(\\[[0-9]+])|(-[0-9]+-?)") pattern = regexKey.ReplaceAllString(pattern, "[a-zA-Z]+") regex, err := regexp.Compile(fmt.Sprintf("^%s$", pattern)) if err != nil { diff --git a/pkg/cli/dynamic_test.go b/pkg/cli/dynamic_test.go index 874c27960..b2ab7071f 100644 --- a/pkg/cli/dynamic_test.go +++ b/pkg/cli/dynamic_test.go @@ -98,6 +98,19 @@ func TestDynamic(t *testing.T) { require.Equal(t, []int{12}, s.Foo) }, }, + { + name: "old style", + test: func(t *testing.T) { + s := &struct { + Foo []int + }{} + c := newCmd([]string{"--foo-0", "12"}, &s) + c.Flags().DynamicInt("foo[]", cli.FlagDoc{}) + err := c.Execute() + require.NoError(t, err) + require.Equal(t, []int{12}, s.Foo) + }, + }, } for _, tc := range testcases { diff --git a/pkg/cli/file.go b/pkg/cli/file.go index 3c2669a90..5f2be3e5b 100644 --- a/pkg/cli/file.go +++ b/pkg/cli/file.go @@ -57,7 +57,7 @@ func (c *Command) readConfigFile() error { return fmt.Errorf("read config file '%s' failed: %w", file, err) } - return mapConfigToFlags(c.Config, c.flags) + return nil } func (c *Command) findConfigFile() string { @@ -97,86 +97,6 @@ func readConfigFile(path string, config any) error { return nil } -func mapConfigToFlags(config any, flags *FlagSet) error { - return mapValueToFlags(reflect.ValueOf(config), "", flags) -} - -func mapValueToFlags(v reflect.Value, key string, flags *FlagSet) error { - switch v.Kind() { - case reflect.Ptr: - return mapValueToFlags(v.Elem(), key, flags) - case reflect.Struct: - t := v.Type() - for i := 0; i < v.NumField(); i++ { - field := t.Field(i) - if !field.IsExported() { - continue - } - - name := strings.ToLower(field.Name) - tag := field.Tag.Get("name") - if tag != "" { - name = strings.Split(tag, ",")[0] - } else { - tag = field.Tag.Get("flag") - if tag != "" { - name = strings.Split(tag, ",")[0] - } - } - if name == "-" { - continue - } - if key != "" { - name = key + "-" + name - } - - err := mapValueToFlags(v.Field(i), name, flags) - if err != nil { - return err - } - } - return nil - case reflect.Slice: - if _, ok := flags.GetValue(key); ok { - var values []string - for i := 0; i < v.Len(); i++ { - values = append(values, fmt.Sprintf("%v", v.Index(i))) - } - return flags.setValue(key, values, SourceFile) - } - for i := 0; i < v.Len(); i++ { - err := mapValueToFlags(v.Index(i), fmt.Sprintf("%s[%v]", key, i), flags) - if err != nil { - return err - } - } - return nil - case reflect.Map: - for _, k := range v.MapKeys() { - err := mapValueToFlags(v.MapIndex(k), fmt.Sprintf("%s-%v", key, k.Interface()), flags) - if err != nil { - return err - } - } - - return nil - default: - if canBeNil(v) && v.IsNil() { - return nil - } - return flags.setValue(key, []string{fmt.Sprintf("%v", v.Interface())}, SourceFile) - } -} - -func canBeNil(v reflect.Value) bool { - switch v.Kind() { - case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return true - default: - return false - } -} - func unmarshalYaml(b []byte, config any) error { m := map[string]interface{}{} err := yaml.Unmarshal(b, m) diff --git a/pkg/cli/flag_file.go b/pkg/cli/flag_file.go index de0f9b2b6..a7c733863 100644 --- a/pkg/cli/flag_file.go +++ b/pkg/cli/flag_file.go @@ -18,6 +18,7 @@ func (f *fileFlag) Set(values []string, source Source) error { f.value = values[0] f.setConfigFile(f.value) f.source = source + f.isSet = true } return nil } diff --git a/pkg/cli/flag_file_test.go b/pkg/cli/flag_file_test.go index 45f32537e..e308cf7a8 100644 --- a/pkg/cli/flag_file_test.go +++ b/pkg/cli/flag_file_test.go 
@@ -63,18 +63,6 @@ func TestFileDecoder_Decode(t *testing.T) { require.NoError(t, err) }, }, - { - name: "yaml schema error", - test: func(t *testing.T) { - s := &struct{ Count string }{} - cli.SetFileReader(&clitest.TestFileReader{Files: map[string][]byte{"/etc/foo.yaml": []byte("count: foo")}}) - c := newCmd([]string{}, &s) - c.SetConfigPath("/etc") - c.Flags().Int("count", 0, cli.FlagDoc{}) - err := c.Execute() - require.EqualError(t, err, "failed to set flag count: parsing foo: invalid syntax") - }, - }, { name: "use file flag", test: func(t *testing.T) { diff --git a/pkg/cli/flags.go b/pkg/cli/flags.go index 702bc7ea6..20c246aae 100644 --- a/pkg/cli/flags.go +++ b/pkg/cli/flags.go @@ -23,6 +23,7 @@ type FlagSet struct { dynamic []*DynamicFlag orderedFlags map[string]int setConfigFile func(string) + orderFlag int } type Flag struct { @@ -69,7 +70,8 @@ func (fs *FlagSet) setFlag(f *Flag) { if f.Shorthand != "" { fs.flags[f.Shorthand] = f } - fs.orderedFlags[f.Name] = len(fs.flags) + fs.orderedFlags[f.Name] = fs.orderFlag + fs.orderFlag++ } func (fs *FlagSet) setValue(name string, value []string, source Source) error { @@ -82,7 +84,8 @@ func (fs *FlagSet) setValue(name string, value []string, source Source) error { if err != nil { return fmt.Errorf("failed to set flag %s: %w", name, err) } - fs.orderedFlags[name] = len(fs.orderedFlags) + fs.orderedFlags[name] = fs.orderFlag + fs.orderFlag++ return nil } } @@ -92,7 +95,8 @@ func (fs *FlagSet) setValue(name string, value []string, source Source) error { if err != nil { return fmt.Errorf("failed to set flag %s: %w", name, err) } - fs.orderedFlags[name] = len(fs.orderedFlags) + fs.orderedFlags[name] = fs.orderFlag + fs.orderFlag++ return nil } } diff --git a/pkg/cli/parse.go b/pkg/cli/parse.go index 491299003..1a1e779f3 100644 --- a/pkg/cli/parse.go +++ b/pkg/cli/parse.go @@ -11,8 +11,9 @@ func parseFlags(args []string, envNamePrefix string, flags *FlagSet) ([]string, if envNamePrefix != "" { for _, s := range os.Environ() { kv := strings.SplitN(s, "=", 2) - if strings.HasPrefix(strings.ToUpper(kv[0]), envNamePrefix) { - key := strings.Replace(kv[0], envNamePrefix, "", 1) + key := strings.ToUpper(kv[0]) + if strings.HasPrefix(key, envNamePrefix) { + key = strings.Replace(key, envNamePrefix, "", 1) name := strings.ReplaceAll(strings.ToLower(key), "_", "-") if err := flags.setValue(name, []string{kv[1]}, SourceEnv); err != nil { return nil, fmt.Errorf("unknown environment variable '%s' (value '%s')", kv[0], kv[1]) diff --git a/pkg/cmd/mokapi/flags/providers_git.go b/pkg/cmd/mokapi/flags/providers_git.go index 4356d7a6c..85c51f88c 100644 --- a/pkg/cmd/mokapi/flags/providers_git.go +++ b/pkg/cmd/mokapi/flags/providers_git.go @@ -18,6 +18,9 @@ func RegisterGitProvider(cmd *cli.Command) { cmd.Flags().DynamicStringSlice("providers-git-repositories[]-files", false, providerGitRepositoriesFiles) cmd.Flags().DynamicStringSlice("providers-git-repositories[]-include", false, providerGitRepositoriesInclude) cmd.Flags().DynamicString("providers-git-repositories[]-auth-github", providerGitRepositoriesAuthGitHub) + cmd.Flags().DynamicString("providers-git-repositories[]-auth-github-appid", providerGitRepositoriesAuthGitHubAppId) + cmd.Flags().DynamicString("providers-git-repositories[]-auth-github-installationid", providerGitRepositoriesAuthGitHubInstallationId) + cmd.Flags().DynamicString("providers-git-repositories[]-auth-github-privatekey", providerGitRepositoriesAuthGitHubPrivateKey) 
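Deriving the insertion index from `len(map)` breaks down once a flag is set more than once: the map's length stops growing, so a later flag can receive the same index as a re-set one. That is a plausible reading of why the dedicated `orderFlag` counter was introduced; a minimal reproduction of the collision:

```go
package main

import "fmt"

func main() {
	// Old behaviour: index = len(map). Re-setting an existing flag leaves
	// the length unchanged, so the next new flag collides with it.
	order := map[string]int{}
	set := func(name string) { order[name] = len(order) }
	set("a") // a=0
	set("b") // b=1
	set("a") // re-set: a=2, but len stays 2
	set("c") // c=2 as well: two flags share an index
	fmt.Println(order) // map[a:2 b:1 c:2]

	// New behaviour: a monotonically increasing counter per FlagSet.
	counter := 0
	ordered := map[string]int{}
	set2 := func(name string) { ordered[name] = counter; counter++ }
	set2("a")
	set2("b")
	set2("a")
	set2("c")
	fmt.Println(ordered) // map[a:2 b:1 c:3]
}
```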
cmd.Flags().DynamicString("providers-git-repositories[]-pull-interval", cli.FlagDoc{Short: "Override pull interval for this repository"}) } @@ -216,3 +219,45 @@ This option allows accessing private repositories hosted on GitHub by using cred }, }, } + +var providerGitRepositoriesAuthGitHubAppId = cli.FlagDoc{ + Short: "GitHub App ID used for authentication", + Long: `Sets or overrides the GitHub App ID used to authenticate access to the repository at the specified index.`, + Examples: []cli.Example{ + { + Codes: []cli.Code{ + {Title: "CLI", Source: "--providers-git-repositories[0]-auth-github-appId 12345"}, + {Title: "Env", Source: "MOKAPI_PROVIDERS_GIT_REPOSITORIES[0]_AUTH_GITHUB_APPID=12345"}, + {Title: "File", Source: "providers:\n git:\n repositories:\n - auth:\n github:\n appId: 12345", Language: "yaml"}, + }, + }, + }, +} + +var providerGitRepositoriesAuthGitHubInstallationId = cli.FlagDoc{ + Short: "GitHub App installation ID", + Long: `Sets or overrides the GitHub App installation ID used to authenticate access to the repository at the specified index.`, + Examples: []cli.Example{ + { + Codes: []cli.Code{ + {Title: "CLI", Source: "--providers-git-repositories[0]-auth-github-installationid 123456789"}, + {Title: "Env", Source: "MOKAPI_PROVIDERS_GIT_REPOSITORIES[0]_AUTH_GITHUB_INSTALLATIONID=123456789"}, + {Title: "File", Source: "providers:\n git:\n repositories:\n - auth:\n github:\n installationId: 12345\n", Language: "yaml"}, + }, + }, + }, +} + +var providerGitRepositoriesAuthGitHubPrivateKey = cli.FlagDoc{ + Short: "Private key for GitHub App authentication", + Long: `Sets or overrides the private key used for GitHub App authentication for the repository at the specified index.`, + Examples: []cli.Example{ + { + Codes: []cli.Code{ + {Title: "CLI", Source: "--providers-git-repositories[0]-auth-github-privatekey 2024-2-25.private-key.pem"}, + {Title: "Env", Source: "MOKAPI_PROVIDERS_GIT_REPOSITORIES[0]_AUTH_GITHUB_PRIVATEKEY=2024-2-25.private-key.pem"}, + {Title: "File", Source: "providers:\n git:\n repositories:\n - auth:\n github:\n privateKey: 2024-2-25.private-key.pem", Language: "yaml"}, + }, + }, + }, +} diff --git a/pkg/cmd/mokapi/flags/providers_git_test.go b/pkg/cmd/mokapi/flags/providers_git_test.go index 91b0e439b..7525ff665 100644 --- a/pkg/cmd/mokapi/flags/providers_git_test.go +++ b/pkg/cmd/mokapi/flags/providers_git_test.go @@ -3,26 +3,29 @@ package flags_test import ( "mokapi/config/static" "mokapi/pkg/cli" + "mokapi/pkg/cli/clitest" "mokapi/pkg/cmd/mokapi" "os" + "strings" "testing" + "github.com/golang-jwt/jwt/v4" "github.com/stretchr/testify/require" ) func TestRoot_Providers_Git(t *testing.T) { testcases := []struct { name string - cmd *cli.Command + cmd func(t *testing.T) *cli.Command test func(t *testing.T, cfg *static.Config, flags *cli.FlagSet) }{ { name: "--providers-git-repositories", - cmd: func() *cli.Command { + cmd: func(t *testing.T) *cli.Command { cmd := mokapi.NewCmdMokapi() cmd.SetArgs([]string{"--providers-git-repositories url=https://github.com/foo/foo.git,include=*.json url=https://github.com/bar/bar.git,include=*.yaml"}) return cmd - }(), + }, test: func(t *testing.T, cfg *static.Config, flags *cli.FlagSet) { require.Equal(t, []static.GitRepo{ {Url: "https://github.com/foo/foo.git", Include: []string{"*.json"}}, @@ -32,7 +35,7 @@ func TestRoot_Providers_Git(t *testing.T) { }, { name: "env variable using shorthand syntax", - cmd: func() *cli.Command { + cmd: func(t *testing.T) *cli.Command { key := "MOKAPI_PROVIDERS_GIT" err := os.Setenv(key, 
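The env examples above follow the naming scheme from `parseFlags`: uppercase the key, strip the `MOKAPI_` prefix, lowercase, and map underscores to dashes, which is also how `_0_` indices end up in the old `-0-` flag style matched by the pattern shown earlier. A sketch of that conversion:

```go
package main

import (
	"fmt"
	"strings"
)

// flagNameFromEnv mirrors the parsing step in pkg/cli: normalize case,
// strip the prefix, then turn underscores into dashes.
func flagNameFromEnv(key, prefix string) (string, bool) {
	k := strings.ToUpper(key)
	if !strings.HasPrefix(k, prefix) {
		return "", false
	}
	k = strings.Replace(k, prefix, "", 1)
	return strings.ReplaceAll(strings.ToLower(k), "_", "-"), true
}

func main() {
	name, _ := flagNameFromEnv("MOKAPI_Providers_Git_Repositories_0_Auth_GitHub_PrivateKey", "MOKAPI_")
	fmt.Println(name) // providers-git-repositories-0-auth-github-privatekey
}
```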
"pullInterval=10s,tempDir=/tempdir") require.NoError(t, err) @@ -43,7 +46,7 @@ func TestRoot_Providers_Git(t *testing.T) { cmd := mokapi.NewCmdMokapi() cmd.SetArgs([]string{}) return cmd - }(), + }, test: func(t *testing.T, cfg *static.Config, flags *cli.FlagSet) { require.Equal(t, "10s", cfg.Providers.Git.PullInterval) require.Equal(t, "/tempdir", cfg.Providers.Git.TempDir) @@ -51,17 +54,144 @@ func TestRoot_Providers_Git(t *testing.T) { }, { name: "index url", - cmd: func() *cli.Command { + cmd: func(t *testing.T) *cli.Command { cmd := mokapi.NewCmdMokapi() cmd.SetArgs([]string{"--providers-git-repositories[0]-url https://github.com/foo/foo.git"}) return cmd - }(), + }, test: func(t *testing.T, cfg *static.Config, flags *cli.FlagSet) { require.Equal(t, []static.GitRepo{ {Url: "https://github.com/foo/foo.git"}, }, cfg.Providers.Git.Repositories) }, }, + { + name: "github private key from env variable and cli flags", + cmd: func(t *testing.T) *cli.Command { + key1 := "MOKAPI_Providers_Git_Repositories_0_Auth_GitHub_PrivateKey" + err := os.Setenv(key1, `-----BEGIN RSA PRIVATE KEY----- +MIIBOQIBAAJAXWRPQyGlEY+SXz8Uslhe+MLjTgWd8lf/nA0hgCm9JFKC1tq1S73c +Q9naClNXsMqY7pwPt1bSY8jYRqHHbdoUvwIDAQABAkAfJkz1pCwtfkig8iZSEf2j +VUWBiYgUA9vizdJlsAZBLceLrdk8RZF2YOYCWHrpUtZVea37dzZJe99Dr53K0UZx +AiEAtyHQBGoCVHfzPM//a+4tv2ba3tx9at+3uzGR86YNMzcCIQCCjWHcLW/+sQTW +OXeXRrtxqHPp28ir8AVYuNX0nT1+uQIgJm158PMtufvRlpkux78a6mby1oD98Ecx +jp5AOhhF/NECICyHsQN69CJ5mt6/R01wMOt5u9/eubn76rbyhPgk0h7xAiEAjn6m +EmLwkIYD9VnZfp9+2UoWSh0qZiTIHyNwFpJH78o= +-----END RSA PRIVATE KEY----- + +`) + require.NoError(t, err) + key2 := "MOKAPI_Providers_Git_Repositories_1_Auth_GitHub_PrivateKey" + err = os.Setenv(key2, `-----BEGIN RSA PRIVATE KEY----- +MIIBOAIBAAJARsF2wfXtjllRR8nnz8+CLULn0bqgZtYktJB2BdcB5bw6OYmmDVCc +TeTC3VXZATdSqNA6WDWCkSVinC05uYEOEwIDAQABAkArUAaYmSkAeKCO54Pl7Ert +1gT+l9XU3cW+WqhEzuc0cC4Eiqe9phpdiQXNosI60a8YyeyBUjCtQGFwbJ1Kl8Hh +AiEAioOWu1s5nbB6ioOXdhbW4Ov5xfI62TYJNxdz656/njsCIQCCxRfwRVfDcC0h +hvuOpFzvZ870deo1/OD8j4U8jG+aCQIgXeU55qO+eODLEN6Ha+urmikc1kyQC/KP +aKMjV5PzfUUCIHX2s4yEERJ1K9EVwfE/5bH1E+TERb3j21UZZphjGv15AiBBs0w5 +WRuPspPXIAHPKrjEHkUsgDZHW/V0fJWbIjJarw== +-----END RSA PRIVATE KEY----- +`) + require.NoError(t, err) + t.Cleanup(func() { + _ = os.Unsetenv(key1) + _ = os.Unsetenv(key2) + }) + + cmd := mokapi.NewCmdMokapi() + cmd.SetArgs([]string{"--providers-git-repositories url=https://github.com/foo/foo.git url=https://github.com/bar/bar.git"}) + return cmd + }, + test: func(t *testing.T, cfg *static.Config, flags *cli.FlagSet) { + require.Len(t, cfg.Providers.Git.Repositories, 2) + + require.Equal(t, "https://github.com/foo/foo.git", cfg.Providers.Git.Repositories[0].Url) + require.NotNil(t, cfg.Providers.Git.Repositories[0].Auth) + require.True(t, strings.HasPrefix(cfg.Providers.Git.Repositories[0].Auth.GitHub.PrivateKey.String(), "-----BEGIN RSA PRIVATE KEY-----")) + _, err := jwt.ParseRSAPrivateKeyFromPEM([]byte(cfg.Providers.Git.Repositories[0].Auth.GitHub.PrivateKey)) + require.NoError(t, err) + + require.Equal(t, "https://github.com/foo/foo.git", cfg.Providers.Git.Repositories[0].Url) + require.True(t, strings.HasPrefix(cfg.Providers.Git.Repositories[1].Auth.GitHub.PrivateKey.String(), "-----BEGIN RSA PRIVATE KEY-----")) + _, err = jwt.ParseRSAPrivateKeyFromPEM([]byte(cfg.Providers.Git.Repositories[1].Auth.GitHub.PrivateKey)) + require.NoError(t, err) + }, + }, + { + name: "github private key from env variable and config file", + cmd: func(t *testing.T) *cli.Command { + key1 := 
"MOKAPI_Providers_Git_Repositories_0_Auth_GitHub_PrivateKey" + err := os.Setenv(key1, `-----BEGIN RSA PRIVATE KEY----- +MIIBOQIBAAJAXWRPQyGlEY+SXz8Uslhe+MLjTgWd8lf/nA0hgCm9JFKC1tq1S73c +Q9naClNXsMqY7pwPt1bSY8jYRqHHbdoUvwIDAQABAkAfJkz1pCwtfkig8iZSEf2j +VUWBiYgUA9vizdJlsAZBLceLrdk8RZF2YOYCWHrpUtZVea37dzZJe99Dr53K0UZx +AiEAtyHQBGoCVHfzPM//a+4tv2ba3tx9at+3uzGR86YNMzcCIQCCjWHcLW/+sQTW +OXeXRrtxqHPp28ir8AVYuNX0nT1+uQIgJm158PMtufvRlpkux78a6mby1oD98Ecx +jp5AOhhF/NECICyHsQN69CJ5mt6/R01wMOt5u9/eubn76rbyhPgk0h7xAiEAjn6m +EmLwkIYD9VnZfp9+2UoWSh0qZiTIHyNwFpJH78o= +-----END RSA PRIVATE KEY----- + +`) + require.NoError(t, err) + key2 := "MOKAPI_Providers_Git_Repositories_1_Auth_GitHub_PrivateKey" + err = os.Setenv(key2, `-----BEGIN RSA PRIVATE KEY----- +MIIBOAIBAAJARsF2wfXtjllRR8nnz8+CLULn0bqgZtYktJB2BdcB5bw6OYmmDVCc +TeTC3VXZATdSqNA6WDWCkSVinC05uYEOEwIDAQABAkArUAaYmSkAeKCO54Pl7Ert +1gT+l9XU3cW+WqhEzuc0cC4Eiqe9phpdiQXNosI60a8YyeyBUjCtQGFwbJ1Kl8Hh +AiEAioOWu1s5nbB6ioOXdhbW4Ov5xfI62TYJNxdz656/njsCIQCCxRfwRVfDcC0h +hvuOpFzvZ870deo1/OD8j4U8jG+aCQIgXeU55qO+eODLEN6Ha+urmikc1kyQC/KP +aKMjV5PzfUUCIHX2s4yEERJ1K9EVwfE/5bH1E+TERb3j21UZZphjGv15AiBBs0w5 +WRuPspPXIAHPKrjEHkUsgDZHW/V0fJWbIjJarw== +-----END RSA PRIVATE KEY----- +`) + require.NoError(t, err) + t.Cleanup(func() { + _ = os.Unsetenv(key1) + _ = os.Unsetenv(key2) + }) + + cmd := mokapi.NewCmdMokapi() + + cli.SetFileReader(&clitest.TestFileReader{Files: map[string][]byte{ + "/etc/foo/mokapi.yaml": []byte(` +providers: + git: + repositories: + - url: https://github.com/foo/foo.git + auth: + github: + appId: 1 + installationId: 123456 + - url: https://github.com/bar/bar.git + auth: + github: + appId: 2 + installationId: 823242 +`), + }}) + t.Cleanup(func() { + cli.SetFileReader(&cli.FileReader{}) + }) + cmd.SetConfigFile("/etc/foo/mokapi.yaml") + cmd.SetArgs([]string{}) + + return cmd + }, + test: func(t *testing.T, cfg *static.Config, flags *cli.FlagSet) { + require.Len(t, cfg.Providers.Git.Repositories, 2) + + require.Equal(t, "https://github.com/foo/foo.git", cfg.Providers.Git.Repositories[0].Url) + require.NotNil(t, cfg.Providers.Git.Repositories[0].Auth) + require.True(t, strings.HasPrefix(cfg.Providers.Git.Repositories[0].Auth.GitHub.PrivateKey.String(), "-----BEGIN RSA PRIVATE KEY-----")) + _, err := jwt.ParseRSAPrivateKeyFromPEM([]byte(cfg.Providers.Git.Repositories[0].Auth.GitHub.PrivateKey)) + require.NoError(t, err) + + require.Equal(t, "https://github.com/foo/foo.git", cfg.Providers.Git.Repositories[0].Url) + require.True(t, strings.HasPrefix(cfg.Providers.Git.Repositories[1].Auth.GitHub.PrivateKey.String(), "-----BEGIN RSA PRIVATE KEY-----")) + _, err = jwt.ParseRSAPrivateKeyFromPEM([]byte(cfg.Providers.Git.Repositories[1].Auth.GitHub.PrivateKey)) + require.NoError(t, err) + }, + }, } for _, tc := range testcases { @@ -70,7 +200,7 @@ func TestRoot_Providers_Git(t *testing.T) { cli.SetFileReader(&cli.FileReader{}) }() - cmd := tc.cmd + cmd := tc.cmd(t) var cfg *static.Config cmd.Run = func(cmd *cli.Command, args []string) error { cfg = cmd.Config.(*static.Config) diff --git a/pkg/cmd/mokapi/mokapi_test.go b/pkg/cmd/mokapi/mokapi_test.go index d8c39768b..ebbc0da08 100644 --- a/pkg/cmd/mokapi/mokapi_test.go +++ b/pkg/cmd/mokapi/mokapi_test.go @@ -13,35 +13,6 @@ import ( "github.com/stretchr/testify/require" ) -/*func TestMokapi_Cmd(t *testing.T) { - stdOut := os.Stdout - stdErr := os.Stderr - - reader, writer, err := os.Pipe() - require.NoError(t, err) - os.Stdout = writer - os.Stderr = writer - defer func() { - os.Stdout = stdOut - os.Stderr = stdErr 
- }() - - os.Args = nil - os.Args = append(os.Args, "mokapi.exe") - os.Args = append(os.Args, []string{"version"}...) - - cmd := mokapi.NewCmdMokapi(context.Background()) - err = cmd.Execute() - require.NoError(t, err) - - _ = writer.Close() - var buf bytes.Buffer - _, _ = io.Copy(&buf, reader) - _ = reader.Close() - - require.Equal(t, "", buf.String()) -}*/ - func TestMain_Flags(t *testing.T) { testcases := []struct { name string diff --git a/pkg/cmd/mokapi/providers_test.go b/pkg/cmd/mokapi/providers_test.go index c841ef703..4169cb3f9 100644 --- a/pkg/cmd/mokapi/providers_test.go +++ b/pkg/cmd/mokapi/providers_test.go @@ -2,6 +2,7 @@ package mokapi_test import ( "mokapi/config/static" + "mokapi/config/tls" "mokapi/pkg/cli" "mokapi/pkg/cmd/mokapi" "os" @@ -457,6 +458,113 @@ func TestRoot_Providers_Git(t *testing.T) { }, cfg.Providers.Git.Repositories[0].Auth.GitHub) }, }, + { + name: "auth github in config file", + args: func(t *testing.T) []string { + temp := t.TempDir() + f := path.Join(temp, "cfg.yaml") + err := os.WriteFile(f, []byte(` +providers: + git: + repositories: + - url: foo + auth: + github: + appId: 1001042 +`), 0644) + require.NoError(t, err) + + return []string{"--config-file", f} + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, &static.GitHubAuth{ + AppId: 1001042, + }, cfg.Providers.Git.Repositories[0].Auth.GitHub) + }, + }, + { + name: "auth github private key from env", + args: func(t *testing.T) []string { + key := "MOKAPI_Providers_Git_Repositories_1_Auth_GitHub_PrivateKey" + err := os.Setenv(key, "-----BEGIN RSA PRIVATE KEY-----") + require.NoError(t, err) + t.Cleanup(func() { + _ = os.Unsetenv(key) + }) + + temp := t.TempDir() + f := path.Join(temp, "cfg.yaml") + err = os.WriteFile(f, []byte(` +providers: + git: + repositories: + - url: foo + - url: bar +`), 0644) + require.NoError(t, err) + + return []string{"--config-file", f} + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, tls.FileOrContent("-----BEGIN RSA PRIVATE KEY-----"), cfg.Providers.Git.Repositories[1].Auth.GitHub.PrivateKey) + }, + }, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + cmd := mokapi.NewCmdMokapi() + cmd.SetArgs(tc.args(t)) + + cfg := static.NewConfig() + cmd.Run = func(cmd *cli.Command, args []string) error { + cfg = cmd.Config.(*static.Config) + return nil + } + err := cmd.Execute() + require.NoError(t, err) + + tc.test(t, cfg) + }) + } +} + +func TestRoot_Providers_Npm(t *testing.T) { + testcases := []struct { + name string + args func(t *testing.T) []string + test func(t *testing.T, cfg *static.Config) + }{ + { + name: "npm packages from file", + args: func(t *testing.T) []string { + temp := t.TempDir() + f := path.Join(temp, "cfg.json") + err := os.WriteFile(f, []byte(`{ +"providers": { + "npm": { + "packages": [ + { + "name": "foo", + "files": ["dist/foo.json"] + } + ] + } +}}`), 0644) + require.NoError(t, err) + + return []string{"--config-file", f} + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []static.NpmPackage{ + { + Name: "foo", + Files: []string{"dist/foo.json"}, + Include: []string(nil), + }, + }, cfg.Providers.Npm.Packages) + }, + }, } for _, tc := range testcases { diff --git a/providers/asyncapi3/asyncapi3test/channel.go b/providers/asyncapi3/asyncapi3test/channel.go index 205586a83..bcdfc94b4 100644 --- a/providers/asyncapi3/asyncapi3test/channel.go +++ b/providers/asyncapi3/asyncapi3test/channel.go @@ -54,3 +54,14 @@ func AssignToServer(ref string) ChannelOptions { 
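The git-provider tests above check that PEM private keys arriving via environment variables or config files bind into the target field unchanged and still parse as usable RSA keys. A self-contained version of that round-trip check, generating a throwaway key instead of embedding a fixture:

```go
package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"encoding/pem"
	"fmt"

	"github.com/golang-jwt/jwt/v4"
)

func main() {
	// A throwaway key stands in for the test fixtures.
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}

	// PKCS#1 PEM, the same framing the fixtures use
	// (-----BEGIN RSA PRIVATE KEY-----).
	pemBytes := pem.EncodeToMemory(&pem.Block{
		Type:  "RSA PRIVATE KEY",
		Bytes: x509.MarshalPKCS1PrivateKey(key),
	})

	// The assertion from the tests: the configured value must parse as a
	// usable RSA key, trailing newlines and all.
	parsed, err := jwt.ParseRSAPrivateKeyFromPEM(pemBytes)
	if err != nil {
		panic(err)
	}
	fmt.Println(parsed.Equal(key)) // true
}
```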
c.Servers = append(c.Servers, &asyncapi3.ServerRef{Reference: dynamic.Reference{Ref: ref}}) } } + +func WithChannelTag(name, description string) ChannelOptions { + return func(c *asyncapi3.Channel) { + c.Tags = append(c.Tags, &asyncapi3.TagRef{ + Value: &asyncapi3.Tag{ + Name: name, + Description: description, + }, + }) + } +} diff --git a/providers/asyncapi3/asyncapi3test/config.go b/providers/asyncapi3/asyncapi3test/config.go index 81e47f9b1..f15c296a9 100644 --- a/providers/asyncapi3/asyncapi3test/config.go +++ b/providers/asyncapi3/asyncapi3test/config.go @@ -2,6 +2,7 @@ package asyncapi3test import ( "mokapi/providers/asyncapi3" + "mokapi/sortedmap" ) type ConfigOptions func(c *asyncapi3.Config) @@ -10,7 +11,7 @@ func NewConfig(opts ...ConfigOptions) *asyncapi3.Config { c := &asyncapi3.Config{ Version: "2.0.0", Info: asyncapi3.Info{Name: "test", Version: "1.0"}, - Servers: map[string]*asyncapi3.ServerRef{}, + Servers: &sortedmap.LinkedHashMap[string, *asyncapi3.ServerRef]{}, DefaultContentType: asyncapi3.DefaultContentType, } for _, opt := range opts { @@ -57,7 +58,7 @@ func WithContact(name, url, mail string) ConfigOptions { func WithServer(name, protocol, host string, opts ...ServerOptions) ConfigOptions { return func(c *asyncapi3.Config) { if c.Servers == nil { - c.Servers = make(map[string]*asyncapi3.ServerRef) + c.Servers = &sortedmap.LinkedHashMap[string, *asyncapi3.ServerRef]{} } s := &asyncapi3.Server{ @@ -68,7 +69,7 @@ func WithServer(name, protocol, host string, opts ...ServerOptions) ConfigOption opt(s) } - c.Servers[name] = &asyncapi3.ServerRef{Value: s} + c.Servers.Set(name, &asyncapi3.ServerRef{Value: s}) } } diff --git a/providers/asyncapi3/bindings_kafka.go b/providers/asyncapi3/bindings_kafka.go index a65951683..201bb1b43 100644 --- a/providers/asyncapi3/bindings_kafka.go +++ b/providers/asyncapi3/bindings_kafka.go @@ -2,11 +2,14 @@ package asyncapi3 import ( "fmt" - "gopkg.in/yaml.v3" "mokapi/schema/json/schema" + + "gopkg.in/yaml.v3" ) type BrokerBindings struct { + configs map[string]any + // LogRetentionBytes the maximum size of the log before deleting it LogRetentionBytes int64 @@ -77,55 +80,59 @@ type TopicBindings struct { } func (b *BrokerBindings) UnmarshalYAML(value *yaml.Node) error { - m := make(map[string]interface{}) - err := value.Decode(m) + b.configs = make(map[string]any) + err := value.Decode(b.configs) if err != nil { return err } - b.LogRetentionBytes, err = getInt64(m, "log.retention.bytes") + b.LogRetentionBytes, err = getInt64(b.configs, "log.retention.bytes") if err != nil { return fmt.Errorf("invalid log.retention.bytes: %w", err) } - b.LogRetentionMs, err = getMs(m, "log.retention") + b.LogRetentionMs, err = getMs(b.configs, "log.retention") if err != nil { return err } - b.LogRetentionCheckIntervalMs, err = getInt64(m, "log.retention.check.interval.ms") + b.LogRetentionCheckIntervalMs, err = getInt64(b.configs, "log.retention.check.interval.ms") if err != nil { return fmt.Errorf("invalid log.retention.check.interval.ms: %w", err) } - b.LogSegmentDeleteDelayMs, err = getInt64(m, "log.segment.delete.delay.ms") + b.LogSegmentDeleteDelayMs, err = getInt64(b.configs, "log.segment.delete.delay.ms") if err != nil { return fmt.Errorf("invalid log.segment.delete.delay.ms: %w", err) } - b.LogRollMs, err = getMs(m, "log.roll") + b.LogRollMs, err = getMs(b.configs, "log.roll") if err != nil { return err } - b.LogSegmentBytes, err = getInt64(m, "log.segment.bytes") + b.LogSegmentBytes, err = getInt64(b.configs, "log.segment.bytes") if err != nil { 
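`Servers` switching from a plain map to `sortedmap.LinkedHashMap` matters because Go maps iterate in randomized order, while broker IDs and dashboard listings should stay stable across runs. A condensed generic sketch of such a map (the real `mokapi/sortedmap` type has a richer iterator API):

```go
package main

import "fmt"

// LinkedHashMap is a condensed sketch of an insertion-ordered map.
type LinkedHashMap[K comparable, V any] struct {
	keys   []K
	values map[K]V
}

func (m *LinkedHashMap[K, V]) Set(k K, v V) {
	if m.values == nil {
		m.values = map[K]V{}
	}
	if _, ok := m.values[k]; !ok {
		m.keys = append(m.keys, k)
	}
	m.values[k] = v
}

func (m *LinkedHashMap[K, V]) Lookup(k K) V { return m.values[k] }

func (m *LinkedHashMap[K, V]) Len() int { return len(m.keys) }

// Range visits entries in insertion order: the property a plain Go map
// cannot guarantee, and the reason broker IDs stay stable across runs.
func (m *LinkedHashMap[K, V]) Range(f func(K, V) bool) {
	for _, k := range m.keys {
		if !f(k, m.values[k]) {
			return
		}
	}
}

func main() {
	var servers LinkedHashMap[string, string]
	servers.Set("scram-connections", "test.mykafkacluster.org:18092")
	servers.Set("mtls-connections", "test.mykafkacluster.org:28092")
	servers.Range(func(name, host string) bool {
		fmt.Println(name, host)
		return true
	})
}
```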
return fmt.Errorf("invalid log.segment.bytes: %w", err) } - b.GroupInitialRebalanceDelayMs, err = getInt64(m, "group.initial.rebalance.delay.ms") + b.GroupInitialRebalanceDelayMs, err = getInt64(b.configs, "group.initial.rebalance.delay.ms") if err != nil { return fmt.Errorf("invalid group.initial.rebalance.delay.ms: %w", err) } - b.GroupMinSessionTimeoutMs, err = getInt64(m, "group.min.session.timeout.ms") + b.GroupMinSessionTimeoutMs, err = getInt64(b.configs, "group.min.session.timeout.ms") if err != nil { return fmt.Errorf("invalid group.min.session.timeout.ms: %w", err) } - if s, ok := m["schemaRegistryUrl"]; ok { + if s, ok := b.configs["schemaRegistryUrl"]; ok { b.SchemaRegistryUrl = s.(string) } - if s, ok := m["schemaRegistryVendor"]; ok { + if s, ok := b.configs["schemaRegistryVendor"]; ok { b.SchemaRegistryVendor = s.(string) } return nil } +func (b *BrokerBindings) Configs() map[string]any { + return b.configs +} + func (t *TopicBindings) UnmarshalYAML(value *yaml.Node) error { t.ValueSchemaValidation = true t.KeySchemaValidation = true diff --git a/providers/asyncapi3/bindings_kafka_test.go b/providers/asyncapi3/bindings_kafka_test.go index 30d93b78f..33381286e 100644 --- a/providers/asyncapi3/bindings_kafka_test.go +++ b/providers/asyncapi3/bindings_kafka_test.go @@ -1,10 +1,11 @@ package asyncapi3_test import ( - "github.com/stretchr/testify/require" - "gopkg.in/yaml.v3" "mokapi/providers/asyncapi3" "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" ) func TestKafkaBindingsServer_Yaml(t *testing.T) { @@ -24,7 +25,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(10), config.Servers["test"].Value.Bindings.Kafka.LogRetentionBytes) + require.Equal(t, int64(10), config.Servers.Lookup("test").Value.Bindings.Kafka.LogRetentionBytes) }, }, { @@ -51,7 +52,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(10), config.Servers["test"].Value.Bindings.Kafka.LogRetentionMs) + require.Equal(t, int64(10), config.Servers.Lookup("test").Value.Bindings.Kafka.LogRetentionMs) }, }, { @@ -78,7 +79,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(600000), config.Servers["test"].Value.Bindings.Kafka.LogRetentionMs) + require.Equal(t, int64(600000), config.Servers.Lookup("test").Value.Bindings.Kafka.LogRetentionMs) }, }, { @@ -105,7 +106,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(36000000), config.Servers["test"].Value.Bindings.Kafka.LogRetentionMs) + require.Equal(t, int64(36000000), config.Servers.Lookup("test").Value.Bindings.Kafka.LogRetentionMs) }, }, { @@ -132,7 +133,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(10), config.Servers["test"].Value.Bindings.Kafka.LogRetentionCheckIntervalMs) + require.Equal(t, int64(10), config.Servers.Lookup("test").Value.Bindings.Kafka.LogRetentionCheckIntervalMs) }, }, { @@ -159,7 +160,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(10), config.Servers["test"].Value.Bindings.Kafka.LogSegmentDeleteDelayMs) + require.Equal(t, int64(10), config.Servers.Lookup("test").Value.Bindings.Kafka.LogSegmentDeleteDelayMs) }, }, { @@ -186,7 +187,7 @@ servers: `, test: 
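`BrokerBindings` now keeps the raw decoded map alongside the typed fields and exposes it via `Configs()`, so keys the code has no field for remain visible (e.g. to the dashboard) instead of being dropped. The decode-and-retain pattern in isolation, with only one typed field shown:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// brokerBindings sketches the pattern: unmarshal the node into a raw map
// once, pull out the typed fields the code interprets, and keep the map.
type brokerBindings struct {
	configs           map[string]any
	LogRetentionBytes int64
}

func (b *brokerBindings) UnmarshalYAML(value *yaml.Node) error {
	b.configs = map[string]any{}
	if err := value.Decode(&b.configs); err != nil {
		return err
	}
	if v, ok := b.configs["log.retention.bytes"]; ok {
		i, ok := v.(int)
		if !ok {
			return fmt.Errorf("invalid log.retention.bytes: %v", v)
		}
		b.LogRetentionBytes = int64(i)
	}
	return nil
}

func (b *brokerBindings) Configs() map[string]any { return b.configs }

func main() {
	var b brokerBindings
	if err := yaml.Unmarshal([]byte("log.retention.bytes: 1024\ncustom.key: foo\n"), &b); err != nil {
		panic(err)
	}
	// The typed field and the unknown key are both available.
	fmt.Println(b.LogRetentionBytes, b.Configs()["custom.key"]) // 1024 foo
}
```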
func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(10), config.Servers["test"].Value.Bindings.Kafka.LogRollMs) + require.Equal(t, int64(10), config.Servers.Lookup("test").Value.Bindings.Kafka.LogRollMs) }, }, { @@ -213,7 +214,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(600000), config.Servers["test"].Value.Bindings.Kafka.LogRollMs) + require.Equal(t, int64(600000), config.Servers.Lookup("test").Value.Bindings.Kafka.LogRollMs) }, }, { @@ -240,7 +241,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(36000000), config.Servers["test"].Value.Bindings.Kafka.LogRollMs) + require.Equal(t, int64(36000000), config.Servers.Lookup("test").Value.Bindings.Kafka.LogRollMs) }, }, { @@ -267,7 +268,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(10), config.Servers["test"].Value.Bindings.Kafka.LogSegmentBytes) + require.Equal(t, int64(10), config.Servers.Lookup("test").Value.Bindings.Kafka.LogSegmentBytes) }, }, { @@ -294,7 +295,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(10), config.Servers["test"].Value.Bindings.Kafka.GroupInitialRebalanceDelayMs) + require.Equal(t, int64(10), config.Servers.Lookup("test").Value.Bindings.Kafka.GroupInitialRebalanceDelayMs) }, }, { @@ -321,7 +322,7 @@ servers: `, test: func(t *testing.T, config *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, int64(10), config.Servers["test"].Value.Bindings.Kafka.GroupMinSessionTimeoutMs) + require.Equal(t, int64(10), config.Servers.Lookup("test").Value.Bindings.Kafka.GroupMinSessionTimeoutMs) }, }, { @@ -347,7 +348,7 @@ servers: schemaRegistryUrl: foo.bar `, test: func(t *testing.T, config *asyncapi3.Config, err error) { - require.Equal(t, "foo.bar", config.Servers["test"].Value.Bindings.Kafka.SchemaRegistryUrl) + require.Equal(t, "foo.bar", config.Servers.Lookup("test").Value.Bindings.Kafka.SchemaRegistryUrl) }, }, { @@ -360,7 +361,7 @@ servers: schemaRegistryVendor: foo `, test: func(t *testing.T, config *asyncapi3.Config, err error) { - require.Equal(t, "foo", config.Servers["test"].Value.Bindings.Kafka.SchemaRegistryVendor) + require.Equal(t, "foo", config.Servers.Lookup("test").Value.Bindings.Kafka.SchemaRegistryVendor) }, }, } diff --git a/providers/asyncapi3/channel.go b/providers/asyncapi3/channel.go index 2cdb466e3..06a98a0a1 100644 --- a/providers/asyncapi3/channel.go +++ b/providers/asyncapi3/channel.go @@ -22,23 +22,11 @@ type Channel struct { Parameters map[string]*ParameterRef `yaml:"parameters" json:"parameters"` Bindings ChannelBindings `yaml:"bindings" json:"bindings"` + Tags []*TagRef `yaml:"tags" json:"tags"` ExternalDocs []ExternalDocRef `yaml:"externalDocs" json:"externalDocs"` Config *Config } -type ChannelTrait struct { - Title string `yaml:"title" json:"title"` - Address string `yaml:"address" json:"address"` - Summary string `yaml:"summary" json:"summary"` - Description string `yaml:"description" json:"description"` - Servers []*ServerRef `yaml:"servers" json:"servers"` - Messages map[string]*MessageRef `yaml:"messages" json:"messages"` - Parameters map[string]*ParameterRef `yaml:"parameters" json:"parameters"` - Bindings ChannelBindings `yaml:"bindings" json:"bindings"` - - ExternalDocs []ExternalDocRef 
`yaml:"externalDocs" json:"externalDocs"` -} - func (r *ChannelRef) UnmarshalYAML(node *yaml.Node) error { return r.Reference.UnmarshalYaml(node, &r.Value) } diff --git a/providers/asyncapi3/channel_test.go b/providers/asyncapi3/channel_test.go new file mode 100644 index 000000000..2292523c9 --- /dev/null +++ b/providers/asyncapi3/channel_test.go @@ -0,0 +1,74 @@ +package asyncapi3_test + +import ( + "encoding/json" + "mokapi/providers/asyncapi3" + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +func TestChannel_UnmarshalJSON(t *testing.T) { + testcases := []struct { + name string + data string + test func(t *testing.T, cfg *asyncapi3.Channel, err error) + }{ + { + name: "tags", + data: `{ "tags": [ { "name": "foo", "description": "bar" } ] }`, + test: func(t *testing.T, cfg *asyncapi3.Channel, err error) { + require.NoError(t, err) + require.Len(t, cfg.Tags, 1) + require.Equal(t, "foo", cfg.Tags[0].Value.Name) + }, + }, + } + + t.Parallel() + for _, tc := range testcases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + var ch *asyncapi3.Channel + err := json.Unmarshal([]byte(tc.data), &ch) + tc.test(t, ch, err) + }) + } +} + +func TestChannel_UnmarshalYAML(t *testing.T) { + testcases := []struct { + name string + data string + test func(t *testing.T, cfg *asyncapi3.Channel, err error) + }{ + { + name: "tags", + data: ` +tags: + - name: foo + description: bar +`, + test: func(t *testing.T, cfg *asyncapi3.Channel, err error) { + require.NoError(t, err) + require.Len(t, cfg.Tags, 1) + require.Equal(t, "foo", cfg.Tags[0].Value.Name) + }, + }, + } + + t.Parallel() + for _, tc := range testcases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + var ch *asyncapi3.Channel + err := yaml.Unmarshal([]byte(tc.data), &ch) + tc.test(t, ch, err) + }) + } +} diff --git a/providers/asyncapi3/config.go b/providers/asyncapi3/config.go index 685138d7b..7ba0adb17 100644 --- a/providers/asyncapi3/config.go +++ b/providers/asyncapi3/config.go @@ -2,6 +2,7 @@ package asyncapi3 import ( "mokapi/config/dynamic" + "mokapi/sortedmap" "gopkg.in/yaml.v3" ) @@ -17,7 +18,7 @@ type Config struct { // Default content type to use when encoding/decoding a message's payload. 
DefaultContentType string `yaml:"defaultContentType" json:"defaultContentType"` - Servers map[string]*ServerRef `yaml:"servers" json:"servers"` + Servers *sortedmap.LinkedHashMap[string, *ServerRef] `yaml:"servers" json:"servers"` Channels map[string]*ChannelRef Operations map[string]*OperationRef `yaml:"operations" json:"operations"` @@ -47,15 +48,18 @@ type License struct { } func (c *Config) Parse(config *dynamic.Config, reader dynamic.Reader) error { - for _, server := range c.Servers { - if len(server.Ref) > 0 { - return dynamic.Resolve(server.Ref, &server.Value, config, reader) - } - if server.Value == nil { - return nil - } - if err := server.parse(config, reader); err != nil { - return err + if c.Servers != nil { + for it := c.Servers.Iter(); it.Next(); { + server := it.Value() + if len(server.Ref) > 0 { + return dynamic.Resolve(server.Ref, &server.Value, config, reader) + } + if server.Value == nil { + return nil + } + if err := server.parse(config, reader); err != nil { + return err + } } } @@ -110,7 +114,8 @@ func (c *Config) HasKafkaServer() bool { if c == nil { return false } - for _, server := range c.Servers { + for it := c.Servers.Iter(); it.Next(); { + server := it.Value() if server.Value.Protocol == "kafka" { return true } diff --git a/providers/asyncapi3/config_test.go b/providers/asyncapi3/config_test.go index 3c636756e..db6df0d94 100644 --- a/providers/asyncapi3/config_test.go +++ b/providers/asyncapi3/config_test.go @@ -2,8 +2,6 @@ package asyncapi3_test import ( "encoding/json" - "github.com/stretchr/testify/require" - "gopkg.in/yaml.v3" "mokapi/config/dynamic" "mokapi/config/dynamic/dynamictest" "mokapi/providers/asyncapi3" @@ -15,6 +13,9 @@ import ( "os" "strings" "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" ) func TestConfig3_Schema(t *testing.T) { @@ -71,13 +72,13 @@ func TestStreetlightKafka(t *testing.T) { require.Equal(t, "application/json", cfg.DefaultContentType) // Server - require.Len(t, cfg.Servers, 2) - server := cfg.Servers["scram-connections"] + require.Equal(t, cfg.Servers.Len(), 2) + server := cfg.Servers.Lookup("scram-connections") require.Equal(t, "test.mykafkacluster.org:18092", server.Value.Host) require.Equal(t, "kafka-secure", server.Value.Protocol) require.Equal(t, "Test broker secured with scramSha256", server.Value.Description) - server = cfg.Servers["mtls-connections"] + server = cfg.Servers.Lookup("mtls-connections") require.Equal(t, "test.mykafkacluster.org:28092", server.Value.Host) require.Equal(t, "kafka-secure", server.Value.Protocol) require.Equal(t, "Test broker secured with X509", server.Value.Description) diff --git a/providers/asyncapi3/kafka/store/apiversion.go b/providers/asyncapi3/kafka/store/apiversion.go index e4c9a17bf..370b09a6e 100644 --- a/providers/asyncapi3/kafka/store/apiversion.go +++ b/providers/asyncapi3/kafka/store/apiversion.go @@ -21,7 +21,7 @@ func (s *Store) apiversion(rw kafka.ResponseWriter, req *kafka.Request) error { } if req.Header.ApiVersion >= 3 { - client := kafka.ClientFromContext(req) + client := kafka.ClientFromContext(req.Context) client.ClientSoftwareName = r.ClientSwName client.ClientSoftwareVersion = r.ClientSwVersion } diff --git a/providers/asyncapi3/kafka/store/apiversion_test.go b/providers/asyncapi3/kafka/store/apiversion_test.go index 6a783d147..161a10ad0 100644 --- a/providers/asyncapi3/kafka/store/apiversion_test.go +++ b/providers/asyncapi3/kafka/store/apiversion_test.go @@ -8,6 +8,7 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" 
"mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "net" "testing" "time" @@ -16,7 +17,7 @@ import ( ) func TestApiVersion(t *testing.T) { - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() rr := kafkatest.NewRecorder() @@ -39,7 +40,7 @@ func TestApiVersion(t *testing.T) { } func TestApiVersion_Raw(t *testing.T) { - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() b := kafkatest.NewBroker(kafkatest.WithHandler(s)) defer b.Close() @@ -95,7 +96,7 @@ func TestApiVersion_Raw(t *testing.T) { } func TestApiVersion_Client_Is_Ahead(t *testing.T) { - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() r := kafkatest.NewRequest("kafkatest", 30, &apiVersion.Request{ diff --git a/providers/asyncapi3/kafka/store/client.go b/providers/asyncapi3/kafka/store/client.go index 0c96dbeac..9032cbedc 100644 --- a/providers/asyncapi3/kafka/store/client.go +++ b/providers/asyncapi3/kafka/store/client.go @@ -48,6 +48,9 @@ type RecordResult struct { } type Client struct { + ClientId string + ScriptFile string + store *Store monitor *monitor.Kafka } @@ -99,14 +102,12 @@ func (c *Client) Write(topic string, records []Record, ct media.ContentType) ([] }) } b := kafka.RecordBatch{Records: []*kafka.Record{rec}} - var write func(batch kafka.RecordBatch) (WriteResult, error) - if r.SkipValidation { - write = p.WriteSkipValidation - } else { - write = p.Write - } - wr, err := write(b) + wr, err := p.write(b, WriteOptions{ + SkipValidation: r.SkipValidation, + ClientId: c.ClientId, + ScriptFile: c.ScriptFile, + }) if err != nil { result = append(result, RecordResult{ Partition: -1, @@ -360,7 +361,11 @@ func selectMessage(value any, topic *asyncapi3.Channel) (*asyncapi3.Message, err } if noOperationDefined { - return nil, fmt.Errorf("no 'send' or 'receive' operation defined in specification") + for _, msg := range topic.Messages { + if validationErr = valueMatchMessagePayload(value, msg.Value); validationErr == nil { + return msg.Value, nil + } + } } if value != nil { @@ -373,9 +378,12 @@ func selectMessage(value any, topic *asyncapi3.Channel) (*asyncapi3.Message, err value = string(b) } } - return nil, fmt.Errorf("no matching message configuration found for the given value: %v\nhint:\n%w\n", value, validationErr) + if validationErr != nil { + return nil, fmt.Errorf("no matching message configuration found for the given value: %v\nhint:\n%w\n", value, validationErr) + } + return nil, nil } - return nil, fmt.Errorf("no message ") + return nil, fmt.Errorf("channel defines no message schema; define a message payload in the channel or provide an explicit message") } func valueMatchMessagePayload(value any, msg *asyncapi3.Message) error { diff --git a/providers/asyncapi3/kafka/store/client_test.go b/providers/asyncapi3/kafka/store/client_test.go index 968958f22..90b79e04c 100644 --- a/providers/asyncapi3/kafka/store/client_test.go +++ b/providers/asyncapi3/kafka/store/client_test.go @@ -407,7 +407,7 @@ func TestClient(t *testing.T) { for _, tc := range testcases { tc := tc t.Run(tc.name, 
func(t *testing.T) { - s := store.New(tc.cfg, enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(tc.cfg, enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) m := monitor.NewKafka() tc.test(t, s, m) }) diff --git a/providers/asyncapi3/kafka/store/create_topics_test.go b/providers/asyncapi3/kafka/store/create_topics_test.go index 16df3501d..8f6f2692e 100644 --- a/providers/asyncapi3/kafka/store/create_topics_test.go +++ b/providers/asyncapi3/kafka/store/create_topics_test.go @@ -1,7 +1,6 @@ package store_test import ( - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/createTopics" @@ -10,11 +9,14 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "testing" + + "github.com/stretchr/testify/require" ) func TestCreateTopic(t *testing.T) { - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() rr := kafkatest.NewRecorder() @@ -35,7 +37,7 @@ func TestCreateTopic(t *testing.T) { } func TestCreateTopic_AlreadyExists(t *testing.T) { - s := store.New(asyncapi3test.NewConfig(asyncapi3test.AddChannel("test", &asyncapi3.Channel{})), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(asyncapi3test.AddChannel("test", &asyncapi3.Channel{})), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() rr := kafkatest.NewRecorder() diff --git a/providers/asyncapi3/kafka/store/fetch_test.go b/providers/asyncapi3/kafka/store/fetch_test.go index a6fa06562..76123bbfa 100644 --- a/providers/asyncapi3/kafka/store/fetch_test.go +++ b/providers/asyncapi3/kafka/store/fetch_test.go @@ -9,6 +9,7 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "testing" "time" @@ -380,7 +381,7 @@ func TestFetch(t *testing.T) { tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() tc.test(t, s) diff --git a/providers/asyncapi3/kafka/store/find_coordinator.go b/providers/asyncapi3/kafka/store/find_coordinator.go index 8d76ef05d..516d68d7f 100644 --- a/providers/asyncapi3/kafka/store/find_coordinator.go +++ b/providers/asyncapi3/kafka/store/find_coordinator.go @@ -4,7 +4,6 @@ import ( "fmt" "mokapi/kafka" "mokapi/kafka/findCoordinator" - "net" log "github.com/sirupsen/logrus" ) @@ -13,41 +12,46 @@ func (s *Store) findCoordinator(rw kafka.ResponseWriter, req *kafka.Request) err r := req.Message.(*findCoordinator.Request) res := &findCoordinator.Response{} - writeError := func(code kafka.ErrorCode, msg string) error { - res.ErrorCode = code - res.ErrorMessage = msg - log.Errorf("kafka FindCoordinator: %v", msg) - return rw.Write(res) + reqLog := &KafkaFindCoordinatorRequest{ + Key: r.Key, + KeyType: r.KeyType, } switch r.KeyType { case findCoordinator.KeyTypeGroup: - b := s.getBrokerByHost(req.Host) - if b == nil { - return writeError(kafka.UnknownServerError, fmt.Sprintf("broker %v not found", req.Host)) - } - g := s.GetOrCreateGroup(r.Key, b.Id) - if g.Coordinator == nil { -
return writeError(kafka.CoordinatorNotAvailable, fmt.Sprintf("no coordinator for group %v available", r.Key)) - } else { - host := g.Coordinator.Host - if len(host) == 0 { - var err error - host, _, err = net.SplitHostPort(req.Host) - if err != nil { - return writeError(kafka.UnknownServerError, fmt.Sprintf("broker %v not found: %v", req.Host, err)) - } - } - - res.NodeId = int32(g.Coordinator.Id) - res.Host = host - res.Port = int32(g.Coordinator.Port) + host, port := parseHostAndPort(req.Host) + b := s.getBrokerByPort(req.Host) + if b != nil && b.Host != "" { + host = b.Host } + // Mokapi does no leader management: always return fixed node id + res.NodeId = 0 + res.Host = host + res.Port = int32(port) default: res.ErrorCode = kafka.UnknownServerError res.ErrorMessage = fmt.Sprintf("unsupported request key_type=%v", r.KeyType) log.Errorf("kafka FindCoordinator: %v", res.ErrorMessage) } + go func() { + s.logRequest(req.Header, reqLog)(newKafkaFindCoordinatorResponse(res)) + }() + return rw.Write(res) } + +func newKafkaFindCoordinatorResponse(res *findCoordinator.Response) *KafkaFindCoordinatorResponse { + r := &KafkaFindCoordinatorResponse{ + Host: res.Host, + Port: int(res.Port), + } + if res.ErrorCode != kafka.None { + r.ErrorCode = res.ErrorCode.String() + r.ErrorMessage = res.ErrorMessage + } + return r +} diff --git a/providers/asyncapi3/kafka/store/find_coordinator_test.go b/providers/asyncapi3/kafka/store/find_coordinator_test.go index 1425c4a57..f4f2ec3d9 100644 --- a/providers/asyncapi3/kafka/store/find_coordinator_test.go +++ b/providers/asyncapi3/kafka/store/find_coordinator_test.go @@ -8,6 +8,7 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "testing" "github.com/stretchr/testify/require" @@ -65,8 +66,9 @@ func TestFindCoordinator(t *testing.T) { s.ServeMessage(rr, r) res, ok := rr.Message.(*findCoordinator.Response) require.True(t, ok) - require.Equal(t, kafka.UnknownServerError, res.ErrorCode) - require.Equal(t, "broker 127.0.0.1:9092 not found", res.ErrorMessage) + require.Equal(t, kafka.None, res.ErrorCode) + require.Equal(t, "127.0.0.1", res.Host) + require.Equal(t, int32(9092), res.Port) }, }, { @@ -90,6 +92,75 @@ func TestFindCoordinator(t *testing.T) { require.Equal(t, int32(9092), res.Port) }, }, + { + "broker without host should use IP", + func(t *testing.T, s *store.Store) { + s.Update(asyncapi3test.NewConfig(asyncapi3test.WithServer("foo", "kafka", ":9092"))) + + r := kafkatest.NewRequest("kafkatest", 3, &findCoordinator.Request{ + Key: "foo", + KeyType: findCoordinator.KeyTypeGroup, + }) + r.Host = "127.0.0.1:9092" + rr := kafkatest.NewRecorder() + s.ServeMessage(rr, r) + + res, ok := rr.Message.(*findCoordinator.Response) + require.True(t, ok) + require.Equal(t, kafka.None, res.ErrorCode, "expected no kafka error") + + require.Equal(t, "127.0.0.1", res.Host) + require.Equal(t, int32(9092), res.Port) + }, + }, + { + "broker with host has priority over broker without host", + func(t *testing.T, s *store.Store) { + s.Update(asyncapi3test.NewConfig( + asyncapi3test.WithServer("foo", "kafka", ":9092"), + asyncapi3test.WithServer("bar", "kafka", "foo.bar:9092"), + )) + + r := kafkatest.NewRequest("kafkatest", 3, &findCoordinator.Request{ + Key: "foo", + KeyType: findCoordinator.KeyTypeGroup, + }) + r.Host = "127.0.0.1:9092" + 
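+ // both configured brokers match port 9092; the broker declaring an explicit host should win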
rr := kafkatest.NewRecorder() + s.ServeMessage(rr, r) + + res, ok := rr.Message.(*findCoordinator.Response) + require.True(t, ok) + require.Equal(t, kafka.None, res.ErrorCode, "expected no kafka error") + + require.Equal(t, "foo.bar", res.Host) + require.Equal(t, int32(9092), res.Port) + }, + }, + { + "use first broker with matching port", + func(t *testing.T, s *store.Store) { + s.Update(asyncapi3test.NewConfig( + asyncapi3test.WithServer("foo", "kafka", "mokapi.io:9092"), + asyncapi3test.WithServer("bar", "kafka", "foo.bar:9092"), + )) + + r := kafkatest.NewRequest("kafkatest", 3, &findCoordinator.Request{ + Key: "foo", + KeyType: findCoordinator.KeyTypeGroup, + }) + r.Host = "127.0.0.1:9092" + rr := kafkatest.NewRecorder() + s.ServeMessage(rr, r) + + res, ok := rr.Message.(*findCoordinator.Response) + require.True(t, ok) + require.Equal(t, kafka.None, res.ErrorCode, "expected no kafka error") + + require.Equal(t, "mokapi.io", res.Host) + require.Equal(t, int32(9092), res.Port) + }, + }, } t.Parallel() @@ -98,7 +169,7 @@ func TestFindCoordinator(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() tc.fn(t, s) }) diff --git a/providers/asyncapi3/kafka/store/group.go b/providers/asyncapi3/kafka/store/group.go index 300c3bbdb..9f3a5d0fd 100644 --- a/providers/asyncapi3/kafka/store/group.go +++ b/providers/asyncapi3/kafka/store/group.go @@ -1,8 +1,11 @@ package store import ( - log "github.com/sirupsen/logrus" "mokapi/kafka" + "mokapi/runtime/monitor" + "time" + + log "github.com/sirupsen/logrus" ) type GroupState int @@ -22,10 +25,9 @@ var states = [...]string{ } type Group struct { - Name string - Coordinator *Broker - State GroupState - Generation *Generation + Name string + State GroupState + Generation *Generation // todo add timestamp and metadata to commit Commits map[string]map[int]int64 @@ -33,12 +35,11 @@ type Group struct { balancer *groupBalancer } -func NewGroup(name string, coordinator *Broker) *Group { +func (s *Store) newGroup(name string, coordinator *Broker) *Group { g := &Group{ - Name: name, - Coordinator: coordinator, + Name: name, } - g.balancer = newGroupBalancer(g, coordinator.kafkaConfig) + g.balancer = newGroupBalancer(g, coordinator.kafkaConfig, &groupMonitor{cluster: s.cluster, monitor: s.monitor}) go g.balancer.run() return g } @@ -118,3 +119,12 @@ func newMember(ctx *kafka.ClientContext, sessionTimeout int) *Member { SessionTimeout: sessionTimeout, } } + +type groupMonitor struct { + cluster string + monitor *monitor.Kafka +} + +func (gm *groupMonitor) LastRebalancing(group string, time time.Time) { + gm.monitor.LastRebalancing.WithLabel(gm.cluster, group).Set(float64(time.Unix())) +} diff --git a/providers/asyncapi3/kafka/store/group_balancer.go b/providers/asyncapi3/kafka/store/group_balancer.go index f05abe2e0..0a9ea3534 100644 --- a/providers/asyncapi3/kafka/store/group_balancer.go +++ b/providers/asyncapi3/kafka/store/group_balancer.go @@ -1,8 +1,6 @@ package store import ( - "bufio" - "bytes" "mokapi/kafka" "mokapi/kafka/joinGroup" "mokapi/kafka/syncGroup" @@ -18,8 +16,9 @@ type groupBalancer struct { sync chan syncdata stop chan bool - joins []joindata - config asyncapi3.BrokerBindings + joins []joindata + config asyncapi3.BrokerBindings + monitor *groupMonitor } type joindata struct { @@ -29,12 +28,17 @@ type joindata struct { 
protocols []joinGroup.Protocol rebalanceTimeout int sessionTimeout int + log func(res any) } type syncdata struct { - client *kafka.ClientContext - writer kafka.ResponseWriter - assigns map[string]*groupAssignment + client *kafka.ClientContext + writer kafka.ResponseWriter + generationId int32 + protocolType string + protocolName string + assigns map[string]*groupAssignment + log func(res any) } type protocoldata struct { @@ -49,13 +53,14 @@ type groupAssignment struct { raw []byte } -func newGroupBalancer(group *Group, config asyncapi3.BrokerBindings) *groupBalancer { +func newGroupBalancer(group *Group, config asyncapi3.BrokerBindings, monitor *groupMonitor) *groupBalancer { return &groupBalancer{ - group: group, - join: make(chan joindata), - sync: make(chan syncdata), - stop: make(chan bool, 1), - config: config, + group: group, + join: make(chan joindata), + sync: make(chan syncdata), + stop: make(chan bool, 1), + config: config, + monitor: monitor, } } @@ -119,13 +124,18 @@ func (b *groupBalancer) run() { syncs = append(syncs, s) log.Infof("kafka: group %v state changed from %v to %v", b.group.Name, states[b.group.State], states[Stable]) b.group.State = Stable - for _, s := range syncs { - memberName := s.client.Member[b.group.Name] + for _, sync := range syncs { + memberName := sync.client.Member[b.group.Name] assign := assigns[memberName] res := &syncGroup.Response{ - Assignment: assign.raw, + ProtocolType: sync.protocolType, + ProtocolName: sync.protocolName, + Assignment: assign.raw, } - go b.respond(s.writer, res) + go b.respond(sync.writer, res) + go func() { + sync.log(newKafkaSyncGroupResponse(res, assign)) + }() } for memberName, assign := range assigns { @@ -134,6 +144,10 @@ func (b *groupBalancer) run() { } } + if b.monitor != nil { + b.monitor.LastRebalancing(b.group.Name, time.Now()) + } + log.Infof("kafka: received assignments from leader '%v' for group '%v'", s.client.ClientId, b.group.Name) case assigns == nil: // waiting for leader syncs = append(syncs, s) @@ -219,23 +233,31 @@ StopWaitingForConsumers: MemberId: memberId, MetaData: counter[protocol].metadata[memberId], }) - go b.respond(j.writer, &joinGroup.Response{ + res := &joinGroup.Response{ GenerationId: int32(generation.Id), Leader: generation.LeaderId, MemberId: memberId, ProtocolType: j.protocolType, ProtocolName: protocol, - }) + } + go b.respond(j.writer, res) + go func() { + j.log(newKafkaJoinGroupResponse(res)) + }() } - go b.respond(leader.writer, &joinGroup.Response{ + res := &joinGroup.Response{ GenerationId: int32(generation.Id), Leader: generation.LeaderId, MemberId: generation.LeaderId, ProtocolType: leader.protocolType, ProtocolName: protocol, Members: members, - }) + } + go b.respond(leader.writer, res) + go func() { + leader.log(newKafkaJoinGroupResponse(res)) + }() } func (b *groupBalancer) sendRebalanceInProgress(w kafka.ResponseWriter) { @@ -251,28 +273,27 @@ func (b *groupBalancer) respond(w kafka.ResponseWriter, msg kafka.Message) { }() } -func newGroupAssignment(b []byte) *groupAssignment { - g := &groupAssignment{} - g.raw = b - r := bufio.NewReader(bytes.NewReader(b)) - d := kafka.NewDecoder(r, len(b)) - g.version = d.ReadInt16() - - g.topics = make(map[string][]int) - n := int(d.ReadInt32()) - for i := 0; i < n; i++ { - key := d.ReadString() - value := make([]int, 0) - - nPartition := int(d.ReadInt32()) - for j := 0; j < nPartition; j++ { - index := d.ReadInt32() - value = append(value, int(index)) - } - g.topics[key] = value +func newKafkaJoinGroupResponse(res *joinGroup.Response) 
*KafkaJoinGroupResponse { + r := &KafkaJoinGroupResponse{ + GenerationId: res.GenerationId, + ProtocolName: res.ProtocolName, + MemberId: res.MemberId, + LeaderId: res.Leader, + } + for _, m := range res.Members { + r.Members = append(r.Members, m.MemberId) } - g.userData = d.ReadBytes() + return r +} - return g +func newKafkaSyncGroupResponse(res *syncGroup.Response, assign *groupAssignment) *KafkaSyncGroupResponse { + return &KafkaSyncGroupResponse{ + ProtocolType: res.ProtocolType, + ProtocolName: res.ProtocolName, + Assignment: KafkaSyncGroupAssignment{ + Version: assign.version, + Topics: assign.topics, + }, + } } diff --git a/providers/asyncapi3/kafka/store/group_balancer_test.go b/providers/asyncapi3/kafka/store/group_balancer_test.go index ea6586845..6c88f1283 100644 --- a/providers/asyncapi3/kafka/store/group_balancer_test.go +++ b/providers/asyncapi3/kafka/store/group_balancer_test.go @@ -1,7 +1,6 @@ package store_test import ( - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/joinGroup" @@ -10,8 +9,11 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "testing" "time" + + "github.com/stretchr/testify/require" ) func TestGroupBalancing(t *testing.T) { @@ -364,7 +366,7 @@ func TestGroupBalancing(t *testing.T) { tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() b := kafkatest.NewBroker(kafkatest.WithHandler(s)) defer b.Close() diff --git a/providers/asyncapi3/kafka/store/heartbeat.go b/providers/asyncapi3/kafka/store/heartbeat.go index bf09e4e03..158c57545 100644 --- a/providers/asyncapi3/kafka/store/heartbeat.go +++ b/providers/asyncapi3/kafka/store/heartbeat.go @@ -1,15 +1,16 @@ package store import ( - log "github.com/sirupsen/logrus" "mokapi/kafka" "mokapi/kafka/heartbeat" + + log "github.com/sirupsen/logrus" ) func (s *Store) heartbeat(rw kafka.ResponseWriter, req *kafka.Request) error { r := req.Message.(*heartbeat.Request) - ctx := kafka.ClientFromContext(req) + ctx := kafka.ClientFromContext(req.Context) if _, ok := ctx.Member[r.GroupId]; !ok { log.Errorf("kafka Heartbeat: unknown member %v", ctx.ClientId) return rw.Write(&heartbeat.Response{ErrorCode: kafka.UnknownMemberId}) diff --git a/providers/asyncapi3/kafka/store/heartbeat_test.go b/providers/asyncapi3/kafka/store/heartbeat_test.go index 56e834181..bacaae6ff 100644 --- a/providers/asyncapi3/kafka/store/heartbeat_test.go +++ b/providers/asyncapi3/kafka/store/heartbeat_test.go @@ -1,7 +1,6 @@ package store_test import ( - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/heartbeat" @@ -10,7 +9,10 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "testing" + + "github.com/stretchr/testify/require" ) func TestHeartbeat(t *testing.T) { @@ -75,7 +77,7 @@ func TestHeartbeat(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() tc.fn(t, s) }) diff --git 
a/providers/asyncapi3/kafka/store/init_producerid.go b/providers/asyncapi3/kafka/store/init_producerid.go index 7d8178700..38787c4c1 100644 --- a/providers/asyncapi3/kafka/store/init_producerid.go +++ b/providers/asyncapi3/kafka/store/init_producerid.go @@ -24,17 +24,44 @@ func (s *Store) initProducerID(rw kafka.ResponseWriter, req *kafka.Request) erro res.ProducerId = ps.ProducerId res.ProducerEpoch = ps.ProducerEpoch } - return rw.Write(res) - } - res.ProducerId = atomic.AddInt64(&s.nextPID, 1) - res.ProducerEpoch = 0 - ps := &ProducerState{ProducerId: res.ProducerId, ProducerEpoch: res.ProducerEpoch} - s.producers[res.ProducerId] = ps + } else { + res.ProducerId = atomic.AddInt64(&s.nextPID, 1) + res.ProducerEpoch = 0 + ps := &ProducerState{ProducerId: res.ProducerId, ProducerEpoch: res.ProducerEpoch} + s.producers[res.ProducerId] = ps + } } else { res.ErrorCode = kafka.UnsupportedForMessageFormat log.Errorf("kafka: mokapi does not support transactional producer: %s", r.TransactionalId) } + go func() { + s.logRequest(req.Header, newKafkaInitProducerIdRequest(r))(newKafkaInitProducerIdResponse(res)) + }() + return rw.Write(res) } + +func newKafkaInitProducerIdRequest(req *initProducerId.Request) *KafkaInitProducerIdRequest { + return &KafkaInitProducerIdRequest{ + TransactionalId: req.TransactionalId, + TransactionTimeoutMs: req.TransactionTimeoutMs, + ProducerId: req.ProducerId, + ProducerEpoch: req.ProducerEpoch, + Enable2PC: req.Enable2PC, + } +} + +func newKafkaInitProducerIdResponse(res *initProducerId.Response) *KafkaInitProducerIdResponse { + r := &KafkaInitProducerIdResponse{ + ProducerId: res.ProducerId, + ProducerEpoch: res.ProducerEpoch, + OngoingTxnProducerId: res.OngoingTxnProducerId, + OngoingTxnProducerEpoch: res.OngoingTxnProducerEpoch, + } + if res.ErrorCode != kafka.None { + r.ErrorCode = res.ErrorCode.String() + } + return r +} diff --git a/providers/asyncapi3/kafka/store/joingroup.go b/providers/asyncapi3/kafka/store/joingroup.go index b40d7e73d..bec24bf07 100644 --- a/providers/asyncapi3/kafka/store/joingroup.go +++ b/providers/asyncapi3/kafka/store/joingroup.go @@ -7,34 +7,54 @@ import ( func (s *Store) joingroup(rw kafka.ResponseWriter, req *kafka.Request) error { r := req.Message.(*joinGroup.Request) - ctx := kafka.ClientFromContext(req) + ctx := kafka.ClientFromContext(req.Context) - b := s.getBrokerByHost(req.Host) + reqLog := newKafkaJoinGroupRequest(r) + + data := joindata{ + client: ctx, + writer: rw, + protocolType: r.ProtocolType, + protocols: r.Protocols, + rebalanceTimeout: int(r.RebalanceTimeoutMs), + sessionTimeout: int(r.SessionTimeoutMs), + log: s.logRequest(req.Header, reqLog), + } + + b := s.getBrokerByPort(req.Host) if b == nil { res := &joinGroup.Response{ ErrorCode: kafka.UnknownServerError, } + go func() { + resLog := &KafkaJoinGroupResponse{} + resLog.ErrorCode = res.ErrorCode.String() + + s.logRequest(req.Header, reqLog)(resLog) + }() return rw.Write(res) } g := s.GetOrCreateGroup(r.GroupId, b.Id) - if g.Coordinator.Id != b.Id { - return rw.Write(&joinGroup.Response{ErrorCode: kafka.NotCoordinator}) - } ctx.AddGroup(g.Name, r.MemberId) - data := joindata{ - client: ctx, - writer: rw, - protocolType: r.ProtocolType, - protocols: r.Protocols, - rebalanceTimeout: int(r.RebalanceTimeoutMs), - sessionTimeout: int(r.SessionTimeoutMs), - } - // balancer writes the response g.balancer.join <- data return nil } + +func newKafkaJoinGroupRequest(req *joinGroup.Request) *KafkaJoinGroupRequest { + r := 
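+ // copy only the fields worth logging from the join request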
&KafkaJoinGroupRequest{ + GroupName: req.GroupId, + MemberId: req.MemberId, + ProtocolType: req.ProtocolType, + } + for _, proto := range req.Protocols { + r.Protocols = append(r.Protocols, proto.Name) + } + return r +} diff --git a/providers/asyncapi3/kafka/store/listgroup_test.go b/providers/asyncapi3/kafka/store/listgroup_test.go index 2bebd0ae6..3961fc14d 100644 --- a/providers/asyncapi3/kafka/store/listgroup_test.go +++ b/providers/asyncapi3/kafka/store/listgroup_test.go @@ -1,7 +1,6 @@ package store_test import ( - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/kafkatest" @@ -9,7 +8,10 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "testing" + + "github.com/stretchr/testify/require" ) func TestListGroup(t *testing.T) { @@ -80,7 +82,7 @@ func TestListGroup(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() tc.fn(t, s) }) diff --git a/providers/asyncapi3/kafka/store/log.go b/providers/asyncapi3/kafka/store/log.go index 737f11c08..cca696fab 100644 --- a/providers/asyncapi3/kafka/store/log.go +++ b/providers/asyncapi3/kafka/store/log.go @@ -1,13 +1,15 @@ package store import ( + "fmt" "mokapi/kafka" "mokapi/runtime/events" + "strings" ) -type LogRecord func(log *KafkaLog, traits events.Traits) +type LogRecord func(log *KafkaMessageLog, traits events.Traits) -type KafkaLog struct { +type KafkaMessageLog struct { Offset int64 `json:"offset"` Key LogValue `json:"key"` Message LogValue `json:"message"` @@ -20,6 +22,8 @@ type KafkaLog struct { SequenceNumber int32 `json:"sequenceNumber"` Deleted bool `json:"deleted"` Api string `json:"api"` + ClientId string `json:"clientId"` + ScriptFile string `json:"script"` } type LogValue struct { @@ -27,7 +31,7 @@ type LogValue struct { Binary []byte `json:"binary"` } -func (l *KafkaLog) Title() string { +func (l *KafkaMessageLog) Title() string { if l.Key.Value != "" { return l.Key.Value } else { @@ -35,8 +39,8 @@ func (l *KafkaLog) Title() string { } } -func newKafkaLog(record *kafka.Record) *KafkaLog { - return &KafkaLog{ +func newKafkaLog(record *kafka.Record) *KafkaMessageLog { + return &KafkaMessageLog{ Key: LogValue{Binary: kafka.Read(record.Key)}, Message: LogValue{Binary: kafka.Read(record.Value)}, Headers: convertHeader(record.Headers), @@ -45,3 +49,162 @@ func newKafkaLog(record *kafka.Record) *KafkaLog { SequenceNumber: record.SequenceNumber, } } + +type KafkaRequestData interface { + Title() string +} + +func (s *Store) logRequest(h *kafka.Header, req KafkaRequest) func(res any) { + log := &KafkaRequestLogEvent{Api: s.cluster, Request: req} + log.Header.set(h) + return func(res any) { + log.Response = res + t := events.NewTraits(). + WithNamespace("kafka"). + WithName(s.cluster). + With("type", "request"). 
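+ // traits let the event store filter request logs by cluster, type, and client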
+ With("clientId", h.ClientId) + _ = s.eh.Push(log, t) + } +} + +type KafkaRequestLogEvent struct { + Api string `json:"api"` + Header KafkaRequestHeader `json:"header"` + Request KafkaRequest `json:"request"` + Response any `json:"response"` +} + +func (l *KafkaRequestLogEvent) Title() string { + return l.Request.Title() +} + +type KafkaRequest interface { + Title() string +} + +type KafkaRequestHeader struct { + RequestKey kafka.ApiKey `json:"requestKey"` + RequestName string `json:"requestName"` + Version int16 `json:"version"` +} + +func (h *KafkaRequestHeader) set(header *kafka.Header) { + h.RequestKey = header.ApiKey + h.RequestName = strings.Split(header.ApiKey.String(), " ")[0] + h.Version = header.ApiVersion +} + +type KafkaResponseError struct { + ErrorCode string `json:"errorCode"` + ErrorMessage string `json:"errorMessage"` +} + +type KafkaJoinGroupRequest struct { + GroupName string `json:"groupName"` + MemberId string `json:"memberId"` + ProtocolType string `json:"protocolType"` + Protocols []string `json:"protocols"` +} + +func (r *KafkaJoinGroupRequest) Title() string { + return fmt.Sprintf("JoinGroup %s", r.GroupName) +} + +type KafkaJoinGroupResponse struct { + KafkaResponseError + GenerationId int32 `json:"generationId"` + ProtocolName string `json:"protocolName"` + MemberId string `json:"memberId"` + LeaderId string `json:"leaderId"` + Members []string `json:"members,omitempty"` +} + +type KafkaSyncGroupRequest struct { + GroupName string `json:"groupName"` + GenerationId int32 `json:"generationId"` + MemberId string `json:"memberId"` + ProtocolType string `json:"protocolType"` + ProtocolName string `json:"protocolName"` + GroupAssignments map[string]KafkaSyncGroupAssignment `json:"groupAssignments,omitempty"` +} + +type KafkaSyncGroupAssignment struct { + Version int16 `json:"version"` + Topics map[string][]int `json:"topics"` +} + +func (r *KafkaSyncGroupRequest) Title() string { + return fmt.Sprintf("SyncGroup %s", r.GroupName) +} + +type KafkaSyncGroupResponse struct { + KafkaResponseError + ProtocolType string `json:"protocolType"` + ProtocolName string `json:"protocolName"` + Assignment KafkaSyncGroupAssignment `json:"assignment"` +} + +type KafkaListOffsetsRequest struct { + Topics map[string][]KafkaListOffsetsRequestPartition `json:"topics"` +} + +func (r *KafkaListOffsetsRequest) Title() string { + return "ListOffsets" +} + +type KafkaListOffsetsRequestPartition struct { + Partition int `json:"partition"` + Timestamp int64 `json:"timestamp"` +} + +type KafkaListOffsetsResponse struct { + Topics map[string][]KafkaListOffsetsResponsePartition `json:"topics"` +} + +type KafkaListOffsetsResponsePartition struct { + Partition int `json:"partition"` + Timestamp int64 `json:"timestamp"` + Offset int64 `json:"offset"` + Snapshot KafkaListOffsetsResponseSnapshot `json:"snapshot"` +} + +type KafkaListOffsetsResponseSnapshot struct { + StartOffset int64 `json:"startOffset"` + EndOffset int64 `json:"endOffset"` +} + +type KafkaFindCoordinatorRequest struct { + Key string `json:"key"` + KeyType int8 `json:"keyType"` +} + +func (r *KafkaFindCoordinatorRequest) Title() string { + return "FindCoordinator" +} + +type KafkaFindCoordinatorResponse struct { + KafkaResponseError + Host string `json:"host"` + Port int `json:"port"` +} + +type KafkaInitProducerIdRequest struct { + TransactionalId string `json:"transactionalId"` + TransactionTimeoutMs int32 `json:"transactionTimeoutMs"` + ProducerId int64 `json:"producerId"` + ProducerEpoch int16 `json:"producerEpoch"` + Enable2PC 
bool `json:"enable2PC"` +} + +func (r *KafkaInitProducerIdRequest) Title() string { + return "InitProducerId" +} + +type KafkaInitProducerIdResponse struct { + KafkaResponseError + ProducerId int64 `json:"producerId"` + ProducerEpoch int16 `json:"producerEpoch"` + OngoingTxnProducerId int64 `json:"ongoingTxnProducerId"` + OngoingTxnProducerEpoch int16 `json:"ongoingTxnProducerEpoch"` +} diff --git a/providers/asyncapi3/kafka/store/log_cleaner.go b/providers/asyncapi3/kafka/store/log_cleaner.go index 08f2990b7..d55757501 100644 --- a/providers/asyncapi3/kafka/store/log_cleaner.go +++ b/providers/asyncapi3/kafka/store/log_cleaner.go @@ -38,7 +38,7 @@ func (s *Store) cleanLog(b *Broker) { } for _, p := range topic.Partitions { - if p.Leader.Id != b.Id { + if p.leader.Id != b.Id { continue } diff --git a/providers/asyncapi3/kafka/store/log_cleaner_test.go b/providers/asyncapi3/kafka/store/log_cleaner_test.go index 0ed726040..111bdbd66 100644 --- a/providers/asyncapi3/kafka/store/log_cleaner_test.go +++ b/providers/asyncapi3/kafka/store/log_cleaner_test.go @@ -1,16 +1,18 @@ package store_test import ( - "github.com/sirupsen/logrus/hooks/test" - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/providers/asyncapi3" "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "testing" "time" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" ) func TestCleaner(t *testing.T) { @@ -33,7 +35,7 @@ func TestCleaner(t *testing.T) { ), asyncapi3test.WithChannel("foo"), ) - s := store.New(cfg, enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(cfg, enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) topic := s.Topic("foo") require.NotNil(t, topic) @@ -71,7 +73,7 @@ func TestCleaner(t *testing.T) { ), asyncapi3test.WithChannel("foo"), ) - s := store.New(cfg, enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(cfg, enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) topic := s.Topic("foo") require.NotNil(t, topic) diff --git a/providers/asyncapi3/kafka/store/log_test.go b/providers/asyncapi3/kafka/store/log_test.go index 0a8133a89..3ce4ee878 100644 --- a/providers/asyncapi3/kafka/store/log_test.go +++ b/providers/asyncapi3/kafka/store/log_test.go @@ -1,18 +1,19 @@ package store_test import ( - "github.com/stretchr/testify/require" "mokapi/providers/asyncapi3/kafka/store" "testing" + + "github.com/stretchr/testify/require" ) func TestKafkaLog_Title(t *testing.T) { - v := store.KafkaLog{} + v := store.KafkaMessageLog{} require.Equal(t, "", v.Title()) - v = store.KafkaLog{Key: store.LogValue{Value: "foo"}} + v = store.KafkaMessageLog{Key: store.LogValue{Value: "foo"}} require.Equal(t, "foo", v.Title()) - v = store.KafkaLog{Key: store.LogValue{Binary: []byte("foo")}} + v = store.KafkaMessageLog{Key: store.LogValue{Binary: []byte("foo")}} require.Equal(t, "foo", v.Title()) } diff --git a/providers/asyncapi3/kafka/store/metadata.go b/providers/asyncapi3/kafka/store/metadata.go index 2416ad7c8..b50150edd 100644 --- a/providers/asyncapi3/kafka/store/metadata.go +++ b/providers/asyncapi3/kafka/store/metadata.go @@ -10,38 +10,39 @@ func (s *Store) metadata(rw kafka.ResponseWriter, req *kafka.Request) error { r := req.Message.(*metaData.Request) brokers := s.Brokers() - ctx := kafka.ClientFromContext(req) + ctx := kafka.ClientFromContext(req.Context) ctx.AllowAutoTopicCreation = r.AllowAutoTopicCreation res 
:= &metaData.Response{ Brokers: make([]metaData.ResponseBroker, 0, len(brokers)), Topics: make([]metaData.ResponseTopic, 0, len(r.Topics)), - ClusterId: "mokapi", + ClusterId: s.cluster, } - for _, b := range brokers { - res.Brokers = append(res.Brokers, metaData.ResponseBroker{ - NodeId: int32(b.Id), - Host: b.Host, - Port: int32(b.Port), - }) + // Mokapi does no leader management, therefore only the current server is returned as the broker. + host, port := parseHostAndPort(req.Host) + b := s.getBrokerByPort(req.Host) + if b != nil && b.Host != "" { + host = b.Host } + res.Brokers = append(res.Brokers, metaData.ResponseBroker{ + NodeId: 0, + Host: host, + Port: int32(port), + }) - b := s.getBrokerByHost(req.Host) var getTopic func(string) (*Topic, kafka.ErrorCode) if len(r.Topics) > 0 { getTopic = func(name string) (*Topic, kafka.ErrorCode) { if kafka.ValidateTopicName(name) != nil { return nil, kafka.InvalidTopic - } else { - topic := s.Topic(name) - if topic != nil && isTopicAvailable(topic, b) { - return topic, kafka.None - } else { - return nil, kafka.UnknownTopicOrPartition - } } + topic := s.Topic(name) + if topic != nil && isTopicAvailable(topic, b) { + return topic, kafka.None + } + return nil, kafka.UnknownTopicOrPartition } } else { topics := make(map[string]*Topic) @@ -71,21 +72,10 @@ func (s *Store) metadata(rw kafka.ResponseWriter, req *kafka.Request) error { Name: t.Name, } - for i, p := range t.Partitions { - replicas := p.Replicas - nodes := make([]int32, 0, len(replicas)) - for _, n := range replicas { - nodes = append(nodes, int32(n)) - } - brokerId := -1 - if p.Leader != nil { - brokerId = p.Leader.Id - } + for i := range t.Partitions { resTopic.Partitions = append(resTopic.Partitions, metaData.ResponsePartition{ PartitionIndex: int32(i), - LeaderId: int32(brokerId), - ReplicaNodes: nodes, - IsrNodes: nodes, + LeaderId: 0, }) } @@ -101,7 +91,7 @@ func isTopicAvailable(t *Topic, b *Broker) bool { } for _, s := range t.Config.Servers { name := path.Base(s.Ref) - if name == b.Name { + if b != nil && name == b.Name { return true } } diff --git a/providers/asyncapi3/kafka/store/metadata_test.go b/providers/asyncapi3/kafka/store/metadata_test.go index 60ce9cc41..d0ba51f87 100644 --- a/providers/asyncapi3/kafka/store/metadata_test.go +++ b/providers/asyncapi3/kafka/store/metadata_test.go @@ -2,7 +2,6 @@ package store_test import ( "fmt" - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/kafkatest" @@ -11,8 +10,11 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "strings" "testing" + + "github.com/stretchr/testify/require" ) func TestMetadata(t *testing.T) { @@ -24,6 +26,7 @@ func TestMetadata(t *testing.T) { "default", func(t *testing.T, s *store.Store) { s.Update(asyncapi3test.NewConfig( + asyncapi3test.WithInfo("Cluster Test", "", ""), asyncapi3test.WithServer("", "kafka", "127.0.0.1:9092"), asyncapi3test.WithChannel("foo"), )) @@ -36,6 +39,7 @@ func TestMetadata(t *testing.T) { // controller require.Equal(t, int32(0), res.ControllerId) + require.Equal(t, "Cluster Test", res.ClusterId) // brokers require.Len(t, res.Brokers, 1) @@ -55,7 +59,7 @@ func TestMetadata(t *testing.T) { require.Len(t, res.Topics[0].Partitions[0].IsrNodes, 0) require.False(t, res.Topics[0].IsInternal) - require.False(t, kafka.ClientFromContext(r).AllowAutoTopicCreation) + require.False(t, kafka.ClientFromContext(r.Context).AllowAutoTopicCreation) }, 
}, { @@ -110,7 +114,7 @@ func TestMetadata(t *testing.T) { }) s.ServeMessage(rr, r) - require.True(t, kafka.ClientFromContext(r).AllowAutoTopicCreation) + require.True(t, kafka.ClientFromContext(r.Context).AllowAutoTopicCreation) }, }, { @@ -145,6 +149,7 @@ func TestMetadata(t *testing.T) { )) rr := kafkatest.NewRecorder() r := kafkatest.NewRequest("kafkatest", 4, &metaData.Request{}) + r.Host = "127.0.0.1:9092" s.ServeMessage(rr, r) res, ok := rr.Message.(*metaData.Response) @@ -180,6 +185,14 @@ func TestMetadata(t *testing.T) { res, ok = rr.Message.(*metaData.Response) require.True(t, ok) require.Len(t, res.Topics, 0) + + r = kafkatest.NewRequest("kafkatest", 4, &metaData.Request{}) + r.Host = "foo.bar:9093" + s.ServeMessage(rr, r) + + res, ok = rr.Message.(*metaData.Response) + require.True(t, ok) + require.Len(t, res.Topics, 0) }, }, } @@ -190,7 +203,7 @@ func TestMetadata(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() tc.fn(t, s) }) diff --git a/providers/asyncapi3/kafka/store/offset.go b/providers/asyncapi3/kafka/store/offset.go index 82f102a9d..24afcaa89 100644 --- a/providers/asyncapi3/kafka/store/offset.go +++ b/providers/asyncapi3/kafka/store/offset.go @@ -1,16 +1,24 @@ package store import ( - log "github.com/sirupsen/logrus" "mokapi/kafka" "mokapi/kafka/offset" + + log "github.com/sirupsen/logrus" ) func (s *Store) offset(rw kafka.ResponseWriter, req *kafka.Request) error { r := req.Message.(*offset.Request) res := &offset.Response{Topics: make([]offset.ResponseTopic, 0)} - ctx := kafka.ClientFromContext(req) + ctx := kafka.ClientFromContext(req.Context) + + reqLog := &KafkaListOffsetsRequest{ + Topics: make(map[string][]KafkaListOffsetsRequestPartition), + } + resLog := &KafkaListOffsetsResponse{ + Topics: make(map[string][]KafkaListOffsetsResponsePartition), + } for _, rt := range r.Topics { topic := s.Topic(rt.Name) @@ -21,6 +29,11 @@ func (s *Store) offset(rw kafka.ResponseWriter, req *kafka.Request) error { Index: rp.Index, Timestamp: rp.Timestamp, } + + reqLog.Topics[rt.Name] = append(reqLog.Topics[rt.Name], KafkaListOffsetsRequestPartition{ + Partition: int(rp.Index), + }) + if topic == nil { log.Errorf("kafka Offset: unknown topic %v, client=%v", topic, ctx.ClientId) resPartition.ErrorCode = kafka.UnknownTopicOrPartition @@ -34,8 +47,10 @@ func (s *Store) offset(rw kafka.ResponseWriter, req *kafka.Request) error { switch { case rp.Timestamp == kafka.Earliest || rp.Timestamp == 0: resPartition.Offset = partition.StartOffset() + resPartition.Timestamp = partition.OffsetTimestamp(resPartition.Offset) case rp.Timestamp == kafka.Latest: resPartition.Offset = partition.Offset() + resPartition.Timestamp = partition.OffsetTimestamp(resPartition.Offset) default: // TODO // look up the offsets for the given partitions by timestamp. 
The returned offset @@ -54,9 +69,18 @@ func (s *Store) offset(rw kafka.ResponseWriter, req *kafka.Request) error { resPartition.ErrorCode = kafka.UnknownServerError } } + resLog.Topics[rt.Name] = append(resLog.Topics[rt.Name], KafkaListOffsetsResponsePartition{ + Partition: int(rp.Index), + Timestamp: resPartition.Timestamp, + Snapshot: KafkaListOffsetsResponseSnapshot{ + StartOffset: partition.StartOffset(), + EndOffset: partition.Offset(), + }, + }) } } resPartitions = append(resPartitions, resPartition) + } res.Topics = append(res.Topics, offset.ResponseTopic{ @@ -65,5 +89,9 @@ func (s *Store) offset(rw kafka.ResponseWriter, req *kafka.Request) error { }) } + go func() { + s.logRequest(req.Header, reqLog)(resLog) + }() + return rw.Write(res) } diff --git a/providers/asyncapi3/kafka/store/offset_commit.go b/providers/asyncapi3/kafka/store/offset_commit.go index f188f0419..c012eebea 100644 --- a/providers/asyncapi3/kafka/store/offset_commit.go +++ b/providers/asyncapi3/kafka/store/offset_commit.go @@ -2,11 +2,12 @@ package store import ( "context" - log "github.com/sirupsen/logrus" "mokapi/kafka" "mokapi/kafka/offsetCommit" "mokapi/runtime/monitor" "strconv" + + log "github.com/sirupsen/logrus" ) func (s *Store) offsetCommit(rw kafka.ResponseWriter, req *kafka.Request) error { @@ -15,7 +16,7 @@ func (s *Store) offsetCommit(rw kafka.ResponseWriter, req *kafka.Request) error Topics: make([]offsetCommit.ResponseTopic, 0, len(r.Topics)), } - ctx := kafka.ClientFromContext(req) + ctx := kafka.ClientFromContext(req.Context) for _, rt := range r.Topics { log.Infof("kafa OffsetCommit: topic %v, client=%v", rt.Name, ctx.ClientId) @@ -82,4 +83,5 @@ func (s *Store) processMetricsOffsetCommit(ctx context.Context, g *Group, topic lag := float64(partition.Offset() - g.Commits[topic][partition.Index]) m.Lags.WithLabel(s.cluster, g.Name, topic, strconv.Itoa(partition.Index)).Set(lag) + m.Commits.WithLabel(s.cluster, g.Name, topic, strconv.Itoa(partition.Index)).Set(float64(g.Commits[topic][partition.Index])) } diff --git a/providers/asyncapi3/kafka/store/offset_commit_test.go b/providers/asyncapi3/kafka/store/offset_commit_test.go index 03e870787..eaf1d7532 100644 --- a/providers/asyncapi3/kafka/store/offset_commit_test.go +++ b/providers/asyncapi3/kafka/store/offset_commit_test.go @@ -1,9 +1,6 @@ package store_test import ( - "github.com/sirupsen/logrus" - "github.com/sirupsen/logrus/hooks/test" - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/kafkatest" @@ -12,8 +9,13 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "mokapi/schema/json/schema/schematest" "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" ) func TestOffsetCommit(t *testing.T) { @@ -186,7 +188,7 @@ func TestOffsetCommit(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() tc.fn(t, s) }) @@ -331,7 +333,7 @@ func TestOffsetCommit_Validation(t *testing.T) { for _, tc := range testcases { tc := tc t.Run(tc.name, func(t *testing.T) { - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), 
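+ // the store requires a kafka monitor for its metrics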
&eventstest.Handler{}, monitor.NewKafka()) defer s.Close() hook := test.NewGlobal() tc.fn(t, s, hook) diff --git a/providers/asyncapi3/kafka/store/offset_fetch.go b/providers/asyncapi3/kafka/store/offset_fetch.go index 60f57b576..f63af80ae 100644 --- a/providers/asyncapi3/kafka/store/offset_fetch.go +++ b/providers/asyncapi3/kafka/store/offset_fetch.go @@ -13,7 +13,7 @@ func (s *Store) offsetFetch(rw kafka.ResponseWriter, req *kafka.Request) error { r := req.Message.(*offsetFetch.Request) res := &offsetFetch.Response{} - ctx := kafka.ClientFromContext(req) + ctx := kafka.ClientFromContext(req.Context) if req.Header.ApiVersion >= 8 { for _, g := range r.Groups { diff --git a/providers/asyncapi3/kafka/store/offset_fetch_test.go b/providers/asyncapi3/kafka/store/offset_fetch_test.go index 6f587b9bc..0037ad019 100644 --- a/providers/asyncapi3/kafka/store/offset_fetch_test.go +++ b/providers/asyncapi3/kafka/store/offset_fetch_test.go @@ -10,6 +10,7 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "mokapi/schema/json/schema/schematest" "testing" @@ -308,7 +309,7 @@ func TestOffsetFetch(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() tc.fn(t, s) }) @@ -451,7 +452,7 @@ func TestOffsetFetch_Validation(t *testing.T) { for _, tc := range testcases { tc := tc t.Run(tc.name, func(t *testing.T) { - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() hook := test.NewGlobal() tc.fn(t, s, hook) diff --git a/providers/asyncapi3/kafka/store/offset_test.go b/providers/asyncapi3/kafka/store/offset_test.go index 1be5d5805..fd09f01ff 100644 --- a/providers/asyncapi3/kafka/store/offset_test.go +++ b/providers/asyncapi3/kafka/store/offset_test.go @@ -1,7 +1,6 @@ package store_test import ( - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/kafkatest" @@ -9,7 +8,10 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" + "mokapi/runtime/monitor" "testing" + + "github.com/stretchr/testify/require" ) func TestOffsets(t *testing.T) { @@ -80,7 +82,7 @@ func TestOffsets(t *testing.T) { func(t *testing.T, s *store.Store) { s.Update(asyncapi3test.NewConfig( asyncapi3test.WithChannel("foo"))) - s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ + _, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), @@ -88,6 +90,7 @@ func TestOffsets(t *testing.T) { }, }, }) + require.NoError(t, err) rr := kafkatest.NewRecorder() s.ServeMessage(rr, kafkatest.NewRequest("kafkatest", 3, &offset.Request{ @@ -106,7 +109,7 @@ func TestOffsets(t *testing.T) { require.True(t, ok) p := res.Topics[0].Partitions[0] require.Equal(t, kafka.None, p.ErrorCode) - require.Equal(t, kafka.Earliest, p.Timestamp) + require.Greater(t, p.Timestamp, int64(0)) require.Equal(t, int64(0), p.Offset) }, }, @@ -115,7 +118,7 @@ func TestOffsets(t *testing.T) { func(t *testing.T, s *store.Store) { s.Update(asyncapi3test.NewConfig( asyncapi3test.WithChannel("foo"))) - 
s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ + _, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), @@ -123,6 +126,7 @@ func TestOffsets(t *testing.T) { }, }, }) + require.NoError(t, err) rr := kafkatest.NewRecorder() s.ServeMessage(rr, kafkatest.NewRequest("kafkatest", 3, &offset.Request{ @@ -194,7 +198,7 @@ func TestOffsets(t *testing.T) { func(t *testing.T, s *store.Store) { s.Update(asyncapi3test.NewConfig( asyncapi3test.WithChannel("foo"))) - s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ + _, err := s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), @@ -202,6 +206,8 @@ func TestOffsets(t *testing.T) { }, }, }) + require.NoError(t, err) + rr := kafkatest.NewRecorder() s.ServeMessage(rr, kafkatest.NewRequest("kafkatest", 0, &offset.Request{ Topics: []offset.RequestTopic{ @@ -230,7 +236,7 @@ func TestOffsets(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), &eventstest.Handler{}, monitor.NewKafka()) defer s.Close() tc.fn(t, s) }) diff --git a/providers/asyncapi3/kafka/store/partition.go b/providers/asyncapi3/kafka/store/partition.go index 868024e62..83a13bc47 100644 --- a/providers/asyncapi3/kafka/store/partition.go +++ b/providers/asyncapi3/kafka/store/partition.go @@ -22,8 +22,8 @@ type Partition struct { Tail int64 Topic *Topic - Leader *Broker - Replicas []int + // only for log cleaner + leader *Broker validator *validator logger LogRecord @@ -46,11 +46,13 @@ type Segment struct { type record struct { Data *kafka.Record - Log *KafkaLog + Log *KafkaMessageLog } type WriteOptions struct { SkipValidation bool + ClientId string + ScriptFile string } type WriteResult struct { @@ -67,19 +69,16 @@ type PartitionProducerState struct { } func newPartition(index int, brokers Brokers, logger LogRecord, trigger Trigger, topic *Topic) *Partition { - brokerIds := make([]int, 0, len(brokers)) brokerList := make([]*Broker, 0, len(brokers)) - for i, b := range brokers { + for _, b := range brokers { if topic.Config != nil && len(topic.Config.Servers) > 0 { if slices.ContainsFunc(topic.Config.Servers, func(s *asyncapi3.ServerRef) bool { return s.Value == b.config }) { brokerList = append(brokerList, b) - brokerIds = append(brokerIds, i) } } else { brokerList = append(brokerList, b) - brokerIds = append(brokerIds, i) } } p := &Partition{ @@ -93,14 +92,8 @@ func newPartition(index int, brokers Brokers, logger LogRecord, trigger Trigger, producers: make(map[int64]*PartitionProducerState), } if len(brokerList) > 0 { - p.Leader = brokerList[0] + p.leader = brokerList[0] } - if len(brokerList) > 1 { - p.Replicas = brokerIds[1:] - } else { - p.Replicas = make([]int, 0) - } - return p } @@ -149,6 +142,10 @@ func (p *Partition) Write(batch kafka.RecordBatch) (WriteResult, error) { return p.write(batch, WriteOptions{SkipValidation: false}) } +func (p *Partition) WriteWithOptions(batch kafka.RecordBatch, opts WriteOptions) (WriteResult, error) { + return p.write(batch, opts) +} + func (p *Partition) write(batch kafka.RecordBatch, opts WriteOptions) (WriteResult, error) { if p == nil { return WriteResult{}, fmt.Errorf("partition is nil") @@ -223,6 +220,9 @@ func (p *Partition) write(batch kafka.RecordBatch, opts WriteOptions) (WriteResu baseTime = r.Time } + kLog.ClientId = opts.ClientId + 
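+ // ScriptFile identifies the script that produced the record, if any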
kLog.ScriptFile = opts.ScriptFile + writeFuncs = append(writeFuncs, func() { r.Offset = p.Tail @@ -276,6 +276,18 @@ func (p *Partition) StartOffset() int64 { return p.Head } +func (p *Partition) OffsetTimestamp(offset int64) int64 { + s := p.GetSegment(offset) + if s == nil { + return -1 + } + r := s.record(offset) + if r == nil { + return -1 + } + return r.Time.Unix() +} + func (p *Partition) GetSegment(offset int64) *Segment { p.m.RLock() defer p.m.RUnlock() @@ -318,17 +330,6 @@ func (p *Partition) removeSegment(s *Segment) { delete(p.Segments, s.Head) } -func (p *Partition) removeReplica(id int) { - i := 0 - for _, replica := range p.Replicas { - if replica != id { - p.Replicas[i] = replica - i++ - } - } - p.Replicas = p.Replicas[:i] -} - func (p *Partition) addSegment() *Segment { p.m.RLock() defer p.m.RUnlock() @@ -360,7 +361,10 @@ func (s *Segment) contains(offset int64) bool { } func (s *Segment) record(offset int64) *kafka.Record { - index := offset - s.Head + index := int(offset - s.Head) + if index < 0 || index >= len(s.Log) { + return nil + } return s.Log[index].Data } diff --git a/providers/asyncapi3/kafka/store/partition_test.go b/providers/asyncapi3/kafka/store/partition_test.go index f15c955f9..773be892f 100644 --- a/providers/asyncapi3/kafka/store/partition_test.go +++ b/providers/asyncapi3/kafka/store/partition_test.go @@ -16,7 +16,7 @@ func TestPartition(t *testing.T) { p := newPartition( 0, map[int]*Broker{1: {Id: 1}}, - func(log *KafkaLog, traits events.Traits) {}, + func(log *KafkaMessageLog, traits events.Traits) {}, func(record *kafka.Record, schemaId int) bool { return false }, &Topic{}, ) @@ -24,8 +24,6 @@ func TestPartition(t *testing.T) { require.Equal(t, 0, p.Index) require.Equal(t, int64(0), p.StartOffset()) require.Equal(t, int64(0), p.Offset()) - require.Equal(t, 1, p.Leader.Id) - require.Equal(t, []int{}, p.Replicas) } func TestPartition_Write(t *testing.T) { @@ -33,7 +31,7 @@ func TestPartition_Write(t *testing.T) { p := newPartition( 0, map[int]*Broker{1: {Id: 1}}, - func(log *KafkaLog, traits events.Traits) { + func(log *KafkaMessageLog, traits events.Traits) { logs = append(logs, log.Offset) }, func(record *kafka.Record, schemaId int) bool { return false }, @@ -80,7 +78,7 @@ func TestPartition_Read_Empty(t *testing.T) { p := newPartition( 0, map[int]*Broker{1: {Id: 1}}, - func(log *KafkaLog, traits events.Traits) {}, + func(log *KafkaMessageLog, traits events.Traits) {}, func(record *kafka.Record, schemaId int) bool { return false }, &Topic{}, ) @@ -93,7 +91,7 @@ func TestPartition_Read(t *testing.T) { p := newPartition( 0, map[int]*Broker{1: {Id: 1}}, - func(log *KafkaLog, traits events.Traits) {}, + func(log *KafkaMessageLog, traits events.Traits) {}, func(record *kafka.Record, schemaId int) bool { return false }, &Topic{}, ) @@ -120,7 +118,7 @@ func TestPartition_Read_OutOfOffset_Empty(t *testing.T) { p := newPartition( 0, map[int]*Broker{1: {Id: 1}}, - func(log *KafkaLog, traits events.Traits) {}, + func(log *KafkaMessageLog, traits events.Traits) {}, func(record *kafka.Record, schemaId int) bool { return false }, &Topic{}, ) @@ -133,7 +131,7 @@ func TestPartition_Read_OutOfOffset(t *testing.T) { p := newPartition( 0, map[int]*Broker{1: {Id: 1}}, - func(log *KafkaLog, traits events.Traits) {}, + func(log *KafkaMessageLog, traits events.Traits) {}, func(record *kafka.Record, schemaId int) bool { return false }, &Topic{}, ) @@ -157,7 +155,7 @@ func TestPartition_Write_Value_Validator(t *testing.T) { p := newPartition( 0, map[int]*Broker{1: {Id: 1}}, - 
func(log *KafkaLog, _ events.Traits) { + func(log *KafkaMessageLog, _ events.Traits) { }, func(record *kafka.Record, schemaId int) bool { return false }, &Topic{Config: &asyncapi3.Channel{Bindings: asyncapi3.ChannelBindings{ Kafka: asyncapi3.TopicBindings{ValueSchemaValidation: true}, @@ -220,7 +218,7 @@ func TestPartition_Write_Value_Validator(t *testing.T) { func TestPatition_Retention(t *testing.T) { p := newPartition(0, map[int]*Broker{1: {Id: 1}}, - func(log *KafkaLog, traits events.Traits) {}, + func(log *KafkaMessageLog, traits events.Traits) {}, func(record *kafka.Record, schemaId int) bool { return false }, &Topic{}, ) @@ -261,3 +259,31 @@ func TestPatition_Retention(t *testing.T) { require.Len(t, p.Segments, 1) require.Equal(t, int64(2), p.Head) } + +func TestPartition_Write_Producer_ClientId(t *testing.T) { + var logs []*KafkaMessageLog + p := newPartition( + 0, + map[int]*Broker{1: {Id: 1}}, + func(log *KafkaMessageLog, traits events.Traits) { + logs = append(logs, log) + }, + func(record *kafka.Record, schemaId int) bool { return false }, + &Topic{}, + ) + + wr, err := p.WriteWithOptions(kafka.RecordBatch{ + Records: []*kafka.Record{ + { + Time: time.Now(), + Key: kafka.NewBytes([]byte(`"foo-1"`)), + Value: kafka.NewBytes([]byte(`"bar-1"`)), + Headers: nil, + }, + }, + }, WriteOptions{ClientId: "foo"}) + require.NoError(t, err) + require.Len(t, wr.Records, 0) + + require.Equal(t, "foo", logs[0].ClientId) +} diff --git a/providers/asyncapi3/kafka/store/produce.go b/providers/asyncapi3/kafka/store/produce.go index 6ad0af618..b2f01634c 100644 --- a/providers/asyncapi3/kafka/store/produce.go +++ b/providers/asyncapi3/kafka/store/produce.go @@ -13,10 +13,12 @@ import ( func (s *Store) produce(rw kafka.ResponseWriter, req *kafka.Request) error { r := req.Message.(*produce.Request) res := &produce.Response{} - ctx := kafka.ClientFromContext(req) + ctx := kafka.ClientFromContext(req.Context) m, withMonitor := monitor.KafkaFromContext(req.Context) - opts := WriteOptions{} + opts := WriteOptions{ + ClientId: ctx.ClientId, + } for _, rt := range r.Topics { topic := s.Topic(rt.Name) diff --git a/providers/asyncapi3/kafka/store/produce_test.go b/providers/asyncapi3/kafka/store/produce_test.go index 71ba202ff..7c7ff7ead 100644 --- a/providers/asyncapi3/kafka/store/produce_test.go +++ b/providers/asyncapi3/kafka/store/produce_test.go @@ -130,11 +130,14 @@ func TestProduce(t *testing.T) { logs := sm.GetEvents(events.NewTraits().WithNamespace("kafka").WithName("test").With("topic", "foo")) require.Len(t, logs, 2) - require.Equal(t, []byte("foo-2"), logs[0].Data.(*store.KafkaLog).Key.Binary) - require.Equal(t, []byte("bar-2"), logs[0].Data.(*store.KafkaLog).Message.Binary) - require.Equal(t, int64(1), logs[0].Data.(*store.KafkaLog).Offset) - - require.Equal(t, int64(0), logs[1].Data.(*store.KafkaLog).Offset) + require.Equal(t, []byte("foo-2"), logs[0].Data.(*store.KafkaMessageLog).Key.Binary) + require.Equal(t, []byte("bar-2"), logs[0].Data.(*store.KafkaMessageLog).Message.Binary) + require.Equal(t, int64(1), logs[0].Data.(*store.KafkaMessageLog).Offset) + require.Equal(t, "kafkatest", logs[0].Data.(*store.KafkaMessageLog).ClientId) + require.Equal(t, "kafkatest", logs[0].Traits.Get("clientId")) + + require.Equal(t, int64(0), logs[1].Data.(*store.KafkaMessageLog).Offset) + require.Equal(t, "kafkatest", logs[1].Data.(*store.KafkaMessageLog).ClientId) }, }, { @@ -369,7 +372,7 @@ func TestProduce(t *testing.T) { ), )) hook := test.NewGlobal() - ctx := kafka.NewClientContext(context.Background(), 
"127.0.0.1:42424") + ctx := kafka.NewClientContext(context.Background(), "127.0.0.1:42424", "127.0.0.1:9092") sm.SetStore(5, events.NewTraits().WithNamespace("kafka")) rr := kafkatest.NewRecorder() @@ -411,12 +414,12 @@ func TestProduce(t *testing.T) { logs := sm.GetEvents(events.NewTraits().WithNamespace("kafka").WithName("test").With("topic", "foo")) require.Len(t, logs, 1) - require.Equal(t, `"foo-1"`, string(logs[0].Data.(*store.KafkaLog).Key.Binary)) - require.Equal(t, "4", string(logs[0].Data.(*store.KafkaLog).Message.Binary)) - require.Equal(t, int64(0), logs[0].Data.(*store.KafkaLog).Offset) - require.Equal(t, int64(1), logs[0].Data.(*store.KafkaLog).ProducerId) - require.Equal(t, int16(0), logs[0].Data.(*store.KafkaLog).ProducerEpoch) - require.Equal(t, int32(0), logs[0].Data.(*store.KafkaLog).SequenceNumber) + require.Equal(t, `"foo-1"`, string(logs[0].Data.(*store.KafkaMessageLog).Key.Binary)) + require.Equal(t, "4", string(logs[0].Data.(*store.KafkaMessageLog).Message.Binary)) + require.Equal(t, int64(0), logs[0].Data.(*store.KafkaMessageLog).Offset) + require.Equal(t, int64(1), logs[0].Data.(*store.KafkaMessageLog).ProducerId) + require.Equal(t, int16(0), logs[0].Data.(*store.KafkaMessageLog).ProducerEpoch) + require.Equal(t, int32(0), logs[0].Data.(*store.KafkaMessageLog).SequenceNumber) }, }, { @@ -435,7 +438,7 @@ func TestProduce(t *testing.T) { ), )) hook := test.NewGlobal() - ctx := kafka.NewClientContext(context.Background(), "127.0.0.1:42424") + ctx := kafka.NewClientContext(context.Background(), "127.0.0.1:42424", "127.0.0.1:9092") rr := kafkatest.NewRecorder() s.ServeMessage(rr, kafkatest.NewRequest("MOKAPITEST1", 3, &initProducerId.Request{}).WithContext(ctx)) @@ -493,7 +496,7 @@ func TestProduce(t *testing.T) { ), )) hook := test.NewGlobal() - ctx := kafka.NewClientContext(context.Background(), "127.0.0.1:42424") + ctx := kafka.NewClientContext(context.Background(), "127.0.0.1:42424", "127.0.0.1:9092") rr := kafkatest.NewRecorder() s.ServeMessage(rr, kafkatest.NewRequest("MOKAPITEST1", 3, &initProducerId.Request{}).WithContext(ctx)) @@ -541,7 +544,7 @@ func TestProduce(t *testing.T) { tc := tc t.Run(tc.name, func(t *testing.T) { sm := &events.StoreManager{} - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), sm) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), sm, monitor.NewKafka()) defer s.Close() tc.fn(t, s, sm) }) @@ -555,7 +558,7 @@ func TestProduceTriggersEvent(t *testing.T) { s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngineWithHandler(func(event string, args ...interface{}) []*common.Action { triggerCount++ return nil - }), sm) + }), sm, monitor.NewKafka()) defer s.Close() s.Update(asyncapi3test.NewConfig( diff --git a/providers/asyncapi3/kafka/store/store.go b/providers/asyncapi3/kafka/store/store.go index 61af430cf..31363cb06 100644 --- a/providers/asyncapi3/kafka/store/store.go +++ b/providers/asyncapi3/kafka/store/store.go @@ -39,6 +39,8 @@ type Store struct { eventEmitter common.EventEmitter eh events.Handler producers map[int64]*ProducerState + monitor *monitor.Kafka + clients map[string]*kafka.ClientContext nextPID int64 m sync.RWMutex @@ -49,19 +51,21 @@ type ProducerState struct { ProducerEpoch int16 } -func NewEmpty(eventEmitter common.EventEmitter, eh events.Handler) *Store { +func NewEmpty(eventEmitter common.EventEmitter, eh events.Handler, monitor *monitor.Kafka) *Store { return &Store{ topics: make(map[string]*Topic), brokers: make(map[int]*Broker), groups: make(map[string]*Group), eventEmitter: 
eventEmitter, eh: eh, + monitor: monitor, producers: make(map[int64]*ProducerState), + clients: make(map[string]*kafka.ClientContext), } } -func New(config *asyncapi3.Config, eventEmitter common.EventEmitter, eh events.Handler) *Store { - s := NewEmpty(eventEmitter, eh) +func New(config *asyncapi3.Config, eventEmitter common.EventEmitter, eh events.Handler, monitor *monitor.Kafka) *Store { + s := NewEmpty(eventEmitter, eh, monitor) s.Update(config) return s } @@ -137,32 +141,36 @@ func (s *Store) GetOrCreateGroup(name string, brokerId int) *Group { return g } - g := NewGroup(name, b) + g := s.newGroup(name, b) s.groups[name] = g return g } func (s *Store) Update(c *asyncapi3.Config) { s.cluster = c.Info.Name - for n, server := range c.Servers { - if server.Value.Protocol != "" && server.Value.Protocol != "kafka" { - continue - } - if b := s.getBroker(n); b != nil { - host, port := parseHostAndPort(server.Value.Host) - if len(host) == 0 { - log.Errorf("unable to update broker '%v' to cluster '%v': missing host in url '%v'", n, s.cluster, server.Value.Host) + if c.Servers != nil { + for it := c.Servers.Iter(); it.Next(); { + name := it.Key() + server := it.Value() + if server.Value.Protocol != "" && server.Value.Protocol != "kafka" { continue } - b.Host = host - b.Port = port - } else { - s.addBroker(n, server.Value) + if b := s.getBroker(name); b != nil { + host, port := parseHostAndPort(server.Value.Host) + if len(host) == 0 { + log.Errorf("unable to update broker '%v' to cluster '%v': missing host in url '%v'", name, s.cluster, server.Value.Host) + continue + } + b.Host = host + b.Port = port + } else { + s.addBroker(name, server.Value) + } } - } - for _, b := range s.brokers { - if _, ok := c.Servers[b.Name]; !ok { - s.deleteBroker(b.Id) + for _, b := range s.brokers { + if _, ok := c.Servers.Get(b.Name); !ok { + s.deleteBroker(b.Id) + } } } @@ -201,6 +209,22 @@ func (s *Store) Update(c *asyncapi3.Config) { func (s *Store) ServeMessage(rw kafka.ResponseWriter, req *kafka.Request) { var err error + + client := kafka.ClientFromContext(req.Context) + if client != nil { + s.m.Lock() + if _, ok := s.clients[client.ClientId]; !ok { + s.clients[client.ClientId] = client + client.Close = func() { + s.m.Lock() + defer s.m.Unlock() + + delete(s.clients, client.ClientId) + } + } + s.m.Unlock() + } + switch req.Message.(type) { case *produce.Request: err = s.produce(rw, req) @@ -282,11 +306,6 @@ func (s *Store) deleteBroker(id int) { s.m.Lock() defer s.m.Unlock() - for _, t := range s.topics { - for _, p := range t.Partitions { - p.removeReplica(id) - } - } if b, ok := s.brokers[id]; ok { b.stopCleaner() } @@ -302,22 +321,25 @@ func (s *Store) getBroker(name string) *Broker { return nil } -func (s *Store) getBrokerByHost(addr string) *Broker { +func (s *Store) getBrokerByPort(addr string) *Broker { for _, b := range s.brokers { _, p := parseHostAndPort(addr) - if b.Port == p { + if b.Port == p && b.Host != "" { return b } } return nil } -func (s *Store) log(log *KafkaLog, traits events.Traits) { +func (s *Store) log(log *KafkaMessageLog, traits events.Traits) { log.Api = s.cluster - _ = s.eh.Push( - log, - traits.WithNamespace("kafka").WithName(s.cluster), - ) + t := traits.WithNamespace("kafka"). + WithName(s.cluster). 
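// A sketch (hypothetical helper messagesFrom) of what the enriched traits in
// Store.log enable: because message events now carry type=message and the
// producing clientId, a query can be narrowed to a single producer instead of
// scanning every event of the cluster.
func messagesFrom(sm *events.StoreManager, cluster, clientId string) []events.Event {
	return sm.GetEvents(events.NewTraits().
		WithNamespace("kafka").
		WithName(cluster).
		With("type", "message").
		With("clientId", clientId))
}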
+ With("type", "message") + if log.ClientId != "" { + t = t.With("clientId", log.ClientId) + } + _ = s.eh.Push(log, t) } func (s *Store) trigger(record *kafka.Record, schemaId int) bool { @@ -406,6 +428,15 @@ func parseHostAndPort(s string) (host string, port int) { port = int(p) } + ip := net.ParseIP(host) + if ip != nil && ip.IsLoopback() { + if host != "localhost" && host != "127.0.0.1" { + // Some Kafka clients still have problems with IPv6 literals. + // Docker / CI / older JVMs are safer with IPv4 + host = "127.0.0.1" + } + } + return } @@ -439,3 +470,11 @@ func getOperations(channel *asyncapi3.Channel, config *asyncapi3.Config) []*asyn } return ops } + +func (s *Store) Clients() []kafka.ClientContext { + var result []kafka.ClientContext + for _, c := range s.clients { + result = append(result, *c) + } + return result +} diff --git a/providers/asyncapi3/kafka/store/store_test.go b/providers/asyncapi3/kafka/store/store_test.go index 7445b82ec..c885b3048 100644 --- a/providers/asyncapi3/kafka/store/store_test.go +++ b/providers/asyncapi3/kafka/store/store_test.go @@ -7,6 +7,7 @@ import ( "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events" + "mokapi/runtime/monitor" "mokapi/schema/json/schema/schematest" "testing" @@ -21,7 +22,7 @@ func TestStore(t *testing.T) { { "empty", func(t *testing.T, sm *events.StoreManager) { - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), sm) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), sm, monitor.NewKafka()) defer s.Close() require.Equal(t, 0, len(s.Brokers())) require.Equal(t, 0, len(s.Topics())) @@ -34,7 +35,7 @@ func TestStore(t *testing.T) { func(t *testing.T, sm *events.StoreManager) { s := store.New(asyncapi3test.NewConfig( asyncapi3test.WithServer("foo", "kafka", "foo:9092"), - ), enginetest.NewEngine(), sm) + ), enginetest.NewEngine(), sm, monitor.NewKafka()) defer s.Close() require.Equal(t, 1, len(s.Brokers())) require.Equal(t, 0, len(s.Topics())) @@ -49,7 +50,7 @@ func TestStore(t *testing.T) { func(t *testing.T, sm *events.StoreManager) { s := store.New(asyncapi3test.NewConfig( asyncapi3test.WithChannel("foo"), - ), enginetest.NewEngine(), sm) + ), enginetest.NewEngine(), sm, monitor.NewKafka()) defer s.Close() require.Equal(t, 0, len(s.Brokers())) require.Equal(t, 1, len(s.Topics())) @@ -63,7 +64,7 @@ func TestStore(t *testing.T) { { "create topic", func(t *testing.T, sm *events.StoreManager) { - s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), sm) + s := store.New(asyncapi3test.NewConfig(), enginetest.NewEngine(), sm, monitor.NewKafka()) defer s.Close() topic, err := s.NewTopic("foo", asyncapi3test.NewChannel(), []*asyncapi3.Operation{}) require.NoError(t, err) @@ -74,7 +75,7 @@ func TestStore(t *testing.T) { { "create topic, already exists", func(t *testing.T, sm *events.StoreManager) { - s := store.New(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo")), enginetest.NewEngine(), sm) + s := store.New(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo")), enginetest.NewEngine(), sm, monitor.NewKafka()) defer s.Close() _, err := s.NewTopic("foo", asyncapi3test.NewChannel(), []*asyncapi3.Operation{}) require.Error(t, err, "topic foo already exists") @@ -83,7 +84,7 @@ func TestStore(t *testing.T) { { "update topic add partition", func(t *testing.T, sm *events.StoreManager) { - s := store.New(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo", asyncapi3test.WithKafkaChannelBinding(asyncapi3.TopicBindings{Partitions: 
1}))), enginetest.NewEngine(), sm) + s := store.New(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo", asyncapi3test.WithKafkaChannelBinding(asyncapi3.TopicBindings{Partitions: 1}))), enginetest.NewEngine(), sm, monitor.NewKafka()) defer s.Close() s.Update(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo", asyncapi3test.WithKafkaChannelBinding(asyncapi3.TopicBindings{Partitions: 2})))) @@ -94,7 +95,7 @@ func TestStore(t *testing.T) { { "update topic remove partition", func(t *testing.T, sm *events.StoreManager) { - s := store.New(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo", asyncapi3test.WithKafkaChannelBinding(asyncapi3.TopicBindings{Partitions: 2}))), enginetest.NewEngine(), sm) + s := store.New(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo", asyncapi3test.WithKafkaChannelBinding(asyncapi3.TopicBindings{Partitions: 2}))), enginetest.NewEngine(), sm, monitor.NewKafka()) defer s.Close() s.Update(asyncapi3test.NewConfig(asyncapi3test.WithChannel("foo", asyncapi3test.WithKafkaChannelBinding(asyncapi3.TopicBindings{Partitions: 1})))) @@ -112,7 +113,7 @@ func TestStore(t *testing.T) { asyncapi3test.WithContentType("application/json"), ), )), - enginetest.NewEngine(), sm) + enginetest.NewEngine(), sm, monitor.NewKafka()) defer s.Close() s.Update(asyncapi3test.NewConfig( diff --git a/providers/asyncapi3/kafka/store/syncgroup.go b/providers/asyncapi3/kafka/store/syncgroup.go index 1aae89ddb..8a601230b 100644 --- a/providers/asyncapi3/kafka/store/syncgroup.go +++ b/providers/asyncapi3/kafka/store/syncgroup.go @@ -1,57 +1,112 @@ package store import ( + "bufio" + "bytes" "mokapi/kafka" "mokapi/kafka/syncGroup" ) func (s *Store) syncgroup(rw kafka.ResponseWriter, req *kafka.Request) error { r := req.Message.(*syncGroup.Request) - ctx := kafka.ClientFromContext(req) + res := &syncGroup.Response{} + ctx := kafka.ClientFromContext(req.Context) - if len(r.MemberId) == 0 { - return rw.Write(&syncGroup.Response{ErrorCode: kafka.MemberIdRequired}) + data := syncdata{ + client: ctx, + writer: rw, + protocolType: r.ProtocolType, + protocolName: r.ProtocolName, + generationId: r.GenerationId, } - b := s.getBrokerByHost(req.Host) - if b == nil { - res := &syncGroup.Response{ - ErrorCode: kafka.UnknownServerError, + if len(r.GroupAssignments) > 0 { + data.assigns = make(map[string]*groupAssignment) + for _, assign := range r.GroupAssignments { + data.assigns[assign.MemberId] = newGroupAssignment(assign.Assignment) } - return rw.Write(res) } - g := s.GetOrCreateGroup(r.GroupId, b.Id) - if g.Coordinator.Id != b.Id { - return rw.Write(&syncGroup.Response{ErrorCode: kafka.NotCoordinator}) - } + reqLog := newKafkaSyncGroupRequest(r, data.assigns) + data.log = s.logRequest(req.Header, reqLog) - if g.State == PreparingRebalance { - return rw.Write(&syncGroup.Response{ErrorCode: kafka.RebalanceInProgress}) - } + if len(r.MemberId) != 0 { + b := s.getBrokerByPort(req.Host) + if b != nil { + g := s.GetOrCreateGroup(r.GroupId, b.Id) - if g.Generation == nil || g.Generation.Id != int(r.GenerationId) { - return rw.Write(&syncGroup.Response{ErrorCode: kafka.IllegalGeneration}) + if g.State != PreparingRebalance { + if g.Generation == nil || g.Generation.Id != int(r.GenerationId) { + res.ErrorCode = kafka.IllegalGeneration + } else { + if _, ok := ctx.Member[r.GroupId]; !ok { + res.ErrorCode = kafka.RebalanceInProgress + } else { + // balancer writes the response + g.balancer.sync <- data + return nil + } + } + } else { + res.ErrorCode = kafka.RebalanceInProgress + } + } else { + res.ErrorCode 
= kafka.UnknownServerError + } + } else { + res.ErrorCode = kafka.MemberIdRequired } - if _, ok := ctx.Member[r.GroupId]; !ok { - return rw.Write(&syncGroup.Response{ErrorCode: kafka.RebalanceInProgress}) - } + go func() { + resLog := &KafkaSyncGroupResponse{} + resLog.ErrorCode = res.ErrorCode.String() - data := syncdata{ - client: ctx, - writer: rw, - } + s.logRequest(req.Header, reqLog)(&KafkaRequestLogEvent{ + Response: resLog, + }) + }() - if len(r.GroupAssignments) > 0 { - data.assigns = make(map[string]*groupAssignment) - for _, assign := range r.GroupAssignments { - data.assigns[assign.MemberId] = newGroupAssignment(assign.Assignment) + return rw.Write(res) +} + +func newGroupAssignment(b []byte) *groupAssignment { + g := &groupAssignment{} + g.raw = b + r := bufio.NewReader(bytes.NewReader(b)) + d := kafka.NewDecoder(r, len(b)) + g.version = d.ReadInt16() + + g.topics = make(map[string][]int) + n := int(d.ReadInt32()) + for i := 0; i < n; i++ { + key := d.ReadString() + value := make([]int, 0) + + nPartition := int(d.ReadInt32()) + for j := 0; j < nPartition; j++ { + index := d.ReadInt32() + value = append(value, int(index)) } + g.topics[key] = value } - // balancer writes the response - g.balancer.sync <- data + g.userData = d.ReadBytes() + + return g +} - return nil +func newKafkaSyncGroupRequest(req *syncGroup.Request, assigns map[string]*groupAssignment) *KafkaSyncGroupRequest { + r := &KafkaSyncGroupRequest{ + GroupName: req.GroupId, + MemberId: req.MemberId, + ProtocolType: req.ProtocolType, + GroupAssignments: map[string]KafkaSyncGroupAssignment{}, + } + for m, a := range assigns { + r.GroupAssignments[m] = KafkaSyncGroupAssignment{ + Version: a.version, + Topics: a.topics, + } + } + return r } diff --git a/providers/asyncapi3/kafka/store/topic.go b/providers/asyncapi3/kafka/store/topic.go index 05bbaeeea..eebb3360d 100644 --- a/providers/asyncapi3/kafka/store/topic.go +++ b/providers/asyncapi3/kafka/store/topic.go @@ -73,7 +73,7 @@ func (t *Topic) update(config *asyncapi3.Channel, s *Store) { t.Partitions = t.Partitions[:numPartitions] } -func (t *Topic) log(r *KafkaLog, traits events.Traits) { +func (t *Topic) log(r *KafkaMessageLog, traits events.Traits) { t.logger(r, traits.With("topic", t.Name)) } diff --git a/providers/asyncapi3/kafka/store/validation.go b/providers/asyncapi3/kafka/store/validation.go index 8367f1b2d..327b23010 100644 --- a/providers/asyncapi3/kafka/store/validation.go +++ b/providers/asyncapi3/kafka/store/validation.go @@ -25,7 +25,7 @@ type validator struct { } type recordValidator interface { - Validate(record *kafka.Record) (*KafkaLog, error) + Validate(record *kafka.Record) (*KafkaMessageLog, error) } func newValidator(c *asyncapi3.Channel) *validator { @@ -41,7 +41,7 @@ func newValidator(c *asyncapi3.Channel) *validator { return v } -func (v *validator) Validate(record *kafka.Record) (l *KafkaLog, err error) { +func (v *validator) Validate(record *kafka.Record) (l *KafkaMessageLog, err error) { if v == nil { return newKafkaLog(record), nil } @@ -133,8 +133,8 @@ func newMessageValidator(messageId string, msg *asyncapi3.Message, channel *asyn return v } -func (mv *messageValidator) Validate(record *kafka.Record) (*KafkaLog, error) { - r := &KafkaLog{ +func (mv *messageValidator) Validate(record *kafka.Record) (*KafkaMessageLog, error) { + r := &KafkaMessageLog{ Key: LogValue{}, Message: LogValue{}, Headers: make(map[string]LogValue), diff --git a/providers/asyncapi3/kafka/store/validation_test.go 
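// A standalone, illustrative decoder (not the store's implementation) for the
// member-assignment layout that newGroupAssignment reads above: int16 version,
// an int32-counted topic list whose entries are an int16-length-prefixed topic
// name plus an int32-counted list of int32 partition indexes, then an
// int32-length-prefixed userData blob. Null (negative) lengths are ignored
// here for brevity.
package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	// version 0, one topic "orders" assigned partitions 0 and 2, empty userData
	b := []byte{
		0, 0, // version
		0, 0, 0, 1, // topic count
		0, 6, 'o', 'r', 'd', 'e', 'r', 's', // topic name
		0, 0, 0, 2, // partition count
		0, 0, 0, 0, // partition 0
		0, 0, 0, 2, // partition 2
		0, 0, 0, 0, // userData length
	}
	version := int16(binary.BigEndian.Uint16(b))
	nameLen := int(binary.BigEndian.Uint16(b[6:]))
	topic := string(b[8 : 8+nameLen])
	partitions := int(binary.BigEndian.Uint32(b[8+nameLen:]))
	fmt.Println(version, topic, partitions) // 0 orders 2
}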
b/providers/asyncapi3/kafka/store/validation_test.go index 056d4015d..875f42263 100644 --- a/providers/asyncapi3/kafka/store/validation_test.go +++ b/providers/asyncapi3/kafka/store/validation_test.go @@ -9,6 +9,7 @@ import ( opSchema "mokapi/providers/openapi/schema" opSchematest "mokapi/providers/openapi/schema/schematest" "mokapi/runtime/events" + "mokapi/runtime/monitor" "mokapi/schema/json/schema/schematest" "testing" @@ -55,7 +56,7 @@ func TestValidation(t *testing.T) { require.Len(t, e, 2) // latest message is first require.Equal(t, - &store.KafkaLog{ + &store.KafkaMessageLog{ Offset: 1, Key: store.LogValue{Value: "", Binary: []byte("key-bar")}, Message: store.LogValue{Value: "", Binary: []byte("bar")}, @@ -66,9 +67,9 @@ func TestValidation(t *testing.T) { Deleted: false, Api: "test", }, - e[0].Data.(*store.KafkaLog)) + e[0].Data.(*store.KafkaMessageLog)) require.Equal(t, - &store.KafkaLog{ + &store.KafkaMessageLog{ Offset: 0, Key: store.LogValue{Value: "", Binary: []byte("key-foo")}, Message: store.LogValue{Value: "", Binary: []byte("foo")}, @@ -79,7 +80,7 @@ func TestValidation(t *testing.T) { Deleted: false, Api: "test", }, - e[1].Data.(*store.KafkaLog)) + e[1].Data.(*store.KafkaMessageLog)) }, }, { @@ -133,7 +134,7 @@ func TestValidation(t *testing.T) { e := sm.GetEvents(events.NewTraits()) require.Len(t, e, 1) - require.Equal(t, []byte("foo"), e[0].Data.(*store.KafkaLog).Key.Binary) + require.Equal(t, []byte("foo"), e[0].Data.(*store.KafkaMessageLog).Key.Binary) }, }, { @@ -166,8 +167,8 @@ func TestValidation(t *testing.T) { e := sm.GetEvents(events.NewTraits()) require.Len(t, e, 1) - require.Equal(t, `"12"`, e[0].Data.(*store.KafkaLog).Key.Value) - require.Equal(t, `"foo"`, e[0].Data.(*store.KafkaLog).Message.Value) + require.Equal(t, `"12"`, e[0].Data.(*store.KafkaMessageLog).Key.Value) + require.Equal(t, `"foo"`, e[0].Data.(*store.KafkaMessageLog).Message.Value) }, }, { @@ -198,7 +199,7 @@ func TestValidation(t *testing.T) { e := sm.GetEvents(events.NewTraits()) require.Len(t, e, 1) - require.Equal(t, 1, e[0].Data.(*store.KafkaLog).SchemaId) + require.Equal(t, 1, e[0].Data.(*store.KafkaMessageLog).SchemaId) }, }, { @@ -253,7 +254,7 @@ func TestValidation(t *testing.T) { e := sm.GetEvents(events.NewTraits()) require.Len(t, e, 1) - require.Equal(t, "123", e[0].Data.(*store.KafkaLog).Message.Value) + require.Equal(t, "123", e[0].Data.(*store.KafkaMessageLog).Message.Value) }, }, { @@ -293,7 +294,7 @@ func TestValidation(t *testing.T) { sm := &events.StoreManager{} sm.SetStore(5, events.NewTraits().WithNamespace("kafka")) - s := store.New(tc.cfg, enginetest.NewEngine(), sm) + s := store.New(tc.cfg, enginetest.NewEngine(), sm, monitor.NewKafka()) tc.test(t, s, sm) }) } @@ -329,7 +330,7 @@ func TestValidation_Header(t *testing.T) { e := sm.GetEvents(events.NewTraits()) require.Len(t, e, 1) - require.Equal(t, "\u0001\u0000\u0000\u0000", e[0].Data.(*store.KafkaLog).Headers["foo"].Value) + require.Equal(t, "\u0001\u0000\u0000\u0000", e[0].Data.(*store.KafkaMessageLog).Headers["foo"].Value) }, }, { @@ -356,7 +357,7 @@ func TestValidation_Header(t *testing.T) { e := sm.GetEvents(events.NewTraits()) require.Len(t, e, 1) - require.Equal(t, "1", e[0].Data.(*store.KafkaLog).Headers["foo"].Value) + require.Equal(t, "1", e[0].Data.(*store.KafkaMessageLog).Headers["foo"].Value) }, }, { @@ -380,7 +381,7 @@ func TestValidation_Header(t *testing.T) { e := sm.GetEvents(events.NewTraits()) require.Len(t, e, 1) - require.Equal(t, []byte{1, 0, 0, 0}, 
e[0].Data.(*store.KafkaLog).Headers["foo"].Binary) + require.Equal(t, []byte{1, 0, 0, 0}, e[0].Data.(*store.KafkaMessageLog).Headers["foo"].Binary) }, }, { @@ -407,7 +408,7 @@ func TestValidation_Header(t *testing.T) { e := sm.GetEvents(events.NewTraits()) require.Len(t, e, 1) - require.Equal(t, "3.141629934310913", e[0].Data.(*store.KafkaLog).Headers["foo"].Value) + require.Equal(t, "3.141629934310913", e[0].Data.(*store.KafkaMessageLog).Headers["foo"].Value) }, }, } @@ -417,7 +418,7 @@ func TestValidation_Header(t *testing.T) { sm := &events.StoreManager{} sm.SetStore(5, events.NewTraits().WithNamespace("kafka")) - s := store.New(tc.cfg, enginetest.NewEngine(), sm) + s := store.New(tc.cfg, enginetest.NewEngine(), sm, monitor.NewKafka()) tc.test(t, s, sm) }) } diff --git a/providers/asyncapi3/patch.go b/providers/asyncapi3/patch.go index b2f124a5c..451316c50 100644 --- a/providers/asyncapi3/patch.go +++ b/providers/asyncapi3/patch.go @@ -51,14 +51,14 @@ func (l *License) patch(patch *License) { } func (c *Config) patchServer(patch *Config) { - if len(c.Servers) == 0 { + if c.Servers.Len() == 0 { c.Servers = patch.Servers } else { - for name, ps := range patch.Servers { - if s, ok := c.Servers[name]; ok { - s.patch(ps) + for it := patch.Servers.Iter(); it.Next(); { + if s, ok := c.Servers.Get(it.Key()); ok { + s.patch(it.Value()) } else { - c.Servers[name] = ps + c.Servers.Set(it.Key(), it.Value()) } } } diff --git a/providers/asyncapi3/patch_test.go b/providers/asyncapi3/patch_test.go index 55e1b7698..b126c895e 100644 --- a/providers/asyncapi3/patch_test.go +++ b/providers/asyncapi3/patch_test.go @@ -1,12 +1,13 @@ package asyncapi3_test import ( - "github.com/stretchr/testify/require" "mokapi/providers/asyncapi3" "mokapi/providers/asyncapi3/asyncapi3test" "mokapi/schema/json/schema" "mokapi/schema/json/schema/schematest" "testing" + + "github.com/stretchr/testify/require" ) func TestConfig_Patch_Info(t *testing.T) { @@ -113,9 +114,9 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.NewConfig(), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Len(t, result.Servers, 1) - require.Equal(t, "foo.bar", result.Servers["foo"].Value.Host) - require.Equal(t, "description", result.Servers["foo"].Value.Description) + require.Equal(t, result.Servers.Len(), 1) + require.Equal(t, "foo.bar", result.Servers.Lookup("foo").Value.Host) + require.Equal(t, "description", result.Servers.Lookup("foo").Value.Description) }, }, { @@ -125,9 +126,9 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.NewConfig(asyncapi3test.WithServer("foo", "kafka", "foo.bar", asyncapi3test.WithServerDescription("description"))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Len(t, result.Servers, 1) - require.Equal(t, "foo.bar", result.Servers["foo"].Value.Host) - require.Equal(t, "description", result.Servers["foo"].Value.Description) + require.Equal(t, result.Servers.Len(), 1) + require.Equal(t, "foo.bar", result.Servers.Lookup("foo").Value.Host) + require.Equal(t, "description", result.Servers.Lookup("foo").Value.Description) }, }, { @@ -137,8 +138,8 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.NewConfig(asyncapi3test.WithServer("foo", "kafka", "bar.foo")), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Len(t, result.Servers, 1) - require.Equal(t, "bar.foo", result.Servers["foo"].Value.Host) + require.Equal(t, result.Servers.Len(), 1) + require.Equal(t, "bar.foo", result.Servers.Lookup("foo").Value.Host) }, }, { @@ -148,11 +149,11 @@ 
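// A sketch (hypothetical helper mergeServers) of the ordered merge that
// patchServer performs now that servers live in a LinkedHashMap: keys already
// present are patched field by field (via the unexported patch method in the
// real code), unknown keys are appended, and declaration order is preserved.
func mergeServers(dst, patch *sortedmap.LinkedHashMap[string, *asyncapi3.ServerRef]) {
	for it := patch.Iter(); it.Next(); {
		if _, ok := dst.Get(it.Key()); ok {
			continue // existing entry wins; the real code patches it in place
		}
		dst.Set(it.Key(), it.Value())
	}
}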
func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.NewConfig(asyncapi3test.WithServer("bar", "kafka", "bar.foo", asyncapi3test.WithServerDescription("other"))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Len(t, result.Servers, 2) - require.Equal(t, "foo.bar", result.Servers["foo"].Value.Host) - require.Equal(t, "description", result.Servers["foo"].Value.Description) - require.Equal(t, "bar.foo", result.Servers["bar"].Value.Host) - require.Equal(t, "other", result.Servers["bar"].Value.Description) + require.Equal(t, result.Servers.Len(), 2) + require.Equal(t, "foo.bar", result.Servers.Lookup("foo").Value.Host) + require.Equal(t, "description", result.Servers.Lookup("foo").Value.Description) + require.Equal(t, "bar.foo", result.Servers.Lookup("bar").Value.Host) + require.Equal(t, "other", result.Servers.Lookup("bar").Value.Description) }, }, { @@ -162,9 +163,9 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.NewConfig(asyncapi3test.WithServer("foo", "kafka", "foo.bar", asyncapi3test.WithServerDescription("mokapi"))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Len(t, result.Servers, 1) - require.Equal(t, "foo.bar", result.Servers["foo"].Value.Host) - require.Equal(t, "mokapi", result.Servers["foo"].Value.Description) + require.Equal(t, result.Servers.Len(), 1) + require.Equal(t, "foo.bar", result.Servers.Lookup("foo").Value.Host) + require.Equal(t, "mokapi", result.Servers.Lookup("foo").Value.Description) }, }, { @@ -174,9 +175,9 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.NewConfig(asyncapi3test.WithServer("foo", "kafka", "foo.bar", asyncapi3test.WithServerDescription("mokapi"))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Len(t, result.Servers, 1) - require.Equal(t, "foo.bar", result.Servers["foo"].Value.Host) - require.Equal(t, "mokapi", result.Servers["foo"].Value.Description) + require.Equal(t, result.Servers.Len(), 1) + require.Equal(t, "foo.bar", result.Servers.Lookup("foo").Value.Host) + require.Equal(t, "mokapi", result.Servers.Lookup("foo").Value.Description) }, }, { @@ -187,7 +188,7 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.WithKafkaServerBinding(asyncapi3.BrokerBindings{LogRetentionBytes: 1}))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogRetentionBytes) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogRetentionBytes) }, }, { @@ -201,7 +202,7 @@ func TestConfig_Patch_Server(t *testing.T) { )), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogRetentionBytes) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogRetentionBytes) }, }, { @@ -212,7 +213,7 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.WithKafkaServerBinding(asyncapi3.BrokerBindings{LogRetentionMs: 1}))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogRetentionMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogRetentionMs) }, }, { @@ -226,7 +227,7 @@ func TestConfig_Patch_Server(t *testing.T) { )), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogRetentionMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogRetentionMs) }, 
}, { @@ -237,7 +238,7 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.WithKafkaServerBinding(asyncapi3.BrokerBindings{LogRetentionCheckIntervalMs: 1}))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogRetentionCheckIntervalMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogRetentionCheckIntervalMs) }, }, { @@ -251,7 +252,7 @@ func TestConfig_Patch_Server(t *testing.T) { )), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogRetentionCheckIntervalMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogRetentionCheckIntervalMs) }, }, { @@ -262,7 +263,7 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.WithKafkaServerBinding(asyncapi3.BrokerBindings{LogSegmentDeleteDelayMs: 1}))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogSegmentDeleteDelayMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogSegmentDeleteDelayMs) }, }, { @@ -276,7 +277,7 @@ func TestConfig_Patch_Server(t *testing.T) { )), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogSegmentDeleteDelayMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogSegmentDeleteDelayMs) }, }, { @@ -287,7 +288,7 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.WithKafkaServerBinding(asyncapi3.BrokerBindings{LogRollMs: 1}))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogRollMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogRollMs) }, }, { @@ -301,7 +302,7 @@ func TestConfig_Patch_Server(t *testing.T) { )), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogRollMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogRollMs) }, }, { @@ -312,7 +313,7 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.WithKafkaServerBinding(asyncapi3.BrokerBindings{LogSegmentBytes: 1}))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogSegmentBytes) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogSegmentBytes) }, }, { @@ -326,7 +327,7 @@ func TestConfig_Patch_Server(t *testing.T) { )), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.LogSegmentBytes) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.LogSegmentBytes) }, }, { @@ -337,7 +338,7 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.WithKafkaServerBinding(asyncapi3.BrokerBindings{GroupInitialRebalanceDelayMs: 1}))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.GroupInitialRebalanceDelayMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.GroupInitialRebalanceDelayMs) }, }, { @@ -351,7 +352,7 @@ func TestConfig_Patch_Server(t *testing.T) { )), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), 
result.Servers["foo"].Value.Bindings.Kafka.GroupInitialRebalanceDelayMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.GroupInitialRebalanceDelayMs) }, }, { @@ -362,7 +363,7 @@ func TestConfig_Patch_Server(t *testing.T) { asyncapi3test.WithKafkaServerBinding(asyncapi3.BrokerBindings{GroupMinSessionTimeoutMs: 1}))), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.GroupMinSessionTimeoutMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.GroupMinSessionTimeoutMs) }, }, { @@ -376,7 +377,7 @@ func TestConfig_Patch_Server(t *testing.T) { )), }, test: func(t *testing.T, result *asyncapi3.Config) { - require.Equal(t, int64(1), result.Servers["foo"].Value.Bindings.Kafka.GroupMinSessionTimeoutMs) + require.Equal(t, int64(1), result.Servers.Lookup("foo").Value.Bindings.Kafka.GroupMinSessionTimeoutMs) }, }, } diff --git a/providers/asyncapi3/tag_test.go b/providers/asyncapi3/tag_test.go index df9db6c3b..72aa33465 100644 --- a/providers/asyncapi3/tag_test.go +++ b/providers/asyncapi3/tag_test.go @@ -1,12 +1,13 @@ package asyncapi3_test import ( - "github.com/stretchr/testify/require" - "gopkg.in/yaml.v3" "mokapi/config/dynamic" "mokapi/config/dynamic/dynamictest" "mokapi/providers/asyncapi3" "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" ) func TestServerTags(t *testing.T) { @@ -26,8 +27,8 @@ servers: `, test: func(t *testing.T, cfg *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, "foo", cfg.Servers["foo"].Value.Tags[0].Value.Name) - require.Equal(t, "bar", cfg.Servers["foo"].Value.Tags[0].Value.Description) + require.Equal(t, "foo", cfg.Servers.Lookup("foo").Value.Tags[0].Value.Name) + require.Equal(t, "bar", cfg.Servers.Lookup("foo").Value.Tags[0].Value.Description) }, }, { @@ -45,8 +46,8 @@ components: `, test: func(t *testing.T, cfg *asyncapi3.Config, err error) { require.NoError(t, err) - require.Equal(t, "foo", cfg.Servers["foo"].Value.Tags[0].Value.Name) - require.Equal(t, "bar", cfg.Servers["foo"].Value.Tags[0].Value.Description) + require.Equal(t, "foo", cfg.Servers.Lookup("foo").Value.Tags[0].Value.Name) + require.Equal(t, "bar", cfg.Servers.Lookup("foo").Value.Tags[0].Value.Description) }, }, } diff --git a/providers/openapi/handler.go b/providers/openapi/handler.go index fac9a20e2..f573b5b52 100644 --- a/providers/openapi/handler.go +++ b/providers/openapi/handler.go @@ -271,8 +271,8 @@ func (h *operationHandler) ServeHTTP(rw http.ResponseWriter, r *http.Request) *H r = r.WithContext(context.WithValue(r.Context(), "endpointPath", op.Path.Path)) if m, ok := monitor.HttpFromContext(r.Context()); ok { - m.LastRequest.WithLabel(h.config.Info.Name, op.Path.Path).Set(float64(time.Now().Unix())) - m.RequestCounter.WithLabel(h.config.Info.Name, op.Path.Path).Add(1) + m.LastRequest.WithLabel(h.config.Info.Name, op.Path.Path, r.Method).Set(float64(time.Now().Unix())) + m.RequestCounter.WithLabel(h.config.Info.Name, op.Path.Path, r.Method).Add(1) } if ctx, err := NewLogEventContext( @@ -344,11 +344,11 @@ func writeError(rw http.ResponseWriter, r *http.Request, err error, serviceName endpointPath := r.Context().Value("endpointPath") if endpointPath != nil { endpointPathString := endpointPath.(string) - m.RequestErrorCounter.WithLabel(serviceName, endpointPathString).Add(1) - m.LastRequest.WithLabel(serviceName, endpointPathString).Set(float64(time.Now().Unix())) + m.RequestErrorCounter.WithLabel(serviceName, 
endpointPathString, r.Method).Add(1) + m.LastRequest.WithLabel(serviceName, endpointPathString, r.Method).Set(float64(time.Now().Unix())) } else { - m.RequestErrorCounter.WithLabel(serviceName, "").Add(1) - m.LastRequest.WithLabel(serviceName, "").Set(float64(time.Now().Unix())) + m.RequestErrorCounter.WithLabel(serviceName, "", r.Method).Add(1) + m.LastRequest.WithLabel(serviceName, "", r.Method).Set(float64(time.Now().Unix())) } } rw.Header().Add("Content-Type", "text/plain") diff --git a/runtime/events/events.go b/runtime/events/events.go index e53ac7578..03d9553a7 100644 --- a/runtime/events/events.go +++ b/runtime/events/events.go @@ -2,12 +2,13 @@ package events import ( "fmt" - "github.com/blevesearch/bleve/v2" - "github.com/google/uuid" - log "github.com/sirupsen/logrus" "sort" "sync" "time" + + "github.com/blevesearch/bleve/v2" + "github.com/google/uuid" + log "github.com/sirupsen/logrus" ) type Handler interface { @@ -92,7 +93,7 @@ func (m *StoreManager) GetEvents(traits Traits) []Event { events := make([]Event, 0) for _, s := range m.stores { - if len(traits) == 0 || traits.Match(s.traits) || s.traits.Match(traits) { + if s.some(traits) { events = append(events, s.Events(traits)...) } } diff --git a/runtime/events/events_test.go b/runtime/events/events_test.go index b1da8179a..a8fbc27cd 100644 --- a/runtime/events/events_test.go +++ b/runtime/events/events_test.go @@ -1,10 +1,11 @@ package events_test import ( - "github.com/stretchr/testify/require" "mokapi/runtime/events" "mokapi/runtime/events/eventstest" "testing" + + "github.com/stretchr/testify/require" ) func TestPush(t *testing.T) { @@ -122,6 +123,23 @@ func TestPush(t *testing.T) { require.Equal(t, "2", evts[1].Data.Title()) }, }, + { + "get events by traits", + func(t *testing.T, sm *events.StoreManager) { + sm.SetStore(10, events.NewTraits().WithNamespace("foo").With("name", "bar")) + err := sm.Push(nil, events.NewTraits().WithNamespace("foo").With("name", "bar").With("type", "baz").With("client", "client-1")) + require.NoError(t, err) + + result := sm.GetEvents(events.NewTraits().WithNamespace("foo").With("name", "bar").With("type", "baz")) + require.Len(t, result, 1) + + result = sm.GetEvents(events.NewTraits().WithNamespace("foo").With("type", "baz")) + require.Len(t, result, 1) + + result = sm.GetEvents(events.NewTraits().WithNamespace("foo").With("type", "yuh")) + require.Len(t, result, 0) + }, + }, } for _, tc := range testcase { diff --git a/runtime/events/store.go b/runtime/events/store.go index 46cf15052..5f2d20b4c 100644 --- a/runtime/events/store.go +++ b/runtime/events/store.go @@ -43,3 +43,15 @@ func (s *store) Events(traits Traits) []Event { } return events } + +func (s *store) some(traits Traits) bool { + if len(traits) == 0 { + return true + } + for key, value := range traits { + if s.traits.Has(key, value) { + return true + } + } + return false +} diff --git a/runtime/events/traits.go b/runtime/events/traits.go index 0ba0e038e..1949acfe5 100644 --- a/runtime/events/traits.go +++ b/runtime/events/traits.go @@ -81,3 +81,11 @@ func (t Traits) Contains(traits Traits) bool { } return true } + +func (t Traits) Has(name, value string) bool { + v, ok := t[name] + if !ok { + return false + } + return v == value +} diff --git a/runtime/monitor/http.go b/runtime/monitor/http.go index 3ac7a0406..71d1d2682 100644 --- a/runtime/monitor/http.go +++ b/runtime/monitor/http.go @@ -16,13 +16,13 @@ type Http struct { func NewHttp() *Http { httpRequestCounter := metrics.NewCounterMap( metrics.WithFQName("http",
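// An illustration of the new partial-trait pre-filter: Traits.Has checks a
// single key/value pair, and store.some admits a store as soon as any queried
// pair appears in the store's own traits; exact filtering of the events
// themselves still happens in store.Events. Hypothetical example function.
func exampleHas() {
	t := events.NewTraits().WithNamespace("foo").With("name", "bar")
	_ = t.Has("name", "bar") // true
	_ = t.Has("name", "baz") // false: the value differs
	_ = t.Has("type", "baz") // false: the key is absent
}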
"requests_total"), - metrics.WithLabelNames("service", "endpoint")) + metrics.WithLabelNames("service", "endpoint", "method")) httpRequestErrorCounter := metrics.NewCounterMap( metrics.WithFQName("http", "requests_errors_total"), - metrics.WithLabelNames("service", "endpoint")) + metrics.WithLabelNames("service", "endpoint", "method")) httpLastRequest := metrics.NewGaugeMap( metrics.WithFQName("http", "request_timestamp"), - metrics.WithLabelNames("service", "endpoint")) + metrics.WithLabelNames("service", "endpoint", "method")) return &Http{ RequestCounter: httpRequestCounter, diff --git a/runtime/monitor/http_test.go b/runtime/monitor/http_test.go index bd0632351..6e4e15851 100644 --- a/runtime/monitor/http_test.go +++ b/runtime/monitor/http_test.go @@ -2,26 +2,27 @@ package monitor import ( "context" - "github.com/stretchr/testify/require" "mokapi/runtime/metrics" "testing" + + "github.com/stretchr/testify/require" ) func TestHttp_Metrics_Request_Total(t *testing.T) { h := NewHttp() - h.RequestCounter.WithLabel("service_a", "endpoint_a").Add(1) + h.RequestCounter.WithLabel("service_a", "endpoint_a", "post").Add(1) require.Equal(t, float64(1), h.RequestCounter.Sum()) } func TestHttp_Metrics_Request_Errors_Total(t *testing.T) { h := NewHttp() - h.RequestErrorCounter.WithLabel("service_a", "endpoint_a").Add(1) + h.RequestErrorCounter.WithLabel("service_a", "endpoint_a", "put").Add(1) require.Equal(t, float64(1), h.RequestErrorCounter.Sum()) } func TestHttp_Metrics_LastRequest(t *testing.T) { h := NewHttp() - h.LastRequest.WithLabel("service_a", "endpoint_a").Set(10) + h.LastRequest.WithLabel("service_a", "endpoint_a", "delete").Set(10) require.Equal(t, float64(10), h.LastRequest.Value(metrics.NewQuery(metrics.ByLabel("service", "service_a")))) } diff --git a/runtime/monitor/kafka.go b/runtime/monitor/kafka.go index 30bc6f8ad..b1ad40bd7 100644 --- a/runtime/monitor/kafka.go +++ b/runtime/monitor/kafka.go @@ -8,9 +8,11 @@ import ( var kafkaKey = contextKey("kafka") type Kafka struct { - Messages *metrics.CounterMap - LastMessage *metrics.GaugeMap - Lags *metrics.GaugeMap + Messages *metrics.CounterMap + LastMessage *metrics.GaugeMap + Lags *metrics.GaugeMap + Commits *metrics.GaugeMap + LastRebalancing *metrics.GaugeMap } func NewKafka() *Kafka { @@ -24,21 +26,34 @@ func NewKafka() *Kafka { metrics.WithFQName("kafka", "consumer_group_lag"), metrics.WithLabelNames("service", "group", "topic", "partition")) + commits := metrics.NewGaugeMap( + metrics.WithFQName("kafka", "consumer_group_commit"), + metrics.WithLabelNames("service", "group", "topic", "partition")) + lastRebalancing := + metrics.NewGaugeMap( + metrics.WithFQName("kafka", "rebalance_timestamp"), + metrics.WithLabelNames("service", "group"), + ) + return &Kafka{ - Messages: messages, - LastMessage: lastMessage, - Lags: lag, + Messages: messages, + LastMessage: lastMessage, + Lags: lag, + LastRebalancing: lastRebalancing, + Commits: commits, } } func (k *Kafka) Metrics() []metrics.Metric { - return []metrics.Metric{k.Messages, k.LastMessage, k.Lags} + return []metrics.Metric{k.Messages, k.LastMessage, k.Lags, k.Commits, k.LastRebalancing} } func (k *Kafka) Reset() { k.Messages.Reset() k.LastMessage.Reset() k.Lags.Reset() + k.Commits.Reset() + k.LastRebalancing.Reset() } func NewKafkaContext(ctx context.Context, kafka *Kafka) context.Context { diff --git a/runtime/monitor/monitor_test.go b/runtime/monitor/monitor_test.go index d626d7b9f..5eb61df98 100644 --- a/runtime/monitor/monitor_test.go +++ b/runtime/monitor/monitor_test.go @@ 
-2,11 +2,12 @@ package monitor import ( "context" - "github.com/stretchr/testify/require" "mokapi/runtime/metrics" "mokapi/safe" "testing" "time" + + "github.com/stretchr/testify/require" ) func TestMonitor_Start(t *testing.T) { @@ -27,7 +28,7 @@ func TestMonitor_FindAll(t *testing.T) { t.Parallel() m := New() - m.Http.RequestCounter.WithLabel("s", "e").Add(1) + m.Http.RequestCounter.WithLabel("s", "e", "m").Add(1) r := m.FindAll(metrics.ByNamespace("http")) require.Len(t, r, 1) } diff --git a/runtime/runtime_kafka.go b/runtime/runtime_kafka.go index 64aff6bbd..4209709c8 100644 --- a/runtime/runtime_kafka.go +++ b/runtime/runtime_kafka.go @@ -1,8 +1,6 @@ package runtime import ( - "github.com/blevesearch/bleve/v2" - log "github.com/sirupsen/logrus" "mokapi/config/dynamic" "mokapi/config/dynamic/asyncApi" "mokapi/config/static" @@ -12,9 +10,13 @@ import ( "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events" "mokapi/runtime/monitor" + "mokapi/sortedmap" "path/filepath" "sort" "sync" + + "github.com/blevesearch/bleve/v2" + log "github.com/sirupsen/logrus" ) type KafkaStore struct { @@ -96,7 +98,7 @@ func (s *KafkaStore) Add(c *dynamic.Config, emitter common.EventEmitter) (*Kafka s.events.ResetStores(events.NewTraits().WithNamespace("kafka").WithName(cfg.Info.Name)) s.events.SetStore(int(eventStore.Size), events.NewTraits().WithNamespace("kafka").WithName(cfg.Info.Name)) - ki = NewKafkaInfo(c, store.New(cfg, emitter, s.events), s.updateEventStore) + ki = NewKafkaInfo(c, store.NewEmpty(emitter, s.events, s.monitor.Kafka), s.updateEventStore) s.infos[cfg.Info.Name] = ki } else { ki.AddConfig(c) @@ -186,6 +188,21 @@ func (c *KafkaInfo) update() { } } + if cfg.Servers.Len() == 0 { + log.Infof("no servers defined in AsyncAPI spec — using default Mokapi broker for cluster '%s'", cfg.Info.Name) + if cfg.Servers == nil { + cfg.Servers = &sortedmap.LinkedHashMap[string, *asyncapi3.ServerRef]{} + } + cfg.Servers.Set("mokapi", &asyncapi3.ServerRef{ + Value: &asyncapi3.Server{ + Host: ":9092", + Protocol: "kafka", + Title: "Mokapi Default Broker", + Summary: "Automatically added broker because no servers are defined in the AsyncAPI spec", + }, + }) + } + c.Config = cfg c.updateEventAndMetrics(c) c.Store.Update(cfg) diff --git a/runtime/runtime_kafka_search.go b/runtime/runtime_kafka_search.go index b401aae0f..e7a4b595f 100644 --- a/runtime/runtime_kafka_search.go +++ b/runtime/runtime_kafka_search.go @@ -2,11 +2,12 @@ package runtime import ( "fmt" - log "github.com/sirupsen/logrus" "mokapi/providers/asyncapi3" "mokapi/runtime/search" "mokapi/schema/json/schema" "strings" + + log "github.com/sirupsen/logrus" ) type kafkaSearchIndexData struct { @@ -65,7 +66,9 @@ func (s *KafkaStore) addToIndex(cfg *asyncapi3.Config) { Description: cfg.Info.Description, Contact: cfg.Info.Contact, } - for name, server := range cfg.Servers { + for it := cfg.Servers.Iter(); it.Next(); { + name := it.Key() + server := it.Value() if server == nil || server.Value == nil { continue } diff --git a/runtime/runtime_kafka_test.go b/runtime/runtime_kafka_test.go index 938ade59d..c431c0952 100644 --- a/runtime/runtime_kafka_test.go +++ b/runtime/runtime_kafka_test.go @@ -1,7 +1,6 @@ package runtime_test import ( - "github.com/stretchr/testify/require" "mokapi/config/dynamic" "mokapi/config/static" "mokapi/engine/enginetest" @@ -17,6 +16,9 @@ import ( "net/url" "testing" "time" + + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" ) func TestApp_AddKafka(t *testing.T) { @@ -24,6 +26,28 @@ func 
TestApp_AddKafka(t *testing.T) { name string test func(t *testing.T, app *runtime.App) }{ + { + name: "add default server if none is specified", + test: func(t *testing.T, app *runtime.App) { + hook := test.NewGlobal() + + c := asyncapi3test.NewConfig(asyncapi3test.WithInfo("foo", "", "")) + ki, err := app.Kafka.Add(getConfig("foo.bar", c), enginetest.NewEngine()) + require.Nil(t, err) + require.Equal(t, ki.Servers.Len(), 1) + require.Equal(t, + &asyncapi3.Server{ + Host: ":9092", + Protocol: "kafka", + Title: "Mokapi Default Broker", + Summary: "Automatically added broker because no servers are defined in the AsyncAPI spec", + }, + ki.Servers.Lookup("mokapi").Value) + + require.Len(t, hook.Entries, 1) + require.Equal(t, "no servers defined in AsyncAPI spec — using default Mokapi broker for cluster 'foo'", hook.Entries[0].Message) + }, + }, { name: "event store available", test: func(t *testing.T, app *runtime.App) { diff --git a/runtime/runtime_mqtt.go b/runtime/runtime_mqtt.go index 8983c5aca..ee02851d1 100644 --- a/runtime/runtime_mqtt.go +++ b/runtime/runtime_mqtt.go @@ -1,7 +1,6 @@ package runtime import ( - log "github.com/sirupsen/logrus" "mokapi/config/dynamic" "mokapi/config/dynamic/asyncApi" "mokapi/config/static" @@ -14,6 +13,8 @@ import ( "mokapi/runtime/monitor" "path/filepath" "sort" "sync" + + log "github.com/sirupsen/logrus" ) type MqttStore struct { @@ -217,7 +218,8 @@ func IsMqttConfig(c *dynamic.Config) (*asyncapi3.Config, bool) { } func hasMqttBroker(c *asyncapi3.Config) bool { - for _, server := range c.Servers { + for it := c.Servers.Iter(); it.Next(); { + server := it.Value() if server.Value.Protocol == "mqtt" { return true } diff --git a/server/server_kafka.go b/server/server_kafka.go index 7306e1996..646e9190d 100644 --- a/server/server_kafka.go +++ b/server/server_kafka.go @@ -1,17 +1,18 @@ package server import ( - log "github.com/sirupsen/logrus" - "maps" "mokapi/config/dynamic" "mokapi/engine/common" "mokapi/providers/asyncapi3" "mokapi/runtime" "mokapi/runtime/monitor" "mokapi/server/service" + "mokapi/sortedmap" "net/url" "slices" "sync" + + log "github.com/sirupsen/logrus" ) type Broker interface { @@ -55,7 +56,7 @@ func (m *KafkaManager) UpdateConfig(e dynamic.ConfigEvent) { return } } - var servers map[string]*asyncapi3.ServerRef + var servers *sortedmap.LinkedHashMap[string, *asyncapi3.ServerRef] if info != nil { servers = info.Servers } @@ -98,10 +99,12 @@ func (m *KafkaManager) removeCluster(name string) { delete(m.clusters, name) } -func (c *kafkaCluster) updateBrokers(cfg *runtime.KafkaInfo, old map[string]*asyncapi3.ServerRef) { - servers := slices.Collect(maps.Values(cfg.Servers)) +func (c *kafkaCluster) updateBrokers(cfg *runtime.KafkaInfo, old *sortedmap.LinkedHashMap[string, *asyncapi3.ServerRef]) { + servers := cfg.Servers.Values() - for name, server := range old { + for it := old.Iter(); it.Next(); { + name := it.Key() + server := it.Value() if !slices.ContainsFunc(servers, func(s *asyncapi3.ServerRef) bool { return s.Value != nil && server.Value != nil && s.Value.Host == server.Value.Host }) { @@ -114,7 +117,9 @@ } } - for name, server := range cfg.Servers { + for it := cfg.Servers.Iter(); it.Next(); { + name := it.Key() + server := it.Value() if server == nil || server.Value == nil { continue } diff --git a/server/server_kafka_test.go b/server/server_kafka_test.go index da76c1dd8..c36648555 100644 --- a/server/server_kafka_test.go +++ b/server/server_kafka_test.go @@ -2,7 +2,6 @@ package
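// A sketch (hypothetical helper ensureDefaultServer) of the fallback that
// KafkaInfo.update applies above: when the AsyncAPI spec declares no servers,
// a default broker on :9092 is injected so the cluster stays reachable. The
// nil handling mirrors the change itself.
func ensureDefaultServer(cfg *asyncapi3.Config) {
	if cfg.Servers.Len() > 0 {
		return
	}
	if cfg.Servers == nil {
		cfg.Servers = &sortedmap.LinkedHashMap[string, *asyncapi3.ServerRef]{}
	}
	cfg.Servers.Set("mokapi", &asyncapi3.ServerRef{Value: &asyncapi3.Server{
		Host:     ":9092",
		Protocol: "kafka",
		Title:    "Mokapi Default Broker",
	}})
}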
server import ( "fmt" - "github.com/stretchr/testify/require" "mokapi/config/dynamic" "mokapi/config/static" "mokapi/kafka/kafkatest" @@ -14,6 +13,8 @@ import ( "mokapi/try" "testing" "time" + + "github.com/stretchr/testify/require" ) func TestKafkaServer(t *testing.T) { @@ -52,32 +53,41 @@ func TestKafkaServer_Update(t *testing.T) { { "add another broker", func(t *testing.T, m *KafkaManager) { - port := try.GetFreePort() - addr := fmt.Sprintf("127.0.0.1:%v", port) + port1 := try.GetFreePort() + addr1 := fmt.Sprintf("127.0.0.1:%v", port1) cfg := asyncapi3test.NewConfig( asyncapi3test.WithTitle("foo"), - asyncapi3test.WithServer("foo", "kafka", addr), + asyncapi3test.WithServer("foo", "kafka", addr1), ) m.UpdateConfig(dynamic.ConfigEvent{Config: &dynamic.Config{Data: cfg, Info: dynamic.ConfigInfo{Url: MustParseUrl("foo.yml")}}}) - port = try.GetFreePort() - addr = fmt.Sprintf("127.0.0.1:%v", port) - cfg.Servers["bar"] = &asyncapi3.ServerRef{Value: &asyncapi3.Server{ - Host: addr, + port2 := try.GetFreePort() + addr2 := fmt.Sprintf("127.0.0.1:%v", port2) + cfg.Servers.Set("bar", &asyncapi3.ServerRef{Value: &asyncapi3.Server{ + Host: addr2, Protocol: "kafka", - }} + }}) m.UpdateConfig(dynamic.ConfigEvent{Config: &dynamic.Config{Data: cfg, Info: dynamic.ConfigInfo{Url: MustParseUrl("foo.yml")}}}) // wait for kafka start time.Sleep(500 * time.Millisecond) - client := kafkatest.NewClient(addr, "test") - defer client.Close() + client1 := kafkatest.NewClient(addr1, "test") + defer client1.Close() - r, err := client.Metadata(0, &metaData.Request{}) + r, err := client1.Metadata(0, &metaData.Request{}) + require.NoError(t, err) + require.Len(t, r.Brokers, 1) + require.Equal(t, int32(port1), r.Brokers[0].Port) + + client2 := kafkatest.NewClient(addr2, "test") + defer client2.Close() + + r, err = client2.Metadata(0, &metaData.Request{}) require.NoError(t, err) - require.Len(t, r.Brokers, 2) + require.Len(t, r.Brokers, 1) + require.Equal(t, int32(port2), r.Brokers[0].Port) }, }, { @@ -90,10 +100,10 @@ func TestKafkaServer_Update(t *testing.T) { ) m.UpdateConfig(dynamic.ConfigEvent{Config: &dynamic.Config{Data: cfg, Info: dynamic.ConfigInfo{Url: MustParseUrl("foo.yml")}}}) - cfg.Servers["broker"] = &asyncapi3.ServerRef{Value: &asyncapi3.Server{ + cfg.Servers.Set("broker", &asyncapi3.ServerRef{Value: &asyncapi3.Server{ Host: addr, Protocol: "kafka", - }} + }}) m.UpdateConfig(dynamic.ConfigEvent{Config: &dynamic.Config{Data: cfg, Info: dynamic.ConfigInfo{Url: MustParseUrl("foo.yml")}}}) @@ -129,7 +139,7 @@ func TestKafkaServer_Update(t *testing.T) { require.NoError(t, err) require.Len(t, r.Brokers, 1) - delete(cfg.Servers, "") + cfg.Servers.Del("") m.UpdateConfig(dynamic.ConfigEvent{Config: &dynamic.Config{Data: cfg, Info: dynamic.ConfigInfo{Url: MustParseUrl("foo.yml")}}}) r, err = client.Metadata(0, &metaData.Request{}) @@ -178,7 +188,7 @@ func TestKafkaServer_Update(t *testing.T) { r, err = client2.Metadata(0, &metaData.Request{}) require.NoError(t, err) - require.Len(t, r.Brokers, 2) + require.Len(t, r.Brokers, 1) cfg = asyncapi3test.NewConfig( asyncapi3test.WithServer("bar", "kafka", addr2), @@ -210,11 +220,11 @@ func TestKafkaServer_Update(t *testing.T) { ) m.UpdateConfig(dynamic.ConfigEvent{Config: &dynamic.Config{Data: cfg, Info: dynamic.ConfigInfo{Url: MustParseUrl("foo.yml")}}}) - delete(cfg.Servers, "kafka") - cfg.Servers["broker"] = &asyncapi3.ServerRef{Value: &asyncapi3.Server{ + cfg.Servers.Del("kafka") + cfg.Servers.Set("broker", &asyncapi3.ServerRef{Value: &asyncapi3.Server{ Host: addr, 
Protocol: "kafka", - }} + }}) // wait for kafka start time.Sleep(500 * time.Millisecond) diff --git a/server/server_mqtt.go b/server/server_mqtt.go index 4c7b80750..e95467bdb 100644 --- a/server/server_mqtt.go +++ b/server/server_mqtt.go @@ -1,13 +1,14 @@ package server import ( - log "github.com/sirupsen/logrus" "mokapi/config/dynamic" "mokapi/engine/common" "mokapi/runtime" "mokapi/runtime/monitor" "mokapi/server/service" "sync" + + log "github.com/sirupsen/logrus" ) type MqttManager struct { @@ -96,7 +97,9 @@ func (c *mqttCluster) update(cfg *runtime.MqttInfo, monitor *monitor.Mqtt) { func (c *mqttCluster) updateBrokers(cfg *runtime.MqttInfo, monitor *monitor.Mqtt) { brokers := c.brokers c.brokers = make(map[string]Broker) - for name, server := range cfg.Servers { + for it := cfg.Servers.Iter(); it.Next(); { + name := it.Key() + server := it.Value() if server == nil || server.Value == nil { continue } diff --git a/sortedmap/sortedmap.go b/sortedmap/sortedmap.go index 7af39fa08..691c48de8 100644 --- a/sortedmap/sortedmap.go +++ b/sortedmap/sortedmap.go @@ -6,6 +6,8 @@ import ( "encoding/json" "fmt" "strings" + + "gopkg.in/yaml.v3" ) // LinkedHashMap defines the iteration ordering by the order @@ -90,7 +92,7 @@ func (m *LinkedHashMap[K, V]) Lookup(key K) V { } func (m *LinkedHashMap[K, V]) Iter() *Iterator[K, V] { - if m.list == nil { + if m == nil || m.list == nil { return &Iterator[K, V]{} } return &Iterator[K, V]{next: m.list.Front()} @@ -173,3 +175,50 @@ func (m *LinkedHashMap[K, V]) Resolve(token string) (interface{}, error) { } return nil, fmt.Errorf("unable to resolve %v", token) } + +func (m *LinkedHashMap[K, V]) UnmarshalJSON(b []byte) error { + dec := json.NewDecoder(bytes.NewReader(b)) + token, err := dec.Token() + if err != nil { + return err + } + if delim, ok := token.(json.Delim); ok && delim != '{' { + return fmt.Errorf("expected map, got %s", token) + } + for { + token, err = dec.Token() + if err != nil { + return err + } + if delim, ok := token.(json.Delim); ok && delim == '}' { + return nil + } + + v := *new(V) + err = dec.Decode(&v) + if err != nil { + return err + } + m.Set(token.(K), v) + } +} + +func (m *LinkedHashMap[K, V]) UnmarshalYAML(value *yaml.Node) error { + if value.Kind != yaml.MappingNode { + return fmt.Errorf("expected map, got %v", value.Tag) + } + for i := 0; i < len(value.Content); i += 2 { + var key any + err := value.Content[i].Decode(&key) + if err != nil { + return err + } + v := *new(V) + err = value.Content[i+1].Decode(&v) + if err != nil { + return err + } + m.Set(key.(K), v) + } + return nil +} diff --git a/sortedmap/sortedmap_test.go b/sortedmap/sortedmap_test.go index b8028d490..826182882 100644 --- a/sortedmap/sortedmap_test.go +++ b/sortedmap/sortedmap_test.go @@ -2,9 +2,11 @@ package sortedmap_test import ( "encoding/json" - "github.com/stretchr/testify/require" "mokapi/sortedmap" "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" ) func TestLinkedHashMap_Empty(t *testing.T) { @@ -78,3 +80,40 @@ func TestLinkedHashMap_Merge(t *testing.T) { m1.Merge(m2) require.Equal(t, map[string]int{"foo": 10, "yuh": 3, "bar": 2}, m1.ToMap()) } + +func TestLinkedHashMap(t *testing.T) { + testcases := []struct { + name string + test func(t *testing.T) + }{ + { + name: "json", + test: func(t *testing.T) { + m := &sortedmap.LinkedHashMap[string, int]{} + err := json.Unmarshal([]byte(`{"foo":1}`), m) + require.NoError(t, err) + require.Equal(t, 1, m.Len()) + require.Equal(t, 1, m.Lookup("foo")) + }, + }, + { + name: "yaml", + test: 
func(t *testing.T) { + m := &sortedmap.LinkedHashMap[string, int]{} + err := yaml.Unmarshal([]byte(`foo: 1`), m) + require.NoError(t, err) + require.Equal(t, 1, m.Len()) + require.Equal(t, 1, m.Lookup("foo")) + }, + }, + } + + t.Parallel() + for _, tc := range testcases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.test(t) + }) + } +} diff --git a/webui/e2e/Dashboard/http/books.spec.ts b/webui/e2e/Dashboard/http/books.spec.ts index 22c16a616..a0e897d15 100644 --- a/webui/e2e/Dashboard/http/books.spec.ts +++ b/webui/e2e/Dashboard/http/books.spec.ts @@ -1,115 +1,151 @@ +import { getCellByColumnName } from '../../helpers/table' import { test, expect } from '../../models/fixture-dashboard' test.describe('Visit Books API', () => { test.use({ colorScheme: 'dark' }) - const service = { - paths: [ - { path: '/books', summary: '', method: 'GET POST', lastRequest: '-', requests: '0 / 0' }, - ], - } - - test('Visit overview', async ({ dashboard }) => { + test('Verify overview', async ({ dashboard, page }) => { await dashboard.open() - const http = dashboard.http - await http.clickService('Books API') - - // service info - await expect(http.serviceInfo.name).toHaveText('Books API') - await expect(http.serviceInfo.version).toHaveText('1.0.0') - await expect(http.serviceInfo.contact).not.toBeVisible() - await expect(http.serviceInfo.description).toHaveText('A simple API to manage books in a library') - - // servers - const server = http.servers.getByRole('cell') - await expect(server.nth(0)).toHaveText('https://api.example.com/v1') - await expect(server.nth(1)).toHaveText('') - - // endpoints - const endpoints = http.endpoints.locator('tbody tr') - for (const [i, path] of service.paths.entries()) { - const cells = endpoints.nth(i).getByRole('cell') - await expect(cells.nth(0)).toHaveText(path.path) - await expect(cells.nth(1)).toHaveText(path.summary) - await expect(cells.nth(2)).toHaveText(path.method, {ignoreCase: false}) - await expect(cells.nth(3)).toHaveText(path.lastRequest) - await expect(cells.nth(4)).toHaveText(path.requests) - } - }) - test('Visit endpoint', async ({ dashboard, page }) => { - await dashboard.open() - const http = dashboard.http - await http.clickService('Books API') - - await test.step('/books', async () => { - await http.clickPath('/books') - const path = http.getPathModel() - await expect(path.path).toHaveText('/books') - await expect(path.service).toHaveText('Books API') - await expect(path.type).toHaveText('HTTP') - - let cells = path.methods.locator('tbody tr').nth(0).getByRole('cell') - await expect(cells.nth(0)).toHaveText('GET', {ignoreCase: false}) - await expect(cells.nth(0).locator('span')).toHaveClass('badge operation get') - await expect(cells.nth(1)).toHaveText('listBooks') - await expect(cells.nth(2)).toHaveText('Get books from the store') - - cells = path.methods.locator('tbody tr').nth(1).getByRole('cell') - await expect(cells.nth(0)).toHaveText('POST', {ignoreCase: false}) - await expect(cells.nth(0).locator('span')).toHaveClass('badge operation post') - await expect(cells.nth(1)).toHaveText('addBook') - await expect(cells.nth(2)).toHaveText('Add a new book') - - await test.step('visit method post', async () => { - await path.clickOperation('POST') - const op = http.getOperationModel() - - await expect(op.operation).toHaveText('POST', {ignoreCase: false}) - await expect(op.path).toHaveText('/books') - await expect(op.operationId).toHaveText('addBook') - await expect(op.service).toHaveText('Books API') - await 
expect(op.type).toHaveText('HTTP') - await expect(op.summary).toHaveText('Add a new book') - await expect(op.description).not.toBeVisible() - - await test.step("http request", async () => { - await expect(op.request.tabs.locator('.active')).toHaveText('Body') - await expect(op.request.body).not.toHaveText('') - - await test.step('click expand', async () => { - const expand = op.request.expand - await expand.button.click() - await expect(expand.code).toBeVisible() - await expect(expand.code).not.toHaveText('') - await expand.code.press('Escape', { delay: 500 }) - // without a second time, dialog does not disappear - await page.locator('body').press('Escape') - await expect(expand.code).not.toBeVisible() - }) + await page.getByRole('link', { name: 'Books API' }).click(); + + await test.step('Verify service info', async () => { + + const region = page.getByRole('region', { name: 'Info' }); + await expect(region.getByLabel('Name')).toHaveText('Books API'); + await expect(region.getByLabel('Version')).toHaveText('1.0.0'); + await expect(region.getByLabel('Contact')).not.toBeVisible(); + await expect(region.getByLabel('Description')).toHaveText('A simple API to manage books in a library'); + + }); + + await test.step('Verify servers', async () => { + + await page.getByRole('tab', { name: 'Servers' }).click(); + const table = page.getByRole('table', { name: 'Servers' }); + const rows = table.locator('tbody tr'); + await expect(rows).toHaveCount(1); + await expect(await getCellByColumnName(table, 'Url', rows.nth(0))).toHaveText('https://api.example.com/v1'); + await expect(await getCellByColumnName(table, 'Description', rows.nth(0))).toHaveText('-'); + + }); + + await test.step('Verify configs', async () => { + + await page.getByRole('tab', { name: 'Configs' }).click(); + const table = page.getByRole('table', { name: 'Configs' }); + const rows = table.locator('tbody tr'); + await expect(rows).toHaveCount(4); + await expect(await getCellByColumnName(table, 'URL', rows.nth(0))).toHaveText('file://cron.js'); + await expect(await getCellByColumnName(table, 'Provider', rows.nth(0))).toHaveText('File'); + await expect(await getCellByColumnName(table, 'Last Update', rows.nth(0))).not.toBeEmpty(); + + }); + + await test.step('Verify paths', async () => { + + await page.getByRole('tab', { name: 'Paths' }).click(); + + const table = page.getByRole('table', { name: 'Paths' }); + const rows = table.locator('tbody tr'); + await expect(rows).toHaveCount(1); + await expect(await getCellByColumnName(table, 'Path', rows.nth(0))).toHaveText('/books'); + await expect(await getCellByColumnName(table, 'Summary', rows.nth(0))).toHaveText(''); + await expect(await getCellByColumnName(table, 'Operations', rows.nth(0))).toHaveText('GET POST'); + await expect(await getCellByColumnName(table, 'Last Request', rows.nth(0))).toHaveText('-'); + await expect(await getCellByColumnName(table, 'Req / Err', rows.nth(0))).toHaveText('0 / 0'); + + await test.step('Verify path', async () => { + + await page.getByRole('link', { name: '/books' }).click(); + await expect(page).toHaveURL(/Books%20API\/books/) + + await expect(page.getByLabel('Path')).toHaveText('/books'); + await expect(page.getByLabel('Service', { exact: true })).toHaveText('Books API') + await expect(page.getByLabel('Type of API')).toHaveText('HTTP') + + await test.step('Verify methods', async () => { + + const table = page.getByRole('table', { name: 'Methods' }); + const rows = table.locator('tbody tr'); + await expect(rows).toHaveCount(2); + await expect(await 
getCellByColumnName(table, 'Method', rows.nth(0))).toHaveText('GET');
+                await expect(await getCellByColumnName(table, 'Operation ID', rows.nth(0))).toHaveText('listBooks');
+                await expect(await getCellByColumnName(table, 'Summary', rows.nth(0))).toHaveText('Get books from the store');
+                await expect(await getCellByColumnName(table, 'Last Request', rows.nth(0))).toHaveText('-');
+                await expect(await getCellByColumnName(table, 'Req / Err', rows.nth(0))).toHaveText('0 / 0');
+
+                await expect(await getCellByColumnName(table, 'Method', rows.nth(1))).toHaveText('POST');
+                await expect(await getCellByColumnName(table, 'Operation ID', rows.nth(1))).toHaveText('addBook');
+                await expect(await getCellByColumnName(table, 'Summary', rows.nth(1))).toHaveText('Add a new book');
+                await expect(await getCellByColumnName(table, 'Last Request', rows.nth(1))).toHaveText('-');
+                await expect(await getCellByColumnName(table, 'Req / Err', rows.nth(1))).toHaveText('0 / 0');
+
+                await test.step('Verify method post', async () => {
+
+                    await page.getByRole('link', { name: 'POST', exact: true }).click();
+
+                    await expect(page.getByLabel('Operation', { exact: true })).toHaveText('POST /books');
+                    await expect(page.getByLabel('Operation ID')).toHaveText('addBook');
+                    await expect(page.getByLabel('Service', { exact: true })).toHaveText('Books API');
+                    await expect(page.getByLabel('Type of API')).toHaveText('HTTP');
+                    await expect(page.getByLabel('Summary')).toHaveText('Add a new book');
+                    await expect(page.getByLabel('Description', { exact: true })).not.toBeVisible();
+
+                    await test.step("Verify HTTP request", async () => {
+
+                        const request = page.getByRole('region', { name: 'Request' });
+                        await expect(request.getByLabel('Request content type')).toHaveText('application/json');
+                        await expect(request.getByLabel('Required')).toHaveText('true');
+
+                        await test.step('Verify expand schema', async () => {
+
+                            await request.getByRole('button', { name: 'Expand' }).click();
+                            const dialog = page.getByRole('dialog');
+                            await expect(dialog).toBeVisible();
+                            await expect(dialog.getByRole('region', { name: 'Content' })).not.toHaveText('');
+
+                            // first press is effectively a focus reset, not a close.
+                            await page.keyboard.press('Escape', { delay: 500 });
+                            await page.keyboard.press('Escape', { delay: 500 });
+                            await expect(dialog).not.toBeVisible();
+
+                        });
+
+                        await test.step('Verify example', async () => {
+
+                            await request.getByRole('button', { name: 'Example' }).click();
+                            const dialog = page.getByRole('dialog');
+                            await expect(dialog).toBeVisible()
+                            await dialog.getByRole('button', { name: 'Example' }).click();
+                            await expect(dialog.getByRole('region', { name: 'Source' })).toContainText(`"id":`)
+
+                            // first press is effectively a focus reset, not a close. 
+ await page.keyboard.press('Escape', { delay: 500 }); + await page.keyboard.press('Escape', { delay: 500 }); + await expect(dialog).not.toBeVisible(); + + }); + }); + + await test.step("Verify response", async () => { + + const response = page.getByRole('region', { name: 'Response' }); + await expect(response.getByRole('tab', {name: '201 Created'})).toBeVisible(); + await expect(response.getByLabel('Description')).toHaveText('The created book'); + + await expect(response.getByRole('tab', {name: 'Body'})).not.toContainClass('disabled'); + await expect(response.getByRole('tab', {name: 'Headers'})).toContainClass('disabled'); + + }) - await test.step('click example', async () => { - const example = op.request.example - await example.button.click() - await example.example.click() - await expect(example.code).toBeVisible() - await expect(example.code).toContainText(`"id":`) - await op.request.example.code.press('Escape', { delay: 500 }) - // without a second time, dialog does not disappear - await page.locator('body').press('Escape') - await expect(example.code).not.toBeVisible() }) - }) + + }); - await test.step("http response", async () => { - await expect(op.response.element.getByRole('tab', {name: '201 Created'})).toBeVisible() - await expect(op.response.element.getByTestId('response-description-201')).toHaveText('The created book') - await expect(op.response.element.getByRole('tab', {name: 'Body'})).not.toContainClass('disabled') - await expect(op.response.element.getByRole('tab', {name: 'Headers'})).toContainClass('disabled') - }) + }); - }) - }) + }); }) }) \ No newline at end of file diff --git a/webui/e2e/Dashboard/kafka/cluster.spec.ts b/webui/e2e/Dashboard/kafka/cluster.spec.ts index ed3b71508..eede01fec 100644 --- a/webui/e2e/Dashboard/kafka/cluster.spec.ts +++ b/webui/e2e/Dashboard/kafka/cluster.spec.ts @@ -27,7 +27,9 @@ test('Visit Kafka cluster "Kafka World"', async ({ page }) => { }) await test.step('Check broker section', async () => { - const brokers = useTable(page.getByRole('region', { name: "Brokers" }).getByRole('table', { name: 'Brokers' }), ['Name', 'Host', 'Description', 'Tags']) + await page.getByRole('tab', { name: 'Servers' }).click(); + + const brokers = useTable(page.getByRole('table', { name: 'Servers' }), ['Name', 'Host', 'Description', 'Tags']) const broker = brokers.getRow(1) await expect(broker.getCellByName('Name')).toHaveText(cluster.brokers[0].name) await expect(broker.getCellByName('Host')).toHaveText(cluster.brokers[0].url) @@ -36,15 +38,21 @@ test('Visit Kafka cluster "Kafka World"', async ({ page }) => { }) await test.step('Check topic section', async () => { - const table = page.getByRole('region', { name: "Topics" }).getByRole('table', { name: 'Topics' }) + await page.getByRole('tab', { name: 'Topics' }).click(); + + const table = page.getByRole('table', { name: 'Topics' }) await expect(table).toBeVisible() const topics = useKafkaTopics(table) - await topics.testTopic(0, cluster.topics[0]) - await topics.testTopic(0, cluster.topics[0]) + await topics.testTopic(1, cluster.topics[0]) + await topics.testTopic(2, cluster.topics[1]) }) + await useKafkaMessages(page).test(page.getByRole('region', { name: "Recent Messages" }).getByRole('table', { name: 'Recent Messages' })) + await test.step('Check groups section', async () => { - const table = page.getByRole('region', { name: "Groups" }).getByRole('table', { name: 'Groups' }) + await page.getByRole('tab', { name: 'Groups' }).click(); + + const table = page.getByRole('table', { name: 'Groups' }) await 
expect(table).toBeVisible() const groups = useKafkaGroups(table) await groups.testGroup(0, cluster.groups[0]) @@ -52,14 +60,14 @@ test('Visit Kafka cluster "Kafka World"', async ({ page }) => { }) await test.step('Check config section', async () => { - const configs = useTable(page.getByRole('region', { name: "Configs" }).getByRole('table', { name: 'Configs' }), ['URL', 'Provider', 'Last Update']) + await page.getByRole('tab', { name: 'Configs' }).click(); + + const configs = useTable(page.getByRole('table', { name: 'Configs' }), ['URL', 'Provider', 'Last Update']) const config = configs.getRow(1) await expect(config.getCellByName('URL')).toHaveText('https://www.example.com/foo/bar/communication/service/asyncapi.json') await expect(config.getCellByName('Provider')).toHaveText('HTTP') await expect(config.getCellByName('Last Update')).toHaveText(formatDateTime('2023-02-15T08:49:25.482366+01:00')) }) - - await useKafkaMessages(page).test(page.getByRole('region', { name: "Recent Messages" }).getByRole('table', { name: 'Recent Messages' })) }) test('Visit Kafka cluster config file', async ({ page, context }) => { @@ -70,6 +78,8 @@ test('Visit Kafka cluster config file', async ({ page, context }) => { await tabs.kafka.click() await page.getByRole('table', { name: 'Kafka Clusters' }).getByText(cluster.name).click() + + await page.getByRole('tab', { name: 'Configs' }).click(); await page.getByRole('table', { name: 'Configs' }).getByText('https://www.example.com/foo/bar/communication/service/asyncapi.json').click() await expect(page.getByLabel('URL')).toHaveText('https://www.example.com/foo/bar/communication/service/asyncapi.json') @@ -78,8 +88,8 @@ test('Visit Kafka cluster config file', async ({ page, context }) => { const { test: testSourceView } = useSourceView(page.getByRole('region', { name: 'Content' })) await testSourceView({ - lines: '338 lines', - size: '8.84 kB', + lines: '342 lines', + size: '8.94 kB', content: /"name": "Kafka World"/, filename: 'asyncapi.json', clipboard: '"name": "Kafka World"' diff --git a/webui/e2e/Dashboard/kafka/cluster.ts b/webui/e2e/Dashboard/kafka/cluster.ts index 7a87b045a..60ae6e487 100644 --- a/webui/e2e/Dashboard/kafka/cluster.ts +++ b/webui/e2e/Dashboard/kafka/cluster.ts @@ -23,21 +23,18 @@ export const cluster = { partitions: [ { id: '0', - leader: 'foo (localhost:9002)', startOffset: '0', offset: '4', segments: '1' }, { id: '1', - leader: 'foo (localhost:9002)', startOffset: '0', offset: '3', segments: '1' }, { id: '2', - leader: 'foo (localhost:9002)', startOffset: '0', offset: '3', segments: '1' @@ -60,12 +57,11 @@ export const cluster = { { name: 'mokapi.shop.userSignedUp', description: 'This channel contains a message per each user who signs up in our application.', - lastMessage: formatTimestamp(1652035690), - messages: '1', + lastMessage: '-', + messages: '0', partitions: [ { id: '0', - leader: 'foo (localhost:9002)', startOffset: '0', offset: '0', segments: '1' @@ -101,40 +97,17 @@ export const cluster = { name: 'foo', state: 'Stable', protocol: 'Range', - coordinator: 'localhost:9092', - leader: 'julie', - members: [ - { - name: 'julie', - address: '127.0.0.1:15001', - clientSoftware: 'mokapi 1.0', - lastHeartbeat: formatTimestamp(1654771269), - partitions: { 'mokapi.shop.products': [ 0, 1 ], 'mokapi.shop.userSignedUp': [ 0 ] } - }, - { - name: 'herman', - address: '127.0.0.1:15002', - clientSoftware: 'mokapi 1.0', - lastHeartbeat: formatTimestamp(1654872269), - partitions: { 'mokapi.shop.products': [ 2 ], 'mokapi.shop.userSignedUp': [ ] } - } - 
], + generation: '-', + lastRebalancing: '-', + members: 2, }, { name: 'bar', state: 'Stable', protocol: 'Range', - coordinator: 'localhost:9092', - leader: 'george', - members: [ - { - name: 'george', - address: '127.0.0.1:15003', - clientSoftware: 'mokapi 1.0', - lastHeartbeat: formatTimestamp(1654721269), - partitions: { 'mokapi.shop.userSignedUp': [ 0 ] } - }, - ], + generation: '-', + lastRebalancing: '-', + members: 1 } ] } \ No newline at end of file diff --git a/webui/e2e/components/kafka.ts b/webui/e2e/components/kafka.ts index 968adbb5b..13aa410a3 100644 --- a/webui/e2e/components/kafka.ts +++ b/webui/e2e/components/kafka.ts @@ -28,22 +28,16 @@ export interface Group { name: string state: string protocol: string - coordinator: string - leader: string - members: { - name: string - address: string - clientSoftware: string - lastHeartbeat: string - partitions: { [topicName: string]: number[] } - }[], + generation: string + lastRebalancing: string + members: number } export function useKafkaGroups(table: Locator, topic?: string) { return { async testGroup(row: number, group: Group, lags?: string) { await test.step(`Check Kafka group in row ${row}`, async () => { - let columns = ['Name', 'State', 'Protocol', 'Coordinator', 'Leader', 'Members'] + let columns = ['Name', 'State', 'Protocol', 'Generation', 'Last Rebalancing', 'Members'] if (lags) { columns.push('Lag') } @@ -53,25 +47,9 @@ export function useKafkaGroups(table: Locator, topic?: string) { await expect(g.getCellByName('Name')).toHaveText(group.name) await expect(g.getCellByName('State')).toHaveText(group.state) await expect(g.getCellByName('Protocol')).toHaveText(group.protocol) - await expect(g.getCellByName('Coordinator')).toHaveText(group.coordinator) - await expect(g.getCellByName('Leader')).toHaveText(group.leader) if (lags) { await expect(g.getCellByName('Lag')).toHaveText(lags) } - - const page = table.page() - for (const [i, member] of group.members.entries()) { - await g.getCellByName('Members').getByRole('listitem').nth(i).hover() - await expect(page.getByRole('tooltip', { name: member.name })).toBeVisible() - await expect(page.getByRole('tooltip', { name: member.name }).getByLabel('Address')).toHaveText(member.address) - await expect(page.getByRole('tooltip', { name: member.name }).getByLabel('Client Software')).toHaveText(member.clientSoftware) - await expect(page.getByRole('tooltip', { name: member.name }).getByLabel('Last Heartbeat')).toHaveText(member.lastHeartbeat) - if (topic) { - await expect(page.getByRole('tooltip', { name: member.name }).getByLabel('Partitions')).toHaveText(member.partitions[topic].join(', ')) - }else { - await expect(page.getByRole('tooltip', { name: member.name }).getByLabel('Topics')).toHaveText(Object.keys(member.partitions).join(',')) - } - } }) } } @@ -79,20 +57,18 @@ export function useKafkaGroups(table: Locator, topic?: string) { export interface Partition { id: string - leader: string startOffset: string offset: string segments: string } export function useKafkaPartitions(table: Locator) { - const partitions = useTable(table, ['ID', 'Leader', 'Start Offset', 'Offset', 'Segments']) + const partitions = useTable(table, ['ID', 'Start Offset', 'Offset', 'Segments']) return { async testPartition(row: number, partition: Partition) { await test.step(`Check Kafka partition in row ${row}`, async () => { const p = partitions.getRow(row + 1) await expect(p.getCellByName('ID')).toHaveText(partition.id) - await expect(p.getCellByName('Leader')).toHaveText(partition.leader) await 
expect(p.getCellByName('Start Offset')).toHaveText(partition.startOffset) await expect(p.getCellByName('Offset')).toHaveText(partition.offset) await expect(p.getCellByName('Segments')).toHaveText(partition.segments) diff --git a/webui/e2e/dashboard-demo/kafka.spec.ts b/webui/e2e/dashboard-demo/kafka.spec.ts index 9dba22b2c..ba3ea3b3a 100644 --- a/webui/e2e/dashboard-demo/kafka.spec.ts +++ b/webui/e2e/dashboard-demo/kafka.spec.ts @@ -18,89 +18,111 @@ test('Visit Kafka Order Service', async ({ page }) => { }); - await test.step('Verify Brokers', async () => { + await test.step('Verify Servers', async () => { - await expect(page.getByRole('region', { name: 'Brokers' })).toBeVisible(); - const table = page.getByRole('table', { name: 'Brokers' }); - const rows = table.locator('tbody tr'); - await expect(rows).toHaveCount(1); - await expect(await getCellByColumnName(table, 'Name', rows.nth(0))).toHaveText('development'); - await expect(await getCellByColumnName(table, 'Host', rows.nth(0))).toHaveText('localhost:9092'); - await expect(await getCellByColumnName(table, 'Description', rows.nth(0))).toHaveText('Local development Kafka broker.'); + await page.getByRole('tab', { name: 'Servers' }).click(); + const table = page.getByRole('table', { name: 'Servers' }); + const rows = table.locator('tbody tr'); + await expect(rows).toHaveCount(1); + await expect(await getCellByColumnName(table, 'Name', rows.nth(0))).toHaveText('development'); + await expect(await getCellByColumnName(table, 'Host', rows.nth(0))).toHaveText('localhost:9092'); + await expect(await getCellByColumnName(table, 'Description', rows.nth(0))).toHaveText('Local development Kafka broker.'); }); await test.step('Verify Topics', async () => { - await expect(page.getByRole('region', { name: 'Topics' })).toBeVisible(); - const table = page.getByRole('table', { name: 'Topics' }); - const rows = table.locator('tbody tr'); - await expect(rows).toHaveCount(1); - await expect(await getCellByColumnName(table, 'Name', rows.nth(0))).toHaveText('order-topic'); - await expect(await getCellByColumnName(table, 'Description', rows.nth(0))).toHaveText('The Kafka topic for order events.'); - await expect(await getCellByColumnName(table, 'Last Message', rows.nth(0))).not.toHaveText('-'); - await expect(await getCellByColumnName(table, 'Messages', rows.nth(0))).toHaveText('2'); + await page.getByRole('tab', { name: 'Topics' }).click(); + const table = page.getByRole('table', { name: 'Topics' }); + const rows = table.locator('tbody tr'); + await expect(rows).toHaveCount(2); + await expect(await getCellByColumnName(table, 'Name', rows.nth(0))).toHaveText('order-topic'); + await expect(await getCellByColumnName(table, 'Description', rows.nth(0))).toHaveText('The Kafka topic for order events.'); + await expect(await getCellByColumnName(table, 'Last Message', rows.nth(0))).not.toHaveText('-'); + await expect(await getCellByColumnName(table, 'Messages', rows.nth(0))).toHaveText('2'); + + await test.step('Verify filtering by tags', async () => { + + const tags = page.getByRole('group', { name: 'Filter topics by tags' }); + await expect(tags.getByRole('checkbox', { name: 'All' })).toBeChecked(); + await tags.getByRole('checkbox', { name: 'user' }).click(); + await expect(rows).toHaveCount(1); + + }); }); await test.step('Verify Groups', async () => { - await expect(page.getByRole('region', { name: 'Groups' })).toBeVisible(); - const table = page.getByRole('table', { name: 'Groups' }); - const rows = table.locator('tbody tr'); - await expect(rows).toHaveCount(1); 
- await expect(await getCellByColumnName(table, 'Name', rows.nth(0))).toHaveText('order-status-group-100'); - await expect(await getCellByColumnName(table, 'State', rows.nth(0))).toHaveText('Stable'); - await expect(await getCellByColumnName(table, 'Protocol', rows.nth(0))).toHaveText('RoundRobinAssigner'); - await expect(await getCellByColumnName(table, 'Coordinator', rows.nth(0))).toHaveText('localhost:9092'); - await expect(await getCellByColumnName(table, 'Leader', rows.nth(0))).toHaveText(/^producer/); - const members = await getCellByColumnName(table, 'Members', rows.nth(0)) - await expect(members).toHaveText(/^producer/); - - await members.hover(); - const tooltip = page.getByRole('tooltip') - await expect(tooltip).toBeVisible(); - await expect(tooltip.getByLabel('Address')).not.toBeEmpty(); - await expect(tooltip.getByLabel('Client Software')).toHaveText('-'); - await expect(tooltip.getByLabel('Last Heartbeat')).not.toBeEmpty(); - await expect(tooltip.getByLabel('Topics')).toHaveText('order-topic'); - - await rows.nth(0).getByRole('cell').nth(0).click(); - const dialog = page.getByRole('dialog', { name: 'Group Details' }); - await expect(dialog).toBeVisible(); - await expect(dialog.getByLabel('Name')).toHaveText('order-status-group-100'); - await expect(dialog.getByLabel('State')).toHaveText('Stable'); - await expect(dialog.getByLabel('Protocol')).toHaveText('RoundRobinAssigner'); - await expect(dialog.getByLabel('Coordinator')).toHaveText('localhost:9092'); - await expect(dialog.getByLabel('Leader')).toHaveText(/^producer/); - - await dialog.getByRole('tab', { name: 'Topics' }).click(); - const topics = dialog.getByRole('table', { name: 'Topics' }); - await expect(await getCellByColumnName(topics, 'Topic')).toHaveText('order-topic'); - - await dialog.getByRole('tab', { name: 'Members' }).click(); - const membersPanel = dialog.getByRole('tabpanel', { name: 'Members' }); - await expect(membersPanel).toBeVisible(); - await expect(membersPanel.getByRole('tab', { name: /^producer/ })).toHaveAttribute('aria-selected', 'true'); - await expect(membersPanel.getByLabel('Address')).not.toBeEmpty(); - await expect(membersPanel.getByLabel('Client Software')).toHaveText('-'); - await expect(membersPanel.getByLabel('Heartbeat')).not.toBeEmpty(); - const memberPartitions = membersPanel.getByRole('table', { name: 'Member Partitions' }) - await expect(await getCellByColumnName(memberPartitions, 'Topic')).toHaveText('order-topic'); - await expect(await getCellByColumnName(memberPartitions, 'Partitions')).toHaveText('0'); - - await dialog.getByRole('button', { name: 'Close' }).click(); + await page.getByRole('tab', { name: 'Groups' }).click(); + const table = page.getByRole('table', { name: 'Groups' }); + const rows = table.locator('tbody tr'); + await expect(rows).toHaveCount(1); + await expect(await getCellByColumnName(table, 'Name', rows.nth(0))).toHaveText('order-status-group-100'); + await expect(await getCellByColumnName(table, 'State', rows.nth(0))).toHaveText('Stable'); + await expect(await getCellByColumnName(table, 'Protocol', rows.nth(0))).toHaveText('RoundRobinAssigner'); + await expect(await getCellByColumnName(table, 'Generation', rows.nth(0))).toHaveText('0'); + await expect(await getCellByColumnName(table, 'Last Rebalancing', rows.nth(0))).not.toBeEmpty(); + await expect(await getCellByColumnName(table, 'Members', rows.nth(0))).toHaveText('1'); + + await rows.nth(0).getByRole('cell').nth(0).click(); + await expect(page.getByLabel('Group Name')).toHaveText('order-status-group-100'); + 
await expect(page.getByLabel('State')).toHaveText('Stable'); + await expect(page.getByLabel('Protocol')).toHaveText('RoundRobinAssigner'); + await expect(page.getByLabel('Generation', { exact: true })).toHaveText('0'); + + await test.step('Verify Members', async () => { + + const region = page.getByRole('region', { name: 'Members' }); + await expect(region).toBeVisible(); + + const members = region.getByRole('table', { name: 'Members' }); + const rows = members.locator('tbody tr'); + await expect(rows).toHaveCount(1); + await expect((await getCellByColumnName(members, 'Group leader', rows.nth(0))).getByLabel('Group leader')).toBeVisible(); + await expect(await getCellByColumnName(members, 'Name', rows.nth(0))).toHaveText(/^consumer-1/); + await expect(await getCellByColumnName(members, 'Address', rows.nth(0))).not.toBeEmpty(); + await expect(await getCellByColumnName(members, 'Client Software', rows.nth(0))).toHaveText('-'); + await expect(await getCellByColumnName(members, 'Heartbeat', rows.nth(0))).not.toBeEmpty(); + + await test.step('Verify Member', async () => { + + await members.locator('tbody tr').click(); + + await expect(page.getByLabel('Member Name')).toHaveText(/^consumer-1/); + await expect(page.getByLabel('Client')).toHaveText(/^consumer-1/); + await expect(page.getByLabel('Heartbeat')).not.toBeEmpty(); + + const region = page.getByRole('region', { name: 'Partitions' }); + await expect(region).toBeVisible(); + + const table = region.getByRole('table', { name: 'Partitions' }); + const rows = table.locator('tbody tr'); + await expect(rows).toHaveCount(1); + await expect((await getCellByColumnName(table, 'Topic', rows.nth(0)))).toHaveText('order-topic'); + await expect(await getCellByColumnName(table, 'Partition', rows.nth(0))).toHaveText('0'); + + await page.goBack(); + + }); + }); + + await page.goBack(); }); await test.step('Verify Configs', async () => { + + await page.getByRole('tab', { name: 'Configs' }).click(); const table = page.getByRole('table', { name: 'Configs' }); await expect(await getCellByColumnName(table, 'URL')).toContainText('/asyncapi.yaml'); await expect(await getCellByColumnName(table, 'Provider')).toHaveText('File'); + }); await test.step('Verify Recent Messages', async () => { + await page.getByRole('tab', { name: 'Topics' }).click(); const region = page.getByRole('region', { name: 'Recent Messages' }); await expect(region).toBeVisible(); @@ -116,6 +138,7 @@ test('Visit Kafka Order Service', async ({ page }) => { await test.step('Visit Kafka Topic', async () => { + await page.getByRole('tab', { name: 'Topics' }).click(); await page.getByRole('table', { name: 'Topics' }).getByText('order-topic').click(); await expect(page.getByLabel('Topic', { exact: true })).toHaveText('order-topic'); await expect(page.getByLabel('Cluster')).toHaveText('Kafka Order Service API'); @@ -124,23 +147,55 @@ test('Visit Kafka Order Service', async ({ page }) => { await expect(page.getByLabel('Type of API')).toHaveText('Kafka'); - await test.step('Verify Message', async () => { + await test.step('Verify Message 1', async () => { await page.getByRole('table', { name: 'Recent Messages' }).locator('tbody tr').getByRole('link', { name: 'a914817b-c5f0-433e-8280-1cd2fe44234e' }).click(); await expect(page.getByLabel('Kafka Key')).toHaveText('a914817b-c5f0-433e-8280-1cd2fe44234e'); await expect(page.getByLabel('Kafka Topic')).toHaveText('order-topic'); - await expect(page.getByLabel('Kafka Topic')).toHaveAttribute('href', 
'/dashboard-demo/kafka/service/Kafka%20Order%20Service%20API/topic/order-topic'); + await expect(page.getByLabel('Kafka Topic')).toHaveAttribute('href', '/dashboard-demo/kafka/service/Kafka%20Order%20Service%20API/topics/order-topic'); await expect(page.getByLabel('Offset')).toHaveText('1'); await expect(page.getByRole('region', { name: 'Meta' }).getByLabel('Content Type')).toHaveText('application/json'); await expect(page.getByLabel('Key Type')).toHaveText('-'); await expect(page.getByLabel('Key Type')).not.toBeEmpty(); - + await expect(page.getByLabel('Client')).toHaveText('producer-1'); + const value = page.getByRole('region', { name: 'Value' }); await expect(value.getByLabel('Content Type')).toHaveText('application/json'); await expect(value.getByLabel('Lines of Code')).toHaveText('8 lines'); await expect(value.getByLabel('Size of Code')).toHaveText('249 B'); await expect(value.getByLabel('Content', { exact: true })).toContainText('"orderId": "a914817b-c5f0-433e-8280-1cd2fe44234e",') + await test.step('Verify Producer', async () => { + await page.getByLabel('Client').getByRole('link').click(); + await expect(page.getByLabel('ClientId')).toHaveText('producer-1'); + await expect(page.getByLabel('Address')).not.toBeEmpty(); + + await page.goBack(); + }) + + await page.goBack(); + + }); + + await test.step('Verify Message 2', async () => { + + await page.getByRole('table', { name: 'Recent Messages' }).locator('tbody tr').getByRole('link', { name: 'random-message-1' }).click(); + await expect(page.getByLabel('Kafka Key')).toHaveText('random-message-1'); + await expect(page.getByLabel('Kafka Topic')).toHaveText('order-topic'); + await expect(page.getByLabel('Kafka Topic')).toHaveAttribute('href', '/dashboard-demo/kafka/service/Kafka%20Order%20Service%20API/topics/order-topic'); + await expect(page.getByLabel('Offset')).toHaveText('0'); + await expect(page.getByRole('region', { name: 'Meta' }).getByLabel('Content Type')).toHaveText('application/json'); + await expect(page.getByLabel('Key Type')).toHaveText('-'); + await expect(page.getByLabel('Key Type')).not.toBeEmpty(); + await expect(page.getByLabel('Client')).toHaveText('mokapi-script'); + + await test.step('Verify Producer Script', async () => { + await page.getByLabel('Client').getByRole('link').click(); + await expect(page.getByLabel('URL')).toHaveText(/kafka.ts$/); + + await page.goBack(); + }) + await page.goBack(); }); @@ -153,7 +208,6 @@ test('Visit Kafka Order Service', async ({ page }) => { const rows = table.locator('tbody tr'); await expect(rows).toHaveCount(1); await expect(await getCellByColumnName(table, 'ID')).toHaveText('0'); - await expect(await getCellByColumnName(table, 'Leader')).toHaveText('development (localhost:9092)'); await expect(await getCellByColumnName(table, 'Start Offset')).toHaveText('0'); await expect(await getCellByColumnName(table, 'Offset')).toHaveText('2'); await expect(await getCellByColumnName(table, 'Segments')).toHaveText('1'); @@ -170,9 +224,9 @@ test('Visit Kafka Order Service', async ({ page }) => { await expect(await getCellByColumnName(table, 'Name')).toHaveText('order-status-group-100'); await expect(await getCellByColumnName(table, 'State')).toHaveText('Stable'); await expect(await getCellByColumnName(table, 'Protocol')).toHaveText('RoundRobinAssigner'); - await expect(await getCellByColumnName(table, 'Coordinator')).toHaveText('localhost:9092'); - await expect(await getCellByColumnName(table, 'Leader')).toHaveText(/^producer/); - await expect(await getCellByColumnName(table, 
'Members')).toContainText(/^producer/); + await expect(await getCellByColumnName(table, 'Generation')).toHaveText('0'); + await expect(await getCellByColumnName(table, 'Last Rebalancing')).not.toBeEmpty(); + await expect(await getCellByColumnName(table, 'Members')).toHaveText('1') await expect(await getCellByColumnName(table, 'Lag')).toHaveText('0'); }); diff --git a/webui/e2e/dashboard-demo/petstore.spec.ts b/webui/e2e/dashboard-demo/petstore.spec.ts index 0080e31ef..f6881c83c 100644 --- a/webui/e2e/dashboard-demo/petstore.spec.ts +++ b/webui/e2e/dashboard-demo/petstore.spec.ts @@ -23,12 +23,17 @@ test('Visit Petstore Demo', async ({ page }) => { }); await test.step('Verify Servers', async () => { + + await page.getByRole('tab', { name: 'Servers' }).click(); const table = page.getByRole('table', { name: 'Servers'}); const url = await getCellByColumnName(table, 'Url'); await expect(url).toHaveText('http://petstore.swagger.io/v2'); + }); await test.step('Verify Paths', async () => { + + await page.getByRole('tab', { name: 'Paths' }).click(); const region = page.getByRole('region', { name: 'Paths' }); await expect(region).toBeVisible(); @@ -42,7 +47,7 @@ test('Visit Petstore Demo', async ({ page }) => { const row = table.locator('tbody tr').nth(0); await expect(await getCellByColumnName(table, 'Path', row)).toHaveText('/pet'); await expect(await getCellByColumnName(table, 'Operations', row)).toHaveText('POST PUT'); - await expect(await getCellByColumnName(table, 'Requests / Errors', row)).toHaveText('1 / 0'); + await expect(await getCellByColumnName(table, 'Req / Err', row)).toHaveText('1 / 0'); await region.getByRole('checkbox', { name: 'store' }).click(); await region.getByRole('checkbox', { name: 'user' }).click(); @@ -58,6 +63,8 @@ test('Visit Petstore Demo', async ({ page }) => { }); await test.step('Verify Configs', async () => { + + await page.getByRole('tab', { name: 'Configs' }).click(); const table = page.getByRole('table', { name: 'Configs' }); const rows = table.locator('tbody tr'); await expect(await getCellByColumnName(table, 'URL', rows.nth(0))).toContainText('/petstore.yaml'); @@ -65,9 +72,12 @@ test('Visit Petstore Demo', async ({ page }) => { await expect(await getCellByColumnName(table, 'URL', rows.nth(1))).toContainText('/z.petstore.fix.yaml'); await expect(await getCellByColumnName(table, 'Provider', rows.nth(1))).toHaveText('File'); + }); await test.step('Verify Recent Requests', async () => { + + await page.getByRole('tab', { name: 'Paths' }).click(); const table = page.getByRole('table', { name: 'Recent Requests' }); let rows = table.locator('tbody tr'); @@ -117,7 +127,21 @@ test('Visit Petstore Demo', async ({ page }) => { await expect(rows).toHaveCount(3); await expect(await getCellByColumnName(table, 'Method', rows.nth(0))).toHaveText('DELETE'); await expect(await getCellByColumnName(table, 'Operation ID', rows.nth(0))).toHaveText('deletePet'); - await expect(await getCellByColumnName(table, 'Summary', rows.nth(0))).toHaveText(' Deletes a pet'); + await expect(await getCellByColumnName(table, 'Summary', rows.nth(0))).toHaveText('Deletes a pet'); + await expect(await getCellByColumnName(table, 'Last Request', rows.nth(0))).toHaveText('-'); + await expect(await getCellByColumnName(table, 'Req / Err', rows.nth(0))).toHaveText('0 / 0'); + + await expect(await getCellByColumnName(table, 'Method', rows.nth(1))).toHaveText('GET'); + await expect(await getCellByColumnName(table, 'Operation ID', rows.nth(1))).toHaveText('getPetById'); + await expect(await 
getCellByColumnName(table, 'Summary', rows.nth(1))).toHaveText('Find pet by ID'); + await expect(await getCellByColumnName(table, 'Last Request', rows.nth(1))).not.toHaveText('-'); + await expect(await getCellByColumnName(table, 'Req / Err', rows.nth(1))).toHaveText('1 / 0'); + + await expect(await getCellByColumnName(table, 'Method', rows.nth(2))).toHaveText('POST'); + await expect(await getCellByColumnName(table, 'Operation ID', rows.nth(2))).toHaveText('updatePetWithForm'); + await expect(await getCellByColumnName(table, 'Summary', rows.nth(2))).toHaveText('Updates a pet in the store with form data'); + await expect(await getCellByColumnName(table, 'Last Request', rows.nth(2))).toHaveText('-'); + await expect(await getCellByColumnName(table, 'Req / Err', rows.nth(2))).toHaveText('0 / 0'); const requests = page.getByRole('table', { name: 'Recent Requests' }); await expect(requests.locator('tbody tr')).toHaveCount(1); diff --git a/webui/package-lock.json b/webui/package-lock.json index f1cbdd000..e0d2b55ee 100644 --- a/webui/package-lock.json +++ b/webui/package-lock.json @@ -12,7 +12,7 @@ "@ssthouse/vue3-tree-chart": "^0.3.0", "@types/bootstrap": "^5.2.10", "@types/mokapi": "^0.29.1", - "@types/nodemailer": "^7.0.5", + "@types/nodemailer": "^7.0.9", "@types/whatwg-mimetype": "^3.0.2", "ace-builds": "^1.43.5", "bootstrap": "^5.3.8", @@ -28,9 +28,9 @@ "ldapts": "^8.1.3", "mime-types": "^3.0.2", "ncp": "^2.0.0", - "nodemailer": "^7.0.12", + "nodemailer": "^7.0.13", "vue": "^3.5.27", - "vue-router": "^4.6.4", + "vue-router": "^5.0.0", "vue3-ace-editor": "^2.2.4", "vue3-highlightjs": "^1.0.5", "vue3-markdown-it": "^1.0.10", @@ -41,7 +41,7 @@ "@playwright/test": "^1.57.0", "@rushstack/eslint-patch": "^1.15.0", "@types/js-yaml": "^4.0.9", - "@types/node": "^25.0.9", + "@types/node": "^25.1.0", "@vitejs/plugin-vue": "^6.0.3", "@vue/eslint-config-prettier": "^10.2.0", "@vue/eslint-config-typescript": "^14.6.0", @@ -52,792 +52,24 @@ "prettier": "^3.8.1", "typescript": "~5.9.3", "vite": "^7.3.1", - "vue-tsc": "^3.2.3", + "vue-tsc": "^3.2.4", "xml2js": "^0.6.2" } }, - "node_modules/@aws-crypto/sha256-browser": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", - "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", - "license": "Apache-2.0", - "dependencies": { - "@aws-crypto/sha256-js": "^5.2.0", - "@aws-crypto/supports-web-crypto": "^5.2.0", - "@aws-crypto/util": "^5.2.0", - "@aws-sdk/types": "^3.222.0", - "@aws-sdk/util-locate-window": "^3.0.0", - "@smithy/util-utf8": "^2.0.0", - "tslib": "^2.6.2" - } - }, - "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", - "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", - "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/is-array-buffer": "^2.2.0", - "tslib": "^2.6.2" - }, - "engines": { - 
"node": ">=14.0.0" - } - }, - "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", - "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/util-buffer-from": "^2.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@aws-crypto/sha256-js": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", - "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", - "license": "Apache-2.0", - "dependencies": { - "@aws-crypto/util": "^5.2.0", - "@aws-sdk/types": "^3.222.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@aws-crypto/supports-web-crypto": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", - "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - } - }, - "node_modules/@aws-crypto/util": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", - "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "^3.222.0", - "@smithy/util-utf8": "^2.0.0", - "tslib": "^2.6.2" - } - }, - "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", - "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", - "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/is-array-buffer": "^2.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", - "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/util-buffer-from": "^2.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@aws-sdk/client-sesv2": { - "version": "3.975.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sesv2/-/client-sesv2-3.975.0.tgz", - "integrity": "sha512-4R+hR6N2LbvTIf6Y2e9b9PQlVkAD5WmSRMAGslul5L/jCE0LzOYC+4RQ7u5EOv0mERozcYleLPK2Zc0jTn4gTg==", - "license": "Apache-2.0", - "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "^3.973.1", - "@aws-sdk/credential-provider-node": "^3.972.1", - "@aws-sdk/middleware-host-header": "^3.972.1", 
- "@aws-sdk/middleware-logger": "^3.972.1", - "@aws-sdk/middleware-recursion-detection": "^3.972.1", - "@aws-sdk/middleware-user-agent": "^3.972.2", - "@aws-sdk/region-config-resolver": "^3.972.1", - "@aws-sdk/signature-v4-multi-region": "3.972.0", - "@aws-sdk/types": "^3.973.0", - "@aws-sdk/util-endpoints": "3.972.0", - "@aws-sdk/util-user-agent-browser": "^3.972.1", - "@aws-sdk/util-user-agent-node": "^3.972.1", - "@smithy/config-resolver": "^4.4.6", - "@smithy/core": "^3.21.1", - "@smithy/fetch-http-handler": "^5.3.9", - "@smithy/hash-node": "^4.2.8", - "@smithy/invalid-dependency": "^4.2.8", - "@smithy/middleware-content-length": "^4.2.8", - "@smithy/middleware-endpoint": "^4.4.11", - "@smithy/middleware-retry": "^4.4.27", - "@smithy/middleware-serde": "^4.2.9", - "@smithy/middleware-stack": "^4.2.8", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/node-http-handler": "^4.4.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/smithy-client": "^4.10.12", - "@smithy/types": "^4.12.0", - "@smithy/url-parser": "^4.2.8", - "@smithy/util-base64": "^4.3.0", - "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.26", - "@smithy/util-defaults-mode-node": "^4.2.29", - "@smithy/util-endpoints": "^3.2.8", - "@smithy/util-middleware": "^4.2.8", - "@smithy/util-retry": "^4.2.8", - "@smithy/util-utf8": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/client-sso": { - "version": "3.974.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.974.0.tgz", - "integrity": "sha512-ci+GiM0c4ULo4D79UMcY06LcOLcfvUfiyt8PzNY0vbt5O8BfCPYf4QomwVgkNcLLCYmroO4ge2Yy1EsLUlcD6g==", - "license": "Apache-2.0", - "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "^3.973.0", - "@aws-sdk/middleware-host-header": "^3.972.1", - "@aws-sdk/middleware-logger": "^3.972.1", - "@aws-sdk/middleware-recursion-detection": "^3.972.1", - "@aws-sdk/middleware-user-agent": "^3.972.1", - "@aws-sdk/region-config-resolver": "^3.972.1", - "@aws-sdk/types": "^3.973.0", - "@aws-sdk/util-endpoints": "3.972.0", - "@aws-sdk/util-user-agent-browser": "^3.972.1", - "@aws-sdk/util-user-agent-node": "^3.972.1", - "@smithy/config-resolver": "^4.4.6", - "@smithy/core": "^3.21.0", - "@smithy/fetch-http-handler": "^5.3.9", - "@smithy/hash-node": "^4.2.8", - "@smithy/invalid-dependency": "^4.2.8", - "@smithy/middleware-content-length": "^4.2.8", - "@smithy/middleware-endpoint": "^4.4.10", - "@smithy/middleware-retry": "^4.4.26", - "@smithy/middleware-serde": "^4.2.9", - "@smithy/middleware-stack": "^4.2.8", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/node-http-handler": "^4.4.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/smithy-client": "^4.10.11", - "@smithy/types": "^4.12.0", - "@smithy/url-parser": "^4.2.8", - "@smithy/util-base64": "^4.3.0", - "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.25", - "@smithy/util-defaults-mode-node": "^4.2.28", - "@smithy/util-endpoints": "^3.2.8", - "@smithy/util-middleware": "^4.2.8", - "@smithy/util-retry": "^4.2.8", - "@smithy/util-utf8": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/core": { - "version": "3.973.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.973.1.tgz", - "integrity": 
"sha512-Ocubx42QsMyVs9ANSmFpRm0S+hubWljpPLjOi9UFrtcnVJjrVJTzQ51sN0e5g4e8i8QZ7uY73zosLmgYL7kZTQ==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "^3.973.0", - "@aws-sdk/xml-builder": "^3.972.1", - "@smithy/core": "^3.21.1", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/property-provider": "^4.2.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/signature-v4": "^5.3.8", - "@smithy/smithy-client": "^4.10.12", - "@smithy/types": "^4.12.0", - "@smithy/util-base64": "^4.3.0", - "@smithy/util-middleware": "^4.2.8", - "@smithy/util-utf8": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/credential-provider-env": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.972.1.tgz", - "integrity": "sha512-/etNHqnx96phy/SjI0HRC588o4vKH5F0xfkZ13yAATV7aNrb+5gYGNE6ePWafP+FuZ3HkULSSlJFj0AxgrAqYw==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "^3.973.0", - "@aws-sdk/types": "^3.973.0", - "@smithy/property-provider": "^4.2.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/credential-provider-http": { - "version": "3.972.2", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.972.2.tgz", - "integrity": "sha512-mXgdaUfe5oM+tWKyeZ7Vh/iQ94FrkMky1uuzwTOmFADiRcSk5uHy/e3boEFedXiT/PRGzgBmqvJVK4F6lUISCg==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "^3.973.1", - "@aws-sdk/types": "^3.973.0", - "@smithy/fetch-http-handler": "^5.3.9", - "@smithy/node-http-handler": "^4.4.8", - "@smithy/property-provider": "^4.2.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/smithy-client": "^4.10.12", - "@smithy/types": "^4.12.0", - "@smithy/util-stream": "^4.5.10", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.972.1.tgz", - "integrity": "sha512-OdbJA3v+XlNDsrYzNPRUwr8l7gw1r/nR8l4r96MDzSBDU8WEo8T6C06SvwaXR8SpzsjO3sq5KMP86wXWg7Rj4g==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "^3.973.0", - "@aws-sdk/credential-provider-env": "^3.972.1", - "@aws-sdk/credential-provider-http": "^3.972.1", - "@aws-sdk/credential-provider-login": "^3.972.1", - "@aws-sdk/credential-provider-process": "^3.972.1", - "@aws-sdk/credential-provider-sso": "^3.972.1", - "@aws-sdk/credential-provider-web-identity": "^3.972.1", - "@aws-sdk/nested-clients": "3.974.0", - "@aws-sdk/types": "^3.973.0", - "@smithy/credential-provider-imds": "^4.2.8", - "@smithy/property-provider": "^4.2.8", - "@smithy/shared-ini-file-loader": "^4.4.3", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/credential-provider-login": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.972.1.tgz", - "integrity": "sha512-CccqDGL6ZrF3/EFWZefvKW7QwwRdxlHUO8NVBKNVcNq6womrPDvqB6xc9icACtE0XB0a7PLoSTkAg8bQVkTO2w==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "^3.973.0", - "@aws-sdk/nested-clients": "3.974.0", - "@aws-sdk/types": "^3.973.0", - "@smithy/property-provider": "^4.2.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/shared-ini-file-loader": "^4.4.3", 
- "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.972.1.tgz", - "integrity": "sha512-DwXPk9GfuU/xG9tmCyXFVkCr6X3W8ZCoL5Ptb0pbltEx1/LCcg7T+PBqDlPiiinNCD6ilIoMJDWsnJ8ikzZA7Q==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/credential-provider-env": "^3.972.1", - "@aws-sdk/credential-provider-http": "^3.972.1", - "@aws-sdk/credential-provider-ini": "^3.972.1", - "@aws-sdk/credential-provider-process": "^3.972.1", - "@aws-sdk/credential-provider-sso": "^3.972.1", - "@aws-sdk/credential-provider-web-identity": "^3.972.1", - "@aws-sdk/types": "^3.973.0", - "@smithy/credential-provider-imds": "^4.2.8", - "@smithy/property-provider": "^4.2.8", - "@smithy/shared-ini-file-loader": "^4.4.3", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/credential-provider-process": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.972.1.tgz", - "integrity": "sha512-bi47Zigu3692SJwdBvo8y1dEwE6B61stCwCFnuRWJVTfiM84B+VTSCV661CSWJmIZzmcy7J5J3kWyxL02iHj0w==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "^3.973.0", - "@aws-sdk/types": "^3.973.0", - "@smithy/property-provider": "^4.2.8", - "@smithy/shared-ini-file-loader": "^4.4.3", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.972.1.tgz", - "integrity": "sha512-dLZVNhM7wSgVUFsgVYgI5hb5Z/9PUkT46pk/SHrSmUqfx6YDvoV4YcPtaiRqviPpEGGiRtdQMEadyOKIRqulUQ==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/client-sso": "3.974.0", - "@aws-sdk/core": "^3.973.0", - "@aws-sdk/token-providers": "3.974.0", - "@aws-sdk/types": "^3.973.0", - "@smithy/property-provider": "^4.2.8", - "@smithy/shared-ini-file-loader": "^4.4.3", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.972.1.tgz", - "integrity": "sha512-YMDeYgi0u687Ay0dAq/pFPKuijrlKTgsaB/UATbxCs/FzZfMiG4If5ksywHmmW7MiYUF8VVv+uou3TczvLrN4w==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "^3.973.0", - "@aws-sdk/nested-clients": "3.974.0", - "@aws-sdk/types": "^3.973.0", - "@smithy/property-provider": "^4.2.8", - "@smithy/shared-ini-file-loader": "^4.4.3", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/middleware-host-header": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.972.1.tgz", - "integrity": "sha512-/R82lXLPmZ9JaUGSUdKtBp2k/5xQxvBT3zZWyKiBOhyulFotlfvdlrO8TnqstBimsl4lYEYySDL+W6ldFh6ALg==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "^3.973.0", - "@smithy/protocol-http": "^5.3.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - 
"node_modules/@aws-sdk/middleware-logger": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.972.1.tgz", - "integrity": "sha512-JGgFl6cHg9G2FHu4lyFIzmFN8KESBiRr84gLC3Aeni0Gt1nKm+KxWLBuha/RPcXxJygGXCcMM4AykkIwxor8RA==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "^3.973.0", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.972.1.tgz", - "integrity": "sha512-taGzNRe8vPHjnliqXIHp9kBgIemLE/xCaRTMH1NH0cncHeaPcjxtnCroAAM9aOlPuKvBe2CpZESyvM1+D8oI7Q==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "^3.973.0", - "@aws/lambda-invoke-store": "^0.2.2", - "@smithy/protocol-http": "^5.3.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/middleware-sdk-s3": { - "version": "3.972.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.972.0.tgz", - "integrity": "sha512-0bcKFXWx+NZ7tIlOo7KjQ+O2rydiHdIQahrq+fN6k9Osky29v17guy68urUKfhTobR6iY6KvxkroFWaFtTgS5w==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "3.972.0", - "@aws-sdk/types": "3.972.0", - "@aws-sdk/util-arn-parser": "3.972.0", - "@smithy/core": "^3.20.6", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/signature-v4": "^5.3.8", - "@smithy/smithy-client": "^4.10.8", - "@smithy/types": "^4.12.0", - "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-middleware": "^4.2.8", - "@smithy/util-stream": "^4.5.10", - "@smithy/util-utf8": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@aws-sdk/core": { - "version": "3.972.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.972.0.tgz", - "integrity": "sha512-nEeUW2M9F+xdIaD98F5MBcQ4ITtykj3yKbgFZ6J0JtL3bq+Z90szQ6Yy8H/BLPYXTs3V4n9ifnBo8cprRDiE6A==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "3.972.0", - "@aws-sdk/xml-builder": "3.972.0", - "@smithy/core": "^3.20.6", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/property-provider": "^4.2.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/signature-v4": "^5.3.8", - "@smithy/smithy-client": "^4.10.8", - "@smithy/types": "^4.12.0", - "@smithy/util-base64": "^4.3.0", - "@smithy/util-middleware": "^4.2.8", - "@smithy/util-utf8": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@aws-sdk/types": { - "version": "3.972.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.972.0.tgz", - "integrity": "sha512-U7xBIbLSetONxb2bNzHyDgND3oKGoIfmknrEVnoEU4GUSs+0augUOIn9DIWGUO2ETcRFdsRUnmx9KhPT9Ojbug==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@aws-sdk/xml-builder": { - "version": "3.972.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.972.0.tgz", - "integrity": "sha512-POaGMcXnozzqBUyJM3HLUZ9GR6OKJWPGJEmhtTnxZXt8B6JcJ/6K3xRJ5H/j8oovVLz8Wg6vFxAHv8lvuASxMg==", - "license": "Apache-2.0", - "dependencies": { - 
"@smithy/types": "^4.12.0", - "fast-xml-parser": "5.2.5", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.972.2", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.972.2.tgz", - "integrity": "sha512-d+Exq074wy0X6wvShg/kmZVtkah+28vMuqCtuY3cydg8LUZOJBtbAolCpEJizSyb8mJJZF9BjWaTANXL4OYnkg==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "^3.973.1", - "@aws-sdk/types": "^3.973.0", - "@aws-sdk/util-endpoints": "3.972.0", - "@smithy/core": "^3.21.1", - "@smithy/protocol-http": "^5.3.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/nested-clients": { - "version": "3.974.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.974.0.tgz", - "integrity": "sha512-k3dwdo/vOiHMJc9gMnkPl1BA5aQfTrZbz+8fiDkWrPagqAioZgmo5oiaOaeX0grObfJQKDtcpPFR4iWf8cgl8Q==", - "license": "Apache-2.0", - "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "^3.973.0", - "@aws-sdk/middleware-host-header": "^3.972.1", - "@aws-sdk/middleware-logger": "^3.972.1", - "@aws-sdk/middleware-recursion-detection": "^3.972.1", - "@aws-sdk/middleware-user-agent": "^3.972.1", - "@aws-sdk/region-config-resolver": "^3.972.1", - "@aws-sdk/types": "^3.973.0", - "@aws-sdk/util-endpoints": "3.972.0", - "@aws-sdk/util-user-agent-browser": "^3.972.1", - "@aws-sdk/util-user-agent-node": "^3.972.1", - "@smithy/config-resolver": "^4.4.6", - "@smithy/core": "^3.21.0", - "@smithy/fetch-http-handler": "^5.3.9", - "@smithy/hash-node": "^4.2.8", - "@smithy/invalid-dependency": "^4.2.8", - "@smithy/middleware-content-length": "^4.2.8", - "@smithy/middleware-endpoint": "^4.4.10", - "@smithy/middleware-retry": "^4.4.26", - "@smithy/middleware-serde": "^4.2.9", - "@smithy/middleware-stack": "^4.2.8", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/node-http-handler": "^4.4.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/smithy-client": "^4.10.11", - "@smithy/types": "^4.12.0", - "@smithy/url-parser": "^4.2.8", - "@smithy/util-base64": "^4.3.0", - "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.25", - "@smithy/util-defaults-mode-node": "^4.2.28", - "@smithy/util-endpoints": "^3.2.8", - "@smithy/util-middleware": "^4.2.8", - "@smithy/util-retry": "^4.2.8", - "@smithy/util-utf8": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/region-config-resolver": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.972.1.tgz", - "integrity": "sha512-voIY8RORpxLAEgEkYaTFnkaIuRwVBEc+RjVZYcSSllPV+ZEKAacai6kNhJeE3D70Le+JCfvRb52tng/AVHY+jQ==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "^3.973.0", - "@smithy/config-resolver": "^4.4.6", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/signature-v4-multi-region": { - "version": "3.972.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.972.0.tgz", - "integrity": "sha512-2udiRijmjpN81Pvajje4TsjbXDZNP6K9bYUanBYH8hXa/tZG5qfGCySD+TyX0sgDxCQmEDMg3LaQdfjNHBDEgQ==", - "license": 
"Apache-2.0", - "dependencies": { - "@aws-sdk/middleware-sdk-s3": "3.972.0", - "@aws-sdk/types": "3.972.0", - "@smithy/protocol-http": "^5.3.8", - "@smithy/signature-v4": "^5.3.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/signature-v4-multi-region/node_modules/@aws-sdk/types": { - "version": "3.972.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.972.0.tgz", - "integrity": "sha512-U7xBIbLSetONxb2bNzHyDgND3oKGoIfmknrEVnoEU4GUSs+0augUOIn9DIWGUO2ETcRFdsRUnmx9KhPT9Ojbug==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/token-providers": { - "version": "3.974.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.974.0.tgz", - "integrity": "sha512-cBykL0LiccKIgNhGWvQRTPvsBLPZxnmJU3pYxG538jpFX8lQtrCy1L7mmIHNEdxIdIGEPgAEHF8/JQxgBToqUQ==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "^3.973.0", - "@aws-sdk/nested-clients": "3.974.0", - "@aws-sdk/types": "^3.973.0", - "@smithy/property-provider": "^4.2.8", - "@smithy/shared-ini-file-loader": "^4.4.3", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/types": { - "version": "3.973.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.973.0.tgz", - "integrity": "sha512-jYIdB7a7jhRTvyb378nsjyvJh1Si+zVduJ6urMNGpz8RjkmHZ+9vM2H07XaIB2Cfq0GhJRZYOfUCH8uqQhqBkQ==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/util-arn-parser": { - "version": "3.972.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.972.0.tgz", - "integrity": "sha512-RM5Mmo/KJ593iMSrALlHEOcc9YOIyOsDmS5x2NLOMdEmzv1o00fcpAkCQ02IGu1eFneBFT7uX0Mpag0HI+Cz2g==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/util-endpoints": { - "version": "3.972.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.972.0.tgz", - "integrity": "sha512-6JHsl1V/a1ZW8D8AFfd4R52fwZPnZ5H4U6DS8m/bWT8qad72NvbOFAC7U2cDtFs2TShqUO3TEiX/EJibtY3ijg==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "3.972.0", - "@smithy/types": "^4.12.0", - "@smithy/url-parser": "^4.2.8", - "@smithy/util-endpoints": "^3.2.8", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/util-endpoints/node_modules/@aws-sdk/types": { - "version": "3.972.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.972.0.tgz", - "integrity": "sha512-U7xBIbLSetONxb2bNzHyDgND3oKGoIfmknrEVnoEU4GUSs+0augUOIn9DIWGUO2ETcRFdsRUnmx9KhPT9Ojbug==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws-sdk/util-locate-window": { - "version": "3.965.3", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.965.3.tgz", - "integrity": "sha512-FNUqAjlKAGA7GM05kywE99q8wiPHPZqrzhq3wXRga6PRD6A0kzT85Pb0AzYBVTBRpSrKyyr6M92Y6bnSBVp2BA==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - } - }, - 
"node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.972.1.tgz", - "integrity": "sha512-IgF55NFmJX8d9Wql9M0nEpk2eYbuD8G4781FN4/fFgwTXBn86DvlZJuRWDCMcMqZymnBVX7HW9r+3r9ylqfW0w==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "^3.973.0", - "@smithy/types": "^4.12.0", - "bowser": "^2.11.0", - "tslib": "^2.6.2" - } - }, - "node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.972.1.tgz", - "integrity": "sha512-oIs4JFcADzoZ0c915R83XvK2HltWupxNsXUIuZse2rgk7b97zTpkxaqXiH0h9ylh31qtgo/t8hp4tIqcsMrEbQ==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/middleware-user-agent": "^3.972.1", - "@aws-sdk/types": "^3.973.0", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=20.0.0" - }, - "peerDependencies": { - "aws-crt": ">=1.0.0" - }, - "peerDependenciesMeta": { - "aws-crt": { - "optional": true - } - } - }, - "node_modules/@aws-sdk/xml-builder": { - "version": "3.972.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.972.1.tgz", - "integrity": "sha512-6zZGlPOqn7Xb+25MAXGb1JhgvaC5HjZj6GzszuVrnEgbhvzBRFGKYemuHBV4bho+dtqeYKPgaZUv7/e80hIGNg==", - "license": "Apache-2.0", + "node_modules/@babel/generator": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.6.tgz", + "integrity": "sha512-lOoVRwADj8hjf7al89tvQ2a1lf53Z+7tiXMgpZJL3maQPDxh0DgLMN62B2MKUOFcoodBHLMbDM6WAbKgNy5Suw==", + "license": "MIT", "dependencies": { - "@smithy/types": "^4.12.0", - "fast-xml-parser": "5.2.5", - "tslib": "^2.6.2" + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" }, "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@aws/lambda-invoke-store": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@aws/lambda-invoke-store/-/lambda-invoke-store-0.2.3.tgz", - "integrity": "sha512-oLvsaPMTBejkkmHhjf09xTgk71mOqyr/409NKhRIL08If7AhVfUsJhVsx386uJaqNd42v9kWamQ9lFbkoC2dYw==", - "license": "Apache-2.0", - "engines": { - "node": ">=18.0.0" + "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { @@ -1559,19 +791,58 @@ "url": "https://github.com/sponsors/nzakas" } }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", - "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", - "license": "MIT" - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", "license": "MIT", "dependencies": { - "@nodelib/fs.stat": "2.0.5", + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + 
"node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" }, "engines": { @@ -2015,586 +1286,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@smithy/abort-controller": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.8.tgz", - "integrity": "sha512-peuVfkYHAmS5ybKxWcfraK7WBBP0J+rkfUcbHJJKQ4ir3UAUNQI+Y4Vt/PqSzGqgloJ5O1dk7+WzNL8wcCSXbw==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/config-resolver": { - "version": "4.4.6", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.6.tgz", - "integrity": "sha512-qJpzYC64kaj3S0fueiu3kXm8xPrR3PcXDPEgnaNMRn0EjNSZFoFjvbUp0YUDsRhN1CB90EnHJtbxWKevnH99UQ==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/node-config-provider": "^4.3.8", - "@smithy/types": "^4.12.0", - "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-endpoints": "^3.2.8", - "@smithy/util-middleware": "^4.2.8", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/core": { - "version": "3.21.1", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.21.1.tgz", - "integrity": "sha512-NUH8R4O6FkN8HKMojzbGg/5pNjsfTjlMmeFclyPfPaXXUrbr5TzhWgbf7t92wfrpCHRgpjyz7ffASIS3wX28aA==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/middleware-serde": "^4.2.9", - "@smithy/protocol-http": "^5.3.8", - "@smithy/types": "^4.12.0", - "@smithy/util-base64": "^4.3.0", - "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-middleware": "^4.2.8", - "@smithy/util-stream": "^4.5.10", - "@smithy/util-utf8": "^4.2.0", - "@smithy/uuid": "^1.1.0", - "tslib": "^2.6.2" 
- }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/credential-provider-imds": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.8.tgz", - "integrity": "sha512-FNT0xHS1c/CPN8upqbMFP83+ul5YgdisfCfkZ86Jh2NSmnqw/AJ6x5pEogVCTVvSm7j9MopRU89bmDelxuDMYw==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/node-config-provider": "^4.3.8", - "@smithy/property-provider": "^4.2.8", - "@smithy/types": "^4.12.0", - "@smithy/url-parser": "^4.2.8", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/fetch-http-handler": { - "version": "5.3.9", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.9.tgz", - "integrity": "sha512-I4UhmcTYXBrct03rwzQX1Y/iqQlzVQaPxWjCjula++5EmWq9YGBrx6bbGqluGc1f0XEfhSkiY4jhLgbsJUMKRA==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/protocol-http": "^5.3.8", - "@smithy/querystring-builder": "^4.2.8", - "@smithy/types": "^4.12.0", - "@smithy/util-base64": "^4.3.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/hash-node": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.8.tgz", - "integrity": "sha512-7ZIlPbmaDGxVoxErDZnuFG18WekhbA/g2/i97wGj+wUBeS6pcUeAym8u4BXh/75RXWhgIJhyC11hBzig6MljwA==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "@smithy/util-buffer-from": "^4.2.0", - "@smithy/util-utf8": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/invalid-dependency": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.8.tgz", - "integrity": "sha512-N9iozRybwAQ2dn9Fot9kI6/w9vos2oTXLhtK7ovGqwZjlOcxu6XhPlpLpC+INsxktqHinn5gS2DXDjDF2kG5sQ==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/is-array-buffer": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.2.0.tgz", - "integrity": "sha512-DZZZBvC7sjcYh4MazJSGiWMI2L7E0oCiRHREDzIxi/M2LY79/21iXt6aPLHge82wi5LsuRF5A06Ds3+0mlh6CQ==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/middleware-content-length": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.8.tgz", - "integrity": "sha512-RO0jeoaYAB1qBRhfVyq0pMgBoUK34YEJxVxyjOWYZiOKOq2yMZ4MnVXMZCUDenpozHue207+9P5ilTV1zeda0A==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/protocol-http": "^5.3.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/middleware-endpoint": { - "version": "4.4.11", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.11.tgz", - "integrity": "sha512-/WqsrycweGGfb9sSzME4CrsuayjJF6BueBmkKlcbeU5q18OhxRrvvKlmfw3tpDsK5ilx2XUJvoukwxHB0nHs/Q==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/core": "^3.21.1", - "@smithy/middleware-serde": "^4.2.9", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/shared-ini-file-loader": "^4.4.3", - "@smithy/types": "^4.12.0", - "@smithy/url-parser": "^4.2.8", - "@smithy/util-middleware": 
"^4.2.8", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/middleware-retry": { - "version": "4.4.27", - "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.27.tgz", - "integrity": "sha512-xFUYCGRVsfgiN5EjsJJSzih9+yjStgMTCLANPlf0LVQkPDYCe0hz97qbdTZosFOiYlGBlHYityGRxrQ/hxhfVQ==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/node-config-provider": "^4.3.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/service-error-classification": "^4.2.8", - "@smithy/smithy-client": "^4.10.12", - "@smithy/types": "^4.12.0", - "@smithy/util-middleware": "^4.2.8", - "@smithy/util-retry": "^4.2.8", - "@smithy/uuid": "^1.1.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/middleware-serde": { - "version": "4.2.9", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.9.tgz", - "integrity": "sha512-eMNiej0u/snzDvlqRGSN3Vl0ESn3838+nKyVfF2FKNXFbi4SERYT6PR392D39iczngbqqGG0Jl1DlCnp7tBbXQ==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/protocol-http": "^5.3.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/middleware-stack": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.8.tgz", - "integrity": "sha512-w6LCfOviTYQjBctOKSwy6A8FIkQy7ICvglrZFl6Bw4FmcQ1Z420fUtIhxaUZZshRe0VCq4kvDiPiXrPZAe8oRA==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/node-config-provider": { - "version": "4.3.8", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.8.tgz", - "integrity": "sha512-aFP1ai4lrbVlWjfpAfRSL8KFcnJQYfTl5QxLJXY32vghJrDuFyPZ6LtUL+JEGYiFRG1PfPLHLoxj107ulncLIg==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/property-provider": "^4.2.8", - "@smithy/shared-ini-file-loader": "^4.4.3", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/node-http-handler": { - "version": "4.4.8", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.8.tgz", - "integrity": "sha512-q9u+MSbJVIJ1QmJ4+1u+cERXkrhuILCBDsJUBAW1MPE6sFonbCNaegFuwW9ll8kh5UdyY3jOkoOGlc7BesoLpg==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/abort-controller": "^4.2.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/querystring-builder": "^4.2.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/property-provider": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.8.tgz", - "integrity": "sha512-EtCTbyIveCKeOXDSWSdze3k612yCPq1YbXsbqX3UHhkOSW8zKsM9NOJG5gTIya0vbY2DIaieG8pKo1rITHYL0w==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/protocol-http": { - "version": "5.3.8", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.8.tgz", - "integrity": "sha512-QNINVDhxpZ5QnP3aviNHQFlRogQZDfYlCkQT+7tJnErPQbDhysondEjhikuANxgMsZrkGeiAxXy4jguEGsDrWQ==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - 
"engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/querystring-builder": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.8.tgz", - "integrity": "sha512-Xr83r31+DrE8CP3MqPgMJl+pQlLLmOfiEUnoyAlGzzJIrEsbKsPy1hqH0qySaQm4oWrCBlUqRt+idEgunKB+iw==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "@smithy/util-uri-escape": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/querystring-parser": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.8.tgz", - "integrity": "sha512-vUurovluVy50CUlazOiXkPq40KGvGWSdmusa3130MwrR1UNnNgKAlj58wlOe61XSHRpUfIIh6cE0zZ8mzKaDPA==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/service-error-classification": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.8.tgz", - "integrity": "sha512-mZ5xddodpJhEt3RkCjbmUQuXUOaPNTkbMGR0bcS8FE0bJDLMZlhmpgrvPNCYglVw5rsYTpSnv19womw9WWXKQQ==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/shared-ini-file-loader": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.3.tgz", - "integrity": "sha512-DfQjxXQnzC5UbCUPeC3Ie8u+rIWZTvuDPAGU/BxzrOGhRvgUanaP68kDZA+jaT3ZI+djOf+4dERGlm9mWfFDrg==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/signature-v4": { - "version": "5.3.8", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.8.tgz", - "integrity": "sha512-6A4vdGj7qKNRF16UIcO8HhHjKW27thsxYci+5r/uVRkdcBEkOEiY8OMPuydLX4QHSrJqGHPJzPRwwVTqbLZJhg==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/is-array-buffer": "^4.2.0", - "@smithy/protocol-http": "^5.3.8", - "@smithy/types": "^4.12.0", - "@smithy/util-hex-encoding": "^4.2.0", - "@smithy/util-middleware": "^4.2.8", - "@smithy/util-uri-escape": "^4.2.0", - "@smithy/util-utf8": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/smithy-client": { - "version": "4.10.12", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.10.12.tgz", - "integrity": "sha512-VKO/HKoQ5OrSHW6AJUmEnUKeXI1/5LfCwO9cwyao7CmLvGnZeM1i36Lyful3LK1XU7HwTVieTqO1y2C/6t3qtA==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/core": "^3.21.1", - "@smithy/middleware-endpoint": "^4.4.11", - "@smithy/middleware-stack": "^4.2.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/types": "^4.12.0", - "@smithy/util-stream": "^4.5.10", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/types": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.12.0.tgz", - "integrity": "sha512-9YcuJVTOBDjg9LWo23Qp0lTQ3D7fQsQtwle0jVfpbUHy9qBwCEgKuVH4FqFB3VYu0nwdHKiEMA+oXz7oV8X1kw==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/url-parser": { - "version": "4.2.8", - "resolved": 
"https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.8.tgz", - "integrity": "sha512-NQho9U68TGMEU639YkXnVMV3GEFFULmmaWdlu1E9qzyIePOHsoSnagTGSDv1Zi8DCNN6btxOSdgmy5E/hsZwhA==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/querystring-parser": "^4.2.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-base64": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.3.0.tgz", - "integrity": "sha512-GkXZ59JfyxsIwNTWFnjmFEI8kZpRNIBfxKjv09+nkAWPt/4aGaEWMM04m4sxgNVWkbt2MdSvE3KF/PfX4nFedQ==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/util-buffer-from": "^4.2.0", - "@smithy/util-utf8": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-body-length-browser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.2.0.tgz", - "integrity": "sha512-Fkoh/I76szMKJnBXWPdFkQJl2r9SjPt3cMzLdOB6eJ4Pnpas8hVoWPYemX/peO0yrrvldgCUVJqOAjUrOLjbxg==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-body-length-node": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.2.1.tgz", - "integrity": "sha512-h53dz/pISVrVrfxV1iqXlx5pRg3V2YWFcSQyPyXZRrZoZj4R4DeWRDo1a7dd3CPTcFi3kE+98tuNyD2axyZReA==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-buffer-from": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.2.0.tgz", - "integrity": "sha512-kAY9hTKulTNevM2nlRtxAG2FQ3B2OR6QIrPY3zE5LqJy1oxzmgBGsHLWTcNhWXKchgA0WHW+mZkQrng/pgcCew==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/is-array-buffer": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-config-provider": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.2.0.tgz", - "integrity": "sha512-YEjpl6XJ36FTKmD+kRJJWYvrHeUvm5ykaUS5xK+6oXffQPHeEM4/nXlZPe+Wu0lsgRUcNZiliYNh/y7q9c2y6Q==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-defaults-mode-browser": { - "version": "4.3.26", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.26.tgz", - "integrity": "sha512-vva0dzYUTgn7DdE0uaha10uEdAgmdLnNFowKFjpMm6p2R0XDk5FHPX3CBJLzWQkQXuEprsb0hGz9YwbicNWhjw==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/property-provider": "^4.2.8", - "@smithy/smithy-client": "^4.10.12", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-defaults-mode-node": { - "version": "4.2.29", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.29.tgz", - "integrity": "sha512-c6D7IUBsZt/aNnTBHMTf+OVh+h/JcxUUgfTcIJaWRe6zhOum1X+pNKSZtZ+7fbOn5I99XVFtmrnXKv8yHHErTQ==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/config-resolver": "^4.4.6", - "@smithy/credential-provider-imds": "^4.2.8", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/property-provider": 
"^4.2.8", - "@smithy/smithy-client": "^4.10.12", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-endpoints": { - "version": "3.2.8", - "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.8.tgz", - "integrity": "sha512-8JaVTn3pBDkhZgHQ8R0epwWt+BqPSLCjdjXXusK1onwJlRuN69fbvSK66aIKKO7SwVFM6x2J2ox5X8pOaWcUEw==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/node-config-provider": "^4.3.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-hex-encoding": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.2.0.tgz", - "integrity": "sha512-CCQBwJIvXMLKxVbO88IukazJD9a4kQ9ZN7/UMGBjBcJYvatpWk+9g870El4cB8/EJxfe+k+y0GmR9CAzkF+Nbw==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-middleware": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.8.tgz", - "integrity": "sha512-PMqfeJxLcNPMDgvPbbLl/2Vpin+luxqTGPpW3NAQVLbRrFRzTa4rNAASYeIGjRV9Ytuhzny39SpyU04EQreF+A==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-retry": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.8.tgz", - "integrity": "sha512-CfJqwvoRY0kTGe5AkQokpURNCT1u/MkRzMTASWMPPo2hNSnKtF1D45dQl3DE2LKLr4m+PW9mCeBMJr5mCAVThg==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/service-error-classification": "^4.2.8", - "@smithy/types": "^4.12.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-stream": { - "version": "4.5.10", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.10.tgz", - "integrity": "sha512-jbqemy51UFSZSp2y0ZmRfckmrzuKww95zT9BYMmuJ8v3altGcqjwoV1tzpOwuHaKrwQrCjIzOib499ymr2f98g==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/fetch-http-handler": "^5.3.9", - "@smithy/node-http-handler": "^4.4.8", - "@smithy/types": "^4.12.0", - "@smithy/util-base64": "^4.3.0", - "@smithy/util-buffer-from": "^4.2.0", - "@smithy/util-hex-encoding": "^4.2.0", - "@smithy/util-utf8": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-uri-escape": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.2.0.tgz", - "integrity": "sha512-igZpCKV9+E/Mzrpq6YacdTQ0qTiLm85gD6N/IrmyDvQFA4UnU3d5g3m8tMT/6zG/vVkWSU+VxeUyGonL62DuxA==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/util-utf8": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.2.0.tgz", - "integrity": "sha512-zBPfuzoI8xyBtR2P6WQj63Rz8i3AmfAaJLuNG8dWsfvPe8lO4aCPYLn879mEgHndZH1zQ2oXmG8O1GGzzaoZiw==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/util-buffer-from": "^4.2.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/@smithy/uuid": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@smithy/uuid/-/uuid-1.1.0.tgz", - "integrity": 
"sha512-4aUIteuyxtBUhVdiQqcDhKFitwfd9hqoSDYY2KRXiWtgoWJ9Bmise+KfEPDiVHWeJepvF8xJO9/9+WDIciMFFw==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18.0.0" - } - }, "node_modules/@ssthouse/tree-chart-core": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@ssthouse/tree-chart-core/-/tree-chart-core-1.2.0.tgz", @@ -2674,21 +1365,20 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "25.0.10", - "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.10.tgz", - "integrity": "sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg==", + "version": "25.1.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.1.0.tgz", + "integrity": "sha512-t7frlewr6+cbx+9Ohpl0NOTKXZNV9xHRmNOvql47BFJKcEG1CxtxlPEEe+gR9uhVWM4DwhnvTF110mIL4yP9RA==", "license": "MIT", "dependencies": { "undici-types": "~7.16.0" } }, "node_modules/@types/nodemailer": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/@types/nodemailer/-/nodemailer-7.0.5.tgz", - "integrity": "sha512-7WtR4MFJUNN2UFy0NIowBRJswj5KXjXDhlZY43Hmots5eGu5q/dTeFd/I6GgJA/qj3RqO6dDy4SvfcV3fOVeIA==", + "version": "7.0.9", + "resolved": "https://registry.npmjs.org/@types/nodemailer/-/nodemailer-7.0.9.tgz", + "integrity": "sha512-vI8oF1M+8JvQhsId0Pc38BdUP2evenIIys7c7p+9OZXSPOH5c1dyINP1jT8xQ2xPuBUXmIC87s+91IZMDjH8Ow==", "license": "MIT", "dependencies": { - "@aws-sdk/client-sesv2": "^3.839.0", "@types/node": "*" } }, @@ -2987,6 +1677,33 @@ "vscode-uri": "^3.0.8" } }, + "node_modules/@vue-macros/common": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@vue-macros/common/-/common-3.1.2.tgz", + "integrity": "sha512-h9t4ArDdniO9ekYHAD95t9AZcAbb19lEGK+26iAjUODOIJKmObDNBSe4+6ELQAA3vtYiFPPBtHh7+cQCKi3Dng==", + "license": "MIT", + "dependencies": { + "@vue/compiler-sfc": "^3.5.22", + "ast-kit": "^2.1.2", + "local-pkg": "^1.1.2", + "magic-string-ast": "^1.0.2", + "unplugin-utils": "^0.3.0" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/vue-macros" + }, + "peerDependencies": { + "vue": "^2.7.0 || ^3.2.25" + }, + "peerDependenciesMeta": { + "vue": { + "optional": true + } + } + }, "node_modules/@vue/compiler-core": { "version": "3.5.27", "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.27.tgz", @@ -3038,10 +1755,37 @@ } }, "node_modules/@vue/devtools-api": { - "version": "6.6.4", - "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-6.6.4.tgz", - "integrity": "sha512-sGhTPMuXqZ1rVOk32RylztWkfXTRhuS7vgAKv0zjqk8gbsHkJ7xfFf+jbySxt7tWObEJwyKaHMikV/WGDiQm8g==", - "license": "MIT" + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-8.0.5.tgz", + "integrity": "sha512-DgVcW8H/Nral7LgZEecYFFYXnAvGuN9C3L3DtWekAncFBedBczpNW8iHKExfaM559Zm8wQWrwtYZ9lXthEHtDw==", + "license": "MIT", + "dependencies": { + "@vue/devtools-kit": "^8.0.5" + } + }, + "node_modules/@vue/devtools-kit": { + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-8.0.5.tgz", + "integrity": "sha512-q2VV6x1U3KJMTQPUlRMyWEKVbcHuxhqJdSr6Jtjz5uAThAIrfJ6WVZdGZm5cuO63ZnSUz0RCsVwiUUb0mDV0Yg==", + "license": "MIT", + "dependencies": { + "@vue/devtools-shared": "^8.0.5", + "birpc": "^2.6.1", + "hookable": "^5.5.3", + "mitt": "^3.0.1", + "perfect-debounce": "^2.0.0", + "speakingurl": "^14.0.1", + "superjson": "^2.2.2" + } + }, + "node_modules/@vue/devtools-shared": { + 
"version": "8.0.5", + "resolved": "https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-8.0.5.tgz", + "integrity": "sha512-bRLn6/spxpmgLk+iwOrR29KrYnJjG9DGpHGkDFG82UM21ZpJ39ztUT9OXX3g+usW7/b2z+h46I9ZiYyB07XMXg==", + "license": "MIT", + "dependencies": { + "rfdc": "^1.4.1" + } }, "node_modules/@vue/eslint-config-prettier": { "version": "10.2.0", @@ -3085,9 +1829,9 @@ } }, "node_modules/@vue/language-core": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-3.2.3.tgz", - "integrity": "sha512-VpN/GnYDzGLh44AI6i1OB/WsLXo6vwnl0EWHBelGc4TyC0yEq6azwNaed/+Tgr8anFlSdWYnMEkyHJDPe7ii7A==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-3.2.4.tgz", + "integrity": "sha512-bqBGuSG4KZM45KKTXzGtoCl9cWju5jsaBKaJJe3h5hRAAWpZUuj5G+L+eI01sPIkm4H6setKRlw7E85wLdDNew==", "dev": true, "license": "MIT", "dependencies": { @@ -3205,7 +1949,6 @@ "version": "8.15.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", - "dev": true, "license": "MIT", "bin": { "acorn": "bin/acorn" @@ -3309,6 +2052,38 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/ast-kit": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ast-kit/-/ast-kit-2.2.0.tgz", + "integrity": "sha512-m1Q/RaVOnTp9JxPX+F+Zn7IcLYMzM8kZofDImfsKZd8MbR+ikdOzTeztStWqfrqIxZnYWryyI9ePm3NGjnZgGw==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.5", + "pathe": "^2.0.3" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, + "node_modules/ast-walker-scope": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/ast-walker-scope/-/ast-walker-scope-0.8.3.tgz", + "integrity": "sha512-cbdCP0PGOBq0ASG+sjnKIoYkWMKhhz+F/h9pRexUdX2Hd38+WOlBkRKlqkGOSm0YQpcFMQBJeK4WspUAkwsEdg==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.28.4", + "ast-kit": "^2.1.3" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, "node_modules/async-function": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", @@ -3342,6 +2117,15 @@ "dev": true, "license": "MIT" }, + "node_modules/birpc": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/birpc/-/birpc-2.9.0.tgz", + "integrity": "sha512-KrayHS5pBi69Xi9JmvoqrIgYGDkD6mcSe/i6YKi3w5kekCLzrX4+nawcXqrj2tIp50Kw/mT/s3p+GVK0A0sKxw==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, "node_modules/body-parser": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.2.tgz", @@ -3424,12 +2208,6 @@ ], "license": "MIT" }, - "node_modules/bowser": { - "version": "2.13.1", - "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.13.1.tgz", - "integrity": "sha512-OHawaAbjwx6rqICCKgSG0SAnT05bzd7ppyKLVUITZpANBaaMFBAsaNkto3LoQ31tyFP5kNujE8Cdx85G9VzOkw==", - "license": "MIT" - }, "node_modules/brace-expansion": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", @@ -3536,6 +2314,21 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/chokidar": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-5.0.0.tgz", + "integrity": 
"sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==", + "license": "MIT", + "dependencies": { + "readdirp": "^5.0.0" + }, + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -3572,6 +2365,12 @@ "dev": true, "license": "MIT" }, + "node_modules/confbox": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.2.2.tgz", + "integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==", + "license": "MIT" + }, "node_modules/content-disposition": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", @@ -3612,6 +2411,21 @@ "node": ">=6.6.0" } }, + "node_modules/copy-anything": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/copy-anything/-/copy-anything-4.0.5.tgz", + "integrity": "sha512-7Vv6asjS4gMOuILabD3l739tsaxFQmC+a7pLZm02zyvs8p977bL3zEgq3yDk5rn9B0PbYgIv++jmHcuUab4RhA==", + "license": "MIT", + "dependencies": { + "is-what": "^5.2.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, "node_modules/cors": { "version": "2.8.6", "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.6.tgz", @@ -4830,6 +3644,12 @@ "url": "https://opencollective.com/express" } }, + "node_modules/exsolve": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.8.tgz", + "integrity": "sha512-LmDxfWXwcTArk8fUEnOfSZpHOJ6zOMUJKOtFLFqJLoKJetuQG874Uc7/Kki7zFLzYybmZhp1M7+98pfMqeX8yA==", + "license": "MIT" + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -4886,24 +3706,6 @@ "dev": true, "license": "MIT" }, - "node_modules/fast-xml-parser": { - "version": "5.2.5", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.2.5.tgz", - "integrity": "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/NaturalIntelligence" - } - ], - "license": "MIT", - "dependencies": { - "strnum": "^2.1.0" - }, - "bin": { - "fxparser": "src/cli/cli.js" - } - }, "node_modules/fastq": { "version": "1.20.1", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", @@ -5352,6 +4154,12 @@ "node": "*" } }, + "node_modules/hookable": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/hookable/-/hookable-5.5.3.tgz", + "integrity": "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==", + "license": "MIT" + }, "node_modules/hosted-git-info": { "version": "2.8.9", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", @@ -5899,6 +4707,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-what": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/is-what/-/is-what-5.5.0.tgz", + "integrity": "sha512-oG7cgbmg5kLYae2N5IVd3jm2s+vldjxJzK1pcu9LfpGuQ93MQSzo0okvRna+7y5ifrD+20FE8FvjusyGaz14fw==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, "node_modules/isarray": { "version": "2.0.5", "resolved": 
"https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", @@ -5925,6 +4745,18 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/json-buffer": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", @@ -5953,6 +4785,18 @@ "dev": true, "license": "MIT" }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/kafkajs": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/kafkajs/-/kafkajs-2.2.4.tgz", @@ -6024,6 +4868,23 @@ "node": ">=4" } }, + "node_modules/local-pkg": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-1.1.2.tgz", + "integrity": "sha512-arhlxbFRmoQHl33a0Zkle/YWlmNwoyt6QNZEIJcqNbdrsix5Lvc4HyyI3EnwxTYlZYc32EbYrQ8SzEZ7dqgg9A==", + "license": "MIT", + "dependencies": { + "mlly": "^1.7.4", + "pkg-types": "^2.3.0", + "quansync": "^0.2.11" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -6062,6 +4923,21 @@ "@jridgewell/sourcemap-codec": "^1.5.5" } }, + "node_modules/magic-string-ast": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/magic-string-ast/-/magic-string-ast-1.0.3.tgz", + "integrity": "sha512-CvkkH1i81zl7mmb94DsRiFeG9V2fR2JeuK8yDgS8oiZSFa++wWLEgZ5ufEOyLHbvSbD1gTRKv9NdX69Rnvr9JA==", + "license": "MIT", + "dependencies": { + "magic-string": "^0.30.19" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, "node_modules/markdown-it": { "version": "12.3.2", "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", @@ -6296,6 +5172,41 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/mitt": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz", + "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==", + "license": "MIT" + }, + "node_modules/mlly": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.8.0.tgz", + "integrity": "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==", + "license": "MIT", + "dependencies": { + "acorn": "^8.15.0", + "pathe": "^2.0.3", + "pkg-types": "^1.3.1", + "ufo": "^1.6.1" + } + }, + "node_modules/mlly/node_modules/confbox": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", + "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", + "license": "MIT" + }, + "node_modules/mlly/node_modules/pkg-types": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", + "integrity": 
"sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", + "license": "MIT", + "dependencies": { + "confbox": "^0.1.8", + "mlly": "^1.7.4", + "pathe": "^2.0.1" + } + }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -6306,7 +5217,6 @@ "version": "0.4.1", "resolved": "https://registry.npmjs.org/muggle-string/-/muggle-string-0.4.1.tgz", "integrity": "sha512-VNTrAak/KhO2i8dqqnqnAHOa3cYBwXEZe9h+D5h/1ZqFSTEFHdM65lR7RoIqq3tBBYavsOXV84NoHXZ0AkPyqQ==", - "dev": true, "license": "MIT" }, "node_modules/nanoid": { @@ -6360,9 +5270,9 @@ "license": "MIT" }, "node_modules/nodemailer": { - "version": "7.0.12", - "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.12.tgz", - "integrity": "sha512-H+rnK5bX2Pi/6ms3sN4/jRQvYSMltV6vqup/0SFOrxYYY/qoNvhXPlYq3e+Pm9RFJRwrMGbMIwi81M4dxpomhA==", + "version": "7.0.13", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.13.tgz", + "integrity": "sha512-PNDFSJdP+KFgdsG3ZzMXCgquO7I6McjY2vlqILjtJd0hy8wEvtugS9xKRF2NWlPNGxvLCXlTNIae4serI7dinw==", "license": "MIT-0", "engines": { "node": ">=6.0.0" @@ -6850,6 +5760,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "license": "MIT" + }, + "node_modules/perfect-debounce": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-2.1.0.tgz", + "integrity": "sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==", + "license": "MIT" + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -6891,6 +5813,17 @@ "node": ">=4" } }, + "node_modules/pkg-types": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-2.3.0.tgz", + "integrity": "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==", + "license": "MIT", + "dependencies": { + "confbox": "^0.2.2", + "exsolve": "^1.0.7", + "pathe": "^2.0.3" + } + }, "node_modules/playwright": { "version": "1.58.0", "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.58.0.tgz", @@ -7063,6 +5996,22 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/quansync": { + "version": "0.2.11", + "resolved": "https://registry.npmjs.org/quansync/-/quansync-0.2.11.tgz", + "integrity": "sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/antfu" + }, + { + "type": "individual", + "url": "https://github.com/sponsors/sxzz" + } + ], + "license": "MIT" + }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -7151,6 +6100,19 @@ "node": ">=4" } }, + "node_modules/readdirp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-5.0.0.tgz", + "integrity": "sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==", + "license": "MIT", + "engines": { + "node": ">= 20.19.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, 
"node_modules/reflect.getprototypeof": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", @@ -7242,6 +6204,12 @@ "node": ">=0.10.0" } }, + "node_modules/rfdc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", + "license": "MIT" + }, "node_modules/robust-predicates": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", @@ -7409,6 +6377,12 @@ "node": ">=11.0.0" } }, + "node_modules/scule": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/scule/-/scule-1.3.0.tgz", + "integrity": "sha512-6FtHJEvt+pVMIB9IBY+IcCJ6Z5f1iQnytgyfKMhDKgmzYG+TeH/wx1y3l27rshSbLiSanrR9ffZDrEsmjlQF2g==", + "license": "MIT" + }, "node_modules/semver": { "version": "7.7.3", "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", @@ -7687,6 +6661,15 @@ "dev": true, "license": "CC0-1.0" }, + "node_modules/speakingurl": { + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/speakingurl/-/speakingurl-14.0.1.tgz", + "integrity": "sha512-1POYv7uv2gXoyGFpBCmpDVSNV74IfsWlDW216UPjbWufNf+bSU6GdbDsxdcxtfwb4xlI3yxzOTKClUosxARYrQ==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/statuses": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", @@ -7817,17 +6800,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/strnum": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/strnum/-/strnum-2.1.2.tgz", - "integrity": "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/NaturalIntelligence" - } - ], - "license": "MIT" + "node_modules/superjson": { + "version": "2.2.6", + "resolved": "https://registry.npmjs.org/superjson/-/superjson-2.2.6.tgz", + "integrity": "sha512-H+ue8Zo4vJmV2nRjpx86P35lzwDT3nItnIsocgumgr0hHMQ+ZGq5vrERg9kJBo5AWGmxZDhzDo+WVIJqkB0cGA==", + "license": "MIT", + "dependencies": { + "copy-anything": "^4" + }, + "engines": { + "node": ">=16" + } }, "node_modules/supports-color": { "version": "7.2.0", @@ -7875,7 +6858,6 @@ "version": "0.2.15", "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", - "dev": true, "license": "MIT", "dependencies": { "fdir": "^6.5.0", @@ -7892,7 +6874,6 @@ "version": "6.5.0", "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", - "dev": true, "license": "MIT", "engines": { "node": ">=12.0.0" @@ -7910,7 +6891,6 @@ "version": "4.0.3", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", - "dev": true, "license": "MIT", "engines": { "node": ">=12" @@ -7965,12 +6945,6 @@ "typescript": ">=4.8.4" } }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "license": "0BSD" - }, 
"node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -8120,6 +7094,12 @@ "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", "license": "MIT" }, + "node_modules/ufo": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.3.tgz", + "integrity": "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==", + "license": "MIT" + }, "node_modules/unbox-primitive": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", @@ -8166,6 +7146,61 @@ "node": ">= 0.8" } }, + "node_modules/unplugin": { + "version": "2.3.11", + "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-2.3.11.tgz", + "integrity": "sha512-5uKD0nqiYVzlmCRs01Fhs2BdkEgBS3SAVP6ndrBsuK42iC2+JHyxM05Rm9G8+5mkmRtzMZGY8Ct5+mliZxU/Ww==", + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.5", + "acorn": "^8.15.0", + "picomatch": "^4.0.3", + "webpack-virtual-modules": "^0.6.2" + }, + "engines": { + "node": ">=18.12.0" + } + }, + "node_modules/unplugin-utils": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/unplugin-utils/-/unplugin-utils-0.3.1.tgz", + "integrity": "sha512-5lWVjgi6vuHhJ526bI4nlCOmkCIF3nnfXkCMDeMJrtdvxTs6ZFCM8oNufGTsDbKv/tJ/xj8RpvXjRuPBZJuJog==", + "license": "MIT", + "dependencies": { + "pathe": "^2.0.3", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": "https://github.com/sponsors/sxzz" + } + }, + "node_modules/unplugin-utils/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/unplugin/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/uri-js": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", @@ -8390,29 +7425,71 @@ } }, "node_modules/vue-router": { - "version": "4.6.4", - "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.6.4.tgz", - "integrity": "sha512-Hz9q5sa33Yhduglwz6g9skT8OBPii+4bFn88w6J+J4MfEo4KRRpmiNG/hHHkdbRFlLBOqxN8y8gf2Fb0MTUgVg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-5.0.0.tgz", + "integrity": "sha512-xWHlps4o1ScODWqvyapl0v1uGy0g7ozmsTSO/dguyGb/9RL6oSU2HfN/8oMXnoFOH1BuTaAkbiOz4OWdkfjcZg==", "license": "MIT", "dependencies": { - "@vue/devtools-api": "^6.6.4" + "@babel/generator": "^7.28.6", + "@vue-macros/common": "^3.1.1", + "@vue/devtools-api": "^8.0.0", + "ast-walker-scope": "^0.8.3", + "chokidar": "^5.0.0", + "json5": "^2.2.3", + "local-pkg": "^1.1.2", + "magic-string": "^0.30.21", + "mlly": "^1.8.0", + "muggle-string": "^0.4.1", + "pathe": "^2.0.3", + "picomatch": "^4.0.3", + "scule": "^1.3.0", + "tinyglobby": "^0.2.15", + "unplugin": "^2.3.11", + "unplugin-utils": "^0.3.1", + "yaml": "^2.8.2" }, 
"funding": { "url": "https://github.com/sponsors/posva" }, "peerDependencies": { + "@pinia/colada": "^0.18.1", + "@vue/compiler-sfc": "^3.5.17", + "pinia": "^3.0.4", "vue": "^3.5.0" + }, + "peerDependenciesMeta": { + "@pinia/colada": { + "optional": true + }, + "@vue/compiler-sfc": { + "optional": true + }, + "pinia": { + "optional": true + } + } + }, + "node_modules/vue-router/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" } }, "node_modules/vue-tsc": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-3.2.3.tgz", - "integrity": "sha512-1RdRB7rQXGFMdpo0aXf9spVzWEPGAk7PEb/ejHQwVrcuQA/HsGiixIc3uBQeqY2YjeEEgvr2ShQewBgcN4c1Cw==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-3.2.4.tgz", + "integrity": "sha512-xj3YCvSLNDKt1iF9OcImWHhmYcihVu9p4b9s4PGR/qp6yhW+tZJaypGxHScRyOrdnHvaOeF+YkZOdKwbgGvp5g==", "dev": true, "license": "MIT", "dependencies": { "@volar/typescript": "2.4.27", - "@vue/language-core": "3.2.3" + "@vue/language-core": "3.2.4" }, "bin": { "vue-tsc": "bin/vue-tsc.js" @@ -8476,6 +7553,12 @@ "node": ">=20" } }, + "node_modules/webpack-virtual-modules": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.2.tgz", + "integrity": "sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ==", + "license": "MIT" + }, "node_modules/whatwg-mimetype": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-5.0.0.tgz", @@ -8674,6 +7757,21 @@ "node": ">=4.0" } }, + "node_modules/yaml": { + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", + "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + }, + "funding": { + "url": "https://github.com/sponsors/eemeli" + } + }, "node_modules/yocto-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", diff --git a/webui/package.json b/webui/package.json index bdca98ea3..9b188f55c 100644 --- a/webui/package.json +++ b/webui/package.json @@ -21,7 +21,7 @@ "@ssthouse/vue3-tree-chart": "^0.3.0", "@types/bootstrap": "^5.2.10", "@types/mokapi": "^0.29.1", - "@types/nodemailer": "^7.0.5", + "@types/nodemailer": "^7.0.9", "@types/whatwg-mimetype": "^3.0.2", "ace-builds": "^1.43.5", "bootstrap": "^5.3.8", @@ -37,9 +37,9 @@ "ldapts": "^8.1.3", "mime-types": "^3.0.2", "ncp": "^2.0.0", - "nodemailer": "^7.0.12", + "nodemailer": "^7.0.13", "vue": "^3.5.27", - "vue-router": "^4.6.4", + "vue-router": "^5.0.0", "vue3-ace-editor": "^2.2.4", "vue3-highlightjs": "^1.0.5", "vue3-markdown-it": "^1.0.10", @@ -50,7 +50,7 @@ "@playwright/test": "^1.57.0", "@rushstack/eslint-patch": "^1.15.0", "@types/js-yaml": "^4.0.9", - "@types/node": "^25.0.9", + "@types/node": "^25.1.0", "@vitejs/plugin-vue": "^6.0.3", "@vue/eslint-config-prettier": "^10.2.0", "@vue/eslint-config-typescript": "^14.6.0", @@ -61,7 +61,7 @@ "prettier": "^3.8.1", "typescript": "~5.9.3", "vite": "^7.3.1", - "vue-tsc": "^3.2.3", + "vue-tsc": "^3.2.4", "xml2js": 
"^0.6.2" } } diff --git a/webui/scripts/dashboard-demo/demo-configs/asyncapi.yaml b/webui/scripts/dashboard-demo/demo-configs/asyncapi.yaml index 613679b19..5082f5220 100644 --- a/webui/scripts/dashboard-demo/demo-configs/asyncapi.yaml +++ b/webui/scripts/dashboard-demo/demo-configs/asyncapi.yaml @@ -11,13 +11,18 @@ servers: description: Local development Kafka broker. channels: - # The "order_events" channel is where order-related messages flow order_events: address: order-topic description: The Kafka topic for order events. messages: OrderCreated: $ref: '#/components/messages/OrderCreated' + tags: + - name: order + user-events: + description: The Kafka topic for user events. + tags: + - name: user operations: # Operation for publishing new orders diff --git a/webui/scripts/dashboard-demo/drive-kafka.ts b/webui/scripts/dashboard-demo/drive-kafka.ts index 19b4ca4c5..ffb9777a0 100644 --- a/webui/scripts/dashboard-demo/drive-kafka.ts +++ b/webui/scripts/dashboard-demo/drive-kafka.ts @@ -1,12 +1,19 @@ import { Kafka } from 'kafkajs'; -const kafka = new Kafka({ +const producerClient = new Kafka({ clientId: 'producer-1', brokers: ['localhost:9092'] }); -const consumer = kafka.consumer({ groupId: 'order-status-group-100' }); -const producer = kafka.producer(); +const consumerClient = new Kafka({ + clientId: 'consumer-1', + brokers: ['localhost:9092'] +}); + +const consumer = consumerClient.consumer({ groupId: 'order-status-group-100' }); +const producer = producerClient.producer({ + idempotent: true +}); export async function driveKafka() { await consumer.connect(); @@ -18,27 +25,28 @@ export async function driveKafka() { eachMessage: async ({ topic, partition, message }) => { const value = JSON.parse(message.value!.toString()); console.log('Received command:', value); - }}); - - const order = { - orderId: 'a914817b-c5f0-433e-8280-1cd2fe44234e', - productId: '2adb46de-1c96-4290-a215-9701b0a7900c', - productName: 'Wireless Ergonomic Mouse (Black)', - quantity: 65, - customerEmail: 'johnnydibbert@feil.org', - status: 'SHIPPED' - }; - - await producer.send({ - topic: 'order-topic', - messages: [{ key: order.orderId, value: JSON.stringify(order) }] - }); - - console.log('Published order:', order); + } + }); + + const order = { + orderId: 'a914817b-c5f0-433e-8280-1cd2fe44234e', + productId: '2adb46de-1c96-4290-a215-9701b0a7900c', + productName: 'Wireless Ergonomic Mouse (Black)', + quantity: 65, + customerEmail: 'johnnydibbert@feil.org', + status: 'SHIPPED' + }; + + await producer.send({ + topic: 'order-topic', + messages: [{ key: order.orderId, value: JSON.stringify(order) }] + }); + + console.log('Published order:', order); }; export async function closeKafka() { - await consumer.stop(); - await consumer.disconnect(); - await producer.disconnect(); + await consumer.stop(); + await consumer.disconnect(); + await producer.disconnect(); } \ No newline at end of file diff --git a/webui/src/App.vue b/webui/src/App.vue index 570952542..f15b359d6 100644 --- a/webui/src/App.vue +++ b/webui/src/App.vue @@ -1,11 +1,15 @@ \ No newline at end of file diff --git a/webui/src/components/Shortcuts.vue b/webui/src/components/Shortcuts.vue new file mode 100644 index 000000000..ca564703b --- /dev/null +++ b/webui/src/components/Shortcuts.vue @@ -0,0 +1,203 @@ + + + + + \ No newline at end of file diff --git a/webui/src/components/dashboard/AppStartCard.vue b/webui/src/components/dashboard/AppStartCard.vue index a9fea5ba3..379a8e345 100644 --- a/webui/src/components/dashboard/AppStartCard.vue +++ 
diff --git a/webui/src/components/dashboard/AppStartCard.vue b/webui/src/components/dashboard/AppStartCard.vue
index a9fea5ba3..379a8e345 100644
--- a/webui/src/components/dashboard/AppStartCard.vue
+++ b/webui/src/components/dashboard/AppStartCard.vue
@@ -1,30 +1,36 @@
[hunk body lost in extraction: the Vue SFC markup was stripped]
\ No newline at end of file
diff --git a/webui/src/components/dashboard/ConfigCard.vue b/webui/src/components/dashboard/ConfigCard.vue
index 9bdaa8a45..f7078d9b1 100644
--- a/webui/src/components/dashboard/ConfigCard.vue
+++ b/webui/src/components/dashboard/ConfigCard.vue
@@ -1,129 +1,21 @@
[hunk body lost in extraction: the Vue SFC markup was stripped]
\ No newline at end of file
diff --git a/webui/src/components/dashboard/Configs.vue b/webui/src/components/dashboard/Configs.vue
new file mode 100644
index 000000000..f6c53a315
--- /dev/null
+++ b/webui/src/components/dashboard/Configs.vue
@@ -0,0 +1,96 @@
[new 96-line component lost in extraction: the Vue SFC markup was stripped]
\ No newline at end of file
diff --git a/webui/src/components/dashboard/MemoryCard.vue b/webui/src/components/dashboard/MemoryCard.vue
index 99326633f..e5e944875 100644
--- a/webui/src/components/dashboard/MemoryCard.vue
+++ b/webui/src/components/dashboard/MemoryCard.vue
@@ -1,28 +1,36 @@
[hunk body lost in extraction: the Vue SFC markup was stripped]
\ No newline at end of file
diff --git a/webui/src/components/dashboard/Search.vue b/webui/src/components/dashboard/Search.vue
index b2c4b02c9..4bdb77cff 100644
--- a/webui/src/components/dashboard/Search.vue
+++ b/webui/src/components/dashboard/Search.vue
@@ -122,6 +122,8 @@ onMounted(async () => {
   }
   if (queryText.value !== '') {
     await search()
+  } else {
+    document.getElementById('search-input')?.focus();
   }
 })

@@ -236,7 +238,7 @@ function facetTitle(s: string) {
[one changed template line lost in extraction]
diff --git a/webui/src/components/dashboard/ServiceInfoCard.vue b/webui/src/components/dashboard/ServiceInfoCard.vue
index 79f165f3d..8f342e569 100644
--- a/webui/src/components/dashboard/ServiceInfoCard.vue
+++ b/webui/src/components/dashboard/ServiceInfoCard.vue
@@ -12,7 +12,7 @@
[one changed template line lost in extraction; surviving cell text: "Name" and "{{ service.name }}"]
diff --git a/webui/src/components/dashboard/http/EndpointsCard.vue b/webui/src/components/dashboard/http/EndpointsCard.vue
index 8226c5023..7c9c29801 100644
--- a/webui/src/components/dashboard/http/EndpointsCard.vue
+++ b/webui/src/components/dashboard/http/EndpointsCard.vue
@@ -1,18 +1,19 @@
[script hunk body lost in extraction]
@@ -184,25 +172,38 @@ function toggleTag(name: string) {

[template hunk body lost in extraction; surviving text: the "Paths" card title, a new filter control labelled "Filter topics by tags" with the hint "Select one or more tags to filter the topics. Selecting "All" enables all tags.", an "All" checkbox wired as value="all" @change="toggleTag('__all')" :checked="tags.includes('__all')" aria-controls="tag-list", and a per-tag list rendering {{ tag.description }}]
@@ -213,7 +214,7 @@ function toggleTag(name: string) {
[one changed line lost in extraction]
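The hunks above start and end inside toggleTag, whose body is not visible in this diff. Judging from the surviving attributes (@change="toggleTag('__all')", :checked="tags.includes('__all')") and the hint text, '__all' acts as a sentinel for "no filtering". A hypothetical reconstruction, offered only as a reading aid and not as the project's actual code:

```ts
import { ref } from 'vue'

// Hypothetical -- toggleTag's real body is not part of this diff.
// tags holds the selected tag names; '__all' means "every tag enabled".
const tags = ref<string[]>(['__all'])

function toggleTag(name: string) {
  if (name === '__all') {
    // Checking "All" clears any concrete tag selection.
    tags.value = ['__all']
    return
  }
  const selected = tags.value.filter(t => t !== '__all')
  const next = selected.includes(name)
    ? selected.filter(t => t !== name)
    : [...selected, name]
  // Revert to "All" when the last concrete tag is unchecked.
  tags.value = next.length > 0 ? next : ['__all']
}
```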

diff --git a/webui/src/components/dashboard/http/HttpOperation.vue b/webui/src/components/dashboard/http/HttpOperation.vue
index 1a11f70c8..4cb0eb00e 100644
--- a/webui/src/components/dashboard/http/HttpOperation.vue
+++ b/webui/src/components/dashboard/http/HttpOperation.vue
@@ -50,7 +50,7 @@ const route = useRoute()
[template hunk body lost in extraction; surviving text: a "Deprecated" label and one changed line whose visible text is "HTTP" on both sides]
diff --git a/webui/src/components/dashboard/http/HttpOperationsCard.vue b/webui/src/components/dashboard/http/HttpOperationsCard.vue
index 901a9f279..3833d03ad 100644
--- a/webui/src/components/dashboard/http/HttpOperationsCard.vue
+++ b/webui/src/components/dashboard/http/HttpOperationsCard.vue
@@ -1,12 +1,17 @@
[hunk body lost in extraction: the Vue SFC markup was stripped]
\ No newline at end of file
diff --git a/webui/src/components/dashboard/kafka/KafkaGroupsCard.vue b/webui/src/components/dashboard/kafka/KafkaGroupsCard.vue
deleted file mode 100644
index e95a10948..000000000
--- a/webui/src/components/dashboard/kafka/KafkaGroupsCard.vue
+++ /dev/null
@@ -1,17 +0,0 @@
[deleted 17-line component lost in extraction]
\ No newline at end of file
diff --git a/webui/src/components/dashboard/kafka/KafkaMessages.vue b/webui/src/components/dashboard/kafka/KafkaMessages.vue
index 67fed0c32..7f06314dd 100644
--- a/webui/src/components/dashboard/kafka/KafkaMessages.vue
+++ b/webui/src/components/dashboard/kafka/KafkaMessages.vue
@@ -6,25 +6,39 @@ import { usePrettyLanguage } from '@/composables/usePrettyLanguage'
 import SourceView from '../SourceView.vue'
 import router from '@/router'
 import { getRouteName, useDashboard } from '@/composables/dashboard'
+import { useLocalStorage } from '@/composables/local-storage'

 const props = defineProps<{
   service?: KafkaService,
   topicName?: string
+  clientId?: string
 }>()

-const labels = []
-if (props.service) {
-  labels.push({name: 'name', value: props.service.name})
-}
-if (props.topicName){
-  labels.push({name: 'topic', value: props.topicName})
-}
+const emit = defineEmits<{
+  (e: "loaded", count: number): void
+}>();
+
+const tags = useLocalStorage(`kafka-${props.service?.name}-tags`, ['__all'])
+const labels = computed(() => {
+  const result = [];
+  if (props.service) {
+    result.push({name: 'name', value: props.service.name})
+  }
+  if (props.topicName) {
+    result.push({name: 'topic', value: props.topicName})
+  }
+  result.push({ name: 'type', value: 'message' })
+  if (props.clientId){
+    result.push({name: 'clientId', value: props.clientId})
+  }
+  return result;
+})

 const { format } = usePrettyDates()
 const { formatLanguage } = usePrettyLanguage()
 const { dashboard } = useDashboard()
-const { events, close } = dashboard.value.getEvents('kafka', ...labels)
+const { events, close } = dashboard.value.getEvents('kafka', ...labels.value)
 const messageDialog = ref(null)
 const tabDetailData = ref(null)
 let dialog: Modal
@@ -32,12 +46,23 @@ let tab: Tab

 const messages = computed(() => {
   const result = [];
+  emit("loaded", events.value.length);
   for (const event of events.value) {
     const data = eventData(event)
     if (!data){
       continue
     }
+    if (props.service && !props.clientId && !props.topicName && !tags.value.includes('__all')) {
+      const topic = props.service.topics.find(t => t.name === event.traits['topic']);
+      if (!topic) {
+        continue
+      }
+      if (!topic.tags || !topic.tags.some(tag => tags.value.some(x => x == tag.name))) {
+        continue
+      }
+    }
+
     result.push({
       id: event.id,
       key: key(data),
@@ -51,11 +76,11 @@ const messages = computed(() => {
   return result;
 })

-function eventData(event: ServiceEvent | null): KafkaEventData | null{
+function eventData(event: ServiceEvent | null): KafkaMessageData | null{
   if (!event) {
     return null
   }
-  return event.data
+  return event.data as KafkaMessageData
 }
 function isAvro(event: ServiceEvent): boolean {
   const msg = getMessageConfig(event)
@@ -240,7 +265,7 @@ function getContentType(msg: KafkaMessage): [string, boolean] {
   return [ msg.contentType, false ]
 }

-function key(data: KafkaEventData | null): string {
+function key(data: KafkaMessageData | null): string {
   if (!data) {
     return ''
   }
diff --git a/webui/src/components/dashboard/kafka/KafkaMessagesCard.vue b/webui/src/components/dashboard/kafka/KafkaMessagesCard.vue
index 60a56f6f3..c782e161b 100644
--- a/webui/src/components/dashboard/kafka/KafkaMessagesCard.vue
+++ b/webui/src/components/dashboard/kafka/KafkaMessagesCard.vue
@@ -1,17 +1,32 @@
[hunk body lost in extraction: the Vue SFC markup was stripped]
\ No newline at end of file
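The topic-tag filter added to the messages computed in KafkaMessages.vue above is easier to audit as a standalone predicate. Here is the same logic lifted out, with minimal stand-in types inferred from the diff (the real KafkaService/topic definitions live elsewhere in the codebase):

```ts
// Stand-in types inferred from the diff; not the project's actual definitions.
interface TopicTag { name: string; description?: string }
interface Topic { name: string; tags?: TopicTag[] }

// Mirrors the checks in the computed: '__all' disables filtering entirely;
// otherwise a message survives only if its topic exists and carries at least
// one of the selected tag names.
function topicMatchesTags(topic: Topic | undefined, selected: string[]): boolean {
  if (selected.includes('__all')) {
    return true
  }
  if (!topic || !topic.tags) {
    return false
  }
  return topic.tags.some(tag => selected.some(x => x === tag.name))
}
```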
diff --git a/webui/src/components/dashboard/kafka/KafkaPartition.vue b/webui/src/components/dashboard/kafka/KafkaPartition.vue
index e8258f2fe..1ea7c7f59 100644
--- a/webui/src/components/dashboard/kafka/KafkaPartition.vue
+++ b/webui/src/components/dashboard/kafka/KafkaPartition.vue
@@ -11,7 +11,6 @@ defineProps<{
[one removed template line lost in extraction]
@@ -20,7 +19,6 @@ defineProps<{
[one removed template line lost in extraction]
diff --git a/webui/src/components/dashboard/kafka/KafkaRequests.vue b/webui/src/components/dashboard/kafka/KafkaRequests.vue
new file mode 100644
index 000000000..87de776bc
--- /dev/null
+++ b/webui/src/components/dashboard/kafka/KafkaRequests.vue
@@ -0,0 +1,138 @@
[new 138-line component lost in extraction: the Vue SFC markup was stripped]
\ No newline at end of file
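KafkaMessages.vue above imports useLocalStorage from @/composables/local-storage, whose implementation is not part of this diff. The call site only needs a writable ref seeded from storage (useLocalStorage(key, defaultValue) used as tags.value). One common shape, offered as an assumption rather than the project's actual composable:

```ts
import { ref, watch, type Ref } from 'vue'

// Sketch of a localStorage-backed ref; @/composables/local-storage may differ.
export function useLocalStorage<T>(key: string, defaultValue: T): Ref<T> {
  const stored = localStorage.getItem(key)
  const value = ref(stored !== null ? (JSON.parse(stored) as T) : defaultValue) as Ref<T>
  // Persist every mutation, including in-place array changes like tag toggles.
  watch(value, v => localStorage.setItem(key, JSON.stringify(v)), { deep: true })
  return value
}
```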
diff --git a/webui/src/components/dashboard/kafka/KafkaService.vue b/webui/src/components/dashboard/kafka/KafkaService.vue
index c18c58b2a..4860d726a 100644
--- a/webui/src/components/dashboard/kafka/KafkaService.vue
+++ b/webui/src/components/dashboard/kafka/KafkaService.vue
@@ -1,52 +1,146 @@
[hunk body lost in extraction; only bare '+'/'-' diff prefixes survive]
\ No newline at end of file
diff --git a/webui/src/components/dashboard/kafka/KafkaTopic.vue b/webui/src/components/dashboard/kafka/KafkaTopic.vue
index ced49f3c4..06f45de31 100644
--- a/webui/src/components/dashboard/kafka/KafkaTopic.vue
+++ b/webui/src/components/dashboard/kafka/KafkaTopic.vue
@@ -1,5 +1,5 @@
[template hunk body lost in extraction; surviving tab labels: "Summary", "Operations", "Last Request", with "Requests / Errors" apparently shortened to "Req / Err"]
[partition table markup lost in extraction; surviving column headers: ID, Leader, Start Offset, Offset, Segments, with row cells {{ partition.id }}, {{ partition.leader.name }} ({{ partition.leader.addr }}), {{ partition.startOffset }}, {{ partition.offset }}, {{ partition.segments }}]