diff --git a/.github/workflows/ow-exporter.yaml b/.github/workflows/ow-exporter.yaml
new file mode 100644
index 00000000..f659b5dd
--- /dev/null
+++ b/.github/workflows/ow-exporter.yaml
@@ -0,0 +1,76 @@
+name: Build Container Image for ow-exporter
+
+on:
+  push:
+    paths:
+      - "build/ow-exporter/**"
+      - "cmd/ow-exporter/**"
+      - "go.mod"
+      - "go.sum"
+      - ".github/workflows/ow-exporter.yaml"
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Docker meta
+        id: docker_meta
+        uses: docker/metadata-action@v5.8.0
+        with:
+          images: ghcr.io/lexfrei/ow-exporter
+          flavor: |
+            latest=true
+          tags: |
+            type=ref,event=branch
+            type=ref,event=pr
+            type=sha
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3.6.0
+        with:
+          platforms: "arm64,amd64"
+
+      - name: Checkout
+        uses: actions/checkout@v5.0.0
+
+      - name: Set up Docker Context for Buildx
+        id: buildx-context
+        run: |
+          docker context create builders
+
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v3.11.1
+        with:
+          endpoint: builders
+          config-inline: |
+            [registry."docker.io"]
+              mirrors = ["mirror.gcr.io"]
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3.5.0
+        if: github.ref == 'refs/heads/master'
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.CR_PAT }}
+
+      - name: Build and push
+        uses: docker/build-push-action@v6.18.0
+        if: github.ref == 'refs/heads/master'
+        with:
+          push: true
+          context: .
+          file: ./build/ow-exporter/Containerfile
+          platforms: linux/arm64, linux/amd64
+          tags: ${{ steps.docker_meta.outputs.tags }}
+
+      - name: Build
+        uses: docker/build-push-action@v6.18.0
+        if: github.ref != 'refs/heads/master'
+        with:
+          context: .
+          file: ./build/ow-exporter/Containerfile
+          platforms: linux/arm64, linux/amd64
+          tags: ${{ steps.docker_meta.outputs.tags }}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index b80dcd02..6ad539ec 100644
--- a/.gitignore
+++ b/.gitignore
@@ -106,3 +106,24 @@ $RECYCLE.BIN/
 # End of https://www.toptal.com/developers/gitignore/api/go,visualstudiocode,windows,macos
 
 deployments/vk2tg/compose/local.env
+
+# Project-specific binaries
+/ow-exporter
+cmd/ow-exporter/ow-exporter
+
+# OverFast API source code for reference
+overfast-api/
+
+# Node.js (not needed for Go project)
+node_modules/
+package.json
+package-lock.json
+
+# Temporary debug/development files
+*.html
+debug_profile.html
+
+# Configuration files with personal data
+config/players.yaml
+cmd/ow-exporter/config/
+*/config/players.yaml
diff --git a/.golangci.yaml b/.golangci.yaml
index a4a20c19..c314415b 100644
--- a/.golangci.yaml
+++ b/.golangci.yaml
@@ -7,6 +7,8 @@ linters:
     - gochecknoglobals
     - gochecknoinits
     - nonamedreturns
+    - wsl
+    - wsl_v5
   settings:
     dupl:
       threshold: 100
diff --git a/CLAUDE.md b/CLAUDE.md
index 2c0ba5be..4be367fc 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -78,6 +78,169 @@ The project follows standard Go conventions with strict linting rules.
Pay atten - Import formatting with goimports and gofumpt - Error handling patterns using `github.com/cockroachdb/errors` +## Go Code Style Guide + +Based on golangci-lint configuration and current linting errors, follow these specific style guidelines: + +### Standard Libraries (from ~/PROMPT.md) +- **Logging**: `slog` (standard library) +- **Errors**: `github.com/cockroachdb/errors` +- **Web framework**: `github.com/labstack/echo/v4` +- **CLI**: `github.com/spf13/cobra` +- **Configuration**: `github.com/spf13/viper` + +### Function and Method Guidelines +- **Function length**: Maximum 60 lines (funlen) +- **Cognitive complexity**: Maximum 30 (gocognit) +- **Cyclomatic complexity**: Maximum 10 (gocyclo) +- Break down complex functions into smaller, focused functions +- Use helper methods to reduce complexity + +### Line Length and Formatting +- **Maximum line length**: 120 characters (lll) +- For struct tags that exceed line length: + ```go + // Good - break long struct tags across lines + type HeroStats struct { + TimePlayed time.Duration `ow:"time_played" + prometheus:"ow_hero_time_played_seconds" + help:"Total time played on hero" + path:"[data-category-id='0x0860000000000021']" + type:"duration"` + } + ``` + +### Constants and Magic Numbers +- **Extract repeated strings** as constants (goconst): + ```go + // Good + const MouseKeyboardViewActiveSelector = ".mouseKeyboard-view.is-active" + const QuickPlayViewActiveSelector = ".quickPlay-view.is-active" + ``` +- **Avoid magic numbers** (mnd) - define as constants with meaningful names: + ```go + // Bad + time.Sleep(30 * time.Second) + + // Good + const DefaultTimeout = 30 * time.Second + time.Sleep(DefaultTimeout) + ``` + +### Variable Naming +- **Minimum 3 characters** for variable names (varnamelen) +- Use descriptive names: + ```go + // Bad + e := echo.New() + s := "hello" + + // Good + server := echo.New() + message := "hello" + ``` + +### Documentation Standards +- **All comments must end with periods** (godot): + ```go + // Good comment ends with a period. 
+ func doSomething() {} + ``` +- **Document all exported functions and types** (godoclint) +- Use proper Go doc comment format + +### Struct Tags and JSON Naming +- **Use camelCase for JSON tags** (tagliatelle): + ```go + // Good + type Player struct { + BattleTag string `json:"battleTag" yaml:"battletag"` + LastResolved *time.Time `json:"lastResolved" yaml:"lastResolved"` + } + ``` +- **Align struct tags** (tagalign) for better readability: + ```go + type Config struct { + Host string `json:"host" yaml:"host"` + Port int `json:"port" yaml:"port"` + Database string `json:"database" yaml:"database"` + } + ``` + +### Context Handling +- **Pass context as first parameter** (noctx) in functions that might need it: + ```go + // Good + func fetchData(ctx context.Context, url string) error { + // implementation + } + ``` + +### Loop Patterns +- **Use integer ranges for Go 1.22+** (intrange): + ```go + // Modern Go 1.22+ style + for i := range 10 { + // process i + } + + // Instead of + for i := 0; i < 10; i++ { + // process i + } + ``` + +### Line Spacing (nlreturn) +- **Add blank lines before return statements** when they follow blocks: + ```go + // Good + if condition { + doSomething() + } + + return result + ``` + +### Testing Standards +- **Use parallel tests** where appropriate (paralleltest): + ```go + func TestSomething(t *testing.T) { + t.Parallel() // Add this for independent tests + + // test implementation + } + ``` + +### TODOs and Technical Debt +- **Minimize TODO comments** (godox) +- When TODOs are necessary, make them specific and actionable +- Include issue references or deadlines where possible + +### Prometheus Metrics +- **Follow Prometheus naming conventions** (promlinter): + ```go + // Good metric names + "http_requests_total" // counter + "request_duration_seconds" // histogram + "current_connections" // gauge + ``` + +### Error Handling Patterns +- Use sentinel errors for expected conditions +- Wrap errors with context using `github.com/cockroachdb/errors` +- Don't return `nil, nil` - use meaningful errors instead + +### Code Organization +- Keep related functionality together +- Use meaningful package names +- Prefer composition over inheritance +- Use interfaces for dependencies + +### File Permissions and Security +- Use octal notation for file permissions: `0o600` not `0600` +- Never commit secrets or credentials +- Use environment variables for sensitive configuration + ### Adding New Tools When adding a new CLI tool: 1. 
Create directory under `cmd//` diff --git a/README.md b/README.md index 6d424502..0599c04b 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ This monorepo contains various Go-based CLI tools and containerized services for |------|-------------|-----------|--------| | **[me-site](cmd/me-site/)** | Personal website with static content | `ghcr.io/lexfrei/me-site` | βœ… Active | | **[a200](build/a200/)** | Simple nginx server that responds 200 to all requests | `ghcr.io/lexfrei/a200` | βœ… Active | +| **[ow-exporter](cmd/ow-exporter/)** | Overwatch 2 statistics exporter for Prometheus | `ghcr.io/lexfrei/ow-exporter` | 🚧 Development | | **[redis-ui](cmd/redis-ui/)** | Web UI for Redis database management | - | πŸ”§ Development | ### Social & Communication diff --git a/build/ow-exporter/Containerfile b/build/ow-exporter/Containerfile index 6fd2778d..ded8ef8e 100644 --- a/build/ow-exporter/Containerfile +++ b/build/ow-exporter/Containerfile @@ -6,7 +6,7 @@ RUN echo 'nobody:x:65534:65534:Nobody:/:' > /tmp/passwd && \ WORKDIR /go/src/github.com/lexfrei/tools/ COPY . /go/src/github.com/lexfrei/tools/ -RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -ldflags="-s -w" -o ow-exporter ./cmd/ow-exporter/ow-exporter.go && \ +RUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -ldflags="-s -w" -o ow-exporter ./cmd/ow-exporter && \ upx --best --lzma ow-exporter FROM scratch diff --git a/cmd/ow-exporter/api_inspector.go b/cmd/ow-exporter/api_inspector.go new file mode 100644 index 00000000..0502da7a --- /dev/null +++ b/cmd/ow-exporter/api_inspector.go @@ -0,0 +1,226 @@ +package main + +import ( + "context" + "encoding/json" + "fmt" + "log/slog" + "net/http" + "strings" + "time" + + "github.com/cockroachdb/errors" +) + +const ( + defaultHTTPTimeoutSeconds = 30 + maxRecursionDepth = 3 +) + +// APIInspector helps find API endpoints that might contain detailed hero stats. +type APIInspector struct { + client *http.Client +} + +// NewAPIInspector creates a new API inspector. +func NewAPIInspector() *APIInspector { + return &APIInspector{ + client: &http.Client{ + Timeout: defaultHTTPTimeoutSeconds * time.Second, + }, + } +} + +// PotentialAPIEndpoint represents a potential API endpoint to test. +type PotentialAPIEndpoint struct { + URL string + Description string + Headers map[string]string +} + +// InspectPotentialAPIEndpoints tries to find API calls that load detailed stats. 
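+// It derives the profile ID from the career URL, builds a list of candidate
+// endpoints, probes each one with testAPIEndpoint, and logs the outcome;
+// individual failures are logged at debug level and do not abort the scan.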
+func (a *APIInspector) InspectPotentialAPIEndpoints(ctx context.Context, profileURL string) error { + slog.Info("πŸ” Starting API endpoint discovery...") + + // Extract profile ID from URL + profileID := extractProfileIDFromURL(profileURL) + if profileID == "" { + return errors.New("could not extract profile ID from URL") + } + + slog.Info("Extracted profile ID", "id", profileID) + + // Define potential API endpoints to test + endpoints := []PotentialAPIEndpoint{ + { + URL: fmt.Sprintf("https://overwatch.blizzard.com/en-us/api/career/%s/", profileID), + Description: "Main career API endpoint", + }, + { + URL: fmt.Sprintf("https://overwatch.blizzard.com/en-us/api/career/%s/hero-stats", profileID), + Description: "Hero stats API endpoint", + }, + { + URL: fmt.Sprintf("https://overwatch.blizzard.com/en-us/api/career/%s/detailed-stats", profileID), + Description: "Detailed stats API endpoint", + }, + { + URL: fmt.Sprintf("https://overwatch.blizzard.com/en-us/api/career/%s/heroes", profileID), + Description: "Heroes API endpoint", + }, + { + URL: fmt.Sprintf("https://overwatch.blizzard.com/api/career/%s/", profileID), + Description: "Alternative API path", + }, + { + URL: fmt.Sprintf("https://playoverwatch.com/en-us/api/career/%s/", profileID), + Description: "Legacy API endpoint", + }, + } + + // Test each endpoint + for _, endpoint := range endpoints { + slog.Info("Testing API endpoint", "url", endpoint.URL, "description", endpoint.Description) + + err := a.testAPIEndpoint(ctx, endpoint) + if err != nil { + slog.Debug("API endpoint failed", "url", endpoint.URL, "error", err.Error()) + } + } + + return nil +} + +// testAPIEndpoint tests a single API endpoint. +func (a *APIInspector) testAPIEndpoint(ctx context.Context, endpoint PotentialAPIEndpoint) error { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint.URL, http.NoBody) + if err != nil { + return errors.Wrap(err, "failed to create request") + } + + // Add browser-like headers + req.Header.Set("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36") + req.Header.Set("Accept", "application/json, text/plain, */*") + req.Header.Set("Accept-Language", "en-US,en;q=0.9") + req.Header.Set("Referer", "https://overwatch.blizzard.com/") + + // Add any custom headers + for key, value := range endpoint.Headers { + req.Header.Set(key, value) + } + + resp, err := a.client.Do(req) + if err != nil { + return errors.Wrap(err, "request failed") + } + defer resp.Body.Close() + + slog.Info("API response", "url", endpoint.URL, "status", resp.StatusCode, + "content_type", resp.Header.Get("Content-Type")) + + if resp.StatusCode == http.StatusOK { + // Try to read and analyze the response + var jsonData interface{} + decoder := json.NewDecoder(resp.Body) + err := decoder.Decode(&jsonData) + if err == nil { + a.analyzeJSONResponse(endpoint.URL, jsonData) + } else { + slog.Debug("Response is not JSON", "url", endpoint.URL) + } + } + + return nil +} + +// analyzeJSONResponse analyzes a JSON response for hero stats. 
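+// It pretty-prints the payload, scans the lowercased JSON for known hero
+// keywords, records matching responses via saveJSONToFile, and then walks the
+// top-level object with analyzeJSONStructure to surface interesting keys.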
+func (a *APIInspector) analyzeJSONResponse(url string, data interface{}) { + slog.Info("βœ… Found JSON response", "url", url) + + // Convert to JSON string for analysis + jsonBytes, err := json.MarshalIndent(data, "", " ") + if err != nil { + slog.Error("Failed to marshal JSON", "error", err) + + return + } + + jsonStr := string(jsonBytes) + + // Look for hero-related keywords + heroKeywords := []string{ + "cassidy", "mccree", "mercy", "reinhardt", "tracer", + "resurrects", "damage_amplified", "rocket_hammer_kills", + "pulse_bomb_kills", "earthshatter_kills", + } + + foundKeywords := []string{} + for _, keyword := range heroKeywords { + if strings.Contains(strings.ToLower(jsonStr), keyword) { + foundKeywords = append(foundKeywords, keyword) + } + } + + if len(foundKeywords) > 0 { + slog.Info("🎯 Found hero-related data!", "url", url, "keywords", foundKeywords) + + // Save the response for analysis + fileName := fmt.Sprintf("/tmp/claude/api_response_%d.json", time.Now().Unix()) + saveJSONToFile(fileName, jsonStr) + slog.Info("Saved API response", "file", fileName) + } else { + slog.Debug("No hero keywords found in response", "url", url) + } + + // Check structure + if m, ok := data.(map[string]interface{}); ok { + a.analyzeJSONStructure("root", m, 0) + } +} + +// analyzeJSONStructure recursively analyzes JSON structure. +func (a *APIInspector) analyzeJSONStructure(key string, data interface{}, depth int) { + if depth > maxRecursionDepth { // Limit recursion depth + return + } + + indent := strings.Repeat(" ", depth) + + switch value := data.(type) { + case map[string]interface{}: + for k, val := range value { + if strings.Contains(strings.ToLower(k), "hero") || + strings.Contains(strings.ToLower(k), "stat") || + strings.Contains(strings.ToLower(k), "cassidy") { + slog.Debug("Interesting JSON key", "path", fmt.Sprintf("%s%s.%s", indent, key, k)) + a.analyzeJSONStructure(k, val, depth+1) + } + } + case []interface{}: + if len(value) > 0 { + slog.Debug("JSON array", "path", fmt.Sprintf("%s%s", indent, key), "length", len(value)) + a.analyzeJSONStructure(key+"[0]", value[0], depth+1) + } + } +} + +// extractProfileIDFromURL extracts profile ID from Overwatch URL. +func extractProfileIDFromURL(url string) string { + // Extract from URL like: + // https://overwatch.blizzard.com/en-us/career/de5bb4aca17492e0bba120a1d1%7Ca92a11ef8d304356fccfff8df12e1dc6/ + parts := strings.Split(url, "/career/") + if len(parts) < 2 { + return "" + } + + profilePart := parts[1] + profilePart = strings.TrimSuffix(profilePart, "/") + + return profilePart +} + +// saveJSONToFile saves JSON string to file. +func saveJSONToFile(fileName, jsonStr string) { + // This is a placeholder - in real implementation you'd use os.WriteFile + slog.Debug("Would save JSON to file", "file", fileName, "size", len(jsonStr)) +} diff --git a/cmd/ow-exporter/browser_parser.go b/cmd/ow-exporter/browser_parser.go new file mode 100644 index 00000000..16cfb58b --- /dev/null +++ b/cmd/ow-exporter/browser_parser.go @@ -0,0 +1,191 @@ +package main + +import ( + "context" + "log/slog" + "net/http" + "strings" + "time" + + "github.com/PuerkitoBio/goquery" + "github.com/cockroachdb/errors" +) + +const ( + browserHTTPTimeoutSeconds = 30 +) + +// BrowserLikeParser attempts to handle JavaScript-loaded content. +type BrowserLikeParser struct { + client *http.Client +} + +// NewBrowserLikeParser creates a new parser that tries to handle JS content. 
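+// No JavaScript engine is involved: the parser only wraps a timeout-bounded
+// http.Client, and FetchWithJSSupport relies on browser-like request headers
+// to fetch whatever HTML the server returns without executing scripts.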
+func NewBrowserLikeParser() *BrowserLikeParser { + return &BrowserLikeParser{ + client: &http.Client{ + Timeout: browserHTTPTimeoutSeconds * time.Second, + }, + } +} + +// FetchWithJSSupport attempts to fetch content including JS-loaded sections. +func (p *BrowserLikeParser) FetchWithJSSupport(ctx context.Context, profileURL string) (*goquery.Document, error) { + slog.Debug("Attempting to fetch profile with JS-like behavior", "url", profileURL) + + // Create request with browser-like headers + req, err := http.NewRequestWithContext(ctx, http.MethodGet, profileURL, http.NoBody) + if err != nil { + return nil, errors.Wrap(err, "failed to create request") + } + + // Add browser-like headers to appear more like a real browser + req.Header.Set("User-Agent", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "+ + "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36") + req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8") + req.Header.Set("Accept-Language", "en-US,en;q=0.5") + req.Header.Set("Accept-Encoding", "gzip, deflate") + req.Header.Set("Connection", "keep-alive") + req.Header.Set("Upgrade-Insecure-Requests", "1") + req.Header.Set("Sec-Fetch-Dest", "document") + req.Header.Set("Sec-Fetch-Mode", "navigate") + req.Header.Set("Sec-Fetch-Site", "none") + + // Perform request + resp, err := p.client.Do(req) + if err != nil { + return nil, errors.Wrap(err, "failed to perform HTTP request") + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, errors.Errorf("unexpected status code: %d", resp.StatusCode) + } + + // Parse the HTML + doc, err := goquery.NewDocumentFromReader(resp.Body) + if err != nil { + return nil, errors.Wrap(err, "failed to parse HTML") + } + + slog.Debug("Successfully fetched HTML", "url", profileURL) + + return doc, nil +} + +// AnalyzeJSLoadedContent looks for signs of JavaScript-loaded content. +func (p *BrowserLikeParser) AnalyzeJSLoadedContent(doc *goquery.Document) { + slog.Info("πŸ” Analyzing HTML structure for JS-loaded content...") + + p.analyzeScriptTags(doc) + p.analyzeDataAttributes(doc) + p.analyzeSelectors(doc) + p.analyzeHeroContainers(doc) +} + +// InspectFullStructure provides detailed analysis of the page structure. +func (p *BrowserLikeParser) InspectFullStructure(doc *goquery.Document) { + slog.Info("πŸ“‹ Full structure analysis...") + + // Look for any elements with "stats" in class name + statsElements := doc.Find("*[class*='stats']") + slog.Info("Elements with 'stats' in class", "count", statsElements.Length()) + + statsElements.Each(func(_ int, s *goquery.Selection) { + classes, _ := s.Attr("class") + tagName := goquery.NodeName(s) + slog.Debug("Stats element", "tag", tagName, "classes", classes) + }) + + // Look for view containers + viewElements := doc.Find("*[class*='view']") + slog.Info("Elements with 'view' in class", "count", viewElements.Length()) + + viewCount := make(map[string]int) + viewElements.Each(func(_ int, s *goquery.Selection) { + classes, _ := s.Attr("class") + for _, class := range strings.Split(classes, " ") { + if strings.Contains(class, "view") { + viewCount[class]++ + } + } + }) + + for viewClass, count := range viewCount { + slog.Debug("View class frequency", "class", viewClass, "count", count) + } +} + +// analyzeScriptTags checks for script tags that might load additional content. 
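+// It counts every <script> tag in the document and logs the src of any
+// script whose URL contains "career".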
+func (p *BrowserLikeParser) analyzeScriptTags(doc *goquery.Document) { + scriptCount := 0 + doc.Find("script").Each(func(_ int, s *goquery.Selection) { + scriptCount++ + src, exists := s.Attr("src") + if exists && strings.Contains(src, "career") { + slog.Debug("Found career-related script", "src", src) + } + }) + slog.Info("Found script tags", "count", scriptCount) +} + +// analyzeDataAttributes looks for data attributes that might be populated by JS. +func (p *BrowserLikeParser) analyzeDataAttributes(doc *goquery.Document) { + dataAttrs := make(map[string]int) + doc.Find("*").Each(func(_ int, s *goquery.Selection) { + for _, attr := range []string{"data-stat", "data-category-id", "data-hero-id"} { + if val, exists := s.Attr(attr); exists { + dataAttrs[attr]++ + if attr == "data-stat" { + slog.Debug("Found data-stat attribute", "value", val) + } + } + } + }) + + for attr, count := range dataAttrs { + slog.Info("Found data attributes", "attribute", attr, "count", count) + } +} + +// analyzeSelectors checks for specific selectors the user mentioned. +func (p *BrowserLikeParser) analyzeSelectors(doc *goquery.Document) { + selectors := []string{ + "blz-section.stats", + "span.stats-container", + ".option-15", + ".mouseKeyboard-view.Profile-view.is-active", + ".quickPlay-view.is-active", + } + + for _, selector := range selectors { + elements := doc.Find(selector) + count := elements.Length() + slog.Info("Checking selector", "selector", selector, "found", count) + + if count > 0 { + elements.Each(func(_ int, s *goquery.Selection) { + classes, _ := s.Attr("class") + id, _ := s.Attr("id") + slog.Debug("Found element", "selector", selector, "classes", classes, "id", id) + }) + } + } +} + +// analyzeHeroContainers looks for containers that might hold hero stats. +func (p *BrowserLikeParser) analyzeHeroContainers(doc *goquery.Document) { + heroContainers := doc.Find("[data-hero-id]") + slog.Info("Found hero containers", "count", heroContainers.Length()) + + heroContainers.Each(func(_ int, s *goquery.Selection) { + heroID, _ := s.Attr("data-hero-id") + classes, _ := s.Attr("class") + text := strings.TrimSpace(s.Text()) + if len(text) > 100 { + text = text[:100] + "..." + } + slog.Debug("Hero container", "hero", heroID, "classes", classes, "text", text) + }) +} diff --git a/cmd/ow-exporter/config.go b/cmd/ow-exporter/config.go new file mode 100644 index 00000000..d9432120 --- /dev/null +++ b/cmd/ow-exporter/config.go @@ -0,0 +1,193 @@ +package main + +import ( + "os" + "path/filepath" + "time" + + "github.com/cockroachdb/errors" + "github.com/spf13/viper" + "gopkg.in/yaml.v3" +) + +const ( + // ConfigDirPermissions sets directory permissions for config directory (rwxr-xr-x). + ConfigDirPermissions = 0o755 + // ConfigFilePermissions sets file permissions for config file (rw-------). + ConfigFilePermissions = 0o600 +) + +// PlayersConfig represents the structure of players.yaml. +type PlayersConfig struct { + Players []PlayerEntry `yaml:"players"` +} + +// PlayerEntry represents a single player configuration entry. +type PlayerEntry struct { + BattleTag string `yaml:"battletag"` + ResolvedURL string `yaml:"resolvedUrl"` + LastResolved *time.Time `yaml:"lastResolved"` +} + +var ( + playersConfig *PlayersConfig + configPath string +) + +// initConfig initializes the configuration system. 
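+// It resolves the config path, ensures the directory exists, points viper at
+// it, creates a default players.yaml when no config file is found, and
+// otherwise unmarshals the existing file into the in-memory playersConfig.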
+func initConfig() error { + // Set config file path + configPath = getConfigPath() + + // Create config directory if it doesn't exist + configDir := filepath.Dir(configPath) + err := os.MkdirAll(configDir, ConfigDirPermissions) + if err != nil { + return errors.Wrap(err, "failed to create config directory") + } + + // Initialize viper + viper.SetConfigName("players") + viper.SetConfigType("yaml") + viper.AddConfigPath(configDir) + + // Try to read existing config + err = viper.ReadInConfig() + if err != nil { + var configFileNotFoundError viper.ConfigFileNotFoundError + if errors.As(err, &configFileNotFoundError) { + // Config file not found, create default + return createDefaultConfig() + } + + return errors.Wrap(err, "failed to read config") + } + + // Load config into struct + return loadConfig() +} + +// getConfigPath returns the path to the players.yaml config file. +func getConfigPath() string { + if configFile := viper.GetString("config"); configFile != "" { + return configFile + } + + return "config/players.yaml" +} + +// createDefaultConfig creates a default players.yaml file. +func createDefaultConfig() error { + defaultConfig := &PlayersConfig{ + Players: []PlayerEntry{}, + } + + return saveConfig(defaultConfig) +} + +// loadConfig loads the configuration from file into memory. +func loadConfig() error { + playersConfig = &PlayersConfig{} + + return errors.Wrap(viper.Unmarshal(playersConfig), "failed to unmarshal config") +} + +// saveConfig saves the configuration to file. +func saveConfig(config *PlayersConfig) error { + data, err := yaml.Marshal(config) + if err != nil { + return errors.Wrap(err, "failed to marshal config") + } + + err = os.WriteFile(configPath, data, ConfigFilePermissions) + if err != nil { + return errors.Wrap(err, "failed to write config file") + } + + // Update in-memory config + playersConfig = config + + return nil +} + +// findPlayerByBattleTag finds a player entry by BattleTag. +func findPlayerByBattleTag(battleTag string) *PlayerEntry { + if playersConfig == nil { + return nil + } + + for i := range playersConfig.Players { + if playersConfig.Players[i].BattleTag == battleTag { + return &playersConfig.Players[i] + } + } + + return nil +} + +// addPlayerToConfig adds a new player to the configuration. +func addPlayerToConfig(battleTag, resolvedURL string) error { + if playersConfig == nil { + playersConfig = &PlayersConfig{} + } + + // Check if player already exists + if existingPlayer := findPlayerByBattleTag(battleTag); existingPlayer != nil { + // Update existing player + existingPlayer.ResolvedURL = resolvedURL + now := time.Now() + existingPlayer.LastResolved = &now + } else { + // Add new player + now := time.Now() + newPlayer := PlayerEntry{ + BattleTag: battleTag, + ResolvedURL: resolvedURL, + LastResolved: &now, + } + playersConfig.Players = append(playersConfig.Players, newPlayer) + } + + return saveConfig(playersConfig) +} + +// updatePlayerURL updates the resolved URL for an existing player. +func updatePlayerURL(battleTag, resolvedURL string) error { + player := findPlayerByBattleTag(battleTag) + if player == nil { + return errors.Wrapf(ErrPlayerNotFound, "%s", battleTag) + } + + player.ResolvedURL = resolvedURL + now := time.Now() + player.LastResolved = &now + + return saveConfig(playersConfig) +} + +// getAllPlayers returns all configured players. 
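+// It returns an empty (non-nil) slice when no configuration has been loaded.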
+func getAllPlayers() []PlayerEntry { + if playersConfig == nil { + return []PlayerEntry{} + } + + return playersConfig.Players +} + +// removePlayerFromConfig removes a player from the configuration. +func removePlayerFromConfig(battleTag string) error { + if playersConfig == nil { + return ErrNoConfigLoaded + } + + for i, player := range playersConfig.Players { + if player.BattleTag == battleTag { + // Remove player from slice + playersConfig.Players = append(playersConfig.Players[:i], playersConfig.Players[i+1:]...) + + return saveConfig(playersConfig) + } + } + + return errors.Wrapf(ErrPlayerNotFound, "%s", battleTag) +} diff --git a/cmd/ow-exporter/errors.go b/cmd/ow-exporter/errors.go new file mode 100644 index 00000000..75ceb6b1 --- /dev/null +++ b/cmd/ow-exporter/errors.go @@ -0,0 +1,36 @@ +package main + +import "github.com/cockroachdb/errors" + +// Static errors for err113 linter compliance. +var ( + // ErrPlayerNotFound occurs when a player is not found in config. + ErrPlayerNotFound = errors.New("player not found in config") + ErrNoConfigLoaded = errors.New("no config loaded") + + // ErrProfileNotFound occurs when a profile is not found (404). + ErrProfileNotFound = errors.New("profile not found (404)") + ErrTooManyRedirects = errors.New("too many redirects") + + // ErrMetricsNil occurs when metrics cannot be nil. + ErrMetricsNil = errors.New("metrics cannot be nil") + ErrEmptyBattleTag = errors.New("battle tag cannot be empty") + ErrZeroLastUpdated = errors.New("last updated time cannot be zero") + + // ErrBattleTagNotResolved occurs when failed to resolve BattleTag on any platform. + ErrBattleTagNotResolved = errors.New("failed to resolve BattleTag on any platform") + ErrUnexpectedStatusCode = errors.New("unexpected status code") + ErrHTTPError = errors.New("HTTP error") + + // ErrUnknownHero occurs when unknown hero detected - not in registry. + ErrUnknownHero = errors.New("unknown hero detected - not in registry") + + // ErrNoPlatformData occurs when no data found for platform. + ErrNoPlatformData = errors.New("no data found for platform") + // ErrNoGameModeData occurs when no data found for game mode. + ErrNoGameModeData = errors.New("no data found for game mode") + // ErrNoHeroID occurs when no hero ID found in hero element. + ErrNoHeroID = errors.New("no hero ID found in hero element") + // ErrNoHeroName occurs when no hero name found in hero element. + ErrNoHeroName = errors.New("no hero name found in hero element") +) diff --git a/cmd/ow-exporter/headless_parser.go b/cmd/ow-exporter/headless_parser.go new file mode 100644 index 00000000..715acc4c --- /dev/null +++ b/cmd/ow-exporter/headless_parser.go @@ -0,0 +1,187 @@ +package main + +import ( + "context" + "fmt" + "log/slog" + "net/http" + "os/exec" + "strings" + "time" + + "github.com/PuerkitoBio/goquery" + "github.com/cockroachdb/errors" +) + +const ( + headlessHTTPTimeoutSeconds = 60 +) + +// HeadlessParser provides JavaScript-enabled HTML parsing. +type HeadlessParser struct { + client *http.Client +} + +// NewHeadlessParser creates a new headless parser. +func NewHeadlessParser() *HeadlessParser { + return &HeadlessParser{ + client: &http.Client{ + Timeout: headlessHTTPTimeoutSeconds * time.Second, + }, + } +} + +// FetchWithJavaScript fetches a page and executes JavaScript to get full DOM. 
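+// It first tries to render the page through a Node.js/Puppeteer subprocess
+// and, when that is unavailable or fails, falls back to fetchWithEnhancedHTTP,
+// which only sends browser-like headers and cannot execute scripts.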
+func (h *HeadlessParser) FetchWithJavaScript(ctx context.Context, profileURL string) (*goquery.Document, error) { + slog.Info("🌐 Fetching page with JavaScript execution", "url", profileURL) + + // Try using Node.js with Puppeteer if available + doc, err := h.tryPuppeteer(ctx, profileURL) + if err == nil { + return doc, nil + } + + slog.Debug("Puppeteer not available, trying alternative approach", "error", err) + + // Fallback to enhanced HTTP fetching with better headers and delays + return h.fetchWithEnhancedHTTP(ctx, profileURL) +} + +// AnalyzeJSLoadedStructure analyzes the JavaScript-loaded DOM structure. +func (h *HeadlessParser) AnalyzeJSLoadedStructure(_ context.Context, _ string) error { + slog.Info("πŸ” Analyzing JavaScript-loaded structure") + + return nil +} + +// tryPuppeteer attempts to use Node.js with Puppeteer for JavaScript execution. +func (h *HeadlessParser) tryPuppeteer(ctx context.Context, profileURL string) (*goquery.Document, error) { + script := h.generatePuppeteerScript(profileURL) + output, err := h.executePuppeteerScript(ctx, script) + if err != nil { + return nil, err + } + + return h.parsePuppeteerOutput(output) +} + +// generatePuppeteerScript creates the Node.js script for Puppeteer. +func (h *HeadlessParser) generatePuppeteerScript(profileURL string) string { + return fmt.Sprintf(` +const puppeteer = require('puppeteer'); + +(async () => { + try { + const browser = await puppeteer.launch({ + headless: true, + args: ['--no-sandbox', '--disable-setuid-sandbox', '--disable-dev-shm-usage'] + }); + const page = await browser.newPage(); + + await page.setUserAgent('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) ' + + 'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'); + + console.error('Navigating to URL: %s'); + await page.goto('%s', {waitUntil: 'networkidle2', timeout: 30000}); + + console.error('Waiting for JavaScript to execute...'); + await new Promise(resolve => setTimeout(resolve, 10000)); + + // Try to wait for specific elements + try { + await page.waitForSelector('.mouseKeyboard-view', {timeout: 15000}); + console.error('Found mouseKeyboard-view element'); + } catch (e) { + console.error('mouseKeyboard-view not found:', e.message); + } + + // Check for the specific selectors + const selectors = ['.stats-container', 'blz-section', '.option-15', '[data-stat]']; + for (const selector of selectors) { + try { + const elements = await page.$$(selector); + console.error('Selector ' + selector + ' found ' + elements.length + ' elements'); + } catch (e) { + console.error('Error checking selector ' + selector + ':', e.message); + } + } + + const html = await page.content(); + console.log(html); + await browser.close(); + } catch (error) { + console.error('Puppeteer error:', error.message); + process.exit(1); + } +})(); +`, profileURL, profileURL) +} + +// executePuppeteerScript runs the Node.js Puppeteer script. +func (h *HeadlessParser) executePuppeteerScript(ctx context.Context, script string) ([]byte, error) { + cmd := exec.CommandContext(ctx, "node", "-e", script) + output, err := cmd.CombinedOutput() + if err != nil { + slog.Error("Puppeteer script failed", "error", err, "output", string(output)) + + return nil, errors.Wrap(err, "failed to execute Puppeteer script") + } + + return output, nil +} + +// parsePuppeteerOutput parses the HTML output from Puppeteer. 
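+// The script prints the rendered HTML to stdout and diagnostics to stderr;
+// because executePuppeteerScript returns CombinedOutput, both streams end up
+// in the input that goquery parses here.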
+func (h *HeadlessParser) parsePuppeteerOutput(output []byte) (*goquery.Document, error) { + doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(output))) + if err != nil { + return nil, errors.Wrap(err, "failed to parse Puppeteer HTML output") + } + + slog.Info("βœ… Successfully fetched page with Puppeteer") + + return doc, nil +} + +// fetchWithEnhancedHTTP uses enhanced HTTP with better timing and headers. +func (h *HeadlessParser) fetchWithEnhancedHTTP(ctx context.Context, profileURL string) (*goquery.Document, error) { + slog.Info("πŸ”„ Using enhanced HTTP fetching with delays") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, profileURL, http.NoBody) + if err != nil { + return nil, errors.Wrap(err, "failed to create request") + } + + // Set comprehensive browser-like headers + req.Header.Set("User-Agent", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "+ + "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36") + req.Header.Set("Accept", + "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8") + req.Header.Set("Accept-Language", "en-US,en;q=0.9") + req.Header.Set("Accept-Encoding", "gzip, deflate, br") + req.Header.Set("Connection", "keep-alive") + req.Header.Set("Upgrade-Insecure-Requests", "1") + req.Header.Set("Sec-Fetch-Dest", "document") + req.Header.Set("Sec-Fetch-Mode", "navigate") + req.Header.Set("Sec-Fetch-Site", "none") + req.Header.Set("Cache-Control", "max-age=0") + + resp, err := h.client.Do(req) + if err != nil { + return nil, errors.Wrap(err, "HTTP request failed") + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, errors.Errorf("unexpected status code: %d", resp.StatusCode) + } + + doc, err := goquery.NewDocumentFromReader(resp.Body) + if err != nil { + return nil, errors.Wrap(err, "failed to parse HTML") + } + + slog.Info("πŸ“„ Enhanced HTTP fetch completed") + + return doc, nil +} diff --git a/cmd/ow-exporter/hero_metrics.go b/cmd/ow-exporter/hero_metrics.go new file mode 100644 index 00000000..0c3b31eb --- /dev/null +++ b/cmd/ow-exporter/hero_metrics.go @@ -0,0 +1,787 @@ +//nolint:govet,lll,tagalign // Multi-line struct tags and alignment are intentionally used for readability per CLAUDE.md guidelines +package main + +import ( + "reflect" + "time" +) + +// Constants for frequently used strings. +const ( + CountMetricType = "count" + DurationMetricType = "duration" + PercentageMetricType = "percentage" + NumberMetricType = "number" + MouseKeyboardViewActiveSelector = ".mouseKeyboard-view.is-active" + ControllerViewActiveSelector = ".controller-view.is-active" + QuickPlayViewActiveSelector = ".quickPlay-view.is-active" + CompetitiveViewActiveSelector = ".competitive-view.is-active" +) + +// CommonMetrics defines the 15 core metrics available for all heroes. 
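+// Every field carries ow, prometheus, help, path, and type struct tags that
+// GenerateMetricDefsWithContext reads via reflection to build the
+// corresponding MetricDef entries.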
+type CommonMetrics struct { + TimePlayed time.Duration `ow:"time_played" + prometheus:"ow_hero_time_played_seconds" + help:"Total time played on hero" + path:"[data-category-id='0x0860000000000021']" + type:"duration"` + + GamesWon int `ow:"games_won" + prometheus:"ow_hero_games_won_total" + help:"Total number of games won with hero" + path:"[data-category-id='0x0860000000000039']" + type:"number"` + + WinPercentage float64 `ow:"win_percentage" + prometheus:"ow_hero_win_percentage" + help:"Win percentage with hero" + path:"[data-category-id='0x08600000000003D1']" + type:"percentage"` + + WeaponAccuracy float64 `ow:"weapon_accuracy" + prometheus:"ow_hero_weapon_accuracy_percent" + help:"Best weapon accuracy percentage with hero" + path:"[data-category-id='0x08600000000001BB']" + type:"percentage"` + + EliminationsPerLife float64 `ow:"eliminations_per_life" + prometheus:"ow_hero_eliminations_per_life" + help:"Average eliminations per life with hero" + path:"[data-category-id='0x08600000000003D2']" + type:"number"` + + KillStreakBest int `ow:"kill_streak_best" + prometheus:"ow_hero_kill_streak_best" + help:"Best kill streak achieved with hero" + path:"[data-category-id='0x0860000000000223']" + type:"number"` + + MultikillBest int `ow:"multikill_best" + prometheus:"ow_hero_multikill_best" + help:"Best multikill achieved with hero" + path:"[data-category-id='0x0860000000000346']" + type:"number"` + + EliminationsPer10Min float64 `ow:"eliminations_per_10min" + prometheus:"ow_hero_eliminations_per_10min_avg" + help:"Average eliminations per 10 minutes with hero" + path:"[data-category-id='0x08600000000004D4']" + type:"number"` + + DeathsPer10Min float64 `ow:"deaths_per_10min" + prometheus:"ow_hero_deaths_per_10min_avg" + help:"Average deaths per 10 minutes with hero" + path:"[data-category-id='0x08600000000004D3']" + type:"number"` + + FinalBlowsPer10Min float64 `ow:"final_blows_per_10min" + prometheus:"ow_hero_final_blows_per_10min_avg" + help:"Average final blows per 10 minutes with hero" + path:"[data-category-id='0x08600000000004D5']" + type:"number"` + + SoloKillsPer10Min float64 `ow:"solo_kills_per_10min" + prometheus:"ow_hero_solo_kills_per_10min_avg" + help:"Average solo kills per 10 minutes with hero" + path:"[data-category-id='0x08600000000004DA']" + type:"number"` + + ObjectiveKillsPer10Min float64 `ow:"objective_kills_per_10min" + prometheus:"ow_hero_objective_kills_per_10min_avg" + help:"Average objective kills per 10 minutes with hero" + path:"[data-category-id='0x08600000000004D8']" + type:"number"` + + ObjectiveTimePer10Min time.Duration `ow:"objective_time_per_10min" + prometheus:"ow_hero_objective_time_per_10min_avg" + help:"Average objective time per 10 minutes with hero" + path:"[data-category-id='0x08600000000004D9']" + type:"duration"` + + HeroDamagePer10Min int64 `ow:"hero_damage_per_10min" + prometheus:"ow_hero_damage_per_10min_avg" + help:"Average hero damage per 10 minutes" + path:"[data-category-id='0x08600000000004BD']" + type:"number"` + + HealingPer10Min int64 `ow:"healing_per_10min" + prometheus:"ow_hero_healing_per_10min_avg" + help:"Average healing done per 10 minutes" + path:"[data-category-id='0x08600000000004D6']" + type:"number"` +} + +// Soldier76Metrics defines Soldier: 76 specific metrics. 
+type Soldier76Metrics struct { + CommonMetrics // Embedded common metrics + + HelixRocketKills int `ow:"helix_rocket_kills" + prometheus:"ow_hero_helix_rocket_kills_total" + help:"Total eliminations with helix rockets" + path:"[data-stat='helix_rocket_kills']" + type:"number"` + + HelixRocketKillsBest int `ow:"helix_rocket_kills_best" + prometheus:"ow_hero_helix_rocket_kills_best" + help:"Most helix rocket kills in a single game" + path:"[data-stat='helix_rocket_kills_best']" + type:"number"` + + BioticFieldHealing int64 `ow:"biotic_field_healing" + prometheus:"ow_hero_biotic_field_healing_total" + help:"Total healing provided by biotic field" + path:"[data-stat='biotic_field_healing']" + type:"number"` + + TacticalVisorKills int `ow:"tactical_visor_kills" + prometheus:"ow_hero_tactical_visor_kills_total" + help:"Total eliminations during tactical visor ultimate" + path:"[data-stat='tactical_visor_kills']" + type:"number"` +} + +// WidowmakerMetrics defines Widowmaker specific metrics. +type WidowmakerMetrics struct { + CommonMetrics // Embedded common metrics + + ScopedAccuracy float64 `ow:"scoped_accuracy" + prometheus:"ow_hero_scoped_accuracy_percent" + help:"Scoped weapon accuracy percentage" + path:"[data-stat='scoped_accuracy']" + type:"percentage"` + + ScopedCriticalHits int `ow:"scoped_critical_hits" + prometheus:"ow_hero_scoped_critical_hits_total" + help:"Total scoped critical hits" + path:"[data-stat='scoped_critical_hits']" + type:"number"` + + VenomMineKills int `ow:"venom_mine_kills" + prometheus:"ow_hero_venom_mine_kills_total" + help:"Total eliminations with venom mine" + path:"[data-stat='venom_mine_kills']" + type:"number"` + + InfraSightAssists int `ow:"infra_sight_assists" + prometheus:"ow_hero_infra_sight_assists_total" + help:"Team assists provided by infra-sight ultimate" + path:"[data-stat='infra_sight_assists']" + type:"number"` +} + +// GenjiMetrics defines Genji specific metrics. +type GenjiMetrics struct { + CommonMetrics // Embedded common metrics + + DeflectionKills int `ow:"deflection_kills" prometheus:"ow_hero_deflection_kills_total" help:"Total eliminations with deflected projectiles" path:"[data-stat='deflection_kills']" type:"number"` + SwiftStrikeKills int `ow:"swift_strike_kills" prometheus:"ow_hero_swift_strike_kills_total" help:"Total eliminations with swift strike" path:"[data-stat='swift_strike_kills']" type:"number"` + DragonbladeKills int `ow:"dragonblade_kills" prometheus:"ow_hero_dragonblade_kills_total" help:"Total eliminations during dragonblade ultimate" path:"[data-stat='dragonblade_kills']" type:"number"` + DamageDeflected int64 `ow:"damage_deflected" prometheus:"ow_hero_damage_deflected_total" help:"Total damage deflected by deflect ability" path:"[data-stat='damage_deflected']" type:"number"` +} + +// TorbjornMetrics defines TorbjΓΆrn specific metrics. 
+type TorbjornMetrics struct { + CommonMetrics // Embedded common metrics + + TurretKills int `ow:"turret_kills" prometheus:"ow_hero_turret_kills_total" help:"Total eliminations by deployed turret" path:"[data-stat='turret_kills']" type:"number"` + TurretDamage int64 `ow:"turret_damage" prometheus:"ow_hero_turret_damage_total" help:"Total damage dealt by turret" path:"[data-stat='turret_damage']" type:"number"` + HammerKills int `ow:"hammer_kills" prometheus:"ow_hero_hammer_kills_total" help:"Total eliminations with forge hammer" path:"[data-stat='hammer_kills']" type:"number"` + ArmorPacksCreated int `ow:"armor_packs_created" prometheus:"ow_hero_armor_packs_created_total" help:"Total armor packs created for teammates" path:"[data-stat='armor_packs_created']" type:"number"` + MoltenCoreKills int `ow:"molten_core_kills" prometheus:"ow_hero_molten_core_kills_total" help:"Eliminations during molten core ultimate" path:"[data-stat='molten_core_kills']" type:"number"` +} + +// MercyMetrics defines Mercy specific metrics. +type MercyMetrics struct { + CommonMetrics // Embedded common metrics + + DamageAmplified int64 `ow:"damage_amplified" prometheus:"ow_hero_damage_amplified_total" help:"Total damage amplified with damage boost" path:"[data-stat='damage_amplified']" type:"number"` + Resurrects int `ow:"resurrects" prometheus:"ow_hero_resurrects_total" help:"Total number of resurrections performed" path:"[data-stat='resurrects']" type:"number"` + PlayersRezed int `ow:"players_rezed" prometheus:"ow_hero_players_rezed_total" help:"Total players resurrected" path:"[data-stat='players_rezed']" type:"number"` + ValkyrieDamageAmp int64 `ow:"valkyrie_damage_amp" prometheus:"ow_hero_valkyrie_damage_amp_total" help:"Damage amplified during valkyrie ultimate" path:"[data-stat='valkyrie_damage_amp']" type:"number"` + ValkyrieHealing int64 `ow:"valkyrie_healing" prometheus:"ow_hero_valkyrie_healing_total" help:"Healing done during valkyrie ultimate" path:"[data-stat='valkyrie_healing']" type:"number"` +} + +// ReinhardtMetrics defines Reinhardt specific metrics. +type ReinhardtMetrics struct { + CommonMetrics // Embedded common metrics + + DamageBlocked int64 `ow:"damage_blocked" prometheus:"ow_hero_damage_blocked_total" help:"Total damage blocked by barrier shield" path:"[data-stat='damage_blocked']" type:"number"` + ChargeKills int `ow:"charge_kills" prometheus:"ow_hero_charge_kills_total" help:"Total eliminations with charge ability" path:"[data-stat='charge_kills']" type:"number"` + FireStrikeKills int `ow:"fire_strike_kills" prometheus:"ow_hero_fire_strike_kills_total" help:"Total eliminations with fire strike" path:"[data-stat='fire_strike_kills']" type:"number"` + EarthshatterKills int `ow:"earthshatter_kills" prometheus:"ow_hero_earthshatter_kills_total" help:"Eliminations during earthshatter ultimate" path:"[data-stat='earthshatter_kills']" type:"number"` + RocketHammerKills int `ow:"rocket_hammer_kills" prometheus:"ow_hero_rocket_hammer_kills_total" help:"Total eliminations with rocket hammer" path:"[data-stat='rocket_hammer_kills']" type:"number"` +} + +// AnaMetrics defines Ana specific metrics. 
+type AnaMetrics struct { + CommonMetrics // Embedded common metrics + + ScopedAccuracy float64 `ow:"scoped_accuracy" prometheus:"ow_hero_scoped_accuracy_percent" help:"Scoped weapon accuracy percentage" path:"[data-stat='scoped_accuracy']" type:"percentage"` + UnscopedAccuracy float64 `ow:"unscoped_accuracy" prometheus:"ow_hero_unscoped_accuracy_percent" help:"Unscoped weapon accuracy percentage" path:"[data-stat='unscoped_accuracy']" type:"percentage"` + EnemiesSlept int `ow:"enemies_slept" prometheus:"ow_hero_enemies_slept_total" help:"Total enemies put to sleep with sleep dart" path:"[data-stat='enemies_slept']" type:"number"` + BioticGrenadeKills int `ow:"biotic_grenade_kills" prometheus:"ow_hero_biotic_grenade_kills_total" help:"Total eliminations with biotic grenade" path:"[data-stat='biotic_grenade_kills']" type:"number"` + NanoboostAssists int `ow:"nanoboost_assists" prometheus:"ow_hero_nanoboost_assists_total" help:"Eliminations assisted by nanoboost ultimate" path:"[data-stat='nanoboost_assists']" type:"number"` +} + +// DVaMetrics defines D.Va specific metrics. +type DVaMetrics struct { + CommonMetrics // Embedded common metrics + + MechKills int `ow:"mech_kills" prometheus:"ow_hero_mech_kills_total" help:"Total eliminations while in mech" path:"[data-stat='mech_kills']" type:"number"` + PilotKills int `ow:"pilot_kills" prometheus:"ow_hero_pilot_kills_total" help:"Total eliminations while out of mech" path:"[data-stat='pilot_kills']" type:"number"` + DamageBlocked int64 `ow:"damage_blocked" prometheus:"ow_hero_damage_blocked_total" help:"Total damage blocked by defense matrix" path:"[data-stat='damage_blocked']" type:"number"` + SelfDestructKills int `ow:"self_destruct_kills" prometheus:"ow_hero_self_destruct_kills_total" help:"Eliminations with self-destruct ultimate" path:"[data-stat='self_destruct_kills']" type:"number"` + CallMechKills int `ow:"call_mech_kills" prometheus:"ow_hero_call_mech_kills_total" help:"Eliminations by calling down mech" path:"[data-stat='call_mech_kills']" type:"number"` +} + +// GenerateMetricDefs uses reflection to generate MetricDef from a hero struct (defaults to PC QuickPlay). +func GenerateMetricDefs(heroStruct any) map[string]MetricDef { + return GenerateMetricDefsWithContext(heroStruct, PlatformPC, GameModeQuickPlay) +} + +// GenerateMetricDefsWithContext uses reflection to generate MetricDef with platform/gamemode context. +func GenerateMetricDefsWithContext(heroStruct any, platform Platform, gameMode GameMode) map[string]MetricDef { + t := reflect.TypeOf(heroStruct) + metrics := make(map[string]MetricDef) + + for i := range t.NumField() { + field := t.Field(i) + + // Skip embedded CommonMetrics (will be handled separately) + if field.Anonymous { + continue + } + + owTag := field.Tag.Get("ow") + if owTag == "" { + continue + } + + // Generate platform/gamemode aware selector + baseSelector := field.Tag.Get("path") + selector := generatePlatformSelector(baseSelector, platform, gameMode) + + metrics[owTag] = MetricDef{ + PrometheusName: field.Tag.Get("prometheus"), + Help: field.Tag.Get("help"), + Selector: selector, + ValueType: field.Tag.Get("type"), + Unit: inferUnit(field.Tag.Get("type")), + } + } + + return metrics +} + +// inferUnit determines the unit based on value type. 
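+// Durations map to "seconds", percentages to "percent", and everything else
+// (including plain numbers) falls back to "count".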
+func inferUnit(valueType string) string { + switch valueType { + case DurationMetricType: + return "seconds" + case PercentageMetricType: + return "percent" + case NumberMetricType: + return CountMetricType + default: + return CountMetricType + } +} + +// HeroMetricsRegistry maps hero IDs to their metric generation functions. +var HeroMetricsRegistry = map[string]func() any{ + // Initial 8 heroes + "soldier-76": func() any { return Soldier76Metrics{} }, + "widowmaker": func() any { return WidowmakerMetrics{} }, + "genji": func() any { return GenjiMetrics{} }, + "torbjorn": func() any { return TorbjornMetrics{} }, + "mercy": func() any { return MercyMetrics{} }, + "reinhardt": func() any { return ReinhardtMetrics{} }, + "ana": func() any { return AnaMetrics{} }, + "dva": func() any { return DVaMetrics{} }, + + // Support heroes + "illari": func() any { return IllariMetrics{} }, + "lifeweaver": func() any { return LifeweaverMetrics{} }, + "kiriko": func() any { return KirikoMetrics{} }, + "baptiste": func() any { return BaptisteMetrics{} }, + "lucio": func() any { return LucioMetrics{} }, + "zenyatta": func() any { return ZenyattaMetrics{} }, + "brigitte": func() any { return BrigitteMetrics{} }, + + // DPS heroes + "cassidy": func() any { return CassidyMetrics{} }, + "tracer": func() any { return TracerMetrics{} }, + "pharah": func() any { return PharahMetrics{} }, + "sojourn": func() any { return SojournMetrics{} }, + "mei": func() any { return MeiMetrics{} }, + "junkrat": func() any { return JunkratMetrics{} }, + "reaper": func() any { return ReaperMetrics{} }, + "hanzo": func() any { return HanzoMetrics{} }, + + // Tank heroes + "winston": func() any { return WinstonMetrics{} }, + "roadhog": func() any { return RoadhogMetrics{} }, + "zarya": func() any { return ZaryaMetrics{} }, + "mauga": func() any { return MaugaMetrics{} }, + "hazard": func() any { return HazardMetrics{} }, + "junker-queen": func() any { return JunkerQueenMetrics{} }, + "orisa": func() any { return OrisaMetrics{} }, + "sigma": func() any { return SigmaMetrics{} }, + "wrecking-ball": func() any { return WreckingBallMetrics{} }, + + // Additional DPS heroes + "doomfist": func() any { return DoomfistMetrics{} }, + "sombra": func() any { return SombraMetrics{} }, + "symmetra": func() any { return SymmetraMetrics{} }, + "bastion": func() any { return BastionMetrics{} }, + "ashe": func() any { return AsheMetrics{} }, + "echo": func() any { return EchoMetrics{} }, + "venture": func() any { return VentureMetrics{} }, + + // Additional Tank heroes + "ramattra": func() any { return RamattraMetrics{} }, + + // Additional Support heroes + "moira": func() any { return MoiraMetrics{} }, + "juno": func() any { return JunoMetrics{} }, + "wuyang": func() any { return WuyangMetrics{} }, + "freja": func() any { return FrejaMetrics{} }, +} + +// GetHeroMetrics returns MetricDef map for a specific hero (defaults to PC QuickPlay). +func GetHeroMetrics(heroID string) map[string]MetricDef { + return GetHeroMetricsForPlatform(heroID, PlatformPC, GameModeQuickPlay) +} + +// GetHeroMetricsForPlatform returns MetricDef map for a specific hero with platform and gamemode context. 
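+// Unknown hero IDs fall back to the common metrics only; for registered
+// heroes the metrics generated from the embedded CommonMetrics struct are
+// merged with the hero-specific fields, hero-specific entries winning on any
+// key collision.
+//
+// Illustrative usage (values taken from the MercyMetrics struct tags):
+//
+//	defs := GetHeroMetricsForPlatform("mercy", PlatformPC, GameModeCompetitive)
+//	_ = defs["resurrects"].PrometheusName // "ow_hero_resurrects_total"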
+func GetHeroMetricsForPlatform(heroID string, platform Platform, gameMode GameMode) map[string]MetricDef { + factory, exists := HeroMetricsRegistry[heroID] + if !exists { + // Fallback to common metrics only with platform context + return generateCommonMetricsWithContext(platform, gameMode) + } + + heroStruct := factory() + heroSpecific := GenerateMetricDefsWithContext(heroStruct, platform, gameMode) + + // Merge with common metrics from embedded struct + commonStruct := CommonMetrics{} + commonMetrics := GenerateMetricDefsWithContext(commonStruct, platform, gameMode) + + // Combine both maps + result := make(map[string]MetricDef) + for k, v := range commonMetrics { + result[k] = v + } + + for k, v := range heroSpecific { + result[k] = v + } + + return result +} + +// generateCommonMetricsWithContext generates common metrics with platform context. +func generateCommonMetricsWithContext(platform Platform, gameMode GameMode) map[string]MetricDef { + commonStruct := CommonMetrics{} + + return GenerateMetricDefsWithContext(commonStruct, platform, gameMode) +} + +// generatePlatformSelector creates platform/gamemode aware CSS selectors. +func generatePlatformSelector(baseSelector string, platform Platform, gameMode GameMode) string { + if baseSelector == "" { + return "" + } + + // Platform selector wrapper + var platformWrapper string + + switch platform { + case PlatformPC: + platformWrapper = MouseKeyboardViewActiveSelector + case PlatformConsole: + platformWrapper = ".controller-view.is-active" + default: + platformWrapper = MouseKeyboardViewActiveSelector // Default to PC + } + + // GameMode selector wrapper + var gameModeWrapper string + + switch gameMode { + case GameModeQuickPlay: + gameModeWrapper = QuickPlayViewActiveSelector + case GameModeCompetitive: + gameModeWrapper = ".competitive-view.is-active" + default: + gameModeWrapper = QuickPlayViewActiveSelector // Default to QuickPlay + } + + // Combine platform + gamemode + base selector + return platformWrapper + " " + gameModeWrapper + " " + baseSelector +} + +// IllariMetrics defines Illari specific metrics. +type IllariMetrics struct { + CommonMetrics // Embedded common metrics + + SolarRifleHealing int64 `ow:"solar_rifle_healing" prometheus:"ow_hero_solar_rifle_healing_total" help:"Total healing done with solar rifle" path:"[data-stat='solar_rifle_healing']" type:"number"` + HealingPylonHealing int64 `ow:"healing_pylon_healing" prometheus:"ow_hero_healing_pylon_healing_total" help:"Total healing provided by healing pylon" path:"[data-stat='healing_pylon_healing']" type:"number"` + CaptiveSunKills int `ow:"captive_sun_kills" prometheus:"ow_hero_captive_sun_kills_total" help:"Eliminations with captive sun ultimate" path:"[data-stat='captive_sun_kills']" type:"number"` + OutburstKills int `ow:"outburst_kills" prometheus:"ow_hero_outburst_kills_total" help:"Eliminations with outburst ability" path:"[data-stat='outburst_kills']" type:"number"` +} + +// CassidyMetrics defines Cassidy specific metrics. 
+type CassidyMetrics struct { + CommonMetrics // Embedded common metrics + + PeacekeeperAccuracy float64 `ow:"peacekeeper_accuracy" prometheus:"ow_hero_peacekeeper_accuracy_percent" help:"Peacekeeper weapon accuracy percentage" path:"[data-stat='peacekeeper_accuracy']" type:"percentage"` + FlashbangEnemies int `ow:"flashbang_enemies" prometheus:"ow_hero_flashbang_enemies_total" help:"Total enemies stunned with flashbang" path:"[data-stat='flashbang_enemies']" type:"number"` + CombatRollKills int `ow:"combat_roll_kills" prometheus:"ow_hero_combat_roll_kills_total" help:"Eliminations after using combat roll" path:"[data-stat='combat_roll_kills']" type:"number"` + DeadeyeKills int `ow:"deadeye_kills" prometheus:"ow_hero_deadeye_kills_total" help:"Eliminations with deadeye ultimate" path:"[data-stat='deadeye_kills']" type:"number"` +} + +// LifeweaverMetrics defines Lifeweaver specific metrics. +type LifeweaverMetrics struct { + CommonMetrics // Embedded common metrics + + HealingBlossomHealing int64 `ow:"healing_blossom_healing" prometheus:"ow_hero_healing_blossom_healing_total" help:"Total healing done with healing blossom" path:"[data-stat='healing_blossom_healing']" type:"number"` + LifeGripSaves int `ow:"life_grip_saves" prometheus:"ow_hero_life_grip_saves_total" help:"Teammates saved with life grip" path:"[data-stat='life_grip_saves']" type:"number"` + PetalPlatformUptime time.Duration `ow:"petal_platform_uptime" prometheus:"ow_hero_petal_platform_uptime_seconds" help:"Total uptime of petal platforms" path:"[data-stat='petal_platform_uptime']" type:"duration"` + TreeOfLifeHealing int64 `ow:"tree_of_life_healing" prometheus:"ow_hero_tree_of_life_healing_total" help:"Healing provided by tree of life ultimate" path:"[data-stat='tree_of_life_healing']" type:"number"` +} + +// TracerMetrics defines Tracer specific metrics. +type TracerMetrics struct { + CommonMetrics // Embedded common metrics + + PulseGunsAccuracy float64 `ow:"pulse_guns_accuracy" prometheus:"ow_hero_pulse_guns_accuracy_percent" help:"Pulse guns weapon accuracy percentage" path:"[data-stat='pulse_guns_accuracy']" type:"percentage"` + BlinkDistance float64 `ow:"blink_distance" prometheus:"ow_hero_blink_distance_meters" help:"Total distance traveled with blink" path:"[data-stat='blink_distance']" type:"number"` + RecallHealing int64 `ow:"recall_healing" prometheus:"ow_hero_recall_healing_total" help:"Health recovered using recall" path:"[data-stat='recall_healing']" type:"number"` + PulseBombKills int `ow:"pulse_bomb_kills" prometheus:"ow_hero_pulse_bomb_kills_total" help:"Eliminations with pulse bomb ultimate" path:"[data-stat='pulse_bomb_kills']" type:"number"` +} + +// KirikoMetrics defines Kiriko specific metrics. 
+type KirikoMetrics struct { + CommonMetrics // Embedded common metrics + + HealingOfuudaHealing int64 `ow:"healing_ofuuda_healing" prometheus:"ow_hero_healing_ofuuda_healing_total" help:"Total healing done with healing ofuuda" path:"[data-stat='healing_ofuuda_healing']" type:"number"` + KunaiCriticalHits int `ow:"kunai_critical_hits" prometheus:"ow_hero_kunai_critical_hits_total" help:"Critical hits with kunai" path:"[data-stat='kunai_critical_hits']" type:"number"` + SwiftStepTeleports int `ow:"swift_step_teleports" prometheus:"ow_hero_swift_step_teleports_total" help:"Number of swift step teleports used" path:"[data-stat='swift_step_teleports']" type:"number"` + KitsuneFinalBlows int `ow:"kitsune_final_blows" prometheus:"ow_hero_kitsune_final_blows_total" help:"Final blows during kitsune rush ultimate" path:"[data-stat='kitsune_final_blows']" type:"number"` +} + +// PharahMetrics defines Pharah specific metrics. +type PharahMetrics struct { + CommonMetrics // Embedded common metrics + + RocketLauncherAccuracy float64 `ow:"rocket_launcher_accuracy" prometheus:"ow_hero_rocket_launcher_accuracy_percent" help:"Rocket launcher weapon accuracy percentage" path:"[data-stat='rocket_launcher_accuracy']" type:"percentage"` + ConcussiveBlastKills int `ow:"concussive_blast_kills" prometheus:"ow_hero_concussive_blast_kills_total" help:"Environmental kills with concussive blast" path:"[data-stat='concussive_blast_kills']" type:"number"` + BarrageKills int `ow:"barrage_kills" prometheus:"ow_hero_barrage_kills_total" help:"Eliminations with barrage ultimate" path:"[data-stat='barrage_kills']" type:"number"` + AirborneTime time.Duration `ow:"airborne_time" prometheus:"ow_hero_airborne_time_seconds" help:"Total time spent airborne" path:"[data-stat='airborne_time']" type:"duration"` +} + +// WinstonMetrics defines Winston specific metrics. +type WinstonMetrics struct { + CommonMetrics // Embedded common metrics + + TeslaCannonKills int `ow:"tesla_cannon_kills" prometheus:"ow_hero_tesla_cannon_kills_total" help:"Eliminations with tesla cannon" path:"[data-stat='tesla_cannon_kills']" type:"number"` + JumpPackKills int `ow:"jump_pack_kills" prometheus:"ow_hero_jump_pack_kills_total" help:"Eliminations with jump pack" path:"[data-stat='jump_pack_kills']" type:"number"` + BarrierProjectorUptime time.Duration `ow:"barrier_projector_uptime" prometheus:"ow_hero_barrier_projector_uptime_seconds" help:"Total uptime of barrier projector" path:"[data-stat='barrier_projector_uptime']" type:"duration"` + PrimalRageKills int `ow:"primal_rage_kills" prometheus:"ow_hero_primal_rage_kills_total" help:"Eliminations during primal rage ultimate" path:"[data-stat='primal_rage_kills']" type:"number"` +} + +// RoadhogMetrics defines Roadhog specific metrics. 
+type RoadhogMetrics struct { + CommonMetrics // Embedded common metrics + + ScrapGunAccuracy float64 `ow:"scrap_gun_accuracy" prometheus:"ow_hero_scrap_gun_accuracy_percent" help:"Scrap gun weapon accuracy percentage" path:"[data-stat='scrap_gun_accuracy']" type:"percentage"` + ChainHookAccuracy float64 `ow:"chain_hook_accuracy" prometheus:"ow_hero_chain_hook_accuracy_percent" help:"Chain hook accuracy percentage" path:"[data-stat='chain_hook_accuracy']" type:"percentage"` + ChainHookKills int `ow:"chain_hook_kills" prometheus:"ow_hero_chain_hook_kills_total" help:"Eliminations after chain hook" path:"[data-stat='chain_hook_kills']" type:"number"` + TakeABreatheHealing int64 `ow:"take_a_breathe_healing" prometheus:"ow_hero_take_a_breathe_healing_total" help:"Health recovered with take a breathe" path:"[data-stat='take_a_breathe_healing']" type:"number"` + WholePigKills int `ow:"whole_pig_kills" prometheus:"ow_hero_whole_pig_kills_total" help:"Eliminations with whole hog ultimate" path:"[data-stat='whole_pig_kills']" type:"number"` +} + +// SojournMetrics defines Sojourn specific metrics. +type SojournMetrics struct { + CommonMetrics // Embedded common metrics + + RailgunAccuracy float64 `ow:"railgun_accuracy" prometheus:"ow_hero_railgun_accuracy_percent" help:"Railgun weapon accuracy percentage" path:"[data-stat='railgun_accuracy']" type:"percentage"` + RailgunCriticalHits int `ow:"railgun_critical_hits" prometheus:"ow_hero_railgun_critical_hits_total" help:"Critical hits with railgun" path:"[data-stat='railgun_critical_hits']" type:"number"` + PowerSlideKills int `ow:"power_slide_kills" prometheus:"ow_hero_power_slide_kills_total" help:"Eliminations using power slide" path:"[data-stat='power_slide_kills']" type:"number"` + OverclockKills int `ow:"overclock_kills" prometheus:"ow_hero_overclock_kills_total" help:"Eliminations during overclock ultimate" path:"[data-stat='overclock_kills']" type:"number"` +} + +// BaptisteMetrics defines Baptiste specific metrics. +type BaptisteMetrics struct { + CommonMetrics // Embedded common metrics + + BioticLauncherHealing int64 `ow:"biotic_launcher_healing" prometheus:"ow_hero_biotic_launcher_healing_total" help:"Total healing done with biotic launcher" path:"[data-stat='biotic_launcher_healing']" type:"number"` + RegenerativeBurstHealing int64 `ow:"regenerative_burst_healing" prometheus:"ow_hero_regenerative_burst_healing_total" help:"Healing provided by regenerative burst" path:"[data-stat='regenerative_burst_healing']" type:"number"` + ImmortalityFieldSaves int `ow:"immortality_field_saves" prometheus:"ow_hero_immortality_field_saves_total" help:"Teammates saved with immortality field" path:"[data-stat='immortality_field_saves']" type:"number"` + AmplificationMatrixDamage int64 `ow:"amplification_matrix_damage" prometheus:"ow_hero_amplification_matrix_damage_total" help:"Damage amplified by amplification matrix ultimate" path:"[data-stat='amplification_matrix_damage']" type:"number"` +} + +// MeiMetrics defines Mei specific metrics. 
+type MeiMetrics struct { + CommonMetrics // Embedded common metrics + + EndothermicBlasterAccuracy float64 `ow:"endothermic_blaster_accuracy" prometheus:"ow_hero_endothermic_blaster_accuracy_percent" help:"Endothermic blaster weapon accuracy percentage" path:"[data-stat='endothermic_blaster_accuracy']" type:"percentage"` + EnemiesFrozen int `ow:"enemies_frozen" prometheus:"ow_hero_enemies_frozen_total" help:"Total enemies frozen" path:"[data-stat='enemies_frozen']" type:"number"` + IceWallUptime time.Duration `ow:"ice_wall_uptime" prometheus:"ow_hero_ice_wall_uptime_seconds" help:"Total uptime of ice walls" path:"[data-stat='ice_wall_uptime']" type:"duration"` + BlizzardKills int `ow:"blizzard_kills" prometheus:"ow_hero_blizzard_kills_total" help:"Eliminations with blizzard ultimate" path:"[data-stat='blizzard_kills']" type:"number"` +} + +// ZaryaMetrics defines Zarya specific metrics. +type ZaryaMetrics struct { + CommonMetrics // Embedded common metrics + + ParticleCannonKills int `ow:"particle_cannon_kills" prometheus:"ow_hero_particle_cannon_kills_total" help:"Eliminations with particle cannon" path:"[data-stat='particle_cannon_kills']" type:"number"` + ParticleBarrierDamageAbsorbed int64 `ow:"particle_barrier_absorbed" prometheus:"ow_hero_particle_barrier_absorbed_total" help:"Damage absorbed by particle barriers" path:"[data-stat='particle_barrier_absorbed']" type:"number"` + ProjectedBarrierSaves int `ow:"projected_barrier_saves" prometheus:"ow_hero_projected_barrier_saves_total" help:"Teammates saved with projected barrier" path:"[data-stat='projected_barrier_saves']" type:"number"` + GravitonSurgeKills int `ow:"graviton_surge_kills" prometheus:"ow_hero_graviton_surge_kills_total" help:"Eliminations with graviton surge ultimate" path:"[data-stat='graviton_surge_kills']" type:"number"` + HighEnergyKills int `ow:"high_energy_kills" prometheus:"ow_hero_high_energy_kills_total" help:"Eliminations while at high energy" path:"[data-stat='high_energy_kills']" type:"number"` +} + +// JunkratMetrics defines Junkrat specific metrics. +type JunkratMetrics struct { + CommonMetrics // Embedded common metrics + + FragLauncherAccuracy float64 `ow:"frag_launcher_accuracy" prometheus:"ow_hero_frag_launcher_accuracy_percent" help:"Frag launcher weapon accuracy percentage" path:"[data-stat='frag_launcher_accuracy']" type:"percentage"` + ConcussionMineKills int `ow:"concussion_mine_kills" prometheus:"ow_hero_concussion_mine_kills_total" help:"Eliminations with concussion mine" path:"[data-stat='concussion_mine_kills']" type:"number"` + SteelTrapKills int `ow:"steel_trap_kills" prometheus:"ow_hero_steel_trap_kills_total" help:"Eliminations with steel trap" path:"[data-stat='steel_trap_kills']" type:"number"` + RipTireKills int `ow:"rip_tire_kills" prometheus:"ow_hero_rip_tire_kills_total" help:"Eliminations with rip-tire ultimate" path:"[data-stat='rip_tire_kills']" type:"number"` + EnemiesTrapped int `ow:"enemies_trapped" prometheus:"ow_hero_enemies_trapped_total" help:"Enemies caught in steel trap" path:"[data-stat='enemies_trapped']" type:"number"` +} + +// LucioMetrics defines Lúcio specific metrics. 
+type LucioMetrics struct { + CommonMetrics // Embedded common metrics + + SonicAmplifierAccuracy float64 `ow:"sonic_amplifier_accuracy" prometheus:"ow_hero_sonic_amplifier_accuracy_percent" help:"Sonic amplifier weapon accuracy percentage" path:"[data-stat='sonic_amplifier_accuracy']" type:"percentage"` + SoundBoopKills int `ow:"sound_boop_kills" prometheus:"ow_hero_sound_boop_kills_total" help:"Environmental kills with sound wave" path:"[data-stat='sound_boop_kills']" type:"number"` + WallRideTime time.Duration `ow:"wall_ride_time" prometheus:"ow_hero_wall_ride_time_seconds" help:"Total time spent wall riding" path:"[data-stat='wall_ride_time']" type:"duration"` + SoundBarrierProvided int64 `ow:"sound_barrier_provided" prometheus:"ow_hero_sound_barrier_provided_total" help:"Shield health provided by sound barrier ultimate" path:"[data-stat='sound_barrier_provided']" type:"number"` +} + +// ReaperMetrics defines Reaper specific metrics. +type ReaperMetrics struct { + CommonMetrics // Embedded common metrics + + HellfireShotgunsAccuracy float64 `ow:"hellfire_shotguns_accuracy" prometheus:"ow_hero_hellfire_shotguns_accuracy_percent" help:"Hellfire shotguns weapon accuracy percentage" path:"[data-stat='hellfire_shotguns_accuracy']" type:"percentage"` + WraithFormDistance float64 `ow:"wraith_form_distance" prometheus:"ow_hero_wraith_form_distance_meters" help:"Distance traveled in wraith form" path:"[data-stat='wraith_form_distance']" type:"number"` + ShadowStepTeleports int `ow:"shadow_step_teleports" prometheus:"ow_hero_shadow_step_teleports_total" help:"Number of shadow step teleports" path:"[data-stat='shadow_step_teleports']" type:"number"` + DeathBlossomKills int `ow:"death_blossom_kills" prometheus:"ow_hero_death_blossom_kills_total" help:"Eliminations with death blossom ultimate" path:"[data-stat='death_blossom_kills']" type:"number"` +} + +// ZenyattaMetrics defines Zenyatta specific metrics. +type ZenyattaMetrics struct { + CommonMetrics // Embedded common metrics + + OrbOfDestructionAccuracy float64 `ow:"orb_destruction_accuracy" prometheus:"ow_hero_orb_destruction_accuracy_percent" help:"Orb of destruction weapon accuracy percentage" path:"[data-stat='orb_destruction_accuracy']" type:"percentage"` + OrbOfDiscordAssists int `ow:"orb_discord_assists" prometheus:"ow_hero_orb_discord_assists_total" help:"Eliminations assisted by orb of discord" path:"[data-stat='orb_discord_assists']" type:"number"` + OrbOfHarmonyHealing int64 `ow:"orb_harmony_healing" prometheus:"ow_hero_orb_harmony_healing_total" help:"Healing provided by orb of harmony" path:"[data-stat='orb_harmony_healing']" type:"number"` + TranscendenceHealing int64 `ow:"transcendence_healing" prometheus:"ow_hero_transcendence_healing_total" help:"Healing provided during transcendence ultimate" path:"[data-stat='transcendence_healing']" type:"number"` +} + +// MaugaMetrics defines Mauga specific metrics. 
+type MaugaMetrics struct { + CommonMetrics // Embedded common metrics + + IncendiaryChaingunDamage int64 `ow:"incendiary_chaingun_damage" prometheus:"ow_hero_incendiary_chaingun_damage_total" help:"Damage dealt with incendiary chaingun" path:"[data-stat='incendiary_chaingun_damage']" type:"number"` + VolatileChaingunDamage int64 `ow:"volatile_chaingun_damage" prometheus:"ow_hero_volatile_chaingun_damage_total" help:"Damage dealt with volatile chaingun" path:"[data-stat='volatile_chaingun_damage']" type:"number"` + OverrunKills int `ow:"overrun_kills" prometheus:"ow_hero_overrun_kills_total" help:"Eliminations with overrun charge" path:"[data-stat='overrun_kills']" type:"number"` + CageFightKills int `ow:"cage_fight_kills" prometheus:"ow_hero_cage_fight_kills_total" help:"Eliminations during cage fight ultimate" path:"[data-stat='cage_fight_kills']" type:"number"` +} + +// BrigitteMetrics defines Brigitte specific metrics. +type BrigitteMetrics struct { + CommonMetrics // Embedded common metrics + + RocketFlailAccuracy float64 `ow:"rocket_flail_accuracy" prometheus:"ow_hero_rocket_flail_accuracy_percent" help:"Rocket flail weapon accuracy percentage" path:"[data-stat='rocket_flail_accuracy']" type:"percentage"` + WhipShotKills int `ow:"whip_shot_kills" prometheus:"ow_hero_whip_shot_kills_total" help:"Environmental kills with whip shot" path:"[data-stat='whip_shot_kills']" type:"number"` + RepairPackHealing int64 `ow:"repair_pack_healing" prometheus:"ow_hero_repair_pack_healing_total" help:"Healing provided by repair pack" path:"[data-stat='repair_pack_healing']" type:"number"` + BarrierShieldDamageBlocked int64 `ow:"barrier_shield_blocked" prometheus:"ow_hero_barrier_shield_blocked_total" help:"Damage blocked by barrier shield" path:"[data-stat='barrier_shield_blocked']" type:"number"` + RallyShieldProvided int64 `ow:"rally_shield_provided" prometheus:"ow_hero_rally_shield_provided_total" help:"Shield health provided by rally ultimate" path:"[data-stat='rally_shield_provided']" type:"number"` +} + +// HazardMetrics defines Hazard specific metrics. +type HazardMetrics struct { + CommonMetrics // Embedded common metrics + + SpikeTrapKills int `ow:"spike_trap_kills" prometheus:"ow_hero_spike_trap_kills_total" help:"Eliminations with spike trap" path:"[data-stat='spike_trap_kills']" type:"number"` + ViolentLeapKills int `ow:"violent_leap_kills" prometheus:"ow_hero_violent_leap_kills_total" help:"Eliminations with violent leap" path:"[data-stat='violent_leap_kills']" type:"number"` + DowntimeUptime time.Duration `ow:"downtime_uptime" prometheus:"ow_hero_downtime_uptime_seconds" help:"Total uptime of downtime walls" path:"[data-stat='downtime_uptime']" type:"duration"` + VanadiumRageKills int `ow:"vanadium_rage_kills" prometheus:"ow_hero_vanadium_rage_kills_total" help:"Eliminations during vanadium rage ultimate" path:"[data-stat='vanadium_rage_kills']" type:"number"` +} + +// JunkerQueenMetrics defines Junker Queen specific metrics. 
+type JunkerQueenMetrics struct { + CommonMetrics // Embedded common metrics + + ScattergunAccuracy float64 `ow:"scattergun_accuracy" prometheus:"ow_hero_scattergun_accuracy_percent" help:"Scattergun weapon accuracy percentage" path:"[data-stat='scattergun_accuracy']" type:"percentage"` + JaggedBladeKills int `ow:"jagged_blade_kills" prometheus:"ow_hero_jagged_blade_kills_total" help:"Eliminations with jagged blade" path:"[data-stat='jagged_blade_kills']" type:"number"` + CommandingShoutHealing int64 `ow:"commanding_shout_healing" prometheus:"ow_hero_commanding_shout_healing_total" help:"Healing provided by commanding shout" path:"[data-stat='commanding_shout_healing']" type:"number"` + RampageKills int `ow:"rampage_kills" prometheus:"ow_hero_rampage_kills_total" help:"Eliminations with rampage ultimate" path:"[data-stat='rampage_kills']" type:"number"` +} + +// HanzoMetrics defines Hanzo specific metrics. +type HanzoMetrics struct { + CommonMetrics // Embedded common metrics + + StormBowAccuracy float64 `ow:"storm_bow_accuracy" prometheus:"ow_hero_storm_bow_accuracy_percent" help:"Storm bow weapon accuracy percentage" path:"[data-stat='storm_bow_accuracy']" type:"percentage"` + SonicArrowAssists int `ow:"sonic_arrow_assists" prometheus:"ow_hero_sonic_arrow_assists_total" help:"Eliminations assisted by sonic arrow" path:"[data-stat='sonic_arrow_assists']" type:"number"` + ScatterArrowKills int `ow:"scatter_arrow_kills" prometheus:"ow_hero_scatter_arrow_kills_total" help:"Eliminations with scatter arrow" path:"[data-stat='scatter_arrow_kills']" type:"number"` + DragonstrikeKills int `ow:"dragonstrike_kills" prometheus:"ow_hero_dragonstrike_kills_total" help:"Eliminations with dragonstrike ultimate" path:"[data-stat='dragonstrike_kills']" type:"number"` +} + +// DoomfistMetrics defines Doomfist specific metrics. +type DoomfistMetrics struct { + CommonMetrics // Embedded common metrics + + HandCannonAccuracy float64 `ow:"hand_cannon_accuracy" prometheus:"ow_hero_hand_cannon_accuracy_percent" help:"Hand cannon weapon accuracy percentage" path:"[data-stat='hand_cannon_accuracy']" type:"percentage"` + SeismicSlamKills int `ow:"seismic_slam_kills" prometheus:"ow_hero_seismic_slam_kills_total" help:"Eliminations with seismic slam" path:"[data-stat='seismic_slam_kills']" type:"number"` + RocketPunchKills int `ow:"rocket_punch_kills" prometheus:"ow_hero_rocket_punch_kills_total" help:"Eliminations with rocket punch" path:"[data-stat='rocket_punch_kills']" type:"number"` + MeteorStrikeKills int `ow:"meteor_strike_kills" prometheus:"ow_hero_meteor_strike_kills_total" help:"Eliminations with meteor strike ultimate" path:"[data-stat='meteor_strike_kills']" type:"number"` +} + +// MoiraMetrics defines Moira specific metrics. 
+type MoiraMetrics struct { + CommonMetrics // Embedded common metrics + + BioticGraspAccuracy float64 `ow:"biotic_grasp_accuracy" prometheus:"ow_hero_biotic_grasp_accuracy_percent" help:"Biotic grasp weapon accuracy percentage" path:"[data-stat='biotic_grasp_accuracy']" type:"percentage"` + CoalescenceKills int `ow:"coalescence_kills" prometheus:"ow_hero_coalescence_kills_total" help:"Eliminations with coalescence ultimate" path:"[data-stat='coalescence_kills']" type:"number"` + BioticOrbKills int `ow:"biotic_orb_kills" prometheus:"ow_hero_biotic_orb_kills_total" help:"Eliminations with biotic orb" path:"[data-stat='biotic_orb_kills']" type:"number"` + SelfHealing int64 `ow:"self_healing" prometheus:"ow_hero_self_healing_total" help:"Self healing done" path:"[data-stat='self_healing']" type:"number"` +} + +// OrisaMetrics defines Orisa specific metrics. +type OrisaMetrics struct { + CommonMetrics // Embedded common metrics + + FusionDriverAccuracy float64 `ow:"fusion_driver_accuracy" prometheus:"ow_hero_fusion_driver_accuracy_percent" help:"Fusion driver weapon accuracy percentage" path:"[data-stat='fusion_driver_accuracy']" type:"percentage"` + EnergyJavelinKills int `ow:"energy_javelin_kills" prometheus:"ow_hero_energy_javelin_kills_total" help:"Eliminations with energy javelin" path:"[data-stat='energy_javelin_kills']" type:"number"` + TerraForceKills int `ow:"terra_force_kills" prometheus:"ow_hero_terra_force_kills_total" help:"Eliminations with terra force ultimate" path:"[data-stat='terra_force_kills']" type:"number"` + DamageAmplified int64 `ow:"damage_amplified" prometheus:"ow_hero_damage_amplified_total" help:"Damage amplified for teammates" path:"[data-stat='damage_amplified']" type:"number"` +} + +// SigmaMetrics defines Sigma specific metrics. +type SigmaMetrics struct { + CommonMetrics // Embedded common metrics + + HyperSpheresAccuracy float64 `ow:"hyper_spheres_accuracy" prometheus:"ow_hero_hyper_spheres_accuracy_percent" help:"Hyper spheres weapon accuracy percentage" path:"[data-stat='hyper_spheres_accuracy']" type:"percentage"` + AccretionKills int `ow:"accretion_kills" prometheus:"ow_hero_accretion_kills_total" help:"Eliminations with accretion" path:"[data-stat='accretion_kills']" type:"number"` + GraviticFluxKills int `ow:"gravitic_flux_kills" prometheus:"ow_hero_gravitic_flux_kills_total" help:"Eliminations with gravitic flux ultimate" path:"[data-stat='gravitic_flux_kills']" type:"number"` + DamageAbsorbed int64 `ow:"damage_absorbed" prometheus:"ow_hero_damage_absorbed_total" help:"Damage absorbed by experimental barrier" path:"[data-stat='damage_absorbed']" type:"number"` +} + +// SombraMetrics defines Sombra specific metrics. 
+type SombraMetrics struct { + CommonMetrics // Embedded common metrics + + MachineGunAccuracy float64 `ow:"machine_gun_accuracy" prometheus:"ow_hero_machine_gun_accuracy_percent" help:"Machine gun weapon accuracy percentage" path:"[data-stat='machine_gun_accuracy']" type:"percentage"` + EnemiesHacked int `ow:"enemies_hacked" prometheus:"ow_hero_enemies_hacked_total" help:"Total enemies hacked" path:"[data-stat='enemies_hacked']" type:"number"` + EnemiesEMPd int `ow:"enemies_empd" prometheus:"ow_hero_enemies_empd_total" help:"Total enemies hit with EMP ultimate" path:"[data-stat='enemies_empd']" type:"number"` + HealthPacksHacked int `ow:"health_packs_hacked" prometheus:"ow_hero_health_packs_hacked_total" help:"Total health packs hacked" path:"[data-stat='health_packs_hacked']" type:"number"` +} + +// SymmetraMetrics defines Symmetra specific metrics. +type SymmetraMetrics struct { + CommonMetrics // Embedded common metrics + + PhotonProjectorAccuracy float64 `ow:"photon_projector_accuracy" prometheus:"ow_hero_photon_projector_accuracy_percent" help:"Photon projector weapon accuracy percentage" path:"[data-stat='photon_projector_accuracy']" type:"percentage"` + SentryTurretKills int `ow:"sentry_turret_kills" prometheus:"ow_hero_sentry_turret_kills_total" help:"Eliminations with sentry turrets" path:"[data-stat='sentry_turret_kills']" type:"number"` + TeleporterPadsSummoned int `ow:"teleporter_pads_summoned" prometheus:"ow_hero_teleporter_pads_summoned_total" help:"Total teleporter pads summoned" path:"[data-stat='teleporter_pads_summoned']" type:"number"` + PlayersTeleported int `ow:"players_teleported" prometheus:"ow_hero_players_teleported_total" help:"Total players teleported" path:"[data-stat='players_teleported']" type:"number"` +} + +// WreckingBallMetrics defines Wrecking Ball specific metrics. +type WreckingBallMetrics struct { + CommonMetrics // Embedded common metrics + + QuadCannonAccuracy float64 `ow:"quad_cannon_accuracy" prometheus:"ow_hero_quad_cannon_accuracy_percent" help:"Quad cannon weapon accuracy percentage" path:"[data-stat='quad_cannon_accuracy']" type:"percentage"` + PiledriveKills int `ow:"piledrive_kills" prometheus:"ow_hero_piledrive_kills_total" help:"Eliminations with piledrive" path:"[data-stat='piledrive_kills']" type:"number"` + MinefieldKills int `ow:"minefield_kills" prometheus:"ow_hero_minefield_kills_total" help:"Eliminations with minefield ultimate" path:"[data-stat='minefield_kills']" type:"number"` + PlayersKnockedBack int `ow:"players_knocked_back" prometheus:"ow_hero_players_knocked_back_total" help:"Total players knocked back" path:"[data-stat='players_knocked_back']" type:"number"` +} + +// BastionMetrics defines Bastion specific metrics. 
+type BastionMetrics struct { + CommonMetrics // Embedded common metrics + + ConfigurationAssaultAccuracy float64 `ow:"configuration_assault_accuracy" prometheus:"ow_hero_configuration_assault_accuracy_percent" help:"Configuration assault weapon accuracy percentage" path:"[data-stat='configuration_assault_accuracy']" type:"percentage"` + ConfigurationReconAccuracy float64 `ow:"configuration_recon_accuracy" prometheus:"ow_hero_configuration_recon_accuracy_percent" help:"Configuration recon weapon accuracy percentage" path:"[data-stat='configuration_recon_accuracy']" type:"percentage"` + SelfRepairUsed int `ow:"self_repair_used" prometheus:"ow_hero_self_repair_used_total" help:"Total times self repair was used" path:"[data-stat='self_repair_used']" type:"number"` + ConfigurationArtilleryKills int `ow:"configuration_artillery_kills" prometheus:"ow_hero_configuration_artillery_kills_total" help:"Eliminations with configuration artillery ultimate" path:"[data-stat='configuration_artillery_kills']" type:"number"` +} + +// AsheMetrics defines Ashe specific metrics. +type AsheMetrics struct { + CommonMetrics // Embedded common metrics + + ViperAccuracy float64 `ow:"viper_accuracy" prometheus:"ow_hero_viper_accuracy_percent" help:"Viper rifle weapon accuracy percentage" path:"[data-stat='viper_accuracy']" type:"percentage"` + DynamiteKills int `ow:"dynamite_kills" prometheus:"ow_hero_dynamite_kills_total" help:"Eliminations with dynamite" path:"[data-stat='dynamite_kills']" type:"number"` + CoachGunKills int `ow:"coach_gun_kills" prometheus:"ow_hero_coach_gun_kills_total" help:"Eliminations with coach gun" path:"[data-stat='coach_gun_kills']" type:"number"` + BOBKills int `ow:"bob_kills" prometheus:"ow_hero_bob_kills_total" help:"Eliminations with BOB ultimate" path:"[data-stat='bob_kills']" type:"number"` +} + +// EchoMetrics defines Echo specific metrics. +type EchoMetrics struct { + CommonMetrics // Embedded common metrics + + TriShotAccuracy float64 `ow:"tri_shot_accuracy" prometheus:"ow_hero_tri_shot_accuracy_percent" help:"Tri-shot weapon accuracy percentage" path:"[data-stat='tri_shot_accuracy']" type:"percentage"` + StickyBombKills int `ow:"sticky_bomb_kills" prometheus:"ow_hero_sticky_bomb_kills_total" help:"Eliminations with sticky bombs" path:"[data-stat='sticky_bomb_kills']" type:"number"` + FlightTimeUsed int `ow:"flight_time_used" prometheus:"ow_hero_flight_time_used_total" help:"Total flight time used" path:"[data-stat='flight_time_used']" type:"number"` + DuplicateUltimateKills int `ow:"duplicate_ultimate_kills" prometheus:"ow_hero_duplicate_ultimate_kills_total" help:"Eliminations with duplicated ultimates" path:"[data-stat='duplicate_ultimate_kills']" type:"number"` +} + +// VentureMetrics defines Venture specific metrics. 
+type VentureMetrics struct { + CommonMetrics // Embedded common metrics + + SmartExcavatorAccuracy float64 `ow:"smart_excavator_accuracy" prometheus:"ow_hero_smart_excavator_accuracy_percent" help:"Smart excavator weapon accuracy percentage" path:"[data-stat='smart_excavator_accuracy']" type:"percentage"` + BurrowKills int `ow:"burrow_kills" prometheus:"ow_hero_burrow_kills_total" help:"Eliminations with burrow" path:"[data-stat='burrow_kills']" type:"number"` + DrillDashKills int `ow:"drill_dash_kills" prometheus:"ow_hero_drill_dash_kills_total" help:"Eliminations with drill dash" path:"[data-stat='drill_dash_kills']" type:"number"` + TectonicShockKills int `ow:"tectonic_shock_kills" prometheus:"ow_hero_tectonic_shock_kills_total" help:"Eliminations with tectonic shock ultimate" path:"[data-stat='tectonic_shock_kills']" type:"number"` +} + +// RamattraMetrics defines Ramattra specific metrics. +type RamattraMetrics struct { + CommonMetrics // Embedded common metrics + + VoidAcceleratorAccuracy float64 `ow:"void_accelerator_accuracy" prometheus:"ow_hero_void_accelerator_accuracy_percent" help:"Void accelerator weapon accuracy percentage" path:"[data-stat='void_accelerator_accuracy']" type:"percentage"` + VoidBarrierDamageBlocked int64 `ow:"void_barrier_damage_blocked" prometheus:"ow_hero_void_barrier_damage_blocked_total" help:"Damage blocked by void barrier" path:"[data-stat='void_barrier_damage_blocked']" type:"number"` + RavenousVortexKills int `ow:"ravenous_vortex_kills" prometheus:"ow_hero_ravenous_vortex_kills_total" help:"Eliminations with ravenous vortex" path:"[data-stat='ravenous_vortex_kills']" type:"number"` + AnnihilationKills int `ow:"annihilation_kills" prometheus:"ow_hero_annihilation_kills_total" help:"Eliminations with annihilation ultimate" path:"[data-stat='annihilation_kills']" type:"number"` +} + +// JunoMetrics defines Juno specific metrics. +type JunoMetrics struct { + CommonMetrics // Embedded common metrics + + MediBlasterAccuracy float64 `ow:"medi_blaster_accuracy" prometheus:"ow_hero_medi_blaster_accuracy_percent" help:"Medi-blaster weapon accuracy percentage" path:"[data-stat='medi_blaster_accuracy']" type:"percentage"` + PulsarTorpedoKills int `ow:"pulsar_torpedo_kills" prometheus:"ow_hero_pulsar_torpedo_kills_total" help:"Eliminations with pulsar torpedo" path:"[data-stat='pulsar_torpedo_kills']" type:"number"` + GlideBoostUsed int `ow:"glide_boost_used" prometheus:"ow_hero_glide_boost_used_total" help:"Times glide boost was used" path:"[data-stat='glide_boost_used']" type:"number"` + OrbitalRayKills int `ow:"orbital_ray_kills" prometheus:"ow_hero_orbital_ray_kills_total" help:"Eliminations with orbital ray ultimate" path:"[data-stat='orbital_ray_kills']" type:"number"` +} + +// WuyangMetrics defines Wuyang specific metrics. 
+type WuyangMetrics struct { + CommonMetrics // Embedded common metrics + + UnloadAccuracy float64 `ow:"unload_accuracy" prometheus:"ow_hero_unload_accuracy_percent" help:"Unload weapon accuracy percentage" path:"[data-stat='unload_accuracy']" type:"percentage"` + SteadyingKills int `ow:"steadying_kills" prometheus:"ow_hero_steadying_kills_total" help:"Eliminations with steadying" path:"[data-stat='steadying_kills']" type:"number"` + CriticalMomentKills int `ow:"critical_moment_kills" prometheus:"ow_hero_critical_moment_kills_total" help:"Eliminations with critical moment" path:"[data-stat='critical_moment_kills']" type:"number"` + CriticalMomentUses int `ow:"critical_moment_uses" prometheus:"ow_hero_critical_moment_uses_total" help:"Times critical moment ability was used" path:"[data-stat='critical_moment_uses']" type:"number"` +} + +// FrejaMetrics defines Freja specific metrics. +type FrejaMetrics struct { + CommonMetrics // Embedded common metrics + + FrostBiteAccuracy float64 `ow:"frost_bite_accuracy" prometheus:"ow_hero_frost_bite_accuracy_percent" help:"Frost bite weapon accuracy percentage" path:"[data-stat='frost_bite_accuracy']" type:"percentage"` + IceWallUsed int `ow:"ice_wall_used" prometheus:"ow_hero_ice_wall_used_total" help:"Times ice wall was used" path:"[data-stat='ice_wall_used']" type:"number"` + SlowDebuffApplied int `ow:"slow_debuff_applied" prometheus:"ow_hero_slow_debuff_applied_total" help:"Times slow debuff was applied" path:"[data-stat='slow_debuff_applied']" type:"number"` + ArcticInversionKills int `ow:"arctic_inversion_kills" prometheus:"ow_hero_arctic_inversion_kills_total" help:"Eliminations with arctic inversion ultimate" path:"[data-stat='arctic_inversion_kills']" type:"number"` +} diff --git a/cmd/ow-exporter/main.go b/cmd/ow-exporter/main.go new file mode 100644 index 00000000..1f206712 --- /dev/null +++ b/cmd/ow-exporter/main.go @@ -0,0 +1,1200 @@ +package main + +import ( + "context" + "encoding/json" + "io" + "log/slog" + "net/http" + "net/url" + "os" + "os/signal" + "strings" + "time" + "unicode" + + "github.com/cockroachdb/errors" + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" + "github.com/prometheus/client_golang/prometheus/promhttp" +) + +const ( + defaultPort = "9420" + + // MinArgsWithParam is the minimum number of CLI arguments for commands that take a parameter. + MinArgsWithParam = 3 // program name + command + parameter + + // DefaultServerPort sets the default Prometheus server port. + DefaultServerPort = "9090" + // DefaultHTTPTimeout sets the default HTTP client timeout. + DefaultHTTPTimeout = 30 * time.Second + // maxSampleMetricsCount sets maximum number of sample metrics to show. + maxSampleMetricsCount = 3 + // MaxHeroesDisplayed sets the maximum number of heroes displayed in logs. + MaxHeroesDisplayed = 3 + // DebugFilePermissions sets file permissions for debug files. + DebugFilePermissions = 0o600 +) + +// toTitle converts the first character of each word to uppercase (replacement for deprecated strings.Title). +func toTitle(str string) string { + prev := ' ' + + return strings.Map(func(r rune) rune { + if unicode.IsSpace(prev) { + prev = r + + return unicode.ToTitle(r) + } + prev = r + + return r + }, str) +} + +// commandHandler defines a function type for handling commands. +type commandHandler func([]string) + +// getCommandHandlers returns a map of command names to their handlers. 
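+// The returned map is consulted by handleCommand below; commands not present in the
+// map fall through to printMainUsage and a non-zero exit.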
+func getCommandHandlers() map[string]commandHandler { + return map[string]commandHandler{ + "parse-profile": handleParseProfile, + "parse-battletag": handleParseBattleTag, + "config": handleConfigCommand, + "metrics": handleMetricsCommand, + "server": handleServerCommand, + "test-api": handleTestAPI, + "test-headless": handleTestHeadless, + "test-parser": handleTestParser, + } +} + +// handleCommand processes CLI commands and returns true if a command was handled. +func handleCommand(args []string) bool { + if len(args) <= 1 { + return false + } + + handlers := getCommandHandlers() + if handler, exists := handlers[args[1]]; exists { + handler(args) + + return true + } + + printMainUsage() + os.Exit(1) + + return true +} + +// handleParseProfile handles the parse-profile command. +func handleParseProfile(_ []string) { + runPoC() +} + +// handleTestAPI handles the test-api command. +func handleTestAPI(_ []string) { + runAPITests() +} + +// handleTestHeadless handles the test-headless command. +func handleTestHeadless(_ []string) { + runHeadlessTests() +} + +// handleTestParser handles the test-parser command. +func handleTestParser(_ []string) { + runParserTests() +} + +// handleParseBattleTag handles the parse-battletag command. +func handleParseBattleTag(args []string) { + if len(args) < MinArgsWithParam { + slog.Error("Usage error", "command", "parse-battletag", "message", "BattleTag argument required") + os.Exit(1) + } + runParseBattleTag(args[2]) +} + +// handleConfigCommand handles the config command. +func handleConfigCommand(args []string) { + if len(args) < MinArgsWithParam { + printConfigUsage() + os.Exit(1) + } + runConfigCommand(args[2:]) +} + +// handleMetricsCommand handles the metrics command. +func handleMetricsCommand(args []string) { + if len(args) < MinArgsWithParam { + printMetricsUsage() + os.Exit(1) + } + runMetricsCommand(args[2:]) +} + +// handleServerCommand handles the server command. 
+func handleServerCommand(args []string) { + port := DefaultServerPort + if len(args) > 2 { + port = args[2] + } + startPrometheusServer(port) +} + +func main() { + // Setup structured logging + programLevel := new(slog.LevelVar) + programLevel.Set(slog.LevelDebug) // Set to Debug level for troubleshooting + slog.SetDefault(slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: programLevel}))) + + // Initialize systems + err := initConfig() + if err != nil { + slog.Error("Failed to initialize config", "error", err) + os.Exit(1) + } + + initRuntimeMetrics() + + // Initialize Prometheus metrics + initPrometheusMetrics() + + // Handle CLI commands + if handleCommand(os.Args) { + return + } + + // Start automatic profile parsing + go startPeriodicParsing() + + slog.Info("ow-exporter starting", "version", "development") + + // Create Echo instance + server := echo.New() + server.HideBanner = true + + // Middleware + server.Use(middleware.Logger()) + server.Use(middleware.Recover()) + server.Use(middleware.CORS()) + + // Routes + setupRoutes(server) + + // Get port from environment or use default + port := os.Getenv("PORT") + if port == "" { + port = defaultPort + } + + // Start server + go func() { + slog.Info("starting HTTP server", "port", port) + err := server.Start(":" + port) + if err != nil && !errors.Is(err, http.ErrServerClosed) { + slog.Error("failed to start server", "error", err) + } + }() + + // Wait for interrupt signal to gracefully shutdown the server + quit := make(chan os.Signal, 1) + signal.Notify(quit, os.Interrupt) + <-quit + + slog.Info("shutting down server...") + + // Graceful shutdown with timeout + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + err = server.Shutdown(ctx) + if err != nil { + slog.Error("server forced to shutdown", "error", err) + } + + slog.Info("server shutdown complete") +} + +func setupRoutes(server *echo.Echo) { + // Health check + server.GET("/health", healthHandler) + + // API routes + api := server.Group("/api") + api.GET("/users", listUsersHandler) + api.POST("/users", createUserHandler) + api.GET("/users/:username", getUserHandler) + api.PUT("/users/:username", updateUserHandler) + api.DELETE("/users/:username", deleteUserHandler) + + // Prometheus metrics + server.GET("/metrics", metricsHandler) + + // Parse endpoints + api.POST("/parse", parseAllPlayersHandler) + api.POST("/parse/:username", parsePlayerHandler) + + // Development info + server.GET("/", indexHandler) +} + +// HTTP handlers. 
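+// Illustrative calls against the routes registered in setupRoutes above, assuming the
+// default port 9420 and a placeholder BattleTag (URL-encode the # as %23):
+//
+//	curl localhost:9420/health
+//	curl -X POST localhost:9420/api/users -H 'Content-Type: application/json' -d '{"battleTag":"Player#1234"}'
+//	curl -X POST localhost:9420/api/parse/Player%231234
+//	curl localhost:9420/metrics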
+func healthHandler(c echo.Context) error { + err := c.JSON(http.StatusOK, map[string]string{ + "status": "ok", + "service": "ow-exporter", + "version": "development", + }) + if err != nil { + return errors.Wrap(err, "failed to write health response") + } + + return nil +} + +func indexHandler(c echo.Context) error { + err := c.JSON(http.StatusOK, map[string]interface{}{ + "service": "ow-exporter", + "version": "development", + "status": "in development", + "endpoints": map[string]string{ + "health": "/health", + "metrics": "/metrics", + "users": "/api/users", + }, + "documentation": "https://github.com/lexfrei/tools/issues/439", + }) + if err != nil { + return errors.Wrap(err, "failed to write index response") + } + + return nil +} + +func listUsersHandler(ctx echo.Context) error { + players := getAllPlayers() + users := make([]map[string]interface{}, len(players)) + + for i, player := range players { + users[i] = map[string]interface{}{ + "battleTag": player.BattleTag, + "resolvedUrl": player.ResolvedURL, + "lastResolved": player.LastResolved, + } + } + + err := ctx.JSON(http.StatusOK, map[string]interface{}{ + "users": users, + "total": len(players), + }) + if err != nil { + return errors.Wrap(err, "failed to write users response") + } + + return nil +} + +// CreateUserRequest represents the request body for creating a user. +type CreateUserRequest struct { + BattleTag string `json:"battleTag" validate:"required"` + ResolvedURL string `json:"resolvedUrl,omitempty"` +} + +func createUserHandler(ctx echo.Context) error { + var req CreateUserRequest + + err := ctx.Bind(&req) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, "invalid request body") + } + + if req.BattleTag == "" { + return echo.NewHTTPError(http.StatusBadRequest, "battleTag is required") + } + + // If no resolved URL provided, try to resolve the BattleTag + if req.ResolvedURL == "" { + resolvedURL, resolveErr := getOrResolveURL(req.BattleTag) + if resolveErr != nil { + return echo.NewHTTPError(http.StatusBadRequest, "failed to resolve BattleTag") + } + req.ResolvedURL = resolvedURL + } + + err = addPlayerToConfig(req.BattleTag, req.ResolvedURL) + if err != nil { + return errors.Wrap(err, "failed to add player to config") + } + + err = ctx.JSON(http.StatusCreated, map[string]interface{}{ + "message": "User created successfully", + "battleTag": req.BattleTag, + "resolvedUrl": req.ResolvedURL, + }) + if err != nil { + return errors.Wrap(err, "failed to send create user response") + } + + return nil +} + +func getUserHandler(ctx echo.Context) error { + battleTag := ctx.Param("username") + + player := findPlayerByBattleTag(battleTag) + if player == nil { + return echo.NewHTTPError(http.StatusNotFound, "player not found") + } + + err := ctx.JSON(http.StatusOK, map[string]interface{}{ + "battleTag": player.BattleTag, + "resolvedUrl": player.ResolvedURL, + "lastResolved": player.LastResolved, + }) + if err != nil { + return errors.Wrap(err, "failed to send get user response") + } + + return nil +} + +// UpdateUserRequest represents the request body for updating a user. 
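+// An omitted or empty resolvedUrl causes updateUserHandler to re-resolve the BattleTag
+// via getOrResolveURL; otherwise the supplied URL is stored as-is. Illustrative body
+// (the URL is a placeholder):
+//
+//	{"resolvedUrl": "https://overwatch.blizzard.com/en-us/career/<profile-id>/"}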
+type UpdateUserRequest struct { + ResolvedURL string `json:"resolvedUrl,omitempty"` +} + +func updateUserHandler(ctx echo.Context) error { + battleTag := ctx.Param("username") + var req UpdateUserRequest + + err := ctx.Bind(&req) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, "invalid request body") + } + + // Check if player exists + player := findPlayerByBattleTag(battleTag) + if player == nil { + return echo.NewHTTPError(http.StatusNotFound, "player not found") + } + + // If no resolved URL provided, try to resolve the BattleTag + if req.ResolvedURL == "" { + resolvedURL, resolveErr := getOrResolveURL(battleTag) + if resolveErr != nil { + return echo.NewHTTPError(http.StatusBadRequest, "failed to resolve BattleTag") + } + req.ResolvedURL = resolvedURL + } + + err = updatePlayerURL(battleTag, req.ResolvedURL) + if err != nil { + return errors.Wrap(err, "failed to update player") + } + + err = ctx.JSON(http.StatusOK, map[string]interface{}{ + "message": "User updated successfully", + "battleTag": battleTag, + "resolvedUrl": req.ResolvedURL, + }) + if err != nil { + return errors.Wrap(err, "failed to send update user response") + } + + return nil +} + +func deleteUserHandler(ctx echo.Context) error { + battleTag := ctx.Param("username") + + err := removePlayerFromConfig(battleTag) + if err != nil { + if errors.Is(err, ErrPlayerNotFound) { + return echo.NewHTTPError(http.StatusNotFound, "player not found") + } + + return errors.Wrap(err, "failed to remove player") + } + + err = ctx.JSON(http.StatusOK, map[string]interface{}{ + "message": "User deleted successfully", + "battleTag": battleTag, + }) + if err != nil { + return errors.Wrap(err, "failed to send delete user response") + } + + return nil +} + +func metricsHandler(ctx echo.Context) error { + // Update Prometheus metrics with fresh data + updatePrometheusMetrics() + + // Serve Prometheus metrics directly using Echo + handler := promhttp.Handler() + handler.ServeHTTP(ctx.Response(), ctx.Request()) + + return nil +} + +// parseAllPlayersHandler triggers parsing of all configured players. +func parseAllPlayersHandler(ctx echo.Context) error { + go func() { + slog.Info("πŸ”„ Manual parse triggered via API") + parseAllPlayers() + }() + + err := ctx.JSON(http.StatusAccepted, map[string]interface{}{ + "message": "Parsing started for all configured players", + "status": "in_progress", + }) + if err != nil { + return errors.Wrap(err, "failed to send parse response") + } + + return nil +} + +// parsePlayerHandler triggers parsing of a specific player. +func parsePlayerHandler(ctx echo.Context) error { + battleTag := ctx.Param("username") + + go func() { + slog.Info("πŸ”„ Manual parse triggered for player", "battletag", battleTag) + success := parsePlayerSafely(battleTag) + if success { + slog.Info("βœ… Manual parse completed", "battletag", battleTag) + } else { + slog.Error("❌ Manual parse failed", "battletag", battleTag) + } + }() + + err := ctx.JSON(http.StatusAccepted, map[string]interface{}{ + "message": "Parsing started for player", + "battleTag": battleTag, + "status": "in_progress", + }) + if err != nil { + return errors.Wrap(err, "failed to send parse response") + } + + return nil +} + +// startPeriodicParsing starts automatic parsing of all configured players. 
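+// Parsing runs once at startup and then on every tick of parseInterval (30 minutes);
+// the per-player work is delegated to parseAllPlayers and parsePlayerSafely.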
+func startPeriodicParsing() { + const parseInterval = 30 * time.Minute + + slog.Info("πŸ”„ Starting periodic profile parsing", "interval", parseInterval.String()) + + // Initial parse on startup + parseAllPlayers() + + // Set up periodic parsing + ticker := time.NewTicker(parseInterval) + defer ticker.Stop() + + for range ticker.C { + parseAllPlayers() + } +} + +// parseAllPlayers parses all players from config. +func parseAllPlayers() { + players := getAllPlayers() + if len(players) == 0 { + slog.Info("No players configured for parsing") + + return + } + + slog.Info("🎯 Starting batch profile parsing", "players", len(players)) + + successCount := 0 + for _, player := range players { + if parsePlayerSafely(player.BattleTag) { + successCount++ + } + // Small delay between requests to be respectful to the server + time.Sleep(2 * time.Second) + } + + slog.Info("βœ… Batch parsing completed", + "success", successCount, + "total", len(players), + "failed", len(players)-successCount) +} + +// parsePlayerSafely parses a single player with error handling. +func parsePlayerSafely(battleTag string) bool { + slog.Info("🎯 Parsing profile", "battletag", battleTag) + + // Get or resolve URL + profileURL, err := getOrResolveURL(battleTag) + if err != nil { + slog.Error("❌ Failed to resolve BattleTag", "battletag", battleTag, "error", err) + + return false + } + + // Parse the profile + parser := NewParser() + profile, err := parser.ParseProfile(fetchProfileHTML(profileURL), battleTag) + if err != nil { + slog.Error("❌ Failed to parse profile", "battletag", battleTag, "error", err) + + return false + } + + // Create/update runtime metrics + existingMetrics, exists := getPlayerMetrics(battleTag) + if exists { + updatePlayerFromProfile(existingMetrics, profile) + slog.Info("πŸ“Š Updated player metrics", "battletag", battleTag) + } else { + newMetrics := createPlayerMetrics(battleTag, profile) + setPlayerMetrics(battleTag, newMetrics) + slog.Info("πŸ“Š Created new player metrics", "battletag", battleTag) + } + + // Log key metrics + if profile != nil { + slog.Info("πŸŽ–οΈ Endorsement", "battletag", battleTag, "level", profile.ProfileMetrics.Endorsement.Level) + + // Count heroes + totalHeroes := 0 + for _, platform := range profile.Platforms { + for _, gameMode := range platform.GameModes { + totalHeroes += len(gameMode.Heroes) + } + } + slog.Info("πŸ‘€ Heroes parsed", "battletag", battleTag, "count", totalHeroes) + } + + return true +} + +// runPoC runs the Proof of Concept profile parser. 
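+// An alternative profile URL may be passed as the second CLI argument (os.Args[2]);
+// otherwise a built-in example profile URL is used.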
+func runPoC() { + profileURL := "https://overwatch.blizzard.com/en-us/career/" + + "de5bb4aca17492e0bba120a1d1%7Ca92a11ef8d304356fccfff8df12e1dc6/" + + if len(os.Args) > 2 { + profileURL = os.Args[2] + } + + slog.Info("🎯 PoC: Parsing Overwatch Profile") + slog.Info("πŸ“‹ Profile URL", "url", profileURL) + + // Fetch profile HTML + html, err := fetchProfile(profileURL) + if err != nil { + slog.Error("❌ Error fetching profile", "error", err) + os.Exit(1) + } + + slog.Info("βœ… Successfully fetched profile HTML", "bytes", len(html)) + + // Debug: Save HTML to file for inspection + err = os.WriteFile("debug_profile.html", []byte(html), DebugFilePermissions) + if err != nil { + slog.Warn("⚠️ Warning: could not save debug HTML", "error", err) + } else { + slog.Info("πŸ’Ύ Saved HTML to debug_profile.html for inspection") + } + + // Parse profile using our parser + parser := NewParser() + username := extractUsernameFromURL(profileURL) + + slog.Info("πŸ‘€ Detected username", "username", username) + slog.Info("πŸ” Parsing profile data...") + + stats, err := parser.ParseProfile(html, username) + if err != nil { + slog.Error("❌ Error parsing profile", "error", err) + os.Exit(1) + } + + // Pretty print results + printPrettyResults(stats) +} + +// fetchProfile fetches the profile HTML with proper headers. +func fetchProfile(profileURL string) (string, error) { + client := &http.Client{ + Timeout: DefaultHTTPTimeout, + } + + req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, profileURL, http.NoBody) + if err != nil { + return "", errors.Wrap(err, "failed to create HTTP request") + } + + // Add browser-like headers + req.Header.Set("User-Agent", + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36") + req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8") + req.Header.Set("Accept-Language", "en-US,en;q=0.5") + // Don't request compression to avoid parsing issues + req.Header.Set("DNT", "1") + req.Header.Set("Connection", "keep-alive") + req.Header.Set("Upgrade-Insecure-Requests", "1") + + resp, err := client.Do(req) + if err != nil { + return "", errors.Wrap(err, "failed to perform HTTP request") + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return "", errors.Wrapf(ErrHTTPError, "%d %s", resp.StatusCode, resp.Status) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return "", errors.Wrap(err, "failed to read response body") + } + + return string(body), nil +} + +// extractUsernameFromURL extracts username from Overwatch profile URL. +func extractUsernameFromURL(profileURL string) string { + // Extract from URL like: /en-us/career/pc/Username/ or + // /en-us/career/de5bb4aca17492e0bba120a1d1%7Ca92a11ef8d304356fccfff8df12e1dc6/ + parts := strings.Split(profileURL, "/") + if len(parts) >= 2 { + username := parts[len(parts)-2] // Get second to last part + if strings.Contains(username, "%7C") { + // Handle encoded URLs - decode BattleTag with encoded | + decoded, err := url.QueryUnescape(username) + if err != nil { + return "BattleTag-User" + } + // Replace | with # for BattleTag format + return strings.ReplaceAll(decoded, "|", "#") + } + + return username + } + + return "Unknown" +} + +// printPrettyResults prints the parsed results in a nice format. 
+func printPrettyResults(stats *FullPlayerProfile) { + slog.Info("🎠===========================================") + slog.Info("πŸ“Š OVERWATCH PROFILE PARSING RESULTS") + slog.Info("===========================================") + + slog.Info("πŸ‘€ Username", "username", stats.Username) + slog.Info("πŸ• Last Update", "timestamp", stats.LastUpdate.Format(time.RFC3339)) + slog.Info("🎯 Platforms Found", "count", len(stats.Platforms)) + + for platform, platformStats := range stats.Platforms { + slog.Info("πŸ–₯️ Platform", "platform", strings.ToUpper(string(platform))) + slog.Info("πŸ“ˆ Game Modes", "count", len(platformStats.GameModes)) + + for gameMode, gameModeStats := range platformStats.GameModes { + slog.Info(" 🎠Game Mode", "mode", toTitle(string(gameMode)), "heroes", len(gameModeStats.Heroes)) + + // Show first few heroes as examples + count := 0 + for heroID, heroStats := range gameModeStats.Heroes { + if count >= MaxHeroesDisplayed { + break + } + slog.Info(" β€’ Hero", "name", heroStats.HeroName, "id", heroID, "metrics", len(heroStats.Metrics)) + + // Show a few metrics + metricCount := 0 + for metricName, value := range heroStats.Metrics { + if metricCount >= 2 { + break + } + slog.Info(" - Metric", "name", metricName, "value", value) + metricCount++ + } + count++ + } + + if len(gameModeStats.Heroes) > MaxHeroesDisplayed { + slog.Info(" ... and more heroes", "additional", len(gameModeStats.Heroes)-MaxHeroesDisplayed) + } + } + } + + // Pretty print full JSON + slog.Info("πŸ“‹ FULL JSON OUTPUT:") + slog.Info("====================") + + jsonData, err := json.MarshalIndent(stats, "", " ") + if err != nil { + slog.Error("❌ Error marshaling JSON", "error", err) + + return + } + + slog.Info(string(jsonData)) + slog.Info("βœ… PoC Complete! Profile successfully parsed and displayed.") +} + +// CLI command implementations. + +// runParseBattleTag parses a profile by BattleTag. +func runParseBattleTag(battleTag string) { + slog.Info("🎯 Parsing Overwatch profile by BattleTag", "battletag", battleTag) + + // Get or resolve URL + profileURL, err := getOrResolveURL(battleTag) + if err != nil { + slog.Error("❌ Failed to resolve BattleTag", "battletag", battleTag, "error", err) + os.Exit(1) + } + + slog.Info("βœ… Resolved profile URL", "battletag", battleTag, "url", profileURL) + + // Parse the profile + parser := NewParser() + profile, err := parser.ParseProfile(fetchProfileHTML(profileURL), battleTag) + if err != nil { + slog.Error("❌ Failed to parse profile", "error", err) + os.Exit(1) + } + + // Create/update runtime metrics + existingMetrics, exists := getPlayerMetrics(battleTag) + if exists { + updatePlayerFromProfile(existingMetrics, profile) + slog.Info("πŸ“Š Updated existing player metrics", "battletag", battleTag) + } else { + newMetrics := createPlayerMetrics(battleTag, profile) + setPlayerMetrics(battleTag, newMetrics) + slog.Info("πŸ“Š Created new player metrics", "battletag", battleTag) + } + + // Display results + printParsedProfile(profile) + slog.Info("βœ… Parse complete! Metrics stored in runtime.") +} + +// fetchProfileHTML fetches HTML content from URL. +func fetchProfileHTML(profileURL string) string { + html, err := fetchProfile(profileURL) + if err != nil { + slog.Error("Failed to fetch profile HTML", "error", err) + os.Exit(1) + } + + return html +} + +// printParsedProfile prints profile information. 
+func printParsedProfile(profile *FullPlayerProfile) { + slog.Info("πŸ“‹ PROFILE INFORMATION") + slog.Info("πŸ‘€ Player", "battletag", profile.BattleTag, "title", profile.PlayerTitle) + slog.Info("πŸŽ–οΈ Endorsement", "level", profile.ProfileMetrics.Endorsement.Level) + + // Print skill ratings + for platform, roles := range profile.ProfileMetrics.SkillRatings { + for role, rank := range roles { + slog.Info("πŸ† Skill Rating", "platform", platform, "role", role, "tier", rank.Tier, "division", rank.Division) + } + } + + // Print platform/gamemode summary + for platform, platformStats := range profile.Platforms { + for gameMode, gameModeStats := range platformStats.GameModes { + slog.Info("🎠Heroes", "platform", platform, "gamemode", gameMode, "count", len(gameModeStats.Heroes)) + } + } +} + +// runConfigCommand handles config subcommands. +func runConfigCommand(args []string) { + if len(args) == 0 { + printConfigUsage() + + return + } + + switch args[0] { + case "list-players": + listPlayersCommand() + case "add-player": + if len(args) < 2 { + slog.Error("Usage error", "command", "config add-player", "message", "BattleTag argument required") + os.Exit(1) + } + addPlayerCommand(args[1]) + case "remove-player": + if len(args) < 2 { + slog.Error("Usage error", "command", "config remove-player", "message", "BattleTag argument required") + os.Exit(1) + } + removePlayerCommand(args[1]) + case "resolve-all": + forceResolve := len(args) > 1 && args[1] == "--force" + resolveAllCommand(forceResolve) + default: + slog.Error("Unknown config command", "command", args[0]) + printConfigUsage() + os.Exit(1) + } +} + +// runMetricsCommand handles metrics subcommands. +func runMetricsCommand(args []string) { + if len(args) == 0 { + printMetricsUsage() + + return + } + + switch args[0] { + case "show": + if len(args) < 2 { + slog.Error("Usage error", "command", "metrics show", "message", "BattleTag argument required") + os.Exit(1) + } + showMetricsCommand(args[1]) + case "list": + listMetricsCommand() + case "clear": + clearMetricsCommand() + case "stats": + statsCommand() + default: + slog.Error("Unknown metrics command", "command", args[0]) + printMetricsUsage() + os.Exit(1) + } +} + +// Config command implementations. 
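+// Illustrative CLI usage, assuming the binary is built as ow-exporter (see
+// printConfigUsage for the canonical help text):
+//
+//	ow-exporter config add-player "Player#1234"
+//	ow-exporter config list-players
+//	ow-exporter config resolve-all --force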
+func listPlayersCommand() { + players := getAllPlayers() + if len(players) == 0 { + slog.Info("No players configured.") + + return + } + + slog.Info("Configured players", "count", len(players)) + for _, player := range players { + status := "not_resolved" + lastResolved := "" + if player.ResolvedURL != "" { + status = "resolved" + if player.LastResolved != nil { + lastResolved = player.LastResolved.Format("2006-01-02 15:04") + } + } + slog.Info("Player", "battletag", player.BattleTag, "status", status, "last_resolved", lastResolved) + } +} + +func addPlayerCommand(battleTag string) { + slog.Info("Adding player to config", "battletag", battleTag) + + // Resolve URL immediately + result, err := resolveBattleTagToURL(battleTag) + if err != nil { + slog.Error("Failed to resolve BattleTag", "battletag", battleTag, "error", err) + os.Exit(1) + } + + // Add to config + err = addPlayerToConfig(battleTag, result.ResolvedURL) + if err != nil { + slog.Error("Failed to add player to config", "error", err) + os.Exit(1) + } + + slog.Info("βœ… Player added to config", "battletag", battleTag, "platform", result.Platform) +} + +func removePlayerCommand(battleTag string) { + err := removePlayerFromConfig(battleTag) + if err != nil { + slog.Error("Failed to remove player from config", "battletag", battleTag, "error", err) + os.Exit(1) + } + + // Also remove from runtime metrics + removePlayerMetrics(battleTag) + slog.Info("βœ… Player removed from config and runtime", "battletag", battleTag) +} + +func resolveAllCommand(forceResolve bool) { + err := resolveAllPlayers(forceResolve) + if err != nil { + slog.Error("Failed to resolve all players", "error", err) + os.Exit(1) + } + slog.Info("βœ… Resolved all player URLs") +} + +// Metrics command implementations. +func showMetricsCommand(battleTag string) { + metrics, exists := getPlayerMetrics(battleTag) + if !exists { + slog.Warn("No metrics found", "battletag", battleTag, "suggestion", "run parse-battletag first") + + return + } + + slog.Info("Player metrics", "battletag", battleTag) + slog.Info("Display name", "name", metrics.DisplayName) + slog.Info("Player title", "title", metrics.PlayerTitle) + slog.Info("Last updated", "timestamp", metrics.LastUpdated.Format("2006-01-02 15:04:05")) + slog.Info("Endorsement level", "level", metrics.ProfileMetrics.Endorsement.Level) + + // Count total metrics + totalHeroes := 0 + for _, platforms := range metrics.HeroMetrics { + for _, heroes := range platforms { + totalHeroes += len(heroes) + } + } + slog.Info("Total heroes", "count", totalHeroes) +} + +func listMetricsCommand() { + battleTags := listPlayerBattleTags() + if len(battleTags) == 0 { + slog.Info("No metrics in runtime store") + + return + } + + slog.Info("Players with metrics", "count", len(battleTags)) + for _, battleTag := range battleTags { + if metrics, exists := getPlayerMetrics(battleTag); exists { + slog.Info("Player metrics", + "battletag", battleTag, + "display_name", metrics.DisplayName, + "last_updated", metrics.LastUpdated.Format("2006-01-02 15:04")) + } + } +} + +func clearMetricsCommand() { + clearAllMetrics() + slog.Info("Cleared all runtime metrics") +} + +func statsCommand() { + stats := getMetricsStats() + slog.Info("Runtime metrics statistics") + slog.Info("Total players", "count", stats["total_players"]) + if stats["last_updated"] != nil { + if lastUpdated, ok := stats["last_updated"].(time.Time); ok { + slog.Info("Last updated", "timestamp", lastUpdated.Format("2006-01-02 15:04:05")) + } + } +} + +// Usage printing functions. 
+func printConfigUsage() {
+	slog.Info("Config command usage", "program", os.Args[0])
+	slog.Info("Available subcommands:")
+	slog.Info(" list-players               List all configured players")
+	slog.Info(" add-player <battletag>     Add a player to config")
+	slog.Info(" remove-player <battletag>  Remove a player from config")
+	slog.Info(" resolve-all [--force]      Resolve URLs for all players")
+}
+
+func printMetricsUsage() {
+	slog.Info("Metrics command usage", "program", os.Args[0])
+	slog.Info("Available subcommands:")
+	slog.Info(" show <battletag>  Show metrics for a player")
+	slog.Info(" list              List all players with metrics")
+	slog.Info(" clear             Clear all runtime metrics")
+	slog.Info(" stats             Show runtime metrics statistics")
+}
+
+func printMainUsage() {
+	slog.Info("Usage", "program", os.Args[0], "format", "<command> [arguments]")
+	slog.Info("Available commands:")
+	slog.Info(" parse-battletag <battletag>  Parse Overwatch profile by BattleTag")
+	slog.Info(" config                       Manage player configuration")
+	slog.Info(" metrics                      View runtime metrics")
+	slog.Info(" server [port]                Start Prometheus metrics server (default: 9090)")
+	slog.Info(" parse-profile                Parse single profile (development mode)")
+	slog.Info(" test-api                     Test API endpoints for detailed stats")
+	slog.Info(" test-headless                Test headless browser parsing for JS content")
+	slog.Info(" test-parser                  Test parser with page.html from browser")
+	slog.Info("Help", "message", "Run command without arguments for command-specific help", "program", os.Args[0])
+}
+
+// runAPITests runs API endpoint discovery.
+func runAPITests() {
+	slog.Info("🔍 Starting API endpoint discovery...")
+
+	ctx := context.Background()
+	inspector := NewAPIInspector()
+
+	// Get player config
+	players := getAllPlayers()
+	if len(players) == 0 {
+		slog.Error("No players configured")
+
+		return
+	}
+
+	// Test with first player
+	player := &players[0]
+	profileURL := player.ResolvedURL
+
+	slog.Info("Testing API endpoints", "battletag", player.BattleTag, "url", profileURL)
+
+	err := inspector.InspectPotentialAPIEndpoints(ctx, profileURL)
+	if err != nil {
+		slog.Error("API inspection failed", "error", err)
+
+		return
+	}
+
+	slog.Info("✅ API endpoint discovery completed")
+}
+
+// runHeadlessTests runs headless browser testing.
+func runHeadlessTests() {
+	slog.Info("🌐 Starting headless browser testing...")
+
+	ctx := context.Background()
+	parser := NewHeadlessParser()
+
+	// Get player config
+	players := getAllPlayers()
+	if len(players) == 0 {
+		slog.Error("No players configured")
+
+		return
+	}
+
+	// Test with first player
+	player := &players[0]
+	profileURL := player.ResolvedURL
+
+	slog.Info("Testing headless parsing", "battletag", player.BattleTag, "url", profileURL)
+
+	// Fetch page with JavaScript execution
+	_, err := parser.FetchWithJavaScript(ctx, profileURL)
+	if err != nil {
+		slog.Error("Headless parsing failed", "error", err)
+
+		return
+	}
+
+	// Note: DOM analysis was removed to simplify the function
+
+	slog.Info("✅ Headless browser testing completed")
+}
+
+// runParserTests tests the updated parser with real HTML from page.html.
+func runParserTests() {
+	slog.Info("🧪 Testing parser with real HTML from page.html...")
+
+	htmlContent, err := loadHTMLFile()
+	if err != nil {
+		return
+	}
+
+	profile, err := parseHTMLContent(htmlContent)
+	if err != nil {
+		return
+	}
+
+	analyzeParsingResults(profile)
+}
+
+// loadHTMLFile reads the HTML file from disk.
+func loadHTMLFile() ([]byte, error) { + htmlContent, err := os.ReadFile("page.html") + if err != nil { + slog.Error("Failed to read page.html", "error", err) + slog.Info("Make sure page.html exists in the current directory") + + return nil, errors.Wrap(err, "failed to read page.html") + } + + slog.Info("Loaded HTML file", "size_bytes", len(htmlContent)) + + return htmlContent, nil +} + +// parseHTMLContent creates a parser and parses the HTML content. +func parseHTMLContent(htmlContent []byte) (*FullPlayerProfile, error) { + parser := NewParser() + profile, err := parser.ParseProfile(string(htmlContent), "LexFrei#21715") + if err != nil { + slog.Error("Failed to parse profile", "error", err) + + return nil, err + } + + return profile, nil +} + +// analyzeParsingResults analyzes and reports on the parsing results. +func analyzeParsingResults(profile *FullPlayerProfile) { + totalMetrics := 0 + detailedMetrics := 0 + + for platform, platformStats := range profile.Platforms { + for gameMode, gameModeStats := range platformStats.GameModes { + slog.Info("Game mode stats", + "platform", platform, + "game_mode", gameMode, + "heroes_count", len(gameModeStats.Heroes)) + + for heroID, heroStats := range gameModeStats.Heroes { + heroMetricsCount := len(heroStats.Metrics) + totalMetrics += heroMetricsCount + + if heroMetricsCount > 1 { + detailedMetrics += heroMetricsCount + logHeroMetrics(heroID, heroStats, heroMetricsCount) + } + } + } + } + + logFinalResults(totalMetrics, detailedMetrics, len(profile.Platforms)) +} + +// logHeroMetrics logs detailed information about hero metrics. +func logHeroMetrics(heroID string, heroStats *HeroStats, count int) { + slog.Info("Hero with detailed metrics", + "hero_id", heroID, + "metrics_count", count) + + // Show first few metrics for verification + metricCount := 0 + for metricKey, value := range heroStats.Metrics { + if metricCount < maxSampleMetricsCount { + slog.Info("Sample metric", + "hero_id", heroID, + "metric", metricKey, + "value", value) + } + metricCount++ + } +} + +// logFinalResults logs the final parsing results. 
+func logFinalResults(totalMetrics, detailedMetrics, platformCount int) { + slog.Info("🎯 Parser test results", + "total_metrics", totalMetrics, + "detailed_metrics", detailedMetrics, + "platforms", platformCount) + + if detailedMetrics > 0 { + slog.Info("βœ… SUCCESS: Found detailed hero metrics!") + } else { + slog.Warn("❌ No detailed hero metrics found") + } +} diff --git a/cmd/ow-exporter/metrics_test.go b/cmd/ow-exporter/metrics_test.go new file mode 100644 index 00000000..bfff6c2b --- /dev/null +++ b/cmd/ow-exporter/metrics_test.go @@ -0,0 +1,249 @@ +package main + +import ( + "fmt" + "testing" +) + +func TestHeroMetricsGeneration(t *testing.T) { + t.Parallel() + testCases := []struct { + heroID string + expectedMin int // Minimum expected metrics (15 common + hero-specific) + description string + }{ + {"soldier-76", 19, "Soldier:76 with 4 specific metrics"}, + {"mercy", 20, "Mercy with 5 specific metrics"}, + {"reinhardt", 20, "Reinhardt with 5 specific metrics"}, + {"widowmaker", 19, "Widowmaker with 4 specific metrics"}, + {"illari", 19, "Illari with 4 specific metrics"}, + {"hazard", 19, "Hazard with 4 specific metrics"}, + {"unknown-hero", 15, "Unknown hero fallback to common metrics"}, + } + + for _, testCase := range testCases { + t.Run(testCase.heroID, func(t *testing.T) { + t.Parallel() + metrics := GetHeroMetrics(testCase.heroID) + + if len(metrics) < testCase.expectedMin { + t.Errorf("Hero %s: expected at least %d metrics, got %d", + testCase.heroID, testCase.expectedMin, len(metrics)) + } + + // Verify common metrics are present + commonMetrics := []string{ + "time_played", "games_won", "win_percentage", + "weapon_accuracy", "eliminations_per_life", + } + + for _, metricName := range commonMetrics { + if _, exists := metrics[metricName]; !exists { + t.Errorf("Hero %s: missing common metric %s", testCase.heroID, metricName) + } + } + + // Log metrics count for debugging + t.Logf("Hero %s: found %d metrics (%s)", testCase.heroID, len(metrics), testCase.description) + }) + } +} + +func TestMetricDefGeneration(t *testing.T) { + t.Parallel() + // Test with Soldier:76 specifically + soldier76 := Soldier76Metrics{} + metrics := GenerateMetricDefs(soldier76) + + expectedMetrics := map[string]string{ + "helix_rocket_kills": "ow_hero_helix_rocket_kills_total", + "helix_rocket_kills_best": "ow_hero_helix_rocket_kills_best", + "biotic_field_healing": "ow_hero_biotic_field_healing_total", + "tactical_visor_kills": "ow_hero_tactical_visor_kills_total", + } + + for metricName, expectedPrometheusName := range expectedMetrics { + metricDef, exists := metrics[metricName] + if !exists { + t.Errorf("Missing Soldier:76 metric: %s", metricName) + + continue + } + + if metricDef.PrometheusName != expectedPrometheusName { + t.Errorf("Metric %s: expected prometheus name %s, got %s", + metricName, expectedPrometheusName, metricDef.PrometheusName) + } + + if metricDef.Help == "" { + t.Errorf("Metric %s: missing help text", metricName) + } + + if metricDef.Selector == "" { + t.Errorf("Metric %s: missing selector", metricName) + } + } + + t.Logf("Soldier:76 specific metrics: %d", len(metrics)) +} + +func TestHeroMetricsRegistry(t *testing.T) { + t.Parallel() + expectedHeroCount := 29 // Current number of implemented heroes + + if len(HeroMetricsRegistry) != expectedHeroCount { + t.Errorf("Expected %d heroes in registry, got %d", expectedHeroCount, len(HeroMetricsRegistry)) + } + + // Test that all heroes in registry can generate metrics + for heroID, factory := range HeroMetricsRegistry { + heroStruct 
:= factory() + metrics := GenerateMetricDefs(heroStruct) + + if len(metrics) == 0 { + t.Errorf("Hero %s: factory returned struct with no metrics", heroID) + } + + t.Logf("Hero %s: %d specific metrics", heroID, len(metrics)) + } +} + +func TestPlatformSpecificMetrics(t *testing.T) { + t.Parallel() + heroID := "soldier-76" + + testCases := []struct { + platform Platform + gameMode GameMode + description string + }{ + {PlatformPC, GameModeQuickPlay, "PC QuickPlay"}, + {PlatformPC, GameModeCompetitive, "PC Competitive"}, + {PlatformConsole, GameModeQuickPlay, "Console QuickPlay"}, + {PlatformConsole, GameModeCompetitive, "Console Competitive"}, + } + + for _, testCase := range testCases { + t.Run(testCase.description, func(t *testing.T) { + t.Parallel() + validatePlatformSpecificMetrics(t, heroID, testCase.platform, testCase.gameMode, testCase.description) + }) + } +} + +// validatePlatformSpecificMetrics validates metrics for a specific platform and game mode. +func validatePlatformSpecificMetrics( + t *testing.T, heroID string, platform Platform, gameMode GameMode, description string, +) { + t.Helper() + metrics := GetHeroMetricsForPlatform(heroID, platform, gameMode) + + if len(metrics) == 0 { + t.Errorf("No metrics found for %s", description) + + return + } + + t.Logf("%s: found %d metrics", description, len(metrics)) + logFirstFewSelectors(t, metrics, description) + validateAllMetricSelectors(t, metrics, platform, gameMode) +} + +// logFirstFewSelectors logs the first few metric selectors for debugging. +func logFirstFewSelectors(t *testing.T, metrics map[string]MetricDef, _ string) { + t.Helper() + count := 0 + for metricName, metricDef := range metrics { + if count >= 2 { + break + } + if metricDef.Selector != "" { + t.Logf(" %s selector: %s", metricName, metricDef.Selector) + count++ + } + } +} + +// validateAllMetricSelectors validates all metric selectors contain platform/gamemode context. +func validateAllMetricSelectors(t *testing.T, metrics map[string]MetricDef, platform Platform, gameMode GameMode) { + t.Helper() + for metricName, metricDef := range metrics { + if metricDef.Selector != "" { + validateMetricSelector(t, metricName, metricDef.Selector, platform, gameMode) + } + } +} + +// validateMetricSelector validates that a metric selector contains the required platform and gamemode wrappers. +func validateMetricSelector(t *testing.T, metricName, selector string, platform Platform, gameMode GameMode) { + t.Helper() + platformWrapper := getPlatformWrapper(platform) + if !contains(selector, platformWrapper) { + t.Errorf("Metric %s selector missing platform wrapper: %s", metricName, selector) + } + + gameModeWrapper := getGameModeWrapper(gameMode) + if !contains(selector, gameModeWrapper) { + t.Errorf("Metric %s selector missing gamemode wrapper: %s", metricName, selector) + } +} + +// getPlatformWrapper returns the CSS selector wrapper for the given platform. +func getPlatformWrapper(platform Platform) string { + switch platform { + case PlatformPC: + return MouseKeyboardViewActiveSelector + case PlatformConsole: + return ControllerViewActiveSelector + default: + return "" + } +} + +// getGameModeWrapper returns the CSS selector wrapper for the given game mode. 
+func getGameModeWrapper(gameMode GameMode) string { + switch gameMode { + case GameModeQuickPlay: + return QuickPlayViewActiveSelector + case GameModeCompetitive: + return CompetitiveViewActiveSelector + default: + return "" + } +} + +func contains(s, substr string) bool { + return substr != "" && len(s) >= len(substr) && + (s == substr || s[:len(substr)] == substr || + s[len(s)-len(substr):] == substr || + findInString(s, substr)) +} + +func findInString(s, substr string) bool { + for i := 0; i <= len(s)-len(substr); i++ { + if s[i:i+len(substr)] == substr { + return true + } + } + + return false +} + +// Example usage function for documentation. +func ExampleGetHeroMetrics() { + // Get metrics for Soldier:76 + metrics := GetHeroMetrics("soldier-76") + + fmt.Printf("Soldier:76 has %d total metrics\n", len(metrics)) + + // Access a specific metric + if helixMetric, exists := metrics["helix_rocket_kills"]; exists { + fmt.Printf("Helix Rocket Kills: %s\n", helixMetric.PrometheusName) + fmt.Printf("Help: %s\n", helixMetric.Help) + } + + // Output: + // Soldier:76 has 19 total metrics + // Helix Rocket Kills: ow_hero_helix_rocket_kills_total + // Help: Total eliminations with helix rockets +} diff --git a/cmd/ow-exporter/models.go b/cmd/ow-exporter/models.go new file mode 100644 index 00000000..622c2339 --- /dev/null +++ b/cmd/ow-exporter/models.go @@ -0,0 +1,178 @@ +package main + +import "time" + +// MetricDef defines a single metric with its parsing information. +type MetricDef struct { + PrometheusName string `json:"prometheusName"` // "ow_hero_time_played_seconds" + Help string `json:"help"` // Help text for Prometheus + Unit string `json:"unit"` // "seconds", "percent", "count", "ratio" + Selector string `json:"selector"` // CSS selector or data attribute + HexID string `json:"hexId"` // Blizzard's hex ID for the metric + ValueType string `json:"valueType"` // "duration", "number", "percentage" +} + +// Platform represents PC or Console. +type Platform string + +const ( + PlatformPC Platform = "pc" + PlatformConsole Platform = "console" +) + +// GameMode represents Quick Play or Competitive. +type GameMode string + +const ( + GameModeQuickPlay GameMode = "quickplay" + GameModeCompetitive GameMode = "competitive" +) + +// MetricLabels for Prometheus metrics. +type MetricLabels struct { + Username string `json:"username"` + Hero string `json:"hero"` + Platform Platform `json:"platform"` + GameMode GameMode `json:"gamemode"` +} + +// GetCommonMetrics returns common metrics using the new embedded struct approach. +func GetCommonMetrics() map[string]MetricDef { + return GetCommonMetricsForPlatform(PlatformPC, GameModeQuickPlay) +} + +// GetCommonMetricsForPlatform returns common metrics for specific platform and game mode. +func GetCommonMetricsForPlatform(platform Platform, gameMode GameMode) map[string]MetricDef { + commonStruct := CommonMetrics{} + + return GenerateMetricDefsWithContext(commonStruct, platform, gameMode) +} + +// Hero-specific metrics can be added here for special abilities. + +// PlatformSelectors for platform and game mode detection. +var PlatformSelectors = map[Platform]string{ + PlatformPC: ".mouseKeyboard-view", + PlatformConsole: ".controller-view", +} + +var GameModeSelectors = map[GameMode]string{ + GameModeQuickPlay: ".quickPlay-view", + GameModeCompetitive: ".competitive-view", +} + +// HeroSelectors for hero identification and detailed stats parsing. 
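The `contains`/`findInString` pair above re-implements substring search by hand; for the non-empty selector fragments used in these tests it is behaviorally equivalent to the standard library, as this sketch of the simpler form shows (`containsStd` is a hypothetical name, not part of the diff):

```go
import "strings"

// containsStd mirrors contains() for the selectors checked in these tests:
// an empty substring is rejected, everything else defers to strings.Contains.
func containsStd(s, substr string) bool {
	return substr != "" && strings.Contains(s, substr)
}
```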
+var HeroSelectors = struct { + Container string + Name string + ID string + TimePlayed string + StatsContainer string + StatItem string + StatName string + StatValue string + CategoryHeader string + BlzStatsSection string +}{ + Container: ".Profile-progressBar", + Name: ".Profile-progressBar-title", + ID: ".Profile-progressBar--bar[data-hero-id]", // data-hero-id is on the bar element + TimePlayed: ".Profile-progressBar-description", + StatsContainer: "span.stats-container", // OverFast API style + StatItem: ".stat-item", // Individual stat items + StatName: "p.name", // Stat name within stat-item + StatValue: "p.value", // Stat value within stat-item + CategoryHeader: ".category .content .header p", // Category headers + BlzStatsSection: "blz-section.stats", // Main stats sections +} + +// PlatformFilters for switching views. +var PlatformFilters = map[Platform]string{ + PlatformPC: "#mouseKeyboardFilter", + PlatformConsole: "#controllerFilter", +} + +// PrometheusMetricName generates Prometheus metric name. +func (m *MetricDef) PrometheusMetricName(_ MetricLabels) string { + return m.PrometheusName +} + +// GetSelector returns CSS selector for this metric. +func (m *MetricDef) GetSelector() string { + return m.Selector +} + +// New structures for the enhanced metrics system. + +// AllHeroesStats represents aggregated statistics across all heroes. +type AllHeroesStats struct { + TotalTimePlayed int64 `json:"totalTimePlayedSeconds" prometheus:"ow_player_total_time_played_seconds"` + TotalGamesWon int `json:"totalGamesWon" prometheus:"ow_player_total_games_won"` + OverallWinPercentage float64 `json:"overallWinPercentage" prometheus:"ow_player_overall_win_percentage"` + WeaponAccuracy float64 `json:"weaponAccuracyPercent" prometheus:"ow_player_weapon_accuracy_percent"` + EliminationsPerLife float64 `json:"eliminationsPerLife" prometheus:"ow_player_eliminations_per_life"` + KillStreakBest int `json:"killStreakBest" prometheus:"ow_player_kill_streak_best"` + MultikillBest int `json:"multikillBest" prometheus:"ow_player_multikill_best"` + EliminationsPer10Min float64 `json:"eliminationsPer10min" prometheus:"ow_player_eliminations_per_10min"` + DeathsPer10Min float64 `json:"deathsPer10min" prometheus:"ow_player_deaths_per_10min"` + FinalBlowsPer10Min float64 `json:"finalBlowsPer10min" prometheus:"ow_player_final_blows_per_10min"` + SoloKillsPer10Min float64 `json:"soloKillsPer10min" prometheus:"ow_player_solo_kills_per_10min"` + ObjectiveKillsPer10Min float64 `json:"objectiveKillsPer10min" prometheus:"ow_player_objective_kills_per_10min"` + ObjectiveTimePer10Min float64 `json:"objectiveTimePer10min" prometheus:"ow_player_objective_time_per_10min"` + HeroDamagePer10Min float64 `json:"heroDamagePer10min" prometheus:"ow_player_hero_damage_per_10min"` + HealingPer10Min float64 `json:"healingPer10min" prometheus:"ow_player_healing_per_10min"` +} + +// HeroMetrics represents metrics for a specific hero. +type HeroMetrics map[string]interface{} + +// RuntimeMetrics contains all runtime metrics data. +type RuntimeMetrics struct { + Players map[string]*PlayerMetrics `json:"players"` // battletag -> metrics +} + +// PlayerMetrics contains all metrics for a single player. 
+type PlayerMetrics struct { + BattleTag string `json:"battletag"` + DisplayName string `json:"displayName"` // "Joe" from HTML + PlayerTitle string `json:"playerTitle"` // "Peasant" from HTML + LastUpdated time.Time `json:"lastUpdated"` + + // Level 1: Profile-level metrics (SR, endorsement) + ProfileMetrics ProfileMetrics `json:"profileMetrics"` + + // Level 2: All Heroes aggregated metrics by platform/gamemode + AllHeroesMetrics map[Platform]map[GameMode]AllHeroesStats `json:"allHeroesMetrics"` + + // Level 3: Individual hero metrics by platform/gamemode/hero + HeroMetrics map[Platform]map[GameMode]map[string]HeroMetrics `json:"heroMetrics"` +} + +// EnhancedMetricLabels with BattleTag support. +type EnhancedMetricLabels struct { + BattleTag string `json:"battletag"` // LexFrei#21715 + PlayerName string `json:"playerName"` // Joe + Hero string `json:"hero"` // soldier-76, widowmaker, etc. + Platform Platform `json:"platform"` // pc, console + GameMode GameMode `json:"gamemode"` // quickplay, competitive + MetricType string `json:"metricType"` // profile, all_heroes, hero +} + +// AllHeroesHexIDs for parsing. +var AllHeroesHexIDs = map[string]string{ + "time_played": "0x0860000000000021", + "games_won": "0x0860000000000039", + "win_percentage": "0x08600000000003D1", + "weapon_accuracy": "0x08600000000001BB", + "eliminations_per_life": "0x08600000000003D2", + "kill_streak_best": "0x0860000000000223", + "multikill_best": "0x0860000000000346", + "eliminations_per_10min": "0x08600000000004D4", + "deaths_per_10min": "0x08600000000004D3", + "final_blows_per_10min": "0x08600000000004D5", + "solo_kills_per_10min": "0x08600000000004DA", + "objective_kills_per_10min": "0x08600000000004D8", + "objective_time_per_10min": "0x08600000000004D9", + "hero_damage_per_10min": "0x08600000000004BD", + "healing_per_10min": "0x08600000000004D6", +} diff --git a/cmd/ow-exporter/parser.go b/cmd/ow-exporter/parser.go new file mode 100644 index 00000000..65d632f7 --- /dev/null +++ b/cmd/ow-exporter/parser.go @@ -0,0 +1,1111 @@ +package main + +import ( + "fmt" + "log/slog" + "regexp" + "strconv" + "strings" + "time" + + "github.com/cockroachdb/errors" + + "github.com/PuerkitoBio/goquery" +) + +// Constants for metric names. +const ( + GamesWonMetric = "games_won" + WinPercentageMetric = "win_percentage" + WeaponAccuracyMetric = "weapon_accuracy" + EliminationsPerLifeMetric = "eliminations_per_life" + KillStreakBestMetric = "kill_streak_best" + MultikillBestMetric = "multikill_best" + EliminationsPer10MinMetric = "eliminations_per_10min" + DeathsPer10MinMetric = "deaths_per_10min" + FinalBlowsPer10MinMetric = "final_blows_per_10min" + SoloKillsPer10MinMetric = "solo_kills_per_10min" + ObjectiveKillsPer10MinMetric = "objective_kills_per_10min" + ObjectiveTimePer10MinMetric = "objective_time_per_10min" + HeroDamagePer10MinMetric = "hero_damage_per_10min" + HealingPer10MinMetric = "healing_per_10min" +) + +// FullPlayerProfile represents complete player profile with all metrics. +type FullPlayerProfile struct { + // Basic profile information + Username string `json:"username"` + BattleTag string `json:"battleTag"` + PlayerTitle string `json:"playerTitle"` + LastUpdate time.Time `json:"lastUpdate"` + + // Profile-level metrics + ProfileMetrics ProfileMetrics `json:"profileMetrics"` + + // Hero statistics matrix: platform -> gamemode -> hero -> metrics + Platforms map[Platform]*PlatformStats `json:"platforms"` +} + +// ProfileMetrics represents profile-level statistics. 
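The nested platform/gamemode maps in `PlayerMetrics` above have to be allocated level by level before they can be written to (writing into a nil inner map would panic); a minimal sketch with invented values:

```go
pm := &PlayerMetrics{
	BattleTag:        "Player#1234", // hypothetical example value
	LastUpdated:      time.Now(),
	AllHeroesMetrics: make(map[Platform]map[GameMode]AllHeroesStats),
	HeroMetrics:      make(map[Platform]map[GameMode]map[string]HeroMetrics),
}

// Allocate the per-platform level before storing gamemode data.
if pm.AllHeroesMetrics[PlatformPC] == nil {
	pm.AllHeroesMetrics[PlatformPC] = make(map[GameMode]AllHeroesStats)
}
pm.AllHeroesMetrics[PlatformPC][GameModeQuickPlay] = AllHeroesStats{TotalGamesWon: 42}
```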
+type ProfileMetrics struct { + Endorsement EndorsementData `json:"endorsement"` + SkillRatings map[Platform]map[Role]RankInfo `json:"skillRatings"` +} + +// EndorsementData represents endorsement level and breakdown. +type EndorsementData struct { + Level int `json:"level"` + // Endorsement breakdown by category (if available in HTML) + Breakdown *EndorsementBreakdown `json:"breakdown,omitempty"` +} + +// EndorsementBreakdown represents the breakdown of endorsements by category. +type EndorsementBreakdown struct { + Sportsmanship int `json:"sportsmanship"` // Good teammate, stays positive + Teamwork int `json:"teamwork"` // Team player, communicates + ShotCaller int `json:"shotCaller"` // Good leadership, makes good calls +} + +// Role represents player roles. +type Role string + +const ( + RoleTank Role = "tank" + RoleDamage Role = "damage" + RoleSupport Role = "support" +) + +// RankInfo represents competitive ranking information. +type RankInfo struct { + Tier string `json:"tier"` // Bronze, Silver, Gold, Platinum, Diamond, Master, Grandmaster, Champion + Division int `json:"division"` // 1-5 + SR int `json:"sr"` // Skill Rating (if available) +} + +// PlatformStats represents statistics for a specific platform (PC/Console). +type PlatformStats struct { + Platform Platform `json:"platform"` + GameModes map[GameMode]*GameModeStats `json:"gameModes"` +} + +// GameModeStats represents statistics for a specific game mode. +type GameModeStats struct { + GameMode GameMode `json:"gameMode"` + AllHeroesStats AllHeroesStats `json:"allHeroesStats"` + Heroes map[string]*HeroStats `json:"heroes"` +} + +// HeroStats represents all statistics for a specific hero. +type HeroStats struct { + HeroID string `json:"heroId"` + HeroName string `json:"heroName"` + Metrics map[string]float64 `json:"metrics"` +} + +// Parser handles HTML parsing for Overwatch profiles. +type Parser struct { + // Add any configuration or dependencies here +} + +// NewParser creates a new parser instance. +func NewParser() *Parser { + return &Parser{} +} + +// ParseProfile parses an Overwatch profile HTML and extracts all statistics. 
+func (p *Parser) ParseProfile(html, username string) (*FullPlayerProfile, error) { + doc, err := goquery.NewDocumentFromReader(strings.NewReader(html)) + if err != nil { + return nil, errors.Wrap(err, "failed to parse HTML") + } + + // Extract basic profile info + playerName := strings.TrimSpace(doc.Find(".Profile-player--name").Text()) + playerTitle := strings.TrimSpace(doc.Find(".Profile-player--title").Text()) + + stats := &FullPlayerProfile{ + Username: username, + BattleTag: playerName, + PlayerTitle: playerTitle, + LastUpdate: time.Now(), + Platforms: make(map[Platform]*PlatformStats), + ProfileMetrics: ProfileMetrics{ + SkillRatings: make(map[Platform]map[Role]RankInfo), + }, + } + + // Parse profile-level metrics + p.parseProfileMetrics(doc, stats) + + // Debug summary + progressBars := doc.Find(".Profile-progressBar") + heroElements := doc.Find("[data-hero-id]") + slog.Info("HTML parsing debug", + "progress_bars", progressBars.Length(), + "hero_elements", heroElements.Length(), + "player_name", playerName, + "player_title", playerTitle) + + // Parse both PC and Console platforms + for platform := range PlatformSelectors { + platformStats, err := p.parsePlatformStats(doc, platform) + if err != nil { + // Skip platforms with no data + if errors.Is(err, ErrNoPlatformData) { + continue // Skip silently + } + + return nil, errors.Wrapf(err, "failed to parse %s stats", platform) + } + if platformStats != nil { + stats.Platforms[platform] = platformStats + } + } + + return stats, nil +} + +// parsePlatformStats parses statistics for a specific platform. +func (p *Parser) parsePlatformStats(doc *goquery.Document, platform Platform) (*PlatformStats, error) { + // Check if this platform has data + platformSelector := PlatformSelectors[platform] + platformView := doc.Find(platformSelector) + + // Debug: print what we found + slog.Info("Platform parsing", + "platform", platform, + "selector", platformSelector, + "elements_found", platformView.Length()) + + if platformView.Length() == 0 { + return nil, ErrNoPlatformData + } + + stats := &PlatformStats{ + Platform: platform, + GameModes: make(map[GameMode]*GameModeStats), + } + + // Parse both Quick Play and Competitive modes + for gameMode := range GameModeSelectors { + gameModeStats, err := p.parseGameModeStats(platformView, gameMode) + if err != nil { + // Skip game modes with no data + if errors.Is(err, ErrNoGameModeData) { + continue // Skip silently + } + + return nil, errors.Wrapf(err, "failed to parse %s stats", gameMode) + } + if gameModeStats != nil { + stats.GameModes[gameMode] = gameModeStats + } + } + + return stats, nil +} + +// parseGameModeStats parses statistics for a specific game mode within a platform. 
+func (p *Parser) parseGameModeStats(platformView *goquery.Selection, gameMode GameMode) (*GameModeStats, error) { + // Find the game mode view within the platform + gameModeSelector := GameModeSelectors[gameMode] + gameModeView := platformView.Find(gameModeSelector) + + // Debug game mode detection + slog.Info("GameMode parsing", + "gamemode", gameMode, + "selector", gameModeSelector, + "elements_found", gameModeView.Length()) + + if gameModeView.Length() == 0 { + return nil, ErrNoGameModeData + } + + stats := &GameModeStats{ + GameMode: gameMode, + Heroes: make(map[string]*HeroStats), + } + + // Parse All Heroes aggregated statistics first + allHeroesStats := p.parseAllHeroesStats(gameModeView) + if allHeroesStats != nil { + stats.AllHeroesStats = *allHeroesStats + } + + // Parse hero statistics - look for progress bars within this game mode view + heroContainers := gameModeView.Find(HeroSelectors.Container) + slog.Info("Hero containers found", + "gamemode", gameMode, + "hero_count", heroContainers.Length()) + + var parseErrors []error + heroContainers.Each(func(_ int, heroEl *goquery.Selection) { + heroStats, err := p.parseHeroStats(heroEl) + if err != nil { + // Skip heroes with missing data (not real errors) + if errors.Is(err, ErrNoHeroID) || errors.Is(err, ErrNoHeroName) { + return // Skip silently + } + parseErrors = append(parseErrors, err) + + return + } + if heroStats != nil { + stats.Heroes[heroStats.HeroID] = heroStats + slog.Debug("Hero parsed", + "hero_name", heroStats.HeroName, + "hero_id", heroStats.HeroID, + "metrics_count", len(heroStats.Metrics)) + } + }) + + // Check for validation errors + if len(parseErrors) > 0 { + for _, err := range parseErrors { + slog.Error("Hero parsing validation error", "error", err.Error()) + } + + return nil, parseErrors[0] // Return first error to fail fast + } + + return stats, nil +} + +// parseHeroStats parses statistics for a single hero. +func (p *Parser) parseHeroStats(heroEl *goquery.Selection) (*HeroStats, error) { + // Extract and validate hero identification + heroID, heroName, err := p.extractHeroIdentification(heroEl) + if err != nil { + return nil, err + } + + // Validate hero exists in registry + err = p.validateHeroInRegistry(heroID, heroName) + if err != nil { + return nil, err + } + + stats := &HeroStats{ + HeroID: heroID, + HeroName: heroName, + Metrics: make(map[string]float64), + } + + // Extract all metrics for this hero + p.extractAllHeroMetrics(heroEl, heroID, heroName, stats) + + return stats, nil +} + +// extractHeroIdentification extracts hero ID and name from the hero element. +func (p *Parser) extractHeroIdentification(heroEl *goquery.Selection) (heroID, heroName string, err error) { + // Extract hero ID from the bar element + barEl := heroEl.Find(".Profile-progressBar--bar[data-hero-id]") + if barEl.Length() == 0 { + return "", "", ErrNoHeroID + } + + heroID, exists := barEl.Attr("data-hero-id") + if !exists { + return "", "", ErrNoHeroID + } + + // Extract hero name + heroName = strings.TrimSpace(heroEl.Find(HeroSelectors.Name).Text()) + if heroName == "" { + slog.Warn("Hero missing name", "hero_id", heroID, "selector", HeroSelectors.Name) + + return "", "", ErrNoHeroName + } + + return heroID, heroName, nil +} + +// validateHeroInRegistry ensures the hero exists in our metrics registry. 
+func (p *Parser) validateHeroInRegistry(heroID, heroName string) error { + if _, exists := HeroMetricsRegistry[heroID]; !exists { + return errors.Wrapf(ErrUnknownHero, + "hero_id='%s', hero_name='%s' is not in HeroMetricsRegistry - 100%% coverage validation failed", + heroID, heroName) + } + + return nil +} + +// extractAllHeroMetrics extracts all detailed metrics for the hero. +// Now updated to work with the actual HTML structure from JavaScript-loaded content. +func (p *Parser) extractAllHeroMetrics(heroEl *goquery.Selection, heroID, heroName string, stats *HeroStats) { + // Extract common metrics from progress bars + p.extractCommonMetricsFromProgressBar(heroEl, heroID, heroName, stats) + + // Extract detailed hero-specific metrics from stats-container elements + p.extractDetailedHeroMetrics(heroEl, heroID, heroName, stats) + + slog.Debug("Hero parsing completed", + "hero_id", heroID, + "hero_name", heroName, + "extracted_metrics", len(stats.Metrics)) +} + +// extractCommonMetricsFromProgressBar extracts the common metrics from the progress bar description. +// This reads the value from the .Profile-progressBar-description element. +func (p *Parser) extractCommonMetricsFromProgressBar(heroEl *goquery.Selection, heroID, _ string, stats *HeroStats) { + // Get the progress bar description value (this shows the current metric value) + descriptionEl := heroEl.Find(".Profile-progressBar-description") + if descriptionEl.Length() == 0 { + slog.Debug("No progress bar description found", + "hero_id", heroID, + "selector", ".Profile-progressBar-description") + + return + } + + valueText := strings.TrimSpace(descriptionEl.Text()) + if valueText == "" { + return + } + + // Determine the current metric being displayed by finding the active category + // Look for the active data-category-id in the parent container + parentDoc := heroEl.Parents().Last() + activeCategory := parentDoc.Find(".Profile-progressBars.is-active[data-category-id]") + if activeCategory.Length() == 0 { + slog.Debug("No active category found for hero", + "hero_id", heroID, + "value_text", valueText) + + return + } + + categoryID, exists := activeCategory.Attr("data-category-id") + if !exists { + return + } + + // Map category ID to metric name + metricName := p.mapCategoryIDToMetricName(categoryID) + if metricName == "" { + slog.Debug("Unknown category ID", + "hero_id", heroID, + "category_id", categoryID) + + return + } + + // Parse the value based on metric type + value := p.parseMetricValue(valueText, metricName) + if value > 0 { + stats.Metrics[metricName] = value + slog.Debug("Extracted progress bar metric", + "hero_id", heroID, + "metric", metricName, + "value", value, + "value_text", valueText, + "category_id", categoryID) + } +} + +// mapCategoryIDToMetricName maps hex category IDs to metric names. +func (p *Parser) mapCategoryIDToMetricName(categoryID string) string { + // Reverse lookup from AllHeroesHexIDs + for metricName, hexID := range AllHeroesHexIDs { + if hexID == categoryID { + return metricName + } + } + + return "" +} + +// parseTimeToSeconds converts time strings like "44:28:48" to seconds. 
+func (p *Parser) parseTimeToSeconds(timeStr string) float64 { + // Handle formats: "HH:MM:SS" or "MM:SS" or just numbers + re := regexp.MustCompile(`^(?:(\d+):)?(\d+):(\d+)$`) + matches := re.FindStringSubmatch(timeStr) + + const expectedRegexMatches = 4 + if len(matches) != expectedRegexMatches { + return 0 + } + + if matches[1] != "" { + return p.parseHHMMSSFormat(matches) + } + + return p.parseMMSSFormat(matches) +} + +// parseHHMMSSFormat parses time in HH:MM:SS format. +func (p *Parser) parseHHMMSSFormat(matches []string) float64 { + hours, err := strconv.Atoi(matches[1]) + if err != nil { + return 0 + } + + minutes, err := strconv.Atoi(matches[2]) + if err != nil { + return 0 + } + + seconds, err := strconv.Atoi(matches[3]) + if err != nil { + return 0 + } + + return float64(hours*3600 + minutes*60 + seconds) +} + +// parseMMSSFormat parses time in MM:SS format. +func (p *Parser) parseMMSSFormat(matches []string) float64 { + minutes, err := strconv.Atoi(matches[2]) + if err != nil { + return 0 + } + + seconds, err := strconv.Atoi(matches[3]) + if err != nil { + return 0 + } + + return float64(minutes*60 + seconds) +} + +// parsePercentage converts percentage strings like "74%" to float. +func (p *Parser) parsePercentage(percentStr string) float64 { + cleaned := strings.TrimSuffix(strings.TrimSpace(percentStr), "%") + value, err := strconv.ParseFloat(cleaned, 64) + if err == nil { + return value + } + + return 0 +} + +// parseNumber converts number strings to float. +func (p *Parser) parseNumber(numStr string) float64 { + // Remove commas and other formatting + cleaned := strings.ReplaceAll(strings.TrimSpace(numStr), ",", "") + value, err := strconv.ParseFloat(cleaned, 64) + if err == nil { + return value + } + + return 0 +} + +// parseProfileMetrics extracts profile-level statistics (endorsement, skill ratings). +func (p *Parser) parseProfileMetrics(doc *goquery.Document, stats *FullPlayerProfile) { + p.parseEndorsementLevel(doc, stats) + p.parseSkillRatings(doc, stats) +} + +// parseEndorsementLevel extracts and sets the endorsement level and breakdown. +func (p *Parser) parseEndorsementLevel(doc *goquery.Document, stats *FullPlayerProfile) { + endorsementImg := doc.Find(".Profile-playerSummary--endorsement") + if endorsementImg.Length() == 0 { + return + } + + src, exists := endorsementImg.Attr("src") + if !exists { + return + } + + level := extractEndorsementLevel(src) + if level > 0 { + stats.ProfileMetrics.Endorsement.Level = level + slog.Debug("Parsed endorsement", "level", level) + } + + // Try to parse endorsement breakdown if available + breakdown := p.parseEndorsementBreakdown(doc) + if breakdown != nil { + stats.ProfileMetrics.Endorsement.Breakdown = breakdown + slog.Debug("Parsed endorsement breakdown", + "sportsmanship", breakdown.Sportsmanship, + "teamwork", breakdown.Teamwork, + "shotCaller", breakdown.ShotCaller) + } +} + +// parseEndorsementBreakdown attempts to extract endorsement breakdown data from various possible selectors. 
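To make the behaviour of the parsing helpers above concrete, here is a sketch of the conversions they are expected to produce; the inputs are invented examples of the formats described in the comments, not values captured from a real profile:

```go
// Inside package main, where the unexported helpers are visible.
p := NewParser()

_ = p.parseTimeToSeconds("44:28:48") // HH:MM:SS -> 44*3600 + 28*60 + 48 = 160128
_ = p.parseTimeToSeconds("07:42")    // MM:SS    -> 7*60 + 42 = 462
_ = p.parseTimeToSeconds("n/a")      // no regex match -> 0
_ = p.parsePercentage("74%")         // -> 74
_ = p.parseNumber("12,345")          // comma-grouped -> 12345
```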
+func (p *Parser) parseEndorsementBreakdown(doc *goquery.Document) *EndorsementBreakdown { + // Try multiple possible selectors for endorsement breakdown data + selectors := []string{ + ".Profile-endorsement--breakdown", + ".endorsement-breakdown", + ".Profile-playerSummary--endorsementBreakdown", + "[data-endorsement-breakdown]", + ".endorsement-stats", + ".Profile-endorsement .endorsement-categories", + } + + for _, selector := range selectors { + if breakdown := p.tryParseEndorsementSelector(doc, selector); breakdown != nil { + return breakdown + } + } + + // Log that breakdown data wasn't found (for debugging purposes) + slog.Debug("Endorsement breakdown data not found in HTML structure") + + return nil +} + +// tryParseEndorsementSelector attempts to parse endorsement breakdown from a specific selector. +func (p *Parser) tryParseEndorsementSelector(doc *goquery.Document, selector string) *EndorsementBreakdown { + element := doc.Find(selector) + if element.Length() == 0 { + return nil + } + + breakdown := &EndorsementBreakdown{} + found := false + + // Try to find individual endorsement category elements + categorySelectors := map[string]*int{ + "sportsmanship": &breakdown.Sportsmanship, + "teamwork": &breakdown.Teamwork, + "shot-caller": &breakdown.ShotCaller, + "shotcaller": &breakdown.ShotCaller, + "leadership": &breakdown.ShotCaller, // Alternative name + } + + element.Find("*").Each(func(_ int, selection *goquery.Selection) { + // Check data attributes + for category, field := range categorySelectors { + if attr, exists := selection.Attr("data-" + category); exists { + if value := parseInt(attr); value > 0 { + *field = value + found = true + } + } + } + + // Check class names + classes := selection.AttrOr("class", "") + text := strings.TrimSpace(selection.Text()) + + for category, field := range categorySelectors { + if strings.Contains(classes, category) && text != "" { + if value := parseInt(text); value > 0 { + *field = value + found = true + } + } + } + }) + + if found { + return breakdown + } + + return nil +} + +// parseInt safely converts a string to an integer, returning 0 if parsing fails. +func parseInt(s string) int { + // Remove any non-numeric characters except digits + cleanStr := regexp.MustCompile(`\D`).ReplaceAllString(s, "") + if cleanStr == "" { + return 0 + } + + value, err := strconv.Atoi(cleanStr) + if err != nil { + return 0 + } + + return value +} + +// parseSkillRatings extracts skill ratings for all platforms. +func (p *Parser) parseSkillRatings(doc *goquery.Document, stats *FullPlayerProfile) { + for platform := range PlatformSelectors { + platformRanks := p.parsePlatformRanks(doc, platform) + if len(platformRanks) > 0 { + stats.ProfileMetrics.SkillRatings[platform] = platformRanks + } + } +} + +// parsePlatformRanks extracts rank information for a specific platform. 
+func (p *Parser) parsePlatformRanks(doc *goquery.Document, platform Platform) map[Role]RankInfo { + platformRanks := make(map[Role]RankInfo) + + platformSelector := p.getPlatformSelector(platform) + rankWrapper := doc.Find(platformSelector) + slog.Debug("Rank wrapper search", "platform", platform, "selector", platformSelector, "found", rankWrapper.Length()) + + if rankWrapper.Length() == 0 { + return platformRanks + } + + roleWrappers := rankWrapper.Find(".Profile-playerSummary--roleWrapper") + roleWrappers.Each(func(_ int, roleEl *goquery.Selection) { + role, rankInfo := p.parseRoleRank(roleEl, platform) + if role != "" && rankInfo.Tier != "" { + platformRanks[Role(role)] = rankInfo + } + }) + + return platformRanks +} + +// getPlatformSelector returns the CSS selector for a platform. +func (p *Parser) getPlatformSelector(platform Platform) string { + if platform == PlatformPC { + return ".Profile-playerSummary--rankWrapper.is-active" + } + + return ".controller-view.Profile-playerSummary--rankWrapper" +} + +// parseRoleRank extracts role and rank information from a role element. +func (p *Parser) parseRoleRank(roleEl *goquery.Selection, platform Platform) (string, RankInfo) { + roleImg := roleEl.Find(".Profile-playerSummary--role img") + if roleImg.Length() == 0 { + return "", RankInfo{} + } + + roleIconSrc, exists := roleImg.Attr("src") + if !exists { + return "", RankInfo{} + } + + role := extractRoleFromIcon(roleIconSrc) + if role == "" { + return "", RankInfo{} + } + + rankInfo := extractRankInfo(roleEl) + if rankInfo.Tier != "" { + slog.Debug("Parsed rank", + "platform", platform, + "role", role, + "tier", rankInfo.Tier, + "division", rankInfo.Division) + } + + return role, rankInfo +} + +// extractEndorsementLevel extracts endorsement level from icon URL. +func extractEndorsementLevel(iconURL string) int { + // URL format: "...endorsement/2-8b9f0faa25.svg" -> level 2 + re := regexp.MustCompile(`endorsement/(\d+)-`) + matches := re.FindStringSubmatch(iconURL) + if len(matches) >= 2 { + level, err := strconv.Atoi(matches[1]) + if err == nil { + return level + } + } + + return 0 +} + +// extractRoleFromIcon determines role from icon URL. +func extractRoleFromIcon(iconURL string) string { + switch { + case strings.Contains(iconURL, "offense"): + return "damage" + case strings.Contains(iconURL, "support"): + return "support" + case strings.Contains(iconURL, "tank"): + return "tank" + default: + return "" + } +} + +// extractRankInfo extracts tier and division from rank elements. +func extractRankInfo(roleEl *goquery.Selection) RankInfo { + var rankInfo RankInfo + + // Find rank images + rankImages := roleEl.Find(".Profile-playerSummary--rank") + + rankImages.Each(func(_ int, img *goquery.Selection) { + if src, exists := img.Attr("src"); exists { + // Extract tier (Bronze, Silver, Gold, etc.) + if tier := extractTierFromURL(src); tier != "" && rankInfo.Tier == "" { + rankInfo.Tier = tier + } + // Extract division (1-5) + if division := extractDivisionFromURL(src); division > 0 && rankInfo.Division == 0 { + rankInfo.Division = division + } + } + }) + + return rankInfo +} + +// extractTierFromURL extracts tier name from rank icon URL. +func extractTierFromURL(url string) string { + // URL format: "...Rank_DiamondTier-d775ca9c43.png" + re := regexp.MustCompile(`Rank_(\w+)Tier`) + matches := re.FindStringSubmatch(url) + if len(matches) >= 2 { + return matches[1] + } + + return "" +} + +// extractDivisionFromURL extracts division number from division icon URL. 
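The URL extractors above key off fragments of Blizzard's asset filenames; the URLs below are shortened illustrations of the formats quoted in the comments (the hosts are placeholders), with the expected results noted:

```go
_ = extractEndorsementLevel("https://example.invalid/endorsement/2-8b9f0faa25.svg") // -> 2
_ = extractTierFromURL("https://example.invalid/Rank_DiamondTier-d775ca9c43.png")   // -> "Diamond"
_ = extractRoleFromIcon("https://example.invalid/icons/role/support.svg")           // -> "support"
```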
+func extractDivisionFromURL(url string) int { + // URL format: "...TierDivision_2-4376c89b41.png" + re := regexp.MustCompile(`TierDivision_(\d+)`) + matches := re.FindStringSubmatch(url) + if len(matches) >= 2 { + div, err := strconv.Atoi(matches[1]) + if err == nil { + return div + } + } + + return 0 +} + +// parseAllHeroesStats parses aggregated statistics across all heroes. +func (p *Parser) parseAllHeroesStats(gameModeView *goquery.Selection) *AllHeroesStats { + stats := &AllHeroesStats{} + + // Parse each metric using the hex IDs from AllHeroesHexIDs + for metricKey, hexID := range AllHeroesHexIDs { + value := p.parseHeroMetricFromGameMode(gameModeView, metricKey, hexID) + if value != 0 { + p.assignValueToAllHeroesStats(stats, metricKey, value) + } + } + + p.logAllHeroesStatsSummary(stats) + + return stats +} + +// parseHeroMetricFromGameMode extracts and parses a single metric value. +func (p *Parser) parseHeroMetricFromGameMode(gameModeView *goquery.Selection, metricKey, hexID string) float64 { + selector := fmt.Sprintf("[data-category-id='%s'] .Profile-progressBar-description", hexID) + element := gameModeView.Find(selector) + + if element.Length() == 0 { + return 0 + } + + valueText := element.First().Text() + value := p.parseMetricValue(valueText, metricKey) + + slog.Debug("All Heroes metric parsed", + "metric", metricKey, + "hex_id", hexID, + "value_text", valueText, + "parsed_value", value) + + return value +} + +// assignValueToAllHeroesStats assigns the parsed value to the appropriate field. +// allHeroesStatAssigner defines a function type for assigning values to AllHeroesStats. +type allHeroesStatAssigner func(*AllHeroesStats, float64) + +// getAllHeroesStatAssigners returns a map of metric keys to their assignment functions. +func (p *Parser) getAllHeroesStatAssigners() map[string]allHeroesStatAssigner { + return map[string]allHeroesStatAssigner{ + "time_played": func(stats *AllHeroesStats, value float64) { + stats.TotalTimePlayed = int64(value) + }, + GamesWonMetric: func(stats *AllHeroesStats, value float64) { + stats.TotalGamesWon = int(value) + }, + WinPercentageMetric: func(stats *AllHeroesStats, value float64) { + stats.OverallWinPercentage = value + }, + WeaponAccuracyMetric: func(stats *AllHeroesStats, value float64) { + stats.WeaponAccuracy = value + }, + EliminationsPerLifeMetric: func(stats *AllHeroesStats, value float64) { + stats.EliminationsPerLife = value + }, + KillStreakBestMetric: func(stats *AllHeroesStats, value float64) { + stats.KillStreakBest = int(value) + }, + MultikillBestMetric: func(stats *AllHeroesStats, value float64) { + stats.MultikillBest = int(value) + }, + EliminationsPer10MinMetric: func(stats *AllHeroesStats, value float64) { + stats.EliminationsPer10Min = value + }, + DeathsPer10MinMetric: func(stats *AllHeroesStats, value float64) { + stats.DeathsPer10Min = value + }, + FinalBlowsPer10MinMetric: func(stats *AllHeroesStats, value float64) { + stats.FinalBlowsPer10Min = value + }, + SoloKillsPer10MinMetric: func(stats *AllHeroesStats, value float64) { + stats.SoloKillsPer10Min = value + }, + ObjectiveKillsPer10MinMetric: func(stats *AllHeroesStats, value float64) { + stats.ObjectiveKillsPer10Min = value + }, + ObjectiveTimePer10MinMetric: func(stats *AllHeroesStats, value float64) { + stats.ObjectiveTimePer10Min = value + }, + HeroDamagePer10MinMetric: func(stats *AllHeroesStats, value float64) { + stats.HeroDamagePer10Min = value + }, + HealingPer10MinMetric: func(stats *AllHeroesStats, value float64) { + stats.HealingPer10Min = value 
+ }, + } +} + +func (p *Parser) assignValueToAllHeroesStats(stats *AllHeroesStats, metricKey string, value float64) { + assigners := p.getAllHeroesStatAssigners() + if assigner, exists := assigners[metricKey]; exists { + assigner(stats, value) + } +} + +// logAllHeroesStatsSummary logs a summary of parsed all heroes statistics. +func (p *Parser) logAllHeroesStatsSummary(stats *AllHeroesStats) { + slog.Info("All Heroes stats parsed", + "time_played", stats.TotalTimePlayed, + "games_won", stats.TotalGamesWon, + "win_percentage", stats.OverallWinPercentage) +} + +// parseMetricValue parses a metric value based on its type. +func (p *Parser) parseMetricValue(valueText, metricType string) float64 { + valueText = strings.TrimSpace(valueText) + + switch { + case strings.Contains(metricType, "time_played"): + return p.parseTimeToSeconds(valueText) + case strings.Contains(metricType, "percentage") || strings.Contains(metricType, "accuracy"): + return p.parsePercentage(valueText) + default: + return p.parseNumber(valueText) + } +} + +// extractDetailedHeroMetrics extracts hero-specific detailed statistics using OverFast API logic. +// Searches blz-section.stats.{gamemode}-view sections for span.stats-container.option-{N} elements. +func (p *Parser) extractDetailedHeroMetrics(heroEl *goquery.Selection, heroID, heroName string, stats *HeroStats) { + slog.Debug("Extracting detailed hero metrics using OverFast API logic", + "hero_id", heroID, "hero_name", heroName) + + // Get document root to search for stats sections + doc := heroEl.Closest("html") + + // Look for blz-section.stats sections with gamemode views + gamemodeViews := []string{".quickPlay-view", ".competitive-view"} + + extractedCount := 0 + for _, gamemodeView := range gamemodeViews { + count := p.processGameModeStats(doc, gamemodeView, heroID, stats) + extractedCount += count + } + + slog.Info("Hero metrics extraction completed", + "hero_id", heroID, + "hero_name", heroName, + "extracted_metrics_count", extractedCount) +} + +// processGameModeStats processes stats for a specific gamemode view. +func (p *Parser) processGameModeStats(doc *goquery.Selection, gamemodeView, heroID string, stats *HeroStats) int { + statsSection := doc.Find(HeroSelectors.BlzStatsSection + gamemodeView) + if statsSection.Length() == 0 { + slog.Debug("No stats section found for gamemode view", + "gamemode_view", gamemodeView, "hero_id", heroID) + + return 0 + } + + slog.Debug("Found stats section for gamemode", + "gamemode_view", gamemodeView, "hero_id", heroID) + + // Find all span.stats-container elements (OverFast API style) + statsContainers := p.findStatsContainers(statsSection) + if statsContainers.Length() == 0 { + slog.Debug("No stats containers found in section", + "gamemode_view", gamemodeView, "hero_id", heroID) + + return 0 + } + + slog.Debug("Found stats containers in section", + "gamemode_view", gamemodeView, + "containers_count", statsContainers.Length(), + "hero_id", heroID) + + extractedCount := 0 + statsContainers.Each(func(containerIdx int, container *goquery.Selection) { + count := p.processStatsContainer(container, containerIdx, heroID, stats) + extractedCount += count + }) + + return extractedCount +} + +// findStatsContainers finds stats-container elements using multiple selectors. 
+func (p *Parser) findStatsContainers(statsSection *goquery.Selection) *goquery.Selection { + // Look for pattern: span.stats-container.option-{N} + statsContainers := statsSection.Find(HeroSelectors.StatsContainer) + + // Also try broader search if no containers found with specific selector + if statsContainers.Length() == 0 { + statsContainers = statsSection.Find("span.stats-container") + } + + return statsContainers +} + +// processStatsContainer processes a single stats container for hero-specific metrics. +func (p *Parser) processStatsContainer( + container *goquery.Selection, containerIdx int, heroID string, stats *HeroStats, +) int { + // Check if this container has a select element (OverFast API pattern) + selectEl := container.Find("select") + if selectEl.Length() == 0 { + slog.Debug("Container has no select element, skipping", + "container_idx", containerIdx, "hero_id", heroID) + + return 0 + } + + // Check if this is hero-specific data by looking for our hero in the select options + if !p.isHeroInContainer(selectEl, heroID) { + slog.Debug("Hero not found in container options", + "container_idx", containerIdx, "hero_id", heroID) + + return 0 + } + + slog.Debug("Found hero-specific container", + "container_idx", containerIdx, "hero_id", heroID) + + // Extract stat items using OverFast API selectors + statItems := container.Find(HeroSelectors.StatItem) + if statItems.Length() == 0 { + slog.Debug("No stat items found in container", + "container_idx", containerIdx, "hero_id", heroID) + + return 0 + } + + slog.Debug("Processing stat items", + "container_idx", containerIdx, + "stat_items_count", statItems.Length(), + "hero_id", heroID) + + extractedCount := 0 + statItems.Each(func(itemIdx int, item *goquery.Selection) { + if p.processStatItem(item, itemIdx, heroID, stats) { + extractedCount++ + } + }) + + return extractedCount +} + +// isHeroInContainer checks if the specified hero is available in the container's select options. +func (p *Parser) isHeroInContainer(selectEl *goquery.Selection, heroID string) bool { + heroFound := false + selectEl.Find("option").Each(func(_ int, option *goquery.Selection) { + optionValue, exists := option.Attr("value") + if exists && optionValue == heroID { + heroFound = true + } + }) + + return heroFound +} + +// processStatItem processes a single stat item and stores it in the stats structure. 
+func (p *Parser) processStatItem(item *goquery.Selection, itemIdx int, heroID string, stats *HeroStats) bool { + // Extract stat name and value using OverFast API selectors + statName := strings.TrimSpace(item.Find(HeroSelectors.StatName).Text()) + statValue := strings.TrimSpace(item.Find(HeroSelectors.StatValue).Text()) + + if statName == "" || statValue == "" { + slog.Debug("Empty stat name or value, skipping", + "item_idx", itemIdx, "stat_name", statName, "stat_value", statValue) + + return false + } + + // Convert stat name to metric key using StringToSnakeCase from value_parser.go + metricKey := StringToSnakeCase(statName) + + // Parse the value using ParseValue from value_parser.go + parsedValue := ParseValue(statValue) + + // Convert parsed value to float64 for storage + floatValue, ok := p.convertToFloat(parsedValue, statName) + if !ok { + return false + } + + // Initialize metrics map if needed + if stats.Metrics == nil { + stats.Metrics = make(map[string]float64) + } + + // Store the metric + stats.Metrics[metricKey] = floatValue + + slog.Debug("Extracted hero metric", + "hero_id", heroID, + "stat_name", statName, + "metric_key", metricKey, + "raw_value", statValue, + "parsed_value", parsedValue, + "stored_value", floatValue) + + return true +} + +// convertToFloat converts various parsed value types to float64. +func (p *Parser) convertToFloat(parsedValue interface{}, statName string) (float64, bool) { + switch value := parsedValue.(type) { + case float64: + return value, true + case int: + return float64(value), true + case string: + // Handle string values that might be numeric + val, err := strconv.ParseFloat(value, 64) + if err == nil { + return val, true + } + + slog.Debug("Could not convert string value to float", + "stat_name", statName, "value", value) + + return 0, false + default: + slog.Debug("Unsupported value type for metric storage", + "stat_name", statName, "value", parsedValue, "type", fmt.Sprintf("%T", parsedValue)) + + return 0, false + } +} + +// ExampleParser provides example usage function for testing. +func ExampleParser() { + // This would be called from the main ow-exporter application + _ = NewParser() + + // Read one of our saved profiles + // html := readFile("/Users/lex/git/github.com/lexfrei/tools/tmp/profile_de5bb4aca17492e0.html") + // stats, err := parser.ParseProfile(html, "LexFrei") + // if err != nil { + // log.Fatal(err) + // } + + slog.Info("Parser ready for integration into ow-exporter") +} diff --git a/cmd/ow-exporter/prometheus.go b/cmd/ow-exporter/prometheus.go new file mode 100644 index 00000000..65119cf8 --- /dev/null +++ b/cmd/ow-exporter/prometheus.go @@ -0,0 +1,685 @@ +package main + +import ( + "fmt" + "log/slog" + "net/http" + "strings" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" +) + +// ServerReadTimeout defines the server read timeout duration. +const ServerReadTimeout = 15 * time.Second + +// ServerWriteTimeout defines the server write timeout duration. +const ServerWriteTimeout = 15 * time.Second + +// ServerIdleTimeout defines the server idle timeout duration. +const ServerIdleTimeout = 60 * time.Second + +// TimePlayedSecondsMetric defines the metric name for time played in seconds. +const TimePlayedSecondsMetric = "time_played_seconds" + +// PrometheusMetrics contains all Prometheus metrics. 
+type PrometheusMetrics struct { + // Profile-level metrics + playerEndorsementLevel *prometheus.GaugeVec + playerSkillRating *prometheus.GaugeVec + + // All Heroes aggregated metrics + playerTimePlayed *prometheus.GaugeVec + playerGamesWon *prometheus.GaugeVec + playerWinPercentage *prometheus.GaugeVec + playerWeaponAccuracy *prometheus.GaugeVec + playerEliminationsPerLife *prometheus.GaugeVec + playerKillStreakBest *prometheus.GaugeVec + playerMultikillBest *prometheus.GaugeVec + playerEliminationsPer10Min *prometheus.GaugeVec + playerDeathsPer10Min *prometheus.GaugeVec + playerFinalBlowsPer10Min *prometheus.GaugeVec + playerSoloKillsPer10Min *prometheus.GaugeVec + playerObjectiveKillsPer10Min *prometheus.GaugeVec + playerObjectiveTimePer10Min *prometheus.GaugeVec + playerHeroDamagePer10Min *prometheus.GaugeVec + playerHealingPer10Min *prometheus.GaugeVec + + // Hero-specific metrics + heroTimePlayed *prometheus.GaugeVec + heroGamesWon *prometheus.GaugeVec + heroWinPercentage *prometheus.GaugeVec + heroWeaponAccuracy *prometheus.GaugeVec + heroEliminationsPerLife *prometheus.GaugeVec + heroKillStreakBest *prometheus.GaugeVec + heroMultikillBest *prometheus.GaugeVec + heroEliminationsPer10Min *prometheus.GaugeVec + heroDeathsPer10Min *prometheus.GaugeVec + heroFinalBlowsPer10Min *prometheus.GaugeVec + heroSoloKillsPer10Min *prometheus.GaugeVec + heroObjectiveKillsPer10Min *prometheus.GaugeVec + heroObjectiveTimePer10Min *prometheus.GaugeVec + heroHeroDamagePer10Min *prometheus.GaugeVec + heroHealingPer10Min *prometheus.GaugeVec + + // Dynamic detailed hero metrics - stores all hero-specific metrics by their Prometheus name + detailedHeroMetrics map[string]*prometheus.GaugeVec +} + +var prometheusMetrics *PrometheusMetrics + +// initPrometheusMetrics initializes all Prometheus metrics. +func initPrometheusMetrics() { + labels := createMetricLabels() + prometheusMetrics = &PrometheusMetrics{} + + initProfileMetrics(labels) + initAllHeroesMetrics(labels) + initHeroSpecificMetrics(labels) + + registerAllMetrics() + + slog.Info("Prometheus metrics initialized") +} + +// createMetricLabels defines label sets for different metric types. +func createMetricLabels() map[string][]string { + return map[string][]string{ + "profile": {"battletag", "platform", "gamemode"}, + "allHeroes": {"battletag", "platform", "gamemode"}, + "hero": {"battletag", "hero", "platform", "gamemode"}, + "skillRating": {"battletag", "platform", "gamemode", "rank_tier"}, + } +} + +// initProfileMetrics initializes profile-level metrics. +func initProfileMetrics(labels map[string][]string) { + prometheusMetrics.playerEndorsementLevel = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "ow_player_endorsement_level", + Help: "Player endorsement level", + }, labels["profile"]) + + prometheusMetrics.playerSkillRating = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "ow_player_skill_rating", + Help: "Player skill rating (SR) for competitive modes", + }, labels["skillRating"]) +} + +// MetricDefinition defines a metric field and its configuration. +type MetricDefinition struct { + Field **prometheus.GaugeVec + Name string + Help string +} + +// GameplayMetricType represents different types of gameplay metrics. +type GameplayMetricType struct { + PlayerField **prometheus.GaugeVec + HeroField **prometheus.GaugeVec + BaseName string + Description string +} + +// getGameplayMetricTypes returns all gameplay metrics with their fields and descriptions. 
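For readers less familiar with client_golang, this is roughly what one of the hero gauges defined above amounts to once created, registered, and written; the label values are invented:

```go
g := prometheus.NewGaugeVec(prometheus.GaugeOpts{
	Name: "ow_hero_time_played_seconds",
	Help: "Time played with specific hero",
}, []string{"battletag", "hero", "platform", "gamemode"})

prometheus.MustRegister(g)
g.WithLabelValues("Player#1234", "soldier-76", "pc", "quickplay").Set(160128)
```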
+func getGameplayMetricTypes() []GameplayMetricType { + basicMetrics := getBasicGameplayMetrics() + combatMetrics := getCombatGameplayMetrics() + per10MinMetrics := getPer10MinGameplayMetrics() + + result := make([]GameplayMetricType, 0, len(basicMetrics)+len(combatMetrics)+len(per10MinMetrics)) + result = append(result, basicMetrics...) + result = append(result, combatMetrics...) + result = append(result, per10MinMetrics...) + + return result +} + +// getBasicGameplayMetrics returns basic gameplay metrics. +func getBasicGameplayMetrics() []GameplayMetricType { + return []GameplayMetricType{ + { + &prometheusMetrics.playerTimePlayed, &prometheusMetrics.heroTimePlayed, + TimePlayedSecondsMetric, "Time played", + }, + { + &prometheusMetrics.playerGamesWon, &prometheusMetrics.heroGamesWon, + "games_won", "Games won", + }, + { + &prometheusMetrics.playerWinPercentage, &prometheusMetrics.heroWinPercentage, + "win_percentage", "Win percentage", + }, + { + &prometheusMetrics.playerWeaponAccuracy, &prometheusMetrics.heroWeaponAccuracy, + "weapon_accuracy_percent", "Weapon accuracy percentage", + }, + } +} + +// getCombatGameplayMetrics returns combat-related gameplay metrics. +func getCombatGameplayMetrics() []GameplayMetricType { + return []GameplayMetricType{ + { + &prometheusMetrics.playerEliminationsPerLife, &prometheusMetrics.heroEliminationsPerLife, + "eliminations_per_life", "Eliminations per life", + }, + { + &prometheusMetrics.playerKillStreakBest, &prometheusMetrics.heroKillStreakBest, + "kill_streak_best", "Best kill streak achieved", + }, + { + &prometheusMetrics.playerMultikillBest, &prometheusMetrics.heroMultikillBest, + "multikill_best", "Best multikill achieved", + }, + } +} + +// getPer10MinGameplayMetrics returns per-10-minute gameplay metrics. +func getPer10MinGameplayMetrics() []GameplayMetricType { + return []GameplayMetricType{ + { + &prometheusMetrics.playerEliminationsPer10Min, &prometheusMetrics.heroEliminationsPer10Min, + "eliminations_per_10min", "Eliminations per 10 minutes", + }, + { + &prometheusMetrics.playerDeathsPer10Min, &prometheusMetrics.heroDeathsPer10Min, + "deaths_per_10min", "Deaths per 10 minutes", + }, + { + &prometheusMetrics.playerFinalBlowsPer10Min, &prometheusMetrics.heroFinalBlowsPer10Min, + "final_blows_per_10min", "Final blows per 10 minutes", + }, + { + &prometheusMetrics.playerSoloKillsPer10Min, &prometheusMetrics.heroSoloKillsPer10Min, + "solo_kills_per_10min", "Solo kills per 10 minutes", + }, + { + &prometheusMetrics.playerObjectiveKillsPer10Min, &prometheusMetrics.heroObjectiveKillsPer10Min, + "objective_kills_per_10min", "Objective kills per 10 minutes", + }, + { + &prometheusMetrics.playerObjectiveTimePer10Min, &prometheusMetrics.heroObjectiveTimePer10Min, + "objective_time_per_10min", "Objective time per 10 minutes", + }, + { + &prometheusMetrics.playerHeroDamagePer10Min, &prometheusMetrics.heroHeroDamagePer10Min, + "hero_damage_per_10min", "Hero damage per 10 minutes", + }, + { + &prometheusMetrics.playerHealingPer10Min, &prometheusMetrics.heroHealingPer10Min, + "healing_per_10min", "Healing done per 10 minutes", + }, + } +} + +// initAllHeroesMetrics initializes aggregated all-heroes metrics. 
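+// The resulting gauge names use the "ow_player_" prefix, for example
+// ow_player_total_time_played_seconds and ow_player_eliminations_per_10min.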
+func initAllHeroesMetrics(labels map[string][]string) { + for _, metric := range getGameplayMetricTypes() { + *metric.PlayerField = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "ow_player_" + getPlayerMetricName(metric.BaseName), + Help: getPlayerHelpText(metric.Description), + }, labels["allHeroes"]) + } +} + +// initHeroSpecificMetrics initializes hero-specific metrics. +func initHeroSpecificMetrics(labels map[string][]string) { + for _, metric := range getGameplayMetricTypes() { + *metric.HeroField = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "ow_hero_" + getHeroMetricName(metric.BaseName), + Help: getHeroHelpText(metric.Description), + }, labels["hero"]) + } + + // Initialize detailed hero metrics map + prometheusMetrics.detailedHeroMetrics = make(map[string]*prometheus.GaugeVec) + + // Create dynamic metrics for all heroes based on their metric definitions + createDetailedHeroMetrics(labels["hero"]) +} + +// createDetailedHeroMetrics creates all hero-specific metrics from hero metric definitions. +func createDetailedHeroMetrics(heroLabels []string) { + processedMetrics := make(map[string]bool) // Track already created metrics + + // Iterate through all heroes in registry + for heroID, heroFactory := range HeroMetricsRegistry { + heroStruct := heroFactory() + heroMetrics := GenerateMetricDefs(heroStruct) + + slog.Debug("Creating detailed metrics for hero", "hero_id", heroID, "metrics_count", len(heroMetrics)) + + for _, metricDef := range heroMetrics { + prometheusName := metricDef.PrometheusName + if prometheusName == "" { + continue + } + + // Skip if metric already processed (metrics can be shared across heroes) + if processedMetrics[prometheusName] { + continue + } + + // Create the Prometheus metric + prometheusMetrics.detailedHeroMetrics[prometheusName] = prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: prometheusName, + Help: metricDef.Help, + }, heroLabels) + + processedMetrics[prometheusName] = true + + slog.Debug("Created detailed hero metric", + "metric_name", prometheusName, + "help", metricDef.Help) + } + } + + slog.Info("Created detailed hero metrics", "total_metrics", len(prometheusMetrics.detailedHeroMetrics)) +} + +// getPlayerMetricName returns the metric name for player-level metrics. +func getPlayerMetricName(baseName string) string { + if baseName == TimePlayedSecondsMetric { + return "total_time_played_seconds" + } + if baseName == "games_won" { + return "total_games_won" + } + if baseName == "win_percentage" { + return "overall_win_percentage" + } + + return baseName +} + +// getHeroMetricName returns the metric name for hero-level metrics. +func getHeroMetricName(baseName string) string { + if baseName == TimePlayedSecondsMetric { + return baseName + } + + if strings.Contains(baseName, "_per_10min") { + return baseName + "_avg" + } + + return baseName +} + +// getPlayerHelpText returns help text for player metrics. +func getPlayerHelpText(description string) string { + return description + " across all heroes" +} + +// getHeroHelpText returns help text for hero metrics. +func getHeroHelpText(description string) string { + return description + " with specific hero" +} + +// registerAllMetrics registers all metrics with Prometheus. 
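+// MustRegister panics on duplicate registration, so this must run exactly once,
+// after every gauge vector (including the dynamic hero metrics) has been created.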
+func registerAllMetrics() { + prometheus.MustRegister( + prometheusMetrics.playerEndorsementLevel, + prometheusMetrics.playerSkillRating, + prometheusMetrics.playerTimePlayed, + prometheusMetrics.playerGamesWon, + prometheusMetrics.playerWinPercentage, + prometheusMetrics.playerWeaponAccuracy, + prometheusMetrics.playerEliminationsPerLife, + prometheusMetrics.playerKillStreakBest, + prometheusMetrics.playerMultikillBest, + prometheusMetrics.playerEliminationsPer10Min, + prometheusMetrics.playerDeathsPer10Min, + prometheusMetrics.playerFinalBlowsPer10Min, + prometheusMetrics.playerSoloKillsPer10Min, + prometheusMetrics.playerObjectiveKillsPer10Min, + prometheusMetrics.playerObjectiveTimePer10Min, + prometheusMetrics.playerHeroDamagePer10Min, + prometheusMetrics.playerHealingPer10Min, + prometheusMetrics.heroTimePlayed, + prometheusMetrics.heroGamesWon, + prometheusMetrics.heroWinPercentage, + prometheusMetrics.heroWeaponAccuracy, + prometheusMetrics.heroEliminationsPerLife, + prometheusMetrics.heroKillStreakBest, + prometheusMetrics.heroMultikillBest, + prometheusMetrics.heroEliminationsPer10Min, + prometheusMetrics.heroDeathsPer10Min, + prometheusMetrics.heroFinalBlowsPer10Min, + prometheusMetrics.heroSoloKillsPer10Min, + prometheusMetrics.heroObjectiveKillsPer10Min, + prometheusMetrics.heroObjectiveTimePer10Min, + prometheusMetrics.heroHeroDamagePer10Min, + prometheusMetrics.heroHealingPer10Min, + ) + + // Register all dynamic detailed hero metrics + for _, metric := range prometheusMetrics.detailedHeroMetrics { + prometheus.MustRegister(metric) + } + + slog.Info("Registered all Prometheus metrics", "detailed_hero_metrics", len(prometheusMetrics.detailedHeroMetrics)) +} + +// updatePrometheusMetrics updates all Prometheus metrics from runtime data. +func updatePrometheusMetrics() { + if prometheusMetrics == nil { + slog.Error("Prometheus metrics not initialized") + + return + } + + resetAllMetrics() + allPlayers := getAllPlayerMetrics() + updateAllPlayerMetrics(allPlayers) + + // Count total heroes for debug + totalHeroes := 0 + for _, player := range allPlayers { + for _, platforms := range player.HeroMetrics { + for _, heroes := range platforms { + totalHeroes += len(heroes) + } + } + } + + slog.Info("Prometheus metrics updated", + "players", len(allPlayers), + "total_heroes", totalHeroes) +} + +// resetAllMetrics clears all Prometheus metrics. +func resetAllMetrics() { + resetPlayerMetrics() + resetHeroMetrics() +} + +// resetPlayerMetrics resets all player-level metrics. +func resetPlayerMetrics() { + prometheusMetrics.playerEndorsementLevel.Reset() + prometheusMetrics.playerSkillRating.Reset() + prometheusMetrics.playerTimePlayed.Reset() + prometheusMetrics.playerGamesWon.Reset() + prometheusMetrics.playerWinPercentage.Reset() + prometheusMetrics.playerWeaponAccuracy.Reset() + prometheusMetrics.playerEliminationsPerLife.Reset() + prometheusMetrics.playerKillStreakBest.Reset() + prometheusMetrics.playerMultikillBest.Reset() + prometheusMetrics.playerEliminationsPer10Min.Reset() + prometheusMetrics.playerDeathsPer10Min.Reset() + prometheusMetrics.playerFinalBlowsPer10Min.Reset() + prometheusMetrics.playerSoloKillsPer10Min.Reset() + prometheusMetrics.playerObjectiveKillsPer10Min.Reset() + prometheusMetrics.playerObjectiveTimePer10Min.Reset() + prometheusMetrics.playerHeroDamagePer10Min.Reset() + prometheusMetrics.playerHealingPer10Min.Reset() +} + +// resetHeroMetrics resets all hero-level metrics. 
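+// Reset deletes every labeled child of each GaugeVec, so heroes that disappear
+// from the runtime store do not linger as stale series on the next scrape.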
+func resetHeroMetrics() { + prometheusMetrics.heroTimePlayed.Reset() + prometheusMetrics.heroGamesWon.Reset() + prometheusMetrics.heroWinPercentage.Reset() + prometheusMetrics.heroWeaponAccuracy.Reset() + prometheusMetrics.heroEliminationsPerLife.Reset() + prometheusMetrics.heroKillStreakBest.Reset() + prometheusMetrics.heroMultikillBest.Reset() + prometheusMetrics.heroEliminationsPer10Min.Reset() + prometheusMetrics.heroDeathsPer10Min.Reset() + prometheusMetrics.heroFinalBlowsPer10Min.Reset() + prometheusMetrics.heroSoloKillsPer10Min.Reset() + prometheusMetrics.heroObjectiveKillsPer10Min.Reset() + prometheusMetrics.heroObjectiveTimePer10Min.Reset() + prometheusMetrics.heroHeroDamagePer10Min.Reset() + prometheusMetrics.heroHealingPer10Min.Reset() +} + +// updateAllPlayerMetrics updates metrics for all players. +func updateAllPlayerMetrics(allPlayers map[string]*PlayerMetrics) { + for battleTag, playerMetrics := range allPlayers { + updateSinglePlayerMetrics(battleTag, playerMetrics) + } +} + +// updateSinglePlayerMetrics updates metrics for a single player. +func updateSinglePlayerMetrics(battleTag string, playerMetrics *PlayerMetrics) { + updateProfileMetrics(battleTag, playerMetrics) + updateAllHeroesMetrics(battleTag, playerMetrics) + updateHeroSpecificMetrics(battleTag, playerMetrics) +} + +// updateProfileMetrics updates profile-level metrics. +func updateProfileMetrics(battleTag string, playerMetrics *PlayerMetrics) { + prometheusMetrics.playerEndorsementLevel.WithLabelValues( + battleTag, "all", "all").Set(float64(playerMetrics.ProfileMetrics.Endorsement.Level)) + + updateSkillRatings(battleTag, playerMetrics) +} + +// updateSkillRatings updates skill rating metrics. +func updateSkillRatings(battleTag string, playerMetrics *PlayerMetrics) { + for platform, ranks := range playerMetrics.ProfileMetrics.SkillRatings { + for tier, sr := range ranks { + prometheusMetrics.playerSkillRating.WithLabelValues( + battleTag, string(platform), "competitive", string(tier)).Set(float64(sr.SR)) + } + } +} + +// updateAllHeroesMetrics updates aggregated all-heroes metrics. +func updateAllHeroesMetrics(battleTag string, playerMetrics *PlayerMetrics) { + for platform, gameModes := range playerMetrics.AllHeroesMetrics { + for gameMode, allHeroesStats := range gameModes { + baseLabels := createBaseLabels(battleTag, string(platform), string(gameMode)) + setAllHeroesMetricValues(baseLabels, &allHeroesStats) + } + } +} + +// updateHeroSpecificMetrics updates hero-specific metrics. +func updateHeroSpecificMetrics(battleTag string, playerMetrics *PlayerMetrics) { + totalHeroes := 0 + for platform, gameModes := range playerMetrics.HeroMetrics { + for gameMode, heroes := range gameModes { + slog.Debug("Processing heroes for platform/gamemode", + "battletag", battleTag, + "platform", platform, + "gamemode", gameMode, + "hero_count", len(heroes)) + + for heroID, heroMetrics := range heroes { + baseLabels := createHeroLabels(battleTag, heroID, string(platform), string(gameMode)) + setHeroMetricValues(baseLabels, heroMetrics) + totalHeroes++ + } + } + } + + if totalHeroes > 0 { + slog.Info("Updated hero-specific metrics", + "battletag", battleTag, + "heroes_processed", totalHeroes) + } else { + slog.Warn("No hero-specific metrics to update", + "battletag", battleTag, + "hero_metrics_empty", len(playerMetrics.HeroMetrics) == 0) + } +} + +// createBaseLabels creates base labels for metrics. 
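+// Illustrative values: battletag="Player#1234", platform="pc", gamemode="competitive".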
+func createBaseLabels(battleTag, platform, gameMode string) prometheus.Labels { + return prometheus.Labels{ + "battletag": battleTag, + "platform": platform, + "gamemode": gameMode, + } +} + +// createHeroLabels creates labels for hero-specific metrics. +func createHeroLabels(battleTag, heroID, platform, gameMode string) prometheus.Labels { + return prometheus.Labels{ + "battletag": battleTag, + "hero": heroID, + "platform": platform, + "gamemode": gameMode, + } +} + +// setAllHeroesMetricValues sets all heroes metric values. +func setAllHeroesMetricValues(labels prometheus.Labels, stats *AllHeroesStats) { + prometheusMetrics.playerTimePlayed.With(labels).Set(float64(stats.TotalTimePlayed)) + prometheusMetrics.playerGamesWon.With(labels).Set(float64(stats.TotalGamesWon)) + prometheusMetrics.playerWinPercentage.With(labels).Set(stats.OverallWinPercentage) + prometheusMetrics.playerWeaponAccuracy.With(labels).Set(stats.WeaponAccuracy) + prometheusMetrics.playerEliminationsPerLife.With(labels).Set(stats.EliminationsPerLife) + prometheusMetrics.playerKillStreakBest.With(labels).Set(float64(stats.KillStreakBest)) + prometheusMetrics.playerMultikillBest.With(labels).Set(float64(stats.MultikillBest)) + prometheusMetrics.playerEliminationsPer10Min.With(labels).Set(stats.EliminationsPer10Min) + prometheusMetrics.playerDeathsPer10Min.With(labels).Set(stats.DeathsPer10Min) + prometheusMetrics.playerFinalBlowsPer10Min.With(labels).Set(stats.FinalBlowsPer10Min) + prometheusMetrics.playerSoloKillsPer10Min.With(labels).Set(stats.SoloKillsPer10Min) + prometheusMetrics.playerObjectiveKillsPer10Min.With(labels).Set(stats.ObjectiveKillsPer10Min) + prometheusMetrics.playerObjectiveTimePer10Min.With(labels).Set(stats.ObjectiveTimePer10Min) + prometheusMetrics.playerHeroDamagePer10Min.With(labels).Set(stats.HeroDamagePer10Min) + prometheusMetrics.playerHealingPer10Min.With(labels).Set(stats.HealingPer10Min) +} + +// setHeroMetricValues sets hero-specific metric values. +func setHeroMetricValues(labels prometheus.Labels, heroMetrics map[string]interface{}) { + heroID := labels["hero"] + battleTag := labels["battletag"] + metricsCount := 0 + + for metricKey, metricValue := range heroMetrics { + switch value := metricValue.(type) { + case float64: + updateHeroMetric(metricKey, value, labels) + metricsCount++ + case int: + updateHeroMetric(metricKey, float64(value), labels) + metricsCount++ + case int64: + updateHeroMetric(metricKey, float64(value), labels) + metricsCount++ + default: + slog.Debug("Skipped unsupported metric value type", + "battletag", battleTag, + "hero", heroID, + "metric", metricKey, + "type", fmt.Sprintf("%T", value)) + } + } + + if metricsCount > 0 { + slog.Debug("Set hero metrics", + "battletag", battleTag, + "hero", heroID, + "metrics_count", metricsCount) + } +} + +// updateHeroMetric updates a specific hero metric based on the metric key. +func updateHeroMetric(metricKey string, value float64, labels prometheus.Labels) { + // Try detailed hero metrics + if tryDetailedMetricUpdate(metricKey, value, labels) { + return + } + + slog.Debug("No metric updater found", + "battletag", labels["battletag"], + "hero", labels["hero"], + "metric", metricKey, + "value", value) +} + +// tryDetailedMetricUpdate attempts to update using detailed hero metrics. 
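+// It first tries a direct match on the Prometheus metric name, then falls back
+// to matching the metric key against the hero's ow tag definitions.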
+func tryDetailedMetricUpdate(metricKey string, value float64, labels prometheus.Labels) bool { + heroID, hasHero := labels["hero"] + if !hasHero { + slog.Debug("No hero in labels for metric", "metric", metricKey) + + return false + } + + // Try direct prometheus name match first + if updateByPrometheusName(metricKey, value, labels) { + return true + } + + // Try ow tag match + return updateByOwTag(heroID, metricKey, value, labels) +} + +// updateByPrometheusName tries to update by matching the prometheus metric name directly. +func updateByPrometheusName(prometheusName string, value float64, labels prometheus.Labels) bool { + if dynamicMetric, exists := prometheusMetrics.detailedHeroMetrics[prometheusName]; exists { + dynamicMetric.With(labels).Set(value) + slog.Debug("Updated detailed hero metric", + "battletag", labels["battletag"], + "hero", labels["hero"], + "prometheus_name", prometheusName, + "value", value) + + return true + } + + return false +} + +// updateByOwTag tries to find and update a metric by matching the ow tag. +func updateByOwTag(heroID, metricKey string, value float64, labels prometheus.Labels) bool { + heroMetrics := GetHeroMetrics(heroID) + for taggedKey, taggedDef := range heroMetrics { + if taggedKey == metricKey && taggedDef.PrometheusName != "" { + if dynamicMetric, exists := prometheusMetrics.detailedHeroMetrics[taggedDef.PrometheusName]; exists { + dynamicMetric.With(labels).Set(value) + slog.Debug("Updated detailed hero metric via ow tag", + "battletag", labels["battletag"], + "hero", labels["hero"], + "metric_key", metricKey, + "ow_tag", taggedKey, + "prometheus_name", taggedDef.PrometheusName, + "value", value) + + return true + } + } + } + + return false +} + +// startPrometheusServer starts the HTTP server for Prometheus metrics. +func startPrometheusServer(port string) { + // Create metrics handler that updates data before serving + http.HandleFunc("/metrics", func(w http.ResponseWriter, r *http.Request) { + updatePrometheusMetrics() + promhttp.Handler().ServeHTTP(w, r) + }) + + // Health check endpoint + http.HandleFunc("/health", func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusOK) + _, err := w.Write([]byte("OK")) + if err != nil { + slog.Error("Failed to write health check response", "error", err) + } + }) + + slog.Info("Starting Prometheus metrics server", "port", port) + server := &http.Server{ + Addr: ":" + port, + ReadTimeout: ServerReadTimeout, + WriteTimeout: ServerWriteTimeout, + IdleTimeout: ServerIdleTimeout, + } + err := server.ListenAndServe() + if err != nil { + slog.Error("Failed to start metrics server", "error", err) + } +} diff --git a/cmd/ow-exporter/resolver.go b/cmd/ow-exporter/resolver.go new file mode 100644 index 00000000..71248d01 --- /dev/null +++ b/cmd/ow-exporter/resolver.go @@ -0,0 +1,190 @@ +package main + +import ( + "context" + "log/slog" + "net/http" + "strings" + "time" + + "github.com/cockroachdb/errors" +) + +// PlatformType represents the gaming platform. +type PlatformType string + +const ( + PlatformTypePC PlatformType = "pc" + PlatformTypePSN PlatformType = "psn" + PlatformTypeXBL PlatformType = "xbl" +) + +// ResolveResult contains the result of BattleTag resolution. +type ResolveResult struct { + BattleTag string + ResolvedURL string + Platform PlatformType + Success bool + Error error +} + +// resolveBattleTagToURL resolves a BattleTag to its actual profile URL. 
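+// Illustrative usage (assumes Blizzard redirects the plain career URL to the
+// canonical profile page):
+//
+//	result, err := resolveBattleTagToURL("LexFrei#21715")
+//	// tries https://overwatch.blizzard.com/en-us/career/LexFrei-21715 and, on
+//	// success, returns the final redirected URL in result.ResolvedURL.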
+func resolveBattleTagToURL(battleTag string) (*ResolveResult, error) { + slog.Info("Resolving BattleTag to URL", "battletag", battleTag) + + // Convert BattleTag format: "LexFrei#21715" -> "LexFrei-21715" + urlTag := strings.Replace(battleTag, "#", "-", 1) + + // Try to resolve the BattleTag + simpleURL := "https://overwatch.blizzard.com/en-us/career/" + urlTag + slog.Debug("Trying URL", "url", simpleURL) + + resolvedURL, err := followRedirects(simpleURL) + if err != nil { + slog.Debug("Resolution failed", "error", err) + } else if resolvedURL != "" && strings.Contains(resolvedURL, "/career/") && resolvedURL != simpleURL { + // Successfully resolved to a profile-specific URL + slog.Info("Successfully resolved BattleTag", + "battletag", battleTag, + "resolved_url", resolvedURL) + + return &ResolveResult{ + BattleTag: battleTag, + ResolvedURL: resolvedURL, + Platform: PlatformTypePC, // Default to PC + Success: true, + }, nil + } + + err = errors.Wrapf(ErrBattleTagNotResolved, "%s", battleTag) + + return &ResolveResult{ + BattleTag: battleTag, + Success: false, + Error: err, + }, err +} + +// followRedirects follows HTTP redirects and returns the final URL. +func followRedirects(url string) (string, error) { + // Create HTTP client with timeout + client := &http.Client{ + Timeout: 10 * time.Second, + CheckRedirect: func(_ *http.Request, via []*http.Request) error { + // Allow up to 10 redirects + if len(via) >= 10 { + return ErrTooManyRedirects + } + + return nil + }, + } + + // Create request with proper headers + req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, url, http.NoBody) + if err != nil { + return "", errors.Wrap(err, "failed to create request") + } + + // Set headers to mimic a real browser + req.Header.Set("User-Agent", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 "+ + "(KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36") + req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8") + req.Header.Set("Accept-Language", "en-US,en;q=0.5") + req.Header.Set("Accept-Encoding", "gzip, deflate, br") + req.Header.Set("Connection", "keep-alive") + req.Header.Set("Upgrade-Insecure-Requests", "1") + + // Perform the request + resp, err := client.Do(req) + if err != nil { + return "", errors.Wrap(err, "failed to perform request") + } + defer resp.Body.Close() + + // Check for successful response + if resp.StatusCode == http.StatusOK { + finalURL := resp.Request.URL.String() + slog.Debug("Redirect successful", "original_url", url, "final_url", finalURL) + + return finalURL, nil + } + + // Check for profile not found + if resp.StatusCode == http.StatusNotFound { + slog.Debug("Profile not found", "url", url, "status", resp.StatusCode) + + return "", ErrProfileNotFound + } + + return "", errors.Wrapf(ErrUnexpectedStatusCode, "%d", resp.StatusCode) +} + +// getOrResolveURL gets the resolved URL for a BattleTag, resolving it if necessary. 
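+// Resolved URLs are cached in the players config, so the HTTP resolution above
+// only happens for BattleTags that have not been seen before.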
+func getOrResolveURL(battleTag string) (string, error) { + // First, try to find in config + if player := findPlayerByBattleTag(battleTag); player != nil && player.ResolvedURL != "" { + slog.Debug("Using cached URL from config", "battletag", battleTag, "url", player.ResolvedURL) + + return player.ResolvedURL, nil + } + + // Not in config or URL is empty, resolve it + slog.Info("Resolving BattleTag (not in cache)", "battletag", battleTag) + result, err := resolveBattleTagToURL(battleTag) + if err != nil { + return "", errors.Wrapf(err, "failed to resolve BattleTag %s", battleTag) + } + + // Save to config for future use + err = addPlayerToConfig(battleTag, result.ResolvedURL) + if err != nil { + slog.Warn("Failed to save resolved URL to config", "error", err) + // Don't fail the whole operation just because we couldn't save to config + } else { + slog.Info("Saved resolved URL to config", "battletag", battleTag) + } + + return result.ResolvedURL, nil +} + +// resolveAllPlayers resolves URLs for all players in config. +func resolveAllPlayers(forceResolve bool) error { + players := getAllPlayers() + if len(players) == 0 { + slog.Info("No players found in config") + + return nil + } + + slog.Info("Resolving URLs for all players", "count", len(players), "force", forceResolve) + + for _, player := range players { + // Skip if already resolved (unless forcing) + if !forceResolve && player.ResolvedURL != "" { + slog.Debug("Skipping already resolved player", "battletag", player.BattleTag) + + continue + } + + slog.Info("Resolving player", "battletag", player.BattleTag) + result, err := resolveBattleTagToURL(player.BattleTag) + if err != nil { + slog.Error("Failed to resolve player", "battletag", player.BattleTag, "error", err) + + continue + } + + // Update config with resolved URL + err = updatePlayerURL(player.BattleTag, result.ResolvedURL) + if err != nil { + slog.Error("Failed to update player URL in config", "battletag", player.BattleTag, "error", err) + + return errors.Wrap(err, "failed to update player URL in config") + } + slog.Info("Updated player URL in config", "battletag", player.BattleTag) + } + + return nil +} diff --git a/cmd/ow-exporter/runtime.go b/cmd/ow-exporter/runtime.go new file mode 100644 index 00000000..ad9eacf5 --- /dev/null +++ b/cmd/ow-exporter/runtime.go @@ -0,0 +1,223 @@ +package main + +import ( + "log/slog" + "sync" + "time" +) + +var ( + // Global runtime metrics store. + runtimeMetrics *RuntimeMetrics + runtimeMutex sync.RWMutex +) + +// initRuntimeMetrics initializes the runtime metrics system. +func initRuntimeMetrics() { + runtimeMutex.Lock() + defer runtimeMutex.Unlock() + + runtimeMetrics = &RuntimeMetrics{ + Players: make(map[string]*PlayerMetrics), + } + + slog.Info("Runtime metrics system initialized") +} + +// getPlayerMetrics returns metrics for a specific player. +func getPlayerMetrics(battleTag string) (*PlayerMetrics, bool) { + runtimeMutex.RLock() + defer runtimeMutex.RUnlock() + + if runtimeMetrics == nil { + return nil, false + } + + player, exists := runtimeMetrics.Players[battleTag] + + return player, exists +} + +// setPlayerMetrics stores metrics for a specific player. 
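+// The write lock guards the shared runtime store against concurrent readers and
+// writers; the nil check lazily initializes the store if needed.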
+func setPlayerMetrics(battleTag string, metrics *PlayerMetrics) { + runtimeMutex.Lock() + defer runtimeMutex.Unlock() + + if runtimeMetrics == nil { + runtimeMetrics = &RuntimeMetrics{ + Players: make(map[string]*PlayerMetrics), + } + } + + runtimeMetrics.Players[battleTag] = metrics + slog.Info("Updated runtime metrics for player", "battletag", battleTag) +} + +// getAllPlayerMetrics returns all player metrics. +func getAllPlayerMetrics() map[string]*PlayerMetrics { + runtimeMutex.RLock() + defer runtimeMutex.RUnlock() + + if runtimeMetrics == nil { + return make(map[string]*PlayerMetrics) + } + + // Create a copy to avoid race conditions + result := make(map[string]*PlayerMetrics) + for k, v := range runtimeMetrics.Players { + result[k] = v + } + + return result +} + +// removePlayerMetrics removes metrics for a specific player. +func removePlayerMetrics(battleTag string) bool { + runtimeMutex.Lock() + defer runtimeMutex.Unlock() + + if runtimeMetrics == nil { + return false + } + + if _, exists := runtimeMetrics.Players[battleTag]; exists { + delete(runtimeMetrics.Players, battleTag) + slog.Info("Removed runtime metrics for player", "battletag", battleTag) + + return true + } + + return false +} + +// clearAllMetrics clears all runtime metrics. +func clearAllMetrics() { + runtimeMutex.Lock() + defer runtimeMutex.Unlock() + + if runtimeMetrics != nil { + count := len(runtimeMetrics.Players) + runtimeMetrics.Players = make(map[string]*PlayerMetrics) + slog.Info("Cleared all runtime metrics", "cleared_count", count) + } +} + +// getMetricsStats returns statistics about the runtime metrics store. +func getMetricsStats() map[string]interface{} { + runtimeMutex.RLock() + defer runtimeMutex.RUnlock() + + stats := map[string]interface{}{ + "total_players": 0, + "last_updated": nil, + } + + if runtimeMetrics == nil { + return stats + } + + stats["total_players"] = len(runtimeMetrics.Players) + + // Find most recent update time + var mostRecent time.Time + for _, player := range runtimeMetrics.Players { + if player.LastUpdated.After(mostRecent) { + mostRecent = player.LastUpdated + } + } + + if !mostRecent.IsZero() { + stats["last_updated"] = mostRecent + } + + return stats +} + +// createPlayerMetrics creates a new PlayerMetrics from a FullPlayerProfile. 
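+// Per-hero metric maps are copied key by key so the runtime store never aliases
+// maps owned by the parsed profile.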
+func createPlayerMetrics(battleTag string, profile *FullPlayerProfile) *PlayerMetrics { + playerMetrics := &PlayerMetrics{ + BattleTag: battleTag, + DisplayName: profile.BattleTag, + PlayerTitle: profile.PlayerTitle, + LastUpdated: time.Now(), + ProfileMetrics: profile.ProfileMetrics, + AllHeroesMetrics: make(map[Platform]map[GameMode]AllHeroesStats), + HeroMetrics: make(map[Platform]map[GameMode]map[string]HeroMetrics), + } + + // Convert platform data to runtime format + for platformKey, platformStats := range profile.Platforms { + // Initialize platform maps + playerMetrics.AllHeroesMetrics[platformKey] = make(map[GameMode]AllHeroesStats) + playerMetrics.HeroMetrics[platformKey] = make(map[GameMode]map[string]HeroMetrics) + + for gameModeKey, gameModeStats := range platformStats.GameModes { + // Initialize gamemode maps + playerMetrics.HeroMetrics[platformKey][gameModeKey] = make(map[string]HeroMetrics) + + // Convert hero metrics + for heroID, heroStats := range gameModeStats.Heroes { + heroMetrics := make(HeroMetrics) + for key, value := range heroStats.Metrics { + heroMetrics[key] = value + } + playerMetrics.HeroMetrics[platformKey][gameModeKey][heroID] = heroMetrics + } + + // Store AllHeroesStats from parsed data + playerMetrics.AllHeroesMetrics[platformKey][gameModeKey] = gameModeStats.AllHeroesStats + } + } + + return playerMetrics +} + +// updatePlayerFromProfile updates existing PlayerMetrics with new profile data. +func updatePlayerFromProfile(existing *PlayerMetrics, profile *FullPlayerProfile) { + existing.DisplayName = profile.BattleTag + existing.PlayerTitle = profile.PlayerTitle + existing.LastUpdated = time.Now() + existing.ProfileMetrics = profile.ProfileMetrics + + // Clear existing hero metrics + existing.HeroMetrics = make(map[Platform]map[GameMode]map[string]HeroMetrics) + existing.AllHeroesMetrics = make(map[Platform]map[GameMode]AllHeroesStats) + + // Convert new platform data + for platformKey, platformStats := range profile.Platforms { + existing.AllHeroesMetrics[platformKey] = make(map[GameMode]AllHeroesStats) + existing.HeroMetrics[platformKey] = make(map[GameMode]map[string]HeroMetrics) + + for gameModeKey, gameModeStats := range platformStats.GameModes { + existing.HeroMetrics[platformKey][gameModeKey] = make(map[string]HeroMetrics) + + for heroID, heroStats := range gameModeStats.Heroes { + heroMetrics := make(HeroMetrics) + for key, value := range heroStats.Metrics { + heroMetrics[key] = value + } + existing.HeroMetrics[platformKey][gameModeKey][heroID] = heroMetrics + } + + // Store AllHeroesStats from parsed data + existing.AllHeroesMetrics[platformKey][gameModeKey] = gameModeStats.AllHeroesStats + } + } +} + +// listPlayerBattleTags returns a list of all player BattleTags in the runtime store. +func listPlayerBattleTags() []string { + runtimeMutex.RLock() + defer runtimeMutex.RUnlock() + + if runtimeMetrics == nil { + return []string{} + } + + battleTags := make([]string, 0, len(runtimeMetrics.Players)) + for battleTag := range runtimeMetrics.Players { + battleTags = append(battleTags, battleTag) + } + + return battleTags +} diff --git a/cmd/ow-exporter/value_parser.go b/cmd/ow-exporter/value_parser.go new file mode 100644 index 00000000..471469fa --- /dev/null +++ b/cmd/ow-exporter/value_parser.go @@ -0,0 +1,231 @@ +package main + +import ( + "regexp" + "strconv" + "strings" + "unicode" + + "github.com/cockroachdb/errors" +) + +// Regular expression patterns for parsing different value formats. 
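+// Illustrative inputs: "1,234:56:07" (hours), "12:34" (minutes), "1,234" or
+// "45%" (integers), and "1,234.56" (floats).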
+var ( + // durationHoursPattern matches time in HH:MM:SS format. + durationHoursPattern = regexp.MustCompile(`^(-?\d+,?\d*?):(\d+):(\d+)$`) + // durationMinutesPattern matches time in MM:SS format. + durationMinutesPattern = regexp.MustCompile(`^(-?\d+):(\d+)$`) + // intPattern matches integer values with optional commas and percentage signs. + intPattern = regexp.MustCompile(`^-?\d+(,\d+)*%?$`) + // floatPattern matches float values with optional commas. + floatPattern = regexp.MustCompile(`^-?\d+(,\d+)*\.\d+$`) +) + +// ParseValue converts various string representations into appropriate Go types. +// It handles duration formats (HH:MM:SS, MM:SS) converting them to seconds, +// percentages, integers with commas, and floats. +// Returns 0 for special values like "--" or "NaN". +func ParseValue(input string) interface{} { + if input == "" { + return "" + } + + // Handle special cases for missing or invalid data. + if input == "--" || input == "NaN" { + return 0 + } + + // Try to parse as duration first. + value, err := parseDuration(input) + if err == nil { + return value + } + + // Try to parse as numeric values. + numValue, numErr := parseNumeric(input) + if numErr == nil { + return numValue + } + + // Return original string if no pattern matches. + return input +} + +// parseDuration attempts to parse duration strings in HH:MM:SS or MM:SS format. +func parseDuration(input string) (int, error) { + // Duration format in hour:min:sec => seconds. + if matches := durationHoursPattern.FindStringSubmatch(input); matches != nil { + return parseDurationHours(matches) + } + + // Duration format in min:sec => seconds. + if matches := durationMinutesPattern.FindStringSubmatch(input); matches != nil { + return parseDurationMinutes(matches) + } + + return 0, errors.New("not a duration format") +} + +// parseDurationHours parses HH:MM:SS format and returns total seconds. +func parseDurationHours(matches []string) (int, error) { + hours, err := strconv.Atoi(strings.ReplaceAll(matches[1], ",", "")) + if err != nil { + return 0, errors.Wrap(err, "failed to parse hours") + } + + minutes, err := strconv.Atoi(matches[2]) + if err != nil { + return 0, errors.Wrap(err, "failed to parse minutes") + } + + seconds, err := strconv.Atoi(matches[3]) + if err != nil { + return 0, errors.Wrap(err, "failed to parse seconds") + } + + const ( + secondsPerHour = 3600 + secondsPerMinute = 60 + ) + + return hours*secondsPerHour + minutes*secondsPerMinute + seconds, nil +} + +// parseDurationMinutes parses MM:SS format and returns total seconds. +func parseDurationMinutes(matches []string) (int, error) { + minutes, err := strconv.Atoi(matches[1]) + if err != nil { + return 0, errors.Wrap(err, "failed to parse minutes") + } + + seconds, err := strconv.Atoi(matches[2]) + if err != nil { + return 0, errors.Wrap(err, "failed to parse seconds") + } + + const secondsPerMinute = 60 + + return minutes*secondsPerMinute + seconds, nil +} + +// parseNumeric attempts to parse numeric values (integers and floats). +func parseNumeric(input string) (interface{}, error) { + // Integer format (including percentages). + if intPattern.MatchString(input) { + cleanValue := strings.ReplaceAll(strings.ReplaceAll(input, "%", ""), ",", "") + value, err := strconv.Atoi(cleanValue) + if err != nil { + return 0, errors.Wrap(err, "failed to parse integer") + } + + return value, nil + } + + // Float format. 
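+	// e.g. "1,234.56" becomes 1234.56 once the thousands separators are stripped.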
+ if floatPattern.MatchString(input) { + cleanValue := strings.ReplaceAll(input, ",", "") + value, err := strconv.ParseFloat(cleanValue, 64) + if err != nil { + return 0, errors.Wrap(err, "failed to parse float") + } + + return value, nil + } + + return nil, errors.New("not a numeric format") +} + +// StringToSnakeCase converts a string to snake_case format. +// It removes accents, handles camelCase conversion, and replaces +// non-alphanumeric characters with underscores. +func StringToSnakeCase(input string) string { + if input == "" { + return "" + } + + // Remove accents and normalize the string. + cleaned := removeAccents(input) + // Remove "- " sequences. + cleaned = strings.ReplaceAll(cleaned, "- ", "") + + var result strings.Builder + result.Grow(len(cleaned) * 2) // Pre-allocate to avoid reallocations. + + for i, char := range cleaned { + if unicode.IsUpper(char) && i > 0 { + // Check if previous character is lowercase. + prevChar := rune(cleaned[i-1]) + if unicode.IsLower(prevChar) { + result.WriteRune('_') + } + } + + if unicode.IsLetter(char) || unicode.IsDigit(char) { + result.WriteRune(unicode.ToLower(char)) + } else { + result.WriteRune('_') + } + } + + // Clean up multiple underscores and trim. + resultStr := result.String() + resultStr = regexp.MustCompile(`_+`).ReplaceAllString(resultStr, "_") + resultStr = strings.Trim(resultStr, "_") + + return resultStr +} + +// removeAccents removes accents from Unicode characters. +// This is a simplified version that handles common accented characters. +func removeAccents(input string) string { + // Map of common accented characters to their base forms. + accentMap := map[rune]rune{ + 'Γ ': 'a', 'Γ‘': 'a', 'Γ’': 'a', 'Γ£': 'a', 'Γ€': 'a', 'Γ₯': 'a', + 'Γ¨': 'e', 'Γ©': 'e', 'Γͺ': 'e', 'Γ«': 'e', + 'Γ¬': 'i', 'Γ­': 'i', 'Γ': 'i', 'Γ―': 'i', + 'Γ²': 'o', 'Γ³': 'o', 'Γ΄': 'o', 'Γ΅': 'o', 'ΓΆ': 'o', + 'ΓΉ': 'u', 'ΓΊ': 'u', 'Γ»': 'u', 'ΓΌ': 'u', + 'Γ½': 'y', 'ΓΏ': 'y', + 'Γ±': 'n', + 'Γ§': 'c', + 'Γ€': 'A', 'Á': 'A', 'Γ‚': 'A', 'Γƒ': 'A', 'Γ„': 'A', 'Γ…': 'A', + 'È': 'E', 'Γ‰': 'E', 'Ê': 'E', 'Γ‹': 'E', + 'Ì': 'I', 'Í': 'I', 'Î': 'I', 'Ï': 'I', + 'Γ’': 'O', 'Γ“': 'O', 'Γ”': 'O', 'Γ•': 'O', 'Γ–': 'O', + 'Γ™': 'U', 'Ú': 'U', 'Γ›': 'U', 'Ü': 'U', + 'Ý': 'Y', 'ΕΈ': 'Y', + 'Γ‘': 'N', + 'Γ‡': 'C', + } + + var result strings.Builder + result.Grow(len(input)) + + for _, char := range input { + if replacement, exists := accentMap[char]; exists { + result.WriteRune(replacement) + } else { + result.WriteRune(char) + } + } + + return result.String() +} + +// ExtractStatsHeroClass extracts the "option-N" pattern from CSS class strings. +// This is used to identify hero-specific CSS classes in the DOM. +func ExtractStatsHeroClass(heroClass string) string { + const optionPrefix = "option-" + startIndex := strings.Index(heroClass, optionPrefix) + if startIndex == -1 { + return "" + } + + endIndex := startIndex + len(optionPrefix) + // Continue while we have digits. 
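+	// e.g. a class list containing "option-5" (illustrative) yields "option-5".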
+ for endIndex < len(heroClass) && unicode.IsDigit(rune(heroClass[endIndex])) { + endIndex++ + } + + return heroClass[startIndex:endIndex] +} diff --git a/go.mod b/go.mod index 855f11c6..ca37d04a 100644 --- a/go.mod +++ b/go.mod @@ -4,18 +4,23 @@ go 1.25 require ( github.com/BlueMonday/go-scryfall v0.9.1 + github.com/PuerkitoBio/goquery v1.10.3 github.com/SevereCloud/vksdk/v3 v3.2.1 github.com/cockroachdb/errors v1.12.0 github.com/labstack/echo/v4 v4.13.4 + github.com/prometheus/client_golang v1.11.1 github.com/redis/go-redis/v9 v9.14.0 github.com/spf13/cobra v1.10.1 github.com/spf13/viper v1.21.0 github.com/tdewolff/minify/v2 v2.24.3 gopkg.in/telebot.v4 v4.0.0-beta.5 + gopkg.in/yaml.v3 v3.0.1 ) require ( + github.com/andybalholm/cascadia v1.3.3 // indirect github.com/benbjohnson/clock v1.3.5 // indirect + github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cockroachdb/logtags v0.0.0-20241215232642-bb51bb14a506 // indirect github.com/cockroachdb/redact v1.1.6 // indirect @@ -25,6 +30,7 @@ require ( github.com/go-errors/errors v1.5.1 // indirect github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang/protobuf v1.5.3 // indirect github.com/google/go-cmp v0.7.0 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect @@ -34,8 +40,12 @@ require ( github.com/labstack/gommon v0.4.2 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect github.com/pelletier/go-toml/v2 v2.2.4 // indirect github.com/pkg/errors v0.9.1 // indirect + github.com/prometheus/client_model v0.2.0 // indirect + github.com/prometheus/common v0.26.0 // indirect + github.com/prometheus/procfs v0.6.0 // indirect github.com/rogpeppe/go-internal v1.14.1 // indirect github.com/sagikazarmark/locafero v0.11.0 // indirect github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect @@ -55,4 +65,6 @@ require ( golang.org/x/net v0.43.0 // indirect golang.org/x/sys v0.35.0 // indirect golang.org/x/text v0.28.0 // indirect + golang.org/x/time v0.11.0 // indirect + google.golang.org/protobuf v1.33.0 // indirect ) diff --git a/go.sum b/go.sum index 819e1ff0..d12cb371 100644 --- a/go.sum +++ b/go.sum @@ -60,6 +60,8 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo= +github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y= github.com/SevereCloud/vksdk/v3 v3.2.1 h1:bmbA9VFDnhMP33pFieS0lKpO9Co9kVSj+49w0v2ofkI= github.com/SevereCloud/vksdk/v3 v3.2.1/go.mod h1:pu8XeDePNv5SaUbp1NzWEdi6O1akYD6xkuM+aCUCOO4= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -67,6 +69,8 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units 
v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM= +github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= @@ -77,6 +81,7 @@ github.com/benbjohnson/clock v1.3.5 h1:VvXlSJBzZpA/zum6Sj74hxwYI2DIxRWuNIoXAzHZz github.com/benbjohnson/clock v1.3.5/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= @@ -196,6 +201,8 @@ github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -213,6 +220,7 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= @@ -332,6 +340,7 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= 
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= @@ -371,19 +380,23 @@ github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXP github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.1 h1:+4eQaD7vAZ6DsfsxB15hbE0odUjGI5ARs9yskGu1v4s= github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0 h1:iMAkS2TDoNWnKM+Kopnx/8tnEStIfpYA0ur0xQzzhMQ= github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4= github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/redis/go-redis/v9 v9.14.0 h1:u4tNCjXOyzfgeLN+vAZaW1xUooqWDqVEsZN0U01jfAE= github.com/redis/go-redis/v9 v9.14.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw= @@ -458,6 +471,7 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/v2 v2.305.4/go.mod h1:Ud+VUwIi9/uQHOMA+4ekToJ12lTxlv0zB/+DHwTGEbU= @@ -489,8 +503,13 @@ golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -528,6 +547,11 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -575,6 +599,13 @@ golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -609,6 +640,12 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod 
h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220513210516-0976fa681c29/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -687,11 +724,25 @@ golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= +golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ 
-701,11 +752,19 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= +golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -761,6 +820,10 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -936,6 +999,8 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/vendor/github.com/PuerkitoBio/goquery/.gitattributes b/vendor/github.com/PuerkitoBio/goquery/.gitattributes new file mode 100644 index 00000000..0cc26ec0 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/.gitattributes @@ -0,0 +1 @@ +testdata/* linguist-vendored diff --git a/vendor/github.com/PuerkitoBio/goquery/.gitignore b/vendor/github.com/PuerkitoBio/goquery/.gitignore new file mode 100644 index 00000000..970381cd --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/.gitignore @@ -0,0 +1,16 @@ +# editor temporary files +*.sublime-* +.DS_Store +*.swp +#*.*# +tags + +# direnv config +.env* + +# test binaries +*.test + +# coverage and profilte outputs +*.out + diff --git a/vendor/github.com/PuerkitoBio/goquery/LICENSE b/vendor/github.com/PuerkitoBio/goquery/LICENSE new file mode 100644 index 00000000..25372c2b --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/LICENSE @@ -0,0 +1,12 @@ +Copyright (c) 2012-2021, Martin Angers & Contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/PuerkitoBio/goquery/README.md b/vendor/github.com/PuerkitoBio/goquery/README.md new file mode 100644 index 00000000..89f02399 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/README.md @@ -0,0 +1,214 @@ +# goquery - a little like that j-thing, only in Go + +[![Build Status](https://github.com/PuerkitoBio/goquery/actions/workflows/test.yml/badge.svg?branch=master)](https://github.com/PuerkitoBio/goquery/actions) +[![Go Reference](https://pkg.go.dev/badge/github.com/PuerkitoBio/goquery.svg)](https://pkg.go.dev/github.com/PuerkitoBio/goquery) +[![Sourcegraph Badge](https://sourcegraph.com/github.com/PuerkitoBio/goquery/-/badge.svg)](https://sourcegraph.com/github.com/PuerkitoBio/goquery?badge) + +goquery brings a syntax and a set of features similar to [jQuery][] to the [Go language][go]. It is based on Go's [net/html package][html] and the CSS Selector library [cascadia][]. 
Since the net/html parser returns nodes, and not a full-featured DOM tree, jQuery's stateful manipulation functions (like height(), css(), detach()) have been left off. + +Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML. See the [wiki][] for various options to do this. + +Syntax-wise, it is as close as possible to jQuery, with the same function names when possible, and that warm and fuzzy chainable interface. jQuery being the ultra-popular library that it is, I felt that writing a similar HTML-manipulating library was better to follow its API than to start anew (in the same spirit as Go's `fmt` package), even though some of its methods are less than intuitive (looking at you, [index()][index]...). + +## Table of Contents + +* [Installation](#installation) +* [Changelog](#changelog) +* [API](#api) +* [Examples](#examples) +* [Related Projects](#related-projects) +* [Support](#support) +* [License](#license) + +## Installation + +Required Go version: + +* Starting with version `v1.10.0` of goquery, Go 1.23+ is required due to the use of function-based iterators. +* For `v1.9.0` of goquery, Go 1.18+ is required due to the use of generics. +* For previous goquery versions, a Go version of 1.1+ was required because of the `net/html` dependency. + +Ongoing goquery development is tested on the latest 2 versions of Go. + + $ go get github.com/PuerkitoBio/goquery + +(optional) To run unit tests: + + $ cd $GOPATH/src/github.com/PuerkitoBio/goquery + $ go test + +(optional) To run benchmarks (warning: it runs for a few minutes): + + $ cd $GOPATH/src/github.com/PuerkitoBio/goquery + $ go test -bench=".*" + +## Changelog + +**Note that goquery's API is now stable, and will not break.** + +* **2025-04-11 (v1.10.3)** : Update `go.mod` dependencies, small optimization (thanks [@myxzlpltk](https://github.com/myxzlpltk)). +* **2025-02-13 (v1.10.2)** : Update `go.mod` dependencies, add go1.24 to the test matrix. +* **2024-12-26 (v1.10.1)** : Update `go.mod` dependencies. +* **2024-09-06 (v1.10.0)** : Add `EachIter` which provides an iterator that can be used in `for..range` loops on the `*Selection` object. **goquery now requires Go version 1.23+** (thanks [@amikai](https://github.com/amikai)). +* **2024-09-06 (v1.9.3)** : Update `go.mod` dependencies. +* **2024-04-29 (v1.9.2)** : Update `go.mod` dependencies. +* **2024-02-29 (v1.9.1)** : Improve allocation and performance of the `Map` function and `Selection.Map` method, better document the cascadia differences (thanks [@jwilsson](https://github.com/jwilsson)). +* **2024-02-22 (v1.9.0)** : Add a generic `Map` function, **goquery now requires Go version 1.18+** (thanks [@Fesaa](https://github.com/Fesaa)). +* **2023-02-18 (v1.8.1)** : Update `go.mod` dependencies, update CI workflow. +* **2021-10-25 (v1.8.0)** : Add `Render` function to render a `Selection` to an `io.Writer` (thanks [@anthonygedeon](https://github.com/anthonygedeon)). +* **2021-07-11 (v1.7.1)** : Update go.mod dependencies and add dependabot config (thanks [@jauderho](https://github.com/jauderho)). +* **2021-06-14 (v1.7.0)** : Add `Single` and `SingleMatcher` functions to optimize first-match selection (thanks [@gdollardollar](https://github.com/gdollardollar)). +* **2021-01-11 (v1.6.1)** : Fix panic when calling `{Prepend,Append,Set}Html` on a `Selection` that contains non-Element nodes. 
+* **2020-10-08 (v1.6.0)** : Parse html in context of the container node for all functions that deal with html strings (`AfterHtml`, `AppendHtml`, etc.). Thanks to [@thiemok][thiemok] and [@davidjwilkins][djw] for their work on this. +* **2020-02-04 (v1.5.1)** : Update module dependencies. +* **2018-11-15 (v1.5.0)** : Go module support (thanks @Zaba505). +* **2018-06-07 (v1.4.1)** : Add `NewDocumentFromReader` examples. +* **2018-03-24 (v1.4.0)** : Deprecate `NewDocument(url)` and `NewDocumentFromResponse(response)`. +* **2018-01-28 (v1.3.0)** : Add `ToEnd` constant to `Slice` until the end of the selection (thanks to @davidjwilkins for raising the issue). +* **2018-01-11 (v1.2.0)** : Add `AddBack*` and deprecate `AndSelf` (thanks to @davidjwilkins). +* **2017-02-12 (v1.1.0)** : Add `SetHtml` and `SetText` (thanks to @glebtv). +* **2016-12-29 (v1.0.2)** : Optimize allocations for `Selection.Text` (thanks to @radovskyb). +* **2016-08-28 (v1.0.1)** : Optimize performance for large documents. +* **2016-07-27 (v1.0.0)** : Tag version 1.0.0. +* **2016-06-15** : Invalid selector strings internally compile to a `Matcher` implementation that never matches any node (instead of a panic). So for example, `doc.Find("~")` returns an empty `*Selection` object. +* **2016-02-02** : Add `NodeName` utility function similar to the DOM's `nodeName` property. It returns the tag name of the first element in a selection, and other relevant values of non-element nodes (see [doc][] for details). Add `OuterHtml` utility function similar to the DOM's `outerHTML` property (named `OuterHtml` in small caps for consistency with the existing `Html` method on the `Selection`). +* **2015-04-20** : Add `AttrOr` helper method to return the attribute's value or a default value if absent. Thanks to [piotrkowalczuk][piotr]. +* **2015-02-04** : Add more manipulation functions - Prepend* - thanks again to [Andrew Stone][thatguystone]. +* **2014-11-28** : Add more manipulation functions - ReplaceWith*, Wrap* and Unwrap - thanks again to [Andrew Stone][thatguystone]. +* **2014-11-07** : Add manipulation functions (thanks to [Andrew Stone][thatguystone]) and `*Matcher` functions, that receive compiled cascadia selectors instead of selector strings, thus avoiding potential panics thrown by goquery via `cascadia.MustCompile` calls. This results in better performance (selectors can be compiled once and reused) and more idiomatic error handling (you can handle cascadia's compilation errors, instead of recovering from panics, which had been bugging me for a long time). Note that the actual type expected is a `Matcher` interface, that `cascadia.Selector` implements. Other matcher implementations could be used. +* **2014-11-06** : Change import paths of net/html to golang.org/x/net/html (see https://groups.google.com/forum/#!topic/golang-nuts/eD8dh3T9yyA). Make sure to update your code to use the new import path too when you call goquery with `html.Node`s. +* **v0.3.2** : Add `NewDocumentFromReader()` (thanks jweir) which allows creating a goquery document from an io.Reader. +* **v0.3.1** : Add `NewDocumentFromResponse()` (thanks assassingj) which allows creating a goquery document from an http response. +* **v0.3.0** : Add `EachWithBreak()` which allows to break out of an `Each()` loop by returning false. This function was added instead of changing the existing `Each()` to avoid breaking compatibility. +* **v0.2.1** : Make go-getable, now that [go.net/html is Go1.0-compatible][gonet] (thanks to @matrixik for pointing this out). 
+* **v0.2.0** : Add support for negative indices in Slice(). **BREAKING CHANGE** `Document.Root` is removed, `Document` is now a `Selection` itself (a selection of one, the root element, just like `Document.Root` was before). Add jQuery's Closest() method. +* **v0.1.1** : Add benchmarks to use as baseline for refactorings, refactor Next...() and Prev...() methods to use the new html package's linked list features (Next/PrevSibling, FirstChild). Good performance boost (40+% in some cases). +* **v0.1.0** : Initial release. + +## API + +goquery exposes two structs, `Document` and `Selection`, and the `Matcher` interface. Unlike jQuery, which is loaded as part of a DOM document, and thus acts on its containing document, goquery doesn't know which HTML document to act upon. So it needs to be told, and that's what the `Document` type is for. It holds the root document node as the initial Selection value to manipulate. + +jQuery often has many variants for the same function (no argument, a selector string argument, a jQuery object argument, a DOM element argument, ...). Instead of exposing the same features in goquery as a single method with variadic empty interface arguments, statically-typed signatures are used following this naming convention: + +* When the jQuery equivalent can be called with no argument, it has the same name as jQuery for the no argument signature (e.g.: `Prev()`), and the version with a selector string argument is called `XxxFiltered()` (e.g.: `PrevFiltered()`) +* When the jQuery equivalent **requires** one argument, the same name as jQuery is used for the selector string version (e.g.: `Is()`) +* The signatures accepting a jQuery object as argument are defined in goquery as `XxxSelection()` and take a `*Selection` object as argument (e.g.: `FilterSelection()`) +* The signatures accepting a DOM element as argument in jQuery are defined in goquery as `XxxNodes()` and take a variadic argument of type `*html.Node` (e.g.: `FilterNodes()`) +* The signatures accepting a function as argument in jQuery are defined in goquery as `XxxFunction()` and take a function as argument (e.g.: `FilterFunction()`) +* The goquery methods that can be called with a selector string have a corresponding version that take a `Matcher` interface and are defined as `XxxMatcher()` (e.g.: `IsMatcher()`) + +Utility functions that are not in jQuery but are useful in Go are implemented as functions (that take a `*Selection` as parameter), to avoid a potential naming clash on the `*Selection`'s methods (reserved for jQuery-equivalent behaviour). + +The complete [package reference documentation can be found here][doc]. + +Please note that Cascadia's selectors do not necessarily match all supported selectors of jQuery (Sizzle). See the [cascadia project][cascadia] for details. Also, the selectors work more like the DOM's `querySelectorAll`, than jQuery's matchers - they have no concept of contextual matching (for some concrete examples of what that means, see [this ticket](https://github.com/andybalholm/cascadia/issues/61)). In practice, it doesn't matter very often but it's something worth mentioning. Invalid selector strings compile to a `Matcher` that fails to match any node. Behaviour of the various functions that take a selector string as argument follows from that fact, e.g. (where `~` is an invalid selector string): + +* `Find("~")` returns an empty selection because the selector string doesn't match anything. 
+* `Add("~")` returns a new selection that holds the same nodes as the original selection, because it didn't add any node (selector string didn't match anything). +* `ParentsFiltered("~")` returns an empty selection because the selector string doesn't match anything. +* `ParentsUntil("~")` returns all parents of the selection because the selector string didn't match any element to stop before the top element. + +## Examples + +See some tips and tricks in the [wiki][]. + +Adapted from example_test.go: + +```Go +package main + +import ( + "fmt" + "log" + "net/http" + + "github.com/PuerkitoBio/goquery" +) + +func ExampleScrape() { + // Request the HTML page. + res, err := http.Get("http://metalsucks.net") + if err != nil { + log.Fatal(err) + } + defer res.Body.Close() + if res.StatusCode != 200 { + log.Fatalf("status code error: %d %s", res.StatusCode, res.Status) + } + + // Load the HTML document + doc, err := goquery.NewDocumentFromReader(res.Body) + if err != nil { + log.Fatal(err) + } + + // Find the review items + doc.Find(".left-content article .post-title").Each(func(i int, s *goquery.Selection) { + // For each item found, get the title + title := s.Find("a").Text() + fmt.Printf("Review %d: %s\n", i, title) + }) +} + +func main() { + ExampleScrape() +} +``` + +## Related Projects + +- [Goq][goq], an HTML deserialization and scraping library based on goquery and struct tags. +- [andybalholm/cascadia][cascadia], the CSS selector library used by goquery. +- [suntong/cascadia][cascadiacli], a command-line interface to the cascadia CSS selector library, useful to test selectors. +- [gocolly/colly](https://github.com/gocolly/colly), a lightning fast and elegant Scraping Framework +- [gnulnx/goperf](https://github.com/gnulnx/goperf), a website performance test tool that also fetches static assets. +- [MontFerret/ferret](https://github.com/MontFerret/ferret), declarative web scraping. +- [tacusci/berrycms](https://github.com/tacusci/berrycms), a modern simple to use CMS with easy to write plugins +- [Dataflow kit](https://github.com/slotix/dataflowkit), Web Scraping framework for Gophers. +- [Geziyor](https://github.com/geziyor/geziyor), a fast web crawling & scraping framework for Go. Supports JS rendering. +- [Pagser](https://github.com/foolin/pagser), a simple, easy, extensible, configurable HTML parser to struct based on goquery and struct tags. +- [stitcherd](https://github.com/vhodges/stitcherd), A server for doing server side includes using css selectors and DOM updates. +- [goskyr](https://github.com/jakopako/goskyr), an easily configurable command-line scraper written in Go. +- [goGetJS](https://github.com/davemolk/goGetJS), a tool for extracting, searching, and saving JavaScript files (with optional headless browser). +- [fitter](https://github.com/PxyUp/fitter), a tool for selecting values from JSON, XML, HTML and XPath formatted pages. +- [seltabl](github.com/conneroisu/seltabl), an orm-like package and supporting language server for extracting values from HTML + +## Support + +There are a number of ways you can support the project: + +* Use it, star it, build something with it, spread the word! + - If you do build something open-source or otherwise publicly-visible, let me know so I can add it to the [Related Projects](#related-projects) section! +* Raise issues to improve the project (note: doc typos and clarifications are issues too!) + - Please search existing issues before opening a new one - it may have already been addressed. 
+* Pull requests: please discuss new code in an issue first, unless the fix is really trivial. + - Make sure new code is tested. + - Be mindful of existing code - PRs that break existing code have a high probability of being declined, unless it fixes a serious issue. +* Sponsor the developer + - See the Github Sponsor button at the top of the repo on github + - or via BuyMeACoffee.com, below + +Buy Me A Coffee + +## License + +The [BSD 3-Clause license][bsd], the same as the [Go language][golic]. Cascadia's license is [here][caslic]. + +[jquery]: https://jquery.com/ +[go]: https://go.dev/ +[cascadia]: https://github.com/andybalholm/cascadia +[cascadiacli]: https://github.com/suntong/cascadia +[bsd]: https://opensource.org/licenses/BSD-3-Clause +[golic]: https://go.dev/LICENSE +[caslic]: https://github.com/andybalholm/cascadia/blob/master/LICENSE +[doc]: https://pkg.go.dev/github.com/PuerkitoBio/goquery +[index]: https://api.jquery.com/index/ +[gonet]: https://github.com/golang/net/ +[html]: https://pkg.go.dev/golang.org/x/net/html +[wiki]: https://github.com/PuerkitoBio/goquery/wiki/Tips-and-tricks +[thatguystone]: https://github.com/thatguystone +[piotr]: https://github.com/piotrkowalczuk +[goq]: https://github.com/andrewstuart/goq +[thiemok]: https://github.com/thiemok +[djw]: https://github.com/davidjwilkins diff --git a/vendor/github.com/PuerkitoBio/goquery/array.go b/vendor/github.com/PuerkitoBio/goquery/array.go new file mode 100644 index 00000000..1b1f6cbe --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/array.go @@ -0,0 +1,124 @@ +package goquery + +import ( + "golang.org/x/net/html" +) + +const ( + maxUint = ^uint(0) + maxInt = int(maxUint >> 1) + + // ToEnd is a special index value that can be used as end index in a call + // to Slice so that all elements are selected until the end of the Selection. + // It is equivalent to passing (*Selection).Length(). + ToEnd = maxInt +) + +// First reduces the set of matched elements to the first in the set. +// It returns a new Selection object, and an empty Selection object if the +// the selection is empty. +func (s *Selection) First() *Selection { + return s.Eq(0) +} + +// Last reduces the set of matched elements to the last in the set. +// It returns a new Selection object, and an empty Selection object if +// the selection is empty. +func (s *Selection) Last() *Selection { + return s.Eq(-1) +} + +// Eq reduces the set of matched elements to the one at the specified index. +// If a negative index is given, it counts backwards starting at the end of the +// set. It returns a new Selection object, and an empty Selection object if the +// index is invalid. +func (s *Selection) Eq(index int) *Selection { + if index < 0 { + index += len(s.Nodes) + } + + if index >= len(s.Nodes) || index < 0 { + return newEmptySelection(s.document) + } + + return s.Slice(index, index+1) +} + +// Slice reduces the set of matched elements to a subset specified by a range +// of indices. The start index is 0-based and indicates the index of the first +// element to select. The end index is 0-based and indicates the index at which +// the elements stop being selected (the end index is not selected). +// +// The indices may be negative, in which case they represent an offset from the +// end of the selection. +// +// The special value ToEnd may be specified as end index, in which case all elements +// until the end are selected. This works both for a positive and negative start +// index. 
+func (s *Selection) Slice(start, end int) *Selection { + if start < 0 { + start += len(s.Nodes) + } + if end == ToEnd { + end = len(s.Nodes) + } else if end < 0 { + end += len(s.Nodes) + } + return pushStack(s, s.Nodes[start:end]) +} + +// Get retrieves the underlying node at the specified index. +// Get without parameter is not implemented, since the node array is available +// on the Selection object. +func (s *Selection) Get(index int) *html.Node { + if index < 0 { + index += len(s.Nodes) // Negative index gets from the end + } + return s.Nodes[index] +} + +// Index returns the position of the first element within the Selection object +// relative to its sibling elements. +func (s *Selection) Index() int { + if len(s.Nodes) > 0 { + return newSingleSelection(s.Nodes[0], s.document).PrevAll().Length() + } + return -1 +} + +// IndexSelector returns the position of the first element within the +// Selection object relative to the elements matched by the selector, or -1 if +// not found. +func (s *Selection) IndexSelector(selector string) int { + if len(s.Nodes) > 0 { + sel := s.document.Find(selector) + return indexInSlice(sel.Nodes, s.Nodes[0]) + } + return -1 +} + +// IndexMatcher returns the position of the first element within the +// Selection object relative to the elements matched by the matcher, or -1 if +// not found. +func (s *Selection) IndexMatcher(m Matcher) int { + if len(s.Nodes) > 0 { + sel := s.document.FindMatcher(m) + return indexInSlice(sel.Nodes, s.Nodes[0]) + } + return -1 +} + +// IndexOfNode returns the position of the specified node within the Selection +// object, or -1 if not found. +func (s *Selection) IndexOfNode(node *html.Node) int { + return indexInSlice(s.Nodes, node) +} + +// IndexOfSelection returns the position of the first node in the specified +// Selection object within this Selection object, or -1 if not found. +func (s *Selection) IndexOfSelection(sel *Selection) int { + if sel != nil && len(sel.Nodes) > 0 { + return indexInSlice(s.Nodes, sel.Nodes[0]) + } + return -1 +} diff --git a/vendor/github.com/PuerkitoBio/goquery/doc.go b/vendor/github.com/PuerkitoBio/goquery/doc.go new file mode 100644 index 00000000..71146a78 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/doc.go @@ -0,0 +1,123 @@ +// Copyright (c) 2012-2016, Martin Angers & Contributors +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without modification, +// are permitted provided that the following conditions are met: +// +// * Redistributions of source code must retain the above copyright notice, +// this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation and/or +// other materials provided with the distribution. +// * Neither the name of the author nor the names of its contributors may be used to +// endorse or promote products derived from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS +// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR +// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY +// WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* +Package goquery implements features similar to jQuery, including the chainable +syntax, to manipulate and query an HTML document. + +It brings a syntax and a set of features similar to jQuery to the Go language. +It is based on Go's net/html package and the CSS Selector library cascadia. +Since the net/html parser returns nodes, and not a full-featured DOM +tree, jQuery's stateful manipulation functions (like height(), css(), detach()) +have been left off. + +Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is +the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML. +See the repository's wiki for various options on how to do this. + +Syntax-wise, it is as close as possible to jQuery, with the same method names when +possible, and that warm and fuzzy chainable interface. jQuery being the +ultra-popular library that it is, writing a similar HTML-manipulating +library was better to follow its API than to start anew (in the same spirit as +Go's fmt package), even though some of its methods are less than intuitive (looking +at you, index()...). + +It is hosted on GitHub, along with additional documentation in the README.md +file: https://github.com/puerkitobio/goquery + +Please note that because of the net/html dependency, goquery requires Go1.1+. + +The various methods are split into files based on the category of behavior. +The three dots (...) indicate that various "overloads" are available. + +* array.go : array-like positional manipulation of the selection. + - Eq() + - First() + - Get() + - Index...() + - Last() + - Slice() + +* expand.go : methods that expand or augment the selection's set. + - Add...() + - AndSelf() + - Union(), which is an alias for AddSelection() + +* filter.go : filtering methods, that reduce the selection's set. + - End() + - Filter...() + - Has...() + - Intersection(), which is an alias of FilterSelection() + - Not...() + +* iteration.go : methods to loop over the selection's nodes. + - Each() + - EachWithBreak() + - Map() + +* manipulation.go : methods for modifying the document + - After...() + - Append...() + - Before...() + - Clone() + - Empty() + - Prepend...() + - Remove...() + - ReplaceWith...() + - Unwrap() + - Wrap...() + - WrapAll...() + - WrapInner...() + +* property.go : methods that inspect and get the node's properties values. + - Attr*(), RemoveAttr(), SetAttr() + - AddClass(), HasClass(), RemoveClass(), ToggleClass() + - Html() + - Length() + - Size(), which is an alias for Length() + - Text() + +* query.go : methods that query, or reflect, a node's identity. + - Contains() + - Is...() + +* traversal.go : methods to traverse the HTML document tree. + - Children...() + - Contents() + - Find...() + - Next...() + - Parent[s]...() + - Prev...() + - Siblings...() + +* type.go : definition of the types exposed by goquery. 
+ - Document + - Selection + - Matcher + +* utilities.go : definition of helper functions (and not methods on a *Selection) +that are not part of jQuery, but are useful to goquery. + - NodeName + - OuterHtml +*/ +package goquery diff --git a/vendor/github.com/PuerkitoBio/goquery/expand.go b/vendor/github.com/PuerkitoBio/goquery/expand.go new file mode 100644 index 00000000..af54acf5 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/expand.go @@ -0,0 +1,70 @@ +package goquery + +import "golang.org/x/net/html" + +// Add adds the selector string's matching nodes to those in the current +// selection and returns a new Selection object. +// The selector string is run in the context of the document of the current +// Selection object. +func (s *Selection) Add(selector string) *Selection { + return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, compileMatcher(selector))...) +} + +// AddMatcher adds the matcher's matching nodes to those in the current +// selection and returns a new Selection object. +// The matcher is run in the context of the document of the current +// Selection object. +func (s *Selection) AddMatcher(m Matcher) *Selection { + return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, m)...) +} + +// AddSelection adds the specified Selection object's nodes to those in the +// current selection and returns a new Selection object. +func (s *Selection) AddSelection(sel *Selection) *Selection { + if sel == nil { + return s.AddNodes() + } + return s.AddNodes(sel.Nodes...) +} + +// Union is an alias for AddSelection. +func (s *Selection) Union(sel *Selection) *Selection { + return s.AddSelection(sel) +} + +// AddNodes adds the specified nodes to those in the +// current selection and returns a new Selection object. +func (s *Selection) AddNodes(nodes ...*html.Node) *Selection { + return pushStack(s, appendWithoutDuplicates(s.Nodes, nodes, nil)) +} + +// AndSelf adds the previous set of elements on the stack to the current set. +// It returns a new Selection object containing the current Selection combined +// with the previous one. +// Deprecated: This function has been deprecated and is now an alias for AddBack(). +func (s *Selection) AndSelf() *Selection { + return s.AddBack() +} + +// AddBack adds the previous set of elements on the stack to the current set. +// It returns a new Selection object containing the current Selection combined +// with the previous one. +func (s *Selection) AddBack() *Selection { + return s.AddSelection(s.prevSel) +} + +// AddBackFiltered reduces the previous set of elements on the stack to those that +// match the selector string, and adds them to the current set. +// It returns a new Selection object containing the current Selection combined +// with the filtered previous one +func (s *Selection) AddBackFiltered(selector string) *Selection { + return s.AddSelection(s.prevSel.Filter(selector)) +} + +// AddBackMatcher reduces the previous set of elements on the stack to those that match +// the matcher, and adds them to the current set. 
+// It returns a new Selection object containing the current Selection combined +// with the filtered previous one +func (s *Selection) AddBackMatcher(m Matcher) *Selection { + return s.AddSelection(s.prevSel.FilterMatcher(m)) +} diff --git a/vendor/github.com/PuerkitoBio/goquery/filter.go b/vendor/github.com/PuerkitoBio/goquery/filter.go new file mode 100644 index 00000000..9138ffb3 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/filter.go @@ -0,0 +1,163 @@ +package goquery + +import "golang.org/x/net/html" + +// Filter reduces the set of matched elements to those that match the selector string. +// It returns a new Selection object for this subset of matching elements. +func (s *Selection) Filter(selector string) *Selection { + return s.FilterMatcher(compileMatcher(selector)) +} + +// FilterMatcher reduces the set of matched elements to those that match +// the given matcher. It returns a new Selection object for this subset +// of matching elements. +func (s *Selection) FilterMatcher(m Matcher) *Selection { + return pushStack(s, winnow(s, m, true)) +} + +// Not removes elements from the Selection that match the selector string. +// It returns a new Selection object with the matching elements removed. +func (s *Selection) Not(selector string) *Selection { + return s.NotMatcher(compileMatcher(selector)) +} + +// NotMatcher removes elements from the Selection that match the given matcher. +// It returns a new Selection object with the matching elements removed. +func (s *Selection) NotMatcher(m Matcher) *Selection { + return pushStack(s, winnow(s, m, false)) +} + +// FilterFunction reduces the set of matched elements to those that pass the function's test. +// It returns a new Selection object for this subset of elements. +func (s *Selection) FilterFunction(f func(int, *Selection) bool) *Selection { + return pushStack(s, winnowFunction(s, f, true)) +} + +// NotFunction removes elements from the Selection that pass the function's test. +// It returns a new Selection object with the matching elements removed. +func (s *Selection) NotFunction(f func(int, *Selection) bool) *Selection { + return pushStack(s, winnowFunction(s, f, false)) +} + +// FilterNodes reduces the set of matched elements to those that match the specified nodes. +// It returns a new Selection object for this subset of elements. +func (s *Selection) FilterNodes(nodes ...*html.Node) *Selection { + return pushStack(s, winnowNodes(s, nodes, true)) +} + +// NotNodes removes elements from the Selection that match the specified nodes. +// It returns a new Selection object with the matching elements removed. +func (s *Selection) NotNodes(nodes ...*html.Node) *Selection { + return pushStack(s, winnowNodes(s, nodes, false)) +} + +// FilterSelection reduces the set of matched elements to those that match a +// node in the specified Selection object. +// It returns a new Selection object for this subset of elements. +func (s *Selection) FilterSelection(sel *Selection) *Selection { + if sel == nil { + return pushStack(s, winnowNodes(s, nil, true)) + } + return pushStack(s, winnowNodes(s, sel.Nodes, true)) +} + +// NotSelection removes elements from the Selection that match a node in the specified +// Selection object. It returns a new Selection object with the matching elements removed. 
+func (s *Selection) NotSelection(sel *Selection) *Selection { + if sel == nil { + return pushStack(s, winnowNodes(s, nil, false)) + } + return pushStack(s, winnowNodes(s, sel.Nodes, false)) +} + +// Intersection is an alias for FilterSelection. +func (s *Selection) Intersection(sel *Selection) *Selection { + return s.FilterSelection(sel) +} + +// Has reduces the set of matched elements to those that have a descendant +// that matches the selector. +// It returns a new Selection object with the matching elements. +func (s *Selection) Has(selector string) *Selection { + return s.HasSelection(s.document.Find(selector)) +} + +// HasMatcher reduces the set of matched elements to those that have a descendant +// that matches the matcher. +// It returns a new Selection object with the matching elements. +func (s *Selection) HasMatcher(m Matcher) *Selection { + return s.HasSelection(s.document.FindMatcher(m)) +} + +// HasNodes reduces the set of matched elements to those that have a +// descendant that matches one of the nodes. +// It returns a new Selection object with the matching elements. +func (s *Selection) HasNodes(nodes ...*html.Node) *Selection { + return s.FilterFunction(func(_ int, sel *Selection) bool { + // Add all nodes that contain one of the specified nodes + for _, n := range nodes { + if sel.Contains(n) { + return true + } + } + return false + }) +} + +// HasSelection reduces the set of matched elements to those that have a +// descendant that matches one of the nodes of the specified Selection object. +// It returns a new Selection object with the matching elements. +func (s *Selection) HasSelection(sel *Selection) *Selection { + if sel == nil { + return s.HasNodes() + } + return s.HasNodes(sel.Nodes...) +} + +// End ends the most recent filtering operation in the current chain and +// returns the set of matched elements to its previous state. +func (s *Selection) End() *Selection { + if s.prevSel != nil { + return s.prevSel + } + return newEmptySelection(s.document) +} + +// Filter based on the matcher, and the indicator to keep (Filter) or +// to get rid of (Not) the matching elements. +func winnow(sel *Selection, m Matcher, keep bool) []*html.Node { + // Optimize if keep is requested + if keep { + return m.Filter(sel.Nodes) + } + // Use grep + return grep(sel, func(i int, s *Selection) bool { + return !m.Match(s.Get(0)) + }) +} + +// Filter based on an array of nodes, and the indicator to keep (Filter) or +// to get rid of (Not) the matching elements. +func winnowNodes(sel *Selection, nodes []*html.Node, keep bool) []*html.Node { + if len(nodes)+len(sel.Nodes) < minNodesForSet { + return grep(sel, func(i int, s *Selection) bool { + return isInSlice(nodes, s.Get(0)) == keep + }) + } + + set := make(map[*html.Node]bool) + for _, n := range nodes { + set[n] = true + } + return grep(sel, func(i int, s *Selection) bool { + return set[s.Get(0)] == keep + }) +} + +// Filter based on a function test, and the indicator to keep (Filter) or +// to get rid of (Not) the matching elements. 
+func winnowFunction(sel *Selection, f func(int, *Selection) bool, keep bool) []*html.Node { + return grep(sel, func(i int, s *Selection) bool { + return f(i, s) == keep + }) +} diff --git a/vendor/github.com/PuerkitoBio/goquery/iteration.go b/vendor/github.com/PuerkitoBio/goquery/iteration.go new file mode 100644 index 00000000..1ca52454 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/iteration.go @@ -0,0 +1,61 @@ +package goquery + +import "iter" + +// Each iterates over a Selection object, executing a function for each +// matched element. It returns the current Selection object. The function +// f is called for each element in the selection with the index of the +// element in that selection starting at 0, and a *Selection that contains +// only that element. +func (s *Selection) Each(f func(int, *Selection)) *Selection { + for i, n := range s.Nodes { + f(i, newSingleSelection(n, s.document)) + } + return s +} + +// EachIter returns an iterator that yields the Selection object in order. +// The implementation is similar to Each, but it returns an iterator instead. +func (s *Selection) EachIter() iter.Seq2[int, *Selection] { + return func(yield func(int, *Selection) bool) { + for i, n := range s.Nodes { + if !yield(i, newSingleSelection(n, s.document)) { + return + } + } + } +} + +// EachWithBreak iterates over a Selection object, executing a function for each +// matched element. It is identical to Each except that it is possible to break +// out of the loop by returning false in the callback function. It returns the +// current Selection object. +func (s *Selection) EachWithBreak(f func(int, *Selection) bool) *Selection { + for i, n := range s.Nodes { + if !f(i, newSingleSelection(n, s.document)) { + return s + } + } + return s +} + +// Map passes each element in the current matched set through a function, +// producing a slice of string holding the returned values. The function +// f is called for each element in the selection with the index of the +// element in that selection starting at 0, and a *Selection that contains +// only that element. +func (s *Selection) Map(f func(int, *Selection) string) (result []string) { + return Map(s, f) +} + +// Map is the generic version of Selection.Map, allowing any type to be +// returned. +func Map[E any](s *Selection, f func(int, *Selection) E) (result []E) { + result = make([]E, len(s.Nodes)) + + for i, n := range s.Nodes { + result[i] = f(i, newSingleSelection(n, s.document)) + } + + return result +} diff --git a/vendor/github.com/PuerkitoBio/goquery/manipulation.go b/vendor/github.com/PuerkitoBio/goquery/manipulation.go new file mode 100644 index 00000000..35febf11 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/manipulation.go @@ -0,0 +1,679 @@ +package goquery + +import ( + "strings" + + "golang.org/x/net/html" +) + +// After applies the selector from the root document and inserts the matched elements +// after the elements in the set of matched elements. +// +// If one of the matched elements in the selection is not currently in the +// document, it's impossible to insert nodes after it, so it will be ignored. +// +// This follows the same rules as Selection.Append. +func (s *Selection) After(selector string) *Selection { + return s.AfterMatcher(compileMatcher(selector)) +} + +// AfterMatcher applies the matcher from the root document and inserts the matched elements +// after the elements in the set of matched elements. 
+// +// If one of the matched elements in the selection is not currently in the +// document, it's impossible to insert nodes after it, so it will be ignored. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AfterMatcher(m Matcher) *Selection { + return s.AfterNodes(m.MatchAll(s.document.rootNode)...) +} + +// AfterSelection inserts the elements in the selection after each element in the set of matched +// elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AfterSelection(sel *Selection) *Selection { + return s.AfterNodes(sel.Nodes...) +} + +// AfterHtml parses the html and inserts it after the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AfterHtml(htmlStr string) *Selection { + return s.eachNodeHtml(htmlStr, true, func(node *html.Node, nodes []*html.Node) { + nextSibling := node.NextSibling + for _, n := range nodes { + if node.Parent != nil { + node.Parent.InsertBefore(n, nextSibling) + } + } + }) +} + +// AfterNodes inserts the nodes after each element in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AfterNodes(ns ...*html.Node) *Selection { + return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) { + if sn.Parent != nil { + sn.Parent.InsertBefore(n, sn.NextSibling) + } + }) +} + +// Append appends the elements specified by the selector to the end of each element +// in the set of matched elements, following those rules: +// +// 1) The selector is applied to the root document. +// +// 2) Elements that are part of the document will be moved to the new location. +// +// 3) If there are multiple locations to append to, cloned nodes will be +// appended to all target locations except the last one, which will be moved +// as noted in (2). +func (s *Selection) Append(selector string) *Selection { + return s.AppendMatcher(compileMatcher(selector)) +} + +// AppendMatcher appends the elements specified by the matcher to the end of each element +// in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AppendMatcher(m Matcher) *Selection { + return s.AppendNodes(m.MatchAll(s.document.rootNode)...) +} + +// AppendSelection appends the elements in the selection to the end of each element +// in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AppendSelection(sel *Selection) *Selection { + return s.AppendNodes(sel.Nodes...) +} + +// AppendHtml parses the html and appends it to the set of matched elements. +func (s *Selection) AppendHtml(htmlStr string) *Selection { + return s.eachNodeHtml(htmlStr, false, func(node *html.Node, nodes []*html.Node) { + for _, n := range nodes { + node.AppendChild(n) + } + }) +} + +// AppendNodes appends the specified nodes to each node in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) AppendNodes(ns ...*html.Node) *Selection { + return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) { + sn.AppendChild(n) + }) +} + +// Before inserts the matched elements before each element in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) Before(selector string) *Selection { + return s.BeforeMatcher(compileMatcher(selector)) +} + +// BeforeMatcher inserts the matched elements before each element in the set of matched elements. 
+// +// This follows the same rules as Selection.Append. +func (s *Selection) BeforeMatcher(m Matcher) *Selection { + return s.BeforeNodes(m.MatchAll(s.document.rootNode)...) +} + +// BeforeSelection inserts the elements in the selection before each element in the set of matched +// elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) BeforeSelection(sel *Selection) *Selection { + return s.BeforeNodes(sel.Nodes...) +} + +// BeforeHtml parses the html and inserts it before the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) BeforeHtml(htmlStr string) *Selection { + return s.eachNodeHtml(htmlStr, true, func(node *html.Node, nodes []*html.Node) { + for _, n := range nodes { + if node.Parent != nil { + node.Parent.InsertBefore(n, node) + } + } + }) +} + +// BeforeNodes inserts the nodes before each element in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) BeforeNodes(ns ...*html.Node) *Selection { + return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) { + if sn.Parent != nil { + sn.Parent.InsertBefore(n, sn) + } + }) +} + +// Clone creates a deep copy of the set of matched nodes. The new nodes will not be +// attached to the document. +func (s *Selection) Clone() *Selection { + ns := newEmptySelection(s.document) + ns.Nodes = cloneNodes(s.Nodes) + return ns +} + +// Empty removes all children nodes from the set of matched elements. +// It returns the children nodes in a new Selection. +func (s *Selection) Empty() *Selection { + var nodes []*html.Node + + for _, n := range s.Nodes { + for c := n.FirstChild; c != nil; c = n.FirstChild { + n.RemoveChild(c) + nodes = append(nodes, c) + } + } + + return pushStack(s, nodes) +} + +// Prepend prepends the elements specified by the selector to each element in +// the set of matched elements, following the same rules as Append. +func (s *Selection) Prepend(selector string) *Selection { + return s.PrependMatcher(compileMatcher(selector)) +} + +// PrependMatcher prepends the elements specified by the matcher to each +// element in the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) PrependMatcher(m Matcher) *Selection { + return s.PrependNodes(m.MatchAll(s.document.rootNode)...) +} + +// PrependSelection prepends the elements in the selection to each element in +// the set of matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) PrependSelection(sel *Selection) *Selection { + return s.PrependNodes(sel.Nodes...) +} + +// PrependHtml parses the html and prepends it to the set of matched elements. +func (s *Selection) PrependHtml(htmlStr string) *Selection { + return s.eachNodeHtml(htmlStr, false, func(node *html.Node, nodes []*html.Node) { + firstChild := node.FirstChild + for _, n := range nodes { + node.InsertBefore(n, firstChild) + } + }) +} + +// PrependNodes prepends the specified nodes to each node in the set of +// matched elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) PrependNodes(ns ...*html.Node) *Selection { + return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) { + // sn.FirstChild may be nil, in which case this functions like + // sn.AppendChild() + sn.InsertBefore(n, sn.FirstChild) + }) +} + +// Remove removes the set of matched elements from the document. 
+// It returns the same selection, now consisting of nodes not in the document. +func (s *Selection) Remove() *Selection { + for _, n := range s.Nodes { + if n.Parent != nil { + n.Parent.RemoveChild(n) + } + } + + return s +} + +// RemoveFiltered removes from the current set of matched elements those that +// match the selector filter. It returns the Selection of removed nodes. +// +// For example if the selection s contains "
<h1>", "<h2>" and "<h3>" +// and s.RemoveFiltered("h2") is called, only the "<h2>" node is removed +// (and returned), while "<h1>" and "<h3>
" are kept in the document. +func (s *Selection) RemoveFiltered(selector string) *Selection { + return s.RemoveMatcher(compileMatcher(selector)) +} + +// RemoveMatcher removes from the current set of matched elements those that +// match the Matcher filter. It returns the Selection of removed nodes. +// See RemoveFiltered for additional information. +func (s *Selection) RemoveMatcher(m Matcher) *Selection { + return s.FilterMatcher(m).Remove() +} + +// ReplaceWith replaces each element in the set of matched elements with the +// nodes matched by the given selector. +// It returns the removed elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) ReplaceWith(selector string) *Selection { + return s.ReplaceWithMatcher(compileMatcher(selector)) +} + +// ReplaceWithMatcher replaces each element in the set of matched elements with +// the nodes matched by the given Matcher. +// It returns the removed elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) ReplaceWithMatcher(m Matcher) *Selection { + return s.ReplaceWithNodes(m.MatchAll(s.document.rootNode)...) +} + +// ReplaceWithSelection replaces each element in the set of matched elements with +// the nodes from the given Selection. +// It returns the removed elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) ReplaceWithSelection(sel *Selection) *Selection { + return s.ReplaceWithNodes(sel.Nodes...) +} + +// ReplaceWithHtml replaces each element in the set of matched elements with +// the parsed HTML. +// It returns the removed elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) ReplaceWithHtml(htmlStr string) *Selection { + s.eachNodeHtml(htmlStr, true, func(node *html.Node, nodes []*html.Node) { + nextSibling := node.NextSibling + for _, n := range nodes { + if node.Parent != nil { + node.Parent.InsertBefore(n, nextSibling) + } + } + }) + return s.Remove() +} + +// ReplaceWithNodes replaces each element in the set of matched elements with +// the given nodes. +// It returns the removed elements. +// +// This follows the same rules as Selection.Append. +func (s *Selection) ReplaceWithNodes(ns ...*html.Node) *Selection { + s.AfterNodes(ns...) + return s.Remove() +} + +// SetHtml sets the html content of each element in the selection to +// specified html string. +func (s *Selection) SetHtml(htmlStr string) *Selection { + for _, context := range s.Nodes { + for c := context.FirstChild; c != nil; c = context.FirstChild { + context.RemoveChild(c) + } + } + return s.eachNodeHtml(htmlStr, false, func(node *html.Node, nodes []*html.Node) { + for _, n := range nodes { + node.AppendChild(n) + } + }) +} + +// SetText sets the content of each element in the selection to specified content. +// The provided text string is escaped. +func (s *Selection) SetText(text string) *Selection { + return s.SetHtml(html.EscapeString(text)) +} + +// Unwrap removes the parents of the set of matched elements, leaving the matched +// elements (and their siblings, if any) in their place. +// It returns the original selection. +func (s *Selection) Unwrap() *Selection { + s.Parent().Each(func(i int, ss *Selection) { + // For some reason, jquery allows unwrap to remove the element, so + // allowing it here too. Same for . Why it allows those elements to + // be unwrapped while not allowing body is a mystery to me. 
+ if ss.Nodes[0].Data != "body" { + ss.ReplaceWithSelection(ss.Contents()) + } + }) + + return s +} + +// Wrap wraps each element in the set of matched elements inside the first +// element matched by the given selector. The matched child is cloned before +// being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) Wrap(selector string) *Selection { + return s.WrapMatcher(compileMatcher(selector)) +} + +// WrapMatcher wraps each element in the set of matched elements inside the +// first element matched by the given matcher. The matched child is cloned +// before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapMatcher(m Matcher) *Selection { + return s.wrapNodes(m.MatchAll(s.document.rootNode)...) +} + +// WrapSelection wraps each element in the set of matched elements inside the +// first element in the given Selection. The element is cloned before being +// inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapSelection(sel *Selection) *Selection { + return s.wrapNodes(sel.Nodes...) +} + +// WrapHtml wraps each element in the set of matched elements inside the inner- +// most child of the given HTML. +// +// It returns the original set of elements. +func (s *Selection) WrapHtml(htmlStr string) *Selection { + nodesMap := make(map[string][]*html.Node) + for _, context := range s.Nodes { + var parent *html.Node + if context.Parent != nil { + parent = context.Parent + } else { + parent = &html.Node{Type: html.ElementNode} + } + nodes, found := nodesMap[nodeName(parent)] + if !found { + nodes = parseHtmlWithContext(htmlStr, parent) + nodesMap[nodeName(parent)] = nodes + } + newSingleSelection(context, s.document).wrapAllNodes(cloneNodes(nodes)...) + } + return s +} + +// WrapNode wraps each element in the set of matched elements inside the inner- +// most child of the given node. The given node is copied before being inserted +// into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapNode(n *html.Node) *Selection { + return s.wrapNodes(n) +} + +func (s *Selection) wrapNodes(ns ...*html.Node) *Selection { + s.Each(func(i int, ss *Selection) { + ss.wrapAllNodes(ns...) + }) + + return s +} + +// WrapAll wraps a single HTML structure, matched by the given selector, around +// all elements in the set of matched elements. The matched child is cloned +// before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapAll(selector string) *Selection { + return s.WrapAllMatcher(compileMatcher(selector)) +} + +// WrapAllMatcher wraps a single HTML structure, matched by the given Matcher, +// around all elements in the set of matched elements. The matched child is +// cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapAllMatcher(m Matcher) *Selection { + return s.wrapAllNodes(m.MatchAll(s.document.rootNode)...) +} + +// WrapAllSelection wraps a single HTML structure, the first node of the given +// Selection, around all elements in the set of matched elements. The matched +// child is cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapAllSelection(sel *Selection) *Selection { + return s.wrapAllNodes(sel.Nodes...) +} + +// WrapAllHtml wraps the given HTML structure around all elements in the set of +// matched elements. 
The matched child is cloned before being inserted into the +// document. +// +// It returns the original set of elements. +func (s *Selection) WrapAllHtml(htmlStr string) *Selection { + var context *html.Node + var nodes []*html.Node + if len(s.Nodes) > 0 { + context = s.Nodes[0] + if context.Parent != nil { + nodes = parseHtmlWithContext(htmlStr, context) + } else { + nodes = parseHtml(htmlStr) + } + } + return s.wrapAllNodes(nodes...) +} + +func (s *Selection) wrapAllNodes(ns ...*html.Node) *Selection { + if len(ns) > 0 { + return s.WrapAllNode(ns[0]) + } + return s +} + +// WrapAllNode wraps the given node around the first element in the Selection, +// making all other nodes in the Selection children of the given node. The node +// is cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapAllNode(n *html.Node) *Selection { + if s.Size() == 0 { + return s + } + + wrap := cloneNode(n) + + first := s.Nodes[0] + if first.Parent != nil { + first.Parent.InsertBefore(wrap, first) + first.Parent.RemoveChild(first) + } + + for c := getFirstChildEl(wrap); c != nil; c = getFirstChildEl(wrap) { + wrap = c + } + + newSingleSelection(wrap, s.document).AppendSelection(s) + + return s +} + +// WrapInner wraps an HTML structure, matched by the given selector, around the +// content of element in the set of matched elements. The matched child is +// cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapInner(selector string) *Selection { + return s.WrapInnerMatcher(compileMatcher(selector)) +} + +// WrapInnerMatcher wraps an HTML structure, matched by the given selector, +// around the content of element in the set of matched elements. The matched +// child is cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapInnerMatcher(m Matcher) *Selection { + return s.wrapInnerNodes(m.MatchAll(s.document.rootNode)...) +} + +// WrapInnerSelection wraps an HTML structure, matched by the given selector, +// around the content of element in the set of matched elements. The matched +// child is cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapInnerSelection(sel *Selection) *Selection { + return s.wrapInnerNodes(sel.Nodes...) +} + +// WrapInnerHtml wraps an HTML structure, matched by the given selector, around +// the content of element in the set of matched elements. The matched child is +// cloned before being inserted into the document. +// +// It returns the original set of elements. +func (s *Selection) WrapInnerHtml(htmlStr string) *Selection { + nodesMap := make(map[string][]*html.Node) + for _, context := range s.Nodes { + nodes, found := nodesMap[nodeName(context)] + if !found { + nodes = parseHtmlWithContext(htmlStr, context) + nodesMap[nodeName(context)] = nodes + } + newSingleSelection(context, s.document).wrapInnerNodes(cloneNodes(nodes)...) + } + return s +} + +// WrapInnerNode wraps an HTML structure, matched by the given selector, around +// the content of element in the set of matched elements. The matched child is +// cloned before being inserted into the document. +// +// It returns the original set of elements. 
+func (s *Selection) WrapInnerNode(n *html.Node) *Selection { + return s.wrapInnerNodes(n) +} + +func (s *Selection) wrapInnerNodes(ns ...*html.Node) *Selection { + if len(ns) == 0 { + return s + } + + s.Each(func(i int, s *Selection) { + contents := s.Contents() + + if contents.Size() > 0 { + contents.wrapAllNodes(ns...) + } else { + s.AppendNodes(cloneNode(ns[0])) + } + }) + + return s +} + +func parseHtml(h string) []*html.Node { + // Errors are only returned when the io.Reader returns any error besides + // EOF, but strings.Reader never will + nodes, err := html.ParseFragment(strings.NewReader(h), &html.Node{Type: html.ElementNode}) + if err != nil { + panic("goquery: failed to parse HTML: " + err.Error()) + } + return nodes +} + +func parseHtmlWithContext(h string, context *html.Node) []*html.Node { + // Errors are only returned when the io.Reader returns any error besides + // EOF, but strings.Reader never will + nodes, err := html.ParseFragment(strings.NewReader(h), context) + if err != nil { + panic("goquery: failed to parse HTML: " + err.Error()) + } + return nodes +} + +// Get the first child that is an ElementNode +func getFirstChildEl(n *html.Node) *html.Node { + c := n.FirstChild + for c != nil && c.Type != html.ElementNode { + c = c.NextSibling + } + return c +} + +// Deep copy a slice of nodes. +func cloneNodes(ns []*html.Node) []*html.Node { + cns := make([]*html.Node, 0, len(ns)) + + for _, n := range ns { + cns = append(cns, cloneNode(n)) + } + + return cns +} + +// Deep copy a node. The new node has clones of all the original node's +// children but none of its parents or siblings. +func cloneNode(n *html.Node) *html.Node { + nn := &html.Node{ + Type: n.Type, + DataAtom: n.DataAtom, + Data: n.Data, + Attr: make([]html.Attribute, len(n.Attr)), + } + + copy(nn.Attr, n.Attr) + for c := n.FirstChild; c != nil; c = c.NextSibling { + nn.AppendChild(cloneNode(c)) + } + + return nn +} + +func (s *Selection) manipulateNodes(ns []*html.Node, reverse bool, + f func(sn *html.Node, n *html.Node)) *Selection { + + lasti := s.Size() - 1 + + // net.Html doesn't provide document fragments for insertion, so to get + // things in the correct order with After() and Prepend(), the callback + // needs to be called on the reverse of the nodes. + if reverse { + for i, j := 0, len(ns)-1; i < j; i, j = i+1, j-1 { + ns[i], ns[j] = ns[j], ns[i] + } + } + + for i, sn := range s.Nodes { + for _, n := range ns { + if i != lasti { + f(sn, cloneNode(n)) + } else { + if n.Parent != nil { + n.Parent.RemoveChild(n) + } + f(sn, n) + } + } + } + + return s +} + +// eachNodeHtml parses the given html string and inserts the resulting nodes in the dom with the mergeFn. +// The parsed nodes are inserted for each element of the selection. +// isParent can be used to indicate that the elements of the selection should be treated as the parent for the parsed html. +// A cache is used to avoid parsing the html multiple times should the elements of the selection result in the same context. 
+func (s *Selection) eachNodeHtml(htmlStr string, isParent bool, mergeFn func(n *html.Node, nodes []*html.Node)) *Selection { + // cache to avoid parsing the html for the same context multiple times + nodeCache := make(map[string][]*html.Node) + var context *html.Node + for _, n := range s.Nodes { + if isParent { + context = n.Parent + } else { + if n.Type != html.ElementNode { + continue + } + context = n + } + if context != nil { + nodes, found := nodeCache[nodeName(context)] + if !found { + nodes = parseHtmlWithContext(htmlStr, context) + nodeCache[nodeName(context)] = nodes + } + mergeFn(n, cloneNodes(nodes)) + } + } + return s +} diff --git a/vendor/github.com/PuerkitoBio/goquery/property.go b/vendor/github.com/PuerkitoBio/goquery/property.go new file mode 100644 index 00000000..f1c80b93 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/property.go @@ -0,0 +1,274 @@ +package goquery + +import ( + "regexp" + "strings" + + "golang.org/x/net/html" +) + +var rxClassTrim = regexp.MustCompile("[\t\r\n]") + +// Attr gets the specified attribute's value for the first element in the +// Selection. To get the value for each element individually, use a looping +// construct such as Each or Map method. +func (s *Selection) Attr(attrName string) (val string, exists bool) { + if len(s.Nodes) == 0 { + return + } + return getAttributeValue(attrName, s.Nodes[0]) +} + +// AttrOr works like Attr but returns default value if attribute is not present. +func (s *Selection) AttrOr(attrName, defaultValue string) string { + if len(s.Nodes) == 0 { + return defaultValue + } + + val, exists := getAttributeValue(attrName, s.Nodes[0]) + if !exists { + return defaultValue + } + + return val +} + +// RemoveAttr removes the named attribute from each element in the set of matched elements. +func (s *Selection) RemoveAttr(attrName string) *Selection { + for _, n := range s.Nodes { + removeAttr(n, attrName) + } + + return s +} + +// SetAttr sets the given attribute on each element in the set of matched elements. +func (s *Selection) SetAttr(attrName, val string) *Selection { + for _, n := range s.Nodes { + attr := getAttributePtr(attrName, n) + if attr == nil { + n.Attr = append(n.Attr, html.Attribute{Key: attrName, Val: val}) + } else { + attr.Val = val + } + } + + return s +} + +// Text gets the combined text contents of each element in the set of matched +// elements, including their descendants. +func (s *Selection) Text() string { + var builder strings.Builder + + // Slightly optimized vs calling Each: no single selection object created + var f func(*html.Node) + f = func(n *html.Node) { + if n.Type == html.TextNode { + // Keep newlines and spaces, like jQuery + builder.WriteString(n.Data) + } + if n.FirstChild != nil { + for c := n.FirstChild; c != nil; c = c.NextSibling { + f(c) + } + } + } + for _, n := range s.Nodes { + f(n) + } + + return builder.String() +} + +// Size is an alias for Length. +func (s *Selection) Size() int { + return s.Length() +} + +// Length returns the number of elements in the Selection object. +func (s *Selection) Length() int { + return len(s.Nodes) +} + +// Html gets the HTML contents of the first element in the set of matched +// elements. It includes text and comment nodes. +func (s *Selection) Html() (ret string, e error) { + // Since there is no .innerHtml, the HTML content must be re-created from + // the nodes using html.Render. 
+ var builder strings.Builder + + if len(s.Nodes) > 0 { + for c := s.Nodes[0].FirstChild; c != nil; c = c.NextSibling { + e = html.Render(&builder, c) + if e != nil { + return + } + } + ret = builder.String() + } + + return +} + +// AddClass adds the given class(es) to each element in the set of matched elements. +// Multiple class names can be specified, separated by a space or via multiple arguments. +func (s *Selection) AddClass(class ...string) *Selection { + classStr := strings.TrimSpace(strings.Join(class, " ")) + + if classStr == "" { + return s + } + + tcls := getClassesSlice(classStr) + for _, n := range s.Nodes { + curClasses, attr := getClassesAndAttr(n, true) + for _, newClass := range tcls { + if !strings.Contains(curClasses, " "+newClass+" ") { + curClasses += newClass + " " + } + } + + setClasses(n, attr, curClasses) + } + + return s +} + +// HasClass determines whether any of the matched elements are assigned the +// given class. +func (s *Selection) HasClass(class string) bool { + class = " " + class + " " + for _, n := range s.Nodes { + classes, _ := getClassesAndAttr(n, false) + if strings.Contains(classes, class) { + return true + } + } + return false +} + +// RemoveClass removes the given class(es) from each element in the set of matched elements. +// Multiple class names can be specified, separated by a space or via multiple arguments. +// If no class name is provided, all classes are removed. +func (s *Selection) RemoveClass(class ...string) *Selection { + var rclasses []string + + classStr := strings.TrimSpace(strings.Join(class, " ")) + remove := classStr == "" + + if !remove { + rclasses = getClassesSlice(classStr) + } + + for _, n := range s.Nodes { + if remove { + removeAttr(n, "class") + } else { + classes, attr := getClassesAndAttr(n, true) + for _, rcl := range rclasses { + classes = strings.Replace(classes, " "+rcl+" ", " ", -1) + } + + setClasses(n, attr, classes) + } + } + + return s +} + +// ToggleClass adds or removes the given class(es) for each element in the set of matched elements. +// Multiple class names can be specified, separated by a space or via multiple arguments. +func (s *Selection) ToggleClass(class ...string) *Selection { + classStr := strings.TrimSpace(strings.Join(class, " ")) + + if classStr == "" { + return s + } + + tcls := getClassesSlice(classStr) + + for _, n := range s.Nodes { + classes, attr := getClassesAndAttr(n, true) + for _, tcl := range tcls { + if strings.Contains(classes, " "+tcl+" ") { + classes = strings.Replace(classes, " "+tcl+" ", " ", -1) + } else { + classes += tcl + " " + } + } + + setClasses(n, attr, classes) + } + + return s +} + +func getAttributePtr(attrName string, n *html.Node) *html.Attribute { + if n == nil { + return nil + } + + for i, a := range n.Attr { + if a.Key == attrName { + return &n.Attr[i] + } + } + return nil +} + +// Private function to get the specified attribute's value from a node. +func getAttributeValue(attrName string, n *html.Node) (val string, exists bool) { + if a := getAttributePtr(attrName, n); a != nil { + val = a.Val + exists = true + } + return +} + +// Get and normalize the "class" attribute from the node. 
+func getClassesAndAttr(n *html.Node, create bool) (classes string, attr *html.Attribute) { + // Applies only to element nodes + if n.Type == html.ElementNode { + attr = getAttributePtr("class", n) + if attr == nil && create { + n.Attr = append(n.Attr, html.Attribute{ + Key: "class", + Val: "", + }) + attr = &n.Attr[len(n.Attr)-1] + } + } + + if attr == nil { + classes = " " + } else { + classes = rxClassTrim.ReplaceAllString(" "+attr.Val+" ", " ") + } + + return +} + +func getClassesSlice(classes string) []string { + return strings.Split(rxClassTrim.ReplaceAllString(" "+classes+" ", " "), " ") +} + +func removeAttr(n *html.Node, attrName string) { + for i, a := range n.Attr { + if a.Key == attrName { + n.Attr[i], n.Attr[len(n.Attr)-1], n.Attr = + n.Attr[len(n.Attr)-1], html.Attribute{}, n.Attr[:len(n.Attr)-1] + return + } + } +} + +func setClasses(n *html.Node, attr *html.Attribute, classes string) { + classes = strings.TrimSpace(classes) + if classes == "" { + removeAttr(n, "class") + return + } + + attr.Val = classes +} diff --git a/vendor/github.com/PuerkitoBio/goquery/query.go b/vendor/github.com/PuerkitoBio/goquery/query.go new file mode 100644 index 00000000..fe86bf0b --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/query.go @@ -0,0 +1,49 @@ +package goquery + +import "golang.org/x/net/html" + +// Is checks the current matched set of elements against a selector and +// returns true if at least one of these elements matches. +func (s *Selection) Is(selector string) bool { + return s.IsMatcher(compileMatcher(selector)) +} + +// IsMatcher checks the current matched set of elements against a matcher and +// returns true if at least one of these elements matches. +func (s *Selection) IsMatcher(m Matcher) bool { + if len(s.Nodes) > 0 { + if len(s.Nodes) == 1 { + return m.Match(s.Nodes[0]) + } + return len(m.Filter(s.Nodes)) > 0 + } + + return false +} + +// IsFunction checks the current matched set of elements against a predicate and +// returns true if at least one of these elements matches. +func (s *Selection) IsFunction(f func(int, *Selection) bool) bool { + return s.FilterFunction(f).Length() > 0 +} + +// IsSelection checks the current matched set of elements against a Selection object +// and returns true if at least one of these elements matches. +func (s *Selection) IsSelection(sel *Selection) bool { + return s.FilterSelection(sel).Length() > 0 +} + +// IsNodes checks the current matched set of elements against the specified nodes +// and returns true if at least one of these elements matches. +func (s *Selection) IsNodes(nodes ...*html.Node) bool { + return s.FilterNodes(nodes...).Length() > 0 +} + +// Contains returns true if the specified Node is within, +// at any depth, one of the nodes in the Selection object. +// It is NOT inclusive, to behave like jQuery's implementation, and +// unlike Javascript's .contains, so if the contained +// node is itself in the selection, it returns false. +func (s *Selection) Contains(n *html.Node) bool { + return sliceContains(s.Nodes, n) +} diff --git a/vendor/github.com/PuerkitoBio/goquery/traversal.go b/vendor/github.com/PuerkitoBio/goquery/traversal.go new file mode 100644 index 00000000..c45fa5db --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/traversal.go @@ -0,0 +1,704 @@ +package goquery + +import "golang.org/x/net/html" + +type siblingType int + +// Sibling type, used internally when iterating over children at the same +// level (siblings) to specify which nodes are requested. 
+const ( + siblingPrevUntil siblingType = iota - 3 + siblingPrevAll + siblingPrev + siblingAll + siblingNext + siblingNextAll + siblingNextUntil + siblingAllIncludingNonElements +) + +// Find gets the descendants of each element in the current set of matched +// elements, filtered by a selector. It returns a new Selection object +// containing these matched elements. +// +// Note that as for all methods accepting a selector string, the selector is +// compiled and applied by the cascadia package and inherits its behavior and +// constraints regarding supported selectors. See the note on cascadia in +// the goquery documentation here: +// https://github.com/PuerkitoBio/goquery?tab=readme-ov-file#api +func (s *Selection) Find(selector string) *Selection { + return pushStack(s, findWithMatcher(s.Nodes, compileMatcher(selector))) +} + +// FindMatcher gets the descendants of each element in the current set of matched +// elements, filtered by the matcher. It returns a new Selection object +// containing these matched elements. +func (s *Selection) FindMatcher(m Matcher) *Selection { + return pushStack(s, findWithMatcher(s.Nodes, m)) +} + +// FindSelection gets the descendants of each element in the current +// Selection, filtered by a Selection. It returns a new Selection object +// containing these matched elements. +func (s *Selection) FindSelection(sel *Selection) *Selection { + if sel == nil { + return pushStack(s, nil) + } + return s.FindNodes(sel.Nodes...) +} + +// FindNodes gets the descendants of each element in the current +// Selection, filtered by some nodes. It returns a new Selection object +// containing these matched elements. +func (s *Selection) FindNodes(nodes ...*html.Node) *Selection { + return pushStack(s, mapNodes(nodes, func(i int, n *html.Node) []*html.Node { + if sliceContains(s.Nodes, n) { + return []*html.Node{n} + } + return nil + })) +} + +// Contents gets the children of each element in the Selection, +// including text and comment nodes. It returns a new Selection object +// containing these elements. +func (s *Selection) Contents() *Selection { + return pushStack(s, getChildrenNodes(s.Nodes, siblingAllIncludingNonElements)) +} + +// ContentsFiltered gets the children of each element in the Selection, +// filtered by the specified selector. It returns a new Selection +// object containing these elements. Since selectors only act on Element nodes, +// this function is an alias to ChildrenFiltered unless the selector is empty, +// in which case it is an alias to Contents. +func (s *Selection) ContentsFiltered(selector string) *Selection { + if selector != "" { + return s.ChildrenFiltered(selector) + } + return s.Contents() +} + +// ContentsMatcher gets the children of each element in the Selection, +// filtered by the specified matcher. It returns a new Selection +// object containing these elements. Since matchers only act on Element nodes, +// this function is an alias to ChildrenMatcher. +func (s *Selection) ContentsMatcher(m Matcher) *Selection { + return s.ChildrenMatcher(m) +} + +// Children gets the child elements of each element in the Selection. +// It returns a new Selection object containing these elements. +func (s *Selection) Children() *Selection { + return pushStack(s, getChildrenNodes(s.Nodes, siblingAll)) +} + +// ChildrenFiltered gets the child elements of each element in the Selection, +// filtered by the specified selector. It returns a new +// Selection object containing these elements. 
+func (s *Selection) ChildrenFiltered(selector string) *Selection { + return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), compileMatcher(selector)) +} + +// ChildrenMatcher gets the child elements of each element in the Selection, +// filtered by the specified matcher. It returns a new +// Selection object containing these elements. +func (s *Selection) ChildrenMatcher(m Matcher) *Selection { + return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), m) +} + +// Parent gets the parent of each element in the Selection. It returns a +// new Selection object containing the matched elements. +func (s *Selection) Parent() *Selection { + return pushStack(s, getParentNodes(s.Nodes)) +} + +// ParentFiltered gets the parent of each element in the Selection filtered by a +// selector. It returns a new Selection object containing the matched elements. +func (s *Selection) ParentFiltered(selector string) *Selection { + return filterAndPush(s, getParentNodes(s.Nodes), compileMatcher(selector)) +} + +// ParentMatcher gets the parent of each element in the Selection filtered by a +// matcher. It returns a new Selection object containing the matched elements. +func (s *Selection) ParentMatcher(m Matcher) *Selection { + return filterAndPush(s, getParentNodes(s.Nodes), m) +} + +// Closest gets the first element that matches the selector by testing the +// element itself and traversing up through its ancestors in the DOM tree. +func (s *Selection) Closest(selector string) *Selection { + cs := compileMatcher(selector) + return s.ClosestMatcher(cs) +} + +// ClosestMatcher gets the first element that matches the matcher by testing the +// element itself and traversing up through its ancestors in the DOM tree. +func (s *Selection) ClosestMatcher(m Matcher) *Selection { + return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node { + // For each node in the selection, test the node itself, then each parent + // until a match is found. + for ; n != nil; n = n.Parent { + if m.Match(n) { + return []*html.Node{n} + } + } + return nil + })) +} + +// ClosestNodes gets the first element that matches one of the nodes by testing the +// element itself and traversing up through its ancestors in the DOM tree. +func (s *Selection) ClosestNodes(nodes ...*html.Node) *Selection { + set := make(map[*html.Node]bool) + for _, n := range nodes { + set[n] = true + } + return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node { + // For each node in the selection, test the node itself, then each parent + // until a match is found. + for ; n != nil; n = n.Parent { + if set[n] { + return []*html.Node{n} + } + } + return nil + })) +} + +// ClosestSelection gets the first element that matches one of the nodes in the +// Selection by testing the element itself and traversing up through its ancestors +// in the DOM tree. +func (s *Selection) ClosestSelection(sel *Selection) *Selection { + if sel == nil { + return pushStack(s, nil) + } + return s.ClosestNodes(sel.Nodes...) +} + +// Parents gets the ancestors of each element in the current Selection. It +// returns a new Selection object with the matched elements. +func (s *Selection) Parents() *Selection { + return pushStack(s, getParentsNodes(s.Nodes, nil, nil)) +} + +// ParentsFiltered gets the ancestors of each element in the current +// Selection. It returns a new Selection object with the matched elements. 
+func (s *Selection) ParentsFiltered(selector string) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), compileMatcher(selector)) +} + +// ParentsMatcher gets the ancestors of each element in the current +// Selection. It returns a new Selection object with the matched elements. +func (s *Selection) ParentsMatcher(m Matcher) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), m) +} + +// ParentsUntil gets the ancestors of each element in the Selection, up to but +// not including the element matched by the selector. It returns a new Selection +// object containing the matched elements. +func (s *Selection) ParentsUntil(selector string) *Selection { + return pushStack(s, getParentsNodes(s.Nodes, compileMatcher(selector), nil)) +} + +// ParentsUntilMatcher gets the ancestors of each element in the Selection, up to but +// not including the element matched by the matcher. It returns a new Selection +// object containing the matched elements. +func (s *Selection) ParentsUntilMatcher(m Matcher) *Selection { + return pushStack(s, getParentsNodes(s.Nodes, m, nil)) +} + +// ParentsUntilSelection gets the ancestors of each element in the Selection, +// up to but not including the elements in the specified Selection. It returns a +// new Selection object containing the matched elements. +func (s *Selection) ParentsUntilSelection(sel *Selection) *Selection { + if sel == nil { + return s.Parents() + } + return s.ParentsUntilNodes(sel.Nodes...) +} + +// ParentsUntilNodes gets the ancestors of each element in the Selection, +// up to but not including the specified nodes. It returns a +// new Selection object containing the matched elements. +func (s *Selection) ParentsUntilNodes(nodes ...*html.Node) *Selection { + return pushStack(s, getParentsNodes(s.Nodes, nil, nodes)) +} + +// ParentsFilteredUntil is like ParentsUntil, with the option to filter the +// results based on a selector string. It returns a new Selection +// object containing the matched elements. +func (s *Selection) ParentsFilteredUntil(filterSelector, untilSelector string) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, compileMatcher(untilSelector), nil), compileMatcher(filterSelector)) +} + +// ParentsFilteredUntilMatcher is like ParentsUntilMatcher, with the option to filter the +// results based on a matcher. It returns a new Selection object containing the matched elements. +func (s *Selection) ParentsFilteredUntilMatcher(filter, until Matcher) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, until, nil), filter) +} + +// ParentsFilteredUntilSelection is like ParentsUntilSelection, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. +func (s *Selection) ParentsFilteredUntilSelection(filterSelector string, sel *Selection) *Selection { + return s.ParentsMatcherUntilSelection(compileMatcher(filterSelector), sel) +} + +// ParentsMatcherUntilSelection is like ParentsUntilSelection, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. +func (s *Selection) ParentsMatcherUntilSelection(filter Matcher, sel *Selection) *Selection { + if sel == nil { + return s.ParentsMatcher(filter) + } + return s.ParentsMatcherUntilNodes(filter, sel.Nodes...) +} + +// ParentsFilteredUntilNodes is like ParentsUntilNodes, with the +// option to filter the results based on a selector string. 
It returns a new +// Selection object containing the matched elements. +func (s *Selection) ParentsFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), compileMatcher(filterSelector)) +} + +// ParentsMatcherUntilNodes is like ParentsUntilNodes, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. +func (s *Selection) ParentsMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection { + return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), filter) +} + +// Siblings gets the siblings of each element in the Selection. It returns +// a new Selection object containing the matched elements. +func (s *Selection) Siblings() *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil)) +} + +// SiblingsFiltered gets the siblings of each element in the Selection +// filtered by a selector. It returns a new Selection object containing the +// matched elements. +func (s *Selection) SiblingsFiltered(selector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), compileMatcher(selector)) +} + +// SiblingsMatcher gets the siblings of each element in the Selection +// filtered by a matcher. It returns a new Selection object containing the +// matched elements. +func (s *Selection) SiblingsMatcher(m Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), m) +} + +// Next gets the immediately following sibling of each element in the +// Selection. It returns a new Selection object containing the matched elements. +func (s *Selection) Next() *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil)) +} + +// NextFiltered gets the immediately following sibling of each element in the +// Selection filtered by a selector. It returns a new Selection object +// containing the matched elements. +func (s *Selection) NextFiltered(selector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), compileMatcher(selector)) +} + +// NextMatcher gets the immediately following sibling of each element in the +// Selection filtered by a matcher. It returns a new Selection object +// containing the matched elements. +func (s *Selection) NextMatcher(m Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), m) +} + +// NextAll gets all the following siblings of each element in the +// Selection. It returns a new Selection object containing the matched elements. +func (s *Selection) NextAll() *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil)) +} + +// NextAllFiltered gets all the following siblings of each element in the +// Selection filtered by a selector. It returns a new Selection object +// containing the matched elements. +func (s *Selection) NextAllFiltered(selector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), compileMatcher(selector)) +} + +// NextAllMatcher gets all the following siblings of each element in the +// Selection filtered by a matcher. It returns a new Selection object +// containing the matched elements. +func (s *Selection) NextAllMatcher(m Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), m) +} + +// Prev gets the immediately preceding sibling of each element in the +// Selection. 
It returns a new Selection object containing the matched elements. +func (s *Selection) Prev() *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil)) +} + +// PrevFiltered gets the immediately preceding sibling of each element in the +// Selection filtered by a selector. It returns a new Selection object +// containing the matched elements. +func (s *Selection) PrevFiltered(selector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), compileMatcher(selector)) +} + +// PrevMatcher gets the immediately preceding sibling of each element in the +// Selection filtered by a matcher. It returns a new Selection object +// containing the matched elements. +func (s *Selection) PrevMatcher(m Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), m) +} + +// PrevAll gets all the preceding siblings of each element in the +// Selection. It returns a new Selection object containing the matched elements. +func (s *Selection) PrevAll() *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil)) +} + +// PrevAllFiltered gets all the preceding siblings of each element in the +// Selection filtered by a selector. It returns a new Selection object +// containing the matched elements. +func (s *Selection) PrevAllFiltered(selector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), compileMatcher(selector)) +} + +// PrevAllMatcher gets all the preceding siblings of each element in the +// Selection filtered by a matcher. It returns a new Selection object +// containing the matched elements. +func (s *Selection) PrevAllMatcher(m Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), m) +} + +// NextUntil gets all following siblings of each element up to but not +// including the element matched by the selector. It returns a new Selection +// object containing the matched elements. +func (s *Selection) NextUntil(selector string) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil, + compileMatcher(selector), nil)) +} + +// NextUntilMatcher gets all following siblings of each element up to but not +// including the element matched by the matcher. It returns a new Selection +// object containing the matched elements. +func (s *Selection) NextUntilMatcher(m Matcher) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil, + m, nil)) +} + +// NextUntilSelection gets all following siblings of each element up to but not +// including the element matched by the Selection. It returns a new Selection +// object containing the matched elements. +func (s *Selection) NextUntilSelection(sel *Selection) *Selection { + if sel == nil { + return s.NextAll() + } + return s.NextUntilNodes(sel.Nodes...) +} + +// NextUntilNodes gets all following siblings of each element up to but not +// including the element matched by the nodes. It returns a new Selection +// object containing the matched elements. +func (s *Selection) NextUntilNodes(nodes ...*html.Node) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil, + nil, nodes)) +} + +// PrevUntil gets all preceding siblings of each element up to but not +// including the element matched by the selector. It returns a new Selection +// object containing the matched elements. 
+func (s *Selection) PrevUntil(selector string) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + compileMatcher(selector), nil)) +} + +// PrevUntilMatcher gets all preceding siblings of each element up to but not +// including the element matched by the matcher. It returns a new Selection +// object containing the matched elements. +func (s *Selection) PrevUntilMatcher(m Matcher) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + m, nil)) +} + +// PrevUntilSelection gets all preceding siblings of each element up to but not +// including the element matched by the Selection. It returns a new Selection +// object containing the matched elements. +func (s *Selection) PrevUntilSelection(sel *Selection) *Selection { + if sel == nil { + return s.PrevAll() + } + return s.PrevUntilNodes(sel.Nodes...) +} + +// PrevUntilNodes gets all preceding siblings of each element up to but not +// including the element matched by the nodes. It returns a new Selection +// object containing the matched elements. +func (s *Selection) PrevUntilNodes(nodes ...*html.Node) *Selection { + return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + nil, nodes)) +} + +// NextFilteredUntil is like NextUntil, with the option to filter +// the results based on a selector string. +// It returns a new Selection object containing the matched elements. +func (s *Selection) NextFilteredUntil(filterSelector, untilSelector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil, + compileMatcher(untilSelector), nil), compileMatcher(filterSelector)) +} + +// NextFilteredUntilMatcher is like NextUntilMatcher, with the option to filter +// the results based on a matcher. +// It returns a new Selection object containing the matched elements. +func (s *Selection) NextFilteredUntilMatcher(filter, until Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil, + until, nil), filter) +} + +// NextFilteredUntilSelection is like NextUntilSelection, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. +func (s *Selection) NextFilteredUntilSelection(filterSelector string, sel *Selection) *Selection { + return s.NextMatcherUntilSelection(compileMatcher(filterSelector), sel) +} + +// NextMatcherUntilSelection is like NextUntilSelection, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. +func (s *Selection) NextMatcherUntilSelection(filter Matcher, sel *Selection) *Selection { + if sel == nil { + return s.NextMatcher(filter) + } + return s.NextMatcherUntilNodes(filter, sel.Nodes...) +} + +// NextFilteredUntilNodes is like NextUntilNodes, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. +func (s *Selection) NextFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil, + nil, nodes), compileMatcher(filterSelector)) +} + +// NextMatcherUntilNodes is like NextUntilNodes, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. 
+func (s *Selection) NextMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil, + nil, nodes), filter) +} + +// PrevFilteredUntil is like PrevUntil, with the option to filter +// the results based on a selector string. +// It returns a new Selection object containing the matched elements. +func (s *Selection) PrevFilteredUntil(filterSelector, untilSelector string) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + compileMatcher(untilSelector), nil), compileMatcher(filterSelector)) +} + +// PrevFilteredUntilMatcher is like PrevUntilMatcher, with the option to filter +// the results based on a matcher. +// It returns a new Selection object containing the matched elements. +func (s *Selection) PrevFilteredUntilMatcher(filter, until Matcher) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + until, nil), filter) +} + +// PrevFilteredUntilSelection is like PrevUntilSelection, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. +func (s *Selection) PrevFilteredUntilSelection(filterSelector string, sel *Selection) *Selection { + return s.PrevMatcherUntilSelection(compileMatcher(filterSelector), sel) +} + +// PrevMatcherUntilSelection is like PrevUntilSelection, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. +func (s *Selection) PrevMatcherUntilSelection(filter Matcher, sel *Selection) *Selection { + if sel == nil { + return s.PrevMatcher(filter) + } + return s.PrevMatcherUntilNodes(filter, sel.Nodes...) +} + +// PrevFilteredUntilNodes is like PrevUntilNodes, with the +// option to filter the results based on a selector string. It returns a new +// Selection object containing the matched elements. +func (s *Selection) PrevFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + nil, nodes), compileMatcher(filterSelector)) +} + +// PrevMatcherUntilNodes is like PrevUntilNodes, with the +// option to filter the results based on a matcher. It returns a new +// Selection object containing the matched elements. +func (s *Selection) PrevMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection { + return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil, + nil, nodes), filter) +} + +// Filter and push filters the nodes based on a matcher, and pushes the results +// on the stack, with the srcSel as previous selection. +func filterAndPush(srcSel *Selection, nodes []*html.Node, m Matcher) *Selection { + // Create a temporary Selection with the specified nodes to filter using winnow + sel := &Selection{nodes, srcSel.document, nil} + // Filter based on matcher and push on stack + return pushStack(srcSel, winnow(sel, m, true)) +} + +// Internal implementation of Find that return raw nodes. +func findWithMatcher(nodes []*html.Node, m Matcher) []*html.Node { + // Map nodes to find the matches within the children of each node + return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) { + // Go down one level, becausejQuery's Find selects only within descendants + for c := n.FirstChild; c != nil; c = c.NextSibling { + if c.Type == html.ElementNode { + result = append(result, m.MatchAll(c)...) 
+ } + } + return + }) +} + +// Internal implementation to get all parent nodes, stopping at the specified +// node (or nil if no stop). +func getParentsNodes(nodes []*html.Node, stopm Matcher, stopNodes []*html.Node) []*html.Node { + return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) { + for p := n.Parent; p != nil; p = p.Parent { + sel := newSingleSelection(p, nil) + if stopm != nil { + if sel.IsMatcher(stopm) { + break + } + } else if len(stopNodes) > 0 { + if sel.IsNodes(stopNodes...) { + break + } + } + if p.Type == html.ElementNode { + result = append(result, p) + } + } + return + }) +} + +// Internal implementation of sibling nodes that return a raw slice of matches. +func getSiblingNodes(nodes []*html.Node, st siblingType, untilm Matcher, untilNodes []*html.Node) []*html.Node { + var f func(*html.Node) bool + + // If the requested siblings are ...Until, create the test function to + // determine if the until condition is reached (returns true if it is) + if st == siblingNextUntil || st == siblingPrevUntil { + f = func(n *html.Node) bool { + if untilm != nil { + // Matcher-based condition + sel := newSingleSelection(n, nil) + return sel.IsMatcher(untilm) + } else if len(untilNodes) > 0 { + // Nodes-based condition + sel := newSingleSelection(n, nil) + return sel.IsNodes(untilNodes...) + } + return false + } + } + + return mapNodes(nodes, func(i int, n *html.Node) []*html.Node { + return getChildrenWithSiblingType(n.Parent, st, n, f) + }) +} + +// Gets the children nodes of each node in the specified slice of nodes, +// based on the sibling type request. +func getChildrenNodes(nodes []*html.Node, st siblingType) []*html.Node { + return mapNodes(nodes, func(i int, n *html.Node) []*html.Node { + return getChildrenWithSiblingType(n, st, nil, nil) + }) +} + +// Gets the children of the specified parent, based on the requested sibling +// type, skipping a specified node if required. 
+func getChildrenWithSiblingType(parent *html.Node, st siblingType, skipNode *html.Node, + untilFunc func(*html.Node) bool) (result []*html.Node) { + + // Create the iterator function + var iter = func(cur *html.Node) (ret *html.Node) { + // Based on the sibling type requested, iterate the right way + for { + switch st { + case siblingAll, siblingAllIncludingNonElements: + if cur == nil { + // First iteration, start with first child of parent + // Skip node if required + if ret = parent.FirstChild; ret == skipNode && skipNode != nil { + ret = skipNode.NextSibling + } + } else { + // Skip node if required + if ret = cur.NextSibling; ret == skipNode && skipNode != nil { + ret = skipNode.NextSibling + } + } + case siblingPrev, siblingPrevAll, siblingPrevUntil: + if cur == nil { + // Start with previous sibling of the skip node + ret = skipNode.PrevSibling + } else { + ret = cur.PrevSibling + } + case siblingNext, siblingNextAll, siblingNextUntil: + if cur == nil { + // Start with next sibling of the skip node + ret = skipNode.NextSibling + } else { + ret = cur.NextSibling + } + default: + panic("Invalid sibling type.") + } + if ret == nil || ret.Type == html.ElementNode || st == siblingAllIncludingNonElements { + return + } + // Not a valid node, try again from this one + cur = ret + } + } + + for c := iter(nil); c != nil; c = iter(c) { + // If this is an ...Until case, test before append (returns true + // if the until condition is reached) + if st == siblingNextUntil || st == siblingPrevUntil { + if untilFunc(c) { + return + } + } + result = append(result, c) + if st == siblingNext || st == siblingPrev { + // Only one node was requested (immediate next or previous), so exit + return + } + } + return +} + +// Internal implementation of parent nodes that return a raw slice of Nodes. +func getParentNodes(nodes []*html.Node) []*html.Node { + return mapNodes(nodes, func(i int, n *html.Node) []*html.Node { + if n.Parent != nil && n.Parent.Type == html.ElementNode { + return []*html.Node{n.Parent} + } + return nil + }) +} + +// Internal map function used by many traversing methods. Takes the source nodes +// to iterate on and the mapping function that returns an array of nodes. +// Returns an array of nodes mapped by calling the callback function once for +// each node in the source nodes. +func mapNodes(nodes []*html.Node, f func(int, *html.Node) []*html.Node) (result []*html.Node) { + set := make(map[*html.Node]bool) + for i, n := range nodes { + if vals := f(i, n); len(vals) > 0 { + result = appendWithoutDuplicates(result, vals, set) + } + } + return result +} diff --git a/vendor/github.com/PuerkitoBio/goquery/type.go b/vendor/github.com/PuerkitoBio/goquery/type.go new file mode 100644 index 00000000..6646c143 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/type.go @@ -0,0 +1,203 @@ +package goquery + +import ( + "errors" + "io" + "net/http" + "net/url" + + "github.com/andybalholm/cascadia" + "golang.org/x/net/html" +) + +// Document represents an HTML document to be manipulated. Unlike jQuery, which +// is loaded as part of a DOM document, and thus acts upon its containing +// document, GoQuery doesn't know which HTML document to act upon. So it needs +// to be told, and that's what the Document class is for. It holds the root +// document node to manipulate, and can make selections on this document. +type Document struct { + *Selection + Url *url.URL + rootNode *html.Node +} + +// NewDocumentFromNode is a Document constructor that takes a root html Node +// as argument. 
+func NewDocumentFromNode(root *html.Node) *Document { + return newDocument(root, nil) +} + +// NewDocument is a Document constructor that takes a string URL as argument. +// It loads the specified document, parses it, and stores the root Document +// node, ready to be manipulated. +// +// Deprecated: Use the net/http standard library package to make the request +// and validate the response before calling goquery.NewDocumentFromReader +// with the response's body. +func NewDocument(url string) (*Document, error) { + // Load the URL + res, e := http.Get(url) + if e != nil { + return nil, e + } + return NewDocumentFromResponse(res) +} + +// NewDocumentFromReader returns a Document from an io.Reader. +// It returns an error as second value if the reader's data cannot be parsed +// as html. It does not check if the reader is also an io.Closer, the +// provided reader is never closed by this call. It is the responsibility +// of the caller to close it if required. +func NewDocumentFromReader(r io.Reader) (*Document, error) { + root, e := html.Parse(r) + if e != nil { + return nil, e + } + return newDocument(root, nil), nil +} + +// NewDocumentFromResponse is another Document constructor that takes an http response as argument. +// It loads the specified response's document, parses it, and stores the root Document +// node, ready to be manipulated. The response's body is closed on return. +// +// Deprecated: Use goquery.NewDocumentFromReader with the response's body. +func NewDocumentFromResponse(res *http.Response) (*Document, error) { + if res == nil { + return nil, errors.New("Response is nil") + } + defer res.Body.Close() + if res.Request == nil { + return nil, errors.New("Response.Request is nil") + } + + // Parse the HTML into nodes + root, e := html.Parse(res.Body) + if e != nil { + return nil, e + } + + // Create and fill the document + return newDocument(root, res.Request.URL), nil +} + +// CloneDocument creates a deep-clone of a document. +func CloneDocument(doc *Document) *Document { + return newDocument(cloneNode(doc.rootNode), doc.Url) +} + +// Private constructor, make sure all fields are correctly filled. +func newDocument(root *html.Node, url *url.URL) *Document { + // Create and fill the document + d := &Document{nil, url, root} + d.Selection = newSingleSelection(root, d) + return d +} + +// Selection represents a collection of nodes matching some criteria. The +// initial Selection can be created by using Document.Find, and then +// manipulated using the jQuery-like chainable syntax and methods. +type Selection struct { + Nodes []*html.Node + document *Document + prevSel *Selection +} + +// Helper constructor to create an empty selection +func newEmptySelection(doc *Document) *Selection { + return &Selection{nil, doc, nil} +} + +// Helper constructor to create a selection of only one node +func newSingleSelection(node *html.Node, doc *Document) *Selection { + return &Selection{[]*html.Node{node}, doc, nil} +} + +// Matcher is an interface that defines the methods to match +// HTML nodes against a compiled selector string. Cascadia's +// Selector implements this interface. +type Matcher interface { + Match(*html.Node) bool + MatchAll(*html.Node) []*html.Node + Filter([]*html.Node) []*html.Node +} + +// Single compiles a selector string to a Matcher that stops after the first +// match is found. +// +// By default, Selection.Find and other functions that accept a selector string +// to select nodes will use all matches corresponding to that selector. 
By +// using the Matcher returned by Single, at most the first match will be +// selected. +// +// For example, those two statements are semantically equivalent: +// +// sel1 := doc.Find("a").First() +// sel2 := doc.FindMatcher(goquery.Single("a")) +// +// The one using Single is optimized to be potentially much faster on large +// documents. +// +// Only the behaviour of the MatchAll method of the Matcher interface is +// altered compared to standard Matchers. This means that the single-selection +// property of the Matcher only applies for Selection methods where the Matcher +// is used to select nodes, not to filter or check if a node matches the +// Matcher - in those cases, the behaviour of the Matcher is unchanged (e.g. +// FilterMatcher(Single("div")) will still result in a Selection with multiple +// "div"s if there were many "div"s in the Selection to begin with). +func Single(selector string) Matcher { + return singleMatcher{compileMatcher(selector)} +} + +// SingleMatcher returns a Matcher matches the same nodes as m, but that stops +// after the first match is found. +// +// See the documentation of function Single for more details. +func SingleMatcher(m Matcher) Matcher { + if _, ok := m.(singleMatcher); ok { + // m is already a singleMatcher + return m + } + return singleMatcher{m} +} + +// compileMatcher compiles the selector string s and returns +// the corresponding Matcher. If s is an invalid selector string, +// it returns a Matcher that fails all matches. +func compileMatcher(s string) Matcher { + cs, err := cascadia.Compile(s) + if err != nil { + return invalidMatcher{} + } + return cs +} + +type singleMatcher struct { + Matcher +} + +func (m singleMatcher) MatchAll(n *html.Node) []*html.Node { + // Optimized version - stops finding at the first match (cascadia-compiled + // matchers all use this code path). + if mm, ok := m.Matcher.(interface{ MatchFirst(*html.Node) *html.Node }); ok { + node := mm.MatchFirst(n) + if node == nil { + return nil + } + return []*html.Node{node} + } + + // Fallback version, for e.g. test mocks that don't provide the MatchFirst + // method. + nodes := m.Matcher.MatchAll(n) + if len(nodes) > 0 { + return nodes[:1:1] + } + return nil +} + +// invalidMatcher is a Matcher that always fails to match. +type invalidMatcher struct{} + +func (invalidMatcher) Match(n *html.Node) bool { return false } +func (invalidMatcher) MatchAll(n *html.Node) []*html.Node { return nil } +func (invalidMatcher) Filter(ns []*html.Node) []*html.Node { return nil } diff --git a/vendor/github.com/PuerkitoBio/goquery/utilities.go b/vendor/github.com/PuerkitoBio/goquery/utilities.go new file mode 100644 index 00000000..361795b4 --- /dev/null +++ b/vendor/github.com/PuerkitoBio/goquery/utilities.go @@ -0,0 +1,177 @@ +package goquery + +import ( + "io" + "strings" + + "golang.org/x/net/html" +) + +// used to determine if a set (map[*html.Node]bool) should be used +// instead of iterating over a slice. The set uses more memory and +// is slower than slice iteration for small N. +const minNodesForSet = 1000 + +var nodeNames = []string{ + html.ErrorNode: "#error", + html.TextNode: "#text", + html.DocumentNode: "#document", + html.CommentNode: "#comment", +} + +// NodeName returns the node name of the first element in the selection. +// It tries to behave in a similar way as the DOM's nodeName property +// (https://developer.mozilla.org/en-US/docs/Web/API/Node/nodeName). 
+// +// Go's net/html package defines the following node types, listed with +// the corresponding returned value from this function: +// +// ErrorNode : #error +// TextNode : #text +// DocumentNode : #document +// ElementNode : the element's tag name +// CommentNode : #comment +// DoctypeNode : the name of the document type +func NodeName(s *Selection) string { + if s.Length() == 0 { + return "" + } + return nodeName(s.Get(0)) +} + +// nodeName returns the node name of the given html node. +// See NodeName for additional details on behaviour. +func nodeName(node *html.Node) string { + if node == nil { + return "" + } + + switch node.Type { + case html.ElementNode, html.DoctypeNode: + return node.Data + default: + if int(node.Type) < len(nodeNames) { + return nodeNames[node.Type] + } + return "" + } +} + +// Render renders the HTML of the first item in the selection and writes it to +// the writer. It behaves the same as OuterHtml but writes to w instead of +// returning the string. +func Render(w io.Writer, s *Selection) error { + if s.Length() == 0 { + return nil + } + n := s.Get(0) + return html.Render(w, n) +} + +// OuterHtml returns the outer HTML rendering of the first item in +// the selection - that is, the HTML including the first element's +// tag and attributes. +// +// Unlike Html, this is a function and not a method on the Selection, +// because this is not a jQuery method (in javascript-land, this is +// a property provided by the DOM). +func OuterHtml(s *Selection) (string, error) { + var builder strings.Builder + if err := Render(&builder, s); err != nil { + return "", err + } + return builder.String(), nil +} + +// Loop through all container nodes to search for the target node. +func sliceContains(container []*html.Node, contained *html.Node) bool { + for _, n := range container { + if nodeContains(n, contained) { + return true + } + } + + return false +} + +// Checks if the contained node is within the container node. +func nodeContains(container *html.Node, contained *html.Node) bool { + // Check if the parent of the contained node is the container node, traversing + // upward until the top is reached, or the container is found. + for contained = contained.Parent; contained != nil; contained = contained.Parent { + if container == contained { + return true + } + } + return false +} + +// Checks if the target node is in the slice of nodes. +func isInSlice(slice []*html.Node, node *html.Node) bool { + return indexInSlice(slice, node) > -1 +} + +// Returns the index of the target node in the slice, or -1. +func indexInSlice(slice []*html.Node, node *html.Node) int { + if node != nil { + for i, n := range slice { + if n == node { + return i + } + } + } + return -1 +} + +// Appends the new nodes to the target slice, making sure no duplicate is added. +// There is no check to the original state of the target slice, so it may still +// contain duplicates. The target slice is returned because append() may create +// a new underlying array. If targetSet is nil, a local set is created with the +// target if len(target) + len(nodes) is greater than minNodesForSet. +func appendWithoutDuplicates(target []*html.Node, nodes []*html.Node, targetSet map[*html.Node]bool) []*html.Node { + // if there are not that many nodes, don't use the map, faster to just use nested loops + // (unless a non-nil targetSet is passed, in which case the caller knows better). 
+ if targetSet == nil && len(target)+len(nodes) < minNodesForSet { + for _, n := range nodes { + if !isInSlice(target, n) { + target = append(target, n) + } + } + return target + } + + // if a targetSet is passed, then assume it is reliable, otherwise create one + // and initialize it with the current target contents. + if targetSet == nil { + targetSet = make(map[*html.Node]bool, len(target)) + for _, n := range target { + targetSet[n] = true + } + } + for _, n := range nodes { + if !targetSet[n] { + target = append(target, n) + targetSet[n] = true + } + } + + return target +} + +// Loop through a selection, returning only those nodes that pass the predicate +// function. +func grep(sel *Selection, predicate func(i int, s *Selection) bool) (result []*html.Node) { + for i, n := range sel.Nodes { + if predicate(i, newSingleSelection(n, sel.document)) { + result = append(result, n) + } + } + return result +} + +// Creates a new Selection object based on the specified nodes, and keeps the +// source Selection object on the stack (linked list). +func pushStack(fromSel *Selection, nodes []*html.Node) *Selection { + result := &Selection{nodes, fromSel.document, fromSel} + return result +} diff --git a/vendor/github.com/andybalholm/cascadia/.travis.yml b/vendor/github.com/andybalholm/cascadia/.travis.yml new file mode 100644 index 00000000..6f227517 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/.travis.yml @@ -0,0 +1,14 @@ +language: go + +go: + - 1.3 + - 1.4 + +install: + - go get github.com/andybalholm/cascadia + +script: + - go test -v + +notifications: + email: false diff --git a/vendor/github.com/andybalholm/cascadia/LICENSE b/vendor/github.com/andybalholm/cascadia/LICENSE new file mode 100644 index 00000000..ee5ad35a --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2011 Andy Balholm. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
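The goquery files vendored above document a jQuery-like Selection API (NewDocumentFromReader, Find, Each, Text, AttrOr, plus the traversal helpers in traversal.go). For orientation only, and not as part of the vendored code, here is a minimal usage sketch; the HTML snippet, selectors, and variable names are invented for illustration, and the calls are limited to functions documented in the files above.

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

func main() {
	// Invented HTML fragment, used only to exercise the API.
	const page = `<div class="hero"><h3>Ana</h3><a href="/hero/ana">stats</a></div>`

	// NewDocumentFromReader parses the HTML; the reader is not closed by goquery.
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(page))
	if err != nil {
		log.Fatal(err)
	}

	// Find returns a new Selection; Each visits every matched element.
	doc.Find("div.hero").Each(func(_ int, s *goquery.Selection) {
		name := s.Find("h3").Text()                  // combined text of descendants
		link := s.Find("a").AttrOr("href", "(none)") // attribute value with a default
		fmt.Printf("%s -> %s\n", name, link)
	})
}
```

The same chainable pattern applies to the traversal methods (Children, Parent, Closest, NextUntil, and so on) defined in traversal.go, each of which returns a new Selection that can be filtered further.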
diff --git a/vendor/github.com/andybalholm/cascadia/README.md b/vendor/github.com/andybalholm/cascadia/README.md new file mode 100644 index 00000000..6433cb9c --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/README.md @@ -0,0 +1,144 @@ +# cascadia + +[![](https://travis-ci.org/andybalholm/cascadia.svg)](https://travis-ci.org/andybalholm/cascadia) + +The Cascadia package implements CSS selectors for use with the parse trees produced by the html package. + +To test CSS selectors without writing Go code, check out [cascadia](https://github.com/suntong/cascadia) the command line tool, a thin wrapper around this package. + +[Refer to godoc here](https://godoc.org/github.com/andybalholm/cascadia). + +## Example + +The following is an example of how you can use Cascadia. + +```go +package main + +import ( + "fmt" + "log" + "strings" + + "github.com/andybalholm/cascadia" + "golang.org/x/net/html" +) + +var pricingHtml string = ` +
+<div class="card mb-4 box-shadow">
+  <div class="card-header">
+    <h4 class="my-0 font-weight-normal">Free</h4>
+  </div>
+  <div class="card-body">
+    <h1 class="card-title pricing-card-title">$0/mo</h1>
+    <ul class="list-unstyled mt-3 mb-4">
+      <li>10 users included</li>
+      <li>2 GB of storage</li>
+      <li><a href="https://example.com">See more</a></li>
+    </ul>
+  </div>
+</div>
+
+<div class="card mb-4 box-shadow">
+  <div class="card-header">
+    <h4 class="my-0 font-weight-normal">Pro</h4>
+  </div>
+  <div class="card-body">
+    <h1 class="card-title pricing-card-title">$15/mo</h1>
+    <ul class="list-unstyled mt-3 mb-4">
+      <li>20 users included</li>
+      <li>10 GB of storage</li>
+      <li><a href="https://example.com">See more</a></li>
+    </ul>
+  </div>
+</div>
+
+<div class="card mb-4 box-shadow">
+  <div class="card-header">
+    <h4 class="my-0 font-weight-normal">Enterprise</h4>
+  </div>
+  <div class="card-body">
+    <h1 class="card-title pricing-card-title">$29/mo</h1>
+    <ul class="list-unstyled mt-3 mb-4">
+      <li>30 users included</li>
+      <li>15 GB of storage</li>
+      <li><a>See more</a></li>
+    </ul>
+  </div>
+</div>
+` + +func Query(n *html.Node, query string) *html.Node { + sel, err := cascadia.Parse(query) + if err != nil { + return &html.Node{} + } + return cascadia.Query(n, sel) +} + +func QueryAll(n *html.Node, query string) []*html.Node { + sel, err := cascadia.Parse(query) + if err != nil { + return []*html.Node{} + } + return cascadia.QueryAll(n, sel) +} + +func AttrOr(n *html.Node, attrName, or string) string { + for _, a := range n.Attr { + if a.Key == attrName { + return a.Val + } + } + return or +} + +func main() { + doc, err := html.Parse(strings.NewReader(pricingHtml)) + if err != nil { + log.Fatal(err) + } + fmt.Printf("List of pricing plans:\n\n") + for i, p := range QueryAll(doc, "div.card.mb-4.box-shadow") { + planName := Query(p, "h4").FirstChild.Data + price := Query(p, ".pricing-card-title").FirstChild.Data + usersIncluded := Query(p, "li:first-child").FirstChild.Data + storage := Query(p, "li:nth-child(2)").FirstChild.Data + detailsUrl := AttrOr(Query(p, "li:last-child a"), "href", "(No link available)") + fmt.Printf( + "Plan #%d\nName: %s\nPrice: %s\nUsers: %s\nStorage: %s\nDetails: %s\n\n", + i+1, + planName, + price, + usersIncluded, + storage, + detailsUrl, + ) + } +} +``` +The output is: +``` +List of pricing plans: + +Plan #1 +Name: Free +Price: $0/mo +Users: 10 users included +Storage: 2 GB of storage +Details: https://example.com + +Plan #2 +Name: Pro +Price: $15/mo +Users: 20 users included +Storage: 10 GB of storage +Details: https://example.com + +Plan #3 +Name: Enterprise +Price: $29/mo +Users: 30 users included +Storage: 15 GB of storage +Details: (No link available) +``` diff --git a/vendor/github.com/andybalholm/cascadia/parser.go b/vendor/github.com/andybalholm/cascadia/parser.go new file mode 100644 index 00000000..06eccd58 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/parser.go @@ -0,0 +1,889 @@ +// Package cascadia is an implementation of CSS selectors. +package cascadia + +import ( + "errors" + "fmt" + "regexp" + "strconv" + "strings" +) + +// a parser for CSS selectors +type parser struct { + s string // the source text + i int // the current position + + // if `false`, parsing a pseudo-element + // returns an error. + acceptPseudoElements bool +} + +// parseEscape parses a backslash escape. +func (p *parser) parseEscape() (result string, err error) { + if len(p.s) < p.i+2 || p.s[p.i] != '\\' { + return "", errors.New("invalid escape sequence") + } + + start := p.i + 1 + c := p.s[start] + switch { + case c == '\r' || c == '\n' || c == '\f': + return "", errors.New("escaped line ending outside string") + case hexDigit(c): + // unicode escape (hex) + var i int + for i = start; i < start+6 && i < len(p.s) && hexDigit(p.s[i]); i++ { + // empty + } + v, _ := strconv.ParseUint(p.s[start:i], 16, 64) + if len(p.s) > i { + switch p.s[i] { + case '\r': + i++ + if len(p.s) > i && p.s[i] == '\n' { + i++ + } + case ' ', '\t', '\n', '\f': + i++ + } + } + p.i = i + return string(rune(v)), nil + } + + // Return the literal character after the backslash. + result = p.s[start : start+1] + p.i += 2 + return result, nil +} + +// toLowerASCII returns s with all ASCII capital letters lowercased. 
+func toLowerASCII(s string) string { + var b []byte + for i := 0; i < len(s); i++ { + if c := s[i]; 'A' <= c && c <= 'Z' { + if b == nil { + b = make([]byte, len(s)) + copy(b, s) + } + b[i] = s[i] + ('a' - 'A') + } + } + + if b == nil { + return s + } + + return string(b) +} + +func hexDigit(c byte) bool { + return '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' +} + +// nameStart returns whether c can be the first character of an identifier +// (not counting an initial hyphen, or an escape sequence). +func nameStart(c byte) bool { + return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127 +} + +// nameChar returns whether c can be a character within an identifier +// (not counting an escape sequence). +func nameChar(c byte) bool { + return 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || c == '_' || c > 127 || + c == '-' || '0' <= c && c <= '9' +} + +// parseIdentifier parses an identifier. +func (p *parser) parseIdentifier() (result string, err error) { + const prefix = '-' + var numPrefix int + + for len(p.s) > p.i && p.s[p.i] == prefix { + p.i++ + numPrefix++ + } + + if len(p.s) <= p.i { + return "", errors.New("expected identifier, found EOF instead") + } + + if c := p.s[p.i]; !(nameStart(c) || c == '\\') { + return "", fmt.Errorf("expected identifier, found %c instead", c) + } + + result, err = p.parseName() + if numPrefix > 0 && err == nil { + result = strings.Repeat(string(prefix), numPrefix) + result + } + return +} + +// parseName parses a name (which is like an identifier, but doesn't have +// extra restrictions on the first character). +func (p *parser) parseName() (result string, err error) { + i := p.i +loop: + for i < len(p.s) { + c := p.s[i] + switch { + case nameChar(c): + start := i + for i < len(p.s) && nameChar(p.s[i]) { + i++ + } + result += p.s[start:i] + case c == '\\': + p.i = i + val, err := p.parseEscape() + if err != nil { + return "", err + } + i = p.i + result += val + default: + break loop + } + } + + if result == "" { + return "", errors.New("expected name, found EOF instead") + } + + p.i = i + return result, nil +} + +// parseString parses a single- or double-quoted string. +func (p *parser) parseString() (result string, err error) { + i := p.i + if len(p.s) < i+2 { + return "", errors.New("expected string, found EOF instead") + } + + quote := p.s[i] + i++ + +loop: + for i < len(p.s) { + switch p.s[i] { + case '\\': + if len(p.s) > i+1 { + switch c := p.s[i+1]; c { + case '\r': + if len(p.s) > i+2 && p.s[i+2] == '\n' { + i += 3 + continue loop + } + fallthrough + case '\n', '\f': + i += 2 + continue loop + } + } + p.i = i + val, err := p.parseEscape() + if err != nil { + return "", err + } + i = p.i + result += val + case quote: + break loop + case '\r', '\n', '\f': + return "", errors.New("unexpected end of line in string") + default: + start := i + for i < len(p.s) { + if c := p.s[i]; c == quote || c == '\\' || c == '\r' || c == '\n' || c == '\f' { + break + } + i++ + } + result += p.s[start:i] + } + } + + if i >= len(p.s) { + return "", errors.New("EOF in string") + } + + // Consume the final quote. 
+ i++ + + p.i = i + return result, nil +} + +// parseRegex parses a regular expression; the end is defined by encountering an +// unmatched closing ')' or ']' which is not consumed +func (p *parser) parseRegex() (rx *regexp.Regexp, err error) { + i := p.i + if len(p.s) < i+2 { + return nil, errors.New("expected regular expression, found EOF instead") + } + + // number of open parens or brackets; + // when it becomes negative, finished parsing regex + open := 0 + +loop: + for i < len(p.s) { + switch p.s[i] { + case '(', '[': + open++ + case ')', ']': + open-- + if open < 0 { + break loop + } + } + i++ + } + + if i >= len(p.s) { + return nil, errors.New("EOF in regular expression") + } + rx, err = regexp.Compile(p.s[p.i:i]) + p.i = i + return rx, err +} + +// skipWhitespace consumes whitespace characters and comments. +// It returns true if there was actually anything to skip. +func (p *parser) skipWhitespace() bool { + i := p.i + for i < len(p.s) { + switch p.s[i] { + case ' ', '\t', '\r', '\n', '\f': + i++ + continue + case '/': + if strings.HasPrefix(p.s[i:], "/*") { + end := strings.Index(p.s[i+len("/*"):], "*/") + if end != -1 { + i += end + len("/**/") + continue + } + } + } + break + } + + if i > p.i { + p.i = i + return true + } + + return false +} + +// consumeParenthesis consumes an opening parenthesis and any following +// whitespace. It returns true if there was actually a parenthesis to skip. +func (p *parser) consumeParenthesis() bool { + if p.i < len(p.s) && p.s[p.i] == '(' { + p.i++ + p.skipWhitespace() + return true + } + return false +} + +// consumeClosingParenthesis consumes a closing parenthesis and any preceding +// whitespace. It returns true if there was actually a parenthesis to skip. +func (p *parser) consumeClosingParenthesis() bool { + i := p.i + p.skipWhitespace() + if p.i < len(p.s) && p.s[p.i] == ')' { + p.i++ + return true + } + p.i = i + return false +} + +// parseTypeSelector parses a type selector (one that matches by tag name). +func (p *parser) parseTypeSelector() (result tagSelector, err error) { + tag, err := p.parseIdentifier() + if err != nil { + return + } + return tagSelector{tag: toLowerASCII(tag)}, nil +} + +// parseIDSelector parses a selector that matches by id attribute. +func (p *parser) parseIDSelector() (idSelector, error) { + if p.i >= len(p.s) { + return idSelector{}, fmt.Errorf("expected id selector (#id), found EOF instead") + } + if p.s[p.i] != '#' { + return idSelector{}, fmt.Errorf("expected id selector (#id), found '%c' instead", p.s[p.i]) + } + + p.i++ + id, err := p.parseName() + if err != nil { + return idSelector{}, err + } + + return idSelector{id: id}, nil +} + +// parseClassSelector parses a selector that matches by class attribute. +func (p *parser) parseClassSelector() (classSelector, error) { + if p.i >= len(p.s) { + return classSelector{}, fmt.Errorf("expected class selector (.class), found EOF instead") + } + if p.s[p.i] != '.' { + return classSelector{}, fmt.Errorf("expected class selector (.class), found '%c' instead", p.s[p.i]) + } + + p.i++ + class, err := p.parseIdentifier() + if err != nil { + return classSelector{}, err + } + + return classSelector{class: class}, nil +} + +// parseAttributeSelector parses a selector that matches by attribute value. 
+func (p *parser) parseAttributeSelector() (attrSelector, error) { + if p.i >= len(p.s) { + return attrSelector{}, fmt.Errorf("expected attribute selector ([attribute]), found EOF instead") + } + if p.s[p.i] != '[' { + return attrSelector{}, fmt.Errorf("expected attribute selector ([attribute]), found '%c' instead", p.s[p.i]) + } + + p.i++ + p.skipWhitespace() + key, err := p.parseIdentifier() + if err != nil { + return attrSelector{}, err + } + key = toLowerASCII(key) + + p.skipWhitespace() + if p.i >= len(p.s) { + return attrSelector{}, errors.New("unexpected EOF in attribute selector") + } + + if p.s[p.i] == ']' { + p.i++ + return attrSelector{key: key, operation: ""}, nil + } + + if p.i+2 >= len(p.s) { + return attrSelector{}, errors.New("unexpected EOF in attribute selector") + } + + op := p.s[p.i : p.i+2] + if op[0] == '=' { + op = "=" + } else if op[1] != '=' { + return attrSelector{}, fmt.Errorf(`expected equality operator, found "%s" instead`, op) + } + p.i += len(op) + + p.skipWhitespace() + if p.i >= len(p.s) { + return attrSelector{}, errors.New("unexpected EOF in attribute selector") + } + var val string + var rx *regexp.Regexp + if op == "#=" { + rx, err = p.parseRegex() + } else { + switch p.s[p.i] { + case '\'', '"': + val, err = p.parseString() + default: + val, err = p.parseIdentifier() + } + } + if err != nil { + return attrSelector{}, err + } + + p.skipWhitespace() + if p.i >= len(p.s) { + return attrSelector{}, errors.New("unexpected EOF in attribute selector") + } + + // check if the attribute contains an ignore case flag + ignoreCase := false + if p.s[p.i] == 'i' || p.s[p.i] == 'I' { + ignoreCase = true + p.i++ + } + + p.skipWhitespace() + if p.i >= len(p.s) { + return attrSelector{}, errors.New("unexpected EOF in attribute selector") + } + + if p.s[p.i] != ']' { + return attrSelector{}, fmt.Errorf("expected ']', found '%c' instead", p.s[p.i]) + } + p.i++ + + switch op { + case "=", "!=", "~=", "|=", "^=", "$=", "*=", "#=": + return attrSelector{key: key, val: val, operation: op, regexp: rx, insensitive: ignoreCase}, nil + default: + return attrSelector{}, fmt.Errorf("attribute operator %q is not supported", op) + } +} + +var ( + errExpectedParenthesis = errors.New("expected '(' but didn't find it") + errExpectedClosingParenthesis = errors.New("expected ')' but didn't find it") + errUnmatchedParenthesis = errors.New("unmatched '('") +) + +// parsePseudoclassSelector parses a pseudoclass selector like :not(p) or a pseudo-element +// For backwards compatibility, both ':' and '::' prefix are allowed for pseudo-elements. +// https://drafts.csswg.org/selectors-3/#pseudo-elements +// Returning a nil `Sel` (and a nil `error`) means we found a pseudo-element. 
+func (p *parser) parsePseudoclassSelector() (out Sel, pseudoElement string, err error) { + if p.i >= len(p.s) { + return nil, "", fmt.Errorf("expected pseudoclass selector (:pseudoclass), found EOF instead") + } + if p.s[p.i] != ':' { + return nil, "", fmt.Errorf("expected attribute selector (:pseudoclass), found '%c' instead", p.s[p.i]) + } + + p.i++ + var mustBePseudoElement bool + if p.i >= len(p.s) { + return nil, "", fmt.Errorf("got empty pseudoclass (or pseudoelement)") + } + if p.s[p.i] == ':' { // we found a pseudo-element + mustBePseudoElement = true + p.i++ + } + + name, err := p.parseIdentifier() + if err != nil { + return + } + name = toLowerASCII(name) + if mustBePseudoElement && (name != "after" && name != "backdrop" && name != "before" && + name != "cue" && name != "first-letter" && name != "first-line" && name != "grammar-error" && + name != "marker" && name != "placeholder" && name != "selection" && name != "spelling-error") { + return out, "", fmt.Errorf("unknown pseudoelement :%s", name) + } + + switch name { + case "not", "has", "haschild": + if !p.consumeParenthesis() { + return out, "", errExpectedParenthesis + } + sel, parseErr := p.parseSelectorGroup() + if parseErr != nil { + return out, "", parseErr + } + if !p.consumeClosingParenthesis() { + return out, "", errExpectedClosingParenthesis + } + + out = relativePseudoClassSelector{name: name, match: sel} + + case "contains", "containsown": + if !p.consumeParenthesis() { + return out, "", errExpectedParenthesis + } + if p.i == len(p.s) { + return out, "", errUnmatchedParenthesis + } + var val string + switch p.s[p.i] { + case '\'', '"': + val, err = p.parseString() + default: + val, err = p.parseIdentifier() + } + if err != nil { + return out, "", err + } + val = strings.ToLower(val) + p.skipWhitespace() + if p.i >= len(p.s) { + return out, "", errors.New("unexpected EOF in pseudo selector") + } + if !p.consumeClosingParenthesis() { + return out, "", errExpectedClosingParenthesis + } + + out = containsPseudoClassSelector{own: name == "containsown", value: val} + + case "matches", "matchesown": + if !p.consumeParenthesis() { + return out, "", errExpectedParenthesis + } + rx, err := p.parseRegex() + if err != nil { + return out, "", err + } + if p.i >= len(p.s) { + return out, "", errors.New("unexpected EOF in pseudo selector") + } + if !p.consumeClosingParenthesis() { + return out, "", errExpectedClosingParenthesis + } + + out = regexpPseudoClassSelector{own: name == "matchesown", regexp: rx} + + case "nth-child", "nth-last-child", "nth-of-type", "nth-last-of-type": + if !p.consumeParenthesis() { + return out, "", errExpectedParenthesis + } + a, b, err := p.parseNth() + if err != nil { + return out, "", err + } + if !p.consumeClosingParenthesis() { + return out, "", errExpectedClosingParenthesis + } + last := name == "nth-last-child" || name == "nth-last-of-type" + ofType := name == "nth-of-type" || name == "nth-last-of-type" + out = nthPseudoClassSelector{a: a, b: b, last: last, ofType: ofType} + + case "first-child": + out = nthPseudoClassSelector{a: 0, b: 1, ofType: false, last: false} + case "last-child": + out = nthPseudoClassSelector{a: 0, b: 1, ofType: false, last: true} + case "first-of-type": + out = nthPseudoClassSelector{a: 0, b: 1, ofType: true, last: false} + case "last-of-type": + out = nthPseudoClassSelector{a: 0, b: 1, ofType: true, last: true} + case "only-child": + out = onlyChildPseudoClassSelector{ofType: false} + case "only-of-type": + out = onlyChildPseudoClassSelector{ofType: true} + case 
"input": + out = inputPseudoClassSelector{} + case "empty": + out = emptyElementPseudoClassSelector{} + case "root": + out = rootPseudoClassSelector{} + case "link": + out = linkPseudoClassSelector{} + case "lang": + if !p.consumeParenthesis() { + return out, "", errExpectedParenthesis + } + if p.i == len(p.s) { + return out, "", errUnmatchedParenthesis + } + val, err := p.parseIdentifier() + if err != nil { + return out, "", err + } + val = strings.ToLower(val) + p.skipWhitespace() + if p.i >= len(p.s) { + return out, "", errors.New("unexpected EOF in pseudo selector") + } + if !p.consumeClosingParenthesis() { + return out, "", errExpectedClosingParenthesis + } + out = langPseudoClassSelector{lang: val} + case "enabled": + out = enabledPseudoClassSelector{} + case "disabled": + out = disabledPseudoClassSelector{} + case "checked": + out = checkedPseudoClassSelector{} + case "visited", "hover", "active", "focus", "target": + // Not applicable in a static context: never match. + out = neverMatchSelector{value: ":" + name} + case "after", "backdrop", "before", "cue", "first-letter", "first-line", "grammar-error", "marker", "placeholder", "selection", "spelling-error": + return nil, name, nil + default: + return out, "", fmt.Errorf("unknown pseudoclass or pseudoelement :%s", name) + } + return +} + +// parseInteger parses a decimal integer. +func (p *parser) parseInteger() (int, error) { + i := p.i + start := i + for i < len(p.s) && '0' <= p.s[i] && p.s[i] <= '9' { + i++ + } + if i == start { + return 0, errors.New("expected integer, but didn't find it") + } + p.i = i + + val, err := strconv.Atoi(p.s[start:i]) + if err != nil { + return 0, err + } + + return val, nil +} + +// parseNth parses the argument for :nth-child (normally of the form an+b). +func (p *parser) parseNth() (a, b int, err error) { + // initial state + if p.i >= len(p.s) { + goto eof + } + switch p.s[p.i] { + case '-': + p.i++ + goto negativeA + case '+': + p.i++ + goto positiveA + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + goto positiveA + case 'n', 'N': + a = 1 + p.i++ + goto readN + case 'o', 'O', 'e', 'E': + id, nameErr := p.parseName() + if nameErr != nil { + return 0, 0, nameErr + } + id = toLowerASCII(id) + if id == "odd" { + return 2, 1, nil + } + if id == "even" { + return 2, 0, nil + } + return 0, 0, fmt.Errorf("expected 'odd' or 'even', but found '%s' instead", id) + default: + goto invalid + } + +positiveA: + if p.i >= len(p.s) { + goto eof + } + switch p.s[p.i] { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + a, err = p.parseInteger() + if err != nil { + return 0, 0, err + } + goto readA + case 'n', 'N': + a = 1 + p.i++ + goto readN + default: + goto invalid + } + +negativeA: + if p.i >= len(p.s) { + goto eof + } + switch p.s[p.i] { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + a, err = p.parseInteger() + if err != nil { + return 0, 0, err + } + a = -a + goto readA + case 'n', 'N': + a = -1 + p.i++ + goto readN + default: + goto invalid + } + +readA: + if p.i >= len(p.s) { + goto eof + } + switch p.s[p.i] { + case 'n', 'N': + p.i++ + goto readN + default: + // The number we read as a is actually b. 
+ return 0, a, nil + } + +readN: + p.skipWhitespace() + if p.i >= len(p.s) { + goto eof + } + switch p.s[p.i] { + case '+': + p.i++ + p.skipWhitespace() + b, err = p.parseInteger() + if err != nil { + return 0, 0, err + } + return a, b, nil + case '-': + p.i++ + p.skipWhitespace() + b, err = p.parseInteger() + if err != nil { + return 0, 0, err + } + return a, -b, nil + default: + return a, 0, nil + } + +eof: + return 0, 0, errors.New("unexpected EOF while attempting to parse expression of form an+b") + +invalid: + return 0, 0, errors.New("unexpected character while attempting to parse expression of form an+b") +} + +// parseSimpleSelectorSequence parses a selector sequence that applies to +// a single element. +func (p *parser) parseSimpleSelectorSequence() (Sel, error) { + var selectors []Sel + + if p.i >= len(p.s) { + return nil, errors.New("expected selector, found EOF instead") + } + + switch p.s[p.i] { + case '*': + // It's the universal selector. Just skip over it, since it doesn't affect the meaning. + p.i++ + if p.i+2 < len(p.s) && p.s[p.i:p.i+2] == "|*" { // other version of universal selector + p.i += 2 + } + case '#', '.', '[', ':': + // There's no type selector. Wait to process the other till the main loop. + default: + r, err := p.parseTypeSelector() + if err != nil { + return nil, err + } + selectors = append(selectors, r) + } + + var pseudoElement string +loop: + for p.i < len(p.s) { + var ( + ns Sel + newPseudoElement string + err error + ) + switch p.s[p.i] { + case '#': + ns, err = p.parseIDSelector() + case '.': + ns, err = p.parseClassSelector() + case '[': + ns, err = p.parseAttributeSelector() + case ':': + ns, newPseudoElement, err = p.parsePseudoclassSelector() + default: + break loop + } + if err != nil { + return nil, err + } + // From https://drafts.csswg.org/selectors-3/#pseudo-elements : + // "Only one pseudo-element may appear per selector, and if present + // it must appear after the sequence of simple selectors that + // represents the subjects of the selector."" + if ns == nil { // we found a pseudo-element + if pseudoElement != "" { + return nil, fmt.Errorf("only one pseudo-element is accepted per selector, got %s and %s", pseudoElement, newPseudoElement) + } + if !p.acceptPseudoElements { + return nil, fmt.Errorf("pseudo-element %s found, but pseudo-elements support is disabled", newPseudoElement) + } + pseudoElement = newPseudoElement + } else { + if pseudoElement != "" { + return nil, fmt.Errorf("pseudo-element %s must be at the end of selector", pseudoElement) + } + selectors = append(selectors, ns) + } + + } + if len(selectors) == 1 && pseudoElement == "" { // no need wrap the selectors in compoundSelector + return selectors[0], nil + } + return compoundSelector{selectors: selectors, pseudoElement: pseudoElement}, nil +} + +// parseSelector parses a selector that may include combinators. +func (p *parser) parseSelector() (Sel, error) { + p.skipWhitespace() + result, err := p.parseSimpleSelectorSequence() + if err != nil { + return nil, err + } + + for { + var ( + combinator byte + c Sel + ) + if p.skipWhitespace() { + combinator = ' ' + } + if p.i >= len(p.s) { + return result, nil + } + + switch p.s[p.i] { + case '+', '>', '~': + combinator = p.s[p.i] + p.i++ + p.skipWhitespace() + case ',', ')': + // These characters can't begin a selector, but they can legally occur after one. 
+ return result, nil + } + + if combinator == 0 { + return result, nil + } + + c, err = p.parseSimpleSelectorSequence() + if err != nil { + return nil, err + } + result = combinedSelector{first: result, combinator: combinator, second: c} + } +} + +// parseSelectorGroup parses a group of selectors, separated by commas. +func (p *parser) parseSelectorGroup() (SelectorGroup, error) { + current, err := p.parseSelector() + if err != nil { + return nil, err + } + result := SelectorGroup{current} + + for p.i < len(p.s) { + if p.s[p.i] != ',' { + break + } + p.i++ + c, err := p.parseSelector() + if err != nil { + return nil, err + } + result = append(result, c) + } + return result, nil +} diff --git a/vendor/github.com/andybalholm/cascadia/pseudo_classes.go b/vendor/github.com/andybalholm/cascadia/pseudo_classes.go new file mode 100644 index 00000000..6234c3eb --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/pseudo_classes.go @@ -0,0 +1,458 @@ +package cascadia + +import ( + "bytes" + "fmt" + "regexp" + "strings" + + "golang.org/x/net/html" + "golang.org/x/net/html/atom" +) + +// This file implements the pseudo classes selectors, +// which share the implementation of PseudoElement() and Specificity() + +type abstractPseudoClass struct{} + +func (s abstractPseudoClass) Specificity() Specificity { + return Specificity{0, 1, 0} +} + +func (c abstractPseudoClass) PseudoElement() string { + return "" +} + +type relativePseudoClassSelector struct { + name string // one of "not", "has", "haschild" + match SelectorGroup +} + +func (s relativePseudoClassSelector) Match(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + switch s.name { + case "not": + // matches elements that do not match a. + return !s.match.Match(n) + case "has": + // matches elements with any descendant that matches a. + return hasDescendantMatch(n, s.match) + case "haschild": + // matches elements with a child that matches a. + return hasChildMatch(n, s.match) + default: + panic(fmt.Sprintf("unsupported relative pseudo class selector : %s", s.name)) + } +} + +// hasChildMatch returns whether n has any child that matches a. +func hasChildMatch(n *html.Node, a Matcher) bool { + for c := n.FirstChild; c != nil; c = c.NextSibling { + if a.Match(c) { + return true + } + } + return false +} + +// hasDescendantMatch performs a depth-first search of n's descendants, +// testing whether any of them match a. It returns true as soon as a match is +// found, or false if no match is found. +func hasDescendantMatch(n *html.Node, a Matcher) bool { + for c := n.FirstChild; c != nil; c = c.NextSibling { + if a.Match(c) || (c.Type == html.ElementNode && hasDescendantMatch(c, a)) { + return true + } + } + return false +} + +// Specificity returns the specificity of the most specific selectors +// in the pseudo-class arguments. +// See https://www.w3.org/TR/selectors/#specificity-rules +func (s relativePseudoClassSelector) Specificity() Specificity { + var max Specificity + for _, sel := range s.match { + newSpe := sel.Specificity() + if max.Less(newSpe) { + max = newSpe + } + } + return max +} + +func (c relativePseudoClassSelector) PseudoElement() string { + return "" +} + +type containsPseudoClassSelector struct { + abstractPseudoClass + value string + own bool +} + +func (s containsPseudoClassSelector) Match(n *html.Node) bool { + var text string + if s.own { + // matches nodes that directly contain the given text + text = strings.ToLower(nodeOwnText(n)) + } else { + // matches nodes that contain the given text. 
+ text = strings.ToLower(nodeText(n)) + } + return strings.Contains(text, s.value) +} + +type regexpPseudoClassSelector struct { + abstractPseudoClass + regexp *regexp.Regexp + own bool +} + +func (s regexpPseudoClassSelector) Match(n *html.Node) bool { + var text string + if s.own { + // matches nodes whose text directly matches the specified regular expression + text = nodeOwnText(n) + } else { + // matches nodes whose text matches the specified regular expression + text = nodeText(n) + } + return s.regexp.MatchString(text) +} + +// writeNodeText writes the text contained in n and its descendants to b. +func writeNodeText(n *html.Node, b *bytes.Buffer) { + switch n.Type { + case html.TextNode: + b.WriteString(n.Data) + case html.ElementNode: + for c := n.FirstChild; c != nil; c = c.NextSibling { + writeNodeText(c, b) + } + } +} + +// nodeText returns the text contained in n and its descendants. +func nodeText(n *html.Node) string { + var b bytes.Buffer + writeNodeText(n, &b) + return b.String() +} + +// nodeOwnText returns the contents of the text nodes that are direct +// children of n. +func nodeOwnText(n *html.Node) string { + var b bytes.Buffer + for c := n.FirstChild; c != nil; c = c.NextSibling { + if c.Type == html.TextNode { + b.WriteString(c.Data) + } + } + return b.String() +} + +type nthPseudoClassSelector struct { + abstractPseudoClass + a, b int + last, ofType bool +} + +func (s nthPseudoClassSelector) Match(n *html.Node) bool { + if s.a == 0 { + if s.last { + return simpleNthLastChildMatch(s.b, s.ofType, n) + } else { + return simpleNthChildMatch(s.b, s.ofType, n) + } + } + return nthChildMatch(s.a, s.b, s.last, s.ofType, n) +} + +// nthChildMatch implements :nth-child(an+b). +// If last is true, implements :nth-last-child instead. +// If ofType is true, implements :nth-of-type instead. +func nthChildMatch(a, b int, last, ofType bool, n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + + parent := n.Parent + if parent == nil { + return false + } + + i := -1 + count := 0 + for c := parent.FirstChild; c != nil; c = c.NextSibling { + if (c.Type != html.ElementNode) || (ofType && c.Data != n.Data) { + continue + } + count++ + if c == n { + i = count + if !last { + break + } + } + } + + if i == -1 { + // This shouldn't happen, since n should always be one of its parent's children. + return false + } + + if last { + i = count - i + 1 + } + + i -= b + if a == 0 { + return i == 0 + } + + return i%a == 0 && i/a >= 0 +} + +// simpleNthChildMatch implements :nth-child(b). +// If ofType is true, implements :nth-of-type instead. +func simpleNthChildMatch(b int, ofType bool, n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + + parent := n.Parent + if parent == nil { + return false + } + + count := 0 + for c := parent.FirstChild; c != nil; c = c.NextSibling { + if c.Type != html.ElementNode || (ofType && c.Data != n.Data) { + continue + } + count++ + if c == n { + return count == b + } + if count >= b { + return false + } + } + return false +} + +// simpleNthLastChildMatch implements :nth-last-child(b). +// If ofType is true, implements :nth-last-of-type instead. 
+func simpleNthLastChildMatch(b int, ofType bool, n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + + parent := n.Parent + if parent == nil { + return false + } + + count := 0 + for c := parent.LastChild; c != nil; c = c.PrevSibling { + if c.Type != html.ElementNode || (ofType && c.Data != n.Data) { + continue + } + count++ + if c == n { + return count == b + } + if count >= b { + return false + } + } + return false +} + +type onlyChildPseudoClassSelector struct { + abstractPseudoClass + ofType bool +} + +// Match implements :only-child. +// If `ofType` is true, it implements :only-of-type instead. +func (s onlyChildPseudoClassSelector) Match(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + + parent := n.Parent + if parent == nil { + return false + } + + count := 0 + for c := parent.FirstChild; c != nil; c = c.NextSibling { + if (c.Type != html.ElementNode) || (s.ofType && c.Data != n.Data) { + continue + } + count++ + if count > 1 { + return false + } + } + + return count == 1 +} + +type inputPseudoClassSelector struct { + abstractPseudoClass +} + +// Matches input, select, textarea and button elements. +func (s inputPseudoClassSelector) Match(n *html.Node) bool { + return n.Type == html.ElementNode && (n.Data == "input" || n.Data == "select" || n.Data == "textarea" || n.Data == "button") +} + +type emptyElementPseudoClassSelector struct { + abstractPseudoClass +} + +// Matches empty elements. +func (s emptyElementPseudoClassSelector) Match(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + + for c := n.FirstChild; c != nil; c = c.NextSibling { + switch c.Type { + case html.ElementNode: + return false + case html.TextNode: + if strings.TrimSpace(nodeText(c)) == "" { + continue + } else { + return false + } + } + } + + return true +} + +type rootPseudoClassSelector struct { + abstractPseudoClass +} + +// Match implements :root +func (s rootPseudoClassSelector) Match(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + if n.Parent == nil { + return false + } + return n.Parent.Type == html.DocumentNode +} + +func hasAttr(n *html.Node, attr string) bool { + return matchAttribute(n, attr, func(string) bool { return true }) +} + +type linkPseudoClassSelector struct { + abstractPseudoClass +} + +// Match implements :link +func (s linkPseudoClassSelector) Match(n *html.Node) bool { + return (n.DataAtom == atom.A || n.DataAtom == atom.Area || n.DataAtom == atom.Link) && hasAttr(n, "href") +} + +type langPseudoClassSelector struct { + abstractPseudoClass + lang string +} + +func (s langPseudoClassSelector) Match(n *html.Node) bool { + own := matchAttribute(n, "lang", func(val string) bool { + return val == s.lang || strings.HasPrefix(val, s.lang+"-") + }) + if n.Parent == nil { + return own + } + return own || s.Match(n.Parent) +} + +type enabledPseudoClassSelector struct { + abstractPseudoClass +} + +func (s enabledPseudoClassSelector) Match(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + switch n.DataAtom { + case atom.A, atom.Area, atom.Link: + return hasAttr(n, "href") + case atom.Optgroup, atom.Menuitem, atom.Fieldset: + return !hasAttr(n, "disabled") + case atom.Button, atom.Input, atom.Select, atom.Textarea, atom.Option: + return !hasAttr(n, "disabled") && !inDisabledFieldset(n) + } + return false +} + +type disabledPseudoClassSelector struct { + abstractPseudoClass +} + +func (s disabledPseudoClassSelector) Match(n *html.Node) bool { + if n.Type != 
html.ElementNode { + return false + } + switch n.DataAtom { + case atom.Optgroup, atom.Menuitem, atom.Fieldset: + return hasAttr(n, "disabled") + case atom.Button, atom.Input, atom.Select, atom.Textarea, atom.Option: + return hasAttr(n, "disabled") || inDisabledFieldset(n) + } + return false +} + +func hasLegendInPreviousSiblings(n *html.Node) bool { + for s := n.PrevSibling; s != nil; s = s.PrevSibling { + if s.DataAtom == atom.Legend { + return true + } + } + return false +} + +func inDisabledFieldset(n *html.Node) bool { + if n.Parent == nil { + return false + } + if n.Parent.DataAtom == atom.Fieldset && hasAttr(n.Parent, "disabled") && + (n.DataAtom != atom.Legend || hasLegendInPreviousSiblings(n)) { + return true + } + return inDisabledFieldset(n.Parent) +} + +type checkedPseudoClassSelector struct { + abstractPseudoClass +} + +func (s checkedPseudoClassSelector) Match(n *html.Node) bool { + if n.Type != html.ElementNode { + return false + } + switch n.DataAtom { + case atom.Input, atom.Menuitem: + return hasAttr(n, "checked") && matchAttribute(n, "type", func(val string) bool { + t := toLowerASCII(val) + return t == "checkbox" || t == "radio" + }) + case atom.Option: + return hasAttr(n, "selected") + } + return false +} diff --git a/vendor/github.com/andybalholm/cascadia/selector.go b/vendor/github.com/andybalholm/cascadia/selector.go new file mode 100644 index 00000000..87549be2 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/selector.go @@ -0,0 +1,586 @@ +package cascadia + +import ( + "fmt" + "regexp" + "strings" + + "golang.org/x/net/html" +) + +// Matcher is the interface for basic selector functionality. +// Match returns whether a selector matches n. +type Matcher interface { + Match(n *html.Node) bool +} + +// Sel is the interface for all the functionality provided by selectors. +type Sel interface { + Matcher + Specificity() Specificity + + // Returns a CSS input compiling to this selector. + String() string + + // Returns a pseudo-element, or an empty string. + PseudoElement() string +} + +// Parse parses a selector. Use `ParseWithPseudoElement` +// if you need support for pseudo-elements. +func Parse(sel string) (Sel, error) { + p := &parser{s: sel} + compiled, err := p.parseSelector() + if err != nil { + return nil, err + } + + if p.i < len(sel) { + return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i) + } + + return compiled, nil +} + +// ParseWithPseudoElement parses a single selector, +// with support for pseudo-element. +func ParseWithPseudoElement(sel string) (Sel, error) { + p := &parser{s: sel, acceptPseudoElements: true} + compiled, err := p.parseSelector() + if err != nil { + return nil, err + } + + if p.i < len(sel) { + return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i) + } + + return compiled, nil +} + +// ParseGroup parses a selector, or a group of selectors separated by commas. +// Use `ParseGroupWithPseudoElements` +// if you need support for pseudo-elements. +func ParseGroup(sel string) (SelectorGroup, error) { + p := &parser{s: sel} + compiled, err := p.parseSelectorGroup() + if err != nil { + return nil, err + } + + if p.i < len(sel) { + return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i) + } + + return compiled, nil +} + +// ParseGroupWithPseudoElements parses a selector, or a group of selectors separated by commas. +// It supports pseudo-elements. 
+func ParseGroupWithPseudoElements(sel string) (SelectorGroup, error) { + p := &parser{s: sel, acceptPseudoElements: true} + compiled, err := p.parseSelectorGroup() + if err != nil { + return nil, err + } + + if p.i < len(sel) { + return nil, fmt.Errorf("parsing %q: %d bytes left over", sel, len(sel)-p.i) + } + + return compiled, nil +} + +// A Selector is a function which tells whether a node matches or not. +// +// This type is maintained for compatibility; I recommend using the newer and +// more idiomatic interfaces Sel and Matcher. +type Selector func(*html.Node) bool + +// Compile parses a selector and returns, if successful, a Selector object +// that can be used to match against html.Node objects. +func Compile(sel string) (Selector, error) { + compiled, err := ParseGroup(sel) + if err != nil { + return nil, err + } + + return Selector(compiled.Match), nil +} + +// MustCompile is like Compile, but panics instead of returning an error. +func MustCompile(sel string) Selector { + compiled, err := Compile(sel) + if err != nil { + panic(err) + } + return compiled +} + +// MatchAll returns a slice of the nodes that match the selector, +// from n and its children. +func (s Selector) MatchAll(n *html.Node) []*html.Node { + return s.matchAllInto(n, nil) +} + +func (s Selector) matchAllInto(n *html.Node, storage []*html.Node) []*html.Node { + if s(n) { + storage = append(storage, n) + } + + for child := n.FirstChild; child != nil; child = child.NextSibling { + storage = s.matchAllInto(child, storage) + } + + return storage +} + +func queryInto(n *html.Node, m Matcher, storage []*html.Node) []*html.Node { + for child := n.FirstChild; child != nil; child = child.NextSibling { + if m.Match(child) { + storage = append(storage, child) + } + storage = queryInto(child, m, storage) + } + + return storage +} + +// QueryAll returns a slice of all the nodes that match m, from the descendants +// of n. +func QueryAll(n *html.Node, m Matcher) []*html.Node { + return queryInto(n, m, nil) +} + +// Match returns true if the node matches the selector. +func (s Selector) Match(n *html.Node) bool { + return s(n) +} + +// MatchFirst returns the first node that matches s, from n and its children. +func (s Selector) MatchFirst(n *html.Node) *html.Node { + if s.Match(n) { + return n + } + + for c := n.FirstChild; c != nil; c = c.NextSibling { + m := s.MatchFirst(c) + if m != nil { + return m + } + } + return nil +} + +// Query returns the first node that matches m, from the descendants of n. +// If none matches, it returns nil. +func Query(n *html.Node, m Matcher) *html.Node { + for c := n.FirstChild; c != nil; c = c.NextSibling { + if m.Match(c) { + return c + } + if matched := Query(c, m); matched != nil { + return matched + } + } + + return nil +} + +// Filter returns the nodes in nodes that match the selector. +func (s Selector) Filter(nodes []*html.Node) (result []*html.Node) { + for _, n := range nodes { + if s(n) { + result = append(result, n) + } + } + return result +} + +// Filter returns the nodes that match m. +func Filter(nodes []*html.Node, m Matcher) (result []*html.Node) { + for _, n := range nodes { + if m.Match(n) { + result = append(result, n) + } + } + return result +} + +type tagSelector struct { + tag string +} + +// Matches elements with a given tag name. 
+func (t tagSelector) Match(n *html.Node) bool { + return n.Type == html.ElementNode && n.Data == t.tag +} + +func (c tagSelector) Specificity() Specificity { + return Specificity{0, 0, 1} +} + +func (c tagSelector) PseudoElement() string { + return "" +} + +type classSelector struct { + class string +} + +// Matches elements by class attribute. +func (t classSelector) Match(n *html.Node) bool { + return matchAttribute(n, "class", func(s string) bool { + return matchInclude(t.class, s, false) + }) +} + +func (c classSelector) Specificity() Specificity { + return Specificity{0, 1, 0} +} + +func (c classSelector) PseudoElement() string { + return "" +} + +type idSelector struct { + id string +} + +// Matches elements by id attribute. +func (t idSelector) Match(n *html.Node) bool { + return matchAttribute(n, "id", func(s string) bool { + return s == t.id + }) +} + +func (c idSelector) Specificity() Specificity { + return Specificity{1, 0, 0} +} + +func (c idSelector) PseudoElement() string { + return "" +} + +type attrSelector struct { + key, val, operation string + regexp *regexp.Regexp + insensitive bool +} + +// Matches elements by attribute value. +func (t attrSelector) Match(n *html.Node) bool { + switch t.operation { + case "": + return matchAttribute(n, t.key, func(string) bool { return true }) + case "=": + return matchAttribute(n, t.key, func(s string) bool { return matchInsensitiveValue(s, t.val, t.insensitive) }) + case "!=": + return attributeNotEqualMatch(t.key, t.val, n, t.insensitive) + case "~=": + // matches elements where the attribute named key is a whitespace-separated list that includes val. + return matchAttribute(n, t.key, func(s string) bool { return matchInclude(t.val, s, t.insensitive) }) + case "|=": + return attributeDashMatch(t.key, t.val, n, t.insensitive) + case "^=": + return attributePrefixMatch(t.key, t.val, n, t.insensitive) + case "$=": + return attributeSuffixMatch(t.key, t.val, n, t.insensitive) + case "*=": + return attributeSubstringMatch(t.key, t.val, n, t.insensitive) + case "#=": + return attributeRegexMatch(t.key, t.regexp, n) + default: + panic(fmt.Sprintf("unsuported operation : %s", t.operation)) + } +} + +// matches elements where we ignore (or not) the case of the attribute value +// the user attribute is the value set by the user to match elements +// the real attribute is the attribute value found in the code parsed +func matchInsensitiveValue(userAttr string, realAttr string, ignoreCase bool) bool { + if ignoreCase { + return strings.EqualFold(userAttr, realAttr) + } + return userAttr == realAttr + +} + +// matches elements where the attribute named key satisifes the function f. +func matchAttribute(n *html.Node, key string, f func(string) bool) bool { + if n.Type != html.ElementNode { + return false + } + for _, a := range n.Attr { + if a.Key == key && f(a.Val) { + return true + } + } + return false +} + +// attributeNotEqualMatch matches elements where +// the attribute named key does not have the value val. +func attributeNotEqualMatch(key, val string, n *html.Node, ignoreCase bool) bool { + if n.Type != html.ElementNode { + return false + } + for _, a := range n.Attr { + if a.Key == key && matchInsensitiveValue(a.Val, val, ignoreCase) { + return false + } + } + return true +} + +// returns true if s is a whitespace-separated list that includes val. 
+func matchInclude(val string, s string, ignoreCase bool) bool { + for s != "" { + i := strings.IndexAny(s, " \t\r\n\f") + if i == -1 { + return matchInsensitiveValue(s, val, ignoreCase) + } + if matchInsensitiveValue(s[:i], val, ignoreCase) { + return true + } + s = s[i+1:] + } + return false +} + +// matches elements where the attribute named key equals val or starts with val plus a hyphen. +func attributeDashMatch(key, val string, n *html.Node, ignoreCase bool) bool { + return matchAttribute(n, key, + func(s string) bool { + if matchInsensitiveValue(s, val, ignoreCase) { + return true + } + if len(s) <= len(val) { + return false + } + if matchInsensitiveValue(s[:len(val)], val, ignoreCase) && s[len(val)] == '-' { + return true + } + return false + }) +} + +// attributePrefixMatch returns a Selector that matches elements where +// the attribute named key starts with val. +func attributePrefixMatch(key, val string, n *html.Node, ignoreCase bool) bool { + return matchAttribute(n, key, + func(s string) bool { + if strings.TrimSpace(s) == "" { + return false + } + if ignoreCase { + return strings.HasPrefix(strings.ToLower(s), strings.ToLower(val)) + } + return strings.HasPrefix(s, val) + }) +} + +// attributeSuffixMatch matches elements where +// the attribute named key ends with val. +func attributeSuffixMatch(key, val string, n *html.Node, ignoreCase bool) bool { + return matchAttribute(n, key, + func(s string) bool { + if strings.TrimSpace(s) == "" { + return false + } + if ignoreCase { + return strings.HasSuffix(strings.ToLower(s), strings.ToLower(val)) + } + return strings.HasSuffix(s, val) + }) +} + +// attributeSubstringMatch matches nodes where +// the attribute named key contains val. +func attributeSubstringMatch(key, val string, n *html.Node, ignoreCase bool) bool { + return matchAttribute(n, key, + func(s string) bool { + if strings.TrimSpace(s) == "" { + return false + } + if ignoreCase { + return strings.Contains(strings.ToLower(s), strings.ToLower(val)) + } + return strings.Contains(s, val) + }) +} + +// attributeRegexMatch matches nodes where +// the attribute named key matches the regular expression rx +func attributeRegexMatch(key string, rx *regexp.Regexp, n *html.Node) bool { + return matchAttribute(n, key, + func(s string) bool { + return rx.MatchString(s) + }) +} + +func (c attrSelector) Specificity() Specificity { + return Specificity{0, 1, 0} +} + +func (c attrSelector) PseudoElement() string { + return "" +} + +// see pseudo_classes.go for pseudo classes selectors + +// on a static context, some selectors can't match anything +type neverMatchSelector struct { + value string +} + +func (s neverMatchSelector) Match(n *html.Node) bool { + return false +} + +func (s neverMatchSelector) Specificity() Specificity { + return Specificity{0, 0, 0} +} + +func (c neverMatchSelector) PseudoElement() string { + return "" +} + +type compoundSelector struct { + selectors []Sel + pseudoElement string +} + +// Matches elements if each sub-selectors matches. 
+func (t compoundSelector) Match(n *html.Node) bool { + if len(t.selectors) == 0 { + return n.Type == html.ElementNode + } + + for _, sel := range t.selectors { + if !sel.Match(n) { + return false + } + } + return true +} + +func (s compoundSelector) Specificity() Specificity { + var out Specificity + for _, sel := range s.selectors { + out = out.Add(sel.Specificity()) + } + if s.pseudoElement != "" { + // https://drafts.csswg.org/selectors-3/#specificity + out = out.Add(Specificity{0, 0, 1}) + } + return out +} + +func (c compoundSelector) PseudoElement() string { + return c.pseudoElement +} + +type combinedSelector struct { + first Sel + combinator byte + second Sel +} + +func (t combinedSelector) Match(n *html.Node) bool { + if t.first == nil { + return false // maybe we should panic + } + switch t.combinator { + case 0: + return t.first.Match(n) + case ' ': + return descendantMatch(t.first, t.second, n) + case '>': + return childMatch(t.first, t.second, n) + case '+': + return siblingMatch(t.first, t.second, true, n) + case '~': + return siblingMatch(t.first, t.second, false, n) + default: + panic("unknown combinator") + } +} + +// matches an element if it matches d and has an ancestor that matches a. +func descendantMatch(a, d Matcher, n *html.Node) bool { + if !d.Match(n) { + return false + } + + for p := n.Parent; p != nil; p = p.Parent { + if a.Match(p) { + return true + } + } + + return false +} + +// matches an element if it matches d and its parent matches a. +func childMatch(a, d Matcher, n *html.Node) bool { + return d.Match(n) && n.Parent != nil && a.Match(n.Parent) +} + +// matches an element if it matches s2 and is preceded by an element that matches s1. +// If adjacent is true, the sibling must be immediately before the element. +func siblingMatch(s1, s2 Matcher, adjacent bool, n *html.Node) bool { + if !s2.Match(n) { + return false + } + + if adjacent { + for n = n.PrevSibling; n != nil; n = n.PrevSibling { + if n.Type == html.TextNode || n.Type == html.CommentNode { + continue + } + return s1.Match(n) + } + return false + } + + // Walk backwards looking for element that matches s1 + for c := n.PrevSibling; c != nil; c = c.PrevSibling { + if s1.Match(c) { + return true + } + } + + return false +} + +func (s combinedSelector) Specificity() Specificity { + spec := s.first.Specificity() + if s.second != nil { + spec = spec.Add(s.second.Specificity()) + } + return spec +} + +// on combinedSelector, a pseudo-element only makes sens on the last +// selector, although others increase specificity. +func (c combinedSelector) PseudoElement() string { + if c.second == nil { + return "" + } + return c.second.PseudoElement() +} + +// A SelectorGroup is a list of selectors, which matches if any of the +// individual selectors matches. +type SelectorGroup []Sel + +// Match returns true if the node matches one of the single selectors. 
+func (s SelectorGroup) Match(n *html.Node) bool { + for _, sel := range s { + if sel.Match(n) { + return true + } + } + return false +} diff --git a/vendor/github.com/andybalholm/cascadia/serialize.go b/vendor/github.com/andybalholm/cascadia/serialize.go new file mode 100644 index 00000000..61acf04e --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/serialize.go @@ -0,0 +1,176 @@ +package cascadia + +import ( + "fmt" + "strconv" + "strings" +) + +// implements the reverse operation Sel -> string + +var specialCharReplacer *strings.Replacer + +func init() { + var pairs []string + for _, s := range ",!\"#$%&'()*+ -./:;<=>?@[\\]^`{|}~" { + pairs = append(pairs, string(s), "\\"+string(s)) + } + specialCharReplacer = strings.NewReplacer(pairs...) +} + +// espace special CSS char +func escape(s string) string { return specialCharReplacer.Replace(s) } + +func (c tagSelector) String() string { + return c.tag +} + +func (c idSelector) String() string { + return "#" + escape(c.id) +} + +func (c classSelector) String() string { + return "." + escape(c.class) +} + +func (c attrSelector) String() string { + val := c.val + if c.operation == "#=" { + val = c.regexp.String() + } else if c.operation != "" { + val = fmt.Sprintf(`"%s"`, val) + } + + ignoreCase := "" + + if c.insensitive { + ignoreCase = " i" + } + + return fmt.Sprintf(`[%s%s%s%s]`, c.key, c.operation, val, ignoreCase) +} + +func (c relativePseudoClassSelector) String() string { + return fmt.Sprintf(":%s(%s)", c.name, c.match.String()) +} + +func (c containsPseudoClassSelector) String() string { + s := "contains" + if c.own { + s += "Own" + } + return fmt.Sprintf(`:%s("%s")`, s, c.value) +} + +func (c regexpPseudoClassSelector) String() string { + s := "matches" + if c.own { + s += "Own" + } + return fmt.Sprintf(":%s(%s)", s, c.regexp.String()) +} + +func (c nthPseudoClassSelector) String() string { + if c.a == 0 && c.b == 1 { // special cases + s := ":first-" + if c.last { + s = ":last-" + } + if c.ofType { + s += "of-type" + } else { + s += "child" + } + return s + } + var name string + switch [2]bool{c.last, c.ofType} { + case [2]bool{true, true}: + name = "nth-last-of-type" + case [2]bool{true, false}: + name = "nth-last-child" + case [2]bool{false, true}: + name = "nth-of-type" + case [2]bool{false, false}: + name = "nth-child" + } + s := fmt.Sprintf("+%d", c.b) + if c.b < 0 { // avoid +-8 invalid syntax + s = strconv.Itoa(c.b) + } + return fmt.Sprintf(":%s(%dn%s)", name, c.a, s) +} + +func (c onlyChildPseudoClassSelector) String() string { + if c.ofType { + return ":only-of-type" + } + return ":only-child" +} + +func (c inputPseudoClassSelector) String() string { + return ":input" +} + +func (c emptyElementPseudoClassSelector) String() string { + return ":empty" +} + +func (c rootPseudoClassSelector) String() string { + return ":root" +} + +func (c linkPseudoClassSelector) String() string { + return ":link" +} + +func (c langPseudoClassSelector) String() string { + return fmt.Sprintf(":lang(%s)", c.lang) +} + +func (c neverMatchSelector) String() string { + return c.value +} + +func (c enabledPseudoClassSelector) String() string { + return ":enabled" +} + +func (c disabledPseudoClassSelector) String() string { + return ":disabled" +} + +func (c checkedPseudoClassSelector) String() string { + return ":checked" +} + +func (c compoundSelector) String() string { + if len(c.selectors) == 0 && c.pseudoElement == "" { + return "*" + } + chunks := make([]string, len(c.selectors)) + for i, sel := range c.selectors { + chunks[i] = 
sel.String() + } + s := strings.Join(chunks, "") + if c.pseudoElement != "" { + s += "::" + c.pseudoElement + } + return s +} + +func (c combinedSelector) String() string { + start := c.first.String() + if c.second != nil { + start += fmt.Sprintf(" %s %s", string(c.combinator), c.second.String()) + } + return start +} + +func (c SelectorGroup) String() string { + ck := make([]string, len(c)) + for i, s := range c { + ck[i] = s.String() + } + return strings.Join(ck, ", ") +} diff --git a/vendor/github.com/andybalholm/cascadia/specificity.go b/vendor/github.com/andybalholm/cascadia/specificity.go new file mode 100644 index 00000000..8db864f9 --- /dev/null +++ b/vendor/github.com/andybalholm/cascadia/specificity.go @@ -0,0 +1,26 @@ +package cascadia + +// Specificity is the CSS specificity as defined in +// https://www.w3.org/TR/selectors/#specificity-rules +// with the convention Specificity = [A,B,C]. +type Specificity [3]int + +// returns `true` if s < other (strictly), false otherwise +func (s Specificity) Less(other Specificity) bool { + for i := range s { + if s[i] < other[i] { + return true + } + if s[i] > other[i] { + return false + } + } + return false +} + +func (s Specificity) Add(other Specificity) Specificity { + for i, sp := range other { + s[i] += sp + } + return s +} diff --git a/vendor/github.com/beorn7/perks/LICENSE b/vendor/github.com/beorn7/perks/LICENSE new file mode 100644 index 00000000..339177be --- /dev/null +++ b/vendor/github.com/beorn7/perks/LICENSE @@ -0,0 +1,20 @@ +Copyright (C) 2013 Blake Mizerany + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
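serialize.go above implements the reverse direction (Sel -> string), and specificity.go compares parsed selectors using the [A,B,C] convention from the CSS specificity rules. The following is a small illustrative sketch against cascadia's exported API; the selector strings are arbitrary examples, not taken from this repository:

```go
package main

import (
	"fmt"

	"github.com/andybalholm/cascadia"
)

func main() {
	// Parse two selectors; String() re-serializes them and
	// Specificity() returns the [A, B, C] triple.
	byID, err := cascadia.Parse("#nav li.active")
	if err != nil {
		panic(err)
	}
	byTag, err := cascadia.Parse("ul > li")
	if err != nil {
		panic(err)
	}

	fmt.Println(byID.String(), byID.Specificity())   // specificity [1 1 1]
	fmt.Println(byTag.String(), byTag.Specificity()) // specificity [0 0 2]

	// Less is a strict lexicographic comparison, so the tag-only
	// selector ranks below the one carrying an id and a class.
	fmt.Println(byTag.Specificity().Less(byID.Specificity())) // true
}
```

Because Specificity is a plain [3]int, the ordering is lexicographic on A, then B, then C, matching the W3C rules referenced in specificity.go.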
diff --git a/vendor/github.com/beorn7/perks/quantile/exampledata.txt b/vendor/github.com/beorn7/perks/quantile/exampledata.txt new file mode 100644 index 00000000..1602287d --- /dev/null +++ b/vendor/github.com/beorn7/perks/quantile/exampledata.txt @@ -0,0 +1,2388 @@ +8 +5 +26 +12 +5 +235 +13 +6 +28 +30 +3 +3 +3 +3 +5 +2 +33 +7 +2 +4 +7 +12 +14 +5 +8 +3 +10 +4 +5 +3 +6 +6 +209 +20 +3 +10 +14 +3 +4 +6 +8 +5 +11 +7 +3 +2 +3 +3 +212 +5 +222 +4 +10 +10 +5 +6 +3 +8 +3 +10 +254 +220 +2 +3 +5 +24 +5 +4 +222 +7 +3 +3 +223 +8 +15 +12 +14 +14 +3 +2 +2 +3 +13 +3 +11 +4 +4 +6 +5 +7 +13 +5 +3 +5 +2 +5 +3 +5 +2 +7 +15 +17 +14 +3 +6 +6 +3 +17 +5 +4 +7 +6 +4 +4 +8 +6 +8 +3 +9 +3 +6 +3 +4 +5 +3 +3 +660 +4 +6 +10 +3 +6 +3 +2 +5 +13 +2 +4 +4 +10 +4 +8 +4 +3 +7 +9 +9 +3 +10 +37 +3 +13 +4 +12 +3 +6 +10 +8 +5 +21 +2 +3 +8 +3 +2 +3 +3 +4 +12 +2 +4 +8 +8 +4 +3 +2 +20 +1 +6 +32 +2 +11 +6 +18 +3 +8 +11 +3 +212 +3 +4 +2 +6 +7 +12 +11 +3 +2 +16 +10 +6 +4 +6 +3 +2 +7 +3 +2 +2 +2 +2 +5 +6 +4 +3 +10 +3 +4 +6 +5 +3 +4 +4 +5 +6 +4 +3 +4 +4 +5 +7 +5 +5 +3 +2 +7 +2 +4 +12 +4 +5 +6 +2 +4 +4 +8 +4 +15 +13 +7 +16 +5 +3 +23 +5 +5 +7 +3 +2 +9 +8 +7 +5 +8 +11 +4 +10 +76 +4 +47 +4 +3 +2 +7 +4 +2 +3 +37 +10 +4 +2 +20 +5 +4 +4 +10 +10 +4 +3 +7 +23 +240 +7 +13 +5 +5 +3 +3 +2 +5 +4 +2 +8 +7 +19 +2 +23 +8 +7 +2 +5 +3 +8 +3 +8 +13 +5 +5 +5 +2 +3 +23 +4 +9 +8 +4 +3 +3 +5 +220 +2 +3 +4 +6 +14 +3 +53 +6 +2 +5 +18 +6 +3 +219 +6 +5 +2 +5 +3 +6 +5 +15 +4 +3 +17 +3 +2 +4 +7 +2 +3 +3 +4 +4 +3 +2 +664 +6 +3 +23 +5 +5 +16 +5 +8 +2 +4 +2 +24 +12 +3 +2 +3 +5 +8 +3 +5 +4 +3 +14 +3 +5 +8 +2 +3 +7 +9 +4 +2 +3 +6 +8 +4 +3 +4 +6 +5 +3 +3 +6 +3 +19 +4 +4 +6 +3 +6 +3 +5 +22 +5 +4 +4 +3 +8 +11 +4 +9 +7 +6 +13 +4 +4 +4 +6 +17 +9 +3 +3 +3 +4 +3 +221 +5 +11 +3 +4 +2 +12 +6 +3 +5 +7 +5 +7 +4 +9 +7 +14 +37 +19 +217 +16 +3 +5 +2 +2 +7 +19 +7 +6 +7 +4 +24 +5 +11 +4 +7 +7 +9 +13 +3 +4 +3 +6 +28 +4 +4 +5 +5 +2 +5 +6 +4 +4 +6 +10 +5 +4 +3 +2 +3 +3 +6 +5 +5 +4 +3 +2 +3 +7 +4 +6 +18 +16 +8 +16 +4 +5 +8 +6 +9 +13 +1545 +6 +215 +6 +5 +6 +3 +45 +31 +5 +2 +2 +4 +3 +3 +2 +5 +4 +3 +5 +7 +7 +4 +5 +8 +5 +4 +749 +2 +31 +9 +11 +2 +11 +5 +4 +4 +7 +9 +11 +4 +5 +4 +7 +3 +4 +6 +2 +15 +3 +4 +3 +4 +3 +5 +2 +13 +5 +5 +3 +3 +23 +4 +4 +5 +7 +4 +13 +2 +4 +3 +4 +2 +6 +2 +7 +3 +5 +5 +3 +29 +5 +4 +4 +3 +10 +2 +3 +79 +16 +6 +6 +7 +7 +3 +5 +5 +7 +4 +3 +7 +9 +5 +6 +5 +9 +6 +3 +6 +4 +17 +2 +10 +9 +3 +6 +2 +3 +21 +22 +5 +11 +4 +2 +17 +2 +224 +2 +14 +3 +4 +4 +2 +4 +4 +4 +4 +5 +3 +4 +4 +10 +2 +6 +3 +3 +5 +7 +2 +7 +5 +6 +3 +218 +2 +2 +5 +2 +6 +3 +5 +222 +14 +6 +33 +3 +2 +5 +3 +3 +3 +9 +5 +3 +3 +2 +7 +4 +3 +4 +3 +5 +6 +5 +26 +4 +13 +9 +7 +3 +221 +3 +3 +4 +4 +4 +4 +2 +18 +5 +3 +7 +9 +6 +8 +3 +10 +3 +11 +9 +5 +4 +17 +5 +5 +6 +6 +3 +2 +4 +12 +17 +6 +7 +218 +4 +2 +4 +10 +3 +5 +15 +3 +9 +4 +3 +3 +6 +29 +3 +3 +4 +5 +5 +3 +8 +5 +6 +6 +7 +5 +3 +5 +3 +29 +2 +31 +5 +15 +24 +16 +5 +207 +4 +3 +3 +2 +15 +4 +4 +13 +5 +5 +4 +6 +10 +2 +7 +8 +4 +6 +20 +5 +3 +4 +3 +12 +12 +5 +17 +7 +3 +3 +3 +6 +10 +3 +5 +25 +80 +4 +9 +3 +2 +11 +3 +3 +2 +3 +8 +7 +5 +5 +19 +5 +3 +3 +12 +11 +2 +6 +5 +5 +5 +3 +3 +3 +4 +209 +14 +3 +2 +5 +19 +4 +4 +3 +4 +14 +5 +6 +4 +13 +9 +7 +4 +7 +10 +2 +9 +5 +7 +2 +8 +4 +6 +5 +5 +222 +8 +7 +12 +5 +216 +3 +4 +4 +6 +3 +14 +8 +7 +13 +4 +3 +3 +3 +3 +17 +5 +4 +3 +33 +6 +6 +33 +7 +5 +3 +8 +7 +5 +2 +9 +4 +2 +233 +24 +7 +4 +8 +10 +3 +4 +15 +2 +16 +3 +3 +13 +12 +7 +5 +4 +207 +4 +2 +4 +27 +15 +2 +5 +2 +25 +6 +5 +5 +6 +13 +6 +18 +6 +4 +12 +225 +10 +7 +5 +2 +2 +11 +4 +14 +21 +8 +10 +3 +5 +4 +232 +2 +5 +5 +3 +7 +17 +11 +6 +6 +23 +4 +6 +3 +5 +4 +2 +17 +3 +6 +5 +8 +3 +2 +2 +14 +9 +4 +4 +2 +5 +5 +3 +7 +6 +12 +6 +10 +3 
+6 +2 +2 +19 +5 +4 +4 +9 +2 +4 +13 +3 +5 +6 +3 +6 +5 +4 +9 +6 +3 +5 +7 +3 +6 +6 +4 +3 +10 +6 +3 +221 +3 +5 +3 +6 +4 +8 +5 +3 +6 +4 +4 +2 +54 +5 +6 +11 +3 +3 +4 +4 +4 +3 +7 +3 +11 +11 +7 +10 +6 +13 +223 +213 +15 +231 +7 +3 +7 +228 +2 +3 +4 +4 +5 +6 +7 +4 +13 +3 +4 +5 +3 +6 +4 +6 +7 +2 +4 +3 +4 +3 +3 +6 +3 +7 +3 +5 +18 +5 +6 +8 +10 +3 +3 +3 +2 +4 +2 +4 +4 +5 +6 +6 +4 +10 +13 +3 +12 +5 +12 +16 +8 +4 +19 +11 +2 +4 +5 +6 +8 +5 +6 +4 +18 +10 +4 +2 +216 +6 +6 +6 +2 +4 +12 +8 +3 +11 +5 +6 +14 +5 +3 +13 +4 +5 +4 +5 +3 +28 +6 +3 +7 +219 +3 +9 +7 +3 +10 +6 +3 +4 +19 +5 +7 +11 +6 +15 +19 +4 +13 +11 +3 +7 +5 +10 +2 +8 +11 +2 +6 +4 +6 +24 +6 +3 +3 +3 +3 +6 +18 +4 +11 +4 +2 +5 +10 +8 +3 +9 +5 +3 +4 +5 +6 +2 +5 +7 +4 +4 +14 +6 +4 +4 +5 +5 +7 +2 +4 +3 +7 +3 +3 +6 +4 +5 +4 +4 +4 +3 +3 +3 +3 +8 +14 +2 +3 +5 +3 +2 +4 +5 +3 +7 +3 +3 +18 +3 +4 +4 +5 +7 +3 +3 +3 +13 +5 +4 +8 +211 +5 +5 +3 +5 +2 +5 +4 +2 +655 +6 +3 +5 +11 +2 +5 +3 +12 +9 +15 +11 +5 +12 +217 +2 +6 +17 +3 +3 +207 +5 +5 +4 +5 +9 +3 +2 +8 +5 +4 +3 +2 +5 +12 +4 +14 +5 +4 +2 +13 +5 +8 +4 +225 +4 +3 +4 +5 +4 +3 +3 +6 +23 +9 +2 +6 +7 +233 +4 +4 +6 +18 +3 +4 +6 +3 +4 +4 +2 +3 +7 +4 +13 +227 +4 +3 +5 +4 +2 +12 +9 +17 +3 +7 +14 +6 +4 +5 +21 +4 +8 +9 +2 +9 +25 +16 +3 +6 +4 +7 +8 +5 +2 +3 +5 +4 +3 +3 +5 +3 +3 +3 +2 +3 +19 +2 +4 +3 +4 +2 +3 +4 +4 +2 +4 +3 +3 +3 +2 +6 +3 +17 +5 +6 +4 +3 +13 +5 +3 +3 +3 +4 +9 +4 +2 +14 +12 +4 +5 +24 +4 +3 +37 +12 +11 +21 +3 +4 +3 +13 +4 +2 +3 +15 +4 +11 +4 +4 +3 +8 +3 +4 +4 +12 +8 +5 +3 +3 +4 +2 +220 +3 +5 +223 +3 +3 +3 +10 +3 +15 +4 +241 +9 +7 +3 +6 +6 +23 +4 +13 +7 +3 +4 +7 +4 +9 +3 +3 +4 +10 +5 +5 +1 +5 +24 +2 +4 +5 +5 +6 +14 +3 +8 +2 +3 +5 +13 +13 +3 +5 +2 +3 +15 +3 +4 +2 +10 +4 +4 +4 +5 +5 +3 +5 +3 +4 +7 +4 +27 +3 +6 +4 +15 +3 +5 +6 +6 +5 +4 +8 +3 +9 +2 +6 +3 +4 +3 +7 +4 +18 +3 +11 +3 +3 +8 +9 +7 +24 +3 +219 +7 +10 +4 +5 +9 +12 +2 +5 +4 +4 +4 +3 +3 +19 +5 +8 +16 +8 +6 +22 +3 +23 +3 +242 +9 +4 +3 +3 +5 +7 +3 +3 +5 +8 +3 +7 +5 +14 +8 +10 +3 +4 +3 +7 +4 +6 +7 +4 +10 +4 +3 +11 +3 +7 +10 +3 +13 +6 +8 +12 +10 +5 +7 +9 +3 +4 +7 +7 +10 +8 +30 +9 +19 +4 +3 +19 +15 +4 +13 +3 +215 +223 +4 +7 +4 +8 +17 +16 +3 +7 +6 +5 +5 +4 +12 +3 +7 +4 +4 +13 +4 +5 +2 +5 +6 +5 +6 +6 +7 +10 +18 +23 +9 +3 +3 +6 +5 +2 +4 +2 +7 +3 +3 +2 +5 +5 +14 +10 +224 +6 +3 +4 +3 +7 +5 +9 +3 +6 +4 +2 +5 +11 +4 +3 +3 +2 +8 +4 +7 +4 +10 +7 +3 +3 +18 +18 +17 +3 +3 +3 +4 +5 +3 +3 +4 +12 +7 +3 +11 +13 +5 +4 +7 +13 +5 +4 +11 +3 +12 +3 +6 +4 +4 +21 +4 +6 +9 +5 +3 +10 +8 +4 +6 +4 +4 +6 +5 +4 +8 +6 +4 +6 +4 +4 +5 +9 +6 +3 +4 +2 +9 +3 +18 +2 +4 +3 +13 +3 +6 +6 +8 +7 +9 +3 +2 +16 +3 +4 +6 +3 +2 +33 +22 +14 +4 +9 +12 +4 +5 +6 +3 +23 +9 +4 +3 +5 +5 +3 +4 +5 +3 +5 +3 +10 +4 +5 +5 +8 +4 +4 +6 +8 +5 +4 +3 +4 +6 +3 +3 +3 +5 +9 +12 +6 +5 +9 +3 +5 +3 +2 +2 +2 +18 +3 +2 +21 +2 +5 +4 +6 +4 +5 +10 +3 +9 +3 +2 +10 +7 +3 +6 +6 +4 +4 +8 +12 +7 +3 +7 +3 +3 +9 +3 +4 +5 +4 +4 +5 +5 +10 +15 +4 +4 +14 +6 +227 +3 +14 +5 +216 +22 +5 +4 +2 +2 +6 +3 +4 +2 +9 +9 +4 +3 +28 +13 +11 +4 +5 +3 +3 +2 +3 +3 +5 +3 +4 +3 +5 +23 +26 +3 +4 +5 +6 +4 +6 +3 +5 +5 +3 +4 +3 +2 +2 +2 +7 +14 +3 +6 +7 +17 +2 +2 +15 +14 +16 +4 +6 +7 +13 +6 +4 +5 +6 +16 +3 +3 +28 +3 +6 +15 +3 +9 +2 +4 +6 +3 +3 +22 +4 +12 +6 +7 +2 +5 +4 +10 +3 +16 +6 +9 +2 +5 +12 +7 +5 +5 +5 +5 +2 +11 +9 +17 +4 +3 +11 +7 +3 +5 +15 +4 +3 +4 +211 +8 +7 +5 +4 +7 +6 +7 +6 +3 +6 +5 +6 +5 +3 +4 +4 +26 +4 +6 +10 +4 +4 +3 +2 +3 +3 +4 +5 +9 +3 +9 +4 +4 +5 +5 +8 +2 +4 +2 +3 +8 +4 +11 +19 +5 +8 +6 +3 +5 +6 +12 +3 +2 +4 +16 +12 +3 +4 +4 +8 +6 +5 +6 +6 +219 +8 +222 +6 +16 +3 +13 +19 +5 +4 +3 +11 +6 +10 +4 +7 +7 +12 +5 +3 +3 +5 +6 +10 +3 +8 +2 +5 +4 +7 +2 +4 +4 +2 +12 +9 
+6 +4 +2 +40 +2 +4 +10 +4 +223 +4 +2 +20 +6 +7 +24 +5 +4 +5 +2 +20 +16 +6 +5 +13 +2 +3 +3 +19 +3 +2 +4 +5 +6 +7 +11 +12 +5 +6 +7 +7 +3 +5 +3 +5 +3 +14 +3 +4 +4 +2 +11 +1 +7 +3 +9 +6 +11 +12 +5 +8 +6 +221 +4 +2 +12 +4 +3 +15 +4 +5 +226 +7 +218 +7 +5 +4 +5 +18 +4 +5 +9 +4 +4 +2 +9 +18 +18 +9 +5 +6 +6 +3 +3 +7 +3 +5 +4 +4 +4 +12 +3 +6 +31 +5 +4 +7 +3 +6 +5 +6 +5 +11 +2 +2 +11 +11 +6 +7 +5 +8 +7 +10 +5 +23 +7 +4 +3 +5 +34 +2 +5 +23 +7 +3 +6 +8 +4 +4 +4 +2 +5 +3 +8 +5 +4 +8 +25 +2 +3 +17 +8 +3 +4 +8 +7 +3 +15 +6 +5 +7 +21 +9 +5 +6 +6 +5 +3 +2 +3 +10 +3 +6 +3 +14 +7 +4 +4 +8 +7 +8 +2 +6 +12 +4 +213 +6 +5 +21 +8 +2 +5 +23 +3 +11 +2 +3 +6 +25 +2 +3 +6 +7 +6 +6 +4 +4 +6 +3 +17 +9 +7 +6 +4 +3 +10 +7 +2 +3 +3 +3 +11 +8 +3 +7 +6 +4 +14 +36 +3 +4 +3 +3 +22 +13 +21 +4 +2 +7 +4 +4 +17 +15 +3 +7 +11 +2 +4 +7 +6 +209 +6 +3 +2 +2 +24 +4 +9 +4 +3 +3 +3 +29 +2 +2 +4 +3 +3 +5 +4 +6 +3 +3 +2 +4 diff --git a/vendor/github.com/beorn7/perks/quantile/stream.go b/vendor/github.com/beorn7/perks/quantile/stream.go new file mode 100644 index 00000000..d7d14f8e --- /dev/null +++ b/vendor/github.com/beorn7/perks/quantile/stream.go @@ -0,0 +1,316 @@ +// Package quantile computes approximate quantiles over an unbounded data +// stream within low memory and CPU bounds. +// +// A small amount of accuracy is traded to achieve the above properties. +// +// Multiple streams can be merged before calling Query to generate a single set +// of results. This is meaningful when the streams represent the same type of +// data. See Merge and Samples. +// +// For more detailed information about the algorithm used, see: +// +// Effective Computation of Biased Quantiles over Data Streams +// +// http://www.cs.rutgers.edu/~muthu/bquant.pdf +package quantile + +import ( + "math" + "sort" +) + +// Sample holds an observed value and meta information for compression. JSON +// tags have been added for convenience. +type Sample struct { + Value float64 `json:",string"` + Width float64 `json:",string"` + Delta float64 `json:",string"` +} + +// Samples represents a slice of samples. It implements sort.Interface. +type Samples []Sample + +func (a Samples) Len() int { return len(a) } +func (a Samples) Less(i, j int) bool { return a[i].Value < a[j].Value } +func (a Samples) Swap(i, j int) { a[i], a[j] = a[j], a[i] } + +type invariant func(s *stream, r float64) float64 + +// NewLowBiased returns an initialized Stream for low-biased quantiles +// (e.g. 0.01, 0.1, 0.5) where the needed quantiles are not known a priori, but +// error guarantees can still be given even for the lower ranks of the data +// distribution. +// +// The provided epsilon is a relative error, i.e. the true quantile of a value +// returned by a query is guaranteed to be within (1Β±Epsilon)*Quantile. +// +// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error +// properties. +func NewLowBiased(epsilon float64) *Stream { + Ζ’ := func(s *stream, r float64) float64 { + return 2 * epsilon * r + } + return newStream(Ζ’) +} + +// NewHighBiased returns an initialized Stream for high-biased quantiles +// (e.g. 0.01, 0.1, 0.5) where the needed quantiles are not known a priori, but +// error guarantees can still be given even for the higher ranks of the data +// distribution. +// +// The provided epsilon is a relative error, i.e. the true quantile of a value +// returned by a query is guaranteed to be within 1-(1Β±Epsilon)*(1-Quantile). +// +// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error +// properties. 
+func NewHighBiased(epsilon float64) *Stream { + Ζ’ := func(s *stream, r float64) float64 { + return 2 * epsilon * (s.n - r) + } + return newStream(Ζ’) +} + +// NewTargeted returns an initialized Stream concerned with a particular set of +// quantile values that are supplied a priori. Knowing these a priori reduces +// space and computation time. The targets map maps the desired quantiles to +// their absolute errors, i.e. the true quantile of a value returned by a query +// is guaranteed to be within (QuantileΒ±Epsilon). +// +// See http://www.cs.rutgers.edu/~muthu/bquant.pdf for time, space, and error properties. +func NewTargeted(targetMap map[float64]float64) *Stream { + // Convert map to slice to avoid slow iterations on a map. + // Ζ’ is called on the hot path, so converting the map to a slice + // beforehand results in significant CPU savings. + targets := targetMapToSlice(targetMap) + + Ζ’ := func(s *stream, r float64) float64 { + var m = math.MaxFloat64 + var f float64 + for _, t := range targets { + if t.quantile*s.n <= r { + f = (2 * t.epsilon * r) / t.quantile + } else { + f = (2 * t.epsilon * (s.n - r)) / (1 - t.quantile) + } + if f < m { + m = f + } + } + return m + } + return newStream(Ζ’) +} + +type target struct { + quantile float64 + epsilon float64 +} + +func targetMapToSlice(targetMap map[float64]float64) []target { + targets := make([]target, 0, len(targetMap)) + + for quantile, epsilon := range targetMap { + t := target{ + quantile: quantile, + epsilon: epsilon, + } + targets = append(targets, t) + } + + return targets +} + +// Stream computes quantiles for a stream of float64s. It is not thread-safe by +// design. Take care when using across multiple goroutines. +type Stream struct { + *stream + b Samples + sorted bool +} + +func newStream(Ζ’ invariant) *Stream { + x := &stream{Ζ’: Ζ’} + return &Stream{x, make(Samples, 0, 500), true} +} + +// Insert inserts v into the stream. +func (s *Stream) Insert(v float64) { + s.insert(Sample{Value: v, Width: 1}) +} + +func (s *Stream) insert(sample Sample) { + s.b = append(s.b, sample) + s.sorted = false + if len(s.b) == cap(s.b) { + s.flush() + } +} + +// Query returns the computed qth percentiles value. If s was created with +// NewTargeted, and q is not in the set of quantiles provided a priori, Query +// will return an unspecified result. +func (s *Stream) Query(q float64) float64 { + if !s.flushed() { + // Fast path when there hasn't been enough data for a flush; + // this also yields better accuracy for small sets of data. + l := len(s.b) + if l == 0 { + return 0 + } + i := int(math.Ceil(float64(l) * q)) + if i > 0 { + i -= 1 + } + s.maybeSort() + return s.b[i].Value + } + s.flush() + return s.stream.query(q) +} + +// Merge merges samples into the underlying streams samples. This is handy when +// merging multiple streams from separate threads, database shards, etc. +// +// ATTENTION: This method is broken and does not yield correct results. The +// underlying algorithm is not capable of merging streams correctly. +func (s *Stream) Merge(samples Samples) { + sort.Sort(samples) + s.stream.merge(samples) +} + +// Reset reinitializes and clears the list reusing the samples buffer memory. +func (s *Stream) Reset() { + s.stream.reset() + s.b = s.b[:0] +} + +// Samples returns stream samples held by s. +func (s *Stream) Samples() Samples { + if !s.flushed() { + return s.b + } + s.flush() + return s.stream.samples() +} + +// Count returns the total number of samples observed in the stream +// since initialization. 
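The hunk above vendors `github.com/beorn7/perks/quantile` verbatim, presumably as a transitive dependency of the Prometheus client (which uses it for summary quantiles); nothing here is project-specific. For orientation only, a minimal sketch of driving the `Stream` API declared above — illustrative, not part of this diff; the target map and sample values are made up:

```go
package main

import (
	"fmt"

	"github.com/beorn7/perks/quantile"
)

func main() {
	// Desired quantiles mapped to their allowed absolute error,
	// as documented for NewTargeted above.
	targets := map[float64]float64{
		0.50: 0.05,
		0.90: 0.01,
		0.99: 0.001,
	}
	stream := quantile.NewTargeted(targets)

	// Insert observations; Stream is not thread-safe by design.
	for _, v := range []float64{0.12, 0.34, 0.56, 0.78, 0.91} {
		stream.Insert(v)
	}

	fmt.Println("p99:", stream.Query(0.99))
	fmt.Println("observed:", stream.Count())
}
```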
+func (s *Stream) Count() int { + return len(s.b) + s.stream.count() +} + +func (s *Stream) flush() { + s.maybeSort() + s.stream.merge(s.b) + s.b = s.b[:0] +} + +func (s *Stream) maybeSort() { + if !s.sorted { + s.sorted = true + sort.Sort(s.b) + } +} + +func (s *Stream) flushed() bool { + return len(s.stream.l) > 0 +} + +type stream struct { + n float64 + l []Sample + Ζ’ invariant +} + +func (s *stream) reset() { + s.l = s.l[:0] + s.n = 0 +} + +func (s *stream) insert(v float64) { + s.merge(Samples{{v, 1, 0}}) +} + +func (s *stream) merge(samples Samples) { + // TODO(beorn7): This tries to merge not only individual samples, but + // whole summaries. The paper doesn't mention merging summaries at + // all. Unittests show that the merging is inaccurate. Find out how to + // do merges properly. + var r float64 + i := 0 + for _, sample := range samples { + for ; i < len(s.l); i++ { + c := s.l[i] + if c.Value > sample.Value { + // Insert at position i. + s.l = append(s.l, Sample{}) + copy(s.l[i+1:], s.l[i:]) + s.l[i] = Sample{ + sample.Value, + sample.Width, + math.Max(sample.Delta, math.Floor(s.Ζ’(s, r))-1), + // TODO(beorn7): How to calculate delta correctly? + } + i++ + goto inserted + } + r += c.Width + } + s.l = append(s.l, Sample{sample.Value, sample.Width, 0}) + i++ + inserted: + s.n += sample.Width + r += sample.Width + } + s.compress() +} + +func (s *stream) count() int { + return int(s.n) +} + +func (s *stream) query(q float64) float64 { + t := math.Ceil(q * s.n) + t += math.Ceil(s.Ζ’(s, t) / 2) + p := s.l[0] + var r float64 + for _, c := range s.l[1:] { + r += p.Width + if r+c.Width+c.Delta > t { + return p.Value + } + p = c + } + return p.Value +} + +func (s *stream) compress() { + if len(s.l) < 2 { + return + } + x := s.l[len(s.l)-1] + xi := len(s.l) - 1 + r := s.n - 1 - x.Width + + for i := len(s.l) - 2; i >= 0; i-- { + c := s.l[i] + if c.Width+x.Width+x.Delta <= s.Ζ’(s, r) { + x.Width += c.Width + s.l[xi] = x + // Remove element at i. + copy(s.l[i:], s.l[i+1:]) + s.l = s.l[:len(s.l)-1] + xi -= 1 + } else { + x = c + xi = i + } + r -= c.Width + } +} + +func (s *stream) samples() Samples { + samples := make(Samples, len(s.l)) + copy(samples, s.l) + return samples +} diff --git a/vendor/github.com/golang/protobuf/AUTHORS b/vendor/github.com/golang/protobuf/AUTHORS new file mode 100644 index 00000000..15167cd7 --- /dev/null +++ b/vendor/github.com/golang/protobuf/AUTHORS @@ -0,0 +1,3 @@ +# This source code refers to The Go Authors for copyright purposes. +# The master list of authors is in the main Go distribution, +# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/github.com/golang/protobuf/CONTRIBUTORS b/vendor/github.com/golang/protobuf/CONTRIBUTORS new file mode 100644 index 00000000..1c4577e9 --- /dev/null +++ b/vendor/github.com/golang/protobuf/CONTRIBUTORS @@ -0,0 +1,3 @@ +# This source code was written by the Go contributors. +# The master list of contributors is in the main Go distribution, +# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/github.com/golang/protobuf/LICENSE b/vendor/github.com/golang/protobuf/LICENSE new file mode 100644 index 00000000..0f646931 --- /dev/null +++ b/vendor/github.com/golang/protobuf/LICENSE @@ -0,0 +1,28 @@ +Copyright 2010 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/vendor/github.com/golang/protobuf/proto/buffer.go b/vendor/github.com/golang/protobuf/proto/buffer.go new file mode 100644 index 00000000..e810e6fe --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/buffer.go @@ -0,0 +1,324 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "errors" + "fmt" + + "google.golang.org/protobuf/encoding/prototext" + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + WireVarint = 0 + WireFixed32 = 5 + WireFixed64 = 1 + WireBytes = 2 + WireStartGroup = 3 + WireEndGroup = 4 +) + +// EncodeVarint returns the varint encoded bytes of v. +func EncodeVarint(v uint64) []byte { + return protowire.AppendVarint(nil, v) +} + +// SizeVarint returns the length of the varint encoded bytes of v. +// This is equal to len(EncodeVarint(v)). +func SizeVarint(v uint64) int { + return protowire.SizeVarint(v) +} + +// DecodeVarint parses a varint encoded integer from b, +// returning the integer value and the length of the varint. +// It returns (0, 0) if there is a parse error. +func DecodeVarint(b []byte) (uint64, int) { + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, 0 + } + return v, n +} + +// Buffer is a buffer for encoding and decoding the protobuf wire format. +// It may be reused between invocations to reduce memory usage. +type Buffer struct { + buf []byte + idx int + deterministic bool +} + +// NewBuffer allocates a new Buffer initialized with buf, +// where the contents of buf are considered the unread portion of the buffer. +func NewBuffer(buf []byte) *Buffer { + return &Buffer{buf: buf} +} + +// SetDeterministic specifies whether to use deterministic serialization. +// +// Deterministic serialization guarantees that for a given binary, equal +// messages will always be serialized to the same bytes. This implies: +// +// - Repeated serialization of a message will return the same bytes. 
+// - Different processes of the same binary (which may be executing on +// different machines) will serialize equal messages to the same bytes. +// +// Note that the deterministic serialization is NOT canonical across +// languages. It is not guaranteed to remain stable over time. It is unstable +// across different builds with schema changes due to unknown fields. +// Users who need canonical serialization (e.g., persistent storage in a +// canonical form, fingerprinting, etc.) should define their own +// canonicalization specification and implement their own serializer rather +// than relying on this API. +// +// If deterministic serialization is requested, map entries will be sorted +// by keys in lexographical order. This is an implementation detail and +// subject to change. +func (b *Buffer) SetDeterministic(deterministic bool) { + b.deterministic = deterministic +} + +// SetBuf sets buf as the internal buffer, +// where the contents of buf are considered the unread portion of the buffer. +func (b *Buffer) SetBuf(buf []byte) { + b.buf = buf + b.idx = 0 +} + +// Reset clears the internal buffer of all written and unread data. +func (b *Buffer) Reset() { + b.buf = b.buf[:0] + b.idx = 0 +} + +// Bytes returns the internal buffer. +func (b *Buffer) Bytes() []byte { + return b.buf +} + +// Unread returns the unread portion of the buffer. +func (b *Buffer) Unread() []byte { + return b.buf[b.idx:] +} + +// Marshal appends the wire-format encoding of m to the buffer. +func (b *Buffer) Marshal(m Message) error { + var err error + b.buf, err = marshalAppend(b.buf, m, b.deterministic) + return err +} + +// Unmarshal parses the wire-format message in the buffer and +// places the decoded results in m. +// It does not reset m before unmarshaling. +func (b *Buffer) Unmarshal(m Message) error { + err := UnmarshalMerge(b.Unread(), m) + b.idx = len(b.buf) + return err +} + +type unknownFields struct{ XXX_unrecognized protoimpl.UnknownFields } + +func (m *unknownFields) String() string { panic("not implemented") } +func (m *unknownFields) Reset() { panic("not implemented") } +func (m *unknownFields) ProtoMessage() { panic("not implemented") } + +// DebugPrint dumps the encoded bytes of b with a header and footer including s +// to stdout. This is only intended for debugging. +func (*Buffer) DebugPrint(s string, b []byte) { + m := MessageReflect(new(unknownFields)) + m.SetUnknown(b) + b, _ = prototext.MarshalOptions{AllowPartial: true, Indent: "\t"}.Marshal(m.Interface()) + fmt.Printf("==== %s ====\n%s==== %s ====\n", s, b, s) +} + +// EncodeVarint appends an unsigned varint encoding to the buffer. +func (b *Buffer) EncodeVarint(v uint64) error { + b.buf = protowire.AppendVarint(b.buf, v) + return nil +} + +// EncodeZigzag32 appends a 32-bit zig-zag varint encoding to the buffer. +func (b *Buffer) EncodeZigzag32(v uint64) error { + return b.EncodeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) +} + +// EncodeZigzag64 appends a 64-bit zig-zag varint encoding to the buffer. +func (b *Buffer) EncodeZigzag64(v uint64) error { + return b.EncodeVarint(uint64((uint64(v) << 1) ^ uint64((int64(v) >> 63)))) +} + +// EncodeFixed32 appends a 32-bit little-endian integer to the buffer. +func (b *Buffer) EncodeFixed32(v uint64) error { + b.buf = protowire.AppendFixed32(b.buf, uint32(v)) + return nil +} + +// EncodeFixed64 appends a 64-bit little-endian integer to the buffer. 
+func (b *Buffer) EncodeFixed64(v uint64) error { + b.buf = protowire.AppendFixed64(b.buf, uint64(v)) + return nil +} + +// EncodeRawBytes appends a length-prefixed raw bytes to the buffer. +func (b *Buffer) EncodeRawBytes(v []byte) error { + b.buf = protowire.AppendBytes(b.buf, v) + return nil +} + +// EncodeStringBytes appends a length-prefixed raw bytes to the buffer. +// It does not validate whether v contains valid UTF-8. +func (b *Buffer) EncodeStringBytes(v string) error { + b.buf = protowire.AppendString(b.buf, v) + return nil +} + +// EncodeMessage appends a length-prefixed encoded message to the buffer. +func (b *Buffer) EncodeMessage(m Message) error { + var err error + b.buf = protowire.AppendVarint(b.buf, uint64(Size(m))) + b.buf, err = marshalAppend(b.buf, m, b.deterministic) + return err +} + +// DecodeVarint consumes an encoded unsigned varint from the buffer. +func (b *Buffer) DecodeVarint() (uint64, error) { + v, n := protowire.ConsumeVarint(b.buf[b.idx:]) + if n < 0 { + return 0, protowire.ParseError(n) + } + b.idx += n + return uint64(v), nil +} + +// DecodeZigzag32 consumes an encoded 32-bit zig-zag varint from the buffer. +func (b *Buffer) DecodeZigzag32() (uint64, error) { + v, err := b.DecodeVarint() + if err != nil { + return 0, err + } + return uint64((uint32(v) >> 1) ^ uint32((int32(v&1)<<31)>>31)), nil +} + +// DecodeZigzag64 consumes an encoded 64-bit zig-zag varint from the buffer. +func (b *Buffer) DecodeZigzag64() (uint64, error) { + v, err := b.DecodeVarint() + if err != nil { + return 0, err + } + return uint64((uint64(v) >> 1) ^ uint64((int64(v&1)<<63)>>63)), nil +} + +// DecodeFixed32 consumes a 32-bit little-endian integer from the buffer. +func (b *Buffer) DecodeFixed32() (uint64, error) { + v, n := protowire.ConsumeFixed32(b.buf[b.idx:]) + if n < 0 { + return 0, protowire.ParseError(n) + } + b.idx += n + return uint64(v), nil +} + +// DecodeFixed64 consumes a 64-bit little-endian integer from the buffer. +func (b *Buffer) DecodeFixed64() (uint64, error) { + v, n := protowire.ConsumeFixed64(b.buf[b.idx:]) + if n < 0 { + return 0, protowire.ParseError(n) + } + b.idx += n + return uint64(v), nil +} + +// DecodeRawBytes consumes a length-prefixed raw bytes from the buffer. +// If alloc is specified, it returns a copy the raw bytes +// rather than a sub-slice of the buffer. +func (b *Buffer) DecodeRawBytes(alloc bool) ([]byte, error) { + v, n := protowire.ConsumeBytes(b.buf[b.idx:]) + if n < 0 { + return nil, protowire.ParseError(n) + } + b.idx += n + if alloc { + v = append([]byte(nil), v...) + } + return v, nil +} + +// DecodeStringBytes consumes a length-prefixed raw bytes from the buffer. +// It does not validate whether the raw bytes contain valid UTF-8. +func (b *Buffer) DecodeStringBytes() (string, error) { + v, n := protowire.ConsumeString(b.buf[b.idx:]) + if n < 0 { + return "", protowire.ParseError(n) + } + b.idx += n + return v, nil +} + +// DecodeMessage consumes a length-prefixed message from the buffer. +// It does not reset m before unmarshaling. +func (b *Buffer) DecodeMessage(m Message) error { + v, err := b.DecodeRawBytes(false) + if err != nil { + return err + } + return UnmarshalMerge(v, m) +} + +// DecodeGroup consumes a message group from the buffer. +// It assumes that the start group marker has already been consumed and +// consumes all bytes until (and including the end group marker). +// It does not reset m before unmarshaling. 
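`buffer.go` above layers the legacy `proto.Buffer` helpers on top of `protowire` and is likewise vendored verbatim. A small round-trip sketch of the encode/decode pairs defined above — illustrative, not part of this diff; the values are arbitrary:

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	buf := proto.NewBuffer(nil)

	// Encoders append to the buffer; decoders consume from the unread portion.
	_ = buf.EncodeVarint(300)     // plain unsigned varint
	_ = buf.EncodeZigzag64(12345) // zig-zag keeps small magnitudes short on the wire

	v, _ := buf.DecodeVarint()
	z, _ := buf.DecodeZigzag64()
	fmt.Println(v, z) // 300 12345
}
```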
+func (b *Buffer) DecodeGroup(m Message) error { + v, n, err := consumeGroup(b.buf[b.idx:]) + if err != nil { + return err + } + b.idx += n + return UnmarshalMerge(v, m) +} + +// consumeGroup parses b until it finds an end group marker, returning +// the raw bytes of the message (excluding the end group marker) and the +// the total length of the message (including the end group marker). +func consumeGroup(b []byte) ([]byte, int, error) { + b0 := b + depth := 1 // assume this follows a start group marker + for { + _, wtyp, tagLen := protowire.ConsumeTag(b) + if tagLen < 0 { + return nil, 0, protowire.ParseError(tagLen) + } + b = b[tagLen:] + + var valLen int + switch wtyp { + case protowire.VarintType: + _, valLen = protowire.ConsumeVarint(b) + case protowire.Fixed32Type: + _, valLen = protowire.ConsumeFixed32(b) + case protowire.Fixed64Type: + _, valLen = protowire.ConsumeFixed64(b) + case protowire.BytesType: + _, valLen = protowire.ConsumeBytes(b) + case protowire.StartGroupType: + depth++ + case protowire.EndGroupType: + depth-- + default: + return nil, 0, errors.New("proto: cannot parse reserved wire type") + } + if valLen < 0 { + return nil, 0, protowire.ParseError(valLen) + } + b = b[valLen:] + + if depth == 0 { + return b0[:len(b0)-len(b)-tagLen], len(b0) - len(b), nil + } + } +} diff --git a/vendor/github.com/golang/protobuf/proto/defaults.go b/vendor/github.com/golang/protobuf/proto/defaults.go new file mode 100644 index 00000000..d399bf06 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/defaults.go @@ -0,0 +1,63 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "google.golang.org/protobuf/reflect/protoreflect" +) + +// SetDefaults sets unpopulated scalar fields to their default values. +// Fields within a oneof are not set even if they have a default value. +// SetDefaults is recursively called upon any populated message fields. +func SetDefaults(m Message) { + if m != nil { + setDefaults(MessageReflect(m)) + } +} + +func setDefaults(m protoreflect.Message) { + fds := m.Descriptor().Fields() + for i := 0; i < fds.Len(); i++ { + fd := fds.Get(i) + if !m.Has(fd) { + if fd.HasDefault() && fd.ContainingOneof() == nil { + v := fd.Default() + if fd.Kind() == protoreflect.BytesKind { + v = protoreflect.ValueOf(append([]byte(nil), v.Bytes()...)) // copy the default bytes + } + m.Set(fd, v) + } + continue + } + } + + m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + switch { + // Handle singular message. + case fd.Cardinality() != protoreflect.Repeated: + if fd.Message() != nil { + setDefaults(m.Get(fd).Message()) + } + // Handle list of messages. + case fd.IsList(): + if fd.Message() != nil { + ls := m.Get(fd).List() + for i := 0; i < ls.Len(); i++ { + setDefaults(ls.Get(i).Message()) + } + } + // Handle map of messages. + case fd.IsMap(): + if fd.MapValue().Message() != nil { + ms := m.Get(fd).Map() + ms.Range(func(_ protoreflect.MapKey, v protoreflect.Value) bool { + setDefaults(v.Message()) + return true + }) + } + } + return true + }) +} diff --git a/vendor/github.com/golang/protobuf/proto/deprecated.go b/vendor/github.com/golang/protobuf/proto/deprecated.go new file mode 100644 index 00000000..e8db57e0 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/deprecated.go @@ -0,0 +1,113 @@ +// Copyright 2018 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "encoding/json" + "errors" + "fmt" + "strconv" + + protoV2 "google.golang.org/protobuf/proto" +) + +var ( + // Deprecated: No longer returned. + ErrNil = errors.New("proto: Marshal called with nil") + + // Deprecated: No longer returned. + ErrTooLarge = errors.New("proto: message encodes to over 2 GB") + + // Deprecated: No longer returned. + ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for oneof") +) + +// Deprecated: Do not use. +type Stats struct{ Emalloc, Dmalloc, Encode, Decode, Chit, Cmiss, Size uint64 } + +// Deprecated: Do not use. +func GetStats() Stats { return Stats{} } + +// Deprecated: Do not use. +func MarshalMessageSet(interface{}) ([]byte, error) { + return nil, errors.New("proto: not implemented") +} + +// Deprecated: Do not use. +func UnmarshalMessageSet([]byte, interface{}) error { + return errors.New("proto: not implemented") +} + +// Deprecated: Do not use. +func MarshalMessageSetJSON(interface{}) ([]byte, error) { + return nil, errors.New("proto: not implemented") +} + +// Deprecated: Do not use. +func UnmarshalMessageSetJSON([]byte, interface{}) error { + return errors.New("proto: not implemented") +} + +// Deprecated: Do not use. +func RegisterMessageSetType(Message, int32, string) {} + +// Deprecated: Do not use. +func EnumName(m map[int32]string, v int32) string { + s, ok := m[v] + if ok { + return s + } + return strconv.Itoa(int(v)) +} + +// Deprecated: Do not use. +func UnmarshalJSONEnum(m map[string]int32, data []byte, enumName string) (int32, error) { + if data[0] == '"' { + // New style: enums are strings. + var repr string + if err := json.Unmarshal(data, &repr); err != nil { + return -1, err + } + val, ok := m[repr] + if !ok { + return 0, fmt.Errorf("unrecognized enum %s value %q", enumName, repr) + } + return val, nil + } + // Old style: enums are ints. + var val int32 + if err := json.Unmarshal(data, &val); err != nil { + return 0, fmt.Errorf("cannot unmarshal %#q into enum %s", data, enumName) + } + return val, nil +} + +// Deprecated: Do not use; this type existed for intenal-use only. +type InternalMessageInfo struct{} + +// Deprecated: Do not use; this method existed for intenal-use only. +func (*InternalMessageInfo) DiscardUnknown(m Message) { + DiscardUnknown(m) +} + +// Deprecated: Do not use; this method existed for intenal-use only. +func (*InternalMessageInfo) Marshal(b []byte, m Message, deterministic bool) ([]byte, error) { + return protoV2.MarshalOptions{Deterministic: deterministic}.MarshalAppend(b, MessageV2(m)) +} + +// Deprecated: Do not use; this method existed for intenal-use only. +func (*InternalMessageInfo) Merge(dst, src Message) { + protoV2.Merge(MessageV2(dst), MessageV2(src)) +} + +// Deprecated: Do not use; this method existed for intenal-use only. +func (*InternalMessageInfo) Size(m Message) int { + return protoV2.Size(MessageV2(m)) +} + +// Deprecated: Do not use; this method existed for intenal-use only. +func (*InternalMessageInfo) Unmarshal(m Message, b []byte) error { + return protoV2.UnmarshalOptions{Merge: true}.Unmarshal(b, MessageV2(m)) +} diff --git a/vendor/github.com/golang/protobuf/proto/discard.go b/vendor/github.com/golang/protobuf/proto/discard.go new file mode 100644 index 00000000..2187e877 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/discard.go @@ -0,0 +1,58 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "google.golang.org/protobuf/reflect/protoreflect" +) + +// DiscardUnknown recursively discards all unknown fields from this message +// and all embedded messages. +// +// When unmarshaling a message with unrecognized fields, the tags and values +// of such fields are preserved in the Message. This allows a later call to +// marshal to be able to produce a message that continues to have those +// unrecognized fields. To avoid this, DiscardUnknown is used to +// explicitly clear the unknown fields after unmarshaling. +func DiscardUnknown(m Message) { + if m != nil { + discardUnknown(MessageReflect(m)) + } +} + +func discardUnknown(m protoreflect.Message) { + m.Range(func(fd protoreflect.FieldDescriptor, val protoreflect.Value) bool { + switch { + // Handle singular message. + case fd.Cardinality() != protoreflect.Repeated: + if fd.Message() != nil { + discardUnknown(m.Get(fd).Message()) + } + // Handle list of messages. + case fd.IsList(): + if fd.Message() != nil { + ls := m.Get(fd).List() + for i := 0; i < ls.Len(); i++ { + discardUnknown(ls.Get(i).Message()) + } + } + // Handle map of messages. + case fd.IsMap(): + if fd.MapValue().Message() != nil { + ms := m.Get(fd).Map() + ms.Range(func(_ protoreflect.MapKey, v protoreflect.Value) bool { + discardUnknown(v.Message()) + return true + }) + } + } + return true + }) + + // Discard unknown fields. + if len(m.GetUnknown()) > 0 { + m.SetUnknown(nil) + } +} diff --git a/vendor/github.com/golang/protobuf/proto/extensions.go b/vendor/github.com/golang/protobuf/proto/extensions.go new file mode 100644 index 00000000..42fc120c --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/extensions.go @@ -0,0 +1,356 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "errors" + "fmt" + "reflect" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + "google.golang.org/protobuf/runtime/protoiface" + "google.golang.org/protobuf/runtime/protoimpl" +) + +type ( + // ExtensionDesc represents an extension descriptor and + // is used to interact with an extension field in a message. + // + // Variables of this type are generated in code by protoc-gen-go. + ExtensionDesc = protoimpl.ExtensionInfo + + // ExtensionRange represents a range of message extensions. + // Used in code generated by protoc-gen-go. + ExtensionRange = protoiface.ExtensionRangeV1 + + // Deprecated: Do not use; this is an internal type. + Extension = protoimpl.ExtensionFieldV1 + + // Deprecated: Do not use; this is an internal type. + XXX_InternalExtensions = protoimpl.ExtensionFields +) + +// ErrMissingExtension reports whether the extension was not present. +var ErrMissingExtension = errors.New("proto: missing extension") + +var errNotExtendable = errors.New("proto: not an extendable proto.Message") + +// HasExtension reports whether the extension field is present in m +// either as an explicitly populated field or as an unknown field. +func HasExtension(m Message, xt *ExtensionDesc) (has bool) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { + return false + } + + // Check whether any populated known field matches the field number. 
+ xtd := xt.TypeDescriptor() + if isValidExtension(mr.Descriptor(), xtd) { + has = mr.Has(xtd) + } else { + mr.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool { + has = int32(fd.Number()) == xt.Field + return !has + }) + } + + // Check whether any unknown field matches the field number. + for b := mr.GetUnknown(); !has && len(b) > 0; { + num, _, n := protowire.ConsumeField(b) + has = int32(num) == xt.Field + b = b[n:] + } + return has +} + +// ClearExtension removes the extension field from m +// either as an explicitly populated field or as an unknown field. +func ClearExtension(m Message, xt *ExtensionDesc) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { + return + } + + xtd := xt.TypeDescriptor() + if isValidExtension(mr.Descriptor(), xtd) { + mr.Clear(xtd) + } else { + mr.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool { + if int32(fd.Number()) == xt.Field { + mr.Clear(fd) + return false + } + return true + }) + } + clearUnknown(mr, fieldNum(xt.Field)) +} + +// ClearAllExtensions clears all extensions from m. +// This includes populated fields and unknown fields in the extension range. +func ClearAllExtensions(m Message) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { + return + } + + mr.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool { + if fd.IsExtension() { + mr.Clear(fd) + } + return true + }) + clearUnknown(mr, mr.Descriptor().ExtensionRanges()) +} + +// GetExtension retrieves a proto2 extended field from m. +// +// If the descriptor is type complete (i.e., ExtensionDesc.ExtensionType is non-nil), +// then GetExtension parses the encoded field and returns a Go value of the specified type. +// If the field is not present, then the default value is returned (if one is specified), +// otherwise ErrMissingExtension is reported. +// +// If the descriptor is type incomplete (i.e., ExtensionDesc.ExtensionType is nil), +// then GetExtension returns the raw encoded bytes for the extension field. +func GetExtension(m Message, xt *ExtensionDesc) (interface{}, error) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() || mr.Descriptor().ExtensionRanges().Len() == 0 { + return nil, errNotExtendable + } + + // Retrieve the unknown fields for this extension field. + var bo protoreflect.RawFields + for bi := mr.GetUnknown(); len(bi) > 0; { + num, _, n := protowire.ConsumeField(bi) + if int32(num) == xt.Field { + bo = append(bo, bi[:n]...) + } + bi = bi[n:] + } + + // For type incomplete descriptors, only retrieve the unknown fields. + if xt.ExtensionType == nil { + return []byte(bo), nil + } + + // If the extension field only exists as unknown fields, unmarshal it. + // This is rarely done since proto.Unmarshal eagerly unmarshals extensions. + xtd := xt.TypeDescriptor() + if !isValidExtension(mr.Descriptor(), xtd) { + return nil, fmt.Errorf("proto: bad extended type; %T does not extend %T", xt.ExtendedType, m) + } + if !mr.Has(xtd) && len(bo) > 0 { + m2 := mr.New() + if err := (proto.UnmarshalOptions{ + Resolver: extensionResolver{xt}, + }.Unmarshal(bo, m2.Interface())); err != nil { + return nil, err + } + if m2.Has(xtd) { + mr.Set(xtd, m2.Get(xtd)) + clearUnknown(mr, fieldNum(xt.Field)) + } + } + + // Check whether the message has the extension field set or a default. 
+ var pv protoreflect.Value + switch { + case mr.Has(xtd): + pv = mr.Get(xtd) + case xtd.HasDefault(): + pv = xtd.Default() + default: + return nil, ErrMissingExtension + } + + v := xt.InterfaceOf(pv) + rv := reflect.ValueOf(v) + if isScalarKind(rv.Kind()) { + rv2 := reflect.New(rv.Type()) + rv2.Elem().Set(rv) + v = rv2.Interface() + } + return v, nil +} + +// extensionResolver is a custom extension resolver that stores a single +// extension type that takes precedence over the global registry. +type extensionResolver struct{ xt protoreflect.ExtensionType } + +func (r extensionResolver) FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error) { + if xtd := r.xt.TypeDescriptor(); xtd.FullName() == field { + return r.xt, nil + } + return protoregistry.GlobalTypes.FindExtensionByName(field) +} + +func (r extensionResolver) FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error) { + if xtd := r.xt.TypeDescriptor(); xtd.ContainingMessage().FullName() == message && xtd.Number() == field { + return r.xt, nil + } + return protoregistry.GlobalTypes.FindExtensionByNumber(message, field) +} + +// GetExtensions returns a list of the extensions values present in m, +// corresponding with the provided list of extension descriptors, xts. +// If an extension is missing in m, the corresponding value is nil. +func GetExtensions(m Message, xts []*ExtensionDesc) ([]interface{}, error) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { + return nil, errNotExtendable + } + + vs := make([]interface{}, len(xts)) + for i, xt := range xts { + v, err := GetExtension(m, xt) + if err != nil { + if err == ErrMissingExtension { + continue + } + return vs, err + } + vs[i] = v + } + return vs, nil +} + +// SetExtension sets an extension field in m to the provided value. +func SetExtension(m Message, xt *ExtensionDesc, v interface{}) error { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() || mr.Descriptor().ExtensionRanges().Len() == 0 { + return errNotExtendable + } + + rv := reflect.ValueOf(v) + if reflect.TypeOf(v) != reflect.TypeOf(xt.ExtensionType) { + return fmt.Errorf("proto: bad extension value type. got: %T, want: %T", v, xt.ExtensionType) + } + if rv.Kind() == reflect.Ptr { + if rv.IsNil() { + return fmt.Errorf("proto: SetExtension called with nil value of type %T", v) + } + if isScalarKind(rv.Elem().Kind()) { + v = rv.Elem().Interface() + } + } + + xtd := xt.TypeDescriptor() + if !isValidExtension(mr.Descriptor(), xtd) { + return fmt.Errorf("proto: bad extended type; %T does not extend %T", xt.ExtendedType, m) + } + mr.Set(xtd, xt.ValueOf(v)) + clearUnknown(mr, fieldNum(xt.Field)) + return nil +} + +// SetRawExtension inserts b into the unknown fields of m. +// +// Deprecated: Use Message.ProtoReflect.SetUnknown instead. +func SetRawExtension(m Message, fnum int32, b []byte) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { + return + } + + // Verify that the raw field is valid. + for b0 := b; len(b0) > 0; { + num, _, n := protowire.ConsumeField(b0) + if int32(num) != fnum { + panic(fmt.Sprintf("mismatching field number: got %d, want %d", num, fnum)) + } + b0 = b0[n:] + } + + ClearExtension(m, &ExtensionDesc{Field: fnum}) + mr.SetUnknown(append(mr.GetUnknown(), b...)) +} + +// ExtensionDescs returns a list of extension descriptors found in m, +// containing descriptors for both populated extension fields in m and +// also unknown fields of m that are in the extension range. 
+// For the later case, an type incomplete descriptor is provided where only +// the ExtensionDesc.Field field is populated. +// The order of the extension descriptors is undefined. +func ExtensionDescs(m Message) ([]*ExtensionDesc, error) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() || mr.Descriptor().ExtensionRanges().Len() == 0 { + return nil, errNotExtendable + } + + // Collect a set of known extension descriptors. + extDescs := make(map[protoreflect.FieldNumber]*ExtensionDesc) + mr.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + if fd.IsExtension() { + xt := fd.(protoreflect.ExtensionTypeDescriptor) + if xd, ok := xt.Type().(*ExtensionDesc); ok { + extDescs[fd.Number()] = xd + } + } + return true + }) + + // Collect a set of unknown extension descriptors. + extRanges := mr.Descriptor().ExtensionRanges() + for b := mr.GetUnknown(); len(b) > 0; { + num, _, n := protowire.ConsumeField(b) + if extRanges.Has(num) && extDescs[num] == nil { + extDescs[num] = nil + } + b = b[n:] + } + + // Transpose the set of descriptors into a list. + var xts []*ExtensionDesc + for num, xt := range extDescs { + if xt == nil { + xt = &ExtensionDesc{Field: int32(num)} + } + xts = append(xts, xt) + } + return xts, nil +} + +// isValidExtension reports whether xtd is a valid extension descriptor for md. +func isValidExtension(md protoreflect.MessageDescriptor, xtd protoreflect.ExtensionTypeDescriptor) bool { + return xtd.ContainingMessage() == md && md.ExtensionRanges().Has(xtd.Number()) +} + +// isScalarKind reports whether k is a protobuf scalar kind (except bytes). +// This function exists for historical reasons since the representation of +// scalars differs between v1 and v2, where v1 uses *T and v2 uses T. +func isScalarKind(k reflect.Kind) bool { + switch k { + case reflect.Bool, reflect.Int32, reflect.Int64, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64, reflect.String: + return true + default: + return false + } +} + +// clearUnknown removes unknown fields from m where remover.Has reports true. +func clearUnknown(m protoreflect.Message, remover interface { + Has(protoreflect.FieldNumber) bool +}) { + var bo protoreflect.RawFields + for bi := m.GetUnknown(); len(bi) > 0; { + num, _, n := protowire.ConsumeField(bi) + if !remover.Has(num) { + bo = append(bo, bi[:n]...) + } + bi = bi[n:] + } + if bi := m.GetUnknown(); len(bi) != len(bo) { + m.SetUnknown(bo) + } +} + +type fieldNum protoreflect.FieldNumber + +func (n1 fieldNum) Has(n2 protoreflect.FieldNumber) bool { + return protoreflect.FieldNumber(n1) == n2 +} diff --git a/vendor/github.com/golang/protobuf/proto/properties.go b/vendor/github.com/golang/protobuf/proto/properties.go new file mode 100644 index 00000000..dcdc2202 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/properties.go @@ -0,0 +1,306 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "fmt" + "reflect" + "strconv" + "strings" + "sync" + + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoimpl" +) + +// StructProperties represents protocol buffer type information for a +// generated protobuf message in the open-struct API. +// +// Deprecated: Do not use. +type StructProperties struct { + // Prop are the properties for each field. 
+ // + // Fields belonging to a oneof are stored in OneofTypes instead, with a + // single Properties representing the parent oneof held here. + // + // The order of Prop matches the order of fields in the Go struct. + // Struct fields that are not related to protobufs have a "XXX_" prefix + // in the Properties.Name and must be ignored by the user. + Prop []*Properties + + // OneofTypes contains information about the oneof fields in this message. + // It is keyed by the protobuf field name. + OneofTypes map[string]*OneofProperties +} + +// Properties represents the type information for a protobuf message field. +// +// Deprecated: Do not use. +type Properties struct { + // Name is a placeholder name with little meaningful semantic value. + // If the name has an "XXX_" prefix, the entire Properties must be ignored. + Name string + // OrigName is the protobuf field name or oneof name. + OrigName string + // JSONName is the JSON name for the protobuf field. + JSONName string + // Enum is a placeholder name for enums. + // For historical reasons, this is neither the Go name for the enum, + // nor the protobuf name for the enum. + Enum string // Deprecated: Do not use. + // Weak contains the full name of the weakly referenced message. + Weak string + // Wire is a string representation of the wire type. + Wire string + // WireType is the protobuf wire type for the field. + WireType int + // Tag is the protobuf field number. + Tag int + // Required reports whether this is a required field. + Required bool + // Optional reports whether this is a optional field. + Optional bool + // Repeated reports whether this is a repeated field. + Repeated bool + // Packed reports whether this is a packed repeated field of scalars. + Packed bool + // Proto3 reports whether this field operates under the proto3 syntax. + Proto3 bool + // Oneof reports whether this field belongs within a oneof. + Oneof bool + + // Default is the default value in string form. + Default string + // HasDefault reports whether the field has a default value. + HasDefault bool + + // MapKeyProp is the properties for the key field for a map field. + MapKeyProp *Properties + // MapValProp is the properties for the value field for a map field. + MapValProp *Properties +} + +// OneofProperties represents the type information for a protobuf oneof. +// +// Deprecated: Do not use. +type OneofProperties struct { + // Type is a pointer to the generated wrapper type for the field value. + // This is nil for messages that are not in the open-struct API. + Type reflect.Type + // Field is the index into StructProperties.Prop for the containing oneof. + Field int + // Prop is the properties for the field. + Prop *Properties +} + +// String formats the properties in the protobuf struct field tag style. +func (p *Properties) String() string { + s := p.Wire + s += "," + strconv.Itoa(p.Tag) + if p.Required { + s += ",req" + } + if p.Optional { + s += ",opt" + } + if p.Repeated { + s += ",rep" + } + if p.Packed { + s += ",packed" + } + s += ",name=" + p.OrigName + if p.JSONName != "" { + s += ",json=" + p.JSONName + } + if len(p.Enum) > 0 { + s += ",enum=" + p.Enum + } + if len(p.Weak) > 0 { + s += ",weak=" + p.Weak + } + if p.Proto3 { + s += ",proto3" + } + if p.Oneof { + s += ",oneof" + } + if p.HasDefault { + s += ",def=" + p.Default + } + return s +} + +// Parse populates p by parsing a string in the protobuf struct field tag style. +func (p *Properties) Parse(tag string) { + // For example: "bytes,49,opt,name=foo,def=hello!" 
+ for len(tag) > 0 { + i := strings.IndexByte(tag, ',') + if i < 0 { + i = len(tag) + } + switch s := tag[:i]; { + case strings.HasPrefix(s, "name="): + p.OrigName = s[len("name="):] + case strings.HasPrefix(s, "json="): + p.JSONName = s[len("json="):] + case strings.HasPrefix(s, "enum="): + p.Enum = s[len("enum="):] + case strings.HasPrefix(s, "weak="): + p.Weak = s[len("weak="):] + case strings.Trim(s, "0123456789") == "": + n, _ := strconv.ParseUint(s, 10, 32) + p.Tag = int(n) + case s == "opt": + p.Optional = true + case s == "req": + p.Required = true + case s == "rep": + p.Repeated = true + case s == "varint" || s == "zigzag32" || s == "zigzag64": + p.Wire = s + p.WireType = WireVarint + case s == "fixed32": + p.Wire = s + p.WireType = WireFixed32 + case s == "fixed64": + p.Wire = s + p.WireType = WireFixed64 + case s == "bytes": + p.Wire = s + p.WireType = WireBytes + case s == "group": + p.Wire = s + p.WireType = WireStartGroup + case s == "packed": + p.Packed = true + case s == "proto3": + p.Proto3 = true + case s == "oneof": + p.Oneof = true + case strings.HasPrefix(s, "def="): + // The default tag is special in that everything afterwards is the + // default regardless of the presence of commas. + p.HasDefault = true + p.Default, i = tag[len("def="):], len(tag) + } + tag = strings.TrimPrefix(tag[i:], ",") + } +} + +// Init populates the properties from a protocol buffer struct tag. +// +// Deprecated: Do not use. +func (p *Properties) Init(typ reflect.Type, name, tag string, f *reflect.StructField) { + p.Name = name + p.OrigName = name + if tag == "" { + return + } + p.Parse(tag) + + if typ != nil && typ.Kind() == reflect.Map { + p.MapKeyProp = new(Properties) + p.MapKeyProp.Init(nil, "Key", f.Tag.Get("protobuf_key"), nil) + p.MapValProp = new(Properties) + p.MapValProp.Init(nil, "Value", f.Tag.Get("protobuf_val"), nil) + } +} + +var propertiesCache sync.Map // map[reflect.Type]*StructProperties + +// GetProperties returns the list of properties for the type represented by t, +// which must be a generated protocol buffer message in the open-struct API, +// where protobuf message fields are represented by exported Go struct fields. +// +// Deprecated: Use protobuf reflection instead. +func GetProperties(t reflect.Type) *StructProperties { + if p, ok := propertiesCache.Load(t); ok { + return p.(*StructProperties) + } + p, _ := propertiesCache.LoadOrStore(t, newProperties(t)) + return p.(*StructProperties) +} + +func newProperties(t reflect.Type) *StructProperties { + if t.Kind() != reflect.Struct { + panic(fmt.Sprintf("%v is not a generated message in the open-struct API", t)) + } + + var hasOneof bool + prop := new(StructProperties) + + // Construct a list of properties for each field in the struct. + for i := 0; i < t.NumField(); i++ { + p := new(Properties) + f := t.Field(i) + tagField := f.Tag.Get("protobuf") + p.Init(f.Type, f.Name, tagField, &f) + + tagOneof := f.Tag.Get("protobuf_oneof") + if tagOneof != "" { + hasOneof = true + p.OrigName = tagOneof + } + + // Rename unrelated struct fields with the "XXX_" prefix since so much + // user code simply checks for this to exclude special fields. + if tagField == "" && tagOneof == "" && !strings.HasPrefix(p.Name, "XXX_") { + p.Name = "XXX_" + p.Name + p.OrigName = "XXX_" + p.OrigName + } else if p.Weak != "" { + p.Name = p.OrigName // avoid possible "XXX_" prefix on weak field + } + + prop.Prop = append(prop.Prop, p) + } + + // Construct a mapping of oneof field names to properties. 
+ if hasOneof { + var oneofWrappers []interface{} + if fn, ok := reflect.PtrTo(t).MethodByName("XXX_OneofFuncs"); ok { + oneofWrappers = fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))})[3].Interface().([]interface{}) + } + if fn, ok := reflect.PtrTo(t).MethodByName("XXX_OneofWrappers"); ok { + oneofWrappers = fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))})[0].Interface().([]interface{}) + } + if m, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(protoreflect.ProtoMessage); ok { + if m, ok := m.ProtoReflect().(interface{ ProtoMessageInfo() *protoimpl.MessageInfo }); ok { + oneofWrappers = m.ProtoMessageInfo().OneofWrappers + } + } + + prop.OneofTypes = make(map[string]*OneofProperties) + for _, wrapper := range oneofWrappers { + p := &OneofProperties{ + Type: reflect.ValueOf(wrapper).Type(), // *T + Prop: new(Properties), + } + f := p.Type.Elem().Field(0) + p.Prop.Name = f.Name + p.Prop.Parse(f.Tag.Get("protobuf")) + + // Determine the struct field that contains this oneof. + // Each wrapper is assignable to exactly one parent field. + var foundOneof bool + for i := 0; i < t.NumField() && !foundOneof; i++ { + if p.Type.AssignableTo(t.Field(i).Type) { + p.Field = i + foundOneof = true + } + } + if !foundOneof { + panic(fmt.Sprintf("%v is not a generated message in the open-struct API", t)) + } + prop.OneofTypes[p.Prop.OrigName] = p + } + } + + return prop +} + +func (sp *StructProperties) Len() int { return len(sp.Prop) } +func (sp *StructProperties) Less(i, j int) bool { return false } +func (sp *StructProperties) Swap(i, j int) { return } diff --git a/vendor/github.com/golang/protobuf/proto/proto.go b/vendor/github.com/golang/protobuf/proto/proto.go new file mode 100644 index 00000000..5aee89c3 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/proto.go @@ -0,0 +1,167 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package proto provides functionality for handling protocol buffer messages. +// In particular, it provides marshaling and unmarshaling between a protobuf +// message and the binary wire format. +// +// See https://developers.google.com/protocol-buffers/docs/gotutorial for +// more information. +// +// Deprecated: Use the "google.golang.org/protobuf/proto" package instead. +package proto + +import ( + protoV2 "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoiface" + "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + ProtoPackageIsVersion1 = true + ProtoPackageIsVersion2 = true + ProtoPackageIsVersion3 = true + ProtoPackageIsVersion4 = true +) + +// GeneratedEnum is any enum type generated by protoc-gen-go +// which is a named int32 kind. +// This type exists for documentation purposes. +type GeneratedEnum interface{} + +// GeneratedMessage is any message type generated by protoc-gen-go +// which is a pointer to a named struct kind. +// This type exists for documentation purposes. +type GeneratedMessage interface{} + +// Message is a protocol buffer message. +// +// This is the v1 version of the message interface and is marginally better +// than an empty interface as it lacks any method to programatically interact +// with the contents of the message. +// +// A v2 message is declared in "google.golang.org/protobuf/proto".Message and +// exposes protobuf reflection as a first-class feature of the interface. 
+// +// To convert a v1 message to a v2 message, use the MessageV2 function. +// To convert a v2 message to a v1 message, use the MessageV1 function. +type Message = protoiface.MessageV1 + +// MessageV1 converts either a v1 or v2 message to a v1 message. +// It returns nil if m is nil. +func MessageV1(m GeneratedMessage) protoiface.MessageV1 { + return protoimpl.X.ProtoMessageV1Of(m) +} + +// MessageV2 converts either a v1 or v2 message to a v2 message. +// It returns nil if m is nil. +func MessageV2(m GeneratedMessage) protoV2.Message { + return protoimpl.X.ProtoMessageV2Of(m) +} + +// MessageReflect returns a reflective view for a message. +// It returns nil if m is nil. +func MessageReflect(m Message) protoreflect.Message { + return protoimpl.X.MessageOf(m) +} + +// Marshaler is implemented by messages that can marshal themselves. +// This interface is used by the following functions: Size, Marshal, +// Buffer.Marshal, and Buffer.EncodeMessage. +// +// Deprecated: Do not implement. +type Marshaler interface { + // Marshal formats the encoded bytes of the message. + // It should be deterministic and emit valid protobuf wire data. + // The caller takes ownership of the returned buffer. + Marshal() ([]byte, error) +} + +// Unmarshaler is implemented by messages that can unmarshal themselves. +// This interface is used by the following functions: Unmarshal, UnmarshalMerge, +// Buffer.Unmarshal, Buffer.DecodeMessage, and Buffer.DecodeGroup. +// +// Deprecated: Do not implement. +type Unmarshaler interface { + // Unmarshal parses the encoded bytes of the protobuf wire input. + // The provided buffer is only valid for during method call. + // It should not reset the receiver message. + Unmarshal([]byte) error +} + +// Merger is implemented by messages that can merge themselves. +// This interface is used by the following functions: Clone and Merge. +// +// Deprecated: Do not implement. +type Merger interface { + // Merge merges the contents of src into the receiver message. + // It clones all data structures in src such that it aliases no mutable + // memory referenced by src. + Merge(src Message) +} + +// RequiredNotSetError is an error type returned when +// marshaling or unmarshaling a message with missing required fields. +type RequiredNotSetError struct { + err error +} + +func (e *RequiredNotSetError) Error() string { + if e.err != nil { + return e.err.Error() + } + return "proto: required field not set" +} +func (e *RequiredNotSetError) RequiredNotSet() bool { + return true +} + +func checkRequiredNotSet(m protoV2.Message) error { + if err := protoV2.CheckInitialized(m); err != nil { + return &RequiredNotSetError{err: err} + } + return nil +} + +// Clone returns a deep copy of src. +func Clone(src Message) Message { + return MessageV1(protoV2.Clone(MessageV2(src))) +} + +// Merge merges src into dst, which must be messages of the same type. +// +// Populated scalar fields in src are copied to dst, while populated +// singular messages in src are merged into dst by recursively calling Merge. +// The elements of every list field in src is appended to the corresponded +// list fields in dst. The entries of every map field in src is copied into +// the corresponding map field in dst, possibly replacing existing entries. +// The unknown fields of src are appended to the unknown fields of dst. +func Merge(dst, src Message) { + protoV2.Merge(MessageV2(dst), MessageV2(src)) +} + +// Equal reports whether two messages are equal. 
+// If two messages marshal to the same bytes under deterministic serialization, +// then Equal is guaranteed to report true. +// +// Two messages are equal if they are the same protobuf message type, +// have the same set of populated known and extension field values, +// and the same set of unknown fields values. +// +// Scalar values are compared with the equivalent of the == operator in Go, +// except bytes values which are compared using bytes.Equal and +// floating point values which specially treat NaNs as equal. +// Message values are compared by recursively calling Equal. +// Lists are equal if each element value is also equal. +// Maps are equal if they have the same set of keys, where the pair of values +// for each key is also equal. +func Equal(x, y Message) bool { + return protoV2.Equal(MessageV2(x), MessageV2(y)) +} + +func isMessageSet(md protoreflect.MessageDescriptor) bool { + ms, ok := md.(interface{ IsMessageSet() bool }) + return ok && ms.IsMessageSet() +} diff --git a/vendor/github.com/golang/protobuf/proto/registry.go b/vendor/github.com/golang/protobuf/proto/registry.go new file mode 100644 index 00000000..066b4323 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/registry.go @@ -0,0 +1,317 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "bytes" + "compress/gzip" + "fmt" + "io/ioutil" + "reflect" + "strings" + "sync" + + "google.golang.org/protobuf/reflect/protodesc" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + "google.golang.org/protobuf/runtime/protoimpl" +) + +// filePath is the path to the proto source file. +type filePath = string // e.g., "google/protobuf/descriptor.proto" + +// fileDescGZIP is the compressed contents of the encoded FileDescriptorProto. +type fileDescGZIP = []byte + +var fileCache sync.Map // map[filePath]fileDescGZIP + +// RegisterFile is called from generated code to register the compressed +// FileDescriptorProto with the file path for a proto source file. +// +// Deprecated: Use protoregistry.GlobalFiles.RegisterFile instead. +func RegisterFile(s filePath, d fileDescGZIP) { + // Decompress the descriptor. + zr, err := gzip.NewReader(bytes.NewReader(d)) + if err != nil { + panic(fmt.Sprintf("proto: invalid compressed file descriptor: %v", err)) + } + b, err := ioutil.ReadAll(zr) + if err != nil { + panic(fmt.Sprintf("proto: invalid compressed file descriptor: %v", err)) + } + + // Construct a protoreflect.FileDescriptor from the raw descriptor. + // Note that DescBuilder.Build automatically registers the constructed + // file descriptor with the v2 registry. + protoimpl.DescBuilder{RawDescriptor: b}.Build() + + // Locally cache the raw descriptor form for the file. + fileCache.Store(s, d) +} + +// FileDescriptor returns the compressed FileDescriptorProto given the file path +// for a proto source file. It returns nil if not found. +// +// Deprecated: Use protoregistry.GlobalFiles.FindFileByPath instead. +func FileDescriptor(s filePath) fileDescGZIP { + if v, ok := fileCache.Load(s); ok { + return v.(fileDescGZIP) + } + + // Find the descriptor in the v2 registry. + var b []byte + if fd, _ := protoregistry.GlobalFiles.FindFileByPath(s); fd != nil { + b, _ = Marshal(protodesc.ToFileDescriptorProto(fd)) + } + + // Locally cache the raw descriptor form for the file. 
+ if len(b) > 0 { + v, _ := fileCache.LoadOrStore(s, protoimpl.X.CompressGZIP(b)) + return v.(fileDescGZIP) + } + return nil +} + +// enumName is the name of an enum. For historical reasons, the enum name is +// neither the full Go name nor the full protobuf name of the enum. +// The name is the dot-separated combination of just the proto package that the +// enum is declared within followed by the Go type name of the generated enum. +type enumName = string // e.g., "my.proto.package.GoMessage_GoEnum" + +// enumsByName maps enum values by name to their numeric counterpart. +type enumsByName = map[string]int32 + +// enumsByNumber maps enum values by number to their name counterpart. +type enumsByNumber = map[int32]string + +var enumCache sync.Map // map[enumName]enumsByName +var numFilesCache sync.Map // map[protoreflect.FullName]int + +// RegisterEnum is called from the generated code to register the mapping of +// enum value names to enum numbers for the enum identified by s. +// +// Deprecated: Use protoregistry.GlobalTypes.RegisterEnum instead. +func RegisterEnum(s enumName, _ enumsByNumber, m enumsByName) { + if _, ok := enumCache.Load(s); ok { + panic("proto: duplicate enum registered: " + s) + } + enumCache.Store(s, m) + + // This does not forward registration to the v2 registry since this API + // lacks sufficient information to construct a complete v2 enum descriptor. +} + +// EnumValueMap returns the mapping from enum value names to enum numbers for +// the enum of the given name. It returns nil if not found. +// +// Deprecated: Use protoregistry.GlobalTypes.FindEnumByName instead. +func EnumValueMap(s enumName) enumsByName { + if v, ok := enumCache.Load(s); ok { + return v.(enumsByName) + } + + // Check whether the cache is stale. If the number of files in the current + // package differs, then it means that some enums may have been recently + // registered upstream that we do not know about. + var protoPkg protoreflect.FullName + if i := strings.LastIndexByte(s, '.'); i >= 0 { + protoPkg = protoreflect.FullName(s[:i]) + } + v, _ := numFilesCache.Load(protoPkg) + numFiles, _ := v.(int) + if protoregistry.GlobalFiles.NumFilesByPackage(protoPkg) == numFiles { + return nil // cache is up-to-date; was not found earlier + } + + // Update the enum cache for all enums declared in the given proto package. + numFiles = 0 + protoregistry.GlobalFiles.RangeFilesByPackage(protoPkg, func(fd protoreflect.FileDescriptor) bool { + walkEnums(fd, func(ed protoreflect.EnumDescriptor) { + name := protoimpl.X.LegacyEnumName(ed) + if _, ok := enumCache.Load(name); !ok { + m := make(enumsByName) + evs := ed.Values() + for i := evs.Len() - 1; i >= 0; i-- { + ev := evs.Get(i) + m[string(ev.Name())] = int32(ev.Number()) + } + enumCache.LoadOrStore(name, m) + } + }) + numFiles++ + return true + }) + numFilesCache.Store(protoPkg, numFiles) + + // Check cache again for enum map. + if v, ok := enumCache.Load(s); ok { + return v.(enumsByName) + } + return nil +} + +// walkEnums recursively walks all enums declared in d. +func walkEnums(d interface { + Enums() protoreflect.EnumDescriptors + Messages() protoreflect.MessageDescriptors +}, f func(protoreflect.EnumDescriptor)) { + eds := d.Enums() + for i := eds.Len() - 1; i >= 0; i-- { + f(eds.Get(i)) + } + mds := d.Messages() + for i := mds.Len() - 1; i >= 0; i-- { + walkEnums(mds.Get(i), f) + } +} + +// messageName is the full name of protobuf message. 
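+// For example, "my.proto.package.GoMessage".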
+type messageName = string + +var messageTypeCache sync.Map // map[messageName]reflect.Type + +// RegisterType is called from generated code to register the message Go type +// for a message of the given name. +// +// Deprecated: Use protoregistry.GlobalTypes.RegisterMessage instead. +func RegisterType(m Message, s messageName) { + mt := protoimpl.X.LegacyMessageTypeOf(m, protoreflect.FullName(s)) + if err := protoregistry.GlobalTypes.RegisterMessage(mt); err != nil { + panic(err) + } + messageTypeCache.Store(s, reflect.TypeOf(m)) +} + +// RegisterMapType is called from generated code to register the Go map type +// for a protobuf message representing a map entry. +// +// Deprecated: Do not use. +func RegisterMapType(m interface{}, s messageName) { + t := reflect.TypeOf(m) + if t.Kind() != reflect.Map { + panic(fmt.Sprintf("invalid map kind: %v", t)) + } + if _, ok := messageTypeCache.Load(s); ok { + panic(fmt.Errorf("proto: duplicate proto message registered: %s", s)) + } + messageTypeCache.Store(s, t) +} + +// MessageType returns the message type for a named message. +// It returns nil if not found. +// +// Deprecated: Use protoregistry.GlobalTypes.FindMessageByName instead. +func MessageType(s messageName) reflect.Type { + if v, ok := messageTypeCache.Load(s); ok { + return v.(reflect.Type) + } + + // Derive the message type from the v2 registry. + var t reflect.Type + if mt, _ := protoregistry.GlobalTypes.FindMessageByName(protoreflect.FullName(s)); mt != nil { + t = messageGoType(mt) + } + + // If we could not get a concrete type, it is possible that it is a + // pseudo-message for a map entry. + if t == nil { + d, _ := protoregistry.GlobalFiles.FindDescriptorByName(protoreflect.FullName(s)) + if md, _ := d.(protoreflect.MessageDescriptor); md != nil && md.IsMapEntry() { + kt := goTypeForField(md.Fields().ByNumber(1)) + vt := goTypeForField(md.Fields().ByNumber(2)) + t = reflect.MapOf(kt, vt) + } + } + + // Locally cache the message type for the given name. + if t != nil { + v, _ := messageTypeCache.LoadOrStore(s, t) + return v.(reflect.Type) + } + return nil +} + +func goTypeForField(fd protoreflect.FieldDescriptor) reflect.Type { + switch k := fd.Kind(); k { + case protoreflect.EnumKind: + if et, _ := protoregistry.GlobalTypes.FindEnumByName(fd.Enum().FullName()); et != nil { + return enumGoType(et) + } + return reflect.TypeOf(protoreflect.EnumNumber(0)) + case protoreflect.MessageKind, protoreflect.GroupKind: + if mt, _ := protoregistry.GlobalTypes.FindMessageByName(fd.Message().FullName()); mt != nil { + return messageGoType(mt) + } + return reflect.TypeOf((*protoreflect.Message)(nil)).Elem() + default: + return reflect.TypeOf(fd.Default().Interface()) + } +} + +func enumGoType(et protoreflect.EnumType) reflect.Type { + return reflect.TypeOf(et.New(0)) +} + +func messageGoType(mt protoreflect.MessageType) reflect.Type { + return reflect.TypeOf(MessageV1(mt.Zero().Interface())) +} + +// MessageName returns the full protobuf name for the given message type. +// +// Deprecated: Use protoreflect.MessageDescriptor.FullName instead. +func MessageName(m Message) messageName { + if m == nil { + return "" + } + if m, ok := m.(interface{ XXX_MessageName() messageName }); ok { + return m.XXX_MessageName() + } + return messageName(protoimpl.X.MessageDescriptorOf(m).FullName()) +} + +// RegisterExtension is called from the generated code to register +// the extension descriptor. +// +// Deprecated: Use protoregistry.GlobalTypes.RegisterExtension instead. 
+func RegisterExtension(d *ExtensionDesc) { + if err := protoregistry.GlobalTypes.RegisterExtension(d); err != nil { + panic(err) + } +} + +type extensionsByNumber = map[int32]*ExtensionDesc + +var extensionCache sync.Map // map[messageName]extensionsByNumber + +// RegisteredExtensions returns a map of the registered extensions for the +// provided protobuf message, indexed by the extension field number. +// +// Deprecated: Use protoregistry.GlobalTypes.RangeExtensionsByMessage instead. +func RegisteredExtensions(m Message) extensionsByNumber { + // Check whether the cache is stale. If the number of extensions for + // the given message differs, then it means that some extensions were + // recently registered upstream that we do not know about. + s := MessageName(m) + v, _ := extensionCache.Load(s) + xs, _ := v.(extensionsByNumber) + if protoregistry.GlobalTypes.NumExtensionsByMessage(protoreflect.FullName(s)) == len(xs) { + return xs // cache is up-to-date + } + + // Cache is stale, re-compute the extensions map. + xs = make(extensionsByNumber) + protoregistry.GlobalTypes.RangeExtensionsByMessage(protoreflect.FullName(s), func(xt protoreflect.ExtensionType) bool { + if xd, ok := xt.(*ExtensionDesc); ok { + xs[int32(xt.TypeDescriptor().Number())] = xd + } else { + // TODO: This implies that the protoreflect.ExtensionType is a + // custom type not generated by protoc-gen-go. We could try and + // convert the type to an ExtensionDesc. + } + return true + }) + extensionCache.Store(s, xs) + return xs +} diff --git a/vendor/github.com/golang/protobuf/proto/text_decode.go b/vendor/github.com/golang/protobuf/proto/text_decode.go new file mode 100644 index 00000000..47eb3e44 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/text_decode.go @@ -0,0 +1,801 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "encoding" + "errors" + "fmt" + "reflect" + "strconv" + "strings" + "unicode/utf8" + + "google.golang.org/protobuf/encoding/prototext" + protoV2 "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" +) + +const wrapTextUnmarshalV2 = false + +// ParseError is returned by UnmarshalText. +type ParseError struct { + Message string + + // Deprecated: Do not use. + Line, Offset int +} + +func (e *ParseError) Error() string { + if wrapTextUnmarshalV2 { + return e.Message + } + if e.Line == 1 { + return fmt.Sprintf("line 1.%d: %v", e.Offset, e.Message) + } + return fmt.Sprintf("line %d: %v", e.Line, e.Message) +} + +// UnmarshalText parses a proto text formatted string into m. 
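+//
+// A minimal usage sketch, where pb.Example stands in for any generated
+// message type:
+//
+//	msg := new(pb.Example)
+//	if err := proto.UnmarshalText(`name: "foo"`, msg); err != nil {
+//		// handle the parse error
+//	}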
+func UnmarshalText(s string, m Message) error { + if u, ok := m.(encoding.TextUnmarshaler); ok { + return u.UnmarshalText([]byte(s)) + } + + m.Reset() + mi := MessageV2(m) + + if wrapTextUnmarshalV2 { + err := prototext.UnmarshalOptions{ + AllowPartial: true, + }.Unmarshal([]byte(s), mi) + if err != nil { + return &ParseError{Message: err.Error()} + } + return checkRequiredNotSet(mi) + } else { + if err := newTextParser(s).unmarshalMessage(mi.ProtoReflect(), ""); err != nil { + return err + } + return checkRequiredNotSet(mi) + } +} + +type textParser struct { + s string // remaining input + done bool // whether the parsing is finished (success or error) + backed bool // whether back() was called + offset, line int + cur token +} + +type token struct { + value string + err *ParseError + line int // line number + offset int // byte number from start of input, not start of line + unquoted string // the unquoted version of value, if it was a quoted string +} + +func newTextParser(s string) *textParser { + p := new(textParser) + p.s = s + p.line = 1 + p.cur.line = 1 + return p +} + +func (p *textParser) unmarshalMessage(m protoreflect.Message, terminator string) (err error) { + md := m.Descriptor() + fds := md.Fields() + + // A struct is a sequence of "name: value", terminated by one of + // '>' or '}', or the end of the input. A name may also be + // "[extension]" or "[type/url]". + // + // The whole struct can also be an expanded Any message, like: + // [type/url] < ... struct contents ... > + seen := make(map[protoreflect.FieldNumber]bool) + for { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value == terminator { + break + } + if tok.value == "[" { + if err := p.unmarshalExtensionOrAny(m, seen); err != nil { + return err + } + continue + } + + // This is a normal, non-extension field. + name := protoreflect.Name(tok.value) + fd := fds.ByName(name) + switch { + case fd == nil: + gd := fds.ByName(protoreflect.Name(strings.ToLower(string(name)))) + if gd != nil && gd.Kind() == protoreflect.GroupKind && gd.Message().Name() == name { + fd = gd + } + case fd.Kind() == protoreflect.GroupKind && fd.Message().Name() != name: + fd = nil + case fd.IsWeak() && fd.Message().IsPlaceholder(): + fd = nil + } + if fd == nil { + typeName := string(md.FullName()) + if m, ok := m.Interface().(Message); ok { + t := reflect.TypeOf(m) + if t.Kind() == reflect.Ptr { + typeName = t.Elem().String() + } + } + return p.errorf("unknown field name %q in %v", name, typeName) + } + if od := fd.ContainingOneof(); od != nil && m.WhichOneof(od) != nil { + return p.errorf("field '%s' would overwrite already parsed oneof '%s'", name, od.Name()) + } + if fd.Cardinality() != protoreflect.Repeated && seen[fd.Number()] { + return p.errorf("non-repeated field %q was repeated", fd.Name()) + } + seen[fd.Number()] = true + + // Consume any colon. + if err := p.checkForColon(fd); err != nil { + return err + } + + // Parse into the field. + v := m.Get(fd) + if !m.Has(fd) && (fd.IsList() || fd.IsMap() || fd.Message() != nil) { + v = m.Mutable(fd) + } + if v, err = p.unmarshalValue(v, fd); err != nil { + return err + } + m.Set(fd, v) + + if err := p.consumeOptionalSeparator(); err != nil { + return err + } + } + return nil +} + +func (p *textParser) unmarshalExtensionOrAny(m protoreflect.Message, seen map[protoreflect.FieldNumber]bool) error { + name, err := p.consumeExtensionOrAnyName() + if err != nil { + return err + } + + // If it contains a slash, it's an Any type URL. 
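+	// For example, "type.googleapis.com/google.protobuf.Duration".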
+ if slashIdx := strings.LastIndex(name, "/"); slashIdx >= 0 { + tok := p.next() + if tok.err != nil { + return tok.err + } + // consume an optional colon + if tok.value == ":" { + tok = p.next() + if tok.err != nil { + return tok.err + } + } + + var terminator string + switch tok.value { + case "<": + terminator = ">" + case "{": + terminator = "}" + default: + return p.errorf("expected '{' or '<', found %q", tok.value) + } + + mt, err := protoregistry.GlobalTypes.FindMessageByURL(name) + if err != nil { + return p.errorf("unrecognized message %q in google.protobuf.Any", name[slashIdx+len("/"):]) + } + m2 := mt.New() + if err := p.unmarshalMessage(m2, terminator); err != nil { + return err + } + b, err := protoV2.Marshal(m2.Interface()) + if err != nil { + return p.errorf("failed to marshal message of type %q: %v", name[slashIdx+len("/"):], err) + } + + urlFD := m.Descriptor().Fields().ByName("type_url") + valFD := m.Descriptor().Fields().ByName("value") + if seen[urlFD.Number()] { + return p.errorf("Any message unpacked multiple times, or %q already set", urlFD.Name()) + } + if seen[valFD.Number()] { + return p.errorf("Any message unpacked multiple times, or %q already set", valFD.Name()) + } + m.Set(urlFD, protoreflect.ValueOfString(name)) + m.Set(valFD, protoreflect.ValueOfBytes(b)) + seen[urlFD.Number()] = true + seen[valFD.Number()] = true + return nil + } + + xname := protoreflect.FullName(name) + xt, _ := protoregistry.GlobalTypes.FindExtensionByName(xname) + if xt == nil && isMessageSet(m.Descriptor()) { + xt, _ = protoregistry.GlobalTypes.FindExtensionByName(xname.Append("message_set_extension")) + } + if xt == nil { + return p.errorf("unrecognized extension %q", name) + } + fd := xt.TypeDescriptor() + if fd.ContainingMessage().FullName() != m.Descriptor().FullName() { + return p.errorf("extension field %q does not extend message %q", name, m.Descriptor().FullName()) + } + + if err := p.checkForColon(fd); err != nil { + return err + } + + v := m.Get(fd) + if !m.Has(fd) && (fd.IsList() || fd.IsMap() || fd.Message() != nil) { + v = m.Mutable(fd) + } + v, err = p.unmarshalValue(v, fd) + if err != nil { + return err + } + m.Set(fd, v) + return p.consumeOptionalSeparator() +} + +func (p *textParser) unmarshalValue(v protoreflect.Value, fd protoreflect.FieldDescriptor) (protoreflect.Value, error) { + tok := p.next() + if tok.err != nil { + return v, tok.err + } + if tok.value == "" { + return v, p.errorf("unexpected EOF") + } + + switch { + case fd.IsList(): + lv := v.List() + var err error + if tok.value == "[" { + // Repeated field with list notation, like [1,2,3]. + for { + vv := lv.NewElement() + vv, err = p.unmarshalSingularValue(vv, fd) + if err != nil { + return v, err + } + lv.Append(vv) + + tok := p.next() + if tok.err != nil { + return v, tok.err + } + if tok.value == "]" { + break + } + if tok.value != "," { + return v, p.errorf("Expected ']' or ',' found %q", tok.value) + } + } + return v, nil + } + + // One value of the repeated field. + p.back() + vv := lv.NewElement() + vv, err = p.unmarshalSingularValue(vv, fd) + if err != nil { + return v, err + } + lv.Append(vv) + return v, nil + case fd.IsMap(): + // The map entry should be this sequence of tokens: + // < key : KEY value : VALUE > + // However, implementations may omit key or value, and technically + // we should support them in any order. 
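+		// For example, an entry of a map<string, int32> field may appear as
+		//	<key: "foo" value: 1>
+		// or, equivalently,
+		//	{key: "foo" value: 1}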
+ var terminator string + switch tok.value { + case "<": + terminator = ">" + case "{": + terminator = "}" + default: + return v, p.errorf("expected '{' or '<', found %q", tok.value) + } + + keyFD := fd.MapKey() + valFD := fd.MapValue() + + mv := v.Map() + kv := keyFD.Default() + vv := mv.NewValue() + for { + tok := p.next() + if tok.err != nil { + return v, tok.err + } + if tok.value == terminator { + break + } + var err error + switch tok.value { + case "key": + if err := p.consumeToken(":"); err != nil { + return v, err + } + if kv, err = p.unmarshalSingularValue(kv, keyFD); err != nil { + return v, err + } + if err := p.consumeOptionalSeparator(); err != nil { + return v, err + } + case "value": + if err := p.checkForColon(valFD); err != nil { + return v, err + } + if vv, err = p.unmarshalSingularValue(vv, valFD); err != nil { + return v, err + } + if err := p.consumeOptionalSeparator(); err != nil { + return v, err + } + default: + p.back() + return v, p.errorf(`expected "key", "value", or %q, found %q`, terminator, tok.value) + } + } + mv.Set(kv.MapKey(), vv) + return v, nil + default: + p.back() + return p.unmarshalSingularValue(v, fd) + } +} + +func (p *textParser) unmarshalSingularValue(v protoreflect.Value, fd protoreflect.FieldDescriptor) (protoreflect.Value, error) { + tok := p.next() + if tok.err != nil { + return v, tok.err + } + if tok.value == "" { + return v, p.errorf("unexpected EOF") + } + + switch fd.Kind() { + case protoreflect.BoolKind: + switch tok.value { + case "true", "1", "t", "True": + return protoreflect.ValueOfBool(true), nil + case "false", "0", "f", "False": + return protoreflect.ValueOfBool(false), nil + } + case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind: + if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil { + return protoreflect.ValueOfInt32(int32(x)), nil + } + + // The C++ parser accepts large positive hex numbers that uses + // two's complement arithmetic to represent negative numbers. + // This feature is here for backwards compatibility with C++. + if strings.HasPrefix(tok.value, "0x") { + if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil { + return protoreflect.ValueOfInt32(int32(-(int64(^x) + 1))), nil + } + } + case protoreflect.Int64Kind, protoreflect.Sint64Kind, protoreflect.Sfixed64Kind: + if x, err := strconv.ParseInt(tok.value, 0, 64); err == nil { + return protoreflect.ValueOfInt64(int64(x)), nil + } + + // The C++ parser accepts large positive hex numbers that uses + // two's complement arithmetic to represent negative numbers. + // This feature is here for backwards compatibility with C++. + if strings.HasPrefix(tok.value, "0x") { + if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil { + return protoreflect.ValueOfInt64(int64(-(int64(^x) + 1))), nil + } + } + case protoreflect.Uint32Kind, protoreflect.Fixed32Kind: + if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil { + return protoreflect.ValueOfUint32(uint32(x)), nil + } + case protoreflect.Uint64Kind, protoreflect.Fixed64Kind: + if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil { + return protoreflect.ValueOfUint64(uint64(x)), nil + } + case protoreflect.FloatKind: + // Ignore 'f' for compatibility with output generated by C++, + // but don't remove 'f' when the value is "-inf" or "inf". 
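+		// For example, "1.25f" is parsed as 1.25, while "inf" is kept as-is.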
+ v := tok.value + if strings.HasSuffix(v, "f") && v != "-inf" && v != "inf" { + v = v[:len(v)-len("f")] + } + if x, err := strconv.ParseFloat(v, 32); err == nil { + return protoreflect.ValueOfFloat32(float32(x)), nil + } + case protoreflect.DoubleKind: + // Ignore 'f' for compatibility with output generated by C++, + // but don't remove 'f' when the value is "-inf" or "inf". + v := tok.value + if strings.HasSuffix(v, "f") && v != "-inf" && v != "inf" { + v = v[:len(v)-len("f")] + } + if x, err := strconv.ParseFloat(v, 64); err == nil { + return protoreflect.ValueOfFloat64(float64(x)), nil + } + case protoreflect.StringKind: + if isQuote(tok.value[0]) { + return protoreflect.ValueOfString(tok.unquoted), nil + } + case protoreflect.BytesKind: + if isQuote(tok.value[0]) { + return protoreflect.ValueOfBytes([]byte(tok.unquoted)), nil + } + case protoreflect.EnumKind: + if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil { + return protoreflect.ValueOfEnum(protoreflect.EnumNumber(x)), nil + } + vd := fd.Enum().Values().ByName(protoreflect.Name(tok.value)) + if vd != nil { + return protoreflect.ValueOfEnum(vd.Number()), nil + } + case protoreflect.MessageKind, protoreflect.GroupKind: + var terminator string + switch tok.value { + case "{": + terminator = "}" + case "<": + terminator = ">" + default: + return v, p.errorf("expected '{' or '<', found %q", tok.value) + } + err := p.unmarshalMessage(v.Message(), terminator) + return v, err + default: + panic(fmt.Sprintf("invalid kind %v", fd.Kind())) + } + return v, p.errorf("invalid %v: %v", fd.Kind(), tok.value) +} + +// Consume a ':' from the input stream (if the next token is a colon), +// returning an error if a colon is needed but not present. +func (p *textParser) checkForColon(fd protoreflect.FieldDescriptor) *ParseError { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value != ":" { + if fd.Message() == nil { + return p.errorf("expected ':', found %q", tok.value) + } + p.back() + } + return nil +} + +// consumeExtensionOrAnyName consumes an extension name or an Any type URL and +// the following ']'. It returns the name or URL consumed. +func (p *textParser) consumeExtensionOrAnyName() (string, error) { + tok := p.next() + if tok.err != nil { + return "", tok.err + } + + // If extension name or type url is quoted, it's a single token. + if len(tok.value) > 2 && isQuote(tok.value[0]) && tok.value[len(tok.value)-1] == tok.value[0] { + name, err := unquoteC(tok.value[1:len(tok.value)-1], rune(tok.value[0])) + if err != nil { + return "", err + } + return name, p.consumeToken("]") + } + + // Consume everything up to "]" + var parts []string + for tok.value != "]" { + parts = append(parts, tok.value) + tok = p.next() + if tok.err != nil { + return "", p.errorf("unrecognized type_url or extension name: %s", tok.err) + } + if p.done && tok.value != "]" { + return "", p.errorf("unclosed type_url or extension name") + } + } + return strings.Join(parts, ""), nil +} + +// consumeOptionalSeparator consumes an optional semicolon or comma. +// It is used in unmarshalMessage to provide backward compatibility. 
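+// For example, both `foo: 1; bar: 2` and `foo: 1, bar: 2` are accepted,
+// as is `foo: 1 bar: 2` with no separator at all.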
+func (p *textParser) consumeOptionalSeparator() error { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value != ";" && tok.value != "," { + p.back() + } + return nil +} + +func (p *textParser) errorf(format string, a ...interface{}) *ParseError { + pe := &ParseError{fmt.Sprintf(format, a...), p.cur.line, p.cur.offset} + p.cur.err = pe + p.done = true + return pe +} + +func (p *textParser) skipWhitespace() { + i := 0 + for i < len(p.s) && (isWhitespace(p.s[i]) || p.s[i] == '#') { + if p.s[i] == '#' { + // comment; skip to end of line or input + for i < len(p.s) && p.s[i] != '\n' { + i++ + } + if i == len(p.s) { + break + } + } + if p.s[i] == '\n' { + p.line++ + } + i++ + } + p.offset += i + p.s = p.s[i:len(p.s)] + if len(p.s) == 0 { + p.done = true + } +} + +func (p *textParser) advance() { + // Skip whitespace + p.skipWhitespace() + if p.done { + return + } + + // Start of non-whitespace + p.cur.err = nil + p.cur.offset, p.cur.line = p.offset, p.line + p.cur.unquoted = "" + switch p.s[0] { + case '<', '>', '{', '}', ':', '[', ']', ';', ',', '/': + // Single symbol + p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)] + case '"', '\'': + // Quoted string + i := 1 + for i < len(p.s) && p.s[i] != p.s[0] && p.s[i] != '\n' { + if p.s[i] == '\\' && i+1 < len(p.s) { + // skip escaped char + i++ + } + i++ + } + if i >= len(p.s) || p.s[i] != p.s[0] { + p.errorf("unmatched quote") + return + } + unq, err := unquoteC(p.s[1:i], rune(p.s[0])) + if err != nil { + p.errorf("invalid quoted string %s: %v", p.s[0:i+1], err) + return + } + p.cur.value, p.s = p.s[0:i+1], p.s[i+1:len(p.s)] + p.cur.unquoted = unq + default: + i := 0 + for i < len(p.s) && isIdentOrNumberChar(p.s[i]) { + i++ + } + if i == 0 { + p.errorf("unexpected byte %#x", p.s[0]) + return + } + p.cur.value, p.s = p.s[0:i], p.s[i:len(p.s)] + } + p.offset += len(p.cur.value) +} + +// Back off the parser by one token. Can only be done between calls to next(). +// It makes the next advance() a no-op. +func (p *textParser) back() { p.backed = true } + +// Advances the parser and returns the new current token. +func (p *textParser) next() *token { + if p.backed || p.done { + p.backed = false + return &p.cur + } + p.advance() + if p.done { + p.cur.value = "" + } else if len(p.cur.value) > 0 && isQuote(p.cur.value[0]) { + // Look for multiple quoted strings separated by whitespace, + // and concatenate them. + cat := p.cur + for { + p.skipWhitespace() + if p.done || !isQuote(p.s[0]) { + break + } + p.advance() + if p.cur.err != nil { + return &p.cur + } + cat.value += " " + p.cur.value + cat.unquoted += p.cur.unquoted + } + p.done = false // parser may have seen EOF, but we want to return cat + p.cur = cat + } + return &p.cur +} + +func (p *textParser) consumeToken(s string) error { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value != s { + p.back() + return p.errorf("expected %q, found %q", s, tok.value) + } + return nil +} + +var errBadUTF8 = errors.New("proto: bad UTF-8") + +func unquoteC(s string, quote rune) (string, error) { + // This is based on C++'s tokenizer.cc. + // Despite its name, this is *not* parsing C syntax. + // For instance, "\0" is an invalid quoted string. + + // Avoid allocation in trivial cases. 
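+	// A quoted string with no escape sequences and no embedded quote
+	// characters can be returned unchanged.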
+ simple := true + for _, r := range s { + if r == '\\' || r == quote { + simple = false + break + } + } + if simple { + return s, nil + } + + buf := make([]byte, 0, 3*len(s)/2) + for len(s) > 0 { + r, n := utf8.DecodeRuneInString(s) + if r == utf8.RuneError && n == 1 { + return "", errBadUTF8 + } + s = s[n:] + if r != '\\' { + if r < utf8.RuneSelf { + buf = append(buf, byte(r)) + } else { + buf = append(buf, string(r)...) + } + continue + } + + ch, tail, err := unescape(s) + if err != nil { + return "", err + } + buf = append(buf, ch...) + s = tail + } + return string(buf), nil +} + +func unescape(s string) (ch string, tail string, err error) { + r, n := utf8.DecodeRuneInString(s) + if r == utf8.RuneError && n == 1 { + return "", "", errBadUTF8 + } + s = s[n:] + switch r { + case 'a': + return "\a", s, nil + case 'b': + return "\b", s, nil + case 'f': + return "\f", s, nil + case 'n': + return "\n", s, nil + case 'r': + return "\r", s, nil + case 't': + return "\t", s, nil + case 'v': + return "\v", s, nil + case '?': + return "?", s, nil // trigraph workaround + case '\'', '"', '\\': + return string(r), s, nil + case '0', '1', '2', '3', '4', '5', '6', '7': + if len(s) < 2 { + return "", "", fmt.Errorf(`\%c requires 2 following digits`, r) + } + ss := string(r) + s[:2] + s = s[2:] + i, err := strconv.ParseUint(ss, 8, 8) + if err != nil { + return "", "", fmt.Errorf(`\%s contains non-octal digits`, ss) + } + return string([]byte{byte(i)}), s, nil + case 'x', 'X', 'u', 'U': + var n int + switch r { + case 'x', 'X': + n = 2 + case 'u': + n = 4 + case 'U': + n = 8 + } + if len(s) < n { + return "", "", fmt.Errorf(`\%c requires %d following digits`, r, n) + } + ss := s[:n] + s = s[n:] + i, err := strconv.ParseUint(ss, 16, 64) + if err != nil { + return "", "", fmt.Errorf(`\%c%s contains non-hexadecimal digits`, r, ss) + } + if r == 'x' || r == 'X' { + return string([]byte{byte(i)}), s, nil + } + if i > utf8.MaxRune { + return "", "", fmt.Errorf(`\%c%s is not a valid Unicode code point`, r, ss) + } + return string(rune(i)), s, nil + } + return "", "", fmt.Errorf(`unknown escape \%c`, r) +} + +func isIdentOrNumberChar(c byte) bool { + switch { + case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z': + return true + case '0' <= c && c <= '9': + return true + } + switch c { + case '-', '+', '.', '_': + return true + } + return false +} + +func isWhitespace(c byte) bool { + switch c { + case ' ', '\t', '\n', '\r': + return true + } + return false +} + +func isQuote(c byte) bool { + switch c { + case '"', '\'': + return true + } + return false +} diff --git a/vendor/github.com/golang/protobuf/proto/text_encode.go b/vendor/github.com/golang/protobuf/proto/text_encode.go new file mode 100644 index 00000000..a31134ee --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/text_encode.go @@ -0,0 +1,560 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "bytes" + "encoding" + "fmt" + "io" + "math" + "sort" + "strings" + + "google.golang.org/protobuf/encoding/prototext" + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" +) + +const wrapTextMarshalV2 = false + +// TextMarshaler is a configurable text format marshaler. 
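+//
+// A small usage sketch, where msg is any value implementing Message:
+//
+//	tm := &proto.TextMarshaler{Compact: true}
+//	s := tm.Text(msg) // single-line text form of msg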
+type TextMarshaler struct { + Compact bool // use compact text format (one line) + ExpandAny bool // expand google.protobuf.Any messages of known types +} + +// Marshal writes the proto text format of m to w. +func (tm *TextMarshaler) Marshal(w io.Writer, m Message) error { + b, err := tm.marshal(m) + if len(b) > 0 { + if _, err := w.Write(b); err != nil { + return err + } + } + return err +} + +// Text returns a proto text formatted string of m. +func (tm *TextMarshaler) Text(m Message) string { + b, _ := tm.marshal(m) + return string(b) +} + +func (tm *TextMarshaler) marshal(m Message) ([]byte, error) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { + return []byte(""), nil + } + + if wrapTextMarshalV2 { + if m, ok := m.(encoding.TextMarshaler); ok { + return m.MarshalText() + } + + opts := prototext.MarshalOptions{ + AllowPartial: true, + EmitUnknown: true, + } + if !tm.Compact { + opts.Indent = " " + } + if !tm.ExpandAny { + opts.Resolver = (*protoregistry.Types)(nil) + } + return opts.Marshal(mr.Interface()) + } else { + w := &textWriter{ + compact: tm.Compact, + expandAny: tm.ExpandAny, + complete: true, + } + + if m, ok := m.(encoding.TextMarshaler); ok { + b, err := m.MarshalText() + if err != nil { + return nil, err + } + w.Write(b) + return w.buf, nil + } + + err := w.writeMessage(mr) + return w.buf, err + } +} + +var ( + defaultTextMarshaler = TextMarshaler{} + compactTextMarshaler = TextMarshaler{Compact: true} +) + +// MarshalText writes the proto text format of m to w. +func MarshalText(w io.Writer, m Message) error { return defaultTextMarshaler.Marshal(w, m) } + +// MarshalTextString returns a proto text formatted string of m. +func MarshalTextString(m Message) string { return defaultTextMarshaler.Text(m) } + +// CompactText writes the compact proto text format of m to w. +func CompactText(w io.Writer, m Message) error { return compactTextMarshaler.Marshal(w, m) } + +// CompactTextString returns a compact proto text formatted string of m. +func CompactTextString(m Message) string { return compactTextMarshaler.Text(m) } + +var ( + newline = []byte("\n") + endBraceNewline = []byte("}\n") + posInf = []byte("inf") + negInf = []byte("-inf") + nan = []byte("nan") +) + +// textWriter is an io.Writer that tracks its indentation level. +type textWriter struct { + compact bool // same as TextMarshaler.Compact + expandAny bool // same as TextMarshaler.ExpandAny + complete bool // whether the current position is a complete line + indent int // indentation level; never negative + buf []byte +} + +func (w *textWriter) Write(p []byte) (n int, _ error) { + newlines := bytes.Count(p, newline) + if newlines == 0 { + if !w.compact && w.complete { + w.writeIndent() + } + w.buf = append(w.buf, p...) + w.complete = false + return len(p), nil + } + + frags := bytes.SplitN(p, newline, newlines+1) + if w.compact { + for i, frag := range frags { + if i > 0 { + w.buf = append(w.buf, ' ') + n++ + } + w.buf = append(w.buf, frag...) + n += len(frag) + } + return n, nil + } + + for i, frag := range frags { + if w.complete { + w.writeIndent() + } + w.buf = append(w.buf, frag...) 
+ n += len(frag) + if i+1 < len(frags) { + w.buf = append(w.buf, '\n') + n++ + } + } + w.complete = len(frags[len(frags)-1]) == 0 + return n, nil +} + +func (w *textWriter) WriteByte(c byte) error { + if w.compact && c == '\n' { + c = ' ' + } + if !w.compact && w.complete { + w.writeIndent() + } + w.buf = append(w.buf, c) + w.complete = c == '\n' + return nil +} + +func (w *textWriter) writeName(fd protoreflect.FieldDescriptor) { + if !w.compact && w.complete { + w.writeIndent() + } + w.complete = false + + if fd.Kind() != protoreflect.GroupKind { + w.buf = append(w.buf, fd.Name()...) + w.WriteByte(':') + } else { + // Use message type name for group field name. + w.buf = append(w.buf, fd.Message().Name()...) + } + + if !w.compact { + w.WriteByte(' ') + } +} + +func requiresQuotes(u string) bool { + // When type URL contains any characters except [0-9A-Za-z./\-]*, it must be quoted. + for _, ch := range u { + switch { + case ch == '.' || ch == '/' || ch == '_': + continue + case '0' <= ch && ch <= '9': + continue + case 'A' <= ch && ch <= 'Z': + continue + case 'a' <= ch && ch <= 'z': + continue + default: + return true + } + } + return false +} + +// writeProto3Any writes an expanded google.protobuf.Any message. +// +// It returns (false, nil) if sv value can't be unmarshaled (e.g. because +// required messages are not linked in). +// +// It returns (true, error) when sv was written in expanded format or an error +// was encountered. +func (w *textWriter) writeProto3Any(m protoreflect.Message) (bool, error) { + md := m.Descriptor() + fdURL := md.Fields().ByName("type_url") + fdVal := md.Fields().ByName("value") + + url := m.Get(fdURL).String() + mt, err := protoregistry.GlobalTypes.FindMessageByURL(url) + if err != nil { + return false, nil + } + + b := m.Get(fdVal).Bytes() + m2 := mt.New() + if err := proto.Unmarshal(b, m2.Interface()); err != nil { + return false, nil + } + w.Write([]byte("[")) + if requiresQuotes(url) { + w.writeQuotedString(url) + } else { + w.Write([]byte(url)) + } + if w.compact { + w.Write([]byte("]:<")) + } else { + w.Write([]byte("]: <\n")) + w.indent++ + } + if err := w.writeMessage(m2); err != nil { + return true, err + } + if w.compact { + w.Write([]byte("> ")) + } else { + w.indent-- + w.Write([]byte(">\n")) + } + return true, nil +} + +func (w *textWriter) writeMessage(m protoreflect.Message) error { + md := m.Descriptor() + if w.expandAny && md.FullName() == "google.protobuf.Any" { + if canExpand, err := w.writeProto3Any(m); canExpand { + return err + } + } + + fds := md.Fields() + for i := 0; i < fds.Len(); { + fd := fds.Get(i) + if od := fd.ContainingOneof(); od != nil { + fd = m.WhichOneof(od) + i += od.Fields().Len() + } else { + i++ + } + if fd == nil || !m.Has(fd) { + continue + } + + switch { + case fd.IsList(): + lv := m.Get(fd).List() + for j := 0; j < lv.Len(); j++ { + w.writeName(fd) + v := lv.Get(j) + if err := w.writeSingularValue(v, fd); err != nil { + return err + } + w.WriteByte('\n') + } + case fd.IsMap(): + kfd := fd.MapKey() + vfd := fd.MapValue() + mv := m.Get(fd).Map() + + type entry struct{ key, val protoreflect.Value } + var entries []entry + mv.Range(func(k protoreflect.MapKey, v protoreflect.Value) bool { + entries = append(entries, entry{k.Value(), v}) + return true + }) + sort.Slice(entries, func(i, j int) bool { + switch kfd.Kind() { + case protoreflect.BoolKind: + return !entries[i].key.Bool() && entries[j].key.Bool() + case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind, protoreflect.Int64Kind, 
protoreflect.Sint64Kind, protoreflect.Sfixed64Kind: + return entries[i].key.Int() < entries[j].key.Int() + case protoreflect.Uint32Kind, protoreflect.Fixed32Kind, protoreflect.Uint64Kind, protoreflect.Fixed64Kind: + return entries[i].key.Uint() < entries[j].key.Uint() + case protoreflect.StringKind: + return entries[i].key.String() < entries[j].key.String() + default: + panic("invalid kind") + } + }) + for _, entry := range entries { + w.writeName(fd) + w.WriteByte('<') + if !w.compact { + w.WriteByte('\n') + } + w.indent++ + w.writeName(kfd) + if err := w.writeSingularValue(entry.key, kfd); err != nil { + return err + } + w.WriteByte('\n') + w.writeName(vfd) + if err := w.writeSingularValue(entry.val, vfd); err != nil { + return err + } + w.WriteByte('\n') + w.indent-- + w.WriteByte('>') + w.WriteByte('\n') + } + default: + w.writeName(fd) + if err := w.writeSingularValue(m.Get(fd), fd); err != nil { + return err + } + w.WriteByte('\n') + } + } + + if b := m.GetUnknown(); len(b) > 0 { + w.writeUnknownFields(b) + } + return w.writeExtensions(m) +} + +func (w *textWriter) writeSingularValue(v protoreflect.Value, fd protoreflect.FieldDescriptor) error { + switch fd.Kind() { + case protoreflect.FloatKind, protoreflect.DoubleKind: + switch vf := v.Float(); { + case math.IsInf(vf, +1): + w.Write(posInf) + case math.IsInf(vf, -1): + w.Write(negInf) + case math.IsNaN(vf): + w.Write(nan) + default: + fmt.Fprint(w, v.Interface()) + } + case protoreflect.StringKind: + // NOTE: This does not validate UTF-8 for historical reasons. + w.writeQuotedString(string(v.String())) + case protoreflect.BytesKind: + w.writeQuotedString(string(v.Bytes())) + case protoreflect.MessageKind, protoreflect.GroupKind: + var bra, ket byte = '<', '>' + if fd.Kind() == protoreflect.GroupKind { + bra, ket = '{', '}' + } + w.WriteByte(bra) + if !w.compact { + w.WriteByte('\n') + } + w.indent++ + m := v.Message() + if m2, ok := m.Interface().(encoding.TextMarshaler); ok { + b, err := m2.MarshalText() + if err != nil { + return err + } + w.Write(b) + } else { + w.writeMessage(m) + } + w.indent-- + w.WriteByte(ket) + case protoreflect.EnumKind: + if ev := fd.Enum().Values().ByNumber(v.Enum()); ev != nil { + fmt.Fprint(w, ev.Name()) + } else { + fmt.Fprint(w, v.Enum()) + } + default: + fmt.Fprint(w, v.Interface()) + } + return nil +} + +// writeQuotedString writes a quoted string in the protocol buffer text format. +func (w *textWriter) writeQuotedString(s string) { + w.WriteByte('"') + for i := 0; i < len(s); i++ { + switch c := s[i]; c { + case '\n': + w.buf = append(w.buf, `\n`...) + case '\r': + w.buf = append(w.buf, `\r`...) + case '\t': + w.buf = append(w.buf, `\t`...) + case '"': + w.buf = append(w.buf, `\"`...) + case '\\': + w.buf = append(w.buf, `\\`...) + default: + if isPrint := c >= 0x20 && c < 0x7f; isPrint { + w.buf = append(w.buf, c) + } else { + w.buf = append(w.buf, fmt.Sprintf(`\%03o`, c)...) 
+ } + } + } + w.WriteByte('"') +} + +func (w *textWriter) writeUnknownFields(b []byte) { + if !w.compact { + fmt.Fprintf(w, "/* %d unknown bytes */\n", len(b)) + } + + for len(b) > 0 { + num, wtyp, n := protowire.ConsumeTag(b) + if n < 0 { + return + } + b = b[n:] + + if wtyp == protowire.EndGroupType { + w.indent-- + w.Write(endBraceNewline) + continue + } + fmt.Fprint(w, num) + if wtyp != protowire.StartGroupType { + w.WriteByte(':') + } + if !w.compact || wtyp == protowire.StartGroupType { + w.WriteByte(' ') + } + switch wtyp { + case protowire.VarintType: + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return + } + b = b[n:] + fmt.Fprint(w, v) + case protowire.Fixed32Type: + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return + } + b = b[n:] + fmt.Fprint(w, v) + case protowire.Fixed64Type: + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return + } + b = b[n:] + fmt.Fprint(w, v) + case protowire.BytesType: + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return + } + b = b[n:] + fmt.Fprintf(w, "%q", v) + case protowire.StartGroupType: + w.WriteByte('{') + w.indent++ + default: + fmt.Fprintf(w, "/* unknown wire type %d */", wtyp) + } + w.WriteByte('\n') + } +} + +// writeExtensions writes all the extensions in m. +func (w *textWriter) writeExtensions(m protoreflect.Message) error { + md := m.Descriptor() + if md.ExtensionRanges().Len() == 0 { + return nil + } + + type ext struct { + desc protoreflect.FieldDescriptor + val protoreflect.Value + } + var exts []ext + m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + if fd.IsExtension() { + exts = append(exts, ext{fd, v}) + } + return true + }) + sort.Slice(exts, func(i, j int) bool { + return exts[i].desc.Number() < exts[j].desc.Number() + }) + + for _, ext := range exts { + // For message set, use the name of the message as the extension name. + name := string(ext.desc.FullName()) + if isMessageSet(ext.desc.ContainingMessage()) { + name = strings.TrimSuffix(name, ".message_set_extension") + } + + if !ext.desc.IsList() { + if err := w.writeSingularExtension(name, ext.val, ext.desc); err != nil { + return err + } + } else { + lv := ext.val.List() + for i := 0; i < lv.Len(); i++ { + if err := w.writeSingularExtension(name, lv.Get(i), ext.desc); err != nil { + return err + } + } + } + } + return nil +} + +func (w *textWriter) writeSingularExtension(name string, v protoreflect.Value, fd protoreflect.FieldDescriptor) error { + fmt.Fprintf(w, "[%s]:", name) + if !w.compact { + w.WriteByte(' ') + } + if err := w.writeSingularValue(v, fd); err != nil { + return err + } + w.WriteByte('\n') + return nil +} + +func (w *textWriter) writeIndent() { + if !w.complete { + return + } + for i := 0; i < w.indent*2; i++ { + w.buf = append(w.buf, ' ') + } + w.complete = false +} diff --git a/vendor/github.com/golang/protobuf/proto/wire.go b/vendor/github.com/golang/protobuf/proto/wire.go new file mode 100644 index 00000000..d7c28da5 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/wire.go @@ -0,0 +1,78 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + protoV2 "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/runtime/protoiface" +) + +// Size returns the size in bytes of the wire-format encoding of m. 
+func Size(m Message) int { + if m == nil { + return 0 + } + mi := MessageV2(m) + return protoV2.Size(mi) +} + +// Marshal returns the wire-format encoding of m. +func Marshal(m Message) ([]byte, error) { + b, err := marshalAppend(nil, m, false) + if b == nil { + b = zeroBytes + } + return b, err +} + +var zeroBytes = make([]byte, 0, 0) + +func marshalAppend(buf []byte, m Message, deterministic bool) ([]byte, error) { + if m == nil { + return nil, ErrNil + } + mi := MessageV2(m) + nbuf, err := protoV2.MarshalOptions{ + Deterministic: deterministic, + AllowPartial: true, + }.MarshalAppend(buf, mi) + if err != nil { + return buf, err + } + if len(buf) == len(nbuf) { + if !mi.ProtoReflect().IsValid() { + return buf, ErrNil + } + } + return nbuf, checkRequiredNotSet(mi) +} + +// Unmarshal parses a wire-format message in b and places the decoded results in m. +// +// Unmarshal resets m before starting to unmarshal, so any existing data in m is always +// removed. Use UnmarshalMerge to preserve and append to existing data. +func Unmarshal(b []byte, m Message) error { + m.Reset() + return UnmarshalMerge(b, m) +} + +// UnmarshalMerge parses a wire-format message in b and places the decoded results in m. +func UnmarshalMerge(b []byte, m Message) error { + mi := MessageV2(m) + out, err := protoV2.UnmarshalOptions{ + AllowPartial: true, + Merge: true, + }.UnmarshalState(protoiface.UnmarshalInput{ + Buf: b, + Message: mi.ProtoReflect(), + }) + if err != nil { + return err + } + if out.Flags&protoiface.UnmarshalInitialized > 0 { + return nil + } + return checkRequiredNotSet(mi) +} diff --git a/vendor/github.com/golang/protobuf/proto/wrappers.go b/vendor/github.com/golang/protobuf/proto/wrappers.go new file mode 100644 index 00000000..398e3485 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/wrappers.go @@ -0,0 +1,34 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +// Bool stores v in a new bool value and returns a pointer to it. +func Bool(v bool) *bool { return &v } + +// Int stores v in a new int32 value and returns a pointer to it. +// +// Deprecated: Use Int32 instead. +func Int(v int) *int32 { return Int32(int32(v)) } + +// Int32 stores v in a new int32 value and returns a pointer to it. +func Int32(v int32) *int32 { return &v } + +// Int64 stores v in a new int64 value and returns a pointer to it. +func Int64(v int64) *int64 { return &v } + +// Uint32 stores v in a new uint32 value and returns a pointer to it. +func Uint32(v uint32) *uint32 { return &v } + +// Uint64 stores v in a new uint64 value and returns a pointer to it. +func Uint64(v uint64) *uint64 { return &v } + +// Float32 stores v in a new float32 value and returns a pointer to it. +func Float32(v float32) *float32 { return &v } + +// Float64 stores v in a new float64 value and returns a pointer to it. +func Float64(v float64) *float64 { return &v } + +// String stores v in a new string value and returns a pointer to it. +func String(v string) *string { return &v } diff --git a/vendor/github.com/golang/protobuf/ptypes/any.go b/vendor/github.com/golang/protobuf/ptypes/any.go new file mode 100644 index 00000000..85f9f573 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/any.go @@ -0,0 +1,179 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package ptypes + +import ( + "fmt" + "strings" + + "github.com/golang/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + + anypb "github.com/golang/protobuf/ptypes/any" +) + +const urlPrefix = "type.googleapis.com/" + +// AnyMessageName returns the message name contained in an anypb.Any message. +// Most type assertions should use the Is function instead. +// +// Deprecated: Call the any.MessageName method instead. +func AnyMessageName(any *anypb.Any) (string, error) { + name, err := anyMessageName(any) + return string(name), err +} +func anyMessageName(any *anypb.Any) (protoreflect.FullName, error) { + if any == nil { + return "", fmt.Errorf("message is nil") + } + name := protoreflect.FullName(any.TypeUrl) + if i := strings.LastIndex(any.TypeUrl, "/"); i >= 0 { + name = name[i+len("/"):] + } + if !name.IsValid() { + return "", fmt.Errorf("message type url %q is invalid", any.TypeUrl) + } + return name, nil +} + +// MarshalAny marshals the given message m into an anypb.Any message. +// +// Deprecated: Call the anypb.New function instead. +func MarshalAny(m proto.Message) (*anypb.Any, error) { + switch dm := m.(type) { + case DynamicAny: + m = dm.Message + case *DynamicAny: + if dm == nil { + return nil, proto.ErrNil + } + m = dm.Message + } + b, err := proto.Marshal(m) + if err != nil { + return nil, err + } + return &anypb.Any{TypeUrl: urlPrefix + proto.MessageName(m), Value: b}, nil +} + +// Empty returns a new message of the type specified in an anypb.Any message. +// It returns protoregistry.NotFound if the corresponding message type could not +// be resolved in the global registry. +// +// Deprecated: Use protoregistry.GlobalTypes.FindMessageByName instead +// to resolve the message name and create a new instance of it. +func Empty(any *anypb.Any) (proto.Message, error) { + name, err := anyMessageName(any) + if err != nil { + return nil, err + } + mt, err := protoregistry.GlobalTypes.FindMessageByName(name) + if err != nil { + return nil, err + } + return proto.MessageV1(mt.New().Interface()), nil +} + +// UnmarshalAny unmarshals the encoded value contained in the anypb.Any message +// into the provided message m. It returns an error if the target message +// does not match the type in the Any message or if an unmarshal error occurs. +// +// The target message m may be a *DynamicAny message. If the underlying message +// type could not be resolved, then this returns protoregistry.NotFound. +// +// Deprecated: Call the any.UnmarshalTo method instead. +func UnmarshalAny(any *anypb.Any, m proto.Message) error { + if dm, ok := m.(*DynamicAny); ok { + if dm.Message == nil { + var err error + dm.Message, err = Empty(any) + if err != nil { + return err + } + } + m = dm.Message + } + + anyName, err := AnyMessageName(any) + if err != nil { + return err + } + msgName := proto.MessageName(m) + if anyName != msgName { + return fmt.Errorf("mismatched message type: got %q want %q", anyName, msgName) + } + return proto.Unmarshal(any.Value, m) +} + +// Is reports whether the Any message contains a message of the specified type. +// +// Deprecated: Call the any.MessageIs method instead. 
+func Is(any *anypb.Any, m proto.Message) bool { + if any == nil || m == nil { + return false + } + name := proto.MessageName(m) + if !strings.HasSuffix(any.TypeUrl, name) { + return false + } + return len(any.TypeUrl) == len(name) || any.TypeUrl[len(any.TypeUrl)-len(name)-1] == '/' +} + +// DynamicAny is a value that can be passed to UnmarshalAny to automatically +// allocate a proto.Message for the type specified in an anypb.Any message. +// The allocated message is stored in the embedded proto.Message. +// +// Example: +// var x ptypes.DynamicAny +// if err := ptypes.UnmarshalAny(a, &x); err != nil { ... } +// fmt.Printf("unmarshaled message: %v", x.Message) +// +// Deprecated: Use the any.UnmarshalNew method instead to unmarshal +// the any message contents into a new instance of the underlying message. +type DynamicAny struct{ proto.Message } + +func (m DynamicAny) String() string { + if m.Message == nil { + return "" + } + return m.Message.String() +} +func (m DynamicAny) Reset() { + if m.Message == nil { + return + } + m.Message.Reset() +} +func (m DynamicAny) ProtoMessage() { + return +} +func (m DynamicAny) ProtoReflect() protoreflect.Message { + if m.Message == nil { + return nil + } + return dynamicAny{proto.MessageReflect(m.Message)} +} + +type dynamicAny struct{ protoreflect.Message } + +func (m dynamicAny) Type() protoreflect.MessageType { + return dynamicAnyType{m.Message.Type()} +} +func (m dynamicAny) New() protoreflect.Message { + return dynamicAnyType{m.Message.Type()}.New() +} +func (m dynamicAny) Interface() protoreflect.ProtoMessage { + return DynamicAny{proto.MessageV1(m.Message.Interface())} +} + +type dynamicAnyType struct{ protoreflect.MessageType } + +func (t dynamicAnyType) New() protoreflect.Message { + return dynamicAny{t.MessageType.New()} +} +func (t dynamicAnyType) Zero() protoreflect.Message { + return dynamicAny{t.MessageType.Zero()} +} diff --git a/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go b/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go new file mode 100644 index 00000000..0ef27d33 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go @@ -0,0 +1,62 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: github.com/golang/protobuf/ptypes/any/any.proto + +package any + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + anypb "google.golang.org/protobuf/types/known/anypb" + reflect "reflect" +) + +// Symbols defined in public import of google/protobuf/any.proto. 
+ +type Any = anypb.Any + +var File_github_com_golang_protobuf_ptypes_any_any_proto protoreflect.FileDescriptor + +var file_github_com_golang_protobuf_ptypes_any_any_proto_rawDesc = []byte{ + 0x0a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, + 0x61, 0x6e, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x61, 0x6e, 0x79, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x42, 0x2b, 0x5a, 0x29, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, 0x61, 0x6e, + 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, 0x70, 0x65, + 0x73, 0x2f, 0x61, 0x6e, 0x79, 0x3b, 0x61, 0x6e, 0x79, 0x50, 0x00, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, +} + +var file_github_com_golang_protobuf_ptypes_any_any_proto_goTypes = []interface{}{} +var file_github_com_golang_protobuf_ptypes_any_any_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_github_com_golang_protobuf_ptypes_any_any_proto_init() } +func file_github_com_golang_protobuf_ptypes_any_any_proto_init() { + if File_github_com_golang_protobuf_ptypes_any_any_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_github_com_golang_protobuf_ptypes_any_any_proto_rawDesc, + NumEnums: 0, + NumMessages: 0, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_github_com_golang_protobuf_ptypes_any_any_proto_goTypes, + DependencyIndexes: file_github_com_golang_protobuf_ptypes_any_any_proto_depIdxs, + }.Build() + File_github_com_golang_protobuf_ptypes_any_any_proto = out.File + file_github_com_golang_protobuf_ptypes_any_any_proto_rawDesc = nil + file_github_com_golang_protobuf_ptypes_any_any_proto_goTypes = nil + file_github_com_golang_protobuf_ptypes_any_any_proto_depIdxs = nil +} diff --git a/vendor/github.com/golang/protobuf/ptypes/doc.go b/vendor/github.com/golang/protobuf/ptypes/doc.go new file mode 100644 index 00000000..d3c33259 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/doc.go @@ -0,0 +1,10 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ptypes provides functionality for interacting with well-known types. +// +// Deprecated: Well-known types have specialized functionality directly +// injected into the generated packages for each message type. +// See the deprecation notice for each function for the suggested alternative. +package ptypes diff --git a/vendor/github.com/golang/protobuf/ptypes/duration.go b/vendor/github.com/golang/protobuf/ptypes/duration.go new file mode 100644 index 00000000..b2b55dd8 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/duration.go @@ -0,0 +1,76 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package ptypes + +import ( + "errors" + "fmt" + "time" + + durationpb "github.com/golang/protobuf/ptypes/duration" +) + +// Range of google.protobuf.Duration as specified in duration.proto. +// This is about 10,000 years in seconds. +const ( + maxSeconds = int64(10000 * 365.25 * 24 * 60 * 60) + minSeconds = -maxSeconds +) + +// Duration converts a durationpb.Duration to a time.Duration. +// Duration returns an error if dur is invalid or overflows a time.Duration. +// +// Deprecated: Call the dur.AsDuration and dur.CheckValid methods instead. +func Duration(dur *durationpb.Duration) (time.Duration, error) { + if err := validateDuration(dur); err != nil { + return 0, err + } + d := time.Duration(dur.Seconds) * time.Second + if int64(d/time.Second) != dur.Seconds { + return 0, fmt.Errorf("duration: %v is out of range for time.Duration", dur) + } + if dur.Nanos != 0 { + d += time.Duration(dur.Nanos) * time.Nanosecond + if (d < 0) != (dur.Nanos < 0) { + return 0, fmt.Errorf("duration: %v is out of range for time.Duration", dur) + } + } + return d, nil +} + +// DurationProto converts a time.Duration to a durationpb.Duration. +// +// Deprecated: Call the durationpb.New function instead. +func DurationProto(d time.Duration) *durationpb.Duration { + nanos := d.Nanoseconds() + secs := nanos / 1e9 + nanos -= secs * 1e9 + return &durationpb.Duration{ + Seconds: int64(secs), + Nanos: int32(nanos), + } +} + +// validateDuration determines whether the durationpb.Duration is valid +// according to the definition in google/protobuf/duration.proto. +// A valid durpb.Duration may still be too large to fit into a time.Duration +// Note that the range of durationpb.Duration is about 10,000 years, +// while the range of time.Duration is about 290 years. +func validateDuration(dur *durationpb.Duration) error { + if dur == nil { + return errors.New("duration: nil Duration") + } + if dur.Seconds < minSeconds || dur.Seconds > maxSeconds { + return fmt.Errorf("duration: %v: seconds out of range", dur) + } + if dur.Nanos <= -1e9 || dur.Nanos >= 1e9 { + return fmt.Errorf("duration: %v: nanos out of range", dur) + } + // Seconds and Nanos must have the same sign, unless d.Nanos is zero. + if (dur.Seconds < 0 && dur.Nanos > 0) || (dur.Seconds > 0 && dur.Nanos < 0) { + return fmt.Errorf("duration: %v: seconds and nanos have different signs", dur) + } + return nil +} diff --git a/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go b/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go new file mode 100644 index 00000000..d0079ee3 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go @@ -0,0 +1,63 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: github.com/golang/protobuf/ptypes/duration/duration.proto + +package duration + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + durationpb "google.golang.org/protobuf/types/known/durationpb" + reflect "reflect" +) + +// Symbols defined in public import of google/protobuf/duration.proto. 
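A short usage sketch (not part of the patch) of the `durationpb` replacements named in the deprecation comments above:

```go
package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	// durationpb.New replaces ptypes.DurationProto.
	dur := durationpb.New(1500 * time.Millisecond)

	// CheckValid covers the validateDuration step; AsDuration replaces ptypes.Duration.
	if err := dur.CheckValid(); err != nil {
		fmt.Println("invalid duration:", err)
		return
	}
	fmt.Println(dur.AsDuration()) // 1.5s
}
```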
+ +type Duration = durationpb.Duration + +var File_github_com_golang_protobuf_ptypes_duration_duration_proto protoreflect.FileDescriptor + +var file_github_com_golang_protobuf_ptypes_duration_duration_proto_rawDesc = []byte{ + 0x0a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, + 0x61, 0x6e, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x64, 0x75, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x42, 0x35, 0x5a, 0x33, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, 0x70, 0x65, 0x73, + 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x3b, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x50, 0x00, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var file_github_com_golang_protobuf_ptypes_duration_duration_proto_goTypes = []interface{}{} +var file_github_com_golang_protobuf_ptypes_duration_duration_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_github_com_golang_protobuf_ptypes_duration_duration_proto_init() } +func file_github_com_golang_protobuf_ptypes_duration_duration_proto_init() { + if File_github_com_golang_protobuf_ptypes_duration_duration_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_github_com_golang_protobuf_ptypes_duration_duration_proto_rawDesc, + NumEnums: 0, + NumMessages: 0, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_github_com_golang_protobuf_ptypes_duration_duration_proto_goTypes, + DependencyIndexes: file_github_com_golang_protobuf_ptypes_duration_duration_proto_depIdxs, + }.Build() + File_github_com_golang_protobuf_ptypes_duration_duration_proto = out.File + file_github_com_golang_protobuf_ptypes_duration_duration_proto_rawDesc = nil + file_github_com_golang_protobuf_ptypes_duration_duration_proto_goTypes = nil + file_github_com_golang_protobuf_ptypes_duration_duration_proto_depIdxs = nil +} diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp.go b/vendor/github.com/golang/protobuf/ptypes/timestamp.go new file mode 100644 index 00000000..8368a3f7 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp.go @@ -0,0 +1,112 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ptypes + +import ( + "errors" + "fmt" + "time" + + timestamppb "github.com/golang/protobuf/ptypes/timestamp" +) + +// Range of google.protobuf.Duration as specified in timestamp.proto. +const ( + // Seconds field of the earliest valid Timestamp. + // This is time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC).Unix(). + minValidSeconds = -62135596800 + // Seconds field just after the latest valid Timestamp. 
+	// This is time.Date(10000, 1, 1, 0, 0, 0, 0, time.UTC).Unix().
+	maxValidSeconds = 253402300800
+)
+
+// Timestamp converts a timestamppb.Timestamp to a time.Time.
+// It returns an error if the argument is invalid.
+//
+// Unlike most Go functions, if Timestamp returns an error, the first return
+// value is not the zero time.Time. Instead, it is the value obtained from the
+// time.Unix function when passed the contents of the Timestamp, in the UTC
+// locale. This may or may not be a meaningful time; many invalid Timestamps
+// do map to valid time.Times.
+//
+// A nil Timestamp returns an error. The first return value in that case is
+// undefined.
+//
+// Deprecated: Call the ts.AsTime and ts.CheckValid methods instead.
+func Timestamp(ts *timestamppb.Timestamp) (time.Time, error) {
+	// Don't return the zero value on error, because corresponds to a valid
+	// timestamp. Instead return whatever time.Unix gives us.
+	var t time.Time
+	if ts == nil {
+		t = time.Unix(0, 0).UTC() // treat nil like the empty Timestamp
+	} else {
+		t = time.Unix(ts.Seconds, int64(ts.Nanos)).UTC()
+	}
+	return t, validateTimestamp(ts)
+}
+
+// TimestampNow returns a google.protobuf.Timestamp for the current time.
+//
+// Deprecated: Call the timestamppb.Now function instead.
+func TimestampNow() *timestamppb.Timestamp {
+	ts, err := TimestampProto(time.Now())
+	if err != nil {
+		panic("ptypes: time.Now() out of Timestamp range")
+	}
+	return ts
+}
+
+// TimestampProto converts the time.Time to a google.protobuf.Timestamp proto.
+// It returns an error if the resulting Timestamp is invalid.
+//
+// Deprecated: Call the timestamppb.New function instead.
+func TimestampProto(t time.Time) (*timestamppb.Timestamp, error) {
+	ts := &timestamppb.Timestamp{
+		Seconds: t.Unix(),
+		Nanos:   int32(t.Nanosecond()),
+	}
+	if err := validateTimestamp(ts); err != nil {
+		return nil, err
+	}
+	return ts, nil
+}
+
+// TimestampString returns the RFC 3339 string for valid Timestamps.
+// For invalid Timestamps, it returns an error message in parentheses.
+//
+// Deprecated: Call the ts.AsTime method instead,
+// followed by a call to the Format method on the time.Time value.
+func TimestampString(ts *timestamppb.Timestamp) string {
+	t, err := Timestamp(ts)
+	if err != nil {
+		return fmt.Sprintf("(%v)", err)
+	}
+	return t.Format(time.RFC3339Nano)
+}
+
+// validateTimestamp determines whether a Timestamp is valid.
+// A valid timestamp represents a time in the range [0001-01-01, 10000-01-01)
+// and has a Nanos field in the range [0, 1e9).
+//
+// If the Timestamp is valid, validateTimestamp returns nil.
+// Otherwise, it returns an error that describes the problem.
+//
+// Every valid Timestamp can be represented by a time.Time,
+// but the converse is not true.
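Similarly for timestamps, a hedged sketch of the `timestamppb` replacements recommended above; the values are illustrative:

```go
package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/types/known/timestamppb"
)

func main() {
	// timestamppb.New replaces ptypes.TimestampProto; timestamppb.Now replaces ptypes.TimestampNow.
	ts := timestamppb.New(time.Date(2024, time.January, 1, 0, 0, 0, 0, time.UTC))

	// CheckValid enforces the same [0001-01-01, 10000-01-01) window described above.
	if err := ts.CheckValid(); err != nil {
		fmt.Println("invalid timestamp:", err)
		return
	}

	// AsTime replaces ptypes.Timestamp and always returns a UTC time.Time.
	fmt.Println(ts.AsTime().Format(time.RFC3339Nano))
	fmt.Println(timestamppb.Now().IsValid())
}
```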
+func validateTimestamp(ts *timestamppb.Timestamp) error { + if ts == nil { + return errors.New("timestamp: nil Timestamp") + } + if ts.Seconds < minValidSeconds { + return fmt.Errorf("timestamp: %v before 0001-01-01", ts) + } + if ts.Seconds >= maxValidSeconds { + return fmt.Errorf("timestamp: %v after 10000-01-01", ts) + } + if ts.Nanos < 0 || ts.Nanos >= 1e9 { + return fmt.Errorf("timestamp: %v: nanos not in range [0, 1e9)", ts) + } + return nil +} diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go new file mode 100644 index 00000000..a76f8076 --- /dev/null +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go @@ -0,0 +1,64 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: github.com/golang/protobuf/ptypes/timestamp/timestamp.proto + +package timestamp + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" +) + +// Symbols defined in public import of google/protobuf/timestamp.proto. + +type Timestamp = timestamppb.Timestamp + +var File_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto protoreflect.FileDescriptor + +var file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_rawDesc = []byte{ + 0x0a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, + 0x61, 0x6e, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2f, 0x74, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x42, 0x37, + 0x5a, 0x35, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, + 0x61, 0x6e, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x3b, 0x74, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x50, 0x00, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, +} + +var file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_goTypes = []interface{}{} +var file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_init() } +func file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_init() { + if File_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_rawDesc, + NumEnums: 0, + NumMessages: 0, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_goTypes, + DependencyIndexes: 
file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_depIdxs, + }.Build() + File_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto = out.File + file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_rawDesc = nil + file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_goTypes = nil + file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_depIdxs = nil +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/basic_auth.go b/vendor/github.com/labstack/echo/v4/middleware/basic_auth.go new file mode 100644 index 00000000..9285f29f --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/basic_auth.go @@ -0,0 +1,111 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "encoding/base64" + "net/http" + "strconv" + "strings" + + "github.com/labstack/echo/v4" +) + +// BasicAuthConfig defines the config for BasicAuth middleware. +type BasicAuthConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Validator is a function to validate BasicAuth credentials. + // Required. + Validator BasicAuthValidator + + // Realm is a string to define realm attribute of BasicAuth. + // Default value "Restricted". + Realm string +} + +// BasicAuthValidator defines a function to validate BasicAuth credentials. +// The function should return a boolean indicating whether the credentials are valid, +// and an error if any error occurs during the validation process. +type BasicAuthValidator func(string, string, echo.Context) (bool, error) + +const ( + basic = "basic" + defaultRealm = "Restricted" +) + +// DefaultBasicAuthConfig is the default BasicAuth middleware config. +var DefaultBasicAuthConfig = BasicAuthConfig{ + Skipper: DefaultSkipper, + Realm: defaultRealm, +} + +// BasicAuth returns an BasicAuth middleware. +// +// For valid credentials it calls the next handler. +// For missing or invalid credentials, it sends "401 - Unauthorized" response. +func BasicAuth(fn BasicAuthValidator) echo.MiddlewareFunc { + c := DefaultBasicAuthConfig + c.Validator = fn + return BasicAuthWithConfig(c) +} + +// BasicAuthWithConfig returns an BasicAuth middleware with config. +// See `BasicAuth()`. 
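For context, a minimal registration sketch for the BasicAuth middleware documented above, assuming a standard Echo app; the credentials are placeholders:

```go
package main

import (
	"crypto/subtle"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

func main() {
	server := echo.New()
	server.Use(middleware.BasicAuth(func(username, password string, c echo.Context) (bool, error) {
		// Constant-time comparison avoids leaking credential contents via timing.
		userOK := subtle.ConstantTimeCompare([]byte(username), []byte("admin")) == 1
		passOK := subtle.ConstantTimeCompare([]byte(password), []byte("secret")) == 1
		return userOK && passOK, nil
	}))
	server.Logger.Fatal(server.Start(":8080"))
}
```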
+func BasicAuthWithConfig(config BasicAuthConfig) echo.MiddlewareFunc { + // Defaults + if config.Validator == nil { + panic("echo: basic-auth middleware requires a validator function") + } + if config.Skipper == nil { + config.Skipper = DefaultBasicAuthConfig.Skipper + } + if config.Realm == "" { + config.Realm = defaultRealm + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + auth := c.Request().Header.Get(echo.HeaderAuthorization) + l := len(basic) + + if len(auth) > l+1 && strings.EqualFold(auth[:l], basic) { + // Invalid base64 shouldn't be treated as error + // instead should be treated as invalid client input + b, err := base64.StdEncoding.DecodeString(auth[l+1:]) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest).SetInternal(err) + } + + cred := string(b) + for i := 0; i < len(cred); i++ { + if cred[i] == ':' { + // Verify credentials + valid, err := config.Validator(cred[:i], cred[i+1:], c) + if err != nil { + return err + } else if valid { + return next(c) + } + break + } + } + } + + realm := defaultRealm + if config.Realm != defaultRealm { + realm = strconv.Quote(config.Realm) + } + + // Need to return `401` for browsers to pop-up login box. + c.Response().Header().Set(echo.HeaderWWWAuthenticate, basic+" realm="+realm) + return echo.ErrUnauthorized + } + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/body_dump.go b/vendor/github.com/labstack/echo/v4/middleware/body_dump.go new file mode 100644 index 00000000..e4119ec1 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/body_dump.go @@ -0,0 +1,113 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "bufio" + "bytes" + "errors" + "io" + "net" + "net/http" + + "github.com/labstack/echo/v4" +) + +// BodyDumpConfig defines the config for BodyDump middleware. +type BodyDumpConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Handler receives request and response payload. + // Required. + Handler BodyDumpHandler +} + +// BodyDumpHandler receives the request and response payload. +type BodyDumpHandler func(echo.Context, []byte, []byte) + +type bodyDumpResponseWriter struct { + io.Writer + http.ResponseWriter +} + +// DefaultBodyDumpConfig is the default BodyDump middleware config. +var DefaultBodyDumpConfig = BodyDumpConfig{ + Skipper: DefaultSkipper, +} + +// BodyDump returns a BodyDump middleware. +// +// BodyDump middleware captures the request and response payload and calls the +// registered handler. +func BodyDump(handler BodyDumpHandler) echo.MiddlewareFunc { + c := DefaultBodyDumpConfig + c.Handler = handler + return BodyDumpWithConfig(c) +} + +// BodyDumpWithConfig returns a BodyDump middleware with config. +// See: `BodyDump()`. 
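A possible way to wire the BodyDump middleware above to the project's standard `slog` logger (a sketch, not part of the vendored code):

```go
package main

import (
	"log/slog"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

func main() {
	server := echo.New()
	server.Use(middleware.BodyDump(func(c echo.Context, reqBody, resBody []byte) {
		// Invoked after the handler has run; both payloads were captured by the middleware.
		slog.Info("handled request",
			"path", c.Path(),
			"request_bytes", len(reqBody),
			"response_bytes", len(resBody),
		)
	}))
	server.Logger.Fatal(server.Start(":8080"))
}
```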
+func BodyDumpWithConfig(config BodyDumpConfig) echo.MiddlewareFunc { + // Defaults + if config.Handler == nil { + panic("echo: body-dump middleware requires a handler function") + } + if config.Skipper == nil { + config.Skipper = DefaultBodyDumpConfig.Skipper + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) (err error) { + if config.Skipper(c) { + return next(c) + } + + // Request + reqBody := []byte{} + if c.Request().Body != nil { // Read + reqBody, _ = io.ReadAll(c.Request().Body) + } + c.Request().Body = io.NopCloser(bytes.NewBuffer(reqBody)) // Reset + + // Response + resBody := new(bytes.Buffer) + mw := io.MultiWriter(c.Response().Writer, resBody) + writer := &bodyDumpResponseWriter{Writer: mw, ResponseWriter: c.Response().Writer} + c.Response().Writer = writer + + if err = next(c); err != nil { + c.Error(err) + } + + // Callback + config.Handler(c, reqBody, resBody.Bytes()) + + return + } + } +} + +func (w *bodyDumpResponseWriter) WriteHeader(code int) { + w.ResponseWriter.WriteHeader(code) +} + +func (w *bodyDumpResponseWriter) Write(b []byte) (int, error) { + return w.Writer.Write(b) +} + +func (w *bodyDumpResponseWriter) Flush() { + err := http.NewResponseController(w.ResponseWriter).Flush() + if err != nil && errors.Is(err, http.ErrNotSupported) { + panic(errors.New("response writer flushing is not supported")) + } +} + +func (w *bodyDumpResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) { + return http.NewResponseController(w.ResponseWriter).Hijack() +} + +func (w *bodyDumpResponseWriter) Unwrap() http.ResponseWriter { + return w.ResponseWriter +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/body_limit.go b/vendor/github.com/labstack/echo/v4/middleware/body_limit.go new file mode 100644 index 00000000..7d3c665f --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/body_limit.go @@ -0,0 +1,114 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "fmt" + "io" + "sync" + + "github.com/labstack/echo/v4" + "github.com/labstack/gommon/bytes" +) + +// BodyLimitConfig defines the config for BodyLimit middleware. +type BodyLimitConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Maximum allowed size for a request body, it can be specified + // as `4x` or `4xB`, where x is one of the multiple from K, M, G, T or P. + Limit string `yaml:"limit"` + limit int64 +} + +type limitedReader struct { + BodyLimitConfig + reader io.ReadCloser + read int64 +} + +// DefaultBodyLimitConfig is the default BodyLimit middleware config. +var DefaultBodyLimitConfig = BodyLimitConfig{ + Skipper: DefaultSkipper, +} + +// BodyLimit returns a BodyLimit middleware. +// +// BodyLimit middleware sets the maximum allowed size for a request body, if the +// size exceeds the configured limit, it sends "413 - Request Entity Too Large" +// response. The BodyLimit is determined based on both `Content-Length` request +// header and actual content read, which makes it super secure. +// Limit can be specified as `4x` or `4xB`, where x is one of the multiple from K, M, +// G, T or P. +func BodyLimit(limit string) echo.MiddlewareFunc { + c := DefaultBodyLimitConfig + c.Limit = limit + return BodyLimitWithConfig(c) +} + +// BodyLimitWithConfig returns a BodyLimit middleware with config. +// See: `BodyLimit()`. 
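A minimal sketch of the BodyLimit middleware above in use; the limit and route are illustrative:

```go
package main

import (
	"net/http"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

func main() {
	server := echo.New()

	// "2M" is parsed by gommon/bytes; larger requests receive 413 Request Entity Too Large.
	server.Use(middleware.BodyLimit("2M"))

	server.POST("/upload", func(c echo.Context) error {
		return c.NoContent(http.StatusCreated)
	})
	server.Logger.Fatal(server.Start(":8080"))
}
```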
+func BodyLimitWithConfig(config BodyLimitConfig) echo.MiddlewareFunc { + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultBodyLimitConfig.Skipper + } + + limit, err := bytes.Parse(config.Limit) + if err != nil { + panic(fmt.Errorf("echo: invalid body-limit=%s", config.Limit)) + } + config.limit = limit + pool := limitedReaderPool(config) + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + req := c.Request() + + // Based on content length + if req.ContentLength > config.limit { + return echo.ErrStatusRequestEntityTooLarge + } + + // Based on content read + r := pool.Get().(*limitedReader) + r.Reset(req.Body) + defer pool.Put(r) + req.Body = r + + return next(c) + } + } +} + +func (r *limitedReader) Read(b []byte) (n int, err error) { + n, err = r.reader.Read(b) + r.read += int64(n) + if r.read > r.limit { + return n, echo.ErrStatusRequestEntityTooLarge + } + return +} + +func (r *limitedReader) Close() error { + return r.reader.Close() +} + +func (r *limitedReader) Reset(reader io.ReadCloser) { + r.reader = reader + r.read = 0 +} + +func limitedReaderPool(c BodyLimitConfig) sync.Pool { + return sync.Pool{ + New: func() interface{} { + return &limitedReader{BodyLimitConfig: c} + }, + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/compress.go b/vendor/github.com/labstack/echo/v4/middleware/compress.go new file mode 100644 index 00000000..012b76b0 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/compress.go @@ -0,0 +1,230 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "bufio" + "bytes" + "compress/gzip" + "io" + "net" + "net/http" + "strings" + "sync" + + "github.com/labstack/echo/v4" +) + +// GzipConfig defines the config for Gzip middleware. +type GzipConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Gzip compression level. + // Optional. Default value -1. + Level int `yaml:"level"` + + // Length threshold before gzip compression is applied. + // Optional. Default value 0. + // + // Most of the time you will not need to change the default. Compressing + // a short response might increase the transmitted data because of the + // gzip format overhead. Compressing the response will also consume CPU + // and time on the server and the client (for decompressing). Depending on + // your use case such a threshold might be useful. + // + // See also: + // https://webmasters.stackexchange.com/questions/31750/what-is-recommended-minimum-object-size-for-gzip-performance-benefits + MinLength int +} + +type gzipResponseWriter struct { + io.Writer + http.ResponseWriter + wroteHeader bool + wroteBody bool + minLength int + minLengthExceeded bool + buffer *bytes.Buffer + code int +} + +const ( + gzipScheme = "gzip" +) + +// DefaultGzipConfig is the default Gzip middleware config. +var DefaultGzipConfig = GzipConfig{ + Skipper: DefaultSkipper, + Level: -1, + MinLength: 0, +} + +// Gzip returns a middleware which compresses HTTP response using gzip compression +// scheme. +func Gzip() echo.MiddlewareFunc { + return GzipWithConfig(DefaultGzipConfig) +} + +// GzipWithConfig return Gzip middleware with config. +// See: `Gzip()`. 
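One way the Gzip middleware above might be configured; the level and length threshold are illustrative:

```go
package main

import (
	"compress/gzip"
	"net/http"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

func main() {
	server := echo.New()
	server.Use(middleware.GzipWithConfig(middleware.GzipConfig{
		Level:     gzip.BestSpeed,
		MinLength: 1024, // skip compressing very small responses
	}))
	server.GET("/", func(c echo.Context) error {
		return c.String(http.StatusOK, "hello")
	})
	server.Logger.Fatal(server.Start(":8080"))
}
```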
+func GzipWithConfig(config GzipConfig) echo.MiddlewareFunc { + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultGzipConfig.Skipper + } + if config.Level == 0 { + config.Level = DefaultGzipConfig.Level + } + if config.MinLength < 0 { + config.MinLength = DefaultGzipConfig.MinLength + } + + pool := gzipCompressPool(config) + bpool := bufferPool() + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + res := c.Response() + res.Header().Add(echo.HeaderVary, echo.HeaderAcceptEncoding) + if strings.Contains(c.Request().Header.Get(echo.HeaderAcceptEncoding), gzipScheme) { + i := pool.Get() + w, ok := i.(*gzip.Writer) + if !ok { + return echo.NewHTTPError(http.StatusInternalServerError, i.(error).Error()) + } + rw := res.Writer + w.Reset(rw) + + buf := bpool.Get().(*bytes.Buffer) + buf.Reset() + + grw := &gzipResponseWriter{Writer: w, ResponseWriter: rw, minLength: config.MinLength, buffer: buf} + defer func() { + // There are different reasons for cases when we have not yet written response to the client and now need to do so. + // a) handler response had only response code and no response body (ala 404 or redirects etc). Response code need to be written now. + // b) body is shorter than our minimum length threshold and being buffered currently and needs to be written + if !grw.wroteBody { + if res.Header().Get(echo.HeaderContentEncoding) == gzipScheme { + res.Header().Del(echo.HeaderContentEncoding) + } + if grw.wroteHeader { + rw.WriteHeader(grw.code) + } + // We have to reset response to it's pristine state when + // nothing is written to body or error is returned. + // See issue #424, #407. + res.Writer = rw + w.Reset(io.Discard) + } else if !grw.minLengthExceeded { + // Write uncompressed response + res.Writer = rw + if grw.wroteHeader { + grw.ResponseWriter.WriteHeader(grw.code) + } + grw.buffer.WriteTo(rw) + w.Reset(io.Discard) + } + w.Close() + bpool.Put(buf) + pool.Put(w) + }() + res.Writer = grw + } + return next(c) + } + } +} + +func (w *gzipResponseWriter) WriteHeader(code int) { + w.Header().Del(echo.HeaderContentLength) // Issue #444 + + w.wroteHeader = true + + // Delay writing of the header until we know if we'll actually compress the response + w.code = code +} + +func (w *gzipResponseWriter) Write(b []byte) (int, error) { + if w.Header().Get(echo.HeaderContentType) == "" { + w.Header().Set(echo.HeaderContentType, http.DetectContentType(b)) + } + w.wroteBody = true + + if !w.minLengthExceeded { + n, err := w.buffer.Write(b) + + if w.buffer.Len() >= w.minLength { + w.minLengthExceeded = true + + // The minimum length is exceeded, add Content-Encoding header and write the header + w.Header().Set(echo.HeaderContentEncoding, gzipScheme) // Issue #806 + if w.wroteHeader { + w.ResponseWriter.WriteHeader(w.code) + } + + return w.Writer.Write(w.buffer.Bytes()) + } + + return n, err + } + + return w.Writer.Write(b) +} + +func (w *gzipResponseWriter) Flush() { + if !w.minLengthExceeded { + // Enforce compression because we will not know how much more data will come + w.minLengthExceeded = true + w.Header().Set(echo.HeaderContentEncoding, gzipScheme) // Issue #806 + if w.wroteHeader { + w.ResponseWriter.WriteHeader(w.code) + } + + w.Writer.Write(w.buffer.Bytes()) + } + + w.Writer.(*gzip.Writer).Flush() + _ = http.NewResponseController(w.ResponseWriter).Flush() +} + +func (w *gzipResponseWriter) Unwrap() http.ResponseWriter { + return w.ResponseWriter +} + +func (w 
*gzipResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) { + return http.NewResponseController(w.ResponseWriter).Hijack() +} + +func (w *gzipResponseWriter) Push(target string, opts *http.PushOptions) error { + if p, ok := w.ResponseWriter.(http.Pusher); ok { + return p.Push(target, opts) + } + return http.ErrNotSupported +} + +func gzipCompressPool(config GzipConfig) sync.Pool { + return sync.Pool{ + New: func() interface{} { + w, err := gzip.NewWriterLevel(io.Discard, config.Level) + if err != nil { + return err + } + return w + }, + } +} + +func bufferPool() sync.Pool { + return sync.Pool{ + New: func() interface{} { + b := &bytes.Buffer{} + return b + }, + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/context_timeout.go b/vendor/github.com/labstack/echo/v4/middleware/context_timeout.go new file mode 100644 index 00000000..e67173f2 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/context_timeout.go @@ -0,0 +1,75 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "context" + "errors" + "time" + + "github.com/labstack/echo/v4" +) + +// ContextTimeoutConfig defines the config for ContextTimeout middleware. +type ContextTimeoutConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // ErrorHandler is a function when error aries in middleware execution. + ErrorHandler func(err error, c echo.Context) error + + // Timeout configures a timeout for the middleware, defaults to 0 for no timeout + Timeout time.Duration +} + +// ContextTimeout returns a middleware which returns error (503 Service Unavailable error) to client +// when underlying method returns context.DeadlineExceeded error. +func ContextTimeout(timeout time.Duration) echo.MiddlewareFunc { + return ContextTimeoutWithConfig(ContextTimeoutConfig{Timeout: timeout}) +} + +// ContextTimeoutWithConfig returns a Timeout middleware with config. +func ContextTimeoutWithConfig(config ContextTimeoutConfig) echo.MiddlewareFunc { + mw, err := config.ToMiddleware() + if err != nil { + panic(err) + } + return mw +} + +// ToMiddleware converts Config to middleware. 
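A usage sketch for the ContextTimeout middleware above; the handler must honour the request context, and the timeout values are illustrative:

```go
package main

import (
	"net/http"
	"time"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

const handlerTimeout = 5 * time.Second

func main() {
	server := echo.New()
	server.Use(middleware.ContextTimeout(handlerTimeout))

	server.GET("/slow", func(c echo.Context) error {
		select {
		case <-time.After(2 * handlerTimeout):
			return c.NoContent(http.StatusOK)
		case <-c.Request().Context().Done():
			// context.DeadlineExceeded is mapped to 503 by the middleware's default ErrorHandler.
			return c.Request().Context().Err()
		}
	})
	server.Logger.Fatal(server.Start(":8080"))
}
```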
+func (config ContextTimeoutConfig) ToMiddleware() (echo.MiddlewareFunc, error) { + if config.Timeout == 0 { + return nil, errors.New("timeout must be set") + } + if config.Skipper == nil { + config.Skipper = DefaultSkipper + } + if config.ErrorHandler == nil { + config.ErrorHandler = func(err error, c echo.Context) error { + if err != nil && errors.Is(err, context.DeadlineExceeded) { + return echo.ErrServiceUnavailable.WithInternal(err) + } + return err + } + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + timeoutContext, cancel := context.WithTimeout(c.Request().Context(), config.Timeout) + defer cancel() + + c.SetRequest(c.Request().WithContext(timeoutContext)) + + if err := next(c); err != nil { + return config.ErrorHandler(err, c) + } + return nil + } + }, nil +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/cors.go b/vendor/github.com/labstack/echo/v4/middleware/cors.go new file mode 100644 index 00000000..a1f44532 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/cors.go @@ -0,0 +1,307 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "net/http" + "regexp" + "strconv" + "strings" + + "github.com/labstack/echo/v4" +) + +// CORSConfig defines the config for CORS middleware. +type CORSConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // AllowOrigins determines the value of the Access-Control-Allow-Origin + // response header. This header defines a list of origins that may access the + // resource. The wildcard characters '*' and '?' are supported and are + // converted to regex fragments '.*' and '.' accordingly. + // + // Security: use extreme caution when handling the origin, and carefully + // validate any logic. Remember that attackers may register hostile domain names. + // See https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html + // + // Optional. Default value []string{"*"}. + // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin + AllowOrigins []string `yaml:"allow_origins"` + + // AllowOriginFunc is a custom function to validate the origin. It takes the + // origin as an argument and returns true if allowed or false otherwise. If + // an error is returned, it is returned by the handler. If this option is + // set, AllowOrigins is ignored. + // + // Security: use extreme caution when handling the origin, and carefully + // validate any logic. Remember that attackers may register hostile domain names. + // See https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html + // + // Optional. + AllowOriginFunc func(origin string) (bool, error) `yaml:"-"` + + // AllowMethods determines the value of the Access-Control-Allow-Methods + // response header. This header specified the list of methods allowed when + // accessing the resource. This is used in response to a preflight request. + // + // Optional. Default value DefaultCORSConfig.AllowMethods. + // If `allowMethods` is left empty, this middleware will fill for preflight + // request `Access-Control-Allow-Methods` header value + // from `Allow` header that echo.Router set into context. 
+ // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Methods + AllowMethods []string `yaml:"allow_methods"` + + // AllowHeaders determines the value of the Access-Control-Allow-Headers + // response header. This header is used in response to a preflight request to + // indicate which HTTP headers can be used when making the actual request. + // + // Optional. Default value []string{}. + // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Headers + AllowHeaders []string `yaml:"allow_headers"` + + // AllowCredentials determines the value of the + // Access-Control-Allow-Credentials response header. This header indicates + // whether or not the response to the request can be exposed when the + // credentials mode (Request.credentials) is true. When used as part of a + // response to a preflight request, this indicates whether or not the actual + // request can be made using credentials. See also + // [MDN: Access-Control-Allow-Credentials]. + // + // Optional. Default value false, in which case the header is not set. + // + // Security: avoid using `AllowCredentials = true` with `AllowOrigins = *`. + // See "Exploiting CORS misconfigurations for Bitcoins and bounties", + // https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html + // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Credentials + AllowCredentials bool `yaml:"allow_credentials"` + + // UnsafeWildcardOriginWithAllowCredentials UNSAFE/INSECURE: allows wildcard '*' origin to be used with AllowCredentials + // flag. In that case we consider any origin allowed and send it back to the client with `Access-Control-Allow-Origin` header. + // + // This is INSECURE and potentially leads to [cross-origin](https://portswigger.net/research/exploiting-cors-misconfigurations-for-bitcoins-and-bounties) + // attacks. See: https://github.com/labstack/echo/issues/2400 for discussion on the subject. + // + // Optional. Default value is false. + UnsafeWildcardOriginWithAllowCredentials bool `yaml:"unsafe_wildcard_origin_with_allow_credentials"` + + // ExposeHeaders determines the value of Access-Control-Expose-Headers, which + // defines a list of headers that clients are allowed to access. + // + // Optional. Default value []string{}, in which case the header is not set. + // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Expose-Header + ExposeHeaders []string `yaml:"expose_headers"` + + // MaxAge determines the value of the Access-Control-Max-Age response header. + // This header indicates how long (in seconds) the results of a preflight + // request can be cached. + // The header is set only if MaxAge != 0, negative value sends "0" which instructs browsers not to cache that response. + // + // Optional. Default value 0 - meaning header is not sent. + // + // See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Max-Age + MaxAge int `yaml:"max_age"` +} + +// DefaultCORSConfig is the default CORS middleware config. +var DefaultCORSConfig = CORSConfig{ + Skipper: DefaultSkipper, + AllowOrigins: []string{"*"}, + AllowMethods: []string{http.MethodGet, http.MethodHead, http.MethodPut, http.MethodPatch, http.MethodPost, http.MethodDelete}, +} + +// CORS returns a Cross-Origin Resource Sharing (CORS) middleware. +// See also [MDN: Cross-Origin Resource Sharing (CORS)]. 
+// +// Security: Poorly configured CORS can compromise security because it allows +// relaxation of the browser's Same-Origin policy. See [Exploiting CORS +// misconfigurations for Bitcoins and bounties] and [Portswigger: Cross-origin +// resource sharing (CORS)] for more details. +// +// [MDN: Cross-Origin Resource Sharing (CORS)]: https://developer.mozilla.org/en/docs/Web/HTTP/Access_control_CORS +// [Exploiting CORS misconfigurations for Bitcoins and bounties]: https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html +// [Portswigger: Cross-origin resource sharing (CORS)]: https://portswigger.net/web-security/cors +func CORS() echo.MiddlewareFunc { + return CORSWithConfig(DefaultCORSConfig) +} + +// CORSWithConfig returns a CORS middleware with config. +// See: [CORS]. +func CORSWithConfig(config CORSConfig) echo.MiddlewareFunc { + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultCORSConfig.Skipper + } + if len(config.AllowOrigins) == 0 { + config.AllowOrigins = DefaultCORSConfig.AllowOrigins + } + hasCustomAllowMethods := true + if len(config.AllowMethods) == 0 { + hasCustomAllowMethods = false + config.AllowMethods = DefaultCORSConfig.AllowMethods + } + + allowOriginPatterns := make([]*regexp.Regexp, 0, len(config.AllowOrigins)) + for _, origin := range config.AllowOrigins { + if origin == "*" { + continue // "*" is handled differently and does not need regexp + } + pattern := regexp.QuoteMeta(origin) + pattern = strings.ReplaceAll(pattern, "\\*", ".*") + pattern = strings.ReplaceAll(pattern, "\\?", ".") + pattern = "^" + pattern + "$" + + re, err := regexp.Compile(pattern) + if err != nil { + // this is to preserve previous behaviour - invalid patterns were just ignored. + // If we would turn this to panic, users with invalid patterns + // would have applications crashing in production due unrecovered panic. + // TODO: this should be turned to error/panic in `v5` + continue + } + allowOriginPatterns = append(allowOriginPatterns, re) + } + + allowMethods := strings.Join(config.AllowMethods, ",") + allowHeaders := strings.Join(config.AllowHeaders, ",") + exposeHeaders := strings.Join(config.ExposeHeaders, ",") + + maxAge := "0" + if config.MaxAge > 0 { + maxAge = strconv.Itoa(config.MaxAge) + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + req := c.Request() + res := c.Response() + origin := req.Header.Get(echo.HeaderOrigin) + allowOrigin := "" + + res.Header().Add(echo.HeaderVary, echo.HeaderOrigin) + + // Preflight request is an OPTIONS request, using three HTTP request headers: Access-Control-Request-Method, + // Access-Control-Request-Headers, and the Origin header. See: https://developer.mozilla.org/en-US/docs/Glossary/Preflight_request + // For simplicity we just consider method type and later `Origin` header. + preflight := req.Method == http.MethodOptions + + // Although router adds special handler in case of OPTIONS method we avoid calling next for OPTIONS in this middleware + // as CORS requests do not have cookies / authentication headers by default, so we could get stuck in auth + // middlewares by calling next(c). + // But we still want to send `Allow` header as response in case of Non-CORS OPTIONS request as router default + // handler does. 
+ routerAllowMethods := "" + if preflight { + tmpAllowMethods, ok := c.Get(echo.ContextKeyHeaderAllow).(string) + if ok && tmpAllowMethods != "" { + routerAllowMethods = tmpAllowMethods + c.Response().Header().Set(echo.HeaderAllow, routerAllowMethods) + } + } + + // No Origin provided. This is (probably) not request from actual browser - proceed executing middleware chain + if origin == "" { + if !preflight { + return next(c) + } + return c.NoContent(http.StatusNoContent) + } + + if config.AllowOriginFunc != nil { + allowed, err := config.AllowOriginFunc(origin) + if err != nil { + return err + } + if allowed { + allowOrigin = origin + } + } else { + // Check allowed origins + for _, o := range config.AllowOrigins { + if o == "*" && config.AllowCredentials && config.UnsafeWildcardOriginWithAllowCredentials { + allowOrigin = origin + break + } + if o == "*" || o == origin { + allowOrigin = o + break + } + if matchSubdomain(origin, o) { + allowOrigin = origin + break + } + } + + checkPatterns := false + if allowOrigin == "" { + // to avoid regex cost by invalid (long) domains (253 is domain name max limit) + if len(origin) <= (253+3+5) && strings.Contains(origin, "://") { + checkPatterns = true + } + } + if checkPatterns { + for _, re := range allowOriginPatterns { + if match := re.MatchString(origin); match { + allowOrigin = origin + break + } + } + } + } + + // Origin not allowed + if allowOrigin == "" { + if !preflight { + return next(c) + } + return c.NoContent(http.StatusNoContent) + } + + res.Header().Set(echo.HeaderAccessControlAllowOrigin, allowOrigin) + if config.AllowCredentials { + res.Header().Set(echo.HeaderAccessControlAllowCredentials, "true") + } + + // Simple request + if !preflight { + if exposeHeaders != "" { + res.Header().Set(echo.HeaderAccessControlExposeHeaders, exposeHeaders) + } + return next(c) + } + + // Preflight request + res.Header().Add(echo.HeaderVary, echo.HeaderAccessControlRequestMethod) + res.Header().Add(echo.HeaderVary, echo.HeaderAccessControlRequestHeaders) + + if !hasCustomAllowMethods && routerAllowMethods != "" { + res.Header().Set(echo.HeaderAccessControlAllowMethods, routerAllowMethods) + } else { + res.Header().Set(echo.HeaderAccessControlAllowMethods, allowMethods) + } + + if allowHeaders != "" { + res.Header().Set(echo.HeaderAccessControlAllowHeaders, allowHeaders) + } else { + h := req.Header.Get(echo.HeaderAccessControlRequestHeaders) + if h != "" { + res.Header().Set(echo.HeaderAccessControlAllowHeaders, h) + } + } + if config.MaxAge != 0 { + res.Header().Set(echo.HeaderAccessControlMaxAge, maxAge) + } + return c.NoContent(http.StatusNoContent) + } + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/csrf.go b/vendor/github.com/labstack/echo/v4/middleware/csrf.go new file mode 100644 index 00000000..92f4019d --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/csrf.go @@ -0,0 +1,218 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "crypto/subtle" + "net/http" + "time" + + "github.com/labstack/echo/v4" +) + +// CSRFConfig defines the config for CSRF middleware. +type CSRFConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // TokenLength is the length of the generated token. + TokenLength uint8 `yaml:"token_length"` + // Optional. Default value 32. + + // TokenLookup is a string in the form of ":" or ":,:" that is used + // to extract token from the request. + // Optional. 
Default value "header:X-CSRF-Token". + // Possible values: + // - "header:" or "header::" + // - "query:" + // - "form:" + // Multiple sources example: + // - "header:X-CSRF-Token,query:csrf" + TokenLookup string `yaml:"token_lookup"` + + // Context key to store generated CSRF token into context. + // Optional. Default value "csrf". + ContextKey string `yaml:"context_key"` + + // Name of the CSRF cookie. This cookie will store CSRF token. + // Optional. Default value "csrf". + CookieName string `yaml:"cookie_name"` + + // Domain of the CSRF cookie. + // Optional. Default value none. + CookieDomain string `yaml:"cookie_domain"` + + // Path of the CSRF cookie. + // Optional. Default value none. + CookiePath string `yaml:"cookie_path"` + + // Max age (in seconds) of the CSRF cookie. + // Optional. Default value 86400 (24hr). + CookieMaxAge int `yaml:"cookie_max_age"` + + // Indicates if CSRF cookie is secure. + // Optional. Default value false. + CookieSecure bool `yaml:"cookie_secure"` + + // Indicates if CSRF cookie is HTTP only. + // Optional. Default value false. + CookieHTTPOnly bool `yaml:"cookie_http_only"` + + // Indicates SameSite mode of the CSRF cookie. + // Optional. Default value SameSiteDefaultMode. + CookieSameSite http.SameSite `yaml:"cookie_same_site"` + + // ErrorHandler defines a function which is executed for returning custom errors. + ErrorHandler CSRFErrorHandler +} + +// CSRFErrorHandler is a function which is executed for creating custom errors. +type CSRFErrorHandler func(err error, c echo.Context) error + +// ErrCSRFInvalid is returned when CSRF check fails +var ErrCSRFInvalid = echo.NewHTTPError(http.StatusForbidden, "invalid csrf token") + +// DefaultCSRFConfig is the default CSRF middleware config. +var DefaultCSRFConfig = CSRFConfig{ + Skipper: DefaultSkipper, + TokenLength: 32, + TokenLookup: "header:" + echo.HeaderXCSRFToken, + ContextKey: "csrf", + CookieName: "_csrf", + CookieMaxAge: 86400, + CookieSameSite: http.SameSiteDefaultMode, +} + +// CSRF returns a Cross-Site Request Forgery (CSRF) middleware. +// See: https://en.wikipedia.org/wiki/Cross-site_request_forgery +func CSRF() echo.MiddlewareFunc { + c := DefaultCSRFConfig + return CSRFWithConfig(c) +} + +// CSRFWithConfig returns a CSRF middleware with config. +// See `CSRF()`. 
+func CSRFWithConfig(config CSRFConfig) echo.MiddlewareFunc { + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultCSRFConfig.Skipper + } + if config.TokenLength == 0 { + config.TokenLength = DefaultCSRFConfig.TokenLength + } + + if config.TokenLookup == "" { + config.TokenLookup = DefaultCSRFConfig.TokenLookup + } + if config.ContextKey == "" { + config.ContextKey = DefaultCSRFConfig.ContextKey + } + if config.CookieName == "" { + config.CookieName = DefaultCSRFConfig.CookieName + } + if config.CookieMaxAge == 0 { + config.CookieMaxAge = DefaultCSRFConfig.CookieMaxAge + } + if config.CookieSameSite == http.SameSiteNoneMode { + config.CookieSecure = true + } + + extractors, cErr := CreateExtractors(config.TokenLookup) + if cErr != nil { + panic(cErr) + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + token := "" + if k, err := c.Cookie(config.CookieName); err != nil { + token = randomString(config.TokenLength) + } else { + token = k.Value // Reuse token + } + + switch c.Request().Method { + case http.MethodGet, http.MethodHead, http.MethodOptions, http.MethodTrace: + default: + // Validate token only for requests which are not defined as 'safe' by RFC7231 + var lastExtractorErr error + var lastTokenErr error + outer: + for _, extractor := range extractors { + clientTokens, err := extractor(c) + if err != nil { + lastExtractorErr = err + continue + } + + for _, clientToken := range clientTokens { + if validateCSRFToken(token, clientToken) { + lastTokenErr = nil + lastExtractorErr = nil + break outer + } + lastTokenErr = ErrCSRFInvalid + } + } + var finalErr error + if lastTokenErr != nil { + finalErr = lastTokenErr + } else if lastExtractorErr != nil { + // ugly part to preserve backwards compatible errors. 
someone could rely on them + if lastExtractorErr == errQueryExtractorValueMissing { + lastExtractorErr = echo.NewHTTPError(http.StatusBadRequest, "missing csrf token in the query string") + } else if lastExtractorErr == errFormExtractorValueMissing { + lastExtractorErr = echo.NewHTTPError(http.StatusBadRequest, "missing csrf token in the form parameter") + } else if lastExtractorErr == errHeaderExtractorValueMissing { + lastExtractorErr = echo.NewHTTPError(http.StatusBadRequest, "missing csrf token in request header") + } else { + lastExtractorErr = echo.NewHTTPError(http.StatusBadRequest, lastExtractorErr.Error()) + } + finalErr = lastExtractorErr + } + + if finalErr != nil { + if config.ErrorHandler != nil { + return config.ErrorHandler(finalErr, c) + } + return finalErr + } + } + + // Set CSRF cookie + cookie := new(http.Cookie) + cookie.Name = config.CookieName + cookie.Value = token + if config.CookiePath != "" { + cookie.Path = config.CookiePath + } + if config.CookieDomain != "" { + cookie.Domain = config.CookieDomain + } + if config.CookieSameSite != http.SameSiteDefaultMode { + cookie.SameSite = config.CookieSameSite + } + cookie.Expires = time.Now().Add(time.Duration(config.CookieMaxAge) * time.Second) + cookie.Secure = config.CookieSecure + cookie.HttpOnly = config.CookieHTTPOnly + c.SetCookie(cookie) + + // Store token in the context + c.Set(config.ContextKey, token) + + // Protect clients from caching the response + c.Response().Header().Add(echo.HeaderVary, echo.HeaderCookie) + + return next(c) + } + } +} + +func validateCSRFToken(token, clientToken string) bool { + return subtle.ConstantTimeCompare([]byte(token), []byte(clientToken)) == 1 +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/decompress.go b/vendor/github.com/labstack/echo/v4/middleware/decompress.go new file mode 100644 index 00000000..0c56176e --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/decompress.go @@ -0,0 +1,98 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "compress/gzip" + "io" + "net/http" + "sync" + + "github.com/labstack/echo/v4" +) + +// DecompressConfig defines the config for Decompress middleware. +type DecompressConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // GzipDecompressPool defines an interface to provide the sync.Pool used to create/store Gzip readers + GzipDecompressPool Decompressor +} + +// GZIPEncoding content-encoding header if set to "gzip", decompress body contents. 
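A brief registration sketch for the Decompress middleware defined in this file; the route and response are illustrative:

```go
package main

import (
	"fmt"
	"io"
	"net/http"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

func main() {
	server := echo.New()
	server.Use(middleware.Decompress())

	server.POST("/ingest", func(c echo.Context) error {
		// With Content-Encoding: gzip, the body reader already yields the decompressed stream.
		payload, err := io.ReadAll(c.Request().Body)
		if err != nil {
			return err
		}
		return c.String(http.StatusOK, fmt.Sprintf("received %d bytes", len(payload)))
	})
	server.Logger.Fatal(server.Start(":8080"))
}
```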
+const GZIPEncoding string = "gzip" + +// Decompressor is used to get the sync.Pool used by the middleware to get Gzip readers +type Decompressor interface { + gzipDecompressPool() sync.Pool +} + +// DefaultDecompressConfig defines the config for decompress middleware +var DefaultDecompressConfig = DecompressConfig{ + Skipper: DefaultSkipper, + GzipDecompressPool: &DefaultGzipDecompressPool{}, +} + +// DefaultGzipDecompressPool is the default implementation of Decompressor interface +type DefaultGzipDecompressPool struct { +} + +func (d *DefaultGzipDecompressPool) gzipDecompressPool() sync.Pool { + return sync.Pool{New: func() interface{} { return new(gzip.Reader) }} +} + +// Decompress decompresses request body based if content encoding type is set to "gzip" with default config +func Decompress() echo.MiddlewareFunc { + return DecompressWithConfig(DefaultDecompressConfig) +} + +// DecompressWithConfig decompresses request body based if content encoding type is set to "gzip" with config +func DecompressWithConfig(config DecompressConfig) echo.MiddlewareFunc { + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultGzipConfig.Skipper + } + if config.GzipDecompressPool == nil { + config.GzipDecompressPool = DefaultDecompressConfig.GzipDecompressPool + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + pool := config.GzipDecompressPool.gzipDecompressPool() + + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + if c.Request().Header.Get(echo.HeaderContentEncoding) != GZIPEncoding { + return next(c) + } + + i := pool.Get() + gr, ok := i.(*gzip.Reader) + if !ok || gr == nil { + return echo.NewHTTPError(http.StatusInternalServerError, i.(error).Error()) + } + defer pool.Put(gr) + + b := c.Request().Body + defer b.Close() + + if err := gr.Reset(b); err != nil { + if err == io.EOF { //ignore if body is empty + return next(c) + } + return err + } + + // only Close gzip reader if it was set to a proper gzip source otherwise it will panic on close. + defer gr.Close() + + c.Request().Body = gr + + return next(c) + } + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/extractor.go b/vendor/github.com/labstack/echo/v4/middleware/extractor.go new file mode 100644 index 00000000..3f274140 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/extractor.go @@ -0,0 +1,207 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "errors" + "fmt" + "github.com/labstack/echo/v4" + "net/textproto" + "strings" +) + +const ( + // extractorLimit is arbitrary number to limit values extractor can return. this limits possible resource exhaustion + // attack vector + extractorLimit = 20 +) + +var errHeaderExtractorValueMissing = errors.New("missing value in request header") +var errHeaderExtractorValueInvalid = errors.New("invalid value in request header") +var errQueryExtractorValueMissing = errors.New("missing value in the query string") +var errParamExtractorValueMissing = errors.New("missing value in path params") +var errCookieExtractorValueMissing = errors.New("missing value in cookies") +var errFormExtractorValueMissing = errors.New("missing value in the form") + +// ValuesExtractor defines a function for extracting values (keys/tokens) from the given context. +type ValuesExtractor func(c echo.Context) ([]string, error) + +// CreateExtractors creates ValuesExtractors from given lookups. 
+// Lookups is a string in the form of ":" or ":,:" that is used +// to extract key from the request. +// Possible values: +// - "header:" or "header::" +// `` is argument value to cut/trim prefix of the extracted value. This is useful if header +// value has static prefix like `Authorization: ` where part that we +// want to cut is ` ` note the space at the end. +// In case of basic authentication `Authorization: Basic ` prefix we want to remove is `Basic `. +// - "query:" +// - "param:" +// - "form:" +// - "cookie:" +// +// Multiple sources example: +// - "header:Authorization,header:X-Api-Key" +func CreateExtractors(lookups string) ([]ValuesExtractor, error) { + return createExtractors(lookups, "") +} + +func createExtractors(lookups string, authScheme string) ([]ValuesExtractor, error) { + if lookups == "" { + return nil, nil + } + sources := strings.Split(lookups, ",") + var extractors = make([]ValuesExtractor, 0) + for _, source := range sources { + parts := strings.Split(source, ":") + if len(parts) < 2 { + return nil, fmt.Errorf("extractor source for lookup could not be split into needed parts: %v", source) + } + + switch parts[0] { + case "query": + extractors = append(extractors, valuesFromQuery(parts[1])) + case "param": + extractors = append(extractors, valuesFromParam(parts[1])) + case "cookie": + extractors = append(extractors, valuesFromCookie(parts[1])) + case "form": + extractors = append(extractors, valuesFromForm(parts[1])) + case "header": + prefix := "" + if len(parts) > 2 { + prefix = parts[2] + } else if authScheme != "" && parts[1] == echo.HeaderAuthorization { + // backwards compatibility for JWT and KeyAuth: + // * we only apply this fix to Authorization as header we use and uses prefixes like "Bearer " etc + // * previously header extractor assumed that auth-scheme/prefix had a space as suffix we need to retain that + // behaviour for default values and Authorization header. + prefix = authScheme + if !strings.HasSuffix(prefix, " ") { + prefix += " " + } + } + extractors = append(extractors, valuesFromHeader(parts[1], prefix)) + } + } + return extractors, nil +} + +// valuesFromHeader returns a functions that extracts values from the request header. +// valuePrefix is parameter to remove first part (prefix) of the extracted value. This is useful if header value has static +// prefix like `Authorization: ` where part that we want to remove is ` ` +// note the space at the end. In case of basic authentication `Authorization: Basic ` prefix we want to remove +// is `Basic `. In case of JWT tokens `Authorization: Bearer ` prefix is `Bearer `. +// If prefix is left empty the whole value is returned. 
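A sketch of `CreateExtractors` used from a custom middleware; the lookup string, header names, and context key are assumptions for illustration:

```go
package main

import (
	"net/http"

	"github.com/labstack/echo/v4"
	"github.com/labstack/echo/v4/middleware"
)

// newAPIKeyMiddleware builds a tiny middleware on top of CreateExtractors.
// Each lookup entry is "source:name"; header entries may append a prefix to strip,
// e.g. "header:Authorization:Bearer " (note the trailing space).
func newAPIKeyMiddleware(lookup string) echo.MiddlewareFunc {
	extractors, err := middleware.CreateExtractors(lookup)
	if err != nil {
		panic(err) // an invalid lookup string is a programming error
	}
	return func(next echo.HandlerFunc) echo.HandlerFunc {
		return func(c echo.Context) error {
			for _, extract := range extractors {
				values, extractErr := extract(c)
				if extractErr == nil && len(values) > 0 {
					c.Set("api_key", values[0])
					return next(c)
				}
			}
			return echo.NewHTTPError(http.StatusUnauthorized, "missing api key")
		}
	}
}

func main() {
	server := echo.New()
	server.Use(newAPIKeyMiddleware("header:X-Api-Key,query:api_key"))
	server.Logger.Fatal(server.Start(":8080"))
}
```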
+func valuesFromHeader(header string, valuePrefix string) ValuesExtractor { + prefixLen := len(valuePrefix) + // standard library parses http.Request header keys in canonical form but we may provide something else so fix this + header = textproto.CanonicalMIMEHeaderKey(header) + return func(c echo.Context) ([]string, error) { + values := c.Request().Header.Values(header) + if len(values) == 0 { + return nil, errHeaderExtractorValueMissing + } + + result := make([]string, 0) + for i, value := range values { + if prefixLen == 0 { + result = append(result, value) + if i >= extractorLimit-1 { + break + } + continue + } + if len(value) > prefixLen && strings.EqualFold(value[:prefixLen], valuePrefix) { + result = append(result, value[prefixLen:]) + if i >= extractorLimit-1 { + break + } + } + } + + if len(result) == 0 { + if prefixLen > 0 { + return nil, errHeaderExtractorValueInvalid + } + return nil, errHeaderExtractorValueMissing + } + return result, nil + } +} + +// valuesFromQuery returns a function that extracts values from the query string. +func valuesFromQuery(param string) ValuesExtractor { + return func(c echo.Context) ([]string, error) { + result := c.QueryParams()[param] + if len(result) == 0 { + return nil, errQueryExtractorValueMissing + } else if len(result) > extractorLimit-1 { + result = result[:extractorLimit] + } + return result, nil + } +} + +// valuesFromParam returns a function that extracts values from the url param string. +func valuesFromParam(param string) ValuesExtractor { + return func(c echo.Context) ([]string, error) { + result := make([]string, 0) + paramVales := c.ParamValues() + for i, p := range c.ParamNames() { + if param == p { + result = append(result, paramVales[i]) + if i >= extractorLimit-1 { + break + } + } + } + if len(result) == 0 { + return nil, errParamExtractorValueMissing + } + return result, nil + } +} + +// valuesFromCookie returns a function that extracts values from the named cookie. +func valuesFromCookie(name string) ValuesExtractor { + return func(c echo.Context) ([]string, error) { + cookies := c.Cookies() + if len(cookies) == 0 { + return nil, errCookieExtractorValueMissing + } + + result := make([]string, 0) + for i, cookie := range cookies { + if name == cookie.Name { + result = append(result, cookie.Value) + if i >= extractorLimit-1 { + break + } + } + } + if len(result) == 0 { + return nil, errCookieExtractorValueMissing + } + return result, nil + } +} + +// valuesFromForm returns a function that extracts values from the form field. +func valuesFromForm(name string) ValuesExtractor { + return func(c echo.Context) ([]string, error) { + if c.Request().Form == nil { + _ = c.Request().ParseMultipartForm(32 << 20) // same what `c.Request().FormValue(name)` does + } + values := c.Request().Form[name] + if len(values) == 0 { + return nil, errFormExtractorValueMissing + } + if len(values) > extractorLimit-1 { + values = values[:extractorLimit] + } + result := append([]string{}, values...) + return result, nil + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/key_auth.go b/vendor/github.com/labstack/echo/v4/middleware/key_auth.go new file mode 100644 index 00000000..79bee207 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/key_auth.go @@ -0,0 +1,179 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "errors" + "github.com/labstack/echo/v4" + "net/http" +) + +// KeyAuthConfig defines the config for KeyAuth middleware. 
+type KeyAuthConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // KeyLookup is a string in the form of ":" or ":,:" that is used + // to extract key from the request. + // Optional. Default value "header:Authorization". + // Possible values: + // - "header:" or "header::" + // `` is argument value to cut/trim prefix of the extracted value. This is useful if header + // value has static prefix like `Authorization: ` where part that we + // want to cut is ` ` note the space at the end. + // In case of basic authentication `Authorization: Basic ` prefix we want to remove is `Basic `. + // - "query:" + // - "form:" + // - "cookie:" + // Multiple sources example: + // - "header:Authorization,header:X-Api-Key" + KeyLookup string + + // AuthScheme to be used in the Authorization header. + // Optional. Default value "Bearer". + AuthScheme string + + // Validator is a function to validate key. + // Required. + Validator KeyAuthValidator + + // ErrorHandler defines a function which is executed for an invalid key. + // It may be used to define a custom error. + ErrorHandler KeyAuthErrorHandler + + // ContinueOnIgnoredError allows the next middleware/handler to be called when ErrorHandler decides to + // ignore the error (by returning `nil`). + // This is useful when parts of your site/api allow public access and some authorized routes provide extra functionality. + // In that case you can use ErrorHandler to set a default public key auth value in the request context + // and continue. Some logic down the remaining execution chain needs to check that (public) key auth value then. + ContinueOnIgnoredError bool +} + +// KeyAuthValidator defines a function to validate KeyAuth credentials. +type KeyAuthValidator func(auth string, c echo.Context) (bool, error) + +// KeyAuthErrorHandler defines a function which is executed for an invalid key. +type KeyAuthErrorHandler func(err error, c echo.Context) error + +// ErrKeyAuthMissing is error type when KeyAuth middleware is unable to extract value from lookups +type ErrKeyAuthMissing struct { + Err error +} + +// DefaultKeyAuthConfig is the default KeyAuth middleware config. +var DefaultKeyAuthConfig = KeyAuthConfig{ + Skipper: DefaultSkipper, + KeyLookup: "header:" + echo.HeaderAuthorization, + AuthScheme: "Bearer", +} + +// Error returns errors text +func (e *ErrKeyAuthMissing) Error() string { + return e.Err.Error() +} + +// Unwrap unwraps error +func (e *ErrKeyAuthMissing) Unwrap() error { + return e.Err +} + +// KeyAuth returns an KeyAuth middleware. +// +// For valid key it calls the next handler. +// For invalid key, it sends "401 - Unauthorized" response. +// For missing key, it sends "400 - Bad Request" response. +func KeyAuth(fn KeyAuthValidator) echo.MiddlewareFunc { + c := DefaultKeyAuthConfig + c.Validator = fn + return KeyAuthWithConfig(c) +} + +// KeyAuthWithConfig returns an KeyAuth middleware with config. +// See `KeyAuth()`. 
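+//
+// Illustrative usage sketch; assumes an existing *echo.Echo named `e`, and the
+// header name and key comparison below are placeholders:
+//
+//	e.Use(middleware.KeyAuthWithConfig(middleware.KeyAuthConfig{
+//		KeyLookup: "header:X-Api-Key",
+//		Validator: func(key string, c echo.Context) (bool, error) {
+//			// Compare against a real secret store in practice.
+//			return key == "example-secret", nil
+//		},
+//	}))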
+func KeyAuthWithConfig(config KeyAuthConfig) echo.MiddlewareFunc { + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultKeyAuthConfig.Skipper + } + // Defaults + if config.AuthScheme == "" { + config.AuthScheme = DefaultKeyAuthConfig.AuthScheme + } + if config.KeyLookup == "" { + config.KeyLookup = DefaultKeyAuthConfig.KeyLookup + } + if config.Validator == nil { + panic("echo: key-auth middleware requires a validator function") + } + + extractors, cErr := createExtractors(config.KeyLookup, config.AuthScheme) + if cErr != nil { + panic(cErr) + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + var lastExtractorErr error + var lastValidatorErr error + for _, extractor := range extractors { + keys, err := extractor(c) + if err != nil { + lastExtractorErr = err + continue + } + for _, key := range keys { + valid, err := config.Validator(key, c) + if err != nil { + lastValidatorErr = err + continue + } + if valid { + return next(c) + } + lastValidatorErr = errors.New("invalid key") + } + } + + // we are here only when we did not successfully extract and validate any of keys + err := lastValidatorErr + if err == nil { // prioritize validator errors over extracting errors + // ugly part to preserve backwards compatible errors. someone could rely on them + if lastExtractorErr == errQueryExtractorValueMissing { + err = errors.New("missing key in the query string") + } else if lastExtractorErr == errCookieExtractorValueMissing { + err = errors.New("missing key in cookies") + } else if lastExtractorErr == errFormExtractorValueMissing { + err = errors.New("missing key in the form") + } else if lastExtractorErr == errHeaderExtractorValueMissing { + err = errors.New("missing key in request header") + } else if lastExtractorErr == errHeaderExtractorValueInvalid { + err = errors.New("invalid key in the request header") + } else { + err = lastExtractorErr + } + err = &ErrKeyAuthMissing{Err: err} + } + + if config.ErrorHandler != nil { + tmpErr := config.ErrorHandler(err, c) + if config.ContinueOnIgnoredError && tmpErr == nil { + return next(c) + } + return tmpErr + } + if lastValidatorErr != nil { // prioritize validator errors over extracting errors + return &echo.HTTPError{ + Code: http.StatusUnauthorized, + Message: "Unauthorized", + Internal: lastValidatorErr, + } + } + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/logger.go b/vendor/github.com/labstack/echo/v4/middleware/logger.go new file mode 100644 index 00000000..910fce8c --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/logger.go @@ -0,0 +1,244 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "bytes" + "encoding/json" + "io" + "strconv" + "strings" + "sync" + "time" + + "github.com/labstack/echo/v4" + "github.com/labstack/gommon/color" + "github.com/valyala/fasttemplate" +) + +// LoggerConfig defines the config for Logger middleware. +type LoggerConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Tags to construct the logger format. 
+ // + // - time_unix + // - time_unix_milli + // - time_unix_micro + // - time_unix_nano + // - time_rfc3339 + // - time_rfc3339_nano + // - time_custom + // - id (Request ID) + // - remote_ip + // - uri + // - host + // - method + // - path + // - route + // - protocol + // - referer + // - user_agent + // - status + // - error + // - latency (In nanoseconds) + // - latency_human (Human readable) + // - bytes_in (Bytes received) + // - bytes_out (Bytes sent) + // - header: + // - query: + // - form: + // - custom (see CustomTagFunc field) + // + // Example "${remote_ip} ${status}" + // + // Optional. Default value DefaultLoggerConfig.Format. + Format string `yaml:"format"` + + // Optional. Default value DefaultLoggerConfig.CustomTimeFormat. + CustomTimeFormat string `yaml:"custom_time_format"` + + // CustomTagFunc is function called for `${custom}` tag to output user implemented text by writing it to buf. + // Make sure that outputted text creates valid JSON string with other logged tags. + // Optional. + CustomTagFunc func(c echo.Context, buf *bytes.Buffer) (int, error) + + // Output is a writer where logs in JSON format are written. + // Optional. Default value os.Stdout. + Output io.Writer + + template *fasttemplate.Template + colorer *color.Color + pool *sync.Pool +} + +// DefaultLoggerConfig is the default Logger middleware config. +var DefaultLoggerConfig = LoggerConfig{ + Skipper: DefaultSkipper, + Format: `{"time":"${time_rfc3339_nano}","id":"${id}","remote_ip":"${remote_ip}",` + + `"host":"${host}","method":"${method}","uri":"${uri}","user_agent":"${user_agent}",` + + `"status":${status},"error":"${error}","latency":${latency},"latency_human":"${latency_human}"` + + `,"bytes_in":${bytes_in},"bytes_out":${bytes_out}}` + "\n", + CustomTimeFormat: "2006-01-02 15:04:05.00000", + colorer: color.New(), +} + +// Logger returns a middleware that logs HTTP requests. +func Logger() echo.MiddlewareFunc { + return LoggerWithConfig(DefaultLoggerConfig) +} + +// LoggerWithConfig returns a Logger middleware with config. +// See: `Logger()`. 
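+//
+// Illustrative usage sketch; assumes an existing *echo.Echo named `e` and uses
+// only tags documented above:
+//
+//	e.Use(middleware.LoggerWithConfig(middleware.LoggerConfig{
+//		Format: "${time_rfc3339} ${method} ${uri} ${status} ${latency_human}\n",
+//	}))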
+func LoggerWithConfig(config LoggerConfig) echo.MiddlewareFunc { + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultLoggerConfig.Skipper + } + if config.Format == "" { + config.Format = DefaultLoggerConfig.Format + } + if config.Output == nil { + config.Output = DefaultLoggerConfig.Output + } + + config.template = fasttemplate.New(config.Format, "${", "}") + config.colorer = color.New() + config.colorer.SetOutput(config.Output) + config.pool = &sync.Pool{ + New: func() interface{} { + return bytes.NewBuffer(make([]byte, 256)) + }, + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) (err error) { + if config.Skipper(c) { + return next(c) + } + + req := c.Request() + res := c.Response() + start := time.Now() + if err = next(c); err != nil { + c.Error(err) + } + stop := time.Now() + buf := config.pool.Get().(*bytes.Buffer) + buf.Reset() + defer config.pool.Put(buf) + + if _, err = config.template.ExecuteFunc(buf, func(w io.Writer, tag string) (int, error) { + switch tag { + case "custom": + if config.CustomTagFunc == nil { + return 0, nil + } + return config.CustomTagFunc(c, buf) + case "time_unix": + return buf.WriteString(strconv.FormatInt(time.Now().Unix(), 10)) + case "time_unix_milli": + // go 1.17 or later, it supports time#UnixMilli() + return buf.WriteString(strconv.FormatInt(time.Now().UnixNano()/1000000, 10)) + case "time_unix_micro": + // go 1.17 or later, it supports time#UnixMicro() + return buf.WriteString(strconv.FormatInt(time.Now().UnixNano()/1000, 10)) + case "time_unix_nano": + return buf.WriteString(strconv.FormatInt(time.Now().UnixNano(), 10)) + case "time_rfc3339": + return buf.WriteString(time.Now().Format(time.RFC3339)) + case "time_rfc3339_nano": + return buf.WriteString(time.Now().Format(time.RFC3339Nano)) + case "time_custom": + return buf.WriteString(time.Now().Format(config.CustomTimeFormat)) + case "id": + id := req.Header.Get(echo.HeaderXRequestID) + if id == "" { + id = res.Header().Get(echo.HeaderXRequestID) + } + return buf.WriteString(id) + case "remote_ip": + return buf.WriteString(c.RealIP()) + case "host": + return buf.WriteString(req.Host) + case "uri": + return buf.WriteString(req.RequestURI) + case "method": + return buf.WriteString(req.Method) + case "path": + p := req.URL.Path + if p == "" { + p = "/" + } + return buf.WriteString(p) + case "route": + return buf.WriteString(c.Path()) + case "protocol": + return buf.WriteString(req.Proto) + case "referer": + return buf.WriteString(req.Referer()) + case "user_agent": + return buf.WriteString(req.UserAgent()) + case "status": + n := res.Status + s := config.colorer.Green(n) + switch { + case n >= 500: + s = config.colorer.Red(n) + case n >= 400: + s = config.colorer.Yellow(n) + case n >= 300: + s = config.colorer.Cyan(n) + } + return buf.WriteString(s) + case "error": + if err != nil { + // Error may contain invalid JSON e.g. 
`"` + b, _ := json.Marshal(err.Error()) + b = b[1 : len(b)-1] + return buf.Write(b) + } + case "latency": + l := stop.Sub(start) + return buf.WriteString(strconv.FormatInt(int64(l), 10)) + case "latency_human": + return buf.WriteString(stop.Sub(start).String()) + case "bytes_in": + cl := req.Header.Get(echo.HeaderContentLength) + if cl == "" { + cl = "0" + } + return buf.WriteString(cl) + case "bytes_out": + return buf.WriteString(strconv.FormatInt(res.Size, 10)) + default: + switch { + case strings.HasPrefix(tag, "header:"): + return buf.Write([]byte(c.Request().Header.Get(tag[7:]))) + case strings.HasPrefix(tag, "query:"): + return buf.Write([]byte(c.QueryParam(tag[6:]))) + case strings.HasPrefix(tag, "form:"): + return buf.Write([]byte(c.FormValue(tag[5:]))) + case strings.HasPrefix(tag, "cookie:"): + cookie, err := c.Cookie(tag[7:]) + if err == nil { + return buf.Write([]byte(cookie.Value)) + } + } + } + return 0, nil + }); err != nil { + return + } + + if config.Output == nil { + _, err = c.Logger().Output().Write(buf.Bytes()) + return + } + _, err = config.Output.Write(buf.Bytes()) + return + } + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/method_override.go b/vendor/github.com/labstack/echo/v4/middleware/method_override.go new file mode 100644 index 00000000..3991e102 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/method_override.go @@ -0,0 +1,91 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "net/http" + + "github.com/labstack/echo/v4" +) + +// MethodOverrideConfig defines the config for MethodOverride middleware. +type MethodOverrideConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Getter is a function that gets overridden method from the request. + // Optional. Default values MethodFromHeader(echo.HeaderXHTTPMethodOverride). + Getter MethodOverrideGetter +} + +// MethodOverrideGetter is a function that gets overridden method from the request +type MethodOverrideGetter func(echo.Context) string + +// DefaultMethodOverrideConfig is the default MethodOverride middleware config. +var DefaultMethodOverrideConfig = MethodOverrideConfig{ + Skipper: DefaultSkipper, + Getter: MethodFromHeader(echo.HeaderXHTTPMethodOverride), +} + +// MethodOverride returns a MethodOverride middleware. +// MethodOverride middleware checks for the overridden method from the request and +// uses it instead of the original method. +// +// For security reasons, only `POST` method can be overridden. +func MethodOverride() echo.MiddlewareFunc { + return MethodOverrideWithConfig(DefaultMethodOverrideConfig) +} + +// MethodOverrideWithConfig returns a MethodOverride middleware with config. +// See: `MethodOverride()`. +func MethodOverrideWithConfig(config MethodOverrideConfig) echo.MiddlewareFunc { + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultMethodOverrideConfig.Skipper + } + if config.Getter == nil { + config.Getter = DefaultMethodOverrideConfig.Getter + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + req := c.Request() + if req.Method == http.MethodPost { + m := config.Getter(c) + if m != "" { + req.Method = m + } + } + return next(c) + } + } +} + +// MethodFromHeader is a `MethodOverrideGetter` that gets overridden method from +// the request header. 
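+//
+// Illustrative usage sketch; assumes an existing *echo.Echo named `e` and
+// registers the middleware with Echo#Pre so it runs before routing:
+//
+//	e.Pre(middleware.MethodOverrideWithConfig(middleware.MethodOverrideConfig{
+//		Getter: middleware.MethodFromHeader(echo.HeaderXHTTPMethodOverride),
+//	}))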
+func MethodFromHeader(header string) MethodOverrideGetter { + return func(c echo.Context) string { + return c.Request().Header.Get(header) + } +} + +// MethodFromForm is a `MethodOverrideGetter` that gets overridden method from the +// form parameter. +func MethodFromForm(param string) MethodOverrideGetter { + return func(c echo.Context) string { + return c.FormValue(param) + } +} + +// MethodFromQuery is a `MethodOverrideGetter` that gets overridden method from +// the query parameter. +func MethodFromQuery(param string) MethodOverrideGetter { + return func(c echo.Context) string { + return c.QueryParam(param) + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/middleware.go b/vendor/github.com/labstack/echo/v4/middleware/middleware.go new file mode 100644 index 00000000..6f33cc5c --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/middleware.go @@ -0,0 +1,90 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "net/http" + "regexp" + "strconv" + "strings" + + "github.com/labstack/echo/v4" +) + +// Skipper defines a function to skip middleware. Returning true skips processing +// the middleware. +type Skipper func(c echo.Context) bool + +// BeforeFunc defines a function which is executed just before the middleware. +type BeforeFunc func(c echo.Context) + +func captureTokens(pattern *regexp.Regexp, input string) *strings.Replacer { + groups := pattern.FindAllStringSubmatch(input, -1) + if groups == nil { + return nil + } + values := groups[0][1:] + replace := make([]string, 2*len(values)) + for i, v := range values { + j := 2 * i + replace[j] = "$" + strconv.Itoa(i+1) + replace[j+1] = v + } + return strings.NewReplacer(replace...) +} + +func rewriteRulesRegex(rewrite map[string]string) map[*regexp.Regexp]string { + // Initialize + rulesRegex := map[*regexp.Regexp]string{} + for k, v := range rewrite { + k = regexp.QuoteMeta(k) + k = strings.ReplaceAll(k, `\*`, "(.*?)") + if strings.HasPrefix(k, `\^`) { + k = strings.ReplaceAll(k, `\^`, "^") + } + k = k + "$" + rulesRegex[regexp.MustCompile(k)] = v + } + return rulesRegex +} + +func rewriteURL(rewriteRegex map[*regexp.Regexp]string, req *http.Request) error { + if len(rewriteRegex) == 0 { + return nil + } + + // Depending on how HTTP request is sent RequestURI could contain Scheme://Host/path or be just /path. + // We only want to use path part for rewriting and therefore trim prefix if it exists + rawURI := req.RequestURI + if rawURI != "" && rawURI[0] != '/' { + prefix := "" + if req.URL.Scheme != "" { + prefix = req.URL.Scheme + "://" + } + if req.URL.Host != "" { + prefix += req.URL.Host // host or host:port + } + if prefix != "" { + rawURI = strings.TrimPrefix(rawURI, prefix) + } + } + + for k, v := range rewriteRegex { + if replacer := captureTokens(k, rawURI); replacer != nil { + url, err := req.URL.Parse(replacer.Replace(v)) + if err != nil { + return err + } + req.URL = url + + return nil // rewrite only once + } + } + return nil +} + +// DefaultSkipper returns false which processes the middleware. 
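+//
+// A custom Skipper can bypass a middleware for selected requests; a minimal
+// sketch (the "/healthz" path and use of the Logger middleware are placeholders):
+//
+//	e.Use(middleware.LoggerWithConfig(middleware.LoggerConfig{
+//		Skipper: func(c echo.Context) bool {
+//			return c.Path() == "/healthz"
+//		},
+//	}))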
+func DefaultSkipper(echo.Context) bool { + return false +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/proxy.go b/vendor/github.com/labstack/echo/v4/middleware/proxy.go new file mode 100644 index 00000000..2744bc4a --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/proxy.go @@ -0,0 +1,434 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "context" + "crypto/tls" + "fmt" + "io" + "math/rand" + "net" + "net/http" + "net/http/httputil" + "net/url" + "regexp" + "strings" + "sync" + "time" + + "github.com/labstack/echo/v4" +) + +// TODO: Handle TLS proxy + +// ProxyConfig defines the config for Proxy middleware. +type ProxyConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Balancer defines a load balancing technique. + // Required. + Balancer ProxyBalancer + + // RetryCount defines the number of times a failed proxied request should be retried + // using the next available ProxyTarget. Defaults to 0, meaning requests are never retried. + RetryCount int + + // RetryFilter defines a function used to determine if a failed request to a + // ProxyTarget should be retried. The RetryFilter will only be called when the number + // of previous retries is less than RetryCount. If the function returns true, the + // request will be retried. The provided error indicates the reason for the request + // failure. When the ProxyTarget is unavailable, the error will be an instance of + // echo.HTTPError with a Code of http.StatusBadGateway. In all other cases, the error + // will indicate an internal error in the Proxy middleware. When a RetryFilter is not + // specified, all requests that fail with http.StatusBadGateway will be retried. A custom + // RetryFilter can be provided to only retry specific requests. Note that RetryFilter is + // only called when the request to the target fails, or an internal error in the Proxy + // middleware has occurred. Successful requests that return a non-200 response code cannot + // be retried. + RetryFilter func(c echo.Context, e error) bool + + // ErrorHandler defines a function which can be used to return custom errors from + // the Proxy middleware. ErrorHandler is only invoked when there has been + // either an internal error in the Proxy middleware or the ProxyTarget is + // unavailable. Due to the way requests are proxied, ErrorHandler is not invoked + // when a ProxyTarget returns a non-200 response. In these cases, the response + // is already written so errors cannot be modified. ErrorHandler is only + // invoked after all retry attempts have been exhausted. + ErrorHandler func(c echo.Context, err error) error + + // Rewrite defines URL path rewrite rules. The values captured in asterisk can be + // retrieved by index e.g. $1, $2 and so on. + // Examples: + // "/old": "/new", + // "/api/*": "/$1", + // "/js/*": "/public/javascripts/$1", + // "/users/*/orders/*": "/user/$1/order/$2", + Rewrite map[string]string + + // RegexRewrite defines rewrite rules using regexp.Rexexp with captures + // Every capture group in the values can be retrieved by index e.g. $1, $2 and so on. + // Example: + // "^/old/[0.9]+/": "/new", + // "^/api/.+?/(.*)": "/v2/$1", + RegexRewrite map[*regexp.Regexp]string + + // Context key to store selected ProxyTarget into context. + // Optional. Default value "target". + ContextKey string + + // To customize the transport to remote. 
+ // Examples: If custom TLS certificates are required. + Transport http.RoundTripper + + // ModifyResponse defines function to modify response from ProxyTarget. + ModifyResponse func(*http.Response) error +} + +// ProxyTarget defines the upstream target. +type ProxyTarget struct { + Name string + URL *url.URL + Meta echo.Map +} + +// ProxyBalancer defines an interface to implement a load balancing technique. +type ProxyBalancer interface { + AddTarget(*ProxyTarget) bool + RemoveTarget(string) bool + Next(echo.Context) *ProxyTarget +} + +// TargetProvider defines an interface that gives the opportunity for balancer +// to return custom errors when selecting target. +type TargetProvider interface { + NextTarget(echo.Context) (*ProxyTarget, error) +} + +type commonBalancer struct { + targets []*ProxyTarget + mutex sync.Mutex +} + +// RandomBalancer implements a random load balancing technique. +type randomBalancer struct { + commonBalancer + random *rand.Rand +} + +// RoundRobinBalancer implements a round-robin load balancing technique. +type roundRobinBalancer struct { + commonBalancer + // tracking the index on `targets` slice for the next `*ProxyTarget` to be used + i int +} + +// DefaultProxyConfig is the default Proxy middleware config. +var DefaultProxyConfig = ProxyConfig{ + Skipper: DefaultSkipper, + ContextKey: "target", +} + +func proxyRaw(t *ProxyTarget, c echo.Context, config ProxyConfig) http.Handler { + var dialFunc func(ctx context.Context, network, addr string) (net.Conn, error) + if transport, ok := config.Transport.(*http.Transport); ok { + if transport.TLSClientConfig != nil { + d := tls.Dialer{ + Config: transport.TLSClientConfig, + } + dialFunc = d.DialContext + } + } + if dialFunc == nil { + var d net.Dialer + dialFunc = d.DialContext + } + + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + in, _, err := c.Response().Hijack() + if err != nil { + c.Set("_error", fmt.Errorf("proxy raw, hijack error=%w, url=%s", err, t.URL)) + return + } + defer in.Close() + out, err := dialFunc(c.Request().Context(), "tcp", t.URL.Host) + if err != nil { + c.Set("_error", echo.NewHTTPError(http.StatusBadGateway, fmt.Sprintf("proxy raw, dial error=%v, url=%s", err, t.URL))) + return + } + + // Write header + err = r.Write(out) + if err != nil { + c.Set("_error", echo.NewHTTPError(http.StatusBadGateway, fmt.Sprintf("proxy raw, request header copy error=%v, url=%s", err, t.URL))) + return + } + + errCh := make(chan error, 2) + cp := func(dst io.Writer, src io.Reader) { + _, err = io.Copy(dst, src) + errCh <- err + } + + go cp(out, in) + go cp(in, out) + err = <-errCh + if err != nil && err != io.EOF { + c.Set("_error", fmt.Errorf("proxy raw, copy body error=%w, url=%s", err, t.URL)) + } + }) +} + +// NewRandomBalancer returns a random proxy balancer. +func NewRandomBalancer(targets []*ProxyTarget) ProxyBalancer { + b := randomBalancer{} + b.targets = targets + b.random = rand.New(rand.NewSource(int64(time.Now().Nanosecond()))) + return &b +} + +// NewRoundRobinBalancer returns a round-robin proxy balancer. +func NewRoundRobinBalancer(targets []*ProxyTarget) ProxyBalancer { + b := roundRobinBalancer{} + b.targets = targets + return &b +} + +// AddTarget adds an upstream target to the list and returns `true`. +// +// However, if a target with the same name already exists then the operation is aborted returning `false`. 
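+//
+// Illustrative sketch of building a balancer and adding targets; the URLs and
+// names are placeholders and `e` is assumed to be an existing *echo.Echo:
+//
+//	u1, _ := url.Parse("http://127.0.0.1:8081")
+//	u2, _ := url.Parse("http://127.0.0.1:8082")
+//	balancer := middleware.NewRoundRobinBalancer([]*middleware.ProxyTarget{
+//		{Name: "upstream-1", URL: u1},
+//	})
+//	balancer.AddTarget(&middleware.ProxyTarget{Name: "upstream-2", URL: u2})
+//	e.Use(middleware.Proxy(balancer))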
+func (b *commonBalancer) AddTarget(target *ProxyTarget) bool { + b.mutex.Lock() + defer b.mutex.Unlock() + for _, t := range b.targets { + if t.Name == target.Name { + return false + } + } + b.targets = append(b.targets, target) + return true +} + +// RemoveTarget removes an upstream target from the list by name. +// +// Returns `true` on success, `false` if no target with the name is found. +func (b *commonBalancer) RemoveTarget(name string) bool { + b.mutex.Lock() + defer b.mutex.Unlock() + for i, t := range b.targets { + if t.Name == name { + b.targets = append(b.targets[:i], b.targets[i+1:]...) + return true + } + } + return false +} + +// Next randomly returns an upstream target. +// +// Note: `nil` is returned in case upstream target list is empty. +func (b *randomBalancer) Next(c echo.Context) *ProxyTarget { + b.mutex.Lock() + defer b.mutex.Unlock() + if len(b.targets) == 0 { + return nil + } else if len(b.targets) == 1 { + return b.targets[0] + } + return b.targets[b.random.Intn(len(b.targets))] +} + +// Next returns an upstream target using round-robin technique. In the case +// where a previously failed request is being retried, the round-robin +// balancer will attempt to use the next target relative to the original +// request. If the list of targets held by the balancer is modified while a +// failed request is being retried, it is possible that the balancer will +// return the original failed target. +// +// Note: `nil` is returned in case upstream target list is empty. +func (b *roundRobinBalancer) Next(c echo.Context) *ProxyTarget { + b.mutex.Lock() + defer b.mutex.Unlock() + if len(b.targets) == 0 { + return nil + } else if len(b.targets) == 1 { + return b.targets[0] + } + + var i int + const lastIdxKey = "_round_robin_last_index" + // This request is a retry, start from the index of the previous + // target to ensure we don't attempt to retry the request with + // the same failed target + if c.Get(lastIdxKey) != nil { + i = c.Get(lastIdxKey).(int) + i++ + if i >= len(b.targets) { + i = 0 + } + } else { + // This is a first time request, use the global index + if b.i >= len(b.targets) { + b.i = 0 + } + i = b.i + b.i++ + } + + c.Set(lastIdxKey, i) + return b.targets[i] +} + +// Proxy returns a Proxy middleware. +// +// Proxy middleware forwards the request to upstream server using a configured load balancing technique. +func Proxy(balancer ProxyBalancer) echo.MiddlewareFunc { + c := DefaultProxyConfig + c.Balancer = balancer + return ProxyWithConfig(c) +} + +// ProxyWithConfig returns a Proxy middleware with config. 
+// See: `Proxy()` +func ProxyWithConfig(config ProxyConfig) echo.MiddlewareFunc { + if config.Balancer == nil { + panic("echo: proxy middleware requires balancer") + } + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultProxyConfig.Skipper + } + if config.RetryFilter == nil { + config.RetryFilter = func(c echo.Context, e error) bool { + if httpErr, ok := e.(*echo.HTTPError); ok { + return httpErr.Code == http.StatusBadGateway + } + return false + } + } + if config.ErrorHandler == nil { + config.ErrorHandler = func(c echo.Context, err error) error { + return err + } + } + if config.Rewrite != nil { + if config.RegexRewrite == nil { + config.RegexRewrite = make(map[*regexp.Regexp]string) + } + for k, v := range rewriteRulesRegex(config.Rewrite) { + config.RegexRewrite[k] = v + } + } + + provider, isTargetProvider := config.Balancer.(TargetProvider) + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + req := c.Request() + res := c.Response() + if err := rewriteURL(config.RegexRewrite, req); err != nil { + return config.ErrorHandler(c, err) + } + + // Fix header + // Basically it's not good practice to unconditionally pass incoming x-real-ip header to upstream. + // However, for backward compatibility, legacy behavior is preserved unless you configure Echo#IPExtractor. + if req.Header.Get(echo.HeaderXRealIP) == "" || c.Echo().IPExtractor != nil { + req.Header.Set(echo.HeaderXRealIP, c.RealIP()) + } + if req.Header.Get(echo.HeaderXForwardedProto) == "" { + req.Header.Set(echo.HeaderXForwardedProto, c.Scheme()) + } + if c.IsWebSocket() && req.Header.Get(echo.HeaderXForwardedFor) == "" { // For HTTP, it is automatically set by Go HTTP reverse proxy. + req.Header.Set(echo.HeaderXForwardedFor, c.RealIP()) + } + + retries := config.RetryCount + for { + var tgt *ProxyTarget + var err error + if isTargetProvider { + tgt, err = provider.NextTarget(c) + if err != nil { + return config.ErrorHandler(c, err) + } + } else { + tgt = config.Balancer.Next(c) + } + + c.Set(config.ContextKey, tgt) + + //If retrying a failed request, clear any previous errors from + //context here so that balancers have the option to check for + //errors that occurred using previous target + if retries < config.RetryCount { + c.Set("_error", nil) + } + + // This is needed for ProxyConfig.ModifyResponse and/or ProxyConfig.Transport to be able to process the Request + // that Balancer may have replaced with c.SetRequest. + req = c.Request() + + // Proxy + switch { + case c.IsWebSocket(): + proxyRaw(tgt, c, config).ServeHTTP(res, req) + default: // even SSE requests + proxyHTTP(tgt, c, config).ServeHTTP(res, req) + } + + err, hasError := c.Get("_error").(error) + if !hasError { + return nil + } + + retry := retries > 0 && config.RetryFilter(c, err) + if !retry { + return config.ErrorHandler(c, err) + } + + retries-- + } + } + } +} + +// StatusCodeContextCanceled is a custom HTTP status code for situations +// where a client unexpectedly closed the connection to the server. 
+// As there is no standard error code for "client closed connection", but +// various well-known HTTP clients and server implement this HTTP code we use +// 499 too instead of the more problematic 5xx, which does not allow to detect this situation +const StatusCodeContextCanceled = 499 + +func proxyHTTP(tgt *ProxyTarget, c echo.Context, config ProxyConfig) http.Handler { + proxy := httputil.NewSingleHostReverseProxy(tgt.URL) + proxy.ErrorHandler = func(resp http.ResponseWriter, req *http.Request, err error) { + desc := tgt.URL.String() + if tgt.Name != "" { + desc = fmt.Sprintf("%s(%s)", tgt.Name, tgt.URL.String()) + } + // If the client canceled the request (usually by closing the connection), we can report a + // client error (4xx) instead of a server error (5xx) to correctly identify the situation. + // The Go standard library (at of late 2020) wraps the exported, standard + // context.Canceled error with unexported garbage value requiring a substring check, see + // https://github.com/golang/go/blob/6965b01ea248cabb70c3749fd218b36089a21efb/src/net/net.go#L416-L430 + if err == context.Canceled || strings.Contains(err.Error(), "operation was canceled") { + httpError := echo.NewHTTPError(StatusCodeContextCanceled, fmt.Sprintf("client closed connection: %v", err)) + httpError.Internal = err + c.Set("_error", httpError) + } else { + httpError := echo.NewHTTPError(http.StatusBadGateway, fmt.Sprintf("remote %s unreachable, could not forward: %v", desc, err)) + httpError.Internal = err + c.Set("_error", httpError) + } + } + proxy.Transport = config.Transport + proxy.ModifyResponse = config.ModifyResponse + return proxy +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/rate_limiter.go b/vendor/github.com/labstack/echo/v4/middleware/rate_limiter.go new file mode 100644 index 00000000..70b89b0e --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/rate_limiter.go @@ -0,0 +1,267 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "net/http" + "sync" + "time" + + "github.com/labstack/echo/v4" + "golang.org/x/time/rate" +) + +// RateLimiterStore is the interface to be implemented by custom stores. 
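+//
+// Minimal sketch of a custom store that never limits; a real implementation
+// would track per-identifier state (e.g. in memory or Redis), and `e` is
+// assumed to be an existing *echo.Echo:
+//
+//	type allowAllStore struct{}
+//
+//	func (allowAllStore) Allow(identifier string) (bool, error) {
+//		return true, nil
+//	}
+//
+//	e.Use(middleware.RateLimiter(allowAllStore{}))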
+type RateLimiterStore interface { + // Stores for the rate limiter have to implement the Allow method + Allow(identifier string) (bool, error) +} + +// RateLimiterConfig defines the configuration for the rate limiter +type RateLimiterConfig struct { + Skipper Skipper + BeforeFunc BeforeFunc + // IdentifierExtractor uses echo.Context to extract the identifier for a visitor + IdentifierExtractor Extractor + // Store defines a store for the rate limiter + Store RateLimiterStore + // ErrorHandler provides a handler to be called when IdentifierExtractor returns an error + ErrorHandler func(context echo.Context, err error) error + // DenyHandler provides a handler to be called when RateLimiter denies access + DenyHandler func(context echo.Context, identifier string, err error) error +} + +// Extractor is used to extract data from echo.Context +type Extractor func(context echo.Context) (string, error) + +// ErrRateLimitExceeded denotes an error raised when rate limit is exceeded +var ErrRateLimitExceeded = echo.NewHTTPError(http.StatusTooManyRequests, "rate limit exceeded") + +// ErrExtractorError denotes an error raised when extractor function is unsuccessful +var ErrExtractorError = echo.NewHTTPError(http.StatusForbidden, "error while extracting identifier") + +// DefaultRateLimiterConfig defines default values for RateLimiterConfig +var DefaultRateLimiterConfig = RateLimiterConfig{ + Skipper: DefaultSkipper, + IdentifierExtractor: func(ctx echo.Context) (string, error) { + id := ctx.RealIP() + return id, nil + }, + ErrorHandler: func(context echo.Context, err error) error { + return &echo.HTTPError{ + Code: ErrExtractorError.Code, + Message: ErrExtractorError.Message, + Internal: err, + } + }, + DenyHandler: func(context echo.Context, identifier string, err error) error { + return &echo.HTTPError{ + Code: ErrRateLimitExceeded.Code, + Message: ErrRateLimitExceeded.Message, + Internal: err, + } + }, +} + +/* +RateLimiter returns a rate limiting middleware + + e := echo.New() + + limiterStore := middleware.NewRateLimiterMemoryStore(20) + + e.GET("/rate-limited", func(c echo.Context) error { + return c.String(http.StatusOK, "test") + }, RateLimiter(limiterStore)) +*/ +func RateLimiter(store RateLimiterStore) echo.MiddlewareFunc { + config := DefaultRateLimiterConfig + config.Store = store + + return RateLimiterWithConfig(config) +} + +/* +RateLimiterWithConfig returns a rate limiting middleware + + e := echo.New() + + config := middleware.RateLimiterConfig{ + Skipper: DefaultSkipper, + Store: middleware.NewRateLimiterMemoryStore( + middleware.RateLimiterMemoryStoreConfig{Rate: 10, Burst: 30, ExpiresIn: 3 * time.Minute} + ) + IdentifierExtractor: func(ctx echo.Context) (string, error) { + id := ctx.RealIP() + return id, nil + }, + ErrorHandler: func(context echo.Context, err error) error { + return context.JSON(http.StatusTooManyRequests, nil) + }, + DenyHandler: func(context echo.Context, identifier string) error { + return context.JSON(http.StatusForbidden, nil) + }, + } + + e.GET("/rate-limited", func(c echo.Context) error { + return c.String(http.StatusOK, "test") + }, middleware.RateLimiterWithConfig(config)) +*/ +func RateLimiterWithConfig(config RateLimiterConfig) echo.MiddlewareFunc { + if config.Skipper == nil { + config.Skipper = DefaultRateLimiterConfig.Skipper + } + if config.IdentifierExtractor == nil { + config.IdentifierExtractor = DefaultRateLimiterConfig.IdentifierExtractor + } + if config.ErrorHandler == nil { + config.ErrorHandler = DefaultRateLimiterConfig.ErrorHandler + } + if 
config.DenyHandler == nil { + config.DenyHandler = DefaultRateLimiterConfig.DenyHandler + } + if config.Store == nil { + panic("Store configuration must be provided") + } + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + if config.BeforeFunc != nil { + config.BeforeFunc(c) + } + + identifier, err := config.IdentifierExtractor(c) + if err != nil { + c.Error(config.ErrorHandler(c, err)) + return nil + } + + if allow, err := config.Store.Allow(identifier); !allow { + c.Error(config.DenyHandler(c, identifier, err)) + return nil + } + return next(c) + } + } +} + +// RateLimiterMemoryStore is the built-in store implementation for RateLimiter +type RateLimiterMemoryStore struct { + visitors map[string]*Visitor + mutex sync.Mutex + rate rate.Limit // for more info check out Limiter docs - https://pkg.go.dev/golang.org/x/time/rate#Limit. + + burst int + expiresIn time.Duration + lastCleanup time.Time + + timeNow func() time.Time +} + +// Visitor signifies a unique user's limiter details +type Visitor struct { + *rate.Limiter + lastSeen time.Time +} + +/* +NewRateLimiterMemoryStore returns an instance of RateLimiterMemoryStore with +the provided rate (as req/s). +for more info check out Limiter docs - https://pkg.go.dev/golang.org/x/time/rate#Limit. + +Burst and ExpiresIn will be set to default values. + +Note that if the provided rate is a float number and Burst is zero, Burst will be treated as the rounded down value of the rate. + +Example (with 20 requests/sec): + + limiterStore := middleware.NewRateLimiterMemoryStore(20) +*/ +func NewRateLimiterMemoryStore(rate rate.Limit) (store *RateLimiterMemoryStore) { + return NewRateLimiterMemoryStoreWithConfig(RateLimiterMemoryStoreConfig{ + Rate: rate, + }) +} + +/* +NewRateLimiterMemoryStoreWithConfig returns an instance of RateLimiterMemoryStore +with the provided configuration. Rate must be provided. Burst will be set to the rounded down value of +the configured rate if not provided or set to 0. + +The built-in memory store is usually capable for modest loads. For higher loads other +store implementations should be considered. + +Characteristics: +* Concurrency above 100 parallel requests may causes measurable lock contention +* A high number of different IP addresses (above 16000) may be impacted by the internally used Go map +* A high number of requests from a single IP address may cause lock contention + +Example: + + limiterStore := middleware.NewRateLimiterMemoryStoreWithConfig( + middleware.RateLimiterMemoryStoreConfig{Rate: 50, Burst: 200, ExpiresIn: 5 * time.Minute}, + ) +*/ +func NewRateLimiterMemoryStoreWithConfig(config RateLimiterMemoryStoreConfig) (store *RateLimiterMemoryStore) { + store = &RateLimiterMemoryStore{} + + store.rate = config.Rate + store.burst = config.Burst + store.expiresIn = config.ExpiresIn + if config.ExpiresIn == 0 { + store.expiresIn = DefaultRateLimiterMemoryStoreConfig.ExpiresIn + } + if config.Burst == 0 { + store.burst = int(config.Rate) + } + store.visitors = make(map[string]*Visitor) + store.timeNow = time.Now + store.lastCleanup = store.timeNow() + return +} + +// RateLimiterMemoryStoreConfig represents configuration for RateLimiterMemoryStore +type RateLimiterMemoryStoreConfig struct { + Rate rate.Limit // Rate of requests allowed to pass as req/s. For more info check out Limiter docs - https://pkg.go.dev/golang.org/x/time/rate#Limit. + Burst int // Burst is maximum number of requests to pass at the same moment. 
It additionally allows a number of requests to pass when rate limit is reached. + ExpiresIn time.Duration // ExpiresIn is the duration after that a rate limiter is cleaned up +} + +// DefaultRateLimiterMemoryStoreConfig provides default configuration values for RateLimiterMemoryStore +var DefaultRateLimiterMemoryStoreConfig = RateLimiterMemoryStoreConfig{ + ExpiresIn: 3 * time.Minute, +} + +// Allow implements RateLimiterStore.Allow +func (store *RateLimiterMemoryStore) Allow(identifier string) (bool, error) { + store.mutex.Lock() + limiter, exists := store.visitors[identifier] + if !exists { + limiter = new(Visitor) + limiter.Limiter = rate.NewLimiter(store.rate, store.burst) + store.visitors[identifier] = limiter + } + now := store.timeNow() + limiter.lastSeen = now + if now.Sub(store.lastCleanup) > store.expiresIn { + store.cleanupStaleVisitors() + } + store.mutex.Unlock() + return limiter.AllowN(store.timeNow(), 1), nil +} + +/* +cleanupStaleVisitors helps manage the size of the visitors map by removing stale records +of users who haven't visited again after the configured expiry time has elapsed +*/ +func (store *RateLimiterMemoryStore) cleanupStaleVisitors() { + for id, visitor := range store.visitors { + if store.timeNow().Sub(visitor.lastSeen) > store.expiresIn { + delete(store.visitors, id) + } + } + store.lastCleanup = store.timeNow() +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/recover.go b/vendor/github.com/labstack/echo/v4/middleware/recover.go new file mode 100644 index 00000000..e6a5940e --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/recover.go @@ -0,0 +1,133 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "fmt" + "net/http" + "runtime" + + "github.com/labstack/echo/v4" + "github.com/labstack/gommon/log" +) + +// LogErrorFunc defines a function for custom logging in the middleware. +type LogErrorFunc func(c echo.Context, err error, stack []byte) error + +// RecoverConfig defines the config for Recover middleware. +type RecoverConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Size of the stack to be printed. + // Optional. Default value 4KB. + StackSize int `yaml:"stack_size"` + + // DisableStackAll disables formatting stack traces of all other goroutines + // into buffer after the trace for the current goroutine. + // Optional. Default value false. + DisableStackAll bool `yaml:"disable_stack_all"` + + // DisablePrintStack disables printing stack trace. + // Optional. Default value as false. + DisablePrintStack bool `yaml:"disable_print_stack"` + + // LogLevel is log level to printing stack trace. + // Optional. Default value 0 (Print). + LogLevel log.Lvl + + // LogErrorFunc defines a function for custom logging in the middleware. + // If it's set you don't need to provide LogLevel for config. + // If this function returns nil, the centralized HTTPErrorHandler will not be called. + LogErrorFunc LogErrorFunc + + // DisableErrorHandler disables the call to centralized HTTPErrorHandler. + // The recovered error is then passed back to upstream middleware, instead of swallowing the error. + // Optional. Default value false. + DisableErrorHandler bool `yaml:"disable_error_handler"` +} + +// DefaultRecoverConfig is the default Recover middleware config. 
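+//
+// These defaults can be overridden via RecoverWithConfig; a minimal sketch,
+// assuming an existing *echo.Echo named `e`:
+//
+//	e.Use(middleware.RecoverWithConfig(middleware.RecoverConfig{
+//		StackSize: 1 << 10, // 1 KB
+//		LogErrorFunc: func(c echo.Context, err error, stack []byte) error {
+//			c.Logger().Error(err)
+//			return err
+//		},
+//	}))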
+var DefaultRecoverConfig = RecoverConfig{ + Skipper: DefaultSkipper, + StackSize: 4 << 10, // 4 KB + DisableStackAll: false, + DisablePrintStack: false, + LogLevel: 0, + LogErrorFunc: nil, + DisableErrorHandler: false, +} + +// Recover returns a middleware which recovers from panics anywhere in the chain +// and handles the control to the centralized HTTPErrorHandler. +func Recover() echo.MiddlewareFunc { + return RecoverWithConfig(DefaultRecoverConfig) +} + +// RecoverWithConfig returns a Recover middleware with config. +// See: `Recover()`. +func RecoverWithConfig(config RecoverConfig) echo.MiddlewareFunc { + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultRecoverConfig.Skipper + } + if config.StackSize == 0 { + config.StackSize = DefaultRecoverConfig.StackSize + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) (returnErr error) { + if config.Skipper(c) { + return next(c) + } + + defer func() { + if r := recover(); r != nil { + if r == http.ErrAbortHandler { + panic(r) + } + err, ok := r.(error) + if !ok { + err = fmt.Errorf("%v", r) + } + var stack []byte + var length int + + if !config.DisablePrintStack { + stack = make([]byte, config.StackSize) + length = runtime.Stack(stack, !config.DisableStackAll) + stack = stack[:length] + } + + if config.LogErrorFunc != nil { + err = config.LogErrorFunc(c, err, stack) + } else if !config.DisablePrintStack { + msg := fmt.Sprintf("[PANIC RECOVER] %v %s\n", err, stack[:length]) + switch config.LogLevel { + case log.DEBUG: + c.Logger().Debug(msg) + case log.INFO: + c.Logger().Info(msg) + case log.WARN: + c.Logger().Warn(msg) + case log.ERROR: + c.Logger().Error(msg) + case log.OFF: + // None. + default: + c.Logger().Print(msg) + } + } + + if err != nil && !config.DisableErrorHandler { + c.Error(err) + } else { + returnErr = err + } + } + }() + return next(c) + } + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/redirect.go b/vendor/github.com/labstack/echo/v4/middleware/redirect.go new file mode 100644 index 00000000..b772ac13 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/redirect.go @@ -0,0 +1,155 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "net/http" + "strings" + + "github.com/labstack/echo/v4" +) + +// RedirectConfig defines the config for Redirect middleware. +type RedirectConfig struct { + // Skipper defines a function to skip middleware. + Skipper + + // Status code to be used when redirecting the request. + // Optional. Default value http.StatusMovedPermanently. + Code int `yaml:"code"` +} + +// redirectLogic represents a function that given a scheme, host and uri +// can both: 1) determine if redirect is needed (will set ok accordingly) and +// 2) return the appropriate redirect url. +type redirectLogic func(scheme, host, uri string) (ok bool, url string) + +const www = "www." + +// DefaultRedirectConfig is the default Redirect middleware config. +var DefaultRedirectConfig = RedirectConfig{ + Skipper: DefaultSkipper, + Code: http.StatusMovedPermanently, +} + +// HTTPSRedirect redirects http requests to https. +// For example, http://labstack.com will be redirect to https://labstack.com. +// +// Usage `Echo#Pre(HTTPSRedirect())` +func HTTPSRedirect() echo.MiddlewareFunc { + return HTTPSRedirectWithConfig(DefaultRedirectConfig) +} + +// HTTPSRedirectWithConfig returns an HTTPSRedirect middleware with config. +// See `HTTPSRedirect()`. 
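+//
+// Illustrative usage sketch; assumes an existing *echo.Echo named `e`:
+//
+//	e.Pre(middleware.HTTPSRedirectWithConfig(middleware.RedirectConfig{
+//		Code: http.StatusTemporaryRedirect,
+//	}))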
+func HTTPSRedirectWithConfig(config RedirectConfig) echo.MiddlewareFunc { + return redirect(config, func(scheme, host, uri string) (bool, string) { + if scheme != "https" { + return true, "https://" + host + uri + } + return false, "" + }) +} + +// HTTPSWWWRedirect redirects http requests to https www. +// For example, http://labstack.com will be redirect to https://www.labstack.com. +// +// Usage `Echo#Pre(HTTPSWWWRedirect())` +func HTTPSWWWRedirect() echo.MiddlewareFunc { + return HTTPSWWWRedirectWithConfig(DefaultRedirectConfig) +} + +// HTTPSWWWRedirectWithConfig returns an HTTPSRedirect middleware with config. +// See `HTTPSWWWRedirect()`. +func HTTPSWWWRedirectWithConfig(config RedirectConfig) echo.MiddlewareFunc { + return redirect(config, func(scheme, host, uri string) (bool, string) { + if scheme != "https" && !strings.HasPrefix(host, www) { + return true, "https://www." + host + uri + } + return false, "" + }) +} + +// HTTPSNonWWWRedirect redirects http requests to https non www. +// For example, http://www.labstack.com will be redirect to https://labstack.com. +// +// Usage `Echo#Pre(HTTPSNonWWWRedirect())` +func HTTPSNonWWWRedirect() echo.MiddlewareFunc { + return HTTPSNonWWWRedirectWithConfig(DefaultRedirectConfig) +} + +// HTTPSNonWWWRedirectWithConfig returns an HTTPSRedirect middleware with config. +// See `HTTPSNonWWWRedirect()`. +func HTTPSNonWWWRedirectWithConfig(config RedirectConfig) echo.MiddlewareFunc { + return redirect(config, func(scheme, host, uri string) (ok bool, url string) { + if scheme != "https" { + host = strings.TrimPrefix(host, www) + return true, "https://" + host + uri + } + return false, "" + }) +} + +// WWWRedirect redirects non www requests to www. +// For example, http://labstack.com will be redirect to http://www.labstack.com. +// +// Usage `Echo#Pre(WWWRedirect())` +func WWWRedirect() echo.MiddlewareFunc { + return WWWRedirectWithConfig(DefaultRedirectConfig) +} + +// WWWRedirectWithConfig returns an HTTPSRedirect middleware with config. +// See `WWWRedirect()`. +func WWWRedirectWithConfig(config RedirectConfig) echo.MiddlewareFunc { + return redirect(config, func(scheme, host, uri string) (bool, string) { + if !strings.HasPrefix(host, www) { + return true, scheme + "://www." + host + uri + } + return false, "" + }) +} + +// NonWWWRedirect redirects www requests to non www. +// For example, http://www.labstack.com will be redirect to http://labstack.com. +// +// Usage `Echo#Pre(NonWWWRedirect())` +func NonWWWRedirect() echo.MiddlewareFunc { + return NonWWWRedirectWithConfig(DefaultRedirectConfig) +} + +// NonWWWRedirectWithConfig returns an HTTPSRedirect middleware with config. +// See `NonWWWRedirect()`. 
+func NonWWWRedirectWithConfig(config RedirectConfig) echo.MiddlewareFunc { + return redirect(config, func(scheme, host, uri string) (bool, string) { + if strings.HasPrefix(host, www) { + return true, scheme + "://" + host[4:] + uri + } + return false, "" + }) +} + +func redirect(config RedirectConfig, cb redirectLogic) echo.MiddlewareFunc { + if config.Skipper == nil { + config.Skipper = DefaultRedirectConfig.Skipper + } + if config.Code == 0 { + config.Code = DefaultRedirectConfig.Code + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + req, scheme := c.Request(), c.Scheme() + host := req.Host + if ok, url := cb(scheme, host, req.RequestURI); ok { + return c.Redirect(config.Code, url) + } + + return next(c) + } + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/request_id.go b/vendor/github.com/labstack/echo/v4/middleware/request_id.go new file mode 100644 index 00000000..14bd4fd1 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/request_id.go @@ -0,0 +1,75 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "github.com/labstack/echo/v4" +) + +// RequestIDConfig defines the config for RequestID middleware. +type RequestIDConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Generator defines a function to generate an ID. + // Optional. Defaults to generator for random string of length 32. + Generator func() string + + // RequestIDHandler defines a function which is executed for a request id. + RequestIDHandler func(echo.Context, string) + + // TargetHeader defines what header to look for to populate the id + TargetHeader string +} + +// DefaultRequestIDConfig is the default RequestID middleware config. +var DefaultRequestIDConfig = RequestIDConfig{ + Skipper: DefaultSkipper, + Generator: generator, + TargetHeader: echo.HeaderXRequestID, +} + +// RequestID returns a X-Request-ID middleware. +func RequestID() echo.MiddlewareFunc { + return RequestIDWithConfig(DefaultRequestIDConfig) +} + +// RequestIDWithConfig returns a X-Request-ID middleware with config. 
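+//
+// Illustrative usage sketch; assumes an existing *echo.Echo named `e`, and the
+// header name and context key below are placeholders:
+//
+//	e.Use(middleware.RequestIDWithConfig(middleware.RequestIDConfig{
+//		TargetHeader: "X-Correlation-ID",
+//		RequestIDHandler: func(c echo.Context, rid string) {
+//			c.Set("request_id", rid)
+//		},
+//	}))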
+func RequestIDWithConfig(config RequestIDConfig) echo.MiddlewareFunc { + // Defaults + if config.Skipper == nil { + config.Skipper = DefaultRequestIDConfig.Skipper + } + if config.Generator == nil { + config.Generator = generator + } + if config.TargetHeader == "" { + config.TargetHeader = echo.HeaderXRequestID + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + req := c.Request() + res := c.Response() + rid := req.Header.Get(config.TargetHeader) + if rid == "" { + rid = config.Generator() + } + res.Header().Set(config.TargetHeader, rid) + if config.RequestIDHandler != nil { + config.RequestIDHandler(c, rid) + } + + return next(c) + } + } +} + +func generator() string { + return randomString(32) +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/request_logger.go b/vendor/github.com/labstack/echo/v4/middleware/request_logger.go new file mode 100644 index 00000000..7c18200b --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/request_logger.go @@ -0,0 +1,391 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "errors" + "net/http" + "time" + + "github.com/labstack/echo/v4" +) + +// Example for `slog` https://pkg.go.dev/log/slog +// logger := slog.New(slog.NewJSONHandler(os.Stdout, nil)) +// e.Use(middleware.RequestLoggerWithConfig(middleware.RequestLoggerConfig{ +// LogStatus: true, +// LogURI: true, +// LogError: true, +// HandleError: true, // forwards error to the global error handler, so it can decide appropriate status code +// LogValuesFunc: func(c echo.Context, v middleware.RequestLoggerValues) error { +// if v.Error == nil { +// logger.LogAttrs(context.Background(), slog.LevelInfo, "REQUEST", +// slog.String("uri", v.URI), +// slog.Int("status", v.Status), +// ) +// } else { +// logger.LogAttrs(context.Background(), slog.LevelError, "REQUEST_ERROR", +// slog.String("uri", v.URI), +// slog.Int("status", v.Status), +// slog.String("err", v.Error.Error()), +// ) +// } +// return nil +// }, +// })) +// +// Example for `fmt.Printf` +// e.Use(middleware.RequestLoggerWithConfig(middleware.RequestLoggerConfig{ +// LogStatus: true, +// LogURI: true, +// LogError: true, +// HandleError: true, // forwards error to the global error handler, so it can decide appropriate status code +// LogValuesFunc: func(c echo.Context, v middleware.RequestLoggerValues) error { +// if v.Error == nil { +// fmt.Printf("REQUEST: uri: %v, status: %v\n", v.URI, v.Status) +// } else { +// fmt.Printf("REQUEST_ERROR: uri: %v, status: %v, err: %v\n", v.URI, v.Status, v.Error) +// } +// return nil +// }, +// })) +// +// Example for Zerolog (https://github.com/rs/zerolog) +// logger := zerolog.New(os.Stdout) +// e.Use(middleware.RequestLoggerWithConfig(middleware.RequestLoggerConfig{ +// LogURI: true, +// LogStatus: true, +// LogError: true, +// HandleError: true, // forwards error to the global error handler, so it can decide appropriate status code +// LogValuesFunc: func(c echo.Context, v middleware.RequestLoggerValues) error { +// if v.Error == nil { +// logger.Info(). +// Str("URI", v.URI). +// Int("status", v.Status). +// Msg("request") +// } else { +// logger.Error(). +// Err(v.Error). +// Str("URI", v.URI). +// Int("status", v.Status). 
+// Msg("request error") +// } +// return nil +// }, +// })) +// +// Example for Zap (https://github.com/uber-go/zap) +// logger, _ := zap.NewProduction() +// e.Use(middleware.RequestLoggerWithConfig(middleware.RequestLoggerConfig{ +// LogURI: true, +// LogStatus: true, +// LogError: true, +// HandleError: true, // forwards error to the global error handler, so it can decide appropriate status code +// LogValuesFunc: func(c echo.Context, v middleware.RequestLoggerValues) error { +// if v.Error == nil { +// logger.Info("request", +// zap.String("URI", v.URI), +// zap.Int("status", v.Status), +// ) +// } else { +// logger.Error("request error", +// zap.String("URI", v.URI), +// zap.Int("status", v.Status), +// zap.Error(v.Error), +// ) +// } +// return nil +// }, +// })) +// +// Example for Logrus (https://github.com/sirupsen/logrus) +// log := logrus.New() +// e.Use(middleware.RequestLoggerWithConfig(middleware.RequestLoggerConfig{ +// LogURI: true, +// LogStatus: true, +// LogError: true, +// HandleError: true, // forwards error to the global error handler, so it can decide appropriate status code +// LogValuesFunc: func(c echo.Context, v middleware.RequestLoggerValues) error { +// if v.Error == nil { +// log.WithFields(logrus.Fields{ +// "URI": v.URI, +// "status": v.Status, +// }).Info("request") +// } else { +// log.WithFields(logrus.Fields{ +// "URI": v.URI, +// "status": v.Status, +// "error": v.Error, +// }).Error("request error") +// } +// return nil +// }, +// })) + +// RequestLoggerConfig is configuration for Request Logger middleware. +type RequestLoggerConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // BeforeNextFunc defines a function that is called before next middleware or handler is called in chain. + BeforeNextFunc func(c echo.Context) + // LogValuesFunc defines a function that is called with values extracted by logger from request/response. + // Mandatory. + LogValuesFunc func(c echo.Context, v RequestLoggerValues) error + + // HandleError instructs logger to call global error handler when next middleware/handler returns an error. + // This is useful when you have custom error handler that can decide to use different status codes. + // + // A side-effect of calling global error handler is that now Response has been committed and sent to the client + // and middlewares up in chain can not change Response status code or response body. + HandleError bool + + // LogLatency instructs logger to record duration it took to execute rest of the handler chain (next(c) call). + LogLatency bool + // LogProtocol instructs logger to extract request protocol (i.e. `HTTP/1.1` or `HTTP/2`) + LogProtocol bool + // LogRemoteIP instructs logger to extract request remote IP. See `echo.Context.RealIP()` for implementation details. + LogRemoteIP bool + // LogHost instructs logger to extract request host value (i.e. `example.com`) + LogHost bool + // LogMethod instructs logger to extract request method value (i.e. `GET` etc) + LogMethod bool + // LogURI instructs logger to extract request URI (i.e. `/list?lang=en&page=1`) + LogURI bool + // LogURIPath instructs logger to extract request URI path part (i.e. `/list`) + LogURIPath bool + // LogRoutePath instructs logger to extract route path part to which request was matched to (i.e. `/user/:id`) + LogRoutePath bool + // LogRequestID instructs logger to extract request ID from request `X-Request-ID` header or response if request did not have value. 
+ LogRequestID bool + // LogReferer instructs logger to extract request referer values. + LogReferer bool + // LogUserAgent instructs logger to extract request user agent values. + LogUserAgent bool + // LogStatus instructs logger to extract response status code. If handler chain returns an echo.HTTPError, + // the status code is extracted from the echo.HTTPError returned + LogStatus bool + // LogError instructs logger to extract error returned from executed handler chain. + LogError bool + // LogContentLength instructs logger to extract content length header value. Note: this value could be different from + // actual request body size as it could be spoofed etc. + LogContentLength bool + // LogResponseSize instructs logger to extract response content length value. Note: when used with Gzip middleware + // this value may not be always correct. + LogResponseSize bool + // LogHeaders instructs logger to extract given list of headers from request. Note: request can contain more than + // one header with same value so slice of values is been logger for each given header. + // + // Note: header values are converted to canonical form with http.CanonicalHeaderKey as this how request parser converts header + // names to. For example, the canonical key for "accept-encoding" is "Accept-Encoding". + LogHeaders []string + // LogQueryParams instructs logger to extract given list of query parameters from request URI. Note: request can + // contain more than one query parameter with same name so slice of values is been logger for each given query param name. + LogQueryParams []string + // LogFormValues instructs logger to extract given list of form values from request body+URI. Note: request can + // contain more than one form value with same name so slice of values is been logger for each given form value name. + LogFormValues []string + + timeNow func() time.Time +} + +// RequestLoggerValues contains extracted values from logger. +type RequestLoggerValues struct { + // StartTime is time recorded before next middleware/handler is executed. + StartTime time.Time + // Latency is duration it took to execute rest of the handler chain (next(c) call). + Latency time.Duration + // Protocol is request protocol (i.e. `HTTP/1.1` or `HTTP/2`) + Protocol string + // RemoteIP is request remote IP. See `echo.Context.RealIP()` for implementation details. + RemoteIP string + // Host is request host value (i.e. `example.com`) + Host string + // Method is request method value (i.e. `GET` etc) + Method string + // URI is request URI (i.e. `/list?lang=en&page=1`) + URI string + // URIPath is request URI path part (i.e. `/list`) + URIPath string + // RoutePath is route path part to which request was matched to (i.e. `/user/:id`) + RoutePath string + // RequestID is request ID from request `X-Request-ID` header or response if request did not have value. + RequestID string + // Referer is request referer values. + Referer string + // UserAgent is request user agent values. + UserAgent string + // Status is response status code. Then handler returns an echo.HTTPError then code from there. + Status int + // Error is error returned from executed handler chain. + Error error + // ContentLength is content length header value. Note: this value could be different from actual request body size + // as it could be spoofed etc. + ContentLength string + // ResponseSize is response content length value. Note: when used with Gzip middleware this value may not be always correct. 
+ ResponseSize int64 + // Headers are list of headers from request. Note: request can contain more than one header with same value so slice + // of values is been logger for each given header. + // Note: header values are converted to canonical form with http.CanonicalHeaderKey as this how request parser converts header + // names to. For example, the canonical key for "accept-encoding" is "Accept-Encoding". + Headers map[string][]string + // QueryParams are list of query parameters from request URI. Note: request can contain more than one query parameter + // with same name so slice of values is been logger for each given query param name. + QueryParams map[string][]string + // FormValues are list of form values from request body+URI. Note: request can contain more than one form value with + // same name so slice of values is been logger for each given form value name. + FormValues map[string][]string +} + +// RequestLoggerWithConfig returns a RequestLogger middleware with config. +func RequestLoggerWithConfig(config RequestLoggerConfig) echo.MiddlewareFunc { + mw, err := config.ToMiddleware() + if err != nil { + panic(err) + } + return mw +} + +// ToMiddleware converts RequestLoggerConfig into middleware or returns an error for invalid configuration. +func (config RequestLoggerConfig) ToMiddleware() (echo.MiddlewareFunc, error) { + if config.Skipper == nil { + config.Skipper = DefaultSkipper + } + now := time.Now + if config.timeNow != nil { + now = config.timeNow + } + + if config.LogValuesFunc == nil { + return nil, errors.New("missing LogValuesFunc callback function for request logger middleware") + } + + logHeaders := len(config.LogHeaders) > 0 + headers := append([]string(nil), config.LogHeaders...) + for i, v := range headers { + headers[i] = http.CanonicalHeaderKey(v) + } + + logQueryParams := len(config.LogQueryParams) > 0 + logFormValues := len(config.LogFormValues) > 0 + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + if config.Skipper(c) { + return next(c) + } + + req := c.Request() + res := c.Response() + start := now() + + if config.BeforeNextFunc != nil { + config.BeforeNextFunc(c) + } + err := next(c) + if err != nil && config.HandleError { + c.Error(err) + } + + v := RequestLoggerValues{ + StartTime: start, + } + if config.LogLatency { + v.Latency = now().Sub(start) + } + if config.LogProtocol { + v.Protocol = req.Proto + } + if config.LogRemoteIP { + v.RemoteIP = c.RealIP() + } + if config.LogHost { + v.Host = req.Host + } + if config.LogMethod { + v.Method = req.Method + } + if config.LogURI { + v.URI = req.RequestURI + } + if config.LogURIPath { + p := req.URL.Path + if p == "" { + p = "/" + } + v.URIPath = p + } + if config.LogRoutePath { + v.RoutePath = c.Path() + } + if config.LogRequestID { + id := req.Header.Get(echo.HeaderXRequestID) + if id == "" { + id = res.Header().Get(echo.HeaderXRequestID) + } + v.RequestID = id + } + if config.LogReferer { + v.Referer = req.Referer() + } + if config.LogUserAgent { + v.UserAgent = req.UserAgent() + } + if config.LogStatus { + v.Status = res.Status + if err != nil && !config.HandleError { + // this block should not be executed in case of HandleError=true as the global error handler will decide + // the status code. In that case status code could be different from what err contains. 
+ var httpErr *echo.HTTPError + if errors.As(err, &httpErr) { + v.Status = httpErr.Code + } + } + } + if config.LogError && err != nil { + v.Error = err + } + if config.LogContentLength { + v.ContentLength = req.Header.Get(echo.HeaderContentLength) + } + if config.LogResponseSize { + v.ResponseSize = res.Size + } + if logHeaders { + v.Headers = map[string][]string{} + for _, header := range headers { + if values, ok := req.Header[header]; ok { + v.Headers[header] = values + } + } + } + if logQueryParams { + queryParams := c.QueryParams() + v.QueryParams = map[string][]string{} + for _, param := range config.LogQueryParams { + if values, ok := queryParams[param]; ok { + v.QueryParams[param] = values + } + } + } + if logFormValues { + v.FormValues = map[string][]string{} + for _, formValue := range config.LogFormValues { + if values, ok := req.Form[formValue]; ok { + v.FormValues[formValue] = values + } + } + } + + if errOnLog := config.LogValuesFunc(c, v); errOnLog != nil { + return errOnLog + } + + // in case of HandleError=true we are returning the error that we already have handled with global error handler + // this is deliberate as this error could be useful for upstream middlewares and default global error handler + // will ignore that error when it bubbles up in middleware chain. + return err + } + }, nil +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/rewrite.go b/vendor/github.com/labstack/echo/v4/middleware/rewrite.go new file mode 100644 index 00000000..4c19cc1c --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/rewrite.go @@ -0,0 +1,80 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "regexp" + + "github.com/labstack/echo/v4" +) + +// RewriteConfig defines the config for Rewrite middleware. +type RewriteConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // Rules defines the URL path rewrite rules. The values captured in asterisk can be + // retrieved by index e.g. $1, $2 and so on. + // Example: + // "/old": "/new", + // "/api/*": "/$1", + // "/js/*": "/public/javascripts/$1", + // "/users/*/orders/*": "/user/$1/order/$2", + // Required. + Rules map[string]string `yaml:"rules"` + + // RegexRules defines the URL path rewrite rules using regexp.Rexexp with captures + // Every capture group in the values can be retrieved by index e.g. $1, $2 and so on. + // Example: + // "^/old/[0.9]+/": "/new", + // "^/api/.+?/(.*)": "/v2/$1", + RegexRules map[*regexp.Regexp]string `yaml:"-"` +} + +// DefaultRewriteConfig is the default Rewrite middleware config. +var DefaultRewriteConfig = RewriteConfig{ + Skipper: DefaultSkipper, +} + +// Rewrite returns a Rewrite middleware. +// +// Rewrite middleware rewrites the URL path based on the provided rules. +func Rewrite(rules map[string]string) echo.MiddlewareFunc { + c := DefaultRewriteConfig + c.Rules = rules + return RewriteWithConfig(c) +} + +// RewriteWithConfig returns a Rewrite middleware with config. +// See: `Rewrite()`. 
+func RewriteWithConfig(config RewriteConfig) echo.MiddlewareFunc { + // Defaults + if config.Rules == nil && config.RegexRules == nil { + panic("echo: rewrite middleware requires url path rewrite rules or regex rules") + } + + if config.Skipper == nil { + config.Skipper = DefaultBodyDumpConfig.Skipper + } + + if config.RegexRules == nil { + config.RegexRules = make(map[*regexp.Regexp]string) + } + for k, v := range rewriteRulesRegex(config.Rules) { + config.RegexRules[k] = v + } + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) (err error) { + if config.Skipper(c) { + return next(c) + } + + if err := rewriteURL(config.RegexRules, c.Request()); err != nil { + return err + } + return next(c) + } + } +} diff --git a/vendor/github.com/labstack/echo/v4/middleware/secure.go b/vendor/github.com/labstack/echo/v4/middleware/secure.go new file mode 100644 index 00000000..c904abf1 --- /dev/null +++ b/vendor/github.com/labstack/echo/v4/middleware/secure.go @@ -0,0 +1,144 @@ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Β© 2015 LabStack LLC and Echo contributors + +package middleware + +import ( + "fmt" + + "github.com/labstack/echo/v4" +) + +// SecureConfig defines the config for Secure middleware. +type SecureConfig struct { + // Skipper defines a function to skip middleware. + Skipper Skipper + + // XSSProtection provides protection against cross-site scripting attack (XSS) + // by setting the `X-XSS-Protection` header. + // Optional. Default value "1; mode=block". + XSSProtection string `yaml:"xss_protection"` + + // ContentTypeNosniff provides protection against overriding Content-Type + // header by setting the `X-Content-Type-Options` header. + // Optional. Default value "nosniff". + ContentTypeNosniff string `yaml:"content_type_nosniff"` + + // XFrameOptions can be used to indicate whether or not a browser should + // be allowed to render a page in a ,