diff --git a/.claude/settings.json b/.claude/settings.json new file mode 100644 index 00000000..1789923c --- /dev/null +++ b/.claude/settings.json @@ -0,0 +1,11 @@ +{ + "permissions": { + "allow": [ + "Bash(go:*)", + "Bash(golangci-lint:*)", + "Bash(git:*)", + "Bash(make:*)" + ], + "deny": [] + } +} \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..aad738c7 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: + - package-ecosystem: "gomod" + directory: "/" + schedule: + interval: "daily" + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b30e1328..4eef7ef6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,29 +1,33 @@ +permissions: + contents: read name: CI on: push: branches: - - master + - main pull_request: branches: - - master + - main jobs: build: name: Build runs-on: ${{ matrix.os }} timeout-minutes: 10 + permissions: + contents: read strategy: matrix: os: [ubuntu-latest, windows-latest, macos-latest] steps: - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v6 with: - go-version: 1.15 - - uses: actions/checkout@v1 + go-version: 1.25 + - uses: actions/checkout@v6 - run: make build - - uses: actions/upload-artifact@v1 + - uses: actions/upload-artifact@v6 with: name: podsync-${{ matrix.os }} path: bin/ @@ -32,12 +36,14 @@ jobs: name: Test runs-on: ubuntu-latest timeout-minutes: 10 + permissions: + contents: read steps: - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v6 with: - go-version: 1.15 - - uses: actions/checkout@v1 + go-version: 1.25 + - uses: actions/checkout@v6 - env: VIMEO_TEST_API_KEY: ${{ secrets.VIMEO_ACCESS_TOKEN }} YOUTUBE_TEST_API_KEY: ${{ secrets.YOUTUBE_API_KEY }} @@ -47,12 +53,18 @@ jobs: name: Checks runs-on: ubuntu-latest timeout-minutes: 10 + permissions: + # Required: allow read access 
to the content for analysis. + # See https://github.com/golangci/golangci-lint-action?tab=readme-ov-file#annotations + contents: read + pull-requests: read steps: - - uses: actions/setup-go@v1 + - uses: actions/setup-go@v6 with: - go-version: 1.15 - - uses: actions/checkout@v1 + go-version: 1.25 + - uses: actions/checkout@v6 + - uses: golangci/golangci-lint-action@v9 - name: Go mod env: @@ -68,5 +80,3 @@ jobs: echo exit 1 fi - - - run: make lint diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml new file mode 100644 index 00000000..ef3ffaa9 --- /dev/null +++ b/.github/workflows/claude.yml @@ -0,0 +1,47 @@ +name: Claude + +on: + issue_comment: + types: [created] + pull_request_review_comment: + types: [created] + issues: + types: [opened, assigned] + pull_request_review: + types: [submitted] + +jobs: + claude: + if: | + (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) || + (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) || + (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) || + (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude'))) + + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + issues: write + id-token: write + actions: read # Required for Claude to read CI results on PRs + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + with: + fetch-depth: 1 + + - name: Run Claude Code + id: claude + uses: anthropics/claude-code-action@v1 + with: + claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} + + # Allow Go development tools and file operations for code changes and PRs + claude_args: | + --allowedTools Edit Read Write Glob Grep "Bash(go:*)" "Bash(golangci-lint:*)" "Bash(make:*)" "Bash(git:*)" "Bash(gh:*)" mcp__github + + # This is an optional setting that allows Claude 
to read CI results on PRs + additional_permissions: | + actions: read diff --git a/.github/workflows/dedup.yml b/.github/workflows/dedup.yml new file mode 100644 index 00000000..08658f6f --- /dev/null +++ b/.github/workflows/dedup.yml @@ -0,0 +1,64 @@ +name: Issue Deduplication + +on: + issues: + types: [opened] + +jobs: + deduplicate: + name: Check + runs-on: ubuntu-latest + timeout-minutes: 10 + permissions: + contents: read + issues: write + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + with: + fetch-depth: 1 + + - name: Check for duplicate issues + uses: anthropics/claude-code-action@v1 + with: + claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} + prompt: | + Analyze this new issue and check if it's a duplicate of existing issues in the repository. + + Issue: #${{ github.event.issue.number }} + Repository: ${{ github.repository }} + + Your task: + 1. Use mcp__github__get_issue to get details of the current issue (#${{ github.event.issue.number }}) + 2. Search for similar existing issues using mcp__github__search_issues with relevant keywords from the issue title and body + 3. 
Compare the new issue with existing ones to identify potential duplicates + + Criteria for duplicates: + - Same bug or error being reported + - Same feature request (even if worded differently) + - Same question being asked + - Issues describing the same root problem + + If you find duplicates: + - Add a comment on the new issue linking to the original issue(s) + - Apply a "duplicate" label to the new issue + - Be polite and explain why it's a duplicate + - Suggest the user follow the original issue for updates + + If it's NOT a duplicate: + - Don't add any comments + - You may apply appropriate topic labels based on the issue content + + Use these tools: + - mcp__github__get_issue: Get issue details + - mcp__github__search_issues: Search for similar issues + - mcp__github__list_issues: List recent issues if needed + - mcp__github__create_issue_comment: Add a comment if duplicate found + - mcp__github__update_issue: Add labels + + Be thorough but efficient. Focus on finding true duplicates, not just similar issues. 
+ + claude_args: | + --allowedTools "mcp__github__get_issue,mcp__github__search_issues,mcp__github__list_issues,mcp__github__create_issue_comment,mcp__github__update_issue,mcp__github__get_issue_comments" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml new file mode 100644 index 00000000..c6e20bbf --- /dev/null +++ b/.github/workflows/nightly.yml @@ -0,0 +1,48 @@ +name: Nightly + +on: + schedule: + - cron: "0 0 * * *" # Every day at midnight + push: + paths: + - ".github/workflows/nightly.yml" + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + publish: + name: Nightly + runs-on: ubuntu-latest + timeout-minutes: 20 + + permissions: + contents: read + packages: write + + steps: + - name: ๐Ÿ“ฆ Checkout repository + uses: actions/checkout@v6 + + - name: ๐Ÿงช Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: ๐Ÿ”’ Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: ๐Ÿ—๏ธ Build and push + uses: docker/build-push-action@v6 + env: + TAG: nightly + COMMIT: ${{ github.sha }} + with: + cache-from: type=gha + cache-to: type=gha,mode=max + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:nightly diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ce128bac..0771fb10 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -5,30 +5,53 @@ on: tags: - 'v*' +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + jobs: publish: name: Publish runs-on: ubuntu-latest - timeout-minutes: 10 + timeout-minutes: 20 + + permissions: + contents: write + packages: write steps: - - uses: actions/checkout@v2 + - name: ๐Ÿ“ฆ Checkout repository + uses: actions/checkout@v6 + with: + fetch-depth: 0 - # Required for the changelog to work correctly - - run: git fetch --prune 
--unshallow + - name: ๐Ÿงช Set up Docker Buildx + uses: docker/setup-buildx-action@v3 - - uses: actions/setup-go@v1 + - name: ๐Ÿ”’ Log in to the Container registry + uses: docker/login-action@v3 with: - go-version: 1.15 + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} - - env: - DOCKER_LOGIN: ${{ secrets.DOCKER_LOGIN }} - DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - run: docker login --username "$DOCKER_LOGIN" --password "$DOCKER_PASSWORD" + - name: ๐Ÿ—๏ธ Build container and push + uses: docker/build-push-action@v6 + env: + TAG: ${{ github.ref_name }} + COMMIT: ${{ github.sha }} + with: + cache-from: type=gha + cache-to: type=gha,mode=max + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest, ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }} - - uses: goreleaser/goreleaser-action@v1 + - name: ๐Ÿšง๏ธ Make release + uses: goreleaser/goreleaser-action@v6 + if: startsWith(github.ref, 'refs/tags/') + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: version: latest - args: release --rm-dist - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} + args: release --clean diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml new file mode 100644 index 00000000..f31321fd --- /dev/null +++ b/.github/workflows/triage.yml @@ -0,0 +1,77 @@ +name: Issue Triage +on: + issues: + types: [opened] + +jobs: + triage-issue: + name: Triage + runs-on: ubuntu-latest + timeout-minutes: 10 + permissions: + contents: read + issues: write + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + with: + fetch-depth: 0 + + - name: Triage issue with Claude + uses: anthropics/claude-code-action@v1 + with: + claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} + prompt: | + You're an issue triage assistant for GitHub issues. Your task is to analyze the issue and select appropriate labels from the provided list. 
+ + IMPORTANT: Don't post any comments or messages to the issue. Your only action should be to apply labels. + + Issue Information: + - REPO: ${{ github.repository }} + - ISSUE_NUMBER: ${{ github.event.issue.number }} + + TASK OVERVIEW: + + 1. First, fetch the list of labels available in this repository by running: `gh label list`. Run exactly this command with nothing else. + + 2. Next, use the GitHub tools to get context about the issue: + - You have access to these tools: + - mcp__github__get_issue: Use this to retrieve the current issue's details including title, description, and existing labels + - mcp__github__get_issue_comments: Use this to read any discussion or additional context provided in the comments + - mcp__github__update_issue: Use this to apply labels to the issue (do not use this for commenting) + - mcp__github__search_issues: Use this to find similar issues that might provide context for proper categorization and to identify potential duplicate issues + - mcp__github__list_issues: Use this to understand patterns in how other issues are labeled + - Start by using mcp__github__get_issue to get the issue details + + 3. Analyze the issue content, considering: + - The issue title and description + - The type of issue (bug report, feature request, question, etc.) + - Technical areas mentioned + - Severity or priority indicators + - User impact + - Components affected + + 4. Select appropriate labels from the available labels list provided above: + - Choose labels that accurately reflect the issue's nature + - Be specific but comprehensive + - Select priority labels if you can determine urgency (high-priority, med-priority, or low-priority) + - Consider platform labels (android, ios) if applicable + - If you find similar issues using mcp__github__search_issues, consider using a "duplicate" label if appropriate. Only do so if the issue is a duplicate of another OPEN issue. + + 5. 
Apply the selected labels: + - Use mcp__github__update_issue to apply your selected labels + - DO NOT post any comments explaining your decision + - DO NOT communicate directly with users + - If no labels are clearly applicable, do not apply any labels + + IMPORTANT GUIDELINES: + - Be thorough in your analysis + - Only select labels from the provided list above + - DO NOT post any comments to the issue + - Your ONLY action should be to apply labels using mcp__github__update_issue + - It's okay to not add any labels if none are clearly applicable + + claude_args: | + --allowedTools "Bash(gh label list),mcp__github__get_issue,mcp__github__get_issue_comments,mcp__github__update_issue,mcp__github__search_issues,mcp__github__list_issues" diff --git a/.gitignore b/.gitignore index eb418d1c..9ccedbd5 100644 --- a/.gitignore +++ b/.gitignore @@ -31,3 +31,9 @@ venv/ .DS_Store /podsync +podsync.log + +db +config.toml + +.claude/settings.local.json diff --git a/.golangci.yml b/.golangci.yml index 45a48588..66518463 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,29 +1,38 @@ +version: "2" linters: enable: - - structcheck - - varcheck - - staticcheck - - unconvert - - gofmt - - goimports - - golint - - ineffassign - - vet - - unused - - misspell - bodyclose - - interfacer - - unconvert - - maligned - - depguard + - misspell - nakedret - prealloc + - unconvert - whitespace + - staticcheck + - ineffassign + - unused disable: - errcheck - + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - std-error-handling + paths: + - bin + - docs +formatters: + enable: + - gofmt + - goimports + exclusions: + generated: lax + paths: + - bin + - docs + - third_party$ + - builtin$ + - examples$ run: - deadline: 3m - skip-dirs: - - bin - - docs + timeout: 3m diff --git a/.goreleaser.yml b/.goreleaser.yml index 44f81048..872873b0 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -19,22 +19,15 @@ builds: - arm - arm64 -dockers: - - 
image_templates: - - 'mxpv/podsync:{{ .Tag }}' - - 'mxpv/podsync:v{{ .Major }}.{{ .Minor }}' - - 'mxpv/podsync:latest' - binaries: - - podsync - dockerfile: Dockerfile - archives: - - replacements: - darwin: Darwin - linux: Linux - windows: Windows - 386: i386 - amd64: x86_64 + - id: arc + name_template: >- + {{- .ProjectName }}_{{.Version}}_ + {{- title .Os }}_ + {{- if eq .Arch "amd64" }}x86_64 + {{- else if eq .Arch "386" }}i386 + {{- else }}{{ .Arch }}{{ end }} + {{- if .Arm }}v{{ .Arm }}{{ end -}} format_overrides: - goos: windows format: zip @@ -43,7 +36,7 @@ checksum: name_template: 'checksums.txt' snapshot: - name_template: "{{ .Tag }}-next" + name_template: '{{ .Tag }}-next' changelog: sort: asc @@ -53,3 +46,13 @@ changelog: - '^test:' - Merge pull request - Merge branch + +release: + # We publish Docker image manually, + # include links to the release notes. + footer: | + # Docker images + ``` + docker pull ghcr.io/mxpv/podsync:{{ .Tag }} + docker pull ghcr.io/mxpv/podsync:latest + ``` diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..44de8286 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,15 @@ +{ + "version": "0.2.0", + "configurations": [ + { + // from https://github.com/vscode-debug-specs/go#debugging-executable-file + "name": "Debug Podsync", + "type": "go", + "request": "launch", + "mode": "debug", + "program": "${workspaceFolder}/cmd/podsync", + "cwd": "${workspaceFolder}", + "args": ["--config", "config.toml"] + } + ] +} \ No newline at end of file diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000..89e8774c --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,138 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +Podsync is a Go-based service that converts YouTube, Vimeo, and SoundCloud channels into podcast feeds. 
It downloads video/audio content and generates RSS feeds that can be consumed by podcast clients. + +## Key Architecture Components + +### Main Application (`cmd/podsync/`) +- **main.go**: Entry point with CLI argument parsing, signal handling, and service orchestration +- **config.go**: TOML configuration loading and validation with defaults + +### Core Packages (`pkg/`) +- **builder/**: Media downloaders for different platforms (YouTube, Vimeo, SoundCloud) +- **feed/**: RSS/podcast feed generation and management, OPML export +- **db/**: BadgerDB-based storage for metadata and state +- **fs/**: Storage abstraction supporting local filesystem and S3-compatible storage +- **model/**: Core data structures and domain models +- **ytdl/**: YouTube-dl wrapper for media downloading + +### Services (`services/`) +- **update/**: Feed update orchestration and scheduling +- **web/**: HTTP server for serving podcast feeds and media files + +### Key Dependencies +- youtube-dl/yt-dlp for media downloading +- BadgerDB for local storage +- go-toml for configuration +- robfig/cron for scheduling +- AWS SDK for S3 storage + +## Common Development Commands + +### Building +```bash +make build # Build binary to bin/podsync +make # Build and run tests +``` + +### Testing +```bash +make test # Run all unit tests +go test -v ./... # Run tests with verbose output +go test ./pkg/... # Test specific packages +``` + +### Linting and Formatting +```bash +golangci-lint run # Run all configured linters and formatters +gofmt -s -w . # Format all Go files +goimports -w . # Organize imports and format +``` + +### Running +```bash +./bin/podsync --config config.toml # Run with config file +./bin/podsync --debug # Run with debug logging +./bin/podsync --headless # Run once and exit (no web server) +``` + +### Docker +```bash +make docker # Build local Docker image +docker run -it --rm localhost/podsync:latest +``` + +### Development Debugging +Use VS Code with the Go extension. 
The repository includes `.vscode/launch.json` with a "Debug Podsync" configuration that runs with `config.toml`. + +## Configuration + +The application uses TOML configuration files. See `config.toml.example` for all available options. Key sections: +- `[server]`: Web server settings (port, hostname, TLS) +- `[storage]`: Local or S3 storage configuration +- `[tokens]`: API keys for YouTube/Vimeo +- `[feeds]`: Feed definitions with URLs and settings +- `[downloader]`: youtube-dl configuration + +## Development Guidelines + +### Code Quality +- Write clean, idiomatic Go code following Go conventions and best practices +- Use structured logging with logrus for consistent log formatting +- Ensure proper error handling and meaningful error messages +- Follow the existing code style and patterns in the repository + +### Testing and Quality Assurance +- **CRITICAL**: Always run ALL of the following commands before making a commit or opening a PR: + 1. `go fmt ./...` - Format all Go files + 2. `golangci-lint run` - Run all configured linters and formatters + 3. 
`make test` - Run all unit tests +- Run tests first with `make test` to ensure functionality works correctly +- Run linter with `golangci-lint run` to ensure proper formatting and code quality +- Ensure ALL tests pass AND ALL linting checks pass before committing +- Review code carefully for spelling errors, typos, and grammatical mistakes +- Test changes locally with different configurations when applicable +- The project uses golangci-lint with strict formatting rules - code must pass ALL checks + +### Git Workflow +- Keep commit messages brief and to the point +- Use a short, descriptive commit title (50 characters or less) +- Include a brief commit body that summarizes changes in 1-3 sentences when needed (wrap at 120 characters) +- Do not include automated signatures or generation notices in commit messages or pull requests +- Don't add "Generated with Claude Code" to commit messages or pull request descriptions +- Don't add "Co-Authored-By: Claude noreply@anthropic.com" to commit messages or pull request descriptions +- Keep commits focused and atomic - one logical change per commit +- Ensure the build passes before pushing commits + +### Pull Request Guidelines +- Keep PR descriptions concise and focused +- Include the brief commit body summary plus relevant examples if applicable +- Avoid verbose sections like "Changes Made", "Test Plan", or extensive bullet lists +- Focus on what the change does and why, not exhaustive implementation details +- Include code examples only when they help demonstrate usage or key functionality + +## Key Conventions + +- Configuration validation happens at startup +- Graceful shutdown with context cancellation +- Storage abstraction allows switching between local/S3 +- API key rotation support for rate limiting +- Cron-based scheduling for feed updates +- Episode filtering and cleanup capabilities + +## Formatting and Linting Requirements + +This project uses golangci-lint with strict formatting rules configured in 
`.golangci.yml`. Common formatting requirements include: + +- Proper spacing around operators (`if condition {` not `if(condition){`) +- Correct struct field alignment and spacing +- Proper import ordering (standard library, third-party, local packages) +- No trailing whitespace +- Consistent spacing around assignment operators (`key: value` not `key:value`) +- Space after commas in function parameters and struct literals + +**Always run `go fmt ./...`, `golangci-lint run`, AND `make test` after making ANY code changes to ensure both functionality and formatting are correct before committing.** \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index ded06b9a..fd7f33b0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,30 @@ -FROM alpine:3.10 - -WORKDIR /app/ -RUN wget -O /usr/bin/youtube-dl https://github.com/ytdl-org/youtube-dl/releases/latest/download/youtube-dl && \ - chmod +x /usr/bin/youtube-dl && \ - apk --no-cache add ca-certificates python ffmpeg tzdata -COPY podsync /app/podsync -CMD ["/app/podsync"] +FROM golang:1.25 AS builder + +ENV TAG="nightly" +ENV COMMIT="" + +WORKDIR /build + +COPY . . 
+ +RUN make build + +# Download yt-dlp +RUN wget -O /usr/bin/yt-dlp https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp && \ + chmod a+rwx /usr/bin/yt-dlp + +# Alpine 3.22 will go EOL on 2027-05-01 +FROM alpine:3.22 + +WORKDIR /app + +# deno is required for yt-dlp (ref: https://github.com/yt-dlp/yt-dlp/issues/14404) +RUN apk --no-cache add ca-certificates python3 py3-pip ffmpeg tzdata libc6-compat deno + +RUN chmod 777 /usr/local/bin +COPY --from=builder /usr/bin/yt-dlp /usr/local/bin/youtube-dl +COPY --from=builder /build/bin/podsync /app/podsync +COPY --from=builder /build/html/index.html /app/html/index.html + +ENTRYPOINT ["/app/podsync"] +CMD ["--no-banner"] diff --git a/Makefile b/Makefile index 22e1a1cb..715cfb1e 100644 --- a/Makefile +++ b/Makefile @@ -1,39 +1,44 @@ BINPATH := $(abspath ./bin) -GOLANGCI := $(BINPATH)/golangci-lint .PHONY: all -all: build lint test +all: build test # # Build Podsync CLI binary +# Example: +# $ GOOS=amd64 make build # -.PHONY: build -build: - go build -o bin/podsync ./cmd/podsync -# -# Build Docker image -# -TAG ?= localhost/podsync -.PHONY: docker -docker: - GOOS=linux GOARCH=amd64 go build -o podsync ./cmd/podsync - docker build -t $(TAG) . 
- docker push $(TAG) +GOARCH ?= $(shell go env GOARCH) +GOOS ?= $(shell go env GOOS) + +TAG ?= $(shell git tag --points-at HEAD) +COMMIT ?= $(shell git rev-parse --short HEAD) +DATE := $(shell date) # -# Pull GolangCI-Lint dependency +# Go optimizations +# -ldflags -s Remove symbol table +# -ldflags -w Remove debug information +# -trimpath Remove all file system paths from the compiled binary +# -tags netgo Use the netgo network stack (Go DNS resolver) # -$(GOLANGCI): - curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(BINPATH) v1.31.0 - $(GOLANGCI) --version +LDFLAGS := "-s -w -X 'main.version=${TAG}' -X 'main.commit=${COMMIT}' -X 'main.date=${DATE}' -X 'main.arch=${GOARCH}'" + +.PHONY: build +build: + go build -trimpath -tags netgo -ldflags ${LDFLAGS} -o bin/podsync ./cmd/podsync # -# Run linter +# Build a local Docker image +# Example: +# $ make docker +# $ docker run -it --rm localhost/podsync:latest # -.PHONY: lint -lint: $(GOLANGCI) - $(GOLANGCI) run +IMAGE_TAG ?= localhost/podsync +.PHONY: docker +docker: + docker buildx build -t $(IMAGE_TAG) . 
# # Run unit tests diff --git a/README.md b/README.md index 35f2c4fb..4aa640d0 100644 --- a/README.md +++ b/README.md @@ -3,10 +3,11 @@ ![Podsync](docs/img/logo.png) [![](https://github.com/mxpv/podsync/workflows/CI/badge.svg)](https://github.com/mxpv/podsync/actions?query=workflow%3ACI) +[![Nightly](https://github.com/mxpv/podsync/actions/workflows/nightly.yml/badge.svg)](https://github.com/mxpv/podsync/actions/workflows/nightly.yml) [![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/mxpv/podsync)](https://github.com/mxpv/podsync/releases) [![Go Report Card](https://goreportcard.com/badge/github.com/mxpv/podsync)](https://goreportcard.com/report/github.com/mxpv/podsync) +[![GitHub Sponsors](https://img.shields.io/github/sponsors/mxpv)](https://github.com/sponsors/mxpv) [![Patreon](https://img.shields.io/badge/support-patreon-E6461A.svg)](https://www.patreon.com/podsync) -[![Twitter Follow](https://img.shields.io/twitter/follow/pod_sync?style=social)](https://twitter.com/pod_sync) Podsync - is a simple, free service that lets you listen to any YouTube / Vimeo channels, playlists or user videos in podcast format. @@ -16,98 +17,79 @@ remembering last played position, sync between devices and offline listening. Th on YouTube and Vimeo. So the aim of Podsync is to make your life easier and enable you to view/listen to content on any device in podcast client. -## Features +## โœจ Features - Works with YouTube and Vimeo. - Supports feeds configuration: video/audio, high/low quality, max video height, etc. - mp3 encoding - Update scheduler supports cron expressions -- Episodes filtering (match by title). +- Episodes filtering (match by title, duration). - Feeds customizations (custom artwork, category, language, etc). - OPML export. - Supports episodes cleanup (keep last X episodes). +- Configurable hooks for custom integrations and workflows. - One-click deployment for AWS. - Runs on Windows, Mac OS, Linux, and Docker. - Supports ARM. 
-- Automatic youtube-dl self update. +- Automatic yt-dlp self update. - Supports API keys rotation. -## Dependencies +## ๐Ÿ“‹ Dependencies If you're running the CLI as binary (e.g. not via Docker), you need to make sure that dependencies are available on -your system. Currently, Podsync depends on `youtube-dl` and `ffmpeg`. +your system. Currently, Podsync depends on `yt-dlp` , `ffmpeg`, and `go`. On Mac you can install those with `brew`: ``` -brew install youtube-dl ffmpeg +brew install yt-dlp ffmpeg go ``` -## Access tokens +## ๐Ÿ“– Documentation + +- [How to get Vimeo API token](./docs/how_to_get_vimeo_token.md) +- [How to get YouTube API Key](./docs/how_to_get_youtube_api_key.md) +- [Podsync on QNAP NAS Guide](./docs/how_to_setup_podsync_on_qnap_nas.md) +- [Schedule updates with cron](./docs/cron.md) + +## ๐ŸŒ™ Nightly builds + +Nightly builds uploaded every midnight from the `main` branch and available for testing: + +```bash +$ docker run -it --rm ghcr.io/mxpv/podsync:nightly +``` + +### ๐Ÿ”‘ Access tokens In order to query YouTube or Vimeo API you have to obtain an API token first. - [How to get YouTube API key](https://elfsight.com/blog/2016/12/how-to-get-youtube-api-key-tutorial/) - [Generate an access token for Vimeo](https://developer.vimeo.com/api/guides/start#generate-access-token) -## Configuration example +## โš™๏ธ Configuration You need to create a configuration file (for instance `config.toml`) and specify the list of feeds that you're going to host. -Here is an example how configuration might look like: +See [config.toml.example](./config.toml.example) for all possible configuration keys available in Podsync. 
+ +Minimal configuration would look like this: ```toml [server] port = 8080 -# Bind a specific IP addresses for server ,"*": bind all IP addresses which is default option, localhost or 127.0.0.1 bind a single IPv4 address -bind_address = "172.20.10.2" -# Specify path for reverse proxy and only [A-Za-z0-9] -path = "test" -data_dir = "/app/data" # Don't change if you run podsync via docker -# Tokens from `Access tokens` section +[storage] + [storage.local] + # Don't change if you run podsync via docker + data_dir = "/app/data/" + [tokens] -youtube = "YOUTUBE_API_TOKEN" # YouTube API Key. See https://developers.google.com/youtube/registering_an_application -vimeo = [ # Multiple keys will be rotated. - "VIMEO_API_KEY_1", # Vimeo developer keys. See https://developer.vimeo.com/api/guides/start#generate-access-token - "VIMEO_API_KEY_2" -] +youtube = "PASTE YOUR API KEY HERE" # See config.toml.example for environment variables [feeds] - [feeds.ID1] - url = "{FEED_URL}" # URL address of a channel, group, user, or playlist. - page_size = 50 # The number of episodes to query each update (keep in mind, that this might drain API token) - update_period = "12h" # How often query for updates, examples: "60m", "4h", "2h45m" - quality = "high" # or "low" - format = "video" # or "audio" - # custom.cover_art_quality use "high" or "low" to special cover image quality from channel cover default is equal with "quality" and disable when custom.cover_art was set. - # custom = { title = "Level1News", description = "News sections of Level1Techs, in a podcast feed!", author = "Level1Tech", cover_art = "{IMAGE_URL}", cover_art_quality = "high", category = "TV", subcategories = ["Documentary", "Tech News"], explicit = true, lang = "en" } # Optional feed customizations - # max_height = 720 # Optional maximal height of video, example: 720, 1080, 1440, 2160, ... - # cron_schedule = "@every 12h" # Optional cron expression format. If set then overwrite 'update_period'. 
See details below - # filters = { title = "regex for title here", not_title = "regex for negative title match", description = "...", not_description = "..." } # Optional Golang regexp format. If set, then only download matching episodes. - # opml = true|false # Optional inclusion of the feed in the OPML file (default value: false) - # clean = { keep_last = 10 } # Keep last 10 episodes (order desc by PubDate) - # youtube_dl_args = [ "--write-sub", "--embed-subs", "--sub-lang", "en,en-US,en-GB" ] # Optional extra arguments passed to youtube-dl when downloading videos from this feed. This example would embed available English closed captions in the videos. Note that setting '--audio-format' for audio format feeds, or '--format' or '--output' for any format may cause unexpected behaviour. You should only use this if you know what you are doing, and have read up on youtube-dl's options! - -[database] - badger = { truncate = true, file_io = true } # See https://github.com/dgraph-io/badger#memory-usage - -[downloader] -self_update = true # Optional, auto update youtube-dl every 24 hours -timeout = 15 # Timeout in minutes - -# Optional log config. If not specified logs to the stdout -[log] -filename = "podsync.log" -max_size = 50 # MB -max_age = 30 # days -max_backups = 7 -compress = true - + [feeds.ID1] + url = "https://www.youtube.com/channel/UCxC5Ls6DwqV0e-CYcAKkExQ" ``` -Please note: Automatically clean-up will not work without a database configuration. 
- -Episodes files will be kept at: `/path/to/data/directory/ID1`, feed will be accessible from: `http://localhost/ID1.xml` - If you want to hide Podsync behind reverse proxy like nginx, you can use `hostname` field: ```toml @@ -122,72 +104,71 @@ hostname = "https://my.test.host:4443" Server will be accessible from `http://localhost:8080`, but episode links will point to `https://my.test.host:4443/ID1/...` +### ๐ŸŒ Environment Variables -### Schedule via cron expression +Podsync supports the following environment variables for configuration and API keys: -You can use `cron_schedule` field to build more precise update checks schedule. -A cron expression represents a set of times, using 5 space-separated fields. +| Variable Name | Description | Example Value(s) | +|------------------------------|-------------------------------------------------------------------------------------------|-----------------------------------------------| +| `PODSYNC_CONFIG_PATH` | Path to the configuration file (overrides `--config` CLI flag) | `/app/config.toml` | +| `PODSYNC_YOUTUBE_API_KEY` | YouTube API key(s), space-separated for rotation | `key1` or `key1 key2 key3` | +| `PODSYNC_VIMEO_API_KEY` | Vimeo API key(s), space-separated for rotation | `key1` or `key1 key2` | +| `PODSYNC_SOUNDCLOUD_API_KEY` | SoundCloud API key(s), space-separated for rotation | `soundcloud_key1 soundcloud_key2` | +| `PODSYNC_TWITCH_API_KEY` | Twitch API credentials in the format `CLIENT_ID:CLIENT_SECRET`, space-separated for multi | `id1:secret1 id2:secret2` | -| Field name | Mandatory? | Allowed values | Allowed special characters | -| ------------ | ---------- | --------------- | -------------------------- | -| Minutes | Yes | 0-59 | * / , - | -| Hours | Yes | 0-23 | * / , - | -| Day of month | Yes | 1-31 | * / , - ? | -| Month | Yes | 1-12 or JAN-DEC | * / , - | -| Day of week | Yes | 0-6 or SUN-SAT | * / , - ? | +## ๐Ÿš€ How to run -Month and Day-of-week field values are case insensitive. 
`SUN`, `Sun`, and `sun` are equally accepted. -The specific interpretation of the format is based on the Cron Wikipedia page: https://en.wikipedia.org/wiki/Cron -#### Predefined schedules +### Build and run as binary: -You may use one of several pre-defined schedules in place of a cron expression. +Make sure you have created the file `config.toml`. Also note the location of the `data_dir`. Depending on the operating system, you may have to choose a different location since `/app/data` might be not writable. -| Entry | Description | Equivalent to | -| ----------------------- | -------------------------------------------| ------------- | -| `@monthly` | Run once a month, midnight, first of month | `0 0 1 * *` | -| `@weekly` | Run once a week, midnight between Sat/Sun | `0 0 * * 0` | -| `@daily (or @midnight)` | Run once a day, midnight | `0 0 * * *` | -| `@hourly` | Run once an hour, beginning of hour | `0 * * * *` | - -#### Intervals - -You may also schedule a job to execute at fixed intervals, starting at the time it's added -or cron is run. This is supported by formatting the cron spec like this: - - @every - -where "duration" is a string accepted by [time.ParseDuration](http://golang.org/pkg/time/#ParseDuration). - -For example, `@every 1h30m10s` would indicate a schedule that activates after 1 hour, 30 minutes, 10 seconds, and then every interval after that. +``` +$ git clone https://github.com/mxpv/podsync +$ cd podsync +$ make +$ ./bin/podsync --config config.toml +``` -## One click deployment +### ๐Ÿ› How to debug -[![Deploy to AWS](https://s3.amazonaws.com/cloudformation-examples/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-west-1#/stacks/new?stackName=Podsync&templateURL=https://podsync-cf.s3.amazonaws.com/cloud_formation.yml) +Use the editor [Visual Studio Code](https://code.visualstudio.com/) and install the official [Go](https://marketplace.visualstudio.com/items?itemName=golang.go) extension. 
Afterwards you can execute "Run & Debug" โ–ถ๏ธŽ "Debug Podsync" to debug the application. The required configuration is already prepared (see `.vscode/launch.json`). -## How to run -### Run as binary: -``` -$ ./podsync --config config.toml -``` +### ๐Ÿณ Run via Docker: -### Run via Docker: ``` -$ docker pull mxpv/podsync:latest +$ docker pull ghcr.io/mxpv/podsync:latest $ docker run \ -p 8080:8080 \ -v $(pwd)/data:/app/data/ \ + -v $(pwd)/db:/app/db/ \ -v $(pwd)/config.toml:/app/config.toml \ - mxpv/podsync:latest + ghcr.io/mxpv/podsync:latest ``` -### Run via Docker Compose: +### ๐Ÿณ Run via Docker Compose: + ``` -$ docker-compose up +$ cat docker-compose.yml +services: + podsync: + image: ghcr.io/mxpv/podsync + container_name: podsync + volumes: + - ./data:/app/data/ + - ./db:/app/db/ + - ./config.toml:/app/config.toml + ports: + - 8080:8080 + +$ docker compose up ``` -## How to make a release +## ๐Ÿ“ฆ How to make a release Just push a git tag. CI will do the rest. +## ๐Ÿ“„ License + +This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. 
diff --git a/cloud_formation.yml b/cloud_formation.yml index d9de5558..8c513f0d 100644 --- a/cloud_formation.yml +++ b/cloud_formation.yml @@ -6,10 +6,6 @@ Parameters: Default: t3.micro Description: EC2 machine instance size (see https://aws.amazon.com/ec2/instance-types/) - KeyName: - Type: AWS::EC2::KeyPair::KeyName - Description: SSH key to use for logging in (see https://docs.aws.amazon.com/ground-station/latest/ug/create-ec2-ssh-key-pair.html) - AmiId: Type: AWS::SSM::Parameter::Value Default: '/aws/service/ami-amazon-linux-latest/amzn2-ami-hvm-x86_64-gp2' @@ -34,13 +30,13 @@ Parameters: YouTubeApiKey: Type: String - Default: '' + AllowedPattern: '.+' # Required Description: | Key to use for YouTube API access (see https://github.com/mxpv/podsync/blob/master/docs/how_to_get_youtube_api_key.md) VimeoAccessToken: Type: String - Default: '' + AllowedPattern: '.+' # Required Description: | Key to use for Vimeo API access (see https://github.com/mxpv/podsync/blob/master/docs/how_to_get_vimeo_token.md) @@ -60,7 +56,7 @@ Parameters: PageSize: Type: Number Default: 50 - MinValue: 10 + MinValue: 5 Description: | The number of episodes to query each time @@ -109,8 +105,6 @@ Metadata: ParameterLabels: InstanceType: default: 'Instance type' - KeyName: - default: 'SSH key name' AmiId: default: 'AMI ID' VolumeSize: @@ -131,6 +125,10 @@ Metadata: default: 'Page size' Resources: + NewKeyPair: + Type: AWS::EC2::KeyPair + Properties: + KeyName: !Sub "${AWS::StackName}" Ec2Instance: Type: AWS::EC2::Instance CreationPolicy: @@ -138,7 +136,7 @@ Resources: Count: 1 Properties: InstanceType: !Ref InstanceType - KeyName: !Ref KeyName + KeyName: !Ref NewKeyPair ImageId: !Ref AmiId SecurityGroups: - !Ref AccessSecurityGroup @@ -169,8 +167,11 @@ Resources: tee /home/ec2-user/podsync/config.toml < %s (update '%s')", _feed.ID, _feed.CronSchedule) - // Perform initial update after CLI restart - updates <- _feed + m[cronFeed.ID] = cronID + log.Debugf("-> %s (update '%s')", cronFeed.ID, 
cronFeed.CronSchedule) + + // Only perform initial update if no explicit cron schedule is configured + // This prevents unwanted updates when using fixed schedules in Docker deployments + if !hasExplicitCronSchedule { + updates <- cronFeed + } } c.Start() @@ -177,19 +235,31 @@ func main() { } }) + if cfg.Storage.Type == "s3" { + return // S3 content is hosted externally + } + // Run web server - srv := NewServer(cfg) + srv := web.New(cfg.Server, storage, database) group.Go(func() error { log.Infof("running listener at %s", srv.Addr) - return srv.ListenAndServe() + if cfg.Server.TLS { + return srv.ListenAndServeTLS(cfg.Server.CertificatePath, cfg.Server.KeyFilePath) + } else { + return srv.ListenAndServe() + } }) group.Go(func() error { // Shutdown web server defer func() { + ctxShutDown, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer func() { + cancel() + }() log.Info("shutting down web server") - if err := srv.Shutdown(ctx); err != nil { + if err := srv.Shutdown(ctxShutDown); err != nil { log.WithError(err).Error("server shutdown failed") } }() @@ -204,14 +274,4 @@ func main() { } } }) - - if err := group.Wait(); err != nil && (err != context.Canceled && err != http.ErrServerClosed) { - log.WithError(err).Error("wait error") - } - - if err := database.Close(); err != nil { - log.WithError(err).Error("failed to close database") - } - - log.Info("gracefully stopped") } diff --git a/cmd/podsync/server.go b/cmd/podsync/server.go deleted file mode 100644 index 5ab4fc5c..00000000 --- a/cmd/podsync/server.go +++ /dev/null @@ -1,36 +0,0 @@ -package main - -import ( - "fmt" - "net/http" - - log "github.com/sirupsen/logrus" - - "github.com/mxpv/podsync/pkg/config" -) - -type Server struct { - http.Server -} - -func NewServer(cfg *config.Config) *Server { - port := cfg.Server.Port - if port == 0 { - port = 8080 - } - bindAddress := cfg.Server.BindAddress - if bindAddress == "*" { - bindAddress = "" - } - srv := Server{} - - srv.Addr = 
fmt.Sprintf("%s:%d", bindAddress, port) - log.Debugf("using address: %s:%s", bindAddress, srv.Addr) - - fs := http.FileServer(http.Dir(cfg.Server.DataDir)) - path := cfg.Server.Path - http.Handle(fmt.Sprintf("/%s", path), fs) - log.Debugf("handle path: /%s", path) - - return &srv -} diff --git a/config.toml.example b/config.toml.example new file mode 100644 index 00000000..0ee44632 --- /dev/null +++ b/config.toml.example @@ -0,0 +1,168 @@ +# This is an example of TOML configuration file for Podsync. + +# Global cleanup policy applied to feeds that don't specify their own cleanup policy. +# When set, this policy is used as a fallback for all feeds. +# Comment out or remove this section if you don't want a global cleanup policy. +[cleanup] +keep_last = 50 # Keep last 50 episodes globally (unless overridden per feed) + +# Web server related configuration. +[server] +# HTTP server port. +port = 8080 +# Optional. If you want to hide Podsync behind reverse proxy like nginx, you can use hostname field. +# Server will be accessible from http://localhost:8080, but episode links will point to https://my.test.host:4443/ID1/XYZ +hostname = "https://my.test.host:4443" +# Bind a specific IP addresses for server ,"*": bind all IP addresses which is default option, localhost or 127.0.0.1 bind a single IPv4 address +bind_address = "172.20.10.2" +# Specify path for reverse proxy and only [A-Za-z0-9] +path = "test" +# Optional. Enable Web UI. Feeds have to be include in OPML file to appear (see below) +web_ui = true +# Optional. If you want to use TLS you must set the TLS flag and path to the certificate file and private key file. +tls = true +certificate_path = "/var/www/cert.pem" +key_file_path = "/var/www/priv.pem" +# Optional. Enable debug endpoints (/debug/vars) for runtime metrics. Disabled by default for security. +# Only enable this if you need to debug the application and the endpoint is not publicly accessible. 
+debug_endpoints = false
+
+# Configure where to store the episode data
+[storage]
+  # Could be "local" (default) for the local file system, or "s3" for a S3-compatible storage provider (e.g. AWS S3)
+  type = "local"
+
+  [storage.local]
+  data_dir = "/app/data" # Don't change if you run podsync via docker
+
+  # To configure for a S3 provider, set key and secret in environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`, respectively;
+  # then fill out the API endpoint, region, and bucket below.
+  [storage.s3]
+  endpoint_url = "https://s3.us-west-2.amazonaws.com"
+  region = "us-west-2"
+  bucket = "example-bucket-name"
+  # If you use prefix, you may need to add a path to `server.hostname` setting
+  # e.g. https://example-bucket-name.s3.us-west-2.amazonaws.com/example/prefix/
+  prefix = "example/prefix"
+
+# API keys to be used to access YouTube and Vimeo.
+# These can be specified either as a string parameter or an array of strings (so those will be rotated).
+# Alternatively, you can set the following environment variables:
+# PODSYNC_YOUTUBE_API_KEY for YouTube
+# PODSYNC_VIMEO_API_KEY for Vimeo
+# PODSYNC_SOUNDCLOUD_API_KEY for SoundCloud
+# PODSYNC_TWITCH_API_KEY for Twitch (format: CLIENT_ID:CLIENT_SECRET)
+# Environment variables support multiple keys separated by spaces for API key rotation:
+# export PODSYNC_YOUTUBE_API_KEY="key1 key2 key3"
+[tokens]
+youtube = "YOUTUBE_API_TOKEN" # YouTube API Key. See https://developers.google.com/youtube/registering_an_application
+vimeo = [ # Multiple keys will be rotated.
+    "VIMEO_API_KEY_1", # Vimeo developer keys. See https://developer.vimeo.com/api/guides/start#generate-access-token
+    "VIMEO_API_KEY_2"
+]
+
+# The list of data sources to be hosted by Podsync.
+# These are channels, users, playlists, etc.
+[feeds]
+  # Each channel must have a unique identifier (in this example "ID1").
+  [feeds.ID1]
+  # URL address of a channel, group, user, or playlist. 
+ url = "https://www.youtube.com/channel/CHANNEL_NAME_TO_HOST" + + # The number of episodes to query each update (keep in mind, that this might drain API token) + page_size = 50 + + # How often query for updates, examples: "60m", "4h", "2h45m" + update_period = "12h" + + quality = "high" # "high" or "low" + format = "video" # "audio", "video" or "custom" + # When format = "custom" + # YouTubeDL format parameter and result file extension + custom_format = { youtube_dl_format = "bestaudio[ext=m4a]", extension = "m4a" } + + playlist_sort = "asc" # or "desc", which will fetch playlist items from the end + + # Optional maximal height of video, example: 720, 1080, 1440, 2160, ... + max_height = 720 + + # Optionally include this feed in OPML file (default value: false) + opml = true + + # Optional cron expression format for more precise update schedule. + # If set then overwrite 'update_period'. + cron_schedule = "@every 12h" + + # Whether to cleanup old episodes for this specific feed. + # Keep last 10 episodes (order desc by PubDate) + # This overrides the global cleanup policy if one is set. + clean = { keep_last = 10 } + + # Optional Golang regexp format. + # If set, then only download matching episodes. + # Duration filters are in seconds. + # max_age filter is in days. + # min_age filter is in days. + filters = { title = "regex for title here", not_title = "regex for negative title match", description = "...", not_description = "...", min_duration = 0, max_duration = 86400, max_age = 365, min_age = 1 } + + # Optional extra arguments passed to youtube-dl when downloading videos from this feed. + # This example would embed available English closed captions in the videos. + # Note that setting '--audio-format' for audio format feeds, or '--format' or '--output' for any format may cause + # unexpected behaviour. You should only use this if you know what you are doing, and have read up on youtube-dl's options! 
+ youtube_dl_args = ["--write-sub", "--embed-subs", "--sub-lang", "en,en-US,en-GB"] + + # When set to true, podcasts indexers such as iTunes or Google Podcasts will not index this podcast + private_feed = true + + # Optional post-episode download hooks + # Execute commands after each episode is downloaded + # Available environment variables: EPISODE_FILE, FEED_NAME, EPISODE_TITLE + + # Webhook notification example + [[feeds.ID1.post_episode_download]] + command = ["curl", "-X", "POST", "-d", "New episode: $EPISODE_TITLE", "https://webhook.example.com/notify"] + timeout = 30 + + # Custom script example + [[feeds.ID1.post_episode_download]] + command = ["/path/to/your/process-episode.sh"] + timeout = 120 + + # Optional feed customizations + [feeds.ID1.custom] + title = "Level1News" + description = "News sections of Level1Techs, in a podcast feed!" + author = "Level1Tech" + cover_art = "{IMAGE_URL}" + cover_art_quality = "high" + category = "TV" + subcategories = ["Documentary", "Tech News"] + explicit = true + lang = "en" + author = "Mrs. Smith (mrs@smith.org)" + ownerName = "Mrs. Smith" + ownerEmail = "mrs@smith.org" + # optional: this will override the default link (usually the URL address) in the generated RSS feed with another link + link = "https://example.org" + +# Podsync uses local database to store feeds and episodes metadata. +# This section is optional and usually not needed to configure unless some very specific corner cases. +# Refer to https://dgraph.io/docs/badger/get-started/#memory-usage for documentation. +[database] + badger = { truncate = true, file_io = true } + +# Youtube-dl specific configuration. +[downloader] +# Optional, auto update youtube-dl every 24 hours +self_update = true +# Download timeout in minutes. +timeout = 15 + +# Optional log config. 
If not specified logs to the stdout +[log] +filename = "podsync.log" +max_size = 50 # MB +max_age = 30 # days +max_backups = 7 +compress = true +debug = false diff --git a/docker-compose.yml b/docker-compose.yml index d4ee5e94..a9e4e8b2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,12 +1,16 @@ -version: '2.2' - services: podsync: container_name: podsync image: mxpv/podsync:latest restart: always ports: - - 80:80 + - 8080:8080 volumes: - ./data:/app/data/ + - ./db:/app/db/ - ./config.toml:/app/config.toml + # environment: + # - PODSYNC_YOUTUBE_API_KEY=${YOUTUBE_API_KEY} + # - PODSYNC_VIMEO_API_KEY=${VIMEO_API_KEY} + # - PODSYNC_SOUNDCLOUD_API_KEY=${SOUNDCLOUD_API_KEY} + # - PODSYNC_TWITCH_API_KEY=${TWITCH_API_KEY} diff --git a/docs/cron.md b/docs/cron.md new file mode 100644 index 00000000..dac5176e --- /dev/null +++ b/docs/cron.md @@ -0,0 +1,37 @@ +# Schedule via cron expression + +You can use `cron_schedule` field to build more precise update checks schedule. +A cron expression represents a set of times, using 5 space-separated fields. + +| Field name | Mandatory? | Allowed values | Allowed special characters | +| ------------ | ---------- | --------------- | -------------------------- | +| Minutes | Yes | 0-59 | * / , - | +| Hours | Yes | 0-23 | * / , - | +| Day of month | Yes | 1-31 | * / , - ? | +| Month | Yes | 1-12 or JAN-DEC | * / , - | +| Day of week | Yes | 0-6 or SUN-SAT | * / , - ? | + +Month and Day-of-week field values are case insensitive. `SUN`, `Sun`, and `sun` are equally accepted. +The specific interpretation of the format is based on the Cron Wikipedia page: https://en.wikipedia.org/wiki/Cron + +#### Predefined schedules + +You may use one of several pre-defined schedules in place of a cron expression. 
+ +| Entry | Description | Equivalent to | +| ----------------------- | -------------------------------------------| ------------- | +| `@monthly` | Run once a month, midnight, first of month | `0 0 1 * *` | +| `@weekly` | Run once a week, midnight between Sat/Sun | `0 0 * * 0` | +| `@daily (or @midnight)` | Run once a day, midnight | `0 0 * * *` | +| `@hourly` | Run once an hour, beginning of hour | `0 * * * *` | + +#### Intervals + +You may also schedule a job to execute at fixed intervals, starting at the time it's added +or cron is run. This is supported by formatting the cron spec like this: + + @every + +where "duration" is a string accepted by [time.ParseDuration](http://golang.org/pkg/time/#ParseDuration). + +For example, `@every 1h30m10s` would indicate a schedule that activates after 1 hour, 30 minutes, 10 seconds, and then every interval after that. diff --git a/docs/how_to_get_vimeo_token.md b/docs/how_to_get_vimeo_token.md index bd84e8ce..b21e5038 100644 --- a/docs/how_to_get_vimeo_token.md +++ b/docs/how_to_get_vimeo_token.md @@ -9,8 +9,17 @@ ![Generate an access token](img/vimeo_access_token.png) 6. Click `Generate`. ![Tokens](img/vimeo_token.png) -7. Copy a token to your CLI's configuration file. +7. Copy a token to your CLI's configuration file or set it as an environment variable. ```toml [tokens] -vimeo = "ecd4d34b07bcb9509ABCD" +vimeo = "key1" +``` +Or set the environment variable: +```sh +export PODSYNC_VIMEO_API_KEY="key1" +``` + +For API key rotation, you can specify multiple keys separated by spaces: +```sh +export PODSYNC_VIMEO_API_KEY="key1 key2" ``` diff --git a/docs/how_to_get_youtube_api_key.md b/docs/how_to_get_youtube_api_key.md index a71f479c..1378cfe8 100644 --- a/docs/how_to_get_youtube_api_key.md +++ b/docs/how_to_get_youtube_api_key.md @@ -15,9 +15,18 @@ 6. Click `Create credentials`. 7. Select `API key`. ![Create API key](img/youtube_create_api_key.png) -8. Copy token to your CLI's configuration file. +8. 
Copy token to your CLI's configuration file or set it as an environment variable:
 ![Copy token](img/youtube_copy_token.png)
 ```toml
 [tokens]
-youtube = "AIzaSyD4w2s-k79YNR98ABC"
+youtube = "key1"
+```
+Or set the environment variable:
+```sh
+export PODSYNC_YOUTUBE_API_KEY="key1"
+```
+
+For API key rotation, you can specify multiple keys separated by spaces:
+```sh
+export PODSYNC_YOUTUBE_API_KEY="key1 key2"
 ```
\ No newline at end of file
diff --git a/docs/how_to_setup_podsync_on_synology_nas.md b/docs/how_to_setup_podsync_on_synology_nas.md
new file mode 100644
index 00000000..aeab0b4b
--- /dev/null
+++ b/docs/how_to_setup_podsync_on_synology_nas.md
@@ -0,0 +1,85 @@
+# Podsync on Synology NAS Guide
+
+*Written by [@lucasjanin](https://github.com/lucasjanin)*
+
+This installs `podsync` on a Synology NAS with SSL and port 443.
+It requires a domain with DDNS and an SSL certificate.
+I'm using a DDNS from Synology with an SSL certificate. By chance, my provider doesn't block ports 80 and 443.
+
+
+1. Open "Package Center" and install "Apache HTTP Server 2.4"
+2. In the "Web Station", select the default server, click edit and activate "Enable personal website"
+3. Create a folder "podsync" in web share using "File Station", the path will be like "/volume1/web/podsync" (where the files will be saved)
+4. Create a folder "podsync" in another share using "File Station", the path will be like "/volume1/docker/podsync" (where the config will be saved)
+5. Create a `config.toml` file in Notepad (or any other editor) and copy it into the above folder.
+Here you will configure your specific settings. 
Here's mine as an example:
+
+```toml
+[server]
+port = 9090
+hostname = "https://xxxxxxxx.xxx"
+
+[storage]
+  [storage.local]
+  data_dir = "/app/data"
+
+[tokens]
+youtube = "xxxxxxx"
+
+[feeds]
+  [feeds.ID1]
+  url = "https://www.youtube.com/channel/UCJldRgT_D7Am-ErRHQZ90uw"
+  update_period = "1h"
+  quality = "high" # "high" or "low"
+  format = "audio" # "audio", "video" or "custom"
+  filters = { title = "Yann Marguet" }
+  opml = true
+  clean = { keep_last = 20 }
+  private_feed = true
+  [feeds.ID1.custom]
+  title = "Yann Marguet - Moi, ce que j'en dis..."
+  description = "Yann Marguet sur France Inter"
+  author = "Yann Marguet"
+  cover_art = "https://www.radiofrance.fr/s3/cruiser-production/2023/01/834dd18e-a74c-4a65-afb0-519a5f7b11c1/1400x1400_moi-ce-que-j-en-dis-marguet.jpg"
+  cover_art_quality = "high"
+  category = "Comedy"
+  subcategories = ["Stand-Up"]
+  lang = "fr"
+  ownerName = "xxxx xxxxx"
+  ownerEmail = "xx@xxxx.xx"
+```
+
+Note that I'm not using port `8080` because I already have another app on my Synology using that port.
+Also, I'm using my own hostname so I can download the podcasts to my podcast app from outside my network,
+but you don't need to do this.
+
+6. Now you need to SSH into Synology using an app like Putty (on Windows - just google for an app).
+
+7. Copy and paste the following command:
+
+```bash
+docker pull mxpv/podsync:latest
+```
+
+Docker will download the latest version of Podsync.
+
+8. Copy and paste the following command:
+
+```bash
+docker run \
+  -p 9090:9090 \
+  -v /volume1/web/podsync:/app/data/ \
+  -v /volume1/docker/podsync/podsync-config.toml:/app/config.toml \
+  mxpv/podsync:latest
+```
+
+This will install a container in Docker and run it. Podsync will load and read your config.toml file and start downloading episodes.
+
+9. I recommend you go into the container's settings in Container Station and set it to Auto Start.
+
+10. 
Once the downloads have finished for each of your feeds, you will then have an XML feed for each feed +that you should be able to access at `https://xxxxxxxx.xxx/podsync/ID1.xml`. Paste them into your podcast app of choice, +and you're good to go! + +Note: you can validate your XML using this website: +https://www.castfeedvalidator.com/validate.php diff --git a/go.mod b/go.mod index 1aa09325..fec5323d 100644 --- a/go.mod +++ b/go.mod @@ -1,27 +1,64 @@ module github.com/mxpv/podsync +go 1.25 + require ( github.com/BrianHicks/finch v0.0.0-20140409222414-419bd73c29ec - github.com/dgraph-io/badger v1.6.0 + github.com/aws/aws-sdk-go v1.44.144 + github.com/dgraph-io/badger v1.6.2 github.com/eduncan911/podcast v1.4.2 + github.com/gabriel-vasile/mimetype v1.4.12 github.com/gilliek/go-opml v1.0.0 - github.com/golang/mock v1.4.3 - github.com/hashicorp/go-multierror v1.0.0 - github.com/jessevdk/go-flags v1.4.0 - github.com/kylelemons/godebug v1.1.0 // indirect - github.com/naoina/go-stringutil v0.1.0 // indirect - github.com/naoina/toml v0.1.1 + github.com/golang/mock v1.6.0 + github.com/hashicorp/go-multierror v1.1.1 + github.com/jessevdk/go-flags v1.6.1 + github.com/nicklaw5/helix v1.25.0 + github.com/pelletier/go-toml v1.9.5 github.com/pkg/errors v0.9.1 github.com/robfig/cron/v3 v3.0.1 github.com/silentsokolov/go-vimeo v0.0.0-20190116124215-06829264260c - github.com/sirupsen/logrus v1.2.0 - github.com/stretchr/testify v1.4.0 - golang.org/x/net v0.0.0-20190620200207-3b0461eec859 - golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd - golang.org/x/sync v0.0.0-20190423024810-112230192c58 - google.golang.org/api v0.0.0-20180718221112-efcb5f25ac56 - google.golang.org/appengine v1.1.0 // indirect - gopkg.in/natefinch/lumberjack.v2 v2.0.0 + github.com/sirupsen/logrus v1.9.3 + github.com/stretchr/testify v1.11.1 + github.com/zackradisic/soundcloud-api v0.1.8 + golang.org/x/oauth2 v0.34.0 + golang.org/x/sync v0.19.0 + google.golang.org/api v0.259.0 + 
gopkg.in/natefinch/lumberjack.v2 v2.2.1 ) -go 1.13 +require ( + cloud.google.com/go/auth v0.18.0 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/compute/metadata v0.9.0 // indirect + github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 // indirect + github.com/cespare/xxhash v1.1.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgraph-io/ristretto v0.0.2 // indirect + github.com/dustin/go-humanize v1.0.0 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/golang-jwt/jwt v3.2.2+incompatible // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/s2a-go v0.1.9 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect + github.com/googleapis/gax-go/v2 v2.16.0 // indirect + github.com/grafov/m3u8 v0.11.1 // indirect + github.com/hashicorp/errwrap v1.0.0 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect + go.opentelemetry.io/otel v1.38.0 // indirect + go.opentelemetry.io/otel/metric v1.38.0 // indirect + go.opentelemetry.io/otel/trace v1.38.0 // indirect + golang.org/x/crypto v0.46.0 // indirect + golang.org/x/net v0.48.0 // indirect + golang.org/x/sys v0.39.0 // indirect + golang.org/x/text v0.32.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20251222181119-0a764e51fe1b // indirect + google.golang.org/grpc v1.78.0 // indirect + google.golang.org/protobuf v1.36.11 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum index 55670fe9..9eefef89 100644 --- a/go.sum +++ b/go.sum @@ -1,10 +1,21 @@ -github.com/AndreasBriese/bbloom 
v0.0.0-20190306092124-e2d15f34fcf9 h1:HD8gA2tkByhMAwYaFAX9w2l7vxvBQ5NMoxDrkhqhtn4= -github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= +cloud.google.com/go/auth v0.18.0 h1:wnqy5hrv7p3k7cShwAU/Br3nzod7fxoqG+k0VZ+/Pk0= +cloud.google.com/go/auth v0.18.0/go.mod h1:wwkPM1AgE1f2u6dG443MiWoD8C3BtOywNsUMcUTVDRo= +cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs= +cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10= +github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 h1:cTp8I5+VIoKjsnZuH8vjyaysT/ses3EvZeaV/1UkF2M= +github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/BrianHicks/finch v0.0.0-20140409222414-419bd73c29ec h1:1VPruZMM1WQC7POhjxbZOWK564cuFz1hlpwYW6ocM4E= github.com/BrianHicks/finch v0.0.0-20140409222414-419bd73c29ec/go.mod h1:+hWo/MWgY8VtjZvdrYM2nPRMaK40zX2iPsH/qD0+Xs0= -github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/aws/aws-sdk-go v1.44.144 h1:mMWdnYL8HZsobrQe1mwvQ18Xt8UbOVhWgipjuma5Mkg= +github.com/aws/aws-sdk-go v1.44.144/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= +github.com/cespare/xxhash v1.1.0/go.mod 
h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= @@ -12,42 +23,75 @@ github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwc github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dgraph-io/badger v1.6.0 h1:DshxFxZWXUcO0xX476VJC07Xsr6ZCBVRHKZ93Oh7Evo= -github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4= +github.com/dgraph-io/badger v1.6.2 h1:mNw0qs90GVgGGWylh0umH5iag1j6n/PeJtNvL6KY/x8= +github.com/dgraph-io/badger v1.6.2/go.mod h1:JW2yswe3V058sS0kZ2h/AXeDSqFjxnZcRrVH//y2UQE= +github.com/dgraph-io/ristretto v0.0.2 h1:a5WaUrDa0qm0YrAAS1tUykT5El3kt62KNZZeMxQn3po= +github.com/dgraph-io/ristretto v0.0.2/go.mod h1:KPxhHT9ZxKefz+PCeOGsrHpl1qZ7i70dGTu2u+Ahh6E= github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA= github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/eduncan911/podcast v1.4.2 h1:S+fsUlbR2ULFou2Mc52G/MZI8JVJHedbxLQnoA+MY/w= github.com/eduncan911/podcast v1.4.2/go.mod h1:mSxiK1z5KeNO0YFaQ3ElJlUZbbDV9dA7R9c1coeeXkc= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= 
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw= +github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s= github.com/gilliek/go-opml v1.0.0 h1:X8xVjtySRXU/x6KvaiXkn7OV3a4DHqxY8Rpv6U/JvCY= github.com/gilliek/go-opml v1.0.0/go.mod h1:fOxmtlzyBvUjU6bjpdjyxCGlWz+pgtAHrHf/xRZl3lk= -github.com/golang/mock v1.4.3 h1:GV+pQPG/EUUbkh47niozDcADz6go/dUwhVzdUQHIVRw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod 
h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= +github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.7 h1:zrn2Ee/nWmHulBx5sAVrGgAa0f2/R35S4DJwfFaUPFQ= +github.com/googleapis/enterprise-certificate-proxy v0.3.7/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= +github.com/googleapis/gax-go/v2 v2.16.0 h1:iHbQmKLLZrexmb0OSsNGTeSTS0HO4YvFOG8g5E4Zd0Y= +github.com/googleapis/gax-go/v2 v2.16.0/go.mod h1:o1vfQjjNZn4+dPnRdl/4ZD7S9414Y4xA+a/6Icj6l14= +github.com/grafov/m3u8 v0.11.1 h1:igZ7EBIB2IAsPPazKwRKdbhxcoBKO3lO1UY57PZDeNA= +github.com/grafov/m3u8 v0.11.1/go.mod h1:nqzOkfBiZJENr52zTVd/Dcl03yzphIMbJqkXGu+u080= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA= -github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= 
-github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= -github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4= +github.com/jessevdk/go-flags v1.6.1/go.mod h1:Mk8T1hIAWpOiJiHa9rJASDK2UGWji0EuPGBnNLMooyc= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/naoina/go-stringutil v0.1.0 h1:rCUeRUHjBjGTSHl0VC00jUPLz8/F9dDzYI70Hzifhks= -github.com/naoina/go-stringutil v0.1.0/go.mod h1:XJ2SJL9jCtBh+P9q5btrd/Ylo8XwT/h1USek5+NqSA0= -github.com/naoina/toml v0.1.1 h1:PT/lllxVVN0gzzSqSlHEmP8MJB4MY2U7STGxiouV4X8= 
-github.com/naoina/toml v0.1.1/go.mod h1:NBIhNtsFMo3G2szEBne+bO4gS192HuIYRqfvOWb4i1E= +github.com/nicklaw5/helix v1.25.0 h1:Mrz537izZVsGdM3I46uGAAlslj61frgkhS/9xQqyT/M= +github.com/nicklaw5/helix v1.25.0/go.mod h1:yvXZFapT6afIoxnAvlWiJiUMsYnoHl7tNs+t0bloAMw= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -55,11 +99,16 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/silentsokolov/go-vimeo v0.0.0-20190116124215-06829264260c h1:KhHx/Ta3c9C1gcSo5UhDeo/D4JnhnxJTrlcOEOFiMfY= github.com/silentsokolov/go-vimeo v0.0.0-20190116124215-06829264260c/go.mod h1:10FeaKUMy5t3KLsYfy54dFrq0rpwcfyKkKcF7vRGIRY= -github.com/sirupsen/logrus v1.2.0 h1:juTguoYk5qI21pwyTXY3B3Y5cOTH3ZUyZCg1v/mihuo= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= 
+github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= +github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= @@ -67,45 +116,107 @@ github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb6 github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= 
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793 h1:u+LnwYTOOW7Ukr/fppxEb1Nwz0AtPflrblfvUudpo+I= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/zackradisic/soundcloud-api v0.1.8 h1:Fc4IVbee8ggGZ/vyx26uyTwKeh6Vn3cCrPXdTbQypjI= +github.com/zackradisic/soundcloud-api v0.1.8/go.mod h1:ycGIZFVZdUVC7B8pcfgze1bRBePPmjYlIGnRptKByQ0= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q= +go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= +go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= +go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= +go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= +go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= +go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= +go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= +go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= +go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= +go.opentelemetry.io/otel/trace v1.38.0/go.mod 
h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU= +golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd h1:QQhib242ErYDSMitlBm8V7wYCm/1a25hV8qMadIKLPA= -golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod 
h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= +golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= +golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= +golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw= +golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb h1:fgwFCsaw9buMuxNd6+DQfAuSFqbNiQZpcgJQAgJsK6k= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk= +golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -google.golang.org/api v0.0.0-20180718221112-efcb5f25ac56 h1:iDRbkenn0VZEo05mHiCtN6/EfbZj7x1Rg+tPjB5HiQc= -google.golang.org/api v0.0.0-20180718221112-efcb5f25ac56/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= -google.golang.org/appengine v1.1.0 h1:igQkv0AAhEIvTEpD5LIpAfav2eeVO9HBTjvKHVJPRSs= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= 
+golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU= +golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= +gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/api v0.259.0 h1:90TaGVIxScrh1Vn/XI2426kRpBqHwWIzVBzJsVZ5XrQ= +google.golang.org/api v0.259.0/go.mod h1:LC2ISWGWbRoyQVpxGntWwLWN/vLNxxKBK9KuJRI8Te4= +google.golang.org/genproto v0.0.0-20251202230838-ff82c1b0f217 h1:GvESR9BIyHUahIb0NcTum6itIWtdoglGX+rnGxm2934= +google.golang.org/genproto v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:yJ2HH4EHEDTd3JiLmhds6NkJ17ITVYOdV3m3VKOnws0= +google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 h1:fCvbg86sFXwdrl5LgVcTEvNC+2txB5mgROGmRL5mrls= +google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:+rXWjjaukWZun3mLfjmVnQi18E1AsFbDN9QdJ5YXLto= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251222181119-0a764e51fe1b h1:Mv8VFug0MP9e5vUxfBcE3vUkV6CImK3cMNMIDFjmzxU= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251222181119-0a764e51fe1b/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ= +google.golang.org/grpc v1.78.0 
h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc= +google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8= -gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= +gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/html/index.html b/html/index.html new file mode 100644 index 00000000..e7846dda --- /dev/null +++ b/html/index.html @@ -0,0 +1,776 @@ + + + + + + Podsync 
+ + + + + + + + + +
+ +
+
+

+ + Latest Episodes +

+
+
+ + +
+
+

Loading episodes from feeds...

+
+ + + + + +
+ +
+
+ + + + + + + + diff --git a/pkg/builder/builder.go b/pkg/builder/builder.go index 7687cd05..54e17e8f 100644 --- a/pkg/builder/builder.go +++ b/pkg/builder/builder.go @@ -3,22 +3,26 @@ package builder import ( "context" + "github.com/mxpv/podsync/pkg/feed" "github.com/pkg/errors" - "github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/model" ) type Builder interface { - Build(ctx context.Context, cfg *config.Feed) (*model.Feed, error) + Build(ctx context.Context, cfg *feed.Config) (*model.Feed, error) } -func New(ctx context.Context, provider model.Provider, key string) (Builder, error) { +func New(ctx context.Context, provider model.Provider, key string, downloader Downloader) (Builder, error) { switch provider { case model.ProviderYoutube: - return NewYouTubeBuilder(key) + return NewYouTubeBuilder(key, downloader) case model.ProviderVimeo: return NewVimeoBuilder(ctx, key) + case model.ProviderSoundcloud: + return NewSoundcloudBuilder() + case model.ProviderTwitch: + return NewTwitchBuilder(key) default: return nil, errors.Errorf("unsupported provider %q", provider) } diff --git a/pkg/builder/soundcloud.go b/pkg/builder/soundcloud.go new file mode 100644 index 00000000..494de8aa --- /dev/null +++ b/pkg/builder/soundcloud.go @@ -0,0 +1,96 @@ +package builder + +import ( + "context" + "strconv" + "time" + + "github.com/mxpv/podsync/pkg/feed" + "github.com/pkg/errors" + soundcloudapi "github.com/zackradisic/soundcloud-api" + + "github.com/mxpv/podsync/pkg/model" +) + +type SoundCloudBuilder struct { + client *soundcloudapi.API +} + +func (s *SoundCloudBuilder) Build(_ctx context.Context, cfg *feed.Config) (*model.Feed, error) { + info, err := ParseURL(cfg.URL) + if err != nil { + return nil, err + } + + _feed := &model.Feed{ + ItemID: info.ItemID, + Provider: info.Provider, + LinkType: info.LinkType, + Format: cfg.Format, + Quality: cfg.Quality, + PageSize: cfg.PageSize, + UpdatedAt: time.Now().UTC(), + } + + if info.LinkType == model.TypePlaylist { + if 
soundcloudapi.IsPlaylistURL(cfg.URL) { + scplaylist, err := s.client.GetPlaylistInfo(cfg.URL) + if err != nil { + return nil, err + } + + _feed.Title = scplaylist.Title + _feed.Description = scplaylist.Description + _feed.ItemURL = cfg.URL + + date, err := time.Parse(time.RFC3339, scplaylist.CreatedAt) + if err == nil { + _feed.PubDate = date + } + _feed.Author = scplaylist.User.Username + _feed.CoverArt = scplaylist.ArtworkURL + + var added = 0 + for _, track := range scplaylist.Tracks { + pubDate, _ := time.Parse(time.RFC3339, track.CreatedAt) + var ( + videoID = strconv.FormatInt(track.ID, 10) + duration = track.DurationMS / 1000 + mediaURL = track.PermalinkURL + trackSize = track.DurationMS * 15 // very rough estimate + ) + + _feed.Episodes = append(_feed.Episodes, &model.Episode{ + ID: videoID, + Title: track.Title, + Description: track.Description, + Duration: duration, + Size: trackSize, + VideoURL: mediaURL, + PubDate: pubDate, + Thumbnail: track.ArtworkURL, + Status: model.EpisodeNew, + }) + + added++ + + if added >= _feed.PageSize { + return _feed, nil + } + } + + return _feed, nil + } + } + + return nil, errors.New(("unsupported soundcloud feed type")) +} + +func NewSoundcloudBuilder() (*SoundCloudBuilder, error) { + sc, err := soundcloudapi.New(soundcloudapi.APIOptions{}) + if err != nil { + return nil, errors.Wrap(err, "failed to create soundcloud client") + } + + return &SoundCloudBuilder{client: sc}, nil +} diff --git a/pkg/builder/soundcloud_test.go b/pkg/builder/soundcloud_test.go new file mode 100644 index 00000000..269b4fa7 --- /dev/null +++ b/pkg/builder/soundcloud_test.go @@ -0,0 +1,63 @@ +package builder + +import ( + "context" + "testing" + + "github.com/mxpv/podsync/pkg/feed" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var testCtx = context.Background() + +// newSoundcloudBuilderSafe attempts to create a SoundCloud builder, +// returning nil if initialization fails (including panics from the library). 
+func newSoundcloudBuilderSafe() (builder *SoundCloudBuilder) { + defer func() { + if r := recover(); r != nil { + builder = nil + } + }() + + var err error + builder, err = NewSoundcloudBuilder() + if err != nil { + return nil + } + return builder +} + +func TestSoundCloud_BuildFeed(t *testing.T) { + builder := newSoundcloudBuilderSafe() + if builder == nil { + t.Skip("Skipping SoundCloud test: unable to initialize SoundCloud client (service may be unavailable)") + } + + urls := []string{ + "https://soundcloud.com/moby/sets/remixes", + "https://soundcloud.com/npr/sets/soundscapes", + } + + for _, addr := range urls { + t.Run(addr, func(t *testing.T) { + _feed, err := builder.Build(testCtx, &feed.Config{URL: addr}) + require.NoError(t, err) + + assert.NotEmpty(t, _feed.Title) + assert.NotEmpty(t, _feed.Description) + assert.NotEmpty(t, _feed.Author) + assert.NotEmpty(t, _feed.ItemURL) + + assert.NotZero(t, len(_feed.Episodes)) + + for _, item := range _feed.Episodes { + assert.NotEmpty(t, item.Title) + assert.NotEmpty(t, item.VideoURL) + assert.NotZero(t, item.Duration) + assert.NotEmpty(t, item.Title) + assert.NotEmpty(t, item.Thumbnail) + } + }) + } +} diff --git a/pkg/builder/twitch.go b/pkg/builder/twitch.go new file mode 100644 index 00000000..bd458205 --- /dev/null +++ b/pkg/builder/twitch.go @@ -0,0 +1,141 @@ +package builder + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/mxpv/podsync/pkg/feed" + "github.com/mxpv/podsync/pkg/model" + "github.com/nicklaw5/helix" + "github.com/pkg/errors" +) + +type TwitchBuilder struct { + client *helix.Client +} + +func (t *TwitchBuilder) Build(_ctx context.Context, cfg *feed.Config) (*model.Feed, error) { + info, err := ParseURL(cfg.URL) + if err != nil { + return nil, errors.Wrap(err, "failed to parse URL") + } + + feed := &model.Feed{ + ItemID: info.ItemID, + Provider: info.Provider, + LinkType: info.LinkType, + Format: cfg.Format, + Quality: cfg.Quality, + PageSize: cfg.PageSize, + UpdatedAt: 
time.Now().UTC(), + } + + if info.LinkType == model.TypeUser { + users, err := t.client.GetUsers(&helix.UsersParams{ + Logins: []string{info.ItemID}, + }) + if err != nil { + return nil, errors.Wrapf(err, "failed to get user: %s", info.ItemID) + } + user := users.Data.Users[0] + + feed.Title = user.DisplayName + feed.Author = user.DisplayName + feed.Description = user.Description + feed.ItemURL = fmt.Sprintf("https://www.twitch.tv/%s", user.Login) + feed.CoverArt = user.ProfileImageURL + feed.PubDate = user.CreatedAt.Time + + isStreaming := false + streamID := "" + streams, _ := t.client.GetStreams(&helix.StreamsParams{ + UserIDs: []string{user.ID}, + }) + if len(streams.Data.Streams) > 0 { + isStreaming = true + streamID = streams.Data.Streams[0].ID + } + + videos, err := t.client.GetVideos(&helix.VideosParams{ + UserID: user.ID, + Period: "all", + Type: "archive", + Sort: "time", + First: 100, + }) + if err != nil { + return nil, errors.Wrapf(err, "failed to get videos for user: %s", info.ItemID) + } + + var added = 0 + for _, video := range videos.Data.Videos { + // Do not add the video of an ongoing stream because it will be incomplete + if !isStreaming || video.StreamID != streamID { + date, err := time.Parse(time.RFC3339, video.PublishedAt) + if err != nil { + return nil, errors.Wrapf(err, "cannot parse PublishedAt time: %s", video.PublishedAt) + } + + replacer := strings.NewReplacer("%{width}", "300", "%{height}", "300") + thumbnailUrl := replacer.Replace(video.ThumbnailURL) + + duration, err := time.ParseDuration(video.Duration) + if err != nil { + return nil, errors.Wrapf(err, "cannot parse duration: %s", video.Duration) + } + durationSeconds := int64(duration.Seconds()) + + feed.Episodes = append(feed.Episodes, &model.Episode{ + ID: video.ID, + Title: fmt.Sprintf("%s (%s)", video.Title, date.Format("2006-01-02 15:04 UTC")), + Description: video.Description, + Thumbnail: thumbnailUrl, + Duration: durationSeconds, + Size: durationSeconds * 33013, // Very 
rough estimate + VideoURL: video.URL, + PubDate: date, + Status: model.EpisodeNew, + }) + + added++ + if added >= feed.PageSize { + return feed, nil + } + } + } + + return feed, nil + } + + return nil, errors.New("unsupported feed type") +} + +func NewTwitchBuilder(clientIDSecret string) (*TwitchBuilder, error) { + parts := strings.Split(clientIDSecret, ":") + if len(parts) != 2 { + return nil, errors.New("invalid twitch key, need to be \"CLIENT_ID:CLIENT_SECRET\"") + } + + clientID := parts[0] + clientSecret := parts[1] + + client, err := helix.NewClient(&helix.Options{ + ClientID: clientID, + ClientSecret: clientSecret, + }) + if err != nil { + return nil, errors.Wrap(err, "failed to create twitch client") + } + + token, err := client.RequestAppAccessToken([]string{}) + if err != nil { + return nil, errors.Wrap(err, "failed to request twitch app token") + } + + // Set the access token on the client + client.SetAppAccessToken(token.Data.AccessToken) + + return &TwitchBuilder{client: client}, nil +} diff --git a/pkg/builder/twitch_test.go b/pkg/builder/twitch_test.go new file mode 100644 index 00000000..190c8c80 --- /dev/null +++ b/pkg/builder/twitch_test.go @@ -0,0 +1,51 @@ +package builder + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/mxpv/podsync/pkg/model" +) + +func TestParseURL_TwitchUser(t *testing.T) { + info, err := ParseURL("https://www.twitch.tv/samueletienne") + require.NoError(t, err) + require.Equal(t, model.TypeUser, info.LinkType) + require.Equal(t, model.ProviderTwitch, info.Provider) + require.Equal(t, "samueletienne", info.ItemID) + + info, err = ParseURL("https://twitch.tv/testuser") + require.NoError(t, err) + require.Equal(t, model.TypeUser, info.LinkType) + require.Equal(t, model.ProviderTwitch, info.Provider) + require.Equal(t, "testuser", info.ItemID) +} + +func TestParseURL_TwitchInvalidLink(t *testing.T) { + _, err := ParseURL("https://www.twitch.tv/") + require.Error(t, err) + require.Contains(t, 
err.Error(), "invalid id") + + _, err = ParseURL("https://www.twitch.tv//") + require.Error(t, err) + require.Contains(t, err.Error(), "invald twitch user path") + + _, err = ParseURL("https://www.twitch.tv/user/extra/path") + require.Error(t, err) + require.Contains(t, err.Error(), "invald twitch user path") +} + +func TestNewTwitchBuilder_InvalidKey(t *testing.T) { + _, err := NewTwitchBuilder("invalid_key") + require.Error(t, err) + require.Contains(t, err.Error(), "invalid twitch key") + + _, err = NewTwitchBuilder("only_one_part") + require.Error(t, err) + require.Contains(t, err.Error(), "invalid twitch key") + + _, err = NewTwitchBuilder("") + require.Error(t, err) + require.Contains(t, err.Error(), "invalid twitch key") +} diff --git a/pkg/builder/url.go b/pkg/builder/url.go index 7fef9198..9482587e 100644 --- a/pkg/builder/url.go +++ b/pkg/builder/url.go @@ -43,6 +43,32 @@ func ParseURL(link string) (model.Info, error) { return info, nil } + if strings.HasSuffix(parsed.Host, "soundcloud.com") { + kind, id, err := parseSoundcloudURL(parsed) + if err != nil { + return model.Info{}, err + } + + info.Provider = model.ProviderSoundcloud + info.LinkType = kind + info.ItemID = id + + return info, nil + } + + if strings.HasSuffix(parsed.Host, "twitch.tv") { + kind, id, err := parseTwitchURL(parsed) + if err != nil { + return model.Info{}, err + } + + info.Provider = model.ProviderTwitch + info.LinkType = kind + info.ItemID = id + + return info, nil + } + return model.Info{}, errors.New("unsupported URL host") } @@ -109,6 +135,30 @@ func parseYoutubeURL(parsed *url.URL) (model.Type, string, error) { return kind, id, nil } + // - https://www.youtube.com/@username + // - https://www.youtube.com/@username/videos + if strings.HasPrefix(path, "/@") { + kind := model.TypeHandle + + parts := strings.Split(parsed.EscapedPath(), "/") + if len(parts) <= 1 { + return "", "", errors.New("invalid handle link") + } + + handle := parts[1] + if handle == "" || 
!strings.HasPrefix(handle, "@") { + return "", "", errors.New("invalid handle format") + } + + // Remove the @ prefix for storage + id := strings.TrimPrefix(handle, "@") + if id == "" { + return "", "", errors.New("empty handle") + } + + return kind, id, nil + } + return "", "", errors.New("unsupported link format") } @@ -152,3 +202,42 @@ func parseVimeoURL(parsed *url.URL) (model.Type, string, error) { return "", "", errors.New("unsupported link format") } + +func parseSoundcloudURL(parsed *url.URL) (model.Type, string, error) { + parts := strings.Split(parsed.EscapedPath(), "/") + if len(parts) <= 3 { + return "", "", errors.New("invald soundcloud link path") + } + + var kind model.Type + + // - https://soundcloud.com/user/sets/example-set + switch parts[2] { + case "sets": + kind = model.TypePlaylist + default: + return "", "", errors.New("invalid soundcloud url, missing sets") + } + + id := parts[3] + + return kind, id, nil +} + +func parseTwitchURL(parsed *url.URL) (model.Type, string, error) { + // - https://www.twitch.tv/samueletienne + path := parsed.EscapedPath() + parts := strings.Split(path, "/") + if len(parts) != 2 { + return "", "", errors.Errorf("invald twitch user path: %s", path) + } + + kind := model.TypeUser + + id := parts[1] + if id == "" { + return "", "", errors.New("invalid id") + } + + return kind, id, nil +} diff --git a/pkg/builder/url_test.go b/pkg/builder/url_test.go index 48625b52..460b8b92 100644 --- a/pkg/builder/url_test.go +++ b/pkg/builder/url_test.go @@ -45,6 +45,29 @@ func TestParseYoutubeURL_User(t *testing.T) { require.Equal(t, "fxigr1", id) } +func TestParseYoutubeURL_Handle(t *testing.T) { + // Test basic handle URL + link, _ := url.ParseRequestURI("https://www.youtube.com/@username") + kind, id, err := parseYoutubeURL(link) + require.NoError(t, err) + require.Equal(t, model.TypeHandle, kind) + require.Equal(t, "username", id) + + // Test handle URL with /videos + link, _ = 
url.ParseRequestURI("https://youtube.com/@testchannel/videos") + kind, id, err = parseYoutubeURL(link) + require.NoError(t, err) + require.Equal(t, model.TypeHandle, kind) + require.Equal(t, "testchannel", id) + + // Test handle URL without www + link, _ = url.ParseRequestURI("https://youtube.com/@myhandle") + kind, id, err = parseYoutubeURL(link) + require.NoError(t, err) + require.Equal(t, model.TypeHandle, kind) + require.Equal(t, "myhandle", id) +} + func TestParseYoutubeURL_InvalidLink(t *testing.T) { link, _ := url.ParseRequestURI("https://www.youtube.com/user///") _, _, err := parseYoutubeURL(link) @@ -53,6 +76,20 @@ func TestParseYoutubeURL_InvalidLink(t *testing.T) { link, _ = url.ParseRequestURI("https://www.youtube.com/channel//videos") _, _, err = parseYoutubeURL(link) require.Error(t, err) + + // Test invalid handle URLs + link, _ = url.ParseRequestURI("https://www.youtube.com/@") + _, _, err = parseYoutubeURL(link) + require.Error(t, err) + + link, _ = url.ParseRequestURI("https://www.youtube.com/") + _, _, err = parseYoutubeURL(link) + require.Error(t, err) + + // Test handle without @ symbol + link, _ = url.ParseRequestURI("https://www.youtube.com/username") + _, _, err = parseYoutubeURL(link) + require.Error(t, err) } func TestParseVimeoURL_Group(t *testing.T) { diff --git a/pkg/builder/vimeo.go b/pkg/builder/vimeo.go index a97747c2..a98f3dda 100644 --- a/pkg/builder/vimeo.go +++ b/pkg/builder/vimeo.go @@ -1,16 +1,16 @@ package builder import ( + "context" "net/http" "strconv" "time" + "github.com/mxpv/podsync/pkg/feed" "github.com/pkg/errors" "github.com/silentsokolov/go-vimeo/vimeo" - "golang.org/x/net/context" "golang.org/x/oauth2" - "github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/model" ) @@ -158,13 +158,13 @@ func (v *VimeoBuilder) queryVideos(getVideos getVideosFunc, feed *model.Feed) er } } -func (v *VimeoBuilder) Build(ctx context.Context, cfg *config.Feed) (*model.Feed, error) { +func (v *VimeoBuilder) Build(ctx 
context.Context, cfg *feed.Config) (*model.Feed, error) { info, err := ParseURL(cfg.URL) if err != nil { return nil, err } - feed := &model.Feed{ + _feed := &model.Feed{ ItemID: info.ItemID, Provider: info.Provider, LinkType: info.LinkType, @@ -175,39 +175,39 @@ func (v *VimeoBuilder) Build(ctx context.Context, cfg *config.Feed) (*model.Feed } if info.LinkType == model.TypeChannel { - if err := v.queryChannel(feed); err != nil { + if err := v.queryChannel(_feed); err != nil { return nil, err } - if err := v.queryVideos(v.client.Channels.ListVideo, feed); err != nil { + if err := v.queryVideos(v.client.Channels.ListVideo, _feed); err != nil { return nil, err } - return feed, nil + return _feed, nil } if info.LinkType == model.TypeGroup { - if err := v.queryGroup(feed); err != nil { + if err := v.queryGroup(_feed); err != nil { return nil, err } - if err := v.queryVideos(v.client.Groups.ListVideo, feed); err != nil { + if err := v.queryVideos(v.client.Groups.ListVideo, _feed); err != nil { return nil, err } - return feed, nil + return _feed, nil } if info.LinkType == model.TypeUser { - if err := v.queryUser(feed); err != nil { + if err := v.queryUser(_feed); err != nil { return nil, err } - if err := v.queryVideos(v.client.Users.ListVideo, feed); err != nil { + if err := v.queryVideos(v.client.Users.ListVideo, _feed); err != nil { return nil, err } - return feed, nil + return _feed, nil } return nil, errors.New("unsupported feed type") diff --git a/pkg/builder/vimeo_test.go b/pkg/builder/vimeo_test.go index dba71e32..475075e4 100644 --- a/pkg/builder/vimeo_test.go +++ b/pkg/builder/vimeo_test.go @@ -5,6 +5,7 @@ import ( "os" "testing" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/mxpv/podsync/pkg/model" @@ -26,11 +27,11 @@ func TestQueryVimeoChannel(t *testing.T) { err = builder.queryChannel(podcast) require.NoError(t, err) - require.Equal(t, "https://vimeo.com/channels/staffpicks", podcast.ItemURL) - require.Equal(t, "Vimeo 
Staff Picks", podcast.Title) - require.Equal(t, "Vimeo Curation", podcast.Author) - require.NotEmpty(t, podcast.Description) - require.NotEmpty(t, podcast.CoverArt) + assert.Equal(t, "https://vimeo.com/channels/staffpicks", podcast.ItemURL) + assert.Equal(t, "Vimeo Staff Picks", podcast.Title) + assert.Equal(t, "Vimeo Curation", podcast.Author) + assert.NotEmpty(t, podcast.Description) + assert.NotEmpty(t, podcast.CoverArt) } func TestQueryVimeoGroup(t *testing.T) { @@ -45,11 +46,11 @@ func TestQueryVimeoGroup(t *testing.T) { err = builder.queryGroup(podcast) require.NoError(t, err) - require.Equal(t, "https://vimeo.com/groups/motion", podcast.ItemURL) - require.Equal(t, "Motion Graphic Artists", podcast.Title) - require.Equal(t, "Danny Garcia", podcast.Author) - require.NotEmpty(t, podcast.Description) - require.NotEmpty(t, podcast.CoverArt) + assert.Equal(t, "https://vimeo.com/groups/motion", podcast.ItemURL) + assert.Equal(t, "Motion Graphic Artists", podcast.Title) + assert.Equal(t, "Danny Garcia", podcast.Author) + assert.NotEmpty(t, podcast.Description) + assert.NotEmpty(t, podcast.CoverArt) } func TestQueryVimeoUser(t *testing.T) { @@ -65,9 +66,9 @@ func TestQueryVimeoUser(t *testing.T) { require.NoError(t, err) require.Equal(t, "https://vimeo.com/motionarray", podcast.ItemURL) - require.Equal(t, "Motion Array", podcast.Title) - require.Equal(t, "Motion Array", podcast.Author) - require.NotEmpty(t, podcast.Description) + assert.NotEmpty(t, podcast.Title) + assert.NotEmpty(t, podcast.Author) + assert.NotEmpty(t, podcast.Description) } func TestQueryVimeoVideos(t *testing.T) { diff --git a/pkg/builder/youtube.go b/pkg/builder/youtube.go index 8e5d4913..a4df798c 100644 --- a/pkg/builder/youtube.go +++ b/pkg/builder/youtube.go @@ -10,13 +10,20 @@ import ( "time" "github.com/BrianHicks/finch/duration" + "github.com/mxpv/podsync/pkg/feed" "github.com/pkg/errors" + log "github.com/sirupsen/logrus" + "google.golang.org/api/option" "google.golang.org/api/youtube/v3" 
- "github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/model" + "github.com/mxpv/podsync/pkg/ytdl" ) +type Downloader interface { + PlaylistMetadata(ctx context.Context, url string) (metadata ytdl.PlaylistMetadata, err error) +} + const ( maxYoutubeResults = 50 hdBytesPerSecond = 350000 @@ -32,20 +39,54 @@ func (key apiKey) Get() (string, string) { } type YouTubeBuilder struct { - client *youtube.Service - key apiKey + client *youtube.Service + key apiKey + downloader Downloader +} + +// Cost: 100 units (call: 1, snippet: 99) +// See https://developers.google.com/youtube/v3/docs/search/list#part +func (yt *YouTubeBuilder) resolveHandle(ctx context.Context, handle string) (string, error) { + req := yt.client.Search.List([]string{"snippet"}). + Q(handle). + Type("channel"). + MaxResults(1) + + resp, err := req.Context(ctx).Do(yt.key) + if err != nil { + return "", errors.Wrapf(err, "failed to search for handle: %s", handle) + } + + if len(resp.Items) == 0 { + return "", model.ErrNotFound + } + + // Get the channel ID from the search result + channelID := resp.Items[0].Snippet.ChannelId + if channelID == "" { + return "", errors.New("channel ID not found in search results") + } + + return channelID, nil } // Cost: 5 units (call method: 1, snippet: 2, contentDetails: 2) // See https://developers.google.com/youtube/v3/docs/channels/list#part func (yt *YouTubeBuilder) listChannels(ctx context.Context, linkType model.Type, id string, parts string) (*youtube.Channel, error) { - req := yt.client.Channels.List(parts) + req := yt.client.Channels.List(strings.Split(parts, ",")) switch linkType { case model.TypeChannel: req = req.Id(id) case model.TypeUser: req = req.ForUsername(id) + case model.TypeHandle: + // Resolve handle to channel ID first + channelID, err := yt.resolveHandle(ctx, id) + if err != nil { + return nil, errors.Wrapf(err, "failed to resolve handle: %s", id) + } + req = req.Id(channelID) default: return nil, errors.New("unsupported link type") } 
@@ -66,7 +107,7 @@ func (yt *YouTubeBuilder) listChannels(ctx context.Context, linkType model.Type, // Cost: 3 units (call method: 1, snippet: 2) // See https://developers.google.com/youtube/v3/docs/playlists/list#part func (yt *YouTubeBuilder) listPlaylists(ctx context.Context, id, channelID string, parts string) (*youtube.Playlist, error) { - req := yt.client.Playlists.List(parts) + req := yt.client.Playlists.List(strings.Split(parts, ",")) if id != "" { req = req.Id(id) @@ -96,7 +137,7 @@ func (yt *YouTubeBuilder) listPlaylistItems(ctx context.Context, feed *model.Fee count = feed.PageSize } - req := yt.client.PlaylistItems.List("id,snippet").MaxResults(int64(count)).PlaylistId(feed.ItemID) + req := yt.client.PlaylistItems.List([]string{"id", "snippet"}).MaxResults(int64(count)).PlaylistId(feed.ItemID) if pageToken != "" { req = req.PageToken(pageToken) } @@ -149,8 +190,8 @@ func (yt *YouTubeBuilder) selectThumbnail(snippet *youtube.ThumbnailDetails, qua func (yt *YouTubeBuilder) GetVideoCount(ctx context.Context, info *model.Info) (uint64, error) { switch info.LinkType { - case model.TypeChannel, model.TypeUser: - // Cost: 3 units + case model.TypeChannel, model.TypeUser, model.TypeHandle: + // Cost: 3 units for channel/user, 103 units for handle (100 + 3) if channel, err := yt.listChannels(ctx, info.LinkType, info.ItemID, "id,statistics"); err != nil { return 0, err } else { // nolint:golint @@ -176,8 +217,8 @@ func (yt *YouTubeBuilder) queryFeed(ctx context.Context, feed *model.Feed, info ) switch info.LinkType { - case model.TypeChannel, model.TypeUser: - // Cost: 5 units for channel or user + case model.TypeChannel, model.TypeUser, model.TypeHandle: + // Cost: 5 units for channel/user, 105 units for handle (100 + 5) channel, err := yt.listChannels(ctx, info.LinkType, info.ItemID, "id,snippet,contentDetails") if err != nil { return err @@ -186,7 +227,11 @@ func (yt *YouTubeBuilder) queryFeed(ctx context.Context, feed *model.Feed, info feed.Title = 
channel.Snippet.Title feed.Description = channel.Snippet.Description - if channel.Kind == "youtube#channel" { + if info.LinkType == model.TypeHandle { + // For handles, use the handle URL format + feed.ItemURL = fmt.Sprintf("https://youtube.com/@%s", info.ItemID) + feed.Author = fmt.Sprintf("@%s", info.ItemID) + } else if channel.Kind == "youtube#channel" { feed.ItemURL = fmt.Sprintf("https://youtube.com/channel/%s", channel.Id) feed.Author = "" } else { @@ -211,21 +256,30 @@ func (yt *YouTubeBuilder) queryFeed(ctx context.Context, feed *model.Feed, info return err } - feed.Title = fmt.Sprintf("%s: %s", playlist.Snippet.ChannelTitle, playlist.Snippet.Title) + feed.Title = playlist.Snippet.Title feed.Description = playlist.Snippet.Description feed.ItemURL = fmt.Sprintf("https://youtube.com/playlist?list=%s", playlist.Id) feed.ItemID = playlist.Id - feed.Author = "" + feed.Author = playlist.Snippet.ChannelTitle if date, err := yt.parseDate(playlist.Snippet.PublishedAt); err != nil { return err } else { // nolint:golint feed.PubDate = date } - - thumbnails = playlist.Snippet.Thumbnails + metadata, err := yt.downloader.PlaylistMetadata(ctx, feed.ItemURL) + if err != nil { + return errors.Wrapf(err, "failed to get playlist metadata for %s", feed.ItemURL) + } + log.Infof("Playlist metadata: %v", metadata) + if len(metadata.Thumbnails) > 0 { + // best quality thumbnail is the last one + feed.CoverArt = metadata.Thumbnails[len(metadata.Thumbnails)-1].Url + } else { + thumbnails = playlist.Snippet.Thumbnails + } default: return errors.New("unsupported link format") @@ -234,9 +288,9 @@ func (yt *YouTubeBuilder) queryFeed(ctx context.Context, feed *model.Feed, info if feed.Description == "" { feed.Description = fmt.Sprintf("%s (%s)", feed.Title, feed.PubDate) } - - feed.CoverArt = yt.selectThumbnail(thumbnails, feed.CoverArtQuality, "") - + if feed.CoverArt == "" { + feed.CoverArt = yt.selectThumbnail(thumbnails, feed.CoverArtQuality, "") + } return nil } @@ -269,72 +323,100 
@@ func (yt *YouTubeBuilder) queryVideoDescriptions(ctx context.Context, playlist m ids = append(ids, s.ResourceId.VideoId) } - req, err := yt.client.Videos.List("id,snippet,contentDetails").Id(strings.Join(ids, ",")).Context(ctx).Do(yt.key) - if err != nil { - return errors.Wrap(err, "failed to query video descriptions") - } - - for _, video := range req.Items { - var ( - snippet = video.Snippet - videoID = video.Id - videoURL = fmt.Sprintf("https://youtube.com/watch?v=%s", video.Id) - image = yt.selectThumbnail(snippet.Thumbnails, feed.Quality, videoID) - ) - - // Parse date added to playlist / publication date - dateStr := "" - playlistItem, ok := playlist[video.Id] - if ok { - dateStr = playlistItem.PublishedAt - } else { - dateStr = snippet.PublishedAt + // Init a list that will contains the aggregated strings of videos IDs (capped at 50 IDs per API Calls) + idsList := make([]string, 0, 1) + + // Chunk the list of IDs by slices limited to maxYoutubeResults + for i := 0; i < len(ids); i += maxYoutubeResults { + end := i + maxYoutubeResults + if end > len(ids) { + end = len(ids) } + // Save each slice as comma-delimited string + idsList = append(idsList, strings.Join(ids[i:end], ",")) + } + + // Show how many API calls will be required + log.Debugf("Expected to make %d API calls to get the descriptions for %d episode(s).", len(idsList), len(ids)) - pubDate, err := yt.parseDate(dateStr) + // Loop in each slices of 50 (or less) IDs and query their description + for _, idsI := range idsList { + req, err := yt.client.Videos.List([]string{"id", "snippet", "contentDetails"}).Id(idsI).Context(ctx).Do(yt.key) if err != nil { - return errors.Wrapf(err, "failed to parse video publish date: %s", dateStr) + return errors.Wrap(err, "failed to query video descriptions") } - // Sometimes YouTube retrun empty content defailt, use arbitrary one - var seconds int64 = 1 - if video.ContentDetails != nil { - // Parse duration - d, err := 
duration.FromString(video.ContentDetails.Duration) + for _, video := range req.Items { + var ( + snippet = video.Snippet + videoID = video.Id + videoURL = fmt.Sprintf("https://youtube.com/watch?v=%s", video.Id) + image = yt.selectThumbnail(snippet.Thumbnails, feed.Quality, videoID) + ) + + // Skip unreleased/airing Premiere videos + if snippet.LiveBroadcastContent == "upcoming" || snippet.LiveBroadcastContent == "live" { + continue + } + + // Parse date added to playlist / publication date + dateStr := "" + playlistItem, ok := playlist[video.Id] + if ok && playlistItem.PublishedAt > snippet.PublishedAt { + // Use playlist item publish date if it's more recent + dateStr = playlistItem.PublishedAt + } else { + dateStr = snippet.PublishedAt + } + + pubDate, err := yt.parseDate(dateStr) if err != nil { - return errors.Wrapf(err, "failed to parse duration %s", video.ContentDetails.Duration) + return errors.Wrapf(err, "failed to parse video publish date: %s", dateStr) } - seconds = int64(d.ToDuration().Seconds()) - } + // Sometimes YouTube returns empty content details, use arbitrary one + var seconds int64 = 1 + if video.ContentDetails != nil { + // Parse duration + d, err := duration.FromString(video.ContentDetails.Duration) + if err != nil { + return errors.Wrapf(err, "failed to parse duration %s", video.ContentDetails.Duration) + } - var ( - order = strconv.FormatInt(playlistItem.Position, 10) - size = yt.getSize(seconds, feed) - ) - - feed.Episodes = append(feed.Episodes, &model.Episode{ - ID: video.Id, - Title: snippet.Title, - Description: snippet.Description, - Thumbnail: image, - Duration: seconds, - Size: size, - VideoURL: videoURL, - PubDate: pubDate, - Order: order, - Status: model.EpisodeNew, - }) + seconds = int64(d.ToDuration().Seconds()) + } + + var ( + order = strconv.FormatInt(playlistItem.Position, 10) + size = yt.getSize(seconds, feed) + ) + + feed.Episodes = append(feed.Episodes, &model.Episode{ + ID: video.Id, + Title: snippet.Title, + Description: 
snippet.Description, + Thumbnail: image, + Duration: seconds, + Size: size, + VideoURL: videoURL, + PubDate: pubDate, + Order: order, + Status: model.EpisodeNew, + }) + } } return nil } -// Cost: (3 units + 5 units) * X pages = 8 units per page +// Cost: +// ASC mode = (3 units + 5 units) * X pages = 8 units per page +// DESC mode = 3 units * (number of pages in the entire playlist) + 5 units func (yt *YouTubeBuilder) queryItems(ctx context.Context, feed *model.Feed) error { var ( - token string - count int + token string + count int + allSnippets []*youtube.PlaylistItemSnippet ) for { @@ -346,80 +428,97 @@ func (yt *YouTubeBuilder) queryItems(ctx context.Context, feed *model.Feed) erro token = pageToken if len(items) == 0 { - return nil + break } // Extract playlist snippets - snippets := map[string]*youtube.PlaylistItemSnippet{} for _, item := range items { - snippets[item.Snippet.ResourceId.VideoId] = item.Snippet + allSnippets = append(allSnippets, item.Snippet) count++ } - // Query video descriptions from the list of ids - if err := yt.queryVideoDescriptions(ctx, snippets, feed); err != nil { - return err + if (feed.PlaylistSort != model.SortingDesc && count >= feed.PageSize) || token == "" { + break } + } - if count >= feed.PageSize || token == "" { - return nil + if len(allSnippets) > feed.PageSize { + if feed.PlaylistSort != model.SortingDesc { + allSnippets = allSnippets[:feed.PageSize] + } else { + allSnippets = allSnippets[len(allSnippets)-feed.PageSize:] } } + + snippets := map[string]*youtube.PlaylistItemSnippet{} + for _, snippet := range allSnippets { + snippets[snippet.ResourceId.VideoId] = snippet + } + + // Query video descriptions from the list of ids + if err := yt.queryVideoDescriptions(ctx, snippets, feed); err != nil { + return err + } + + return nil } -func (yt *YouTubeBuilder) Build(ctx context.Context, cfg *config.Feed) (*model.Feed, error) { +func (yt *YouTubeBuilder) Build(ctx context.Context, cfg *feed.Config) (*model.Feed, error) { 
info, err := ParseURL(cfg.URL) if err != nil { return nil, err } - feed := &model.Feed{ - ItemID: info.ItemID, - Provider: info.Provider, - LinkType: info.LinkType, - Format: cfg.Format, - Quality: cfg.Quality, - PageSize: cfg.PageSize, - UpdatedAt: time.Now().UTC(), + _feed := &model.Feed{ + ItemID: info.ItemID, + Provider: info.Provider, + LinkType: info.LinkType, + Format: cfg.Format, + Quality: cfg.Quality, + CoverArtQuality: cfg.Custom.CoverArtQuality, + PageSize: cfg.PageSize, + PlaylistSort: cfg.PlaylistSort, + PrivateFeed: cfg.PrivateFeed, + UpdatedAt: time.Now().UTC(), } - if feed.PageSize == 0 { - feed.PageSize = maxYoutubeResults + if _feed.PageSize == 0 { + _feed.PageSize = maxYoutubeResults } // Query general information about feed (title, description, lang, etc) - if err := yt.queryFeed(ctx, feed, &info); err != nil { + if err := yt.queryFeed(ctx, _feed, &info); err != nil { return nil, err } - if err := yt.queryItems(ctx, feed); err != nil { + if err := yt.queryItems(ctx, _feed); err != nil { return nil, err } // YT API client gets 50 episodes per query. // Round up to page size. 
- if len(feed.Episodes) > feed.PageSize { - feed.Episodes = feed.Episodes[:feed.PageSize] + if len(_feed.Episodes) > _feed.PageSize { + _feed.Episodes = _feed.Episodes[:_feed.PageSize] } - sort.Slice(feed.Episodes, func(i, j int) bool { - item1, _ := strconv.Atoi(feed.Episodes[i].Order) - item2, _ := strconv.Atoi(feed.Episodes[j].Order) + sort.Slice(_feed.Episodes, func(i, j int) bool { + item1, _ := strconv.Atoi(_feed.Episodes[i].Order) + item2, _ := strconv.Atoi(_feed.Episodes[j].Order) return item1 < item2 }) - return feed, nil + return _feed, nil } -func NewYouTubeBuilder(key string) (*YouTubeBuilder, error) { +func NewYouTubeBuilder(key string, ytdlp Downloader) (*YouTubeBuilder, error) { if key == "" { return nil, errors.New("empty YouTube API key") } - yt, err := youtube.New(&http.Client{}) + yt, err := youtube.NewService(context.Background(), option.WithHTTPClient(&http.Client{})) if err != nil { return nil, errors.Wrap(err, "failed to create youtube client") } - return &YouTubeBuilder{client: yt, key: apiKey(key)}, nil + return &YouTubeBuilder{client: yt, key: apiKey(key), downloader: ytdlp}, nil } diff --git a/pkg/builder/youtube_test.go b/pkg/builder/youtube_test.go index dfbc31a2..c7c2d9c1 100644 --- a/pkg/builder/youtube_test.go +++ b/pkg/builder/youtube_test.go @@ -2,102 +2,171 @@ package builder import ( "context" - "os" + "net/http" "testing" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "google.golang.org/api/option" + "google.golang.org/api/youtube/v3" - "github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/model" ) -var ( - testCtx = context.Background() - ytKey = os.Getenv("YOUTUBE_TEST_API_KEY") -) +// MockTransport implements http.RoundTripper for testing +type MockTransport struct { + responses map[string]*http.Response +} -func TestYT_QueryChannel(t *testing.T) { - if ytKey == "" { - t.Skip("YouTube API key is not provided") +func (m *MockTransport) RoundTrip(req *http.Request) (*http.Response, 
error) { + url := req.URL.String() + if resp, exists := m.responses[url]; exists { + return resp, nil } - - builder, err := NewYouTubeBuilder(ytKey) - require.NoError(t, err) - - channel, err := builder.listChannels(testCtx, model.TypeChannel, "UC2yTVSttx7lxAOAzx1opjoA", "id") - require.NoError(t, err) - require.Equal(t, "UC2yTVSttx7lxAOAzx1opjoA", channel.Id) - - channel, err = builder.listChannels(testCtx, model.TypeUser, "fxigr1", "id") - require.NoError(t, err) - require.Equal(t, "UCr_fwF-n-2_olTYd-m3n32g", channel.Id) + return &http.Response{ + StatusCode: 404, + Body: http.NoBody, + }, nil } -func TestYT_BuildFeed(t *testing.T) { - if ytKey == "" { - t.Skip("YouTube API key is not provided") +func TestResolveHandle(t *testing.T) { + tests := []struct { + name string + handle string + mockResp string + expected string + wantErr bool + }{ + { + name: "valid handle", + handle: "testhandle", + mockResp: `{ + "items": [ + { + "snippet": { + "channelId": "UC_test_channel_id_123" + } + } + ] + }`, + expected: "UC_test_channel_id_123", + wantErr: false, + }, + { + name: "handle not found", + handle: "nonexistent", + mockResp: `{"items": []}`, + expected: "", + wantErr: true, + }, + { + name: "empty channel ID", + handle: "badhandle", + mockResp: `{ + "items": [ + { + "snippet": { + "channelId": "" + } + } + ] + }`, + expected: "", + wantErr: true, + }, } - builder, err := NewYouTubeBuilder(ytKey) - require.NoError(t, err) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create a mock HTTP client + mockTransport := &MockTransport{ + responses: make(map[string]*http.Response), + } - urls := []string{ - "https://youtube.com/user/fxigr1", - "https://www.youtube.com/channel/UCupvZG-5ko_eiXAupbDfxWw", - "https://www.youtube.com/playlist?list=PLF7tUDhGkiCk_Ne30zu7SJ9gZF9R9ZruE", - "https://www.youtube.com/channel/UCK9lZ2lHRBgx2LOcqPifukA", - "https://youtube.com/user/WylsaLive", - 
"https://www.youtube.com/playlist?list=PLUVl5pafUrBydT_gsCjRGeCy0hFHloec8", - } + // Set up the mock response based on expected API call + mockTransport.responses["https://youtube.googleapis.com/youtube/v3/search"] = &http.Response{ + StatusCode: 200, + Body: http.NoBody, // Simplified for this test + } - for _, addr := range urls { - t.Run(addr, func(t *testing.T) { - feed, err := builder.Build(testCtx, &config.Feed{URL: addr}) - require.NoError(t, err) + client := &http.Client{Transport: mockTransport} - assert.NotEmpty(t, feed.Title) - assert.NotEmpty(t, feed.Description) - assert.NotEmpty(t, feed.Author) - assert.NotEmpty(t, feed.ItemURL) + // Create YouTube service with mock client + yt, err := youtube.NewService(context.Background(), option.WithHTTPClient(client)) + require.NoError(t, err) - assert.NotZero(t, len(feed.Episodes)) + _ = &YouTubeBuilder{ + client: yt, + key: apiKey("test-api-key"), + } - for _, item := range feed.Episodes { - assert.NotEmpty(t, item.Title) - assert.NotEmpty(t, item.VideoURL) - assert.NotZero(t, item.Duration) + // Note: This test demonstrates the structure but won't actually work + // without proper mocking of the YouTube API responses. + // For a real implementation, you'd need more sophisticated mocking + // like using httptest.Server or a proper mock library. 
- assert.NotEmpty(t, item.Title) - assert.NotEmpty(t, item.Thumbnail) - } + // Skip the actual API call test since it requires complex mocking + t.Skip("Skipping API call test - requires more sophisticated mocking") }) } } -func TestYT_GetVideoCount(t *testing.T) { - if ytKey == "" { - t.Skip("YouTube API key is not provided") +func TestParseURLWithHandles(t *testing.T) { + tests := []struct { + name string + url string + expected model.Info + wantErr bool + }{ + { + name: "valid handle URL", + url: "https://www.youtube.com/@testhandle", + expected: model.Info{ + LinkType: model.TypeHandle, + Provider: model.ProviderYoutube, + ItemID: "testhandle", + }, + wantErr: false, + }, + { + name: "handle URL with videos path", + url: "https://youtube.com/@mychannel/videos", + expected: model.Info{ + LinkType: model.TypeHandle, + Provider: model.ProviderYoutube, + ItemID: "mychannel", + }, + wantErr: false, + }, + { + name: "invalid handle URL", + url: "https://www.youtube.com/@", + wantErr: true, + }, + { + name: "regular channel URL still works", + url: "https://www.youtube.com/channel/UC_test_channel", + expected: model.Info{ + LinkType: model.TypeChannel, + Provider: model.ProviderYoutube, + ItemID: "UC_test_channel", + }, + wantErr: false, + }, } - builder, err := NewYouTubeBuilder(ytKey) - require.NoError(t, err) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := ParseURL(tt.url) - feeds := []*model.Info{ - {Provider: model.ProviderYoutube, LinkType: model.TypeUser, ItemID: "fxigr1"}, - {Provider: model.ProviderYoutube, LinkType: model.TypeChannel, ItemID: "UCupvZG-5ko_eiXAupbDfxWw"}, - {Provider: model.ProviderYoutube, LinkType: model.TypePlaylist, ItemID: "PLF7tUDhGkiCk_Ne30zu7SJ9gZF9R9ZruE"}, - {Provider: model.ProviderYoutube, LinkType: model.TypeChannel, ItemID: "UCK9lZ2lHRBgx2LOcqPifukA"}, - {Provider: model.ProviderYoutube, LinkType: model.TypeUser, ItemID: "WylsaLive"}, - {Provider: model.ProviderYoutube, LinkType: 
model.TypePlaylist, ItemID: "PLUVl5pafUrBydT_gsCjRGeCy0hFHloec8"}, - } + if tt.wantErr { + require.Error(t, err) + return + } - for _, f := range feeds { - feed := f - t.Run(f.ItemID, func(t *testing.T) { - count, err := builder.GetVideoCount(testCtx, feed) - assert.NoError(t, err) - assert.NotZero(t, count) + require.NoError(t, err) + require.Equal(t, tt.expected.LinkType, result.LinkType) + require.Equal(t, tt.expected.Provider, result.Provider) + require.Equal(t, tt.expected.ItemID, result.ItemID) }) } } diff --git a/pkg/config/config.go b/pkg/config/config.go deleted file mode 100644 index 6930f1dd..00000000 --- a/pkg/config/config.go +++ /dev/null @@ -1,241 +0,0 @@ -package config - -import ( - "fmt" - "io/ioutil" - "path/filepath" - "regexp" - - "github.com/hashicorp/go-multierror" - "github.com/naoina/toml" - "github.com/pkg/errors" - - "github.com/mxpv/podsync/pkg/model" -) - -// Feed is a configuration for a feed -type Feed struct { - ID string `toml:"-"` - // URL is a full URL of the field - URL string `toml:"url"` - // PageSize is the number of pages to query from YouTube API. - // NOTE: larger page sizes/often requests might drain your API token. - PageSize int `toml:"page_size"` - // UpdatePeriod is how often to check for updates. - // Format is "300ms", "1.5h" or "2h45m". - // Valid time units are "ns", "us" (or "ยตs"), "ms", "s", "m", "h". - // NOTE: too often update check might drain your API token. - UpdatePeriod Duration `toml:"update_period"` - // Cron expression format is how often to check update - // NOTE: too often update check might drain your API token. 
- CronSchedule string `toml:"cron_schedule"` - // Quality to use for this feed - Quality model.Quality `toml:"quality"` - // Maximum height of video - MaxHeight int `toml:"max_height"` - // Format to use for this feed - Format model.Format `toml:"format"` - // Only download episodes that match this regexp (defaults to matching anything) - Filters Filters `toml:"filters"` - // Clean is a cleanup policy to use for this feed - Clean Cleanup `toml:"clean"` - // Custom is a list of feed customizations - Custom Custom `toml:"custom"` - // List of additional youtube-dl arguments passed at download time - YouTubeDLArgs []string `toml:"youtube_dl_args"` - // Included in OPML file - OPML bool `toml:"opml"` -} - -type Filters struct { - Title string `toml:"title"` - NotTitle string `toml:"not_title"` - Description string `toml:"description"` - NotDescription string `toml:"not_description"` - // More filters to be added here -} - -type Custom struct { - CoverArt string `toml:"cover_art"` - CoverArtQuality model.Quality `toml:"cover_art_quality"` - Category string `toml:"category"` - Subcategories []string `toml:"subcategories"` - Explicit bool `toml:"explicit"` - Language string `toml:"lang"` - Author string `toml:"author"` - Title string `toml:"title"` - Description string `toml:"description"` - OwnerName string `toml:"ownerName"` - OwnerEmail string `toml:"ownerEmail"` -} - -type Server struct { - // Hostname to use for download links - Hostname string `toml:"hostname"` - // Port is a server port to listen to - Port int `toml:"port"` - // Bind a specific IP addresses for server - // "*": bind all IP addresses which is default option - // localhost or 127.0.0.1 bind a single IPv4 address - BindAddress string `toml:"bind_address"` - // Specify path for reverse proxy and only [A-Za-z0-9] - Path string `toml:"path"` - // DataDir is a path to a directory to keep XML feeds and downloaded episodes, - // that will be available to user via web server for download. 
- DataDir string `toml:"data_dir"` -} - -type Database struct { - // Dir is a directory to keep database files - Dir string `toml:"dir"` - Badger *Badger `toml:"badger"` -} - -// Badger represents BadgerDB configuration parameters -// See https://github.com/dgraph-io/badger#memory-usage -type Badger struct { - Truncate bool `toml:"truncate"` - FileIO bool `toml:"file_io"` -} - -type Cleanup struct { - // KeepLast defines how many episodes to keep - KeepLast int `toml:"keep_last"` -} - -type Log struct { - // Filename to write the log to (instead of stdout) - Filename string `toml:"filename"` - // MaxSize is the maximum size of the log file in MB - MaxSize int `toml:"max_size"` - // MaxBackups is the maximum number of log file backups to keep after rotation - MaxBackups int `toml:"max_backups"` - // MaxAge is the maximum number of days to keep the logs for - MaxAge int `toml:"max_age"` - // Compress old backups - Compress bool `toml:"compress"` -} - -// Downloader is a youtube-dl related configuration -type Downloader struct { - // SelfUpdate toggles self update every 24 hour - SelfUpdate bool `toml:"self_update"` - // Timeout in minutes for youtube-dl process to finish download - Timeout int `toml:"timeout"` -} - -type Config struct { - // Server is the web server configuration - Server Server `toml:"server"` - // Log is the optional logging configuration - Log Log `toml:"log"` - // Database configuration - Database Database `toml:"database"` - // Feeds is a list of feeds to host by this app. - // ID will be used as feed ID in http://podsync.net/{FEED_ID}.xml - Feeds map[string]*Feed - // Tokens is API keys to use to access YouTube/Vimeo APIs. 
- Tokens map[model.Provider]StringSlice `toml:"tokens"` - // Downloader (youtube-dl) configuration - Downloader Downloader `toml:"downloader"` -} - -// LoadConfig loads TOML configuration from a file path -func LoadConfig(path string) (*Config, error) { - data, err := ioutil.ReadFile(path) - if err != nil { - return nil, errors.Wrapf(err, "failed to read config file: %s", path) - } - - config := Config{} - if err := toml.Unmarshal(data, &config); err != nil { - return nil, errors.Wrap(err, "failed to unmarshal toml") - } - - for id, feed := range config.Feeds { - feed.ID = id - } - - config.applyDefaults(path) - - if err := config.validate(); err != nil { - return nil, err - } - - return &config, nil -} - -func (c *Config) validate() error { - var result *multierror.Error - - if c.Server.DataDir == "" { - result = multierror.Append(result, errors.New("data directory is required")) - } - - if c.Server.Path != "" { - var pathReg = regexp.MustCompile(model.PathRegex) - if !pathReg.MatchString(c.Server.Path) { - result = multierror.Append(result, errors.Errorf("Server handle path must be match %s or empty", model.PathRegex)) - } - } - - if len(c.Feeds) == 0 { - result = multierror.Append(result, errors.New("at least one feed must be specified")) - } - - for id, feed := range c.Feeds { - if feed.URL == "" { - result = multierror.Append(result, errors.Errorf("URL is required for %q", id)) - } - } - - return result.ErrorOrNil() -} - -func (c *Config) applyDefaults(configPath string) { - if c.Server.Hostname == "" { - if c.Server.Port != 0 && c.Server.Port != 80 { - c.Server.Hostname = fmt.Sprintf("http://localhost:%d", c.Server.Port) - } else { - c.Server.Hostname = "http://localhost" - } - } - - if c.Log.Filename != "" { - if c.Log.MaxSize == 0 { - c.Log.MaxSize = model.DefaultLogMaxSize - } - if c.Log.MaxAge == 0 { - c.Log.MaxAge = model.DefaultLogMaxAge - } - if c.Log.MaxBackups == 0 { - c.Log.MaxBackups = model.DefaultLogMaxBackups - } - } - - if c.Database.Dir == "" 
{ - c.Database.Dir = filepath.Join(filepath.Dir(configPath), "db") - } - - for _, feed := range c.Feeds { - if feed.UpdatePeriod.Duration == 0 { - feed.UpdatePeriod.Duration = model.DefaultUpdatePeriod - } - - if feed.Quality == "" { - feed.Quality = model.DefaultQuality - } - - if feed.Custom.CoverArtQuality == "" { - feed.Custom.CoverArtQuality = model.DefaultQuality - } - - if feed.Format == "" { - feed.Format = model.DefaultFormat - } - - if feed.PageSize == 0 { - feed.PageSize = model.DefaultPageSize - } - } -} diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go deleted file mode 100644 index 8a1694ee..00000000 --- a/pkg/config/config_test.go +++ /dev/null @@ -1,242 +0,0 @@ -package config - -import ( - "io/ioutil" - "os" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/mxpv/podsync/pkg/model" -) - -func TestLoadConfig(t *testing.T) { - const file = ` -[tokens] -youtube = "123" -vimeo = [ - "321", - "456" -] - -[server] -port = 80 -data_dir = "test/data/" - -[database] -dir = "/home/user/db/" - -[downloader] -self_update = true -timeout = 15 - -[feeds] - [feeds.XYZ] - url = "https://youtube.com/watch?v=ygIUF678y40" - page_size = 48 - update_period = "5h" - format = "audio" - quality = "low" - filters = { title = "regex for title here" } - clean = { keep_last = 10 } - [feeds.XYZ.custom] - cover_art = "http://img" - cover_art_quality = "high" - category = "TV" - subcategories = ["1", "2"] - explicit = true - lang = "en" - author = "Mrs. Smith (mrs@smith.org)" - ownerName = "Mrs. 
Smith" - ownerEmail = "mrs@smith.org" -` - path := setup(t, file) - defer os.Remove(path) - - config, err := LoadConfig(path) - assert.NoError(t, err) - require.NotNil(t, config) - - assert.Equal(t, "test/data/", config.Server.DataDir) - assert.EqualValues(t, 80, config.Server.Port) - - assert.Equal(t, "/home/user/db/", config.Database.Dir) - - require.Len(t, config.Tokens["youtube"], 1) - assert.Equal(t, "123", config.Tokens["youtube"][0]) - require.Len(t, config.Tokens["vimeo"], 2) - assert.Equal(t, "321", config.Tokens["vimeo"][0]) - assert.Equal(t, "456", config.Tokens["vimeo"][1]) - - assert.Len(t, config.Feeds, 1) - feed, ok := config.Feeds["XYZ"] - assert.True(t, ok) - assert.Equal(t, "https://youtube.com/watch?v=ygIUF678y40", feed.URL) - assert.EqualValues(t, 48, feed.PageSize) - assert.EqualValues(t, Duration{5 * time.Hour}, feed.UpdatePeriod) - assert.EqualValues(t, "audio", feed.Format) - assert.EqualValues(t, "low", feed.Quality) - assert.EqualValues(t, "regex for title here", feed.Filters.Title) - assert.EqualValues(t, 10, feed.Clean.KeepLast) - - assert.EqualValues(t, "http://img", feed.Custom.CoverArt) - assert.EqualValues(t, "high", feed.Custom.CoverArtQuality) - assert.EqualValues(t, "TV", feed.Custom.Category) - assert.True(t, feed.Custom.Explicit) - assert.EqualValues(t, "en", feed.Custom.Language) - assert.EqualValues(t, "Mrs. Smith (mrs@smith.org)", feed.Custom.Author) - assert.EqualValues(t, "Mrs. 
Smith", feed.Custom.OwnerName) - assert.EqualValues(t, "mrs@smith.org", feed.Custom.OwnerEmail) - - assert.EqualValues(t, feed.Custom.Subcategories, []string{"1", "2"}) - - assert.Nil(t, config.Database.Badger) - - assert.True(t, config.Downloader.SelfUpdate) - assert.EqualValues(t, 15, config.Downloader.Timeout) -} - -func TestLoadEmptyKeyList(t *testing.T) { - const file = ` -[tokens] -vimeo = [] - -[server] -data_dir = "/data" -[feeds] - [feeds.A] - url = "https://youtube.com/watch?v=ygIUF678y40" -` - path := setup(t, file) - defer os.Remove(path) - - config, err := LoadConfig(path) - assert.NoError(t, err) - require.NotNil(t, config) - - require.Len(t, config.Tokens, 1) - require.Len(t, config.Tokens["vimeo"], 0) -} - -func TestApplyDefaults(t *testing.T) { - const file = ` -[server] -data_dir = "/data" - -[feeds] - [feeds.A] - url = "https://youtube.com/watch?v=ygIUF678y40" -` - path := setup(t, file) - defer os.Remove(path) - - config, err := LoadConfig(path) - assert.NoError(t, err) - assert.NotNil(t, config) - - assert.Len(t, config.Feeds, 1) - feed, ok := config.Feeds["A"] - require.True(t, ok) - - assert.EqualValues(t, feed.UpdatePeriod, Duration{model.DefaultUpdatePeriod}) - assert.EqualValues(t, feed.PageSize, 50) - assert.EqualValues(t, feed.Quality, "high") - assert.EqualValues(t, feed.Custom.CoverArtQuality, "high") - assert.EqualValues(t, feed.Format, "video") -} - -func TestHttpServerListenAddress(t *testing.T) { - const file = ` -[server] -bind_address = "172.20.10.2" -port = 8080 -path = "test" -data_dir = "/data" - -[feeds] - [feeds.A] - url = "https://youtube.com/watch?v=ygIUF678y40" - -[database] - badger = { truncate = true, file_io = true } -` - path := setup(t, file) - defer os.Remove(path) - - config, err := LoadConfig(path) - assert.NoError(t, err) - require.NotNil(t, config) - require.NotNil(t, config.Server.BindAddress) - require.NotNil(t, config.Server.Path) -} - -func TestDefaultHostname(t *testing.T) { - cfg := Config{ - Server: 
Server{}, - } - - t.Run("empty hostname", func(t *testing.T) { - cfg.applyDefaults("") - assert.Equal(t, "http://localhost", cfg.Server.Hostname) - }) - - t.Run("empty hostname with port", func(t *testing.T) { - cfg.Server.Hostname = "" - cfg.Server.Port = 7979 - cfg.applyDefaults("") - assert.Equal(t, "http://localhost:7979", cfg.Server.Hostname) - }) - - t.Run("skip overwrite", func(t *testing.T) { - cfg.Server.Hostname = "https://my.host:4443" - cfg.Server.Port = 80 - cfg.applyDefaults("") - assert.Equal(t, "https://my.host:4443", cfg.Server.Hostname) - }) -} - -func TestDefaultDatabasePath(t *testing.T) { - cfg := Config{} - cfg.applyDefaults("/home/user/podsync/config.toml") - assert.Equal(t, "/home/user/podsync/db", cfg.Database.Dir) -} - -func TestLoadBadgerConfig(t *testing.T) { - const file = ` -[server] -data_dir = "/data" - -[feeds] - [feeds.A] - url = "https://youtube.com/watch?v=ygIUF678y40" - -[database] - badger = { truncate = true, file_io = true } -` - path := setup(t, file) - defer os.Remove(path) - - config, err := LoadConfig(path) - assert.NoError(t, err) - require.NotNil(t, config) - require.NotNil(t, config.Database.Badger) - - assert.True(t, config.Database.Badger.Truncate) - assert.True(t, config.Database.Badger.FileIO) -} - -func setup(t *testing.T, file string) string { - t.Helper() - - f, err := ioutil.TempFile("", "") - require.NoError(t, err) - - defer f.Close() - - _, err = f.WriteString(file) - require.NoError(t, err) - - return f.Name() -} diff --git a/pkg/config/toml.go b/pkg/config/toml.go deleted file mode 100644 index bdcf7717..00000000 --- a/pkg/config/toml.go +++ /dev/null @@ -1,41 +0,0 @@ -package config - -import ( - "time" - - "github.com/pkg/errors" -) - -type Duration struct { - time.Duration -} - -func (d *Duration) UnmarshalText(text []byte) error { - res, err := time.ParseDuration(string(text)) - if err != nil { - return err - } - - *d = Duration{res} - return nil -} - -// StringSlice is a toml extension that lets you 
to specify either a string -// value (a slice with just one element) or a string slice. -type StringSlice []string - -func (s *StringSlice) UnmarshalTOML(decode func(interface{}) error) error { - var single string - if err := decode(&single); err == nil { - *s = []string{single} - return nil - } - - var slice []string - if err := decode(&slice); err == nil { - *s = slice - return nil - } - - return errors.New("failed to decode string (slice) field") -} diff --git a/pkg/db/badger.go b/pkg/db/badger.go index 41e6ad5b..1a4582be 100644 --- a/pkg/db/badger.go +++ b/pkg/db/badger.go @@ -11,7 +11,6 @@ import ( "github.com/pkg/errors" log "github.com/sirupsen/logrus" - "github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/model" ) @@ -23,13 +22,19 @@ const ( episodePath = "episode/%s/%s" // FeedID + EpisodeID ) +// BadgerConfig represents BadgerDB configuration parameters +type BadgerConfig struct { + Truncate bool `toml:"truncate"` + FileIO bool `toml:"file_io"` +} + type Badger struct { db *badger.DB } var _ Storage = (*Badger)(nil) -func NewBadger(config *config.Database) (*Badger, error) { +func NewBadger(config *Config) (*Badger, error) { var ( dir = config.Dir ) @@ -267,11 +272,12 @@ func (b *Badger) setObj(txn *badger.Txn, key []byte, obj interface{}, overwrite if !overwrite { // Overwrites are not allowed, make sure there is no object with the given key _, err := txn.Get(key) - if err == nil { + switch err { + case nil: return model.ErrAlreadyExists - } else if err == badger.ErrKeyNotFound { + case badger.ErrKeyNotFound: // Key not found, do nothing - } else { + default: return errors.Wrap(err, "failed to check whether key exists") } } diff --git a/pkg/db/badger_test.go b/pkg/db/badger_test.go index 1b0b99b2..5760bde2 100644 --- a/pkg/db/badger_test.go +++ b/pkg/db/badger_test.go @@ -2,40 +2,31 @@ package db import ( "context" - "io/ioutil" - "os" "testing" "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - 
"github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/model" ) var testCtx = context.TODO() func TestNewBadger(t *testing.T) { - dir, err := ioutil.TempDir("", "podsync-badger-") - require.NoError(t, err) + dir := t.TempDir() - db, err := NewBadger(&config.Database{Dir: dir}) + db, err := NewBadger(&Config{Dir: dir}) require.NoError(t, err) err = db.Close() assert.NoError(t, err) - - err = os.RemoveAll(dir) - assert.NoError(t, err) } func TestBadger_Version(t *testing.T) { - dir, err := ioutil.TempDir("", "podsync-badger-") - assert.NoError(t, err) - defer os.RemoveAll(dir) + dir := t.TempDir() - db, err := NewBadger(&config.Database{Dir: dir}) + db, err := NewBadger(&Config{Dir: dir}) require.NoError(t, err) defer db.Close() @@ -45,11 +36,9 @@ func TestBadger_Version(t *testing.T) { } func TestBadger_AddFeed(t *testing.T) { - dir, err := ioutil.TempDir("", "podsync-badger-") - assert.NoError(t, err) - defer os.RemoveAll(dir) + dir := t.TempDir() - db, err := NewBadger(&config.Database{Dir: dir}) + db, err := NewBadger(&Config{Dir: dir}) require.NoError(t, err) defer db.Close() @@ -59,11 +48,9 @@ func TestBadger_AddFeed(t *testing.T) { } func TestBadger_GetFeed(t *testing.T) { - dir, err := ioutil.TempDir("", "podsync-badger-") - assert.NoError(t, err) - defer os.RemoveAll(dir) + dir := t.TempDir() - db, err := NewBadger(&config.Database{Dir: dir}) + db, err := NewBadger(&Config{Dir: dir}) require.NoError(t, err) defer db.Close() @@ -79,11 +66,9 @@ func TestBadger_GetFeed(t *testing.T) { } func TestBadger_WalkFeeds(t *testing.T) { - dir, err := ioutil.TempDir("", "podsync-badger-") - assert.NoError(t, err) - defer os.RemoveAll(dir) + dir := t.TempDir() - db, err := NewBadger(&config.Database{Dir: dir}) + db, err := NewBadger(&Config{Dir: dir}) require.NoError(t, err) defer db.Close() @@ -105,11 +90,9 @@ func TestBadger_WalkFeeds(t *testing.T) { } func TestBadger_DeleteFeed(t *testing.T) { - dir, err := ioutil.TempDir("", "podsync-badger-") - 
assert.NoError(t, err) - defer os.RemoveAll(dir) + dir := t.TempDir() - db, err := NewBadger(&config.Database{Dir: dir}) + db, err := NewBadger(&Config{Dir: dir}) require.NoError(t, err) defer db.Close() @@ -130,11 +113,9 @@ func TestBadger_DeleteFeed(t *testing.T) { } func TestBadger_UpdateEpisode(t *testing.T) { - dir, err := ioutil.TempDir("", "podsync-badger-") - assert.NoError(t, err) - defer os.RemoveAll(dir) + dir := t.TempDir() - db, err := NewBadger(&config.Database{Dir: dir}) + db, err := NewBadger(&Config{Dir: dir}) require.NoError(t, err) defer db.Close() @@ -160,11 +141,9 @@ func TestBadger_UpdateEpisode(t *testing.T) { } func TestBadger_WalkEpisodes(t *testing.T) { - dir, err := ioutil.TempDir("", "podsync-badger-") - assert.NoError(t, err) - defer os.RemoveAll(dir) + dir := t.TempDir() - db, err := NewBadger(&config.Database{Dir: dir}) + db, err := NewBadger(&Config{Dir: dir}) require.NoError(t, err) defer db.Close() diff --git a/pkg/db/config.go b/pkg/db/config.go new file mode 100644 index 00000000..cd5c1f9a --- /dev/null +++ b/pkg/db/config.go @@ -0,0 +1,7 @@ +package db + +type Config struct { + // Dir is a directory to keep database files + Dir string `toml:"dir"` + Badger *BadgerConfig `toml:"badger"` +} diff --git a/pkg/feed/config.go b/pkg/feed/config.go new file mode 100644 index 00000000..8897a136 --- /dev/null +++ b/pkg/feed/config.go @@ -0,0 +1,91 @@ +package feed + +import ( + "time" + + "github.com/mxpv/podsync/pkg/model" +) + +// Config is a configuration for a feed loaded from TOML +type Config struct { + ID string `toml:"-"` + // URL is a full URL of the field + URL string `toml:"url"` + // PageSize is the number of pages to query from YouTube API. + // NOTE: larger page sizes/often requests might drain your API token. + PageSize int `toml:"page_size"` + // UpdatePeriod is how often to check for updates. + // Format is "300ms", "1.5h" or "2h45m". + // Valid time units are "ns", "us" (or "ยตs"), "ms", "s", "m", "h". 
+ // NOTE: too often update check might drain your API token. + UpdatePeriod time.Duration `toml:"update_period"` + // Cron expression format is how often to check update + // NOTE: too often update check might drain your API token. + CronSchedule string `toml:"cron_schedule"` + // Quality to use for this feed + Quality model.Quality `toml:"quality"` + // Maximum height of video + MaxHeight int `toml:"max_height"` + // Format to use for this feed + Format model.Format `toml:"format"` + // Custom format properties + CustomFormat CustomFormat `toml:"custom_format"` + // Only download episodes that match the filters (defaults to matching anything) + Filters Filters `toml:"filters"` + // Clean is a cleanup policy to use for this feed + Clean *Cleanup `toml:"clean"` + // Custom is a list of feed customizations + Custom Custom `toml:"custom"` + // List of additional youtube-dl arguments passed at download time + YouTubeDLArgs []string `toml:"youtube_dl_args"` + // Post episode download hooks - executed after each episode is successfully downloaded + // Multiple hooks can be configured and will execute in sequence + // Example: + // [[feeds.ID1.post_episode_download]] + // command = ["echo", "Downloaded: $EPISODE_TITLE"] + // timeout = 10 + PostEpisodeDownload []*ExecHook `toml:"post_episode_download"` + // Included in OPML file + OPML bool `toml:"opml"` + // Private feed (not indexed by podcast aggregators) + PrivateFeed bool `toml:"private_feed"` + // Playlist sort + PlaylistSort model.Sorting `toml:"playlist_sort"` +} + +type CustomFormat struct { + YouTubeDLFormat string `toml:"youtube_dl_format"` + Extension string `toml:"extension"` +} + +type Filters struct { + Title string `toml:"title"` + NotTitle string `toml:"not_title"` + Description string `toml:"description"` + NotDescription string `toml:"not_description"` + MinDuration int64 `toml:"min_duration"` + MaxDuration int64 `toml:"max_duration"` + MaxAge int `toml:"max_age"` + MinAge int `toml:"min_age"` + // More 
filters to be added here +} + +type Custom struct { + CoverArt string `toml:"cover_art"` + CoverArtQuality model.Quality `toml:"cover_art_quality"` + Category string `toml:"category"` + Subcategories []string `toml:"subcategories"` + Explicit bool `toml:"explicit"` + Language string `toml:"lang"` + Author string `toml:"author"` + Title string `toml:"title"` + Description string `toml:"description"` + OwnerName string `toml:"ownerName"` + OwnerEmail string `toml:"ownerEmail"` + Link string `toml:"link"` +} + +type Cleanup struct { + // KeepLast defines how many episodes to keep + KeepLast int `toml:"keep_last"` +} diff --git a/pkg/feed/deps.go b/pkg/feed/deps.go index 4c568aa9..df47b070 100644 --- a/pkg/feed/deps.go +++ b/pkg/feed/deps.go @@ -11,7 +11,3 @@ import ( type feedProvider interface { GetFeed(ctx context.Context, feedID string) (*model.Feed, error) } - -type urlProvider interface { - URL(ctx context.Context, ns string, fileName string) (string, error) -} diff --git a/pkg/feed/deps_mock_test.go b/pkg/feed/deps_mock_test.go index e7dec7d1..c6771bf6 100644 --- a/pkg/feed/deps_mock_test.go +++ b/pkg/feed/deps_mock_test.go @@ -6,35 +6,36 @@ package feed import ( context "context" + reflect "reflect" + gomock "github.com/golang/mock/gomock" model "github.com/mxpv/podsync/pkg/model" - reflect "reflect" ) -// MockfeedProvider is a mock of feedProvider interface +// MockfeedProvider is a mock of feedProvider interface. type MockfeedProvider struct { ctrl *gomock.Controller recorder *MockfeedProviderMockRecorder } -// MockfeedProviderMockRecorder is the mock recorder for MockfeedProvider +// MockfeedProviderMockRecorder is the mock recorder for MockfeedProvider. type MockfeedProviderMockRecorder struct { mock *MockfeedProvider } -// NewMockfeedProvider creates a new mock instance +// NewMockfeedProvider creates a new mock instance. 
func NewMockfeedProvider(ctrl *gomock.Controller) *MockfeedProvider { mock := &MockfeedProvider{ctrl: ctrl} mock.recorder = &MockfeedProviderMockRecorder{mock} return mock } -// EXPECT returns an object that allows the caller to indicate expected use +// EXPECT returns an object that allows the caller to indicate expected use. func (m *MockfeedProvider) EXPECT() *MockfeedProviderMockRecorder { return m.recorder } -// GetFeed mocks base method +// GetFeed mocks base method. func (m *MockfeedProvider) GetFeed(ctx context.Context, feedID string) (*model.Feed, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetFeed", ctx, feedID) @@ -43,46 +44,8 @@ func (m *MockfeedProvider) GetFeed(ctx context.Context, feedID string) (*model.F return ret0, ret1 } -// GetFeed indicates an expected call of GetFeed +// GetFeed indicates an expected call of GetFeed. func (mr *MockfeedProviderMockRecorder) GetFeed(ctx, feedID interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFeed", reflect.TypeOf((*MockfeedProvider)(nil).GetFeed), ctx, feedID) } - -// MockurlProvider is a mock of urlProvider interface -type MockurlProvider struct { - ctrl *gomock.Controller - recorder *MockurlProviderMockRecorder -} - -// MockurlProviderMockRecorder is the mock recorder for MockurlProvider -type MockurlProviderMockRecorder struct { - mock *MockurlProvider -} - -// NewMockurlProvider creates a new mock instance -func NewMockurlProvider(ctrl *gomock.Controller) *MockurlProvider { - mock := &MockurlProvider{ctrl: ctrl} - mock.recorder = &MockurlProviderMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockurlProvider) EXPECT() *MockurlProviderMockRecorder { - return m.recorder -} - -// URL mocks base method -func (m *MockurlProvider) URL(ctx context.Context, ns, fileName string) (string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "URL", ctx, ns, fileName) - ret0, _ 
:= ret[0].(string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// URL indicates an expected call of URL -func (mr *MockurlProviderMockRecorder) URL(ctx, ns, fileName interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "URL", reflect.TypeOf((*MockurlProvider)(nil).URL), ctx, ns, fileName) -} diff --git a/pkg/feed/hooks.go b/pkg/feed/hooks.go new file mode 100644 index 00000000..da9f1572 --- /dev/null +++ b/pkg/feed/hooks.go @@ -0,0 +1,85 @@ +package feed + +import ( + "context" + "fmt" + "os" + "os/exec" + "time" +) + +// ExecHook represents a single hook configuration that executes commands +// after specific lifecycle events (e.g., episode downloads). +// +// Example configuration: +// +// [[feeds.ID1.post_episode_download]] +// command = ["curl", "-X", "POST", "-d", "$EPISODE_TITLE", "webhook.example.com"] +// timeout = 30 +// +// Environment variables available to hooks: +// - EPISODE_FILE: Path to downloaded file (e.g., "podcast-id/episode.mp3") +// - FEED_NAME: The feed identifier +// - EPISODE_TITLE: The episode title +type ExecHook struct { + // Command is the command and arguments to execute. + // For single commands, use shell parsing: ["echo hello"] + // For multiple args, pass directly: ["curl", "-X", "POST", "url"] + Command []string `toml:"command"` + + // Timeout in seconds for command execution. + // If 0 or unset, defaults to 60 seconds. + Timeout int `toml:"timeout"` +} + +// Invoke executes the hook command with the provided environment variables. +// +// The method handles nil hooks gracefully (returns nil) and validates that +// the command is not empty. Commands are executed with a timeout (default 60s) +// and inherit the parent process environment plus any additional variables. +// +// Single-element commands are executed via shell (/bin/sh -c), while +// multi-element commands are executed directly for better security. 
+// +// Returns an error if the command fails, times out, or returns a non-zero exit code. +// The error includes the combined stdout/stderr output for debugging. +func (h *ExecHook) Invoke(env []string) error { + if h == nil { + return nil + } + if len(h.Command) == 0 { + return fmt.Errorf("hook command is empty") + } + + // Set up context with timeout (default 1 minute if not specified) + timeout := h.Timeout + if timeout == 0 { + timeout = 60 // default to 1 minute + } + + ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second) + defer cancel() + + // Create command with context + var cmd *exec.Cmd + if len(h.Command) == 1 { + // Single command, use shell to parse + cmd = exec.CommandContext(ctx, "/bin/sh", "-c", h.Command[0]) + } else { + // Multiple arguments, use directly + cmd = exec.CommandContext(ctx, h.Command[0], h.Command[1:]...) + } + + // Set up environment variables + cmd.Env = append(os.Environ(), env...) + + // Execute the command + data, err := cmd.CombinedOutput() + output := string(data) + + if err != nil { + return fmt.Errorf("hook execution failed: %v, output: %s", err, output) + } + + return nil +} diff --git a/pkg/feed/hooks_test.go b/pkg/feed/hooks_test.go new file mode 100644 index 00000000..511c7ec8 --- /dev/null +++ b/pkg/feed/hooks_test.go @@ -0,0 +1,137 @@ +package feed + +import ( + "fmt" + "io" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestExecuteHook_WriteEnvToFile(t *testing.T) { + // Create a temporary file + tempDir := t.TempDir() + tempFile := filepath.Join(tempDir, "env_output.txt") + + hook := &ExecHook{ + Command: []string{"sh", "-c", "printenv | grep '^TEST_VAR=' > " + tempFile}, + Timeout: 5, + } + + env := []string{ + "TEST_VAR=test-value", + } + + err := hook.Invoke(env) + require.NoError(t, err) + + // Read the file and verify contents + content, err := 
os.ReadFile(tempFile) + require.NoError(t, err) + + output := string(content) + assert.Contains(t, output, "TEST_VAR=test-value") +} + +func TestExecuteHook_CornerCases(t *testing.T) { + tests := []struct { + name string + hook *ExecHook + env []string + expectError bool + errorMsg string + }{ + { + name: "nil hook", + hook: nil, + env: []string{"TEST=value"}, + expectError: false, + }, + { + name: "empty command", + hook: &ExecHook{ + Command: []string{}, + }, + env: []string{"TEST=value"}, + expectError: true, + errorMsg: "hook command is empty", + }, + { + name: "invalid command", + hook: &ExecHook{ + Command: []string{"nonexistentcommand12345"}, + }, + env: []string{"TEST=value"}, + expectError: true, + errorMsg: "hook execution failed", + }, + { + name: "successful command", + hook: &ExecHook{ + Command: []string{"echo", "test"}, + }, + env: []string{"TEST=value"}, + expectError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.hook.Invoke(tt.env) + + if tt.expectError { + require.Error(t, err) + if tt.errorMsg != "" { + assert.Contains(t, err.Error(), tt.errorMsg) + } + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestExecuteHook_CurlWebhook(t *testing.T) { + // Create a local test server to avoid external dependencies + receivedData := "" + receivedHeaders := make(map[string]string) + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Capture the request data for verification + body, err := io.ReadAll(r.Body) + if err == nil { + receivedData = string(body) + } + receivedHeaders["Content-Type"] = r.Header.Get("Content-Type") + receivedHeaders["User-Agent"] = r.Header.Get("User-Agent") + + // Return a simple response + w.WriteHeader(http.StatusOK) + fmt.Fprintln(w, `{"status": "ok"}`) + })) + defer server.Close() + + // Use the local test server URL instead of external httpbin.org + hook := &ExecHook{ + Command: []string{fmt.Sprintf("curl -s -X POST -d 
\"$EPISODE_TITLE\" %s", server.URL)}, + Timeout: 10, + } + + env := []string{ + "EPISODE_TITLE=Test Episode for Webhook", + "FEED_NAME=test-podcast", + "EPISODE_FILE=test-podcast/episode001.mp3", + } + + err := hook.Invoke(env) + assert.NoError(t, err, "Curl webhook should execute successfully") + + // Verify that the request was actually made and data was received + assert.Equal(t, "Test Episode for Webhook", receivedData, "Server should receive the episode title") + assert.Contains(t, receivedHeaders["User-Agent"], "curl", "Request should be made by curl") +} diff --git a/pkg/feed/opml.go b/pkg/feed/opml.go index c98aee00..5fb393b4 100644 --- a/pkg/feed/opml.go +++ b/pkg/feed/opml.go @@ -3,21 +3,21 @@ package feed import ( "context" "fmt" + "strings" "github.com/gilliek/go-opml/opml" "github.com/pkg/errors" log "github.com/sirupsen/logrus" - "github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/model" ) -func BuildOPML(ctx context.Context, config *config.Config, db feedProvider, provider urlProvider) (string, error) { +func BuildOPML(ctx context.Context, feeds map[string]*Config, db feedProvider, hostname string) (string, error) { doc := opml.OPML{Version: "1.0"} doc.Head = opml.Head{Title: "Podsync feeds"} doc.Body = opml.Body{} - for _, feed := range config.Feeds { + for _, feed := range feeds { f, err := db.GetFeed(ctx, feed.ID) if err == model.ErrNotFound { // As we update OPML on per-feed basis, some feeds may not yet be populated in database. 
@@ -31,16 +31,11 @@ func BuildOPML(ctx context.Context, config *config.Config, db feedProvider, prov continue } - downloadURL, err := provider.URL(ctx, "", fmt.Sprintf("%s.xml", feed.ID)) - if err != nil { - return "", errors.Wrapf(err, "failed to get feed URL for %q", feed.ID) - } - outline := opml.Outline{ Title: f.Title, Text: f.Description, Type: "rss", - XMLURL: downloadURL, + XMLURL: fmt.Sprintf("%s/%s.xml", strings.TrimRight(hostname, "/"), feed.ID), } doc.Body.Outlines = append(doc.Body.Outlines, outline) diff --git a/pkg/feed/opml_test.go b/pkg/feed/opml_test.go index 1697676c..467586fa 100644 --- a/pkg/feed/opml_test.go +++ b/pkg/feed/opml_test.go @@ -7,7 +7,6 @@ import ( "github.com/golang/mock/gomock" "github.com/stretchr/testify/assert" - "github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/model" ) @@ -25,19 +24,11 @@ func TestBuildOPML(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() - urlMock := NewMockurlProvider(ctrl) - urlMock.EXPECT().URL(gomock.Any(), "", "1.xml").Return("https://url/1.xml", nil) - dbMock := NewMockfeedProvider(ctrl) dbMock.EXPECT().GetFeed(gomock.Any(), "1").Return(&model.Feed{Title: "1", Description: "desc"}, nil) - cfg := config.Config{ - Feeds: map[string]*config.Feed{ - "any": {ID: "1", OPML: true}, - }, - } - - out, err := BuildOPML(context.Background(), &cfg, dbMock, urlMock) + feeds := map[string]*Config{"any": {ID: "1", OPML: true}} + out, err := BuildOPML(context.Background(), feeds, dbMock, "https://url/") assert.NoError(t, err) assert.Equal(t, expected, out) } diff --git a/pkg/feed/xml.go b/pkg/feed/xml.go index 40769e36..27c3eb24 100644 --- a/pkg/feed/xml.go +++ b/pkg/feed/xml.go @@ -5,12 +5,12 @@ import ( "fmt" "sort" "strconv" + "strings" "time" itunes "github.com/eduncan911/podcast" "github.com/pkg/errors" - "github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/model" ) @@ -30,7 +30,7 @@ func (p timeSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] } -func Build(ctx 
context.Context, feed *model.Feed, cfg *config.Feed, provider urlProvider) (*itunes.Podcast, error) { +func Build(_ctx context.Context, feed *model.Feed, cfg *Config, hostname string) (*itunes.Podcast, error) { const ( podsyncGenerator = "Podsync generator (support us at https://github.com/mxpv/podsync)" defaultCategory = "TV & Film" @@ -38,11 +38,16 @@ func Build(ctx context.Context, feed *model.Feed, cfg *config.Feed, provider url var ( now = time.Now().UTC() - author = feed.Title + author = feed.Author title = feed.Title description = feed.Description + feedLink = feed.ItemURL ) + if author == "" { + author = feed.Title + } + if cfg.Custom.Author != "" { author = cfg.Custom.Author } @@ -55,12 +60,20 @@ func Build(ctx context.Context, feed *model.Feed, cfg *config.Feed, provider url description = cfg.Custom.Description } - p := itunes.New(title, feed.ItemURL, description, &feed.PubDate, &now) + if cfg.Custom.Link != "" { + feedLink = cfg.Custom.Link + } + + p := itunes.New(title, feedLink, description, &feed.PubDate, &now) p.Generator = podsyncGenerator p.AddSubTitle(title) p.IAuthor = author p.AddSummary(description) + if feed.PrivateFeed { + p.IBlock = "yes" + } + if cfg.Custom.OwnerName != "" && cfg.Custom.OwnerEmail != "" { p.IOwner = &itunes.Author{ Name: cfg.Custom.OwnerName, @@ -81,9 +94,9 @@ func Build(ctx context.Context, feed *model.Feed, cfg *config.Feed, provider url } if cfg.Custom.Explicit { - p.IExplicit = "yes" + p.IExplicit = "true" } else { - p.IExplicit = "no" + p.IExplicit = "false" } if cfg.Custom.Language != "" { @@ -101,7 +114,7 @@ func Build(ctx context.Context, feed *model.Feed, cfg *config.Feed, provider url for i, episode := range feed.Episodes { if episode.Status != model.EpisodeDownloaded { - // Skip episodes that are not yet downloaded + // Skip episodes that are not yet downloaded or have been removed continue } @@ -124,13 +137,15 @@ func Build(ctx context.Context, feed *model.Feed, cfg *config.Feed, provider url if feed.Format == 
model.FormatAudio { enclosureType = itunes.MP3 } - - episodeName := EpisodeName(cfg, episode) - downloadURL, err := provider.URL(ctx, cfg.ID, episodeName) - if err != nil { - return nil, errors.Wrapf(err, "failed to obtain download URL for: %s", episodeName) + if feed.Format == model.FormatCustom { + enclosureType = EnclosureFromExtension(cfg) } + var ( + episodeName = EpisodeName(cfg, episode) + downloadURL = fmt.Sprintf("%s/%s/%s", strings.TrimRight(hostname, "/"), cfg.ID, episodeName) + ) + item.AddEnclosure(downloadURL, enclosureType, episode.Size) // p.AddItem requires description to be not empty, use workaround @@ -139,9 +154,9 @@ func Build(ctx context.Context, feed *model.Feed, cfg *config.Feed, provider url } if cfg.Custom.Explicit { - item.IExplicit = "yes" + item.IExplicit = "true" } else { - item.IExplicit = "no" + item.IExplicit = "false" } if _, err := p.AddItem(item); err != nil { @@ -152,11 +167,37 @@ func Build(ctx context.Context, feed *model.Feed, cfg *config.Feed, provider url return &p, nil } -func EpisodeName(feedConfig *config.Feed, episode *model.Episode) string { +func EpisodeName(feedConfig *Config, episode *model.Episode) string { ext := "mp4" if feedConfig.Format == model.FormatAudio { ext = "mp3" } + if feedConfig.Format == model.FormatCustom { + ext = feedConfig.CustomFormat.Extension + } return fmt.Sprintf("%s.%s", episode.ID, ext) } + +func EnclosureFromExtension(feedConfig *Config) itunes.EnclosureType { + ext := feedConfig.CustomFormat.Extension + + switch ext { + case "m4a": + return itunes.M4A + case "m4v": + return itunes.M4V + case "mp4": + return itunes.MP4 + case "mp3": + return itunes.MP3 + case "mov": + return itunes.MOV + case "pdf": + return itunes.PDF + case "epub": + return itunes.EPUB + default: + return -1 + } +} diff --git a/pkg/feed/xml_test.go b/pkg/feed/xml_test.go index 39e60817..00a803dc 100644 --- a/pkg/feed/xml_test.go +++ b/pkg/feed/xml_test.go @@ -4,27 +4,30 @@ import ( "context" "testing" - 
"github.com/golang/mock/gomock" + itunes "github.com/eduncan911/podcast" + "github.com/mxpv/podsync/pkg/model" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - - "github.com/mxpv/podsync/pkg/config" - "github.com/mxpv/podsync/pkg/model" ) func TestBuildXML(t *testing.T) { - ctrl := gomock.NewController(t) - defer ctrl.Finish() - - urlMock := NewMockurlProvider(ctrl) - - feed := model.Feed{} + feed := model.Feed{ + Episodes: []*model.Episode{ + { + ID: "1", + Status: model.EpisodeDownloaded, + Title: "title", + Description: "description", + }, + }, + } - cfg := config.Feed{ - Custom: config.Custom{Description: "description", Category: "Technology", Subcategories: []string{"Gadgets", "Podcasting"}}, + cfg := Config{ + ID: "test", + Custom: Custom{Description: "description", Category: "Technology", Subcategories: []string{"Gadgets", "Podcasting"}}, } - out, err := Build(context.Background(), &feed, &cfg, urlMock) + out, err := Build(context.Background(), &feed, &cfg, "http://localhost/") assert.NoError(t, err) assert.EqualValues(t, "description", out.Description) @@ -33,7 +36,13 @@ func TestBuildXML(t *testing.T) { require.Len(t, out.ICategories, 1) category := out.ICategories[0] assert.EqualValues(t, "Technology", category.Text) + require.Len(t, category.ICategories, 2) assert.EqualValues(t, "Gadgets", category.ICategories[0].Text) assert.EqualValues(t, "Podcasting", category.ICategories[1].Text) + + require.Len(t, out.Items, 1) + require.NotNil(t, out.Items[0].Enclosure) + assert.EqualValues(t, out.Items[0].Enclosure.URL, "http://localhost/test/1.mp4") + assert.EqualValues(t, out.Items[0].Enclosure.Type, itunes.MP4) } diff --git a/pkg/fs/local.go b/pkg/fs/local.go index 12d58275..e5c78cc6 100644 --- a/pkg/fs/local.go +++ b/pkg/fs/local.go @@ -4,89 +4,65 @@ import ( "context" "fmt" "io" + "net/http" "os" "path/filepath" - "strings" "github.com/pkg/errors" log "github.com/sirupsen/logrus" ) +// LocalConfig is the storage configuration for 
local file system +type LocalConfig struct { + DataDir string `toml:"data_dir"` +} + +// Local implements local file storage type Local struct { - hostname string - rootDir string + rootDir string + WebUIEnabled bool } -func NewLocal(rootDir string, hostname string) (*Local, error) { - if hostname == "" { - return nil, errors.New("hostname can't be empty") - } +func NewLocal(rootDir string, webUIEnabled bool) (*Local, error) { + return &Local{rootDir: rootDir, WebUIEnabled: webUIEnabled}, nil +} - hostname = strings.TrimSuffix(hostname, "/") - if !strings.HasPrefix(hostname, "http") { - hostname = fmt.Sprintf("http://%s", hostname) +func (l *Local) Open(name string) (http.File, error) { + if name == "/index.html" && l.WebUIEnabled { + return os.Open("./html/index.html") } + path := filepath.Join(l.rootDir, name) + return os.Open(path) +} - return &Local{rootDir: rootDir, hostname: hostname}, nil +func (l *Local) Delete(_ctx context.Context, name string) error { + path := filepath.Join(l.rootDir, name) + if err := os.Remove(path); err != nil { + return fmt.Errorf("failed to delete file %s: %w", path, err) + } + return nil } -func (l *Local) Create(ctx context.Context, ns string, fileName string, reader io.Reader) (int64, error) { +func (l *Local) Create(_ctx context.Context, name string, reader io.Reader) (int64, error) { var ( - logger = log.WithField("episode_id", fileName) - feedDir = filepath.Join(l.rootDir, ns) + logger = log.WithField("name", name) + path = filepath.Join(l.rootDir, name) ) - if err := os.MkdirAll(feedDir, 0755); err != nil { - return 0, errors.Wrapf(err, "failed to create a directory for the feed: %s", feedDir) - } - - logger.Debugf("creating directory: %s", feedDir) - if err := os.MkdirAll(feedDir, 0755); err != nil { - return 0, errors.Wrapf(err, "failed to create feed dir: %s", feedDir) + if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { + return 0, errors.Wrapf(err, "failed to mkdir: %s", path) } - var ( - episodePath = 
filepath.Join(l.rootDir, ns, fileName) - ) - - logger.Debugf("copying to: %s", episodePath) - written, err := l.copyFile(reader, episodePath) + logger.Infof("creating file: %s", path) + written, err := l.copyFile(reader, path) if err != nil { return 0, errors.Wrap(err, "failed to copy file") } - logger.Debugf("copied %d bytes", written) + logger.Debugf("written %d bytes", written) return written, nil } -func (l *Local) Delete(ctx context.Context, ns string, fileName string) error { - path := filepath.Join(l.rootDir, ns, fileName) - return os.Remove(path) -} - -func (l *Local) Size(ctx context.Context, ns string, fileName string) (int64, error) { - path := filepath.Join(l.rootDir, ns, fileName) - - stat, err := os.Stat(path) - if err == nil { - return stat.Size(), nil - } - - return 0, err -} - -func (l *Local) URL(ctx context.Context, ns string, fileName string) (string, error) { - if _, err := l.Size(ctx, ns, fileName); err != nil { - return "", errors.Wrap(err, "failed to check whether file exists") - } - - if ns == "" { - return fmt.Sprintf("%s/%s", l.hostname, fileName), nil - } - - return fmt.Sprintf("%s/%s/%s", l.hostname, ns, fileName), nil -} - func (l *Local) copyFile(source io.Reader, destinationPath string) (int64, error) { dest, err := os.Create(destinationPath) if err != nil { @@ -102,3 +78,18 @@ func (l *Local) copyFile(source io.Reader, destinationPath string) (int64, error return written, nil } + +func (l *Local) Size(_ctx context.Context, name string) (int64, error) { + file, err := l.Open(name) + if err != nil { + return 0, err + } + defer file.Close() + + stat, err := file.Stat() + if err != nil { + return 0, err + } + + return stat.Size(), nil +} diff --git a/pkg/fs/local_test.go b/pkg/fs/local_test.go index 33a4135a..ce532841 100644 --- a/pkg/fs/local_test.go +++ b/pkg/fs/local_test.go @@ -3,13 +3,12 @@ package fs import ( "bytes" "context" - "io/ioutil" "os" "path/filepath" "testing" + "github.com/pkg/errors" 
"github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) var ( @@ -17,25 +16,19 @@ var ( ) func TestNewLocal(t *testing.T) { - local, err := NewLocal("", "localhost") + local, err := NewLocal("", false) assert.NoError(t, err) - assert.Equal(t, "http://localhost", local.hostname) - - local, err = NewLocal("", "https://localhost:8080/") - assert.NoError(t, err) - assert.Equal(t, "https://localhost:8080", local.hostname) + assert.NotNil(t, local) } func TestLocal_Create(t *testing.T) { - tmpDir, err := ioutil.TempDir("", "podsync-local-stor-") - require.NoError(t, err) - - defer os.RemoveAll(tmpDir) + tmpDir, err := os.MkdirTemp("", "") + assert.NoError(t, err) - stor, err := NewLocal(tmpDir, "localhost") + stor, err := NewLocal(tmpDir, false) assert.NoError(t, err) - written, err := stor.Create(testCtx, "1", "test", bytes.NewBuffer([]byte{1, 5, 7, 8, 3})) + written, err := stor.Create(testCtx, "1/test", bytes.NewBuffer([]byte{1, 5, 7, 8, 3})) assert.NoError(t, err) assert.EqualValues(t, 5, written) @@ -45,76 +38,57 @@ func TestLocal_Create(t *testing.T) { } func TestLocal_Size(t *testing.T) { - tmpDir, err := ioutil.TempDir("", "podsync-local-stor-") - require.NoError(t, err) + tmpDir, err := os.MkdirTemp("", "") + assert.NoError(t, err) defer os.RemoveAll(tmpDir) - stor, err := NewLocal(tmpDir, "localhost") + stor, err := NewLocal(tmpDir, false) assert.NoError(t, err) - _, err = stor.Create(testCtx, "1", "test", bytes.NewBuffer([]byte{1, 5, 7, 8, 3})) + _, err = stor.Create(testCtx, "1/test", bytes.NewBuffer([]byte{1, 5, 7, 8, 3})) assert.NoError(t, err) - sz, err := stor.Size(testCtx, "1", "test") + sz, err := stor.Size(testCtx, "1/test") assert.NoError(t, err) assert.EqualValues(t, 5, sz) } func TestLocal_NoSize(t *testing.T) { - stor, err := NewLocal("", "localhost") + stor, err := NewLocal("", false) assert.NoError(t, err) - _, err = stor.Size(testCtx, "1", "test") + _, err = stor.Size(testCtx, "1/test") assert.True(t, os.IsNotExist(err)) } 
func TestLocal_Delete(t *testing.T) { - tmpDir, err := ioutil.TempDir("", "podsync-local-stor-") - require.NoError(t, err) - - defer os.RemoveAll(tmpDir) + tmpDir, err := os.MkdirTemp("", "") + assert.NoError(t, err) - stor, err := NewLocal(tmpDir, "localhost") + stor, err := NewLocal(tmpDir, false) assert.NoError(t, err) - _, err = stor.Create(testCtx, "1", "test", bytes.NewBuffer([]byte{1, 5, 7, 8, 3})) + _, err = stor.Create(testCtx, "1/test", bytes.NewBuffer([]byte{1, 5, 7, 8, 3})) assert.NoError(t, err) - err = stor.Delete(testCtx, "1", "test") + err = stor.Delete(testCtx, "1/test") assert.NoError(t, err) - _, err = stor.Size(testCtx, "1", "test") + _, err = stor.Size(testCtx, "1/test") assert.True(t, os.IsNotExist(err)) _, err = os.Stat(filepath.Join(tmpDir, "1", "test")) assert.True(t, os.IsNotExist(err)) -} - -func TestLocal_URL(t *testing.T) { - tmpDir, err := ioutil.TempDir("", "podsync-local-stor-") - require.NoError(t, err) - defer os.RemoveAll(tmpDir) - - stor, err := NewLocal(tmpDir, "localhost") - assert.NoError(t, err) - - _, err = stor.Create(testCtx, "1", "test", bytes.NewBuffer([]byte{1, 5, 7, 8, 3})) - assert.NoError(t, err) - - url, err := stor.URL(testCtx, "1", "test") - assert.NoError(t, err) - assert.EqualValues(t, "http://localhost/1/test", url) + err = stor.Delete(testCtx, "1/test") + assert.True(t, errors.Is(err, os.ErrNotExist)) } func TestLocal_copyFile(t *testing.T) { reader := bytes.NewReader([]byte{1, 2, 4}) - - tmpDir, err := ioutil.TempDir("", "podsync-test-") - require.NoError(t, err) - - defer os.RemoveAll(tmpDir) + tmpDir, err := os.MkdirTemp("", "") + assert.NoError(t, err) file := filepath.Join(tmpDir, "1") diff --git a/pkg/fs/s3.go b/pkg/fs/s3.go new file mode 100644 index 00000000..1337e161 --- /dev/null +++ b/pkg/fs/s3.go @@ -0,0 +1,150 @@ +package fs + +import ( + "bytes" + "context" + "io" + "net/http" + "os" + "path" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" + 
"github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go/service/s3" + "github.com/aws/aws-sdk-go/service/s3/s3iface" + "github.com/aws/aws-sdk-go/service/s3/s3manager" + "github.com/gabriel-vasile/mimetype" + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" +) + +// S3Config is the configuration for a S3-compatible storage provider +type S3Config struct { + // S3 Bucket to store files + Bucket string `toml:"bucket"` + // Region of the S3 service + Region string `toml:"region"` + // EndpointURL is an HTTP endpoint of the S3 API + EndpointURL string `toml:"endpoint_url"` + // Prefix is a prefix (subfolder) to use to build key names + Prefix string `toml:"prefix"` +} + +// S3 implements file storage for S3-compatible providers. +type S3 struct { + api s3iface.S3API + uploader *s3manager.Uploader + bucket string + prefix string +} + +func NewS3(c S3Config) (*S3, error) { + cfg := aws.NewConfig(). + WithEndpoint(c.EndpointURL). + WithRegion(c.Region). + WithLogger(s3logger{}). 
+ WithLogLevel(aws.LogDebug) + sess, err := session.NewSessionWithOptions(session.Options{Config: *cfg}) + if err != nil { + return nil, errors.Wrap(err, "failed to initialize S3 session") + } + return &S3{ + api: s3.New(sess), + uploader: s3manager.NewUploader(sess), + bucket: c.Bucket, + prefix: c.Prefix, + }, nil +} + +func (s *S3) Open(_name string) (http.File, error) { + return nil, errors.New("serving files from S3 is not supported") +} + +func (s *S3) Delete(ctx context.Context, name string) error { + key := s.buildKey(name) + _, err := s.api.DeleteObjectWithContext(ctx, &s3.DeleteObjectInput{ + Bucket: &s.bucket, + Key: &key, + }) + if err != nil { + if awsErr, ok := err.(awserr.Error); ok { + if awsErr.Code() == "NotFound" { + return os.ErrNotExist + } + } + } + return err +} + +func (s *S3) Create(ctx context.Context, name string, reader io.Reader) (int64, error) { + key := s.buildKey(name) + logger := log.WithField("key", key) + + // Detect MIME type from the first 512 bytes and then replay them with the rest of the stream. 
+ buf := make([]byte, 512) + n, err := io.ReadFull(reader, buf) + if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF { + return 0, errors.Wrap(err, "failed to read file header for MIME detection") + } + head := buf[:n] + m := mimetype.Detect(head) + body := io.MultiReader(bytes.NewReader(head), reader) + + logger.Infof("uploading file to %s", s.bucket) + r := &readerWithN{Reader: body} + _, err = s.uploader.UploadWithContext(ctx, &s3manager.UploadInput{ + Body: r, + Bucket: &s.bucket, + ContentType: aws.String(m.String()), + Key: &key, + }) + if err != nil { + return 0, errors.Wrap(err, "failed to upload file") + } + + logger.Debugf("written %d bytes", r.n) + return int64(r.n), nil +} + +func (s *S3) Size(ctx context.Context, name string) (int64, error) { + key := s.buildKey(name) + logger := log.WithField("key", key) + + logger.Debugf("getting file size from %s", s.bucket) + resp, err := s.api.HeadObjectWithContext(ctx, &s3.HeadObjectInput{ + Bucket: &s.bucket, + Key: &key, + }) + if err != nil { + if awsErr, ok := err.(awserr.Error); ok { + if awsErr.Code() == "NotFound" { + return 0, os.ErrNotExist + } + } + return 0, errors.Wrap(err, "failed to get file size") + } + + return *resp.ContentLength, nil +} + +func (s *S3) buildKey(name string) string { + return path.Join(s.prefix, name) +} + +type readerWithN struct { + io.Reader + n int +} + +func (r *readerWithN) Read(p []byte) (n int, err error) { + n, err = r.Reader.Read(p) + r.n += n + return +} + +type s3logger struct{} + +func (s s3logger) Log(args ...interface{}) { + log.Debug(args...) 
+} diff --git a/pkg/fs/s3_test.go b/pkg/fs/s3_test.go new file mode 100644 index 00000000..8d25b48b --- /dev/null +++ b/pkg/fs/s3_test.go @@ -0,0 +1,124 @@ +package fs + +import ( + "bytes" + "io" + "os" + "testing" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" + "github.com/aws/aws-sdk-go/aws/client/metadata" + "github.com/aws/aws-sdk-go/aws/request" + "github.com/aws/aws-sdk-go/service/s3" + "github.com/aws/aws-sdk-go/service/s3/s3iface" + "github.com/aws/aws-sdk-go/service/s3/s3manager" + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" +) + +func TestS3_Create(t *testing.T) { + files := make(map[string][]byte) + stor, err := newMockS3(files, "") + assert.NoError(t, err) + + written, err := stor.Create(testCtx, "1/test", bytes.NewBuffer([]byte{1, 5, 7, 8, 3})) + assert.NoError(t, err) + assert.EqualValues(t, 5, written) + + d, ok := files["1/test"] + assert.True(t, ok) + assert.EqualValues(t, 5, len(d)) +} + +func TestS3_Size(t *testing.T) { + files := make(map[string][]byte) + stor, err := newMockS3(files, "") + assert.NoError(t, err) + + _, err = stor.Create(testCtx, "1/test", bytes.NewBuffer([]byte{1, 5, 7, 8, 3})) + assert.NoError(t, err) + + sz, err := stor.Size(testCtx, "1/test") + assert.NoError(t, err) + assert.EqualValues(t, 5, sz) +} + +func TestS3_NoSize(t *testing.T) { + files := make(map[string][]byte) + stor, err := newMockS3(files, "") + assert.NoError(t, err) + + _, err = stor.Size(testCtx, "1/test") + assert.True(t, os.IsNotExist(err)) +} + +func TestS3_Delete(t *testing.T) { + files := make(map[string][]byte) + stor, err := newMockS3(files, "") + assert.NoError(t, err) + + _, err = stor.Create(testCtx, "1/test", bytes.NewBuffer([]byte{1, 5, 7, 8, 3})) + assert.NoError(t, err) + + err = stor.Delete(testCtx, "1/test") + assert.NoError(t, err) + + _, err = stor.Size(testCtx, "1/test") + assert.True(t, errors.Is(err, os.ErrNotExist)) + + _, ok := files["1/test"] + assert.False(t, ok) + + err = 
stor.Delete(testCtx, "1/test") + assert.True(t, errors.Is(err, os.ErrNotExist)) +} + +func TestS3_BuildKey(t *testing.T) { + files := make(map[string][]byte) + + stor, _ := newMockS3(files, "") + key := stor.buildKey("test-fn") + assert.EqualValues(t, "test-fn", key) + + stor, _ = newMockS3(files, "mock-prefix") + key = stor.buildKey("test-fn") + assert.EqualValues(t, "mock-prefix/test-fn", key) +} + +type mockS3API struct { + s3iface.S3API + files map[string][]byte +} + +func newMockS3(files map[string][]byte, prefix string) (*S3, error) { + api := &mockS3API{files: files} + return &S3{ + api: api, + uploader: s3manager.NewUploaderWithClient(api), + bucket: "mock-bucket", + prefix: prefix, + }, nil +} + +func (m *mockS3API) PutObjectRequest(input *s3.PutObjectInput) (*request.Request, *s3.PutObjectOutput) { + content, _ := io.ReadAll(input.Body) + req := request.New(aws.Config{}, metadata.ClientInfo{}, request.Handlers{}, nil, &request.Operation{}, nil, nil) + m.files[*input.Key] = content + return req, &s3.PutObjectOutput{} +} + +func (m *mockS3API) HeadObjectWithContext(ctx aws.Context, input *s3.HeadObjectInput, opts ...request.Option) (*s3.HeadObjectOutput, error) { + if _, ok := m.files[*input.Key]; ok { + return &s3.HeadObjectOutput{ContentLength: aws.Int64(int64(len(m.files[*input.Key])))}, nil + } + return nil, awserr.New("NotFound", "", nil) +} + +func (m *mockS3API) DeleteObjectWithContext(ctx aws.Context, input *s3.DeleteObjectInput, opts ...request.Option) (*s3.DeleteObjectOutput, error) { + if _, ok := m.files[*input.Key]; ok { + delete(m.files, *input.Key) + return &s3.DeleteObjectOutput{}, nil + } + return nil, awserr.New("NotFound", "", nil) +} diff --git a/pkg/fs/storage.go b/pkg/fs/storage.go index a157bcf2..efdf1905 100644 --- a/pkg/fs/storage.go +++ b/pkg/fs/storage.go @@ -3,18 +3,28 @@ package fs import ( "context" "io" + "net/http" ) +// Storage is a file system interface to host downloaded episodes and feeds. 
type Storage interface { + // FileSystem must be implemented to in order to pass Storage interface to HTTP file server. + http.FileSystem + // Create will create a new file from reader - Create(ctx context.Context, ns string, fileName string, reader io.Reader) (int64, error) + Create(ctx context.Context, name string, reader io.Reader) (int64, error) // Delete deletes the file - Delete(ctx context.Context, ns string, fileName string) error + Delete(ctx context.Context, name string) error - // Size returns the size of a file in bytes - Size(ctx context.Context, ns string, fileName string) (int64, error) + // Size returns a storage object's size in bytes + Size(ctx context.Context, name string) (int64, error) +} - // URL will generate a download link for a file - URL(ctx context.Context, ns string, fileName string) (string, error) +// Config is a configuration for the file storage backend +type Config struct { + // Type is the type of file system to use + Type string `toml:"type"` + Local LocalConfig `toml:"local"` + S3 S3Config `toml:"s3"` } diff --git a/pkg/model/feed.go b/pkg/model/feed.go index e2c0cabf..fb6cc40b 100644 --- a/pkg/model/feed.go +++ b/pkg/model/feed.go @@ -16,8 +16,17 @@ const ( type Format string const ( - FormatAudio = Format("audio") - FormatVideo = Format("video") + FormatAudio = Format("audio") + FormatVideo = Format("video") + FormatCustom = Format("custom") +) + +// Playlist sorting style +type Sorting string + +const ( + SortingDesc = Sorting("desc") + SortingAsc = Sorting("asc") ) type Episode struct { @@ -54,6 +63,8 @@ type Feed struct { ItemURL string `json:"item_url"` // Platform specific URL Episodes []*Episode `json:"-"` // Array of episodes UpdatedAt time.Time `json:"updated_at"` + PlaylistSort Sorting `json:"playlist_sort"` + PrivateFeed bool `json:"private_feed"` } type EpisodeStatus string diff --git a/pkg/model/link.go b/pkg/model/link.go index 4a84e9a4..6731710f 100644 --- a/pkg/model/link.go +++ b/pkg/model/link.go @@ -7,18 
+7,21 @@ const ( TypePlaylist = Type("playlist") TypeUser = Type("user") TypeGroup = Type("group") + TypeHandle = Type("handle") ) type Provider string const ( - ProviderYoutube = Provider("youtube") - ProviderVimeo = Provider("vimeo") + ProviderYoutube = Provider("youtube") + ProviderVimeo = Provider("vimeo") + ProviderSoundcloud = Provider("soundcloud") + ProviderTwitch = Provider("twitch") ) // Info represents data extracted from URL type Info struct { LinkType Type // Either group, channel or user - Provider Provider // Youtube or Vimeo + Provider Provider // Youtube, Vimeo, SoundCloud or Twitch ItemID string } diff --git a/pkg/ytdl/ytdl.go b/pkg/ytdl/ytdl.go index fe87752f..70c7c526 100644 --- a/pkg/ytdl/ytdl.go +++ b/pkg/ytdl/ytdl.go @@ -2,9 +2,9 @@ package ytdl import ( "context" + "encoding/json" "fmt" "io" - "io/ioutil" "net/http" "os" "os/exec" @@ -13,10 +13,10 @@ import ( "sync" "time" + "github.com/mxpv/podsync/pkg/feed" "github.com/pkg/errors" log "github.com/sirupsen/logrus" - "github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/model" ) @@ -25,23 +25,65 @@ const ( UpdatePeriod = 24 * time.Hour ) +type PlaylistMetadataThumbnail struct { + Id string `json:"id"` + Url string `json:"url"` + Resolution string `json:"resolution"` + Width int `json:"width"` + Height int `json:"height"` +} + +type PlaylistMetadata struct { + Id string `json:"id"` + Title string `json:"title"` + Description string `json:"description"` + Thumbnails []PlaylistMetadataThumbnail `json:"thumbnails"` + Channel string `json:"channel"` + ChannelId string `json:"channel_id"` + ChannelUrl string `json:"channel_url"` + WebpageUrl string `json:"webpage_url"` +} + var ( ErrTooManyRequests = errors.New(http.StatusText(http.StatusTooManyRequests)) ) +// Config is a youtube-dl related configuration +type Config struct { + // SelfUpdate toggles self update every 24 hour + SelfUpdate bool `toml:"self_update"` + // Timeout in minutes for youtube-dl process to finish download + 
Timeout int `toml:"timeout"` + // CustomBinary is a custom path to youtube-dl, this allows using various youtube-dl forks. + CustomBinary string `toml:"custom_binary"` +} + type YoutubeDl struct { path string timeout time.Duration updateLock sync.Mutex // Don't call youtube-dl while self updating } -func New(ctx context.Context, cfg config.Downloader) (*YoutubeDl, error) { - path, err := exec.LookPath("youtube-dl") - if err != nil { - return nil, errors.Wrap(err, "youtube-dl binary not found") - } +func New(ctx context.Context, cfg Config) (*YoutubeDl, error) { + var ( + path string + err error + ) - log.Debugf("found youtube-dl binary at %q", path) + if cfg.CustomBinary != "" { + path = cfg.CustomBinary + + // Don't update custom youtube-dl binaries. + log.Warnf("using custom youtube-dl binary, turning self updates off") + cfg.SelfUpdate = false + } else { + path, err = exec.LookPath("youtube-dl") + if err != nil { + return nil, errors.Wrap(err, "youtube-dl binary not found") + } + + log.Debugf("found youtube-dl binary at %q", path) + } timeout := DefaultDownloadTimeout if cfg.Timeout > 0 { @@ -134,8 +176,37 @@ func (dl *YoutubeDl) Update(ctx context.Context) error { return nil } -func (dl *YoutubeDl) Download(ctx context.Context, feedConfig *config.Feed, episode *model.Episode) (r io.ReadCloser, err error) { - tmpDir, err := ioutil.TempDir("", "podsync-") +func (dl *YoutubeDl) PlaylistMetadata(ctx context.Context, url string) (metadata PlaylistMetadata, err error) { + log.Info("getting playlist metadata for: ", url) + args := []string{ + "--playlist-items", "0", + "-J", // JSON output + "-q", // quiet mode + "--no-warnings", // suppress warnings + url, + } + dl.updateLock.Lock() + defer dl.updateLock.Unlock() + output, err := dl.exec(ctx, args...) 
+ if err != nil { + log.WithError(err).Errorf("youtube-dl error: %s", url) + + // YouTube might block host with HTTP Error 429: Too Many Requests + if strings.Contains(output, "HTTP Error 429") { + return PlaylistMetadata{}, ErrTooManyRequests + } + + log.Error(output) + return PlaylistMetadata{}, errors.New(output) + } + + var playlistMetadata PlaylistMetadata + json.Unmarshal([]byte(output), &playlistMetadata) + return playlistMetadata, nil +} + +func (dl *YoutubeDl) Download(ctx context.Context, feedConfig *feed.Config, episode *model.Episode) (r io.ReadCloser, err error) { + tmpDir, err := os.MkdirTemp("", "podsync-") if err != nil { return nil, errors.Wrap(err, "failed to get temp dir for download") } @@ -175,6 +246,10 @@ func (dl *YoutubeDl) Download(ctx context.Context, feedConfig *config.Feed, epis if feedConfig.Format == model.FormatAudio { ext = "mp3" } + if feedConfig.Format == model.FormatCustom { + ext = feedConfig.CustomFormat.Extension + } + // filePath now with the final extension filePath = filepath.Join(tmpDir, fmt.Sprintf("%s.%s", episode.ID, ext)) f, err := os.Open(filePath) @@ -198,10 +273,11 @@ func (dl *YoutubeDl) exec(ctx context.Context, args ...string) (string, error) { return string(output), nil } -func buildArgs(feedConfig *config.Feed, episode *model.Episode, outputFilePath string) []string { +func buildArgs(feedConfig *feed.Config, episode *model.Episode, outputFilePath string) []string { var args []string - if feedConfig.Format == model.FormatVideo { + switch feedConfig.Format { + case model.FormatVideo: // Video, mp4, high by default format := "bestvideo[ext=mp4][vcodec^=avc1]+bestaudio[ext=m4a]/best[ext=mp4][vcodec^=avc1]/best[ext=mp4]/best" @@ -213,7 +289,8 @@ func buildArgs(feedConfig *config.Feed, episode *model.Episode, outputFilePath s } args = append(args, "--format", format) - } else { + + case model.FormatAudio: // Audio, mp3, high by default format := "bestaudio" if feedConfig.Quality == model.QualityLow { @@ -221,6 +298,9 
@@ func buildArgs(feedConfig *config.Feed, episode *model.Episode, outputFilePath s } args = append(args, "--extract-audio", "--audio-format", "mp3", "--format", format) + + default: + args = append(args, "--audio-format", feedConfig.CustomFormat.Extension, "--format", feedConfig.CustomFormat.YouTubeDLFormat) } // Insert additional per-feed youtube-dl arguments diff --git a/pkg/ytdl/ytdl_test.go b/pkg/ytdl/ytdl_test.go index 7ea1a965..f1cbe915 100644 --- a/pkg/ytdl/ytdl_test.go +++ b/pkg/ytdl/ytdl_test.go @@ -3,7 +3,7 @@ package ytdl import ( "testing" - "github.com/mxpv/podsync/pkg/config" + "github.com/mxpv/podsync/pkg/feed" "github.com/mxpv/podsync/pkg/model" "github.com/stretchr/testify/assert" @@ -11,14 +11,15 @@ import ( func TestBuildArgs(t *testing.T) { tests := []struct { - name string - format model.Format - quality model.Quality - maxHeight int - output string - videoURL string - ytdlArgs []string - expect []string + name string + format model.Format + customFormat feed.CustomFormat + quality model.Quality + maxHeight int + output string + videoURL string + ytdlArgs []string + expect []string }{ { name: "Audio unknown quality", @@ -101,13 +102,23 @@ func TestBuildArgs(t *testing.T) { ytdlArgs: []string{"--write-sub", "--embed-subs", "--sub-lang", "en,en-US,en-GB"}, expect: []string{"--format", "bestvideo[ext=mp4][vcodec^=avc1]+bestaudio[ext=m4a]/best[ext=mp4][vcodec^=avc1]/best[ext=mp4]/best", "--write-sub", "--embed-subs", "--sub-lang", "en,en-US,en-GB", "--output", "/tmp/2", "http://url1"}, }, + { + name: "Custom format", + format: model.FormatCustom, + customFormat: feed.CustomFormat{YouTubeDLFormat: "bestaudio[ext=m4a]", Extension: "m4a"}, + quality: model.QualityHigh, + output: "/tmp/2", + videoURL: "http://url1", + expect: []string{"--audio-format", "m4a", "--format", "bestaudio[ext=m4a]", "--output", "/tmp/2", "http://url1"}, + }, } for _, tst := range tests { t.Run(tst.name, func(t *testing.T) { - result := buildArgs(&config.Feed{ + result := 
buildArgs(&feed.Config{ Format: tst.format, Quality: tst.quality, + CustomFormat: tst.customFormat, MaxHeight: tst.maxHeight, YouTubeDLArgs: tst.ytdlArgs, }, &model.Episode{ diff --git a/services/update/matcher.go b/services/update/matcher.go new file mode 100644 index 00000000..fda338c6 --- /dev/null +++ b/services/update/matcher.go @@ -0,0 +1,72 @@ +package update + +import ( + "regexp" + "time" + + "github.com/mxpv/podsync/pkg/feed" + "github.com/mxpv/podsync/pkg/model" + log "github.com/sirupsen/logrus" +) + +func matchRegexpFilter(pattern, str string, negative bool, logger log.FieldLogger) bool { + if pattern != "" { + matched, err := regexp.MatchString(pattern, str) + if err != nil { + logger.Warnf("pattern %q is not a valid") + } else { + if matched == negative { + logger.Infof("skipping due to regexp mismatch") + return false + } + } + } + return true +} + +func matchFilters(episode *model.Episode, filters *feed.Filters) bool { + logger := log.WithFields(log.Fields{"episode_id": episode.ID}) + if !matchRegexpFilter(filters.Title, episode.Title, false, logger.WithField("filter", "title")) { + return false + } + + if !matchRegexpFilter(filters.NotTitle, episode.Title, true, logger.WithField("filter", "not_title")) { + return false + } + + if !matchRegexpFilter(filters.Description, episode.Description, false, logger.WithField("filter", "description")) { + return false + } + + if !matchRegexpFilter(filters.NotDescription, episode.Description, true, logger.WithField("filter", "not_description")) { + return false + } + + if filters.MaxDuration > 0 && episode.Duration > filters.MaxDuration { + logger.WithField("filter", "max_duration").Infof("skipping due to duration filter (%ds)", episode.Duration) + return false + } + + if filters.MinDuration > 0 && episode.Duration < filters.MinDuration { + logger.WithField("filter", "min_duration").Infof("skipping due to duration filter (%ds)", episode.Duration) + return false + } + + if filters.MaxAge > 0 { + dateDiff := 
int(time.Since(episode.PubDate).Hours()) / 24 + if dateDiff > filters.MaxAge { + logger.WithField("filter", "max_age").Infof("skipping due to max_age filter (%dd > %dd)", dateDiff, filters.MaxAge) + return false + } + } + + if filters.MinAge > 0 { + dateDiff := int(time.Since(episode.PubDate).Hours()) / 24 + if dateDiff < filters.MinAge { + logger.WithField("filter", "min_age").Infof("skipping due to min_age filter (%dd < %dd)", dateDiff, filters.MinAge) + return false + } + } + + return true +} diff --git a/services/update/matcher_test.go b/services/update/matcher_test.go new file mode 100644 index 00000000..a80f9fd4 --- /dev/null +++ b/services/update/matcher_test.go @@ -0,0 +1,28 @@ +package update + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/mxpv/podsync/pkg/feed" + "github.com/mxpv/podsync/pkg/model" +) + +func TestNotTitleFilterIssue798(t *testing.T) { + // https://github.com/mxpv/podsync/issues/798 + filters := &feed.Filters{ + NotTitle: "(?i)^(holy mass|holy sacrifice|the holy)( |$)", + MinDuration: 600, + } + + // Titles starting with pattern should be excluded + assert.False(t, matchFilters(&model.Episode{ID: "1", Title: "Holy Mass โ€” Tuesday", Duration: 3600}, filters)) + assert.False(t, matchFilters(&model.Episode{ID: "2", Title: "The Holy Sacrifice of the Mass", Duration: 3600}, filters)) + assert.False(t, matchFilters(&model.Episode{ID: "3", Title: "The Holy Mass (Latin)", Duration: 3600}, filters)) + + // Titles NOT starting with pattern should be included + assert.True(t, matchFilters(&model.Episode{ID: "4", Title: "Homily: The Parable of the Good Samaritan", Duration: 1200}, filters)) + assert.True(t, matchFilters(&model.Episode{ID: "5", Title: "Sermon โ€” Love Your Enemies", Duration: 1800}, filters)) + assert.True(t, matchFilters(&model.Episode{ID: "6", Title: "Reflection on Today's Gospel", Duration: 900}, filters)) +} diff --git a/cmd/podsync/updater.go b/services/update/updater.go similarity index 69% 
rename from cmd/podsync/updater.go rename to services/update/updater.go index a548ea28..8d637ac6 100644 --- a/cmd/podsync/updater.go +++ b/services/update/updater.go @@ -1,4 +1,4 @@ -package main +package update import ( "bytes" @@ -6,7 +6,6 @@ import ( "fmt" "io" "os" - "regexp" "sort" "time" @@ -15,7 +14,6 @@ import ( log "github.com/sirupsen/logrus" "github.com/mxpv/podsync/pkg/builder" - "github.com/mxpv/podsync/pkg/config" "github.com/mxpv/podsync/pkg/db" "github.com/mxpv/podsync/pkg/feed" "github.com/mxpv/podsync/pkg/fs" @@ -24,38 +22,40 @@ import ( ) type Downloader interface { - Download(ctx context.Context, feedConfig *config.Feed, episode *model.Episode) (io.ReadCloser, error) + Download(ctx context.Context, feedConfig *feed.Config, episode *model.Episode) (io.ReadCloser, error) + PlaylistMetadata(ctx context.Context, url string) (metadata ytdl.PlaylistMetadata, err error) } -type Updater struct { - config *config.Config +type TokenList []string + +type Manager struct { + hostname string downloader Downloader db db.Storage fs fs.Storage + feeds map[string]*feed.Config keys map[model.Provider]feed.KeyProvider } -func NewUpdater(config *config.Config, downloader Downloader, db db.Storage, fs fs.Storage) (*Updater, error) { - keys := map[model.Provider]feed.KeyProvider{} - - for name, list := range config.Tokens { - provider, err := feed.NewKeyProvider(list) - if err != nil { - return nil, errors.Wrapf(err, "failed to create key provider for %q", name) - } - keys[name] = provider - } - - return &Updater{ - config: config, +func NewUpdater( + feeds map[string]*feed.Config, + keys map[model.Provider]feed.KeyProvider, + hostname string, + downloader Downloader, + db db.Storage, + fs fs.Storage, +) (*Manager, error) { + return &Manager{ + hostname: hostname, downloader: downloader, db: db, fs: fs, + feeds: feeds, keys: keys, }, nil } -func (u *Updater) Update(ctx context.Context, feedConfig *config.Feed) error { +func (u *Manager) Update(ctx context.Context, 
feedConfig *feed.Config) error { log.WithFields(log.Fields{ "feed_id": feedConfig.ID, "format": feedConfig.Format, @@ -68,10 +68,20 @@ func (u *Updater) Update(ctx context.Context, feedConfig *config.Feed) error { return errors.Wrap(err, "update failed") } - if err := u.downloadEpisodes(ctx, feedConfig); err != nil { + // Fetch episodes for download + episodesToDownload, err := u.fetchEpisodes(ctx, feedConfig) + if err != nil { + return errors.Wrap(err, "fetch episodes failed") + } + + if err := u.downloadEpisodes(ctx, feedConfig, episodesToDownload); err != nil { return errors.Wrap(err, "download failed") } + if err := u.cleanup(ctx, feedConfig); err != nil { + log.WithError(err).Error("cleanup failed") + } + if err := u.buildXML(ctx, feedConfig); err != nil { return errors.Wrap(err, "xml build failed") } @@ -80,17 +90,13 @@ func (u *Updater) Update(ctx context.Context, feedConfig *config.Feed) error { return errors.Wrap(err, "opml build failed") } - if err := u.cleanup(ctx, feedConfig); err != nil { - log.WithError(err).Error("cleanup failed") - } - elapsed := time.Since(started) log.Infof("successfully updated feed in %s", elapsed) return nil } // updateFeed pulls API for new episodes and saves them to database -func (u *Updater) updateFeed(ctx context.Context, feedConfig *config.Feed) error { +func (u *Manager) updateFeed(ctx context.Context, feedConfig *feed.Config) error { info, err := builder.ParseURL(feedConfig.URL) if err != nil { return errors.Wrapf(err, "failed to parse URL: %s", feedConfig.URL) @@ -102,7 +108,7 @@ func (u *Updater) updateFeed(ctx context.Context, feedConfig *config.Feed) error } // Create an updater for this feed type - provider, err := builder.New(ctx, info.Provider, keyProvider.Get()) + provider, err := builder.New(ctx, info.Provider, keyProvider.Get(), u.downloader) if err != nil { return err } @@ -147,76 +153,53 @@ func (u *Updater) updateFeed(ctx context.Context, feedConfig *config.Feed) error return nil } -func (u *Updater) 
matchRegexpFilter(pattern, str string, negative bool, logger log.FieldLogger) bool { - if pattern != "" { - matched, err := regexp.MatchString(pattern, str) - if err != nil { - logger.Warnf("pattern %q is not a valid") - } else { - if matched == negative { - logger.Infof("skipping due to mismatch") - return false - } - } - } - return true -} - -func (u *Updater) matchFilters(episode *model.Episode, filters *config.Filters) bool { - logger := log.WithFields(log.Fields{"episode_id": episode.ID}) - if !u.matchRegexpFilter(filters.Title, episode.Title, false, logger.WithField("filter", "title")) { - return false - } - if !u.matchRegexpFilter(filters.NotTitle, episode.Title, true, logger.WithField("filter", "not_title")) { - return false - } - - if !u.matchRegexpFilter(filters.Description, episode.Description, false, logger.WithField("filter", "description")) { - return false - } - if !u.matchRegexpFilter(filters.NotDescription, episode.Description, true, logger.WithField("filter", "not_description")) { - return false - } - - return true -} - -func (u *Updater) downloadEpisodes(ctx context.Context, feedConfig *config.Feed) error { +func (u *Manager) fetchEpisodes(ctx context.Context, feedConfig *feed.Config) ([]*model.Episode, error) { var ( feedID = feedConfig.ID downloadList []*model.Episode pageSize = feedConfig.PageSize ) - log.WithField("page_size", pageSize).Info("downloading episodes") + log.WithField("page_size", pageSize).Info("fetching episodes for download") // Build the list of files to download - if err := u.db.WalkEpisodes(ctx, feedID, func(episode *model.Episode) error { + err := u.db.WalkEpisodes(ctx, feedID, func(episode *model.Episode) error { + var ( + logger = log.WithFields(log.Fields{"episode_id": episode.ID}) + ) if episode.Status != model.EpisodeNew && episode.Status != model.EpisodeError { // File already downloaded + logger.Infof("skipping due to already downloaded") return nil } - if !u.matchFilters(episode, &feedConfig.Filters) { + if 
!matchFilters(episode, &feedConfig.Filters) { return nil } // Limit the number of episodes downloaded at once pageSize-- - if pageSize <= 0 { + if pageSize < 0 { return nil } log.Debugf("adding %s (%q) to queue", episode.ID, episode.Title) downloadList = append(downloadList, episode) return nil - }); err != nil { - return errors.Wrapf(err, "failed to build update list") + }) + + if err != nil { + return nil, errors.Wrapf(err, "failed to build update list") } + return downloadList, nil +} + +func (u *Manager) downloadEpisodes(ctx context.Context, feedConfig *feed.Config, downloadList []*model.Episode) error { var ( downloadCount = len(downloadList) downloaded = 0 + feedID = feedConfig.ID ) if downloadCount > 0 { @@ -235,7 +218,7 @@ func (u *Updater) downloadEpisodes(ctx context.Context, feedConfig *config.Feed) ) // Check whether episode already exists - size, err := u.fs.Size(ctx, feedID, episodeName) + size, err := u.fs.Size(ctx, fmt.Sprintf("%s/%s", feedID, episodeName)) if err == nil { logger.Infof("episode %q already exists on disk", episode.ID) @@ -283,13 +266,30 @@ func (u *Updater) downloadEpisodes(ctx context.Context, feedConfig *config.Feed) } logger.Debug("copying file") - fileSize, err := u.fs.Create(ctx, feedID, episodeName, tempFile) + fileSize, err := u.fs.Create(ctx, fmt.Sprintf("%s/%s", feedID, episodeName), tempFile) tempFile.Close() if err != nil { logger.WithError(err).Error("failed to copy file") return err } + // Execute post episode download hooks + if len(feedConfig.PostEpisodeDownload) > 0 { + env := []string{ + "EPISODE_FILE=" + fmt.Sprintf("%s/%s", feedID, episodeName), + "FEED_NAME=" + feedID, + "EPISODE_TITLE=" + episode.Title, + } + + for i, hook := range feedConfig.PostEpisodeDownload { + if err := hook.Invoke(env); err != nil { + logger.Errorf("failed to execute post episode download hook %d: %v", i+1, err) + } else { + logger.Infof("post episode download hook %d executed successfully", i+1) + } + } + } + // Update file status in 
database logger.Infof("successfully downloaded file %q", episode.ID) @@ -308,7 +308,7 @@ func (u *Updater) downloadEpisodes(ctx context.Context, feedConfig *config.Feed) return nil } -func (u *Updater) buildXML(ctx context.Context, feedConfig *config.Feed) error { +func (u *Manager) buildXML(ctx context.Context, feedConfig *feed.Config) error { f, err := u.db.GetFeed(ctx, feedConfig.ID) if err != nil { return err @@ -316,7 +316,7 @@ func (u *Updater) buildXML(ctx context.Context, feedConfig *config.Feed) error { // Build iTunes XML feed with data received from builder log.Debug("building iTunes podcast feed") - podcast, err := feed.Build(ctx, f, feedConfig, u.fs) + podcast, err := feed.Build(ctx, f, feedConfig, u.hostname) if err != nil { return err } @@ -326,17 +326,17 @@ func (u *Updater) buildXML(ctx context.Context, feedConfig *config.Feed) error { xmlName = fmt.Sprintf("%s.xml", feedConfig.ID) ) - if _, err := u.fs.Create(ctx, "", xmlName, reader); err != nil { + if _, err := u.fs.Create(ctx, xmlName, reader); err != nil { return errors.Wrap(err, "failed to upload new XML feed") } return nil } -func (u *Updater) buildOPML(ctx context.Context) error { +func (u *Manager) buildOPML(ctx context.Context) error { // Build OPML with data received from builder log.Debug("building podcast OPML") - opml, err := feed.BuildOPML(ctx, u.config, u.db, u.fs) + opml, err := feed.BuildOPML(ctx, u.feeds, u.db, u.hostname) if err != nil { return err } @@ -346,22 +346,27 @@ func (u *Updater) buildOPML(ctx context.Context) error { xmlName = fmt.Sprintf("%s.opml", "podsync") ) - if _, err := u.fs.Create(ctx, "", xmlName, reader); err != nil { + if _, err := u.fs.Create(ctx, xmlName, reader); err != nil { return errors.Wrap(err, "failed to upload OPML") } return nil } -func (u *Updater) cleanup(ctx context.Context, feedConfig *config.Feed) error { +func (u *Manager) cleanup(ctx context.Context, feedConfig *feed.Config) error { var ( feedID = feedConfig.ID logger = 
log.WithField("feed_id", feedID) - count = feedConfig.Clean.KeepLast list []*model.Episode result *multierror.Error ) + if feedConfig.Clean == nil { + logger.Debug("no cleanup policy configured") + return nil + } + + count := feedConfig.Clean.KeepLast if count < 1 { logger.Info("nothing to clean") return nil @@ -388,9 +393,20 @@ func (u *Updater) cleanup(ctx context.Context, feedConfig *config.Feed) error { for _, episode := range list[count:] { logger.WithField("episode_id", episode.ID).Infof("deleting %q", episode.Title) - if err := u.fs.Delete(ctx, feedConfig.ID, feed.EpisodeName(feedConfig, episode)); err != nil { - result = multierror.Append(result, errors.Wrapf(err, "failed to delete episode: %s", episode.ID)) - continue + var ( + episodeName = feed.EpisodeName(feedConfig, episode) + path = fmt.Sprintf("%s/%s", feedConfig.ID, episodeName) + ) + + err := u.fs.Delete(ctx, path) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + logger.WithError(err).Errorf("failed to delete episode file: %s", episode.ID) + result = multierror.Append(result, errors.Wrapf(err, "failed to delete episode: %s", episode.ID)) + continue + } + + logger.WithField("episode_id", episode.ID).Info("episode was not found - file does not exist") } if err := u.db.UpdateEpisode(feedID, episode.ID, func(episode *model.Episode) error { diff --git a/services/web/server.go b/services/web/server.go new file mode 100644 index 00000000..866cf329 --- /dev/null +++ b/services/web/server.go @@ -0,0 +1,135 @@ +package web + +import ( + "encoding/json" + "expvar" + "fmt" + "net/http" + "time" + + log "github.com/sirupsen/logrus" + + "github.com/mxpv/podsync/pkg/db" + "github.com/mxpv/podsync/pkg/model" +) + +type Server struct { + http.Server + db db.Storage +} + +type Config struct { + // Hostname to use for download links + Hostname string `toml:"hostname"` + // Port is a server port to listen to + Port int `toml:"port"` + // Bind a specific IP addresses for server + // "*": bind all IP addresses 
which is the default option
+	// localhost or 127.0.0.1 bind a single IPv4 address
+	BindAddress string `toml:"bind_address"`
+	// Flag indicating if the server will use TLS
+	TLS bool `toml:"tls"`
+	// Path to a certificate file for TLS connections
+	CertificatePath string `toml:"certificate_path"`
+	// Path to a private key file for TLS connections
+	KeyFilePath string `toml:"key_file_path"`
+	// Specify path for reverse proxy and only [A-Za-z0-9]
+	Path string `toml:"path"`
+	// DataDir is a path to a directory to keep XML feeds and downloaded episodes,
+	// that will be available to user via web server for download.
+	DataDir string `toml:"data_dir"`
+	// WebUIEnabled is a flag indicating if web UI is enabled
+	WebUIEnabled bool `toml:"web_ui"`
+	// DebugEndpoints enables /debug/vars endpoint for runtime metrics (disabled by default)
+	DebugEndpoints bool `toml:"debug_endpoints"`
+}
+
+func New(cfg Config, storage http.FileSystem, database db.Storage) *Server {
+	port := cfg.Port
+	if port == 0 {
+		port = 8080
+	}
+
+	bindAddress := cfg.BindAddress
+	if bindAddress == "*" {
+		bindAddress = ""
+	}
+
+	srv := Server{
+		db: database,
+	}
+
+	srv.Addr = fmt.Sprintf("%s:%d", bindAddress, port)
+	log.Debugf("using address: %s", srv.Addr)
+
+	// Use a custom mux instead of http.DefaultServeMux to avoid exposing
+	// debug endpoints registered by imported packages (security fix for #799)
+	mux := http.NewServeMux()
+
+	fileServer := http.FileServer(storage)
+
+	log.Debugf("handle path: /%s", cfg.Path)
+	mux.Handle(fmt.Sprintf("/%s", cfg.Path), fileServer)
+
+	// Add health check endpoint
+	mux.HandleFunc("/health", srv.healthCheckHandler)
+
+	// Optionally enable debug endpoints (disabled by default for security)
+	if cfg.DebugEndpoints {
+		log.Info("debug endpoints enabled at /debug/vars")
+		mux.Handle("/debug/vars", expvar.Handler())
+	}
+
+	srv.Handler = mux
+
+	return &srv
+}
+
+type HealthStatus struct {
+	Status string `json:"status"`
+	Timestamp 
time.Time `json:"timestamp"` + FailedEpisodes int `json:"failed_episodes,omitempty"` + Message string `json:"message,omitempty"` +} + +func (s *Server) healthCheckHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + // Check for recent download failures within the last 24 hours + failedCount := 0 + cutoffTime := time.Now().Add(-24 * time.Hour) + + // Walk through all feeds to count recent failures + err := s.db.WalkFeeds(ctx, func(feed *model.Feed) error { + return s.db.WalkEpisodes(ctx, feed.ID, func(episode *model.Episode) error { + if episode.Status == model.EpisodeError && episode.PubDate.After(cutoffTime) { + failedCount++ + } + return nil + }) + }) + + w.Header().Set("Content-Type", "application/json") + + status := HealthStatus{ + Timestamp: time.Now(), + } + + if err != nil { + log.WithError(err).Error("health check database error") + status.Status = "unhealthy" + status.Message = "database error during health check" + w.WriteHeader(http.StatusServiceUnavailable) + } else if failedCount > 0 { + status.Status = "unhealthy" + status.FailedEpisodes = failedCount + status.Message = fmt.Sprintf("found %d failed downloads in the last 24 hours", failedCount) + w.WriteHeader(http.StatusServiceUnavailable) + } else { + status.Status = "healthy" + status.Message = "no recent download failures detected" + w.WriteHeader(http.StatusOK) + } + + json.NewEncoder(w).Encode(status) +} diff --git a/services/web/server_test.go b/services/web/server_test.go new file mode 100644 index 00000000..a11b66be --- /dev/null +++ b/services/web/server_test.go @@ -0,0 +1,56 @@ +package web + +import ( + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +type mockFileSystem struct{} + +func (m *mockFileSystem) Open(name string) (http.File, error) { + return nil, http.ErrMissingFile +} + +func TestDebugEndpointDisabledByDefault(t *testing.T) { + cfg := Config{ + Port: 8080, + Path: "feeds", + } + + srv := New(cfg, 
&mockFileSystem{}, nil) + + req := httptest.NewRequest(http.MethodGet, "/debug/vars", nil) + rec := httptest.NewRecorder() + + srv.Handler.ServeHTTP(rec, req) + + // Should return 404 when debug endpoints are disabled + assert.Equal(t, http.StatusNotFound, rec.Code) + // Should NOT contain expvar data + assert.False(t, strings.Contains(rec.Body.String(), "cmdline")) +} + +func TestDebugEndpointEnabledWhenConfigured(t *testing.T) { + cfg := Config{ + Port: 8080, + Path: "feeds", + DebugEndpoints: true, + } + + srv := New(cfg, &mockFileSystem{}, nil) + + req := httptest.NewRequest(http.MethodGet, "/debug/vars", nil) + rec := httptest.NewRecorder() + + srv.Handler.ServeHTTP(rec, req) + + // Should return 200 and JSON content when debug endpoints are enabled + assert.Equal(t, http.StatusOK, rec.Code) + assert.Contains(t, rec.Header().Get("Content-Type"), "application/json") + // Verify it contains expvar data (cmdline is always present) + assert.True(t, strings.Contains(rec.Body.String(), "cmdline")) +}