{
  "schemaVersion": "1.0",
  "item": {
    "slug": "noisepan-digest",
    "name": "Noisepan Digest",
    "source": "tencent",
    "type": "skill",
    "category": "AI 智能",
    "sourceUrl": "https://clawhub.ai/ppiankov/noisepan-digest",
    "canonicalUrl": "https://clawhub.ai/ppiankov/noisepan-digest",
    "targetPlatform": "OpenClaw"
  },
  "install": {
    "downloadMode": "redirect",
    "downloadUrl": "/downloads/noisepan-digest",
    "sourceDownloadUrl": "https://wry-manatee-359.convex.site/api/v1/download?slug=noisepan-digest",
    "sourcePlatform": "tencent",
    "targetPlatform": "OpenClaw",
    "installMethod": "Manual import",
    "extraction": "Extract archive",
    "prerequisites": [
      "OpenClaw"
    ],
    "packageFormat": "ZIP package",
    "includedAssets": [
      "SKILL.md"
    ],
    "primaryDoc": "SKILL.md",
    "quickSetup": [
      "Download the package from Yavira.",
      "Extract the archive and review SKILL.md first.",
      "Import or place the package into your OpenClaw setup."
    ],
    "agentAssist": {
      "summary": "Hand the extracted package to your coding agent with a concrete install brief instead of figuring it out manually.",
      "steps": [
        "Download the package from Yavira.",
        "Extract it into a folder your agent can access.",
        "Paste one of the prompts below and point your agent at the extracted folder."
      ],
      "prompts": [
        {
          "label": "New install",
          "body": "I downloaded a skill package from Yavira. Read SKILL.md from the extracted folder and install it by following the included instructions. Tell me what you changed and call out any manual steps you could not complete."
        },
        {
          "label": "Upgrade existing",
          "body": "I downloaded an updated skill package from Yavira. Read SKILL.md from the extracted folder, compare it with my current installation, and upgrade it while preserving any custom configuration unless the package docs explicitly say otherwise. Summarize what changed and any follow-up checks I should run."
        }
      ]
    },
    "sourceHealth": {
      "source": "tencent",
      "status": "healthy",
      "reason": "direct_download_ok",
      "recommendedAction": "download",
      "checkedAt": "2026-04-30T16:55:25.780Z",
      "expiresAt": "2026-05-07T16:55:25.780Z",
      "httpStatus": 200,
      "finalUrl": "https://wry-manatee-359.convex.site/api/v1/download?slug=noisepan-digest",
      "contentType": "application/zip",
      "probeMethod": "head",
      "details": {
        "probeUrl": "https://wry-manatee-359.convex.site/api/v1/download?slug=noisepan-digest",
        "contentDisposition": "attachment; filename=\"noisepan-digest-1.0.4.zip\"",
        "redirectLocation": null,
        "bodySnippet": null
      },
      "scope": "source",
      "summary": "Source download looks usable.",
      "detail": "Yavira can redirect you to the upstream package for this source.",
      "primaryActionLabel": "Download for OpenClaw",
      "primaryActionHref": "/downloads/noisepan-digest"
    },
    "validation": {
      "installChecklist": [
        "Use the Yavira download entry.",
        "Review SKILL.md after the package is downloaded.",
        "Confirm the extracted package contains the expected setup assets."
      ],
      "postInstallChecks": [
        "Confirm the extracted package includes the expected docs or setup files.",
        "Validate the skill or prompts are available in your target agent workspace.",
        "Capture any manual follow-up steps the agent could not complete."
      ]
    },
    "downloadPageUrl": "https://openagent3.xyz/downloads/noisepan-digest",
    "agentPageUrl": "https://openagent3.xyz/skills/noisepan-digest/agent",
    "manifestUrl": "https://openagent3.xyz/skills/noisepan-digest/agent.json",
    "briefUrl": "https://openagent3.xyz/skills/noisepan-digest/agent.md"
  },
  "agentAssist": {
    "summary": "Hand the extracted package to your coding agent with a concrete install brief instead of figuring it out manually.",
    "steps": [
      "Download the package from Yavira.",
      "Extract it into a folder your agent can access.",
      "Paste one of the prompts below and point your agent at the extracted folder."
    ],
    "prompts": [
      {
        "label": "New install",
        "body": "I downloaded a skill package from Yavira. Read SKILL.md from the extracted folder and install it by following the included instructions. Tell me what you changed and call out any manual steps you could not complete."
      },
      {
        "label": "Upgrade existing",
        "body": "I downloaded an updated skill package from Yavira. Read SKILL.md from the extracted folder, compare it with my current installation, and upgrade it while preserving any custom configuration unless the package docs explicitly say otherwise. Summarize what changed and any follow-up checks I should run."
      }
    ]
  },
  "documentation": {
    "source": "clawhub",
    "primaryDoc": "SKILL.md",
    "sections": [
      {
        "title": "Noisepan Digest",
        "body": "Automated news intelligence with source verification. Replaces doomscrolling with two daily digests.\n\nSources:\n\nhttps://github.com/ppiankov/noisepan (signal extraction)\nhttps://github.com/ppiankov/entropia (source verification)\n\nRequires: noisepan, entropia, python3, curl"
      },
      {
        "title": "macOS (Homebrew — recommended)",
        "body": "brew install ppiankov/tap/noisepan ppiankov/tap/entropia\nnoisepan version && entropia version"
      },
      {
        "title": "Linux (binary + checksum verification)",
        "body": "Download, verify checksums, then install. Ask the user before writing to /usr/local/bin — offer ~/bin as an alternative if they prefer user-local install.\n\n# noisepan\nVER=$(curl -s https://api.github.com/repos/ppiankov/noisepan/releases/latest | grep tag_name | cut -d'\"' -f4 | tr -d v)\ncurl -fsSL \"https://github.com/ppiankov/noisepan/releases/download/v${VER}/noisepan_${VER}_linux_amd64.tar.gz\" -o /tmp/noisepan.tar.gz\ncurl -fsSL \"https://github.com/ppiankov/noisepan/releases/download/v${VER}/checksums.txt\" -o /tmp/noisepan-checksums.txt\n# Verify checksum\ngrep linux_amd64 /tmp/noisepan-checksums.txt | (cd /tmp && sha256sum -c)\ntar xzf /tmp/noisepan.tar.gz -C /usr/local/bin noisepan\nrm /tmp/noisepan.tar.gz /tmp/noisepan-checksums.txt\n\n# entropia\nVER=$(curl -s https://api.github.com/repos/ppiankov/entropia/releases/latest | grep tag_name | cut -d'\"' -f4 | tr -d v)\ncurl -fsSL \"https://github.com/ppiankov/entropia/releases/download/v${VER}/entropia_${VER}_linux_amd64.tar.gz\" -o /tmp/entropia.tar.gz\ncurl -fsSL \"https://github.com/ppiankov/entropia/releases/download/v${VER}/checksums.txt\" -o /tmp/entropia-checksums.txt\n# Verify checksum\ngrep linux_amd64 /tmp/entropia-checksums.txt | (cd /tmp && sha256sum -c)\ntar xzf /tmp/entropia.tar.gz -C /usr/local/bin entropia\nrm /tmp/entropia.tar.gz /tmp/entropia-checksums.txt\n\n# Verify both\nnoisepan version && entropia version"
      },
      {
        "title": "Init",
        "body": "noisepan init --config ~/.noisepan\n# Verify entropia is detected\nnoisepan doctor --config ~/.noisepan"
      },
      {
        "title": "Configure Feeds",
        "body": "Edit ~/.noisepan/config.yaml. Recommended structure:\n\nsources:\n  hn:\n    min_points: 200    # Native HN via Firebase API\n\n  rss:\n    feeds:\n      # Security\n      - \"https://www.reddit.com/r/netsec/.rss\"\n      - \"https://krebsonsecurity.com/feed/\"\n      - \"https://www.bleepingcomputer.com/feed/\"\n      - \"https://feeds.feedburner.com/TheHackersNews\"\n      - \"https://www.cisa.gov/cybersecurity-advisories/all.xml\"\n\n      # DevOps\n      - \"https://www.reddit.com/r/devops/.rss\"\n      - \"https://www.reddit.com/r/kubernetes/.rss\"\n      - \"https://blog.cloudflare.com/rss/\"\n\n      # AI/LLM\n      - \"https://www.reddit.com/r/LocalLLaMA/.rss\"\n      - \"https://simonwillison.net/atom/everything/\"\n      - \"https://arxiv.org/rss/cs.AI\"\n\n      # Status pages\n      - \"https://status.aws.amazon.com/rss/all.rss\"\n      - \"https://www.cloudflarestatus.com/history.rss\"\n\n      # World / Policy\n      - \"https://feeds.bbci.co.uk/news/world/rss.xml\"\n      - \"https://www.eff.org/rss/updates.xml\"\n\n      # Aggregators\n      - \"https://lobste.rs/rss\"\n      - \"https://changelog.com/news/feed\"\n\nCustomize for your interests. Run noisepan doctor after adding feeds."
      },
      {
        "title": "Taste Profile",
        "body": "Edit ~/.noisepan/taste.yaml. Key categories:\n\nHigh signal (3-5): CVE, zero-day, breach, RCE, supply chain, outage, postmortem, safety pledge, data sovereignty, antitrust, military AI, deanonymization, prompt injection, breaking change\n\nLow signal (-3 to -5): hiring, webinar, sponsor, newsletter, meme, career advice, celebrity\n\nKey lesson: Without policy/sovereignty/antitrust/AI safety keywords, real stories get buried under security noise. Weight these as high as CVEs."
      },
      {
        "title": "Reddit Rate Limiting",
        "body": "With 15+ Reddit feeds, parallel fetching triggers 429s. Create a sequential prefetch wrapper:\n\ncat > ~/.local/bin/noisepan-pull << 'SCRIPT'\n#!/bin/bash\n# Prefetch Reddit RSS sequentially to avoid rate limiting, then run noisepan pull\nCACHE_DIR=\"/tmp/reddit-rss-cache\"\nCONFIG_DIR=\"${HOME}/.noisepan\"\nUA=\"Mozilla/5.0 (compatible; noisepan/1.0)\"\n\nmkdir -p \"$CACHE_DIR\"\nFEEDS=$(grep \"reddit.com\" \"$CONFIG_DIR/config.yaml\" | grep -v \"^#\" | grep -v \"^      #\" | sed 's/.*\"\\(.*\\)\"/\\1/')\n\nfor feed in $FEEDS; do\n    sub=$(echo \"$feed\" | grep -oP '/r/\\K[^/]+')\n    curl -s -o \"$CACHE_DIR/${sub}.xml\" -H \"User-Agent: $UA\" \"$feed\"\n    sleep 2\ndone\n\npython3 -m http.server 18222 --directory \"$CACHE_DIR\" &>/dev/null &\nHTTP_PID=$!; sleep 0.5\n\nmkdir -p /tmp/noisepan-tmp\ncp \"$CONFIG_DIR/config.yaml\" /tmp/noisepan-tmp/config.yaml\nfor feed in $FEEDS; do\n    sub=$(echo \"$feed\" | grep -oP '/r/\\K[^/]+')\n    sed -i \"s|$feed|http://localhost:18222/${sub}.xml|\" /tmp/noisepan-tmp/config.yaml\ndone\nln -sf \"$CONFIG_DIR/taste.yaml\" /tmp/noisepan-tmp/taste.yaml\nln -sf \"$CONFIG_DIR/noisepan.db\" /tmp/noisepan-tmp/noisepan.db\n\nnoisepan pull --config /tmp/noisepan-tmp \"$@\"\nkill $HTTP_PID 2>/dev/null; rm -rf /tmp/noisepan-tmp\nSCRIPT\nmkdir -p ~/.local/bin && chmod +x ~/.local/bin/noisepan-pull\n\nUse noisepan-pull instead of noisepan pull when you have 15+ Reddit feeds."
      },
      {
        "title": "HN Blind Spot Script",
        "body": "Optional — catches high-engagement HN stories that keyword scoring misses. Useful as a cross-check alongside noisepan's native HN source.\n\ncat > ~/.local/bin/hn-top << 'SCRIPT'\n#!/bin/bash\nMIN_POINTS=${1:-200}\ncurl -s \"https://hacker-news.firebaseio.com/v0/topstories.json\" | \\\npython3 -c \"\nimport json, sys, urllib.request, time\nids = json.load(sys.stdin)[:30]\nmin_pts = int(sys.argv[1]) if len(sys.argv) > 1 else 200\nfor id in ids:\n    try:\n        with urllib.request.urlopen(f'https://hacker-news.firebaseio.com/v0/item/{id}.json') as r:\n            item = json.loads(r.read())\n            if item and item.get('score', 0) >= min_pts:\n                print(f'[{item[\\\"score\\\"]:4d}pts | {item.get(\\\"descendants\\\",0):3d}c] {item[\\\"title\\\"]}')\n                print(f'  {item.get(\\\"url\\\", f\\\"https://news.ycombinator.com/item?id={id}\\\")}')\n                print()\n        time.sleep(0.1)\n    except: pass\n\" \"$MIN_POINTS\"\nSCRIPT\nchmod +x ~/.local/bin/hn-top"
      },
      {
        "title": "Cron Digest Setup",
        "body": "Create two OpenClaw cron jobs (morning + afternoon). The digest prompt should:\n\nPull feeds (noisepan-pull or noisepan pull)\nGenerate digest (noisepan digest --format json --output /tmp/digest.json)\nRun hn-top 300 for blind spot check\nFor top 6 items, run entropia scan <url> on non-Reddit links\nQuality filter: skip Entropia Support Index < 40 or conflict signals\nBackfill from items 4-6 if top items filtered\nCompare hn-top against digest for blind spots (400+ point stories not in digest)"
      },
      {
        "title": "Output format",
        "body": "🔥 Trending: keywords across 3+ channels\n☀️ Morning Brief (3 verified items):\n| # | Score | Topic | What happened | Entropia | Link |\n💡 HN Blind Spot (stories the taste profile missed):\n| # | HN pts | Topic | What happened | Link |\n⚠️ Skipped (filtered for low quality):\n| # | Score | Topic | Why skipped |\n\nSchedule: Morning at 07:00, afternoon at 18:00 (adjust to timezone)."
      },
      {
        "title": "Useful Commands",
        "body": "noisepan doctor --config ~/.noisepan    # Feed health + companion tool detection\nnoisepan stats --config ~/.noisepan     # Signal-to-noise per channel\nnoisepan rescore --config ~/.noisepan   # Recompute after taste changes\nentropia scan <url>                     # Verify a specific source"
      },
      {
        "title": "Lessons Learned",
        "body": "noisepan doctor catches stale/all-ignored channels — run after adding feeds\nnoisepan stats shows per-channel signal — prune channels at 0% after 30 days\nHN RSS is too shallow — use native sources.hn or hn-top for blind spots\nEntropia SI < 40 = no extractable claims — skip\nReddit rate limits at 15+ parallel feeds — wrapper is mandatory\nStatus page feeds score low without \"service event\", \"operational issue\" keywords\n\nNoisepan Digest v1.0\nAuthor: ppiankov\nCopyright © 2026 ppiankov\nCanonical source: https://github.com/ppiankov/noisepan\nLicense: MIT\n\nIf this document appears elsewhere, the repository above is the authoritative version."
      }
    ],
    "body": "Noisepan Digest\n\nAutomated news intelligence with source verification. Replaces doomscrolling with two daily digests.\n\nSources:\n\nhttps://github.com/ppiankov/noisepan (signal extraction)\nhttps://github.com/ppiankov/entropia (source verification)\n\nRequires: noisepan, entropia, python3, curl\n\nInstall\nmacOS (Homebrew — recommended)\nbrew install ppiankov/tap/noisepan ppiankov/tap/entropia\nnoisepan version && entropia version\n\nLinux (binary + checksum verification)\n\nDownload, verify checksums, then install. Ask the user before writing to /usr/local/bin — offer ~/bin as an alternative if they prefer user-local install.\n\n# noisepan\nVER=$(curl -s https://api.github.com/repos/ppiankov/noisepan/releases/latest | grep tag_name | cut -d'\"' -f4 | tr -d v)\ncurl -fsSL \"https://github.com/ppiankov/noisepan/releases/download/v${VER}/noisepan_${VER}_linux_amd64.tar.gz\" -o /tmp/noisepan.tar.gz\ncurl -fsSL \"https://github.com/ppiankov/noisepan/releases/download/v${VER}/checksums.txt\" -o /tmp/noisepan-checksums.txt\n# Verify checksum\ngrep linux_amd64 /tmp/noisepan-checksums.txt | (cd /tmp && sha256sum -c)\ntar xzf /tmp/noisepan.tar.gz -C /usr/local/bin noisepan\nrm /tmp/noisepan.tar.gz /tmp/noisepan-checksums.txt\n\n# entropia\nVER=$(curl -s https://api.github.com/repos/ppiankov/entropia/releases/latest | grep tag_name | cut -d'\"' -f4 | tr -d v)\ncurl -fsSL \"https://github.com/ppiankov/entropia/releases/download/v${VER}/entropia_${VER}_linux_amd64.tar.gz\" -o /tmp/entropia.tar.gz\ncurl -fsSL \"https://github.com/ppiankov/entropia/releases/download/v${VER}/checksums.txt\" -o /tmp/entropia-checksums.txt\n# Verify checksum\ngrep linux_amd64 /tmp/entropia-checksums.txt | (cd /tmp && sha256sum -c)\ntar xzf /tmp/entropia.tar.gz -C /usr/local/bin entropia\nrm /tmp/entropia.tar.gz /tmp/entropia-checksums.txt\n\n# Verify both\nnoisepan version && entropia version\n\nInit\nnoisepan init --config ~/.noisepan\n# Verify entropia is detected\nnoisepan doctor --config ~/.noisepan\n\nConfigure Feeds\n\nEdit ~/.noisepan/config.yaml. Recommended structure:\n\nsources:\n  hn:\n    min_points: 200    # Native HN via Firebase API\n\n  rss:\n    feeds:\n      # Security\n      - \"https://www.reddit.com/r/netsec/.rss\"\n      - \"https://krebsonsecurity.com/feed/\"\n      - \"https://www.bleepingcomputer.com/feed/\"\n      - \"https://feeds.feedburner.com/TheHackersNews\"\n      - \"https://www.cisa.gov/cybersecurity-advisories/all.xml\"\n\n      # DevOps\n      - \"https://www.reddit.com/r/devops/.rss\"\n      - \"https://www.reddit.com/r/kubernetes/.rss\"\n      - \"https://blog.cloudflare.com/rss/\"\n\n      # AI/LLM\n      - \"https://www.reddit.com/r/LocalLLaMA/.rss\"\n      - \"https://simonwillison.net/atom/everything/\"\n      - \"https://arxiv.org/rss/cs.AI\"\n\n      # Status pages\n      - \"https://status.aws.amazon.com/rss/all.rss\"\n      - \"https://www.cloudflarestatus.com/history.rss\"\n\n      # World / Policy\n      - \"https://feeds.bbci.co.uk/news/world/rss.xml\"\n      - \"https://www.eff.org/rss/updates.xml\"\n\n      # Aggregators\n      - \"https://lobste.rs/rss\"\n      - \"https://changelog.com/news/feed\"\n\n\nCustomize for your interests. Run noisepan doctor after adding feeds.\n\nTaste Profile\n\nEdit ~/.noisepan/taste.yaml. Key categories:\n\nHigh signal (3-5): CVE, zero-day, breach, RCE, supply chain, outage, postmortem, safety pledge, data sovereignty, antitrust, military AI, deanonymization, prompt injection, breaking change\n\nLow signal (-3 to -5): hiring, webinar, sponsor, newsletter, meme, career advice, celebrity\n\nKey lesson: Without policy/sovereignty/antitrust/AI safety keywords, real stories get buried under security noise. Weight these as high as CVEs.\n\nReddit Rate Limiting\n\nWith 15+ Reddit feeds, parallel fetching triggers 429s. Create a sequential prefetch wrapper:\n\ncat > ~/.local/bin/noisepan-pull << 'SCRIPT'\n#!/bin/bash\n# Prefetch Reddit RSS sequentially to avoid rate limiting, then run noisepan pull\nCACHE_DIR=\"/tmp/reddit-rss-cache\"\nCONFIG_DIR=\"${HOME}/.noisepan\"\nUA=\"Mozilla/5.0 (compatible; noisepan/1.0)\"\n\nmkdir -p \"$CACHE_DIR\"\nFEEDS=$(grep \"reddit.com\" \"$CONFIG_DIR/config.yaml\" | grep -v \"^#\" | grep -v \"^      #\" | sed 's/.*\"\\(.*\\)\"/\\1/')\n\nfor feed in $FEEDS; do\n    sub=$(echo \"$feed\" | grep -oP '/r/\\K[^/]+')\n    curl -s -o \"$CACHE_DIR/${sub}.xml\" -H \"User-Agent: $UA\" \"$feed\"\n    sleep 2\ndone\n\npython3 -m http.server 18222 --directory \"$CACHE_DIR\" &>/dev/null &\nHTTP_PID=$!; sleep 0.5\n\nmkdir -p /tmp/noisepan-tmp\ncp \"$CONFIG_DIR/config.yaml\" /tmp/noisepan-tmp/config.yaml\nfor feed in $FEEDS; do\n    sub=$(echo \"$feed\" | grep -oP '/r/\\K[^/]+')\n    sed -i \"s|$feed|http://localhost:18222/${sub}.xml|\" /tmp/noisepan-tmp/config.yaml\ndone\nln -sf \"$CONFIG_DIR/taste.yaml\" /tmp/noisepan-tmp/taste.yaml\nln -sf \"$CONFIG_DIR/noisepan.db\" /tmp/noisepan-tmp/noisepan.db\n\nnoisepan pull --config /tmp/noisepan-tmp \"$@\"\nkill $HTTP_PID 2>/dev/null; rm -rf /tmp/noisepan-tmp\nSCRIPT\nmkdir -p ~/.local/bin && chmod +x ~/.local/bin/noisepan-pull\n\n\nUse noisepan-pull instead of noisepan pull when you have 15+ Reddit feeds.\n\nHN Blind Spot Script\n\nOptional — catches high-engagement HN stories that keyword scoring misses. Useful as a cross-check alongside noisepan's native HN source.\n\ncat > ~/.local/bin/hn-top << 'SCRIPT'\n#!/bin/bash\nMIN_POINTS=${1:-200}\ncurl -s \"https://hacker-news.firebaseio.com/v0/topstories.json\" | \\\npython3 -c \"\nimport json, sys, urllib.request, time\nids = json.load(sys.stdin)[:30]\nmin_pts = int(sys.argv[1]) if len(sys.argv) > 1 else 200\nfor id in ids:\n    try:\n        with urllib.request.urlopen(f'https://hacker-news.firebaseio.com/v0/item/{id}.json') as r:\n            item = json.loads(r.read())\n            if item and item.get('score', 0) >= min_pts:\n                print(f'[{item[\\\"score\\\"]:4d}pts | {item.get(\\\"descendants\\\",0):3d}c] {item[\\\"title\\\"]}')\n                print(f'  {item.get(\\\"url\\\", f\\\"https://news.ycombinator.com/item?id={id}\\\")}')\n                print()\n        time.sleep(0.1)\n    except: pass\n\" \"$MIN_POINTS\"\nSCRIPT\nchmod +x ~/.local/bin/hn-top\n\nCron Digest Setup\n\nCreate two OpenClaw cron jobs (morning + afternoon). The digest prompt should:\n\nPull feeds (noisepan-pull or noisepan pull)\nGenerate digest (noisepan digest --format json --output /tmp/digest.json)\nRun hn-top 300 for blind spot check\nFor top 6 items, run entropia scan <url> on non-Reddit links\nQuality filter: skip Entropia Support Index < 40 or conflict signals\nBackfill from items 4-6 if top items filtered\nCompare hn-top against digest for blind spots (400+ point stories not in digest)\nOutput format\n🔥 Trending: keywords across 3+ channels\n☀️ Morning Brief (3 verified items):\n| # | Score | Topic | What happened | Entropia | Link |\n💡 HN Blind Spot (stories the taste profile missed):\n| # | HN pts | Topic | What happened | Link |\n⚠️ Skipped (filtered for low quality):\n| # | Score | Topic | Why skipped |\n\n\nSchedule: Morning at 07:00, afternoon at 18:00 (adjust to timezone).\n\nUseful Commands\nnoisepan doctor --config ~/.noisepan    # Feed health + companion tool detection\nnoisepan stats --config ~/.noisepan     # Signal-to-noise per channel\nnoisepan rescore --config ~/.noisepan   # Recompute after taste changes\nentropia scan <url>                     # Verify a specific source\n\nLessons Learned\nnoisepan doctor catches stale/all-ignored channels — run after adding feeds\nnoisepan stats shows per-channel signal — prune channels at 0% after 30 days\nHN RSS is too shallow — use native sources.hn or hn-top for blind spots\nEntropia SI < 40 = no extractable claims — skip\nReddit rate limits at 15+ parallel feeds — wrapper is mandatory\nStatus page feeds score low without \"service event\", \"operational issue\" keywords\n\nNoisepan Digest v1.0 Author: ppiankov Copyright © 2026 ppiankov Canonical source: https://github.com/ppiankov/noisepan License: MIT\n\nIf this document appears elsewhere, the repository above is the authoritative version."
  },
  "trust": {
    "sourceLabel": "tencent",
    "provenanceUrl": "https://clawhub.ai/ppiankov/noisepan-digest",
    "publisherUrl": "https://clawhub.ai/ppiankov/noisepan-digest",
    "owner": "ppiankov",
    "version": "1.0.4",
    "license": "MIT",
    "verificationStatus": "Indexed source record"
  },
  "links": {
    "detailUrl": "https://openagent3.xyz/skills/noisepan-digest",
    "downloadUrl": "https://openagent3.xyz/downloads/noisepan-digest",
    "agentUrl": "https://openagent3.xyz/skills/noisepan-digest/agent",
    "manifestUrl": "https://openagent3.xyz/skills/noisepan-digest/agent.json",
    "briefUrl": "https://openagent3.xyz/skills/noisepan-digest/agent.md"
  }
}