{
  "schemaVersion": "1.0",
  "item": {
    "slug": "verify-claims",
    "name": "Verify Claims",
    "source": "tencent",
    "type": "skill",
    "category": "效率提升",
    "sourceUrl": "https://clawhub.ai/asgraf/verify-claims",
    "canonicalUrl": "https://clawhub.ai/asgraf/verify-claims",
    "targetPlatform": "OpenClaw"
  },
  "install": {
    "downloadMode": "redirect",
    "downloadUrl": "/downloads/verify-claims",
    "sourceDownloadUrl": "https://wry-manatee-359.convex.site/api/v1/download?slug=verify-claims",
    "sourcePlatform": "tencent",
    "targetPlatform": "OpenClaw",
    "installMethod": "Manual import",
    "extraction": "Extract archive",
    "prerequisites": [
      "OpenClaw"
    ],
    "packageFormat": "ZIP package",
    "includedAssets": [
      "SKILL.md"
    ],
    "primaryDoc": "SKILL.md",
    "quickSetup": [
      "Download the package from Yavira.",
      "Extract the archive and review SKILL.md first.",
      "Import or place the package into your OpenClaw setup."
    ],
    "agentAssist": {
      "summary": "Hand the extracted package to your coding agent with a concrete install brief instead of figuring it out manually.",
      "steps": [
        "Download the package from Yavira.",
        "Extract it into a folder your agent can access.",
        "Paste one of the prompts below and point your agent at the extracted folder."
      ],
      "prompts": [
        {
          "label": "New install",
          "body": "I downloaded a skill package from Yavira. Read SKILL.md from the extracted folder and install it by following the included instructions. Tell me what you changed and call out any manual steps you could not complete."
        },
        {
          "label": "Upgrade existing",
          "body": "I downloaded an updated skill package from Yavira. Read SKILL.md from the extracted folder, compare it with my current installation, and upgrade it while preserving any custom configuration unless the package docs explicitly say otherwise. Summarize what changed and any follow-up checks I should run."
        }
      ]
    },
    "sourceHealth": {
      "source": "tencent",
      "status": "healthy",
      "reason": "direct_download_ok",
      "recommendedAction": "download",
      "checkedAt": "2026-04-30T16:55:25.780Z",
      "expiresAt": "2026-05-07T16:55:25.780Z",
      "httpStatus": 200,
      "finalUrl": "https://wry-manatee-359.convex.site/api/v1/download?slug=network",
      "contentType": "application/zip",
      "probeMethod": "head",
      "details": {
        "probeUrl": "https://wry-manatee-359.convex.site/api/v1/download?slug=network",
        "contentDisposition": "attachment; filename=\"network-1.0.0.zip\"",
        "redirectLocation": null,
        "bodySnippet": null
      },
      "scope": "source",
      "summary": "Source download looks usable.",
      "detail": "Yavira can redirect you to the upstream package for this source.",
      "primaryActionLabel": "Download for OpenClaw",
      "primaryActionHref": "/downloads/verify-claims"
    },
    "validation": {
      "installChecklist": [
        "Use the Yavira download entry.",
        "Review SKILL.md after the package is downloaded.",
        "Confirm the extracted package contains the expected setup assets."
      ],
      "postInstallChecks": [
        "Confirm the extracted package includes the expected docs or setup files.",
        "Validate the skill or prompts are available in your target agent workspace.",
        "Capture any manual follow-up steps the agent could not complete."
      ]
    },
    "downloadPageUrl": "https://openagent3.xyz/downloads/verify-claims",
    "agentPageUrl": "https://openagent3.xyz/skills/verify-claims/agent",
    "manifestUrl": "https://openagent3.xyz/skills/verify-claims/agent.json",
    "briefUrl": "https://openagent3.xyz/skills/verify-claims/agent.md"
  },
  "agentAssist": {
    "summary": "Hand the extracted package to your coding agent with a concrete install brief instead of figuring it out manually.",
    "steps": [
      "Download the package from Yavira.",
      "Extract it into a folder your agent can access.",
      "Paste one of the prompts below and point your agent at the extracted folder."
    ],
    "prompts": [
      {
        "label": "New install",
        "body": "I downloaded a skill package from Yavira. Read SKILL.md from the extracted folder and install it by following the included instructions. Tell me what you changed and call out any manual steps you could not complete."
      },
      {
        "label": "Upgrade existing",
        "body": "I downloaded an updated skill package from Yavira. Read SKILL.md from the extracted folder, compare it with my current installation, and upgrade it while preserving any custom configuration unless the package docs explicitly say otherwise. Summarize what changed and any follow-up checks I should run."
      }
    ]
  },
  "documentation": {
    "source": "clawhub",
    "primaryDoc": "SKILL.md",
    "sections": [
      {
        "title": "Fact-Checking Skill",
        "body": "Verify claims and information using professional fact-checking services from around the world."
      },
      {
        "title": "Core Principles",
        "body": "Multiple sources - Cross-reference findings from several fact-checking organizations\nRegional relevance - Prioritize fact-checkers appropriate to the content's context\nLanguage matching - Use fact-checkers in the native language of the content when possible\nCredible sources only - Never use fraudulent or unreliable fact-checking services\nBalanced presentation - Present both confirming and contradicting findings fairly"
      },
      {
        "title": "When to Use This Skill",
        "body": "Trigger this skill when the user:\n\nExplicitly asks to fact-check, verify, or validate information\nShares an article, video transcript, or claim and asks \"is this true?\"\nWants to check if something is misinformation or a hoax\nAsks about the credibility of specific claims or statements\nRequests verification of news, social media posts, or viral content\nWants to cross-reference information with trusted sources\n\nDo NOT trigger for:\n\nGeneral research or information gathering (use web search instead)\nChecking grammar, spelling, or writing quality\nVerifying code functionality or technical documentation\nQuestions about opinions rather than factual claims"
      },
      {
        "title": "Step 1: Understand the Content",
        "body": "Before beginning verification, analyze what needs to be checked:\n\nIdentify specific claims - Extract concrete, verifiable statements from the content\nNote the context - Identify:\n\nGeographic references (countries, regions, cities)\nNamed individuals (politicians, public figures, organizations)\nLanguages used in the content\nTime period or dates mentioned\nSubject matter (politics, health, science, etc.)\n\n\nDetermine user context:\n\nUser's native language (for selecting appropriate fact-checkers)\nUser's location if relevant\n\nExample Analysis:\n\nContent: \"Video claiming vaccines cause autism, mentions Andrew Wakefield, references UK study\"\nClaims to verify: Vaccine-autism link, Wakefield's research\nContext: Medical/health topic, UK origin, English language\nKey entities: Andrew Wakefield, MMR vaccine, UK medical establishment"
      },
      {
        "title": "Step 2: Select Fact-Checking Services",
        "body": "CRITICAL: Begin by fetching the current list of fact-checking services:\n\nFetch: https://en.wikipedia.org/wiki/List_of_fact-checking_websites\n\nFrom this list, select 3-7 relevant fact-checking services based on:\n\nSelection Criteria\n\nUser's language/location - Always include fact-checkers in the user's native language\n\n\nContent language/location - If different from user's language, also include fact-checkers in the content's language and region\n\n\nGeographic relevance - If content mentions specific countries/regions:\n\nInclude fact-checkers from those countries\nExample: Content about French politics → include French fact-checkers\n\n\n\nSubject matter specialists - Some fact-checkers specialize:\n\nHealth/medical claims → Health Feedback, Science Feedback\nPolitics → country-specific political fact-checkers\nGeneral → Snopes, FactCheck.org, Full Fact\n\n\n\nPerson-specific - If content focuses on specific public figures:\n\nInclude fact-checkers from their home countries\nExample: Claims about a US politician → include US fact-checkers\n\nExclusion Rule\n\nNEVER use services listed under \"Fraudulent fact-checking websites\" on the Wikipedia page, regardless of how well they match other criteria.\n\nPrioritization\n\nWhen you must limit selections:\n\nPrioritize: User's language > Content's language > Geographic relevance\nPrefer well-established services (FactCheck.org, Snopes, Full Fact, AFP Fact Check, etc.)\nInclude at least one international/general service\n\nExample Selection:\n\nUser: Polish speaker\nContent: English article about US vaccines\nSelected services:\n\nDemagog.pl (Polish, for user)\nFactCheck.org (US, for content geography)\nSnopes (US, general/medical)\nHealth Feedback (health specialist)\nFull Fact (UK, English-speaking, general)"
      },
      {
        "title": "Step 3: Search Each Fact-Checking Service",
        "body": "For each selected service, conduct targeted searches:\n\nSearch Strategy\n\nExtract 2-4 search terms from the content:\n\nKey person names\nMain topics/subjects\nSpecific claims or events\nImportant keywords\n\n\n\nTranslate terms to the fact-checker's native language if needed\n\n\nConstruct search queries using DuckDuckGo with site operator:\nFormat: site:domain.com [search terms in appropriate language]\n\nExamples:\n- site:fullfact.org vaccines autism\n- site:demagog.org.pl szczepionki autyzm\n- site:factcheck.org Andrew Wakefield MMR\n- site:healthfeedback.org vaccine safety\n\n\n\nExecute 1-3 searches per fact-checker (depending on content complexity)\n\nSearch Best Practices\n\nKeep queries concise (2-4 words typically)\nStart broad, then narrow if needed\nDon't repeat very similar queries\nIf first search yields good results, proceed to analysis\nIf first search yields poor results, try alternative terms"
      },
      {
        "title": "Step 4: Analyze Search Results",
        "body": "For each fact-checking service:\n\nReview search results - Examine the first 5-10 results from each search\n\n\nSelect relevant articles - Choose articles where:\n\nHeadline directly addresses the claim being verified\nContent appears substantial (not just brief mentions)\nPublication date is relevant (recent for ongoing issues, any date for historical debunks)\n\n\n\nFetch and read articles - Use web_fetch to retrieve the full text of 2-4 most relevant articles per fact-checker\n\n\nExtract key findings for each article:\n\nVerdict - What did the fact-checker conclude? (True, False, Misleading, Mixed, Unproven, etc.)\nEvidence - What evidence did they cite?\nContext - Any important nuance or context\nRelevance - How directly does this address the user's claim?"
      },
      {
        "title": "Step 5: Synthesize and Present Results",
        "body": "Organize findings into a clear, user-friendly format:\n\nHandle Fresh Content First\n\nBefore presenting results, check if the content is very recent (3 days old or less):\n\nIf fact-checks found: Proceed normally with presentation\n\n\nIf no fact-checks found AND content is ≤3 days old:\n\nNote that the content is too fresh for fact-checkers to have covered it yet\nIf task scheduling is available:\n\nSchedule a follow-up fact-check for 3 days from now\nInform user: \"I've scheduled a follow-up check for [date]. I'll notify you if fact-checkers have published verification by then.\"\n\n\nIf task scheduling is NOT available:\n\nSuggest: \"This content is very recent (published [date]). Fact-checkers typically need a few days to verify claims. I recommend checking back in 3 days for updated verification.\"\n\n\nOffer preliminary analysis using general web search\nProceed with any available information from general sources\n\n\n\nIf no fact-checks found AND content is older:\n\nNote that fact-checkers haven't specifically covered this\nOffer general web research instead\n\nStructure Your Response\n\nOpening summary (2-3 sentences)\n\nOverall consensus from fact-checkers\nBrief answer to the user's question\n\n\n\nKey findings by claim (if multiple claims)\n\nGroup related findings together\nPresent contradicting evidence if it exists\n\n\n\nDetailed evidence (organized by fact-checker or by claim)\n\nInclude specific verdicts\nCite evidence fact-checkers used\nNote any disagreements between fact-checkers\n\n\n\nImportant context (if relevant)\n\nHistorical background\nWhy the claim persists\nCommon misconceptions\n\n\n\nSource citations\n\nProvide direct links to all fact-checking articles referenced\nFormat: [Fact-Checker Name]: Article Title (Date if available) - [URL]\n\nPresentation Guidelines\n\nBe objective - Present findings without inserting personal judgment\nBe nuanced - Avoid oversimplifying complex issues\nBe clear about uncertainty - If 
fact-checkers disagree or evidence is inconclusive, say so\nBe balanced - If some evidence supports and some contradicts, present both\nUse accessible language - Avoid jargon, explain technical terms\nHighlight consensus - When multiple fact-checkers agree, emphasize this\n\nFormatting\n\nUse clear headers to organize different claims or themes\nUse natural prose, not bullet points, for the main findings\nOnly use lists for: multiple similar items, source citations, or when explicitly helpful\nInclude clickable citations throughout (not just at the end)\n\nExample Response Structure\n\nBased on verification from five established fact-checking organizations, the claim that vaccines cause autism has been thoroughly debunked. Multiple independent reviews of the evidence have found no causal link between vaccination and autism spectrum disorder.\n\nThe origins of this claim trace back to a fraudulent 1998 study by Andrew Wakefield, which was later retracted by The Lancet. Fact-checkers consistently note that Wakefield lost his medical license, and subsequent large-scale studies involving millions of children have found no connection.\n\n[Full Fact reviewed the evidence in 2023](link), concluding \"There is no link between the MMR vaccine and autism.\" Their analysis examined 12 major studies and found consistent results across different populations and methodologies.\n\n[FactCheck.org's comprehensive analysis](link) explains that \"The myth persists despite overwhelming scientific consensus against it\" and details how the original study was not only retracted but shown to involve falsified data.\n\nHowever, [Demagog.pl](link) notes that while the vaccine-autism link is false, concerns about vaccine safety in general are legitimate and should be addressed through proper scientific channels rather than dismissed.\n\n**Important context**: The persistence of this myth has real public health consequences, as fact-checkers note declining vaccination rates in some 
communities. Understanding why the claim was debunked helps address ongoing concerns.\n\n**Sources consulted:**\n- Full Fact: \"MMR vaccine does not cause autism\" - [link]\n- FactCheck.org: \"Wakefield's Fraudulent Research\" - [link]  \n- Snopes: \"Vaccines and Autism\" - [link]\n- Demagog.pl: \"Szczepionki i autyzm - mit czy prawda?\" - [link]\n- Health Feedback: \"Scientific consensus on vaccine safety\" - [link]"
      },
      {
        "title": "Scenario 1: Single Specific Claim",
        "body": "User request: \"Is it true that 5G causes COVID-19?\"\n\nApproach:\n\nIdentify claim: 5G technology causes or spreads COVID-19\nSelect 4-5 general fact-checkers (international scope, tech/health focus)\nSearch for \"5G COVID\" or \"5G coronavirus\"\nExpected result: Multiple fact-checkers will have debunked this\nPresent: Clear consensus with explanation of why the claim is false"
      },
      {
        "title": "Scenario 2: Article with Multiple Claims",
        "body": "User request: \"Can you fact-check this article about climate change?\"\n\nApproach:\n\nExtract 3-5 specific verifiable claims from the article\nSelect fact-checkers: user's language + climate-focused services\nSearch each claim separately\nPresent: Findings organized by claim, with overall assessment"
      },
      {
        "title": "Scenario 3: Complex Political Claim",
        "body": "User request: \"Did [politician] really say/do [thing]?\"\n\nApproach:\n\nIdentify the specific claim and context\nSelect fact-checkers from politician's country + user's language\nSearch politician's name + key terms\nPresent: Direct answer with context, including if statement was taken out of context"
      },
      {
        "title": "Scenario 4: Viral Social Media Content",
        "body": "User request: \"I saw this video on TikTok claiming [X], is it real?\"\n\nApproach:\n\nIdentify what's being claimed in the video\nSelect broad, well-known fact-checkers (viral content often fact-checked widely)\nSearch for key terms from the claim\nPresent: Whether it's been debunked, original context if misrepresented"
      },
      {
        "title": "Scenario 5: Historical Claim",
        "body": "User request: \"Did [historical event] really happen this way?\"\n\nApproach:\n\nNote that this is historical verification, may need broader research\nSelect fact-checkers + consider using general web search for historical records\nPresent: What fact-checkers say if available, acknowledge if claim is outside typical fact-checking scope"
      },
      {
        "title": "Scenario 6: Very Fresh Content (Breaking News)",
        "body": "User request: \"I just saw this article published today claiming [X]. Is it true?\"\n\nApproach:\n\nCheck publication date: is it 3 days old or less?\nSearch fact-checkers anyway (sometimes they work very quickly on major stories)\nIf no fact-checks found:\n\nWith task scheduling: Schedule follow-up check for 3 days later, notify user of the scheduled check\nWithout task scheduling: Inform user that content is too fresh, suggest returning in 3 days\n\n\nOffer preliminary analysis using general web search\nPresent: \"This is very recent content. Fact-checkers haven't had time to verify yet. Here's what I found from general sources, but I recommend waiting for professional fact-checking.\"\n\nExample response:\n\nThis article was published just [X hours/days] ago, which is too recent for professional \nfact-checkers to have verified the claims yet. They typically need a few days to conduct \nthorough research.\n\nI've scheduled a follow-up fact-check for [date in 3 days]. I'll notify you automatically \nif fact-checkers publish verification by then.\n\nIn the meantime, here's what I found through general web research:\n[preliminary findings with appropriate caveats]\n\nNote: These are preliminary findings only. Professional fact-checkers may provide more \nthorough verification in the coming days."
      },
      {
        "title": "When Fact-Checkers Haven't Covered the Topic",
        "body": "If searches return no relevant results:\n\nTry broader search terms\nTry related claims that fact-checkers may have covered\nIf still no results, check if the content is recent (3 days or less)\nFor fresh content (≤3 days old):\n\nAcknowledge: \"This is very recent content. Professional fact-checkers typically need a few days to verify claims.\"\nIf scheduling tools are available: Schedule a follow-up fact-check for 3 days later\nIf scheduling is not available: Suggest the user returns in 3 days for updated verification\nOffer to do preliminary general web research in the meantime\n\n\nFor older content: Acknowledge \"Professional fact-checkers haven't specifically addressed this claim\"\nOffer to do general web research instead\nConsider if the claim is too obscure or too local for major fact-checkers"
      },
      {
        "title": "Contradicting Fact-Checkers",
        "body": "If fact-checkers disagree:\n\nPresent all perspectives fairly\nNote the disagreement explicitly\nConsider if they're addressing slightly different aspects\nLook for consensus on specific sub-points\nDon't force a conclusion if the evidence is genuinely mixed"
      },
      {
        "title": "Outdated Information",
        "body": "If fact-checks are old but the claim is current:\n\nNote the publication dates\nSearch for more recent fact-checks\nConsider if circumstances have changed\nAcknowledge if using older sources due to lack of recent coverage"
      },
      {
        "title": "Language Barriers",
        "body": "If key fact-checkers are in languages you don't fully understand:\n\nUse web_fetch to retrieve the content\nFocus on verdicts, ratings, and conclusion sections which are often clear\nUse any English summaries or abstracts\nAcknowledge limitations if language creates uncertainty"
      },
      {
        "title": "Bias Concerns",
        "body": "Users may question fact-checker reliability:\n\nStick to well-established, internationally recognized services\nPresent findings from multiple fact-checkers to show consensus\nNote if you're using fact-checkers from multiple countries/perspectives\nAcknowledge that no source is perfect, but these are professional verification services"
      },
      {
        "title": "Quality Checklist",
        "body": "Before presenting results, verify:\n\nChecked at least 3 different fact-checking services\n Included fact-checkers relevant to the user's language/location\n Included fact-checkers relevant to the content's context\n Excluded any fraudulent fact-checking services\n Read full articles, not just headlines or snippets\n Provided direct links to all sources cited\n Presented findings objectively without adding personal judgment\n Acknowledged any uncertainty or disagreement between sources\n Organized response clearly with specific findings, not vague summaries\n Used natural prose for main findings, lists only where truly helpful\n If content is ≤3 days old with no fact-checks: Noted this and scheduled follow-up OR suggested user return in 3 days\n If providing preliminary analysis: Clearly distinguished it from professional fact-checking"
      },
      {
        "title": "Examples of Good Fact-Checking Services",
        "body": "International/English:\n\nFactCheck.org (US, general)\nSnopes (US, general)\nFull Fact (UK, general)\nAFP Fact Check (International, multilingual)\nPolitiFact (US, politics)\n\nRegional/Language-Specific:\n\nDemagog.pl (Poland, Polish)\nLes Décodeurs (France, French)\nCorrectiv (Germany, German)\nMaldita.es (Spain, Spanish)\nAos Fatos (Brazil, Portuguese)\nAlt News (India, English/Hindi)\nAfrica Check (Africa, multilingual)\n\nSpecialized:\n\nHealth Feedback (health/medical claims)\nClimate Feedback (climate science claims)\nScience Feedback (general science claims)\n\nNote: This is not exhaustive. Always fetch the current list from Wikipedia to see all available services."
      },
      {
        "title": "Task Scheduling for Fresh Content",
        "body": "When content is very recent (≤3 days old) and hasn't been fact-checked yet:\n\nIf task scheduling tools are available:\n\nAutomatically schedule a follow-up fact-check for 3 days later\nStore the original query, claims, and context\nWhen the scheduled task runs:\n\nRe-search the same fact-checking services\nCompare new findings to preliminary analysis\nNotify user only if new fact-checks were found\nProvide updated verification with links\n\nIf task scheduling is NOT available:\n\nInform the user that the content is too fresh\nSuggest they return in 3 days for updated verification\nProvide preliminary analysis from general sources with appropriate caveats\nMake it clear that preliminary findings are not from professional fact-checkers"
      },
      {
        "title": "Core Approach",
        "body": "This skill focuses on using professional fact-checking organizations rather than doing original research. These organizations employ journalists and researchers who specialize in verification. Your role is to:\n\nFind what they've already published\nSynthesize their findings\nPresent them clearly to the user\nSchedule follow-ups for very recent content when possible\n\nIf a topic hasn't been covered by fact-checkers, acknowledge this and offer to do general research instead. Don't try to replace professional fact-checking with web searches alone, but do provide preliminary information when users need it for fresh content."
      }
    ],
    "body": "Fact-Checking Skill\n\nVerify claims and information using professional fact-checking services from around the world.\n\nCore Principles\nMultiple sources - Cross-reference findings from several fact-checking organizations\nRegional relevance - Prioritize fact-checkers appropriate to the content's context\nLanguage matching - Use fact-checkers in the native language of the content when possible\nCredible sources only - Never use fraudulent or unreliable fact-checking services\nBalanced presentation - Present both confirming and contradicting findings fairly\nWhen to Use This Skill\n\nTrigger this skill when the user:\n\nExplicitly asks to fact-check, verify, or validate information\nShares an article, video transcript, or claim and asks \"is this true?\"\nWants to check if something is misinformation or a hoax\nAsks about the credibility of specific claims or statements\nRequests verification of news, social media posts, or viral content\nWants to cross-reference information with trusted sources\n\nDo NOT trigger for:\n\nGeneral research or information gathering (use web search instead)\nChecking grammar, spelling, or writing quality\nVerifying code functionality or technical documentation\nQuestions about opinions rather than factual claims\nWorkflow\nStep 1: Understand the Content\n\nBefore beginning verification, analyze what needs to be checked:\n\nIdentify specific claims - Extract concrete, verifiable statements from the content\nNote the context - Identify:\nGeographic references (countries, regions, cities)\nNamed individuals (politicians, public figures, organizations)\nLanguages used in the content\nTime period or dates mentioned\nSubject matter (politics, health, science, etc.)\nDetermine user context:\nUser's native language (for selecting appropriate fact-checkers)\nUser's location if relevant\n\nExample Analysis:\n\nContent: \"Video claiming vaccines cause autism, mentions Andrew Wakefield, references UK study\"\nClaims to verify: 
Vaccine-autism link, Wakefield's research\nContext: Medical/health topic, UK origin, English language\nKey entities: Andrew Wakefield, MMR vaccine, UK medical establishment\nStep 2: Select Fact-Checking Services\n\nCRITICAL: Begin by fetching the current list of fact-checking services:\n\nFetch: https://en.wikipedia.org/wiki/List_of_fact-checking_websites\n\n\nFrom this list, select 3-7 relevant fact-checking services based on:\n\nSelection Criteria\n\nUser's language/location - Always include fact-checkers in the user's native language\n\nContent language/location - If different from user's language, also include fact-checkers in the content's language and region\n\nGeographic relevance - If content mentions specific countries/regions:\n\nInclude fact-checkers from those countries\nExample: Content about French politics → include French fact-checkers\n\nSubject matter specialists - Some fact-checkers specialize:\n\nHealth/medical claims → Health Feedback, Science Feedback\nPolitics → country-specific political fact-checkers\nGeneral → Snopes, FactCheck.org, Full Fact\n\nPerson-specific - If content focuses on specific public figures:\n\nInclude fact-checkers from their home countries\nExample: Claims about a US politician → include US fact-checkers\nExclusion Rule\n\nNEVER use services listed under \"Fraudulent fact-checking websites\" on the Wikipedia page, regardless of how well they match other criteria.\n\nPrioritization\n\nWhen you must limit selections:\n\nPrioritize: User's language > Content's language > Geographic relevance\nPrefer well-established services (FactCheck.org, Snopes, Full Fact, AFP Fact Check, etc.)\nInclude at least one international/general service\n\nExample Selection:\n\nUser: Polish speaker\nContent: English article about US vaccines\nSelected services:\nDemagog.pl (Polish, for user)\nFactCheck.org (US, for content geography)\nSnopes (US, general/medical)\nHealth Feedback (health specialist)\nFull Fact (UK, English-speaking, 
general)\nStep 3: Search Each Fact-Checking Service\n\nFor each selected service, conduct targeted searches:\n\nSearch Strategy\n\nExtract 2-4 search terms from the content:\n\nKey person names\nMain topics/subjects\nSpecific claims or events\nImportant keywords\n\nTranslate terms to the fact-checker's native language if needed\n\nConstruct search queries using DuckDuckGo with site operator:\n\nFormat: site:domain.com [search terms in appropriate language]\n\nExamples:\n- site:fullfact.org vaccines autism\n- site:demagog.org.pl szczepionki autyzm\n- site:factcheck.org Andrew Wakefield MMR\n- site:healthfeedback.org vaccine safety\n\n\nExecute 1-3 searches per fact-checker (depending on content complexity)\n\nSearch Best Practices\nKeep queries concise (2-4 words typically)\nStart broad, then narrow if needed\nDon't repeat very similar queries\nIf first search yields good results, proceed to analysis\nIf first search yields poor results, try alternative terms\nStep 4: Analyze Search Results\n\nFor each fact-checking service:\n\nReview search results - Examine the first 5-10 results from each search\n\nSelect relevant articles - Choose articles where:\n\nHeadline directly addresses the claim being verified\nContent appears substantial (not just brief mentions)\nPublication date is relevant (recent for ongoing issues, any date for historical debunks)\n\nFetch and read articles - Use web_fetch to retrieve the full text of 2-4 most relevant articles per fact-checker\n\nExtract key findings for each article:\n\nVerdict - What did the fact-checker conclude? 
(True, False, Misleading, Mixed, Unproven, etc.)\nEvidence - What evidence did they cite?\nContext - Any important nuance or context\nRelevance - How directly does this address the user's claim?\nStep 5: Synthesize and Present Results\n\nOrganize findings into a clear, user-friendly format:\n\nHandle Fresh Content First\n\nBefore presenting results, check if the content is very recent (3 days old or less):\n\nIf fact-checks found: Proceed normally with presentation\n\nIf no fact-checks found AND content is ≤3 days old:\n\nNote that the content is too fresh for fact-checkers to have covered it yet\nIf task scheduling is available:\nSchedule a follow-up fact-check for 3 days from now\nInform user: \"I've scheduled a follow-up check for [date]. I'll notify you if fact-checkers have published verification by then.\"\nIf task scheduling is NOT available:\nSuggest: \"This content is very recent (published [date]). Fact-checkers typically need a few days to verify claims. I recommend checking back in 3 days for updated verification.\"\nOffer preliminary analysis using general web search\nProceed with any available information from general sources\n\nIf no fact-checks found AND content is older:\n\nNote that fact-checkers haven't specifically covered this\nOffer general web research instead\nStructure Your Response\n\nOpening summary (2-3 sentences)\n\nOverall consensus from fact-checkers\nBrief answer to the user's question\n\nKey findings by claim (if multiple claims)\n\nGroup related findings together\nPresent contradicting evidence if it exists\n\nDetailed evidence (organized by fact-checker or by claim)\n\nInclude specific verdicts\nCite evidence fact-checkers used\nNote any disagreements between fact-checkers\n\nImportant context (if relevant)\n\nHistorical background\nWhy the claim persists\nCommon misconceptions\n\nSource citations\n\nProvide direct links to all fact-checking articles referenced\nFormat: [Fact-Checker Name]: Article Title (Date if available) - 
[URL]\nPresentation Guidelines\nBe objective - Present findings without inserting personal judgment\nBe nuanced - Avoid oversimplifying complex issues\nBe clear about uncertainty - If fact-checkers disagree or evidence is inconclusive, say so\nBe balanced - If some evidence supports and some contradicts, present both\nUse accessible language - Avoid jargon, explain technical terms\nHighlight consensus - When multiple fact-checkers agree, emphasize this\nFormatting\nUse clear headers to organize different claims or themes\nUse natural prose, not bullet points, for the main findings\nOnly use lists for: multiple similar items, source citations, or when explicitly helpful\nInclude clickable citations throughout (not just at the end)\nExample Response Structure\nBased on verification from five established fact-checking organizations, the claim that vaccines cause autism has been thoroughly debunked. Multiple independent reviews of the evidence have found no causal link between vaccination and autism spectrum disorder.\n\nThe origins of this claim trace back to a fraudulent 1998 study by Andrew Wakefield, which was later retracted by The Lancet. 
Fact-checkers consistently note that Wakefield lost his medical license, and subsequent large-scale studies involving millions of children have found no connection.\n\n[Full Fact reviewed the evidence in 2023](link), concluding \"There is no link between the MMR vaccine and autism.\" Their analysis examined 12 major studies and found consistent results across different populations and methodologies.\n\n[FactCheck.org's comprehensive analysis](link) explains that \"The myth persists despite overwhelming scientific consensus against it\" and details how the original study was not only retracted but shown to involve falsified data.\n\nHowever, [Demagog.pl](link) notes that while the vaccine-autism link is false, concerns about vaccine safety in general are legitimate and should be addressed through proper scientific channels rather than dismissed.\n\n**Important context**: The persistence of this myth has real public health consequences, as fact-checkers note declining vaccination rates in some communities. 
Understanding why the claim was debunked helps address ongoing concerns.\n\n**Sources consulted:**\n- Full Fact: \"MMR vaccine does not cause autism\" - [link]\n- FactCheck.org: \"Wakefield's Fraudulent Research\" - [link]  \n- Snopes: \"Vaccines and Autism\" - [link]\n- Demagog.pl: \"Szczepionki i autyzm - mit czy prawda?\" - [link]\n- Health Feedback: \"Scientific consensus on vaccine safety\" - [link]\n\nCommon Scenarios\nScenario 1: Single Specific Claim\n\nUser request: \"Is it true that 5G causes COVID-19?\"\n\nApproach:\n\nIdentify claim: 5G technology causes or spreads COVID-19\nSelect 4-5 general fact-checkers (international scope, tech/health focus)\nSearch for \"5G COVID\" or \"5G coronavirus\"\nExpected result: Multiple fact-checkers will have debunked this\nPresent: Clear consensus with explanation of why the claim is false\nScenario 2: Article with Multiple Claims\n\nUser request: \"Can you fact-check this article about climate change?\"\n\nApproach:\n\nExtract 3-5 specific verifiable claims from the article\nSelect fact-checkers: user's language + climate-focused services\nSearch each claim separately\nPresent: Findings organized by claim, with overall assessment\nScenario 3: Complex Political Claim\n\nUser request: \"Did [politician] really say/do [thing]?\"\n\nApproach:\n\nIdentify the specific claim and context\nSelect fact-checkers from politician's country + user's language\nSearch politician's name + key terms\nPresent: Direct answer with context, including if statement was taken out of context\nScenario 4: Viral Social Media Content\n\nUser request: \"I saw this video on TikTok claiming [X], is it real?\"\n\nApproach:\n\nIdentify what's being claimed in the video\nSelect broad, well-known fact-checkers (viral content often fact-checked widely)\nSearch for key terms from the claim\nPresent: Whether it's been debunked, original context if misrepresented\nScenario 5: Historical Claim\n\nUser request: \"Did [historical event] really happen this 
way?\"\n\nApproach:\n\nNote that this is historical verification, may need broader research\nSelect fact-checkers + consider using general web search for historical records\nPresent: What fact-checkers say if available, acknowledge if claim is outside typical fact-checking scope\nScenario 6: Very Fresh Content (Breaking News)\n\nUser request: \"I just saw this article published today claiming [X]. Is it true?\"\n\nApproach:\n\nCheck publication date: is it 3 days old or less?\nSearch fact-checkers anyway (sometimes they work very quickly on major stories)\nIf no fact-checks found:\nWith task scheduling: Schedule follow-up check for 3 days later, notify user of the scheduled check\nWithout task scheduling: Inform user that content is too fresh, suggest returning in 3 days\nOffer preliminary analysis using general web search\nPresent: \"This is very recent content. Fact-checkers haven't had time to verify yet. Here's what I found from general sources, but I recommend waiting for professional fact-checking.\"\n\nExample response:\n\nThis article was published just [X hours/days] ago, which is too recent for professional \nfact-checkers to have verified the claims yet. They typically need a few days to conduct \nthorough research.\n\nI've scheduled a follow-up fact-check for [date in 3 days]. I'll notify you automatically \nif fact-checkers publish verification by then.\n\nIn the meantime, here's what I found through general web research:\n[preliminary findings with appropriate caveats]\n\nNote: These are preliminary findings only. Professional fact-checkers may provide more \nthorough verification in the coming days.\n\nEdge Cases and Limitations\nWhen Fact-Checkers Haven't Covered the Topic\n\nIf searches return no relevant results:\n\nTry broader search terms\nTry related claims that fact-checkers may have covered\nIf still no results, check if the content is recent (3 days or less)\nFor fresh content (≤3 days old):\nAcknowledge: \"This is very recent content. 
Professional fact-checkers typically need a few days to verify claims.\"\nIf scheduling tools are available: Schedule a follow-up fact-check for 3 days later\nIf scheduling is not available: Suggest the user return in 3 days for updated verification\nOffer to do preliminary general web research in the meantime\nFor older content: Acknowledge \"Professional fact-checkers haven't specifically addressed this claim\"\nOffer to do general web research instead\nConsider if the claim is too obscure or too local for major fact-checkers\nContradicting Fact-Checkers\n\nIf fact-checkers disagree:\n\nPresent all perspectives fairly\nNote the disagreement explicitly\nConsider if they're addressing slightly different aspects\nLook for consensus on specific sub-points\nDon't force a conclusion if the evidence is genuinely mixed\nOutdated Information\n\nIf fact-checks are old but the claim is current:\n\nNote the publication dates\nSearch for more recent fact-checks\nConsider if circumstances have changed\nAcknowledge if using older sources due to lack of recent coverage\nLanguage Barriers\n\nIf key fact-checkers are in languages you don't fully understand:\n\nUse web_fetch to retrieve the content\nFocus on verdicts, ratings, and conclusion sections which are often clear\nUse any English summaries or abstracts\nAcknowledge limitations if language creates uncertainty\nBias Concerns\n\nUsers may question fact-checker reliability:\n\nStick to well-established, internationally recognized services\nPresent findings from multiple fact-checkers to show consensus\nNote if you're using fact-checkers from multiple countries/perspectives\nAcknowledge that no source is perfect, but these are professional verification services\nQuality Checklist\n\nBefore presenting results, verify:\n\n Checked at least 3 different fact-checking services\n Included fact-checkers relevant to the user's language/location\n Included fact-checkers relevant to the content's context\n Excluded any fraudulent 
fact-checking services\n Read full articles, not just headlines or snippets\n Provided direct links to all sources cited\n Presented findings objectively without adding personal judgment\n Acknowledged any uncertainty or disagreement between sources\n Organized response clearly with specific findings, not vague summaries\n Used natural prose for main findings, lists only where truly helpful\n If content is ≤3 days old with no fact-checks: Noted this and scheduled follow-up OR suggested user return in 3 days\n If providing preliminary analysis: Clearly distinguished it from professional fact-checking\nExamples of Good Fact-Checking Services\n\nInternational/English:\n\nFactCheck.org (US, general)\nSnopes (US, general)\nFull Fact (UK, general)\nAFP Fact Check (International, multilingual)\nPolitiFact (US, politics)\n\nRegional/Language-Specific:\n\nDemagog.pl (Poland, Polish)\nLes Décodeurs (France, French)\nCorrectiv (Germany, German)\nMaldita.es (Spain, Spanish)\nAos Fatos (Brazil, Portuguese)\nAlt News (India, English/Hindi)\nAfrica Check (Africa, multilingual)\n\nSpecialized:\n\nHealth Feedback (health/medical claims)\nClimate Feedback (climate science claims)\nScience Feedback (general science claims)\n\nNote: This is not exhaustive. 
Always fetch the current list from Wikipedia to see all available services.\n\nFinal Notes\nTask Scheduling for Fresh Content\n\nWhen content is very recent (≤3 days old) and hasn't been fact-checked yet:\n\nIf task scheduling tools are available:\n\nAutomatically schedule a follow-up fact-check for 3 days later\nStore the original query, claims, and context\nWhen the scheduled task runs:\nRe-search the same fact-checking services\nCompare new findings to preliminary analysis\nNotify user only if new fact-checks were found\nProvide updated verification with links\n\nIf task scheduling is NOT available:\n\nInform the user that the content is too fresh\nSuggest they return in 3 days for updated verification\nProvide preliminary analysis from general sources with appropriate caveats\nMake it clear that preliminary findings are not from professional fact-checkers\nCore Approach\n\nThis skill focuses on using professional fact-checking organizations rather than doing original research. These organizations employ journalists and researchers who specialize in verification. Your role is to:\n\nFind what they've already published\nSynthesize their findings\nPresent them clearly to the user\nSchedule follow-ups for very recent content when possible\n\nIf a topic hasn't been covered by fact-checkers, acknowledge this and offer to do general research instead. Don't try to replace professional fact-checking with web searches alone, but do provide preliminary information when users need it for fresh content."
  },
  "trust": {
    "sourceLabel": "tencent",
    "provenanceUrl": "https://clawhub.ai/asgraf/verify-claims",
    "publisherUrl": "https://clawhub.ai/asgraf/verify-claims",
    "owner": "asgraf",
    "version": "1.0.0",
    "license": null,
    "verificationStatus": "Indexed source record"
  },
  "links": {
    "detailUrl": "https://openagent3.xyz/skills/verify-claims",
    "downloadUrl": "https://openagent3.xyz/downloads/verify-claims",
    "agentUrl": "https://openagent3.xyz/skills/verify-claims/agent",
    "manifestUrl": "https://openagent3.xyz/skills/verify-claims/agent.json",
    "briefUrl": "https://openagent3.xyz/skills/verify-claims/agent.md"
  }
}