import sys
import os


def find_file_id(client, project_id, base_target, search_ext):
    """
    Iterate through all project source files (using pagination) and return the
    ID of the file whose path ends with ``<base_target><search_ext>``.

    :param client: an authenticated CrowdinClient instance.
    :param project_id: numeric Crowdin project ID.
    :param base_target: lower-cased file name without extension (e.g. 'myaddon').
    :param search_ext: extension including the leading dot (e.g. '.pot').
    :returns: the Crowdin file ID, or None when no file matches.
    """
    offset = 0
    limit = 100

    while True:
        resp = client.source_files.list_files(
            projectId=project_id,
            limit=limit,
            offset=offset,
        )

        data = resp["data"]
        for f in data:
            path_crowdin = f["data"]["path"].lower()
            # Match on the path suffix so the directory layout on Crowdin doesn't matter.
            if path_crowdin.endswith(f"{base_target}{search_ext}"):
                file_id = f["data"]["id"]
                print(f"DEBUG: Match found: {path_crowdin} (ID: {file_id})")
                return file_id

        # A short page means this was the last batch.
        if len(data) < limit:
            break

        offset += limit

    return None


def get_score_from_api(file_name_to_search: str, lang_id: str) -> float:
    """
    Retrieve the translation progress (0.0 - 1.0) for one language of one file.

    Handles pagination for both the file listing and the language status.
    Returns 0.0 on any failure (missing credentials, file not found on Crowdin,
    language absent from the progress list, or an API error).
    """
    token = os.environ.get("crowdinAuthToken")
    p_id_env = os.environ.get("CROWDIN_PROJECT_ID")

    if not token or not p_id_env:
        print("ERROR: Missing environment variables 'crowdinAuthToken' or 'CROWDIN_PROJECT_ID'.")
        return 0.0

    # Imported lazily so the pure helpers in this module can be used (and
    # tested) without the crowdin_api dependency installed.
    from crowdin_api import CrowdinClient

    client = CrowdinClient(token=token)
    p_id = int(p_id_env)

    try:
        # Derive base name and extension robustly, even for extension-less
        # or backslash-separated paths.
        # Example: 'addon/locale/fr/LC_MESSAGES/myaddon.po' -> base_target 'myaddon'
        base_name = os.path.basename(file_name_to_search.replace("\\", "/"))
        stem, dot_ext = os.path.splitext(base_name)
        base_target = stem.lower()
        ext_target = dot_ext.lstrip(".").lower()

        # Map the local artifact to the source file actually hosted on Crowdin:
        # a .po is translated from a .pot source, and exported .md docs come
        # from the .xliff source (crowdinSync.ps1 only ever uploads .pot and
        # .xliff), so score .md files against the .xliff source.
        # NOTE(review): the .md -> .xliff mapping assumes docs are only hosted
        # as xliff on Crowdin -- confirm against the project's file list.
        search_ext = {"po": ".pot", "md": ".xliff"}.get(
            ext_target,
            f".{ext_target}" if ext_target else "",
        )

        print(f"DEBUG: Searching for source file: {base_target}{search_ext}")

        file_id = find_file_id(client, p_id, base_target, search_ext)

        if file_id is None:
            print(f"WARNING: File '{base_target}{search_ext}' not found on Crowdin.")
            return 0.0

        # Paginate through the per-language progress entries for this file.
        offset = 0
        limit = 100

        while True:
            resp = client.translation_status.get_file_progress(
                projectId=p_id,
                fileId=file_id,
                limit=limit,
                offset=offset,
            )

            data = resp["data"]
            for item in data:
                lang_api = item["data"]["languageId"]

                # Flexible matching (e.g. 'fr' will match 'fr' or 'fr-FR' from the API).
                # Also handles underscore to dash conversion for Crowdin compatibility.
                if lang_api.lower().startswith(lang_id.lower().replace("_", "-")):
                    progress = float(item["data"]["translationProgress"])
                    return progress / 100

            # Check pagination total
            total = resp["pagination"]["totalCount"]
            if offset + limit >= total:
                break
            offset += limit

        print(f"DEBUG: Language '{lang_id}' not found in progress list for this file.")
        return 0.0

    except Exception as e:
        # Deliberate best-effort: any API failure degrades to "no translation"
        # rather than aborting the sync pipeline.
        print(f"API ERROR: {e}")
        return 0.0


def main():
    if len(sys.argv) < 3:
        # NOTE(review): the original usage string lost its angle-bracketed
        # placeholders to text mangling; reconstructed here.
        print("Usage: python checkTranslation.py <file_name> <lang_code>")
        sys.exit(2)

    input_file = sys.argv[1]
    lang = sys.argv[2]

    score = get_score_from_api(input_file, lang)

    # Output formatted for capture by the PowerShell script (crowdinSync.ps1)
    print(f"translationRatio={score}")

    if input_file.lower().endswith(".md"):
        print(f"mdScore={score}")
    else:
        print(f"poScore={score}")

    # Exit with success (0) if there is at least some translated content
    sys.exit(0 if score > 0.05 else 1)


if __name__ == "__main__":
    main()
+ ./l10nUtil.exe uploadSourceFile "$xliffFile" -c addon + git add "$xliffFile" + git diff --staged --quiet + if ($LASTEXITCODE -ne 0) { + git commit -m "Update $xliffFile for $addonId" + git push + } +} + +# --- STEP 3: EXPORT AND PROCESS TRANSLATIONS --- + +Write-Host "DEBUG: Exporting translations from Crowdin..." +./l10nUtil.exe exportTranslations -o _addonL10n -c addon + +# Ensure base directories exist +New-Item -ItemType Directory -Force -Path addon/locale | Out-Null +New-Item -ItemType Directory -Force -Path addon/doc | Out-Null + +# Load language mappings for Crowdin API calls +$languageMappings = Get-Content -Raw ".github/scripts/languageMappings.json" | ConvertFrom-Json + +foreach ($dir in Get-ChildItem -Path "_addonL10n/$addonId" -Directory) { + $langCode = $dir.Name + + if ($langCode -eq "en") { continue } + + # Identify codes + $crowdinLang = $languageMappings[$langCode] + if (-not $crowdinLang) { $crowdinLang = $langCode } + $langShort = $langCode.Split('-')[0].Split('_')[0] + + # Map to local NVDA directory + $localLangDir = uv run python .github/scripts/langCodes.py $langCode + + Write-Host "`n--- Processing Language: $langCode (Mapped to local: $localLangDir) ---" + + # Paths + $remoteMd = Join-Path $dir.FullName "$addonId.md" + $remoteXliff = Join-Path $dir.FullName "$addonId.xliff" + $remotePo = Join-Path $dir.FullName "$addonId.po" + $localMdDir = "addon/doc/$localLangDir" + $localMd = "$localMdDir/readme.md" + $localPoPath = "addon/locale/$localLangDir/LC_MESSAGES/nvda.po" + + # --- 3.1 PO FILE PROCESSING --- + $poImported = $false + if (Test-Path $remotePo) { + Write-Host "DEBUG: Checking Remote PO progress for $langShort..." + uv run python .github/scripts/checkTranslation.py "$addonId.po" $langShort + if ($LASTEXITCODE -eq 0) { + Write-Host "SUCCESS: Remote PO is valid. 
Importing to $localPoPath" + New-Item -ItemType Directory -Force -Path (Split-Path $localPoPath) | Out-Null + Move-Item $remotePo $localPoPath -Force + $poImported = $true + } else { + Write-Host "WARNING: Remote PO progress is below threshold." + } + } + + if (-not $poImported -and (Test-Path $localPoPath)) { + Write-Host "ACTION: Uploading local legacy PO to Crowdin ($crowdinLang) as fallback." + ./l10nUtil.exe uploadTranslationFile $crowdinLang "$addonId.po" $localPoPath -c addon + } + + # --- 3.2 DOCUMENTATION PROCESSING (MD & XLIFF) --- + $scoreMd = 0.0 + $scoreXliff = 0.0 + + if (Test-Path $remoteMd) { + Write-Host "DEBUG: Evaluating Remote Markdown score..." + $res = uv run python .github/scripts/checkTranslation.py "$addonId.md" $langShort + $scoreMd = [double]($res | Select-String "mdScore=").ToString().Split("=")[1] + } else { + Write-Host "DEBUG: No remote Markdown file found for this language." + } + + if (Test-Path $remoteXliff) { + Write-Host "DEBUG: Evaluating Remote XLIFF score..." + $res = uv run python .github/scripts/checkTranslation.py "$addonId.xliff" $langShort + $scoreXliff = [double]($res | Select-String "translationRatio=").ToString().Split("=")[1] + } else { + Write-Host "DEBUG: No remote XLIFF file found for this language." + } + + Write-Host "DEBUG: Comparison Scores -> MD: $scoreMd | XLIFF: $scoreXliff" + + $threshold = 0.5 + $docImported = $false + + if ($scoreXliff -gt $threshold -or $scoreMd -gt $threshold) { + if (!(Test-Path $localMdDir)) { New-Item -ItemType Directory -Force -Path $localMdDir | Out-Null } + + if ($scoreXliff -ge $scoreMd) { + Write-Host "SUCCESS: XLIFF is better or equal. Converting XLIFF to local MD ($localLangDir)..." + ./l10nUtil.exe xliff2md $remoteXliff $localMd + $docImported = $true + } else { + Write-Host "SUCCESS: Markdown is better. Importing Remote MD to local ($localLangDir)..." 
+ Move-Item $remoteMd $localMd -Force + $docImported = $true + } + } else { + Write-Host "WARNING: Both remote MD and XLIFF scores are below threshold ($threshold)." + } + + if (-not $docImported -and (Test-Path $localMd)) { + Write-Host "ACTION: Documentation quality too low. Uploading local MD to Crowdin ($crowdinLang) as fallback." + ./l10nUtil.exe uploadTranslationFile $crowdinLang "$addonId.md" $localMd -c addon + } +} + +# --- STEP 4: COMMIT UPDATED TRANSLATIONS --- + +git add addon/locale addon/doc +git diff --staged --quiet +if ($LASTEXITCODE -ne 0) { + git commit -m "Update translations for $addonId from Crowdin (Automatic Sync)" + $branch = $env:downloadTranslationsBranch + git push -f origin "HEAD:$branch" + Write-Host "SUCCESS: Translations committed and pushed." +} else { + Write-Host "DEBUG: No changes in translations to commit." +} diff --git a/.github/scripts/langCodes.py b/.github/scripts/langCodes.py new file mode 100644 index 0000000..044e5b5 --- /dev/null +++ b/.github/scripts/langCodes.py @@ -0,0 +1,60 @@ +import sys + +# Mapping between Crowdin language IDs (keys) and standard NVDA directory names (values). +# This dictionary acts as the symmetrical counterpart to 'languageMappings.json' implemented by @nvdaes. +# It ensures that translations exported from Crowdin are stored in the correct +# local paths (e.g., 'es-ES' from Crowdin goes into the 'es' folder). +CROWDIN_TO_NVDA = { + # Arabic variants + "ar-SA": "ar_SA", + + # Spanish variants + "es-ES": "es", + "es-CO": "es_CO", + + # Portuguese variants + "pt-BR": "pt_BR", + "pt-PT": "pt_PT", + + # Chinese variants + "zh-CN": "zh_CN", + "zh-HK": "zh_HK", + "zh-TW": "zh_TW", + + # Other specific mappings from the NVDA ecosystem + "af": "af_ZA", + "de-CH": "de_CH", + "nb": "nb_NO", + "nn-NO": "nn_NO", + "sr-CS": "sr" +} + +def get_nvda_code(crowdin_code): + """ + Returns the appropriate local directory name for a given Crowdin language ID. 
import sys

# Mapping between Crowdin language IDs (keys) and standard NVDA directory names (values).
# This dictionary acts as the symmetrical counterpart to 'languageMappings.json'.
# It ensures that translations exported from Crowdin are stored in the correct
# local paths (e.g., 'es-ES' from Crowdin goes into the 'es' folder).
CROWDIN_TO_NVDA = {
    # Arabic variants
    "ar-SA": "ar_SA",

    # Spanish variants
    "es-ES": "es",
    "es-CO": "es_CO",

    # Portuguese variants
    "pt-BR": "pt_BR",
    "pt-PT": "pt_PT",

    # Chinese variants
    "zh-CN": "zh_CN",
    "zh-HK": "zh_HK",
    "zh-TW": "zh_TW",

    # Other specific mappings from the NVDA ecosystem
    "af": "af_ZA",
    "de-CH": "de_CH",
    "nb": "nb_NO",
    "nn-NO": "nn_NO",
    "sr-CS": "sr",
}


def get_nvda_code(crowdin_code: str) -> str:
    """
    Return the appropriate local directory name for a given Crowdin language ID.

    Args:
        crowdin_code: The language identifier from Crowdin (e.g., 'pt-BR', 'fr').

    Returns:
        The corresponding NVDA locale folder name (e.g., 'pt_BR', 'fr').
    """
    # 1. Direct check in our verified map (priority over the generic rules).
    if crowdin_code in CROWDIN_TO_NVDA:
        return CROWDIN_TO_NVDA[crowdin_code]

    # 2. Automated conversion for regional variants: Crowdin "xx-YY" -> NVDA "xx_YY".
    # This handles regional codes not explicitly defined in the map.
    if "-" in crowdin_code:
        return crowdin_code.replace("-", "_")

    # 3. Default: return as is.
    # This covers base languages that don't use regional folders in NVDA
    # (e.g., 'fr', 'tr', 'bg', 'fi', 'fa').
    return crowdin_code


if __name__ == "__main__":
    if len(sys.argv) > 1:
        # Output the mapped code for the PowerShell caller to capture.
        print(get_nvda_code(sys.argv[1]))
    else:
        # FIX: previously a missing argument silently printed nothing, so the
        # PowerShell caller built paths with an empty directory name.
        print("Usage: python langCodes.py <crowdin_language_code>", file=sys.stderr)
        sys.exit(1)
from collections.abc import Generator
from collections.abc import Iterable
import tempfile
import os
import contextlib
import lxml.etree
import argparse
import uuid
import re
from itertools import zip_longest
from xml.sax.saxutils import escape as xmlEscape
import difflib
from dataclasses import dataclass
import subprocess


def getGithubRepoURL() -> str:
    """
    Get the GitHub repository URL from git remote origin.

    :returns: the raw GitHub URL for the repository.
    :raises ValueError: if the remote URL cannot be parsed.
    :raises subprocess.CalledProcessError: if git itself fails (check=True).
    """
    result = subprocess.run(
        ["git", "remote", "get-url", "origin"],
        capture_output=True,
        text=True,
        check=True,
    )
    remote_url = result.stdout.strip()
    # Convert SSH or HTTPS URL to raw GitHub URL format
    if match := re.match(r"git@github\.com:(.+?)(?:\.git)?$", remote_url):
        repo_path = match.group(1)
    elif match := re.match(r"https://github\.com/(.+?)(?:\.git)?$", remote_url):
        repo_path = match.group(1)
    else:
        raise ValueError(f"Cannot parse GitHub URL from git remote: {remote_url}")
    return f"https://raw.githubusercontent.com/{repo_path}"


# FIX: the HTML-comment markers inside these patterns were lost to text
# mangling, leaving e.g. r"^()$" whose group counts no longer match their
# consumers (skeletonizeLine unpacks three groups for KC lines).
# NOTE(review): reconstructed from their usage -- verify against the upstream
# NVDA user_docs tooling.
re_kcTitle = re.compile(r"^(<!-- KC:title: )(.*)( -->)$")
re_kcSettingsSection = re.compile(r"^(<!-- KC:settingsSection: )(.*)( -->)$")
# Comments that span a single line in their entirety
re_comment = re.compile(r"^<!--.*-->$")
re_heading = re.compile(r"^(#+\s+)(.+?)((?:\s+\{#.+\})?)$")
re_bullet = re.compile(r"^(\s*\*\s+)(.+)$")
re_number = re.compile(r"^(\s*[0-9]+\.\s+)(.+)$")
re_hiddenHeaderRow = re.compile(r"^\|\s*\.\s*\{\.hideHeaderRow\}\s*(\|\s*\.\s*)*\|$")
re_postTableHeaderLine = re.compile(r"^(\|\s*-+\s*)+\|$")
re_tableRow = re.compile(r"^(\|)(.+)(\|)$")
re_translationID = re.compile(r"^(.*)\$\(ID:([0-9a-f-]+)\)(.*)$")
# Matches a line carrying a trailing inline markdownlint directive comment;
# group 1 is the content before it, group 2 any trailing whitespace
# (the sub() in preprocessMarkdownLines keeps both and drops the comment).
re_inlineMarkdownLintComment = re.compile(r"^(.*?)(?:\s*<!-- markdownlint-.+? -->)(\s*)$")


def prettyPathString(path: str) -> str:
    """Return *path* relative to the CWD when both are on the same drive, else unchanged."""
    cwd = os.getcwd()
    if os.path.normcase(os.path.splitdrive(path)[0]) != os.path.normcase(
        os.path.splitdrive(cwd)[0],
    ):
        return path
    return os.path.relpath(path, cwd)
+@contextlib.contextmanager +def createAndDeleteTempFilePath_contextManager( + dir: str | None = None, + prefix: str | None = None, + suffix: str | None = None, +) -> Generator[str, None, None]: + """A context manager that creates a temporary file and deletes it when the context is exited""" + with tempfile.NamedTemporaryFile( + dir=dir, + prefix=prefix, + suffix=suffix, + delete=False, + ) as tempFile: + tempFilePath = tempFile.name + tempFile.close() + yield tempFilePath + os.remove(tempFilePath) + + +def getLastCommitID(filePath: str) -> str: + # Run the git log command to get the last commit ID for the given file + result = subprocess.run( + ["git", "log", "-n", "1", "--pretty=format:%H", "--", filePath], + capture_output=True, + text=True, + check=True, + ) + commitID = result.stdout.strip() + if not re.match(r"[0-9a-f]{40}", commitID): + raise ValueError(f"Invalid commit ID: '{commitID}' for file '{filePath}'") + return commitID + + +def getGitDir() -> str: + # Run the git rev-parse command to get the root of the git directory + result = subprocess.run( + ["git", "rev-parse", "--show-toplevel"], + capture_output=True, + text=True, + check=True, + ) + gitDir = result.stdout.strip() + if not os.path.isdir(gitDir): + raise ValueError(f"Invalid git directory: '{gitDir}'") + return gitDir + + +def getRawGithubURLForPath(filePath: str) -> str: + gitDirPath = getGitDir() + commitID = getLastCommitID(filePath) + relativePath = os.path.relpath(os.path.abspath(filePath), gitDirPath) + relativePath = relativePath.replace("\\", "/") + rawGithubRepoUrl = getGithubRepoURL() + return f"{rawGithubRepoUrl}/{commitID}/{relativePath}" + + +def preprocessMarkdownLines(mdLines: Iterable[str]) -> Iterable[str]: + """ + Preprocess markdown lines such as removing inline markdown lint comments.\ + :param mdLines: The markdown lines to preprocess + :returns: The preprocessed markdown lines + """ + for mdLine in mdLines: + # #18982: Remove markdown lint comments completely - not needed 
for intermediate markdown or final html. + mdLine = re_inlineMarkdownLintComment.sub(r"\1\2", mdLine) + yield mdLine + + +def skeletonizeLine(mdLine: str) -> str | None: + prefix = "" + suffix = "" + if ( + mdLine.isspace() + or mdLine.strip() == "[TOC]" + or re_hiddenHeaderRow.match(mdLine) + or re_postTableHeaderLine.match(mdLine) + ): + return None + elif m := re_heading.match(mdLine): + prefix, content, suffix = m.groups() + elif m := re_bullet.match(mdLine): + prefix, content = m.groups() + elif m := re_number.match(mdLine): + prefix, content = m.groups() + elif m := re_tableRow.match(mdLine): + prefix, content, suffix = m.groups() + elif m := re_kcTitle.match(mdLine): + prefix, content, suffix = m.groups() + elif m := re_kcSettingsSection.match(mdLine): + prefix, content, suffix = m.groups() + elif re_comment.match(mdLine): + return None + ID = str(uuid.uuid4()) + return f"{prefix}$(ID:{ID}){suffix}\n" + + +@dataclass +class Result_generateSkeleton: + numTotalLines: int = 0 + numTranslationPlaceholders: int = 0 + + +def generateSkeleton(mdPath: str, outputPath: str) -> Result_generateSkeleton: + print( + f"Generating skeleton file {prettyPathString(outputPath)} from {prettyPathString(mdPath)}...", + ) + res = Result_generateSkeleton() + with ( + open(mdPath, "r", encoding="utf8") as mdFile, + open(outputPath, "w", encoding="utf8", newline="") as outputFile, + ): + for mdLine in preprocessMarkdownLines(mdFile.readlines()): + res.numTotalLines += 1 + skelLine = skeletonizeLine(mdLine) + if skelLine: + res.numTranslationPlaceholders += 1 + else: + skelLine = mdLine + outputFile.write(skelLine) + print( + f"Generated skeleton file with {res.numTotalLines} total lines and {res.numTranslationPlaceholders} translation placeholders", + ) + return res + + +@dataclass +class Result_updateSkeleton: + numAddedLines: int = 0 + numAddedTranslationPlaceholders: int = 0 + numRemovedLines: int = 0 + numRemovedTranslationPlaceholders: int = 0 + numUnchangedLines: int = 0 + 
def extractSkeleton(xliffPath: str, outputPath: str):
    """Extract the embedded <skeleton> text of an XLIFF 2.0 file into *outputPath*."""
    print(
        f"Extracting skeleton from {prettyPathString(xliffPath)} to {prettyPathString(outputPath)}...",
    )
    with contextlib.ExitStack() as stack:
        outputFile = stack.enter_context(
            open(outputPath, "w", encoding="utf8", newline=""),
        )
        xliff = lxml.etree.parse(xliffPath)
        xliffRoot = xliff.getroot()
        namespace = {"xliff": "urn:oasis:names:tc:xliff:document:2.0"}
        if xliffRoot.tag != "{urn:oasis:names:tc:xliff:document:2.0}xliff":
            raise ValueError("Not an xliff file")
        skeletonNode = xliffRoot.find(
            "./xliff:file/xliff:skeleton",
            namespaces=namespace,
        )
        if skeletonNode is None:
            raise ValueError("No skeleton found in xliff file")
        skeletonContent = skeletonNode.text.strip()
        outputFile.write(skeletonContent)
    print(f"Extracted skeleton to {prettyPathString(outputPath)}")


def updateSkeleton(
    origMdPath: str,
    newMdPath: str,
    origSkelPath: str,
    outputPath: str,
) -> Result_updateSkeleton:
    """
    Produce an updated skeleton by replaying the markdown diff (orig -> new)
    against the original skeleton: unchanged lines keep their existing IDs,
    added lines get fresh placeholders, removed lines drop theirs.
    """
    print(
        f"Creating updated skeleton file {prettyPathString(outputPath)} from {prettyPathString(origSkelPath)} with changes from {prettyPathString(origMdPath)} to {prettyPathString(newMdPath)}...",
    )
    res = Result_updateSkeleton()
    with contextlib.ExitStack() as stack:
        origMdFile = stack.enter_context(open(origMdPath, "r", encoding="utf8"))
        newMdFile = stack.enter_context(open(newMdPath, "r", encoding="utf8"))
        origSkelFile = stack.enter_context(open(origSkelPath, "r", encoding="utf8"))
        outputFile = stack.enter_context(
            open(outputPath, "w", encoding="utf8", newline=""),
        )
        origMdLines = preprocessMarkdownLines(origMdFile.readlines())
        newMdLines = preprocessMarkdownLines(newMdFile.readlines())
        mdDiff = difflib.ndiff(list(origMdLines), list(newMdLines))
        # The original skeleton is consumed in lockstep with the "unchanged"
        # and "removed" sides of the diff.
        origSkelLines = iter(origSkelFile.readlines())
        for mdDiffLine in mdDiff:
            if mdDiffLine.startswith("?"):
                # ndiff intraline hint line; carries no content of its own.
                continue
            if mdDiffLine.startswith(" "):
                res.numUnchangedLines += 1
                skelLine = next(origSkelLines)
                if re_translationID.match(skelLine):
                    res.numUnchangedTranslationPlaceholders += 1
                outputFile.write(skelLine)
            elif mdDiffLine.startswith("+"):
                res.numAddedLines += 1
                skelLine = skeletonizeLine(mdDiffLine[2:])
                if skelLine:
                    res.numAddedTranslationPlaceholders += 1
                else:
                    skelLine = mdDiffLine[2:]
                outputFile.write(skelLine)
            elif mdDiffLine.startswith("-"):
                res.numRemovedLines += 1
                origSkelLine = next(origSkelLines)
                if re_translationID.match(origSkelLine):
                    res.numRemovedTranslationPlaceholders += 1
            else:
                raise ValueError(f"Unexpected diff line: {mdDiffLine}")
    print(
        f"Updated skeleton file with {res.numAddedLines} added lines "
        f"({res.numAddedTranslationPlaceholders} translation placeholders), "
        f"{res.numRemovedLines} removed lines ({res.numRemovedTranslationPlaceholders} translation placeholders), "
        f"and {res.numUnchangedLines} unchanged lines ({res.numUnchangedTranslationPlaceholders} translation placeholders)",
    )
    return res


@dataclass
class Result_generateXliff:
    numTranslatableStrings: int = 0


def generateXliff(
    mdPath: str,
    outputPath: str,
    skelPath: str | None = None,
) -> Result_generateXliff:
    """
    Generate an XLIFF 2.0 file from a markdown file (and its skeleton),
    embedding the skeleton and one <unit> per translatable line.
    """
    # If a skeleton file is not provided, first generate one
    with contextlib.ExitStack() as stack:
        if not skelPath:
            skelPath = stack.enter_context(
                createAndDeleteTempFilePath_contextManager(
                    dir=os.path.dirname(outputPath),
                    prefix=os.path.basename(mdPath),
                    suffix=".skel",
                ),
            )
            generateSkeleton(mdPath=mdPath, outputPath=skelPath)
        with open(skelPath, "r", encoding="utf8") as skelFile:
            skelContent = skelFile.read()
    res = Result_generateXliff()
    print(
        f"Generating xliff file {prettyPathString(outputPath)} from {prettyPathString(mdPath)} and {prettyPathString(skelPath)}...",
    )
    with contextlib.ExitStack() as stack:
        mdFile = stack.enter_context(open(mdPath, "r", encoding="utf8"))
        outputFile = stack.enter_context(
            open(outputPath, "w", encoding="utf8", newline=""),
        )
        fileID = os.path.basename(mdPath)
        mdUri = getRawGithubURLForPath(mdPath)
        print(f"Including Github raw URL: {mdUri}")
        # FIX: all angle-bracketed XML literals in this function were stripped
        # by text mangling, leaving writes of bare "\n" strings.
        # NOTE(review): reconstructed per XLIFF 2.0 -- verify element/attribute
        # names against the consumer (l10nUtil / Crowdin).
        outputFile.write(
            '<?xml version="1.0" encoding="UTF-8"?>\n'
            '<xliff xmlns="urn:oasis:names:tc:xliff:document:2.0" version="2.0" srcLang="en">\n'
            f'<file id="{xmlEscape(fileID)}" original="{xmlEscape(mdUri)}">\n',
        )
        outputFile.write(f"<skeleton>\n{xmlEscape(skelContent)}\n</skeleton>\n")
        res.numTranslatableStrings = 0
        for lineNo, (mdLine, skelLine) in enumerate(
            zip_longest(
                preprocessMarkdownLines(mdFile.readlines()),
                skelContent.splitlines(keepends=True),
            ),
            start=1,
        ):
            # FIX: zip_longest pads the shorter side with None; fail with a
            # clear error instead of AttributeError on None.rstrip().
            if mdLine is None or skelLine is None:
                raise ValueError(
                    f"Line {lineNo}: markdown and skeleton differ in length",
                )
            mdLine = mdLine.rstrip()
            skelLine = skelLine.rstrip()
            if m := re_translationID.match(skelLine):
                res.numTranslatableStrings += 1
                prefix, ID, suffix = m.groups()
                if prefix and not mdLine.startswith(prefix):
                    raise ValueError(
                        f'Line {lineNo}: does not start with "{prefix}", {mdLine=}, {skelLine=}',
                    )
                if suffix and not mdLine.endswith(suffix):
                    raise ValueError(
                        f'Line {lineNo}: does not end with "{suffix}", {mdLine=}, {skelLine=}',
                    )
                source = mdLine[len(prefix) : len(mdLine) - len(suffix)]
                outputFile.write(
                    f'<unit id="{ID}">\n<notes>\n<note>line: {lineNo + 1}</note>\n',
                )
                if prefix:
                    outputFile.write(
                        f"<note>prefix: {xmlEscape(prefix)}</note>\n",
                    )
                if suffix:
                    outputFile.write(
                        f"<note>suffix: {xmlEscape(suffix)}</note>\n",
                    )
                outputFile.write(
                    "</notes>\n"
                    "<segment>\n"
                    f"<source>{xmlEscape(source)}</source>\n"
                    "</segment>\n"
                    "</unit>\n",  # fmt: skip
                )
            else:
                if mdLine != skelLine:
                    raise ValueError(
                        f"Line {lineNo}: {mdLine=} does not match {skelLine=}",
                    )
        outputFile.write("</file>\n</xliff>\n")
    print(
        f"Generated xliff file with {res.numTranslatableStrings} translatable strings",
    )
    return res


@dataclass
class Result_translateXliff:
    numTranslatedStrings: int = 0


def updateXliff(
    xliffPath: str,
    mdPath: str,
    outputPath: str,
):
    """
    Generate an updated xliff from an existing xliff plus a new markdown file,
    via generateMarkdown, extractSkeleton, updateSkeleton, and generateXliff.
    """
    outputDir = os.path.dirname(outputPath)
    print(
        f"Generating updated xliff file {prettyPathString(outputPath)} from {prettyPathString(xliffPath)} and {prettyPathString(mdPath)}...",
    )
    with contextlib.ExitStack() as stack:
        origMdPath = stack.enter_context(
            createAndDeleteTempFilePath_contextManager(
                dir=outputDir,
                prefix="generated_",
                suffix=".md",
            ),
        )
        # Reconstruct the untranslated markdown the old xliff was built from.
        generateMarkdown(xliffPath=xliffPath, outputPath=origMdPath, translated=False)
        origSkelPath = stack.enter_context(
            createAndDeleteTempFilePath_contextManager(
                dir=outputDir,
                prefix="extracted_",
                suffix=".skel",
            ),
        )
        extractSkeleton(xliffPath=xliffPath, outputPath=origSkelPath)
        updatedSkelPath = stack.enter_context(
            createAndDeleteTempFilePath_contextManager(
                dir=outputDir,
                prefix="updated_",
                suffix=".skel",
            ),
        )
        updateSkeleton(
            origMdPath=origMdPath,
            newMdPath=mdPath,
            origSkelPath=origSkelPath,
            outputPath=updatedSkelPath,
        )
        generateXliff(
            mdPath=mdPath,
            skelPath=updatedSkelPath,
            outputPath=outputPath,
        )
    print(f"Generated updated xliff file {prettyPathString(outputPath)}")


def translateXliff(
    xliffPath: str,
    lang: str,
    pretranslatedMdPath: str,
    outputPath: str,
    allowBadAnchors: bool = False,
) -> Result_translateXliff:
    """
    Fill an English xliff with <target> elements taken from a pre-translated
    markdown file whose structure matches the embedded skeleton line-for-line.
    """
    print(
        f"Creating {lang} translated xliff file {prettyPathString(outputPath)} from {prettyPathString(xliffPath)} using {prettyPathString(pretranslatedMdPath)}...",
    )
    res = Result_translateXliff()
    with contextlib.ExitStack() as stack:
        pretranslatedMdFile = stack.enter_context(
            open(pretranslatedMdPath, "r", encoding="utf8"),
        )
        xliff = lxml.etree.parse(xliffPath)
        xliffRoot = xliff.getroot()
        namespace = {"xliff": "urn:oasis:names:tc:xliff:document:2.0"}
        if xliffRoot.tag != "{urn:oasis:names:tc:xliff:document:2.0}xliff":
            raise ValueError("Not an xliff file")
        xliffRoot.set("trgLang", lang)
        skeletonNode = xliffRoot.find(
            "./xliff:file/xliff:skeleton",
            namespaces=namespace,
        )
        if skeletonNode is None:
            raise ValueError("No skeleton found in xliff file")
        skeletonContent = skeletonNode.text.strip()
        for lineNo, (skelLine, pretranslatedLine) in enumerate(
            zip_longest(
                skeletonContent.splitlines(),
                preprocessMarkdownLines(pretranslatedMdFile.readlines()),
            ),
            start=1,
        ):
            # FIX: explicit length-mismatch guard (zip_longest pads with None).
            if skelLine is None or pretranslatedLine is None:
                raise ValueError(
                    f"Line {lineNo}: skeleton and pretranslated markdown differ in length",
                )
            skelLine = skelLine.rstrip()
            pretranslatedLine = pretranslatedLine.rstrip()
            if m := re_translationID.match(skelLine):
                prefix, ID, suffix = m.groups()
                if prefix and not pretranslatedLine.startswith(prefix):
                    raise ValueError(
                        f'Line {lineNo} of translation does not start with "{prefix}", {pretranslatedLine=}, {skelLine=}',
                    )
                if suffix and not pretranslatedLine.endswith(suffix):
                    # Translators frequently break heading anchors; optionally
                    # accept the translated anchor instead.
                    if allowBadAnchors and (m := re_heading.match(pretranslatedLine)):
                        print(
                            f"Warning: ignoring bad anchor in line {lineNo}: {pretranslatedLine}",
                        )
                        suffix = m.group(3)
                    if suffix and not pretranslatedLine.endswith(suffix):
                        raise ValueError(
                            f'Line {lineNo} of translation: does not end with "{suffix}", {pretranslatedLine=}, {skelLine=}',
                        )
                translation = pretranslatedLine[len(prefix) : len(pretranslatedLine) - len(suffix)]
                try:
                    unit = xliffRoot.find(
                        f'./xliff:file/xliff:unit[@id="{ID}"]',
                        namespaces=namespace,
                    )
                    if unit is not None:
                        segment = unit.find("./xliff:segment", namespaces=namespace)
                        if segment is not None:
                            target = lxml.etree.Element("target")
                            target.text = translation
                            target.tail = "\n"
                            segment.append(target)
                            res.numTranslatedStrings += 1
                        else:
                            raise ValueError(f"No segment found for unit {ID}")
                    else:
                        raise ValueError(f"Cannot locate Unit {ID} in xliff file")
                except Exception as e:
                    e.add_note(f"Line {lineNo}: {pretranslatedLine=}, {skelLine=}")
                    raise
            elif skelLine != pretranslatedLine:
                raise ValueError(
                    f"Line {lineNo}: pretranslated line {pretranslatedLine!r}, does not match skeleton line {skelLine!r}",
                )
        xliff.write(outputPath, encoding="utf8", xml_declaration=True)
    print(
        f"Translated xliff file with {res.numTranslatedStrings} translated strings",
    )
    return res
@dataclass
class Result_generateMarkdown:
    # FIX: these class attributes lacked ": int" annotations, so @dataclass
    # saw no fields at all; annotated so they are real (default-valued) fields.
    numTotalLines: int = 0
    numTranslatableStrings: int = 0
    numTranslatedStrings: int = 0
    numBadTranslationStrings: int = 0


def generateMarkdown(
    xliffPath: str,
    outputPath: str,
    translated: bool = True,
) -> Result_generateMarkdown:
    """
    Rebuild a markdown file from an xliff: walk the embedded skeleton and
    substitute each $(ID:...) placeholder with the unit's target (or source
    when untranslated / translated=False).
    """
    print(
        f"Generating markdown file {prettyPathString(outputPath)} from {prettyPathString(xliffPath)}...",
    )
    res = Result_generateMarkdown()
    with contextlib.ExitStack() as stack:
        outputFile = stack.enter_context(
            open(outputPath, "w", encoding="utf8", newline=""),
        )
        xliff = lxml.etree.parse(xliffPath)
        xliffRoot = xliff.getroot()
        namespace = {"xliff": "urn:oasis:names:tc:xliff:document:2.0"}
        if xliffRoot.tag != "{urn:oasis:names:tc:xliff:document:2.0}xliff":
            raise ValueError("Not an xliff file")
        skeletonNode = xliffRoot.find(
            "./xliff:file/xliff:skeleton",
            namespaces=namespace,
        )
        if skeletonNode is None:
            raise ValueError("No skeleton found in xliff file")
        skeletonContent = skeletonNode.text.strip()
        for lineNum, line in enumerate(skeletonContent.splitlines(keepends=True), 1):
            res.numTotalLines += 1
            if m := re_translationID.match(line):
                prefix, ID, suffix = m.groups()
                res.numTranslatableStrings += 1
                unit = xliffRoot.find(
                    f'./xliff:file/xliff:unit[@id="{ID}"]',
                    namespaces=namespace,
                )
                if unit is None:
                    raise ValueError(f"Cannot locate Unit {ID} in xliff file")
                segment = unit.find("./xliff:segment", namespaces=namespace)
                if segment is None:
                    raise ValueError(f"No segment found for unit {ID}")
                source = segment.find("./xliff:source", namespaces=namespace)
                if source is None:
                    raise ValueError(f"No source found for unit {ID}")
                translation = ""
                if translated:
                    target = segment.find("./xliff:target", namespaces=namespace)
                    if target is not None:
                        targetText = target.text
                        if targetText:
                            translation = targetText
                    # Crowdin treats an empty target (<target/>) as a literal
                    # translation. Filter out such strings and count them as
                    # bad translations.
                    # NOTE(review): the entity-escaped variants below were
                    # emptied by text mangling; reconstructed -- verify.
                    if translation in (
                        "&lt;target/&gt;",
                        "<target/>",
                        "&lt;target&gt;&lt;/target&gt;",
                        "<target></target>",
                    ):
                        res.numBadTranslationStrings += 1
                        translation = ""
                    else:
                        res.numTranslatedStrings += 1
                # If we have no translation, use the source text
                if not translation:
                    sourceText = source.text
                    if sourceText is None:
                        raise ValueError(f"No source text found for unit {ID}")
                    translation = sourceText
                outputFile.write(f"{prefix}{translation}{suffix}\n")
            else:
                outputFile.write(line)
    print(
        f"Generated markdown file with {res.numTotalLines} total lines, {res.numTranslatableStrings} translatable strings, and {res.numTranslatedStrings} translated strings. Ignoring {res.numBadTranslationStrings} bad translated strings",
    )
    return res


def ensureMarkdownFilesMatch(path1: str, path2: str, allowBadAnchors: bool = False):
    """
    Raise ValueError unless the two markdown files are line-for-line identical,
    modulo table-cell padding and (optionally) heading anchors.
    """
    print(
        f"Ensuring files {prettyPathString(path1)} and {prettyPathString(path2)} match...",
    )
    with contextlib.ExitStack() as stack:
        file1 = stack.enter_context(open(path1, "r", encoding="utf8"))
        file2 = stack.enter_context(open(path2, "r", encoding="utf8"))
        for lineNo, (line1, line2) in enumerate(
            zip_longest(
                preprocessMarkdownLines(file1.readlines()),
                preprocessMarkdownLines(file2.readlines()),
            ),
            start=1,
        ):
            # FIX: zip_longest pads with None when one file is longer; report a
            # mismatch rather than crashing on None.rstrip().
            if line1 is None or line2 is None:
                raise ValueError(
                    f"Files do not match at line {lineNo}: one file has no line here",
                )
            line1 = line1.rstrip()
            line2 = line2.rstrip()
            if line1 != line2:
                if (
                    re_postTableHeaderLine.match(line1)
                    and re_postTableHeaderLine.match(line2)
                    and line1.count("|") == line2.count("|")
                ):
                    print(
                        f"Warning: ignoring cell padding of post table header line at line {lineNo}: {line1}, {line2}",
                    )
                    continue
                if (
                    re_hiddenHeaderRow.match(line1)
                    and re_hiddenHeaderRow.match(line2)
                    and line1.count("|") == line2.count("|")
                ):
                    print(
                        f"Warning: ignoring cell padding of hidden header row at line {lineNo}: {line1}, {line2}",
                    )
                    continue
                if allowBadAnchors and (m1 := re_heading.match(line1)) and (m2 := re_heading.match(line2)):
                    print(
                        f"Warning: ignoring bad anchor in headings at line {lineNo}: {line1}, {line2}",
                    )
                    # Compare headings without their {#anchor} suffixes.
                    line1 = m1.group(1) + m1.group(2)
                    line2 = m2.group(1) + m2.group(2)
                if line1 != line2:
                    raise ValueError(
                        f"Files do not match at line {lineNo}: {line1=} {line2=}",
                    )
    print("Files match")


def markdownTranslateCommand(command: str, *args):
    """Re-invoke this script as a subprocess with the given command and args."""
    print(f"Running markdownTranslate command: {command} {' '.join(args)}")
    # NOTE(review): relies on "python" being on PATH; sys.executable would be
    # more robust, but sys is not imported at module level -- confirm intent.
    subprocess.run(["python", __file__, command, *args], check=True)


def pretranslateAllPossibleLanguages(langsDir: str, mdBaseName: str):
    """
    Walk all language directories under *langsDir* (skipping 'en') and build a
    translated xliff for each language from the English xliff plus that
    language's pretranslated markdown, verifying each result round-trips.
    """
    enXliffPath = os.path.join(langsDir, "en", f"{mdBaseName}.xliff")
    if not os.path.exists(enXliffPath):
        raise ValueError(f"English xliff file {enXliffPath} does not exist")
    allLangs = set()
    succeededLangs = set()
    skippedLangs = set()
    for langDir in os.listdir(langsDir):
        if langDir == "en":
            continue
        langDirPath = os.path.join(langsDir, langDir)
        if not os.path.isdir(langDirPath):
            continue
        langPretranslatedMdPath = os.path.join(langDirPath, f"{mdBaseName}.md")
        if not os.path.exists(langPretranslatedMdPath):
            continue
        allLangs.add(langDir)
        langXliffPath = os.path.join(langDirPath, f"{mdBaseName}.xliff")
        if os.path.exists(langXliffPath):
            print(f"Skipping {langDir} as the xliff file already exists")
            skippedLangs.add(langDir)
            continue
        try:
            translateXliff(
                xliffPath=enXliffPath,
                lang=langDir,
                pretranslatedMdPath=langPretranslatedMdPath,
                outputPath=langXliffPath,
                allowBadAnchors=True,
            )
        except Exception as e:
            print(f"Failed to translate {langDir}: {e}")
            continue
        # Round-trip check: rebuild markdown from the new xliff and make sure
        # it matches the pretranslated markdown; discard the xliff otherwise.
        rebuiltLangMdPath = os.path.join(langDirPath, f"rebuilt_{mdBaseName}.md")
        try:
            generateMarkdown(
                xliffPath=langXliffPath,
                outputPath=rebuiltLangMdPath,
            )
        except Exception as e:
            print(f"Failed to rebuild {langDir} markdown: {e}")
            os.remove(langXliffPath)
            continue
        try:
            ensureMarkdownFilesMatch(
                rebuiltLangMdPath,
                langPretranslatedMdPath,
                allowBadAnchors=True,
            )
        except Exception as e:
            print(
                f"Rebuilt {langDir} markdown does not match pretranslated markdown: {e}",
            )
            os.remove(langXliffPath)
            continue
        os.remove(rebuiltLangMdPath)
        print(f"Successfully pretranslated {langDir}")
        succeededLangs.add(langDir)
    if len(skippedLangs) > 0:
        print(f"Skipped {len(skippedLangs)} languages already pretranslated.")
    print(
        f"Pretranslated {len(succeededLangs)} out of {len(allLangs) - len(skippedLangs)} languages.",
    )
Exception as e: + print(f"Failed to rebuild {langDir} markdown: {e}") + os.remove(langXliffPath) + continue + try: + ensureMarkdownFilesMatch( + rebuiltLangMdPath, + langPretranslatedMdPath, + allowBadAnchors=True, + ) + except Exception as e: + print( + f"Rebuilt {langDir} markdown does not match pretranslated markdown: {e}", + ) + os.remove(langXliffPath) + continue + os.remove(rebuiltLangMdPath) + print(f"Successfully pretranslated {langDir}") + succeededLangs.add(langDir) + if len(skippedLangs) > 0: + print(f"Skipped {len(skippedLangs)} languages already pretranslated.") + print( + f"Pretranslated {len(succeededLangs)} out of {len(allLangs) - len(skippedLangs)} languages.", + ) + + +if __name__ == "__main__": + mainParser = argparse.ArgumentParser() + commandParser = mainParser.add_subparsers( + title="commands", + dest="command", + required=True, + ) + generateXliffParser = commandParser.add_parser("generateXliff") + generateXliffParser.add_argument( + "-m", + "--markdown", + dest="md", + type=str, + required=True, + help="The markdown file to generate the xliff file for", + ) + generateXliffParser.add_argument( + "-o", + "--output", + dest="output", + type=str, + required=True, + help="The file to output the xliff file to", + ) + updateXliffParser = commandParser.add_parser("updateXliff") + updateXliffParser.add_argument( + "-x", + "--xliff", + dest="xliff", + type=str, + required=True, + help="The original xliff file", + ) + updateXliffParser.add_argument( + "-m", + "--newMarkdown", + dest="md", + type=str, + required=True, + help="The new markdown file", + ) + updateXliffParser.add_argument( + "-o", + "--output", + dest="output", + type=str, + required=True, + help="The file to output the updated xliff to", + ) + translateXliffParser = commandParser.add_parser("translateXliff") + translateXliffParser.add_argument( + "-x", + "--xliff", + dest="xliff", + type=str, + required=True, + help="The xliff file to translate", + ) + translateXliffParser.add_argument( 
+ "-l", + "--lang", + dest="lang", + type=str, + required=True, + help="The language to translate to", + ) + translateXliffParser.add_argument( + "-p", + "--pretranslatedMarkdown", + dest="pretranslatedMd", + type=str, + required=True, + help="The pretranslated markdown file to use", + ) + translateXliffParser.add_argument( + "-o", + "--output", + dest="output", + type=str, + required=True, + help="The file to output the translated xliff file to", + ) + generateMarkdownParser = commandParser.add_parser("generateMarkdown") + generateMarkdownParser.add_argument( + "-x", + "--xliff", + dest="xliff", + type=str, + required=True, + help="The xliff file to generate the markdown file for", + ) + generateMarkdownParser.add_argument( + "-o", + "--output", + dest="output", + type=str, + required=True, + help="The file to output the markdown file to", + ) + generateMarkdownParser.add_argument( + "-u", + "--untranslated", + dest="translated", + action="store_false", + help="Generate the markdown file with the untranslated strings", + ) + ensureMarkdownFilesMatchParser = commandParser.add_parser( + "ensureMarkdownFilesMatch", + ) + ensureMarkdownFilesMatchParser.add_argument( + dest="path1", + type=str, + help="The first markdown file", + ) + ensureMarkdownFilesMatchParser.add_argument( + dest="path2", + type=str, + help="The second markdown file", + ) + pretranslateLangsParser = commandParser.add_parser("pretranslateLangs") + pretranslateLangsParser.add_argument( + "-d", + "--langs-dir", + dest="langsDir", + type=str, + required=True, + help="The directory containing the language directories", + ) + pretranslateLangsParser.add_argument( + "-b", + "--md-base-name", + dest="mdBaseName", + type=str, + required=True, + help="The base name of the markdown files to pretranslate", + ) + args = mainParser.parse_args() + match args.command: + case "generateXliff": + generateXliff(mdPath=args.md, outputPath=args.output) + case "updateXliff": + updateXliff( + xliffPath=args.xliff, + 
mdPath=args.md, + outputPath=args.output, + ) + case "generateMarkdown": + generateMarkdown( + xliffPath=args.xliff, + outputPath=args.output, + translated=args.translated, + ) + case "translateXliff": + translateXliff( + xliffPath=args.xliff, + lang=args.lang, + pretranslatedMdPath=args.pretranslatedMd, + outputPath=args.output, + ) + case "pretranslateLangs": + pretranslateAllPossibleLanguages( + langsDir=args.langsDir, + mdBaseName=args.mdBaseName, + ) + case "ensureMarkdownFilesMatch": + ensureMarkdownFilesMatch(path1=args.path1, path2=args.path2) + case _: + raise ValueError(f"Unknown command: {args.command}") diff --git a/.github/scripts/setOutputs.py b/.github/scripts/setOutputs.py new file mode 100644 index 0000000..a5d9161 --- /dev/null +++ b/.github/scripts/setOutputs.py @@ -0,0 +1,21 @@ +# Copyright (C) 2025 NV Access Limited, Noelia Ruiz Martínez +# This file is covered by the GNU General Public License. +# See the file COPYING for more details. + +import os +import sys + +sys.path.insert(0, os.getcwd()) +import buildVars + + +def main(): + addonId = buildVars.addon_info["addon_name"] + name = "addonId" + value = addonId + with open(os.environ["GITHUB_OUTPUT"], "a") as f: + _ = f.write(f"{name}={value}\n") + + +if __name__ == "__main__": + main() diff --git a/.github/workflows/build_addon.yml b/.github/workflows/build_addon.yml index bbd563f..59f9ee1 100644 --- a/.github/workflows/build_addon.yml +++ b/.github/workflows/build_addon.yml @@ -10,6 +10,7 @@ on: branches: [ main, master ] workflow_dispatch: + workflow_call: jobs: build: diff --git a/.github/workflows/crowdinL10n.yml b/.github/workflows/crowdinL10n.yml new file mode 100644 index 0000000..8ec5efc --- /dev/null +++ b/.github/workflows/crowdinL10n.yml @@ -0,0 +1,55 @@ +name: Crowdin l10n + +on: + workflow_dispatch: + schedule: + # Every Monday at 00:00 UTC + - cron: '0 0 * * 1' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + 
crowdinAuthToken: ${{ secrets.CROWDIN_TOKEN }} + downloadTranslationsBranch: l10n + GH_TOKEN: ${{ github.token }} + CROWDIN_PROJECT_ID: 780748 + +jobs: + crowdinSync: + runs-on: windows-latest + permissions: + contents: write + + steps: + - name: Checkout add-on + uses: actions/checkout@v6 + with: + submodules: true + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version-file: ".python-version" + - name: Install uv + uses: astral-sh/setup-uv@v7 + - name: Install dependencies + run: uv sync + - name: Install gettext + run: | + choco install -y gettext + # Add gettext to PATH for current and future steps + $gettextPath = "C:\Program Files\gettext-iconv\bin" + echo "$gettextPath" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append + - name: Get add-on info + id: getAddonInfo + shell: pwsh + run: uv run ./.github/scripts/setOutputs.py + - name: Download l10nUtil + run: | + gh release download --repo nvaccess/nvdaL10n --pattern "l10nUtil.exe" + - name: Download translations from Crowdin + shell: pwsh + env: + ADDON_ID: ${{ steps.getAddonInfo.outputs.addonId }} + run: ./.github/scripts/crowdinSync.ps1 diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..24ee5b1 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.13 diff --git a/buildVars.py b/buildVars.py index 0adb36b..fde02e7 100644 --- a/buildVars.py +++ b/buildVars.py @@ -10,7 +10,6 @@ # which returns whatever is given to it as an argument. from site_scons.site_tools.NVDATool.utils import _ - # Add-on information variables addon_info = AddonInfo( # add-on Name/identifier, internal for NVDA diff --git a/pyproject.toml b/pyproject.toml index 19f3359..9fe5dd8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -105,6 +105,8 @@ exclude = [ # When excluding concrete paths relative to a directory, # not matching multiple folders by name e.g. `__pycache__`, # paths are relative to the configuration file. 
+ ".github/scripts/markdownTranslate.py", + ".github/scripts/checkTranslation.py", ] # Tell pyright where to load python code from diff --git a/readme.md b/readme.md index df61be9..3be0986 100644 --- a/readme.md +++ b/readme.md @@ -174,6 +174,30 @@ If not, leave the dictionary empty. * channel: update channel (do not use this switch unless you know what you are doing). * dev: suitable for development builds, names the add-on according to current date (yyyymmdd) and sets update channel to "dev". + +### Translation workflow + +This template allows you to automate the synchronization of documentation and interface messages with Crowdin. + +#### 1. Crowdin Project Setup +You need a Crowdin account and an API token with permissions to manage a project. +If you wish to use the community project [Crowdin project to translate NVDA add-ons](https://crowdin.com/project/nvdaaddons): +* **Request Access:** Send a message to the [NVDA translation mailing list](https://groups.io/g/nvda-translations) (**nvda-translations@groups.io**) requesting an invitation to join the project as a developer. +* **API Token:** Once invited, generate an API token in your Crowdin account settings. + +#### 2. GitHub Secrets +To allow the workflows to communicate with Crowdin, you must add the following secrets to your GitHub repository (`Settings > Secrets and variables > Actions`): +* `crowdinAuthToken`: Paste your Crowdin API token here. + +#### 3. Infrastructure +Ensure that your repository includes the following files (provided in this template): +* **Workflows:** `.github/workflows/crowdinL10n.yml** +* **Scripts:** The `.github/scripts/` folder containing `checkTranslation.py`, `langCodes.py`, `languageMappings.json`, `setOutputs.py`, and `crowdinSync.ps1`. + +#### 4. Running the Workflow + +The translation workflow will be run weekly. Also, you can run the workflow manually from GitHub or using GitHub CLI. 
+ ### Additional tools The template includes configuration files for use with additional tools such as linters. These include: diff --git a/uv.lock b/uv.lock index 81dbe96..1969d46 100644 --- a/uv.lock +++ b/uv.lock @@ -40,11 +40,11 @@ requires-dist = [ [[package]] name = "certifi" -version = "2025.11.12" +version = "2026.4.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/ee/6caf7a40c36a1220410afe15a1cc64993a1f864871f698c0f93acb72842a/certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580", size = 137077, upload-time = "2026-04-22T11:26:11.191Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, + { url = "https://files.pythonhosted.org/packages/22/30/7cd8fdcdfbc5b869528b079bfb76dcdf6056b1a2097a662e5e8c04f42965/certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a", size = 135707, upload-time = "2026-04-22T11:26:09.372Z" }, ] [[package]] @@ -58,27 +58,27 @@ wheels = [ [[package]] name = "charset-normalizer" -version = "3.4.4" +version = "3.4.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, 
upload-time = "2025-10-14T04:42:32.879Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, - { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, - { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, - { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, - { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, - { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, - { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, - { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, 
upload-time = "2025-10-14T04:41:23.754Z" }, - { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, - { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, - { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, - { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, - { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, - { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, + { url = "https://files.pythonhosted.org/packages/c1/3b/66777e39d3ae1ddc77ee606be4ec6d8cbd4c801f65e5a1b6f2b11b8346dd/charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063", size = 309627, upload-time = "2026-04-02T09:26:45.198Z" }, + { url = "https://files.pythonhosted.org/packages/2e/4e/b7f84e617b4854ade48a1b7915c8ccfadeba444d2a18c291f696e37f0d3b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c", size = 207008, upload-time = "2026-04-02T09:26:46.824Z" }, + { url = "https://files.pythonhosted.org/packages/c4/bb/ec73c0257c9e11b268f018f068f5d00aa0ef8c8b09f7753ebd5f2880e248/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66", size = 228303, upload-time = "2026-04-02T09:26:48.397Z" }, + { url = "https://files.pythonhosted.org/packages/85/fb/32d1f5033484494619f701e719429c69b766bfc4dbc61aa9e9c8c166528b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18", size = 224282, upload-time = "2026-04-02T09:26:49.684Z" }, + { url = "https://files.pythonhosted.org/packages/fa/07/330e3a0dda4c404d6da83b327270906e9654a24f6c546dc886a0eb0ffb23/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd", size = 215595, upload-time = "2026-04-02T09:26:50.915Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7c/fc890655786e423f02556e0216d4b8c6bcb6bdfa890160dc66bf52dee468/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215", size = 201986, upload-time = "2026-04-02T09:26:52.197Z" }, + { url = "https://files.pythonhosted.org/packages/d8/97/bfb18b3db2aed3b90cf54dc292ad79fdd5ad65c4eae454099475cbeadd0d/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859", size = 211711, upload-time = "2026-04-02T09:26:53.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a5/a581c13798546a7fd557c82614a5c65a13df2157e9ad6373166d2a3e645d/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8", size = 210036, upload-time = "2026-04-02T09:26:54.975Z" }, + { url = "https://files.pythonhosted.org/packages/8c/bf/b3ab5bcb478e4193d517644b0fb2bf5497fbceeaa7a1bc0f4d5b50953861/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5", size = 202998, upload-time = "2026-04-02T09:26:56.303Z" }, + { url = "https://files.pythonhosted.org/packages/e7/4e/23efd79b65d314fa320ec6017b4b5834d5c12a58ba4610aa353af2e2f577/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832", size = 230056, upload-time = "2026-04-02T09:26:57.554Z" }, + { url = "https://files.pythonhosted.org/packages/b9/9f/1e1941bc3f0e01df116e68dc37a55c4d249df5e6fa77f008841aef68264f/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = 
"sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6", size = 211537, upload-time = "2026-04-02T09:26:58.843Z" }, + { url = "https://files.pythonhosted.org/packages/80/0f/088cbb3020d44428964a6c97fe1edfb1b9550396bf6d278330281e8b709c/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48", size = 226176, upload-time = "2026-04-02T09:27:00.437Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9f/130394f9bbe06f4f63e22641d32fc9b202b7e251c9aef4db044324dac493/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a", size = 217723, upload-time = "2026-04-02T09:27:02.021Z" }, + { url = "https://files.pythonhosted.org/packages/73/55/c469897448a06e49f8fa03f6caae97074fde823f432a98f979cc42b90e69/charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e", size = 148085, upload-time = "2026-04-02T09:27:03.192Z" }, + { url = "https://files.pythonhosted.org/packages/5d/78/1b74c5bbb3f99b77a1715c91b3e0b5bdb6fe302d95ace4f5b1bec37b0167/charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110", size = 158819, upload-time = "2026-04-02T09:27:04.454Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/46bd42279d323deb8687c4a5a811fd548cb7d1de10cf6535d099877a9a9f/charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b", size = 147915, upload-time = "2026-04-02T09:27:05.971Z" }, + { url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = 
"2026-04-02T09:28:37.794Z" }, ] [[package]] @@ -117,29 +117,29 @@ wheels = [ [[package]] name = "filelock" -version = "3.20.3" +version = "3.29.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/fe/997687a931ab51049acce6fa1f23e8f01216374ea81374ddee763c493db5/filelock-3.29.0.tar.gz", hash = "sha256:69974355e960702e789734cb4871f884ea6fe50bd8404051a3530bc07809cf90", size = 57571, upload-time = "2026-04-19T15:39:10.068Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, + { url = "https://files.pythonhosted.org/packages/81/47/dd9a212ef6e343a6857485ffe25bba537304f1913bdbed446a23f7f592e1/filelock-3.29.0-py3-none-any.whl", hash = "sha256:96f5f6344709aa1572bbf631c640e4ebeeb519e08da902c39a001882f30ac258", size = 39812, upload-time = "2026-04-19T15:39:08.752Z" }, ] [[package]] name = "identify" -version = "2.6.15" +version = "2.6.19" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/63/51723b5f116cc04b061cb6f5a561790abf249d25931d515cd375e063e0f4/identify-2.6.19.tar.gz", hash = 
"sha256:6be5020c38fcb07da56c53733538a3081ea5aa70d36a156f83044bfbf9173842", size = 99567, upload-time = "2026-04-17T18:39:50.265Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" }, + { url = "https://files.pythonhosted.org/packages/94/84/d9273cd09688070a6523c4aee4663a8538721b2b755c4962aafae0011e72/identify-2.6.19-py2.py3-none-any.whl", hash = "sha256:20e6a87f786f768c092a721ad107fc9df0eb89347be9396cadf3f4abbd1fb78a", size = 99397, upload-time = "2026-04-17T18:39:49.221Z" }, ] [[package]] name = "idna" -version = "3.11" +version = "3.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/cc/762dfb036166873f0059f3b7de4565e1b5bc3d6f28a414c13da27e442f99/idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242", size = 194210, upload-time = "2026-04-22T16:42:42.314Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, + { url = "https://files.pythonhosted.org/packages/5d/13/ad7d7ca3808a898b4612b6fe93cde56b53f3034dcde235acb1f0e1df24c6/idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3", size = 68629, upload-time = 
"2026-04-22T16:42:40.909Z" }, ] [[package]] @@ -247,27 +247,27 @@ wheels = [ [[package]] name = "nodejs-wheel-binaries" -version = "24.12.0" +version = "24.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b9/35/d806c2ca66072e36dc340ccdbeb2af7e4f1b5bcc33f1481f00ceed476708/nodejs_wheel_binaries-24.12.0.tar.gz", hash = "sha256:f1b50aa25375e264697dec04b232474906b997c2630c8f499f4caf3692938435", size = 8058, upload-time = "2025-12-11T21:12:26.856Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/70/a1e4f4d5986768ab90cc860b1cc3660fd2ded74ca175a900a5c29f839c7d/nodejs_wheel_binaries-24.15.0.tar.gz", hash = "sha256:b43f5c4f6e5768d8845b2ae4682eb703a19bf7aadc84187e2d903ed3a611c859", size = 8057, upload-time = "2026-04-19T15:48:16.899Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/3b/9d6f044319cd5b1e98f07c41e2465b58cadc1c9c04a74c891578f3be6cb5/nodejs_wheel_binaries-24.12.0-py2.py3-none-macosx_13_0_arm64.whl", hash = "sha256:7564ddea0a87eff34e9b3ef71764cc2a476a8f09a5cccfddc4691148b0a47338", size = 55125859, upload-time = "2025-12-11T21:11:58.132Z" }, - { url = "https://files.pythonhosted.org/packages/48/a5/f5722bf15c014e2f476d7c76bce3d55c341d19122d8a5d86454db32a61a4/nodejs_wheel_binaries-24.12.0-py2.py3-none-macosx_13_0_x86_64.whl", hash = "sha256:8ff929c4669e64613ceb07f5bbd758d528c3563820c75d5de3249eb452c0c0ab", size = 55309035, upload-time = "2025-12-11T21:12:01.754Z" }, - { url = "https://files.pythonhosted.org/packages/a9/61/68d39a6f1b5df67805969fd2829ba7e80696c9af19537856ec912050a2be/nodejs_wheel_binaries-24.12.0-py2.py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:6ebacefa8891bc456ad3655e6bce0af7e20ba08662f79d9109986faeb703fd6f", size = 59661017, upload-time = "2025-12-11T21:12:05.268Z" }, - { url = 
"https://files.pythonhosted.org/packages/16/a1/31aad16f55a5e44ca7ea62d1367fc69f4b6e1dba67f58a0a41d0ed854540/nodejs_wheel_binaries-24.12.0-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:3292649a03682ccbfa47f7b04d3e4240e8c46ef04dc941b708f20e4e6a764f75", size = 60159770, upload-time = "2025-12-11T21:12:08.696Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5e/b7c569aa1862690ca4d4daf3a64cafa1ea6ce667a9e3ae3918c56e127d9b/nodejs_wheel_binaries-24.12.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7fb83df312955ea355ba7f8cbd7055c477249a131d3cb43b60e4aeb8f8c730b1", size = 61653561, upload-time = "2025-12-11T21:12:12.575Z" }, - { url = "https://files.pythonhosted.org/packages/71/87/567f58d7ba69ff0208be849b37be0f2c2e99c69e49334edd45ff44f00043/nodejs_wheel_binaries-24.12.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2473c819448fedd7b036dde236b09f3c8bbf39fbbd0c1068790a0498800f498b", size = 62238331, upload-time = "2025-12-11T21:12:16.143Z" }, - { url = "https://files.pythonhosted.org/packages/6a/9d/c6492188ce8de90093c6755a4a63bb6b2b4efb17094cb4f9a9a49c73ed3b/nodejs_wheel_binaries-24.12.0-py2.py3-none-win_amd64.whl", hash = "sha256:2090d59f75a68079fabc9b86b14df8238b9aecb9577966dc142ce2a23a32e9bb", size = 41342076, upload-time = "2025-12-11T21:12:20.618Z" }, - { url = "https://files.pythonhosted.org/packages/df/af/cd3290a647df567645353feed451ef4feaf5844496ced69c4dcb84295ff4/nodejs_wheel_binaries-24.12.0-py2.py3-none-win_arm64.whl", hash = "sha256:d0c2273b667dd7e3f55e369c0085957b702144b1b04bfceb7ce2411e58333757", size = 39048104, upload-time = "2025-12-11T21:12:23.495Z" }, + { url = "https://files.pythonhosted.org/packages/85/66/54051d14853d6ab4fb85f8be9b042b530be653357fb9a19557498bc91ab7/nodejs_wheel_binaries-24.15.0-py2.py3-none-macosx_13_0_arm64.whl", hash = "sha256:a6232fa8b754220941f52388c8ead923f7c1c7fdf0ea0d98f657523bd9a81ef4", size = 55173485, upload-time = "2026-04-19T15:47:34.561Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/5f/66acada164da5ca10a0824db021aa7394ae18396c550cd9280e839a43126/nodejs_wheel_binaries-24.15.0-py2.py3-none-macosx_13_0_x86_64.whl", hash = "sha256:001a6b62c69d9109c1738163cca00608dd2722e8663af59300054ea02610972d", size = 55348100, upload-time = "2026-04-19T15:47:40.521Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2d/0cbd5ff40c9bb030ca1735d8f8793bd74f08a4cbd49100a1d19313ea57ab/nodejs_wheel_binaries-24.15.0-py2.py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:0fbc48765e60ed0ff30d43898dbf5cadbadf2e5f1e7f204afc2b01493b7ebce6", size = 59668206, upload-time = "2026-04-19T15:47:46.848Z" }, + { url = "https://files.pythonhosted.org/packages/da/d5/91ac63951ec75927a486b83b8cafe650e360fa70ac01dc94adfb32b93b97/nodejs_wheel_binaries-24.15.0-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:20ee0536809795da8a4942fc1ab4cbdebbcaaf29383eab67ba8874268fb00008", size = 60206736, upload-time = "2026-04-19T15:47:52.668Z" }, + { url = "https://files.pythonhosted.org/packages/db/72/dc22776974d928869c0c30d23ee98ed7df254243c2df68f09f5963e8e8b8/nodejs_wheel_binaries-24.15.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1fade6c214285e72472ca40a631e98ff36559671cd5eefc8bf009471d67f04b4", size = 61720456, upload-time = "2026-04-19T15:47:58.325Z" }, + { url = "https://files.pythonhosted.org/packages/01/0a/34461b9050cb45ee371dccdefc622aef6351506ea2691b08fc761ca67150/nodejs_wheel_binaries-24.15.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3984cb8d87766567aee67a49743227ab40ede6f47734ec990ff90e50b74e7740", size = 62326172, upload-time = "2026-04-19T15:48:04.094Z" }, + { url = "https://files.pythonhosted.org/packages/c9/17/09252bf35672dba926649d59dfe51443a0f6955ad13784e91131d5ec82a2/nodejs_wheel_binaries-24.15.0-py2.py3-none-win_amd64.whl", hash = "sha256:a437601956b532dcb3082046e6978e622733f90edc0932cbb9adb3bb97a16501", size = 41543461, upload-time = "2026-04-19T15:48:09.332Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/7e/b649777d148e1e0c2ce349156603cdb12f7ed99921b95d93717393650193/nodejs_wheel_binaries-24.15.0-py2.py3-none-win_arm64.whl", hash = "sha256:bdf4a431e08321a32efc604111c6f23941f87055d796a537e8c4110daecad23f", size = 39233248, upload-time = "2026-04-19T15:48:13.326Z" }, ] [[package]] name = "platformdirs" -version = "4.5.1" +version = "4.9.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9f/4a/0883b8e3802965322523f0b200ecf33d31f10991d0401162f4b23c698b42/platformdirs-4.9.6.tar.gz", hash = "sha256:3bfa75b0ad0db84096ae777218481852c0ebc6c727b3168c1b9e0118e458cf0a", size = 29400, upload-time = "2026-04-09T00:04:10.812Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, + { url = "https://files.pythonhosted.org/packages/75/a6/a0a304dc33b49145b21f4808d763822111e67d1c3a32b524a1baf947b6e1/platformdirs-4.9.6-py3-none-any.whl", hash = "sha256:e61adb1d5e5cb3441b4b7710bea7e4c12250ca49439228cc1021c00dcfac0917", size = 21348, upload-time = "2026-04-09T00:04:09.463Z" }, ] [[package]] @@ -304,6 +304,19 @@ nodejs = [ { name = "nodejs-wheel-binaries" }, ] +[[package]] +name = "python-discovery" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/de/ef/3bae0e537cfe91e8431efcba4434463d2c5a65f5a89edd47c6cf2f03c55f/python_discovery-1.2.2.tar.gz", hash = "sha256:876e9c57139eb757cb5878cbdd9ae5379e5d96266c99ef731119e04fffe533bb", size = 58872, upload-time = "2026-04-07T17:28:49.249Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/db/795879cc3ddfe338599bddea6388cc5100b088db0a4caf6e6c1af1c27e04/python_discovery-1.2.2-py3-none-any.whl", hash = "sha256:e1ae95d9af875e78f15e19aed0c6137ab1bb49c200f21f5061786490c9585c7a", size = 31894, upload-time = "2026-04-07T17:28:48.09Z" }, +] + [[package]] name = "pyyaml" version = "6.0.3" @@ -418,47 +431,46 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.36.1" +version = "21.2.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, + { name = "python-discovery" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/98/3a7e644e19cb26133488caff231be390579860bbbb3da35913c49a1d0a46/virtualenv-21.2.4.tar.gz", hash = "sha256:b294ef68192638004d72524ce7ef303e9d0cf5a44c95ce2e54a7500a6381cada", size = 5850742, upload-time = "2026-04-14T22:15:31.438Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, + { url = "https://files.pythonhosted.org/packages/27/8d/edd0bd910ff803c308ee9a6b7778621af0d10252219ad9f19ef4d4982a61/virtualenv-21.2.4-py3-none-any.whl", hash = 
"sha256:29d21e941795206138d0f22f4e45ff7050e5da6c6472299fb7103318763861ac", size = 5831232, upload-time = "2026-04-14T22:15:29.342Z" }, ] [[package]] name = "wrapt" -version = "2.0.1" +version = "2.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/2a/6de8a50cb435b7f42c46126cf1a54b2aab81784e74c8595c8e025e8f36d3/wrapt-2.0.1.tar.gz", hash = "sha256:9c9c635e78497cacb81e84f8b11b23e0aacac7a136e73b8e5b2109a1d9fc468f", size = 82040, upload-time = "2025-11-07T00:45:33.312Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/64/925f213fdcbb9baeb1530449ac71a4d57fc361c053d06bf78d0c5c7cd80c/wrapt-2.1.2.tar.gz", hash = "sha256:3996a67eecc2c68fd47b4e3c564405a5777367adfd9b8abb58387b63ee83b21e", size = 81678, upload-time = "2026-03-06T02:53:25.134Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/fe/41af4c46b5e498c90fc87981ab2972fbd9f0bccda597adb99d3d3441b94b/wrapt-2.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:47b0f8bafe90f7736151f61482c583c86b0693d80f075a58701dd1549b0010a9", size = 78132, upload-time = "2025-11-07T00:44:04.628Z" }, - { url = "https://files.pythonhosted.org/packages/1c/92/d68895a984a5ebbbfb175512b0c0aad872354a4a2484fbd5552e9f275316/wrapt-2.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cbeb0971e13b4bd81d34169ed57a6dda017328d1a22b62fda45e1d21dd06148f", size = 61211, upload-time = "2025-11-07T00:44:05.626Z" }, - { url = "https://files.pythonhosted.org/packages/e8/26/ba83dc5ae7cf5aa2b02364a3d9cf74374b86169906a1f3ade9a2d03cf21c/wrapt-2.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb7cffe572ad0a141a7886a1d2efa5bef0bf7fe021deeea76b3ab334d2c38218", size = 61689, upload-time = "2025-11-07T00:44:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/cf/67/d7a7c276d874e5d26738c22444d466a3a64ed541f6ef35f740dbd865bab4/wrapt-2.0.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:c8d60527d1ecfc131426b10d93ab5d53e08a09c5fa0175f6b21b3252080c70a9", size = 121502, upload-time = "2025-11-07T00:44:09.557Z" }, - { url = "https://files.pythonhosted.org/packages/0f/6b/806dbf6dd9579556aab22fc92908a876636e250f063f71548a8660382184/wrapt-2.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c654eafb01afac55246053d67a4b9a984a3567c3808bb7df2f8de1c1caba2e1c", size = 123110, upload-time = "2025-11-07T00:44:10.64Z" }, - { url = "https://files.pythonhosted.org/packages/e5/08/cdbb965fbe4c02c5233d185d070cabed2ecc1f1e47662854f95d77613f57/wrapt-2.0.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:98d873ed6c8b4ee2418f7afce666751854d6d03e3c0ec2a399bb039cd2ae89db", size = 117434, upload-time = "2025-11-07T00:44:08.138Z" }, - { url = "https://files.pythonhosted.org/packages/2d/d1/6aae2ce39db4cb5216302fa2e9577ad74424dfbe315bd6669725569e048c/wrapt-2.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9e850f5b7fc67af856ff054c71690d54fa940c3ef74209ad9f935b4f66a0233", size = 121533, upload-time = "2025-11-07T00:44:12.142Z" }, - { url = "https://files.pythonhosted.org/packages/79/35/565abf57559fbe0a9155c29879ff43ce8bd28d2ca61033a3a3dd67b70794/wrapt-2.0.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e505629359cb5f751e16e30cf3f91a1d3ddb4552480c205947da415d597f7ac2", size = 116324, upload-time = "2025-11-07T00:44:13.28Z" }, - { url = "https://files.pythonhosted.org/packages/e1/e0/53ff5e76587822ee33e560ad55876d858e384158272cd9947abdd4ad42ca/wrapt-2.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2879af909312d0baf35f08edeea918ee3af7ab57c37fe47cb6a373c9f2749c7b", size = 120627, upload-time = "2025-11-07T00:44:14.431Z" }, - { url = "https://files.pythonhosted.org/packages/7c/7b/38df30fd629fbd7612c407643c63e80e1c60bcc982e30ceeae163a9800e7/wrapt-2.0.1-cp313-cp313-win32.whl", hash = "sha256:d67956c676be5a24102c7407a71f4126d30de2a569a1c7871c9f3cabc94225d7", size = 
58252, upload-time = "2025-11-07T00:44:17.814Z" }, - { url = "https://files.pythonhosted.org/packages/85/64/d3954e836ea67c4d3ad5285e5c8fd9d362fd0a189a2db622df457b0f4f6a/wrapt-2.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9ca66b38dd642bf90c59b6738af8070747b610115a39af2498535f62b5cdc1c3", size = 60500, upload-time = "2025-11-07T00:44:15.561Z" }, - { url = "https://files.pythonhosted.org/packages/89/4e/3c8b99ac93527cfab7f116089db120fef16aac96e5f6cdb724ddf286086d/wrapt-2.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:5a4939eae35db6b6cec8e7aa0e833dcca0acad8231672c26c2a9ab7a0f8ac9c8", size = 58993, upload-time = "2025-11-07T00:44:16.65Z" }, - { url = "https://files.pythonhosted.org/packages/f9/f4/eff2b7d711cae20d220780b9300faa05558660afb93f2ff5db61fe725b9a/wrapt-2.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a52f93d95c8d38fed0669da2ebdb0b0376e895d84596a976c15a9eb45e3eccb3", size = 82028, upload-time = "2025-11-07T00:44:18.944Z" }, - { url = "https://files.pythonhosted.org/packages/0c/67/cb945563f66fd0f61a999339460d950f4735c69f18f0a87ca586319b1778/wrapt-2.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e54bbf554ee29fcceee24fa41c4d091398b911da6e7f5d7bffda963c9aed2e1", size = 62949, upload-time = "2025-11-07T00:44:20.074Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ca/f63e177f0bbe1e5cf5e8d9b74a286537cd709724384ff20860f8f6065904/wrapt-2.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:908f8c6c71557f4deaa280f55d0728c3bca0960e8c3dd5ceeeafb3c19942719d", size = 63681, upload-time = "2025-11-07T00:44:21.345Z" }, - { url = "https://files.pythonhosted.org/packages/39/a1/1b88fcd21fd835dca48b556daef750952e917a2794fa20c025489e2e1f0f/wrapt-2.0.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e2f84e9af2060e3904a32cea9bb6db23ce3f91cfd90c6b426757cf7cc01c45c7", size = 152696, upload-time = "2025-11-07T00:44:24.318Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/1c/d9185500c1960d9f5f77b9c0b890b7fc62282b53af7ad1b6bd779157f714/wrapt-2.0.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3612dc06b436968dfb9142c62e5dfa9eb5924f91120b3c8ff501ad878f90eb3", size = 158859, upload-time = "2025-11-07T00:44:25.494Z" }, - { url = "https://files.pythonhosted.org/packages/91/60/5d796ed0f481ec003220c7878a1d6894652efe089853a208ea0838c13086/wrapt-2.0.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d2d947d266d99a1477cd005b23cbd09465276e302515e122df56bb9511aca1b", size = 146068, upload-time = "2025-11-07T00:44:22.81Z" }, - { url = "https://files.pythonhosted.org/packages/04/f8/75282dd72f102ddbfba137e1e15ecba47b40acff32c08ae97edbf53f469e/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7d539241e87b650cbc4c3ac9f32c8d1ac8a54e510f6dca3f6ab60dcfd48c9b10", size = 155724, upload-time = "2025-11-07T00:44:26.634Z" }, - { url = "https://files.pythonhosted.org/packages/5a/27/fe39c51d1b344caebb4a6a9372157bdb8d25b194b3561b52c8ffc40ac7d1/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:4811e15d88ee62dbf5c77f2c3ff3932b1e3ac92323ba3912f51fc4016ce81ecf", size = 144413, upload-time = "2025-11-07T00:44:27.939Z" }, - { url = "https://files.pythonhosted.org/packages/83/2b/9f6b643fe39d4505c7bf926d7c2595b7cb4b607c8c6b500e56c6b36ac238/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c1c91405fcf1d501fa5d55df21e58ea49e6b879ae829f1039faaf7e5e509b41e", size = 150325, upload-time = "2025-11-07T00:44:29.29Z" }, - { url = "https://files.pythonhosted.org/packages/bb/b6/20ffcf2558596a7f58a2e69c89597128781f0b88e124bf5a4cadc05b8139/wrapt-2.0.1-cp313-cp313t-win32.whl", hash = "sha256:e76e3f91f864e89db8b8d2a8311d57df93f01ad6bb1e9b9976d1f2e83e18315c", size = 59943, upload-time = "2025-11-07T00:44:33.211Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/6a/0e56111cbb3320151eed5d3821ee1373be13e05b376ea0870711f18810c3/wrapt-2.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:83ce30937f0ba0d28818807b303a412440c4b63e39d3d8fc036a94764b728c92", size = 63240, upload-time = "2025-11-07T00:44:30.935Z" }, - { url = "https://files.pythonhosted.org/packages/1d/54/5ab4c53ea1f7f7e5c3e7c1095db92932cc32fd62359d285486d00c2884c3/wrapt-2.0.1-cp313-cp313t-win_arm64.whl", hash = "sha256:4b55cacc57e1dc2d0991dbe74c6419ffd415fb66474a02335cb10efd1aa3f84f", size = 60416, upload-time = "2025-11-07T00:44:32.002Z" }, - { url = "https://files.pythonhosted.org/packages/15/d1/b51471c11592ff9c012bd3e2f7334a6ff2f42a7aed2caffcf0bdddc9cb89/wrapt-2.0.1-py3-none-any.whl", hash = "sha256:4d2ce1bf1a48c5277d7969259232b57645aae5686dba1eaeade39442277afbca", size = 44046, upload-time = "2025-11-07T00:45:32.116Z" }, + { url = "https://files.pythonhosted.org/packages/4c/7a/d936840735c828b38d26a854e85d5338894cda544cb7a85a9d5b8b9c4df7/wrapt-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787fd6f4d67befa6fe2abdffcbd3de2d82dfc6fb8a6d850407c53332709d030b", size = 61259, upload-time = "2026-03-06T02:53:41.922Z" }, + { url = "https://files.pythonhosted.org/packages/5e/88/9a9b9a90ac8ca11c2fdb6a286cb3a1fc7dd774c00ed70929a6434f6bc634/wrapt-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4bdf26e03e6d0da3f0e9422fd36bcebf7bc0eeb55fdf9c727a09abc6b9fe472e", size = 61851, upload-time = "2026-03-06T02:52:48.672Z" }, + { url = "https://files.pythonhosted.org/packages/03/a9/5b7d6a16fd6533fed2756900fc8fc923f678179aea62ada6d65c92718c00/wrapt-2.1.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bbac24d879aa22998e87f6b3f481a5216311e7d53c7db87f189a7a0266dafffb", size = 121446, upload-time = "2026-03-06T02:54:14.013Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/bb/34c443690c847835cfe9f892be78c533d4f32366ad2888972c094a897e39/wrapt-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16997dfb9d67addc2e3f41b62a104341e80cac52f91110dece393923c0ebd5ca", size = 123056, upload-time = "2026-03-06T02:54:10.829Z" }, + { url = "https://files.pythonhosted.org/packages/93/b9/ff205f391cb708f67f41ea148545f2b53ff543a7ac293b30d178af4d2271/wrapt-2.1.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:162e4e2ba7542da9027821cb6e7c5e068d64f9a10b5f15512ea28e954893a267", size = 117359, upload-time = "2026-03-06T02:53:03.623Z" }, + { url = "https://files.pythonhosted.org/packages/1f/3d/1ea04d7747825119c3c9a5e0874a40b33594ada92e5649347c457d982805/wrapt-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f29c827a8d9936ac320746747a016c4bc66ef639f5cd0d32df24f5eacbf9c69f", size = 121479, upload-time = "2026-03-06T02:53:45.844Z" }, + { url = "https://files.pythonhosted.org/packages/78/cc/ee3a011920c7a023b25e8df26f306b2484a531ab84ca5c96260a73de76c0/wrapt-2.1.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:a9dd9813825f7ecb018c17fd147a01845eb330254dff86d3b5816f20f4d6aaf8", size = 116271, upload-time = "2026-03-06T02:54:46.356Z" }, + { url = "https://files.pythonhosted.org/packages/98/fd/e5ff7ded41b76d802cf1191288473e850d24ba2e39a6ec540f21ae3b57cb/wrapt-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f8dbdd3719e534860d6a78526aafc220e0241f981367018c2875178cf83a413", size = 120573, upload-time = "2026-03-06T02:52:50.163Z" }, + { url = "https://files.pythonhosted.org/packages/47/c5/242cae3b5b080cd09bacef0591691ba1879739050cc7c801ff35c8886b66/wrapt-2.1.2-cp313-cp313-win32.whl", hash = "sha256:5c35b5d82b16a3bc6e0a04349b606a0582bc29f573786aebe98e0c159bc48db6", size = 58205, upload-time = "2026-03-06T02:53:47.494Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/69/c358c61e7a50f290958809b3c61ebe8b3838ea3e070d7aac9814f95a0528/wrapt-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f8bc1c264d8d1cf5b3560a87bbdd31131573eb25f9f9447bb6252b8d4c44a3a1", size = 60452, upload-time = "2026-03-06T02:53:30.038Z" }, + { url = "https://files.pythonhosted.org/packages/8e/66/c8a6fcfe321295fd8c0ab1bd685b5a01462a9b3aa2f597254462fc2bc975/wrapt-2.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:3beb22f674550d5634642c645aba4c72a2c66fb185ae1aebe1e955fae5a13baf", size = 58842, upload-time = "2026-03-06T02:52:52.114Z" }, + { url = "https://files.pythonhosted.org/packages/da/55/9c7052c349106e0b3f17ae8db4b23a691a963c334de7f9dbd60f8f74a831/wrapt-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fc04bc8664a8bc4c8e00b37b5355cffca2535209fba1abb09ae2b7c76ddf82b", size = 63075, upload-time = "2026-03-06T02:53:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/09/a8/ce7b4006f7218248dd71b7b2b732d0710845a0e49213b18faef64811ffef/wrapt-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a9b9d50c9af998875a1482a038eb05755dfd6fe303a313f6a940bb53a83c3f18", size = 63719, upload-time = "2026-03-06T02:54:33.452Z" }, + { url = "https://files.pythonhosted.org/packages/e4/e5/2ca472e80b9e2b7a17f106bb8f9df1db11e62101652ce210f66935c6af67/wrapt-2.1.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2d3ff4f0024dd224290c0eabf0240f1bfc1f26363431505fb1b0283d3b08f11d", size = 152643, upload-time = "2026-03-06T02:52:42.721Z" }, + { url = "https://files.pythonhosted.org/packages/36/42/30f0f2cefca9d9cbf6835f544d825064570203c3e70aa873d8ae12e23791/wrapt-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3278c471f4468ad544a691b31bb856374fbdefb7fee1a152153e64019379f015", size = 158805, upload-time = "2026-03-06T02:54:25.441Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/67/d08672f801f604889dcf58f1a0b424fe3808860ede9e03affc1876b295af/wrapt-2.1.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8914c754d3134a3032601c6984db1c576e6abaf3fc68094bb8ab1379d75ff92", size = 145990, upload-time = "2026-03-06T02:53:57.456Z" }, + { url = "https://files.pythonhosted.org/packages/68/a7/fd371b02e73babec1de6ade596e8cd9691051058cfdadbfd62a5898f3295/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ff95d4264e55839be37bafe1536db2ab2de19da6b65f9244f01f332b5286cfbf", size = 155670, upload-time = "2026-03-06T02:54:55.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/9fe0095dfdb621009f40117dcebf41d7396c2c22dca6eac779f4c007b86c/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:76405518ca4e1b76fbb1b9f686cff93aebae03920cc55ceeec48ff9f719c5f67", size = 144357, upload-time = "2026-03-06T02:54:24.092Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b6/ec7b4a254abbe4cde9fa15c5d2cca4518f6b07d0f1b77d4ee9655e30280e/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c0be8b5a74c5824e9359b53e7e58bef71a729bacc82e16587db1c4ebc91f7c5a", size = 150269, upload-time = "2026-03-06T02:53:31.268Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6b/2fabe8ebf148f4ee3c782aae86a795cc68ffe7d432ef550f234025ce0cfa/wrapt-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:f01277d9a5fc1862f26f7626da9cf443bebc0abd2f303f41c5e995b15887dabd", size = 59894, upload-time = "2026-03-06T02:54:15.391Z" }, + { url = "https://files.pythonhosted.org/packages/ca/fb/9ba66fc2dedc936de5f8073c0217b5d4484e966d87723415cc8262c5d9c2/wrapt-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:84ce8f1c2104d2f6daa912b1b5b039f331febfeee74f8042ad4e04992bd95c8f", size = 63197, upload-time = "2026-03-06T02:54:41.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/1c/012d7423c95d0e337117723eb8ecf73c622ce15a97847e84cf3f8f26cd7e/wrapt-2.1.2-cp313-cp313t-win_arm64.whl", hash = "sha256:a93cd767e37faeddbe07d8fc4212d5cba660af59bdb0f6372c93faaa13e6e679", size = 60363, upload-time = "2026-03-06T02:54:48.093Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c7/8528ac2dfa2c1e6708f647df7ae144ead13f0a31146f43c7264b4942bf12/wrapt-2.1.2-py3-none-any.whl", hash = "sha256:b8fd6fa2b2c4e7621808f8c62e8317f4aae56e59721ad933bac5239d913cf0e8", size = 43993, upload-time = "2026-03-06T02:53:12.905Z" }, ]