Major fixes and new features
All checks were successful
continuous-integration/drone/push Build is passing
This commit is contained in:
247
venv/bin/Activate.ps1
Normal file
@@ -0,0 +1,247 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.

For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.

If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.

.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
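# Illustrative only (not part of the generated script): given a pyvenv.cfg with
# hypothetical values such as
#
#   home = /usr/bin
#   include-system-site-packages = false
#   version = 3.12.0
#   prompt = 'chat'
#
# Get-PyVenvConfig would return @{ home = '/usr/bin';
#   'include-system-site-packages' = 'false'; version = '3.12.0'; prompt = 'chat' }
# (surrounding quotes are stripped from values, as described above).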
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
||||
70
venv/bin/activate
Normal file
@@ -0,0 +1,70 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# You cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# Call hash to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
hash -r 2> /dev/null
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
unset VIRTUAL_ENV_PROMPT
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
# on Windows, a path can contain colons and backslashes and has to be converted:
|
||||
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
|
||||
# transform D:\path\to\venv to /d/path/to/venv on MSYS
|
||||
# and to /cygdrive/d/path/to/venv on Cygwin
|
||||
export VIRTUAL_ENV=$(cygpath /home/trevor/dev/chat/venv)
|
||||
else
|
||||
# use the path as-is
|
||||
export VIRTUAL_ENV=/home/trevor/dev/chat/venv
|
||||
fi
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/"bin":$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1='(venv) '"${PS1:-}"
|
||||
export PS1
|
||||
VIRTUAL_ENV_PROMPT='(venv) '
|
||||
export VIRTUAL_ENV_PROMPT
|
||||
fi
|
||||
|
||||
# Call hash to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
hash -r 2> /dev/null
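# Illustrative usage only (not part of the generated script); the venv path is
# the one hard-coded above:
#   $ source venv/bin/activate     # prepends the venv's bin to PATH; prompt gains "(venv) "
#   (venv) $ deactivate            # restores PATH, PS1 and PYTHONHOME, then removes itself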
|
||||
27
venv/bin/activate.csh
Normal file
@@ -0,0 +1,27 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.

# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV /home/trevor/dev/chat/venv

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    set prompt = '(venv) '"$prompt"
    setenv VIRTUAL_ENV_PROMPT '(venv) '
endif

alias pydoc python -m pydoc

rehash
69
venv/bin/activate.fish
Normal file
@@ -0,0 +1,69 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/). You cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
# prevents error when using nested fish instances (Issue #93858)
|
||||
if functions -q _old_fish_prompt
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
set -e VIRTUAL_ENV_PROMPT
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV /home/trevor/dev/chat/venv
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
set -gx VIRTUAL_ENV_PROMPT '(venv) '
|
||||
end
|
||||
8
venv/bin/alembic
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from alembic.config import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/black
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from black import patched_main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(patched_main())
|
||||
8
venv/bin/blackd
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from blackd import patched_main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(patched_main())
|
||||
8
venv/bin/celery
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from celery.__main__ import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/coverage
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from coverage.cmdline import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/coverage-3.12
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from coverage.cmdline import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/coverage3
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from coverage.cmdline import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/dmypy
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from mypy.dmypy.client import console_entry
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(console_entry())
|
||||
8
venv/bin/dotenv
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from dotenv.__main__ import cli
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli())
|
||||
8
venv/bin/email_validator
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from email_validator.__main__ import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/flake8
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from flake8.main.cli import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/httpx
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from httpx import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/isort
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from isort.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/isort-identify-imports
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from isort.main import identify_imports_main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(identify_imports_main())
|
||||
8
venv/bin/mako-render
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from mako.cmd import cmdline
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cmdline())
|
||||
8
venv/bin/mypy
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from mypy.__main__ import console_entry
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(console_entry())
|
||||
8
venv/bin/mypyc
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from mypyc.__main__ import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pip
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pip3
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pip3.12
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/py.test
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pytest import console_main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(console_main())
|
||||
8
venv/bin/pycodestyle
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pycodestyle import _main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(_main())
|
||||
8
venv/bin/pyflakes
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pyflakes.api import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pyrsa-decrypt
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.cli import decrypt
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(decrypt())
|
||||
8
venv/bin/pyrsa-encrypt
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.cli import encrypt
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(encrypt())
|
||||
8
venv/bin/pyrsa-keygen
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.cli import keygen
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(keygen())
|
||||
8
venv/bin/pyrsa-priv2pub
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.util import private_to_public
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(private_to_public())
|
||||
8
venv/bin/pyrsa-sign
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.cli import sign
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(sign())
|
||||
8
venv/bin/pyrsa-verify
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.cli import verify
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(verify())
|
||||
8
venv/bin/pytest
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pytest import console_main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(console_main())
|
||||
8
venv/bin/stubgen
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from mypy.stubgen import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/stubtest
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from mypy.stubtest import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/uvicorn
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from uvicorn.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/watchfiles
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from watchfiles.cli import cli
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli())
|
||||
8
venv/bin/websockets
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/trevor/dev/chat/venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from websockets.cli import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
164
venv/include/site/python3.12/greenlet/greenlet.h
Normal file
@@ -0,0 +1,164 @@
|
||||
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
|
||||
|
||||
/* Greenlet object interface */
|
||||
|
||||
#ifndef Py_GREENLETOBJECT_H
|
||||
#define Py_GREENLETOBJECT_H
|
||||
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
/* This is deprecated and undocumented. It does not change. */
|
||||
#define GREENLET_VERSION "1.0.0"
|
||||
|
||||
#ifndef GREENLET_MODULE
|
||||
#define implementation_ptr_t void*
|
||||
#endif
|
||||
|
||||
typedef struct _greenlet {
|
||||
PyObject_HEAD
|
||||
PyObject* weakreflist;
|
||||
PyObject* dict;
|
||||
implementation_ptr_t pimpl;
|
||||
} PyGreenlet;
|
||||
|
||||
#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
|
||||
|
||||
|
||||
/* C API functions */
|
||||
|
||||
/* Total number of symbols that are exported */
|
||||
#define PyGreenlet_API_pointers 12
|
||||
|
||||
#define PyGreenlet_Type_NUM 0
|
||||
#define PyExc_GreenletError_NUM 1
|
||||
#define PyExc_GreenletExit_NUM 2
|
||||
|
||||
#define PyGreenlet_New_NUM 3
|
||||
#define PyGreenlet_GetCurrent_NUM 4
|
||||
#define PyGreenlet_Throw_NUM 5
|
||||
#define PyGreenlet_Switch_NUM 6
|
||||
#define PyGreenlet_SetParent_NUM 7
|
||||
|
||||
#define PyGreenlet_MAIN_NUM 8
|
||||
#define PyGreenlet_STARTED_NUM 9
|
||||
#define PyGreenlet_ACTIVE_NUM 10
|
||||
#define PyGreenlet_GET_PARENT_NUM 11
|
||||
|
||||
#ifndef GREENLET_MODULE
|
||||
/* This section is used by modules that uses the greenlet C API */
|
||||
static void** _PyGreenlet_API = NULL;
|
||||
|
||||
# define PyGreenlet_Type \
|
||||
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
|
||||
|
||||
# define PyExc_GreenletError \
|
||||
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
|
||||
|
||||
# define PyExc_GreenletExit \
|
||||
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_New(PyObject *args)
|
||||
*
|
||||
* greenlet.greenlet(run, parent=None)
|
||||
*/
|
||||
# define PyGreenlet_New \
|
||||
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
|
||||
_PyGreenlet_API[PyGreenlet_New_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_GetCurrent(void)
|
||||
*
|
||||
* greenlet.getcurrent()
|
||||
*/
|
||||
# define PyGreenlet_GetCurrent \
|
||||
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_Throw(
|
||||
* PyGreenlet *greenlet,
|
||||
* PyObject *typ,
|
||||
* PyObject *val,
|
||||
* PyObject *tb)
|
||||
*
|
||||
* g.throw(...)
|
||||
*/
|
||||
# define PyGreenlet_Throw \
|
||||
(*(PyObject * (*)(PyGreenlet * self, \
|
||||
PyObject * typ, \
|
||||
PyObject * val, \
|
||||
PyObject * tb)) \
|
||||
_PyGreenlet_API[PyGreenlet_Throw_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
|
||||
*
|
||||
* g.switch(*args, **kwargs)
|
||||
*/
|
||||
# define PyGreenlet_Switch \
|
||||
(*(PyObject * \
|
||||
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
|
||||
_PyGreenlet_API[PyGreenlet_Switch_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
|
||||
*
|
||||
* g.parent = new_parent
|
||||
*/
|
||||
# define PyGreenlet_SetParent \
|
||||
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
|
||||
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_GetParent(PyObject* greenlet)
|
||||
*
|
||||
* return greenlet.parent;
|
||||
*
|
||||
* This could return NULL even if there is no exception active.
|
||||
* If it does not return NULL, you are responsible for decrementing the
|
||||
* reference count.
|
||||
*/
|
||||
# define PyGreenlet_GetParent \
|
||||
(*(PyGreenlet* (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
|
||||
|
||||
/*
|
||||
* deprecated, undocumented alias.
|
||||
*/
|
||||
# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
|
||||
|
||||
# define PyGreenlet_MAIN \
|
||||
(*(int (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_MAIN_NUM])
|
||||
|
||||
# define PyGreenlet_STARTED \
|
||||
(*(int (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_STARTED_NUM])
|
||||
|
||||
# define PyGreenlet_ACTIVE \
|
||||
(*(int (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
|
||||
|
||||
|
||||
|
||||
|
||||
/* Macro that imports greenlet and initializes C API */
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
   keep the older definition to be sure older code that might have a copy of
   the header still works. */
# define PyGreenlet_Import() \
    { \
        _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
    }
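/* Illustrative use from an embedding extension (a sketch, not part of this
 * header): import the API once, then call through the macros above.
 *
 *     PyGreenlet_Import();
 *     if (_PyGreenlet_API != NULL) {
 *         PyGreenlet* current = PyGreenlet_GetCurrent();
 *         if (current != NULL && PyGreenlet_ACTIVE(current)) {
 *             // ... use PyGreenlet_Switch / PyGreenlet_Throw here ...
 *         }
 *         Py_XDECREF(current);
 *     }
 */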
|
||||
|
||||
#endif /* GREENLET_MODULE */
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
#endif /* !Py_GREENLETOBJECT_H */
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,28 @@
|
||||
Copyright 2010 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@@ -0,0 +1,92 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: MarkupSafe
|
||||
Version: 3.0.2
|
||||
Summary: Safely add untrusted strings to HTML/XML markup.
|
||||
Maintainer-email: Pallets <contact@palletsprojects.com>
|
||||
License: Copyright 2010 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
|
||||
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
|
||||
Project-URL: Source, https://github.com/pallets/markupsafe/
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Classifier: Topic :: Text Processing :: Markup :: HTML
|
||||
Classifier: Typing :: Typed
|
||||
Requires-Python: >=3.9
|
||||
Description-Content-Type: text/markdown
|
||||
License-File: LICENSE.txt
|
||||
|
||||
# MarkupSafe
|
||||
|
||||
MarkupSafe implements a text object that escapes characters so it is
|
||||
safe to use in HTML and XML. Characters that have special meanings are
|
||||
replaced so that they display as the actual characters. This mitigates
|
||||
injection attacks, meaning untrusted user input can safely be displayed
|
||||
on a page.
|
||||
|
||||
|
||||
## Examples
|
||||
|
||||
```pycon
|
||||
>>> from markupsafe import Markup, escape

>>> # escape replaces special characters and wraps in Markup
>>> escape("<script>alert(document.cookie);</script>")
Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')

>>> # wrap in Markup to mark text "safe" and prevent escaping
>>> Markup("<strong>Hello</strong>")
Markup('<strong>Hello</strong>')

>>> escape(Markup("<strong>Hello</strong>"))
Markup('<strong>Hello</strong>')

>>> # Markup is a str subclass
>>> # methods and operators escape their arguments
>>> template = Markup("Hello <em>{name}</em>")
>>> template.format(name='"World"')
Markup('Hello <em>&#34;World&#34;</em>')
```
|
||||
|
||||
## Donate
|
||||
|
||||
The Pallets organization develops and supports MarkupSafe and other
|
||||
popular packages. In order to grow the community of contributors and
|
||||
users, and allow the maintainers to devote more time to the projects,
|
||||
[please donate today][].
|
||||
|
||||
[please donate today]: https://palletsprojects.com/donate
|
||||
@@ -0,0 +1,14 @@
|
||||
MarkupSafe-3.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
MarkupSafe-3.0.2.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
|
||||
MarkupSafe-3.0.2.dist-info/METADATA,sha256=aAwbZhSmXdfFuMM-rEHpeiHRkBOGESyVLJIuwzHP-nw,3975
|
||||
MarkupSafe-3.0.2.dist-info/RECORD,,
|
||||
MarkupSafe-3.0.2.dist-info/WHEEL,sha256=OVgtqZzfzIXXtylXP90gxCZ6CKBCwKYyHM8PpMEjN1M,151
|
||||
MarkupSafe-3.0.2.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
|
||||
markupsafe/__init__.py,sha256=sr-U6_27DfaSrj5jnHYxWN-pvhM27sjlDplMDPZKm7k,13214
|
||||
markupsafe/__pycache__/__init__.cpython-312.pyc,,
|
||||
markupsafe/__pycache__/_native.cpython-312.pyc,,
|
||||
markupsafe/_native.py,sha256=hSLs8Jmz5aqayuengJJ3kdT5PwNpBWpKrmQSdipndC8,210
|
||||
markupsafe/_speedups.c,sha256=O7XulmTo-epI6n2FtMVOrJXl8EAaIwD2iNYmBI5SEoQ,4149
|
||||
markupsafe/_speedups.cpython-312-x86_64-linux-gnu.so,sha256=t1DBZlpsjFA30BOOvXfXfT1wvO_4cS16VbHz1-49q5U,43432
|
||||
markupsafe/_speedups.pyi,sha256=ENd1bYe7gbBUf2ywyYWOGUpnXOHNJ-cgTNqetlW8h5k,41
|
||||
markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
@@ -0,0 +1,6 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: setuptools (75.2.0)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp312-cp312-manylinux_2_17_x86_64
|
||||
Tag: cp312-cp312-manylinux2014_x86_64
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
markupsafe
|
||||
@@ -0,0 +1,7 @@
|
||||
Authors
|
||||
=======
|
||||
|
||||
``pyjwt`` is currently written and maintained by `Jose Padilla <https://github.com/jpadilla>`_.
|
||||
Originally written and maintained by `Jeff Lindsay <https://github.com/progrium>`_.
|
||||
|
||||
A full list of contributors can be found on GitHub’s `overview <https://github.com/jpadilla/pyjwt/graphs/contributors>`_.
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015-2022 José Padilla
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
107
venv/lib/python3.12/site-packages/PyJWT-2.8.0.dist-info/METADATA
Normal file
@@ -0,0 +1,107 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: PyJWT
|
||||
Version: 2.8.0
|
||||
Summary: JSON Web Token implementation in Python
|
||||
Home-page: https://github.com/jpadilla/pyjwt
|
||||
Author: Jose Padilla
|
||||
Author-email: hello@jpadilla.com
|
||||
License: MIT
|
||||
Keywords: json,jwt,security,signing,token,web
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Natural Language :: English
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Topic :: Utilities
|
||||
Requires-Python: >=3.7
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE
|
||||
License-File: AUTHORS.rst
|
||||
Requires-Dist: typing-extensions ; python_version <= "3.7"
|
||||
Provides-Extra: crypto
|
||||
Requires-Dist: cryptography (>=3.4.0) ; extra == 'crypto'
|
||||
Provides-Extra: dev
|
||||
Requires-Dist: sphinx (<5.0.0,>=4.5.0) ; extra == 'dev'
|
||||
Requires-Dist: sphinx-rtd-theme ; extra == 'dev'
|
||||
Requires-Dist: zope.interface ; extra == 'dev'
|
||||
Requires-Dist: cryptography (>=3.4.0) ; extra == 'dev'
|
||||
Requires-Dist: pytest (<7.0.0,>=6.0.0) ; extra == 'dev'
|
||||
Requires-Dist: coverage[toml] (==5.0.4) ; extra == 'dev'
|
||||
Requires-Dist: pre-commit ; extra == 'dev'
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: sphinx (<5.0.0,>=4.5.0) ; extra == 'docs'
|
||||
Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
|
||||
Requires-Dist: zope.interface ; extra == 'docs'
|
||||
Provides-Extra: tests
|
||||
Requires-Dist: pytest (<7.0.0,>=6.0.0) ; extra == 'tests'
|
||||
Requires-Dist: coverage[toml] (==5.0.4) ; extra == 'tests'
|
||||
|
||||
PyJWT
|
||||
=====
|
||||
|
||||
.. image:: https://github.com/jpadilla/pyjwt/workflows/CI/badge.svg
|
||||
:target: https://github.com/jpadilla/pyjwt/actions?query=workflow%3ACI
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/pyjwt.svg
|
||||
:target: https://pypi.python.org/pypi/pyjwt
|
||||
|
||||
.. image:: https://codecov.io/gh/jpadilla/pyjwt/branch/master/graph/badge.svg
|
||||
:target: https://codecov.io/gh/jpadilla/pyjwt
|
||||
|
||||
.. image:: https://readthedocs.org/projects/pyjwt/badge/?version=stable
|
||||
:target: https://pyjwt.readthedocs.io/en/stable/
|
||||
|
||||
A Python implementation of `RFC 7519 <https://tools.ietf.org/html/rfc7519>`_. Original implementation was written by `@progrium <https://github.com/progrium>`_.
|
||||
|
||||
Sponsor
|
||||
-------
|
||||
|
||||
+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| |auth0-logo| | If you want to quickly add secure token-based authentication to Python projects, feel free to check Auth0's Python SDK and free plan at `auth0.com/developers <https://auth0.com/developers?utm_source=GHsponsor&utm_medium=GHsponsor&utm_campaign=pyjwt&utm_content=auth>`_. |
|
||||
+--------------+-----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
|
||||
.. |auth0-logo| image:: https://user-images.githubusercontent.com/83319/31722733-de95bbde-b3ea-11e7-96bf-4f4e8f915588.png
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
||||
Install with **pip**:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install PyJWT
|
||||
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> import jwt
|
||||
>>> encoded = jwt.encode({"some": "payload"}, "secret", algorithm="HS256")
|
||||
>>> print(encoded)
|
||||
eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzb21lIjoicGF5bG9hZCJ9.4twFt5NiznN84AWoo1d7KO1T_yoc0Z6XOpOVswacPZg
|
||||
>>> jwt.decode(encoded, "secret", algorithms=["HS256"])
|
||||
{'some': 'payload'}
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
|
||||
View the full docs online at https://pyjwt.readthedocs.io/en/stable/
|
||||
|
||||
|
||||
Tests
|
||||
-----
|
||||
|
||||
You can run tests from the project root after cloning with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ tox
|
||||
@@ -0,0 +1,33 @@
|
||||
PyJWT-2.8.0.dist-info/AUTHORS.rst,sha256=klzkNGECnu2_VY7At89_xLBF3vUSDruXk3xwgUBxzwc,322
|
||||
PyJWT-2.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
PyJWT-2.8.0.dist-info/LICENSE,sha256=eXp6ICMdTEM-nxkR2xcx0GtYKLmPSZgZoDT3wPVvXOU,1085
|
||||
PyJWT-2.8.0.dist-info/METADATA,sha256=pV2XZjvithGcVesLHWAv0J4T5t8Qc66fip2sbxwoz1o,4160
|
||||
PyJWT-2.8.0.dist-info/RECORD,,
|
||||
PyJWT-2.8.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
PyJWT-2.8.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
|
||||
PyJWT-2.8.0.dist-info/top_level.txt,sha256=RP5DHNyJbMq2ka0FmfTgoSaQzh7e3r5XuCWCO8a00k8,4
|
||||
jwt/__init__.py,sha256=mV9lg6n4-0xiqCKaE1eEPC9a4j6sEkEYQcKghULE7kU,1670
|
||||
jwt/__pycache__/__init__.cpython-312.pyc,,
|
||||
jwt/__pycache__/algorithms.cpython-312.pyc,,
|
||||
jwt/__pycache__/api_jwk.cpython-312.pyc,,
|
||||
jwt/__pycache__/api_jws.cpython-312.pyc,,
|
||||
jwt/__pycache__/api_jwt.cpython-312.pyc,,
|
||||
jwt/__pycache__/exceptions.cpython-312.pyc,,
|
||||
jwt/__pycache__/help.cpython-312.pyc,,
|
||||
jwt/__pycache__/jwk_set_cache.cpython-312.pyc,,
|
||||
jwt/__pycache__/jwks_client.cpython-312.pyc,,
|
||||
jwt/__pycache__/types.cpython-312.pyc,,
|
||||
jwt/__pycache__/utils.cpython-312.pyc,,
|
||||
jwt/__pycache__/warnings.cpython-312.pyc,,
|
||||
jwt/algorithms.py,sha256=RDsv5Lm3bzwsiWT3TynT7JR41R6H6s_fWUGOIqd9x_I,29800
|
||||
jwt/api_jwk.py,sha256=HPxVqgBZm7RTaEXydciNBCuYNKDYOC_prTdaN9toGbo,4196
|
||||
jwt/api_jws.py,sha256=da17RrDe0PDccTbx3rx2lLezEG_c_YGw_vVHa335IOk,11099
|
||||
jwt/api_jwt.py,sha256=yF9DwF1kt3PA5n_TiU0OmHd0LtPHfe4JCE1XOfKPjw0,12638
|
||||
jwt/exceptions.py,sha256=KDC3M7cTrpR4OQXVURlVMThem0pfANSgBxRz-ttivmo,1046
|
||||
jwt/help.py,sha256=Jrp84fG43sCwmSIaDtY08I6ZR2VE7NhrTff89tYSE40,1749
|
||||
jwt/jwk_set_cache.py,sha256=hBKmN-giU7-G37L_XKgc_OZu2ah4wdbj1ZNG_GkoSE8,959
|
||||
jwt/jwks_client.py,sha256=9W8JVyGByQgoLbBN1u5iY1_jlgfnnukeOBTpqaM_9SE,4222
|
||||
jwt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
jwt/types.py,sha256=VnhGv_VFu5a7_mrPoSCB7HaNLrJdhM8Sq1sSfEg0gLU,99
|
||||
jwt/utils.py,sha256=PAI05_8MHQCxWQTDlwN0hTtTIT2DTTZ28mm1x6-26UY,3903
|
||||
jwt/warnings.py,sha256=50XWOnyNsIaqzUJTk6XHNiIDykiL763GYA92MjTKmok,59
|
||||
@@ -0,0 +1,5 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.40.0)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
jwt
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,20 @@
|
||||
Copyright (c) 2017-2021 Ingy döt Net
|
||||
Copyright (c) 2006-2016 Kirill Simonov
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -0,0 +1,46 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: PyYAML
|
||||
Version: 6.0.2
|
||||
Summary: YAML parser and emitter for Python
|
||||
Home-page: https://pyyaml.org/
|
||||
Download-URL: https://pypi.org/project/PyYAML/
|
||||
Author: Kirill Simonov
|
||||
Author-email: xi@resolvent.net
|
||||
License: MIT
|
||||
Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues
|
||||
Project-URL: CI, https://github.com/yaml/pyyaml/actions
|
||||
Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation
|
||||
Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core
|
||||
Project-URL: Source Code, https://github.com/yaml/pyyaml
|
||||
Platform: Any
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Cython
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: Topic :: Text Processing :: Markup
|
||||
Requires-Python: >=3.8
|
||||
License-File: LICENSE
|
||||
|
||||
YAML is a data serialization format designed for human readability
|
||||
and interaction with scripting languages. PyYAML is a YAML parser
|
||||
and emitter for Python.
|
||||
|
||||
PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
|
||||
support, capable extension API, and sensible error messages. PyYAML
|
||||
supports standard YAML tags and provides Python-specific tags that
|
||||
allow to represent an arbitrary Python object.
|
||||
|
||||
PyYAML is applicable for a broad range of tasks from complex
|
||||
configuration files to object serialization and persistence.
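
A minimal round-trip, for illustration only (example data, not part of the
upstream description):

    >>> import yaml
    >>> yaml.safe_load("a: 1\nb: [x, y]")
    {'a': 1, 'b': ['x', 'y']}
    >>> print(yaml.safe_dump({'a': 1}), end="")
    a: 1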
|
||||
@@ -0,0 +1,43 @@
|
||||
PyYAML-6.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
PyYAML-6.0.2.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101
|
||||
PyYAML-6.0.2.dist-info/METADATA,sha256=9-odFB5seu4pGPcEv7E8iyxNF51_uKnaNGjLAhz2lto,2060
|
||||
PyYAML-6.0.2.dist-info/RECORD,,
|
||||
PyYAML-6.0.2.dist-info/WHEEL,sha256=1pP4yhrbipRtdbm4Rbg3aoTjzc7pDhpHKO0CEY24CNM,152
|
||||
PyYAML-6.0.2.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
|
||||
_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402
|
||||
_yaml/__pycache__/__init__.cpython-312.pyc,,
|
||||
yaml/__init__.py,sha256=N35S01HMesFTe0aRRMWkPj0Pa8IEbHpE9FK7cr5Bdtw,12311
|
||||
yaml/__pycache__/__init__.cpython-312.pyc,,
|
||||
yaml/__pycache__/composer.cpython-312.pyc,,
|
||||
yaml/__pycache__/constructor.cpython-312.pyc,,
|
||||
yaml/__pycache__/cyaml.cpython-312.pyc,,
|
||||
yaml/__pycache__/dumper.cpython-312.pyc,,
|
||||
yaml/__pycache__/emitter.cpython-312.pyc,,
|
||||
yaml/__pycache__/error.cpython-312.pyc,,
|
||||
yaml/__pycache__/events.cpython-312.pyc,,
|
||||
yaml/__pycache__/loader.cpython-312.pyc,,
|
||||
yaml/__pycache__/nodes.cpython-312.pyc,,
|
||||
yaml/__pycache__/parser.cpython-312.pyc,,
|
||||
yaml/__pycache__/reader.cpython-312.pyc,,
|
||||
yaml/__pycache__/representer.cpython-312.pyc,,
|
||||
yaml/__pycache__/resolver.cpython-312.pyc,,
|
||||
yaml/__pycache__/scanner.cpython-312.pyc,,
|
||||
yaml/__pycache__/serializer.cpython-312.pyc,,
|
||||
yaml/__pycache__/tokens.cpython-312.pyc,,
|
||||
yaml/_yaml.cpython-312-x86_64-linux-gnu.so,sha256=PJFgxnc0f5Dyde6WKmBm6fZWapawmWl7aBRruXjRA80,2481784
|
||||
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
|
||||
yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
|
||||
yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
|
||||
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
|
||||
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
|
||||
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
|
||||
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
|
||||
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
|
||||
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
|
||||
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
|
||||
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
|
||||
yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190
|
||||
yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004
|
||||
yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279
|
||||
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
|
||||
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
|
||||
@@ -0,0 +1,6 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.44.0)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp312-cp312-manylinux_2_17_x86_64
|
||||
Tag: cp312-cp312-manylinux2014_x86_64
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
_yaml
|
||||
yaml
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,19 @@
|
||||
Copyright 2005-2023 SQLAlchemy authors and contributors <see AUTHORS file>.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -0,0 +1,241 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: SQLAlchemy
|
||||
Version: 2.0.23
|
||||
Summary: Database Abstraction Library
|
||||
Home-page: https://www.sqlalchemy.org
|
||||
Author: Mike Bayer
|
||||
Author-email: mike_mp@zzzcomputing.com
|
||||
License: MIT
|
||||
Project-URL: Documentation, https://docs.sqlalchemy.org
|
||||
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Topic :: Database :: Front-Ends
|
||||
Requires-Python: >=3.7
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE
|
||||
Requires-Dist: typing-extensions >=4.2.0
|
||||
Requires-Dist: greenlet !=0.4.17 ; platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32")))))
|
||||
Requires-Dist: importlib-metadata ; python_version < "3.8"
|
||||
Provides-Extra: aiomysql
|
||||
Requires-Dist: greenlet !=0.4.17 ; extra == 'aiomysql'
|
||||
Requires-Dist: aiomysql >=0.2.0 ; extra == 'aiomysql'
|
||||
Provides-Extra: aioodbc
|
||||
Requires-Dist: greenlet !=0.4.17 ; extra == 'aioodbc'
|
||||
Requires-Dist: aioodbc ; extra == 'aioodbc'
|
||||
Provides-Extra: aiosqlite
|
||||
Requires-Dist: greenlet !=0.4.17 ; extra == 'aiosqlite'
|
||||
Requires-Dist: aiosqlite ; extra == 'aiosqlite'
|
||||
Requires-Dist: typing-extensions !=3.10.0.1 ; extra == 'aiosqlite'
|
||||
Provides-Extra: asyncio
|
||||
Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncio'
|
||||
Provides-Extra: asyncmy
|
||||
Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncmy'
|
||||
Requires-Dist: asyncmy !=0.2.4,!=0.2.6,>=0.2.3 ; extra == 'asyncmy'
|
||||
Provides-Extra: mariadb_connector
|
||||
Requires-Dist: mariadb !=1.1.2,!=1.1.5,>=1.0.1 ; extra == 'mariadb_connector'
|
||||
Provides-Extra: mssql
|
||||
Requires-Dist: pyodbc ; extra == 'mssql'
|
||||
Provides-Extra: mssql_pymssql
|
||||
Requires-Dist: pymssql ; extra == 'mssql_pymssql'
|
||||
Provides-Extra: mssql_pyodbc
|
||||
Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
|
||||
Provides-Extra: mypy
|
||||
Requires-Dist: mypy >=0.910 ; extra == 'mypy'
|
||||
Provides-Extra: mysql
|
||||
Requires-Dist: mysqlclient >=1.4.0 ; extra == 'mysql'
|
||||
Provides-Extra: mysql_connector
|
||||
Requires-Dist: mysql-connector-python ; extra == 'mysql_connector'
|
||||
Provides-Extra: oracle
|
||||
Requires-Dist: cx-oracle >=8 ; extra == 'oracle'
|
||||
Provides-Extra: oracle_oracledb
|
||||
Requires-Dist: oracledb >=1.0.1 ; extra == 'oracle_oracledb'
|
||||
Provides-Extra: postgresql
|
||||
Requires-Dist: psycopg2 >=2.7 ; extra == 'postgresql'
|
||||
Provides-Extra: postgresql_asyncpg
|
||||
Requires-Dist: greenlet !=0.4.17 ; extra == 'postgresql_asyncpg'
|
||||
Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg'
|
||||
Provides-Extra: postgresql_pg8000
|
||||
Requires-Dist: pg8000 >=1.29.1 ; extra == 'postgresql_pg8000'
|
||||
Provides-Extra: postgresql_psycopg
|
||||
Requires-Dist: psycopg >=3.0.7 ; extra == 'postgresql_psycopg'
|
||||
Provides-Extra: postgresql_psycopg2binary
|
||||
Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
|
||||
Provides-Extra: postgresql_psycopg2cffi
|
||||
Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
|
||||
Provides-Extra: postgresql_psycopgbinary
|
||||
Requires-Dist: psycopg[binary] >=3.0.7 ; extra == 'postgresql_psycopgbinary'
|
||||
Provides-Extra: pymysql
|
||||
Requires-Dist: pymysql ; extra == 'pymysql'
|
||||
Provides-Extra: sqlcipher
|
||||
Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher'
|
||||
|
||||
SQLAlchemy
|
||||
==========
|
||||
|
||||
|PyPI| |Python| |Downloads|
|
||||
|
||||
.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
|
||||
:target: https://pypi.org/project/sqlalchemy
|
||||
:alt: PyPI
|
||||
|
||||
.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
|
||||
:target: https://pypi.org/project/sqlalchemy
|
||||
:alt: PyPI - Python Version
|
||||
|
||||
.. |Downloads| image:: https://static.pepy.tech/badge/sqlalchemy/month
|
||||
:target: https://pepy.tech/project/sqlalchemy
|
||||
:alt: PyPI - Downloads
|
||||
|
||||
|
||||
The Python SQL Toolkit and Object Relational Mapper
|
||||
|
||||
Introduction
|
||||
-------------
|
||||
|
||||
SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
|
||||
that gives application developers the full power and
|
||||
flexibility of SQL. SQLAlchemy provides a full suite
|
||||
of well known enterprise-level persistence patterns,
|
||||
designed for efficient and high-performing database
|
||||
access, adapted into a simple and Pythonic domain
|
||||
language.
|
||||
|
||||
Major SQLAlchemy features include:
|
||||
|
||||
* An industrial strength ORM, built
|
||||
from the core on the identity map, unit of work,
|
||||
and data mapper patterns. These patterns
|
||||
allow transparent persistence of objects
|
||||
using a declarative configuration system.
|
||||
Domain models
|
||||
can be constructed and manipulated naturally,
|
||||
and changes are synchronized with the
|
||||
current transaction automatically.
|
||||
* A relationally-oriented query system, exposing
|
||||
the full range of SQL's capabilities
|
||||
explicitly, including joins, subqueries,
|
||||
correlation, and most everything else,
|
||||
in terms of the object model.
|
||||
Writing queries with the ORM uses the same
|
||||
techniques of relational composition you use
|
||||
when writing SQL. While you can drop into
|
||||
literal SQL at any time, it's virtually never
|
||||
needed.
|
||||
* A comprehensive and flexible system
|
||||
of eager loading for related collections and objects.
|
||||
Collections are cached within a session,
|
||||
and can be loaded on individual access, all
|
||||
at once using joins, or by query per collection
|
||||
across the full result set.
|
||||
* A Core SQL construction system and DBAPI
|
||||
interaction layer. The SQLAlchemy Core is
|
||||
separate from the ORM and is a full database
|
||||
abstraction layer in its own right, and includes
|
||||
an extensible Python-based SQL expression
|
||||
language, schema metadata, connection pooling,
|
||||
type coercion, and custom types.
|
||||
* All primary and foreign key constraints are
|
||||
assumed to be composite and natural. Surrogate
|
||||
integer primary keys are of course still the
|
||||
norm, but SQLAlchemy never assumes or hardcodes
|
||||
to this model.
|
||||
* Database introspection and generation. Database
|
||||
schemas can be "reflected" in one step into
|
||||
Python structures representing database metadata;
|
||||
those same structures can then generate
|
||||
CREATE statements right back out - all within
|
||||
the Core, independent of the ORM.
|
||||
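As a concrete illustration of the ORM features listed above (declarative configuration, unit of work, relational query composition), here is a self-contained sketch against an in-memory SQLite database. It is not taken from the README; the model and names are invented.

from sqlalchemy import create_engine, select, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, Session

class Base(DeclarativeBase):
    pass

class User(Base):
    __tablename__ = "user_account"
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(String(30))

engine = create_engine("sqlite+pysqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(User(name="spongebob"))
    session.commit()                      # changes reach the database only here

    # Querying is ordinary relational composition expressed on the object model.
    stmt = select(User).where(User.name == "spongebob")
    for user in session.scalars(stmt):
        print(user.id, user.name)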
|
||||
SQLAlchemy's philosophy:
|
||||
|
||||
* SQL databases behave less and less like object
|
||||
collections the more size and performance start to
|
||||
matter; object collections behave less and less like
|
||||
tables and rows the more abstraction starts to matter.
|
||||
SQLAlchemy aims to accommodate both of these
|
||||
principles.
|
||||
* An ORM doesn't need to hide the "R". A relational
|
||||
database provides rich, set-based functionality
|
||||
that should be fully exposed. SQLAlchemy's
|
||||
ORM provides an open-ended set of patterns
|
||||
that allow a developer to construct a custom
|
||||
mediation layer between a domain model and
|
||||
a relational schema, turning the so-called
|
||||
"object relational impedance" issue into
|
||||
a distant memory.
|
||||
* The developer, in all cases, makes all decisions
|
||||
regarding the design, structure, and naming conventions
|
||||
of both the object model as well as the relational
|
||||
schema. SQLAlchemy only provides the means
|
||||
to automate the execution of these decisions.
|
||||
* With SQLAlchemy, there's no such thing as
|
||||
"the ORM generated a bad query" - you
|
||||
retain full control over the structure of
|
||||
queries, including how joins are organized,
|
||||
how subqueries and correlation are used, and what
|
||||
columns are requested. Everything SQLAlchemy
|
||||
does is ultimately the result of a developer-initiated
|
||||
decision.
|
||||
* Don't use an ORM if the problem doesn't need one.
|
||||
SQLAlchemy consists of a Core and separate ORM
|
||||
component. The Core offers a full SQL expression
|
||||
language that allows Pythonic construction
|
||||
of SQL constructs that render directly to SQL
|
||||
strings for a target database, returning
|
||||
result sets that are essentially enhanced DBAPI
|
||||
cursors.
|
||||
* Transactions should be the norm. With SQLAlchemy's
|
||||
ORM, nothing goes to permanent storage until
|
||||
commit() is called. SQLAlchemy encourages applications
|
||||
to create a consistent means of delineating
|
||||
the start and end of a series of operations.
|
||||
* Never render a literal value in a SQL statement.
|
||||
Bound parameters are used to the greatest degree
|
||||
possible, allowing query optimizers to cache
|
||||
query plans effectively and making SQL injection
|
||||
attacks a non-issue.
|
||||
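To make two of the points above concrete (using the Core without the ORM, and bound parameters instead of rendered literals), here is a minimal Core-only sketch; it is an invented example, not part of the README.

from sqlalchemy import (
    create_engine, MetaData, Table, Column, Integer, String, insert, select,
)

metadata = MetaData()
user = Table(
    "user_account", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(30)),
)

engine = create_engine("sqlite+pysqlite:///:memory:")
metadata.create_all(engine)

with engine.begin() as conn:              # the transaction commits on success
    conn.execute(insert(user), [{"name": "sandy"}, {"name": "patrick"}])

with engine.connect() as conn:
    stmt = select(user.c.name).where(user.c.name.like("%and%"))
    print(stmt)                           # rendered SQL carries a bound parameter, not the literal
    print(conn.execute(stmt).all())       # the result is essentially an enhanced DBAPI cursor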
|
||||
Documentation
|
||||
-------------
|
||||
|
||||
Latest documentation is at:
|
||||
|
||||
https://www.sqlalchemy.org/docs/
|
||||
|
||||
Installation / Requirements
|
||||
---------------------------
|
||||
|
||||
Full documentation for installation is at
|
||||
`Installation <https://www.sqlalchemy.org/docs/intro.html#installation>`_.
|
||||
|
||||
Getting Help / Development / Bug reporting
|
||||
------------------------------------------
|
||||
|
||||
Please refer to the `SQLAlchemy Community Guide <https://www.sqlalchemy.org/support.html>`_.
|
||||
|
||||
Code of Conduct
|
||||
---------------
|
||||
|
||||
Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
|
||||
constructive communication between users and developers.
|
||||
Please see our current Code of Conduct at
|
||||
`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
SQLAlchemy is distributed under the `MIT license
|
||||
<https://www.opensource.org/licenses/mit-license.php>`_.
|
||||
|
||||
@@ -0,0 +1,530 @@
|
||||
SQLAlchemy-2.0.23.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
SQLAlchemy-2.0.23.dist-info/LICENSE,sha256=2lSTeluT1aC-5eJXO8vhkzf93qCSeV_mFXLrv3tNdIU,1100
|
||||
SQLAlchemy-2.0.23.dist-info/METADATA,sha256=znDChLueFNPCOPuNix-FfY7FG6aQOCM-lQwwN-cPLQs,9551
|
||||
SQLAlchemy-2.0.23.dist-info/RECORD,,
|
||||
SQLAlchemy-2.0.23.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
SQLAlchemy-2.0.23.dist-info/WHEEL,sha256=JmQLNqDEfvnYMfsIaVeSP3fmUcYDwmF12m3QYW0c7QQ,152
|
||||
SQLAlchemy-2.0.23.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
|
||||
sqlalchemy/__init__.py,sha256=DjKCAltzrHGfaVdXVeFJpBmTaX6JmyloHANzewBUWo4,12708
|
||||
sqlalchemy/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/events.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/exc.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/inspection.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/log.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/schema.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/types.cpython-312.pyc,,
|
||||
sqlalchemy/connectors/__init__.py,sha256=uKUYWQoXyleIyjWBuh7gzgnazJokx3DaasKJbFOfQGA,476
|
||||
sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc,,
|
||||
sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc,,
|
||||
sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc,,
|
||||
sqlalchemy/connectors/aioodbc.py,sha256=QiafuN9bx_wcIs8tByLftTmGAegXPoFPwUaxCDU_ZQA,5737
|
||||
sqlalchemy/connectors/asyncio.py,sha256=ZZmJSFT50u-GEjZzytQOdB_tkBFxi3XPWRrNhs_nASc,6139
|
||||
sqlalchemy/connectors/pyodbc.py,sha256=NskMydn26ZkHL8aQ1V3L4WIAWin3zwJ5VEnlHvAD1DE,8453
|
||||
sqlalchemy/cyextension/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/cyextension/collections.cpython-312-x86_64-linux-gnu.so,sha256=qPSMnyXVSLYHMr_ot_ZK7yEYadhTuT8ryb6eTMFFWrM,1947440
|
||||
sqlalchemy/cyextension/collections.pyx,sha256=KDI5QTOyYz9gDl-3d7MbGMA0Kc-wxpJqnLmCaUmQy2U,12323
|
||||
sqlalchemy/cyextension/immutabledict.cpython-312-x86_64-linux-gnu.so,sha256=J9m0gK6R8PGR36jxAKx415VxX0-0fqvbQAP9-DDU1qA,811232
|
||||
sqlalchemy/cyextension/immutabledict.pxd,sha256=oc8BbnQwDg7pWAdThB-fzu8s9_ViOe1Ds-8T0r0POjI,41
|
||||
sqlalchemy/cyextension/immutabledict.pyx,sha256=aQJPZKjcqbO8jHDqpC9F-v-ew2qAjUscc5CntaheZUk,3285
|
||||
sqlalchemy/cyextension/processors.cpython-312-x86_64-linux-gnu.so,sha256=WOLcEWRNXn4UtJGhzF5B1h7JpPPfn-ziQMT0lkhobQE,533968
|
||||
sqlalchemy/cyextension/processors.pyx,sha256=0swFIBdR19x1kPRe-dijBaLW898AhH6QJizbv4ho9pk,1545
|
||||
sqlalchemy/cyextension/resultproxy.cpython-312-x86_64-linux-gnu.so,sha256=bte73oURZXuV7YvkjyGo-OjRCnSgYukqDp5KM9-Z8xY,626112
|
||||
sqlalchemy/cyextension/resultproxy.pyx,sha256=cDtMjLTdC47g7cME369NSOCck3JwG2jwZ6j25no3_gw,2477
|
||||
sqlalchemy/cyextension/util.cpython-312-x86_64-linux-gnu.so,sha256=8yMbb069NQN1b6yAsCBCMpbX94sH4iLs61vPNxd0bOg,958760
|
||||
sqlalchemy/cyextension/util.pyx,sha256=lv03p63oVn23jLhMI4_RYGewUnJfh-4FkrNMEFL7A3Y,2289
|
||||
sqlalchemy/dialects/__init__.py,sha256=hLsgIEomunlp4mNLnvjCQTLOnBVva8N7IT2-RYrN2_4,1770
|
||||
sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/_typing.py,sha256=P2ML2o4b_bWAAy3zbdoUjx3vXsMNwpiOblef8ThCxlM,648
|
||||
sqlalchemy/dialects/mssql/__init__.py,sha256=CYbbydyMSLjUq8vY1siNStd4lvjVXod8ddeDS6ELHLk,1871
|
||||
sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/aioodbc.py,sha256=ncj3yyfvW91o3g19GB5s1I0oaZKUO_P-R2nwnLF0t9E,2013
|
||||
sqlalchemy/dialects/mssql/base.py,sha256=l9vX6fK6DJEYA00N9uDnvSbqfgvxXfYUn2C4AF5T920,133649
|
||||
sqlalchemy/dialects/mssql/information_schema.py,sha256=ll0zAupJ4cPvhi9v5hTi7PQLU1lae4o6eQ5Vg7gykXQ,8074
|
||||
sqlalchemy/dialects/mssql/json.py,sha256=B0m6H08CKuk-yomDHcCwfQbVuVN2WLufuVueA_qb1NQ,4573
|
||||
sqlalchemy/dialects/mssql/provision.py,sha256=x7XRSQDxz4jz2uIpqwhuIXpL9bic0Vw7Mhy39HOkyqY,5013
|
||||
sqlalchemy/dialects/mssql/pymssql.py,sha256=BfJp9t-IQabqWXySJBmP9pwNTWnJqbjA2jJM9M4XeWc,4029
|
||||
sqlalchemy/dialects/mssql/pyodbc.py,sha256=qwZ8ByOTZ1WObjxeOravoJBSBX-s4RJ_PZ5VJ_Ch5Ws,27048
|
||||
sqlalchemy/dialects/mysql/__init__.py,sha256=btLABiNnmbWt9ziW-XgVWEB1qHWQcSFz7zxZNw4m_LY,2144
|
||||
sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/aiomysql.py,sha256=Zb-_F9Pzl0t-fT1bZwbNNne6jjCUqBXxeizbhMFPqls,9750
|
||||
sqlalchemy/dialects/mysql/asyncmy.py,sha256=zqupDz7AJihjv3E8w_4XAtq95d8stdrETNx60MLNVr0,9819
|
||||
sqlalchemy/dialects/mysql/base.py,sha256=q-DzkR_txwDTeWTEByzHAoIArYU3Bb5HT2Bnmuw7WIM,120688
|
||||
sqlalchemy/dialects/mysql/cymysql.py,sha256=5CQVJAlqQ3pT4IDGSQJH2hCzj-EWjUitA21MLqJwEEs,2291
|
||||
sqlalchemy/dialects/mysql/dml.py,sha256=qw0ZweHbMsbNyVSfC17HqylCnf7XAuIjtgofiWABT8k,7636
|
||||
sqlalchemy/dialects/mysql/enumerated.py,sha256=1L2J2wT6nQEmRS4z-jzZpoi44IqIaHgBRZZB9m55czo,8439
|
||||
sqlalchemy/dialects/mysql/expression.py,sha256=WW5G2XPwqJfXjuzHBt4BRP0pCLcPJkPD1mvZX1g0JL0,4066
|
||||
sqlalchemy/dialects/mysql/json.py,sha256=JlSFBAHhJ9JmV-3azH80xkLgeh7g6A6DVyNVCNZiKPU,2260
|
||||
sqlalchemy/dialects/mysql/mariadb.py,sha256=Sugyngvo6j6SfFFuJ23rYeFWEPdZ9Ji9guElsk_1WSQ,844
|
||||
sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=F1VPosecC1hDZqjzZI29j4GUduyU4ewPwb-ekBQva5w,8725
|
||||
sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=5glmkPhD_KP-Mci8ZXBr4yzqH1MDfzCJ9F_kZNyXcGo,5666
|
||||
sqlalchemy/dialects/mysql/mysqldb.py,sha256=R5BDiXiHX5oFuAOzyxZ6TYUTGzly-dulMeQLkeia6kk,9649
|
||||
sqlalchemy/dialects/mysql/provision.py,sha256=uPT6-BIoP_12XLmWAza1TDFNhOVVJ3rmQoMH7nvh-Vg,3226
|
||||
sqlalchemy/dialects/mysql/pymysql.py,sha256=d2-00IPoyEDkR9REQTE-DGEQrGshUq_0G5liZ5FiSEM,4032
|
||||
sqlalchemy/dialects/mysql/pyodbc.py,sha256=mkOvumrxpmAi6noZlkaTVKz2F7G5vLh2vx0cZSn9VTA,4288
|
||||
sqlalchemy/dialects/mysql/reflection.py,sha256=ak6E-eCP9346ixnILYNJcrRYblWbIT0sjXf4EqmfBsY,22556
|
||||
sqlalchemy/dialects/mysql/reserved_words.py,sha256=DsPHsW3vwOrvU7bv3Nbfact2Z_jyZ9xUTT-mdeQvqxo,9145
|
||||
sqlalchemy/dialects/mysql/types.py,sha256=i8DpRkOL1QhPErZ25AmCQOuFLciWhdjNL3I0CeHEhdY,24258
|
||||
sqlalchemy/dialects/oracle/__init__.py,sha256=pjk1aWi9XFCAHWNSJzSzmoIcL32-AkU_1J9IV4PtwpA,1318
|
||||
sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/base.py,sha256=u55_R9NrCRijud7ioHMxT-r0MSW0gMFjOwbrDdPgFsc,118036
|
||||
sqlalchemy/dialects/oracle/cx_oracle.py,sha256=L0GvcB6xb0-zyv5dx3bpQCeptp0KSqH6g9FUQ4y-d-g,55108
|
||||
sqlalchemy/dialects/oracle/dictionary.py,sha256=iUoyFEFM8z0sfVWR2n_nnre14kaQkV_syKO0R5Dos4M,19487
|
||||
sqlalchemy/dialects/oracle/oracledb.py,sha256=_-fUQ94xai80B7v9WLVGoGDIv8u54nVspBdyGEyI76g,3457
|
||||
sqlalchemy/dialects/oracle/provision.py,sha256=5cvIc3yTWxz4AIRYxcesbRJ1Ft-zT9GauQ911yPnN2o,8055
|
||||
sqlalchemy/dialects/oracle/types.py,sha256=TeOhUW5W9qZC8SaJ-9b3u6OvOPOarNq4MmCQ7l3wWX0,8204
|
||||
sqlalchemy/dialects/postgresql/__init__.py,sha256=bZEPsLbRtB7s6TMQAHCIzKBgkxUa3eDXvCkeARua37E,3734
|
||||
sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=U3aWzbKD3VOj6Z6r-4IsIQmtjGGIB4RDZH6NXfd8Xz0,5655
|
||||
sqlalchemy/dialects/postgresql/array.py,sha256=tLyU9GDAeIypNhjTuFQUYbaTeijVM1VVJS6UdzzXXn4,13682
|
||||
sqlalchemy/dialects/postgresql/asyncpg.py,sha256=XNaoOZ5Da4-jUTaES1zEOTEW3WG8UKyVCoIS3LsFhzE,39967
|
||||
sqlalchemy/dialects/postgresql/base.py,sha256=DGhaquFJWDQL7wIvQ2EE57LxD7zGR06BKQxvNZHFLgY,175634
|
||||
sqlalchemy/dialects/postgresql/dml.py,sha256=_He69efdpDA5gGmBsE7Lo4ViSi3QnR38BiFmrR1tw6k,11203
|
||||
sqlalchemy/dialects/postgresql/ext.py,sha256=oPP22Pq-n2lMmQ8ahifYmsmzRhSiSv1RV-xrTT0gycw,16253
|
||||
sqlalchemy/dialects/postgresql/hstore.py,sha256=q5x0npbAMI8cdRFGTMwLoWFj9P1G9DUkw5OEUCfTXpI,11532
|
||||
sqlalchemy/dialects/postgresql/json.py,sha256=panGtnEbcirQDy4yR2huWydFqa_Kmv8xhpLyf-SSRWE,11203
|
||||
sqlalchemy/dialects/postgresql/named_types.py,sha256=zNoHsP3nVq5xxA7SOQ6LLDwYZEHFciZ-nDjw_I9f_G0,17092
|
||||
sqlalchemy/dialects/postgresql/operators.py,sha256=MB40xq1124OnhUzkvtbnTmxEiey0VxMOYyznF96wwhI,2799
|
||||
sqlalchemy/dialects/postgresql/pg8000.py,sha256=w6pJ3LaIKWmnwvB0Pr1aTJX5OKNtG5RNClVfkE019vU,18620
|
||||
sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=0lLnIgxfCrqkx_LNijMxo0trNLsodcd8KwretZIj4uM,8875
|
||||
sqlalchemy/dialects/postgresql/provision.py,sha256=oxyAzs8_PhuK0ChivXC3l2Nldih3_HKffvGsZqD8XWI,5509
|
||||
sqlalchemy/dialects/postgresql/psycopg.py,sha256=YMubzQHMYN1By8QJScIPb_PwNiACv6srddQ6nX6WltQ,22238
|
||||
sqlalchemy/dialects/postgresql/psycopg2.py,sha256=3Xci4bTA2BvhrZAQa727uFWdaXEZmvfD-Z-upE3NyQE,31592
|
||||
sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=2EOuDwBetfvelcPoTzSwOHe6X8lTwaYH7znNzXJt9eM,1739
|
||||
sqlalchemy/dialects/postgresql/ranges.py,sha256=yHB1BRlUreQPZB3VEn0KMMLf02zjf5jjYdmg4N4S2Sw,30220
|
||||
sqlalchemy/dialects/postgresql/types.py,sha256=l24rs8_nK4vqLyQC0aUkf4S7ecw6T_7Pgq50Icc5CBs,7292
|
||||
sqlalchemy/dialects/sqlite/__init__.py,sha256=wnZ9vtfm0QXmth1jiGiubFgRiKxIoQoNthb1bp4FhCs,1173
|
||||
sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=GZJioZLot0D5CQ6ovPQoqv2iV8FAFm3G75lEFCzopoE,12296
|
||||
sqlalchemy/dialects/sqlite/base.py,sha256=YYEB5BeuemLC3FAR7EB8vA0zoUOwHTKoF_srvnAStps,96785
|
||||
sqlalchemy/dialects/sqlite/dml.py,sha256=PYESBj8Ip7bGs_Fi7QjbWLXLnU9a-SbP96JZiUoZNHg,8434
|
||||
sqlalchemy/dialects/sqlite/json.py,sha256=XFPwSdNx0DxDfxDZn7rmGGqsAgL4vpJbjjGaA73WruQ,2533
|
||||
sqlalchemy/dialects/sqlite/provision.py,sha256=O4JDoybdb2RBblXErEVPE2P_5xHab927BQItJa203zU,5383
|
||||
sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=_JuOCoic--ehAGkCgnwUUKKTs6xYoBGag4Y_WkQUDwU,5347
|
||||
sqlalchemy/dialects/sqlite/pysqlite.py,sha256=xBg6DKqvml5cCGxVSAQxR1dcMvso8q4uyXs2m4WLzz0,27891
|
||||
sqlalchemy/dialects/type_migration_guidelines.txt,sha256=-uHNdmYFGB7bzUNT6i8M5nb4j6j9YUKAtW4lcBZqsMg,8239
|
||||
sqlalchemy/engine/__init__.py,sha256=fJCAl5P7JH9iwjuWo72_3LOIzWWhTnvXqzpAmm_T0fY,2818
|
||||
sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/create.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/default.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/events.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/mock.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/processors.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/result.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/row.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/url.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/util.cpython-312.pyc,,
|
||||
sqlalchemy/engine/_py_processors.py,sha256=RSVKm9YppSBDSCEi8xvbZdRCP9EsCYfbyEg9iDCMCiI,3744
|
||||
sqlalchemy/engine/_py_row.py,sha256=Zdta0JGa7V2aV04L7nzXUEp-H1gpresKyBlneQu60pk,3549
|
||||
sqlalchemy/engine/_py_util.py,sha256=5m3MZbEqnUwP5kK_ghisFpzcXgBwSxTSkBEFB6afiD8,2245
|
||||
sqlalchemy/engine/base.py,sha256=RbIfWZ1Otyb4VzMYjDpK5BiDIE8QZwa4vQgRX0yCa28,122246
|
||||
sqlalchemy/engine/characteristics.py,sha256=YvMgrUVAt3wsSiQ0K8l44yBjFlMK3MGajxhg50t5yFM,2344
|
||||
sqlalchemy/engine/create.py,sha256=8372TLpy4FOAIZ9WmuNkx1v9DPgwpoCAH9P7LNXZCwY,32629
|
||||
sqlalchemy/engine/cursor.py,sha256=6e1Tp63r0Kt-P4pEaYR7wUew2aClTdKAEI-FoAAxJxE,74405
|
||||
sqlalchemy/engine/default.py,sha256=bi--ytxYJ0EtsCudl38owGtytnwTHX-PjlsYTFe8LpA,84065
|
||||
sqlalchemy/engine/events.py,sha256=PQyc_sbmqks6pqyN7xitO658KdKzzJWfW1TKYwEd5vo,37392
|
||||
sqlalchemy/engine/interfaces.py,sha256=pAFYR15f1Z_-qdzTYI4mAm8IYbD6maLBKbG3pBaJ8Us,112824
|
||||
sqlalchemy/engine/mock.py,sha256=ki4ud7YrUrzP2katdkxlJGFUKB2kS7cZZAHK5xWsNF8,4179
|
||||
sqlalchemy/engine/processors.py,sha256=ENN6XwndxJPW-aXPu_3NzAZsy5SvNznHoa1Qn29ERAw,2383
|
||||
sqlalchemy/engine/reflection.py,sha256=2aakNheQJNMUXZbhY8s1NtqGoGWTxM2THkJlMMfiX_s,75125
|
||||
sqlalchemy/engine/result.py,sha256=shRAsboHPTvKR38ryGgC4KLcUeVTbABSlWzAfOUKVZs,77841
|
||||
sqlalchemy/engine/row.py,sha256=doiXKaUI6s6OkfqPIwNyTPLllxJfR8HYgEI8ve9VYe0,11955
|
||||
sqlalchemy/engine/strategies.py,sha256=HjCj_FHQOgkkhhtnVmcOEuHI_cftNo3P0hN5zkhZvDc,442
|
||||
sqlalchemy/engine/url.py,sha256=_WNE7ia0JIPRc1PLY_jSA3F7bB5kp1gzuzkc5eoKviA,30694
|
||||
sqlalchemy/engine/util.py,sha256=3-ENI9S-3KLWr0GW27uWQfsvCJwMBGTKbykkKPUgiAE,5667
|
||||
sqlalchemy/event/__init__.py,sha256=CSBMp0yu5joTC6tWvx40B4p87N7oGKxC-ZLx2ULKUnQ,997
|
||||
sqlalchemy/event/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/event/__pycache__/api.cpython-312.pyc,,
|
||||
sqlalchemy/event/__pycache__/attr.cpython-312.pyc,,
|
||||
sqlalchemy/event/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/event/__pycache__/legacy.cpython-312.pyc,,
|
||||
sqlalchemy/event/__pycache__/registry.cpython-312.pyc,,
|
||||
sqlalchemy/event/api.py,sha256=nQAvPK1jrLpmu8aKCUtc-vYWcIuG-1FgAtp3GRkfIiI,8227
|
||||
sqlalchemy/event/attr.py,sha256=NMe_sPQTju2PE-f68C8TcKJGW-Gxyi1CLXumAmE368Y,20438
|
||||
sqlalchemy/event/base.py,sha256=Cr_PNJlCYJSU3rtT8DkplyjBRb-E2Wa3OAeK9woFJkk,14980
|
||||
sqlalchemy/event/legacy.py,sha256=OpPqE64xk1OYjLW1scvc6iijhoa5GZJt5f7-beWhgOc,8211
|
||||
sqlalchemy/event/registry.py,sha256=Zig9q2Galo8kO2aqr7a2rNAhmIkdJ-ntHSEcM5MfSgw,10833
|
||||
sqlalchemy/events.py,sha256=pRcPKKsPQHGPH_pvTtKRmzuEIy-QHCtkUiZl4MUbxKs,536
|
||||
sqlalchemy/exc.py,sha256=4SMKOJtz7_SWt5vskCSeXSi4ZlFyL4jh53Q8sk4-ODQ,24011
|
||||
sqlalchemy/ext/__init__.py,sha256=w4h7EpXjKPr0LD4yHa0pDCfrvleU3rrX7mgyb8RuDYQ,322
|
||||
sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/automap.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/baked.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc,,
|
||||
sqlalchemy/ext/associationproxy.py,sha256=5voNXWIJYGt6c8mwuSA6alm3SmEHOZ-CVK8ikgfzk8s,65960
|
||||
sqlalchemy/ext/asyncio/__init__.py,sha256=iG_0TmBO1pCB316WS-p17AImwqRtUoaKo7UphYZ7bYw,1317
|
||||
sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/base.py,sha256=PXF4YqfRi2-mADAtaL2_-Uv7CzoBVojPbzyA5phJ9To,8959
|
||||
sqlalchemy/ext/asyncio/engine.py,sha256=h4pe3ixuX6YfI97B5QWo2V4_CCCnOvM_EHPZhX19Mgc,47796
|
||||
sqlalchemy/ext/asyncio/exc.py,sha256=1hCdOKzvSryc_YE4jgj0l9JASOmZXutdzShEYPiLbGI,639
|
||||
sqlalchemy/ext/asyncio/result.py,sha256=zETerVB53gql1DL6tkO_JiqeU-m1OM-8kX0ULxmoL_I,30554
|
||||
sqlalchemy/ext/asyncio/scoping.py,sha256=cBNluB7n_lwdAAo6pySbvNRqPN7UBzwQHZ6XhRDyWgA,52685
|
||||
sqlalchemy/ext/asyncio/session.py,sha256=yWwhI5i_yVWjykxmxkcP3-xmw3UpoGYNhHZL8sYXQMA,62998
|
||||
sqlalchemy/ext/automap.py,sha256=7p13-VpN0MOM525r7pmEnftedya9l5G-Ei_cFXZfpTc,61431
|
||||
sqlalchemy/ext/baked.py,sha256=R8ZAxiVN6eH50AJu0O3TtFXNE1tnRkMlSj3AvkcWFhY,17818
|
||||
sqlalchemy/ext/compiler.py,sha256=h7eR0NcPJ4F_k8YGRP3R9YX75Y9pgiVxoCjRyvceF7g,20391
|
||||
sqlalchemy/ext/declarative/__init__.py,sha256=VJu8S1efxil20W48fJlpDn6gHorOudn5p3-lF72WcJ8,1818
|
||||
sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc,,
|
||||
sqlalchemy/ext/declarative/extensions.py,sha256=vwZjudPFA_mao1U04-RZCaU_tvPMBgQa5OTmSI7K7SU,19547
|
||||
sqlalchemy/ext/horizontal_shard.py,sha256=eh14W8QWHYH22PL1l5qF_ad9Fyh1WAFjKi_vNfsme94,16766
|
||||
sqlalchemy/ext/hybrid.py,sha256=98D72WBmlileYBtEKMSNF9l-bwRavThSV8-LyB2gjo0,52499
|
||||
sqlalchemy/ext/indexable.py,sha256=RkG9BKwil-TqDjVBM14ML9c-geUrHxtRKpYkSJEwGHA,11028
|
||||
sqlalchemy/ext/instrumentation.py,sha256=rjjSbTGilYeGLdyEWV932TfTaGxiVP44_RajinANk54,15723
|
||||
sqlalchemy/ext/mutable.py,sha256=d3Pp8PcAVN4pHN9rhc1ReXBWe0Q70Q5S1klFoYGyDPA,37393
|
||||
sqlalchemy/ext/mypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/apply.py,sha256=uUES4grydYtKykLKlxzJeBXeGe8kfWou9_rzEyEkfp0,10503
|
||||
sqlalchemy/ext/mypy/decl_class.py,sha256=Ls2Efh4kEhle6Z4VMz0GRBgGQTYs2fHr5b4DfuDj44c,17377
|
||||
sqlalchemy/ext/mypy/infer.py,sha256=si720RW6iGxMRZNP5tcaIxA1_ehFp215TzxVXaLjglU,19364
|
||||
sqlalchemy/ext/mypy/names.py,sha256=tch4f5fDmdv4AWWFzXgGZdCpxmae59XRPT02KyMvrEI,10625
|
||||
sqlalchemy/ext/mypy/plugin.py,sha256=fLXDukvZqbJ0JJCOoyZAuOniYZ_F1YT-l9gKppu8SEs,9750
|
||||
sqlalchemy/ext/mypy/util.py,sha256=TlEQq4bcs8ARLL3PoFS8Qw6oYFeMqcGnWTeJ7NsPPFk,9408
|
||||
sqlalchemy/ext/orderinglist.py,sha256=8Vcg7UUkLg-QbYAbLVDSqu-5REkR6L-FLLhCYsHYxCQ,14384
|
||||
sqlalchemy/ext/serializer.py,sha256=ox6dbMOBmFR0H2RQFt17mcYBOGKgn1cNVFfqY8-jpgQ,6178
|
||||
sqlalchemy/future/__init__.py,sha256=79DZx3v7TQZpkS_qThlmuCOm1a9UK2ObNZhyMmjfNB0,516
|
||||
sqlalchemy/future/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/future/__pycache__/engine.cpython-312.pyc,,
|
||||
sqlalchemy/future/engine.py,sha256=6uOpOedIqiT1-3qJSJIlv9_raMJU8NTkhQwN_Ngg8kI,499
|
||||
sqlalchemy/inspection.py,sha256=i3aR-IV101YU8D9TA8Pxb2wi08QZuJ34sMy6L5M__rY,5145
|
||||
sqlalchemy/log.py,sha256=aSlZ8DFHkOuI-AMmaOUUYtS9zGPadi_7tAo98QpUOiY,8634
|
||||
sqlalchemy/orm/__init__.py,sha256=cBn0aPWyDFY4ya-cHRshQBcuThk1smTUCTrlp6LHdlE,8463
|
||||
sqlalchemy/orm/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/_orm_constructors.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/_typing.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/attributes.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/bulk_persistence.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/clsregistry.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/collections.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/context.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/decl_api.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/decl_base.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/dependency.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/descriptor_props.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/dynamic.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/evaluator.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/events.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/exc.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/identity.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/instrumentation.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/interfaces.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/loading.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/mapped_collection.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/mapper.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/path_registry.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/persistence.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/properties.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/query.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/relationships.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/session.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/state.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/sync.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/util.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc,,
|
||||
sqlalchemy/orm/_orm_constructors.py,sha256=_7_GY6qw2sA-GG_WXLz1GOO-0qC-SCBeA43GhVuS2Qw,99803
|
||||
sqlalchemy/orm/_typing.py,sha256=oRUJVAGpU3_DhSkIb1anXgneweVIARjB51HlPhMNfcM,5015
|
||||
sqlalchemy/orm/attributes.py,sha256=NFhYheqqu2VcXmKTdcvQKiRR_6qo0rHLK7nda7rpviA,92578
|
||||
sqlalchemy/orm/base.py,sha256=iZXsygk4fn8wd7wx1iXn_PfnGDY7d41YRfS0mC_q5vE,27700
|
||||
sqlalchemy/orm/bulk_persistence.py,sha256=S9VK5a6GSqnw3z7O5UG5OOnc9WxzmS_ooDkA5JmCIsY,69878
|
||||
sqlalchemy/orm/clsregistry.py,sha256=4J-kKshmLOEyx3VBqREm2k_XY0cer4zwUoHJT3n5Xmw,17949
|
||||
sqlalchemy/orm/collections.py,sha256=0AZFr9us9MiHo_Xcyi7DUsN02jSBERUOd-jIK8qQ1DA,52159
|
||||
sqlalchemy/orm/context.py,sha256=VyJl1ZJ5OnJUACKlM-bPLyyoqu4tyaKKdxeC-QF4EuU,111698
|
||||
sqlalchemy/orm/decl_api.py,sha256=a2Cyvjh6j5BlXJQ2i0jpQx7xkeI_6xo5MMxr0d2ndQY,63589
|
||||
sqlalchemy/orm/decl_base.py,sha256=g9xW9G-n9iStMI0i3i-9Rt4LDRW8--3iCCRPlWF6Cko,81660
|
||||
sqlalchemy/orm/dependency.py,sha256=g3R_1H_OGzagXFeen3Irm3c1lO3yeXGdGa0muUZgZAk,47583
|
||||
sqlalchemy/orm/descriptor_props.py,sha256=SdrfVu05zhWLGe_DnBlgbU6e5sWkkfBTirH9Nrr1MLk,37176
|
||||
sqlalchemy/orm/dynamic.py,sha256=pYlMIrpp80Ex4KByqdyhx0x0kIrl_cIADwkeVxvYu4s,9798
|
||||
sqlalchemy/orm/evaluator.py,sha256=jPjVrP7XbVOG6aXTCBREq0rF3oNHLqB4XAT-gt_cpaA,11925
|
||||
sqlalchemy/orm/events.py,sha256=fGnUHwDTV9FTiifB2mmIJispwPbIT4mZongRJD7uiw4,127258
|
||||
sqlalchemy/orm/exc.py,sha256=A3wvZVs5sC5XCef4LoTUBG-UfhmliFpU9rYMdS2t_To,7356
|
||||
sqlalchemy/orm/identity.py,sha256=gRiuQSrurHGEAJXH9QGYioXL49Im5EGcYQ-IKUEpHmQ,9249
|
||||
sqlalchemy/orm/instrumentation.py,sha256=o1mTv5gCgl9d-SRvEXXjl8rzl8uBasRL3bpDgWg9P58,24337
|
||||
sqlalchemy/orm/interfaces.py,sha256=RW7bBXGWtZHY2wXFOSqtvYm6UDl7yHZUyRX_6Yd3GfQ,48395
|
||||
sqlalchemy/orm/loading.py,sha256=F1ZEHTPBglmznST2nGj_0ARccoFgTyaOOwjcqpYeuvM,57366
|
||||
sqlalchemy/orm/mapped_collection.py,sha256=ZgYHaF37yo6-gZ7Da1Gg25rMgG2GynAy-RJoDhljV5g,19698
|
||||
sqlalchemy/orm/mapper.py,sha256=kyq4pBkTvvEqlW4H4XK_ktP1sOiALNAycgvF5f-xtqw,170969
|
||||
sqlalchemy/orm/path_registry.py,sha256=olyutgn0uNB7Wi32YNQx9ZHV6sUgV3TbyGplfSxfZ6g,25938
|
||||
sqlalchemy/orm/persistence.py,sha256=qr1jUgo-NZ0tLa5eIis2271QDt4KNJwYlYU_9CaKNhQ,60545
|
||||
sqlalchemy/orm/properties.py,sha256=dt1Gy06pbRY6zgm4QGR9nU6z2WCyoTZWBJYKpUhLq_c,29095
|
||||
sqlalchemy/orm/query.py,sha256=VBSD0k15xU_XykggvLGAwGdwNglBAoBKbOk8qAoMKdI,117714
|
||||
sqlalchemy/orm/relationships.py,sha256=wrHyICb8A5qPoyxf-nITQVJ13kCNr2MedDqEY8QMSt8,127816
|
||||
sqlalchemy/orm/scoping.py,sha256=75iPEWDFhPcIXgl8EUd_sPTCL6punfegEaTRE5mP3e8,78835
|
||||
sqlalchemy/orm/session.py,sha256=TeBcZNdY4HWQFdXNCIqbsQTtkvfJkBweMzvA9p3BiPA,193279
|
||||
sqlalchemy/orm/state.py,sha256=EaWkVNWHaDeJ_FZGXHakSamUk51BXmtMWLGdFhlJmh8,37536
|
||||
sqlalchemy/orm/state_changes.py,sha256=pqkjSDOR6H5BufMKdzFUIatDp3DY90SovOJiJ1k6Ayw,6815
|
||||
sqlalchemy/orm/strategies.py,sha256=V0o-1kB1IVTxhOGqGtRyjddZqAbPdsl_h-k0N3MKCGo,114052
|
||||
sqlalchemy/orm/strategy_options.py,sha256=EmgH28uMQhwwBCDVcXmywLk_Q8AbpnK02seMsMV4nmc,84102
|
||||
sqlalchemy/orm/sync.py,sha256=5Nt_OqP4IfhAtHwFRar4dw-YjLENRLvp4d3jDC4wpnw,5749
|
||||
sqlalchemy/orm/unitofwork.py,sha256=Wk5YZocBbxe4m1wU2aFQ7gY1Cp5CROi13kDEM1iOSz4,27033
|
||||
sqlalchemy/orm/util.py,sha256=7hCRYbQjqhWJTkrPf_NXY9zF_18VWTpyguu-nfYfc6c,80340
|
||||
sqlalchemy/orm/writeonly.py,sha256=WCPXCAwHqVCfhVWXQEFCP3OocIiHgqNJ5KnuJwSgGq4,22329
|
||||
sqlalchemy/pool/__init__.py,sha256=CIv4b6ctueY7w3sML_LxyLKAdl59esYOhz3O7W5w7WE,1815
|
||||
sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/pool/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/pool/__pycache__/events.cpython-312.pyc,,
|
||||
sqlalchemy/pool/__pycache__/impl.cpython-312.pyc,,
|
||||
sqlalchemy/pool/base.py,sha256=wuwKIak5d_4-TqKI2RFN8OYMEyOvV0djnoSVR8gbxAQ,52249
|
||||
sqlalchemy/pool/events.py,sha256=IcWfORKbHM69Z9FdPJlXI7-NIhQrR9O_lg59tiUdTRU,13148
|
||||
sqlalchemy/pool/impl.py,sha256=vU0n82a7uxdE34p3hU7cvUDA5QDy9MkIv1COT4kYFP8,17724
|
||||
sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
sqlalchemy/schema.py,sha256=mt74CGCBtfv_qI1_6zzNFMexYGyWDj2Jkh-XdH4kEWI,3194
|
||||
sqlalchemy/sql/__init__.py,sha256=jAQx9rwhyPhoSjntM1BZSElJiMRmLowGThJVDGvExSU,5820
|
||||
sqlalchemy/sql/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_dml_constructors.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_elements_constructors.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_orm_types.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_py_util.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_typing.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/annotation.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/cache_key.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/coercions.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/compiler.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/crud.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/ddl.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/default_comparator.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/dml.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/elements.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/events.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/expression.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/functions.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/lambdas.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/naming.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/operators.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/roles.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/schema.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/selectable.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/sqltypes.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/traversals.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/type_api.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/util.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/visitors.cpython-312.pyc,,
|
||||
sqlalchemy/sql/_dml_constructors.py,sha256=hoNyINY3FNi1ZQajR6lbcRN7oYsNghM1wuzzVWxIv3c,3867
|
||||
sqlalchemy/sql/_elements_constructors.py,sha256=-qksx59Gqhmzxo1xByPtZZboNvL8uYcCN14pjHYHxL8,62914
|
||||
sqlalchemy/sql/_orm_types.py,sha256=_vR3_HQYgZR_of6_ZpTQByie2gaVScxQjVAVWAP3Ztg,620
|
||||
sqlalchemy/sql/_py_util.py,sha256=iiwgX3dQhOjdB5-10jtgHPIdibUqGk49bC1qdZMBpYI,2173
|
||||
sqlalchemy/sql/_selectable_constructors.py,sha256=RDqgejqiUuU12Be1jBpMIx_YdJho8fhKfnMoJLPFTFE,18812
|
||||
sqlalchemy/sql/_typing.py,sha256=C8kNZQ3TIpM-Q12Of3tTaESB1UxIfRME_lXouqgwMT8,12252
|
||||
sqlalchemy/sql/annotation.py,sha256=pTNidcQatCar6H1I9YAoPP1e6sOewaJ15B7_-7ykZOE,18271
|
||||
sqlalchemy/sql/base.py,sha256=dVvZoPoa3pb6iuwTU4QoCvVWQPyHZthaekl5J2zV_SU,73928
|
||||
sqlalchemy/sql/cache_key.py,sha256=Dl163qHjTkMCa5LTipZud8X3w0d8DvdIvGvv4AqriHE,32823
|
||||
sqlalchemy/sql/coercions.py,sha256=ju8xEi7b9G_GzxaQ6Nwu0cFIWFZ--ottIVfdiuhHY7Y,40553
|
||||
sqlalchemy/sql/compiler.py,sha256=9Wx423H72Yq7NHR8cmMAH6GpMCJmghs1L85YJqs_Lng,268763
|
||||
sqlalchemy/sql/crud.py,sha256=nyAPlmvuyWxMqSBdWPffC5P3CGXTQKK0bJoDbNgB3iQ,56457
|
||||
sqlalchemy/sql/ddl.py,sha256=XuUhulJLvvPjU4nYD6N42QLg8rEgquD6Jwn_yIHZejk,45542
|
||||
sqlalchemy/sql/default_comparator.py,sha256=SE0OaK1BlY0RinQ21ZXJOUGkO00oGv6GMMmAH-4iNTQ,16663
|
||||
sqlalchemy/sql/dml.py,sha256=eftbzdFJgMk7NV0BHKfK4dQ2R7XsyyJn6fCgYFJ0KNQ,65728
|
||||
sqlalchemy/sql/elements.py,sha256=dsNa2K57RygsGoaWuTMPp2QQ6SU3uZXSMW6CLGBbcIY,171208
|
||||
sqlalchemy/sql/events.py,sha256=xe3vJ6pQJau3dJWBAY0zU7Lz52UKuMrpLycriLm3AWA,18301
|
||||
sqlalchemy/sql/expression.py,sha256=baMnCH04jeE8E3tA2TovXlsREocA2j3fdHKnzOB8H4U,7586
|
||||
sqlalchemy/sql/functions.py,sha256=AcI_KstJxeLw6rEXx6QnIgR2rq4Ru6RXMbq4EIIUURA,55319
|
||||
sqlalchemy/sql/lambdas.py,sha256=EfDdUBi5cSmkjz8pQCSRo858UWQCFNZxXkM-1qS0CgU,49281
|
||||
sqlalchemy/sql/naming.py,sha256=l8udFP2wvXLgehIB0uF2KXwpkXSVSREDk6fLCH9F-XY,6865
|
||||
sqlalchemy/sql/operators.py,sha256=BYATjkBQLJAmwHAlGUSV-dv9RLtGw_ziAvFbKDrN4YU,76107
|
||||
sqlalchemy/sql/roles.py,sha256=71zm_xpRkUdnu-WzG6lxQVnFHwvUjf6X6e3kRIkbzAs,7686
|
||||
sqlalchemy/sql/schema.py,sha256=TOBTbcRY6ehosJEcpYn2NX0_UGZP9lfFs-o8lJVc5tI,228104
|
||||
sqlalchemy/sql/selectable.py,sha256=9dO2yhN83zjna7nPjOE1hcvGyJGjc_lj5SAz7SP5CBQ,233041
|
||||
sqlalchemy/sql/sqltypes.py,sha256=_0FpFLH0AFueb3TIB5Vcx9nXWDNj31XFQTP0u8OXnSo,126540
|
||||
sqlalchemy/sql/traversals.py,sha256=7b98JSeLxqecmGHhhLXT_2M4QMke6W-xCci5RXndhxI,33521
|
||||
sqlalchemy/sql/type_api.py,sha256=D9Kq-ppwZvlNmxaHqvVmM8IVg4n6_erzJpVioye9WKE,83823
|
||||
sqlalchemy/sql/util.py,sha256=lBEAf_-eRepTErOBCp1PbEMZDYdJqAiK1GemQtgojYo,48175
|
||||
sqlalchemy/sql/visitors.py,sha256=KD1qOYm6RdftCufVGB8q6jFTIZIQKS3zPCg78cVV0mQ,36427
|
||||
sqlalchemy/testing/__init__.py,sha256=9M2SMxBBLJ8xLUWXNCWDzkcvOqFznWcJzrSd712vATU,3126
|
||||
sqlalchemy/testing/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/assertions.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/assertsql.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/asyncio.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/config.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/engines.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/entities.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/exclusions.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/pickleable.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/profiling.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/requirements.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/schema.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/util.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/warnings.cpython-312.pyc,,
|
||||
sqlalchemy/testing/assertions.py,sha256=lNNZ-gfF4TDRXmB7hZDdch7JYZRb_qWGeqWDFKtopx0,31439
|
||||
sqlalchemy/testing/assertsql.py,sha256=EIVk3i5qjiSI63c1ikTPoGhulZl88SSeOS2VNo1LJvM,16817
|
||||
sqlalchemy/testing/asyncio.py,sha256=cAw68tzu3h5wjdIKfOqhFATcbMb38XeK0ThjIalUHuQ,3728
|
||||
sqlalchemy/testing/config.py,sha256=MZOWz7wqzc1pbwHWSAR0RJkt2C-SD6ox-nYY7VHdi_U,12030
|
||||
sqlalchemy/testing/engines.py,sha256=w5-0FbanItRsOt6x4n7wM_OnToCzJnrvZZ2hk5Yzng8,13355
|
||||
sqlalchemy/testing/entities.py,sha256=rysywsnjXHlIIC-uv0L7-fLmTAuNpHJvcSd1HeAdY5M,3354
|
||||
sqlalchemy/testing/exclusions.py,sha256=uoYLEwyNOK1eR8rpfOZ2Q3dxgY0akM-RtsIFML-FPrY,12444
|
||||
sqlalchemy/testing/fixtures/__init__.py,sha256=9snVns5A7g28LqC6gqQuO4xRBoJzdnf068GQ6Cae75I,1198
|
||||
sqlalchemy/testing/fixtures/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/testing/fixtures/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/testing/fixtures/__pycache__/mypy.cpython-312.pyc,,
|
||||
sqlalchemy/testing/fixtures/__pycache__/orm.cpython-312.pyc,,
|
||||
sqlalchemy/testing/fixtures/__pycache__/sql.cpython-312.pyc,,
|
||||
sqlalchemy/testing/fixtures/base.py,sha256=OayRr25soCqj1_yc665D5XbWWzFCm7Xl9Txtps953p4,12256
|
||||
sqlalchemy/testing/fixtures/mypy.py,sha256=7fWVZzYzNjqmLIoFa-MmXSGDPS3eZYFXlH-WxaxBDDY,11845
|
||||
sqlalchemy/testing/fixtures/orm.py,sha256=x27qjpK54JETATcYuiphtW-HXRy8ej8h3aCDkeQXPfY,6095
|
||||
sqlalchemy/testing/fixtures/sql.py,sha256=Q7Qq0n4qTT681nWt5DqjThopgjv5BB2KmSmrmAxUqHM,15704
|
||||
sqlalchemy/testing/pickleable.py,sha256=B9dXGF7E2PywB67SngHPjSMIBDTFhyAV4rkDUcyMulk,2833
|
||||
sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
sqlalchemy/testing/plugin/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,,
|
||||
sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-312.pyc,,
|
||||
sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-312.pyc,,
|
||||
sqlalchemy/testing/plugin/bootstrap.py,sha256=GrBB27KbswjE3Tt-zJlj6uSqGh9N-_CXkonnJSSBz84,1437
|
||||
sqlalchemy/testing/plugin/plugin_base.py,sha256=4SizjghFdDddt5o5gQ16Nw0bJHrtuBa4smxJcea-ti8,21573
|
||||
sqlalchemy/testing/plugin/pytestplugin.py,sha256=yh4PP406O0TwPMDzpJHpcNdU2WHXCLYI10F3oOLePjE,27295
|
||||
sqlalchemy/testing/profiling.py,sha256=HPjYvRLT1nD90FCZ7AA8j9ygkMtf1SGA47Xze2QPueo,10148
|
||||
sqlalchemy/testing/provision.py,sha256=w4F_ceGHPpWHUeh6cVcE5ktCC-ISrGc2yOSnXauOd5U,14200
|
||||
sqlalchemy/testing/requirements.py,sha256=gkviA8f5p4qdoDwAK791I4oGvnEqlm0ZZwJZpJzobFY,51393
|
||||
sqlalchemy/testing/schema.py,sha256=OSfMoIJ7ORbevGkeJdrKcTrQ0s7wXebuCU08mC1Y9jA,6513
|
||||
sqlalchemy/testing/suite/__init__.py,sha256=_firVc2uS3TMZ3vH2baQzNb17ubM78RHtb9kniSybmk,476
|
||||
sqlalchemy/testing/suite/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_cte.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_insert.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_results.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_select.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_types.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/test_cte.py,sha256=O5idVeBnHm9zdiG3tuCBUn4hYU_TA63-6LNnRygr8g0,6205
|
||||
sqlalchemy/testing/suite/test_ddl.py,sha256=xWimTjggpTe3S1Xfmt_IPofTXkUUcKuVSVCIfIyGMbA,11785
|
||||
sqlalchemy/testing/suite/test_deprecations.py,sha256=XI8ZU1NxC-6uvPDImaaq9O7Ov6MF5gmy-yk3TfesLAo,5082
|
||||
sqlalchemy/testing/suite/test_dialect.py,sha256=HUpHZb7pnHbsoRpDLONpsCO_oWhBgjglU9pBO-EOUw4,22673
|
||||
sqlalchemy/testing/suite/test_insert.py,sha256=Wm_pW0qqUNV1Fs7mXoxtmaTHMQGmaVDgDsYgZs1jlxM,18308
|
||||
sqlalchemy/testing/suite/test_reflection.py,sha256=Nd4Ao_J3Sr-VeAeWbUe3gs6STPvik9DC37WkyJc-PVg,106205
|
||||
sqlalchemy/testing/suite/test_results.py,sha256=Hd6R4jhBNNQSp0xGa8wwTgpw-XUrCEZ3dWXpoZ4_DKs,15687
|
||||
sqlalchemy/testing/suite/test_rowcount.py,sha256=zhKVv0ibFSQmnE5luLwgHAn840zOJ6HxtkR3oL995cs,7652
|
||||
sqlalchemy/testing/suite/test_select.py,sha256=QHsBX16EZpxlEZZLM0pMNcwayPU0dig39McKwiiith0,58325
|
||||
sqlalchemy/testing/suite/test_sequence.py,sha256=c80CBWrU930GPnPfr9TCRbTTuITR7BpIactncLIj2XU,9672
|
||||
sqlalchemy/testing/suite/test_types.py,sha256=QjV48MqR7dB8UVzt56UL2z7Nt28-IhywX3DKuQeLYsY,65429
|
||||
sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=7obItCpFt4qlWaDqe25HWgQT6FoUhgz1W7_Xycfz9Xk,5887
|
||||
sqlalchemy/testing/suite/test_update_delete.py,sha256=1hT0BTxB4SNipd6hnVlMnq25dLtQQoXov7z7UR0Sgi8,3658
|
||||
sqlalchemy/testing/util.py,sha256=Wsu4GZgCW6wX9mmxfiffhDz1cZm3778OB3LtiWNgb3Y,14080
|
||||
sqlalchemy/testing/warnings.py,sha256=pmfT33PF1q1PI7DdHOsup3LxHq1AC4-aYl1oL8HmrYo,1546
|
||||
sqlalchemy/types.py,sha256=DgBpPaT-vtsn6_glx5wocrIhR2A1vy56SQNRY3NiPUw,3168
|
||||
sqlalchemy/util/__init__.py,sha256=Bh0SkfkeCsz6-rbDmC41lAWOuCvKCiXVZthN2cWJEXk,8245
|
||||
sqlalchemy/util/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/_collections.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/_has_cy.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/_py_collections.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/compat.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/concurrency.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/deprecations.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/langhelpers.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/preloaded.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/queue.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/tool_support.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/topological.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/typing.cpython-312.pyc,,
|
||||
sqlalchemy/util/_collections.py,sha256=FYqVQg3CaqiEd21OFN1pNCfFbQ8gvlchW_TMtihSFNE,20169
|
||||
sqlalchemy/util/_concurrency_py3k.py,sha256=31vs1oXaLzeTRgmOXRrWToRQskWmJk-CBs3-JxSTcck,8223
|
||||
sqlalchemy/util/_has_cy.py,sha256=XMkeqCDGmhkd0uuzpCdyELz7gOjHxyFQ1AIlc5NneoY,1229
|
||||
sqlalchemy/util/_py_collections.py,sha256=cYjsYLCLBy5jdGBJATLJCmtfzr_AaJ-HKTUN8OdAzxY,16630
|
||||
sqlalchemy/util/compat.py,sha256=FkeHnW9asJYJvNmxVltee8jQNwQSdVRdKJlVRRInJI4,9388
|
||||
sqlalchemy/util/concurrency.py,sha256=ZxcQYOKy-GBsQkPmCrBO5MzMpqW3JZme2Hiyqpbt9uc,2284
|
||||
sqlalchemy/util/deprecations.py,sha256=pr9DSAf1ECqDk7X7F6TNc1jrhOeFihL33uEb5Wt2_T0,11971
|
||||
sqlalchemy/util/langhelpers.py,sha256=CQQP2Q9c68nL5mcWL-Q38-INrtoDHDnBmq7QhnWyEDM,64980
|
||||
sqlalchemy/util/preloaded.py,sha256=KKNLJEqChDW1TNUsM_TzKu7JYEA3kkuh2N-quM_2_Y4,5905
|
||||
sqlalchemy/util/queue.py,sha256=ITejs6KS4Hz_ojrss2oFeUO9MoIeR3qWmZQ8J7yyrNU,10205
|
||||
sqlalchemy/util/tool_support.py,sha256=epm8MzDZpVmhE6LIjrjJrP8BUf12Wab2m28A9lGq95s,5969
|
||||
sqlalchemy/util/topological.py,sha256=hjJWL3C_B7Rpv9s7jj7wcTckcZUSkxc6xRDhiN1xyec,3458
|
||||
sqlalchemy/util/typing.py,sha256=ESYm4oQtt-SarN04YTXCgovXT8tFupMiPmuGCDCMEIc,15831
|
||||
@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.41.3)
Root-Is-Purelib: false
Tag: cp312-cp312-manylinux_2_17_x86_64
Tag: cp312-cp312-manylinux2014_x86_64

@@ -0,0 +1 @@
sqlalchemy
1
venv/lib/python3.12/site-packages/_black_version.py
Normal file
@@ -0,0 +1 @@
|
||||
version = "23.10.1"
|
||||
BIN
venv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so
Executable file
Binary file not shown.
9
venv/lib/python3.12/site-packages/_pytest/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
||||
__all__ = ["__version__", "version_tuple"]
|
||||
|
||||
try:
|
||||
from ._version import version as __version__, version_tuple
|
||||
except ImportError: # pragma: no cover
|
||||
# broken installation, we don't even try
|
||||
# unknown only works because we do a poor man's version compare
|
||||
__version__ = "unknown"
|
||||
version_tuple = (0, 0, "unknown") # type:ignore[assignment]
|
||||
116
venv/lib/python3.12/site-packages/_pytest/_argcomplete.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""Allow bash-completion for argparse with argcomplete if installed.
|
||||
|
||||
Needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail
|
||||
to find the magic string, so _ARGCOMPLETE env. var is never set, and
|
||||
this does not need special code).
|
||||
|
||||
Function try_argcomplete(parser) should be called directly before
|
||||
the call to ArgumentParser.parse_args().
|
||||
|
||||
The filescompleter is what you normally would use on the positional
|
||||
arguments specification, in order to get "dirname/" after "dirn<TAB>"
|
||||
instead of the default "dirname ":
|
||||
|
||||
optparser.add_argument(Config._file_or_dir, nargs='*').completer=filescompleter
|
||||
|
||||
Other, application specific, completers should go in the file
|
||||
doing the add_argument calls as they need to be specified as .completer
|
||||
attributes as well. (If argcomplete is not installed, the function the
|
||||
attribute points to will not be used).
|
||||
|
||||
SPEEDUP
|
||||
=======
|
||||
|
||||
The generic argcomplete script for bash-completion
|
||||
(/etc/bash_completion.d/python-argcomplete.sh)
|
||||
uses a python program to determine the startup script generated by pip.
|
||||
You can speed up completion somewhat by changing this script to include
|
||||
# PYTHON_ARGCOMPLETE_OK
|
||||
so the python-argcomplete-check-easy-install-script does not
|
||||
need to be called to find the entry point of the code and see if that is
|
||||
marked with PYTHON_ARGCOMPLETE_OK.
|
||||
|
||||
INSTALL/DEBUGGING
|
||||
=================
|
||||
|
||||
To include this support in another application that has setup.py generated
|
||||
scripts:
|
||||
|
||||
- Add the line:
|
||||
# PYTHON_ARGCOMPLETE_OK
|
||||
near the top of the main python entry point.
|
||||
|
||||
- Include in the file calling parse_args():
|
||||
from _argcomplete import try_argcomplete, filescompleter
|
||||
Call try_argcomplete just before parse_args(), and optionally add
|
||||
filescompleter to the positional arguments' add_argument().
|
||||
|
||||
If things do not work right away:
|
||||
|
||||
- Switch on argcomplete debugging with (also helpful when doing custom
|
||||
completers):
|
||||
export _ARC_DEBUG=1
|
||||
|
||||
- Run:
|
||||
python-argcomplete-check-easy-install-script $(which appname)
|
||||
echo $?
|
||||
will echo 0 if the magic line has been found, 1 if not.
|
||||
|
||||
- Sometimes it helps to find early on errors using:
|
||||
_ARGCOMPLETE=1 _ARC_DEBUG=1 appname
|
||||
which should throw a KeyError: 'COMPLINE' (which is properly set by the
|
||||
global argcomplete script).
|
||||
"""
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
from glob import glob
|
||||
from typing import Any
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class FastFilesCompleter:
|
||||
"""Fast file completer class."""
|
||||
|
||||
def __init__(self, directories: bool = True) -> None:
|
||||
self.directories = directories
|
||||
|
||||
def __call__(self, prefix: str, **kwargs: Any) -> List[str]:
|
||||
# Only called on non option completions.
|
||||
if os.sep in prefix[1:]:
|
||||
prefix_dir = len(os.path.dirname(prefix) + os.sep)
|
||||
else:
|
||||
prefix_dir = 0
|
||||
completion = []
|
||||
globbed = []
|
||||
if "*" not in prefix and "?" not in prefix:
|
||||
# We are on unix, otherwise no bash.
|
||||
if not prefix or prefix[-1] == os.sep:
|
||||
globbed.extend(glob(prefix + ".*"))
|
||||
prefix += "*"
|
||||
globbed.extend(glob(prefix))
|
||||
for x in sorted(globbed):
|
||||
if os.path.isdir(x):
|
||||
x += "/"
|
||||
# Append stripping the prefix (like bash, not like compgen).
|
||||
completion.append(x[prefix_dir:])
|
||||
return completion
|
||||
|
||||
|
||||
if os.environ.get("_ARGCOMPLETE"):
|
||||
try:
|
||||
import argcomplete.completers
|
||||
except ImportError:
|
||||
sys.exit(-1)
|
||||
filescompleter: Optional[FastFilesCompleter] = FastFilesCompleter()
|
||||
|
||||
def try_argcomplete(parser: argparse.ArgumentParser) -> None:
|
||||
argcomplete.autocomplete(parser, always_complete_options=False)
|
||||
|
||||
else:
|
||||
|
||||
def try_argcomplete(parser: argparse.ArgumentParser) -> None:
|
||||
pass
|
||||
|
||||
filescompleter = None
|
||||
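The module docstring above spells out the wiring steps; a minimal sketch of a standalone script following them, assuming argcomplete is installed (the program name and arguments are illustrative):

#!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
# (the marker line above is what the bash completion hook scans for)
import argparse

from _pytest._argcomplete import filescompleter, try_argcomplete

parser = argparse.ArgumentParser(prog="appname")
# Attach the fast file completer to the positional argument, as suggested above.
parser.add_argument("file_or_dir", nargs="*").completer = filescompleter
# Must run immediately before parse_args() for completions to be produced.
try_argcomplete(parser)
args = parser.parse_args()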
22
venv/lib/python3.12/site-packages/_pytest/_code/__init__.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""Python inspection/code generation API."""
|
||||
from .code import Code
|
||||
from .code import ExceptionInfo
|
||||
from .code import filter_traceback
|
||||
from .code import Frame
|
||||
from .code import getfslineno
|
||||
from .code import Traceback
|
||||
from .code import TracebackEntry
|
||||
from .source import getrawcode
|
||||
from .source import Source
|
||||
|
||||
__all__ = [
|
||||
"Code",
|
||||
"ExceptionInfo",
|
||||
"filter_traceback",
|
||||
"Frame",
|
||||
"getfslineno",
|
||||
"getrawcode",
|
||||
"Traceback",
|
||||
"TracebackEntry",
|
||||
"Source",
|
||||
]
|
||||
1337
venv/lib/python3.12/site-packages/_pytest/_code/code.py
Normal file
File diff suppressed because it is too large
Load Diff
217
venv/lib/python3.12/site-packages/_pytest/_code/source.py
Normal file
@@ -0,0 +1,217 @@
|
||||
import ast
|
||||
import inspect
|
||||
import textwrap
|
||||
import tokenize
|
||||
import types
|
||||
import warnings
|
||||
from bisect import bisect_right
|
||||
from typing import Iterable
|
||||
from typing import Iterator
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import overload
|
||||
from typing import Tuple
|
||||
from typing import Union
|
||||
|
||||
|
||||
class Source:
|
||||
"""An immutable object holding a source code fragment.
|
||||
|
||||
When using Source(...), the source lines are deindented.
|
||||
"""
|
||||
|
||||
def __init__(self, obj: object = None) -> None:
|
||||
if not obj:
|
||||
self.lines: List[str] = []
|
||||
elif isinstance(obj, Source):
|
||||
self.lines = obj.lines
|
||||
elif isinstance(obj, (tuple, list)):
|
||||
self.lines = deindent(x.rstrip("\n") for x in obj)
|
||||
elif isinstance(obj, str):
|
||||
self.lines = deindent(obj.split("\n"))
|
||||
else:
|
||||
try:
|
||||
rawcode = getrawcode(obj)
|
||||
src = inspect.getsource(rawcode)
|
||||
except TypeError:
|
||||
src = inspect.getsource(obj) # type: ignore[arg-type]
|
||||
self.lines = deindent(src.split("\n"))
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, Source):
|
||||
return NotImplemented
|
||||
return self.lines == other.lines
|
||||
|
||||
# Ignore type because of https://github.com/python/mypy/issues/4266.
|
||||
__hash__ = None # type: ignore
|
||||
|
||||
@overload
|
||||
def __getitem__(self, key: int) -> str:
|
||||
...
|
||||
|
||||
@overload
|
||||
def __getitem__(self, key: slice) -> "Source":
|
||||
...
|
||||
|
||||
def __getitem__(self, key: Union[int, slice]) -> Union[str, "Source"]:
|
||||
if isinstance(key, int):
|
||||
return self.lines[key]
|
||||
else:
|
||||
if key.step not in (None, 1):
|
||||
raise IndexError("cannot slice a Source with a step")
|
||||
newsource = Source()
|
||||
newsource.lines = self.lines[key.start : key.stop]
|
||||
return newsource
|
||||
|
||||
def __iter__(self) -> Iterator[str]:
|
||||
return iter(self.lines)
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self.lines)
|
||||
|
||||
def strip(self) -> "Source":
|
||||
"""Return new Source object with trailing and leading blank lines removed."""
|
||||
start, end = 0, len(self)
|
||||
while start < end and not self.lines[start].strip():
|
||||
start += 1
|
||||
while end > start and not self.lines[end - 1].strip():
|
||||
end -= 1
|
||||
source = Source()
|
||||
source.lines[:] = self.lines[start:end]
|
||||
return source
|
||||
|
||||
def indent(self, indent: str = " " * 4) -> "Source":
|
||||
"""Return a copy of the source object with all lines indented by the
|
||||
given indent-string."""
|
||||
newsource = Source()
|
||||
newsource.lines = [(indent + line) for line in self.lines]
|
||||
return newsource
|
||||
|
||||
def getstatement(self, lineno: int) -> "Source":
|
||||
"""Return Source statement which contains the given linenumber
|
||||
(counted from 0)."""
|
||||
start, end = self.getstatementrange(lineno)
|
||||
return self[start:end]
|
||||
|
||||
def getstatementrange(self, lineno: int) -> Tuple[int, int]:
|
||||
"""Return (start, end) tuple which spans the minimal statement region
|
||||
which contains the given lineno."""
|
||||
if not (0 <= lineno < len(self)):
|
||||
raise IndexError("lineno out of range")
|
||||
ast, start, end = getstatementrange_ast(lineno, self)
|
||||
return start, end
|
||||
|
||||
def deindent(self) -> "Source":
|
||||
"""Return a new Source object deindented."""
|
||||
newsource = Source()
|
||||
newsource.lines[:] = deindent(self.lines)
|
||||
return newsource
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "\n".join(self.lines)
|
||||
|
||||
|
||||
#
|
||||
# helper functions
|
||||
#
|
||||
|
||||
|
||||
def findsource(obj) -> Tuple[Optional[Source], int]:
|
||||
try:
|
||||
sourcelines, lineno = inspect.findsource(obj)
|
||||
except Exception:
|
||||
return None, -1
|
||||
source = Source()
|
||||
source.lines = [line.rstrip() for line in sourcelines]
|
||||
return source, lineno
|
||||
|
||||
|
||||
def getrawcode(obj: object, trycall: bool = True) -> types.CodeType:
|
||||
"""Return code object for given function."""
|
||||
try:
|
||||
return obj.__code__ # type: ignore[attr-defined,no-any-return]
|
||||
except AttributeError:
|
||||
pass
|
||||
if trycall:
|
||||
call = getattr(obj, "__call__", None)
|
||||
if call and not isinstance(obj, type):
|
||||
return getrawcode(call, trycall=False)
|
||||
raise TypeError(f"could not get code object for {obj!r}")
|
||||
|
||||
|
||||
def deindent(lines: Iterable[str]) -> List[str]:
|
||||
return textwrap.dedent("\n".join(lines)).splitlines()
|
||||
|
||||
|
||||
def get_statement_startend2(lineno: int, node: ast.AST) -> Tuple[int, Optional[int]]:
|
||||
# Flatten all statements and except handlers into one lineno-list.
|
||||
# AST's line numbers start indexing at 1.
|
||||
values: List[int] = []
|
||||
for x in ast.walk(node):
|
||||
if isinstance(x, (ast.stmt, ast.ExceptHandler)):
|
||||
# Before Python 3.8, the lineno of a decorated class or function pointed at the decorator.
|
||||
# Since Python 3.8, the lineno points to the class/def, so need to include the decorators.
|
||||
if isinstance(x, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)):
|
||||
for d in x.decorator_list:
|
||||
values.append(d.lineno - 1)
|
||||
values.append(x.lineno - 1)
|
||||
for name in ("finalbody", "orelse"):
|
||||
val: Optional[List[ast.stmt]] = getattr(x, name, None)
|
||||
if val:
|
||||
# Treat the finally/orelse part as its own statement.
|
||||
values.append(val[0].lineno - 1 - 1)
|
||||
values.sort()
|
||||
insert_index = bisect_right(values, lineno)
|
||||
start = values[insert_index - 1]
|
||||
if insert_index >= len(values):
|
||||
end = None
|
||||
else:
|
||||
end = values[insert_index]
|
||||
return start, end
|
||||
|
||||
|
||||
def getstatementrange_ast(
|
||||
lineno: int,
|
||||
source: Source,
|
||||
assertion: bool = False,
|
||||
astnode: Optional[ast.AST] = None,
|
||||
) -> Tuple[ast.AST, int, int]:
|
||||
if astnode is None:
|
||||
content = str(source)
|
||||
# See #4260:
|
||||
# Don't produce duplicate warnings when compiling source to find AST.
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("ignore")
|
||||
astnode = ast.parse(content, "source", "exec")
|
||||
|
||||
start, end = get_statement_startend2(lineno, astnode)
|
||||
# We need to correct the end:
|
||||
# - ast-parsing strips comments
|
||||
# - there might be empty lines
|
||||
# - we might have lesser indented code blocks at the end
|
||||
if end is None:
|
||||
end = len(source.lines)
|
||||
|
||||
if end > start + 1:
|
||||
# Make sure we don't span differently indented code blocks
|
||||
# by using the BlockFinder helper which inspect.getsource() uses itself.
|
||||
block_finder = inspect.BlockFinder()
|
||||
# If we start with an indented line, put blockfinder to "started" mode.
|
||||
block_finder.started = source.lines[start][0].isspace()
|
||||
it = ((x + "\n") for x in source.lines[start:end])
|
||||
try:
|
||||
for tok in tokenize.generate_tokens(lambda: next(it)):
|
||||
block_finder.tokeneater(*tok)
|
||||
except (inspect.EndOfBlock, IndentationError):
|
||||
end = block_finder.last + start
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# The end might still point to a comment or empty line, correct it.
|
||||
while end:
|
||||
line = source.lines[end - 1].lstrip()
|
||||
if line.startswith("#") or not line:
|
||||
end -= 1
|
||||
else:
|
||||
break
|
||||
return astnode, start, end
|
||||
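A short sketch of how the Source class above behaves (the function body is invented for the example):

from _pytest._code import Source

src = Source("""\
    def f(x):
        return (x +
                1)
""")
# Construction deindents the fragment.
assert src.lines[0] == "def f(x):"
# getstatement() expands a 0-based line number to the whole multi-line statement.
stmt = src.getstatement(1)   # the full "return (x + ... 1)" statement
# Slicing returns a new Source object rather than a list of strings.
assert isinstance(src[0:2], Source)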
@@ -0,0 +1,8 @@
|
||||
from .terminalwriter import get_terminal_width
|
||||
from .terminalwriter import TerminalWriter
|
||||
|
||||
|
||||
__all__ = [
|
||||
"TerminalWriter",
|
||||
"get_terminal_width",
|
||||
]
|
||||
180
venv/lib/python3.12/site-packages/_pytest/_io/saferepr.py
Normal file
@@ -0,0 +1,180 @@
|
||||
import pprint
|
||||
import reprlib
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import IO
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def _try_repr_or_str(obj: object) -> str:
|
||||
try:
|
||||
return repr(obj)
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except BaseException:
|
||||
return f'{type(obj).__name__}("{obj}")'
|
||||
|
||||
|
||||
def _format_repr_exception(exc: BaseException, obj: object) -> str:
|
||||
try:
|
||||
exc_info = _try_repr_or_str(exc)
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except BaseException as exc:
|
||||
exc_info = f"unpresentable exception ({_try_repr_or_str(exc)})"
|
||||
return "<[{} raised in repr()] {} object at 0x{:x}>".format(
|
||||
exc_info, type(obj).__name__, id(obj)
|
||||
)
|
||||
|
||||
|
||||
def _ellipsize(s: str, maxsize: int) -> str:
|
||||
if len(s) > maxsize:
|
||||
i = max(0, (maxsize - 3) // 2)
|
||||
j = max(0, maxsize - 3 - i)
|
||||
return s[:i] + "..." + s[len(s) - j :]
|
||||
return s
|
||||
|
||||
|
||||
class SafeRepr(reprlib.Repr):
|
||||
"""
|
||||
repr.Repr that limits the resulting size of repr() and includes
|
||||
information on exceptions raised during the call.
|
||||
"""
|
||||
|
||||
def __init__(self, maxsize: Optional[int], use_ascii: bool = False) -> None:
|
||||
"""
|
||||
:param maxsize:
|
||||
If not None, will truncate the resulting repr to that specific size, using ellipsis
|
||||
somewhere in the middle to hide the extra text.
|
||||
If None, will not impose any size limits on the returning repr.
|
||||
"""
|
||||
super().__init__()
|
||||
# ``maxstring`` is used by the superclass, and needs to be an int; using a
|
||||
# very large number in case maxsize is None, meaning we want to disable
|
||||
# truncation.
|
||||
self.maxstring = maxsize if maxsize is not None else 1_000_000_000
|
||||
self.maxsize = maxsize
|
||||
self.use_ascii = use_ascii
|
||||
|
||||
def repr(self, x: object) -> str:
|
||||
try:
|
||||
if self.use_ascii:
|
||||
s = ascii(x)
|
||||
else:
|
||||
s = super().repr(x)
|
||||
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except BaseException as exc:
|
||||
s = _format_repr_exception(exc, x)
|
||||
if self.maxsize is not None:
|
||||
s = _ellipsize(s, self.maxsize)
|
||||
return s
|
||||
|
||||
def repr_instance(self, x: object, level: int) -> str:
|
||||
try:
|
||||
s = repr(x)
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except BaseException as exc:
|
||||
s = _format_repr_exception(exc, x)
|
||||
if self.maxsize is not None:
|
||||
s = _ellipsize(s, self.maxsize)
|
||||
return s
|
||||
|
||||
|
||||
def safeformat(obj: object) -> str:
|
||||
"""Return a pretty printed string for the given object.
|
||||
|
||||
Failing __repr__ functions of user instances will be represented
|
||||
with a short exception info.
|
||||
"""
|
||||
try:
|
||||
return pprint.pformat(obj)
|
||||
except Exception as exc:
|
||||
return _format_repr_exception(exc, obj)
|
||||
|
||||
|
||||
# Maximum size of overall repr of objects to display during assertion errors.
|
||||
DEFAULT_REPR_MAX_SIZE = 240
|
||||
|
||||
|
||||
def saferepr(
|
||||
obj: object, maxsize: Optional[int] = DEFAULT_REPR_MAX_SIZE, use_ascii: bool = False
|
||||
) -> str:
|
||||
"""Return a size-limited safe repr-string for the given object.
|
||||
|
||||
Failing __repr__ functions of user instances will be represented
|
||||
with a short exception info and 'saferepr' generally takes
|
||||
care to never raise exceptions itself.
|
||||
|
||||
This function is a wrapper around the Repr/reprlib functionality of the
|
||||
stdlib.
|
||||
"""
|
||||
|
||||
return SafeRepr(maxsize, use_ascii).repr(obj)
|
||||
|
||||
|
||||
def saferepr_unlimited(obj: object, use_ascii: bool = True) -> str:
|
||||
"""Return an unlimited-size safe repr-string for the given object.
|
||||
|
||||
As with saferepr, failing __repr__ functions of user instances
|
||||
will be represented with a short exception info.
|
||||
|
||||
This function is a wrapper around simple repr.
|
||||
|
||||
Note: a cleaner solution would be to alter ``saferepr`` this way
|
||||
when maxsize=None, but that might affect some other code.
|
||||
"""
|
||||
try:
|
||||
if use_ascii:
|
||||
return ascii(obj)
|
||||
return repr(obj)
|
||||
except Exception as exc:
|
||||
return _format_repr_exception(exc, obj)
|
||||
|
||||
|
||||
class AlwaysDispatchingPrettyPrinter(pprint.PrettyPrinter):
|
||||
"""PrettyPrinter that always dispatches (regardless of width)."""
|
||||
|
||||
def _format(
|
||||
self,
|
||||
object: object,
|
||||
stream: IO[str],
|
||||
indent: int,
|
||||
allowance: int,
|
||||
context: Dict[int, Any],
|
||||
level: int,
|
||||
) -> None:
|
||||
# Type ignored because _dispatch is private.
|
||||
p = self._dispatch.get(type(object).__repr__, None) # type: ignore[attr-defined]
|
||||
|
||||
objid = id(object)
|
||||
if objid in context or p is None:
|
||||
# Type ignored because _format is private.
|
||||
super()._format( # type: ignore[misc]
|
||||
object,
|
||||
stream,
|
||||
indent,
|
||||
allowance,
|
||||
context,
|
||||
level,
|
||||
)
|
||||
return
|
||||
|
||||
context[objid] = 1
|
||||
p(self, object, stream, indent, allowance, context, level + 1)
|
||||
del context[objid]
|
||||
|
||||
|
||||
def _pformat_dispatch(
|
||||
object: object,
|
||||
indent: int = 1,
|
||||
width: int = 80,
|
||||
depth: Optional[int] = None,
|
||||
*,
|
||||
compact: bool = False,
|
||||
) -> str:
|
||||
return AlwaysDispatchingPrettyPrinter(
|
||||
indent=indent, width=width, depth=depth, compact=compact
|
||||
).pformat(object)
|
||||
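To show what saferepr() above adds over plain repr(), a small sketch (the Broken class is invented for the example; the printed address will of course vary):

from _pytest._io.saferepr import saferepr

class Broken:
    def __repr__(self):
        raise ValueError("boom")

# repr(Broken()) would raise; saferepr() reports the failure instead, roughly
# "<[ValueError('boom') raised in repr()] Broken object at 0x...>".
print(saferepr(Broken()))

# Long reprs are shortened to maxsize with an ellipsis in the middle.
print(saferepr(list(range(1000)), maxsize=40))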
233
venv/lib/python3.12/site-packages/_pytest/_io/terminalwriter.py
Normal file
@@ -0,0 +1,233 @@
|
||||
"""Helper functions for writing to terminals and files."""
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from typing import TextIO
|
||||
|
||||
from .wcwidth import wcswidth
|
||||
from _pytest.compat import final
|
||||
|
||||
|
||||
# This code was initially copied from py 1.8.1, file _io/terminalwriter.py.
|
||||
|
||||
|
||||
def get_terminal_width() -> int:
|
||||
width, _ = shutil.get_terminal_size(fallback=(80, 24))
|
||||
|
||||
# The Windows get_terminal_size may be bogus, let's sanitize it a bit.
|
||||
if width < 40:
|
||||
width = 80
|
||||
|
||||
return width
|
||||
|
||||
|
||||
def should_do_markup(file: TextIO) -> bool:
|
||||
if os.environ.get("PY_COLORS") == "1":
|
||||
return True
|
||||
if os.environ.get("PY_COLORS") == "0":
|
||||
return False
|
||||
if "NO_COLOR" in os.environ:
|
||||
return False
|
||||
if "FORCE_COLOR" in os.environ:
|
||||
return True
|
||||
return (
|
||||
hasattr(file, "isatty") and file.isatty() and os.environ.get("TERM") != "dumb"
|
||||
)
|
||||
|
||||
|
||||
@final
|
||||
class TerminalWriter:
|
||||
_esctable = dict(
|
||||
black=30,
|
||||
red=31,
|
||||
green=32,
|
||||
yellow=33,
|
||||
blue=34,
|
||||
purple=35,
|
||||
cyan=36,
|
||||
white=37,
|
||||
Black=40,
|
||||
Red=41,
|
||||
Green=42,
|
||||
Yellow=43,
|
||||
Blue=44,
|
||||
Purple=45,
|
||||
Cyan=46,
|
||||
White=47,
|
||||
bold=1,
|
||||
light=2,
|
||||
blink=5,
|
||||
invert=7,
|
||||
)
|
||||
|
||||
def __init__(self, file: Optional[TextIO] = None) -> None:
|
||||
if file is None:
|
||||
file = sys.stdout
|
||||
if hasattr(file, "isatty") and file.isatty() and sys.platform == "win32":
|
||||
try:
|
||||
import colorama
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
file = colorama.AnsiToWin32(file).stream
|
||||
assert file is not None
|
||||
self._file = file
|
||||
self.hasmarkup = should_do_markup(file)
|
||||
self._current_line = ""
|
||||
self._terminal_width: Optional[int] = None
|
||||
self.code_highlight = True
|
||||
|
||||
@property
|
||||
def fullwidth(self) -> int:
|
||||
if self._terminal_width is not None:
|
||||
return self._terminal_width
|
||||
return get_terminal_width()
|
||||
|
||||
@fullwidth.setter
|
||||
def fullwidth(self, value: int) -> None:
|
||||
self._terminal_width = value
|
||||
|
||||
@property
|
||||
def width_of_current_line(self) -> int:
|
||||
"""Return an estimate of the width so far in the current line."""
|
||||
return wcswidth(self._current_line)
|
||||
|
||||
def markup(self, text: str, **markup: bool) -> str:
|
||||
for name in markup:
|
||||
if name not in self._esctable:
|
||||
raise ValueError(f"unknown markup: {name!r}")
|
||||
if self.hasmarkup:
|
||||
esc = [self._esctable[name] for name, on in markup.items() if on]
|
||||
if esc:
|
||||
text = "".join("\x1b[%sm" % cod for cod in esc) + text + "\x1b[0m"
|
||||
return text
|
||||
|
||||
def sep(
|
||||
self,
|
||||
sepchar: str,
|
||||
title: Optional[str] = None,
|
||||
fullwidth: Optional[int] = None,
|
||||
**markup: bool,
|
||||
) -> None:
|
||||
if fullwidth is None:
|
||||
fullwidth = self.fullwidth
|
||||
# The goal is to have the line be as long as possible
|
||||
# under the condition that len(line) <= fullwidth.
|
||||
if sys.platform == "win32":
|
||||
# If we print in the last column on windows we are on a
|
||||
# new line but there is no way to verify/neutralize this
|
||||
# (we may not know the exact line width).
|
||||
# So let's be defensive to avoid empty lines in the output.
|
||||
fullwidth -= 1
|
||||
if title is not None:
|
||||
# we want 2 + 2*len(fill) + len(title) <= fullwidth
|
||||
# i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
|
||||
# 2*len(sepchar)*N <= fullwidth - len(title) - 2
|
||||
# N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
|
||||
N = max((fullwidth - len(title) - 2) // (2 * len(sepchar)), 1)
|
||||
fill = sepchar * N
|
||||
line = f"{fill} {title} {fill}"
|
||||
else:
|
||||
# we want len(sepchar)*N <= fullwidth
|
||||
# i.e. N <= fullwidth // len(sepchar)
|
||||
line = sepchar * (fullwidth // len(sepchar))
|
||||
# In some situations there is room for an extra sepchar at the right,
|
||||
# in particular if we consider that with a sepchar like "_ " the
|
||||
# trailing space is not important at the end of the line.
|
||||
if len(line) + len(sepchar.rstrip()) <= fullwidth:
|
||||
line += sepchar.rstrip()
|
||||
|
||||
self.line(line, **markup)
|
||||
|
||||
def write(self, msg: str, *, flush: bool = False, **markup: bool) -> None:
|
||||
if msg:
|
||||
current_line = msg.rsplit("\n", 1)[-1]
|
||||
if "\n" in msg:
|
||||
self._current_line = current_line
|
||||
else:
|
||||
self._current_line += current_line
|
||||
|
||||
msg = self.markup(msg, **markup)
|
||||
|
||||
try:
|
||||
self._file.write(msg)
|
||||
except UnicodeEncodeError:
|
||||
# Some environments don't support printing general Unicode
|
||||
# strings, due to misconfiguration or otherwise; in that case,
|
||||
# print the string escaped to ASCII.
|
||||
# When the Unicode situation improves we should consider
|
||||
# letting the error propagate instead of masking it (see #7475
|
||||
# for one brief attempt).
|
||||
msg = msg.encode("unicode-escape").decode("ascii")
|
||||
self._file.write(msg)
|
||||
|
||||
if flush:
|
||||
self.flush()
|
||||
|
||||
def line(self, s: str = "", **markup: bool) -> None:
|
||||
self.write(s, **markup)
|
||||
self.write("\n")
|
||||
|
||||
def flush(self) -> None:
|
||||
self._file.flush()
|
||||
|
||||
def _write_source(self, lines: Sequence[str], indents: Sequence[str] = ()) -> None:
|
||||
"""Write lines of source code possibly highlighted.
|
||||
|
||||
Keeping this private for now because the API is clunky. We should discuss how
|
||||
to evolve the terminal writer so we can have more precise color support, for example
|
||||
being able to write part of a line in one color and the rest in another, and so on.
|
||||
"""
|
||||
if indents and len(indents) != len(lines):
|
||||
raise ValueError(
|
||||
"indents size ({}) should have same size as lines ({})".format(
|
||||
len(indents), len(lines)
|
||||
)
|
||||
)
|
||||
if not indents:
|
||||
indents = [""] * len(lines)
|
||||
source = "\n".join(lines)
|
||||
new_lines = self._highlight(source).splitlines()
|
||||
for indent, new_line in zip(indents, new_lines):
|
||||
self.line(indent + new_line)
|
||||
|
||||
def _highlight(self, source: str) -> str:
|
||||
"""Highlight the given source code if we have markup support."""
|
||||
from _pytest.config.exceptions import UsageError
|
||||
|
||||
if not self.hasmarkup or not self.code_highlight:
|
||||
return source
|
||||
try:
|
||||
from pygments.formatters.terminal import TerminalFormatter
|
||||
from pygments.lexers.python import PythonLexer
|
||||
from pygments import highlight
|
||||
import pygments.util
|
||||
except ImportError:
|
||||
return source
|
||||
else:
|
||||
try:
|
||||
highlighted: str = highlight(
|
||||
source,
|
||||
PythonLexer(),
|
||||
TerminalFormatter(
|
||||
bg=os.getenv("PYTEST_THEME_MODE", "dark"),
|
||||
style=os.getenv("PYTEST_THEME"),
|
||||
),
|
||||
)
|
||||
return highlighted
|
||||
except pygments.util.ClassNotFound:
|
||||
raise UsageError(
|
||||
"PYTEST_THEME environment variable had an invalid value: '{}'. "
|
||||
"Only valid pygment styles are allowed.".format(
|
||||
os.getenv("PYTEST_THEME")
|
||||
)
|
||||
)
|
||||
except pygments.util.OptionError:
|
||||
raise UsageError(
|
||||
"PYTEST_THEME_MODE environment variable had an invalid value: '{}'. "
|
||||
"The only allowed values are 'dark' and 'light'.".format(
|
||||
os.getenv("PYTEST_THEME_MODE")
|
||||
)
|
||||
)
|
||||
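A usage sketch for the TerminalWriter above; markup is only emitted when the stream is a capable tty (or PY_COLORS=1 is set):

from _pytest._io.terminalwriter import TerminalWriter

tw = TerminalWriter()                            # defaults to sys.stdout
tw.sep("=", "test session starts", bold=True)    # full-width separator line
tw.line("collected 3 items", green=True)         # one complete colored line
tw.write("PASSED ", green=True, bold=True)       # partial line, no newline
tw.line()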
55
venv/lib/python3.12/site-packages/_pytest/_io/wcwidth.py
Normal file
@@ -0,0 +1,55 @@
|
||||
import unicodedata
|
||||
from functools import lru_cache
|
||||
|
||||
|
||||
@lru_cache(100)
|
||||
def wcwidth(c: str) -> int:
|
||||
"""Determine how many columns are needed to display a character in a terminal.
|
||||
|
||||
Returns -1 if the character is not printable.
|
||||
Returns 0, 1 or 2 for other characters.
|
||||
"""
|
||||
o = ord(c)
|
||||
|
||||
# ASCII fast path.
|
||||
if 0x20 <= o < 0x07F:
|
||||
return 1
|
||||
|
||||
# Some Cf/Zp/Zl characters which should be zero-width.
|
||||
if (
|
||||
o == 0x0000
|
||||
or 0x200B <= o <= 0x200F
|
||||
or 0x2028 <= o <= 0x202E
|
||||
or 0x2060 <= o <= 0x2063
|
||||
):
|
||||
return 0
|
||||
|
||||
category = unicodedata.category(c)
|
||||
|
||||
# Control characters.
|
||||
if category == "Cc":
|
||||
return -1
|
||||
|
||||
# Combining characters with zero width.
|
||||
if category in ("Me", "Mn"):
|
||||
return 0
|
||||
|
||||
# Full/Wide east asian characters.
|
||||
if unicodedata.east_asian_width(c) in ("F", "W"):
|
||||
return 2
|
||||
|
||||
return 1
|
||||
|
||||
|
||||
def wcswidth(s: str) -> int:
|
||||
"""Determine how many columns are needed to display a string in a terminal.
|
||||
|
||||
Returns -1 if the string contains non-printable characters.
|
||||
"""
|
||||
width = 0
|
||||
for c in unicodedata.normalize("NFC", s):
|
||||
wc = wcwidth(c)
|
||||
if wc < 0:
|
||||
return -1
|
||||
width += wc
|
||||
return width
|
||||
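For reference, the width computation above behaves like this (values follow the Unicode east-asian-width and category tables):

from _pytest._io.wcwidth import wcswidth, wcwidth

assert wcwidth("a") == 1           # ASCII fast path
assert wcwidth("\u4e2d") == 2      # CJK ideograph is a wide ("W") character
assert wcswidth("a\u0301") == 1    # combining accent adds no width
assert wcswidth("a\tb") == -1      # control characters mark the string non-printable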
109
venv/lib/python3.12/site-packages/_pytest/_py/error.py
Normal file
@@ -0,0 +1,109 @@
|
||||
"""create errno-specific classes for IO or os calls."""
|
||||
from __future__ import annotations
|
||||
|
||||
import errno
|
||||
import os
|
||||
import sys
|
||||
from typing import Callable
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TypeVar
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing_extensions import ParamSpec
|
||||
|
||||
P = ParamSpec("P")
|
||||
|
||||
R = TypeVar("R")
|
||||
|
||||
|
||||
class Error(EnvironmentError):
|
||||
def __repr__(self) -> str:
|
||||
return "{}.{} {!r}: {} ".format(
|
||||
self.__class__.__module__,
|
||||
self.__class__.__name__,
|
||||
self.__class__.__doc__,
|
||||
" ".join(map(str, self.args)),
|
||||
# repr(self.args)
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
s = "[{}]: {}".format(
|
||||
self.__class__.__doc__,
|
||||
" ".join(map(str, self.args)),
|
||||
)
|
||||
return s
|
||||
|
||||
|
||||
_winerrnomap = {
|
||||
2: errno.ENOENT,
|
||||
3: errno.ENOENT,
|
||||
17: errno.EEXIST,
|
||||
18: errno.EXDEV,
|
||||
13: errno.EBUSY,  # empty cd drive, but ENOMEDIUM seems unavailable
|
||||
22: errno.ENOTDIR,
|
||||
20: errno.ENOTDIR,
|
||||
267: errno.ENOTDIR,
|
||||
5: errno.EACCES, # anything better?
|
||||
}
|
||||
|
||||
|
||||
class ErrorMaker:
|
||||
"""lazily provides Exception classes for each possible POSIX errno
|
||||
(as defined per the 'errno' module). All such instances
|
||||
subclass EnvironmentError.
|
||||
"""
|
||||
|
||||
_errno2class: dict[int, type[Error]] = {}
|
||||
|
||||
def __getattr__(self, name: str) -> type[Error]:
|
||||
if name[0] == "_":
|
||||
raise AttributeError(name)
|
||||
eno = getattr(errno, name)
|
||||
cls = self._geterrnoclass(eno)
|
||||
setattr(self, name, cls)
|
||||
return cls
|
||||
|
||||
def _geterrnoclass(self, eno: int) -> type[Error]:
|
||||
try:
|
||||
return self._errno2class[eno]
|
||||
except KeyError:
|
||||
clsname = errno.errorcode.get(eno, "UnknownErrno%d" % (eno,))
|
||||
errorcls = type(
|
||||
clsname,
|
||||
(Error,),
|
||||
{"__module__": "py.error", "__doc__": os.strerror(eno)},
|
||||
)
|
||||
self._errno2class[eno] = errorcls
|
||||
return errorcls
|
||||
|
||||
def checked_call(
|
||||
self, func: Callable[P, R], *args: P.args, **kwargs: P.kwargs
|
||||
) -> R:
|
||||
"""Call a function and raise an errno-exception if applicable."""
|
||||
__tracebackhide__ = True
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Error:
|
||||
raise
|
||||
except OSError as value:
|
||||
if not hasattr(value, "errno"):
|
||||
raise
|
||||
errno = value.errno
|
||||
if sys.platform == "win32":
|
||||
try:
|
||||
cls = self._geterrnoclass(_winerrnomap[errno])
|
||||
except KeyError:
|
||||
raise value
|
||||
else:
|
||||
# we are not on Windows, or we got a proper OSError
|
||||
cls = self._geterrnoclass(errno)
|
||||
|
||||
raise cls(f"{func.__name__}{args!r}")
|
||||
|
||||
|
||||
_error_maker = ErrorMaker()
|
||||
checked_call = _error_maker.checked_call
|
||||
|
||||
|
||||
def __getattr__(attr: str) -> type[Error]:
|
||||
return getattr(_error_maker, attr) # type: ignore[no-any-return]
|
||||
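The ErrorMaker above backs the py.error-style attribute access; a usage sketch (the missing path is illustrative):

import errno
import os

from _pytest._py import error

# checked_call() re-raises the OSError as the errno-specific class ...
try:
    error.checked_call(os.rmdir, "/no/such/directory")
except error.ENOENT as exc:
    print("caught:", exc)

# ... which is the same lazily created class you get via attribute access.
assert error.ENOENT.__doc__ == os.strerror(errno.ENOENT)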
1475
venv/lib/python3.12/site-packages/_pytest/_py/path.py
Normal file
File diff suppressed because it is too large
Load Diff
16
venv/lib/python3.12/site-packages/_pytest/_version.py
Normal file
@@ -0,0 +1,16 @@
|
||||
# file generated by setuptools_scm
|
||||
# don't change, don't track in version control
|
||||
TYPE_CHECKING = False
|
||||
if TYPE_CHECKING:
|
||||
from typing import Tuple, Union
|
||||
VERSION_TUPLE = Tuple[Union[int, str], ...]
|
||||
else:
|
||||
VERSION_TUPLE = object
|
||||
|
||||
version: str
|
||||
__version__: str
|
||||
__version_tuple__: VERSION_TUPLE
|
||||
version_tuple: VERSION_TUPLE
|
||||
|
||||
__version__ = version = '7.4.3'
|
||||
__version_tuple__ = version_tuple = (7, 4, 3)
|
||||
181
venv/lib/python3.12/site-packages/_pytest/assertion/__init__.py
Normal file
@@ -0,0 +1,181 @@
|
||||
"""Support for presenting detailed information in failing assertions."""
|
||||
import sys
|
||||
from typing import Any
|
||||
from typing import Generator
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from _pytest.assertion import rewrite
|
||||
from _pytest.assertion import truncate
|
||||
from _pytest.assertion import util
|
||||
from _pytest.assertion.rewrite import assertstate_key
|
||||
from _pytest.config import Config
|
||||
from _pytest.config import hookimpl
|
||||
from _pytest.config.argparsing import Parser
|
||||
from _pytest.nodes import Item
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from _pytest.main import Session
|
||||
|
||||
|
||||
def pytest_addoption(parser: Parser) -> None:
|
||||
group = parser.getgroup("debugconfig")
|
||||
group.addoption(
|
||||
"--assert",
|
||||
action="store",
|
||||
dest="assertmode",
|
||||
choices=("rewrite", "plain"),
|
||||
default="rewrite",
|
||||
metavar="MODE",
|
||||
help=(
|
||||
"Control assertion debugging tools.\n"
|
||||
"'plain' performs no assertion debugging.\n"
|
||||
"'rewrite' (the default) rewrites assert statements in test modules"
|
||||
" on import to provide assert expression information."
|
||||
),
|
||||
)
|
||||
parser.addini(
|
||||
"enable_assertion_pass_hook",
|
||||
type="bool",
|
||||
default=False,
|
||||
help="Enables the pytest_assertion_pass hook. "
|
||||
"Make sure to delete any previously generated pyc cache files.",
|
||||
)
|
||||
|
||||
|
||||
def register_assert_rewrite(*names: str) -> None:
|
||||
"""Register one or more module names to be rewritten on import.
|
||||
|
||||
This function will make sure that this module or all modules inside
|
||||
the package will get their assert statements rewritten.
|
||||
Thus you should make sure to call this before the module is
|
||||
actually imported, usually in your __init__.py if you are a plugin
|
||||
using a package.
|
||||
|
||||
:param names: The module names to register.
|
||||
"""
|
||||
for name in names:
|
||||
if not isinstance(name, str):
|
||||
msg = "expected module names as *args, got {0} instead" # type: ignore[unreachable]
|
||||
raise TypeError(msg.format(repr(names)))
|
||||
for hook in sys.meta_path:
|
||||
if isinstance(hook, rewrite.AssertionRewritingHook):
|
||||
importhook = hook
|
||||
break
|
||||
else:
|
||||
# TODO(typing): Add a protocol for mark_rewrite() and use it
|
||||
# for importhook and for PytestPluginManager.rewrite_hook.
|
||||
importhook = DummyRewriteHook() # type: ignore
|
||||
importhook.mark_rewrite(*names)
|
||||
|
||||
|
||||
class DummyRewriteHook:
|
||||
"""A no-op import hook for when rewriting is disabled."""
|
||||
|
||||
def mark_rewrite(self, *names: str) -> None:
|
||||
pass
|
||||
|
||||
|
||||
class AssertionState:
|
||||
"""State for the assertion plugin."""
|
||||
|
||||
def __init__(self, config: Config, mode) -> None:
|
||||
self.mode = mode
|
||||
self.trace = config.trace.root.get("assertion")
|
||||
self.hook: Optional[rewrite.AssertionRewritingHook] = None
|
||||
|
||||
|
||||
def install_importhook(config: Config) -> rewrite.AssertionRewritingHook:
|
||||
"""Try to install the rewrite hook, raise SystemError if it fails."""
|
||||
config.stash[assertstate_key] = AssertionState(config, "rewrite")
|
||||
config.stash[assertstate_key].hook = hook = rewrite.AssertionRewritingHook(config)
|
||||
sys.meta_path.insert(0, hook)
|
||||
config.stash[assertstate_key].trace("installed rewrite import hook")
|
||||
|
||||
def undo() -> None:
|
||||
hook = config.stash[assertstate_key].hook
|
||||
if hook is not None and hook in sys.meta_path:
|
||||
sys.meta_path.remove(hook)
|
||||
|
||||
config.add_cleanup(undo)
|
||||
return hook
|
||||
|
||||
|
||||
def pytest_collection(session: "Session") -> None:
|
||||
# This hook is only called when test modules are collected
|
||||
# so for example not in the managing process of pytest-xdist
|
||||
# (which does not collect test modules).
|
||||
assertstate = session.config.stash.get(assertstate_key, None)
|
||||
if assertstate:
|
||||
if assertstate.hook is not None:
|
||||
assertstate.hook.set_session(session)
|
||||
|
||||
|
||||
@hookimpl(tryfirst=True, hookwrapper=True)
|
||||
def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
|
||||
"""Setup the pytest_assertrepr_compare and pytest_assertion_pass hooks.
|
||||
|
||||
The rewrite module will use util._reprcompare if it exists to use custom
|
||||
reporting via the pytest_assertrepr_compare hook. This sets up this custom
|
||||
comparison for the test.
|
||||
"""
|
||||
|
||||
ihook = item.ihook
|
||||
|
||||
def callbinrepr(op, left: object, right: object) -> Optional[str]:
|
||||
"""Call the pytest_assertrepr_compare hook and prepare the result.
|
||||
|
||||
This uses the first result from the hook and then ensures the
|
||||
following:
|
||||
* Overly verbose explanations are truncated unless configured otherwise
|
||||
(eg. if running in verbose mode).
|
||||
* Embedded newlines are escaped to help util.format_explanation()
|
||||
later.
|
||||
* If the rewrite mode is used embedded %-characters are replaced
|
||||
to protect later % formatting.
|
||||
|
||||
The result can be formatted by util.format_explanation() for
|
||||
pretty printing.
|
||||
"""
|
||||
hook_result = ihook.pytest_assertrepr_compare(
|
||||
config=item.config, op=op, left=left, right=right
|
||||
)
|
||||
for new_expl in hook_result:
|
||||
if new_expl:
|
||||
new_expl = truncate.truncate_if_required(new_expl, item)
|
||||
new_expl = [line.replace("\n", "\\n") for line in new_expl]
|
||||
res = "\n~".join(new_expl)
|
||||
if item.config.getvalue("assertmode") == "rewrite":
|
||||
res = res.replace("%", "%%")
|
||||
return res
|
||||
return None
|
||||
|
||||
saved_assert_hooks = util._reprcompare, util._assertion_pass
|
||||
util._reprcompare = callbinrepr
|
||||
util._config = item.config
|
||||
|
||||
if ihook.pytest_assertion_pass.get_hookimpls():
|
||||
|
||||
def call_assertion_pass_hook(lineno: int, orig: str, expl: str) -> None:
|
||||
ihook.pytest_assertion_pass(item=item, lineno=lineno, orig=orig, expl=expl)
|
||||
|
||||
util._assertion_pass = call_assertion_pass_hook
|
||||
|
||||
yield
|
||||
|
||||
util._reprcompare, util._assertion_pass = saved_assert_hooks
|
||||
util._config = None
|
||||
|
||||
|
||||
def pytest_sessionfinish(session: "Session") -> None:
|
||||
assertstate = session.config.stash.get(assertstate_key, None)
|
||||
if assertstate:
|
||||
if assertstate.hook is not None:
|
||||
assertstate.hook.set_session(None)
|
||||
|
||||
|
||||
def pytest_assertrepr_compare(
|
||||
config: Config, op: str, left: Any, right: Any
|
||||
) -> Optional[List[str]]:
|
||||
return util.assertrepr_compare(config=config, op=op, left=left, right=right)
|
||||
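register_assert_rewrite() above is what pytest exposes publicly as pytest.register_assert_rewrite; a typical use, sketched with a hypothetical plugin package, registers helper modules before importing them:

# myplugin/__init__.py  (hypothetical plugin package)
import pytest

# Ask pytest to rewrite asserts in the helper module when it is imported,
# so failing assertions inside it get the detailed explanations handled here.
pytest.register_assert_rewrite("myplugin.helpers")

# Import only after registering, otherwise the module is loaded unrewritten.
from myplugin import helpers  # noqa: E402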
1211
venv/lib/python3.12/site-packages/_pytest/assertion/rewrite.py
Normal file
File diff suppressed because it is too large
Load Diff
115
venv/lib/python3.12/site-packages/_pytest/assertion/truncate.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""Utilities for truncating assertion output.
|
||||
|
||||
Current default behaviour is to truncate assertion explanations at
|
||||
~8 terminal lines, unless running in "-vv" mode or running on CI.
|
||||
"""
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
|
||||
from _pytest.assertion import util
|
||||
from _pytest.nodes import Item
|
||||
|
||||
|
||||
DEFAULT_MAX_LINES = 8
|
||||
DEFAULT_MAX_CHARS = 8 * 80
|
||||
USAGE_MSG = "use '-vv' to show"
|
||||
|
||||
|
||||
def truncate_if_required(
|
||||
explanation: List[str], item: Item, max_length: Optional[int] = None
|
||||
) -> List[str]:
|
||||
"""Truncate this assertion explanation if the given test item is eligible."""
|
||||
if _should_truncate_item(item):
|
||||
return _truncate_explanation(explanation)
|
||||
return explanation
|
||||
|
||||
|
||||
def _should_truncate_item(item: Item) -> bool:
|
||||
"""Whether or not this test item is eligible for truncation."""
|
||||
verbose = item.config.option.verbose
|
||||
return verbose < 2 and not util.running_on_ci()
|
||||
|
||||
|
||||
def _truncate_explanation(
|
||||
input_lines: List[str],
|
||||
max_lines: Optional[int] = None,
|
||||
max_chars: Optional[int] = None,
|
||||
) -> List[str]:
|
||||
"""Truncate given list of strings that makes up the assertion explanation.
|
||||
|
||||
Truncates to either 8 lines, or 640 characters - whichever the input reaches
|
||||
first, taking the truncation explanation into account. The remaining lines
|
||||
will be replaced by a usage message.
|
||||
"""
|
||||
if max_lines is None:
|
||||
max_lines = DEFAULT_MAX_LINES
|
||||
if max_chars is None:
|
||||
max_chars = DEFAULT_MAX_CHARS
|
||||
|
||||
# Check if truncation required
|
||||
input_char_count = len("".join(input_lines))
|
||||
# The length of the truncation explanation depends on the number of lines
|
||||
# removed but is at least 68 characters:
|
||||
# The real value is
|
||||
# 64 (for the base message:
|
||||
# '...\n...Full output truncated (1 line hidden), use '-vv' to show")'
|
||||
# )
|
||||
# + 1 (for plural)
|
||||
# + int(math.log10(len(input_lines) - max_lines)) (number of hidden line, at least 1)
|
||||
# + 3 for the '...' added to the truncated line
|
||||
# But if there's more than 100 lines it's very likely that we're going to
|
||||
# truncate, so we don't need the exact value using log10.
|
||||
tolerable_max_chars = (
|
||||
max_chars + 70 # 64 + 1 (for plural) + 2 (for '99') + 3 for '...'
|
||||
)
|
||||
# The truncation explanation adds two lines to the output
|
||||
tolerable_max_lines = max_lines + 2
|
||||
if (
|
||||
len(input_lines) <= tolerable_max_lines
|
||||
and input_char_count <= tolerable_max_chars
|
||||
):
|
||||
return input_lines
|
||||
# Truncate first to max_lines, and then truncate to max_chars if necessary
|
||||
truncated_explanation = input_lines[:max_lines]
|
||||
truncated_char = True
|
||||
# We reevaluate the need to truncate chars following removal of some lines
|
||||
if len("".join(truncated_explanation)) > tolerable_max_chars:
|
||||
truncated_explanation = _truncate_by_char_count(
|
||||
truncated_explanation, max_chars
|
||||
)
|
||||
else:
|
||||
truncated_char = False
|
||||
|
||||
truncated_line_count = len(input_lines) - len(truncated_explanation)
|
||||
if truncated_explanation[-1]:
|
||||
# Add ellipsis and take into account part-truncated final line
|
||||
truncated_explanation[-1] = truncated_explanation[-1] + "..."
|
||||
if truncated_char:
|
||||
# It's possible that we did not remove any char from this line
|
||||
truncated_line_count += 1
|
||||
else:
|
||||
# Add proper ellipsis when we were able to fit a full line exactly
|
||||
truncated_explanation[-1] = "..."
|
||||
return truncated_explanation + [
|
||||
"",
|
||||
f"...Full output truncated ({truncated_line_count} line"
|
||||
f"{'' if truncated_line_count == 1 else 's'} hidden), {USAGE_MSG}",
|
||||
]
|
||||
|
||||
|
||||
def _truncate_by_char_count(input_lines: List[str], max_chars: int) -> List[str]:
|
||||
# Find point at which input length exceeds total allowed length
|
||||
iterated_char_count = 0
|
||||
for iterated_index, input_line in enumerate(input_lines):
|
||||
if iterated_char_count + len(input_line) > max_chars:
|
||||
break
|
||||
iterated_char_count += len(input_line)
|
||||
|
||||
# Create truncated explanation with modified final line
|
||||
truncated_result = input_lines[:iterated_index]
|
||||
final_line = input_lines[iterated_index]
|
||||
if final_line:
|
||||
final_line_truncate_point = max_chars - iterated_char_count
|
||||
final_line = final_line[:final_line_truncate_point]
|
||||
truncated_result.append(final_line)
|
||||
return truncated_result
|
||||
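A quick sketch of how the private _truncate_explanation() helper above behaves for a long explanation:

from _pytest.assertion.truncate import _truncate_explanation

lines = [f"line {i}" for i in range(50)]
out = _truncate_explanation(lines, max_lines=8, max_chars=640)

# The first 8 lines survive (the last one gains an ellipsis) and the rest are
# summarised as "...Full output truncated (42 lines hidden), use '-vv' to show".
assert len(out) == 8 + 2
assert out[-1].startswith("...Full output truncated")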
522
venv/lib/python3.12/site-packages/_pytest/assertion/util.py
Normal file
@@ -0,0 +1,522 @@
|
||||
"""Utilities for assertion debugging."""
|
||||
import collections.abc
|
||||
import os
|
||||
import pprint
|
||||
from typing import AbstractSet
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from unicodedata import normalize
|
||||
|
||||
import _pytest._code
|
||||
from _pytest import outcomes
|
||||
from _pytest._io.saferepr import _pformat_dispatch
|
||||
from _pytest._io.saferepr import saferepr
|
||||
from _pytest._io.saferepr import saferepr_unlimited
|
||||
from _pytest.config import Config
|
||||
|
||||
# The _reprcompare attribute on the util module is used by the new assertion
|
||||
# interpretation code and assertion rewriter to detect this plugin was
|
||||
# loaded and in turn call the hooks defined here as part of the
|
||||
# DebugInterpreter.
|
||||
_reprcompare: Optional[Callable[[str, object, object], Optional[str]]] = None
|
||||
|
||||
# Works similarly as _reprcompare attribute. Is populated with the hook call
|
||||
# when pytest_runtest_setup is called.
|
||||
_assertion_pass: Optional[Callable[[int, str, str], None]] = None
|
||||
|
||||
# Config object which is assigned during pytest_runtest_protocol.
|
||||
_config: Optional[Config] = None
|
||||
|
||||
|
||||
def format_explanation(explanation: str) -> str:
|
||||
r"""Format an explanation.
|
||||
|
||||
Normally all embedded newlines are escaped, however there are
|
||||
three exceptions: \n{, \n} and \n~. The first two are intended to
|
||||
cover nested explanations, see function and attribute explanations
|
||||
for examples (.visit_Call(), visit_Attribute()). The last one is
|
||||
for when one explanation needs to span multiple lines, e.g. when
|
||||
displaying diffs.
|
||||
"""
|
||||
lines = _split_explanation(explanation)
|
||||
result = _format_lines(lines)
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def _split_explanation(explanation: str) -> List[str]:
|
||||
r"""Return a list of individual lines in the explanation.
|
||||
|
||||
This will return a list of lines split on '\n{', '\n}' and '\n~'.
|
||||
Any other newlines will be escaped and appear in the line as the
|
||||
literal '\n' characters.
|
||||
"""
|
||||
raw_lines = (explanation or "").split("\n")
|
||||
lines = [raw_lines[0]]
|
||||
for values in raw_lines[1:]:
|
||||
if values and values[0] in ["{", "}", "~", ">"]:
|
||||
lines.append(values)
|
||||
else:
|
||||
lines[-1] += "\\n" + values
|
||||
return lines
|
||||
|
||||
|
||||
def _format_lines(lines: Sequence[str]) -> List[str]:
|
||||
"""Format the individual lines.
|
||||
|
||||
This will replace the '{', '}' and '~' characters of our mini formatting
|
||||
language with the proper 'where ...', 'and ...' and ' + ...' text, taking
|
||||
care of indentation along the way.
|
||||
|
||||
Return a list of formatted lines.
|
||||
"""
|
||||
result = list(lines[:1])
|
||||
stack = [0]
|
||||
stackcnt = [0]
|
||||
for line in lines[1:]:
|
||||
if line.startswith("{"):
|
||||
if stackcnt[-1]:
|
||||
s = "and "
|
||||
else:
|
||||
s = "where "
|
||||
stack.append(len(result))
|
||||
stackcnt[-1] += 1
|
||||
stackcnt.append(0)
|
||||
result.append(" +" + " " * (len(stack) - 1) + s + line[1:])
|
||||
elif line.startswith("}"):
|
||||
stack.pop()
|
||||
stackcnt.pop()
|
||||
result[stack[-1]] += line[1:]
|
||||
else:
|
||||
assert line[0] in ["~", ">"]
|
||||
stack[-1] += 1
|
||||
indent = len(stack) if line.startswith("~") else len(stack) - 1
|
||||
result.append(" " * indent + line[1:])
|
||||
assert len(stack) == 1
|
||||
return result
|
||||
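The \n{ / \n} / \n~ mini-language that _split_explanation() and _format_lines() above handle looks like this in practice (a sketch):

from _pytest.assertion.util import format_explanation

expl = "assert f(x) == 3\n{f(x) = 2\n}"
print(format_explanation(expl))
# assert f(x) == 3
#  + where f(x) = 2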
|
||||
|
||||
def issequence(x: Any) -> bool:
|
||||
return isinstance(x, collections.abc.Sequence) and not isinstance(x, str)
|
||||
|
||||
|
||||
def istext(x: Any) -> bool:
|
||||
return isinstance(x, str)
|
||||
|
||||
|
||||
def isdict(x: Any) -> bool:
|
||||
return isinstance(x, dict)
|
||||
|
||||
|
||||
def isset(x: Any) -> bool:
|
||||
return isinstance(x, (set, frozenset))
|
||||
|
||||
|
||||
def isnamedtuple(obj: Any) -> bool:
|
||||
return isinstance(obj, tuple) and getattr(obj, "_fields", None) is not None
|
||||
|
||||
|
||||
def isdatacls(obj: Any) -> bool:
|
||||
return getattr(obj, "__dataclass_fields__", None) is not None
|
||||
|
||||
|
||||
def isattrs(obj: Any) -> bool:
|
||||
return getattr(obj, "__attrs_attrs__", None) is not None
|
||||
|
||||
|
||||
def isiterable(obj: Any) -> bool:
|
||||
try:
|
||||
iter(obj)
|
||||
return not istext(obj)
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
|
||||
def has_default_eq(
|
||||
obj: object,
|
||||
) -> bool:
|
||||
"""Check if an instance of an object contains the default eq
|
||||
|
||||
First, we check if the object's __eq__ attribute has __code__,
|
||||
if so, we compare the method's code filename (__code__.co_filename)
|
||||
to the default one generated by the dataclass and attr module
|
||||
for dataclasses the default co_filename is <string>; for attrs classes, the __eq__ code filename should contain "attrs generated eq"
|
||||
"""
|
||||
# inspired from https://github.com/willmcgugan/rich/blob/07d51ffc1aee6f16bd2e5a25b4e82850fb9ed778/rich/pretty.py#L68
|
||||
if hasattr(obj.__eq__, "__code__") and hasattr(obj.__eq__.__code__, "co_filename"):
|
||||
code_filename = obj.__eq__.__code__.co_filename
|
||||
|
||||
if isattrs(obj):
|
||||
return "attrs generated eq" in code_filename
|
||||
|
||||
return code_filename == "<string>" # data class
|
||||
return True
|
||||
|
||||
|
||||
def assertrepr_compare(
|
||||
config, op: str, left: Any, right: Any, use_ascii: bool = False
|
||||
) -> Optional[List[str]]:
|
||||
"""Return specialised explanations for some operators/operands."""
|
||||
verbose = config.getoption("verbose")
|
||||
|
||||
# Strings which normalize equal are often hard to distinguish when printed; use ascii() to make this easier.
|
||||
# See issue #3246.
|
||||
use_ascii = (
|
||||
isinstance(left, str)
|
||||
and isinstance(right, str)
|
||||
and normalize("NFD", left) == normalize("NFD", right)
|
||||
)
|
||||
|
||||
if verbose > 1:
|
||||
left_repr = saferepr_unlimited(left, use_ascii=use_ascii)
|
||||
right_repr = saferepr_unlimited(right, use_ascii=use_ascii)
|
||||
else:
|
||||
# XXX: "15 chars indentation" is wrong
|
||||
# ("E AssertionError: assert "); should use term width.
|
||||
maxsize = (
|
||||
80 - 15 - len(op) - 2
|
||||
) // 2 # 15 chars indentation, 1 space around op
|
||||
|
||||
left_repr = saferepr(left, maxsize=maxsize, use_ascii=use_ascii)
|
||||
right_repr = saferepr(right, maxsize=maxsize, use_ascii=use_ascii)
|
||||
|
||||
summary = f"{left_repr} {op} {right_repr}"
|
||||
|
||||
explanation = None
|
||||
try:
|
||||
if op == "==":
|
||||
explanation = _compare_eq_any(left, right, verbose)
|
||||
elif op == "not in":
|
||||
if istext(left) and istext(right):
|
||||
explanation = _notin_text(left, right, verbose)
|
||||
except outcomes.Exit:
|
||||
raise
|
||||
except Exception:
|
||||
explanation = [
|
||||
"(pytest_assertion plugin: representation of details failed: {}.".format(
|
||||
_pytest._code.ExceptionInfo.from_current()._getreprcrash()
|
||||
),
|
||||
" Probably an object has a faulty __repr__.)",
|
||||
]
|
||||
|
||||
if not explanation:
|
||||
return None
|
||||
|
||||
return [summary] + explanation
|
||||
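assertrepr_compare() above only consults config.getoption(); a sketch with a stand-in config object (not a real pytest Config) shows the shape of its output:

from types import SimpleNamespace

from _pytest.assertion import util

config = SimpleNamespace(getoption=lambda name: 0)   # illustrative stub

lines = util.assertrepr_compare(config, "==", [1, 2, 3], [1, 2, 4])
print(lines[0])   # "[1, 2, 3] == [1, 2, 4]"
print(lines[1])   # "At index 2 diff: 3 != 4"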
|
||||
|
||||
def _compare_eq_any(left: Any, right: Any, verbose: int = 0) -> List[str]:
|
||||
explanation = []
|
||||
if istext(left) and istext(right):
|
||||
explanation = _diff_text(left, right, verbose)
|
||||
else:
|
||||
from _pytest.python_api import ApproxBase
|
||||
|
||||
if isinstance(left, ApproxBase) or isinstance(right, ApproxBase):
|
||||
# Although the common order should be obtained == expected, this ensures both ways
|
||||
approx_side = left if isinstance(left, ApproxBase) else right
|
||||
other_side = right if isinstance(left, ApproxBase) else left
|
||||
|
||||
explanation = approx_side._repr_compare(other_side)
|
||||
elif type(left) == type(right) and (
|
||||
isdatacls(left) or isattrs(left) or isnamedtuple(left)
|
||||
):
|
||||
# Note: unlike dataclasses/attrs, namedtuples compare only the
|
||||
# field values, not the type or field names. But this branch
|
||||
# intentionally only handles the same-type case, which was often
|
||||
# used in older code bases before dataclasses/attrs were available.
|
||||
explanation = _compare_eq_cls(left, right, verbose)
|
||||
elif issequence(left) and issequence(right):
|
||||
explanation = _compare_eq_sequence(left, right, verbose)
|
||||
elif isset(left) and isset(right):
|
||||
explanation = _compare_eq_set(left, right, verbose)
|
||||
elif isdict(left) and isdict(right):
|
||||
explanation = _compare_eq_dict(left, right, verbose)
|
||||
|
||||
if isiterable(left) and isiterable(right):
|
||||
expl = _compare_eq_iterable(left, right, verbose)
|
||||
explanation.extend(expl)
|
||||
|
||||
return explanation
|
||||
|
||||
|
||||
def _diff_text(left: str, right: str, verbose: int = 0) -> List[str]:
|
||||
"""Return the explanation for the diff between text.
|
||||
|
||||
Unless --verbose is used this will skip leading and trailing
|
||||
characters which are identical to keep the diff minimal.
|
||||
"""
|
||||
from difflib import ndiff
|
||||
|
||||
explanation: List[str] = []
|
||||
|
||||
if verbose < 1:
|
||||
i = 0 # just in case left or right has zero length
|
||||
for i in range(min(len(left), len(right))):
|
||||
if left[i] != right[i]:
|
||||
break
|
||||
if i > 42:
|
||||
i -= 10 # Provide some context
|
||||
explanation = [
|
||||
"Skipping %s identical leading characters in diff, use -v to show" % i
|
||||
]
|
||||
left = left[i:]
|
||||
right = right[i:]
|
||||
if len(left) == len(right):
|
||||
for i in range(len(left)):
|
||||
if left[-i] != right[-i]:
|
||||
break
|
||||
if i > 42:
|
||||
i -= 10 # Provide some context
|
||||
explanation += [
|
||||
"Skipping {} identical trailing "
|
||||
"characters in diff, use -v to show".format(i)
|
||||
]
|
||||
left = left[:-i]
|
||||
right = right[:-i]
|
||||
keepends = True
|
||||
if left.isspace() or right.isspace():
|
||||
left = repr(str(left))
|
||||
right = repr(str(right))
|
||||
explanation += ["Strings contain only whitespace, escaping them using repr()"]
|
||||
# "right" is the expected base against which we compare "left",
|
||||
# see https://github.com/pytest-dev/pytest/issues/3333
|
||||
explanation += [
|
||||
line.strip("\n")
|
||||
for line in ndiff(right.splitlines(keepends), left.splitlines(keepends))
|
||||
]
|
||||
return explanation
|
||||
|
||||
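# Editorial sketch (not part of the upstream file): the text explanations built
# above rely on difflib.ndiff, with "right" treated as the expected base (see
# issue #3333 referenced in the comments). A minimal standalone use of that
# underlying call, for illustration only:
def _example_ndiff_usage() -> List[str]:
    from difflib import ndiff

    expected = "foo bar\n"
    actual = "foo baz\n"
    # "- " lines come from the expected text, "+ " lines from the actual text,
    # and "? " lines mark the differing characters.
    return [
        line.rstrip("\n")
        for line in ndiff(expected.splitlines(True), actual.splitlines(True))
    ]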

def _surrounding_parens_on_own_lines(lines: List[str]) -> None:
    """Move opening/closing parenthesis/bracket to own lines."""
    opening = lines[0][:1]
    if opening in ["(", "[", "{"]:
        lines[0] = " " + lines[0][1:]
        lines[:] = [opening] + lines
    closing = lines[-1][-1:]
    if closing in [")", "]", "}"]:
        lines[-1] = lines[-1][:-1] + ","
        lines[:] = lines + [closing]


def _compare_eq_iterable(
    left: Iterable[Any], right: Iterable[Any], verbose: int = 0
) -> List[str]:
    if verbose <= 0 and not running_on_ci():
        return ["Use -v to get more diff"]
    # dynamic import to speedup pytest
    import difflib

    left_formatting = pprint.pformat(left).splitlines()
    right_formatting = pprint.pformat(right).splitlines()

    # Re-format for different output lengths.
    lines_left = len(left_formatting)
    lines_right = len(right_formatting)
    if lines_left != lines_right:
        left_formatting = _pformat_dispatch(left).splitlines()
        right_formatting = _pformat_dispatch(right).splitlines()

    if lines_left > 1 or lines_right > 1:
        _surrounding_parens_on_own_lines(left_formatting)
        _surrounding_parens_on_own_lines(right_formatting)

    explanation = ["Full diff:"]
    # "right" is the expected base against which we compare "left",
    # see https://github.com/pytest-dev/pytest/issues/3333
    explanation.extend(
        line.rstrip() for line in difflib.ndiff(right_formatting, left_formatting)
    )
    return explanation


def _compare_eq_sequence(
    left: Sequence[Any], right: Sequence[Any], verbose: int = 0
) -> List[str]:
    comparing_bytes = isinstance(left, bytes) and isinstance(right, bytes)
    explanation: List[str] = []
    len_left = len(left)
    len_right = len(right)
    for i in range(min(len_left, len_right)):
        if left[i] != right[i]:
            if comparing_bytes:
                # when comparing bytes, we want to see their ascii representation
                # instead of their numeric values (#5260)
                # using a slice gives us the ascii representation:
                # >>> s = b'foo'
                # >>> s[0]
                # 102
                # >>> s[0:1]
                # b'f'
                left_value = left[i : i + 1]
                right_value = right[i : i + 1]
            else:
                left_value = left[i]
                right_value = right[i]

            explanation += [f"At index {i} diff: {left_value!r} != {right_value!r}"]
            break

    if comparing_bytes:
        # when comparing bytes, it doesn't help to show the "sides contain one or more
        # items" longer explanation, so skip it

        return explanation

    len_diff = len_left - len_right
    if len_diff:
        if len_diff > 0:
            dir_with_more = "Left"
            extra = saferepr(left[len_right])
        else:
            len_diff = 0 - len_diff
            dir_with_more = "Right"
            extra = saferepr(right[len_left])

        if len_diff == 1:
            explanation += [f"{dir_with_more} contains one more item: {extra}"]
        else:
            explanation += [
                "%s contains %d more items, first extra item: %s"
                % (dir_with_more, len_diff, extra)
            ]
    return explanation


def _compare_eq_set(
    left: AbstractSet[Any], right: AbstractSet[Any], verbose: int = 0
) -> List[str]:
    explanation = []
    diff_left = left - right
    diff_right = right - left
    if diff_left:
        explanation.append("Extra items in the left set:")
        for item in diff_left:
            explanation.append(saferepr(item))
    if diff_right:
        explanation.append("Extra items in the right set:")
        for item in diff_right:
            explanation.append(saferepr(item))
    return explanation


def _compare_eq_dict(
    left: Mapping[Any, Any], right: Mapping[Any, Any], verbose: int = 0
) -> List[str]:
    explanation: List[str] = []
    set_left = set(left)
    set_right = set(right)
    common = set_left.intersection(set_right)
    same = {k: left[k] for k in common if left[k] == right[k]}
    if same and verbose < 2:
        explanation += ["Omitting %s identical items, use -vv to show" % len(same)]
    elif same:
        explanation += ["Common items:"]
        explanation += pprint.pformat(same).splitlines()
    diff = {k for k in common if left[k] != right[k]}
    if diff:
        explanation += ["Differing items:"]
        for k in diff:
            explanation += [saferepr({k: left[k]}) + " != " + saferepr({k: right[k]})]
    extra_left = set_left - set_right
    len_extra_left = len(extra_left)
    if len_extra_left:
        explanation.append(
            "Left contains %d more item%s:"
            % (len_extra_left, "" if len_extra_left == 1 else "s")
        )
        explanation.extend(
            pprint.pformat({k: left[k] for k in extra_left}).splitlines()
        )
    extra_right = set_right - set_left
    len_extra_right = len(extra_right)
    if len_extra_right:
        explanation.append(
            "Right contains %d more item%s:"
            % (len_extra_right, "" if len_extra_right == 1 else "s")
        )
        explanation.extend(
            pprint.pformat({k: right[k] for k in extra_right}).splitlines()
        )
    return explanation


def _compare_eq_cls(left: Any, right: Any, verbose: int) -> List[str]:
    if not has_default_eq(left):
        return []
    if isdatacls(left):
        import dataclasses

        all_fields = dataclasses.fields(left)
        fields_to_check = [info.name for info in all_fields if info.compare]
    elif isattrs(left):
        all_fields = left.__attrs_attrs__
        fields_to_check = [field.name for field in all_fields if getattr(field, "eq")]
    elif isnamedtuple(left):
        fields_to_check = left._fields
    else:
        assert False

    indent = "  "
    same = []
    diff = []
    for field in fields_to_check:
        if getattr(left, field) == getattr(right, field):
            same.append(field)
        else:
            diff.append(field)

    explanation = []
    if same or diff:
        explanation += [""]
    if same and verbose < 2:
        explanation.append("Omitting %s identical items, use -vv to show" % len(same))
    elif same:
        explanation += ["Matching attributes:"]
        explanation += pprint.pformat(same).splitlines()
    if diff:
        explanation += ["Differing attributes:"]
        explanation += pprint.pformat(diff).splitlines()
        for field in diff:
            field_left = getattr(left, field)
            field_right = getattr(right, field)
            explanation += [
                "",
                "Drill down into differing attribute %s:" % field,
                ("%s%s: %r != %r") % (indent, field, field_left, field_right),
            ]
            explanation += [
                indent + line
                for line in _compare_eq_any(field_left, field_right, verbose)
            ]
    return explanation


def _notin_text(term: str, text: str, verbose: int = 0) -> List[str]:
    index = text.find(term)
    head = text[:index]
    tail = text[index + len(term) :]
    correct_text = head + tail
    diff = _diff_text(text, correct_text, verbose)
    newdiff = ["%s is contained here:" % saferepr(term, maxsize=42)]
    for line in diff:
        if line.startswith("Skipping"):
            continue
        if line.startswith("- "):
            continue
        if line.startswith("+ "):
            newdiff.append("  " + line[2:])
        else:
            newdiff.append(line)
    return newdiff


def running_on_ci() -> bool:
    """Check if we're currently running on a CI system."""
    env_vars = ["CI", "BUILD_NUMBER"]
    return any(var in os.environ for var in env_vars)
602
venv/lib/python3.12/site-packages/_pytest/cacheprovider.py
Normal file
@@ -0,0 +1,602 @@
|
||||
"""Implementation of the cache provider."""
|
||||
# This plugin was not named "cache" to avoid conflicts with the external
|
||||
# pytest-cache version.
|
||||
import dataclasses
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
from typing import Generator
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Union
|
||||
|
||||
from .pathlib import resolve_from_str
|
||||
from .pathlib import rm_rf
|
||||
from .reports import CollectReport
|
||||
from _pytest import nodes
|
||||
from _pytest._io import TerminalWriter
|
||||
from _pytest.compat import final
|
||||
from _pytest.config import Config
|
||||
from _pytest.config import ExitCode
|
||||
from _pytest.config import hookimpl
|
||||
from _pytest.config.argparsing import Parser
|
||||
from _pytest.deprecated import check_ispytest
|
||||
from _pytest.fixtures import fixture
|
||||
from _pytest.fixtures import FixtureRequest
|
||||
from _pytest.main import Session
|
||||
from _pytest.nodes import File
|
||||
from _pytest.python import Package
|
||||
from _pytest.reports import TestReport
|
||||
|
||||
README_CONTENT = """\
|
||||
# pytest cache directory #
|
||||
|
||||
This directory contains data from the pytest's cache plugin,
|
||||
which provides the `--lf` and `--ff` options, as well as the `cache` fixture.
|
||||
|
||||
**Do not** commit this to version control.
|
||||
|
||||
See [the docs](https://docs.pytest.org/en/stable/how-to/cache.html) for more information.
|
||||
"""
|
||||
|
||||
CACHEDIR_TAG_CONTENT = b"""\
|
||||
Signature: 8a477f597d28d172789f06886806bc55
|
||||
# This file is a cache directory tag created by pytest.
|
||||
# For information about cache directory tags, see:
|
||||
# https://bford.info/cachedir/spec.html
|
||||
"""
|
||||
|
||||
|
||||
@final
|
||||
@dataclasses.dataclass
|
||||
class Cache:
|
||||
"""Instance of the `cache` fixture."""
|
||||
|
||||
_cachedir: Path = dataclasses.field(repr=False)
|
||||
_config: Config = dataclasses.field(repr=False)
|
||||
|
||||
# Sub-directory under cache-dir for directories created by `mkdir()`.
|
||||
_CACHE_PREFIX_DIRS = "d"
|
||||
|
||||
# Sub-directory under cache-dir for values created by `set()`.
|
||||
_CACHE_PREFIX_VALUES = "v"
|
||||
|
||||
def __init__(
|
||||
self, cachedir: Path, config: Config, *, _ispytest: bool = False
|
||||
) -> None:
|
||||
check_ispytest(_ispytest)
|
||||
self._cachedir = cachedir
|
||||
self._config = config
|
||||
|
||||
@classmethod
|
||||
def for_config(cls, config: Config, *, _ispytest: bool = False) -> "Cache":
|
||||
"""Create the Cache instance for a Config.
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
check_ispytest(_ispytest)
|
||||
cachedir = cls.cache_dir_from_config(config, _ispytest=True)
|
||||
if config.getoption("cacheclear") and cachedir.is_dir():
|
||||
cls.clear_cache(cachedir, _ispytest=True)
|
||||
return cls(cachedir, config, _ispytest=True)
|
||||
|
||||
@classmethod
|
||||
def clear_cache(cls, cachedir: Path, _ispytest: bool = False) -> None:
|
||||
"""Clear the sub-directories used to hold cached directories and values.
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
check_ispytest(_ispytest)
|
||||
for prefix in (cls._CACHE_PREFIX_DIRS, cls._CACHE_PREFIX_VALUES):
|
||||
d = cachedir / prefix
|
||||
if d.is_dir():
|
||||
rm_rf(d)
|
||||
|
||||
@staticmethod
|
||||
def cache_dir_from_config(config: Config, *, _ispytest: bool = False) -> Path:
|
||||
"""Get the path to the cache directory for a Config.
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
check_ispytest(_ispytest)
|
||||
return resolve_from_str(config.getini("cache_dir"), config.rootpath)
|
||||
|
||||
def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None:
|
||||
"""Issue a cache warning.
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
check_ispytest(_ispytest)
|
||||
import warnings
|
||||
from _pytest.warning_types import PytestCacheWarning
|
||||
|
||||
warnings.warn(
|
||||
PytestCacheWarning(fmt.format(**args) if args else fmt),
|
||||
self._config.hook,
|
||||
stacklevel=3,
|
||||
)
|
||||
|
||||
def mkdir(self, name: str) -> Path:
|
||||
"""Return a directory path object with the given name.
|
||||
|
||||
If the directory does not yet exist, it will be created. You can use
|
||||
it to manage files to e.g. store/retrieve database dumps across test
|
||||
sessions.
|
||||
|
||||
.. versionadded:: 7.0
|
||||
|
||||
:param name:
|
||||
Must be a string not containing a ``/`` separator.
|
||||
Make sure the name contains your plugin or application
|
||||
identifiers to prevent clashes with other cache users.
|
||||
"""
|
||||
path = Path(name)
|
||||
if len(path.parts) > 1:
|
||||
raise ValueError("name is not allowed to contain path separators")
|
||||
res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path)
|
||||
res.mkdir(exist_ok=True, parents=True)
|
||||
return res
|
||||
|
||||
def _getvaluepath(self, key: str) -> Path:
|
||||
return self._cachedir.joinpath(self._CACHE_PREFIX_VALUES, Path(key))
|
||||
|
||||
def get(self, key: str, default):
|
||||
"""Return the cached value for the given key.
|
||||
|
||||
If no value was yet cached or the value cannot be read, the specified
|
||||
default is returned.
|
||||
|
||||
:param key:
|
||||
Must be a ``/`` separated value. Usually the first
|
||||
name is the name of your plugin or your application.
|
||||
:param default:
|
||||
The value to return in case of a cache-miss or invalid cache value.
|
||||
"""
|
||||
path = self._getvaluepath(key)
|
||||
try:
|
||||
with path.open("r", encoding="UTF-8") as f:
|
||||
return json.load(f)
|
||||
except (ValueError, OSError):
|
||||
return default
|
||||
|
||||
def set(self, key: str, value: object) -> None:
|
||||
"""Save value for the given key.
|
||||
|
||||
:param key:
|
||||
Must be a ``/`` separated value. Usually the first
|
||||
name is the name of your plugin or your application.
|
||||
:param value:
|
||||
Must be of any combination of basic python types,
|
||||
including nested types like lists of dictionaries.
|
||||
"""
|
||||
path = self._getvaluepath(key)
|
||||
try:
|
||||
if path.parent.is_dir():
|
||||
cache_dir_exists_already = True
|
||||
else:
|
||||
cache_dir_exists_already = self._cachedir.exists()
|
||||
path.parent.mkdir(exist_ok=True, parents=True)
|
||||
except OSError as exc:
|
||||
self.warn(
|
||||
f"could not create cache path {path}: {exc}",
|
||||
_ispytest=True,
|
||||
)
|
||||
return
|
||||
if not cache_dir_exists_already:
|
||||
self._ensure_supporting_files()
|
||||
data = json.dumps(value, ensure_ascii=False, indent=2)
|
||||
try:
|
||||
f = path.open("w", encoding="UTF-8")
|
||||
except OSError as exc:
|
||||
self.warn(
|
||||
f"cache could not write path {path}: {exc}",
|
||||
_ispytest=True,
|
||||
)
|
||||
else:
|
||||
with f:
|
||||
f.write(data)
|
||||
|
||||
def _ensure_supporting_files(self) -> None:
|
||||
"""Create supporting files in the cache dir that are not really part of the cache."""
|
||||
readme_path = self._cachedir / "README.md"
|
||||
readme_path.write_text(README_CONTENT, encoding="UTF-8")
|
||||
|
||||
gitignore_path = self._cachedir.joinpath(".gitignore")
|
||||
msg = "# Created by pytest automatically.\n*\n"
|
||||
gitignore_path.write_text(msg, encoding="UTF-8")
|
||||
|
||||
cachedir_tag_path = self._cachedir.joinpath("CACHEDIR.TAG")
|
||||
cachedir_tag_path.write_bytes(CACHEDIR_TAG_CONTENT)
|
||||
|
||||
|
||||
class LFPluginCollWrapper:
|
||||
def __init__(self, lfplugin: "LFPlugin") -> None:
|
||||
self.lfplugin = lfplugin
|
||||
self._collected_at_least_one_failure = False
|
||||
|
||||
@hookimpl(hookwrapper=True)
|
||||
def pytest_make_collect_report(self, collector: nodes.Collector):
|
||||
if isinstance(collector, (Session, Package)):
|
||||
out = yield
|
||||
res: CollectReport = out.get_result()
|
||||
|
||||
# Sort any lf-paths to the beginning.
|
||||
lf_paths = self.lfplugin._last_failed_paths
|
||||
|
||||
# Use stable sort to prioritize last failed.
|
||||
def sort_key(node: Union[nodes.Item, nodes.Collector]) -> bool:
|
||||
# Package.path is the __init__.py file, we need the directory.
|
||||
if isinstance(node, Package):
|
||||
path = node.path.parent
|
||||
else:
|
||||
path = node.path
|
||||
return path in lf_paths
|
||||
|
||||
res.result = sorted(
|
||||
res.result,
|
||||
key=sort_key,
|
||||
reverse=True,
|
||||
)
|
||||
return
|
||||
|
||||
elif isinstance(collector, File):
|
||||
if collector.path in self.lfplugin._last_failed_paths:
|
||||
out = yield
|
||||
res = out.get_result()
|
||||
result = res.result
|
||||
lastfailed = self.lfplugin.lastfailed
|
||||
|
||||
# Only filter with known failures.
|
||||
if not self._collected_at_least_one_failure:
|
||||
if not any(x.nodeid in lastfailed for x in result):
|
||||
return
|
||||
self.lfplugin.config.pluginmanager.register(
|
||||
LFPluginCollSkipfiles(self.lfplugin), "lfplugin-collskip"
|
||||
)
|
||||
self._collected_at_least_one_failure = True
|
||||
|
||||
session = collector.session
|
||||
result[:] = [
|
||||
x
|
||||
for x in result
|
||||
if x.nodeid in lastfailed
|
||||
# Include any passed arguments (not trivial to filter).
|
||||
or session.isinitpath(x.path)
|
||||
# Keep all sub-collectors.
|
||||
or isinstance(x, nodes.Collector)
|
||||
]
|
||||
return
|
||||
yield
|
||||
|
||||
|
||||
class LFPluginCollSkipfiles:
|
||||
def __init__(self, lfplugin: "LFPlugin") -> None:
|
||||
self.lfplugin = lfplugin
|
||||
|
||||
@hookimpl
|
||||
def pytest_make_collect_report(
|
||||
self, collector: nodes.Collector
|
||||
) -> Optional[CollectReport]:
|
||||
# Packages are Files, but we only want to skip test-bearing Files,
|
||||
# so don't filter Packages.
|
||||
if isinstance(collector, File) and not isinstance(collector, Package):
|
||||
if collector.path not in self.lfplugin._last_failed_paths:
|
||||
self.lfplugin._skipped_files += 1
|
||||
|
||||
return CollectReport(
|
||||
collector.nodeid, "passed", longrepr=None, result=[]
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
class LFPlugin:
|
||||
"""Plugin which implements the --lf (run last-failing) option."""
|
||||
|
||||
def __init__(self, config: Config) -> None:
|
||||
self.config = config
|
||||
active_keys = "lf", "failedfirst"
|
||||
self.active = any(config.getoption(key) for key in active_keys)
|
||||
assert config.cache
|
||||
self.lastfailed: Dict[str, bool] = config.cache.get("cache/lastfailed", {})
|
||||
self._previously_failed_count: Optional[int] = None
|
||||
self._report_status: Optional[str] = None
|
||||
self._skipped_files = 0 # count skipped files during collection due to --lf
|
||||
|
||||
if config.getoption("lf"):
|
||||
self._last_failed_paths = self.get_last_failed_paths()
|
||||
config.pluginmanager.register(
|
||||
LFPluginCollWrapper(self), "lfplugin-collwrapper"
|
||||
)
|
||||
|
||||
def get_last_failed_paths(self) -> Set[Path]:
|
||||
"""Return a set with all Paths of the previously failed nodeids and
|
||||
their parents."""
|
||||
rootpath = self.config.rootpath
|
||||
result = set()
|
||||
for nodeid in self.lastfailed:
|
||||
path = rootpath / nodeid.split("::")[0]
|
||||
result.add(path)
|
||||
result.update(path.parents)
|
||||
return {x for x in result if x.exists()}
|
||||
|
||||
def pytest_report_collectionfinish(self) -> Optional[str]:
|
||||
if self.active and self.config.getoption("verbose") >= 0:
|
||||
return "run-last-failure: %s" % self._report_status
|
||||
return None
|
||||
|
||||
def pytest_runtest_logreport(self, report: TestReport) -> None:
|
||||
if (report.when == "call" and report.passed) or report.skipped:
|
||||
self.lastfailed.pop(report.nodeid, None)
|
||||
elif report.failed:
|
||||
self.lastfailed[report.nodeid] = True
|
||||
|
||||
def pytest_collectreport(self, report: CollectReport) -> None:
|
||||
passed = report.outcome in ("passed", "skipped")
|
||||
if passed:
|
||||
if report.nodeid in self.lastfailed:
|
||||
self.lastfailed.pop(report.nodeid)
|
||||
self.lastfailed.update((item.nodeid, True) for item in report.result)
|
||||
else:
|
||||
self.lastfailed[report.nodeid] = True
|
||||
|
||||
@hookimpl(hookwrapper=True, tryfirst=True)
|
||||
def pytest_collection_modifyitems(
|
||||
self, config: Config, items: List[nodes.Item]
|
||||
) -> Generator[None, None, None]:
|
||||
yield
|
||||
|
||||
if not self.active:
|
||||
return
|
||||
|
||||
if self.lastfailed:
|
||||
previously_failed = []
|
||||
previously_passed = []
|
||||
for item in items:
|
||||
if item.nodeid in self.lastfailed:
|
||||
previously_failed.append(item)
|
||||
else:
|
||||
previously_passed.append(item)
|
||||
self._previously_failed_count = len(previously_failed)
|
||||
|
||||
if not previously_failed:
|
||||
# Running a subset of all tests with recorded failures
|
||||
# only outside of it.
|
||||
self._report_status = "%d known failures not in selected tests" % (
|
||||
len(self.lastfailed),
|
||||
)
|
||||
else:
|
||||
if self.config.getoption("lf"):
|
||||
items[:] = previously_failed
|
||||
config.hook.pytest_deselected(items=previously_passed)
|
||||
else: # --failedfirst
|
||||
items[:] = previously_failed + previously_passed
|
||||
|
||||
noun = "failure" if self._previously_failed_count == 1 else "failures"
|
||||
suffix = " first" if self.config.getoption("failedfirst") else ""
|
||||
self._report_status = "rerun previous {count} {noun}{suffix}".format(
|
||||
count=self._previously_failed_count, suffix=suffix, noun=noun
|
||||
)
|
||||
|
||||
if self._skipped_files > 0:
|
||||
files_noun = "file" if self._skipped_files == 1 else "files"
|
||||
self._report_status += " (skipped {files} {files_noun})".format(
|
||||
files=self._skipped_files, files_noun=files_noun
|
||||
)
|
||||
else:
|
||||
self._report_status = "no previously failed tests, "
|
||||
if self.config.getoption("last_failed_no_failures") == "none":
|
||||
self._report_status += "deselecting all items."
|
||||
config.hook.pytest_deselected(items=items[:])
|
||||
items[:] = []
|
||||
else:
|
||||
self._report_status += "not deselecting items."
|
||||
|
||||
def pytest_sessionfinish(self, session: Session) -> None:
|
||||
config = self.config
|
||||
if config.getoption("cacheshow") or hasattr(config, "workerinput"):
|
||||
return
|
||||
|
||||
assert config.cache is not None
|
||||
saved_lastfailed = config.cache.get("cache/lastfailed", {})
|
||||
if saved_lastfailed != self.lastfailed:
|
||||
config.cache.set("cache/lastfailed", self.lastfailed)
|
||||
|
||||
|
||||
class NFPlugin:
|
||||
"""Plugin which implements the --nf (run new-first) option."""
|
||||
|
||||
def __init__(self, config: Config) -> None:
|
||||
self.config = config
|
||||
self.active = config.option.newfirst
|
||||
assert config.cache is not None
|
||||
self.cached_nodeids = set(config.cache.get("cache/nodeids", []))
|
||||
|
||||
@hookimpl(hookwrapper=True, tryfirst=True)
|
||||
def pytest_collection_modifyitems(
|
||||
self, items: List[nodes.Item]
|
||||
) -> Generator[None, None, None]:
|
||||
yield
|
||||
|
||||
if self.active:
|
||||
new_items: Dict[str, nodes.Item] = {}
|
||||
other_items: Dict[str, nodes.Item] = {}
|
||||
for item in items:
|
||||
if item.nodeid not in self.cached_nodeids:
|
||||
new_items[item.nodeid] = item
|
||||
else:
|
||||
other_items[item.nodeid] = item
|
||||
|
||||
items[:] = self._get_increasing_order(
|
||||
new_items.values()
|
||||
) + self._get_increasing_order(other_items.values())
|
||||
self.cached_nodeids.update(new_items)
|
||||
else:
|
||||
self.cached_nodeids.update(item.nodeid for item in items)
|
||||
|
||||
def _get_increasing_order(self, items: Iterable[nodes.Item]) -> List[nodes.Item]:
|
||||
return sorted(items, key=lambda item: item.path.stat().st_mtime, reverse=True) # type: ignore[no-any-return]
|
||||
|
||||
def pytest_sessionfinish(self) -> None:
|
||||
config = self.config
|
||||
if config.getoption("cacheshow") or hasattr(config, "workerinput"):
|
||||
return
|
||||
|
||||
if config.getoption("collectonly"):
|
||||
return
|
||||
|
||||
assert config.cache is not None
|
||||
config.cache.set("cache/nodeids", sorted(self.cached_nodeids))
|
||||
|
||||
|
||||
def pytest_addoption(parser: Parser) -> None:
|
||||
group = parser.getgroup("general")
|
||||
group.addoption(
|
||||
"--lf",
|
||||
"--last-failed",
|
||||
action="store_true",
|
||||
dest="lf",
|
||||
help="Rerun only the tests that failed "
|
||||
"at the last run (or all if none failed)",
|
||||
)
|
||||
group.addoption(
|
||||
"--ff",
|
||||
"--failed-first",
|
||||
action="store_true",
|
||||
dest="failedfirst",
|
||||
help="Run all tests, but run the last failures first. "
|
||||
"This may re-order tests and thus lead to "
|
||||
"repeated fixture setup/teardown.",
|
||||
)
|
||||
group.addoption(
|
||||
"--nf",
|
||||
"--new-first",
|
||||
action="store_true",
|
||||
dest="newfirst",
|
||||
help="Run tests from new files first, then the rest of the tests "
|
||||
"sorted by file mtime",
|
||||
)
|
||||
group.addoption(
|
||||
"--cache-show",
|
||||
action="append",
|
||||
nargs="?",
|
||||
dest="cacheshow",
|
||||
help=(
|
||||
"Show cache contents, don't perform collection or tests. "
|
||||
"Optional argument: glob (default: '*')."
|
||||
),
|
||||
)
|
||||
group.addoption(
|
||||
"--cache-clear",
|
||||
action="store_true",
|
||||
dest="cacheclear",
|
||||
help="Remove all cache contents at start of test run",
|
||||
)
|
||||
cache_dir_default = ".pytest_cache"
|
||||
if "TOX_ENV_DIR" in os.environ:
|
||||
cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default)
|
||||
parser.addini("cache_dir", default=cache_dir_default, help="Cache directory path")
|
||||
group.addoption(
|
||||
"--lfnf",
|
||||
"--last-failed-no-failures",
|
||||
action="store",
|
||||
dest="last_failed_no_failures",
|
||||
choices=("all", "none"),
|
||||
default="all",
|
||||
help="With ``--lf``, determines whether to execute tests when there "
|
||||
"are no previously (known) failures or when no "
|
||||
"cached ``lastfailed`` data was found. "
|
||||
"``all`` (the default) runs the full test suite again. "
|
||||
"``none`` just emits a message about no known failures and exits successfully.",
|
||||
)
|
||||
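# Editorial sketch (not part of the upstream file): the options registered
# above are normally passed on the command line (pytest --lf, --ff, --nf,
# --cache-show, --cache-clear), but they can also be passed programmatically
# through pytest.main, which accepts the same argument strings:
def _example_rerun_last_failures() -> int:
    import pytest

    # Re-run only the tests that failed in the previous session, quietly.
    return int(pytest.main(["--lf", "-q"]))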
|
||||
|
||||
def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
|
||||
if config.option.cacheshow and not config.option.help:
|
||||
from _pytest.main import wrap_session
|
||||
|
||||
return wrap_session(config, cacheshow)
|
||||
return None
|
||||
|
||||
|
||||
@hookimpl(tryfirst=True)
|
||||
def pytest_configure(config: Config) -> None:
|
||||
config.cache = Cache.for_config(config, _ispytest=True)
|
||||
config.pluginmanager.register(LFPlugin(config), "lfplugin")
|
||||
config.pluginmanager.register(NFPlugin(config), "nfplugin")
|
||||
|
||||
|
||||
@fixture
|
||||
def cache(request: FixtureRequest) -> Cache:
|
||||
"""Return a cache object that can persist state between testing sessions.
|
||||
|
||||
cache.get(key, default)
|
||||
cache.set(key, value)
|
||||
|
||||
Keys must be ``/`` separated strings, where the first part is usually the
|
||||
name of your plugin or application to avoid clashes with other cache users.
|
||||
|
||||
Values can be any object handled by the json stdlib module.
|
||||
"""
|
||||
assert request.config.cache is not None
|
||||
return request.config.cache
|
||||
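# Editorial usage sketch (not part of the upstream file): how a test might use
# the `cache` fixture documented above. The key name is made up; keys are
# "/"-separated and values must be JSON-serializable, as described in the
# docstrings of Cache.get()/Cache.set().
def _example_cache_usage(cache: Cache) -> int:
    # In a real test module this would be a `def test_...(cache):` function and
    # pytest would inject the fixture automatically.
    value = cache.get("example_plugin/expensive", None)
    if value is None:
        value = 42  # stand-in for an expensive computation
        cache.set("example_plugin/expensive", value)
    return value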
|
||||
|
||||
def pytest_report_header(config: Config) -> Optional[str]:
|
||||
"""Display cachedir with --cache-show and if non-default."""
|
||||
if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache":
|
||||
assert config.cache is not None
|
||||
cachedir = config.cache._cachedir
|
||||
# TODO: evaluate generating upward relative paths
|
||||
# starting with .., ../.. if sensible
|
||||
|
||||
try:
|
||||
displaypath = cachedir.relative_to(config.rootpath)
|
||||
except ValueError:
|
||||
displaypath = cachedir
|
||||
return f"cachedir: {displaypath}"
|
||||
return None
|
||||
|
||||
|
||||
def cacheshow(config: Config, session: Session) -> int:
|
||||
from pprint import pformat
|
||||
|
||||
assert config.cache is not None
|
||||
|
||||
tw = TerminalWriter()
|
||||
tw.line("cachedir: " + str(config.cache._cachedir))
|
||||
if not config.cache._cachedir.is_dir():
|
||||
tw.line("cache is empty")
|
||||
return 0
|
||||
|
||||
glob = config.option.cacheshow[0]
|
||||
if glob is None:
|
||||
glob = "*"
|
||||
|
||||
dummy = object()
|
||||
basedir = config.cache._cachedir
|
||||
vdir = basedir / Cache._CACHE_PREFIX_VALUES
|
||||
tw.sep("-", "cache values for %r" % glob)
|
||||
for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()):
|
||||
key = str(valpath.relative_to(vdir))
|
||||
val = config.cache.get(key, dummy)
|
||||
if val is dummy:
|
||||
tw.line("%s contains unreadable content, will be ignored" % key)
|
||||
else:
|
||||
tw.line("%s contains:" % key)
|
||||
for line in pformat(val).splitlines():
|
||||
tw.line(" " + line)
|
||||
|
||||
ddir = basedir / Cache._CACHE_PREFIX_DIRS
|
||||
if ddir.is_dir():
|
||||
contents = sorted(ddir.rglob(glob))
|
||||
tw.sep("-", "cache directories for %r" % glob)
|
||||
for p in contents:
|
||||
# if p.is_dir():
|
||||
# print("%s/" % p.relative_to(basedir))
|
||||
if p.is_file():
|
||||
key = str(p.relative_to(basedir))
|
||||
tw.line(f"{key} is a file of length {p.stat().st_size:d}")
|
||||
return 0
|
||||
1082
venv/lib/python3.12/site-packages/_pytest/capture.py
Normal file
File diff suppressed because it is too large
435
venv/lib/python3.12/site-packages/_pytest/compat.py
Normal file
@@ -0,0 +1,435 @@
|
||||
"""Python version compatibility code."""
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import enum
|
||||
import functools
|
||||
import inspect
|
||||
import os
|
||||
import sys
|
||||
from inspect import Parameter
|
||||
from inspect import signature
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Generic
|
||||
from typing import NoReturn
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TypeVar
|
||||
|
||||
import py
|
||||
|
||||
# fmt: off
|
||||
# Workaround for https://github.com/sphinx-doc/sphinx/issues/10351.
|
||||
# If `overload` is imported from `compat` instead of from `typing`,
|
||||
# Sphinx doesn't recognize it as `overload` and the API docs for
|
||||
# overloaded functions look good again. But type checkers handle
|
||||
# it fine.
|
||||
# fmt: on
|
||||
if True:
|
||||
from typing import overload as overload
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing_extensions import Final
|
||||
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_S = TypeVar("_S")
|
||||
|
||||
#: constant to prepare valuing pylib path replacements/lazy proxies later on
|
||||
# intended for removal in pytest 8.0 or 9.0
|
||||
|
||||
# fmt: off
|
||||
# intentional space to create a fake difference for the verification
|
||||
LEGACY_PATH = py.path. local
|
||||
# fmt: on
|
||||
|
||||
|
||||
def legacy_path(path: str | os.PathLike[str]) -> LEGACY_PATH:
|
||||
"""Internal wrapper to prepare lazy proxies for legacy_path instances"""
|
||||
return LEGACY_PATH(path)
|
||||
|
||||
|
||||
# fmt: off
|
||||
# Singleton type for NOTSET, as described in:
|
||||
# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions
|
||||
class NotSetType(enum.Enum):
|
||||
token = 0
|
||||
NOTSET: Final = NotSetType.token # noqa: E305
|
||||
# fmt: on
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
import importlib.metadata
|
||||
|
||||
importlib_metadata = importlib.metadata
|
||||
else:
|
||||
import importlib_metadata as importlib_metadata # noqa: F401
|
||||
|
||||
|
||||
def _format_args(func: Callable[..., Any]) -> str:
|
||||
return str(signature(func))
|
||||
|
||||
|
||||
def is_generator(func: object) -> bool:
|
||||
genfunc = inspect.isgeneratorfunction(func)
|
||||
return genfunc and not iscoroutinefunction(func)
|
||||
|
||||
|
||||
def iscoroutinefunction(func: object) -> bool:
|
||||
"""Return True if func is a coroutine function (a function defined with async
|
||||
def syntax, and doesn't contain yield), or a function decorated with
|
||||
@asyncio.coroutine.
|
||||
|
||||
Note: copied and modified from Python 3.5's builtin coroutines.py to avoid
|
||||
importing asyncio directly, which in turns also initializes the "logging"
|
||||
module as a side-effect (see issue #8).
|
||||
"""
|
||||
return inspect.iscoroutinefunction(func) or getattr(func, "_is_coroutine", False)
|
||||
|
||||
|
||||
def is_async_function(func: object) -> bool:
|
||||
"""Return True if the given function seems to be an async function or
|
||||
an async generator."""
|
||||
return iscoroutinefunction(func) or inspect.isasyncgenfunction(func)
|
||||
|
||||
|
||||
def getlocation(function, curdir: str | None = None) -> str:
|
||||
function = get_real_func(function)
|
||||
fn = Path(inspect.getfile(function))
|
||||
lineno = function.__code__.co_firstlineno
|
||||
if curdir is not None:
|
||||
try:
|
||||
relfn = fn.relative_to(curdir)
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
return "%s:%d" % (relfn, lineno + 1)
|
||||
return "%s:%d" % (fn, lineno + 1)
|
||||
|
||||
|
||||
def num_mock_patch_args(function) -> int:
|
||||
"""Return number of arguments used up by mock arguments (if any)."""
|
||||
patchings = getattr(function, "patchings", None)
|
||||
if not patchings:
|
||||
return 0
|
||||
|
||||
mock_sentinel = getattr(sys.modules.get("mock"), "DEFAULT", object())
|
||||
ut_mock_sentinel = getattr(sys.modules.get("unittest.mock"), "DEFAULT", object())
|
||||
|
||||
return len(
|
||||
[
|
||||
p
|
||||
for p in patchings
|
||||
if not p.attribute_name
|
||||
and (p.new is mock_sentinel or p.new is ut_mock_sentinel)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def getfuncargnames(
|
||||
function: Callable[..., Any],
|
||||
*,
|
||||
name: str = "",
|
||||
is_method: bool = False,
|
||||
cls: type | None = None,
|
||||
) -> tuple[str, ...]:
|
||||
"""Return the names of a function's mandatory arguments.
|
||||
|
||||
Should return the names of all function arguments that:
|
||||
* Aren't bound to an instance or type as in instance or class methods.
|
||||
* Don't have default values.
|
||||
* Aren't bound with functools.partial.
|
||||
* Aren't replaced with mocks.
|
||||
|
||||
The is_method and cls arguments indicate that the function should
|
||||
be treated as a bound method even though it's not unless, only in
|
||||
the case of cls, the function is a static method.
|
||||
|
||||
The name parameter should be the original name in which the function was collected.
|
||||
"""
|
||||
# TODO(RonnyPfannschmidt): This function should be refactored when we
|
||||
# revisit fixtures. The fixture mechanism should ask the node for
|
||||
# the fixture names, and not try to obtain directly from the
|
||||
# function object well after collection has occurred.
|
||||
|
||||
# The parameters attribute of a Signature object contains an
|
||||
# ordered mapping of parameter names to Parameter instances. This
|
||||
# creates a tuple of the names of the parameters that don't have
|
||||
# defaults.
|
||||
try:
|
||||
parameters = signature(function).parameters
|
||||
except (ValueError, TypeError) as e:
|
||||
from _pytest.outcomes import fail
|
||||
|
||||
fail(
|
||||
f"Could not determine arguments of {function!r}: {e}",
|
||||
pytrace=False,
|
||||
)
|
||||
|
||||
arg_names = tuple(
|
||||
p.name
|
||||
for p in parameters.values()
|
||||
if (
|
||||
p.kind is Parameter.POSITIONAL_OR_KEYWORD
|
||||
or p.kind is Parameter.KEYWORD_ONLY
|
||||
)
|
||||
and p.default is Parameter.empty
|
||||
)
|
||||
if not name:
|
||||
name = function.__name__
|
||||
|
||||
# If this function should be treated as a bound method even though
|
||||
# it's passed as an unbound method or function, remove the first
|
||||
# parameter name.
|
||||
if is_method or (
|
||||
# Not using `getattr` because we don't want to resolve the staticmethod.
|
||||
# Not using `cls.__dict__` because we want to check the entire MRO.
|
||||
cls
|
||||
and not isinstance(
|
||||
inspect.getattr_static(cls, name, default=None), staticmethod
|
||||
)
|
||||
):
|
||||
arg_names = arg_names[1:]
|
||||
# Remove any names that will be replaced with mocks.
|
||||
if hasattr(function, "__wrapped__"):
|
||||
arg_names = arg_names[num_mock_patch_args(function) :]
|
||||
return arg_names
|
||||
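# Editorial sketch (not part of the upstream file): getfuncargnames() keeps
# only the mandatory argument names, as described in its docstring above.
def _example_getfuncargnames() -> tuple[str, ...]:
    def sample(a, b=1, *, c, d=2):
        pass

    # "b" and "d" have defaults, so only "a" and "c" are returned: ("a", "c").
    return getfuncargnames(sample)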
|
||||
|
||||
def get_default_arg_names(function: Callable[..., Any]) -> tuple[str, ...]:
|
||||
# Note: this code intentionally mirrors the code at the beginning of
|
||||
# getfuncargnames, to get the arguments which were excluded from its result
|
||||
# because they had default values.
|
||||
return tuple(
|
||||
p.name
|
||||
for p in signature(function).parameters.values()
|
||||
if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY)
|
||||
and p.default is not Parameter.empty
|
||||
)
|
||||
|
||||
|
||||
_non_printable_ascii_translate_table = {
|
||||
i: f"\\x{i:02x}" for i in range(128) if i not in range(32, 127)
|
||||
}
|
||||
_non_printable_ascii_translate_table.update(
|
||||
{ord("\t"): "\\t", ord("\r"): "\\r", ord("\n"): "\\n"}
|
||||
)
|
||||
|
||||
|
||||
def _translate_non_printable(s: str) -> str:
|
||||
return s.translate(_non_printable_ascii_translate_table)
|
||||
|
||||
|
||||
STRING_TYPES = bytes, str
|
||||
|
||||
|
||||
def _bytes_to_ascii(val: bytes) -> str:
|
||||
return val.decode("ascii", "backslashreplace")
|
||||
|
||||
|
||||
def ascii_escaped(val: bytes | str) -> str:
|
||||
r"""If val is pure ASCII, return it as an str, otherwise, escape
|
||||
bytes objects into a sequence of escaped bytes:
|
||||
|
||||
b'\xc3\xb4\xc5\xd6' -> r'\xc3\xb4\xc5\xd6'
|
||||
|
||||
and escapes unicode objects into a sequence of escaped unicode
|
||||
ids, e.g.:
|
||||
|
||||
r'4\nV\U00043efa\x0eMXWB\x1e\u3028\u15fd\xcd\U0007d944'
|
||||
|
||||
Note:
|
||||
The obvious "v.decode('unicode-escape')" will return
|
||||
valid UTF-8 unicode if it finds them in bytes, but we
|
||||
want to return escaped bytes for any byte, even if they match
|
||||
a UTF-8 string.
|
||||
"""
|
||||
if isinstance(val, bytes):
|
||||
ret = _bytes_to_ascii(val)
|
||||
else:
|
||||
ret = val.encode("unicode_escape").decode("ascii")
|
||||
return _translate_non_printable(ret)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class _PytestWrapper:
|
||||
"""Dummy wrapper around a function object for internal use only.
|
||||
|
||||
Used to correctly unwrap the underlying function object when we are
|
||||
creating fixtures, because we wrap the function object ourselves with a
|
||||
decorator to issue warnings when the fixture function is called directly.
|
||||
"""
|
||||
|
||||
obj: Any
|
||||
|
||||
|
||||
def get_real_func(obj):
|
||||
"""Get the real function object of the (possibly) wrapped object by
|
||||
functools.wraps or functools.partial."""
|
||||
start_obj = obj
|
||||
for i in range(100):
|
||||
# __pytest_wrapped__ is set by @pytest.fixture when wrapping the fixture function
|
||||
# to trigger a warning if it gets called directly instead of by pytest: we don't
|
||||
# want to unwrap further than this otherwise we lose useful wrappings like @mock.patch (#3774)
|
||||
new_obj = getattr(obj, "__pytest_wrapped__", None)
|
||||
if isinstance(new_obj, _PytestWrapper):
|
||||
obj = new_obj.obj
|
||||
break
|
||||
new_obj = getattr(obj, "__wrapped__", None)
|
||||
if new_obj is None:
|
||||
break
|
||||
obj = new_obj
|
||||
else:
|
||||
from _pytest._io.saferepr import saferepr
|
||||
|
||||
raise ValueError(
|
||||
("could not find real function of {start}\nstopped at {current}").format(
|
||||
start=saferepr(start_obj), current=saferepr(obj)
|
||||
)
|
||||
)
|
||||
if isinstance(obj, functools.partial):
|
||||
obj = obj.func
|
||||
return obj
|
||||
|
||||
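# Editorial sketch (not part of the upstream file): functools.wraps() sets
# __wrapped__, so get_real_func() can walk back through decorator layers to the
# original function.
def _example_get_real_func() -> bool:
    import functools

    def original():
        return "original"

    @functools.wraps(original)
    def wrapper():
        return original()

    return get_real_func(wrapper) is original  # True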
|
||||
def get_real_method(obj, holder):
|
||||
"""Attempt to obtain the real function object that might be wrapping
|
||||
``obj``, while at the same time returning a bound method to ``holder`` if
|
||||
the original object was a bound method."""
|
||||
try:
|
||||
is_method = hasattr(obj, "__func__")
|
||||
obj = get_real_func(obj)
|
||||
except Exception: # pragma: no cover
|
||||
return obj
|
||||
if is_method and hasattr(obj, "__get__") and callable(obj.__get__):
|
||||
obj = obj.__get__(holder)
|
||||
return obj
|
||||
|
||||
|
||||
def getimfunc(func):
|
||||
try:
|
||||
return func.__func__
|
||||
except AttributeError:
|
||||
return func
|
||||
|
||||
|
||||
def safe_getattr(object: Any, name: str, default: Any) -> Any:
|
||||
"""Like getattr but return default upon any Exception or any OutcomeException.
|
||||
|
||||
Attribute access can potentially fail for 'evil' Python objects.
|
||||
See issue #214.
|
||||
It catches OutcomeException because of #2490 (issue #580), new outcomes
|
||||
are derived from BaseException instead of Exception (for more details
|
||||
check #2707).
|
||||
"""
|
||||
from _pytest.outcomes import TEST_OUTCOME
|
||||
|
||||
try:
|
||||
return getattr(object, name, default)
|
||||
except TEST_OUTCOME:
|
||||
return default
|
||||
|
||||
|
||||
def safe_isclass(obj: object) -> bool:
|
||||
"""Ignore any exception via isinstance on Python 3."""
|
||||
try:
|
||||
return inspect.isclass(obj)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
if sys.version_info >= (3, 8):
|
||||
from typing import final as final
|
||||
else:
|
||||
from typing_extensions import final as final
|
||||
elif sys.version_info >= (3, 8):
|
||||
from typing import final as final
|
||||
else:
|
||||
|
||||
def final(f):
|
||||
return f
|
||||
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
from functools import cached_property as cached_property
|
||||
else:
|
||||
|
||||
class cached_property(Generic[_S, _T]):
|
||||
__slots__ = ("func", "__doc__")
|
||||
|
||||
def __init__(self, func: Callable[[_S], _T]) -> None:
|
||||
self.func = func
|
||||
self.__doc__ = func.__doc__
|
||||
|
||||
@overload
|
||||
def __get__(
|
||||
self, instance: None, owner: type[_S] | None = ...
|
||||
) -> cached_property[_S, _T]:
|
||||
...
|
||||
|
||||
@overload
|
||||
def __get__(self, instance: _S, owner: type[_S] | None = ...) -> _T:
|
||||
...
|
||||
|
||||
def __get__(self, instance, owner=None):
|
||||
if instance is None:
|
||||
return self
|
||||
value = instance.__dict__[self.func.__name__] = self.func(instance)
|
||||
return value
|
||||
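# Editorial sketch (not part of the upstream file): whichever branch above
# provided cached_property, the value is computed on first access and then
# served from the instance's __dict__.
class _ExampleWithCachedProperty:
    def __init__(self) -> None:
        self.calls = 0

    @cached_property
    def answer(self) -> int:
        self.calls += 1  # incremented only once, on the first access
        return 42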
|
||||
|
||||
def get_user_id() -> int | None:
|
||||
"""Return the current process's real user id or None if it could not be
|
||||
determined.
|
||||
|
||||
:return: The user id or None if it could not be determined.
|
||||
"""
|
||||
# mypy follows the version and platform checking expectation of PEP 484:
|
||||
# https://mypy.readthedocs.io/en/stable/common_issues.html?highlight=platform#python-version-and-system-platform-checks
|
||||
# Containment checks are too complex for mypy v1.5.0 and cause failure.
|
||||
if sys.platform == "win32" or sys.platform == "emscripten":
|
||||
# win32 does not have a getuid() function.
|
||||
# Emscripten has a return 0 stub.
|
||||
return None
|
||||
else:
|
||||
# On other platforms, a return value of -1 is assumed to indicate that
|
||||
# the current process's real user id could not be determined.
|
||||
ERROR = -1
|
||||
uid = os.getuid()
|
||||
return uid if uid != ERROR else None
|
||||
|
||||
|
||||
# Perform exhaustiveness checking.
|
||||
#
|
||||
# Consider this example:
|
||||
#
|
||||
# MyUnion = Union[int, str]
|
||||
#
|
||||
# def handle(x: MyUnion) -> int {
|
||||
# if isinstance(x, int):
|
||||
# return 1
|
||||
# elif isinstance(x, str):
|
||||
# return 2
|
||||
# else:
|
||||
# raise Exception('unreachable')
|
||||
#
|
||||
# Now suppose we add a new variant:
|
||||
#
|
||||
# MyUnion = Union[int, str, bytes]
|
||||
#
|
||||
# After doing this, we must remember ourselves to go and update the handle
|
||||
# function to handle the new variant.
|
||||
#
|
||||
# With `assert_never` we can do better:
|
||||
#
|
||||
# // raise Exception('unreachable')
|
||||
# return assert_never(x)
|
||||
#
|
||||
# Now, if we forget to handle the new variant, the type-checker will emit a
|
||||
# compile-time error, instead of the runtime error we would have gotten
|
||||
# previously.
|
||||
#
|
||||
# This also works for Enums (if you use `is` to compare) and Literals.
|
||||
def assert_never(value: NoReturn) -> NoReturn:
|
||||
assert False, f"Unhandled value: {value} ({type(value).__name__})"
|
||||
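# Editorial sketch (not part of the upstream file): a runnable version of the
# exhaustiveness-checking pattern described in the comment above.
def _example_handle(x: "int | str") -> int:
    if isinstance(x, int):
        return 1
    elif isinstance(x, str):
        return 2
    else:
        # If a new variant is added to the union and not handled above, a type
        # checker flags this call; at runtime it still fails loudly.
        return assert_never(x)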
1816
venv/lib/python3.12/site-packages/_pytest/config/__init__.py
Normal file
File diff suppressed because it is too large
551
venv/lib/python3.12/site-packages/_pytest/config/argparsing.py
Normal file
@@ -0,0 +1,551 @@
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
from gettext import gettext
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import cast
|
||||
from typing import Dict
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import NoReturn
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Union
|
||||
|
||||
import _pytest._io
|
||||
from _pytest.compat import final
|
||||
from _pytest.config.exceptions import UsageError
|
||||
from _pytest.deprecated import ARGUMENT_PERCENT_DEFAULT
|
||||
from _pytest.deprecated import ARGUMENT_TYPE_STR
|
||||
from _pytest.deprecated import ARGUMENT_TYPE_STR_CHOICE
|
||||
from _pytest.deprecated import check_ispytest
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing_extensions import Literal
|
||||
|
||||
FILE_OR_DIR = "file_or_dir"
|
||||
|
||||
|
||||
@final
|
||||
class Parser:
|
||||
"""Parser for command line arguments and ini-file values.
|
||||
|
||||
:ivar extra_info: Dict of generic param -> value to display in case
|
||||
there's an error processing the command line arguments.
|
||||
"""
|
||||
|
||||
prog: Optional[str] = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
usage: Optional[str] = None,
|
||||
processopt: Optional[Callable[["Argument"], None]] = None,
|
||||
*,
|
||||
_ispytest: bool = False,
|
||||
) -> None:
|
||||
check_ispytest(_ispytest)
|
||||
self._anonymous = OptionGroup("Custom options", parser=self, _ispytest=True)
|
||||
self._groups: List[OptionGroup] = []
|
||||
self._processopt = processopt
|
||||
self._usage = usage
|
||||
self._inidict: Dict[str, Tuple[str, Optional[str], Any]] = {}
|
||||
self._ininames: List[str] = []
|
||||
self.extra_info: Dict[str, Any] = {}
|
||||
|
||||
def processoption(self, option: "Argument") -> None:
|
||||
if self._processopt:
|
||||
if option.dest:
|
||||
self._processopt(option)
|
||||
|
||||
def getgroup(
|
||||
self, name: str, description: str = "", after: Optional[str] = None
|
||||
) -> "OptionGroup":
|
||||
"""Get (or create) a named option Group.
|
||||
|
||||
:param name: Name of the option group.
|
||||
:param description: Long description for --help output.
|
||||
:param after: Name of another group, used for ordering --help output.
|
||||
:returns: The option group.
|
||||
|
||||
The returned group object has an ``addoption`` method with the same
|
||||
signature as :func:`parser.addoption <pytest.Parser.addoption>` but
|
||||
will be shown in the respective group in the output of
|
||||
``pytest --help``.
|
||||
"""
|
||||
for group in self._groups:
|
||||
if group.name == name:
|
||||
return group
|
||||
group = OptionGroup(name, description, parser=self, _ispytest=True)
|
||||
i = 0
|
||||
for i, grp in enumerate(self._groups):
|
||||
if grp.name == after:
|
||||
break
|
||||
self._groups.insert(i + 1, group)
|
||||
return group
|
||||
|
||||
def addoption(self, *opts: str, **attrs: Any) -> None:
|
||||
"""Register a command line option.
|
||||
|
||||
:param opts:
|
||||
Option names, can be short or long options.
|
||||
:param attrs:
|
||||
Same attributes as the argparse library's :py:func:`add_argument()
|
||||
<argparse.ArgumentParser.add_argument>` function accepts.
|
||||
|
||||
After command line parsing, options are available on the pytest config
|
||||
object via ``config.option.NAME`` where ``NAME`` is usually set
|
||||
by passing a ``dest`` attribute, for example
|
||||
``addoption("--long", dest="NAME", ...)``.
|
||||
"""
|
||||
self._anonymous.addoption(*opts, **attrs)
|
||||
|
||||
def parse(
|
||||
self,
|
||||
args: Sequence[Union[str, "os.PathLike[str]"]],
|
||||
namespace: Optional[argparse.Namespace] = None,
|
||||
) -> argparse.Namespace:
|
||||
from _pytest._argcomplete import try_argcomplete
|
||||
|
||||
self.optparser = self._getparser()
|
||||
try_argcomplete(self.optparser)
|
||||
strargs = [os.fspath(x) for x in args]
|
||||
return self.optparser.parse_args(strargs, namespace=namespace)
|
||||
|
||||
def _getparser(self) -> "MyOptionParser":
|
||||
from _pytest._argcomplete import filescompleter
|
||||
|
||||
optparser = MyOptionParser(self, self.extra_info, prog=self.prog)
|
||||
groups = self._groups + [self._anonymous]
|
||||
for group in groups:
|
||||
if group.options:
|
||||
desc = group.description or group.name
|
||||
arggroup = optparser.add_argument_group(desc)
|
||||
for option in group.options:
|
||||
n = option.names()
|
||||
a = option.attrs()
|
||||
arggroup.add_argument(*n, **a)
|
||||
file_or_dir_arg = optparser.add_argument(FILE_OR_DIR, nargs="*")
|
||||
# bash like autocompletion for dirs (appending '/')
|
||||
# Type ignored because typeshed doesn't know about argcomplete.
|
||||
file_or_dir_arg.completer = filescompleter # type: ignore
|
||||
return optparser
|
||||
|
||||
def parse_setoption(
|
||||
self,
|
||||
args: Sequence[Union[str, "os.PathLike[str]"]],
|
||||
option: argparse.Namespace,
|
||||
namespace: Optional[argparse.Namespace] = None,
|
||||
) -> List[str]:
|
||||
parsedoption = self.parse(args, namespace=namespace)
|
||||
for name, value in parsedoption.__dict__.items():
|
||||
setattr(option, name, value)
|
||||
return cast(List[str], getattr(parsedoption, FILE_OR_DIR))
|
||||
|
||||
def parse_known_args(
|
||||
self,
|
||||
args: Sequence[Union[str, "os.PathLike[str]"]],
|
||||
namespace: Optional[argparse.Namespace] = None,
|
||||
) -> argparse.Namespace:
|
||||
"""Parse the known arguments at this point.
|
||||
|
||||
:returns: An argparse namespace object.
|
||||
"""
|
||||
return self.parse_known_and_unknown_args(args, namespace=namespace)[0]
|
||||
|
||||
def parse_known_and_unknown_args(
|
||||
self,
|
||||
args: Sequence[Union[str, "os.PathLike[str]"]],
|
||||
namespace: Optional[argparse.Namespace] = None,
|
||||
) -> Tuple[argparse.Namespace, List[str]]:
|
||||
"""Parse the known arguments at this point, and also return the
|
||||
remaining unknown arguments.
|
||||
|
||||
:returns:
|
||||
A tuple containing an argparse namespace object for the known
|
||||
arguments, and a list of the unknown arguments.
|
||||
"""
|
||||
optparser = self._getparser()
|
||||
strargs = [os.fspath(x) for x in args]
|
||||
return optparser.parse_known_args(strargs, namespace=namespace)
|
||||
|
||||
def addini(
|
||||
self,
|
||||
name: str,
|
||||
help: str,
|
||||
type: Optional[
|
||||
"Literal['string', 'paths', 'pathlist', 'args', 'linelist', 'bool']"
|
||||
] = None,
|
||||
default: Any = None,
|
||||
) -> None:
|
||||
"""Register an ini-file option.
|
||||
|
||||
:param name:
|
||||
Name of the ini-variable.
|
||||
:param type:
|
||||
Type of the variable. Can be:
|
||||
|
||||
* ``string``: a string
|
||||
* ``bool``: a boolean
|
||||
* ``args``: a list of strings, separated as in a shell
|
||||
* ``linelist``: a list of strings, separated by line breaks
|
||||
* ``paths``: a list of :class:`pathlib.Path`, separated as in a shell
|
||||
* ``pathlist``: a list of ``py.path``, separated as in a shell
|
||||
|
||||
.. versionadded:: 7.0
|
||||
The ``paths`` variable type.
|
||||
|
||||
Defaults to ``string`` if ``None`` or not passed.
|
||||
:param default:
|
||||
Default value if no ini-file option exists but is queried.
|
||||
|
||||
The value of ini-variables can be retrieved via a call to
|
||||
:py:func:`config.getini(name) <pytest.Config.getini>`.
|
||||
"""
|
||||
assert type in (None, "string", "paths", "pathlist", "args", "linelist", "bool")
|
||||
self._inidict[name] = (help, type, default)
|
||||
self._ininames.append(name)
|
||||
|
||||
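# Editorial sketch (not part of the upstream file): typical use of the Parser
# API documented above from a conftest.py or plugin. The option and ini names
# below are made up for illustration.
#
#     def pytest_addoption(parser):
#         parser.addoption("--slow", action="store_true", dest="slow",
#                          help="also run tests marked as slow")
#         parser.addini("app_base_url", help="base URL used by the test suite",
#                       default="http://localhost:8000")
#
#     def pytest_configure(config):
#         run_slow = config.getoption("slow")       # set via dest="slow"
#         base_url = config.getini("app_base_url")  # set via addini()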
|
||||
class ArgumentError(Exception):
|
||||
"""Raised if an Argument instance is created with invalid or
|
||||
inconsistent arguments."""
|
||||
|
||||
def __init__(self, msg: str, option: Union["Argument", str]) -> None:
|
||||
self.msg = msg
|
||||
self.option_id = str(option)
|
||||
|
||||
def __str__(self) -> str:
|
||||
if self.option_id:
|
||||
return f"option {self.option_id}: {self.msg}"
|
||||
else:
|
||||
return self.msg
|
||||
|
||||
|
||||
class Argument:
|
||||
"""Class that mimics the necessary behaviour of optparse.Option.
|
||||
|
||||
It's currently a least effort implementation and ignoring choices
|
||||
and integer prefixes.
|
||||
|
||||
https://docs.python.org/3/library/optparse.html#optparse-standard-option-types
|
||||
"""
|
||||
|
||||
_typ_map = {"int": int, "string": str, "float": float, "complex": complex}
|
||||
|
||||
def __init__(self, *names: str, **attrs: Any) -> None:
|
||||
"""Store params in private vars for use in add_argument."""
|
||||
self._attrs = attrs
|
||||
self._short_opts: List[str] = []
|
||||
self._long_opts: List[str] = []
|
||||
if "%default" in (attrs.get("help") or ""):
|
||||
warnings.warn(ARGUMENT_PERCENT_DEFAULT, stacklevel=3)
|
||||
try:
|
||||
typ = attrs["type"]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
# This might raise a keyerror as well, don't want to catch that.
|
||||
if isinstance(typ, str):
|
||||
if typ == "choice":
|
||||
warnings.warn(
|
||||
ARGUMENT_TYPE_STR_CHOICE.format(typ=typ, names=names),
|
||||
stacklevel=4,
|
||||
)
|
||||
# argparse expects a type here take it from
|
||||
# the type of the first element
|
||||
attrs["type"] = type(attrs["choices"][0])
|
||||
else:
|
||||
warnings.warn(
|
||||
ARGUMENT_TYPE_STR.format(typ=typ, names=names), stacklevel=4
|
||||
)
|
||||
attrs["type"] = Argument._typ_map[typ]
|
||||
# Used in test_parseopt -> test_parse_defaultgetter.
|
||||
self.type = attrs["type"]
|
||||
else:
|
||||
self.type = typ
|
||||
try:
|
||||
# Attribute existence is tested in Config._processopt.
|
||||
self.default = attrs["default"]
|
||||
except KeyError:
|
||||
pass
|
||||
self._set_opt_strings(names)
|
||||
dest: Optional[str] = attrs.get("dest")
|
||||
if dest:
|
||||
self.dest = dest
|
||||
elif self._long_opts:
|
||||
self.dest = self._long_opts[0][2:].replace("-", "_")
|
||||
else:
|
||||
try:
|
||||
self.dest = self._short_opts[0][1:]
|
||||
except IndexError as e:
|
||||
self.dest = "???" # Needed for the error repr.
|
||||
raise ArgumentError("need a long or short option", self) from e
|
||||
|
||||
def names(self) -> List[str]:
|
||||
return self._short_opts + self._long_opts
|
||||
|
||||
def attrs(self) -> Mapping[str, Any]:
|
||||
# Update any attributes set by processopt.
|
||||
attrs = "default dest help".split()
|
||||
attrs.append(self.dest)
|
||||
for attr in attrs:
|
||||
try:
|
||||
self._attrs[attr] = getattr(self, attr)
|
||||
except AttributeError:
|
||||
pass
|
||||
if self._attrs.get("help"):
|
||||
a = self._attrs["help"]
|
||||
a = a.replace("%default", "%(default)s")
|
||||
# a = a.replace('%prog', '%(prog)s')
|
||||
self._attrs["help"] = a
|
||||
return self._attrs
|
||||
|
||||
def _set_opt_strings(self, opts: Sequence[str]) -> None:
|
||||
"""Directly from optparse.
|
||||
|
||||
Might not be necessary as this is passed to argparse later on.
|
||||
"""
|
||||
for opt in opts:
|
||||
if len(opt) < 2:
|
||||
raise ArgumentError(
|
||||
"invalid option string %r: "
|
||||
"must be at least two characters long" % opt,
|
||||
self,
|
||||
)
|
||||
elif len(opt) == 2:
|
||||
if not (opt[0] == "-" and opt[1] != "-"):
|
||||
raise ArgumentError(
|
||||
"invalid short option string %r: "
|
||||
"must be of the form -x, (x any non-dash char)" % opt,
|
||||
self,
|
||||
)
|
||||
self._short_opts.append(opt)
|
||||
else:
|
||||
if not (opt[0:2] == "--" and opt[2] != "-"):
|
||||
raise ArgumentError(
|
||||
"invalid long option string %r: "
|
||||
"must start with --, followed by non-dash" % opt,
|
||||
self,
|
||||
)
|
||||
self._long_opts.append(opt)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
args: List[str] = []
|
||||
if self._short_opts:
|
||||
args += ["_short_opts: " + repr(self._short_opts)]
|
||||
if self._long_opts:
|
||||
args += ["_long_opts: " + repr(self._long_opts)]
|
||||
args += ["dest: " + repr(self.dest)]
|
||||
if hasattr(self, "type"):
|
||||
args += ["type: " + repr(self.type)]
|
||||
if hasattr(self, "default"):
|
||||
args += ["default: " + repr(self.default)]
|
||||
return "Argument({})".format(", ".join(args))
|
||||
|
||||
|
||||
class OptionGroup:
|
||||
"""A group of options shown in its own section."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
description: str = "",
|
||||
parser: Optional[Parser] = None,
|
||||
*,
|
||||
_ispytest: bool = False,
|
||||
) -> None:
|
||||
check_ispytest(_ispytest)
|
||||
self.name = name
|
||||
self.description = description
|
||||
self.options: List[Argument] = []
|
||||
self.parser = parser
|
||||
|
||||
def addoption(self, *opts: str, **attrs: Any) -> None:
|
||||
"""Add an option to this group.
|
||||
|
||||
If a shortened version of a long option is specified, it will
|
||||
be suppressed in the help. ``addoption('--twowords', '--two-words')``
|
||||
results in help showing ``--two-words`` only, but ``--twowords`` gets
|
||||
accepted **and** the automatic destination is in ``args.twowords``.
|
||||
|
||||
:param opts:
|
||||
Option names, can be short or long options.
|
||||
:param attrs:
|
||||
Same attributes as the argparse library's :py:func:`add_argument()
|
||||
<argparse.ArgumentParser.add_argument>` function accepts.
|
||||
"""
|
||||
conflict = set(opts).intersection(
|
||||
name for opt in self.options for name in opt.names()
|
||||
)
|
||||
if conflict:
|
||||
raise ValueError("option names %s already added" % conflict)
|
||||
option = Argument(*opts, **attrs)
|
||||
self._addoption_instance(option, shortupper=False)
|
||||
|
||||
def _addoption(self, *opts: str, **attrs: Any) -> None:
|
||||
option = Argument(*opts, **attrs)
|
||||
self._addoption_instance(option, shortupper=True)
|
||||
|
||||
def _addoption_instance(self, option: "Argument", shortupper: bool = False) -> None:
|
||||
if not shortupper:
|
||||
for opt in option._short_opts:
|
||||
if opt[0] == "-" and opt[1].islower():
|
||||
raise ValueError("lowercase shortoptions reserved")
|
||||
if self.parser:
|
||||
self.parser.processoption(option)
|
||||
self.options.append(option)
|
||||
|
||||
|
||||
class MyOptionParser(argparse.ArgumentParser):
|
||||
def __init__(
|
||||
self,
|
||||
parser: Parser,
|
||||
extra_info: Optional[Dict[str, Any]] = None,
|
||||
prog: Optional[str] = None,
|
||||
) -> None:
|
||||
self._parser = parser
|
||||
super().__init__(
|
||||
prog=prog,
|
||||
usage=parser._usage,
|
||||
add_help=False,
|
||||
formatter_class=DropShorterLongHelpFormatter,
|
||||
allow_abbrev=False,
|
||||
)
|
||||
# extra_info is a dict of (param -> value) to display if there's
# a usage error, to provide more contextual information to the user.
|
||||
self.extra_info = extra_info if extra_info else {}
|
||||
|
||||
def error(self, message: str) -> NoReturn:
|
||||
"""Transform argparse error message into UsageError."""
|
||||
msg = f"{self.prog}: error: {message}"
|
||||
|
||||
if hasattr(self._parser, "_config_source_hint"):
|
||||
# Type ignored because the attribute is set dynamically.
|
||||
msg = f"{msg} ({self._parser._config_source_hint})" # type: ignore
|
||||
|
||||
raise UsageError(self.format_usage() + msg)
|
||||
|
||||
# Type ignored because typeshed has a very complex type in the superclass.
|
||||
def parse_args( # type: ignore
|
||||
self,
|
||||
args: Optional[Sequence[str]] = None,
|
||||
namespace: Optional[argparse.Namespace] = None,
|
||||
) -> argparse.Namespace:
|
||||
"""Allow splitting of positional arguments."""
|
||||
parsed, unrecognized = self.parse_known_args(args, namespace)
|
||||
if unrecognized:
|
||||
for arg in unrecognized:
|
||||
if arg and arg[0] == "-":
|
||||
lines = ["unrecognized arguments: %s" % (" ".join(unrecognized))]
|
||||
for k, v in sorted(self.extra_info.items()):
|
||||
lines.append(f" {k}: {v}")
|
||||
self.error("\n".join(lines))
|
||||
getattr(parsed, FILE_OR_DIR).extend(unrecognized)
|
||||
return parsed
|
||||
|
||||
if sys.version_info[:2] < (3, 9): # pragma: no cover
|
||||
# Backport of https://github.com/python/cpython/pull/14316 so we can
|
||||
# disable long --argument abbreviations without breaking short flags.
|
||||
def _parse_optional(
|
||||
self, arg_string: str
|
||||
) -> Optional[Tuple[Optional[argparse.Action], str, Optional[str]]]:
|
||||
if not arg_string:
|
||||
return None
|
||||
if not arg_string[0] in self.prefix_chars:
|
||||
return None
|
||||
if arg_string in self._option_string_actions:
|
||||
action = self._option_string_actions[arg_string]
|
||||
return action, arg_string, None
|
||||
if len(arg_string) == 1:
|
||||
return None
|
||||
if "=" in arg_string:
|
||||
option_string, explicit_arg = arg_string.split("=", 1)
|
||||
if option_string in self._option_string_actions:
|
||||
action = self._option_string_actions[option_string]
|
||||
return action, option_string, explicit_arg
|
||||
if self.allow_abbrev or not arg_string.startswith("--"):
|
||||
option_tuples = self._get_option_tuples(arg_string)
|
||||
if len(option_tuples) > 1:
|
||||
msg = gettext(
|
||||
"ambiguous option: %(option)s could match %(matches)s"
|
||||
)
|
||||
options = ", ".join(option for _, option, _ in option_tuples)
|
||||
self.error(msg % {"option": arg_string, "matches": options})
|
||||
elif len(option_tuples) == 1:
|
||||
(option_tuple,) = option_tuples
|
||||
return option_tuple
|
||||
if self._negative_number_matcher.match(arg_string):
|
||||
if not self._has_negative_number_optionals:
|
||||
return None
|
||||
if " " in arg_string:
|
||||
return None
|
||||
return None, arg_string, None
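# Illustrative sketch (not part of this module): MyOptionParser.parse_args()
# above relies on argparse.ArgumentParser.parse_known_args() so that unknown
# positional arguments can fall through into the file_or_dir bucket. The
# option names below are made up for the example.
import argparse

_p = argparse.ArgumentParser(allow_abbrev=False)
_p.add_argument("-q", action="store_true")
_p.add_argument("file_or_dir", nargs="*")
_parsed, _unrecognized = _p.parse_known_args(["-q", "tests/test_x.py", "--unknown"])
print(_parsed.file_or_dir)   # ['tests/test_x.py']
print(_unrecognized)         # ['--unknown']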
|
||||
|
||||
|
||||
class DropShorterLongHelpFormatter(argparse.HelpFormatter):
|
||||
"""Shorten help for long options that differ only in extra hyphens.
|
||||
|
||||
- Collapse **long** options that are the same except for extra hyphens.
|
||||
- Shortcut if there are only two options and one of them is a short one.
|
||||
- Cache result on the action object as this is called at least 2 times.
|
||||
"""
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
# Use more accurate terminal width.
|
||||
if "width" not in kwargs:
|
||||
kwargs["width"] = _pytest._io.get_terminal_width()
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def _format_action_invocation(self, action: argparse.Action) -> str:
|
||||
orgstr = super()._format_action_invocation(action)
|
||||
if orgstr and orgstr[0] != "-": # only optional arguments
|
||||
return orgstr
|
||||
res: Optional[str] = getattr(action, "_formatted_action_invocation", None)
|
||||
if res:
|
||||
return res
|
||||
options = orgstr.split(", ")
|
||||
if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2):
|
||||
# a shortcut for '-h, --help' or '--abc', '-a'
|
||||
action._formatted_action_invocation = orgstr # type: ignore
|
||||
return orgstr
|
||||
return_list = []
|
||||
short_long: Dict[str, str] = {}
|
||||
for option in options:
|
||||
if len(option) == 2 or option[2] == " ":
|
||||
continue
|
||||
if not option.startswith("--"):
|
||||
raise ArgumentError(
|
||||
'long optional argument without "--": [%s]' % (option), option
|
||||
)
|
||||
xxoption = option[2:]
|
||||
shortened = xxoption.replace("-", "")
|
||||
if shortened not in short_long or len(short_long[shortened]) < len(
|
||||
xxoption
|
||||
):
|
||||
short_long[shortened] = xxoption
|
||||
# Now short_long maps each collapsed name to its longest dashed spelling,
# **and** we keep the original option ordering from add_argument.
|
||||
for option in options:
|
||||
if len(option) == 2 or option[2] == " ":
|
||||
return_list.append(option)
|
||||
if option[2:] == short_long.get(option.replace("-", "")):
|
||||
return_list.append(option.replace(" ", "=", 1))
|
||||
formatted_action_invocation = ", ".join(return_list)
|
||||
action._formatted_action_invocation = formatted_action_invocation # type: ignore
|
||||
return formatted_action_invocation
|
||||
|
||||
def _split_lines(self, text, width):
|
||||
"""Wrap lines after splitting on original newlines.
|
||||
|
||||
This allows explicit line breaks in the help text.
|
||||
"""
|
||||
import textwrap
|
||||
|
||||
lines = []
|
||||
for line in text.splitlines():
|
||||
lines.extend(textwrap.wrap(line.strip(), width))
|
||||
return lines
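# Illustrative sketch (not part of this module): the destination rule that
# Argument and DropShorterLongHelpFormatter above build on. argparse derives
# `dest` from the first long option string, with dashes becoming underscores,
# while every alias is still accepted on the command line.
import argparse

_p = argparse.ArgumentParser()
_p.add_argument("--twowords", "--two-words", action="store_true")
_ns = _p.parse_args(["--two-words"])
assert _ns.twowords is True  # dest comes from the first long option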
|
||||
70
venv/lib/python3.12/site-packages/_pytest/config/compat.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import functools
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from ..compat import LEGACY_PATH
|
||||
from ..compat import legacy_path
|
||||
from ..deprecated import HOOK_LEGACY_PATH_ARG
|
||||
from _pytest.nodes import _check_path
|
||||
|
||||
# hookname: (Path, LEGACY_PATH)
|
||||
imply_paths_hooks = {
|
||||
"pytest_ignore_collect": ("collection_path", "path"),
|
||||
"pytest_collect_file": ("file_path", "path"),
|
||||
"pytest_pycollect_makemodule": ("module_path", "path"),
|
||||
"pytest_report_header": ("start_path", "startdir"),
|
||||
"pytest_report_collectionfinish": ("start_path", "startdir"),
|
||||
}
|
||||
|
||||
|
||||
class PathAwareHookProxy:
|
||||
"""
|
||||
This helper wraps hook callers; until pluggy supports fixing calls,
this one will do.

It currently doesn't return full hook caller proxies for fixed hooks;
this may have to be changed later depending on bugs.
|
||||
"""
|
||||
|
||||
def __init__(self, hook_caller):
|
||||
self.__hook_caller = hook_caller
|
||||
|
||||
def __dir__(self):
|
||||
return dir(self.__hook_caller)
|
||||
|
||||
def __getattr__(self, key, _wraps=functools.wraps):
|
||||
hook = getattr(self.__hook_caller, key)
|
||||
if key not in imply_paths_hooks:
|
||||
self.__dict__[key] = hook
|
||||
return hook
|
||||
else:
|
||||
path_var, fspath_var = imply_paths_hooks[key]
|
||||
|
||||
@_wraps(hook)
|
||||
def fixed_hook(**kw):
|
||||
path_value: Optional[Path] = kw.pop(path_var, None)
|
||||
fspath_value: Optional[LEGACY_PATH] = kw.pop(fspath_var, None)
|
||||
if fspath_value is not None:
|
||||
warnings.warn(
|
||||
HOOK_LEGACY_PATH_ARG.format(
|
||||
pylib_path_arg=fspath_var, pathlib_path_arg=path_var
|
||||
),
|
||||
stacklevel=2,
|
||||
)
|
||||
if path_value is not None:
|
||||
if fspath_value is not None:
|
||||
_check_path(path_value, fspath_value)
|
||||
else:
|
||||
fspath_value = legacy_path(path_value)
|
||||
else:
|
||||
assert fspath_value is not None
|
||||
path_value = Path(fspath_value)
|
||||
|
||||
kw[path_var] = path_value
|
||||
kw[fspath_var] = fspath_value
|
||||
return hook(**kw)
|
||||
|
||||
fixed_hook.__name__ = key
|
||||
self.__dict__[key] = fixed_hook
|
||||
return fixed_hook
|
||||
@@ -0,0 +1,11 @@
|
||||
from _pytest.compat import final
|
||||
|
||||
|
||||
@final
|
||||
class UsageError(Exception):
|
||||
"""Error in pytest usage or invocation."""
|
||||
|
||||
|
||||
class PrintHelp(Exception):
|
||||
"""Raised when pytest should print its help to skip the rest of the
|
||||
argument parsing and validation."""
|
||||
218
venv/lib/python3.12/site-packages/_pytest/config/findpaths.py
Normal file
@@ -0,0 +1,218 @@
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Union
|
||||
|
||||
import iniconfig
|
||||
|
||||
from .exceptions import UsageError
|
||||
from _pytest.outcomes import fail
|
||||
from _pytest.pathlib import absolutepath
|
||||
from _pytest.pathlib import commonpath
|
||||
from _pytest.pathlib import safe_exists
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import Config
|
||||
|
||||
|
||||
def _parse_ini_config(path: Path) -> iniconfig.IniConfig:
|
||||
"""Parse the given generic '.ini' file using legacy IniConfig parser, returning
|
||||
the parsed object.
|
||||
|
||||
Raise UsageError if the file cannot be parsed.
|
||||
"""
|
||||
try:
|
||||
return iniconfig.IniConfig(str(path))
|
||||
except iniconfig.ParseError as exc:
|
||||
raise UsageError(str(exc)) from exc
|
||||
|
||||
|
||||
def load_config_dict_from_file(
|
||||
filepath: Path,
|
||||
) -> Optional[Dict[str, Union[str, List[str]]]]:
|
||||
"""Load pytest configuration from the given file path, if supported.
|
||||
|
||||
Return None if the file does not contain valid pytest configuration.
|
||||
"""
|
||||
|
||||
# Configuration from ini files is obtained from the [pytest] section, if present.
|
||||
if filepath.suffix == ".ini":
|
||||
iniconfig = _parse_ini_config(filepath)
|
||||
|
||||
if "pytest" in iniconfig:
|
||||
return dict(iniconfig["pytest"].items())
|
||||
else:
|
||||
# "pytest.ini" files are always the source of configuration, even if empty.
|
||||
if filepath.name == "pytest.ini":
|
||||
return {}
|
||||
|
||||
# '.cfg' files are considered if they contain a "[tool:pytest]" section.
|
||||
elif filepath.suffix == ".cfg":
|
||||
iniconfig = _parse_ini_config(filepath)
|
||||
|
||||
if "tool:pytest" in iniconfig.sections:
|
||||
return dict(iniconfig["tool:pytest"].items())
|
||||
elif "pytest" in iniconfig.sections:
|
||||
# If a setup.cfg contains a "[pytest]" section, we raise a failure to inform users that
# plain "[pytest]" sections in setup.cfg files are no longer supported (#3086).
|
||||
fail(CFG_PYTEST_SECTION.format(filename="setup.cfg"), pytrace=False)
|
||||
|
||||
# '.toml' files are considered if they contain a [tool.pytest.ini_options] table.
|
||||
elif filepath.suffix == ".toml":
|
||||
if sys.version_info >= (3, 11):
|
||||
import tomllib
|
||||
else:
|
||||
import tomli as tomllib
|
||||
|
||||
toml_text = filepath.read_text(encoding="utf-8")
|
||||
try:
|
||||
config = tomllib.loads(toml_text)
|
||||
except tomllib.TOMLDecodeError as exc:
|
||||
raise UsageError(f"{filepath}: {exc}") from exc
|
||||
|
||||
result = config.get("tool", {}).get("pytest", {}).get("ini_options", None)
|
||||
if result is not None:
|
||||
# TOML supports richer data types than ini files (strings, arrays, floats, ints, etc.),
|
||||
# however we need to convert all scalar values to str for compatibility with the rest
|
||||
# of the configuration system, which expects strings only.
|
||||
def make_scalar(v: object) -> Union[str, List[str]]:
|
||||
return v if isinstance(v, list) else str(v)
|
||||
|
||||
return {k: make_scalar(v) for k, v in result.items()}
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def locate_config(
|
||||
args: Iterable[Path],
|
||||
) -> Tuple[Optional[Path], Optional[Path], Dict[str, Union[str, List[str]]]]:
|
||||
"""Search in the list of arguments for a valid ini-file for pytest,
|
||||
and return a tuple of (rootdir, inifile, cfg-dict)."""
|
||||
config_names = [
|
||||
"pytest.ini",
|
||||
".pytest.ini",
|
||||
"pyproject.toml",
|
||||
"tox.ini",
|
||||
"setup.cfg",
|
||||
]
|
||||
args = [x for x in args if not str(x).startswith("-")]
|
||||
if not args:
|
||||
args = [Path.cwd()]
|
||||
for arg in args:
|
||||
argpath = absolutepath(arg)
|
||||
for base in (argpath, *argpath.parents):
|
||||
for config_name in config_names:
|
||||
p = base / config_name
|
||||
if p.is_file():
|
||||
ini_config = load_config_dict_from_file(p)
|
||||
if ini_config is not None:
|
||||
return base, p, ini_config
|
||||
return None, None, {}
|
||||
|
||||
|
||||
def get_common_ancestor(paths: Iterable[Path]) -> Path:
|
||||
common_ancestor: Optional[Path] = None
|
||||
for path in paths:
|
||||
if not path.exists():
|
||||
continue
|
||||
if common_ancestor is None:
|
||||
common_ancestor = path
|
||||
else:
|
||||
if common_ancestor in path.parents or path == common_ancestor:
|
||||
continue
|
||||
elif path in common_ancestor.parents:
|
||||
common_ancestor = path
|
||||
else:
|
||||
shared = commonpath(path, common_ancestor)
|
||||
if shared is not None:
|
||||
common_ancestor = shared
|
||||
if common_ancestor is None:
|
||||
common_ancestor = Path.cwd()
|
||||
elif common_ancestor.is_file():
|
||||
common_ancestor = common_ancestor.parent
|
||||
return common_ancestor
|
||||
|
||||
|
||||
def get_dirs_from_args(args: Iterable[str]) -> List[Path]:
|
||||
def is_option(x: str) -> bool:
|
||||
return x.startswith("-")
|
||||
|
||||
def get_file_part_from_node_id(x: str) -> str:
|
||||
return x.split("::")[0]
|
||||
|
||||
def get_dir_from_path(path: Path) -> Path:
|
||||
if path.is_dir():
|
||||
return path
|
||||
return path.parent
|
||||
|
||||
# These look like paths but may not exist
|
||||
possible_paths = (
|
||||
absolutepath(get_file_part_from_node_id(arg))
|
||||
for arg in args
|
||||
if not is_option(arg)
|
||||
)
|
||||
|
||||
return [get_dir_from_path(path) for path in possible_paths if safe_exists(path)]
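# Illustrative sketch (not part of this module): a node id argument is reduced
# to its file part before rootdir discovery, as in the helpers above.
print("tests/test_foo.py::TestBar::test_baz".split("::")[0])  # tests/test_foo.py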
|
||||
|
||||
|
||||
CFG_PYTEST_SECTION = "[pytest] section in {filename} files is no longer supported, change to [tool:pytest] instead."
|
||||
|
||||
|
||||
def determine_setup(
|
||||
inifile: Optional[str],
|
||||
args: Sequence[str],
|
||||
rootdir_cmd_arg: Optional[str] = None,
|
||||
config: Optional["Config"] = None,
|
||||
) -> Tuple[Path, Optional[Path], Dict[str, Union[str, List[str]]]]:
|
||||
rootdir = None
|
||||
dirs = get_dirs_from_args(args)
|
||||
if inifile:
|
||||
inipath_ = absolutepath(inifile)
|
||||
inipath: Optional[Path] = inipath_
|
||||
inicfg = load_config_dict_from_file(inipath_) or {}
|
||||
if rootdir_cmd_arg is None:
|
||||
rootdir = inipath_.parent
|
||||
else:
|
||||
ancestor = get_common_ancestor(dirs)
|
||||
rootdir, inipath, inicfg = locate_config([ancestor])
|
||||
if rootdir is None and rootdir_cmd_arg is None:
|
||||
for possible_rootdir in (ancestor, *ancestor.parents):
|
||||
if (possible_rootdir / "setup.py").is_file():
|
||||
rootdir = possible_rootdir
|
||||
break
|
||||
else:
|
||||
if dirs != [ancestor]:
|
||||
rootdir, inipath, inicfg = locate_config(dirs)
|
||||
if rootdir is None:
|
||||
if config is not None:
|
||||
cwd = config.invocation_params.dir
|
||||
else:
|
||||
cwd = Path.cwd()
|
||||
rootdir = get_common_ancestor([cwd, ancestor])
|
||||
if is_fs_root(rootdir):
|
||||
rootdir = ancestor
|
||||
if rootdir_cmd_arg:
|
||||
rootdir = absolutepath(os.path.expandvars(rootdir_cmd_arg))
|
||||
if not rootdir.is_dir():
|
||||
raise UsageError(
|
||||
"Directory '{}' not found. Check your '--rootdir' option.".format(
|
||||
rootdir
|
||||
)
|
||||
)
|
||||
assert rootdir is not None
|
||||
return rootdir, inipath, inicfg or {}
|
||||
|
||||
|
||||
def is_fs_root(p: Path) -> bool:
|
||||
r"""
|
||||
Return True if the given path is pointing to the root of the
|
||||
file system ("/" on Unix and "C:\\" on Windows for example).
|
||||
"""
|
||||
return os.path.splitdrive(str(p))[1] == os.sep
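# Illustrative sketch (not part of this module): how a [tool.pytest.ini_options]
# table is flattened to the str / list-of-str values the rest of the config
# system expects, mirroring make_scalar() in load_config_dict_from_file() above.
import sys

if sys.version_info >= (3, 11):
    import tomllib
else:
    import tomli as tomllib

_toml_text = """
[tool.pytest.ini_options]
minversion = 6.0
addopts = "-ra -q"
testpaths = ["tests", "integration"]
"""
_table = tomllib.loads(_toml_text)["tool"]["pytest"]["ini_options"]
print({k: v if isinstance(v, list) else str(v) for k, v in _table.items()})
# {'minversion': '6.0', 'addopts': '-ra -q', 'testpaths': ['tests', 'integration']}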
|
||||
391
venv/lib/python3.12/site-packages/_pytest/debugging.py
Normal file
@@ -0,0 +1,391 @@
|
||||
"""Interactive debugging with PDB, the Python Debugger."""
|
||||
import argparse
|
||||
import functools
|
||||
import sys
|
||||
import types
|
||||
import unittest
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Generator
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Union
|
||||
|
||||
from _pytest import outcomes
|
||||
from _pytest._code import ExceptionInfo
|
||||
from _pytest.config import Config
|
||||
from _pytest.config import ConftestImportFailure
|
||||
from _pytest.config import hookimpl
|
||||
from _pytest.config import PytestPluginManager
|
||||
from _pytest.config.argparsing import Parser
|
||||
from _pytest.config.exceptions import UsageError
|
||||
from _pytest.nodes import Node
|
||||
from _pytest.reports import BaseReport
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from _pytest.capture import CaptureManager
|
||||
from _pytest.runner import CallInfo
|
||||
|
||||
|
||||
def _validate_usepdb_cls(value: str) -> Tuple[str, str]:
|
||||
"""Validate syntax of --pdbcls option."""
|
||||
try:
|
||||
modname, classname = value.split(":")
|
||||
except ValueError as e:
|
||||
raise argparse.ArgumentTypeError(
|
||||
f"{value!r} is not in the format 'modname:classname'"
|
||||
) from e
|
||||
return (modname, classname)
|
||||
|
||||
|
||||
def pytest_addoption(parser: Parser) -> None:
|
||||
group = parser.getgroup("general")
|
||||
group._addoption(
|
||||
"--pdb",
|
||||
dest="usepdb",
|
||||
action="store_true",
|
||||
help="Start the interactive Python debugger on errors or KeyboardInterrupt",
|
||||
)
|
||||
group._addoption(
|
||||
"--pdbcls",
|
||||
dest="usepdb_cls",
|
||||
metavar="modulename:classname",
|
||||
type=_validate_usepdb_cls,
|
||||
help="Specify a custom interactive Python debugger for use with --pdb. "
"For example: --pdbcls=IPython.terminal.debugger:TerminalPdb",
|
||||
)
|
||||
group._addoption(
|
||||
"--trace",
|
||||
dest="trace",
|
||||
action="store_true",
|
||||
help="Immediately break when running each test",
|
||||
)
|
||||
|
||||
|
||||
def pytest_configure(config: Config) -> None:
|
||||
import pdb
|
||||
|
||||
if config.getvalue("trace"):
|
||||
config.pluginmanager.register(PdbTrace(), "pdbtrace")
|
||||
if config.getvalue("usepdb"):
|
||||
config.pluginmanager.register(PdbInvoke(), "pdbinvoke")
|
||||
|
||||
pytestPDB._saved.append(
|
||||
(pdb.set_trace, pytestPDB._pluginmanager, pytestPDB._config)
|
||||
)
|
||||
pdb.set_trace = pytestPDB.set_trace
|
||||
pytestPDB._pluginmanager = config.pluginmanager
|
||||
pytestPDB._config = config
|
||||
|
||||
# NOTE: not using pytest_unconfigure, since it might get called even though
# pytest_configure was not (if another plugin raises UsageError).
|
||||
def fin() -> None:
|
||||
(
|
||||
pdb.set_trace,
|
||||
pytestPDB._pluginmanager,
|
||||
pytestPDB._config,
|
||||
) = pytestPDB._saved.pop()
|
||||
|
||||
config.add_cleanup(fin)
|
||||
|
||||
|
||||
class pytestPDB:
|
||||
"""Pseudo PDB that defers to the real pdb."""
|
||||
|
||||
_pluginmanager: Optional[PytestPluginManager] = None
|
||||
_config: Optional[Config] = None
|
||||
_saved: List[
|
||||
Tuple[Callable[..., None], Optional[PytestPluginManager], Optional[Config]]
|
||||
] = []
|
||||
_recursive_debug = 0
|
||||
_wrapped_pdb_cls: Optional[Tuple[Type[Any], Type[Any]]] = None
|
||||
|
||||
@classmethod
|
||||
def _is_capturing(cls, capman: Optional["CaptureManager"]) -> Union[str, bool]:
|
||||
if capman:
|
||||
return capman.is_capturing()
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def _import_pdb_cls(cls, capman: Optional["CaptureManager"]):
|
||||
if not cls._config:
|
||||
import pdb
|
||||
|
||||
# Happens when using pytest.set_trace outside of a test.
|
||||
return pdb.Pdb
|
||||
|
||||
usepdb_cls = cls._config.getvalue("usepdb_cls")
|
||||
|
||||
if cls._wrapped_pdb_cls and cls._wrapped_pdb_cls[0] == usepdb_cls:
|
||||
return cls._wrapped_pdb_cls[1]
|
||||
|
||||
if usepdb_cls:
|
||||
modname, classname = usepdb_cls
|
||||
|
||||
try:
|
||||
__import__(modname)
|
||||
mod = sys.modules[modname]
|
||||
|
||||
# Handle --pdbcls=pdb:pdb.Pdb (useful e.g. with pdbpp).
|
||||
parts = classname.split(".")
|
||||
pdb_cls = getattr(mod, parts[0])
|
||||
for part in parts[1:]:
|
||||
pdb_cls = getattr(pdb_cls, part)
|
||||
except Exception as exc:
|
||||
value = ":".join((modname, classname))
|
||||
raise UsageError(
|
||||
f"--pdbcls: could not import {value!r}: {exc}"
|
||||
) from exc
|
||||
else:
|
||||
import pdb
|
||||
|
||||
pdb_cls = pdb.Pdb
|
||||
|
||||
wrapped_cls = cls._get_pdb_wrapper_class(pdb_cls, capman)
|
||||
cls._wrapped_pdb_cls = (usepdb_cls, wrapped_cls)
|
||||
return wrapped_cls
|
||||
|
||||
@classmethod
|
||||
def _get_pdb_wrapper_class(cls, pdb_cls, capman: Optional["CaptureManager"]):
|
||||
import _pytest.config
|
||||
|
||||
# Type ignored because mypy doesn't support "dynamic"
|
||||
# inheritance like this.
|
||||
class PytestPdbWrapper(pdb_cls): # type: ignore[valid-type,misc]
|
||||
_pytest_capman = capman
|
||||
_continued = False
|
||||
|
||||
def do_debug(self, arg):
|
||||
cls._recursive_debug += 1
|
||||
ret = super().do_debug(arg)
|
||||
cls._recursive_debug -= 1
|
||||
return ret
|
||||
|
||||
def do_continue(self, arg):
|
||||
ret = super().do_continue(arg)
|
||||
if cls._recursive_debug == 0:
|
||||
assert cls._config is not None
|
||||
tw = _pytest.config.create_terminal_writer(cls._config)
|
||||
tw.line()
|
||||
|
||||
capman = self._pytest_capman
|
||||
capturing = pytestPDB._is_capturing(capman)
|
||||
if capturing:
|
||||
if capturing == "global":
|
||||
tw.sep(">", "PDB continue (IO-capturing resumed)")
|
||||
else:
|
||||
tw.sep(
|
||||
">",
|
||||
"PDB continue (IO-capturing resumed for %s)"
|
||||
% capturing,
|
||||
)
|
||||
assert capman is not None
|
||||
capman.resume()
|
||||
else:
|
||||
tw.sep(">", "PDB continue")
|
||||
assert cls._pluginmanager is not None
|
||||
cls._pluginmanager.hook.pytest_leave_pdb(config=cls._config, pdb=self)
|
||||
self._continued = True
|
||||
return ret
|
||||
|
||||
do_c = do_cont = do_continue
|
||||
|
||||
def do_quit(self, arg):
|
||||
"""Raise Exit outcome when quit command is used in pdb.
|
||||
|
||||
This is a bit of a hack; it would be better if BdbQuit
could be handled, but that would require wrapping the
whole pytest run and adjusting the report, etc.
|
||||
"""
|
||||
ret = super().do_quit(arg)
|
||||
|
||||
if cls._recursive_debug == 0:
|
||||
outcomes.exit("Quitting debugger")
|
||||
|
||||
return ret
|
||||
|
||||
do_q = do_quit
|
||||
do_exit = do_quit
|
||||
|
||||
def setup(self, f, tb):
|
||||
"""Suspend on setup().
|
||||
|
||||
Needed after do_continue has resumed, when another
breakpoint is entered again.
|
||||
"""
|
||||
ret = super().setup(f, tb)
|
||||
if not ret and self._continued:
|
||||
# pdb.setup() returns True if the command wants to exit
|
||||
# from the interaction: do not suspend capturing then.
|
||||
if self._pytest_capman:
|
||||
self._pytest_capman.suspend_global_capture(in_=True)
|
||||
return ret
|
||||
|
||||
def get_stack(self, f, t):
|
||||
stack, i = super().get_stack(f, t)
|
||||
if f is None:
|
||||
# Find last non-hidden frame.
|
||||
i = max(0, len(stack) - 1)
|
||||
while i and stack[i][0].f_locals.get("__tracebackhide__", False):
|
||||
i -= 1
|
||||
return stack, i
|
||||
|
||||
return PytestPdbWrapper
|
||||
|
||||
@classmethod
|
||||
def _init_pdb(cls, method, *args, **kwargs):
|
||||
"""Initialize PDB debugging, dropping any IO capturing."""
|
||||
import _pytest.config
|
||||
|
||||
if cls._pluginmanager is None:
|
||||
capman: Optional[CaptureManager] = None
|
||||
else:
|
||||
capman = cls._pluginmanager.getplugin("capturemanager")
|
||||
if capman:
|
||||
capman.suspend(in_=True)
|
||||
|
||||
if cls._config:
|
||||
tw = _pytest.config.create_terminal_writer(cls._config)
|
||||
tw.line()
|
||||
|
||||
if cls._recursive_debug == 0:
|
||||
# Handle header similar to pdb.set_trace in py37+.
|
||||
header = kwargs.pop("header", None)
|
||||
if header is not None:
|
||||
tw.sep(">", header)
|
||||
else:
|
||||
capturing = cls._is_capturing(capman)
|
||||
if capturing == "global":
|
||||
tw.sep(">", f"PDB {method} (IO-capturing turned off)")
|
||||
elif capturing:
|
||||
tw.sep(
|
||||
">",
|
||||
"PDB %s (IO-capturing turned off for %s)"
|
||||
% (method, capturing),
|
||||
)
|
||||
else:
|
||||
tw.sep(">", f"PDB {method}")
|
||||
|
||||
_pdb = cls._import_pdb_cls(capman)(**kwargs)
|
||||
|
||||
if cls._pluginmanager:
|
||||
cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config, pdb=_pdb)
|
||||
return _pdb
|
||||
|
||||
@classmethod
|
||||
def set_trace(cls, *args, **kwargs) -> None:
|
||||
"""Invoke debugging via ``Pdb.set_trace``, dropping any IO capturing."""
|
||||
frame = sys._getframe().f_back
|
||||
_pdb = cls._init_pdb("set_trace", *args, **kwargs)
|
||||
_pdb.set_trace(frame)
|
||||
|
||||
|
||||
class PdbInvoke:
|
||||
def pytest_exception_interact(
|
||||
self, node: Node, call: "CallInfo[Any]", report: BaseReport
|
||||
) -> None:
|
||||
capman = node.config.pluginmanager.getplugin("capturemanager")
|
||||
if capman:
|
||||
capman.suspend_global_capture(in_=True)
|
||||
out, err = capman.read_global_capture()
|
||||
sys.stdout.write(out)
|
||||
sys.stdout.write(err)
|
||||
assert call.excinfo is not None
|
||||
|
||||
if not isinstance(call.excinfo.value, unittest.SkipTest):
|
||||
_enter_pdb(node, call.excinfo, report)
|
||||
|
||||
def pytest_internalerror(self, excinfo: ExceptionInfo[BaseException]) -> None:
|
||||
tb = _postmortem_traceback(excinfo)
|
||||
post_mortem(tb)
|
||||
|
||||
|
||||
class PdbTrace:
|
||||
@hookimpl(hookwrapper=True)
|
||||
def pytest_pyfunc_call(self, pyfuncitem) -> Generator[None, None, None]:
|
||||
wrap_pytest_function_for_tracing(pyfuncitem)
|
||||
yield
|
||||
|
||||
|
||||
def wrap_pytest_function_for_tracing(pyfuncitem):
|
||||
"""Change the Python function object of the given Function item by a
|
||||
wrapper that enters pdb before calling the Python function
itself, effectively leaving the user at the pdb prompt on the first
statement of the function."""
|
||||
_pdb = pytestPDB._init_pdb("runcall")
|
||||
testfunction = pyfuncitem.obj
|
||||
|
||||
# we can't just return `partial(pdb.runcall, testfunction)` because (on
|
||||
# python < 3.7.4) runcall's first param is `func`, which means we'd get
|
||||
# an exception if one of the kwargs to testfunction was called `func`.
|
||||
@functools.wraps(testfunction)
|
||||
def wrapper(*args, **kwargs):
|
||||
func = functools.partial(testfunction, *args, **kwargs)
|
||||
_pdb.runcall(func)
|
||||
|
||||
pyfuncitem.obj = wrapper
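# Illustrative sketch (not part of this module): why the code above avoids
# partial(pdb.runcall, testfunction) -- a test kwarg named "func" would collide
# with runcall's own first parameter. All names below are made up.
import functools


def _runcall(func, *args, **kwargs):  # stand-in for pdb.Pdb.runcall
    return func(*args, **kwargs)


def _testfunction(*, func):  # a test whose keyword argument happens to be "func"
    return func


try:
    functools.partial(_runcall, _testfunction)(func=1)
except TypeError as exc:
    print(exc)  # got multiple values for argument 'func'

# The wrapper approach binds the kwargs onto the test function first:
print(_runcall(functools.partial(_testfunction, func=1)))  # 1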
|
||||
|
||||
|
||||
def maybe_wrap_pytest_function_for_tracing(pyfuncitem):
"""Wrap the given pytest Function item for tracing support if --trace was
given on the command line."""
|
||||
if pyfuncitem.config.getvalue("trace"):
|
||||
wrap_pytest_function_for_tracing(pyfuncitem)
|
||||
|
||||
|
||||
def _enter_pdb(
|
||||
node: Node, excinfo: ExceptionInfo[BaseException], rep: BaseReport
|
||||
) -> BaseReport:
|
||||
# XXX we re-use the TerminalReporter's terminalwriter
|
||||
# because this seems to avoid some encoding-related troubles,
# for reasons that are not completely clear.
|
||||
tw = node.config.pluginmanager.getplugin("terminalreporter")._tw
|
||||
tw.line()
|
||||
|
||||
showcapture = node.config.option.showcapture
|
||||
|
||||
for sectionname, content in (
|
||||
("stdout", rep.capstdout),
|
||||
("stderr", rep.capstderr),
|
||||
("log", rep.caplog),
|
||||
):
|
||||
if showcapture in (sectionname, "all") and content:
|
||||
tw.sep(">", "captured " + sectionname)
|
||||
if content[-1:] == "\n":
|
||||
content = content[:-1]
|
||||
tw.line(content)
|
||||
|
||||
tw.sep(">", "traceback")
|
||||
rep.toterminal(tw)
|
||||
tw.sep(">", "entering PDB")
|
||||
tb = _postmortem_traceback(excinfo)
|
||||
rep._pdbshown = True # type: ignore[attr-defined]
|
||||
post_mortem(tb)
|
||||
return rep
|
||||
|
||||
|
||||
def _postmortem_traceback(excinfo: ExceptionInfo[BaseException]) -> types.TracebackType:
|
||||
from doctest import UnexpectedException
|
||||
|
||||
if isinstance(excinfo.value, UnexpectedException):
|
||||
# A doctest.UnexpectedException is not useful for post_mortem.
|
||||
# Use the underlying exception instead:
|
||||
return excinfo.value.exc_info[2]
|
||||
elif isinstance(excinfo.value, ConftestImportFailure):
|
||||
# A config.ConftestImportFailure is not useful for post_mortem.
|
||||
# Use the underlying exception instead:
|
||||
return excinfo.value.excinfo[2]
|
||||
else:
|
||||
assert excinfo._excinfo is not None
|
||||
return excinfo._excinfo[2]
|
||||
|
||||
|
||||
def post_mortem(t: types.TracebackType) -> None:
|
||||
p = pytestPDB._init_pdb("post_mortem")
|
||||
p.reset()
|
||||
p.interaction(None, t)
|
||||
if p.quitting:
|
||||
outcomes.exit("Quitting debugger")
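# Illustrative sketch (not part of this module): how a --pdbcls value is split
# and resolved to a class, mirroring _validate_usepdb_cls() and
# _import_pdb_cls() above. "pdb:Pdb" is used so the sketch stays stdlib-only;
# --pdbcls=IPython.terminal.debugger:TerminalPdb works the same way when
# IPython is installed.
import importlib

_modname, _classname = "pdb:Pdb".split(":")
_obj = importlib.import_module(_modname)
for _part in _classname.split("."):
    _obj = getattr(_obj, _part)
print(_obj)  # <class 'pdb.Pdb'>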
|
||||
146
venv/lib/python3.12/site-packages/_pytest/deprecated.py
Normal file
@@ -0,0 +1,146 @@
|
||||
"""Deprecation messages and bits of code used elsewhere in the codebase that
|
||||
are planned to be removed in the next pytest release.
|
||||
|
||||
Keeping it in a central location makes it easy to track what is deprecated and should
|
||||
be removed when the time comes.
|
||||
|
||||
All constants defined in this module should be either instances of
|
||||
:class:`PytestWarning`, or :class:`UnformattedWarning`
|
||||
in case of warnings which need to format their messages.
|
||||
"""
|
||||
from warnings import warn
|
||||
|
||||
from _pytest.warning_types import PytestDeprecationWarning
|
||||
from _pytest.warning_types import PytestRemovedIn8Warning
|
||||
from _pytest.warning_types import UnformattedWarning
|
||||
|
||||
# set of plugins which have been integrated into the core; we use this list to ignore
|
||||
# them during registration to avoid conflicts
|
||||
DEPRECATED_EXTERNAL_PLUGINS = {
|
||||
"pytest_catchlog",
|
||||
"pytest_capturelog",
|
||||
"pytest_faulthandler",
|
||||
}
|
||||
|
||||
NOSE_SUPPORT = UnformattedWarning(
|
||||
PytestRemovedIn8Warning,
|
||||
"Support for nose tests is deprecated and will be removed in a future release.\n"
|
||||
"{nodeid} is using nose method: `{method}` ({stage})\n"
|
||||
"See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose",
|
||||
)
|
||||
|
||||
NOSE_SUPPORT_METHOD = UnformattedWarning(
|
||||
PytestRemovedIn8Warning,
|
||||
"Support for nose tests is deprecated and will be removed in a future release.\n"
|
||||
"{nodeid} is using nose-specific method: `{method}(self)`\n"
|
||||
"To remove this warning, rename it to `{method}_method(self)`\n"
|
||||
"See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose",
|
||||
)
|
||||
|
||||
|
||||
# This can be* removed in pytest 8, but it's harmless and common, so no rush to remove.
|
||||
# * If you're in the future: "could have been".
|
||||
YIELD_FIXTURE = PytestDeprecationWarning(
|
||||
"@pytest.yield_fixture is deprecated.\n"
|
||||
"Use @pytest.fixture instead; they are the same."
|
||||
)
|
||||
|
||||
WARNING_CMDLINE_PREPARSE_HOOK = PytestRemovedIn8Warning(
|
||||
"The pytest_cmdline_preparse hook is deprecated and will be removed in a future release. \n"
|
||||
"Please use pytest_load_initial_conftests hook instead."
|
||||
)
|
||||
|
||||
FSCOLLECTOR_GETHOOKPROXY_ISINITPATH = PytestRemovedIn8Warning(
|
||||
"The gethookproxy() and isinitpath() methods of FSCollector and Package are deprecated; "
|
||||
"use self.session.gethookproxy() and self.session.isinitpath() instead. "
|
||||
)
|
||||
|
||||
STRICT_OPTION = PytestRemovedIn8Warning(
|
||||
"The --strict option is deprecated, use --strict-markers instead."
|
||||
)
|
||||
|
||||
# This deprecation is never really meant to be removed.
|
||||
PRIVATE = PytestDeprecationWarning("A private pytest class or function was used.")
|
||||
|
||||
ARGUMENT_PERCENT_DEFAULT = PytestRemovedIn8Warning(
|
||||
'pytest now uses argparse. "%default" should be changed to "%(default)s"',
|
||||
)
|
||||
|
||||
ARGUMENT_TYPE_STR_CHOICE = UnformattedWarning(
|
||||
PytestRemovedIn8Warning,
|
||||
"`type` argument to addoption() is the string {typ!r}."
|
||||
" For choices this is optional and can be omitted, "
|
||||
" but when supplied should be a type (for example `str` or `int`)."
|
||||
" (options: {names})",
|
||||
)
|
||||
|
||||
ARGUMENT_TYPE_STR = UnformattedWarning(
|
||||
PytestRemovedIn8Warning,
|
||||
"`type` argument to addoption() is the string {typ!r}, "
|
||||
" but when supplied should be a type (for example `str` or `int`)."
|
||||
" (options: {names})",
|
||||
)
|
||||
|
||||
|
||||
HOOK_LEGACY_PATH_ARG = UnformattedWarning(
|
||||
PytestRemovedIn8Warning,
|
||||
"The ({pylib_path_arg}: py.path.local) argument is deprecated, please use ({pathlib_path_arg}: pathlib.Path)\n"
|
||||
"see https://docs.pytest.org/en/latest/deprecations.html"
|
||||
"#py-path-local-arguments-for-hooks-replaced-with-pathlib-path",
|
||||
)
|
||||
|
||||
NODE_CTOR_FSPATH_ARG = UnformattedWarning(
|
||||
PytestRemovedIn8Warning,
|
||||
"The (fspath: py.path.local) argument to {node_type_name} is deprecated. "
|
||||
"Please use the (path: pathlib.Path) argument instead.\n"
|
||||
"See https://docs.pytest.org/en/latest/deprecations.html"
|
||||
"#fspath-argument-for-node-constructors-replaced-with-pathlib-path",
|
||||
)
|
||||
|
||||
WARNS_NONE_ARG = PytestRemovedIn8Warning(
|
||||
"Passing None has been deprecated.\n"
|
||||
"See https://docs.pytest.org/en/latest/how-to/capture-warnings.html"
|
||||
"#additional-use-cases-of-warnings-in-tests"
|
||||
" for alternatives in common use cases."
|
||||
)
|
||||
|
||||
KEYWORD_MSG_ARG = UnformattedWarning(
|
||||
PytestRemovedIn8Warning,
|
||||
"pytest.{func}(msg=...) is now deprecated, use pytest.{func}(reason=...) instead",
|
||||
)
|
||||
|
||||
INSTANCE_COLLECTOR = PytestRemovedIn8Warning(
|
||||
"The pytest.Instance collector type is deprecated and is no longer used. "
|
||||
"See https://docs.pytest.org/en/latest/deprecations.html#the-pytest-instance-collector",
|
||||
)
|
||||
HOOK_LEGACY_MARKING = UnformattedWarning(
|
||||
PytestDeprecationWarning,
|
||||
"The hook{type} {fullname} uses old-style configuration options (marks or attributes).\n"
|
||||
"Please use the pytest.hook{type}({hook_opts}) decorator instead\n"
|
||||
" to configure the hooks.\n"
|
||||
" See https://docs.pytest.org/en/latest/deprecations.html"
|
||||
"#configuring-hook-specs-impls-using-markers",
|
||||
)
|
||||
|
||||
# You want to make some `__init__` or function "private".
|
||||
#
|
||||
# def my_private_function(some, args):
|
||||
# ...
|
||||
#
|
||||
# Do this:
|
||||
#
|
||||
# def my_private_function(some, args, *, _ispytest: bool = False):
|
||||
# check_ispytest(_ispytest)
|
||||
# ...
|
||||
#
|
||||
# Change all internal/allowed calls to
|
||||
#
|
||||
# my_private_function(some, args, _ispytest=True)
|
||||
#
|
||||
# All other calls will get the default _ispytest=False and trigger
|
||||
# the warning (possibly error in the future).
|
||||
|
||||
|
||||
def check_ispytest(ispytest: bool) -> None:
|
||||
if not ispytest:
|
||||
warn(PRIVATE, stacklevel=3)
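# Illustrative sketch (not part of this module): the _ispytest guard pattern
# described in the comment above, seen from the caller's side. _make_thing is
# a made-up helper; check_ispytest is the function defined in this module.
import warnings

from _pytest.deprecated import check_ispytest


def _make_thing(value, *, _ispytest: bool = False):
    check_ispytest(_ispytest)
    return value


_make_thing(1, _ispytest=True)  # internal call: no warning
with warnings.catch_warnings(record=True) as _caught:
    warnings.simplefilter("always")
    _make_thing(1)  # external call: triggers the PRIVATE deprecation warning
assert any("private" in str(w.message) for w in _caught)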
|
||||
771
venv/lib/python3.12/site-packages/_pytest/doctest.py
Normal file
@@ -0,0 +1,771 @@
|
||||
"""Discover and run doctests in modules and test files."""
|
||||
import bdb
|
||||
import functools
|
||||
import inspect
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
import traceback
|
||||
import types
|
||||
import warnings
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import Generator
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Pattern
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Union
|
||||
|
||||
from _pytest import outcomes
|
||||
from _pytest._code.code import ExceptionInfo
|
||||
from _pytest._code.code import ReprFileLocation
|
||||
from _pytest._code.code import TerminalRepr
|
||||
from _pytest._io import TerminalWriter
|
||||
from _pytest.compat import safe_getattr
|
||||
from _pytest.config import Config
|
||||
from _pytest.config.argparsing import Parser
|
||||
from _pytest.fixtures import fixture
|
||||
from _pytest.fixtures import FixtureRequest
|
||||
from _pytest.nodes import Collector
|
||||
from _pytest.nodes import Item
|
||||
from _pytest.outcomes import OutcomeException
|
||||
from _pytest.outcomes import skip
|
||||
from _pytest.pathlib import fnmatch_ex
|
||||
from _pytest.pathlib import import_path
|
||||
from _pytest.python import Module
|
||||
from _pytest.python_api import approx
|
||||
from _pytest.warning_types import PytestWarning
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import doctest
|
||||
|
||||
DOCTEST_REPORT_CHOICE_NONE = "none"
|
||||
DOCTEST_REPORT_CHOICE_CDIFF = "cdiff"
|
||||
DOCTEST_REPORT_CHOICE_NDIFF = "ndiff"
|
||||
DOCTEST_REPORT_CHOICE_UDIFF = "udiff"
|
||||
DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure"
|
||||
|
||||
DOCTEST_REPORT_CHOICES = (
|
||||
DOCTEST_REPORT_CHOICE_NONE,
|
||||
DOCTEST_REPORT_CHOICE_CDIFF,
|
||||
DOCTEST_REPORT_CHOICE_NDIFF,
|
||||
DOCTEST_REPORT_CHOICE_UDIFF,
|
||||
DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE,
|
||||
)
|
||||
|
||||
# Lazy definition of runner class
|
||||
RUNNER_CLASS = None
|
||||
# Lazy definition of output checker class
|
||||
CHECKER_CLASS: Optional[Type["doctest.OutputChecker"]] = None
|
||||
|
||||
|
||||
def pytest_addoption(parser: Parser) -> None:
|
||||
parser.addini(
|
||||
"doctest_optionflags",
|
||||
"Option flags for doctests",
|
||||
type="args",
|
||||
default=["ELLIPSIS"],
|
||||
)
|
||||
parser.addini(
|
||||
"doctest_encoding", "Encoding used for doctest files", default="utf-8"
|
||||
)
|
||||
group = parser.getgroup("collect")
|
||||
group.addoption(
|
||||
"--doctest-modules",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Run doctests in all .py modules",
|
||||
dest="doctestmodules",
|
||||
)
|
||||
group.addoption(
|
||||
"--doctest-report",
|
||||
type=str.lower,
|
||||
default="udiff",
|
||||
help="Choose another output format for diffs on doctest failure",
|
||||
choices=DOCTEST_REPORT_CHOICES,
|
||||
dest="doctestreport",
|
||||
)
|
||||
group.addoption(
|
||||
"--doctest-glob",
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="pat",
|
||||
help="Doctests file matching pattern, default: test*.txt",
|
||||
dest="doctestglob",
|
||||
)
|
||||
group.addoption(
|
||||
"--doctest-ignore-import-errors",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Ignore doctest ImportErrors",
|
||||
dest="doctest_ignore_import_errors",
|
||||
)
|
||||
group.addoption(
|
||||
"--doctest-continue-on-failure",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="For a given doctest, continue to run after the first failure",
|
||||
dest="doctest_continue_on_failure",
|
||||
)
|
||||
|
||||
|
||||
def pytest_unconfigure() -> None:
|
||||
global RUNNER_CLASS
|
||||
|
||||
RUNNER_CLASS = None
|
||||
|
||||
|
||||
def pytest_collect_file(
|
||||
file_path: Path,
|
||||
parent: Collector,
|
||||
) -> Optional[Union["DoctestModule", "DoctestTextfile"]]:
|
||||
config = parent.config
|
||||
if file_path.suffix == ".py":
|
||||
if config.option.doctestmodules and not any(
|
||||
(_is_setup_py(file_path), _is_main_py(file_path))
|
||||
):
|
||||
mod: DoctestModule = DoctestModule.from_parent(parent, path=file_path)
|
||||
return mod
|
||||
elif _is_doctest(config, file_path, parent):
|
||||
txt: DoctestTextfile = DoctestTextfile.from_parent(parent, path=file_path)
|
||||
return txt
|
||||
return None
|
||||
|
||||
|
||||
def _is_setup_py(path: Path) -> bool:
|
||||
if path.name != "setup.py":
|
||||
return False
|
||||
contents = path.read_bytes()
|
||||
return b"setuptools" in contents or b"distutils" in contents
|
||||
|
||||
|
||||
def _is_doctest(config: Config, path: Path, parent: Collector) -> bool:
|
||||
if path.suffix in (".txt", ".rst") and parent.session.isinitpath(path):
|
||||
return True
|
||||
globs = config.getoption("doctestglob") or ["test*.txt"]
|
||||
return any(fnmatch_ex(glob, path) for glob in globs)
|
||||
|
||||
|
||||
def _is_main_py(path: Path) -> bool:
|
||||
return path.name == "__main__.py"
|
||||
|
||||
|
||||
class ReprFailDoctest(TerminalRepr):
|
||||
def __init__(
|
||||
self, reprlocation_lines: Sequence[Tuple[ReprFileLocation, Sequence[str]]]
|
||||
) -> None:
|
||||
self.reprlocation_lines = reprlocation_lines
|
||||
|
||||
def toterminal(self, tw: TerminalWriter) -> None:
|
||||
for reprlocation, lines in self.reprlocation_lines:
|
||||
for line in lines:
|
||||
tw.line(line)
|
||||
reprlocation.toterminal(tw)
|
||||
|
||||
|
||||
class MultipleDoctestFailures(Exception):
|
||||
def __init__(self, failures: Sequence["doctest.DocTestFailure"]) -> None:
|
||||
super().__init__()
|
||||
self.failures = failures
|
||||
|
||||
|
||||
def _init_runner_class() -> Type["doctest.DocTestRunner"]:
|
||||
import doctest
|
||||
|
||||
class PytestDoctestRunner(doctest.DebugRunner):
|
||||
"""Runner to collect failures.
|
||||
|
||||
Note that the out variable in this case is a list instead of a
|
||||
stdout-like object.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
checker: Optional["doctest.OutputChecker"] = None,
|
||||
verbose: Optional[bool] = None,
|
||||
optionflags: int = 0,
|
||||
continue_on_failure: bool = True,
|
||||
) -> None:
|
||||
super().__init__(checker=checker, verbose=verbose, optionflags=optionflags)
|
||||
self.continue_on_failure = continue_on_failure
|
||||
|
||||
def report_failure(
|
||||
self,
|
||||
out,
|
||||
test: "doctest.DocTest",
|
||||
example: "doctest.Example",
|
||||
got: str,
|
||||
) -> None:
|
||||
failure = doctest.DocTestFailure(test, example, got)
|
||||
if self.continue_on_failure:
|
||||
out.append(failure)
|
||||
else:
|
||||
raise failure
|
||||
|
||||
def report_unexpected_exception(
|
||||
self,
|
||||
out,
|
||||
test: "doctest.DocTest",
|
||||
example: "doctest.Example",
|
||||
exc_info: Tuple[Type[BaseException], BaseException, types.TracebackType],
|
||||
) -> None:
|
||||
if isinstance(exc_info[1], OutcomeException):
|
||||
raise exc_info[1]
|
||||
if isinstance(exc_info[1], bdb.BdbQuit):
|
||||
outcomes.exit("Quitting debugger")
|
||||
failure = doctest.UnexpectedException(test, example, exc_info)
|
||||
if self.continue_on_failure:
|
||||
out.append(failure)
|
||||
else:
|
||||
raise failure
|
||||
|
||||
return PytestDoctestRunner
|
||||
|
||||
|
||||
def _get_runner(
|
||||
checker: Optional["doctest.OutputChecker"] = None,
|
||||
verbose: Optional[bool] = None,
|
||||
optionflags: int = 0,
|
||||
continue_on_failure: bool = True,
|
||||
) -> "doctest.DocTestRunner":
|
||||
# We need this in order to do a lazy import on doctest
|
||||
global RUNNER_CLASS
|
||||
if RUNNER_CLASS is None:
|
||||
RUNNER_CLASS = _init_runner_class()
|
||||
# Type ignored because the continue_on_failure argument is only defined on
|
||||
# PytestDoctestRunner, which is lazily defined so can't be used as a type.
|
||||
return RUNNER_CLASS( # type: ignore
|
||||
checker=checker,
|
||||
verbose=verbose,
|
||||
optionflags=optionflags,
|
||||
continue_on_failure=continue_on_failure,
|
||||
)
|
||||
|
||||
|
||||
class DoctestItem(Item):
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
parent: "Union[DoctestTextfile, DoctestModule]",
|
||||
runner: Optional["doctest.DocTestRunner"] = None,
|
||||
dtest: Optional["doctest.DocTest"] = None,
|
||||
) -> None:
|
||||
super().__init__(name, parent)
|
||||
self.runner = runner
|
||||
self.dtest = dtest
|
||||
self.obj = None
|
||||
self.fixture_request: Optional[FixtureRequest] = None
|
||||
|
||||
@classmethod
|
||||
def from_parent( # type: ignore
|
||||
cls,
|
||||
parent: "Union[DoctestTextfile, DoctestModule]",
|
||||
*,
|
||||
name: str,
|
||||
runner: "doctest.DocTestRunner",
|
||||
dtest: "doctest.DocTest",
|
||||
):
|
||||
# incompatible signature due to imposed limits on subclass
|
||||
"""The public named constructor."""
|
||||
return super().from_parent(name=name, parent=parent, runner=runner, dtest=dtest)
|
||||
|
||||
def setup(self) -> None:
|
||||
if self.dtest is not None:
|
||||
self.fixture_request = _setup_fixtures(self)
|
||||
globs = dict(getfixture=self.fixture_request.getfixturevalue)
|
||||
for name, value in self.fixture_request.getfixturevalue(
|
||||
"doctest_namespace"
|
||||
).items():
|
||||
globs[name] = value
|
||||
self.dtest.globs.update(globs)
|
||||
|
||||
def runtest(self) -> None:
|
||||
assert self.dtest is not None
|
||||
assert self.runner is not None
|
||||
_check_all_skipped(self.dtest)
|
||||
self._disable_output_capturing_for_darwin()
|
||||
failures: List["doctest.DocTestFailure"] = []
|
||||
# Type ignored because we change the type of `out` from what
|
||||
# doctest expects.
|
||||
self.runner.run(self.dtest, out=failures) # type: ignore[arg-type]
|
||||
if failures:
|
||||
raise MultipleDoctestFailures(failures)
|
||||
|
||||
def _disable_output_capturing_for_darwin(self) -> None:
|
||||
"""Disable output capturing. Otherwise, stdout is lost to doctest (#985)."""
|
||||
if platform.system() != "Darwin":
|
||||
return
|
||||
capman = self.config.pluginmanager.getplugin("capturemanager")
|
||||
if capman:
|
||||
capman.suspend_global_capture(in_=True)
|
||||
out, err = capman.read_global_capture()
|
||||
sys.stdout.write(out)
|
||||
sys.stderr.write(err)
|
||||
|
||||
# TODO: Type ignored -- breaks Liskov Substitution.
|
||||
def repr_failure( # type: ignore[override]
|
||||
self,
|
||||
excinfo: ExceptionInfo[BaseException],
|
||||
) -> Union[str, TerminalRepr]:
|
||||
import doctest
|
||||
|
||||
failures: Optional[
|
||||
Sequence[Union[doctest.DocTestFailure, doctest.UnexpectedException]]
|
||||
] = None
|
||||
if isinstance(
|
||||
excinfo.value, (doctest.DocTestFailure, doctest.UnexpectedException)
|
||||
):
|
||||
failures = [excinfo.value]
|
||||
elif isinstance(excinfo.value, MultipleDoctestFailures):
|
||||
failures = excinfo.value.failures
|
||||
|
||||
if failures is None:
|
||||
return super().repr_failure(excinfo)
|
||||
|
||||
reprlocation_lines = []
|
||||
for failure in failures:
|
||||
example = failure.example
|
||||
test = failure.test
|
||||
filename = test.filename
|
||||
if test.lineno is None:
|
||||
lineno = None
|
||||
else:
|
||||
lineno = test.lineno + example.lineno + 1
|
||||
message = type(failure).__name__
|
||||
# TODO: ReprFileLocation doesn't expect a None lineno.
|
||||
reprlocation = ReprFileLocation(filename, lineno, message) # type: ignore[arg-type]
|
||||
checker = _get_checker()
|
||||
report_choice = _get_report_choice(self.config.getoption("doctestreport"))
|
||||
if lineno is not None:
|
||||
assert failure.test.docstring is not None
|
||||
lines = failure.test.docstring.splitlines(False)
|
||||
# add line numbers to the left of the error message
|
||||
assert test.lineno is not None
|
||||
lines = [
|
||||
"%03d %s" % (i + test.lineno + 1, x) for (i, x) in enumerate(lines)
|
||||
]
|
||||
# trim docstring error lines to 10
|
||||
lines = lines[max(example.lineno - 9, 0) : example.lineno + 1]
|
||||
else:
|
||||
lines = [
|
||||
"EXAMPLE LOCATION UNKNOWN, not showing all tests of that example"
|
||||
]
|
||||
indent = ">>>"
|
||||
for line in example.source.splitlines():
|
||||
lines.append(f"??? {indent} {line}")
|
||||
indent = "..."
|
||||
if isinstance(failure, doctest.DocTestFailure):
|
||||
lines += checker.output_difference(
|
||||
example, failure.got, report_choice
|
||||
).split("\n")
|
||||
else:
|
||||
inner_excinfo = ExceptionInfo.from_exc_info(failure.exc_info)
|
||||
lines += ["UNEXPECTED EXCEPTION: %s" % repr(inner_excinfo.value)]
|
||||
lines += [
|
||||
x.strip("\n") for x in traceback.format_exception(*failure.exc_info)
|
||||
]
|
||||
reprlocation_lines.append((reprlocation, lines))
|
||||
return ReprFailDoctest(reprlocation_lines)
|
||||
|
||||
def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]:
|
||||
assert self.dtest is not None
|
||||
return self.path, self.dtest.lineno, "[doctest] %s" % self.name
|
||||
|
||||
|
||||
def _get_flag_lookup() -> Dict[str, int]:
|
||||
import doctest
|
||||
|
||||
return dict(
|
||||
DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1,
|
||||
DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE,
|
||||
NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE,
|
||||
ELLIPSIS=doctest.ELLIPSIS,
|
||||
IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL,
|
||||
COMPARISON_FLAGS=doctest.COMPARISON_FLAGS,
|
||||
ALLOW_UNICODE=_get_allow_unicode_flag(),
|
||||
ALLOW_BYTES=_get_allow_bytes_flag(),
|
||||
NUMBER=_get_number_flag(),
|
||||
)
|
||||
|
||||
|
||||
def get_optionflags(parent):
|
||||
optionflags_str = parent.config.getini("doctest_optionflags")
|
||||
flag_lookup_table = _get_flag_lookup()
|
||||
flag_acc = 0
|
||||
for flag in optionflags_str:
|
||||
flag_acc |= flag_lookup_table[flag]
|
||||
return flag_acc
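# Illustrative sketch (not part of this module): option flag names from the
# doctest_optionflags ini value are OR-ed into a single bitmask, as in
# get_optionflags() above.
import doctest

_flags = 0
for _name in ["ELLIPSIS", "NORMALIZE_WHITESPACE"]:
    _flags |= getattr(doctest, _name)
assert _flags == doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE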
|
||||
|
||||
|
||||
def _get_continue_on_failure(config):
|
||||
continue_on_failure = config.getvalue("doctest_continue_on_failure")
|
||||
if continue_on_failure:
|
||||
# We need to turn off this if we use pdb since we should stop at
|
||||
# the first failure.
|
||||
if config.getvalue("usepdb"):
|
||||
continue_on_failure = False
|
||||
return continue_on_failure
|
||||
|
||||
|
||||
class DoctestTextfile(Module):
|
||||
obj = None
|
||||
|
||||
def collect(self) -> Iterable[DoctestItem]:
|
||||
import doctest
|
||||
|
||||
# Inspired by doctest.testfile; ideally we would use it directly,
|
||||
# but it doesn't support passing a custom checker.
|
||||
encoding = self.config.getini("doctest_encoding")
|
||||
text = self.path.read_text(encoding)
|
||||
filename = str(self.path)
|
||||
name = self.path.name
|
||||
globs = {"__name__": "__main__"}
|
||||
|
||||
optionflags = get_optionflags(self)
|
||||
|
||||
runner = _get_runner(
|
||||
verbose=False,
|
||||
optionflags=optionflags,
|
||||
checker=_get_checker(),
|
||||
continue_on_failure=_get_continue_on_failure(self.config),
|
||||
)
|
||||
|
||||
parser = doctest.DocTestParser()
|
||||
test = parser.get_doctest(text, globs, name, filename, 0)
|
||||
if test.examples:
|
||||
yield DoctestItem.from_parent(
|
||||
self, name=test.name, runner=runner, dtest=test
|
||||
)
|
||||
|
||||
|
||||
def _check_all_skipped(test: "doctest.DocTest") -> None:
|
||||
"""Raise pytest.skip() if all examples in the given DocTest have the SKIP
|
||||
option set."""
|
||||
import doctest
|
||||
|
||||
all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples)
|
||||
if all_skipped:
|
||||
skip("all tests skipped by +SKIP option")
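# Illustrative sketch (not part of this module): detecting examples marked with
# the doctest SKIP option, mirroring _check_all_skipped() above.
import doctest

_text = """
>>> 1 + 1  # doctest: +SKIP
3
"""
_test = doctest.DocTestParser().get_doctest(_text, {}, "sample", "<sketch>", 0)
print(all(ex.options.get(doctest.SKIP, False) for ex in _test.examples))  # True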
|
||||
|
||||
|
||||
def _is_mocked(obj: object) -> bool:
"""Return whether an object is possibly a mock object by checking for the
existence of a highly improbable attribute."""
|
||||
return (
|
||||
safe_getattr(obj, "pytest_mock_example_attribute_that_shouldnt_exist", None)
|
||||
is not None
|
||||
)
|
||||


@contextmanager
def _patch_unwrap_mock_aware() -> Generator[None, None, None]:
    """Context manager which replaces ``inspect.unwrap`` with a version
    that's aware of mock objects and doesn't recurse into them."""
    real_unwrap = inspect.unwrap

    def _mock_aware_unwrap(
        func: Callable[..., Any], *, stop: Optional[Callable[[Any], Any]] = None
    ) -> Any:
        try:
            if stop is None or stop is _is_mocked:
                return real_unwrap(func, stop=_is_mocked)
            _stop = stop
            return real_unwrap(func, stop=lambda obj: _is_mocked(obj) or _stop(func))
        except Exception as e:
            warnings.warn(
                "Got %r when unwrapping %r. This is usually caused "
                "by a violation of Python's object protocol; see e.g. "
                "https://github.com/pytest-dev/pytest/issues/5080" % (e, func),
                PytestWarning,
            )
            raise

    inspect.unwrap = _mock_aware_unwrap
    try:
        yield
    finally:
        inspect.unwrap = real_unwrap


class DoctestModule(Module):
    def collect(self) -> Iterable[DoctestItem]:
        import doctest

        class MockAwareDocTestFinder(doctest.DocTestFinder):
            """A hackish doctest finder that overrides stdlib internals to fix a stdlib bug.

            https://github.com/pytest-dev/pytest/issues/3456
            https://bugs.python.org/issue25532
            """

            def _find_lineno(self, obj, source_lines):
                """Doctest code does not take into account `@property`, this
                is a hackish way to fix it. https://bugs.python.org/issue17446

                Wrapped Doctests will need to be unwrapped so the correct
                line number is returned. This will be reported upstream. #8796
                """
                if isinstance(obj, property):
                    obj = getattr(obj, "fget", obj)

                if hasattr(obj, "__wrapped__"):
                    # Get the main obj in case of it being wrapped
                    obj = inspect.unwrap(obj)

                # Type ignored because this is a private function.
                return super()._find_lineno(  # type:ignore[misc]
                    obj,
                    source_lines,
                )

            def _find(
                self, tests, obj, name, module, source_lines, globs, seen
            ) -> None:
                if _is_mocked(obj):
                    return
                with _patch_unwrap_mock_aware():
                    # Type ignored because this is a private function.
                    super()._find(  # type:ignore[misc]
                        tests, obj, name, module, source_lines, globs, seen
                    )

            if sys.version_info < (3, 13):

                def _from_module(self, module, object):
                    """`cached_property` objects are never considered a part
                    of the 'current module'. As such they are skipped by doctest.
                    Here we override `_from_module` to check the underlying
                    function instead. https://github.com/python/cpython/issues/107995
                    """
                    if hasattr(functools, "cached_property") and isinstance(
                        object, functools.cached_property
                    ):
                        object = object.func

                    # Type ignored because this is a private function.
                    return super()._from_module(module, object)  # type: ignore[misc]

            else:  # pragma: no cover
                pass

        if self.path.name == "conftest.py":
            module = self.config.pluginmanager._importconftest(
                self.path,
                self.config.getoption("importmode"),
                rootpath=self.config.rootpath,
            )
        else:
            try:
                module = import_path(
                    self.path,
                    root=self.config.rootpath,
                    mode=self.config.getoption("importmode"),
                )
            except ImportError:
                if self.config.getvalue("doctest_ignore_import_errors"):
                    skip("unable to import module %r" % self.path)
                else:
                    raise
        # Uses internal doctest module parsing mechanism.
        finder = MockAwareDocTestFinder()
        optionflags = get_optionflags(self)
        runner = _get_runner(
            verbose=False,
            optionflags=optionflags,
            checker=_get_checker(),
            continue_on_failure=_get_continue_on_failure(self.config),
        )

        for test in finder.find(module, module.__name__):
            if test.examples:  # skip empty doctests
                yield DoctestItem.from_parent(
                    self, name=test.name, runner=runner, dtest=test
                )


def _setup_fixtures(doctest_item: DoctestItem) -> FixtureRequest:
    """Used by DoctestTextfile and DoctestItem to setup fixture information."""

    def func() -> None:
        pass

    doctest_item.funcargs = {}  # type: ignore[attr-defined]
    fm = doctest_item.session._fixturemanager
    doctest_item._fixtureinfo = fm.getfixtureinfo(  # type: ignore[attr-defined]
        node=doctest_item, func=func, cls=None, funcargs=False
    )
    fixture_request = FixtureRequest(doctest_item, _ispytest=True)
    fixture_request._fillfixtures()
    return fixture_request


def _init_checker_class() -> Type["doctest.OutputChecker"]:
    import doctest
    import re

    class LiteralsOutputChecker(doctest.OutputChecker):
        # Based on doctest_nose_plugin.py from the nltk project
        # (https://github.com/nltk/nltk) and on the "numtest" doctest extension
        # by Sebastien Boisgerault (https://github.com/boisgera/numtest).

        _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
        _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
        _number_re = re.compile(
            r"""
            (?P<number>
              (?P<mantissa>
                (?P<integer1> [+-]?\d*)\.(?P<fraction>\d+)
                |
                (?P<integer2> [+-]?\d+)\.
              )
              (?:
                [Ee]
                (?P<exponent1> [+-]?\d+)
              )?
              |
              (?P<integer3> [+-]?\d+)
              (?:
                [Ee]
                (?P<exponent2> [+-]?\d+)
              )
            )
            """,
            re.VERBOSE,
        )

        def check_output(self, want: str, got: str, optionflags: int) -> bool:
            if super().check_output(want, got, optionflags):
                return True

            allow_unicode = optionflags & _get_allow_unicode_flag()
            allow_bytes = optionflags & _get_allow_bytes_flag()
            allow_number = optionflags & _get_number_flag()

            if not allow_unicode and not allow_bytes and not allow_number:
                return False

            def remove_prefixes(regex: Pattern[str], txt: str) -> str:
                return re.sub(regex, r"\1\2", txt)

            if allow_unicode:
                want = remove_prefixes(self._unicode_literal_re, want)
                got = remove_prefixes(self._unicode_literal_re, got)

            if allow_bytes:
                want = remove_prefixes(self._bytes_literal_re, want)
                got = remove_prefixes(self._bytes_literal_re, got)

            if allow_number:
                got = self._remove_unwanted_precision(want, got)

            return super().check_output(want, got, optionflags)

        def _remove_unwanted_precision(self, want: str, got: str) -> str:
            wants = list(self._number_re.finditer(want))
            gots = list(self._number_re.finditer(got))
            if len(wants) != len(gots):
                return got
            offset = 0
            for w, g in zip(wants, gots):
                fraction: Optional[str] = w.group("fraction")
                exponent: Optional[str] = w.group("exponent1")
                if exponent is None:
                    exponent = w.group("exponent2")
                precision = 0 if fraction is None else len(fraction)
                if exponent is not None:
                    precision -= int(exponent)
                if float(w.group()) == approx(float(g.group()), abs=10**-precision):
                    # They're close enough. Replace the text we actually
                    # got with the text we want, so that it will match when we
                    # check the string literally.
                    got = (
                        got[: g.start() + offset] + w.group() + got[g.end() + offset :]
                    )
                    offset += w.end() - w.start() - (g.end() - g.start())
            return got

    return LiteralsOutputChecker


def _get_checker() -> "doctest.OutputChecker":
    """Return a doctest.OutputChecker subclass that supports some
    additional options:

    * ALLOW_UNICODE and ALLOW_BYTES options to ignore u'' and b''
      prefixes (respectively) in string literals. Useful when the same
      doctest should run in Python 2 and Python 3.

    * NUMBER to ignore floating-point differences smaller than the
      precision of the literal number in the doctest.

    An inner class is used to avoid importing "doctest" at the module
    level.
    """
    global CHECKER_CLASS
    if CHECKER_CLASS is None:
        CHECKER_CLASS = _init_checker_class()
    return CHECKER_CLASS()
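
As a rough illustration of what the NUMBER flag tolerates: with a two-digit fraction in the expected output, `_remove_unwanted_precision` accepts any received value within 10**-2 of the literal and rewrites it so the literal comparison passes. A hypothetical module (not part of this file) collected with `--doctest-modules` would pass:

def approx_pi() -> float:
    """Return pi; the shortened literal matches under NUMBER.

    >>> approx_pi()  # doctest: +NUMBER
    3.14
    """
    import math

    return math.pi
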


def _get_allow_unicode_flag() -> int:
    """Register and return the ALLOW_UNICODE flag."""
    import doctest

    return doctest.register_optionflag("ALLOW_UNICODE")


def _get_allow_bytes_flag() -> int:
    """Register and return the ALLOW_BYTES flag."""
    import doctest

    return doctest.register_optionflag("ALLOW_BYTES")


def _get_number_flag() -> int:
    """Register and return the NUMBER flag."""
    import doctest

    return doctest.register_optionflag("NUMBER")


def _get_report_choice(key: str) -> int:
    """Return the actual `doctest` module flag value.

    We want to do it as late as possible to avoid importing `doctest` and all
    its dependencies when parsing options, as it adds overhead and breaks tests.
    """
    import doctest

    return {
        DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF,
        DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF,
        DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF,
        DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE,
        DOCTEST_REPORT_CHOICE_NONE: 0,
    }[key]


@fixture(scope="session")
def doctest_namespace() -> Dict[str, Any]:
    """Fixture that returns a :py:class:`dict` that will be injected into the
    namespace of doctests.

    Usually this fixture is used in conjunction with another ``autouse`` fixture:

    .. code-block:: python

        @pytest.fixture(autouse=True)
        def add_np(doctest_namespace):
            doctest_namespace["np"] = numpy

    For more details: :ref:`doctest_namespace`.
    """
    return dict()
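
With such an ``autouse`` fixture in a ``conftest.py``, a doctest can then refer to the injected name directly. A hypothetical module (assuming numpy is installed and injected as "np" as in the docstring above):

def show_arange():
    """
    >>> print(np.arange(3))
    [0 1 2]
    """
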
94
venv/lib/python3.12/site-packages/_pytest/faulthandler.py
Normal file
@@ -0,0 +1,94 @@
import os
import sys
from typing import Generator

import pytest
from _pytest.config import Config
from _pytest.config.argparsing import Parser
from _pytest.nodes import Item
from _pytest.stash import StashKey


fault_handler_stderr_fd_key = StashKey[int]()
fault_handler_originally_enabled_key = StashKey[bool]()


def pytest_addoption(parser: Parser) -> None:
    help = (
        "Dump the traceback of all threads if a test takes "
        "more than TIMEOUT seconds to finish"
    )
    parser.addini("faulthandler_timeout", help, default=0.0)
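
The timeout itself comes from the ini file. As a hypothetical example (the value is assumed, not taken from this repository), a project that wants all thread tracebacks dumped whenever a test hangs for more than 30 seconds would add:

[pytest]
faulthandler_timeout = 30

get_timeout_config_value() below then returns 30.0, and pytest_runtest_protocol arms the dump around each test.
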


def pytest_configure(config: Config) -> None:
    import faulthandler

    config.stash[fault_handler_stderr_fd_key] = os.dup(get_stderr_fileno())
    config.stash[fault_handler_originally_enabled_key] = faulthandler.is_enabled()
    faulthandler.enable(file=config.stash[fault_handler_stderr_fd_key])


def pytest_unconfigure(config: Config) -> None:
    import faulthandler

    faulthandler.disable()
    # Close the dup file installed during pytest_configure.
    if fault_handler_stderr_fd_key in config.stash:
        os.close(config.stash[fault_handler_stderr_fd_key])
        del config.stash[fault_handler_stderr_fd_key]
    if config.stash.get(fault_handler_originally_enabled_key, False):
        # Re-enable the faulthandler if it was originally enabled.
        faulthandler.enable(file=get_stderr_fileno())


def get_stderr_fileno() -> int:
    try:
        fileno = sys.stderr.fileno()
        # The Twisted Logger will return an invalid file descriptor since it is not backed
        # by an FD. So, let's also forward this to the same code path as with pytest-xdist.
        if fileno == -1:
            raise AttributeError()
        return fileno
    except (AttributeError, ValueError):
        # pytest-xdist monkeypatches sys.stderr with an object that is not an actual file.
        # https://docs.python.org/3/library/faulthandler.html#issue-with-file-descriptors
        # This is potentially dangerous, but the best we can do.
        return sys.__stderr__.fileno()


def get_timeout_config_value(config: Config) -> float:
    return float(config.getini("faulthandler_timeout") or 0.0)


@pytest.hookimpl(hookwrapper=True, trylast=True)
def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
    timeout = get_timeout_config_value(item.config)
    if timeout > 0:
        import faulthandler

        stderr = item.config.stash[fault_handler_stderr_fd_key]
        faulthandler.dump_traceback_later(timeout, file=stderr)
        try:
            yield
        finally:
            faulthandler.cancel_dump_traceback_later()
    else:
        yield
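
Stripped of the pytest plumbing, the arm/disarm pattern above is just the stdlib faulthandler API. A minimal standalone sketch (the timeout and the sleep are assumed placeholder values):

import faulthandler
import sys
import time

# Arm a traceback dump that fires only if the guarded block takes too long.
faulthandler.dump_traceback_later(30.0, file=sys.stderr)
try:
    time.sleep(0.1)  # stand-in for the test run
finally:
    # Disarm it again once the block finishes in time.
    faulthandler.cancel_dump_traceback_later()
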


@pytest.hookimpl(tryfirst=True)
def pytest_enter_pdb() -> None:
    """Cancel any traceback dumping due to timeout before entering pdb."""
    import faulthandler

    faulthandler.cancel_dump_traceback_later()


@pytest.hookimpl(tryfirst=True)
def pytest_exception_interact() -> None:
    """Cancel any traceback dumping due to an interactive exception being
    raised."""
    import faulthandler

    faulthandler.cancel_dump_traceback_later()
1713
venv/lib/python3.12/site-packages/_pytest/fixtures.py
Normal file
File diff suppressed because it is too large
44
venv/lib/python3.12/site-packages/_pytest/freeze_support.py
Normal file
@@ -0,0 +1,44 @@
"""Provides a function to report all internal modules for using freezing
tools."""
import types
from typing import Iterator
from typing import List
from typing import Union


def freeze_includes() -> List[str]:
    """Return a list of module names used by pytest that should be
    included by cx_freeze."""
    import _pytest

    result = list(_iter_all_modules(_pytest))
    return result
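
A hedged sketch of how this list is typically fed to cx_Freeze via its documented build_exe "includes" option; the script name and metadata below are placeholders, not taken from this repository:

# Hypothetical setup.py for a frozen application that bundles pytest.
from cx_Freeze import Executable, setup

import pytest

setup(
    name="app_main",
    version="0.1",
    # Tell cx_Freeze to include every internal pytest module explicitly,
    # since they are imported dynamically and would otherwise be missed.
    options={"build_exe": {"includes": pytest.freeze_includes()}},
    executables=[Executable("app_main.py")],
)
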


def _iter_all_modules(
    package: Union[str, types.ModuleType],
    prefix: str = "",
) -> Iterator[str]:
    """Iterate over the names of all modules that can be found in the given
    package, recursively.

    >>> import _pytest
    >>> list(_iter_all_modules(_pytest))
    ['_pytest._argcomplete', '_pytest._code.code', ...]
    """
    import os
    import pkgutil

    if isinstance(package, str):
        path = package
    else:
        # Type ignored because typeshed doesn't define ModuleType.__path__
        # (only defined on packages).
        package_path = package.__path__  # type: ignore[attr-defined]
        path, prefix = package_path[0], package.__name__ + "."
    for _, name, is_package in pkgutil.iter_modules([path]):
        if is_package:
            for m in _iter_all_modules(os.path.join(path, name), prefix=name + "."):
                yield prefix + m
        else:
            yield prefix + name
Some files were not shown because too many files have changed in this diff.