syllable app let's hope it works

This commit is contained in:
parent 0d5bb6c91f
commit a1f2164e6d

BIN  syllable_game/.DS_Store  (vendored, Normal file)
Binary file not shown.

247  syllable_game/christmasenv/bin/Activate.ps1  (Normal file)
@@ -0,0 +1,247 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.

.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.

.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.

.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').

.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.

.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.

.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.

.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.

.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:

PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser

For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170

#>
Param(
    [Parameter(Mandatory = $false)]
    [String]
    $VenvDir,
    [Parameter(Mandatory = $false)]
    [String]
    $Prompt
)

<# Function declarations --------------------------------------------------- #>

<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.

.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.

#>
function global:deactivate ([switch]$NonDestructive) {
    # Revert to original values

    # The prior prompt:
    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
    }

    # The prior PYTHONHOME:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
    }

    # The prior PATH:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
    }

    # Just remove the VIRTUAL_ENV altogether:
    if (Test-Path -Path Env:VIRTUAL_ENV) {
        Remove-Item -Path env:VIRTUAL_ENV
    }

    # Just remove VIRTUAL_ENV_PROMPT altogether.
    if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
        Remove-Item -Path env:VIRTUAL_ENV_PROMPT
    }

    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
    }

    # Leave deactivate function in the global namespace if requested:
    if (-not $NonDestructive) {
        Remove-Item -Path function:deactivate
    }
}

<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.

For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.

If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.

.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
    [String]
    $ConfigDir
) {
    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"

    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue

    # An empty map will be returned if no config file is found.
    $pyvenvConfig = @{ }

    if ($pyvenvConfigPath) {

        Write-Verbose "File exists, parse `key = value` lines"
        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath

        $pyvenvConfigContent | ForEach-Object {
            $keyval = $PSItem -split "\s*=\s*", 2
            if ($keyval[0] -and $keyval[1]) {
                $val = $keyval[1]

                # Remove extraneous quotations around a string value.
                if ("'""".Contains($val.Substring(0, 1))) {
                    $val = $val.Substring(1, $val.Length - 2)
                }

                $pyvenvConfig[$keyval[0]] = $val
                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
            }
        }
    }
    return $pyvenvConfig
}


<# Begin Activate script --------------------------------------------------- #>

# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath

Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"

# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
    Write-Verbose "VenvDir=$VenvDir"
}

# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir

# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
    Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
        Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
        $Prompt = $pyvenvCfg['prompt'];
    }
    else {
        Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
        Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
        $Prompt = Split-Path -Path $venvDir -Leaf
    }
}

Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"

# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive

# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir

if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {

    Write-Verbose "Setting prompt to '$Prompt'"

    # Set the prompt to include the env name
    # Make sure _OLD_VIRTUAL_PROMPT is global
    function global:_OLD_VIRTUAL_PROMPT { "" }
    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt

    function global:prompt {
        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
        _OLD_VIRTUAL_PROMPT
    }
    $env:VIRTUAL_ENV_PROMPT = $Prompt
}

# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
    Remove-Item -Path Env:PYTHONHOME
}

# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"

70  syllable_game/christmasenv/bin/activate  (Normal file)
@@ -0,0 +1,70 @@
# This file must be used with "source bin/activate" *from bash*
# You cannot run it directly

deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # Call hash to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    hash -r 2> /dev/null

    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    unset VIRTUAL_ENV_PROMPT
    if [ ! "${1:-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

# on Windows, a path can contain colons and backslashes and has to be converted:
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
    # transform D:\path\to\venv to /d/path/to/venv on MSYS
    # and to /cygdrive/d/path/to/venv on Cygwin
    export VIRTUAL_ENV=$(cygpath "/Users/dloefflad/Documents/iwm/iwm/christmas_party/syllable_game/christmasenv")
else
    # use the path as-is
    export VIRTUAL_ENV="/Users/dloefflad/Documents/iwm/iwm/christmas_party/syllable_game/christmasenv"
fi

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1:-}"
    PS1="(christmasenv) ${PS1:-}"
    export PS1
    VIRTUAL_ENV_PROMPT="(christmasenv) "
    export VIRTUAL_ENV_PROMPT
fi

# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null

27  syllable_game/christmasenv/bin/activate.csh  (Normal file)
@@ -0,0 +1,27 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.

# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/Users/dloefflad/Documents/iwm/iwm/christmas_party/syllable_game/christmasenv"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    set prompt = "(christmasenv) $prompt"
    setenv VIRTUAL_ENV_PROMPT "(christmasenv) "
endif

alias pydoc python -m pydoc

rehash

69  syllable_game/christmasenv/bin/activate.fish  (Normal file)
@@ -0,0 +1,69 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/). You cannot run it directly.

function deactivate -d "Exit virtual environment and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        set -e _OLD_FISH_PROMPT_OVERRIDE
        # prevents error when using nested fish instances (Issue #93858)
        if functions -q _old_fish_prompt
            functions -e fish_prompt
            functions -c _old_fish_prompt fish_prompt
            functions -e _old_fish_prompt
        end
    end

    set -e VIRTUAL_ENV
    set -e VIRTUAL_ENV_PROMPT
    if test "$argv[1]" != "nondestructive"
        # Self-destruct!
        functions -e deactivate
    end
end

# Unset irrelevant variables.
deactivate nondestructive

set -gx VIRTUAL_ENV "/Users/dloefflad/Documents/iwm/iwm/christmas_party/syllable_game/christmasenv"

set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH

# Unset PYTHONHOME if set.
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.

    # Save the current fish_prompt function as the function _old_fish_prompt.
    functions -c fish_prompt _old_fish_prompt

    # With the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Save the return status of the last command.
        set -l old_status $status

        # Output the venv prompt; color taken from the blue of the Python logo.
        printf "%s%s%s" (set_color 4B8BBE) "(christmasenv) " (set_color normal)

        # Restore the return status of the previous command.
        echo "exit $old_status" | .
        # Output the original/"old" prompt.
        _old_fish_prompt
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
    set -gx VIRTUAL_ENV_PROMPT "(christmasenv) "
end

8  syllable_game/christmasenv/bin/dotenv  (Executable file)
@@ -0,0 +1,8 @@
#!/Users/dloefflad/Documents/iwm/iwm/christmas_party/syllable_game/christmasenv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys
from dotenv.__main__ import cli
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(cli())

8  syllable_game/christmasenv/bin/flask  (Executable file)
@@ -0,0 +1,8 @@
#!/Users/dloefflad/Documents/iwm/iwm/christmas_party/syllable_game/christmasenv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8  syllable_game/christmasenv/bin/pip  (Executable file)
@@ -0,0 +1,8 @@
#!/Users/dloefflad/Documents/iwm/iwm/christmas_party/syllable_game/christmasenv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8  syllable_game/christmasenv/bin/pip3  (Executable file)
@@ -0,0 +1,8 @@
#!/Users/dloefflad/Documents/iwm/iwm/christmas_party/syllable_game/christmasenv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8  syllable_game/christmasenv/bin/pip3.12  (Executable file)
@@ -0,0 +1,8 @@
#!/Users/dloefflad/Documents/iwm/iwm/christmas_party/syllable_game/christmasenv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

1  syllable_game/christmasenv/bin/python  (Symbolic link)
@@ -0,0 +1 @@
python3.12

1  syllable_game/christmasenv/bin/python3  (Symbolic link)
@@ -0,0 +1 @@
python3.12

1  syllable_game/christmasenv/bin/python3.12  (Symbolic link)
@@ -0,0 +1 @@
/Library/Frameworks/Python.framework/Versions/3.12/bin/python3.12

@@ -0,0 +1 @@
pip

@@ -0,0 +1,19 @@
Copyright 2005-2024 SQLAlchemy authors and contributors <see AUTHORS file>.

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -0,0 +1,243 @@
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 2.0.36
Summary: Database Abstraction Library
Home-page: https://www.sqlalchemy.org
Author: Mike Bayer
Author-email: mike_mp@zzzcomputing.com
License: MIT
Project-URL: Documentation, https://docs.sqlalchemy.org
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: typing-extensions >=4.6.0
Requires-Dist: greenlet !=0.4.17 ; python_version < "3.13" and (platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32"))))))
Requires-Dist: importlib-metadata ; python_version < "3.8"
Provides-Extra: aiomysql
Requires-Dist: greenlet !=0.4.17 ; extra == 'aiomysql'
Requires-Dist: aiomysql >=0.2.0 ; extra == 'aiomysql'
Provides-Extra: aioodbc
Requires-Dist: greenlet !=0.4.17 ; extra == 'aioodbc'
Requires-Dist: aioodbc ; extra == 'aioodbc'
Provides-Extra: aiosqlite
Requires-Dist: greenlet !=0.4.17 ; extra == 'aiosqlite'
Requires-Dist: aiosqlite ; extra == 'aiosqlite'
Requires-Dist: typing-extensions !=3.10.0.1 ; extra == 'aiosqlite'
Provides-Extra: asyncio
Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncio'
Provides-Extra: asyncmy
Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncmy'
Requires-Dist: asyncmy !=0.2.4,!=0.2.6,>=0.2.3 ; extra == 'asyncmy'
Provides-Extra: mariadb_connector
Requires-Dist: mariadb !=1.1.10,!=1.1.2,!=1.1.5,>=1.0.1 ; extra == 'mariadb_connector'
Provides-Extra: mssql
Requires-Dist: pyodbc ; extra == 'mssql'
Provides-Extra: mssql_pymssql
Requires-Dist: pymssql ; extra == 'mssql_pymssql'
Provides-Extra: mssql_pyodbc
Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
Provides-Extra: mypy
Requires-Dist: mypy >=0.910 ; extra == 'mypy'
Provides-Extra: mysql
Requires-Dist: mysqlclient >=1.4.0 ; extra == 'mysql'
Provides-Extra: mysql_connector
Requires-Dist: mysql-connector-python ; extra == 'mysql_connector'
Provides-Extra: oracle
Requires-Dist: cx-oracle >=8 ; extra == 'oracle'
Provides-Extra: oracle_oracledb
Requires-Dist: oracledb >=1.0.1 ; extra == 'oracle_oracledb'
Provides-Extra: postgresql
Requires-Dist: psycopg2 >=2.7 ; extra == 'postgresql'
Provides-Extra: postgresql_asyncpg
Requires-Dist: greenlet !=0.4.17 ; extra == 'postgresql_asyncpg'
Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg'
Provides-Extra: postgresql_pg8000
Requires-Dist: pg8000 >=1.29.1 ; extra == 'postgresql_pg8000'
Provides-Extra: postgresql_psycopg
Requires-Dist: psycopg >=3.0.7 ; extra == 'postgresql_psycopg'
Provides-Extra: postgresql_psycopg2binary
Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
Provides-Extra: postgresql_psycopg2cffi
Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
Provides-Extra: postgresql_psycopgbinary
Requires-Dist: psycopg[binary] >=3.0.7 ; extra == 'postgresql_psycopgbinary'
Provides-Extra: pymysql
Requires-Dist: pymysql ; extra == 'pymysql'
Provides-Extra: sqlcipher
Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher'

SQLAlchemy
==========

|PyPI| |Python| |Downloads|

.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
    :target: https://pypi.org/project/sqlalchemy
    :alt: PyPI

.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
    :target: https://pypi.org/project/sqlalchemy
    :alt: PyPI - Python Version

.. |Downloads| image:: https://static.pepy.tech/badge/sqlalchemy/month
    :target: https://pepy.tech/project/sqlalchemy
    :alt: PyPI - Downloads


The Python SQL Toolkit and Object Relational Mapper

Introduction
-------------

SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
that gives application developers the full power and
flexibility of SQL. SQLAlchemy provides a full suite
of well known enterprise-level persistence patterns,
designed for efficient and high-performing database
access, adapted into a simple and Pythonic domain
language.

Major SQLAlchemy features include:

* An industrial strength ORM, built
  from the core on the identity map, unit of work,
  and data mapper patterns. These patterns
  allow transparent persistence of objects
  using a declarative configuration system.
  Domain models
  can be constructed and manipulated naturally,
  and changes are synchronized with the
  current transaction automatically.
* A relationally-oriented query system, exposing
  the full range of SQL's capabilities
  explicitly, including joins, subqueries,
  correlation, and most everything else,
  in terms of the object model.
  Writing queries with the ORM uses the same
  techniques of relational composition you use
  when writing SQL. While you can drop into
  literal SQL at any time, it's virtually never
  needed.
* A comprehensive and flexible system
  of eager loading for related collections and objects.
  Collections are cached within a session,
  and can be loaded on individual access, all
  at once using joins, or by query per collection
  across the full result set.
* A Core SQL construction system and DBAPI
  interaction layer. The SQLAlchemy Core is
  separate from the ORM and is a full database
  abstraction layer in its own right, and includes
  an extensible Python-based SQL expression
  language, schema metadata, connection pooling,
  type coercion, and custom types.
* All primary and foreign key constraints are
  assumed to be composite and natural. Surrogate
  integer primary keys are of course still the
  norm, but SQLAlchemy never assumes or hardcodes
  to this model.
* Database introspection and generation. Database
  schemas can be "reflected" in one step into
  Python structures representing database metadata;
  those same structures can then generate
  CREATE statements right back out - all within
  the Core, independent of the ORM.

SQLAlchemy's philosophy:

* SQL databases behave less and less like object
  collections the more size and performance start to
  matter; object collections behave less and less like
  tables and rows the more abstraction starts to matter.
  SQLAlchemy aims to accommodate both of these
  principles.
* An ORM doesn't need to hide the "R". A relational
  database provides rich, set-based functionality
  that should be fully exposed. SQLAlchemy's
  ORM provides an open-ended set of patterns
  that allow a developer to construct a custom
  mediation layer between a domain model and
  a relational schema, turning the so-called
  "object relational impedance" issue into
  a distant memory.
* The developer, in all cases, makes all decisions
  regarding the design, structure, and naming conventions
  of both the object model as well as the relational
  schema. SQLAlchemy only provides the means
  to automate the execution of these decisions.
* With SQLAlchemy, there's no such thing as
  "the ORM generated a bad query" - you
  retain full control over the structure of
  queries, including how joins are organized,
  how subqueries and correlation is used, what
  columns are requested. Everything SQLAlchemy
  does is ultimately the result of a developer-initiated
  decision.
* Don't use an ORM if the problem doesn't need one.
  SQLAlchemy consists of a Core and separate ORM
  component. The Core offers a full SQL expression
  language that allows Pythonic construction
  of SQL constructs that render directly to SQL
  strings for a target database, returning
  result sets that are essentially enhanced DBAPI
  cursors.
* Transactions should be the norm. With SQLAlchemy's
  ORM, nothing goes to permanent storage until
  commit() is called. SQLAlchemy encourages applications
  to create a consistent means of delineating
  the start and end of a series of operations.
* Never render a literal value in a SQL statement.
  Bound parameters are used to the greatest degree
  possible, allowing query optimizers to cache
  query plans effectively and making SQL injection
  attacks a non-issue.

Documentation
-------------

Latest documentation is at:

https://www.sqlalchemy.org/docs/

Installation / Requirements
---------------------------

Full documentation for installation is at
`Installation <https://www.sqlalchemy.org/docs/intro.html#installation>`_.

Getting Help / Development / Bug reporting
------------------------------------------

Please refer to the `SQLAlchemy Community Guide <https://www.sqlalchemy.org/support.html>`_.

Code of Conduct
---------------

Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
constructive communication between users and developers.
Please see our current Code of Conduct at
`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.

License
-------

SQLAlchemy is distributed under the `MIT license
<https://www.opensource.org/licenses/mit-license.php>`_.
@ -0,0 +1,530 @@
|
||||
SQLAlchemy-2.0.36.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
SQLAlchemy-2.0.36.dist-info/LICENSE,sha256=PA9Zq4h9BB3mpOUv_j6e212VIt6Qn66abNettue-MpM,1100
|
||||
SQLAlchemy-2.0.36.dist-info/METADATA,sha256=EZH514FydYtyOhgoZk_OF1ZQEtI4eTAEddlnUlRjzac,9692
|
||||
SQLAlchemy-2.0.36.dist-info/RECORD,,
|
||||
SQLAlchemy-2.0.36.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
SQLAlchemy-2.0.36.dist-info/WHEEL,sha256=lWey-nzGdePMz7fsIJ1fIMVKiFg5IkOcUVqkuIpdbws,109
|
||||
SQLAlchemy-2.0.36.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
|
||||
sqlalchemy/__init__.py,sha256=J2PsdiJiNW93Etxk6YN8o_C3TcpR1_DckU71r4LBcGE,13033
|
||||
sqlalchemy/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/events.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/exc.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/inspection.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/log.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/schema.cpython-312.pyc,,
|
||||
sqlalchemy/__pycache__/types.cpython-312.pyc,,
|
||||
sqlalchemy/connectors/__init__.py,sha256=PzXPqZqi3BzEnrs1eW0DcsR4lyknAzhhN9rWcQ97hb4,476
|
||||
sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc,,
|
||||
sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc,,
|
||||
sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc,,
|
||||
sqlalchemy/connectors/aioodbc.py,sha256=GSTiNMO9h0qjPxgqaxDwWZ8HvhWMFNVR6MJQnN1oc40,5288
|
||||
sqlalchemy/connectors/asyncio.py,sha256=Hq2bkXmG6-KO_RfCrwMqx4oGH-uH1Z1WWKqPWNjz8p4,6138
|
||||
sqlalchemy/connectors/pyodbc.py,sha256=t7AjyxIOnaWg3CrlUEpBs4Y5l0HFdNt3P_cSSKhbi0Y,8501
|
||||
sqlalchemy/cyextension/__init__.py,sha256=GzhhN8cjMnDTE0qerlUlpbrNmFPHQWCZ4Gk74OAxl04,244
|
||||
sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/cyextension/collections.cpython-312-darwin.so,sha256=nW7Y9IRJzL-VEINp4aianHULAZHiHcRBXdcP7xH0ewE,247360
|
||||
sqlalchemy/cyextension/collections.pyx,sha256=L7DZ3DGKpgw2MT2ZZRRxCnrcyE5pU1NAFowWgAzQPEc,12571
|
||||
sqlalchemy/cyextension/immutabledict.cpython-312-darwin.so,sha256=rCpXutY-DGPuNmzkvWSqoq2zQMfnsxajfeAsOvbkoVY,122336
|
||||
sqlalchemy/cyextension/immutabledict.pxd,sha256=3x3-rXG5eRQ7bBnktZ-OJ9-6ft8zToPmTDOd92iXpB0,291
|
||||
sqlalchemy/cyextension/immutabledict.pyx,sha256=KfDTYbTfebstE8xuqAtuXsHNAK0_b5q_ymUiinUe_xs,3535
|
||||
sqlalchemy/cyextension/processors.cpython-312-darwin.so,sha256=CqB4a4gnQkjXxQx_gWZT_PvAa7mn6NvgwltpD8drEZk,102928
|
||||
sqlalchemy/cyextension/processors.pyx,sha256=R1rHsGLEaGeBq5VeCydjClzYlivERIJ9B-XLOJlf2MQ,1792
|
||||
sqlalchemy/cyextension/resultproxy.cpython-312-darwin.so,sha256=_x5qDP5V7hf8nV6NtoJS1W89o_jEcxeFdiVchNoenP8,104864
|
||||
sqlalchemy/cyextension/resultproxy.pyx,sha256=eWLdyBXiBy_CLQrF5ScfWJm7X0NeelscSXedtj1zv9Q,2725
|
||||
sqlalchemy/cyextension/util.cpython-312-darwin.so,sha256=JXXrpXKMlgPRQ0Tv1eqZn1xdChzcAT0M32Fq1oJNZzI,122296
|
||||
sqlalchemy/cyextension/util.pyx,sha256=B85orxa9LddLuQEaDoVSq1XmAXIbLKxrxpvuB8ogV_o,2530
|
||||
sqlalchemy/dialects/__init__.py,sha256=Kos9Gf5JZg1Vg6GWaCqEbD6e0r1jCwCmcnJIfcxDdcY,1770
|
||||
sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/_typing.py,sha256=hyv0nKucX2gI8ispB1IsvaUgrEPn9zEcq9hS7kfstEw,888
|
||||
sqlalchemy/dialects/mssql/__init__.py,sha256=r5t8wFRNtBQoiUWh0WfIEWzXZW6f3D0uDt6NZTW_7Cc,1880
|
||||
sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mssql/aioodbc.py,sha256=UQd9ecSMIML713TDnLAviuBVJle7P7i1FtqGZZePk2Y,2022
|
||||
sqlalchemy/dialects/mssql/base.py,sha256=msl_N_a_z8ali7Nthx55AGoV7b5wakCWvWu560BvH9o,132423
|
||||
sqlalchemy/dialects/mssql/information_schema.py,sha256=HswjDc6y0mPXCf_x6VyylHlBdBa4PSY6Evxmmlch700,8084
|
||||
sqlalchemy/dialects/mssql/json.py,sha256=evUACW2O62TAPq8B7QIPagz7jfc664ql9ms68JqiYzg,4816
|
||||
sqlalchemy/dialects/mssql/provision.py,sha256=ZAtt6Div9NLIngMs8kyloxfphw0KDNMsnRCAVd7-esE,5593
|
||||
sqlalchemy/dialects/mssql/pymssql.py,sha256=LAv43q4vBCB85OsAwHQItaQUYTYIO0QJ-jvzaBrswmY,4097
|
||||
sqlalchemy/dialects/mssql/pyodbc.py,sha256=vwM-vBlmRwrqxOc73P0sFOrBSwn24wzc5IkEOpalbXQ,27056
|
||||
sqlalchemy/dialects/mysql/__init__.py,sha256=bxbi4hkysUK2OOVvr1F49akUj1cky27kKb07tgFzI9U,2153
|
||||
sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/mysql/aiomysql.py,sha256=-oMZnCqNsSki8mlQRTWIwiQPT1OVdZIuANkb90q8LAs,9999
|
||||
sqlalchemy/dialects/mysql/asyncmy.py,sha256=YpuuOh8VknEeqHqUXQGfQ3jhfO3Xb-vZv78Jq5cscJ0,10067
|
||||
sqlalchemy/dialects/mysql/base.py,sha256=giGlZNGrKsNMoSkbzY0PGgfamKjA9rOkSq1o5vKvno4,122755
|
||||
sqlalchemy/dialects/mysql/cymysql.py,sha256=eXT1ry0w_qRxjiO24M980c-8PZ9qSsbhqBHntjEiKB0,2300
|
||||
sqlalchemy/dialects/mysql/dml.py,sha256=HXJMAvimJsqvhj3UZO4vW_6LkF5RqaKbHvklAjor7yU,7645
|
||||
sqlalchemy/dialects/mysql/enumerated.py,sha256=ipEPPQqoXfFwcywNdcLlZCEzHBtnitHRah1Gn6nItcg,8448
|
||||
sqlalchemy/dialects/mysql/expression.py,sha256=lsmQCHKwfPezUnt27d2kR6ohk4IRFCA64KBS16kx5dc,4097
|
||||
sqlalchemy/dialects/mysql/json.py,sha256=l6MEZ0qp8FgiRrIQvOMhyEJq0q6OqiEnvDTx5Cbt9uQ,2269
|
||||
sqlalchemy/dialects/mysql/mariadb.py,sha256=kTfBLioLKk4JFFst4TY_iWqPtnvvQXFHknLfm89H2N8,853
|
||||
sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=_S1aV93kyP52Nvj7HR9weThML4oUvSLsLqiVFdoLR2o,8623
|
||||
sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=oq3mtsNOMldUjs32JbJG2u3Hy3DObyVzUUMYfOkwkHg,5729
|
||||
sqlalchemy/dialects/mysql/mysqldb.py,sha256=qUBbA6STeYGozutyTxHCo5p1W3p59QFFS2FwCgPrjBA,9503
|
||||
sqlalchemy/dialects/mysql/provision.py,sha256=Jnk8UO9_Apd2odR2IQFLrscCfAmYxuBKcB8giS3bBog,3575
|
||||
sqlalchemy/dialects/mysql/pymysql.py,sha256=GUnSHd2M2uKjmN46Hheymtm26g7phEgwYOXrX0zLY8M,4083
|
||||
sqlalchemy/dialects/mysql/pyodbc.py,sha256=072crI4qVyPhajYvHnsfFeSrNjLFVPIjBQKo5uyz5yk,4297
|
||||
sqlalchemy/dialects/mysql/reflection.py,sha256=3u34YwT1JJh3uThGZJZ3FKdnUcT7v08QB-tAl1r7VRk,22834
|
||||
sqlalchemy/dialects/mysql/reserved_words.py,sha256=ucKX2p2c3UnMq2ayZuOHuf73eXhu7SKsOsTlIN1Q83I,9258
|
||||
sqlalchemy/dialects/mysql/types.py,sha256=L5cTCsMT1pTedszNEM3jSxFNZEMcHQLprYCZ0vmfsnA,24343
|
||||
sqlalchemy/dialects/oracle/__init__.py,sha256=p4-2gw7TT0bX_MoJXTGD4i8WHctYsK9kCRbkpzykBrc,1493
|
||||
sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/oracle/base.py,sha256=zLMZedrr6j1LvJz4qYnoSjikI5RZY92YFeQHiZ_YvW0,119676
|
||||
sqlalchemy/dialects/oracle/cx_oracle.py,sha256=q8Nyj15UZCE2TWOmxuWp5ZsxiCiGMzqfd_9UkmjIja0,55235
|
||||
sqlalchemy/dialects/oracle/dictionary.py,sha256=7WMrbPkqo8ZdGjaEZyQr-5f2pajSOF1OTGb8P97z8-g,19519
|
||||
sqlalchemy/dialects/oracle/oracledb.py,sha256=fZRKGqNIwW9LG4i8yDOXABrucbfzn_yC86Od-BJ3PcM,13619
|
||||
sqlalchemy/dialects/oracle/provision.py,sha256=O9ZpF4OG6Cx4mMzLRfZwhs8dZjrJETWR402n9c7726A,8304
|
||||
sqlalchemy/dialects/oracle/types.py,sha256=QK3hJvWzKnnCe3oD3rItwEEIwcoBze8qGg7VFOvVlIk,8231
|
||||
sqlalchemy/dialects/postgresql/__init__.py,sha256=wwnNAq4wDQzrlPRzDNB06ayuq3L2HNO99nzeEvq-YcU,3892
|
||||
sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=7TudtgsPiSB8O5kX8W8KxcNYR8t5h_UHb86b_ChL0P8,5696
|
||||
sqlalchemy/dialects/postgresql/array.py,sha256=bWcame7ntmI_Kx6gmBX0-chwADFdLHeCvaDQ4iX8id8,13734
|
||||
sqlalchemy/dialects/postgresql/asyncpg.py,sha256=9P0Itn9eeSBu67kGSsHuzx8xd4YYwRKdiZ5m7bF5onU,41074
|
||||
sqlalchemy/dialects/postgresql/base.py,sha256=dGPsaV3Esw6-AwE3QcgHF0Fray3Yw5-gLLgCvgdxvS0,179083
|
||||
sqlalchemy/dialects/postgresql/dml.py,sha256=Pc69Le6qzmUHHb1FT5zeUSD31dWm6SBgdCAGW89cs3s,11212
|
||||
sqlalchemy/dialects/postgresql/ext.py,sha256=1bZ--iNh2O9ym7l2gXZX48yP3yMO4dqb9RpYro2Mj2Q,16262
|
||||
sqlalchemy/dialects/postgresql/hstore.py,sha256=otAx-RTDfpi_tcXkMuQV0JOIXtYgevgnsikLKKOkI6U,11541
|
||||
sqlalchemy/dialects/postgresql/json.py,sha256=53rQWon9cUXd1yCjIvUpJjWwNyRSy3U7Kz0HV70ftrc,11618
|
||||
sqlalchemy/dialects/postgresql/named_types.py,sha256=3IV1ufo7zJjKmX4VtGDEnoXE6xEqLJAtGG82IiqHXwY,17594
|
||||
sqlalchemy/dialects/postgresql/operators.py,sha256=NsAaWun_tL3d_be0fs9YL6T4LPKK6crnmFxxIJHgyeY,2808
|
||||
sqlalchemy/dialects/postgresql/pg8000.py,sha256=3yoekiWSF-xnaWMqG76XrYPMqerg-42TdmfsW_ivK9E,18640
|
||||
sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=hY3NXEUHxTWD4umhd2aowNu3laC-61Q_qQ_pReyXTUM,9254
|
||||
sqlalchemy/dialects/postgresql/provision.py,sha256=t6TZj0XaWG9zrpCjNr0oJRjAC_WQzaNdp3kaKJIbS8I,5770
|
||||
sqlalchemy/dialects/postgresql/psycopg.py,sha256=Uwf45f9fInOtaExiEdwiP9xzRo7hw0XyZTkRtgdom44,23168
|
||||
sqlalchemy/dialects/postgresql/psycopg2.py,sha256=kwEnflz5bAqJcuO_20eYiCtha_a4m_tg5_lppdDnaeU,31998
|
||||
sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=M7wAYSL6Pvt-4nbfacAHGyyw4XMKJ_bQZ1tc1pBtIdg,1756
|
||||
sqlalchemy/dialects/postgresql/ranges.py,sha256=6CgV7qkxEMJ9AQsiibo_XBLJYzGh-2ZxpG83sRaesVY,32949
|
||||
sqlalchemy/dialects/postgresql/types.py,sha256=Jfxqw9JaKNOq29JRWBublywgb3lLMyzx8YZI7CXpS2s,7300
|
||||
sqlalchemy/dialects/sqlite/__init__.py,sha256=lp9DIggNn349M-7IYhUA8et8--e8FRExWD2V_r1LJk4,1182
|
||||
sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc,,
|
||||
sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=g3qGV6jmiXabWyb3282g_Nmxtj1jThxGSe9C9yalb-U,12345
|
||||
sqlalchemy/dialects/sqlite/base.py,sha256=LcnW6hzxqTtPlDBOInHumvuDt8a31THA5Jnm4vFvdFI,97811
|
||||
sqlalchemy/dialects/sqlite/dml.py,sha256=9GE55WvwoktKy2fHeT-Wbc9xPHgsbh5oBfd_fckMH5Q,8443
|
||||
sqlalchemy/dialects/sqlite/json.py,sha256=Eoplbb_4dYlfrtmQaI8Xddd2suAIHA-IdbDQYM-LIhs,2777
|
||||
sqlalchemy/dialects/sqlite/provision.py,sha256=UCpmwxf4IWlrpb2eLHGbPTpCFVbdI_KAh2mKtjiLYao,5632
|
||||
sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=OL2S_05DK9kllZj6DOz7QtEl7jI7syxjW6woS725ii4,5356
|
||||
sqlalchemy/dialects/sqlite/pysqlite.py,sha256=aDp47n0J509kl2hDchoaBKXEQVZtkux54DwfKytUAe4,28068
|
||||
sqlalchemy/dialects/type_migration_guidelines.txt,sha256=-uHNdmYFGB7bzUNT6i8M5nb4j6j9YUKAtW4lcBZqsMg,8239
|
||||
sqlalchemy/engine/__init__.py,sha256=Stb2oV6l8w65JvqEo6J4qtKoApcmOpXy3AAxQud4C1o,2818
|
||||
sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/create.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/default.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/events.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/mock.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/processors.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/result.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/row.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/url.cpython-312.pyc,,
|
||||
sqlalchemy/engine/__pycache__/util.cpython-312.pyc,,
|
||||
sqlalchemy/engine/_py_processors.py,sha256=j9i_lcYYQOYJMcsDerPxI0sVFBIlX5sqoYMdMJlgWPI,3744
|
||||
sqlalchemy/engine/_py_row.py,sha256=wSqoUFzLOJ1f89kgDb6sJm9LUrF5LMFpXPcK1vUsKcs,3787
|
||||
sqlalchemy/engine/_py_util.py,sha256=f2DI3AN1kv6EplelowesCVpwS8hSXNufRkZoQmJtSH8,2484
|
||||
sqlalchemy/engine/base.py,sha256=frWSMmt3dlentYH4QNN3cijdGzp8NbunColUZwWsWgI,122958
|
||||
sqlalchemy/engine/characteristics.py,sha256=N3kbvw_ApMh86wb5yAGnxtPYD4YRhYMWion1H_aVZBI,4765
|
||||
sqlalchemy/engine/create.py,sha256=mYJtOG2ZKM8sgyfjpGpamW15RDU7JXi5s6iibbJHMIs,33206
|
||||
sqlalchemy/engine/cursor.py,sha256=cFq61yrw76k-QR_xNUBWuL-Zeyb14ltG-6jo2Q2iuuw,76392
|
||||
sqlalchemy/engine/default.py,sha256=2wwKKdsagb3QTajRSEw8Hl-EnQ-LmRxy822xOGyenHc,84648
|
||||
sqlalchemy/engine/events.py,sha256=c0unNFFiHzTAvkUtXoJaxzMFMDwurBkHiiUhuN8qluc,37381
|
||||
sqlalchemy/engine/interfaces.py,sha256=fcVHOmnMo7JZLHzgSKoK3QsdVHH7kJ_AmrDvwW9Ka3k,112936
|
||||
sqlalchemy/engine/mock.py,sha256=yvpxgFmRw5G4QsHeF-ZwQGHKES-HqQOucTxFtN1uzdk,4179
|
||||
sqlalchemy/engine/processors.py,sha256=XyfINKbo-2fjN-mW55YybvFyQMOil50_kVqsunahkNs,2379
|
||||
sqlalchemy/engine/reflection.py,sha256=gwGs8y7x6py5z-ZWx3hQqQrwpHepMCTJyQcFwWJjPlw,75364
|
||||
sqlalchemy/engine/result.py,sha256=NZEskTMAcDzK-vjE96Fw8VvBL58s5Y6rt9vXcmZdM4w,77651
|
||||
sqlalchemy/engine/row.py,sha256=9AAQo9zYDL88GcZ3bjcQTwMT-YIcuGTSMAyTfmBJ_yM,12032
|
||||
sqlalchemy/engine/strategies.py,sha256=DqFSWaXJPL-29Omot9O0aOcuGL8KmCGyOvnPGDkAJoE,442
|
||||
sqlalchemy/engine/url.py,sha256=8eWkUaIUyDExOcJ2D4xJXRcn4OY1GQJ3Q2duSX6UGAg,30784
|
||||
sqlalchemy/engine/util.py,sha256=bNirO8k1S8yOW61uNH-a9QrWtAJ9VGFgbiR0lk1lUQU,5682
|
||||
sqlalchemy/event/__init__.py,sha256=KBrp622xojnC3FFquxa2JsMamwAbfkvzfv6Op0NKiYc,997
|
||||
sqlalchemy/event/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/event/__pycache__/api.cpython-312.pyc,,
|
||||
sqlalchemy/event/__pycache__/attr.cpython-312.pyc,,
|
||||
sqlalchemy/event/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/event/__pycache__/legacy.cpython-312.pyc,,
|
||||
sqlalchemy/event/__pycache__/registry.cpython-312.pyc,,
|
||||
sqlalchemy/event/api.py,sha256=DtDVgjKSorOfp9MGJ7fgMWrj4seC_hkwF4D8CW1RFZU,8226
|
||||
sqlalchemy/event/attr.py,sha256=X8QeHGK4ioSYht1vkhc11f606_mq_t91jMNIT314ubs,20751
|
||||
sqlalchemy/event/base.py,sha256=270OShTD17-bSFUFnPtKdVnB0NFJZ2AouYPo1wT0aJw,15127
|
||||
sqlalchemy/event/legacy.py,sha256=teMPs00fO-4g8a_z2omcVKkYce5wj_1uvJO2n2MIeuo,8227
|
||||
sqlalchemy/event/registry.py,sha256=nfTSSyhjZZXc5wseWB4sXn-YibSc0LKX8mg17XlWmAo,10835
|
||||
sqlalchemy/events.py,sha256=k-ZD38aSPD29LYhED7CBqttp5MDVVx_YSaWC2-cu9ec,525
|
||||
sqlalchemy/exc.py,sha256=M_8-O1hd8i6gbyx-TapV400p_Lxq2QqTGMXUAO-YgCc,23976
|
||||
sqlalchemy/ext/__init__.py,sha256=S1fGKAbycnQDV01gs-JWGaFQ9GCD4QHwKcU2wnugg_o,322
|
||||
sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/automap.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/baked.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc,,
|
||||
sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc,,
|
||||
sqlalchemy/ext/associationproxy.py,sha256=ZGc_ssGf7FC6eKrja1iTvnWEKLkFZQA8CiVAjR8iVRw,66062
|
||||
sqlalchemy/ext/asyncio/__init__.py,sha256=1OqSxEyIUn7RWLGyO12F-jAUIvk1I6DXlVy80-Gvkds,1317
|
||||
sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc,,
|
||||
sqlalchemy/ext/asyncio/base.py,sha256=fl7wxZD9KjgFiCtG3WXrYjHEvanamcsodCqq9pH9lOk,8905
|
||||
sqlalchemy/ext/asyncio/engine.py,sha256=S_IRWX4QAjj2veLSu4Y3gKBIXkKQt7_2StJAK2_KUDY,48190
|
||||
sqlalchemy/ext/asyncio/exc.py,sha256=8sII7VMXzs2TrhizhFQMzSfcroRtiesq8o3UwLfXSgQ,639
|
||||
sqlalchemy/ext/asyncio/result.py,sha256=3rbVIY_wySi50JwaK3Kf2qa3c5Fc8W84FtUpt-9i9Vk,30477
|
||||
sqlalchemy/ext/asyncio/scoping.py,sha256=UxHAFxtWKqA7TEozyN2h7MJyzSspTCrS-1SlgQLTExo,52608
|
||||
sqlalchemy/ext/asyncio/session.py,sha256=QpXnqspwYnT28znD1EdpUIaVjQOO1BirtS0BJeBxeZk,63087
|
||||
sqlalchemy/ext/automap.py,sha256=r0mUSyogNyqdBL4m9AA1NXbLiTLQmtvyQymsssNEipo,61581
|
||||
sqlalchemy/ext/baked.py,sha256=H6T1il7GY84BhzPFj49UECSpZh_eBuiHomA-QIsYOYQ,17807
|
||||
sqlalchemy/ext/compiler.py,sha256=6X6sZCAo9v-PQfLbwBSYQUK0-XH2xTE5Jm0Zg6Ka6eM,20877
|
||||
sqlalchemy/ext/declarative/__init__.py,sha256=20psLdFQbbOWfpdXHZ0CTY6I1k4UqXvKemNVu1LvPOI,1818
|
||||
sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc,,
|
||||
sqlalchemy/ext/declarative/extensions.py,sha256=uCjN1GisQt54AjqYnKYzJdUjnGd2pZBW47WWdPlS7FE,19547
|
||||
sqlalchemy/ext/horizontal_shard.py,sha256=wuwAPnHymln0unSBnyx-cpX0AfESKSsypaSQTYCvzDk,16750
|
||||
sqlalchemy/ext/hybrid.py,sha256=IYkCaPZ29gm2cPKPg0cWMkLCEqMykD8-JJTvgacGbmc,52458
|
||||
sqlalchemy/ext/indexable.py,sha256=UkTelbydKCdKelzbv3HWFFavoET9WocKaGRPGEOVfN8,11032
|
||||
sqlalchemy/ext/instrumentation.py,sha256=sg8ghDjdHSODFXh_jAmpgemnNX1rxCeeXEG3-PMdrNk,15707
|
||||
sqlalchemy/ext/mutable.py,sha256=L5ZkHBGYhMaqO75Xtyrk2DBR44RDk0g6Rz2HzHH0F8Q,37355
|
||||
sqlalchemy/ext/mypy/__init__.py,sha256=0WebDIZmqBD0OTq5JLtd_PmfF9JGxe4d4Qv3Ml3PKUg,241
|
||||
sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc,,
|
||||
sqlalchemy/ext/mypy/apply.py,sha256=Aek_-XA1eXihT4attxhfE43yBKtCgsxBSb--qgZKUqc,10550
|
||||
sqlalchemy/ext/mypy/decl_class.py,sha256=1vVJRII2apnLTUbc5HkJS6Z2GueaUv_eKvhbqh7Wik4,17384
|
||||
sqlalchemy/ext/mypy/infer.py,sha256=KVnmLFEVS33Al8pUKI7MJbJQu3KeveBUMl78EluBORw,19369
|
||||
sqlalchemy/ext/mypy/names.py,sha256=Q3ef8XQBgVm9WUwlItqlYCXDNi_kbV5DdLEgbtEMEI8,10479
|
||||
sqlalchemy/ext/mypy/plugin.py,sha256=74ML8LI9xar0V86oCxnPFv5FQGEEfUzK64vOay4BKFs,9750
|
||||
sqlalchemy/ext/mypy/util.py,sha256=DKRaurkXHI2lAMAAcEO5GLXbX_m2Xqy7l_juh8Byf5U,9960
|
||||
sqlalchemy/ext/orderinglist.py,sha256=TGYbsGH72wEZcFNQDYDsZg9OSPuzf__P8YX8_2HtYUo,14384
|
||||
sqlalchemy/ext/serializer.py,sha256=D0g4jMZkRk0Gjr0L-FZe81SR63h0Zs-9JzuWtT_SD7k,6140
|
||||
sqlalchemy/future/__init__.py,sha256=q2mw-gxk_xoxJLEvRoyMha3vO1xSRHrslcExOHZwmPA,512
|
||||
sqlalchemy/future/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/future/__pycache__/engine.cpython-312.pyc,,
|
||||
sqlalchemy/future/engine.py,sha256=AgIw6vMsef8W6tynOTkxsjd6o_OQDwGjLdbpoMD8ue8,495
|
||||
sqlalchemy/inspection.py,sha256=MF-LE358wZDUEl1IH8-Uwt2HI65EsQpQW5o5udHkZwA,5063
|
||||
sqlalchemy/log.py,sha256=8x9UR3nj0uFm6or6bQF-JWb4fYv2zOeQjG_w-0wOJFA,8607
|
||||
sqlalchemy/orm/__init__.py,sha256=ZYys5nL3RFUDCMOLFDBrRI52F6er3S1U1OY9TeORuKs,8463
|
||||
sqlalchemy/orm/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/_orm_constructors.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/_typing.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/attributes.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/bulk_persistence.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/clsregistry.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/collections.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/context.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/decl_api.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/decl_base.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/dependency.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/descriptor_props.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/dynamic.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/evaluator.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/events.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/exc.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/identity.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/instrumentation.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/interfaces.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/loading.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/mapped_collection.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/mapper.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/path_registry.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/persistence.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/properties.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/query.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/relationships.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/session.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/state.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/sync.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/util.cpython-312.pyc,,
|
||||
sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc,,
|
||||
sqlalchemy/orm/_orm_constructors.py,sha256=8EQfYsDL2k_ev0eK-wxMl3algouczN38Gu43CrRlAlo,103434
|
||||
sqlalchemy/orm/_typing.py,sha256=DVBfpHmDVK4x1zxaGJPY2GoTrAsyR6uexv20Lzf1afc,4973
|
||||
sqlalchemy/orm/attributes.py,sha256=lorOHBJvJJYndOuafWJhHBbQ1pR6FAyimhqz-mErBRQ,92534
|
||||
sqlalchemy/orm/base.py,sha256=FXkYTSCDUJFQSB5pcyPt2wG-dRctf5P6ySjyjVxQsX0,27502
|
||||
sqlalchemy/orm/bulk_persistence.py,sha256=1FC23bRJKjpfbp2D5hYuV1qOVIKGSswu9XPXbbSJ5Mo,72663
|
||||
sqlalchemy/orm/clsregistry.py,sha256=IjoDZwWpjG42ji59L4M1EZvjBEoXPZykzENDtKWxU8A,17974
|
||||
sqlalchemy/orm/collections.py,sha256=WEKuUCRgLhDhJEIBhZ21UrE0pBOyRm2zxD20GvbgA9g,52243
|
||||
sqlalchemy/orm/context.py,sha256=FMPyw07OA9OXWQ32RQx52AEa2xTLSkqdYgx9R_yN1x0,112955
|
||||
sqlalchemy/orm/decl_api.py,sha256=_WPKQ_vSE5k2TLtNmkaxxYmvbhZvkRMrrvCeDxdqDQE,63998
|
||||
sqlalchemy/orm/decl_base.py,sha256=8R7go5sULTYNRlhYiEjXIJkQ34oPp7DY_fC2nS5D5is,83343
|
||||
sqlalchemy/orm/dependency.py,sha256=hgjksUWhgbmgHK5GdJdiDCBgDAIGQXIrY-Tj79tbL2k,47631
|
||||
sqlalchemy/orm/descriptor_props.py,sha256=dR_h4Gvdtpcdp4sj_ZOR4P5Nng2J2vhsvFHouRLlntc,37244
|
||||
sqlalchemy/orm/dynamic.py,sha256=rWAZ-nfAkREuNjt8e_FRdqYrvHDdbODn1CcfyP8Y18k,9816
|
||||
sqlalchemy/orm/evaluator.py,sha256=tRETz4dNZ71VsEA8nG0hpefByB-W0zBt02IxcSR5H2g,12353
|
||||
sqlalchemy/orm/events.py,sha256=1PiGT7JMUWTDAb3X1T79P02BMVDmcWEpatz1FwpLqoA,127777
|
||||
sqlalchemy/orm/exc.py,sha256=IP40P-wOeXhkYk0YizuTC3wqm6W9cPTaQU08f5MMaQ0,7413
|
||||
sqlalchemy/orm/identity.py,sha256=jHdCxCpCyda_8mFOfGmN_Pr0XZdKiU-2hFZshlNxbHs,9249
|
||||
sqlalchemy/orm/instrumentation.py,sha256=M-kZmkUvHUxtf-0mCA8RIM5QmMH1hWlYR_pKMwaidjA,24321
|
||||
sqlalchemy/orm/interfaces.py,sha256=7Lni4Cue41b1CsmN4VbeUyWwzuNMcKtkrpihc9U-WIw,48690
|
||||
sqlalchemy/orm/loading.py,sha256=9RacpzFOWbuKgPRWHFmyIvD4fYCLAnkpwBFASyQ2CoI,58277
|
||||
sqlalchemy/orm/mapped_collection.py,sha256=zK3d3iozORzDruBUrAmkVC0RR3Orj5szk-TSQ24xzIU,19682
|
||||
sqlalchemy/orm/mapper.py,sha256=W-srpoEc3UIYv_6qTXTd_dG_TVeQcToG77VGrXt85PM,171738
|
||||
sqlalchemy/orm/path_registry.py,sha256=sJZMv_WPqUpHfQtKWaX3WYFeKBcNJ8C3wOM2mkBGkTE,25920
|
||||
sqlalchemy/orm/persistence.py,sha256=dzyB2JOXNwQgaCbN8kh0sEz00WFePr48qf8NWVCUZH8,61701
|
||||
sqlalchemy/orm/properties.py,sha256=eDPFzxYUgdM3uWjHywnb1XW-i0tVKKyx7A2MCD31GQU,29306
|
||||
sqlalchemy/orm/query.py,sha256=Cf0e94-u1XyoXJoOAmr4iFvtCwNY98kxUYyMPenaWTE,117708
|
||||
sqlalchemy/orm/relationships.py,sha256=dS5SY0v1MiD7iCNnAQlHaI6prUQhL5EkXT7ijc8FR8E,128644
|
||||
sqlalchemy/orm/scoping.py,sha256=rJVc7_Lic4V00HZ-UvYFWkVpXqdrMayRmIs4fIwH1UA,78688
|
||||
sqlalchemy/orm/session.py,sha256=CZJTQ-wPwIy0c3AMFxgJnBgaft6eEf4JzcCLcaaCSjg,195979
|
||||
sqlalchemy/orm/state.py,sha256=327-F4TG29s6mLC8oWRiO2PuvYIUZzY1MqUPjtUy7M4,37670
|
||||
sqlalchemy/orm/state_changes.py,sha256=qKYg7NxwrDkuUY3EPygAztym6oAVUFcP2wXn7QD3Mz4,6815
|
||||
sqlalchemy/orm/strategies.py,sha256=-tsBRsmEqkaxAAIn4t2F-U5SrRIPoPCyzpqFYGTAwNs,119866
|
||||
sqlalchemy/orm/strategy_options.py,sha256=oeDl_rMDNAC_90N7ytsni-psXWAeQMhABQFyKBSmai0,85353
|
||||
sqlalchemy/orm/sync.py,sha256=g7iZfSge1HgxMk9SKRgUgtHEbpbZ1kP_CBqOIdTOXqc,5779
|
||||
sqlalchemy/orm/unitofwork.py,sha256=fiVaqcymbDDHRa1NjS90N9Z466nd5pkJOEi1dHO6QLY,27033
|
||||
sqlalchemy/orm/util.py,sha256=5SC4MOVU0cPObexDjpMvXvetueiU5pze42raL94gj24,81021
|
||||
sqlalchemy/orm/writeonly.py,sha256=SYu2sAaHZONk2pW4PmtE871LG-O0P_bjidvKzY1H_zI,22305
|
||||
sqlalchemy/pool/__init__.py,sha256=qiDdq4r4FFAoDrK6ncugF_i6usi_X1LeJt-CuBHey0s,1804
|
||||
sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/pool/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/pool/__pycache__/events.cpython-312.pyc,,
|
||||
sqlalchemy/pool/__pycache__/impl.cpython-312.pyc,,
|
||||
sqlalchemy/pool/base.py,sha256=WF4az4ZKuzQGuKeSJeyexaYjmWZUvYdC6KIi8zTGodw,52236
|
||||
sqlalchemy/pool/events.py,sha256=xGjkIUZl490ZDtCHqnQF9ZCwe2Jv93eGXmnQxftB11E,13147
|
||||
sqlalchemy/pool/impl.py,sha256=JwpALSkH-pCoO_6oENbkHYY00Jx9nlttyoI61LivRNc,18944
|
||||
sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
sqlalchemy/schema.py,sha256=dKiWmgHYjcKQ4TiiD6vD0UMmIsD8u0Fsor1M9AAeGUs,3194
|
||||
sqlalchemy/sql/__init__.py,sha256=UNa9EUiYWoPayf-FzNcwVgQvpsBdInPZfpJesAStN9o,5820
|
||||
sqlalchemy/sql/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_dml_constructors.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_elements_constructors.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_orm_types.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_py_util.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/_typing.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/annotation.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/cache_key.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/coercions.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/compiler.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/crud.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/ddl.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/default_comparator.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/dml.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/elements.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/events.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/expression.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/functions.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/lambdas.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/naming.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/operators.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/roles.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/schema.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/selectable.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/sqltypes.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/traversals.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/type_api.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/util.cpython-312.pyc,,
|
||||
sqlalchemy/sql/__pycache__/visitors.cpython-312.pyc,,
|
||||
sqlalchemy/sql/_dml_constructors.py,sha256=YdBJex0MCVACv4q2nl_ii3uhxzwU6aDB8zAsratX5UQ,3867
|
||||
sqlalchemy/sql/_elements_constructors.py,sha256=833Flez92odZkE2Vy6SXK8LcoO1AwkfVzOnATJLWFsA,63168
|
||||
sqlalchemy/sql/_orm_types.py,sha256=T-vjcry4C1y0GToFKVxQCnmly_-Zsq4IO4SHN6bvUF4,625
|
||||
sqlalchemy/sql/_py_util.py,sha256=hiM9ePbRSGs60bAMxPFuJCIC_p9SQ1VzqXGiPchiYwE,2173
|
||||
sqlalchemy/sql/_selectable_constructors.py,sha256=wjE6HrLm9cR7bxvZXT8sFLUqT6t_J9G1XyQCnYmBDl0,18780
|
||||
sqlalchemy/sql/_typing.py,sha256=oqwrYHVMtK-AuKGH9c4SgfiOEJUt5vjkzSEzzscMHkM,12771
|
||||
sqlalchemy/sql/annotation.py,sha256=aqbbVz9kfbCT3_66CZ9GEirVN197Cukoqt8rq48FgkQ,18245
|
||||
sqlalchemy/sql/base.py,sha256=M1b-Tg49ikUW2mnZv0aI38oASG6dgeo4jBNWDgJgAg8,73925
|
||||
sqlalchemy/sql/cache_key.py,sha256=0Db8mR8IrpBgdzXs4TGTt98LOpL3c7KABd72MAPKUQQ,33668
|
||||
sqlalchemy/sql/coercions.py,sha256=hAEou9Ycyswzu8yz_Q7QkwL2_c3nctzBJQS2oDEr4iE,40664
|
||||
sqlalchemy/sql/compiler.py,sha256=hrTptbOKIgVIHapywj4Lk5OMwpXvHS-KGg3odFwlo-I,274687
|
||||
sqlalchemy/sql/crud.py,sha256=HBX4QPtW_PYYJmIKfNr-wE8IdEr963N24WXzFBUZOo0,56514
|
||||
sqlalchemy/sql/ddl.py,sha256=lKqvOigbcYrDG0euxd5F4tu9HbBi1kmp3eFPc45HH-8,45636
|
||||
sqlalchemy/sql/default_comparator.py,sha256=utXWsZVGEjflhFfCT4ywa6RnhORc1Rryo87Hga71Rps,16707
|
||||
sqlalchemy/sql/dml.py,sha256=pn0Lm1ofC5qVZzwGWFW73lPCiNba8OsTeemurJgwRyg,65614
|
||||
sqlalchemy/sql/elements.py,sha256=YfccXzQc9DlgF8q15kDf-zKBUY_vpIe0FGaVDBPoic4,176544
|
||||
sqlalchemy/sql/events.py,sha256=iC_Q1Htm1Aobt5tOYxWfHHqNpoytrULORmUKcusH_-E,18290
|
||||
sqlalchemy/sql/expression.py,sha256=VMX-dLpsZYnVRJpYNDozDUgaj7iQ0HuewUKVefD57PE,7586
|
||||
sqlalchemy/sql/functions.py,sha256=kMMYplvuIHFAPwxBI03SizwaLcYEHzysecWk-R1V-JM,63762
|
||||
sqlalchemy/sql/lambdas.py,sha256=DP0Qz7Ypo8QhzMwygGHYgRhwJMx-rNezO1euouH3iYU,49292
|
||||
sqlalchemy/sql/naming.py,sha256=ZHs1qSV3ou8TYmZ92uvU3sfdklUQlIz4uhe330n05SU,6858
|
||||
sqlalchemy/sql/operators.py,sha256=himArRqBzrljob3Zfhi_ZS-Jleg1u6YFp0g3d7Co6IM,76106
|
||||
sqlalchemy/sql/roles.py,sha256=pOsVn_OZD7mF2gJByHf24Rjopt0_Hu3dUCEOK5t4KS8,7662
|
||||
sqlalchemy/sql/schema.py,sha256=iFleWHkxi-3mKGiK_N1TzUqxnNwOpypB4bWDuAVQe8c,229717
|
||||
sqlalchemy/sql/selectable.py,sha256=cgyV0AsPy4CXAFdhMiTCkbgaHiFilW9sclzxlHJKH3o,236460
|
||||
sqlalchemy/sql/sqltypes.py,sha256=5_N9MhprQFWYc3yjcXgFC_DmvkQU-Jz-Ok9nIMYp2Q4,127469
|
||||
sqlalchemy/sql/traversals.py,sha256=3ScTC1fh1-y8Y478h_2Azmd2xdQdWPWkDve4YgrwMf8,33664
|
||||
sqlalchemy/sql/type_api.py,sha256=SN16_oNZG6G65cvG6ABPcptz_YV5vfB2fknwJZxrkOs,84464
|
||||
sqlalchemy/sql/util.py,sha256=qGHQF-tPCj-m1FBerzT7weCanGcXU7dK5m-W7NHio-4,48077
|
||||
sqlalchemy/sql/visitors.py,sha256=71wdVvhhZL4nJvVwFAs6ssaW-qZgNRSmKjpAcOzF_TA,36317
|
||||
sqlalchemy/testing/__init__.py,sha256=zgitAYzsCWT_U48ZiifXHHLJFo8nZBYmI-5TueA4_lE,3160
|
||||
sqlalchemy/testing/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/assertions.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/assertsql.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/asyncio.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/config.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/engines.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/entities.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/exclusions.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/pickleable.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/profiling.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/provision.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/requirements.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/schema.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/util.cpython-312.pyc,,
|
||||
sqlalchemy/testing/__pycache__/warnings.cpython-312.pyc,,
|
||||
sqlalchemy/testing/assertions.py,sha256=gL0rA7CCZJbcVgvWOPV91tTZTRwQc1_Ta0-ykBn83Ew,31439
|
||||
sqlalchemy/testing/assertsql.py,sha256=IgQG7l94WaiRP8nTbilJh1ZHZl125g7GPq-S5kmQZN0,16817
|
||||
sqlalchemy/testing/asyncio.py,sha256=kM8uuOqDBagZF0r9xvGmsiirUVLUQ_KBzjUFU67W-b8,3830
|
||||
sqlalchemy/testing/config.py,sha256=AqyH1qub_gDqX0BvlL-JBQe7N-t2wo8655FtwblUNOY,12090
|
||||
sqlalchemy/testing/engines.py,sha256=HFJceEBD3Q_TTFQMTtIV5wGWO_a7oUgoKtUF_z636SM,13481
|
||||
sqlalchemy/testing/entities.py,sha256=IphFegPKbff3Un47jY6bi7_MQXy6qkx_50jX2tHZJR4,3354
|
||||
sqlalchemy/testing/exclusions.py,sha256=T8B01hmm8WVs-EKcUOQRzabahPqblWJfOidi6bHJ6GA,12460
|
||||
sqlalchemy/testing/fixtures/__init__.py,sha256=dMClrIoxqlYIFpk2ia4RZpkbfxsS_3EBigr9QsPJ66g,1198
|
||||
sqlalchemy/testing/fixtures/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/testing/fixtures/__pycache__/base.cpython-312.pyc,,
|
||||
sqlalchemy/testing/fixtures/__pycache__/mypy.cpython-312.pyc,,
|
||||
sqlalchemy/testing/fixtures/__pycache__/orm.cpython-312.pyc,,
|
||||
sqlalchemy/testing/fixtures/__pycache__/sql.cpython-312.pyc,,
|
||||
sqlalchemy/testing/fixtures/base.py,sha256=9r_J2ksiTzClpUxW0TczICHrWR7Ny8PV8IsBz6TsGFI,12256
|
||||
sqlalchemy/testing/fixtures/mypy.py,sha256=gdxiwNFIzDlNGSOdvM3gbwDceVCC9t8oM5kKbwyhGBk,11973
|
||||
sqlalchemy/testing/fixtures/orm.py,sha256=8EFbnaBbXX_Bf4FcCzBUaAHgyVpsLGBHX16SGLqE3Fg,6095
|
||||
sqlalchemy/testing/fixtures/sql.py,sha256=KZMjco9_3dsuspmkew5Ejp88Wlr9PsSBB1qeJGFxQAk,15900
|
||||
sqlalchemy/testing/pickleable.py,sha256=U9mIqk-zaxq9Xfy7HErP7UrKgTov-A3QFnhZh-NiOjI,2833
|
||||
sqlalchemy/testing/plugin/__init__.py,sha256=79F--BIY_NTBzVRIlJGgAY5LNJJ3cD19XvrAo4X0W9A,247
|
||||
sqlalchemy/testing/plugin/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,,
|
||||
sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-312.pyc,,
|
||||
sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-312.pyc,,
|
||||
sqlalchemy/testing/plugin/bootstrap.py,sha256=oYScMbEW4pCnWlPEAq1insFruCXFQeEVBwo__i4McpU,1685
|
||||
sqlalchemy/testing/plugin/plugin_base.py,sha256=BgNzWNEmgpK4CwhyblQQKnH-7FDKVi_Uul5vw8fFjBU,21578
|
||||
sqlalchemy/testing/plugin/pytestplugin.py,sha256=6jkQHH2VQMD75k2As9CuWXmEy9jrscoFRhCNg6-PaTw,27656
|
||||
sqlalchemy/testing/profiling.py,sha256=PbuPhRFbauFilUONeY3tV_Y_5lBkD7iCa8VVyH2Sk9Y,10148
|
||||
sqlalchemy/testing/provision.py,sha256=3qFor_sN1FFlS7odUGkKqLUxGmQZC9XM67I9vQ_zeXo,14626
|
||||
sqlalchemy/testing/requirements.py,sha256=Z__o-1Rj9B7dI8E_l3qsKTvsg0rK198vB0A1p7A5dcM,52832
|
||||
sqlalchemy/testing/schema.py,sha256=lr4GkGrGwagaHMuSGzWdzkMaj3HnS7dgfLLWfxt__-U,6513
|
||||
sqlalchemy/testing/suite/__init__.py,sha256=Y5DRNG0Yl1u3ypt9zVF0Z9suPZeuO_UQGLl-wRgvTjU,722
|
||||
sqlalchemy/testing/suite/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_cte.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_insert.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_results.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_select.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_types.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-312.pyc,,
|
||||
sqlalchemy/testing/suite/test_cte.py,sha256=6zBC3W2OwX1Xs-HedzchcKN2S7EaLNkgkvV_JSZ_Pq0,6451
|
||||
sqlalchemy/testing/suite/test_ddl.py,sha256=1Npkf0C_4UNxphthAGjG078n0vPEgnSIHpDu5MfokxQ,12031
|
||||
sqlalchemy/testing/suite/test_deprecations.py,sha256=BcJxZTcjYqeOAENVElCg3hVvU6fkGEW3KGBMfnW8bng,5337
|
||||
sqlalchemy/testing/suite/test_dialect.py,sha256=EH4ZQWbnGdtjmx5amZtTyhYmrkXJCvW1SQoLahoE7uk,22923
|
||||
sqlalchemy/testing/suite/test_insert.py,sha256=9azifj6-OCD7s8h_tAO1uPw100ibQv8YoKc_VA3hn3c,18824
|
||||
sqlalchemy/testing/suite/test_reflection.py,sha256=7sML8-owubSQeEM7Ve6LbnB8uIVlNV00WWepKwII2a8,109648
|
||||
sqlalchemy/testing/suite/test_results.py,sha256=X720GafdA4p75SOGS93j-dXkt6QDEnnJbU2bh18VCcg,16914
|
||||
sqlalchemy/testing/suite/test_rowcount.py,sha256=3KDTlRgjpQ1OVfp__1cv8Hvq4CsDKzmrhJQ_WIJWoJg,7900
|
||||
sqlalchemy/testing/suite/test_select.py,sha256=ulRZQJlzkwwcewEyisuBEXVWFR0Wshz9MEDxYYiYLwQ,61732
|
||||
sqlalchemy/testing/suite/test_sequence.py,sha256=66bCoy4xo99GBSaX6Hxb88foANAykLGRz1YEKbvpfuA,9923
|
||||
sqlalchemy/testing/suite/test_types.py,sha256=K4MGHvnTtgqeksoQOBCZRVQYC7HoYO6Z6rVt5vj2t9o,67805
|
||||
sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=c3_eIxLyORuSOhNDP0jWKxPyUf3SwMFpdalxtquwqlM,6141
|
||||
sqlalchemy/testing/suite/test_update_delete.py,sha256=yTiM2unnfOK9rK8ZkqeTTU_MkT-RsKFLmdYliniZfAY,3994
|
||||
sqlalchemy/testing/util.py,sha256=qldXKw8gRJ4I2x3uXsBssYMqwatmcMFMTOveRQCmfDU,14469
|
||||
sqlalchemy/testing/warnings.py,sha256=fJ-QJUY2zY2PPxZJKv9medW-BKKbCNbA4Ns_V3YwFXM,1546
|
||||
sqlalchemy/types.py,sha256=cQFM-hFRmaf1GErun1qqgEs6QxufvzMuwKqj9tuMPpE,3168
|
||||
sqlalchemy/util/__init__.py,sha256=5D5Mquvx3SOmud0QErKzzGvBTkqMdhrrd_sXijOILeo,8312
|
||||
sqlalchemy/util/__pycache__/__init__.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/_collections.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/_has_cy.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/_py_collections.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/compat.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/concurrency.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/deprecations.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/langhelpers.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/preloaded.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/queue.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/tool_support.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/topological.cpython-312.pyc,,
|
||||
sqlalchemy/util/__pycache__/typing.cpython-312.pyc,,
|
||||
sqlalchemy/util/_collections.py,sha256=aZoSAVOXnHBoYEsxDOi0O9odg9wqLbGb7PGjaWQKiyY,20078
|
||||
sqlalchemy/util/_concurrency_py3k.py,sha256=zb0Bow2Y_QjTdaACEviBEEaFvqDuVvpJfmwCjaw8xNE,9170
|
||||
sqlalchemy/util/_has_cy.py,sha256=wCQmeSjT3jaH_oxfCEtGk-1g0gbSpt5MCK5UcWdMWqk,1247
|
||||
sqlalchemy/util/_py_collections.py,sha256=U6L5AoyLdgSv7cdqB4xxQbw1rpeJjyOZVXffgxgga8I,16714
|
||||
sqlalchemy/util/compat.py,sha256=cnucBQOKspo58vjRpQXUBrHGguHOSFvftpD-I8vfUy0,8760
|
||||
sqlalchemy/util/concurrency.py,sha256=9lT_cMoO1fZNdY8QTUZ22oeSf-L5I-79Ke7chcBNPA0,3304
|
||||
sqlalchemy/util/deprecations.py,sha256=YBwvvYhSB8LhasIZRKvg_-WNoVhPUcaYI1ZrnjDn868,11971
|
||||
sqlalchemy/util/langhelpers.py,sha256=uIK3szZuq9aMnO-vEpSlNekNWv4I-E391e56bkTnUm0,65090
|
||||
sqlalchemy/util/preloaded.py,sha256=az7NmLJLsqs0mtM9uBkIu10-841RYDq8wOyqJ7xXvqE,5904
|
||||
sqlalchemy/util/queue.py,sha256=CaeSEaYZ57YwtmLdNdOIjT5PK_LCuwMFiO0mpp39ybM,10185
|
||||
sqlalchemy/util/tool_support.py,sha256=9braZyidaiNrZVsWtGmkSmus50-byhuYrlAqvhjcmnA,6135
|
||||
sqlalchemy/util/topological.py,sha256=N3M3Le7KzGHCmqPGg0ZBqixTDGwmFLhOZvBtc4rHL_g,3458
|
||||
sqlalchemy/util/typing.py,sha256=lFcGo1dJbZIZ9drAnvef-PzP0cX4LMxMSwgk3lJBb0g,18182
|
||||
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (75.1.0)
Root-Is-Purelib: false
Tag: cp312-cp312-macosx_11_0_arm64

@ -0,0 +1 @@
sqlalchemy
Binary file not shown.
@ -0,0 +1 @@
pip
@ -0,0 +1,20 @@
|
||||
Copyright 2010 Jason Kirtland
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
@ -0,0 +1,60 @@
Metadata-Version: 2.3
Name: blinker
Version: 1.9.0
Summary: Fast, simple object-to-object and broadcast signaling
Author: Jason Kirtland
Maintainer-email: Pallets Ecosystem <contact@palletsprojects.com>
Requires-Python: >=3.9
Description-Content-Type: text/markdown
Classifier: Development Status :: 5 - Production/Stable
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python
Classifier: Typing :: Typed
Project-URL: Chat, https://discord.gg/pallets
Project-URL: Documentation, https://blinker.readthedocs.io
Project-URL: Source, https://github.com/pallets-eco/blinker/

# Blinker

Blinker provides a fast dispatching system that allows any number of
interested parties to subscribe to events, or "signals".


## Pallets Community Ecosystem

> [!IMPORTANT]\
> This project is part of the Pallets Community Ecosystem. Pallets is the open
> source organization that maintains Flask; Pallets-Eco enables community
> maintenance of related projects. If you are interested in helping maintain
> this project, please reach out on [the Pallets Discord server][discord].
>
> [discord]: https://discord.gg/pallets


## Example

Signal receivers can subscribe to specific senders or receive signals
sent by any sender.

```pycon
>>> from blinker import signal
>>> started = signal('round-started')
>>> def each(round):
...     print(f"Round {round}!")
...
>>> started.connect(each)

>>> def round_two(round):
...     print("This is round two.")
...
>>> started.connect(round_two, sender=2)

>>> for round in range(1, 4):
...     started.send(round)
...
Round 1!
Round 2!
This is round two.
Round 3!
```
@ -0,0 +1,12 @@
|
||||
blinker-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
blinker-1.9.0.dist-info/LICENSE.txt,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054
|
||||
blinker-1.9.0.dist-info/METADATA,sha256=uIRiM8wjjbHkCtbCyTvctU37IAZk0kEe5kxAld1dvzA,1633
|
||||
blinker-1.9.0.dist-info/RECORD,,
|
||||
blinker-1.9.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
|
||||
blinker/__init__.py,sha256=I2EdZqpy4LyjX17Hn1yzJGWCjeLaVaPzsMgHkLfj_cQ,317
|
||||
blinker/__pycache__/__init__.cpython-312.pyc,,
|
||||
blinker/__pycache__/_utilities.cpython-312.pyc,,
|
||||
blinker/__pycache__/base.cpython-312.pyc,,
|
||||
blinker/_utilities.py,sha256=0J7eeXXTUx0Ivf8asfpx0ycVkp0Eqfqnj117x2mYX9E,1675
|
||||
blinker/base.py,sha256=QpDuvXXcwJF49lUBcH5BiST46Rz9wSG7VW_p7N_027M,19132
|
||||
blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: flit 3.10.1
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
@ -0,0 +1,17 @@
|
||||
from __future__ import annotations

from .base import ANY
from .base import default_namespace
from .base import NamedSignal
from .base import Namespace
from .base import Signal
from .base import signal

__all__ = [
    "ANY",
    "default_namespace",
    "NamedSignal",
    "Namespace",
    "Signal",
    "signal",
]
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,64 @@
|
||||
from __future__ import annotations

import collections.abc as c
import inspect
import typing as t
from weakref import ref
from weakref import WeakMethod

T = t.TypeVar("T")


class Symbol:
    """A constant symbol, nicer than ``object()``. Repeated calls return the
    same instance.

    >>> Symbol('foo') is Symbol('foo')
    True
    >>> Symbol('foo')
    foo
    """

    symbols: t.ClassVar[dict[str, Symbol]] = {}

    def __new__(cls, name: str) -> Symbol:
        if name in cls.symbols:
            return cls.symbols[name]

        obj = super().__new__(cls)
        cls.symbols[name] = obj
        return obj

    def __init__(self, name: str) -> None:
        self.name = name

    def __repr__(self) -> str:
        return self.name

    def __getnewargs__(self) -> tuple[t.Any, ...]:
        return (self.name,)


def make_id(obj: object) -> c.Hashable:
    """Get a stable identifier for a receiver or sender, to be used as a dict
    key or in a set.
    """
    if inspect.ismethod(obj):
        # The id of a bound method is not stable, but the id of the unbound
        # function and instance are.
        return id(obj.__func__), id(obj.__self__)

    if isinstance(obj, (str, int)):
        # Instances with the same value always compare equal and have the same
        # hash, even if the id may change.
        return obj

    # Assume other types are not hashable but will always be the same instance.
    return id(obj)


def make_ref(obj: T, callback: c.Callable[[ref[T]], None] | None = None) -> ref[T]:
    if inspect.ismethod(obj):
        return WeakMethod(obj, callback)  # type: ignore[arg-type, return-value]

    return ref(obj, callback)
@ -0,0 +1,512 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as c
|
||||
import sys
|
||||
import typing as t
|
||||
import weakref
|
||||
from collections import defaultdict
|
||||
from contextlib import contextmanager
|
||||
from functools import cached_property
|
||||
from inspect import iscoroutinefunction
|
||||
|
||||
from ._utilities import make_id
|
||||
from ._utilities import make_ref
|
||||
from ._utilities import Symbol
|
||||
|
||||
F = t.TypeVar("F", bound=c.Callable[..., t.Any])
|
||||
|
||||
ANY = Symbol("ANY")
|
||||
"""Symbol for "any sender"."""
|
||||
|
||||
ANY_ID = 0
|
||||
|
||||
|
||||
class Signal:
|
||||
"""A notification emitter.
|
||||
|
||||
:param doc: The docstring for the signal.
|
||||
"""
|
||||
|
||||
ANY = ANY
|
||||
"""An alias for the :data:`~blinker.ANY` sender symbol."""
|
||||
|
||||
set_class: type[set[t.Any]] = set
|
||||
"""The set class to use for tracking connected receivers and senders.
|
||||
Python's ``set`` is unordered. If receivers must be dispatched in the order
|
||||
they were connected, an ordered set implementation can be used.
|
||||
|
||||
.. versionadded:: 1.7
|
||||
"""
|
||||
|
||||
@cached_property
|
||||
def receiver_connected(self) -> Signal:
|
||||
"""Emitted at the end of each :meth:`connect` call.
|
||||
|
||||
The signal sender is the signal instance, and the :meth:`connect`
|
||||
arguments are passed through: ``receiver``, ``sender``, and ``weak``.
|
||||
|
||||
.. versionadded:: 1.2
|
||||
"""
|
||||
return Signal(doc="Emitted after a receiver connects.")
|
||||
|
||||
@cached_property
|
||||
def receiver_disconnected(self) -> Signal:
|
||||
"""Emitted at the end of each :meth:`disconnect` call.
|
||||
|
||||
The sender is the signal instance, and the :meth:`disconnect` arguments
|
||||
are passed through: ``receiver`` and ``sender``.
|
||||
|
||||
This signal is emitted **only** when :meth:`disconnect` is called
|
||||
explicitly. This signal cannot be emitted by an automatic disconnect
|
||||
when a weakly referenced receiver or sender goes out of scope, as the
|
||||
instance is no longer available to be used as the sender for this
|
||||
signal.
|
||||
|
||||
An alternative approach is available by subscribing to
|
||||
:attr:`receiver_connected` and setting up a custom weakref cleanup
|
||||
callback on weak receivers and senders.
|
||||
|
||||
.. versionadded:: 1.2
|
||||
"""
|
||||
return Signal(doc="Emitted after a receiver disconnects.")
|
||||
|
||||
def __init__(self, doc: str | None = None) -> None:
|
||||
if doc:
|
||||
self.__doc__ = doc
|
||||
|
||||
self.receivers: dict[
|
||||
t.Any, weakref.ref[c.Callable[..., t.Any]] | c.Callable[..., t.Any]
|
||||
] = {}
|
||||
"""The map of connected receivers. Useful to quickly check if any
|
||||
receivers are connected to the signal: ``if s.receivers:``. The
|
||||
structure and data is not part of the public API, but checking its
|
||||
boolean value is.
|
||||
"""
|
||||
|
||||
self.is_muted: bool = False
|
||||
self._by_receiver: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
|
||||
self._by_sender: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
|
||||
self._weak_senders: dict[t.Any, weakref.ref[t.Any]] = {}
|
||||
|
||||
def connect(self, receiver: F, sender: t.Any = ANY, weak: bool = True) -> F:
|
||||
"""Connect ``receiver`` to be called when the signal is sent by
|
||||
``sender``.
|
||||
|
||||
:param receiver: The callable to call when :meth:`send` is called with
|
||||
the given ``sender``, passing ``sender`` as a positional argument
|
||||
along with any extra keyword arguments.
|
||||
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
|
||||
called when :meth:`send` is called with this sender. If ``ANY``, the
|
||||
receiver will be called for any sender. A receiver may be connected
|
||||
to multiple senders by calling :meth:`connect` multiple times.
|
||||
:param weak: Track the receiver with a :mod:`weakref`. The receiver will
|
||||
be automatically disconnected when it is garbage collected. When
|
||||
connecting a receiver defined within a function, set to ``False``,
|
||||
otherwise it will be disconnected when the function scope ends.
|
||||
"""
|
||||
receiver_id = make_id(receiver)
|
||||
sender_id = ANY_ID if sender is ANY else make_id(sender)
|
||||
|
||||
if weak:
|
||||
self.receivers[receiver_id] = make_ref(
|
||||
receiver, self._make_cleanup_receiver(receiver_id)
|
||||
)
|
||||
else:
|
||||
self.receivers[receiver_id] = receiver
|
||||
|
||||
self._by_sender[sender_id].add(receiver_id)
|
||||
self._by_receiver[receiver_id].add(sender_id)
|
||||
|
||||
if sender is not ANY and sender_id not in self._weak_senders:
|
||||
# store a cleanup for weakref-able senders
|
||||
try:
|
||||
self._weak_senders[sender_id] = make_ref(
|
||||
sender, self._make_cleanup_sender(sender_id)
|
||||
)
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers:
|
||||
try:
|
||||
self.receiver_connected.send(
|
||||
self, receiver=receiver, sender=sender, weak=weak
|
||||
)
|
||||
except TypeError:
|
||||
# TODO no explanation or test for this
|
||||
self.disconnect(receiver, sender)
|
||||
raise
|
||||
|
||||
return receiver
|
||||
|
||||
def connect_via(self, sender: t.Any, weak: bool = False) -> c.Callable[[F], F]:
|
||||
"""Connect the decorated function to be called when the signal is sent
|
||||
by ``sender``.
|
||||
|
||||
The decorated function will be called when :meth:`send` is called with
|
||||
the given ``sender``, passing ``sender`` as a positional argument along
|
||||
with any extra keyword arguments.
|
||||
|
||||
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
|
||||
called when :meth:`send` is called with this sender. If ``ANY``, the
|
||||
receiver will be called for any sender. A receiver may be connected
|
||||
to multiple senders by calling :meth:`connect` multiple times.
|
||||
:param weak: Track the receiver with a :mod:`weakref`. The receiver will
|
||||
be automatically disconnected when it is garbage collected. When
|
||||
connecting a receiver defined within a function, set to ``False``,
|
||||
otherwise it will be disconnected when the function scope ends.
|
||||
|
||||
.. versionadded:: 1.1
|
||||
"""
|
||||
|
||||
def decorator(fn: F) -> F:
|
||||
self.connect(fn, sender, weak)
|
||||
return fn
|
||||
|
||||
return decorator
|
||||
|
||||
@contextmanager
|
||||
def connected_to(
|
||||
self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY
|
||||
) -> c.Generator[None, None, None]:
|
||||
"""A context manager that temporarily connects ``receiver`` to the
|
||||
signal while a ``with`` block executes. When the block exits, the
|
||||
receiver is disconnected. Useful for tests.
|
||||
|
||||
:param receiver: The callable to call when :meth:`send` is called with
|
||||
the given ``sender``, passing ``sender`` as a positional argument
|
||||
along with any extra keyword arguments.
|
||||
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
|
||||
called when :meth:`send` is called with this sender. If ``ANY``, the
|
||||
receiver will be called for any sender.
|
||||
|
||||
.. versionadded:: 1.1
|
||||
"""
|
||||
self.connect(receiver, sender=sender, weak=False)
|
||||
|
||||
try:
|
||||
yield None
|
||||
finally:
|
||||
self.disconnect(receiver)
|
||||
|
||||
@contextmanager
|
||||
def muted(self) -> c.Generator[None, None, None]:
|
||||
"""A context manager that temporarily disables the signal. No receivers
|
||||
will be called if the signal is sent, until the ``with`` block exits.
|
||||
Useful for tests.
|
||||
"""
|
||||
self.is_muted = True
|
||||
|
||||
try:
|
||||
yield None
|
||||
finally:
|
||||
self.is_muted = False
|
||||
|
||||
def send(
|
||||
self,
|
||||
sender: t.Any | None = None,
|
||||
/,
|
||||
*,
|
||||
_async_wrapper: c.Callable[
|
||||
[c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]], c.Callable[..., t.Any]
|
||||
]
|
||||
| None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> list[tuple[c.Callable[..., t.Any], t.Any]]:
|
||||
"""Call all receivers that are connected to the given ``sender``
|
||||
or :data:`ANY`. Each receiver is called with ``sender`` as a positional
|
||||
argument along with any extra keyword arguments. Return a list of
|
||||
``(receiver, return value)`` tuples.
|
||||
|
||||
The order receivers are called is undefined, but can be influenced by
|
||||
setting :attr:`set_class`.
|
||||
|
||||
If a receiver raises an exception, that exception will propagate up.
|
||||
This makes debugging straightforward, with an assumption that correctly
|
||||
implemented receivers will not raise.
|
||||
|
||||
:param sender: Call receivers connected to this sender, in addition to
|
||||
those connected to :data:`ANY`.
|
||||
:param _async_wrapper: Will be called on any receivers that are async
|
||||
coroutines to turn them into sync callables. For example, could run
|
||||
the receiver with an event loop.
|
||||
:param kwargs: Extra keyword arguments to pass to each receiver.
|
||||
|
||||
.. versionchanged:: 1.7
|
||||
Added the ``_async_wrapper`` argument.
|
||||
"""
|
||||
if self.is_muted:
|
||||
return []
|
||||
|
||||
results = []
|
||||
|
||||
for receiver in self.receivers_for(sender):
|
||||
if iscoroutinefunction(receiver):
|
||||
if _async_wrapper is None:
|
||||
raise RuntimeError("Cannot send to a coroutine function.")
|
||||
|
||||
result = _async_wrapper(receiver)(sender, **kwargs)
|
||||
else:
|
||||
result = receiver(sender, **kwargs)
|
||||
|
||||
results.append((receiver, result))
|
||||
|
||||
return results
|
||||
|
||||
async def send_async(
|
||||
self,
|
||||
sender: t.Any | None = None,
|
||||
/,
|
||||
*,
|
||||
_sync_wrapper: c.Callable[
|
||||
[c.Callable[..., t.Any]], c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]
|
||||
]
|
||||
| None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> list[tuple[c.Callable[..., t.Any], t.Any]]:
|
||||
"""Await all receivers that are connected to the given ``sender``
|
||||
or :data:`ANY`. Each receiver is called with ``sender`` as a positional
|
||||
argument along with any extra keyword arguments. Return a list of
|
||||
``(receiver, return value)`` tuples.
|
||||
|
||||
The order receivers are called is undefined, but can be influenced by
|
||||
setting :attr:`set_class`.
|
||||
|
||||
If a receiver raises an exception, that exception will propagate up.
|
||||
This makes debugging straightforward, with an assumption that correctly
|
||||
implemented receivers will not raise.
|
||||
|
||||
:param sender: Call receivers connected to this sender, in addition to
|
||||
those connected to :data:`ANY`.
|
||||
:param _sync_wrapper: Will be called on any receivers that are sync
|
||||
callables to turn them into async coroutines. For example,
|
||||
could call the receiver in a thread.
|
||||
:param kwargs: Extra keyword arguments to pass to each receiver.
|
||||
|
||||
.. versionadded:: 1.7
|
||||
"""
|
||||
if self.is_muted:
|
||||
return []
|
||||
|
||||
results = []
|
||||
|
||||
for receiver in self.receivers_for(sender):
|
||||
if not iscoroutinefunction(receiver):
|
||||
if _sync_wrapper is None:
|
||||
raise RuntimeError("Cannot send to a non-coroutine function.")
|
||||
|
||||
result = await _sync_wrapper(receiver)(sender, **kwargs)
|
||||
else:
|
||||
result = await receiver(sender, **kwargs)
|
||||
|
||||
results.append((receiver, result))
|
||||
|
||||
return results
|
||||
|
||||
def has_receivers_for(self, sender: t.Any) -> bool:
|
||||
"""Check if there is at least one receiver that will be called with the
|
||||
given ``sender``. A receiver connected to :data:`ANY` will always be
|
||||
called, regardless of sender. Does not check if weakly referenced
|
||||
receivers are still live. See :meth:`receivers_for` for a stronger
|
||||
search.
|
||||
|
||||
:param sender: Check for receivers connected to this sender, in addition
|
||||
to those connected to :data:`ANY`.
|
||||
"""
|
||||
if not self.receivers:
|
||||
return False
|
||||
|
||||
if self._by_sender[ANY_ID]:
|
||||
return True
|
||||
|
||||
if sender is ANY:
|
||||
return False
|
||||
|
||||
return make_id(sender) in self._by_sender
|
||||
|
||||
def receivers_for(
|
||||
self, sender: t.Any
|
||||
) -> c.Generator[c.Callable[..., t.Any], None, None]:
|
||||
"""Yield each receiver to be called for ``sender``, in addition to those
|
||||
to be called for :data:`ANY`. Weakly referenced receivers that are not
|
||||
live will be disconnected and skipped.
|
||||
|
||||
:param sender: Yield receivers connected to this sender, in addition
|
||||
to those connected to :data:`ANY`.
|
||||
"""
|
||||
# TODO: test receivers_for(ANY)
|
||||
if not self.receivers:
|
||||
return
|
||||
|
||||
sender_id = make_id(sender)
|
||||
|
||||
if sender_id in self._by_sender:
|
||||
ids = self._by_sender[ANY_ID] | self._by_sender[sender_id]
|
||||
else:
|
||||
ids = self._by_sender[ANY_ID].copy()
|
||||
|
||||
for receiver_id in ids:
|
||||
receiver = self.receivers.get(receiver_id)
|
||||
|
||||
if receiver is None:
|
||||
continue
|
||||
|
||||
if isinstance(receiver, weakref.ref):
|
||||
strong = receiver()
|
||||
|
||||
if strong is None:
|
||||
self._disconnect(receiver_id, ANY_ID)
|
||||
continue
|
||||
|
||||
yield strong
|
||||
else:
|
||||
yield receiver
|
||||
|
||||
def disconnect(self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY) -> None:
|
||||
"""Disconnect ``receiver`` from being called when the signal is sent by
|
||||
``sender``.
|
||||
|
||||
:param receiver: A connected receiver callable.
|
||||
:param sender: Disconnect from only this sender. By default, disconnect
|
||||
from all senders.
|
||||
"""
|
||||
sender_id: c.Hashable
|
||||
|
||||
if sender is ANY:
|
||||
sender_id = ANY_ID
|
||||
else:
|
||||
sender_id = make_id(sender)
|
||||
|
||||
receiver_id = make_id(receiver)
|
||||
self._disconnect(receiver_id, sender_id)
|
||||
|
||||
if (
|
||||
"receiver_disconnected" in self.__dict__
|
||||
and self.receiver_disconnected.receivers
|
||||
):
|
||||
self.receiver_disconnected.send(self, receiver=receiver, sender=sender)
|
||||
|
||||
def _disconnect(self, receiver_id: c.Hashable, sender_id: c.Hashable) -> None:
|
||||
if sender_id == ANY_ID:
|
||||
if self._by_receiver.pop(receiver_id, None) is not None:
|
||||
for bucket in self._by_sender.values():
|
||||
bucket.discard(receiver_id)
|
||||
|
||||
self.receivers.pop(receiver_id, None)
|
||||
else:
|
||||
self._by_sender[sender_id].discard(receiver_id)
|
||||
self._by_receiver[receiver_id].discard(sender_id)
|
||||
|
||||
def _make_cleanup_receiver(
|
||||
self, receiver_id: c.Hashable
|
||||
) -> c.Callable[[weakref.ref[c.Callable[..., t.Any]]], None]:
|
||||
"""Create a callback function to disconnect a weakly referenced
|
||||
receiver when it is garbage collected.
|
||||
"""
|
||||
|
||||
def cleanup(ref: weakref.ref[c.Callable[..., t.Any]]) -> None:
|
||||
# If the interpreter is shutting down, disconnecting can result in a
|
||||
# weird ignored exception. Don't call it in that case.
|
||||
if not sys.is_finalizing():
|
||||
self._disconnect(receiver_id, ANY_ID)
|
||||
|
||||
return cleanup
|
||||
|
||||
def _make_cleanup_sender(
|
||||
self, sender_id: c.Hashable
|
||||
) -> c.Callable[[weakref.ref[t.Any]], None]:
|
||||
"""Create a callback function to disconnect all receivers for a weakly
|
||||
referenced sender when it is garbage collected.
|
||||
"""
|
||||
assert sender_id != ANY_ID
|
||||
|
||||
def cleanup(ref: weakref.ref[t.Any]) -> None:
|
||||
self._weak_senders.pop(sender_id, None)
|
||||
|
||||
for receiver_id in self._by_sender.pop(sender_id, ()):
|
||||
self._by_receiver[receiver_id].discard(sender_id)
|
||||
|
||||
return cleanup
|
||||
|
||||
def _cleanup_bookkeeping(self) -> None:
|
||||
"""Prune unused sender/receiver bookkeeping. Not threadsafe.
|
||||
|
||||
Connecting & disconnecting leaves behind a small amount of bookkeeping
|
||||
data. Typical workloads using Blinker, for example in most web apps,
|
||||
Flask, CLI scripts, etc., are not adversely affected by this
|
||||
bookkeeping.
|
||||
|
||||
With a long-running process performing dynamic signal routing with high
|
||||
volume, e.g. connecting to function closures, senders are all unique
|
||||
object instances. Doing all of this over and over may cause memory usage
|
||||
to grow due to extraneous bookkeeping. (An empty ``set`` for each stale
|
||||
sender/receiver pair.)
|
||||
|
||||
This method will prune that bookkeeping away, with the caveat that such
|
||||
pruning is not threadsafe. The risk is that cleanup of a fully
|
||||
disconnected receiver/sender pair occurs while another thread is
|
||||
connecting that same pair. If you are in the highly dynamic, unique
|
||||
receiver/sender situation that has led you to this method, that failure
|
||||
mode is perhaps not a big deal for you.
|
||||
"""
|
||||
for mapping in (self._by_sender, self._by_receiver):
|
||||
for ident, bucket in list(mapping.items()):
|
||||
if not bucket:
|
||||
mapping.pop(ident, None)
|
||||
|
||||
def _clear_state(self) -> None:
|
||||
"""Disconnect all receivers and senders. Useful for tests."""
|
||||
self._weak_senders.clear()
|
||||
self.receivers.clear()
|
||||
self._by_sender.clear()
|
||||
self._by_receiver.clear()
|
||||
|
||||
|
||||
class NamedSignal(Signal):
|
||||
"""A named generic notification emitter. The name is not used by the signal
|
||||
itself, but matches the key in the :class:`Namespace` that it belongs to.
|
||||
|
||||
:param name: The name of the signal within the namespace.
|
||||
:param doc: The docstring for the signal.
|
||||
"""
|
||||
|
||||
def __init__(self, name: str, doc: str | None = None) -> None:
|
||||
super().__init__(doc)
|
||||
|
||||
#: The name of this signal.
|
||||
self.name: str = name
|
||||
|
||||
def __repr__(self) -> str:
|
||||
base = super().__repr__()
|
||||
return f"{base[:-1]}; {self.name!r}>" # noqa: E702
|
||||
|
||||
|
||||
class Namespace(dict[str, NamedSignal]):
|
||||
"""A dict mapping names to signals."""
|
||||
|
||||
def signal(self, name: str, doc: str | None = None) -> NamedSignal:
|
||||
"""Return the :class:`NamedSignal` for the given ``name``, creating it
|
||||
if required. Repeated calls with the same name return the same signal.
|
||||
|
||||
:param name: The name of the signal.
|
||||
:param doc: The docstring of the signal.
|
||||
"""
|
||||
if name not in self:
|
||||
self[name] = NamedSignal(name, doc)
|
||||
|
||||
return self[name]
|
||||
|
||||
|
||||
class _PNamespaceSignal(t.Protocol):
|
||||
def __call__(self, name: str, doc: str | None = None) -> NamedSignal: ...
|
||||
|
||||
|
||||
default_namespace: Namespace = Namespace()
|
||||
"""A default :class:`Namespace` for creating named signals. :func:`signal`
|
||||
creates a :class:`NamedSignal` in this namespace.
|
||||
"""
|
||||
|
||||
signal: _PNamespaceSignal = default_namespace.signal
|
||||
"""Return a :class:`NamedSignal` in :data:`default_namespace` with the given
|
||||
``name``, creating it if required. Repeated calls with the same name return the
|
||||
same signal.
|
||||
"""
|
||||
@ -0,0 +1 @@
|
||||
pip
|
||||
@ -0,0 +1,84 @@
|
||||
Metadata-Version: 2.4
Name: click
Version: 8.3.0
Summary: Composable command line interface toolkit
Maintainer-email: Pallets <contact@palletsprojects.com>
Requires-Python: >=3.10
Description-Content-Type: text/markdown
License-Expression: BSD-3-Clause
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Typing :: Typed
License-File: LICENSE.txt
Requires-Dist: colorama; platform_system == 'Windows'
Project-URL: Changes, https://click.palletsprojects.com/page/changes/
Project-URL: Chat, https://discord.gg/pallets
Project-URL: Documentation, https://click.palletsprojects.com/
Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Source, https://github.com/pallets/click/

<div align="center"><img src="https://raw.githubusercontent.com/pallets/click/refs/heads/stable/docs/_static/click-name.svg" alt="" height="150"></div>

# Click

Click is a Python package for creating beautiful command line interfaces
in a composable way with as little code as necessary. It's the "Command
Line Interface Creation Kit". It's highly configurable but comes with
sensible defaults out of the box.

It aims to make the process of writing command line tools quick and fun
while also preventing any frustration caused by the inability to
implement an intended CLI API.

Click in three points:

- Arbitrary nesting of commands
- Automatic help page generation
- Supports lazy loading of subcommands at runtime


## A Simple Example

```python
import click

@click.command()
@click.option("--count", default=1, help="Number of greetings.")
@click.option("--name", prompt="Your name", help="The person to greet.")
def hello(count, name):
    """Simple program that greets NAME for a total of COUNT times."""
    for _ in range(count):
        click.echo(f"Hello, {name}!")

if __name__ == '__main__':
    hello()
```

```
$ python hello.py --count=3
Your name: Click
Hello, Click!
Hello, Click!
Hello, Click!
```

## Donate

The Pallets organization develops and supports Click and other popular
packages. In order to grow the community of contributors and users, and
allow the maintainers to devote more time to the projects, [please
donate today][].

[please donate today]: https://palletsprojects.com/donate

## Contributing

See our [detailed contributing documentation][contrib] for many ways to
contribute, including reporting issues, requesting features, asking or answering
questions, and making PRs.

[contrib]: https://palletsprojects.com/contributing/

@ -0,0 +1,40 @@
|
||||
click-8.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
click-8.3.0.dist-info/METADATA,sha256=P6vpEHZ_MLBt4SO2eB-QaadcOdiznkzaZtJImRo7_V4,2621
|
||||
click-8.3.0.dist-info/RECORD,,
|
||||
click-8.3.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
|
||||
click-8.3.0.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475
|
||||
click/__init__.py,sha256=6YyS1aeyknZ0LYweWozNZy0A9nZ_11wmYIhv3cbQrYo,4473
|
||||
click/__pycache__/__init__.cpython-312.pyc,,
|
||||
click/__pycache__/_compat.cpython-312.pyc,,
|
||||
click/__pycache__/_termui_impl.cpython-312.pyc,,
|
||||
click/__pycache__/_textwrap.cpython-312.pyc,,
|
||||
click/__pycache__/_utils.cpython-312.pyc,,
|
||||
click/__pycache__/_winconsole.cpython-312.pyc,,
|
||||
click/__pycache__/core.cpython-312.pyc,,
|
||||
click/__pycache__/decorators.cpython-312.pyc,,
|
||||
click/__pycache__/exceptions.cpython-312.pyc,,
|
||||
click/__pycache__/formatting.cpython-312.pyc,,
|
||||
click/__pycache__/globals.cpython-312.pyc,,
|
||||
click/__pycache__/parser.cpython-312.pyc,,
|
||||
click/__pycache__/shell_completion.cpython-312.pyc,,
|
||||
click/__pycache__/termui.cpython-312.pyc,,
|
||||
click/__pycache__/testing.cpython-312.pyc,,
|
||||
click/__pycache__/types.cpython-312.pyc,,
|
||||
click/__pycache__/utils.cpython-312.pyc,,
|
||||
click/_compat.py,sha256=v3xBZkFbvA1BXPRkFfBJc6-pIwPI7345m-kQEnpVAs4,18693
|
||||
click/_termui_impl.py,sha256=ktpAHyJtNkhyR-x64CQFD6xJQI11fTA3qg2AV3iCToU,26799
|
||||
click/_textwrap.py,sha256=BOae0RQ6vg3FkNgSJyOoGzG1meGMxJ_ukWVZKx_v-0o,1400
|
||||
click/_utils.py,sha256=kZwtTf5gMuCilJJceS2iTCvRvCY-0aN5rJq8gKw7p8g,943
|
||||
click/_winconsole.py,sha256=_vxUuUaxwBhoR0vUWCNuHY8VUefiMdCIyU2SXPqoF-A,8465
|
||||
click/core.py,sha256=1A5T8UoAXklIGPTJ83_DJbVi35ehtJS2FTkP_wQ7es0,128855
|
||||
click/decorators.py,sha256=5P7abhJtAQYp_KHgjUvhMv464ERwOzrv2enNknlwHyQ,18461
|
||||
click/exceptions.py,sha256=8utf8w6V5hJXMnO_ic1FNrtbwuEn1NUu1aDwV8UqnG4,9954
|
||||
click/formatting.py,sha256=RVfwwr0rwWNpgGr8NaHodPzkIr7_tUyVh_nDdanLMNc,9730
|
||||
click/globals.py,sha256=gM-Nh6A4M0HB_SgkaF5M4ncGGMDHc_flHXu9_oh4GEU,1923
|
||||
click/parser.py,sha256=Q31pH0FlQZEq-UXE_ABRzlygEfvxPTuZbWNh4xfXmzw,19010
|
||||
click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
click/shell_completion.py,sha256=Cc4GQUFuWpfQBa9sF5qXeeYI7n3tI_1k6ZdSn4BZbT0,20994
|
||||
click/termui.py,sha256=vAYrKC2a7f_NfEIhAThEVYfa__ib5XQbTSCGtJlABRA,30847
|
||||
click/testing.py,sha256=EERbzcl1br0mW0qBS9EqkknfNfXB9WQEW0ELIpkvuSs,19102
|
||||
click/types.py,sha256=ek54BNSFwPKsqtfT7jsqcc4WHui8AIFVMKM4oVZIXhc,39927
|
||||
click/utils.py,sha256=gCUoewdAhA-QLBUUHxrLh4uj6m7T1WjZZMNPvR0I7YA,20257
|
||||
@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: flit 3.12.0
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
@ -0,0 +1,28 @@
|
||||
Copyright 2014 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@ -0,0 +1,123 @@
|
||||
"""
|
||||
Click is a simple Python module inspired by the stdlib optparse to make
|
||||
writing command line scripts fun. Unlike other modules, it's based
|
||||
around a simple API that does not come with too much magic and is
|
||||
composable.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .core import Argument as Argument
|
||||
from .core import Command as Command
|
||||
from .core import CommandCollection as CommandCollection
|
||||
from .core import Context as Context
|
||||
from .core import Group as Group
|
||||
from .core import Option as Option
|
||||
from .core import Parameter as Parameter
|
||||
from .decorators import argument as argument
|
||||
from .decorators import command as command
|
||||
from .decorators import confirmation_option as confirmation_option
|
||||
from .decorators import group as group
|
||||
from .decorators import help_option as help_option
|
||||
from .decorators import make_pass_decorator as make_pass_decorator
|
||||
from .decorators import option as option
|
||||
from .decorators import pass_context as pass_context
|
||||
from .decorators import pass_obj as pass_obj
|
||||
from .decorators import password_option as password_option
|
||||
from .decorators import version_option as version_option
|
||||
from .exceptions import Abort as Abort
|
||||
from .exceptions import BadArgumentUsage as BadArgumentUsage
|
||||
from .exceptions import BadOptionUsage as BadOptionUsage
|
||||
from .exceptions import BadParameter as BadParameter
|
||||
from .exceptions import ClickException as ClickException
|
||||
from .exceptions import FileError as FileError
|
||||
from .exceptions import MissingParameter as MissingParameter
|
||||
from .exceptions import NoSuchOption as NoSuchOption
|
||||
from .exceptions import UsageError as UsageError
|
||||
from .formatting import HelpFormatter as HelpFormatter
|
||||
from .formatting import wrap_text as wrap_text
|
||||
from .globals import get_current_context as get_current_context
|
||||
from .termui import clear as clear
|
||||
from .termui import confirm as confirm
|
||||
from .termui import echo_via_pager as echo_via_pager
|
||||
from .termui import edit as edit
|
||||
from .termui import getchar as getchar
|
||||
from .termui import launch as launch
|
||||
from .termui import pause as pause
|
||||
from .termui import progressbar as progressbar
|
||||
from .termui import prompt as prompt
|
||||
from .termui import secho as secho
|
||||
from .termui import style as style
|
||||
from .termui import unstyle as unstyle
|
||||
from .types import BOOL as BOOL
|
||||
from .types import Choice as Choice
|
||||
from .types import DateTime as DateTime
|
||||
from .types import File as File
|
||||
from .types import FLOAT as FLOAT
|
||||
from .types import FloatRange as FloatRange
|
||||
from .types import INT as INT
|
||||
from .types import IntRange as IntRange
|
||||
from .types import ParamType as ParamType
|
||||
from .types import Path as Path
|
||||
from .types import STRING as STRING
|
||||
from .types import Tuple as Tuple
|
||||
from .types import UNPROCESSED as UNPROCESSED
|
||||
from .types import UUID as UUID
|
||||
from .utils import echo as echo
|
||||
from .utils import format_filename as format_filename
|
||||
from .utils import get_app_dir as get_app_dir
|
||||
from .utils import get_binary_stream as get_binary_stream
|
||||
from .utils import get_text_stream as get_text_stream
|
||||
from .utils import open_file as open_file
|
||||
|
||||
|
||||
def __getattr__(name: str) -> object:
|
||||
import warnings
|
||||
|
||||
if name == "BaseCommand":
|
||||
from .core import _BaseCommand
|
||||
|
||||
warnings.warn(
|
||||
"'BaseCommand' is deprecated and will be removed in Click 9.0. Use"
|
||||
" 'Command' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return _BaseCommand
|
||||
|
||||
if name == "MultiCommand":
|
||||
from .core import _MultiCommand
|
||||
|
||||
warnings.warn(
|
||||
"'MultiCommand' is deprecated and will be removed in Click 9.0. Use"
|
||||
" 'Group' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return _MultiCommand
|
||||
|
||||
if name == "OptionParser":
|
||||
from .parser import _OptionParser
|
||||
|
||||
warnings.warn(
|
||||
"'OptionParser' is deprecated and will be removed in Click 9.0. The"
|
||||
" old parser is available in 'optparse'.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return _OptionParser
|
||||
|
||||
if name == "__version__":
|
||||
import importlib.metadata
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"The '__version__' attribute is deprecated and will be removed in"
|
||||
" Click 9.1. Use feature detection or"
|
||||
" 'importlib.metadata.version(\"click\")' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return importlib.metadata.version("click")
|
||||
|
||||
raise AttributeError(name)
|
||||
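The module-level __getattr__ above is the PEP 562 lazy-attribute pattern: deprecated names are only resolved (and warned about) when someone actually touches them. A minimal stand-alone sketch of the same idea, using hypothetical OldName/NewName placeholders rather than Click's real classes:

# deprecation_shim.py -- illustrative module, not part of Click
import warnings


class NewName:
    """The replacement object that OldName now points to (made-up name)."""


def __getattr__(name: str) -> object:
    # Called only when a normal attribute lookup on the module fails.
    if name == "OldName":
        warnings.warn(
            "'OldName' is deprecated. Use 'NewName' instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return NewName

    raise AttributeError(name)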
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,622 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import codecs
|
||||
import collections.abc as cabc
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import typing as t
|
||||
from types import TracebackType
|
||||
from weakref import WeakKeyDictionary
|
||||
|
||||
CYGWIN = sys.platform.startswith("cygwin")
|
||||
WIN = sys.platform.startswith("win")
|
||||
auto_wrap_for_ansi: t.Callable[[t.TextIO], t.TextIO] | None = None
|
||||
_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]")
|
||||
|
||||
|
||||
def _make_text_stream(
|
||||
stream: t.BinaryIO,
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
force_readable: bool = False,
|
||||
force_writable: bool = False,
|
||||
) -> t.TextIO:
|
||||
if encoding is None:
|
||||
encoding = get_best_encoding(stream)
|
||||
if errors is None:
|
||||
errors = "replace"
|
||||
return _NonClosingTextIOWrapper(
|
||||
stream,
|
||||
encoding,
|
||||
errors,
|
||||
line_buffering=True,
|
||||
force_readable=force_readable,
|
||||
force_writable=force_writable,
|
||||
)
|
||||
|
||||
|
||||
def is_ascii_encoding(encoding: str) -> bool:
|
||||
"""Checks if a given encoding is ascii."""
|
||||
try:
|
||||
return codecs.lookup(encoding).name == "ascii"
|
||||
except LookupError:
|
||||
return False
|
||||
|
||||
|
||||
def get_best_encoding(stream: t.IO[t.Any]) -> str:
|
||||
"""Returns the default stream encoding if not found."""
|
||||
rv = getattr(stream, "encoding", None) or sys.getdefaultencoding()
|
||||
if is_ascii_encoding(rv):
|
||||
return "utf-8"
|
||||
return rv
|
||||
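As a quick illustration of get_best_encoding's fallback behaviour (run inside this module; io.BytesIO simply has no encoding attribute):

import io
import sys

# No `encoding` attribute -> system default; if that default were ASCII it
# would be upgraded to UTF-8 by the check above.
print(get_best_encoding(io.BytesIO()))
# A real terminal stream usually reports its own encoding, e.g. "utf-8".
print(get_best_encoding(sys.stdout))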
|
||||
|
||||
class _NonClosingTextIOWrapper(io.TextIOWrapper):
|
||||
def __init__(
|
||||
self,
|
||||
stream: t.BinaryIO,
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
force_readable: bool = False,
|
||||
force_writable: bool = False,
|
||||
**extra: t.Any,
|
||||
) -> None:
|
||||
self._stream = stream = t.cast(
|
||||
t.BinaryIO, _FixupStream(stream, force_readable, force_writable)
|
||||
)
|
||||
super().__init__(stream, encoding, errors, **extra)
|
||||
|
||||
def __del__(self) -> None:
|
||||
try:
|
||||
self.detach()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def isatty(self) -> bool:
|
||||
# https://bitbucket.org/pypy/pypy/issue/1803
|
||||
return self._stream.isatty()
|
||||
|
||||
|
||||
class _FixupStream:
|
||||
"""The new io interface needs more from streams than streams
|
||||
traditionally implement. As such, this fix-up code is necessary in
|
||||
some circumstances.
|
||||
|
||||
The forcing of the readable and writable flags is there because some
tools put badly patched objects on sys (one such offender is certain
versions of Jupyter Notebook).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
stream: t.BinaryIO,
|
||||
force_readable: bool = False,
|
||||
force_writable: bool = False,
|
||||
):
|
||||
self._stream = stream
|
||||
self._force_readable = force_readable
|
||||
self._force_writable = force_writable
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return getattr(self._stream, name)
|
||||
|
||||
def read1(self, size: int) -> bytes:
|
||||
f = getattr(self._stream, "read1", None)
|
||||
|
||||
if f is not None:
|
||||
return t.cast(bytes, f(size))
|
||||
|
||||
return self._stream.read(size)
|
||||
|
||||
def readable(self) -> bool:
|
||||
if self._force_readable:
|
||||
return True
|
||||
x = getattr(self._stream, "readable", None)
|
||||
if x is not None:
|
||||
return t.cast(bool, x())
|
||||
try:
|
||||
self._stream.read(0)
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
def writable(self) -> bool:
|
||||
if self._force_writable:
|
||||
return True
|
||||
x = getattr(self._stream, "writable", None)
|
||||
if x is not None:
|
||||
return t.cast(bool, x())
|
||||
try:
|
||||
self._stream.write(b"")
|
||||
except Exception:
|
||||
try:
|
||||
self._stream.write(b"")
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
def seekable(self) -> bool:
|
||||
x = getattr(self._stream, "seekable", None)
|
||||
if x is not None:
|
||||
return t.cast(bool, x())
|
||||
try:
|
||||
self._stream.seek(self._stream.tell())
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool:
|
||||
try:
|
||||
return isinstance(stream.read(0), bytes)
|
||||
except Exception:
|
||||
return default
|
||||
# This happens in some cases where the stream was already
|
||||
# closed. In this case, we assume the default.
|
||||
|
||||
|
||||
def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool:
|
||||
try:
|
||||
stream.write(b"")
|
||||
except Exception:
|
||||
try:
|
||||
stream.write("")
|
||||
return False
|
||||
except Exception:
|
||||
pass
|
||||
return default
|
||||
return True
|
||||
|
||||
|
||||
def _find_binary_reader(stream: t.IO[t.Any]) -> t.BinaryIO | None:
|
||||
# We need to figure out if the given stream is already binary.
|
||||
# This can happen because the official docs recommend detaching
|
||||
# the streams to get binary streams. Some code might do this, so
|
||||
# we need to deal with this case explicitly.
|
||||
if _is_binary_reader(stream, False):
|
||||
return t.cast(t.BinaryIO, stream)
|
||||
|
||||
buf = getattr(stream, "buffer", None)
|
||||
|
||||
# Same situation here; this time we assume that the buffer is
|
||||
# actually binary in case it's closed.
|
||||
if buf is not None and _is_binary_reader(buf, True):
|
||||
return t.cast(t.BinaryIO, buf)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _find_binary_writer(stream: t.IO[t.Any]) -> t.BinaryIO | None:
|
||||
# We need to figure out if the given stream is already binary.
|
||||
# This can happen because the official docs recommend detaching
|
||||
# the streams to get binary streams. Some code might do this, so
|
||||
# we need to deal with this case explicitly.
|
||||
if _is_binary_writer(stream, False):
|
||||
return t.cast(t.BinaryIO, stream)
|
||||
|
||||
buf = getattr(stream, "buffer", None)
|
||||
|
||||
# Same situation here; this time we assume that the buffer is
|
||||
# actually binary in case it's closed.
|
||||
if buf is not None and _is_binary_writer(buf, True):
|
||||
return t.cast(t.BinaryIO, buf)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _stream_is_misconfigured(stream: t.TextIO) -> bool:
|
||||
"""A stream is misconfigured if its encoding is ASCII."""
|
||||
# If the stream does not have an encoding set, we assume it's set
|
||||
# to ASCII. This appears to happen in certain unittest
|
||||
# environments. It's not quite clear what the correct behavior is
|
||||
# but this at least will force Click to recover somehow.
|
||||
return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii")
|
||||
|
||||
|
||||
def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: str | None) -> bool:
|
||||
"""A stream attribute is compatible if it is equal to the
|
||||
desired value or the desired value is unset and the attribute
|
||||
has a value.
|
||||
"""
|
||||
stream_value = getattr(stream, attr, None)
|
||||
return stream_value == value or (value is None and stream_value is not None)
|
||||
|
||||
|
||||
def _is_compatible_text_stream(
|
||||
stream: t.TextIO, encoding: str | None, errors: str | None
|
||||
) -> bool:
|
||||
"""Check if a stream's encoding and errors attributes are
|
||||
compatible with the desired values.
|
||||
"""
|
||||
return _is_compat_stream_attr(
|
||||
stream, "encoding", encoding
|
||||
) and _is_compat_stream_attr(stream, "errors", errors)
|
||||
|
||||
|
||||
def _force_correct_text_stream(
|
||||
text_stream: t.IO[t.Any],
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
is_binary: t.Callable[[t.IO[t.Any], bool], bool],
|
||||
find_binary: t.Callable[[t.IO[t.Any]], t.BinaryIO | None],
|
||||
force_readable: bool = False,
|
||||
force_writable: bool = False,
|
||||
) -> t.TextIO:
|
||||
if is_binary(text_stream, False):
|
||||
binary_reader = t.cast(t.BinaryIO, text_stream)
|
||||
else:
|
||||
text_stream = t.cast(t.TextIO, text_stream)
|
||||
# If the stream looks compatible, and won't default to a
|
||||
# misconfigured ascii encoding, return it as-is.
|
||||
if _is_compatible_text_stream(text_stream, encoding, errors) and not (
|
||||
encoding is None and _stream_is_misconfigured(text_stream)
|
||||
):
|
||||
return text_stream
|
||||
|
||||
# Otherwise, get the underlying binary reader.
|
||||
possible_binary_reader = find_binary(text_stream)
|
||||
|
||||
# If that's not possible, silently use the original reader
|
||||
# and get mojibake instead of exceptions.
|
||||
if possible_binary_reader is None:
|
||||
return text_stream
|
||||
|
||||
binary_reader = possible_binary_reader
|
||||
|
||||
# Default errors to replace instead of strict in order to get
|
||||
# something that works.
|
||||
if errors is None:
|
||||
errors = "replace"
|
||||
|
||||
# Wrap the binary stream in a text stream with the correct
|
||||
# encoding parameters.
|
||||
return _make_text_stream(
|
||||
binary_reader,
|
||||
encoding,
|
||||
errors,
|
||||
force_readable=force_readable,
|
||||
force_writable=force_writable,
|
||||
)
|
||||
|
||||
|
||||
def _force_correct_text_reader(
|
||||
text_reader: t.IO[t.Any],
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
force_readable: bool = False,
|
||||
) -> t.TextIO:
|
||||
return _force_correct_text_stream(
|
||||
text_reader,
|
||||
encoding,
|
||||
errors,
|
||||
_is_binary_reader,
|
||||
_find_binary_reader,
|
||||
force_readable=force_readable,
|
||||
)
|
||||
|
||||
|
||||
def _force_correct_text_writer(
|
||||
text_writer: t.IO[t.Any],
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
force_writable: bool = False,
|
||||
) -> t.TextIO:
|
||||
return _force_correct_text_stream(
|
||||
text_writer,
|
||||
encoding,
|
||||
errors,
|
||||
_is_binary_writer,
|
||||
_find_binary_writer,
|
||||
force_writable=force_writable,
|
||||
)
|
||||
|
||||
|
||||
def get_binary_stdin() -> t.BinaryIO:
|
||||
reader = _find_binary_reader(sys.stdin)
|
||||
if reader is None:
|
||||
raise RuntimeError("Was not able to determine binary stream for sys.stdin.")
|
||||
return reader
|
||||
|
||||
|
||||
def get_binary_stdout() -> t.BinaryIO:
|
||||
writer = _find_binary_writer(sys.stdout)
|
||||
if writer is None:
|
||||
raise RuntimeError("Was not able to determine binary stream for sys.stdout.")
|
||||
return writer
|
||||
|
||||
|
||||
def get_binary_stderr() -> t.BinaryIO:
|
||||
writer = _find_binary_writer(sys.stderr)
|
||||
if writer is None:
|
||||
raise RuntimeError("Was not able to determine binary stream for sys.stderr.")
|
||||
return writer
|
||||
|
||||
|
||||
def get_text_stdin(encoding: str | None = None, errors: str | None = None) -> t.TextIO:
|
||||
rv = _get_windows_console_stream(sys.stdin, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True)
|
||||
|
||||
|
||||
def get_text_stdout(encoding: str | None = None, errors: str | None = None) -> t.TextIO:
|
||||
rv = _get_windows_console_stream(sys.stdout, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True)
|
||||
|
||||
|
||||
def get_text_stderr(encoding: str | None = None, errors: str | None = None) -> t.TextIO:
|
||||
rv = _get_windows_console_stream(sys.stderr, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True)
|
||||
|
||||
|
||||
def _wrap_io_open(
|
||||
file: str | os.PathLike[str] | int,
|
||||
mode: str,
|
||||
encoding: str | None,
|
||||
errors: str | None,
|
||||
) -> t.IO[t.Any]:
|
||||
"""Handles not passing ``encoding`` and ``errors`` in binary mode."""
|
||||
if "b" in mode:
|
||||
return open(file, mode)
|
||||
|
||||
return open(file, mode, encoding=encoding, errors=errors)
|
||||
|
||||
|
||||
def open_stream(
|
||||
filename: str | os.PathLike[str],
|
||||
mode: str = "r",
|
||||
encoding: str | None = None,
|
||||
errors: str | None = "strict",
|
||||
atomic: bool = False,
|
||||
) -> tuple[t.IO[t.Any], bool]:
|
||||
binary = "b" in mode
|
||||
filename = os.fspath(filename)
|
||||
|
||||
# Standard streams first. These are simple because they ignore the
|
||||
# atomic flag. Use fsdecode to handle Path("-").
|
||||
if os.fsdecode(filename) == "-":
|
||||
if any(m in mode for m in ["w", "a", "x"]):
|
||||
if binary:
|
||||
return get_binary_stdout(), False
|
||||
return get_text_stdout(encoding=encoding, errors=errors), False
|
||||
if binary:
|
||||
return get_binary_stdin(), False
|
||||
return get_text_stdin(encoding=encoding, errors=errors), False
|
||||
|
||||
# Non-atomic writes directly go out through the regular open functions.
|
||||
if not atomic:
|
||||
return _wrap_io_open(filename, mode, encoding, errors), True
|
||||
|
||||
# Some usability stuff for atomic writes
|
||||
if "a" in mode:
|
||||
raise ValueError(
|
||||
"Appending to an existing file is not supported, because that"
|
||||
" would involve an expensive `copy`-operation to a temporary"
|
||||
" file. Open the file in normal `w`-mode and copy explicitly"
|
||||
" if that's what you're after."
|
||||
)
|
||||
if "x" in mode:
|
||||
raise ValueError("Use the `overwrite`-parameter instead.")
|
||||
if "w" not in mode:
|
||||
raise ValueError("Atomic writes only make sense with `w`-mode.")
|
||||
|
||||
# Atomic writes are more complicated. They work by opening a file
|
||||
# as a proxy in the same folder and then using the fdopen
|
||||
# functionality to wrap it in a Python file. Then we wrap it in an
|
||||
# atomic file that moves the file over on close.
|
||||
import errno
|
||||
import random
|
||||
|
||||
try:
|
||||
perm: int | None = os.stat(filename).st_mode
|
||||
except OSError:
|
||||
perm = None
|
||||
|
||||
flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
|
||||
|
||||
if binary:
|
||||
flags |= getattr(os, "O_BINARY", 0)
|
||||
|
||||
while True:
|
||||
tmp_filename = os.path.join(
|
||||
os.path.dirname(filename),
|
||||
f".__atomic-write{random.randrange(1 << 32):08x}",
|
||||
)
|
||||
try:
|
||||
fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm)
|
||||
break
|
||||
except OSError as e:
|
||||
if e.errno == errno.EEXIST or (
|
||||
os.name == "nt"
|
||||
and e.errno == errno.EACCES
|
||||
and os.path.isdir(e.filename)
|
||||
and os.access(e.filename, os.W_OK)
|
||||
):
|
||||
continue
|
||||
raise
|
||||
|
||||
if perm is not None:
|
||||
os.chmod(tmp_filename, perm) # in case perm includes bits in umask
|
||||
|
||||
f = _wrap_io_open(fd, mode, encoding, errors)
|
||||
af = _AtomicFile(f, tmp_filename, os.path.realpath(filename))
|
||||
return t.cast(t.IO[t.Any], af), True
|
||||
|
||||
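A hedged usage sketch for the atomic path above (the filename is made up): the data lands in a temporary sibling file and is only moved over the target when the stream is closed.

# Illustrative only: write a config file atomically via open_stream().
f, should_close = open_stream("settings.json", "w", atomic=True)
try:
    f.write('{"volume": 11}\n')
finally:
    if should_close:
        f.close()  # _AtomicFile.close() runs os.replace() here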
|
||||
class _AtomicFile:
|
||||
def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None:
|
||||
self._f = f
|
||||
self._tmp_filename = tmp_filename
|
||||
self._real_filename = real_filename
|
||||
self.closed = False
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._real_filename
|
||||
|
||||
def close(self, delete: bool = False) -> None:
|
||||
if self.closed:
|
||||
return
|
||||
self._f.close()
|
||||
os.replace(self._tmp_filename, self._real_filename)
|
||||
self.closed = True
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return getattr(self._f, name)
|
||||
|
||||
def __enter__(self) -> _AtomicFile:
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
self.close(delete=exc_type is not None)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(self._f)
|
||||
|
||||
|
||||
def strip_ansi(value: str) -> str:
|
||||
return _ansi_re.sub("", value)
|
||||
|
||||
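For example, strip_ansi simply drops the escape sequences matched by _ansi_re:

# "\x1b[31mred\x1b[0m" renders as red text on a terminal; stripped it is just "red".
assert strip_ansi("\x1b[31mred\x1b[0m") == "red"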
|
||||
def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool:
|
||||
while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)):
|
||||
stream = stream._stream
|
||||
|
||||
return stream.__class__.__module__.startswith("ipykernel.")
|
||||
|
||||
|
||||
def should_strip_ansi(
|
||||
stream: t.IO[t.Any] | None = None, color: bool | None = None
|
||||
) -> bool:
|
||||
if color is None:
|
||||
if stream is None:
|
||||
stream = sys.stdin
|
||||
return not isatty(stream) and not _is_jupyter_kernel_output(stream)
|
||||
return not color
|
||||
|
||||
|
||||
# On Windows, wrap the output streams with colorama to support ANSI
|
||||
# color codes.
|
||||
# NOTE: double check is needed so mypy does not analyze this on Linux
|
||||
if sys.platform.startswith("win") and WIN:
|
||||
from ._winconsole import _get_windows_console_stream
|
||||
|
||||
def _get_argv_encoding() -> str:
|
||||
import locale
|
||||
|
||||
return locale.getpreferredencoding()
|
||||
|
||||
_ansi_stream_wrappers: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
|
||||
|
||||
def auto_wrap_for_ansi(stream: t.TextIO, color: bool | None = None) -> t.TextIO:
|
||||
"""Support ANSI color and style codes on Windows by wrapping a
|
||||
stream with colorama.
|
||||
"""
|
||||
try:
|
||||
cached = _ansi_stream_wrappers.get(stream)
|
||||
except Exception:
|
||||
cached = None
|
||||
|
||||
if cached is not None:
|
||||
return cached
|
||||
|
||||
import colorama
|
||||
|
||||
strip = should_strip_ansi(stream, color)
|
||||
ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
|
||||
rv = t.cast(t.TextIO, ansi_wrapper.stream)
|
||||
_write = rv.write
|
||||
|
||||
def _safe_write(s: str) -> int:
|
||||
try:
|
||||
return _write(s)
|
||||
except BaseException:
|
||||
ansi_wrapper.reset_all()
|
||||
raise
|
||||
|
||||
rv.write = _safe_write # type: ignore[method-assign]
|
||||
|
||||
try:
|
||||
_ansi_stream_wrappers[stream] = rv
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return rv
|
||||
|
||||
else:
|
||||
|
||||
def _get_argv_encoding() -> str:
|
||||
return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding()
|
||||
|
||||
def _get_windows_console_stream(
|
||||
f: t.TextIO, encoding: str | None, errors: str | None
|
||||
) -> t.TextIO | None:
|
||||
return None
|
||||
|
||||
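From user code the colorama wrapping above is invisible; styled output just goes through the public helpers re-exported in __init__ (a small sketch):

import click

# style() builds the ANSI string; echo()/secho() send it through the
# (possibly colorama-wrapped) stdout and strip the codes when not a TTY.
click.secho("All checks passed", fg="green", bold=True)
click.echo(click.style("3 warnings", fg="yellow"))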
|
||||
def term_len(x: str) -> int:
|
||||
return len(strip_ansi(x))
|
||||
|
||||
|
||||
def isatty(stream: t.IO[t.Any]) -> bool:
|
||||
try:
|
||||
return stream.isatty()
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def _make_cached_stream_func(
|
||||
src_func: t.Callable[[], t.TextIO | None],
|
||||
wrapper_func: t.Callable[[], t.TextIO],
|
||||
) -> t.Callable[[], t.TextIO | None]:
|
||||
cache: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
|
||||
|
||||
def func() -> t.TextIO | None:
|
||||
stream = src_func()
|
||||
|
||||
if stream is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
rv = cache.get(stream)
|
||||
except Exception:
|
||||
rv = None
|
||||
if rv is not None:
|
||||
return rv
|
||||
rv = wrapper_func()
|
||||
try:
|
||||
cache[stream] = rv
|
||||
except Exception:
|
||||
pass
|
||||
return rv
|
||||
|
||||
return func
|
||||
|
||||
|
||||
_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin)
|
||||
_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout)
|
||||
_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr)
|
||||
|
||||
|
||||
binary_streams: cabc.Mapping[str, t.Callable[[], t.BinaryIO]] = {
|
||||
"stdin": get_binary_stdin,
|
||||
"stdout": get_binary_stdout,
|
||||
"stderr": get_binary_stderr,
|
||||
}
|
||||
|
||||
text_streams: cabc.Mapping[str, t.Callable[[str | None, str | None], t.TextIO]] = {
|
||||
"stdin": get_text_stdin,
|
||||
"stdout": get_text_stdout,
|
||||
"stderr": get_text_stderr,
|
||||
}
|
||||
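These registries back the get_binary_stream/get_text_stream helpers exported from the package; for instance:

import click

stdout = click.get_text_stream("stdout")
stdout.write("hello\n")

# Read stdin as raw bytes regardless of the console's text encoding.
payload = click.get_binary_stream("stdin").read()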
@ -0,0 +1,847 @@
|
||||
"""
|
||||
This module contains implementations for the termui module. To keep the
|
||||
import time of Click down, some infrequently used functionality is
|
||||
placed in this module and only imported as needed.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import contextlib
|
||||
import math
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
import time
|
||||
import typing as t
|
||||
from gettext import gettext as _
|
||||
from io import StringIO
|
||||
from pathlib import Path
|
||||
from types import TracebackType
|
||||
|
||||
from ._compat import _default_text_stdout
|
||||
from ._compat import CYGWIN
|
||||
from ._compat import get_best_encoding
|
||||
from ._compat import isatty
|
||||
from ._compat import open_stream
|
||||
from ._compat import strip_ansi
|
||||
from ._compat import term_len
|
||||
from ._compat import WIN
|
||||
from .exceptions import ClickException
|
||||
from .utils import echo
|
||||
|
||||
V = t.TypeVar("V")
|
||||
|
||||
if os.name == "nt":
|
||||
BEFORE_BAR = "\r"
|
||||
AFTER_BAR = "\n"
|
||||
else:
|
||||
BEFORE_BAR = "\r\033[?25l"
|
||||
AFTER_BAR = "\033[?25h\n"
|
||||
|
||||
|
||||
class ProgressBar(t.Generic[V]):
|
||||
def __init__(
|
||||
self,
|
||||
iterable: cabc.Iterable[V] | None,
|
||||
length: int | None = None,
|
||||
fill_char: str = "#",
|
||||
empty_char: str = " ",
|
||||
bar_template: str = "%(bar)s",
|
||||
info_sep: str = " ",
|
||||
hidden: bool = False,
|
||||
show_eta: bool = True,
|
||||
show_percent: bool | None = None,
|
||||
show_pos: bool = False,
|
||||
item_show_func: t.Callable[[V | None], str | None] | None = None,
|
||||
label: str | None = None,
|
||||
file: t.TextIO | None = None,
|
||||
color: bool | None = None,
|
||||
update_min_steps: int = 1,
|
||||
width: int = 30,
|
||||
) -> None:
|
||||
self.fill_char = fill_char
|
||||
self.empty_char = empty_char
|
||||
self.bar_template = bar_template
|
||||
self.info_sep = info_sep
|
||||
self.hidden = hidden
|
||||
self.show_eta = show_eta
|
||||
self.show_percent = show_percent
|
||||
self.show_pos = show_pos
|
||||
self.item_show_func = item_show_func
|
||||
self.label: str = label or ""
|
||||
|
||||
if file is None:
|
||||
file = _default_text_stdout()
|
||||
|
||||
# There are no standard streams attached to write to. For example,
|
||||
# pythonw on Windows.
|
||||
if file is None:
|
||||
file = StringIO()
|
||||
|
||||
self.file = file
|
||||
self.color = color
|
||||
self.update_min_steps = update_min_steps
|
||||
self._completed_intervals = 0
|
||||
self.width: int = width
|
||||
self.autowidth: bool = width == 0
|
||||
|
||||
if length is None:
|
||||
from operator import length_hint
|
||||
|
||||
length = length_hint(iterable, -1)
|
||||
|
||||
if length == -1:
|
||||
length = None
|
||||
if iterable is None:
|
||||
if length is None:
|
||||
raise TypeError("iterable or length is required")
|
||||
iterable = t.cast("cabc.Iterable[V]", range(length))
|
||||
self.iter: cabc.Iterable[V] = iter(iterable)
|
||||
self.length = length
|
||||
self.pos: int = 0
|
||||
self.avg: list[float] = []
|
||||
self.last_eta: float
|
||||
self.start: float
|
||||
self.start = self.last_eta = time.time()
|
||||
self.eta_known: bool = False
|
||||
self.finished: bool = False
|
||||
self.max_width: int | None = None
|
||||
self.entered: bool = False
|
||||
self.current_item: V | None = None
|
||||
self._is_atty = isatty(self.file)
|
||||
self._last_line: str | None = None
|
||||
|
||||
def __enter__(self) -> ProgressBar[V]:
|
||||
self.entered = True
|
||||
self.render_progress()
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
self.render_finish()
|
||||
|
||||
def __iter__(self) -> cabc.Iterator[V]:
|
||||
if not self.entered:
|
||||
raise RuntimeError("You need to use progress bars in a with block.")
|
||||
self.render_progress()
|
||||
return self.generator()
|
||||
|
||||
def __next__(self) -> V:
|
||||
# Iteration is defined in terms of a generator function,
|
||||
# returned by iter(self); use that to define next(). This works
|
||||
# because `self.iter` is an iterable consumed by that generator,
|
||||
# so it is re-entry safe. Calling `next(self.generator())`
|
||||
# twice works and does "what you want".
|
||||
return next(iter(self))
|
||||
|
||||
def render_finish(self) -> None:
|
||||
if self.hidden or not self._is_atty:
|
||||
return
|
||||
self.file.write(AFTER_BAR)
|
||||
self.file.flush()
|
||||
|
||||
@property
|
||||
def pct(self) -> float:
|
||||
if self.finished:
|
||||
return 1.0
|
||||
return min(self.pos / (float(self.length or 1) or 1), 1.0)
|
||||
|
||||
@property
|
||||
def time_per_iteration(self) -> float:
|
||||
if not self.avg:
|
||||
return 0.0
|
||||
return sum(self.avg) / float(len(self.avg))
|
||||
|
||||
@property
|
||||
def eta(self) -> float:
|
||||
if self.length is not None and not self.finished:
|
||||
return self.time_per_iteration * (self.length - self.pos)
|
||||
return 0.0
|
||||
|
||||
def format_eta(self) -> str:
|
||||
if self.eta_known:
|
||||
t = int(self.eta)
|
||||
seconds = t % 60
|
||||
t //= 60
|
||||
minutes = t % 60
|
||||
t //= 60
|
||||
hours = t % 24
|
||||
t //= 24
|
||||
if t > 0:
|
||||
return f"{t}d {hours:02}:{minutes:02}:{seconds:02}"
|
||||
else:
|
||||
return f"{hours:02}:{minutes:02}:{seconds:02}"
|
||||
return ""
|
||||
|
||||
def format_pos(self) -> str:
|
||||
pos = str(self.pos)
|
||||
if self.length is not None:
|
||||
pos += f"/{self.length}"
|
||||
return pos
|
||||
|
||||
def format_pct(self) -> str:
|
||||
return f"{int(self.pct * 100): 4}%"[1:]
|
||||
|
||||
def format_bar(self) -> str:
|
||||
if self.length is not None:
|
||||
bar_length = int(self.pct * self.width)
|
||||
bar = self.fill_char * bar_length
|
||||
bar += self.empty_char * (self.width - bar_length)
|
||||
elif self.finished:
|
||||
bar = self.fill_char * self.width
|
||||
else:
|
||||
chars = list(self.empty_char * (self.width or 1))
|
||||
if self.time_per_iteration != 0:
|
||||
chars[
|
||||
int(
|
||||
(math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5)
|
||||
* self.width
|
||||
)
|
||||
] = self.fill_char
|
||||
bar = "".join(chars)
|
||||
return bar
|
||||
|
||||
def format_progress_line(self) -> str:
|
||||
show_percent = self.show_percent
|
||||
|
||||
info_bits = []
|
||||
if self.length is not None and show_percent is None:
|
||||
show_percent = not self.show_pos
|
||||
|
||||
if self.show_pos:
|
||||
info_bits.append(self.format_pos())
|
||||
if show_percent:
|
||||
info_bits.append(self.format_pct())
|
||||
if self.show_eta and self.eta_known and not self.finished:
|
||||
info_bits.append(self.format_eta())
|
||||
if self.item_show_func is not None:
|
||||
item_info = self.item_show_func(self.current_item)
|
||||
if item_info is not None:
|
||||
info_bits.append(item_info)
|
||||
|
||||
return (
|
||||
self.bar_template
|
||||
% {
|
||||
"label": self.label,
|
||||
"bar": self.format_bar(),
|
||||
"info": self.info_sep.join(info_bits),
|
||||
}
|
||||
).rstrip()
|
||||
|
||||
def render_progress(self) -> None:
|
||||
if self.hidden:
|
||||
return
|
||||
|
||||
if not self._is_atty:
|
||||
# Only output the label once if the output is not a TTY.
|
||||
if self._last_line != self.label:
|
||||
self._last_line = self.label
|
||||
echo(self.label, file=self.file, color=self.color)
|
||||
return
|
||||
|
||||
buf = []
|
||||
# Update width in case the terminal has been resized
|
||||
if self.autowidth:
|
||||
import shutil
|
||||
|
||||
old_width = self.width
|
||||
self.width = 0
|
||||
clutter_length = term_len(self.format_progress_line())
|
||||
new_width = max(0, shutil.get_terminal_size().columns - clutter_length)
|
||||
if new_width < old_width and self.max_width is not None:
|
||||
buf.append(BEFORE_BAR)
|
||||
buf.append(" " * self.max_width)
|
||||
self.max_width = new_width
|
||||
self.width = new_width
|
||||
|
||||
clear_width = self.width
|
||||
if self.max_width is not None:
|
||||
clear_width = self.max_width
|
||||
|
||||
buf.append(BEFORE_BAR)
|
||||
line = self.format_progress_line()
|
||||
line_len = term_len(line)
|
||||
if self.max_width is None or self.max_width < line_len:
|
||||
self.max_width = line_len
|
||||
|
||||
buf.append(line)
|
||||
buf.append(" " * (clear_width - line_len))
|
||||
line = "".join(buf)
|
||||
# Render the line only if it changed.
|
||||
|
||||
if line != self._last_line:
|
||||
self._last_line = line
|
||||
echo(line, file=self.file, color=self.color, nl=False)
|
||||
self.file.flush()
|
||||
|
||||
def make_step(self, n_steps: int) -> None:
|
||||
self.pos += n_steps
|
||||
if self.length is not None and self.pos >= self.length:
|
||||
self.finished = True
|
||||
|
||||
if (time.time() - self.last_eta) < 1.0:
|
||||
return
|
||||
|
||||
self.last_eta = time.time()
|
||||
|
||||
# self.avg is a rolling list of length <= 7 of steps where steps are
|
||||
# defined as time elapsed divided by the total progress through
|
||||
# self.length.
|
||||
if self.pos:
|
||||
step = (time.time() - self.start) / self.pos
|
||||
else:
|
||||
step = time.time() - self.start
|
||||
|
||||
self.avg = self.avg[-6:] + [step]
|
||||
|
||||
self.eta_known = self.length is not None
|
||||
|
||||
def update(self, n_steps: int, current_item: V | None = None) -> None:
|
||||
"""Update the progress bar by advancing a specified number of
|
||||
steps, and optionally set the ``current_item`` for this new
|
||||
position.
|
||||
|
||||
:param n_steps: Number of steps to advance.
|
||||
:param current_item: Optional item to set as ``current_item``
|
||||
for the updated position.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Added the ``current_item`` optional parameter.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Only render when the number of steps meets the
|
||||
``update_min_steps`` threshold.
|
||||
"""
|
||||
if current_item is not None:
|
||||
self.current_item = current_item
|
||||
|
||||
self._completed_intervals += n_steps
|
||||
|
||||
if self._completed_intervals >= self.update_min_steps:
|
||||
self.make_step(self._completed_intervals)
|
||||
self.render_progress()
|
||||
self._completed_intervals = 0
|
||||
|
||||
def finish(self) -> None:
|
||||
self.eta_known = False
|
||||
self.current_item = None
|
||||
self.finished = True
|
||||
|
||||
def generator(self) -> cabc.Iterator[V]:
|
||||
"""Return a generator which yields the items added to the bar
|
||||
during construction, and updates the progress bar *after* the
|
||||
yielded block returns.
|
||||
"""
|
||||
# WARNING: the iterator interface for `ProgressBar` relies on
|
||||
# this and only works because this is a simple generator which
|
||||
# doesn't create or manage additional state. If this function
|
||||
# changes, the impact should be evaluated both against
|
||||
# `iter(bar)` and `next(bar)`. `next()` in particular may call
|
||||
# `self.generator()` repeatedly, and this must remain safe in
|
||||
# order for that interface to work.
|
||||
if not self.entered:
|
||||
raise RuntimeError("You need to use progress bars in a with block.")
|
||||
|
||||
if not self._is_atty:
|
||||
yield from self.iter
|
||||
else:
|
||||
for rv in self.iter:
|
||||
self.current_item = rv
|
||||
|
||||
# This allows item_show_func to be updated before the
|
||||
# item is processed. Only trigger at the beginning of
|
||||
# the update interval.
|
||||
if self._completed_intervals == 0:
|
||||
self.render_progress()
|
||||
|
||||
yield rv
|
||||
self.update(1)
|
||||
|
||||
self.finish()
|
||||
self.render_progress()
|
||||
|
||||
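ProgressBar is the engine behind click.progressbar(); a typical, minimal use of the public API looks like this (the command name is made up):

import time

import click


@click.command()
def crunch() -> None:
    # Iterating inside the `with` block advances the bar one step per item;
    # update_min_steps controls how often it actually re-renders.
    with click.progressbar(range(100), label="Crunching") as bar:
        for _ in bar:
            time.sleep(0.01)


if __name__ == "__main__":
    crunch()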
|
||||
def pager(generator: cabc.Iterable[str], color: bool | None = None) -> None:
|
||||
"""Decide what method to use for paging through text."""
|
||||
stdout = _default_text_stdout()
|
||||
|
||||
# There are no standard streams attached to write to. For example,
|
||||
# pythonw on Windows.
|
||||
if stdout is None:
|
||||
stdout = StringIO()
|
||||
|
||||
if not isatty(sys.stdin) or not isatty(stdout):
|
||||
return _nullpager(stdout, generator, color)
|
||||
|
||||
# Split and normalize the pager command into parts.
|
||||
pager_cmd_parts = shlex.split(os.environ.get("PAGER", ""), posix=False)
|
||||
if pager_cmd_parts:
|
||||
if WIN:
|
||||
if _tempfilepager(generator, pager_cmd_parts, color):
|
||||
return
|
||||
elif _pipepager(generator, pager_cmd_parts, color):
|
||||
return
|
||||
|
||||
if os.environ.get("TERM") in ("dumb", "emacs"):
|
||||
return _nullpager(stdout, generator, color)
|
||||
if (WIN or sys.platform.startswith("os2")) and _tempfilepager(
|
||||
generator, ["more"], color
|
||||
):
|
||||
return
|
||||
if _pipepager(generator, ["less"], color):
|
||||
return
|
||||
|
||||
import tempfile
|
||||
|
||||
fd, filename = tempfile.mkstemp()
|
||||
os.close(fd)
|
||||
try:
|
||||
if _pipepager(generator, ["more"], color):
|
||||
return
|
||||
return _nullpager(stdout, generator, color)
|
||||
finally:
|
||||
os.unlink(filename)
|
||||
|
||||
|
||||
def _pipepager(
|
||||
generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None
|
||||
) -> bool:
|
||||
"""Page through text by feeding it to another program. Invoking a
|
||||
pager through this might support colors.
|
||||
|
||||
Returns `True` if the command was found, `False` otherwise, in which
case another pager should be attempted.
|
||||
"""
|
||||
# Split the command into the invoked CLI and its parameters.
|
||||
if not cmd_parts:
|
||||
return False
|
||||
|
||||
import shutil
|
||||
|
||||
cmd = cmd_parts[0]
|
||||
cmd_params = cmd_parts[1:]
|
||||
|
||||
cmd_filepath = shutil.which(cmd)
|
||||
if not cmd_filepath:
|
||||
return False
|
||||
# Resolves symlinks and produces a normalized absolute path string.
|
||||
cmd_path = Path(cmd_filepath).resolve()
|
||||
cmd_name = cmd_path.name
|
||||
|
||||
import subprocess
|
||||
|
||||
# Make a local copy of the environment to not affect the global one.
|
||||
env = dict(os.environ)
|
||||
|
||||
# If we're piping to less and the user hasn't decided on colors, we enable
|
||||
# them by default if we find the -R flag in the command line arguments.
|
||||
if color is None and cmd_name == "less":
|
||||
less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_params)}"
|
||||
if not less_flags:
|
||||
env["LESS"] = "-R"
|
||||
color = True
|
||||
elif "r" in less_flags or "R" in less_flags:
|
||||
color = True
|
||||
|
||||
c = subprocess.Popen(
|
||||
[str(cmd_path)] + cmd_params,
|
||||
shell=True,
|
||||
stdin=subprocess.PIPE,
|
||||
env=env,
|
||||
errors="replace",
|
||||
text=True,
|
||||
)
|
||||
assert c.stdin is not None
|
||||
try:
|
||||
for text in generator:
|
||||
if not color:
|
||||
text = strip_ansi(text)
|
||||
|
||||
c.stdin.write(text)
|
||||
except BrokenPipeError:
|
||||
# In case the pager exited unexpectedly, ignore the broken pipe error.
|
||||
pass
|
||||
except Exception as e:
|
||||
# In case there is an exception we want to close the pager immediately
|
||||
# and let the caller handle it.
|
||||
# Otherwise the pager will keep running, and the user may not notice
|
||||
# the error message, or worse yet it may leave the terminal in a broken state.
|
||||
c.terminate()
|
||||
raise e
|
||||
finally:
|
||||
# We must close stdin and wait for the pager to exit before we continue
|
||||
try:
|
||||
c.stdin.close()
|
||||
# Close implies flush, so it might throw a BrokenPipeError if the pager
|
||||
# process exited already.
|
||||
except BrokenPipeError:
|
||||
pass
|
||||
|
||||
# Less doesn't respect ^C, but catches it for its own UI purposes (aborting
|
||||
# search or other commands inside less).
|
||||
#
|
||||
# That means when the user hits ^C, the parent process (click) terminates,
|
||||
# but less is still alive, paging the output and messing up the terminal.
|
||||
#
|
||||
# If the user wants to make the pager exit on ^C, they should set
|
||||
# `LESS='-K'`. It's not our decision to make.
|
||||
while True:
|
||||
try:
|
||||
c.wait()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
else:
|
||||
break
|
||||
|
||||
return True
|
||||
|
||||
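The pager plumbing above is reached through click.echo_via_pager(); roughly:

import click

# Streams text through $PAGER (falling back to less/more), keeping ANSI
# colors when the pager supports them.
click.echo_via_pager(f"line {i}\n" for i in range(1000))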
|
||||
def _tempfilepager(
|
||||
generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None
|
||||
) -> bool:
|
||||
"""Page through text by invoking a program on a temporary file.
|
||||
|
||||
Returns `True` if the command was found, `False` otherwise, in which
case another pager should be attempted.
|
||||
"""
|
||||
# Split the command into the invoked CLI and its parameters.
|
||||
if not cmd_parts:
|
||||
return False
|
||||
|
||||
import shutil
|
||||
|
||||
cmd = cmd_parts[0]
|
||||
|
||||
cmd_filepath = shutil.which(cmd)
|
||||
if not cmd_filepath:
|
||||
return False
|
||||
# Resolves symlinks and produces a normalized absolute path string.
|
||||
cmd_path = Path(cmd_filepath).resolve()
|
||||
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
fd, filename = tempfile.mkstemp()
|
||||
# TODO: This never terminates if the passed generator never terminates.
|
||||
text = "".join(generator)
|
||||
if not color:
|
||||
text = strip_ansi(text)
|
||||
encoding = get_best_encoding(sys.stdout)
|
||||
with open_stream(filename, "wb")[0] as f:
|
||||
f.write(text.encode(encoding))
|
||||
try:
|
||||
subprocess.call([str(cmd_path), filename])
|
||||
except OSError:
|
||||
# Command not found
|
||||
pass
|
||||
finally:
|
||||
os.close(fd)
|
||||
os.unlink(filename)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _nullpager(
|
||||
stream: t.TextIO, generator: cabc.Iterable[str], color: bool | None
|
||||
) -> None:
|
||||
"""Simply print unformatted text. This is the ultimate fallback."""
|
||||
for text in generator:
|
||||
if not color:
|
||||
text = strip_ansi(text)
|
||||
stream.write(text)
|
||||
|
||||
|
||||
class Editor:
|
||||
def __init__(
|
||||
self,
|
||||
editor: str | None = None,
|
||||
env: cabc.Mapping[str, str] | None = None,
|
||||
require_save: bool = True,
|
||||
extension: str = ".txt",
|
||||
) -> None:
|
||||
self.editor = editor
|
||||
self.env = env
|
||||
self.require_save = require_save
|
||||
self.extension = extension
|
||||
|
||||
def get_editor(self) -> str:
|
||||
if self.editor is not None:
|
||||
return self.editor
|
||||
for key in "VISUAL", "EDITOR":
|
||||
rv = os.environ.get(key)
|
||||
if rv:
|
||||
return rv
|
||||
if WIN:
|
||||
return "notepad"
|
||||
|
||||
from shutil import which
|
||||
|
||||
for editor in "sensible-editor", "vim", "nano":
|
||||
if which(editor) is not None:
|
||||
return editor
|
||||
return "vi"
|
||||
|
||||
def edit_files(self, filenames: cabc.Iterable[str]) -> None:
|
||||
import subprocess
|
||||
|
||||
editor = self.get_editor()
|
||||
environ: dict[str, str] | None = None
|
||||
|
||||
if self.env:
|
||||
environ = os.environ.copy()
|
||||
environ.update(self.env)
|
||||
|
||||
exc_filename = " ".join(f'"{filename}"' for filename in filenames)
|
||||
|
||||
try:
|
||||
c = subprocess.Popen(
|
||||
args=f"{editor} {exc_filename}", env=environ, shell=True
|
||||
)
|
||||
exit_code = c.wait()
|
||||
if exit_code != 0:
|
||||
raise ClickException(
|
||||
_("{editor}: Editing failed").format(editor=editor)
|
||||
)
|
||||
except OSError as e:
|
||||
raise ClickException(
|
||||
_("{editor}: Editing failed: {e}").format(editor=editor, e=e)
|
||||
) from e
|
||||
|
||||
@t.overload
|
||||
def edit(self, text: bytes | bytearray) -> bytes | None: ...
|
||||
|
||||
# We cannot know whether or not the type expected is str or bytes when None
|
||||
# is passed, so str is returned as that was what was done before.
|
||||
@t.overload
|
||||
def edit(self, text: str | None) -> str | None: ...
|
||||
|
||||
def edit(self, text: str | bytes | bytearray | None) -> str | bytes | None:
|
||||
import tempfile
|
||||
|
||||
if text is None:
|
||||
data: bytes | bytearray = b""
|
||||
elif isinstance(text, (bytes, bytearray)):
|
||||
data = text
|
||||
else:
|
||||
if text and not text.endswith("\n"):
|
||||
text += "\n"
|
||||
|
||||
if WIN:
|
||||
data = text.replace("\n", "\r\n").encode("utf-8-sig")
|
||||
else:
|
||||
data = text.encode("utf-8")
|
||||
|
||||
fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension)
|
||||
f: t.BinaryIO
|
||||
|
||||
try:
|
||||
with os.fdopen(fd, "wb") as f:
|
||||
f.write(data)
|
||||
|
||||
# If the filesystem resolution is 1 second, like Mac OS
|
||||
# 10.12 Extended, or 2 seconds, like FAT32, and the editor
|
||||
# closes very fast, require_save can fail. Set the modified
|
||||
# time to be 2 seconds in the past to work around this.
|
||||
os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2))
|
||||
# Depending on the resolution, the exact value might not be
|
||||
# recorded, so get the new recorded value.
|
||||
timestamp = os.path.getmtime(name)
|
||||
|
||||
self.edit_files((name,))
|
||||
|
||||
if self.require_save and os.path.getmtime(name) == timestamp:
|
||||
return None
|
||||
|
||||
with open(name, "rb") as f:
|
||||
rv = f.read()
|
||||
|
||||
if isinstance(text, (bytes, bytearray)):
|
||||
return rv
|
||||
|
||||
return rv.decode("utf-8-sig").replace("\r\n", "\n")
|
||||
finally:
|
||||
os.unlink(name)
|
||||
|
||||
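Editor is what click.edit() drives; as a short sketch, editing a prefilled message in the user's $VISUAL/$EDITOR:

import click

# Returns the edited text, or None if the user quit without saving
# (require_save=True is the default).
message = click.edit("# Describe your change\n")
if message is not None:
    click.echo(message)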
|
||||
def open_url(url: str, wait: bool = False, locate: bool = False) -> int:
|
||||
import subprocess
|
||||
|
||||
def _unquote_file(url: str) -> str:
|
||||
from urllib.parse import unquote
|
||||
|
||||
if url.startswith("file://"):
|
||||
url = unquote(url[7:])
|
||||
|
||||
return url
|
||||
|
||||
if sys.platform == "darwin":
|
||||
args = ["open"]
|
||||
if wait:
|
||||
args.append("-W")
|
||||
if locate:
|
||||
args.append("-R")
|
||||
args.append(_unquote_file(url))
|
||||
null = open("/dev/null", "w")
|
||||
try:
|
||||
return subprocess.Popen(args, stderr=null).wait()
|
||||
finally:
|
||||
null.close()
|
||||
elif WIN:
|
||||
if locate:
|
||||
url = _unquote_file(url)
|
||||
args = ["explorer", f"/select,{url}"]
|
||||
else:
|
||||
args = ["start"]
|
||||
if wait:
|
||||
args.append("/WAIT")
|
||||
args.append("")
|
||||
args.append(url)
|
||||
try:
|
||||
return subprocess.call(args)
|
||||
except OSError:
|
||||
# Command not found
|
||||
return 127
|
||||
elif CYGWIN:
|
||||
if locate:
|
||||
url = _unquote_file(url)
|
||||
args = ["cygstart", os.path.dirname(url)]
|
||||
else:
|
||||
args = ["cygstart"]
|
||||
if wait:
|
||||
args.append("-w")
|
||||
args.append(url)
|
||||
try:
|
||||
return subprocess.call(args)
|
||||
except OSError:
|
||||
# Command not found
|
||||
return 127
|
||||
|
||||
try:
|
||||
if locate:
|
||||
url = os.path.dirname(_unquote_file(url)) or "."
|
||||
else:
|
||||
url = _unquote_file(url)
|
||||
c = subprocess.Popen(["xdg-open", url])
|
||||
if wait:
|
||||
return c.wait()
|
||||
return 0
|
||||
except OSError:
|
||||
if url.startswith(("http://", "https://")) and not locate and not wait:
|
||||
import webbrowser
|
||||
|
||||
webbrowser.open(url)
|
||||
return 0
|
||||
return 1
|
||||
|
||||
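open_url() backs click.launch(); for example (the file name is hypothetical):

import click

# Open a URL in the default browser; returns the launcher's exit code.
click.launch("https://palletsprojects.com/")

# Or reveal a file in the platform's file manager instead of opening it.
click.launch("report.pdf", locate=True)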
|
||||
def _translate_ch_to_exc(ch: str) -> None:
|
||||
if ch == "\x03":
|
||||
raise KeyboardInterrupt()
|
||||
|
||||
if ch == "\x04" and not WIN: # Unix-like, Ctrl+D
|
||||
raise EOFError()
|
||||
|
||||
if ch == "\x1a" and WIN: # Windows, Ctrl+Z
|
||||
raise EOFError()
|
||||
|
||||
return None
|
||||
|
||||
|
||||
if sys.platform == "win32":
|
||||
import msvcrt
|
||||
|
||||
@contextlib.contextmanager
|
||||
def raw_terminal() -> cabc.Iterator[int]:
|
||||
yield -1
|
||||
|
||||
def getchar(echo: bool) -> str:
|
||||
# The function `getch` will return a bytes object corresponding to
|
||||
# the pressed character. Since Windows 10 build 1803, it will also
|
||||
# return \x00 when called a second time after pressing a regular key.
|
||||
#
|
||||
# `getwch` does not share this probably-bugged behavior. Moreover, it
|
||||
# returns a Unicode object by default, which is what we want.
|
||||
#
|
||||
# Either of these functions will return \x00 or \xe0 to indicate
|
||||
# a special key, and you need to call the same function again to get
|
||||
# the "rest" of the code. The fun part is that \u00e0 is
|
||||
# "latin small letter a with grave", so if you type that on a French
|
||||
# keyboard, you _also_ get a \xe0.
|
||||
# E.g., consider the Up arrow. This returns \xe0 and then \x48. The
|
||||
# resulting Unicode string reads as "a with grave" + "capital H".
|
||||
# This is indistinguishable from when the user actually types
|
||||
# "a with grave" and then "capital H".
|
||||
#
|
||||
# When \xe0 is returned, we assume it's part of a special-key sequence
|
||||
# and call `getwch` again, but that means that when the user types
|
||||
# the \u00e0 character, `getchar` doesn't return until a second
|
||||
# character is typed.
|
||||
# The alternative is returning immediately, but that would mess up
|
||||
# cross-platform handling of arrow keys and others that start with
|
||||
# \xe0. Another option is using `getch`, but then we can't reliably
|
||||
# read non-ASCII characters, because return values of `getch` are
|
||||
# limited to the current 8-bit codepage.
|
||||
#
|
||||
# Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
|
||||
# is doing the right thing in more situations than with `getch`.
|
||||
|
||||
if echo:
|
||||
func = t.cast(t.Callable[[], str], msvcrt.getwche)
|
||||
else:
|
||||
func = t.cast(t.Callable[[], str], msvcrt.getwch)
|
||||
|
||||
rv = func()
|
||||
|
||||
if rv in ("\x00", "\xe0"):
|
||||
# \x00 and \xe0 are control characters that indicate special key,
|
||||
# see above.
|
||||
rv += func()
|
||||
|
||||
_translate_ch_to_exc(rv)
|
||||
return rv
|
||||
|
||||
else:
|
||||
import termios
|
||||
import tty
|
||||
|
||||
@contextlib.contextmanager
|
||||
def raw_terminal() -> cabc.Iterator[int]:
|
||||
f: t.TextIO | None
|
||||
fd: int
|
||||
|
||||
if not isatty(sys.stdin):
|
||||
f = open("/dev/tty")
|
||||
fd = f.fileno()
|
||||
else:
|
||||
fd = sys.stdin.fileno()
|
||||
f = None
|
||||
|
||||
try:
|
||||
old_settings = termios.tcgetattr(fd)
|
||||
|
||||
try:
|
||||
tty.setraw(fd)
|
||||
yield fd
|
||||
finally:
|
||||
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
|
||||
sys.stdout.flush()
|
||||
|
||||
if f is not None:
|
||||
f.close()
|
||||
except termios.error:
|
||||
pass
|
||||
|
||||
def getchar(echo: bool) -> str:
|
||||
with raw_terminal() as fd:
|
||||
ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace")
|
||||
|
||||
if echo and isatty(sys.stdout):
|
||||
sys.stdout.write(ch)
|
||||
|
||||
_translate_ch_to_exc(ch)
|
||||
return ch
|
||||
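getchar() above is exposed as click.getchar(); a simple confirm-on-one-keypress loop:

import click

click.echo("Continue? [y/n] ", nl=False)
ch = click.getchar()
click.echo(ch)
if ch.lower() != "y":
    raise SystemExit(1)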
@ -0,0 +1,51 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import textwrap
|
||||
from contextlib import contextmanager
|
||||
|
||||
|
||||
class TextWrapper(textwrap.TextWrapper):
|
||||
def _handle_long_word(
|
||||
self,
|
||||
reversed_chunks: list[str],
|
||||
cur_line: list[str],
|
||||
cur_len: int,
|
||||
width: int,
|
||||
) -> None:
|
||||
space_left = max(width - cur_len, 1)
|
||||
|
||||
if self.break_long_words:
|
||||
last = reversed_chunks[-1]
|
||||
cut = last[:space_left]
|
||||
res = last[space_left:]
|
||||
cur_line.append(cut)
|
||||
reversed_chunks[-1] = res
|
||||
elif not cur_line:
|
||||
cur_line.append(reversed_chunks.pop())
|
||||
|
||||
@contextmanager
|
||||
def extra_indent(self, indent: str) -> cabc.Iterator[None]:
|
||||
old_initial_indent = self.initial_indent
|
||||
old_subsequent_indent = self.subsequent_indent
|
||||
self.initial_indent += indent
|
||||
self.subsequent_indent += indent
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.initial_indent = old_initial_indent
|
||||
self.subsequent_indent = old_subsequent_indent
|
||||
|
||||
def indent_only(self, text: str) -> str:
|
||||
rv = []
|
||||
|
||||
for idx, line in enumerate(text.splitlines()):
|
||||
indent = self.initial_indent
|
||||
|
||||
if idx > 0:
|
||||
indent = self.subsequent_indent
|
||||
|
||||
rv.append(f"{indent}{line}")
|
||||
|
||||
return "\n".join(rv)
|
||||
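This TextWrapper subclass sits underneath click.wrap_text() (re-exported from .formatting); roughly:

import click

paragraph = (
    "This is a long help paragraph that will be re-wrapped to the "
    "requested width with a hanging indent."
)
print(click.wrap_text(paragraph, width=40, initial_indent="  ", subsequent_indent="      "))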
@ -0,0 +1,36 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import enum
|
||||
import typing as t
|
||||
|
||||
|
||||
class Sentinel(enum.Enum):
|
||||
"""Enum used to define sentinel values.
|
||||
|
||||
.. seealso::
|
||||
|
||||
`PEP 661 - Sentinel Values <https://peps.python.org/pep-0661/>`_.
|
||||
"""
|
||||
|
||||
UNSET = object()
|
||||
FLAG_NEEDS_VALUE = object()
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"{self.__class__.__name__}.{self.name}"
|
||||
|
||||
|
||||
UNSET = Sentinel.UNSET
|
||||
"""Sentinel used to indicate that a value is not set."""
|
||||
|
||||
FLAG_NEEDS_VALUE = Sentinel.FLAG_NEEDS_VALUE
|
||||
"""Sentinel used to indicate an option was passed as a flag without a
|
||||
value but is not a flag option.
|
||||
|
||||
``Option.consume_value`` uses this to prompt or use the ``flag_value``.
|
||||
"""
|
||||
|
||||
T_UNSET = t.Literal[UNSET] # type: ignore[valid-type]
|
||||
"""Type hint for the :data:`UNSET` sentinel value."""
|
||||
|
||||
T_FLAG_NEEDS_VALUE = t.Literal[FLAG_NEEDS_VALUE] # type: ignore[valid-type]
|
||||
"""Type hint for the :data:`FLAG_NEEDS_VALUE` sentinel value."""
|
||||
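The Sentinel enum gives Click typed singletons in the spirit of PEP 661; the pattern in isolation looks like this (a generic sketch, not Click code):

from __future__ import annotations

import enum


class Missing(enum.Enum):
    # A single-member enum makes a convenient, type-checkable sentinel.
    MISSING = object()


MISSING = Missing.MISSING


def get_port(config: dict[str, int], key: str, default: int | Missing = MISSING) -> int:
    if key in config:
        return config[key]
    if isinstance(default, Missing):
        raise KeyError(key)
    return default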
@ -0,0 +1,296 @@
|
||||
# This module is based on the excellent work by Adam Bartoš who
|
||||
# provided a lot of what went into the implementation here in
|
||||
# the discussion to issue1602 in the Python bug tracker.
|
||||
#
|
||||
# There are some general differences in regards to how this works
|
||||
# compared to the original patches as we do not need to patch
|
||||
# the entire interpreter but just work in our little world of
|
||||
# echo and prompt.
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import io
|
||||
import sys
|
||||
import time
|
||||
import typing as t
|
||||
from ctypes import Array
|
||||
from ctypes import byref
|
||||
from ctypes import c_char
|
||||
from ctypes import c_char_p
|
||||
from ctypes import c_int
|
||||
from ctypes import c_ssize_t
|
||||
from ctypes import c_ulong
|
||||
from ctypes import c_void_p
|
||||
from ctypes import POINTER
|
||||
from ctypes import py_object
|
||||
from ctypes import Structure
|
||||
from ctypes.wintypes import DWORD
|
||||
from ctypes.wintypes import HANDLE
|
||||
from ctypes.wintypes import LPCWSTR
|
||||
from ctypes.wintypes import LPWSTR
|
||||
|
||||
from ._compat import _NonClosingTextIOWrapper
|
||||
|
||||
assert sys.platform == "win32"
|
||||
import msvcrt # noqa: E402
|
||||
from ctypes import windll # noqa: E402
|
||||
from ctypes import WINFUNCTYPE # noqa: E402
|
||||
|
||||
c_ssize_p = POINTER(c_ssize_t)
|
||||
|
||||
kernel32 = windll.kernel32
|
||||
GetStdHandle = kernel32.GetStdHandle
|
||||
ReadConsoleW = kernel32.ReadConsoleW
|
||||
WriteConsoleW = kernel32.WriteConsoleW
|
||||
GetConsoleMode = kernel32.GetConsoleMode
|
||||
GetLastError = kernel32.GetLastError
|
||||
GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
|
||||
CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
|
||||
("CommandLineToArgvW", windll.shell32)
|
||||
)
|
||||
LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32))
|
||||
|
||||
STDIN_HANDLE = GetStdHandle(-10)
|
||||
STDOUT_HANDLE = GetStdHandle(-11)
|
||||
STDERR_HANDLE = GetStdHandle(-12)
|
||||
|
||||
PyBUF_SIMPLE = 0
|
||||
PyBUF_WRITABLE = 1
|
||||
|
||||
ERROR_SUCCESS = 0
|
||||
ERROR_NOT_ENOUGH_MEMORY = 8
|
||||
ERROR_OPERATION_ABORTED = 995
|
||||
|
||||
STDIN_FILENO = 0
|
||||
STDOUT_FILENO = 1
|
||||
STDERR_FILENO = 2
|
||||
|
||||
EOF = b"\x1a"
|
||||
MAX_BYTES_WRITTEN = 32767
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
try:
|
||||
# Using `typing_extensions.Buffer` instead of `collections.abc`
|
||||
# on Windows for some reason does not have `Sized` implemented.
|
||||
from collections.abc import Buffer # type: ignore
|
||||
except ImportError:
|
||||
from typing_extensions import Buffer
|
||||
|
||||
try:
|
||||
from ctypes import pythonapi
|
||||
except ImportError:
|
||||
# On PyPy we cannot get buffers so our ability to operate here is
|
||||
# severely limited.
|
||||
get_buffer = None
|
||||
else:
|
||||
|
||||
class Py_buffer(Structure):
|
||||
_fields_ = [ # noqa: RUF012
|
||||
("buf", c_void_p),
|
||||
("obj", py_object),
|
||||
("len", c_ssize_t),
|
||||
("itemsize", c_ssize_t),
|
||||
("readonly", c_int),
|
||||
("ndim", c_int),
|
||||
("format", c_char_p),
|
||||
("shape", c_ssize_p),
|
||||
("strides", c_ssize_p),
|
||||
("suboffsets", c_ssize_p),
|
||||
("internal", c_void_p),
|
||||
]
|
||||
|
||||
PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
|
||||
PyBuffer_Release = pythonapi.PyBuffer_Release
|
||||
|
||||
def get_buffer(obj: Buffer, writable: bool = False) -> Array[c_char]:
|
||||
buf = Py_buffer()
|
||||
flags: int = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
|
||||
PyObject_GetBuffer(py_object(obj), byref(buf), flags)
|
||||
|
||||
try:
|
||||
buffer_type = c_char * buf.len
|
||||
out: Array[c_char] = buffer_type.from_address(buf.buf)
|
||||
return out
|
||||
finally:
|
||||
PyBuffer_Release(byref(buf))
|
||||
|
||||
|
||||
class _WindowsConsoleRawIOBase(io.RawIOBase):
|
||||
def __init__(self, handle: int | None) -> None:
|
||||
self.handle = handle
|
||||
|
||||
def isatty(self) -> t.Literal[True]:
|
||||
super().isatty()
|
||||
return True
|
||||
|
||||
|
||||
class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
|
||||
def readable(self) -> t.Literal[True]:
|
||||
return True
|
||||
|
||||
def readinto(self, b: Buffer) -> int:
|
||||
bytes_to_be_read = len(b)
|
||||
if not bytes_to_be_read:
|
||||
return 0
|
||||
elif bytes_to_be_read % 2:
|
||||
raise ValueError(
|
||||
"cannot read odd number of bytes from UTF-16-LE encoded console"
|
||||
)
|
||||
|
||||
buffer = get_buffer(b, writable=True)
|
||||
code_units_to_be_read = bytes_to_be_read // 2
|
||||
code_units_read = c_ulong()
|
||||
|
||||
rv = ReadConsoleW(
|
||||
HANDLE(self.handle),
|
||||
buffer,
|
||||
code_units_to_be_read,
|
||||
byref(code_units_read),
|
||||
None,
|
||||
)
|
||||
if GetLastError() == ERROR_OPERATION_ABORTED:
|
||||
# wait for KeyboardInterrupt
|
||||
time.sleep(0.1)
|
||||
if not rv:
|
||||
raise OSError(f"Windows error: {GetLastError()}")
|
||||
|
||||
if buffer[0] == EOF:
|
||||
return 0
|
||||
return 2 * code_units_read.value
|
||||
|
||||
|
||||
class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
|
||||
def writable(self) -> t.Literal[True]:
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _get_error_message(errno: int) -> str:
|
||||
if errno == ERROR_SUCCESS:
|
||||
return "ERROR_SUCCESS"
|
||||
elif errno == ERROR_NOT_ENOUGH_MEMORY:
|
||||
return "ERROR_NOT_ENOUGH_MEMORY"
|
||||
return f"Windows error {errno}"
|
||||
|
||||
def write(self, b: Buffer) -> int:
|
||||
bytes_to_be_written = len(b)
|
||||
buf = get_buffer(b)
|
||||
code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
|
||||
code_units_written = c_ulong()
|
||||
|
||||
WriteConsoleW(
|
||||
HANDLE(self.handle),
|
||||
buf,
|
||||
code_units_to_be_written,
|
||||
byref(code_units_written),
|
||||
None,
|
||||
)
|
||||
bytes_written = 2 * code_units_written.value
|
||||
|
||||
if bytes_written == 0 and bytes_to_be_written > 0:
|
||||
raise OSError(self._get_error_message(GetLastError()))
|
||||
return bytes_written
|
||||
|
||||
|
||||
class ConsoleStream:
|
||||
def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None:
|
||||
self._text_stream = text_stream
|
||||
self.buffer = byte_stream
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self.buffer.name
|
||||
|
||||
def write(self, x: t.AnyStr) -> int:
|
||||
if isinstance(x, str):
|
||||
return self._text_stream.write(x)
|
||||
try:
|
||||
self.flush()
|
||||
except Exception:
|
||||
pass
|
||||
return self.buffer.write(x)
|
||||
|
||||
def writelines(self, lines: cabc.Iterable[t.AnyStr]) -> None:
|
||||
for line in lines:
|
||||
self.write(line)
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return getattr(self._text_stream, name)
|
||||
|
||||
def isatty(self) -> bool:
|
||||
return self.buffer.isatty()
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<ConsoleStream name={self.name!r} encoding={self.encoding!r}>"
|
||||
|
||||
|
||||
def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO:
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
|
||||
"utf-16-le",
|
||||
"strict",
|
||||
line_buffering=True,
|
||||
)
|
||||
return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
|
||||
|
||||
|
||||
def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO:
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
|
||||
"utf-16-le",
|
||||
"strict",
|
||||
line_buffering=True,
|
||||
)
|
||||
return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
|
||||
|
||||
|
||||
def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO:
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
|
||||
"utf-16-le",
|
||||
"strict",
|
||||
line_buffering=True,
|
||||
)
|
||||
return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
|
||||
|
||||
|
||||
_stream_factories: cabc.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = {
|
||||
0: _get_text_stdin,
|
||||
1: _get_text_stdout,
|
||||
2: _get_text_stderr,
|
||||
}
|
||||
|
||||
|
||||
def _is_console(f: t.TextIO) -> bool:
|
||||
if not hasattr(f, "fileno"):
|
||||
return False
|
||||
|
||||
try:
|
||||
fileno = f.fileno()
|
||||
except (OSError, io.UnsupportedOperation):
|
||||
return False
|
||||
|
||||
handle = msvcrt.get_osfhandle(fileno)
|
||||
return bool(GetConsoleMode(handle, byref(DWORD())))
|
||||
|
||||
|
||||
def _get_windows_console_stream(
|
||||
f: t.TextIO, encoding: str | None, errors: str | None
|
||||
) -> t.TextIO | None:
|
||||
if (
|
||||
get_buffer is None
|
||||
or encoding not in {"utf-16-le", None}
|
||||
or errors not in {"strict", None}
|
||||
or not _is_console(f)
|
||||
):
|
||||
return None
|
||||
|
||||
func = _stream_factories.get(f.fileno())
|
||||
if func is None:
|
||||
return None
|
||||
|
||||
b = getattr(f, "buffer", None)
|
||||
|
||||
if b is None:
|
||||
return None
|
||||
|
||||
return func(b)
|
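# Hedged usage sketch, not part of upstream Click: when running in a real
# Windows console, a standard stream can be swapped for the UTF-16-LE aware
# wrapper roughly like this (the helper returns None for non-console streams):
#
#     import sys
#     stream = _get_windows_console_stream(sys.stdout, None, None)
#     if stream is not None:
#         sys.stdout = stream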
||||
@ -0,0 +1,551 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import inspect
|
||||
import typing as t
|
||||
from functools import update_wrapper
|
||||
from gettext import gettext as _
|
||||
|
||||
from .core import Argument
|
||||
from .core import Command
|
||||
from .core import Context
|
||||
from .core import Group
|
||||
from .core import Option
|
||||
from .core import Parameter
|
||||
from .globals import get_current_context
|
||||
from .utils import echo
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
|
||||
P = te.ParamSpec("P")
|
||||
|
||||
R = t.TypeVar("R")
|
||||
T = t.TypeVar("T")
|
||||
_AnyCallable = t.Callable[..., t.Any]
|
||||
FC = t.TypeVar("FC", bound="_AnyCallable | Command")
|
||||
|
||||
|
||||
def pass_context(f: t.Callable[te.Concatenate[Context, P], R]) -> t.Callable[P, R]:
|
||||
"""Marks a callback as wanting to receive the current context
|
||||
object as first argument.
|
||||
"""
|
||||
|
||||
def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
return f(get_current_context(), *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
|
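# Example sketch, not part of upstream Click: typical use of pass_context from
# user code, assuming "import click"; "sync" is a hypothetical command:
#
#     @click.command()
#     @click.pass_context
#     def sync(ctx):
#         click.echo(f"Debug is {ctx.obj}")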
||||
|
||||
|
||||
def pass_obj(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]:
|
||||
"""Similar to :func:`pass_context`, but only pass the object on the
|
||||
context onwards (:attr:`Context.obj`). This is useful if that object
|
||||
represents the state of a nested system.
|
||||
"""
|
||||
|
||||
def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
return f(get_current_context().obj, *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
|
||||
|
||||
|
||||
def make_pass_decorator(
|
||||
object_type: type[T], ensure: bool = False
|
||||
) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]:
|
||||
"""Given an object type this creates a decorator that will work
|
||||
similar to :func:`pass_obj` but instead of passing the object of the
|
||||
current context, it will find the innermost context of type
|
||||
:func:`object_type`.
|
||||
|
||||
This generates a decorator that works roughly like this::
|
||||
|
||||
from functools import update_wrapper
|
||||
|
||||
def decorator(f):
|
||||
@pass_context
|
||||
def new_func(ctx, *args, **kwargs):
|
||||
obj = ctx.find_object(object_type)
|
||||
return ctx.invoke(f, obj, *args, **kwargs)
|
||||
return update_wrapper(new_func, f)
|
||||
return decorator
|
||||
|
||||
:param object_type: the type of the object to pass.
|
||||
:param ensure: if set to `True`, a new object will be created and
|
||||
remembered on the context if it's not there yet.
|
||||
"""
|
||||
|
||||
def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]:
|
||||
def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
ctx = get_current_context()
|
||||
|
||||
obj: T | None
|
||||
if ensure:
|
||||
obj = ctx.ensure_object(object_type)
|
||||
else:
|
||||
obj = ctx.find_object(object_type)
|
||||
|
||||
if obj is None:
|
||||
raise RuntimeError(
|
||||
"Managed to invoke callback without a context"
|
||||
f" object of type {object_type.__name__!r}"
|
||||
" existing."
|
||||
)
|
||||
|
||||
return ctx.invoke(f, obj, *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
|
||||
|
||||
return decorator
|
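# Example sketch, not part of upstream Click: "Repo" is a hypothetical
# application object that a parent group stores on the context:
#
#     pass_repo = make_pass_decorator(Repo, ensure=True)
#
#     @click.command()
#     @pass_repo
#     def show(repo):
#         click.echo(repo)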
||||
|
||||
|
||||
def pass_meta_key(
|
||||
key: str, *, doc_description: str | None = None
|
||||
) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]:
|
||||
"""Create a decorator that passes a key from
|
||||
:attr:`click.Context.meta` as the first argument to the decorated
|
||||
function.
|
||||
|
||||
:param key: Key in ``Context.meta`` to pass.
|
||||
:param doc_description: Description of the object being passed,
|
||||
inserted into the decorator's docstring. Defaults to "the 'key'
|
||||
key from Context.meta".
|
||||
|
||||
.. versionadded:: 8.0
|
||||
"""
|
||||
|
||||
def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]:
|
||||
def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||
ctx = get_current_context()
|
||||
obj = ctx.meta[key]
|
||||
return ctx.invoke(f, obj, *args, **kwargs)
|
||||
|
||||
return update_wrapper(new_func, f)
|
||||
|
||||
if doc_description is None:
|
||||
doc_description = f"the {key!r} key from :attr:`click.Context.meta`"
|
||||
|
||||
decorator.__doc__ = (
|
||||
f"Decorator that passes {doc_description} as the first argument"
|
||||
" to the decorated function."
|
||||
)
|
||||
return decorator
|
||||
|
||||
|
||||
CmdType = t.TypeVar("CmdType", bound=Command)
|
||||
|
||||
|
||||
# variant: no call, directly as decorator for a function.
|
||||
@t.overload
|
||||
def command(name: _AnyCallable) -> Command: ...
|
||||
|
||||
|
||||
# variant: with positional name and with positional or keyword cls argument:
|
||||
# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...)
|
||||
@t.overload
|
||||
def command(
|
||||
name: str | None,
|
||||
cls: type[CmdType],
|
||||
**attrs: t.Any,
|
||||
) -> t.Callable[[_AnyCallable], CmdType]: ...
|
||||
|
||||
|
||||
# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...)
|
||||
@t.overload
|
||||
def command(
|
||||
name: None = None,
|
||||
*,
|
||||
cls: type[CmdType],
|
||||
**attrs: t.Any,
|
||||
) -> t.Callable[[_AnyCallable], CmdType]: ...
|
||||
|
||||
|
||||
# variant: with optional string name, no cls argument provided.
|
||||
@t.overload
|
||||
def command(
|
||||
name: str | None = ..., cls: None = None, **attrs: t.Any
|
||||
) -> t.Callable[[_AnyCallable], Command]: ...
|
||||
|
||||
|
||||
def command(
|
||||
name: str | _AnyCallable | None = None,
|
||||
cls: type[CmdType] | None = None,
|
||||
**attrs: t.Any,
|
||||
) -> Command | t.Callable[[_AnyCallable], Command | CmdType]:
|
||||
r"""Creates a new :class:`Command` and uses the decorated function as
|
||||
callback. This will also automatically attach all decorated
|
||||
:func:`option`\s and :func:`argument`\s as parameters to the command.
|
||||
|
||||
The name of the command defaults to the name of the function, converted to
|
||||
lowercase, with underscores ``_`` replaced by dashes ``-``, and the suffixes
|
||||
``_command``, ``_cmd``, ``_group``, and ``_grp`` are removed. For example,
|
||||
``init_data_command`` becomes ``init-data``.
|
||||
|
||||
All keyword arguments are forwarded to the underlying command class.
|
||||
For the ``params`` argument, any decorated params are appended to
|
||||
the end of the list.
|
||||
|
||||
Once decorated the function turns into a :class:`Command` instance
|
||||
that can be invoked as a command line utility or be attached to a
|
||||
command :class:`Group`.
|
||||
|
||||
:param name: The name of the command. Defaults to modifying the function's
|
||||
name as described above.
|
||||
:param cls: The command class to create. Defaults to :class:`Command`.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
The suffixes ``_command``, ``_cmd``, ``_group``, and ``_grp`` are
|
||||
removed when generating the name.
|
||||
|
||||
.. versionchanged:: 8.1
|
||||
This decorator can be applied without parentheses.
|
||||
|
||||
.. versionchanged:: 8.1
|
||||
The ``params`` argument can be used. Decorated params are
|
||||
appended to the end of the list.
|
||||
"""
|
||||
|
||||
func: t.Callable[[_AnyCallable], t.Any] | None = None
|
||||
|
||||
if callable(name):
|
||||
func = name
|
||||
name = None
|
||||
assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class."
|
||||
assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments."
|
||||
|
||||
if cls is None:
|
||||
cls = t.cast("type[CmdType]", Command)
|
||||
|
||||
def decorator(f: _AnyCallable) -> CmdType:
|
||||
if isinstance(f, Command):
|
||||
raise TypeError("Attempted to convert a callback into a command twice.")
|
||||
|
||||
attr_params = attrs.pop("params", None)
|
||||
params = attr_params if attr_params is not None else []
|
||||
|
||||
try:
|
||||
decorator_params = f.__click_params__ # type: ignore
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
del f.__click_params__ # type: ignore
|
||||
params.extend(reversed(decorator_params))
|
||||
|
||||
if attrs.get("help") is None:
|
||||
attrs["help"] = f.__doc__
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
assert cls is not None
|
||||
assert not callable(name)
|
||||
|
||||
if name is not None:
|
||||
cmd_name = name
|
||||
else:
|
||||
cmd_name = f.__name__.lower().replace("_", "-")
|
||||
cmd_left, sep, suffix = cmd_name.rpartition("-")
|
||||
|
||||
if sep and suffix in {"command", "cmd", "group", "grp"}:
|
||||
cmd_name = cmd_left
|
||||
|
||||
cmd = cls(name=cmd_name, callback=f, params=params, **attrs)
|
||||
cmd.__doc__ = f.__doc__
|
||||
return cmd
|
||||
|
||||
if func is not None:
|
||||
return decorator(func)
|
||||
|
||||
return decorator
|
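# Example sketch, not part of upstream Click, showing the name derivation
# described in the docstring ("init_data_command" becomes "init-data"):
#
#     @click.command()
#     def init_data_command():
#         """Initialise the data store."""
#
#     # init_data_command.name == "init-data"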
||||
|
||||
|
||||
GrpType = t.TypeVar("GrpType", bound=Group)
|
||||
|
||||
|
||||
# variant: no call, directly as decorator for a function.
|
||||
@t.overload
|
||||
def group(name: _AnyCallable) -> Group: ...
|
||||
|
||||
|
||||
# variant: with positional name and with positional or keyword cls argument:
|
||||
# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...)
|
||||
@t.overload
|
||||
def group(
|
||||
name: str | None,
|
||||
cls: type[GrpType],
|
||||
**attrs: t.Any,
|
||||
) -> t.Callable[[_AnyCallable], GrpType]: ...
|
||||
|
||||
|
||||
# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...)
|
||||
@t.overload
|
||||
def group(
|
||||
name: None = None,
|
||||
*,
|
||||
cls: type[GrpType],
|
||||
**attrs: t.Any,
|
||||
) -> t.Callable[[_AnyCallable], GrpType]: ...
|
||||
|
||||
|
||||
# variant: with optional string name, no cls argument provided.
|
||||
@t.overload
|
||||
def group(
|
||||
name: str | None = ..., cls: None = None, **attrs: t.Any
|
||||
) -> t.Callable[[_AnyCallable], Group]: ...
|
||||
|
||||
|
||||
def group(
|
||||
name: str | _AnyCallable | None = None,
|
||||
cls: type[GrpType] | None = None,
|
||||
**attrs: t.Any,
|
||||
) -> Group | t.Callable[[_AnyCallable], Group | GrpType]:
|
||||
"""Creates a new :class:`Group` with a function as callback. This
|
||||
works otherwise the same as :func:`command`, except that the `cls`
|
||||
parameter is set to :class:`Group`.
|
||||
|
||||
.. versionchanged:: 8.1
|
||||
This decorator can be applied without parentheses.
|
||||
"""
|
||||
if cls is None:
|
||||
cls = t.cast("type[GrpType]", Group)
|
||||
|
||||
if callable(name):
|
||||
return command(cls=cls, **attrs)(name)
|
||||
|
||||
return command(name, cls, **attrs)
|
||||
|
||||
|
||||
def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None:
|
||||
if isinstance(f, Command):
|
||||
f.params.append(param)
|
||||
else:
|
||||
if not hasattr(f, "__click_params__"):
|
||||
f.__click_params__ = [] # type: ignore
|
||||
|
||||
f.__click_params__.append(param) # type: ignore
|
||||
|
||||
|
||||
def argument(
|
||||
*param_decls: str, cls: type[Argument] | None = None, **attrs: t.Any
|
||||
) -> t.Callable[[FC], FC]:
|
||||
"""Attaches an argument to the command. All positional arguments are
|
||||
passed as parameter declarations to :class:`Argument`; all keyword
|
||||
arguments are forwarded unchanged (except ``cls``).
|
||||
This is equivalent to creating an :class:`Argument` instance manually
|
||||
and attaching it to the :attr:`Command.params` list.
|
||||
|
||||
For the default argument class, refer to :class:`Argument` and
|
||||
:class:`Parameter` for descriptions of parameters.
|
||||
|
||||
:param cls: the argument class to instantiate. This defaults to
|
||||
:class:`Argument`.
|
||||
:param param_decls: Passed as positional arguments to the constructor of
|
||||
``cls``.
|
||||
:param attrs: Passed as keyword arguments to the constructor of ``cls``.
|
||||
"""
|
||||
if cls is None:
|
||||
cls = Argument
|
||||
|
||||
def decorator(f: FC) -> FC:
|
||||
_param_memo(f, cls(param_decls, **attrs))
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def option(
|
||||
*param_decls: str, cls: type[Option] | None = None, **attrs: t.Any
|
||||
) -> t.Callable[[FC], FC]:
|
||||
"""Attaches an option to the command. All positional arguments are
|
||||
passed as parameter declarations to :class:`Option`; all keyword
|
||||
arguments are forwarded unchanged (except ``cls``).
|
||||
This is equivalent to creating an :class:`Option` instance manually
|
||||
and attaching it to the :attr:`Command.params` list.
|
||||
|
||||
For the default option class, refer to :class:`Option` and
|
||||
:class:`Parameter` for descriptions of parameters.
|
||||
|
||||
:param cls: the option class to instantiate. This defaults to
|
||||
:class:`Option`.
|
||||
:param param_decls: Passed as positional arguments to the constructor of
|
||||
``cls``.
|
||||
:param attrs: Passed as keyword arguments to the constructor of ``cls``.
|
||||
"""
|
||||
if cls is None:
|
||||
cls = Option
|
||||
|
||||
def decorator(f: FC) -> FC:
|
||||
_param_memo(f, cls(param_decls, **attrs))
|
||||
return f
|
||||
|
||||
return decorator
|
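# Example sketch, not part of upstream Click: attaching an argument and an
# option to a hypothetical "copy" command:
#
#     @click.command()
#     @click.argument("src")
#     @click.option("--force", is_flag=True, help="Overwrite existing files.")
#     def copy(src, force):
#         click.echo(f"copying {src} (force={force})")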
||||
|
||||
|
||||
def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
|
||||
"""Add a ``--yes`` option which shows a prompt before continuing if
|
||||
not passed. If the prompt is declined, the program will exit.
|
||||
|
||||
:param param_decls: One or more option names. Defaults to the single
|
||||
value ``"--yes"``.
|
||||
:param kwargs: Extra arguments are passed to :func:`option`.
|
||||
"""
|
||||
|
||||
def callback(ctx: Context, param: Parameter, value: bool) -> None:
|
||||
if not value:
|
||||
ctx.abort()
|
||||
|
||||
if not param_decls:
|
||||
param_decls = ("--yes",)
|
||||
|
||||
kwargs.setdefault("is_flag", True)
|
||||
kwargs.setdefault("callback", callback)
|
||||
kwargs.setdefault("expose_value", False)
|
||||
kwargs.setdefault("prompt", "Do you want to continue?")
|
||||
kwargs.setdefault("help", "Confirm the action without prompting.")
|
||||
return option(*param_decls, **kwargs)
|
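# Example sketch, not part of upstream Click: requiring confirmation before a
# hypothetical destructive "dropdb" command runs:
#
#     @click.command()
#     @confirmation_option(prompt="Are you sure you want to drop the db?")
#     def dropdb():
#         click.echo("Dropped all tables!")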
||||
|
||||
|
||||
def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
|
||||
"""Add a ``--password`` option which prompts for a password, hiding
|
||||
input and asking to enter the value again for confirmation.
|
||||
|
||||
:param param_decls: One or more option names. Defaults to the single
|
||||
value ``"--password"``.
|
||||
:param kwargs: Extra arguments are passed to :func:`option`.
|
||||
"""
|
||||
if not param_decls:
|
||||
param_decls = ("--password",)
|
||||
|
||||
kwargs.setdefault("prompt", True)
|
||||
kwargs.setdefault("confirmation_prompt", True)
|
||||
kwargs.setdefault("hide_input", True)
|
||||
return option(*param_decls, **kwargs)
|
||||
|
||||
|
||||
def version_option(
|
||||
version: str | None = None,
|
||||
*param_decls: str,
|
||||
package_name: str | None = None,
|
||||
prog_name: str | None = None,
|
||||
message: str | None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> t.Callable[[FC], FC]:
|
||||
"""Add a ``--version`` option which immediately prints the version
|
||||
number and exits the program.
|
||||
|
||||
If ``version`` is not provided, Click will try to detect it using
|
||||
:func:`importlib.metadata.version` to get the version for the
|
||||
``package_name``.
|
||||
|
||||
If ``package_name`` is not provided, Click will try to detect it by
|
||||
inspecting the stack frames. This will be used to detect the
|
||||
version, so it must match the name of the installed package.
|
||||
|
||||
:param version: The version number to show. If not provided, Click
|
||||
will try to detect it.
|
||||
:param param_decls: One or more option names. Defaults to the single
|
||||
value ``"--version"``.
|
||||
:param package_name: The package name to detect the version from. If
|
||||
not provided, Click will try to detect it.
|
||||
:param prog_name: The name of the CLI to show in the message. If not
|
||||
provided, it will be detected from the command.
|
||||
:param message: The message to show. The values ``%(prog)s``,
|
||||
``%(package)s``, and ``%(version)s`` are available. Defaults to
|
||||
``"%(prog)s, version %(version)s"``.
|
||||
:param kwargs: Extra arguments are passed to :func:`option`.
|
||||
:raise RuntimeError: ``version`` could not be detected.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Add the ``package_name`` parameter, and the ``%(package)s``
|
||||
value for messages.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Use :mod:`importlib.metadata` instead of ``pkg_resources``. The
|
||||
version is detected based on the package name, not the entry
|
||||
point name. The Python package name must match the installed
|
||||
package name, or be passed with ``package_name=``.
|
||||
"""
|
||||
if message is None:
|
||||
message = _("%(prog)s, version %(version)s")
|
||||
|
||||
if version is None and package_name is None:
|
||||
frame = inspect.currentframe()
|
||||
f_back = frame.f_back if frame is not None else None
|
||||
f_globals = f_back.f_globals if f_back is not None else None
|
||||
# break reference cycle
|
||||
# https://docs.python.org/3/library/inspect.html#the-interpreter-stack
|
||||
del frame
|
||||
|
||||
if f_globals is not None:
|
||||
package_name = f_globals.get("__name__")
|
||||
|
||||
if package_name == "__main__":
|
||||
package_name = f_globals.get("__package__")
|
||||
|
||||
if package_name:
|
||||
package_name = package_name.partition(".")[0]
|
||||
|
||||
def callback(ctx: Context, param: Parameter, value: bool) -> None:
|
||||
if not value or ctx.resilient_parsing:
|
||||
return
|
||||
|
||||
nonlocal prog_name
|
||||
nonlocal version
|
||||
|
||||
if prog_name is None:
|
||||
prog_name = ctx.find_root().info_name
|
||||
|
||||
if version is None and package_name is not None:
|
||||
import importlib.metadata
|
||||
|
||||
try:
|
||||
version = importlib.metadata.version(package_name)
|
||||
except importlib.metadata.PackageNotFoundError:
|
||||
raise RuntimeError(
|
||||
f"{package_name!r} is not installed. Try passing"
|
||||
" 'package_name' instead."
|
||||
) from None
|
||||
|
||||
if version is None:
|
||||
raise RuntimeError(
|
||||
f"Could not determine the version for {package_name!r} automatically."
|
||||
)
|
||||
|
||||
echo(
|
||||
message % {"prog": prog_name, "package": package_name, "version": version},
|
||||
color=ctx.color,
|
||||
)
|
||||
ctx.exit()
|
||||
|
||||
if not param_decls:
|
||||
param_decls = ("--version",)
|
||||
|
||||
kwargs.setdefault("is_flag", True)
|
||||
kwargs.setdefault("expose_value", False)
|
||||
kwargs.setdefault("is_eager", True)
|
||||
kwargs.setdefault("help", _("Show the version and exit."))
|
||||
kwargs["callback"] = callback
|
||||
return option(*param_decls, **kwargs)
|
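# Example sketch, not part of upstream Click: "mypkg" is a hypothetical
# installed distribution whose version should be reported:
#
#     @click.command()
#     @version_option(package_name="mypkg")
#     def cli():
#         pass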
||||
|
||||
|
||||
def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
|
||||
"""Pre-configured ``--help`` option which immediately prints the help page
|
||||
and exits the program.
|
||||
|
||||
:param param_decls: One or more option names. Defaults to the single
|
||||
value ``"--help"``.
|
||||
:param kwargs: Extra arguments are passed to :func:`option`.
|
||||
"""
|
||||
|
||||
def show_help(ctx: Context, param: Parameter, value: bool) -> None:
|
||||
"""Callback that print the help page on ``<stdout>`` and exits."""
|
||||
if value and not ctx.resilient_parsing:
|
||||
echo(ctx.get_help(), color=ctx.color)
|
||||
ctx.exit()
|
||||
|
||||
if not param_decls:
|
||||
param_decls = ("--help",)
|
||||
|
||||
kwargs.setdefault("is_flag", True)
|
||||
kwargs.setdefault("expose_value", False)
|
||||
kwargs.setdefault("is_eager", True)
|
||||
kwargs.setdefault("help", _("Show this message and exit."))
|
||||
kwargs.setdefault("callback", show_help)
|
||||
|
||||
return option(*param_decls, **kwargs)
|
||||
@ -0,0 +1,308 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import typing as t
|
||||
from gettext import gettext as _
|
||||
from gettext import ngettext
|
||||
|
||||
from ._compat import get_text_stderr
|
||||
from .globals import resolve_color_default
|
||||
from .utils import echo
|
||||
from .utils import format_filename
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from .core import Command
|
||||
from .core import Context
|
||||
from .core import Parameter
|
||||
|
||||
|
||||
def _join_param_hints(param_hint: cabc.Sequence[str] | str | None) -> str | None:
|
||||
if param_hint is not None and not isinstance(param_hint, str):
|
||||
return " / ".join(repr(x) for x in param_hint)
|
||||
|
||||
return param_hint
|
||||
|
||||
|
||||
class ClickException(Exception):
|
||||
"""An exception that Click can handle and show to the user."""
|
||||
|
||||
#: The exit code for this exception.
|
||||
exit_code = 1
|
||||
|
||||
def __init__(self, message: str) -> None:
|
||||
super().__init__(message)
|
||||
# The context will be removed by the time we print the message, so cache
|
||||
# the color settings here to be used later on (in `show`)
|
||||
self.show_color: bool | None = resolve_color_default()
|
||||
self.message = message
|
||||
|
||||
def format_message(self) -> str:
|
||||
return self.message
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.message
|
||||
|
||||
def show(self, file: t.IO[t.Any] | None = None) -> None:
|
||||
if file is None:
|
||||
file = get_text_stderr()
|
||||
|
||||
echo(
|
||||
_("Error: {message}").format(message=self.format_message()),
|
||||
file=file,
|
||||
color=self.show_color,
|
||||
)
|
||||
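# Example sketch, not part of upstream Click: a custom exception with its own
# exit code; when raised inside a command, Click prints "Error: ..." and exits
# with that code:
#
#     class ConfigError(ClickException):
#         exit_code = 3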
|
||||
|
||||
class UsageError(ClickException):
|
||||
"""An internal exception that signals a usage error. This typically
|
||||
aborts any further handling.
|
||||
|
||||
:param message: the error message to display.
|
||||
:param ctx: optionally the context that caused this error. Click will
|
||||
fill in the context automatically in some situations.
|
||||
"""
|
||||
|
||||
exit_code = 2
|
||||
|
||||
def __init__(self, message: str, ctx: Context | None = None) -> None:
|
||||
super().__init__(message)
|
||||
self.ctx = ctx
|
||||
self.cmd: Command | None = self.ctx.command if self.ctx else None
|
||||
|
||||
def show(self, file: t.IO[t.Any] | None = None) -> None:
|
||||
if file is None:
|
||||
file = get_text_stderr()
|
||||
color = None
|
||||
hint = ""
|
||||
if (
|
||||
self.ctx is not None
|
||||
and self.ctx.command.get_help_option(self.ctx) is not None
|
||||
):
|
||||
hint = _("Try '{command} {option}' for help.").format(
|
||||
command=self.ctx.command_path, option=self.ctx.help_option_names[0]
|
||||
)
|
||||
hint = f"{hint}\n"
|
||||
if self.ctx is not None:
|
||||
color = self.ctx.color
|
||||
echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color)
|
||||
echo(
|
||||
_("Error: {message}").format(message=self.format_message()),
|
||||
file=file,
|
||||
color=color,
|
||||
)
|
||||
|
||||
|
||||
class BadParameter(UsageError):
|
||||
"""An exception that formats out a standardized error message for a
|
||||
bad parameter. This is useful when thrown from a callback or type as
|
||||
Click will attach contextual information to it (for instance, which
|
||||
parameter it is).
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param param: the parameter object that caused this error. This can
|
||||
be left out, and Click will attach this info itself
|
||||
if possible.
|
||||
:param param_hint: a string that shows up as parameter name. This
|
||||
can be used as an alternative to `param` in cases
|
||||
where custom validation should happen. If it is
|
||||
a string it's used as such; if it's a list then
|
||||
each item is quoted and separated.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str,
|
||||
ctx: Context | None = None,
|
||||
param: Parameter | None = None,
|
||||
param_hint: cabc.Sequence[str] | str | None = None,
|
||||
) -> None:
|
||||
super().__init__(message, ctx)
|
||||
self.param = param
|
||||
self.param_hint = param_hint
|
||||
|
||||
def format_message(self) -> str:
|
||||
if self.param_hint is not None:
|
||||
param_hint = self.param_hint
|
||||
elif self.param is not None:
|
||||
param_hint = self.param.get_error_hint(self.ctx) # type: ignore
|
||||
else:
|
||||
return _("Invalid value: {message}").format(message=self.message)
|
||||
|
||||
return _("Invalid value for {param_hint}: {message}").format(
|
||||
param_hint=_join_param_hints(param_hint), message=self.message
|
||||
)
|
||||
|
||||
|
||||
class MissingParameter(BadParameter):
|
||||
"""Raised if click required an option or argument but it was not
|
||||
provided when invoking the script.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
|
||||
:param param_type: a string that indicates the type of the parameter.
|
||||
The default is to inherit the parameter type from
|
||||
the given `param`. Valid values are ``'parameter'``,
|
||||
``'option'`` or ``'argument'``.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str | None = None,
|
||||
ctx: Context | None = None,
|
||||
param: Parameter | None = None,
|
||||
param_hint: cabc.Sequence[str] | str | None = None,
|
||||
param_type: str | None = None,
|
||||
) -> None:
|
||||
super().__init__(message or "", ctx, param, param_hint)
|
||||
self.param_type = param_type
|
||||
|
||||
def format_message(self) -> str:
|
||||
if self.param_hint is not None:
|
||||
param_hint: cabc.Sequence[str] | str | None = self.param_hint
|
||||
elif self.param is not None:
|
||||
param_hint = self.param.get_error_hint(self.ctx) # type: ignore
|
||||
else:
|
||||
param_hint = None
|
||||
|
||||
param_hint = _join_param_hints(param_hint)
|
||||
param_hint = f" {param_hint}" if param_hint else ""
|
||||
|
||||
param_type = self.param_type
|
||||
if param_type is None and self.param is not None:
|
||||
param_type = self.param.param_type_name
|
||||
|
||||
msg = self.message
|
||||
if self.param is not None:
|
||||
msg_extra = self.param.type.get_missing_message(
|
||||
param=self.param, ctx=self.ctx
|
||||
)
|
||||
if msg_extra:
|
||||
if msg:
|
||||
msg += f". {msg_extra}"
|
||||
else:
|
||||
msg = msg_extra
|
||||
|
||||
msg = f" {msg}" if msg else ""
|
||||
|
||||
# Translate param_type for known types.
|
||||
if param_type == "argument":
|
||||
missing = _("Missing argument")
|
||||
elif param_type == "option":
|
||||
missing = _("Missing option")
|
||||
elif param_type == "parameter":
|
||||
missing = _("Missing parameter")
|
||||
else:
|
||||
missing = _("Missing {param_type}").format(param_type=param_type)
|
||||
|
||||
return f"{missing}{param_hint}.{msg}"
|
||||
|
||||
def __str__(self) -> str:
|
||||
if not self.message:
|
||||
param_name = self.param.name if self.param else None
|
||||
return _("Missing parameter: {param_name}").format(param_name=param_name)
|
||||
else:
|
||||
return self.message
|
||||
|
||||
|
||||
class NoSuchOption(UsageError):
|
||||
"""Raised if click attempted to handle an option that does not
|
||||
exist.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
option_name: str,
|
||||
message: str | None = None,
|
||||
possibilities: cabc.Sequence[str] | None = None,
|
||||
ctx: Context | None = None,
|
||||
) -> None:
|
||||
if message is None:
|
||||
message = _("No such option: {name}").format(name=option_name)
|
||||
|
||||
super().__init__(message, ctx)
|
||||
self.option_name = option_name
|
||||
self.possibilities = possibilities
|
||||
|
||||
def format_message(self) -> str:
|
||||
if not self.possibilities:
|
||||
return self.message
|
||||
|
||||
possibility_str = ", ".join(sorted(self.possibilities))
|
||||
suggest = ngettext(
|
||||
"Did you mean {possibility}?",
|
||||
"(Possible options: {possibilities})",
|
||||
len(self.possibilities),
|
||||
).format(possibility=possibility_str, possibilities=possibility_str)
|
||||
return f"{self.message} {suggest}"
|
||||
|
||||
|
||||
class BadOptionUsage(UsageError):
|
||||
"""Raised if an option is generally supplied but the use of the option
|
||||
was incorrect. This is for instance raised if the number of arguments
|
||||
for an option is not correct.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
|
||||
:param option_name: the name of the option being used incorrectly.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, option_name: str, message: str, ctx: Context | None = None
|
||||
) -> None:
|
||||
super().__init__(message, ctx)
|
||||
self.option_name = option_name
|
||||
|
||||
|
||||
class BadArgumentUsage(UsageError):
|
||||
"""Raised if an argument is generally supplied but the use of the argument
|
||||
was incorrect. This is for instance raised if the number of values
|
||||
for an argument is not correct.
|
||||
|
||||
.. versionadded:: 6.0
|
||||
"""
|
||||
|
||||
|
||||
class NoArgsIsHelpError(UsageError):
|
||||
def __init__(self, ctx: Context) -> None:
|
||||
self.ctx: Context
|
||||
super().__init__(ctx.get_help(), ctx=ctx)
|
||||
|
||||
def show(self, file: t.IO[t.Any] | None = None) -> None:
|
||||
echo(self.format_message(), file=file, err=True, color=self.ctx.color)
|
||||
|
||||
|
||||
class FileError(ClickException):
|
||||
"""Raised if a file cannot be opened."""
|
||||
|
||||
def __init__(self, filename: str, hint: str | None = None) -> None:
|
||||
if hint is None:
|
||||
hint = _("unknown error")
|
||||
|
||||
super().__init__(hint)
|
||||
self.ui_filename: str = format_filename(filename)
|
||||
self.filename = filename
|
||||
|
||||
def format_message(self) -> str:
|
||||
return _("Could not open file {filename!r}: {message}").format(
|
||||
filename=self.ui_filename, message=self.message
|
||||
)
|
||||
|
||||
|
||||
class Abort(RuntimeError):
|
||||
"""An internal signalling exception that signals Click to abort."""
|
||||
|
||||
|
||||
class Exit(RuntimeError):
|
||||
"""An exception that indicates that the application should exit with some
|
||||
status code.
|
||||
|
||||
:param code: the status code to exit with.
|
||||
"""
|
||||
|
||||
__slots__ = ("exit_code",)
|
||||
|
||||
def __init__(self, code: int = 0) -> None:
|
||||
self.exit_code: int = code
|
||||
@ -0,0 +1,301 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
from contextlib import contextmanager
|
||||
from gettext import gettext as _
|
||||
|
||||
from ._compat import term_len
|
||||
from .parser import _split_opt
|
||||
|
||||
# Can force a width. This is used by the test system
|
||||
FORCED_WIDTH: int | None = None
|
||||
|
||||
|
||||
def measure_table(rows: cabc.Iterable[tuple[str, str]]) -> tuple[int, ...]:
|
||||
widths: dict[int, int] = {}
|
||||
|
||||
for row in rows:
|
||||
for idx, col in enumerate(row):
|
||||
widths[idx] = max(widths.get(idx, 0), term_len(col))
|
||||
|
||||
return tuple(y for x, y in sorted(widths.items()))
|
||||
|
||||
|
||||
def iter_rows(
|
||||
rows: cabc.Iterable[tuple[str, str]], col_count: int
|
||||
) -> cabc.Iterator[tuple[str, ...]]:
|
||||
for row in rows:
|
||||
yield row + ("",) * (col_count - len(row))
|
||||
|
||||
|
||||
def wrap_text(
|
||||
text: str,
|
||||
width: int = 78,
|
||||
initial_indent: str = "",
|
||||
subsequent_indent: str = "",
|
||||
preserve_paragraphs: bool = False,
|
||||
) -> str:
|
||||
"""A helper function that intelligently wraps text. By default, it
|
||||
assumes that it operates on a single paragraph of text but if the
|
||||
`preserve_paragraphs` parameter is provided it will intelligently
|
||||
handle paragraphs (defined by two empty lines).
|
||||
|
||||
If paragraphs are handled, a paragraph can be prefixed with an empty
|
||||
line containing the ``\\b`` character (``\\x08``) to indicate that
|
||||
no rewrapping should happen in that block.
|
||||
|
||||
:param text: the text that should be rewrapped.
|
||||
:param width: the maximum width for the text.
|
||||
:param initial_indent: the initial indent that should be placed on the
|
||||
first line as a string.
|
||||
:param subsequent_indent: the indent string that should be placed on
|
||||
each consecutive line.
|
||||
:param preserve_paragraphs: if this flag is set then the wrapping will
|
||||
intelligently handle paragraphs.
|
||||
"""
|
||||
from ._textwrap import TextWrapper
|
||||
|
||||
text = text.expandtabs()
|
||||
wrapper = TextWrapper(
|
||||
width,
|
||||
initial_indent=initial_indent,
|
||||
subsequent_indent=subsequent_indent,
|
||||
replace_whitespace=False,
|
||||
)
|
||||
if not preserve_paragraphs:
|
||||
return wrapper.fill(text)
|
||||
|
||||
p: list[tuple[int, bool, str]] = []
|
||||
buf: list[str] = []
|
||||
indent = None
|
||||
|
||||
def _flush_par() -> None:
|
||||
if not buf:
|
||||
return
|
||||
if buf[0].strip() == "\b":
|
||||
p.append((indent or 0, True, "\n".join(buf[1:])))
|
||||
else:
|
||||
p.append((indent or 0, False, " ".join(buf)))
|
||||
del buf[:]
|
||||
|
||||
for line in text.splitlines():
|
||||
if not line:
|
||||
_flush_par()
|
||||
indent = None
|
||||
else:
|
||||
if indent is None:
|
||||
orig_len = term_len(line)
|
||||
line = line.lstrip()
|
||||
indent = orig_len - term_len(line)
|
||||
buf.append(line)
|
||||
_flush_par()
|
||||
|
||||
rv = []
|
||||
for indent, raw, text in p:
|
||||
with wrapper.extra_indent(" " * indent):
|
||||
if raw:
|
||||
rv.append(wrapper.indent_only(text))
|
||||
else:
|
||||
rv.append(wrapper.fill(text))
|
||||
|
||||
return "\n\n".join(rv)
|
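# Example sketch, not part of upstream Click: the \b escape described above
# keeps a block from being rewrapped:
#
#     text = "A paragraph that will be rewrapped.\n\n\b\nkeep\nthese\nlines"
#     print(wrap_text(text, width=20, preserve_paragraphs=True))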
||||
|
||||
|
||||
class HelpFormatter:
|
||||
"""This class helps with formatting text-based help pages. It's
|
||||
usually just needed for very special internal cases, but it's also
|
||||
exposed so that developers can write their own fancy outputs.
|
||||
|
||||
At present, it always writes into memory.
|
||||
|
||||
:param indent_increment: the additional increment for each level.
|
||||
:param width: the width for the text. This defaults to the terminal
|
||||
width clamped to a maximum of 78.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
indent_increment: int = 2,
|
||||
width: int | None = None,
|
||||
max_width: int | None = None,
|
||||
) -> None:
|
||||
self.indent_increment = indent_increment
|
||||
if max_width is None:
|
||||
max_width = 80
|
||||
if width is None:
|
||||
import shutil
|
||||
|
||||
width = FORCED_WIDTH
|
||||
if width is None:
|
||||
width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50)
|
||||
self.width = width
|
||||
self.current_indent: int = 0
|
||||
self.buffer: list[str] = []
|
||||
|
||||
def write(self, string: str) -> None:
|
||||
"""Writes a unicode string into the internal buffer."""
|
||||
self.buffer.append(string)
|
||||
|
||||
def indent(self) -> None:
|
||||
"""Increases the indentation."""
|
||||
self.current_indent += self.indent_increment
|
||||
|
||||
def dedent(self) -> None:
|
||||
"""Decreases the indentation."""
|
||||
self.current_indent -= self.indent_increment
|
||||
|
||||
def write_usage(self, prog: str, args: str = "", prefix: str | None = None) -> None:
|
||||
"""Writes a usage line into the buffer.
|
||||
|
||||
:param prog: the program name.
|
||||
:param args: whitespace separated list of arguments.
|
||||
:param prefix: The prefix for the first line. Defaults to
|
||||
``"Usage: "``.
|
||||
"""
|
||||
if prefix is None:
|
||||
prefix = f"{_('Usage:')} "
|
||||
|
||||
usage_prefix = f"{prefix:>{self.current_indent}}{prog} "
|
||||
text_width = self.width - self.current_indent
|
||||
|
||||
if text_width >= (term_len(usage_prefix) + 20):
|
||||
# The arguments will fit to the right of the prefix.
|
||||
indent = " " * term_len(usage_prefix)
|
||||
self.write(
|
||||
wrap_text(
|
||||
args,
|
||||
text_width,
|
||||
initial_indent=usage_prefix,
|
||||
subsequent_indent=indent,
|
||||
)
|
||||
)
|
||||
else:
|
||||
# The prefix is too long, put the arguments on the next line.
|
||||
self.write(usage_prefix)
|
||||
self.write("\n")
|
||||
indent = " " * (max(self.current_indent, term_len(prefix)) + 4)
|
||||
self.write(
|
||||
wrap_text(
|
||||
args, text_width, initial_indent=indent, subsequent_indent=indent
|
||||
)
|
||||
)
|
||||
|
||||
self.write("\n")
|
||||
|
||||
def write_heading(self, heading: str) -> None:
|
||||
"""Writes a heading into the buffer."""
|
||||
self.write(f"{'':>{self.current_indent}}{heading}:\n")
|
||||
|
||||
def write_paragraph(self) -> None:
|
||||
"""Writes a paragraph into the buffer."""
|
||||
if self.buffer:
|
||||
self.write("\n")
|
||||
|
||||
def write_text(self, text: str) -> None:
|
||||
"""Writes re-indented text into the buffer. This rewraps and
|
||||
preserves paragraphs.
|
||||
"""
|
||||
indent = " " * self.current_indent
|
||||
self.write(
|
||||
wrap_text(
|
||||
text,
|
||||
self.width,
|
||||
initial_indent=indent,
|
||||
subsequent_indent=indent,
|
||||
preserve_paragraphs=True,
|
||||
)
|
||||
)
|
||||
self.write("\n")
|
||||
|
||||
def write_dl(
|
||||
self,
|
||||
rows: cabc.Sequence[tuple[str, str]],
|
||||
col_max: int = 30,
|
||||
col_spacing: int = 2,
|
||||
) -> None:
|
||||
"""Writes a definition list into the buffer. This is how options
|
||||
and commands are usually formatted.
|
||||
|
||||
:param rows: a list of two item tuples for the terms and values.
|
||||
:param col_max: the maximum width of the first column.
|
||||
:param col_spacing: the number of spaces between the first and
|
||||
second column.
|
||||
"""
|
||||
rows = list(rows)
|
||||
widths = measure_table(rows)
|
||||
if len(widths) != 2:
|
||||
raise TypeError("Expected two columns for definition list")
|
||||
|
||||
first_col = min(widths[0], col_max) + col_spacing
|
||||
|
||||
for first, second in iter_rows(rows, len(widths)):
|
||||
self.write(f"{'':>{self.current_indent}}{first}")
|
||||
if not second:
|
||||
self.write("\n")
|
||||
continue
|
||||
if term_len(first) <= first_col - col_spacing:
|
||||
self.write(" " * (first_col - term_len(first)))
|
||||
else:
|
||||
self.write("\n")
|
||||
self.write(" " * (first_col + self.current_indent))
|
||||
|
||||
text_width = max(self.width - first_col - 2, 10)
|
||||
wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True)
|
||||
lines = wrapped_text.splitlines()
|
||||
|
||||
if lines:
|
||||
self.write(f"{lines[0]}\n")
|
||||
|
||||
for line in lines[1:]:
|
||||
self.write(f"{'':>{first_col + self.current_indent}}{line}\n")
|
||||
else:
|
||||
self.write("\n")
|
||||
|
||||
@contextmanager
|
||||
def section(self, name: str) -> cabc.Iterator[None]:
|
||||
"""Helpful context manager that writes a paragraph, a heading,
|
||||
and the indents.
|
||||
|
||||
:param name: the section name that is written as heading.
|
||||
"""
|
||||
self.write_paragraph()
|
||||
self.write_heading(name)
|
||||
self.indent()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.dedent()
|
||||
|
||||
@contextmanager
|
||||
def indentation(self) -> cabc.Iterator[None]:
|
||||
"""A context manager that increases the indentation."""
|
||||
self.indent()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.dedent()
|
||||
|
||||
def getvalue(self) -> str:
|
||||
"""Returns the buffer contents."""
|
||||
return "".join(self.buffer)
|
||||
|
||||
|
||||
def join_options(options: cabc.Sequence[str]) -> tuple[str, bool]:
|
||||
"""Given a list of option strings this joins them in the most appropriate
|
||||
way and returns them in the form ``(formatted_string,
|
||||
any_prefix_is_slash)`` where the second item in the tuple is a flag that
|
||||
indicates if any of the option prefixes was a slash.
|
||||
"""
|
||||
rv = []
|
||||
any_prefix_is_slash = False
|
||||
|
||||
for opt in options:
|
||||
prefix = _split_opt(opt)[0]
|
||||
|
||||
if prefix == "/":
|
||||
any_prefix_is_slash = True
|
||||
|
||||
rv.append((len(prefix), opt))
|
||||
|
||||
rv.sort(key=lambda x: x[0])
|
||||
return ", ".join(x[1] for x in rv), any_prefix_is_slash
|
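# Example sketch, not part of upstream Click:
#
#     join_options(["--help", "-h"])   # -> ("-h, --help", False)
#     join_options(["/debug"])         # -> ("/debug", True)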
||||
@ -0,0 +1,67 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from threading import local
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from .core import Context
|
||||
|
||||
_local = local()
|
||||
|
||||
|
||||
@t.overload
|
||||
def get_current_context(silent: t.Literal[False] = False) -> Context: ...
|
||||
|
||||
|
||||
@t.overload
|
||||
def get_current_context(silent: bool = ...) -> Context | None: ...
|
||||
|
||||
|
||||
def get_current_context(silent: bool = False) -> Context | None:
|
||||
"""Returns the current click context. This can be used as a way to
|
||||
access the current context object from anywhere. This is a more implicit
|
||||
alternative to the :func:`pass_context` decorator. This function is
|
||||
primarily useful for helpers such as :func:`echo` which might be
|
||||
interested in changing its behavior based on the current context.
|
||||
|
||||
To push the current context, :meth:`Context.scope` can be used.
|
||||
|
||||
.. versionadded:: 5.0
|
||||
|
||||
:param silent: if set to `True` the return value is `None` if no context
|
||||
is available. The default behavior is to raise a
|
||||
:exc:`RuntimeError`.
|
||||
"""
|
||||
try:
|
||||
return t.cast("Context", _local.stack[-1])
|
||||
except (AttributeError, IndexError) as e:
|
||||
if not silent:
|
||||
raise RuntimeError("There is no active click context.") from e
|
||||
|
||||
return None
|
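# Example sketch, not part of upstream Click: a helper can look up the active
# context without requiring pass_context:
#
#     ctx = get_current_context(silent=True)
#     if ctx is not None:
#         ctx.obj  # whatever the surrounding command stored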
||||
|
||||
|
||||
def push_context(ctx: Context) -> None:
|
||||
"""Pushes a new context to the current stack."""
|
||||
_local.__dict__.setdefault("stack", []).append(ctx)
|
||||
|
||||
|
||||
def pop_context() -> None:
|
||||
"""Removes the top level from the stack."""
|
||||
_local.stack.pop()
|
||||
|
||||
|
||||
def resolve_color_default(color: bool | None = None) -> bool | None:
|
||||
"""Internal helper to get the default value of the color flag. If a
|
||||
value is passed it's returned unchanged, otherwise it's looked up from
|
||||
the current context.
|
||||
"""
|
||||
if color is not None:
|
||||
return color
|
||||
|
||||
ctx = get_current_context(silent=True)
|
||||
|
||||
if ctx is not None:
|
||||
return ctx.color
|
||||
|
||||
return None
|
||||
@ -0,0 +1,532 @@
|
||||
"""
|
||||
This module started out as largely a copy paste from the stdlib's
|
||||
optparse module with the features removed that we do not need from
|
||||
optparse because we implement them in Click on a higher level (for
|
||||
instance type handling, help formatting and a lot more).
|
||||
|
||||
The plan is to remove more and more from here over time.
|
||||
|
||||
The reason this is a different module and not optparse from the stdlib
|
||||
is that there are differences in 2.x and 3.x about the error messages
|
||||
generated and optparse in the stdlib uses gettext for no good reason
|
||||
and might cause us issues.
|
||||
|
||||
Click uses parts of optparse written by Gregory P. Ward and maintained
|
||||
by the Python Software Foundation. This is limited to code in parser.py.
|
||||
|
||||
Copyright 2001-2006 Gregory P. Ward. All rights reserved.
|
||||
Copyright 2002-2006 Python Software Foundation. All rights reserved.
|
||||
"""
|
||||
|
||||
# This code uses parts of optparse written by Gregory P. Ward and
|
||||
# maintained by the Python Software Foundation.
|
||||
# Copyright 2001-2006 Gregory P. Ward
|
||||
# Copyright 2002-2006 Python Software Foundation
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import typing as t
|
||||
from collections import deque
|
||||
from gettext import gettext as _
|
||||
from gettext import ngettext
|
||||
|
||||
from ._utils import FLAG_NEEDS_VALUE
|
||||
from ._utils import UNSET
|
||||
from .exceptions import BadArgumentUsage
|
||||
from .exceptions import BadOptionUsage
|
||||
from .exceptions import NoSuchOption
|
||||
from .exceptions import UsageError
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from ._utils import T_FLAG_NEEDS_VALUE
|
||||
from ._utils import T_UNSET
|
||||
from .core import Argument as CoreArgument
|
||||
from .core import Context
|
||||
from .core import Option as CoreOption
|
||||
from .core import Parameter as CoreParameter
|
||||
|
||||
V = t.TypeVar("V")
|
||||
|
||||
|
||||
def _unpack_args(
|
||||
args: cabc.Sequence[str], nargs_spec: cabc.Sequence[int]
|
||||
) -> tuple[cabc.Sequence[str | cabc.Sequence[str | None] | None], list[str]]:
|
||||
"""Given an iterable of arguments and an iterable of nargs specifications,
|
||||
it returns a tuple with all the unpacked arguments at the first index
|
||||
and all remaining arguments as the second.
|
||||
|
||||
The nargs specification is the number of arguments that should be consumed
|
||||
or `-1` to indicate that this position should eat up all the remainders.
|
||||
|
||||
Missing items are filled with ``UNSET``.
|
||||
"""
|
||||
args = deque(args)
|
||||
nargs_spec = deque(nargs_spec)
|
||||
rv: list[str | tuple[str | T_UNSET, ...] | T_UNSET] = []
|
||||
spos: int | None = None
|
||||
|
||||
def _fetch(c: deque[V]) -> V | T_UNSET:
|
||||
try:
|
||||
if spos is None:
|
||||
return c.popleft()
|
||||
else:
|
||||
return c.pop()
|
||||
except IndexError:
|
||||
return UNSET
|
||||
|
||||
while nargs_spec:
|
||||
nargs = _fetch(nargs_spec)
|
||||
|
||||
if nargs is None:
|
||||
continue
|
||||
|
||||
if nargs == 1:
|
||||
rv.append(_fetch(args)) # type: ignore[arg-type]
|
||||
elif nargs > 1:
|
||||
x = [_fetch(args) for _ in range(nargs)]
|
||||
|
||||
# If we're reversed, we're pulling in the arguments in reverse,
|
||||
# so we need to turn them around.
|
||||
if spos is not None:
|
||||
x.reverse()
|
||||
|
||||
rv.append(tuple(x))
|
||||
elif nargs < 0:
|
||||
if spos is not None:
|
||||
raise TypeError("Cannot have two nargs < 0")
|
||||
|
||||
spos = len(rv)
|
||||
rv.append(UNSET)
|
||||
|
||||
# spos is the position of the wildcard (star). If it's not `None`,
|
||||
# we fill it with the remainder.
|
||||
if spos is not None:
|
||||
rv[spos] = tuple(args)
|
||||
args = []
|
||||
rv[spos + 1 :] = reversed(rv[spos + 1 :])
|
||||
|
||||
return tuple(rv), list(args)
|
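# Example sketch, not part of upstream Click: one fixed argument followed by a
# "rest" position (nargs=-1):
#
#     _unpack_args(["a", "b", "c"], [1, -1])
#     # -> (("a", ("b", "c")), [])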
||||
|
||||
|
||||
def _split_opt(opt: str) -> tuple[str, str]:
|
||||
first = opt[:1]
|
||||
if first.isalnum():
|
||||
return "", opt
|
||||
if opt[1:2] == first:
|
||||
return opt[:2], opt[2:]
|
||||
return first, opt[1:]
|
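# Example sketch, not part of upstream Click:
#
#     _split_opt("--verbose")  # -> ("--", "verbose")
#     _split_opt("-v")         # -> ("-", "v")
#     _split_opt("value")      # -> ("", "value")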
||||
|
||||
|
||||
def _normalize_opt(opt: str, ctx: Context | None) -> str:
|
||||
if ctx is None or ctx.token_normalize_func is None:
|
||||
return opt
|
||||
prefix, opt = _split_opt(opt)
|
||||
return f"{prefix}{ctx.token_normalize_func(opt)}"
|
||||
|
||||
|
||||
class _Option:
|
||||
def __init__(
|
||||
self,
|
||||
obj: CoreOption,
|
||||
opts: cabc.Sequence[str],
|
||||
dest: str | None,
|
||||
action: str | None = None,
|
||||
nargs: int = 1,
|
||||
const: t.Any | None = None,
|
||||
):
|
||||
self._short_opts = []
|
||||
self._long_opts = []
|
||||
self.prefixes: set[str] = set()
|
||||
|
||||
for opt in opts:
|
||||
prefix, value = _split_opt(opt)
|
||||
if not prefix:
|
||||
raise ValueError(f"Invalid start character for option ({opt})")
|
||||
self.prefixes.add(prefix[0])
|
||||
if len(prefix) == 1 and len(value) == 1:
|
||||
self._short_opts.append(opt)
|
||||
else:
|
||||
self._long_opts.append(opt)
|
||||
self.prefixes.add(prefix)
|
||||
|
||||
if action is None:
|
||||
action = "store"
|
||||
|
||||
self.dest = dest
|
||||
self.action = action
|
||||
self.nargs = nargs
|
||||
self.const = const
|
||||
self.obj = obj
|
||||
|
||||
@property
|
||||
def takes_value(self) -> bool:
|
||||
return self.action in ("store", "append")
|
||||
|
||||
def process(self, value: t.Any, state: _ParsingState) -> None:
|
||||
if self.action == "store":
|
||||
state.opts[self.dest] = value # type: ignore
|
||||
elif self.action == "store_const":
|
||||
state.opts[self.dest] = self.const # type: ignore
|
||||
elif self.action == "append":
|
||||
state.opts.setdefault(self.dest, []).append(value) # type: ignore
|
||||
elif self.action == "append_const":
|
||||
state.opts.setdefault(self.dest, []).append(self.const) # type: ignore
|
||||
elif self.action == "count":
|
||||
state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore
|
||||
else:
|
||||
raise ValueError(f"unknown action '{self.action}'")
|
||||
state.order.append(self.obj)
|
||||
|
||||
|
||||
class _Argument:
|
||||
def __init__(self, obj: CoreArgument, dest: str | None, nargs: int = 1):
|
||||
self.dest = dest
|
||||
self.nargs = nargs
|
||||
self.obj = obj
|
||||
|
||||
def process(
|
||||
self,
|
||||
value: str | cabc.Sequence[str | None] | None | T_UNSET,
|
||||
state: _ParsingState,
|
||||
) -> None:
|
||||
if self.nargs > 1:
|
||||
assert isinstance(value, cabc.Sequence)
|
||||
holes = sum(1 for x in value if x is UNSET)
|
||||
if holes == len(value):
|
||||
value = UNSET
|
||||
elif holes != 0:
|
||||
raise BadArgumentUsage(
|
||||
_("Argument {name!r} takes {nargs} values.").format(
|
||||
name=self.dest, nargs=self.nargs
|
||||
)
|
||||
)
|
||||
|
||||
# We failed to collect any argument value so we consider the argument as unset.
|
||||
if value == ():
|
||||
value = UNSET
|
||||
|
||||
state.opts[self.dest] = value # type: ignore
|
||||
state.order.append(self.obj)
|
||||
|
||||
|
||||
class _ParsingState:
|
||||
def __init__(self, rargs: list[str]) -> None:
|
||||
self.opts: dict[str, t.Any] = {}
|
||||
self.largs: list[str] = []
|
||||
self.rargs = rargs
|
||||
self.order: list[CoreParameter] = []
|
||||
|
||||
|
||||
class _OptionParser:
|
||||
"""The option parser is an internal class that is ultimately used to
|
||||
parse options and arguments. It's modelled after optparse and brings
|
||||
a similar but vastly simplified API. It should generally not be used
|
||||
directly as the high level Click classes wrap it for you.
|
||||
|
||||
It's not nearly as extensible as optparse or argparse as it does not
|
||||
implement features that are implemented on a higher level (such as
|
||||
types or defaults).
|
||||
|
||||
:param ctx: optionally the :class:`~click.Context` where this parser
|
||||
should go with.
|
||||
|
||||
.. deprecated:: 8.2
|
||||
Will be removed in Click 9.0.
|
||||
"""
|
||||
|
||||
def __init__(self, ctx: Context | None = None) -> None:
|
||||
#: The :class:`~click.Context` for this parser. This might be
|
||||
#: `None` for some advanced use cases.
|
||||
self.ctx = ctx
|
||||
#: This controls how the parser deals with interspersed arguments.
|
||||
#: If this is set to `False`, the parser will stop on the first
|
||||
#: non-option. Click uses this to implement nested subcommands
|
||||
#: safely.
|
||||
self.allow_interspersed_args: bool = True
|
||||
#: This tells the parser how to deal with unknown options. By
|
||||
#: default it will error out (which is sensible), but there is a
|
||||
#: second mode where it will ignore it and continue processing
|
||||
#: after shifting all the unknown options into the resulting args.
|
||||
self.ignore_unknown_options: bool = False
|
||||
|
||||
if ctx is not None:
|
||||
self.allow_interspersed_args = ctx.allow_interspersed_args
|
||||
self.ignore_unknown_options = ctx.ignore_unknown_options
|
||||
|
||||
self._short_opt: dict[str, _Option] = {}
|
||||
self._long_opt: dict[str, _Option] = {}
|
||||
self._opt_prefixes = {"-", "--"}
|
||||
self._args: list[_Argument] = []
|
||||
|
||||
def add_option(
|
||||
self,
|
||||
obj: CoreOption,
|
||||
opts: cabc.Sequence[str],
|
||||
dest: str | None,
|
||||
action: str | None = None,
|
||||
nargs: int = 1,
|
||||
const: t.Any | None = None,
|
||||
) -> None:
|
||||
"""Adds a new option named `dest` to the parser. The destination
|
||||
is not inferred (unlike with optparse) and needs to be explicitly
|
||||
provided. Action can be any of ``store``, ``store_const``,
|
||||
``append``, ``append_const`` or ``count``.
|
||||
|
||||
The `obj` can be used to identify the option in the order list
|
||||
that is returned from the parser.
|
||||
"""
|
||||
opts = [_normalize_opt(opt, self.ctx) for opt in opts]
|
||||
option = _Option(obj, opts, dest, action=action, nargs=nargs, const=const)
|
||||
self._opt_prefixes.update(option.prefixes)
|
||||
for opt in option._short_opts:
|
||||
self._short_opt[opt] = option
|
||||
for opt in option._long_opts:
|
||||
self._long_opt[opt] = option
|
||||
|
||||
def add_argument(self, obj: CoreArgument, dest: str | None, nargs: int = 1) -> None:
|
||||
"""Adds a positional argument named `dest` to the parser.
|
||||
|
||||
The `obj` can be used to identify the argument in the order list
|
||||
that is returned from the parser.
|
||||
"""
|
||||
self._args.append(_Argument(obj, dest=dest, nargs=nargs))
|
||||
|
||||
def parse_args(
|
||||
self, args: list[str]
|
||||
) -> tuple[dict[str, t.Any], list[str], list[CoreParameter]]:
|
||||
"""Parses positional arguments and returns ``(values, args, order)``
|
||||
for the parsed options and arguments as well as the leftover
|
||||
arguments if there are any. The order is a list of objects as they
|
||||
appear on the command line. If arguments appear multiple times they
|
||||
will be memorized multiple times as well.
|
||||
"""
|
||||
state = _ParsingState(args)
|
||||
try:
|
||||
self._process_args_for_options(state)
|
||||
self._process_args_for_args(state)
|
||||
except UsageError:
|
||||
if self.ctx is None or not self.ctx.resilient_parsing:
|
||||
raise
|
||||
return state.opts, state.largs, state.order
|
||||
|
||||
def _process_args_for_args(self, state: _ParsingState) -> None:
|
||||
pargs, args = _unpack_args(
|
||||
state.largs + state.rargs, [x.nargs for x in self._args]
|
||||
)
|
||||
|
||||
for idx, arg in enumerate(self._args):
|
||||
arg.process(pargs[idx], state)
|
||||
|
||||
state.largs = args
|
||||
state.rargs = []
|
||||
|
||||
def _process_args_for_options(self, state: _ParsingState) -> None:
|
||||
while state.rargs:
|
||||
arg = state.rargs.pop(0)
|
||||
arglen = len(arg)
|
||||
# Double dashes always handled explicitly regardless of what
|
||||
# prefixes are valid.
|
||||
if arg == "--":
|
||||
return
|
||||
elif arg[:1] in self._opt_prefixes and arglen > 1:
|
||||
self._process_opts(arg, state)
|
||||
elif self.allow_interspersed_args:
|
||||
state.largs.append(arg)
|
||||
else:
|
||||
state.rargs.insert(0, arg)
|
||||
return
|
||||
|
||||
# Say this is the original argument list:
|
||||
# [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
|
||||
# ^
|
||||
# (we are about to process arg(i)).
|
||||
#
|
||||
# Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
|
||||
# [arg0, ..., arg(i-1)] (any options and their arguments will have
|
||||
# been removed from largs).
|
||||
#
|
||||
# The while loop will usually consume 1 or more arguments per pass.
|
||||
# If it consumes 1 (eg. arg is an option that takes no arguments),
|
||||
# then after _process_arg() is done the situation is:
|
||||
#
|
||||
# largs = subset of [arg0, ..., arg(i)]
|
||||
# rargs = [arg(i+1), ..., arg(N-1)]
|
||||
#
|
||||
# If allow_interspersed_args is false, largs will always be
|
||||
# *empty* -- still a subset of [arg0, ..., arg(i-1)], but
|
||||
# not a very interesting subset!
|
||||
|
||||
def _match_long_opt(
|
||||
self, opt: str, explicit_value: str | None, state: _ParsingState
|
||||
) -> None:
|
||||
if opt not in self._long_opt:
|
||||
from difflib import get_close_matches
|
||||
|
||||
possibilities = get_close_matches(opt, self._long_opt)
|
||||
raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
|
||||
|
||||
option = self._long_opt[opt]
|
||||
if option.takes_value:
|
||||
# At this point it's safe to modify rargs by injecting the
|
||||
# explicit value, because no exception is raised in this
|
||||
# branch. This means that the inserted value will be fully
|
||||
# consumed.
|
||||
if explicit_value is not None:
|
||||
state.rargs.insert(0, explicit_value)
|
||||
|
||||
value = self._get_value_from_state(opt, option, state)
|
||||
|
||||
elif explicit_value is not None:
|
||||
raise BadOptionUsage(
|
||||
opt, _("Option {name!r} does not take a value.").format(name=opt)
|
||||
)
|
||||
|
||||
else:
|
||||
value = UNSET
|
||||
|
||||
option.process(value, state)
|
||||
|
||||
def _match_short_opt(self, arg: str, state: _ParsingState) -> None:
|
||||
stop = False
|
||||
i = 1
|
||||
prefix = arg[0]
|
||||
unknown_options = []
|
||||
|
||||
for ch in arg[1:]:
|
||||
opt = _normalize_opt(f"{prefix}{ch}", self.ctx)
|
||||
option = self._short_opt.get(opt)
|
||||
i += 1
|
||||
|
||||
if not option:
|
||||
if self.ignore_unknown_options:
|
||||
unknown_options.append(ch)
|
||||
continue
|
||||
raise NoSuchOption(opt, ctx=self.ctx)
|
||||
if option.takes_value:
|
||||
# Any characters left in arg? Pretend they're the
|
||||
# next arg, and stop consuming characters of arg.
|
||||
if i < len(arg):
|
||||
state.rargs.insert(0, arg[i:])
|
||||
stop = True
|
||||
|
||||
value = self._get_value_from_state(opt, option, state)
|
||||
|
||||
else:
|
||||
value = UNSET
|
||||
|
||||
option.process(value, state)
|
||||
|
||||
if stop:
|
||||
break
|
||||
|
||||
# If we got any unknown options we recombine the string of the
|
||||
# remaining options and re-attach the prefix, then report that
|
||||
# to the state as a new larg. This way basic short-option combining
# still works while unknown options are ignored.
|
||||
if self.ignore_unknown_options and unknown_options:
|
||||
state.largs.append(f"{prefix}{''.join(unknown_options)}")
|
||||
|
||||
def _get_value_from_state(
|
||||
self, option_name: str, option: _Option, state: _ParsingState
|
||||
) -> str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE:
|
||||
nargs = option.nargs
|
||||
|
||||
value: str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE
|
||||
|
||||
if len(state.rargs) < nargs:
|
||||
if option.obj._flag_needs_value:
|
||||
# Option allows omitting the value.
|
||||
value = FLAG_NEEDS_VALUE
|
||||
else:
|
||||
raise BadOptionUsage(
|
||||
option_name,
|
||||
ngettext(
|
||||
"Option {name!r} requires an argument.",
|
||||
"Option {name!r} requires {nargs} arguments.",
|
||||
nargs,
|
||||
).format(name=option_name, nargs=nargs),
|
||||
)
|
||||
elif nargs == 1:
|
||||
next_rarg = state.rargs[0]
|
||||
|
||||
if (
|
||||
option.obj._flag_needs_value
|
||||
and isinstance(next_rarg, str)
|
||||
and next_rarg[:1] in self._opt_prefixes
|
||||
and len(next_rarg) > 1
|
||||
):
|
||||
# The next arg looks like the start of an option, don't
|
||||
# use it as the value if omitting the value is allowed.
|
||||
value = FLAG_NEEDS_VALUE
|
||||
else:
|
||||
value = state.rargs.pop(0)
|
||||
else:
|
||||
value = tuple(state.rargs[:nargs])
|
||||
del state.rargs[:nargs]
|
||||
|
||||
return value
|
||||
|
||||
def _process_opts(self, arg: str, state: _ParsingState) -> None:
|
||||
explicit_value = None
|
||||
# Long option handling happens in two parts. The first part is
|
||||
# supporting explicitly attached values. In any case, we will try
|
||||
# to long match the option first.
|
||||
if "=" in arg:
|
||||
long_opt, explicit_value = arg.split("=", 1)
|
||||
else:
|
||||
long_opt = arg
|
||||
norm_long_opt = _normalize_opt(long_opt, self.ctx)
|
||||
|
||||
# At this point we will match the (assumed) long option through
|
||||
# the long option matching code. Note that this allows options
|
||||
# like "-foo" to be matched as long options.
|
||||
try:
|
||||
self._match_long_opt(norm_long_opt, explicit_value, state)
|
||||
except NoSuchOption:
|
||||
# At this point the long option matching failed, and we need
|
||||
# to try with short options. However there is a special rule
|
||||
# which says that if we have a two-character options prefix
# (as applies to "--foo", for instance), we do not dispatch to the
# short option code and instead raise the no-such-option error.
|
||||
if arg[:2] not in self._opt_prefixes:
|
||||
self._match_short_opt(arg, state)
|
||||
return
|
||||
|
||||
if not self.ignore_unknown_options:
|
||||
raise
|
||||
|
||||
state.largs.append(arg)
|
||||
|
||||
|
||||
def __getattr__(name: str) -> object:
|
||||
import warnings
|
||||
|
||||
if name in {
|
||||
"OptionParser",
|
||||
"Argument",
|
||||
"Option",
|
||||
"split_opt",
|
||||
"normalize_opt",
|
||||
"ParsingState",
|
||||
}:
|
||||
warnings.warn(
|
||||
f"'parser.{name}' is deprecated and will be removed in Click 9.0."
|
||||
" The old parser is available in 'optparse'.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return globals()[f"_{name}"]
|
||||
|
||||
if name == "split_arg_string":
|
||||
from .shell_completion import split_arg_string
|
||||
|
||||
warnings.warn(
|
||||
"Importing 'parser.split_arg_string' is deprecated, it will only be"
|
||||
" available in 'shell_completion' in Click 9.0.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return split_arg_string
|
||||
|
||||
raise AttributeError(name)
|
||||
@ -0,0 +1,667 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import os
|
||||
import re
|
||||
import typing as t
|
||||
from gettext import gettext as _
|
||||
|
||||
from .core import Argument
|
||||
from .core import Command
|
||||
from .core import Context
|
||||
from .core import Group
|
||||
from .core import Option
|
||||
from .core import Parameter
|
||||
from .core import ParameterSource
|
||||
from .utils import echo
|
||||
|
||||
|
||||
def shell_complete(
|
||||
cli: Command,
|
||||
ctx_args: cabc.MutableMapping[str, t.Any],
|
||||
prog_name: str,
|
||||
complete_var: str,
|
||||
instruction: str,
|
||||
) -> int:
|
||||
"""Perform shell completion for the given CLI program.
|
||||
|
||||
:param cli: Command being called.
|
||||
:param ctx_args: Extra arguments to pass to
|
||||
``cli.make_context``.
|
||||
:param prog_name: Name of the executable in the shell.
|
||||
:param complete_var: Name of the environment variable that holds
|
||||
the completion instruction.
|
||||
:param instruction: Value of ``complete_var`` with the completion
|
||||
instruction and shell, in the form ``{shell}_{instruction}`` (for example ``bash_source``).
|
||||
:return: Status code to exit with.
|
||||
"""
|
||||
shell, _, instruction = instruction.partition("_")
|
||||
comp_cls = get_completion_class(shell)
|
||||
|
||||
if comp_cls is None:
|
||||
return 1
|
||||
|
||||
comp = comp_cls(cli, ctx_args, prog_name, complete_var)
|
||||
|
||||
if instruction == "source":
|
||||
echo(comp.source())
|
||||
return 0
|
||||
|
||||
if instruction == "complete":
|
||||
echo(comp.complete())
|
||||
return 0
|
||||
|
||||
return 1
|
||||
|
||||
|
||||
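# Illustrative sketch (not part of the module): how the completion entry
# point above is typically driven. The shell script exports the completion
# variable with a ``{shell}_{instruction}`` value and re-invokes the
# program; the program and variable names below are hypothetical.
#
#     # _MYCLI_COMPLETE=bash_source mycli     -> prints the bash script
#     # _MYCLI_COMPLETE=bash_complete mycli   -> prints completion data
#
#     import os
#     status = shell_complete(
#         cli, {}, "mycli", "_MYCLI_COMPLETE", os.environ["_MYCLI_COMPLETE"]
#     )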
class CompletionItem:
|
||||
"""Represents a completion value and metadata about the value. The
|
||||
default metadata is ``type`` to indicate special shell handling,
|
||||
and ``help`` if a shell supports showing a help string next to the
|
||||
value.
|
||||
|
||||
Arbitrary parameters can be passed when creating the object, and
|
||||
accessed using ``item.attr``. If an attribute wasn't passed,
|
||||
accessing it returns ``None``.
|
||||
|
||||
:param value: The completion suggestion.
|
||||
:param type: Tells the shell script to provide special completion
|
||||
support for the type. Click uses ``"dir"`` and ``"file"``.
|
||||
:param help: String shown next to the value if supported.
|
||||
:param kwargs: Arbitrary metadata. The built-in implementations
|
||||
don't use this, but custom type completions paired with custom
|
||||
shell support could use it.
|
||||
"""
|
||||
|
||||
__slots__ = ("value", "type", "help", "_info")
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
value: t.Any,
|
||||
type: str = "plain",
|
||||
help: str | None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> None:
|
||||
self.value: t.Any = value
|
||||
self.type: str = type
|
||||
self.help: str | None = help
|
||||
self._info = kwargs
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return self._info.get(name)
|
||||
|
||||
|
||||
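# Illustrative sketch (not part of the module): a CompletionItem carries a
# value plus optional metadata, and attributes that were never passed
# resolve to ``None`` through ``__getattr__``.
#
#     item = CompletionItem("build", help="Build the project", priority=10)
#     item.value     # -> "build"
#     item.help      # -> "Build the project"
#     item.priority  # -> 10 (stored in the arbitrary-kwargs dict)
#     item.missing   # -> None (never passed)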
# Only Bash >= 4.4 has the nosort option.
|
||||
_SOURCE_BASH = """\
|
||||
%(complete_func)s() {
|
||||
local IFS=$'\\n'
|
||||
local response
|
||||
|
||||
response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \
|
||||
%(complete_var)s=bash_complete $1)
|
||||
|
||||
for completion in $response; do
|
||||
IFS=',' read type value <<< "$completion"
|
||||
|
||||
if [[ $type == 'dir' ]]; then
|
||||
COMPREPLY=()
|
||||
compopt -o dirnames
|
||||
elif [[ $type == 'file' ]]; then
|
||||
COMPREPLY=()
|
||||
compopt -o default
|
||||
elif [[ $type == 'plain' ]]; then
|
||||
COMPREPLY+=($value)
|
||||
fi
|
||||
done
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
%(complete_func)s_setup() {
|
||||
complete -o nosort -F %(complete_func)s %(prog_name)s
|
||||
}
|
||||
|
||||
%(complete_func)s_setup;
|
||||
"""
|
||||
|
||||
# See ZshComplete.format_completion below, and issue #2703, before
|
||||
# changing this script.
|
||||
#
|
||||
# (TL;DR: _describe is picky about the format, but this Zsh script snippet
|
||||
# is already widely deployed. So freeze this script, and use clever-ish
|
||||
# handling of colons in ZshComplete.format_completion.)
|
||||
_SOURCE_ZSH = """\
|
||||
#compdef %(prog_name)s
|
||||
|
||||
%(complete_func)s() {
|
||||
local -a completions
|
||||
local -a completions_with_descriptions
|
||||
local -a response
|
||||
(( ! $+commands[%(prog_name)s] )) && return 1
|
||||
|
||||
response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \
|
||||
%(complete_var)s=zsh_complete %(prog_name)s)}")
|
||||
|
||||
for type key descr in ${response}; do
|
||||
if [[ "$type" == "plain" ]]; then
|
||||
if [[ "$descr" == "_" ]]; then
|
||||
completions+=("$key")
|
||||
else
|
||||
completions_with_descriptions+=("$key":"$descr")
|
||||
fi
|
||||
elif [[ "$type" == "dir" ]]; then
|
||||
_path_files -/
|
||||
elif [[ "$type" == "file" ]]; then
|
||||
_path_files -f
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$completions_with_descriptions" ]; then
|
||||
_describe -V unsorted completions_with_descriptions -U
|
||||
fi
|
||||
|
||||
if [ -n "$completions" ]; then
|
||||
compadd -U -V unsorted -a completions
|
||||
fi
|
||||
}
|
||||
|
||||
if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
|
||||
# autoload from fpath, call function directly
|
||||
%(complete_func)s "$@"
|
||||
else
|
||||
# eval/source/. command, register function for later
|
||||
compdef %(complete_func)s %(prog_name)s
|
||||
fi
|
||||
"""
|
||||
|
||||
_SOURCE_FISH = """\
|
||||
function %(complete_func)s;
|
||||
set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \
|
||||
COMP_CWORD=(commandline -t) %(prog_name)s);
|
||||
|
||||
for completion in $response;
|
||||
set -l metadata (string split "," $completion);
|
||||
|
||||
if test $metadata[1] = "dir";
|
||||
__fish_complete_directories $metadata[2];
|
||||
else if test $metadata[1] = "file";
|
||||
__fish_complete_path $metadata[2];
|
||||
else if test $metadata[1] = "plain";
|
||||
echo $metadata[2];
|
||||
end;
|
||||
end;
|
||||
end;
|
||||
|
||||
complete --no-files --command %(prog_name)s --arguments \
|
||||
"(%(complete_func)s)";
|
||||
"""
|
||||
|
||||
|
||||
class ShellComplete:
|
||||
"""Base class for providing shell completion support. A subclass for
|
||||
a given shell will override attributes and methods to implement the
|
||||
completion instructions (``source`` and ``complete``).
|
||||
|
||||
:param cli: Command being called.
|
||||
:param prog_name: Name of the executable in the shell.
|
||||
:param complete_var: Name of the environment variable that holds
|
||||
the completion instruction.
|
||||
|
||||
.. versionadded:: 8.0
|
||||
"""
|
||||
|
||||
name: t.ClassVar[str]
|
||||
"""Name to register the shell as with :func:`add_completion_class`.
|
||||
This is used in completion instructions (``{name}_source`` and
|
||||
``{name}_complete``).
|
||||
"""
|
||||
|
||||
source_template: t.ClassVar[str]
|
||||
"""Completion script template formatted by :meth:`source`. This must
|
||||
be provided by subclasses.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
cli: Command,
|
||||
ctx_args: cabc.MutableMapping[str, t.Any],
|
||||
prog_name: str,
|
||||
complete_var: str,
|
||||
) -> None:
|
||||
self.cli = cli
|
||||
self.ctx_args = ctx_args
|
||||
self.prog_name = prog_name
|
||||
self.complete_var = complete_var
|
||||
|
||||
@property
|
||||
def func_name(self) -> str:
|
||||
"""The name of the shell function defined by the completion
|
||||
script.
|
||||
"""
|
||||
safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII)
|
||||
return f"_{safe_name}_completion"
|
||||
|
||||
def source_vars(self) -> dict[str, t.Any]:
|
||||
"""Vars for formatting :attr:`source_template`.
|
||||
|
||||
By default this provides ``complete_func``, ``complete_var``,
|
||||
and ``prog_name``.
|
||||
"""
|
||||
return {
|
||||
"complete_func": self.func_name,
|
||||
"complete_var": self.complete_var,
|
||||
"prog_name": self.prog_name,
|
||||
}
|
||||
|
||||
def source(self) -> str:
|
||||
"""Produce the shell script that defines the completion
|
||||
function. By default this ``%``-style formats
|
||||
:attr:`source_template` with the dict returned by
|
||||
:meth:`source_vars`.
|
||||
"""
|
||||
return self.source_template % self.source_vars()
|
||||
|
||||
def get_completion_args(self) -> tuple[list[str], str]:
|
||||
"""Use the env vars defined by the shell script to return a
|
||||
tuple of ``args, incomplete``. This must be implemented by
|
||||
subclasses.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_completions(self, args: list[str], incomplete: str) -> list[CompletionItem]:
|
||||
"""Determine the context and last complete command or parameter
|
||||
from the complete args. Call that object's ``shell_complete``
|
||||
method to get the completions for the incomplete value.
|
||||
|
||||
:param args: List of complete args before the incomplete value.
|
||||
:param incomplete: Value being completed. May be empty.
|
||||
"""
|
||||
ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args)
|
||||
obj, incomplete = _resolve_incomplete(ctx, args, incomplete)
|
||||
return obj.shell_complete(ctx, incomplete)
|
||||
|
||||
def format_completion(self, item: CompletionItem) -> str:
|
||||
"""Format a completion item into the form recognized by the
|
||||
shell script. This must be implemented by subclasses.
|
||||
|
||||
:param item: Completion item to format.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def complete(self) -> str:
|
||||
"""Produce the completion data to send back to the shell.
|
||||
|
||||
By default this calls :meth:`get_completion_args`, gets the
|
||||
completions, then calls :meth:`format_completion` for each
|
||||
completion.
|
||||
"""
|
||||
args, incomplete = self.get_completion_args()
|
||||
completions = self.get_completions(args, incomplete)
|
||||
out = [self.format_completion(item) for item in completions]
|
||||
return "\n".join(out)
|
||||
|
||||
|
||||
class BashComplete(ShellComplete):
|
||||
"""Shell completion for Bash."""
|
||||
|
||||
name = "bash"
|
||||
source_template = _SOURCE_BASH
|
||||
|
||||
@staticmethod
|
||||
def _check_version() -> None:
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
bash_exe = shutil.which("bash")
|
||||
|
||||
if bash_exe is None:
|
||||
match = None
|
||||
else:
|
||||
output = subprocess.run(
|
||||
[bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'],
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode())
|
||||
|
||||
if match is not None:
|
||||
major, minor = match.groups()
|
||||
|
||||
if major < "4" or major == "4" and minor < "4":
|
||||
echo(
|
||||
_(
|
||||
"Shell completion is not supported for Bash"
|
||||
" versions older than 4.4."
|
||||
),
|
||||
err=True,
|
||||
)
|
||||
else:
|
||||
echo(
|
||||
_("Couldn't detect Bash version, shell completion is not supported."),
|
||||
err=True,
|
||||
)
|
||||
|
||||
def source(self) -> str:
|
||||
self._check_version()
|
||||
return super().source()
|
||||
|
||||
def get_completion_args(self) -> tuple[list[str], str]:
|
||||
cwords = split_arg_string(os.environ["COMP_WORDS"])
|
||||
cword = int(os.environ["COMP_CWORD"])
|
||||
args = cwords[1:cword]
|
||||
|
||||
try:
|
||||
incomplete = cwords[cword]
|
||||
except IndexError:
|
||||
incomplete = ""
|
||||
|
||||
return args, incomplete
|
||||
|
||||
def format_completion(self, item: CompletionItem) -> str:
|
||||
return f"{item.type},{item.value}"
|
||||
|
||||
|
||||
class ZshComplete(ShellComplete):
|
||||
"""Shell completion for Zsh."""
|
||||
|
||||
name = "zsh"
|
||||
source_template = _SOURCE_ZSH
|
||||
|
||||
def get_completion_args(self) -> tuple[list[str], str]:
|
||||
cwords = split_arg_string(os.environ["COMP_WORDS"])
|
||||
cword = int(os.environ["COMP_CWORD"])
|
||||
args = cwords[1:cword]
|
||||
|
||||
try:
|
||||
incomplete = cwords[cword]
|
||||
except IndexError:
|
||||
incomplete = ""
|
||||
|
||||
return args, incomplete
|
||||
|
||||
def format_completion(self, item: CompletionItem) -> str:
|
||||
help_ = item.help or "_"
|
||||
# The zsh completion script uses `_describe` on items with help
|
||||
# texts (which splits the item help from the item value at the
|
||||
# first unescaped colon) and `compadd` on items without help
|
||||
# text (which uses the item value as-is and does not support
|
||||
# colon escaping). So escape colons in the item value if and
|
||||
# only if the item help is not the sentinel "_" value, as used
|
||||
# by the completion script.
|
||||
#
|
||||
# (The zsh completion script is potentially widely deployed, and
|
||||
# thus harder to fix than this method.)
|
||||
#
|
||||
# See issue #1812 and issue #2703 for further context.
|
||||
value = item.value.replace(":", r"\:") if help_ != "_" else item.value
|
||||
return f"{item.type}\n{value}\n{help_}"
|
||||
|
||||
|
||||
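# Illustrative sketch (not part of the module): what the zsh formatter above
# emits. With help text, colons in the value are escaped so ``_describe``
# splits at the right place; without help text (the "_" sentinel) the value
# is left untouched for ``compadd``. The CLI arguments are hypothetical.
#
#     comp = ZshComplete(cli, {}, "mycli", "_MYCLI_COMPLETE")
#     comp.format_completion(CompletionItem("a:b", help="x"))  # -> "plain\na\:b\nx"
#     comp.format_completion(CompletionItem("a:b"))            # -> "plain\na:b\n_"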
class FishComplete(ShellComplete):
|
||||
"""Shell completion for Fish."""
|
||||
|
||||
name = "fish"
|
||||
source_template = _SOURCE_FISH
|
||||
|
||||
def get_completion_args(self) -> tuple[list[str], str]:
|
||||
cwords = split_arg_string(os.environ["COMP_WORDS"])
|
||||
incomplete = os.environ["COMP_CWORD"]
|
||||
if incomplete:
|
||||
incomplete = split_arg_string(incomplete)[0]
|
||||
args = cwords[1:]
|
||||
|
||||
# Fish stores the partial word in both COMP_WORDS and
|
||||
# COMP_CWORD, remove it from complete args.
|
||||
if incomplete and args and args[-1] == incomplete:
|
||||
args.pop()
|
||||
|
||||
return args, incomplete
|
||||
|
||||
def format_completion(self, item: CompletionItem) -> str:
|
||||
if item.help:
|
||||
return f"{item.type},{item.value}\t{item.help}"
|
||||
|
||||
return f"{item.type},{item.value}"
|
||||
|
||||
|
||||
ShellCompleteType = t.TypeVar("ShellCompleteType", bound="type[ShellComplete]")
|
||||
|
||||
|
||||
_available_shells: dict[str, type[ShellComplete]] = {
|
||||
"bash": BashComplete,
|
||||
"fish": FishComplete,
|
||||
"zsh": ZshComplete,
|
||||
}
|
||||
|
||||
|
||||
def add_completion_class(
|
||||
cls: ShellCompleteType, name: str | None = None
|
||||
) -> ShellCompleteType:
|
||||
"""Register a :class:`ShellComplete` subclass under the given name.
|
||||
The name will be provided by the completion instruction environment
|
||||
variable during completion.
|
||||
|
||||
:param cls: The completion class that will handle completion for the
|
||||
shell.
|
||||
:param name: Name to register the class under. Defaults to the
|
||||
class's ``name`` attribute.
|
||||
"""
|
||||
if name is None:
|
||||
name = cls.name
|
||||
|
||||
_available_shells[name] = cls
|
||||
|
||||
return cls
|
||||
|
||||
|
||||
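# Illustrative sketch (not part of the module): registering completion
# support for an additional shell. The subclass, its template, and the
# environment variables it reads are hypothetical; only the two class
# attributes and the two abstract methods need to be supplied.
#
#     class MyShellComplete(ShellComplete):
#         name = "myshell"
#         source_template = "... shell script using %(complete_func)s ..."
#
#         def get_completion_args(self):
#             import os
#             cwords = split_arg_string(os.environ["COMP_WORDS"])
#             return cwords[1:], os.environ.get("COMP_INCOMPLETE", "")
#
#         def format_completion(self, item):
#             return f"{item.type},{item.value}"
#
#     add_completion_class(MyShellComplete)
#     get_completion_class("myshell")  # -> MyShellComplete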
def get_completion_class(shell: str) -> type[ShellComplete] | None:
|
||||
"""Look up a registered :class:`ShellComplete` subclass by the name
|
||||
provided by the completion instruction environment variable. If the
|
||||
name isn't registered, returns ``None``.
|
||||
|
||||
:param shell: Name the class is registered under.
|
||||
"""
|
||||
return _available_shells.get(shell)
|
||||
|
||||
|
||||
def split_arg_string(string: str) -> list[str]:
|
||||
"""Split an argument string as with :func:`shlex.split`, but don't
|
||||
fail if the string is incomplete. Ignores a missing closing quote or
|
||||
incomplete escape sequence and uses the partial token as-is.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
split_arg_string("example 'my file")
|
||||
["example", "my file"]
|
||||
|
||||
split_arg_string("example my\\")
|
||||
["example", "my"]
|
||||
|
||||
:param string: String to split.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
Moved to ``shell_completion`` from ``parser``.
|
||||
"""
|
||||
import shlex
|
||||
|
||||
lex = shlex.shlex(string, posix=True)
|
||||
lex.whitespace_split = True
|
||||
lex.commenters = ""
|
||||
out = []
|
||||
|
||||
try:
|
||||
for token in lex:
|
||||
out.append(token)
|
||||
except ValueError:
|
||||
# Raised when end-of-string is reached in an invalid state. Use
|
||||
# the partial token as-is. The quote or escape character is in
|
||||
# lex.state, not lex.token.
|
||||
out.append(lex.token)
|
||||
|
||||
return out
|
||||
|
||||
|
||||
def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool:
|
||||
"""Determine if the given parameter is an argument that can still
|
||||
accept values.
|
||||
|
||||
:param ctx: Invocation context for the command represented by the
|
||||
parsed complete args.
|
||||
:param param: Argument object being checked.
|
||||
"""
|
||||
if not isinstance(param, Argument):
|
||||
return False
|
||||
|
||||
assert param.name is not None
|
||||
# Will be None if expose_value is False.
|
||||
value = ctx.params.get(param.name)
|
||||
return (
|
||||
param.nargs == -1
|
||||
or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE
|
||||
or (
|
||||
param.nargs > 1
|
||||
and isinstance(value, (tuple, list))
|
||||
and len(value) < param.nargs
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def _start_of_option(ctx: Context, value: str) -> bool:
|
||||
"""Check if the value looks like the start of an option."""
|
||||
if not value:
|
||||
return False
|
||||
|
||||
c = value[0]
|
||||
return c in ctx._opt_prefixes
|
||||
|
||||
|
||||
def _is_incomplete_option(ctx: Context, args: list[str], param: Parameter) -> bool:
|
||||
"""Determine if the given parameter is an option that needs a value.
|
||||
|
||||
:param args: List of complete args before the incomplete value.
|
||||
:param param: Option object being checked.
|
||||
"""
|
||||
if not isinstance(param, Option):
|
||||
return False
|
||||
|
||||
if param.is_flag or param.count:
|
||||
return False
|
||||
|
||||
last_option = None
|
||||
|
||||
for index, arg in enumerate(reversed(args)):
|
||||
if index + 1 > param.nargs:
|
||||
break
|
||||
|
||||
if _start_of_option(ctx, arg):
|
||||
last_option = arg
|
||||
break
|
||||
|
||||
return last_option is not None and last_option in param.opts
|
||||
|
||||
|
||||
def _resolve_context(
|
||||
cli: Command,
|
||||
ctx_args: cabc.MutableMapping[str, t.Any],
|
||||
prog_name: str,
|
||||
args: list[str],
|
||||
) -> Context:
|
||||
"""Produce the context hierarchy starting with the command and
|
||||
traversing the complete arguments. This only follows the commands,
|
||||
it doesn't trigger input prompts or callbacks.
|
||||
|
||||
:param cli: Command being called.
|
||||
:param prog_name: Name of the executable in the shell.
|
||||
:param args: List of complete args before the incomplete value.
|
||||
"""
|
||||
ctx_args["resilient_parsing"] = True
|
||||
with cli.make_context(prog_name, args.copy(), **ctx_args) as ctx:
|
||||
args = ctx._protected_args + ctx.args
|
||||
|
||||
while args:
|
||||
command = ctx.command
|
||||
|
||||
if isinstance(command, Group):
|
||||
if not command.chain:
|
||||
name, cmd, args = command.resolve_command(ctx, args)
|
||||
|
||||
if cmd is None:
|
||||
return ctx
|
||||
|
||||
with cmd.make_context(
|
||||
name, args, parent=ctx, resilient_parsing=True
|
||||
) as sub_ctx:
|
||||
ctx = sub_ctx
|
||||
args = ctx._protected_args + ctx.args
|
||||
else:
|
||||
sub_ctx = ctx
|
||||
|
||||
while args:
|
||||
name, cmd, args = command.resolve_command(ctx, args)
|
||||
|
||||
if cmd is None:
|
||||
return ctx
|
||||
|
||||
with cmd.make_context(
|
||||
name,
|
||||
args,
|
||||
parent=ctx,
|
||||
allow_extra_args=True,
|
||||
allow_interspersed_args=False,
|
||||
resilient_parsing=True,
|
||||
) as sub_sub_ctx:
|
||||
sub_ctx = sub_sub_ctx
|
||||
args = sub_ctx.args
|
||||
|
||||
ctx = sub_ctx
|
||||
args = [*sub_ctx._protected_args, *sub_ctx.args]
|
||||
else:
|
||||
break
|
||||
|
||||
return ctx
|
||||
|
||||
|
||||
def _resolve_incomplete(
|
||||
ctx: Context, args: list[str], incomplete: str
|
||||
) -> tuple[Command | Parameter, str]:
|
||||
"""Find the Click object that will handle the completion of the
|
||||
incomplete value. Return the object and the incomplete value.
|
||||
|
||||
:param ctx: Invocation context for the command represented by
|
||||
the parsed complete args.
|
||||
:param args: List of complete args before the incomplete value.
|
||||
:param incomplete: Value being completed. May be empty.
|
||||
"""
|
||||
# Different shells treat an "=" between a long option name and
|
||||
# value differently. Might keep the value joined, return the "="
|
||||
# as a separate item, or return the split name and value. Always
|
||||
# split and discard the "=" to make completion easier.
|
||||
if incomplete == "=":
|
||||
incomplete = ""
|
||||
elif "=" in incomplete and _start_of_option(ctx, incomplete):
|
||||
name, _, incomplete = incomplete.partition("=")
|
||||
args.append(name)
|
||||
|
||||
# The "--" marker tells Click to stop treating values as options
|
||||
# even if they start with the option character. If it hasn't been
|
||||
# given and the incomplete arg looks like an option, the current
|
||||
# command will provide option name completions.
|
||||
if "--" not in args and _start_of_option(ctx, incomplete):
|
||||
return ctx.command, incomplete
|
||||
|
||||
params = ctx.command.get_params(ctx)
|
||||
|
||||
# If the last complete arg is an option name with an incomplete
|
||||
# value, the option will provide value completions.
|
||||
for param in params:
|
||||
if _is_incomplete_option(ctx, args, param):
|
||||
return param, incomplete
|
||||
|
||||
# It's not an option name or value. The first argument without a
|
||||
# parsed value will provide value completions.
|
||||
for param in params:
|
||||
if _is_incomplete_argument(ctx, param):
|
||||
return param, incomplete
|
||||
|
||||
# There were no unparsed arguments, the command may be a group that
|
||||
# will provide command name completions.
|
||||
return ctx.command, incomplete
|
||||
@ -0,0 +1,877 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import inspect
|
||||
import io
|
||||
import itertools
|
||||
import sys
|
||||
import typing as t
|
||||
from contextlib import AbstractContextManager
|
||||
from gettext import gettext as _
|
||||
|
||||
from ._compat import isatty
|
||||
from ._compat import strip_ansi
|
||||
from .exceptions import Abort
|
||||
from .exceptions import UsageError
|
||||
from .globals import resolve_color_default
|
||||
from .types import Choice
|
||||
from .types import convert_type
|
||||
from .types import ParamType
|
||||
from .utils import echo
|
||||
from .utils import LazyFile
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from ._termui_impl import ProgressBar
|
||||
|
||||
V = t.TypeVar("V")
|
||||
|
||||
# The prompt functions to use. The doc tools currently override these
|
||||
# functions to customize how they work.
|
||||
visible_prompt_func: t.Callable[[str], str] = input
|
||||
|
||||
_ansi_colors = {
|
||||
"black": 30,
|
||||
"red": 31,
|
||||
"green": 32,
|
||||
"yellow": 33,
|
||||
"blue": 34,
|
||||
"magenta": 35,
|
||||
"cyan": 36,
|
||||
"white": 37,
|
||||
"reset": 39,
|
||||
"bright_black": 90,
|
||||
"bright_red": 91,
|
||||
"bright_green": 92,
|
||||
"bright_yellow": 93,
|
||||
"bright_blue": 94,
|
||||
"bright_magenta": 95,
|
||||
"bright_cyan": 96,
|
||||
"bright_white": 97,
|
||||
}
|
||||
_ansi_reset_all = "\033[0m"
|
||||
|
||||
|
||||
def hidden_prompt_func(prompt: str) -> str:
|
||||
import getpass
|
||||
|
||||
return getpass.getpass(prompt)
|
||||
|
||||
|
||||
def _build_prompt(
|
||||
text: str,
|
||||
suffix: str,
|
||||
show_default: bool = False,
|
||||
default: t.Any | None = None,
|
||||
show_choices: bool = True,
|
||||
type: ParamType | None = None,
|
||||
) -> str:
|
||||
prompt = text
|
||||
if type is not None and show_choices and isinstance(type, Choice):
|
||||
prompt += f" ({', '.join(map(str, type.choices))})"
|
||||
if default is not None and show_default:
|
||||
prompt = f"{prompt} [{_format_default(default)}]"
|
||||
return f"{prompt}{suffix}"
|
||||
|
||||
|
||||
def _format_default(default: t.Any) -> t.Any:
|
||||
if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"):
|
||||
return default.name
|
||||
|
||||
return default
|
||||
|
||||
|
||||
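# Illustrative sketch (not part of the module): how the two helpers above
# compose a prompt string.
#
#     _build_prompt("Continue?", ": ", show_default=True, default="y")
#     # -> "Continue? [y]: "
#     _build_prompt("Color", ": ", True, "red", True, Choice(["red", "blue"]))
#     # -> "Color (red, blue) [red]: "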
def prompt(
|
||||
text: str,
|
||||
default: t.Any | None = None,
|
||||
hide_input: bool = False,
|
||||
confirmation_prompt: bool | str = False,
|
||||
type: ParamType | t.Any | None = None,
|
||||
value_proc: t.Callable[[str], t.Any] | None = None,
|
||||
prompt_suffix: str = ": ",
|
||||
show_default: bool = True,
|
||||
err: bool = False,
|
||||
show_choices: bool = True,
|
||||
) -> t.Any:
|
||||
"""Prompts a user for input. This is a convenience function that can
|
||||
be used to prompt a user for input later.
|
||||
|
||||
If the user aborts the input by sending an interrupt signal, this
|
||||
function will catch it and raise a :exc:`Abort` exception.
|
||||
|
||||
:param text: the text to show for the prompt.
|
||||
:param default: the default value to use if no input happens. If this
|
||||
is not given it will prompt until it's aborted.
|
||||
:param hide_input: if this is set to true then the input value will
|
||||
be hidden.
|
||||
:param confirmation_prompt: Prompt a second time to confirm the
|
||||
value. Can be set to a string instead of ``True`` to customize
|
||||
the message.
|
||||
:param type: the type to use to check the value against.
|
||||
:param value_proc: if this parameter is provided it's a function that
|
||||
is invoked instead of the type conversion to
|
||||
convert a value.
|
||||
:param prompt_suffix: a suffix that should be added to the prompt.
|
||||
:param show_default: shows or hides the default value in the prompt.
|
||||
:param err: if set to true the file defaults to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
:param show_choices: Show or hide choices if the passed type is a Choice.
|
||||
For example if type is a Choice of either day or week,
|
||||
show_choices is true and text is "Group by" then the
|
||||
prompt will be "Group by (day, week): ".
|
||||
|
||||
.. versionadded:: 8.0
|
||||
``confirmation_prompt`` can be a custom string.
|
||||
|
||||
.. versionadded:: 7.0
|
||||
Added the ``show_choices`` parameter.
|
||||
|
||||
.. versionadded:: 6.0
|
||||
Added unicode support for cmd.exe on Windows.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `err` parameter.
|
||||
|
||||
"""
|
||||
|
||||
def prompt_func(text: str) -> str:
|
||||
f = hidden_prompt_func if hide_input else visible_prompt_func
|
||||
try:
|
||||
# Write the prompt separately so that we get nice
|
||||
# coloring through colorama on Windows
|
||||
echo(text.rstrip(" "), nl=False, err=err)
|
||||
# Echo a space to stdout to work around an issue where
|
||||
# readline causes backspace to clear the whole line.
|
||||
return f(" ")
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
# getpass doesn't print a newline if the user aborts input with ^C.
|
||||
# Allegedly this behavior is inherited from getpass(3).
|
||||
# A doc bug has been filed at https://bugs.python.org/issue24711
|
||||
if hide_input:
|
||||
echo(None, err=err)
|
||||
raise Abort() from None
|
||||
|
||||
if value_proc is None:
|
||||
value_proc = convert_type(type, default)
|
||||
|
||||
prompt = _build_prompt(
|
||||
text, prompt_suffix, show_default, default, show_choices, type
|
||||
)
|
||||
|
||||
if confirmation_prompt:
|
||||
if confirmation_prompt is True:
|
||||
confirmation_prompt = _("Repeat for confirmation")
|
||||
|
||||
confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix)
|
||||
|
||||
while True:
|
||||
while True:
|
||||
value = prompt_func(prompt)
|
||||
if value:
|
||||
break
|
||||
elif default is not None:
|
||||
value = default
|
||||
break
|
||||
try:
|
||||
result = value_proc(value)
|
||||
except UsageError as e:
|
||||
if hide_input:
|
||||
echo(_("Error: The value you entered was invalid."), err=err)
|
||||
else:
|
||||
echo(_("Error: {e.message}").format(e=e), err=err)
|
||||
continue
|
||||
if not confirmation_prompt:
|
||||
return result
|
||||
while True:
|
||||
value2 = prompt_func(confirmation_prompt)
|
||||
is_empty = not value and not value2
|
||||
if value2 or is_empty:
|
||||
break
|
||||
if value == value2:
|
||||
return result
|
||||
echo(_("Error: The two entered values do not match."), err=err)
|
||||
|
||||
|
||||
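# Illustrative sketch (not part of the module): typical ways to call
# ``prompt``. The returned values assume interactive input.
#
#     name = prompt("Your name")                        # plain string
#     port = prompt("Port", default=8080, type=int)     # converted to int
#     pw = prompt("Password", hide_input=True,
#                 confirmation_prompt=True)             # asked twice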
def confirm(
|
||||
text: str,
|
||||
default: bool | None = False,
|
||||
abort: bool = False,
|
||||
prompt_suffix: str = ": ",
|
||||
show_default: bool = True,
|
||||
err: bool = False,
|
||||
) -> bool:
|
||||
"""Prompts for confirmation (yes/no question).
|
||||
|
||||
If the user aborts the input by sending an interrupt signal, this
|
||||
function will catch it and raise a :exc:`Abort` exception.
|
||||
|
||||
:param text: the question to ask.
|
||||
:param default: The default value to use when no input is given. If
|
||||
``None``, repeat until input is given.
|
||||
:param abort: if this is set to `True` a negative answer aborts the
execution by raising :exc:`Abort`.
|
||||
:param prompt_suffix: a suffix that should be added to the prompt.
|
||||
:param show_default: shows or hides the default value in the prompt.
|
||||
:param err: if set to true the file defaults to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Repeat until input is given if ``default`` is ``None``.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the ``err`` parameter.
|
||||
"""
|
||||
prompt = _build_prompt(
|
||||
text,
|
||||
prompt_suffix,
|
||||
show_default,
|
||||
"y/n" if default is None else ("Y/n" if default else "y/N"),
|
||||
)
|
||||
|
||||
while True:
|
||||
try:
|
||||
# Write the prompt separately so that we get nice
|
||||
# coloring through colorama on Windows
|
||||
echo(prompt.rstrip(" "), nl=False, err=err)
|
||||
# Echo a space to stdout to work around an issue where
|
||||
# readline causes backspace to clear the whole line.
|
||||
value = visible_prompt_func(" ").lower().strip()
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
raise Abort() from None
|
||||
if value in ("y", "yes"):
|
||||
rv = True
|
||||
elif value in ("n", "no"):
|
||||
rv = False
|
||||
elif default is not None and value == "":
|
||||
rv = default
|
||||
else:
|
||||
echo(_("Error: invalid input"), err=err)
|
||||
continue
|
||||
break
|
||||
if abort and not rv:
|
||||
raise Abort()
|
||||
return rv
|
||||
|
||||
|
||||
def echo_via_pager(
|
||||
text_or_generator: cabc.Iterable[str] | t.Callable[[], cabc.Iterable[str]] | str,
|
||||
color: bool | None = None,
|
||||
) -> None:
|
||||
"""This function takes a text and shows it via an environment specific
|
||||
pager on stdout.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Added the `color` flag.
|
||||
|
||||
:param text_or_generator: the text to page, or alternatively, a
|
||||
generator emitting the text to page.
|
||||
:param color: controls if the pager supports ANSI colors or not. The
|
||||
default is autodetection.
|
||||
"""
|
||||
color = resolve_color_default(color)
|
||||
|
||||
if inspect.isgeneratorfunction(text_or_generator):
|
||||
i = t.cast("t.Callable[[], cabc.Iterable[str]]", text_or_generator)()
|
||||
elif isinstance(text_or_generator, str):
|
||||
i = [text_or_generator]
|
||||
else:
|
||||
i = iter(t.cast("cabc.Iterable[str]", text_or_generator))
|
||||
|
||||
# convert every element of i to a text type if necessary
|
||||
text_generator = (el if isinstance(el, str) else str(el) for el in i)
|
||||
|
||||
from ._termui_impl import pager
|
||||
|
||||
return pager(itertools.chain(text_generator, "\n"), color)
|
||||
|
||||
|
||||
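# Illustrative sketch (not part of the module): paging a large amount of
# output lazily by passing a generator function instead of a string.
#
#     def _report():
#         for i in range(1000):
#             yield f"line {i}\n"
#
#     echo_via_pager(_report)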
@t.overload
|
||||
def progressbar(
|
||||
*,
|
||||
length: int,
|
||||
label: str | None = None,
|
||||
hidden: bool = False,
|
||||
show_eta: bool = True,
|
||||
show_percent: bool | None = None,
|
||||
show_pos: bool = False,
|
||||
fill_char: str = "#",
|
||||
empty_char: str = "-",
|
||||
bar_template: str = "%(label)s [%(bar)s] %(info)s",
|
||||
info_sep: str = " ",
|
||||
width: int = 36,
|
||||
file: t.TextIO | None = None,
|
||||
color: bool | None = None,
|
||||
update_min_steps: int = 1,
|
||||
) -> ProgressBar[int]: ...
|
||||
|
||||
|
||||
@t.overload
|
||||
def progressbar(
|
||||
iterable: cabc.Iterable[V] | None = None,
|
||||
length: int | None = None,
|
||||
label: str | None = None,
|
||||
hidden: bool = False,
|
||||
show_eta: bool = True,
|
||||
show_percent: bool | None = None,
|
||||
show_pos: bool = False,
|
||||
item_show_func: t.Callable[[V | None], str | None] | None = None,
|
||||
fill_char: str = "#",
|
||||
empty_char: str = "-",
|
||||
bar_template: str = "%(label)s [%(bar)s] %(info)s",
|
||||
info_sep: str = " ",
|
||||
width: int = 36,
|
||||
file: t.TextIO | None = None,
|
||||
color: bool | None = None,
|
||||
update_min_steps: int = 1,
|
||||
) -> ProgressBar[V]: ...
|
||||
|
||||
|
||||
def progressbar(
|
||||
iterable: cabc.Iterable[V] | None = None,
|
||||
length: int | None = None,
|
||||
label: str | None = None,
|
||||
hidden: bool = False,
|
||||
show_eta: bool = True,
|
||||
show_percent: bool | None = None,
|
||||
show_pos: bool = False,
|
||||
item_show_func: t.Callable[[V | None], str | None] | None = None,
|
||||
fill_char: str = "#",
|
||||
empty_char: str = "-",
|
||||
bar_template: str = "%(label)s [%(bar)s] %(info)s",
|
||||
info_sep: str = " ",
|
||||
width: int = 36,
|
||||
file: t.TextIO | None = None,
|
||||
color: bool | None = None,
|
||||
update_min_steps: int = 1,
|
||||
) -> ProgressBar[V]:
|
||||
"""This function creates an iterable context manager that can be used
|
||||
to iterate over something while showing a progress bar. It will
|
||||
either iterate over the `iterable` or `length` items (that are counted
|
||||
up). While iteration happens, this function will print a rendered
|
||||
progress bar to the given `file` (defaults to stdout) and will attempt
|
||||
to calculate remaining time and more. By default, this progress bar
|
||||
will not be rendered if the file is not a terminal.
|
||||
|
||||
The context manager creates the progress bar. When the context
|
||||
manager is entered the progress bar is already created. With every
|
||||
iteration over the progress bar, the iterable passed to the bar is
|
||||
advanced and the bar is updated. When the context manager exits,
|
||||
a newline is printed and the progress bar is finalized on screen.
|
||||
|
||||
Note: The progress bar is currently designed for use cases where the
|
||||
total progress can be expected to take at least several seconds.
|
||||
Because of this, the ProgressBar class object won't display
|
||||
progress that is considered too fast, and progress where the time
|
||||
between steps is less than a second.
|
||||
|
||||
No other printing must happen while the bar is active, or the progress
bar will be unintentionally destroyed.
|
||||
|
||||
Example usage::
|
||||
|
||||
with progressbar(items) as bar:
|
||||
for item in bar:
|
||||
do_something_with(item)
|
||||
|
||||
Alternatively, if no iterable is specified, one can manually update the
|
||||
progress bar through the `update()` method instead of directly
|
||||
iterating over the progress bar. The update method accepts the number
|
||||
of steps to increment the bar with::
|
||||
|
||||
with progressbar(length=chunks.total_bytes) as bar:
|
||||
for chunk in chunks:
|
||||
process_chunk(chunk)
|
||||
bar.update(chunks.bytes)
|
||||
|
||||
The ``update()`` method also takes an optional value specifying the
|
||||
``current_item`` at the new position. This is useful when used
|
||||
together with ``item_show_func`` to customize the output for each
|
||||
manual step::
|
||||
|
||||
with click.progressbar(
|
||||
length=total_size,
|
||||
label='Unzipping archive',
|
||||
item_show_func=lambda a: a.filename
|
||||
) as bar:
|
||||
for archive in zip_file:
|
||||
archive.extract()
|
||||
bar.update(archive.size, archive)
|
||||
|
||||
:param iterable: an iterable to iterate over. If not provided the length
|
||||
is required.
|
||||
:param length: the number of items to iterate over. By default the
|
||||
progressbar will attempt to ask the iterator about its
|
||||
length, which might or might not work. If an iterable is
|
||||
also provided this parameter can be used to override the
|
||||
length. If an iterable is not provided the progress bar
|
||||
will iterate over a range of that length.
|
||||
:param label: the label to show next to the progress bar.
|
||||
:param hidden: hide the progressbar. Defaults to ``False``. When no tty is
|
||||
detected, it will only print the progressbar label. Setting this to
|
||||
``True`` also disables that.
|
||||
:param show_eta: enables or disables the estimated time display. This is
|
||||
automatically disabled if the length cannot be
|
||||
determined.
|
||||
:param show_percent: enables or disables the percentage display. The
|
||||
default is `True` if the iterable has a length or
|
||||
`False` if not.
|
||||
:param show_pos: enables or disables the absolute position display. The
|
||||
default is `False`.
|
||||
:param item_show_func: A function called with the current item which
|
||||
can return a string to show next to the progress bar. If the
|
||||
function returns ``None`` nothing is shown. The current item can
|
||||
be ``None``, such as when entering and exiting the bar.
|
||||
:param fill_char: the character to use to show the filled part of the
|
||||
progress bar.
|
||||
:param empty_char: the character to use to show the non-filled part of
|
||||
the progress bar.
|
||||
:param bar_template: the format string to use as template for the bar.
|
||||
The parameters in it are ``label`` for the label,
|
||||
``bar`` for the progress bar and ``info`` for the
|
||||
info section.
|
||||
:param info_sep: the separator between multiple info items (eta etc.)
|
||||
:param width: the width of the progress bar in characters, 0 means full
|
||||
terminal width
|
||||
:param file: The file to write to. If this is not a terminal then
|
||||
only the label is printed.
|
||||
:param color: controls if the terminal supports ANSI colors or not. The
|
||||
default is autodetection. This is only needed if ANSI
|
||||
codes are included anywhere in the progress bar output
|
||||
which is not the case by default.
|
||||
:param update_min_steps: Render only when this many updates have
|
||||
completed. This allows tuning for very fast iterators.
|
||||
|
||||
.. versionadded:: 8.2
|
||||
The ``hidden`` argument.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Output is shown even if execution time is less than 0.5 seconds.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
``item_show_func`` shows the current item, not the previous one.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Labels are echoed if the output is not a TTY. Reverts a change
|
||||
in 7.0 that removed all output.
|
||||
|
||||
.. versionadded:: 8.0
|
||||
The ``update_min_steps`` parameter.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
The ``color`` parameter and ``update`` method.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
from ._termui_impl import ProgressBar
|
||||
|
||||
color = resolve_color_default(color)
|
||||
return ProgressBar(
|
||||
iterable=iterable,
|
||||
length=length,
|
||||
hidden=hidden,
|
||||
show_eta=show_eta,
|
||||
show_percent=show_percent,
|
||||
show_pos=show_pos,
|
||||
item_show_func=item_show_func,
|
||||
fill_char=fill_char,
|
||||
empty_char=empty_char,
|
||||
bar_template=bar_template,
|
||||
info_sep=info_sep,
|
||||
file=file,
|
||||
label=label,
|
||||
width=width,
|
||||
color=color,
|
||||
update_min_steps=update_min_steps,
|
||||
)
|
||||
|
||||
|
||||
def clear() -> None:
|
||||
"""Clears the terminal screen. This will have the effect of clearing
|
||||
the whole visible space of the terminal and moving the cursor to the
|
||||
top left. This does not do anything if not connected to a terminal.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if not isatty(sys.stdout):
|
||||
return
|
||||
|
||||
# ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor
|
||||
echo("\033[2J\033[1;1H", nl=False)
|
||||
|
||||
|
||||
def _interpret_color(color: int | tuple[int, int, int] | str, offset: int = 0) -> str:
|
||||
if isinstance(color, int):
|
||||
return f"{38 + offset};5;{color:d}"
|
||||
|
||||
if isinstance(color, (tuple, list)):
|
||||
r, g, b = color
|
||||
return f"{38 + offset};2;{r:d};{g:d};{b:d}"
|
||||
|
||||
return str(_ansi_colors[color] + offset)
|
||||
|
||||
|
||||
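# Illustrative sketch (not part of the module): the escape-code fragments
# produced by ``_interpret_color``. The offset of 10 shifts foreground
# codes to their background counterparts.
#
#     _interpret_color("red")            # -> "31"
#     _interpret_color("red", 10)        # -> "41"
#     _interpret_color(196)              # -> "38;5;196"     (256-color)
#     _interpret_color((255, 0, 0), 10)  # -> "48;2;255;0;0"  (true color)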
def style(
|
||||
text: t.Any,
|
||||
fg: int | tuple[int, int, int] | str | None = None,
|
||||
bg: int | tuple[int, int, int] | str | None = None,
|
||||
bold: bool | None = None,
|
||||
dim: bool | None = None,
|
||||
underline: bool | None = None,
|
||||
overline: bool | None = None,
|
||||
italic: bool | None = None,
|
||||
blink: bool | None = None,
|
||||
reverse: bool | None = None,
|
||||
strikethrough: bool | None = None,
|
||||
reset: bool = True,
|
||||
) -> str:
|
||||
"""Styles a text with ANSI styles and returns the new string. By
|
||||
default the styling is self contained which means that at the end
|
||||
of the string a reset code is issued. This can be prevented by
|
||||
passing ``reset=False``.
|
||||
|
||||
Examples::
|
||||
|
||||
click.echo(click.style('Hello World!', fg='green'))
|
||||
click.echo(click.style('ATTENTION!', blink=True))
|
||||
click.echo(click.style('Some things', reverse=True, fg='cyan'))
|
||||
click.echo(click.style('More colors', fg=(255, 12, 128), bg=117))
|
||||
|
||||
Supported color names:
|
||||
|
||||
* ``black`` (might be a gray)
|
||||
* ``red``
|
||||
* ``green``
|
||||
* ``yellow`` (might be an orange)
|
||||
* ``blue``
|
||||
* ``magenta``
|
||||
* ``cyan``
|
||||
* ``white`` (might be light gray)
|
||||
* ``bright_black``
|
||||
* ``bright_red``
|
||||
* ``bright_green``
|
||||
* ``bright_yellow``
|
||||
* ``bright_blue``
|
||||
* ``bright_magenta``
|
||||
* ``bright_cyan``
|
||||
* ``bright_white``
|
||||
* ``reset`` (reset the color code only)
|
||||
|
||||
If the terminal supports it, color may also be specified as:
|
||||
|
||||
- An integer in the interval [0, 255]. The terminal must support
|
||||
8-bit/256-color mode.
|
||||
- An RGB tuple of three integers in [0, 255]. The terminal must
|
||||
support 24-bit/true-color mode.
|
||||
|
||||
See https://en.wikipedia.org/wiki/ANSI_color and
|
||||
https://gist.github.com/XVilka/8346728 for more information.
|
||||
|
||||
:param text: the string to style with ansi codes.
|
||||
:param fg: if provided this will become the foreground color.
|
||||
:param bg: if provided this will become the background color.
|
||||
:param bold: if provided this will enable or disable bold mode.
|
||||
:param dim: if provided this will enable or disable dim mode. This is
|
||||
badly supported.
|
||||
:param underline: if provided this will enable or disable underline.
|
||||
:param overline: if provided this will enable or disable overline.
|
||||
:param italic: if provided this will enable or disable italic.
|
||||
:param blink: if provided this will enable or disable blinking.
|
||||
:param reverse: if provided this will enable or disable inverse
|
||||
rendering (foreground becomes background and the
|
||||
other way round).
|
||||
:param strikethrough: if provided this will enable or disable
|
||||
striking through text.
|
||||
:param reset: by default a reset-all code is added at the end of the
|
||||
string which means that styles do not carry over. This
|
||||
can be disabled to compose styles.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
A non-string ``message`` is converted to a string.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Added support for 256 and RGB color codes.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Added the ``strikethrough``, ``italic``, and ``overline``
|
||||
parameters.
|
||||
|
||||
.. versionchanged:: 7.0
|
||||
Added support for bright colors.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if not isinstance(text, str):
|
||||
text = str(text)
|
||||
|
||||
bits = []
|
||||
|
||||
if fg:
|
||||
try:
|
||||
bits.append(f"\033[{_interpret_color(fg)}m")
|
||||
except KeyError:
|
||||
raise TypeError(f"Unknown color {fg!r}") from None
|
||||
|
||||
if bg:
|
||||
try:
|
||||
bits.append(f"\033[{_interpret_color(bg, 10)}m")
|
||||
except KeyError:
|
||||
raise TypeError(f"Unknown color {bg!r}") from None
|
||||
|
||||
if bold is not None:
|
||||
bits.append(f"\033[{1 if bold else 22}m")
|
||||
if dim is not None:
|
||||
bits.append(f"\033[{2 if dim else 22}m")
|
||||
if underline is not None:
|
||||
bits.append(f"\033[{4 if underline else 24}m")
|
||||
if overline is not None:
|
||||
bits.append(f"\033[{53 if overline else 55}m")
|
||||
if italic is not None:
|
||||
bits.append(f"\033[{3 if italic else 23}m")
|
||||
if blink is not None:
|
||||
bits.append(f"\033[{5 if blink else 25}m")
|
||||
if reverse is not None:
|
||||
bits.append(f"\033[{7 if reverse else 27}m")
|
||||
if strikethrough is not None:
|
||||
bits.append(f"\033[{9 if strikethrough else 29}m")
|
||||
bits.append(text)
|
||||
if reset:
|
||||
bits.append(_ansi_reset_all)
|
||||
return "".join(bits)
|
||||
|
||||
|
||||
def unstyle(text: str) -> str:
|
||||
"""Removes ANSI styling information from a string. Usually it's not
|
||||
necessary to use this function as Click's echo function will
|
||||
automatically remove styling if necessary.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param text: the text to remove style information from.
|
||||
"""
|
||||
return strip_ansi(text)
|
||||
|
||||
|
||||
def secho(
|
||||
message: t.Any | None = None,
|
||||
file: t.IO[t.AnyStr] | None = None,
|
||||
nl: bool = True,
|
||||
err: bool = False,
|
||||
color: bool | None = None,
|
||||
**styles: t.Any,
|
||||
) -> None:
|
||||
"""This function combines :func:`echo` and :func:`style` into one
|
||||
call. As such the following two calls are the same::
|
||||
|
||||
click.secho('Hello World!', fg='green')
|
||||
click.echo(click.style('Hello World!', fg='green'))
|
||||
|
||||
All keyword arguments are forwarded to the underlying functions
|
||||
depending on which one they go with.
|
||||
|
||||
Non-string types will be converted to :class:`str`. However,
|
||||
:class:`bytes` are passed directly to :meth:`echo` without applying
|
||||
style. If you want to style bytes that represent text, call
|
||||
:meth:`bytes.decode` first.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
A non-string ``message`` is converted to a string. Bytes are
|
||||
passed through without style applied.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if message is not None and not isinstance(message, (bytes, bytearray)):
|
||||
message = style(message, **styles)
|
||||
|
||||
return echo(message, file=file, nl=nl, err=err, color=color)
|
||||
|
||||
|
||||
@t.overload
|
||||
def edit(
|
||||
text: bytes | bytearray,
|
||||
editor: str | None = None,
|
||||
env: cabc.Mapping[str, str] | None = None,
|
||||
require_save: bool = False,
|
||||
extension: str = ".txt",
|
||||
) -> bytes | None: ...
|
||||
|
||||
|
||||
@t.overload
|
||||
def edit(
|
||||
text: str,
|
||||
editor: str | None = None,
|
||||
env: cabc.Mapping[str, str] | None = None,
|
||||
require_save: bool = True,
|
||||
extension: str = ".txt",
|
||||
) -> str | None: ...
|
||||
|
||||
|
||||
@t.overload
|
||||
def edit(
|
||||
text: None = None,
|
||||
editor: str | None = None,
|
||||
env: cabc.Mapping[str, str] | None = None,
|
||||
require_save: bool = True,
|
||||
extension: str = ".txt",
|
||||
filename: str | cabc.Iterable[str] | None = None,
|
||||
) -> None: ...
|
||||
|
||||
|
||||
def edit(
|
||||
text: str | bytes | bytearray | None = None,
|
||||
editor: str | None = None,
|
||||
env: cabc.Mapping[str, str] | None = None,
|
||||
require_save: bool = True,
|
||||
extension: str = ".txt",
|
||||
filename: str | cabc.Iterable[str] | None = None,
|
||||
) -> str | bytes | bytearray | None:
|
||||
r"""Edits the given text in the defined editor. If an editor is given
|
||||
(should be the full path to the executable but the regular operating
|
||||
system search path is used for finding the executable) it overrides
|
||||
the detected editor. Optionally, some environment variables can be
|
||||
used. If the editor is closed without changes, `None` is returned. In
|
||||
case a file is edited directly the return value is always `None` and
|
||||
`require_save` and `extension` are ignored.
|
||||
|
||||
If the editor cannot be opened a :exc:`UsageError` is raised.
|
||||
|
||||
Note for Windows: to simplify cross-platform usage, the newlines are
|
||||
automatically converted from POSIX to Windows and vice versa. As such,
|
||||
the message here will have ``\n`` as newline markers.
|
||||
|
||||
:param text: the text to edit.
|
||||
:param editor: optionally the editor to use. Defaults to automatic
|
||||
detection.
|
||||
:param env: environment variables to forward to the editor.
|
||||
:param require_save: if this is true, then not saving in the editor
|
||||
will make the return value become `None`.
|
||||
:param extension: the extension to tell the editor about. This defaults
|
||||
to `.txt` but changing this might change syntax
|
||||
highlighting.
|
||||
:param filename: if provided it will edit this file instead of the
|
||||
provided text contents. It will not use a temporary
|
||||
file as an indirection in that case. If the editor supports
|
||||
editing multiple files at once, a sequence of files may be
|
||||
passed as well. Invoke `click.edit` once per file instead
|
||||
if multiple files cannot be managed at once or editing the
|
||||
files serially is desired.
|
||||
|
||||
.. versionchanged:: 8.2.0
|
||||
``filename`` now accepts any ``Iterable[str]`` in addition to a ``str``
|
||||
if the ``editor`` supports editing multiple files at once.
|
||||
|
||||
"""
|
||||
from ._termui_impl import Editor
|
||||
|
||||
ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension)
|
||||
|
||||
if filename is None:
|
||||
return ed.edit(text)
|
||||
|
||||
if isinstance(filename, str):
|
||||
filename = (filename,)
|
||||
|
||||
ed.edit_files(filenames=filename)
|
||||
return None
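# Illustrative usage sketch (not part of the upstream click source): collect a
# commit-style message from the user's editor and fall back to a default when
# the editor is closed without saving. The marker text below is hypothetical.
def _example_edit_message(default: str = "") -> str:
    text = edit("\n# Lines starting with '#' are ignored.\n")
    if text is None:  # editor closed without changes
        return default
    return "\n".join(
        line for line in text.splitlines() if not line.startswith("#")
    ).strip()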
|
||||
|
||||
|
||||
def launch(url: str, wait: bool = False, locate: bool = False) -> int:
|
||||
"""This function launches the given URL (or filename) in the default
|
||||
viewer application for this file type. If this is an executable, it
|
||||
might launch the executable in a new session. The return value is
|
||||
the exit code of the launched application. Usually, ``0`` indicates
|
||||
success.
|
||||
|
||||
Examples::
|
||||
|
||||
click.launch('https://click.palletsprojects.com/')
|
||||
click.launch('/my/downloaded/file', locate=True)
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param url: URL or filename of the thing to launch.
|
||||
:param wait: Wait for the program to exit before returning. This
|
||||
only works if the launched program blocks. In particular,
|
||||
``xdg-open`` on Linux does not block.
|
||||
:param locate: if this is set to `True` then instead of launching the
|
||||
application associated with the URL it will attempt to
|
||||
launch a file manager with the file located. This
|
||||
might have weird effects if the URL does not point to
|
||||
the filesystem.
|
||||
"""
|
||||
from ._termui_impl import open_url
|
||||
|
||||
return open_url(url, wait=wait, locate=locate)
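# Illustrative usage sketch (not part of the upstream click source): open the
# documentation in the default browser, then reveal a generated report in the
# file manager instead of opening it. The URL and path are hypothetical.
def _example_open_resources(report_path: str) -> None:
    launch("https://click.palletsprojects.com/")
    launch(report_path, locate=True)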
|
||||
|
||||
|
||||
# If this is provided, getchar() calls into this instead. This is used
|
||||
# for unittesting purposes.
|
||||
_getchar: t.Callable[[bool], str] | None = None
|
||||
|
||||
|
||||
def getchar(echo: bool = False) -> str:
|
||||
"""Fetches a single character from the terminal and returns it. This
|
||||
will always return a unicode character and under certain rare
|
||||
circumstances this might return more than one character. The
|
||||
situations in which more than one character is returned are when, for
|
||||
whatever reason multiple characters end up in the terminal buffer or
|
||||
standard input was not actually a terminal.
|
||||
|
||||
Note that this will always read from the terminal, even if something
|
||||
is piped into the standard input.
|
||||
|
||||
Note for Windows: in rare cases when typing non-ASCII characters, this
|
||||
function might wait for a second character and then return both at once.
|
||||
This is because certain Unicode characters look like special-key markers.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param echo: if set to `True`, the character read will also show up on
|
||||
the terminal. The default is to not show it.
|
||||
"""
|
||||
global _getchar
|
||||
|
||||
if _getchar is None:
|
||||
from ._termui_impl import getchar as f
|
||||
|
||||
_getchar = f
|
||||
|
||||
return _getchar(echo)
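# Illustrative usage sketch (not part of the upstream click source): a minimal
# single-keypress yes/no prompt built on getchar(). click.confirm() is the
# polished equivalent, so this exists purely as an example.
def _example_confirm(question: str) -> bool:
    echo(f"{question} [y/n] ", nl=False)
    while True:
        char = getchar().lower()
        if char in ("y", "n"):
            echo(char)
            return char == "y"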
|
||||
|
||||
|
||||
def raw_terminal() -> AbstractContextManager[int]:
|
||||
from ._termui_impl import raw_terminal as f
|
||||
|
||||
return f()
|
||||
|
||||
|
||||
def pause(info: str | None = None, err: bool = False) -> None:
|
||||
"""This command stops execution and waits for the user to press any
|
||||
key to continue. This is similar to the Windows batch "pause"
|
||||
command. If the program is not run through a terminal, this command
|
||||
will instead do nothing.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `err` parameter.
|
||||
|
||||
:param info: The message to print before pausing. Defaults to
|
||||
``"Press any key to continue..."``.
|
||||
:param err: if set to ``True``, the message goes to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
"""
|
||||
if not isatty(sys.stdin) or not isatty(sys.stdout):
|
||||
return
|
||||
|
||||
if info is None:
|
||||
info = _("Press any key to continue...")
|
||||
|
||||
try:
|
||||
if info:
|
||||
echo(info, nl=False, err=err)
|
||||
try:
|
||||
getchar()
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
pass
|
||||
finally:
|
||||
if info:
|
||||
echo(err=err)
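# Illustrative usage sketch (not part of the upstream click source): keep a
# console window open after printing results, which is handy when a script is
# launched by double-clicking on Windows. The wording is hypothetical.
def _example_finish(summary: str) -> None:
    echo(summary)
    pause(info="Done. Press any key to close this window...")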
|
||||
@ -0,0 +1,577 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import contextlib
|
||||
import io
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
import tempfile
|
||||
import typing as t
|
||||
from types import TracebackType
|
||||
|
||||
from . import _compat
|
||||
from . import formatting
|
||||
from . import termui
|
||||
from . import utils
|
||||
from ._compat import _find_binary_reader
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from _typeshed import ReadableBuffer
|
||||
|
||||
from .core import Command
|
||||
|
||||
|
||||
class EchoingStdin:
|
||||
def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None:
|
||||
self._input = input
|
||||
self._output = output
|
||||
self._paused = False
|
||||
|
||||
def __getattr__(self, x: str) -> t.Any:
|
||||
return getattr(self._input, x)
|
||||
|
||||
def _echo(self, rv: bytes) -> bytes:
|
||||
if not self._paused:
|
||||
self._output.write(rv)
|
||||
|
||||
return rv
|
||||
|
||||
def read(self, n: int = -1) -> bytes:
|
||||
return self._echo(self._input.read(n))
|
||||
|
||||
def read1(self, n: int = -1) -> bytes:
|
||||
return self._echo(self._input.read1(n)) # type: ignore
|
||||
|
||||
def readline(self, n: int = -1) -> bytes:
|
||||
return self._echo(self._input.readline(n))
|
||||
|
||||
def readlines(self) -> list[bytes]:
|
||||
return [self._echo(x) for x in self._input.readlines()]
|
||||
|
||||
def __iter__(self) -> cabc.Iterator[bytes]:
|
||||
return iter(self._echo(x) for x in self._input)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(self._input)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _pause_echo(stream: EchoingStdin | None) -> cabc.Iterator[None]:
|
||||
if stream is None:
|
||||
yield
|
||||
else:
|
||||
stream._paused = True
|
||||
yield
|
||||
stream._paused = False
|
||||
|
||||
|
||||
class BytesIOCopy(io.BytesIO):
|
||||
"""Patch ``io.BytesIO`` to let the written stream be copied to another.
|
||||
|
||||
.. versionadded:: 8.2
|
||||
"""
|
||||
|
||||
def __init__(self, copy_to: io.BytesIO) -> None:
|
||||
super().__init__()
|
||||
self.copy_to = copy_to
|
||||
|
||||
def flush(self) -> None:
|
||||
super().flush()
|
||||
self.copy_to.flush()
|
||||
|
||||
def write(self, b: ReadableBuffer) -> int:
|
||||
self.copy_to.write(b)
|
||||
return super().write(b)
|
||||
|
||||
|
||||
class StreamMixer:
|
||||
"""Mixes `<stdout>` and `<stderr>` streams.
|
||||
|
||||
The result is available in the ``output`` attribute.
|
||||
|
||||
.. versionadded:: 8.2
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.output: io.BytesIO = io.BytesIO()
|
||||
self.stdout: io.BytesIO = BytesIOCopy(copy_to=self.output)
|
||||
self.stderr: io.BytesIO = BytesIOCopy(copy_to=self.output)
|
||||
|
||||
def __del__(self) -> None:
|
||||
"""
|
||||
Guarantee that embedded file-like objects are closed in a
|
||||
predictable order, protecting against races between
|
||||
self.output being closed and other streams being flushed on close
|
||||
|
||||
.. versionadded:: 8.2.2
|
||||
"""
|
||||
self.stderr.close()
|
||||
self.stdout.close()
|
||||
self.output.close()
|
||||
|
||||
|
||||
class _NamedTextIOWrapper(io.TextIOWrapper):
|
||||
def __init__(
|
||||
self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any
|
||||
) -> None:
|
||||
super().__init__(buffer, **kwargs)
|
||||
self._name = name
|
||||
self._mode = mode
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def mode(self) -> str:
|
||||
return self._mode
|
||||
|
||||
|
||||
def make_input_stream(
|
||||
input: str | bytes | t.IO[t.Any] | None, charset: str
|
||||
) -> t.BinaryIO:
|
||||
# Is already an input stream.
|
||||
if hasattr(input, "read"):
|
||||
rv = _find_binary_reader(t.cast("t.IO[t.Any]", input))
|
||||
|
||||
if rv is not None:
|
||||
return rv
|
||||
|
||||
raise TypeError("Could not find binary reader for input stream.")
|
||||
|
||||
if input is None:
|
||||
input = b""
|
||||
elif isinstance(input, str):
|
||||
input = input.encode(charset)
|
||||
|
||||
return io.BytesIO(input)
|
||||
|
||||
|
||||
class Result:
|
||||
"""Holds the captured result of an invoked CLI script.
|
||||
|
||||
:param runner: The runner that created the result
|
||||
:param stdout_bytes: The standard output as bytes.
|
||||
:param stderr_bytes: The standard error as bytes.
|
||||
:param output_bytes: A mix of ``stdout_bytes`` and ``stderr_bytes``, as the
|
||||
user would see it in their terminal.
|
||||
:param return_value: The value returned from the invoked command.
|
||||
:param exit_code: The exit code as integer.
|
||||
:param exception: The exception that happened if one did.
|
||||
:param exc_info: Exception information (exception type, exception instance,
|
||||
traceback type).
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
``stderr_bytes`` no longer optional, ``output_bytes`` introduced and
|
||||
``mix_stderr`` has been removed.
|
||||
|
||||
.. versionadded:: 8.0
|
||||
Added ``return_value``.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
runner: CliRunner,
|
||||
stdout_bytes: bytes,
|
||||
stderr_bytes: bytes,
|
||||
output_bytes: bytes,
|
||||
return_value: t.Any,
|
||||
exit_code: int,
|
||||
exception: BaseException | None,
|
||||
exc_info: tuple[type[BaseException], BaseException, TracebackType]
|
||||
| None = None,
|
||||
):
|
||||
self.runner = runner
|
||||
self.stdout_bytes = stdout_bytes
|
||||
self.stderr_bytes = stderr_bytes
|
||||
self.output_bytes = output_bytes
|
||||
self.return_value = return_value
|
||||
self.exit_code = exit_code
|
||||
self.exception = exception
|
||||
self.exc_info = exc_info
|
||||
|
||||
@property
|
||||
def output(self) -> str:
|
||||
"""The terminal output as unicode string, as the user would see it.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
No longer a proxy for ``self.stdout``. Now has its own independent stream
|
||||
that is mixing `<stdout>` and `<stderr>`, in the order they were written.
|
||||
"""
|
||||
return self.output_bytes.decode(self.runner.charset, "replace").replace(
|
||||
"\r\n", "\n"
|
||||
)
|
||||
|
||||
@property
|
||||
def stdout(self) -> str:
|
||||
"""The standard output as unicode string."""
|
||||
return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
|
||||
"\r\n", "\n"
|
||||
)
|
||||
|
||||
@property
|
||||
def stderr(self) -> str:
|
||||
"""The standard error as unicode string.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
No longer raise an exception, always returns the `<stderr>` string.
|
||||
"""
|
||||
return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
|
||||
"\r\n", "\n"
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
exc_str = repr(self.exception) if self.exception else "okay"
|
||||
return f"<{type(self).__name__} {exc_str}>"
|
||||
|
||||
|
||||
class CliRunner:
|
||||
"""The CLI runner provides functionality to invoke a Click command line
|
||||
script for unittesting purposes in an isolated environment. This only
|
||||
works in single-threaded systems without any concurrency as it changes the
|
||||
global interpreter state.
|
||||
|
||||
:param charset: the character set for the input and output data.
|
||||
:param env: a dictionary with environment variables for overriding.
|
||||
:param echo_stdin: if this is set to `True`, then reading from `<stdin>` writes
|
||||
to `<stdout>`. This is useful for showing examples in
|
||||
some circumstances. Note that regular prompts
|
||||
will automatically echo the input.
|
||||
:param catch_exceptions: Whether to catch any exceptions other than
|
||||
``SystemExit`` when running :meth:`~CliRunner.invoke`.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
Added the ``catch_exceptions`` parameter.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
``mix_stderr`` parameter has been removed.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
charset: str = "utf-8",
|
||||
env: cabc.Mapping[str, str | None] | None = None,
|
||||
echo_stdin: bool = False,
|
||||
catch_exceptions: bool = True,
|
||||
) -> None:
|
||||
self.charset = charset
|
||||
self.env: cabc.Mapping[str, str | None] = env or {}
|
||||
self.echo_stdin = echo_stdin
|
||||
self.catch_exceptions = catch_exceptions
|
||||
|
||||
def get_default_prog_name(self, cli: Command) -> str:
|
||||
"""Given a command object it will return the default program name
|
||||
for it. The default is the `name` attribute or ``"root"`` if not
|
||||
set.
|
||||
"""
|
||||
return cli.name or "root"
|
||||
|
||||
def make_env(
|
||||
self, overrides: cabc.Mapping[str, str | None] | None = None
|
||||
) -> cabc.Mapping[str, str | None]:
|
||||
"""Returns the environment overrides for invoking a script."""
|
||||
rv = dict(self.env)
|
||||
if overrides:
|
||||
rv.update(overrides)
|
||||
return rv
|
||||
|
||||
@contextlib.contextmanager
|
||||
def isolation(
|
||||
self,
|
||||
input: str | bytes | t.IO[t.Any] | None = None,
|
||||
env: cabc.Mapping[str, str | None] | None = None,
|
||||
color: bool = False,
|
||||
) -> cabc.Iterator[tuple[io.BytesIO, io.BytesIO, io.BytesIO]]:
|
||||
"""A context manager that sets up the isolation for invoking of a
|
||||
command line tool. This sets up `<stdin>` with the given input data
|
||||
and `os.environ` with the overrides from the given dictionary.
|
||||
This also rebinds some internals in Click to be mocked (like the
|
||||
prompt functionality).
|
||||
|
||||
This is automatically done in the :meth:`invoke` method.
|
||||
|
||||
:param input: the input stream to put into `sys.stdin`.
|
||||
:param env: the environment overrides as dictionary.
|
||||
:param color: whether the output should contain color codes. The
|
||||
application can still override this explicitly.
|
||||
|
||||
.. versionadded:: 8.2
|
||||
An additional output stream is returned, which is a mix of
|
||||
`<stdout>` and `<stderr>` streams.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
Always returns the `<stderr>` stream.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
`<stderr>` is opened with ``errors="backslashreplace"``
|
||||
instead of the default ``"strict"``.
|
||||
|
||||
.. versionchanged:: 4.0
|
||||
Added the ``color`` parameter.
|
||||
"""
|
||||
bytes_input = make_input_stream(input, self.charset)
|
||||
echo_input = None
|
||||
|
||||
old_stdin = sys.stdin
|
||||
old_stdout = sys.stdout
|
||||
old_stderr = sys.stderr
|
||||
old_forced_width = formatting.FORCED_WIDTH
|
||||
formatting.FORCED_WIDTH = 80
|
||||
|
||||
env = self.make_env(env)
|
||||
|
||||
stream_mixer = StreamMixer()
|
||||
|
||||
if self.echo_stdin:
|
||||
bytes_input = echo_input = t.cast(
|
||||
t.BinaryIO, EchoingStdin(bytes_input, stream_mixer.stdout)
|
||||
)
|
||||
|
||||
sys.stdin = text_input = _NamedTextIOWrapper(
|
||||
bytes_input, encoding=self.charset, name="<stdin>", mode="r"
|
||||
)
|
||||
|
||||
if self.echo_stdin:
|
||||
# Force unbuffered reads, otherwise TextIOWrapper reads a
|
||||
# large chunk which is echoed early.
|
||||
text_input._CHUNK_SIZE = 1 # type: ignore
|
||||
|
||||
sys.stdout = _NamedTextIOWrapper(
|
||||
stream_mixer.stdout, encoding=self.charset, name="<stdout>", mode="w"
|
||||
)
|
||||
|
||||
sys.stderr = _NamedTextIOWrapper(
|
||||
stream_mixer.stderr,
|
||||
encoding=self.charset,
|
||||
name="<stderr>",
|
||||
mode="w",
|
||||
errors="backslashreplace",
|
||||
)
|
||||
|
||||
@_pause_echo(echo_input) # type: ignore
|
||||
def visible_input(prompt: str | None = None) -> str:
|
||||
sys.stdout.write(prompt or "")
|
||||
try:
|
||||
val = next(text_input).rstrip("\r\n")
|
||||
except StopIteration as e:
|
||||
raise EOFError() from e
|
||||
sys.stdout.write(f"{val}\n")
|
||||
sys.stdout.flush()
|
||||
return val
|
||||
|
||||
@_pause_echo(echo_input) # type: ignore
|
||||
def hidden_input(prompt: str | None = None) -> str:
|
||||
sys.stdout.write(f"{prompt or ''}\n")
|
||||
sys.stdout.flush()
|
||||
try:
|
||||
return next(text_input).rstrip("\r\n")
|
||||
except StopIteration as e:
|
||||
raise EOFError() from e
|
||||
|
||||
@_pause_echo(echo_input) # type: ignore
|
||||
def _getchar(echo: bool) -> str:
|
||||
char = sys.stdin.read(1)
|
||||
|
||||
if echo:
|
||||
sys.stdout.write(char)
|
||||
|
||||
sys.stdout.flush()
|
||||
return char
|
||||
|
||||
default_color = color
|
||||
|
||||
def should_strip_ansi(
|
||||
stream: t.IO[t.Any] | None = None, color: bool | None = None
|
||||
) -> bool:
|
||||
if color is None:
|
||||
return not default_color
|
||||
return not color
|
||||
|
||||
old_visible_prompt_func = termui.visible_prompt_func
|
||||
old_hidden_prompt_func = termui.hidden_prompt_func
|
||||
old__getchar_func = termui._getchar
|
||||
old_should_strip_ansi = utils.should_strip_ansi # type: ignore
|
||||
old__compat_should_strip_ansi = _compat.should_strip_ansi
|
||||
termui.visible_prompt_func = visible_input
|
||||
termui.hidden_prompt_func = hidden_input
|
||||
termui._getchar = _getchar
|
||||
utils.should_strip_ansi = should_strip_ansi # type: ignore
|
||||
_compat.should_strip_ansi = should_strip_ansi
|
||||
|
||||
old_env = {}
|
||||
try:
|
||||
for key, value in env.items():
|
||||
old_env[key] = os.environ.get(key)
|
||||
if value is None:
|
||||
try:
|
||||
del os.environ[key]
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
os.environ[key] = value
|
||||
yield (stream_mixer.stdout, stream_mixer.stderr, stream_mixer.output)
|
||||
finally:
|
||||
for key, value in old_env.items():
|
||||
if value is None:
|
||||
try:
|
||||
del os.environ[key]
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
os.environ[key] = value
|
||||
sys.stdout = old_stdout
|
||||
sys.stderr = old_stderr
|
||||
sys.stdin = old_stdin
|
||||
termui.visible_prompt_func = old_visible_prompt_func
|
||||
termui.hidden_prompt_func = old_hidden_prompt_func
|
||||
termui._getchar = old__getchar_func
|
||||
utils.should_strip_ansi = old_should_strip_ansi # type: ignore
|
||||
_compat.should_strip_ansi = old__compat_should_strip_ansi
|
||||
formatting.FORCED_WIDTH = old_forced_width
|
||||
|
||||
def invoke(
|
||||
self,
|
||||
cli: Command,
|
||||
args: str | cabc.Sequence[str] | None = None,
|
||||
input: str | bytes | t.IO[t.Any] | None = None,
|
||||
env: cabc.Mapping[str, str | None] | None = None,
|
||||
catch_exceptions: bool | None = None,
|
||||
color: bool = False,
|
||||
**extra: t.Any,
|
||||
) -> Result:
|
||||
"""Invokes a command in an isolated environment. The arguments are
|
||||
forwarded directly to the command line script, the `extra` keyword
|
||||
arguments are passed to the :meth:`~clickpkg.Command.main` function of
|
||||
the command.
|
||||
|
||||
This returns a :class:`Result` object.
|
||||
|
||||
:param cli: the command to invoke
|
||||
:param args: the arguments to invoke. It may be given as an iterable
|
||||
or a string. When given as string it will be interpreted
|
||||
as a Unix shell command. More details at
|
||||
:func:`shlex.split`.
|
||||
:param input: the input data for `sys.stdin`.
|
||||
:param env: the environment overrides.
|
||||
:param catch_exceptions: Whether to catch any other exceptions than
|
||||
``SystemExit``. If :data:`None`, the value
|
||||
from :class:`CliRunner` is used.
|
||||
:param extra: the keyword arguments to pass to :meth:`main`.
|
||||
:param color: whether the output should contain color codes. The
|
||||
application can still override this explicitly.
|
||||
|
||||
.. versionadded:: 8.2
|
||||
The result object has the ``output_bytes`` attribute with
|
||||
the mix of ``stdout_bytes`` and ``stderr_bytes``, as the user would
|
||||
see it in their terminal.
|
||||
|
||||
.. versionchanged:: 8.2
|
||||
The result object always returns the ``stderr_bytes`` stream.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
The result object has the ``return_value`` attribute with
|
||||
the value returned from the invoked command.
|
||||
|
||||
.. versionchanged:: 4.0
|
||||
Added the ``color`` parameter.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Added the ``catch_exceptions`` parameter.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
The result object has the ``exc_info`` attribute with the
|
||||
traceback if available.
|
||||
"""
|
||||
exc_info = None
|
||||
if catch_exceptions is None:
|
||||
catch_exceptions = self.catch_exceptions
|
||||
|
||||
with self.isolation(input=input, env=env, color=color) as outstreams:
|
||||
return_value = None
|
||||
exception: BaseException | None = None
|
||||
exit_code = 0
|
||||
|
||||
if isinstance(args, str):
|
||||
args = shlex.split(args)
|
||||
|
||||
try:
|
||||
prog_name = extra.pop("prog_name")
|
||||
except KeyError:
|
||||
prog_name = self.get_default_prog_name(cli)
|
||||
|
||||
try:
|
||||
return_value = cli.main(args=args or (), prog_name=prog_name, **extra)
|
||||
except SystemExit as e:
|
||||
exc_info = sys.exc_info()
|
||||
e_code = t.cast("int | t.Any | None", e.code)
|
||||
|
||||
if e_code is None:
|
||||
e_code = 0
|
||||
|
||||
if e_code != 0:
|
||||
exception = e
|
||||
|
||||
if not isinstance(e_code, int):
|
||||
sys.stdout.write(str(e_code))
|
||||
sys.stdout.write("\n")
|
||||
e_code = 1
|
||||
|
||||
exit_code = e_code
|
||||
|
||||
except Exception as e:
|
||||
if not catch_exceptions:
|
||||
raise
|
||||
exception = e
|
||||
exit_code = 1
|
||||
exc_info = sys.exc_info()
|
||||
finally:
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
stdout = outstreams[0].getvalue()
|
||||
stderr = outstreams[1].getvalue()
|
||||
output = outstreams[2].getvalue()
|
||||
|
||||
return Result(
|
||||
runner=self,
|
||||
stdout_bytes=stdout,
|
||||
stderr_bytes=stderr,
|
||||
output_bytes=output,
|
||||
return_value=return_value,
|
||||
exit_code=exit_code,
|
||||
exception=exception,
|
||||
exc_info=exc_info, # type: ignore
|
||||
)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def isolated_filesystem(
|
||||
self, temp_dir: str | os.PathLike[str] | None = None
|
||||
) -> cabc.Iterator[str]:
|
||||
"""A context manager that creates a temporary directory and
|
||||
changes the current working directory to it. This isolates tests
|
||||
that affect the contents of the CWD to prevent them from
|
||||
interfering with each other.
|
||||
|
||||
:param temp_dir: Create the temporary directory under this
|
||||
directory. If given, the created directory is not removed
|
||||
when exiting.
|
||||
|
||||
.. versionchanged:: 8.0
|
||||
Added the ``temp_dir`` parameter.
|
||||
"""
|
||||
cwd = os.getcwd()
|
||||
dt = tempfile.mkdtemp(dir=temp_dir)
|
||||
os.chdir(dt)
|
||||
|
||||
try:
|
||||
yield dt
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
|
||||
if temp_dir is None:
|
||||
import shutil
|
||||
|
||||
try:
|
||||
shutil.rmtree(dt)
|
||||
except OSError:
|
||||
pass
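# Illustrative usage sketch (not part of the upstream click source): invoking a
# small command with CliRunner inside a test and asserting on the captured
# output. The command and assertions are hypothetical.
def _example_test_hello() -> None:
    import click

    @click.command()
    @click.argument("name")
    def hello(name: str) -> None:
        click.echo(f"Hello {name}!")

    runner = CliRunner()
    result = runner.invoke(hello, ["World"])
    assert result.exit_code == 0
    assert result.output == "Hello World!\n"
    # runner.isolated_filesystem() would additionally sandbox any files the
    # command writes during the test.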
|
||||
File diff suppressed because it is too large
@ -0,0 +1,627 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import typing as t
|
||||
from functools import update_wrapper
|
||||
from types import ModuleType
|
||||
from types import TracebackType
|
||||
|
||||
from ._compat import _default_text_stderr
|
||||
from ._compat import _default_text_stdout
|
||||
from ._compat import _find_binary_writer
|
||||
from ._compat import auto_wrap_for_ansi
|
||||
from ._compat import binary_streams
|
||||
from ._compat import open_stream
|
||||
from ._compat import should_strip_ansi
|
||||
from ._compat import strip_ansi
|
||||
from ._compat import text_streams
|
||||
from ._compat import WIN
|
||||
from .globals import resolve_color_default
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
|
||||
P = te.ParamSpec("P")
|
||||
|
||||
R = t.TypeVar("R")
|
||||
|
||||
|
||||
def _posixify(name: str) -> str:
|
||||
return "-".join(name.split()).lower()
|
||||
|
||||
|
||||
def safecall(func: t.Callable[P, R]) -> t.Callable[P, R | None]:
|
||||
"""Wraps a function so that it swallows exceptions."""
|
||||
|
||||
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | None:
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception:
|
||||
pass
|
||||
return None
|
||||
|
||||
return update_wrapper(wrapper, func)
|
||||
|
||||
|
||||
def make_str(value: t.Any) -> str:
|
||||
"""Converts a value into a valid string."""
|
||||
if isinstance(value, bytes):
|
||||
try:
|
||||
return value.decode(sys.getfilesystemencoding())
|
||||
except UnicodeError:
|
||||
return value.decode("utf-8", "replace")
|
||||
return str(value)
|
||||
|
||||
|
||||
def make_default_short_help(help: str, max_length: int = 45) -> str:
|
||||
"""Returns a condensed version of help string."""
|
||||
# Consider only the first paragraph.
|
||||
paragraph_end = help.find("\n\n")
|
||||
|
||||
if paragraph_end != -1:
|
||||
help = help[:paragraph_end]
|
||||
|
||||
# Collapse newlines, tabs, and spaces.
|
||||
words = help.split()
|
||||
|
||||
if not words:
|
||||
return ""
|
||||
|
||||
# The first paragraph started with a "no rewrap" marker, ignore it.
|
||||
if words[0] == "\b":
|
||||
words = words[1:]
|
||||
|
||||
total_length = 0
|
||||
last_index = len(words) - 1
|
||||
|
||||
for i, word in enumerate(words):
|
||||
total_length += len(word) + (i > 0)
|
||||
|
||||
if total_length > max_length: # too long, truncate
|
||||
break
|
||||
|
||||
if word[-1] == ".": # sentence end, truncate without "..."
|
||||
return " ".join(words[: i + 1])
|
||||
|
||||
if total_length == max_length and i != last_index:
|
||||
break # not at sentence end, truncate with "..."
|
||||
else:
|
||||
return " ".join(words) # no truncation needed
|
||||
|
||||
# Account for the length of the suffix.
|
||||
total_length += len("...")
|
||||
|
||||
# remove words until the length is short enough
|
||||
while i > 0:
|
||||
total_length -= len(words[i]) + (i > 0)
|
||||
|
||||
if total_length <= max_length:
|
||||
break
|
||||
|
||||
i -= 1
|
||||
|
||||
return " ".join(words[:i]) + "..."
|
||||
|
||||
|
||||
class LazyFile:
|
||||
"""A lazy file works like a regular file but it does not fully open
|
||||
the file but it does perform some basic checks early to see if the
|
||||
filename parameter does make sense. This is useful for safely opening
|
||||
files for writing.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
filename: str | os.PathLike[str],
|
||||
mode: str = "r",
|
||||
encoding: str | None = None,
|
||||
errors: str | None = "strict",
|
||||
atomic: bool = False,
|
||||
):
|
||||
self.name: str = os.fspath(filename)
|
||||
self.mode = mode
|
||||
self.encoding = encoding
|
||||
self.errors = errors
|
||||
self.atomic = atomic
|
||||
self._f: t.IO[t.Any] | None
|
||||
self.should_close: bool
|
||||
|
||||
if self.name == "-":
|
||||
self._f, self.should_close = open_stream(filename, mode, encoding, errors)
|
||||
else:
|
||||
if "r" in mode:
|
||||
# Open and close the file in case we're opening it for
|
||||
# reading so that we can catch at least some errors in
|
||||
# some cases early.
|
||||
open(filename, mode).close()
|
||||
self._f = None
|
||||
self.should_close = True
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return getattr(self.open(), name)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
if self._f is not None:
|
||||
return repr(self._f)
|
||||
return f"<unopened file '{format_filename(self.name)}' {self.mode}>"
|
||||
|
||||
def open(self) -> t.IO[t.Any]:
|
||||
"""Opens the file if it's not yet open. This call might fail with
|
||||
a :exc:`FileError`. Not handling this error will produce an error
|
||||
that Click shows.
|
||||
"""
|
||||
if self._f is not None:
|
||||
return self._f
|
||||
try:
|
||||
rv, self.should_close = open_stream(
|
||||
self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
|
||||
)
|
||||
except OSError as e:
|
||||
from .exceptions import FileError
|
||||
|
||||
raise FileError(self.name, hint=e.strerror) from e
|
||||
self._f = rv
|
||||
return rv
|
||||
|
||||
def close(self) -> None:
|
||||
"""Closes the underlying file, no matter what."""
|
||||
if self._f is not None:
|
||||
self._f.close()
|
||||
|
||||
def close_intelligently(self) -> None:
|
||||
"""This function only closes the file if it was opened by the lazy
|
||||
file wrapper. For instance this will never close stdin.
|
||||
"""
|
||||
if self.should_close:
|
||||
self.close()
|
||||
|
||||
def __enter__(self) -> LazyFile:
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
self.close_intelligently()
|
||||
|
||||
def __iter__(self) -> cabc.Iterator[t.AnyStr]:
|
||||
self.open()
|
||||
return iter(self._f) # type: ignore
|
||||
|
||||
|
||||
class KeepOpenFile:
|
||||
def __init__(self, file: t.IO[t.Any]) -> None:
|
||||
self._file: t.IO[t.Any] = file
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
return getattr(self._file, name)
|
||||
|
||||
def __enter__(self) -> KeepOpenFile:
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(self._file)
|
||||
|
||||
def __iter__(self) -> cabc.Iterator[t.AnyStr]:
|
||||
return iter(self._file)
|
||||
|
||||
|
||||
def echo(
|
||||
message: t.Any | None = None,
|
||||
file: t.IO[t.Any] | None = None,
|
||||
nl: bool = True,
|
||||
err: bool = False,
|
||||
color: bool | None = None,
|
||||
) -> None:
|
||||
"""Print a message and newline to stdout or a file. This should be
|
||||
used instead of :func:`print` because it provides better support
|
||||
for different data, files, and environments.
|
||||
|
||||
Compared to :func:`print`, this does the following:
|
||||
|
||||
- Ensures that the output encoding is not misconfigured on Linux.
|
||||
- Supports Unicode in the Windows console.
|
||||
- Supports writing to binary outputs, and supports writing bytes
|
||||
to text outputs.
|
||||
- Supports colors and styles on Windows.
|
||||
- Removes ANSI color and style codes if the output does not look
|
||||
like an interactive terminal.
|
||||
- Always flushes the output.
|
||||
|
||||
:param message: The string or bytes to output. Other objects are
|
||||
converted to strings.
|
||||
:param file: The file to write to. Defaults to ``stdout``.
|
||||
:param err: Write to ``stderr`` instead of ``stdout``.
|
||||
:param nl: Print a newline after the message. Enabled by default.
|
||||
:param color: Force showing or hiding colors and other styles. By
|
||||
default Click will remove color if the output does not look like
|
||||
an interactive terminal.
|
||||
|
||||
.. versionchanged:: 6.0
|
||||
Support Unicode output on the Windows console. Click does not
|
||||
modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()``
|
||||
will still not support Unicode.
|
||||
|
||||
.. versionchanged:: 4.0
|
||||
Added the ``color`` parameter.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
Added the ``err`` parameter.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
Support colors on Windows if colorama is installed.
|
||||
"""
|
||||
if file is None:
|
||||
if err:
|
||||
file = _default_text_stderr()
|
||||
else:
|
||||
file = _default_text_stdout()
|
||||
|
||||
# There are no standard streams attached to write to. For example,
|
||||
# pythonw on Windows.
|
||||
if file is None:
|
||||
return
|
||||
|
||||
# Convert non bytes/text into the native string type.
|
||||
if message is not None and not isinstance(message, (str, bytes, bytearray)):
|
||||
out: str | bytes | bytearray | None = str(message)
|
||||
else:
|
||||
out = message
|
||||
|
||||
if nl:
|
||||
out = out or ""
|
||||
if isinstance(out, str):
|
||||
out += "\n"
|
||||
else:
|
||||
out += b"\n"
|
||||
|
||||
if not out:
|
||||
file.flush()
|
||||
return
|
||||
|
||||
# If there is a message and the value looks like bytes, we manually
|
||||
# need to find the binary stream and write the message in there.
|
||||
# This is done separately so that most stream types will work as you
|
||||
# would expect. Eg: you can write to StringIO for other cases.
|
||||
if isinstance(out, (bytes, bytearray)):
|
||||
binary_file = _find_binary_writer(file)
|
||||
|
||||
if binary_file is not None:
|
||||
file.flush()
|
||||
binary_file.write(out)
|
||||
binary_file.flush()
|
||||
return
|
||||
|
||||
# ANSI style code support. For no message or bytes, nothing happens.
|
||||
# When outputting to a file instead of a terminal, strip codes.
|
||||
else:
|
||||
color = resolve_color_default(color)
|
||||
|
||||
if should_strip_ansi(file, color):
|
||||
out = strip_ansi(out)
|
||||
elif WIN:
|
||||
if auto_wrap_for_ansi is not None:
|
||||
file = auto_wrap_for_ansi(file, color) # type: ignore
|
||||
elif not color:
|
||||
out = strip_ansi(out)
|
||||
|
||||
file.write(out) # type: ignore
|
||||
file.flush()
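# Illustrative usage sketch (not part of the upstream click source): echo()
# accepts text, bytes, and arbitrary objects, and routes to stderr on request.
def _example_echo_usage() -> None:
    echo("building wheel...")                        # plain text to stdout
    echo(b"raw bytes go to the binary stream\n", nl=False)
    echo("something went wrong", err=True)           # diagnostics to stderr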
|
||||
|
||||
|
||||
def get_binary_stream(name: t.Literal["stdin", "stdout", "stderr"]) -> t.BinaryIO:
|
||||
"""Returns a system stream for byte processing.
|
||||
|
||||
:param name: the name of the stream to open. Valid names are ``'stdin'``,
|
||||
``'stdout'`` and ``'stderr'``
|
||||
"""
|
||||
opener = binary_streams.get(name)
|
||||
if opener is None:
|
||||
raise TypeError(f"Unknown standard stream '{name}'")
|
||||
return opener()
|
||||
|
||||
|
||||
def get_text_stream(
|
||||
name: t.Literal["stdin", "stdout", "stderr"],
|
||||
encoding: str | None = None,
|
||||
errors: str | None = "strict",
|
||||
) -> t.TextIO:
|
||||
"""Returns a system stream for text processing. This usually returns
|
||||
a wrapped stream around a binary stream returned from
|
||||
:func:`get_binary_stream` but it also can take shortcuts for already
|
||||
correctly configured streams.
|
||||
|
||||
:param name: the name of the stream to open. Valid names are ``'stdin'``,
|
||||
``'stdout'`` and ``'stderr'``
|
||||
:param encoding: overrides the detected default encoding.
|
||||
:param errors: overrides the default error mode.
|
||||
"""
|
||||
opener = text_streams.get(name)
|
||||
if opener is None:
|
||||
raise TypeError(f"Unknown standard stream '{name}'")
|
||||
return opener(encoding, errors)
|
||||
|
||||
|
||||
def open_file(
|
||||
filename: str | os.PathLike[str],
|
||||
mode: str = "r",
|
||||
encoding: str | None = None,
|
||||
errors: str | None = "strict",
|
||||
lazy: bool = False,
|
||||
atomic: bool = False,
|
||||
) -> t.IO[t.Any]:
|
||||
"""Open a file, with extra behavior to handle ``'-'`` to indicate
|
||||
a standard stream, lazy open on write, and atomic write. Similar to
|
||||
the behavior of the :class:`~click.File` param type.
|
||||
|
||||
If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is
|
||||
wrapped so that using it in a context manager will not close it.
|
||||
This makes it possible to use the function without accidentally
|
||||
closing a standard stream:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
with open_file(filename) as f:
|
||||
...
|
||||
|
||||
:param filename: The name or Path of the file to open, or ``'-'`` for
|
||||
``stdin``/``stdout``.
|
||||
:param mode: The mode in which to open the file.
|
||||
:param encoding: The encoding to decode or encode a file opened in
|
||||
text mode.
|
||||
:param errors: The error handling mode.
|
||||
:param lazy: Wait to open the file until it is accessed. For read
|
||||
mode, the file is temporarily opened to raise access errors
|
||||
early, then closed until it is read again.
|
||||
:param atomic: Write to a temporary file and replace the given file
|
||||
on close.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
if lazy:
|
||||
return t.cast(
|
||||
"t.IO[t.Any]", LazyFile(filename, mode, encoding, errors, atomic=atomic)
|
||||
)
|
||||
|
||||
f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
|
||||
|
||||
if not should_close:
|
||||
f = t.cast("t.IO[t.Any]", KeepOpenFile(f))
|
||||
|
||||
return f
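# Illustrative usage sketch (not part of the upstream click source): "-" reads
# from stdin without closing it, while atomic=True writes through a temporary
# file that replaces the target on close. The file names are hypothetical.
def _example_copy_to_report(source: str = "-") -> None:
    with open_file(source) as src, open_file("report.txt", "w", atomic=True) as dst:
        for line in src:
            dst.write(line)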
|
||||
|
||||
|
||||
def format_filename(
|
||||
filename: str | bytes | os.PathLike[str] | os.PathLike[bytes],
|
||||
shorten: bool = False,
|
||||
) -> str:
|
||||
"""Format a filename as a string for display. Ensures the filename can be
|
||||
displayed by replacing any invalid bytes or surrogate escapes in the name
|
||||
with the replacement character ``�``.
|
||||
|
||||
Invalid bytes or surrogate escapes will raise an error when written to a
|
||||
stream with ``errors="strict"``. This will typically happen with ``stdout``
|
||||
when the locale is something like ``en_GB.UTF-8``.
|
||||
|
||||
Many scenarios *are* safe to write surrogates though, due to PEP 538 and
|
||||
PEP 540, including:
|
||||
|
||||
- Writing to ``stderr``, which uses ``errors="backslashreplace"``.
|
||||
- The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens
|
||||
stdout and stderr with ``errors="surrogateescape"``.
|
||||
- None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``.
|
||||
- Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``.
|
||||
Python opens stdout and stderr with ``errors="surrogateescape"``.
|
||||
|
||||
:param filename: formats a filename for UI display. This will also convert
|
||||
the filename into unicode without failing.
|
||||
:param shorten: this optionally shortens the filename to strip off the
|
||||
path that leads up to it.
|
||||
"""
|
||||
if shorten:
|
||||
filename = os.path.basename(filename)
|
||||
else:
|
||||
filename = os.fspath(filename)
|
||||
|
||||
if isinstance(filename, bytes):
|
||||
filename = filename.decode(sys.getfilesystemencoding(), "replace")
|
||||
else:
|
||||
filename = filename.encode("utf-8", "surrogateescape").decode(
|
||||
"utf-8", "replace"
|
||||
)
|
||||
|
||||
return filename
|
||||
|
||||
|
||||
def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str:
|
||||
r"""Returns the config folder for the application. The default behavior
|
||||
is to return whatever is most appropriate for the operating system.
|
||||
|
||||
To give you an idea, for an app called ``"Foo Bar"``, something like
|
||||
the following folders could be returned:
|
||||
|
||||
Mac OS X:
|
||||
``~/Library/Application Support/Foo Bar``
|
||||
Mac OS X (POSIX):
|
||||
``~/.foo-bar``
|
||||
Unix:
|
||||
``~/.config/foo-bar``
|
||||
Unix (POSIX):
|
||||
``~/.foo-bar``
|
||||
Windows (roaming):
|
||||
``C:\Users\<user>\AppData\Roaming\Foo Bar``
|
||||
Windows (not roaming):
|
||||
``C:\Users\<user>\AppData\Local\Foo Bar``
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param app_name: the application name. This should be properly capitalized
|
||||
and can contain whitespace.
|
||||
:param roaming: controls if the folder should be roaming or not on Windows.
|
||||
Has no effect otherwise.
|
||||
:param force_posix: if this is set to `True` then on any POSIX system the
|
||||
folder will be stored in the home folder with a leading
|
||||
dot instead of the XDG config home or darwin's
|
||||
application support folder.
|
||||
"""
|
||||
if WIN:
|
||||
key = "APPDATA" if roaming else "LOCALAPPDATA"
|
||||
folder = os.environ.get(key)
|
||||
if folder is None:
|
||||
folder = os.path.expanduser("~")
|
||||
return os.path.join(folder, app_name)
|
||||
if force_posix:
|
||||
return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
|
||||
if sys.platform == "darwin":
|
||||
return os.path.join(
|
||||
os.path.expanduser("~/Library/Application Support"), app_name
|
||||
)
|
||||
return os.path.join(
|
||||
os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
|
||||
_posixify(app_name),
|
||||
)
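# Illustrative usage sketch (not part of the upstream click source): derive a
# per-user config file location for a hypothetical application name.
def _example_config_path() -> str:
    return os.path.join(get_app_dir("Foo Bar"), "config.toml")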
|
||||
|
||||
|
||||
class PacifyFlushWrapper:
|
||||
"""This wrapper is used to catch and suppress BrokenPipeErrors resulting
|
||||
from ``.flush()`` being called on broken pipe during the shutdown/final-GC
|
||||
of the Python interpreter. Notably ``.flush()`` is always called on
|
||||
``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
|
||||
other cleanup code, and the case where the underlying file is not a broken
|
||||
pipe, all calls and attributes are proxied.
|
||||
"""
|
||||
|
||||
def __init__(self, wrapped: t.IO[t.Any]) -> None:
|
||||
self.wrapped = wrapped
|
||||
|
||||
def flush(self) -> None:
|
||||
try:
|
||||
self.wrapped.flush()
|
||||
except OSError as e:
|
||||
import errno
|
||||
|
||||
if e.errno != errno.EPIPE:
|
||||
raise
|
||||
|
||||
def __getattr__(self, attr: str) -> t.Any:
|
||||
return getattr(self.wrapped, attr)
|
||||
|
||||
|
||||
def _detect_program_name(
|
||||
path: str | None = None, _main: ModuleType | None = None
|
||||
) -> str:
|
||||
"""Determine the command used to run the program, for use in help
|
||||
text. If a file or entry point was executed, the file name is
|
||||
returned. If ``python -m`` was used to execute a module or package,
|
||||
``python -m name`` is returned.
|
||||
|
||||
This doesn't try to be too precise; the goal is to give a concise
|
||||
name for help text. Files are only shown as their name without the
|
||||
path. ``python`` is only shown for modules, and the full path to
|
||||
``sys.executable`` is not shown.
|
||||
|
||||
:param path: The Python file being executed. Python puts this in
|
||||
``sys.argv[0]``, which is used by default.
|
||||
:param _main: The ``__main__`` module. This should only be passed
|
||||
during internal testing.
|
||||
|
||||
.. versionadded:: 8.0
|
||||
Based on command args detection in the Werkzeug reloader.
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
if _main is None:
|
||||
_main = sys.modules["__main__"]
|
||||
|
||||
if not path:
|
||||
path = sys.argv[0]
|
||||
|
||||
# The value of __package__ indicates how Python was called. It may
|
||||
# not exist if a setuptools script is installed as an egg. It may be
|
||||
# set incorrectly for entry points created with pip on Windows.
|
||||
# It is set to "" inside a Shiv or PEX zipapp.
|
||||
if getattr(_main, "__package__", None) in {None, ""} or (
|
||||
os.name == "nt"
|
||||
and _main.__package__ == ""
|
||||
and not os.path.exists(path)
|
||||
and os.path.exists(f"{path}.exe")
|
||||
):
|
||||
# Executed a file, like "python app.py".
|
||||
return os.path.basename(path)
|
||||
|
||||
# Executed a module, like "python -m example".
|
||||
# Rewritten by Python from "-m script" to "/path/to/script.py".
|
||||
# Need to look at main module to determine how it was executed.
|
||||
py_module = t.cast(str, _main.__package__)
|
||||
name = os.path.splitext(os.path.basename(path))[0]
|
||||
|
||||
# A submodule like "example.cli".
|
||||
if name != "__main__":
|
||||
py_module = f"{py_module}.{name}"
|
||||
|
||||
return f"python -m {py_module.lstrip('.')}"
|
||||
|
||||
|
||||
def _expand_args(
|
||||
args: cabc.Iterable[str],
|
||||
*,
|
||||
user: bool = True,
|
||||
env: bool = True,
|
||||
glob_recursive: bool = True,
|
||||
) -> list[str]:
|
||||
"""Simulate Unix shell expansion with Python functions.
|
||||
|
||||
See :func:`glob.glob`, :func:`os.path.expanduser`, and
|
||||
:func:`os.path.expandvars`.
|
||||
|
||||
This is intended for use on Windows, where the shell does not do any
|
||||
expansion. It may not exactly match what a Unix shell would do.
|
||||
|
||||
:param args: List of command line arguments to expand.
|
||||
:param user: Expand user home directory.
|
||||
:param env: Expand environment variables.
|
||||
:param glob_recursive: ``**`` matches directories recursively.
|
||||
|
||||
.. versionchanged:: 8.1
|
||||
Invalid glob patterns are treated as empty expansions rather
|
||||
than raising an error.
|
||||
|
||||
.. versionadded:: 8.0
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
from glob import glob
|
||||
|
||||
out = []
|
||||
|
||||
for arg in args:
|
||||
if user:
|
||||
arg = os.path.expanduser(arg)
|
||||
|
||||
if env:
|
||||
arg = os.path.expandvars(arg)
|
||||
|
||||
try:
|
||||
matches = glob(arg, recursive=glob_recursive)
|
||||
except re.error:
|
||||
matches = []
|
||||
|
||||
if not matches:
|
||||
out.append(arg)
|
||||
else:
|
||||
out.extend(matches)
|
||||
|
||||
return out
|
||||
@ -0,0 +1,49 @@
|
||||
from typing import Any, Optional
|
||||
|
||||
from .main import (dotenv_values, find_dotenv, get_key, load_dotenv, set_key,
|
||||
unset_key)
|
||||
|
||||
|
||||
def load_ipython_extension(ipython: Any) -> None:
|
||||
from .ipython import load_ipython_extension
|
||||
load_ipython_extension(ipython)
|
||||
|
||||
|
||||
def get_cli_string(
|
||||
path: Optional[str] = None,
|
||||
action: Optional[str] = None,
|
||||
key: Optional[str] = None,
|
||||
value: Optional[str] = None,
|
||||
quote: Optional[str] = None,
|
||||
):
|
||||
"""Returns a string suitable for running as a shell script.
|
||||
|
||||
Useful for converting arguments passed to a fabric task
|
||||
to be passed to a `local` or `run` command.
|
||||
"""
|
||||
command = ['dotenv']
|
||||
if quote:
|
||||
command.append(f'-q {quote}')
|
||||
if path:
|
||||
command.append(f'-f {path}')
|
||||
if action:
|
||||
command.append(action)
|
||||
if key:
|
||||
command.append(key)
|
||||
if value:
|
||||
if ' ' in value:
|
||||
command.append(f'"{value}"')
|
||||
else:
|
||||
command.append(value)
|
||||
|
||||
return ' '.join(command).strip()
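# Illustrative usage sketch (not part of the upstream python-dotenv source):
# build a shell command string for a fabric-style `run()` helper. The values
# are hypothetical.
def _example_cli_string() -> str:
    # Produces: dotenv -f .env set DEBUG true
    return get_cli_string(path=".env", action="set", key="DEBUG", value="true")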
|
||||
|
||||
|
||||
__all__ = ['get_cli_string',
|
||||
'load_dotenv',
|
||||
'dotenv_values',
|
||||
'get_key',
|
||||
'set_key',
|
||||
'unset_key',
|
||||
'find_dotenv',
|
||||
'load_ipython_extension']
|
||||
@ -0,0 +1,6 @@
|
||||
"""Entry point for cli, enables execution with `python -m dotenv`"""
|
||||
|
||||
from .cli import cli
|
||||
|
||||
if __name__ == "__main__":
|
||||
cli()
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,199 @@
|
||||
import json
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from subprocess import Popen
|
||||
from typing import Any, Dict, IO, Iterator, List
|
||||
|
||||
try:
|
||||
import click
|
||||
except ImportError:
|
||||
sys.stderr.write('It seems python-dotenv is not installed with cli option. \n'
|
||||
'Run pip install "python-dotenv[cli]" to fix this.')
|
||||
sys.exit(1)
|
||||
|
||||
from .main import dotenv_values, set_key, unset_key
|
||||
from .version import __version__
|
||||
|
||||
|
||||
def enumerate_env():
|
||||
"""
|
||||
Return a path for the ${pwd}/.env file.
|
||||
|
||||
If pwd does not exist, return None.
|
||||
"""
|
||||
try:
|
||||
cwd = os.getcwd()
|
||||
except FileNotFoundError:
|
||||
return None
|
||||
path = os.path.join(cwd, '.env')
|
||||
return path
|
||||
|
||||
|
||||
@click.group()
|
||||
@click.option('-f', '--file', default=enumerate_env(),
|
||||
type=click.Path(file_okay=True),
|
||||
help="Location of the .env file, defaults to .env file in current working directory.")
|
||||
@click.option('-q', '--quote', default='always',
|
||||
type=click.Choice(['always', 'never', 'auto']),
|
||||
help="Whether to quote or not the variable values. Default mode is always. This does not affect parsing.")
|
||||
@click.option('-e', '--export', default=False,
|
||||
type=click.BOOL,
|
||||
help="Whether to write the dot file as an executable bash script.")
|
||||
@click.version_option(version=__version__)
|
||||
@click.pass_context
|
||||
def cli(ctx: click.Context, file: Any, quote: Any, export: Any) -> None:
|
||||
"""This script is used to set, get or unset values from a .env file."""
|
||||
ctx.obj = {'QUOTE': quote, 'EXPORT': export, 'FILE': file}
|
||||
|
||||
|
||||
@contextmanager
|
||||
def stream_file(path: os.PathLike) -> Iterator[IO[str]]:
|
||||
"""
|
||||
Open a file and yield the corresponding (decoded) stream.
|
||||
|
||||
Exits with error code 2 if the file cannot be opened.
|
||||
"""
|
||||
|
||||
try:
|
||||
with open(path) as stream:
|
||||
yield stream
|
||||
except OSError as exc:
|
||||
print(f"Error opening env file: {exc}", file=sys.stderr)
|
||||
exit(2)
|
||||
|
||||
|
||||
@cli.command()
|
||||
@click.pass_context
|
||||
@click.option('--format', default='simple',
|
||||
type=click.Choice(['simple', 'json', 'shell', 'export']),
|
||||
help="The format in which to display the list. Default format is simple, "
|
||||
"which displays name=value without quotes.")
|
||||
def list(ctx: click.Context, format: bool) -> None:
|
||||
"""Display all the stored key/value."""
|
||||
file = ctx.obj['FILE']
|
||||
|
||||
with stream_file(file) as stream:
|
||||
values = dotenv_values(stream=stream)
|
||||
|
||||
if format == 'json':
|
||||
click.echo(json.dumps(values, indent=2, sort_keys=True))
|
||||
else:
|
||||
prefix = 'export ' if format == 'export' else ''
|
||||
for k in sorted(values):
|
||||
v = values[k]
|
||||
if v is not None:
|
||||
if format in ('export', 'shell'):
|
||||
v = shlex.quote(v)
|
||||
click.echo(f'{prefix}{k}={v}')
|
||||
|
||||
|
||||
@cli.command()
|
||||
@click.pass_context
|
||||
@click.argument('key', required=True)
|
||||
@click.argument('value', required=True)
|
||||
def set(ctx: click.Context, key: Any, value: Any) -> None:
|
||||
"""Store the given key/value."""
|
||||
file = ctx.obj['FILE']
|
||||
quote = ctx.obj['QUOTE']
|
||||
export = ctx.obj['EXPORT']
|
||||
success, key, value = set_key(file, key, value, quote, export)
|
||||
if success:
|
||||
click.echo(f'{key}={value}')
|
||||
else:
|
||||
exit(1)
|
||||
|
||||
|
||||
@cli.command()
|
||||
@click.pass_context
|
||||
@click.argument('key', required=True)
|
||||
def get(ctx: click.Context, key: Any) -> None:
|
||||
"""Retrieve the value for the given key."""
|
||||
file = ctx.obj['FILE']
|
||||
|
||||
with stream_file(file) as stream:
|
||||
values = dotenv_values(stream=stream)
|
||||
|
||||
stored_value = values.get(key)
|
||||
if stored_value:
|
||||
click.echo(stored_value)
|
||||
else:
|
||||
exit(1)
|
||||
|
||||
|
||||
@cli.command()
|
||||
@click.pass_context
|
||||
@click.argument('key', required=True)
|
||||
def unset(ctx: click.Context, key: Any) -> None:
|
||||
"""Removes the given key."""
|
||||
file = ctx.obj['FILE']
|
||||
quote = ctx.obj['QUOTE']
|
||||
success, key = unset_key(file, key, quote)
|
||||
if success:
|
||||
click.echo(f"Successfully removed {key}")
|
||||
else:
|
||||
exit(1)
|
||||
|
||||
|
||||
@cli.command(context_settings={'ignore_unknown_options': True})
|
||||
@click.pass_context
|
||||
@click.option(
|
||||
"--override/--no-override",
|
||||
default=True,
|
||||
help="Override variables from the environment file with those from the .env file.",
|
||||
)
|
||||
@click.argument('commandline', nargs=-1, type=click.UNPROCESSED)
|
||||
def run(ctx: click.Context, override: bool, commandline: List[str]) -> None:
|
||||
"""Run command with environment variables present."""
|
||||
file = ctx.obj['FILE']
|
||||
if not os.path.isfile(file):
|
||||
raise click.BadParameter(
|
||||
f'Invalid value for \'-f\' "{file}" does not exist.',
|
||||
ctx=ctx
|
||||
)
|
||||
dotenv_as_dict = {
|
||||
k: v
|
||||
for (k, v) in dotenv_values(file).items()
|
||||
if v is not None and (override or k not in os.environ)
|
||||
}
|
||||
|
||||
if not commandline:
|
||||
click.echo('No command given.')
|
||||
exit(1)
|
||||
ret = run_command(commandline, dotenv_as_dict)
|
||||
exit(ret)
|
||||
|
||||
|
||||
def run_command(command: List[str], env: Dict[str, str]) -> int:
|
||||
"""Run command in sub process.
|
||||
|
||||
Runs the command in a sub process with the variables from `env`
|
||||
added to the current environment variables.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
command: List[str]
|
||||
The command and its parameters
|
||||
env: Dict
|
||||
The additional environment variables
|
||||
|
||||
Returns
|
||||
-------
|
||||
int
|
||||
The return code of the command
|
||||
|
||||
"""
|
||||
# copy the current environment variables and add the values from
|
||||
# `env`
|
||||
cmd_env = os.environ.copy()
|
||||
cmd_env.update(env)
|
||||
|
||||
p = Popen(command,
|
||||
universal_newlines=True,
|
||||
bufsize=0,
|
||||
shell=False,
|
||||
env=cmd_env)
|
||||
_, _ = p.communicate()
|
||||
|
||||
return p.returncode
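# Illustrative usage sketch (not part of the upstream python-dotenv source):
# run a child process with values from a hypothetical .env layered on top of
# the current environment.
def _example_run_with_env() -> int:
    extra = {k: v for k, v in dotenv_values(".env").items() if v is not None}
    return run_command(
        ["python", "-c", "import os; print(os.environ.get('API_KEY'))"], extra
    )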
|
||||
@ -0,0 +1,39 @@
|
||||
from IPython.core.magic import Magics, line_magic, magics_class # type: ignore
|
||||
from IPython.core.magic_arguments import (argument, magic_arguments, # type: ignore
|
||||
parse_argstring) # type: ignore
|
||||
|
||||
from .main import find_dotenv, load_dotenv
|
||||
|
||||
|
||||
@magics_class
|
||||
class IPythonDotEnv(Magics):
|
||||
|
||||
@magic_arguments()
|
||||
@argument(
|
||||
'-o', '--override', action='store_true',
|
||||
help="Indicate to override existing variables"
|
||||
)
|
||||
@argument(
|
||||
'-v', '--verbose', action='store_true',
|
||||
help="Indicate function calls to be verbose"
|
||||
)
|
||||
@argument('dotenv_path', nargs='?', type=str, default='.env',
|
||||
help='Search in increasingly higher folders for the `dotenv_path`')
|
||||
@line_magic
|
||||
def dotenv(self, line):
|
||||
args = parse_argstring(self.dotenv, line)
|
||||
# Locate the .env file
|
||||
dotenv_path = args.dotenv_path
|
||||
try:
|
||||
dotenv_path = find_dotenv(dotenv_path, True, True)
|
||||
except IOError:
|
||||
print("cannot find .env file")
|
||||
return
|
||||
|
||||
# Load the .env file
|
||||
load_dotenv(dotenv_path, verbose=args.verbose, override=args.override)
|
||||
|
||||
|
||||
def load_ipython_extension(ipython):
|
||||
"""Register the %dotenv magic."""
|
||||
ipython.register_magics(IPythonDotEnv)
|
||||
@ -0,0 +1,392 @@
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from collections import OrderedDict
|
||||
from contextlib import contextmanager
|
||||
from typing import (IO, Dict, Iterable, Iterator, Mapping, Optional, Tuple,
|
||||
Union)
|
||||
|
||||
from .parser import Binding, parse_stream
|
||||
from .variables import parse_variables
|
||||
|
||||
# A type alias for a string path to be used for the paths in this file.
|
||||
# These paths may flow to `open()` and `shutil.move()`; `shutil.move()`
|
||||
# only accepts string paths, not byte paths or file descriptors. See
|
||||
# https://github.com/python/typeshed/pull/6832.
|
||||
StrPath = Union[str, 'os.PathLike[str]']
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def with_warn_for_invalid_lines(mappings: Iterator[Binding]) -> Iterator[Binding]:
|
||||
for mapping in mappings:
|
||||
if mapping.error:
|
||||
logger.warning(
|
||||
"Python-dotenv could not parse statement starting at line %s",
|
||||
mapping.original.line,
|
||||
)
|
||||
yield mapping
|
||||
|
||||
|
||||
class DotEnv:
|
||||
def __init__(
|
||||
self,
|
||||
dotenv_path: Optional[StrPath],
|
||||
stream: Optional[IO[str]] = None,
|
||||
verbose: bool = False,
|
||||
encoding: Optional[str] = None,
|
||||
interpolate: bool = True,
|
||||
override: bool = True,
|
||||
) -> None:
|
||||
self.dotenv_path: Optional[StrPath] = dotenv_path
|
||||
self.stream: Optional[IO[str]] = stream
|
||||
self._dict: Optional[Dict[str, Optional[str]]] = None
|
||||
self.verbose: bool = verbose
|
||||
self.encoding: Optional[str] = encoding
|
||||
self.interpolate: bool = interpolate
|
||||
self.override: bool = override
|
||||
|
||||
@contextmanager
|
||||
def _get_stream(self) -> Iterator[IO[str]]:
|
||||
if self.dotenv_path and os.path.isfile(self.dotenv_path):
|
||||
with open(self.dotenv_path, encoding=self.encoding) as stream:
|
||||
yield stream
|
||||
elif self.stream is not None:
|
||||
yield self.stream
|
||||
else:
|
||||
if self.verbose:
|
||||
logger.info(
|
||||
"Python-dotenv could not find configuration file %s.",
|
||||
self.dotenv_path or '.env',
|
||||
)
|
||||
yield io.StringIO('')
|
||||
|
||||
def dict(self) -> Dict[str, Optional[str]]:
|
||||
"""Return dotenv as dict"""
|
||||
if self._dict:
|
||||
return self._dict
|
||||
|
||||
raw_values = self.parse()
|
||||
|
||||
if self.interpolate:
|
||||
self._dict = OrderedDict(resolve_variables(raw_values, override=self.override))
|
||||
else:
|
||||
self._dict = OrderedDict(raw_values)
|
||||
|
||||
return self._dict
|
||||
|
||||
def parse(self) -> Iterator[Tuple[str, Optional[str]]]:
|
||||
with self._get_stream() as stream:
|
||||
for mapping in with_warn_for_invalid_lines(parse_stream(stream)):
|
||||
if mapping.key is not None:
|
||||
yield mapping.key, mapping.value
|
||||
|
||||
def set_as_environment_variables(self) -> bool:
|
||||
"""
|
||||
Load the current dotenv as system environment variables.
|
||||
"""
|
||||
if not self.dict():
|
||||
return False
|
||||
|
||||
for k, v in self.dict().items():
|
||||
if k in os.environ and not self.override:
|
||||
continue
|
||||
if v is not None:
|
||||
os.environ[k] = v
|
||||
|
||||
return True
|
||||
|
||||
def get(self, key: str) -> Optional[str]:
|
||||
"""
|
||||
"""
|
||||
data = self.dict()
|
||||
|
||||
if key in data:
|
||||
return data[key]
|
||||
|
||||
if self.verbose:
|
||||
logger.warning("Key %s not found in %s.", key, self.dotenv_path)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_key(
|
||||
dotenv_path: StrPath,
|
||||
key_to_get: str,
|
||||
encoding: Optional[str] = "utf-8",
|
||||
) -> Optional[str]:
|
||||
"""
|
||||
Get the value of a given key from the given .env.
|
||||
|
||||
Returns `None` if the key isn't found or doesn't have a value.
|
||||
"""
|
||||
return DotEnv(dotenv_path, verbose=True, encoding=encoding).get(key_to_get)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def rewrite(
|
||||
path: StrPath,
|
||||
encoding: Optional[str],
|
||||
) -> Iterator[Tuple[IO[str], IO[str]]]:
|
||||
pathlib.Path(path).touch()
|
||||
|
||||
with tempfile.NamedTemporaryFile(mode="w", encoding=encoding, delete=False) as dest:
|
||||
error = None
|
||||
try:
|
||||
with open(path, encoding=encoding) as source:
|
||||
yield (source, dest)
|
||||
except BaseException as err:
|
||||
error = err
|
||||
|
||||
if error is None:
|
||||
shutil.move(dest.name, path)
|
||||
else:
|
||||
os.unlink(dest.name)
|
||||
raise error from None
|
||||
|
||||
|
||||
def set_key(
|
||||
dotenv_path: StrPath,
|
||||
key_to_set: str,
|
||||
value_to_set: str,
|
||||
quote_mode: str = "always",
|
||||
export: bool = False,
|
||||
encoding: Optional[str] = "utf-8",
|
||||
) -> Tuple[Optional[bool], str, str]:
|
||||
"""
|
||||
Adds or updates a key/value pair in the given .env file.

If the given .env path doesn't exist yet, it is created first (`rewrite`
touches the path before writing).
|
||||
"""
|
||||
if quote_mode not in ("always", "auto", "never"):
|
||||
raise ValueError(f"Unknown quote_mode: {quote_mode}")
|
||||
|
||||
quote = (
|
||||
quote_mode == "always"
|
||||
or (quote_mode == "auto" and not value_to_set.isalnum())
|
||||
)
|
||||
|
||||
if quote:
|
||||
value_out = "'{}'".format(value_to_set.replace("'", "\\'"))
|
||||
else:
|
||||
value_out = value_to_set
|
||||
if export:
|
||||
line_out = f'export {key_to_set}={value_out}\n'
|
||||
else:
|
||||
line_out = f"{key_to_set}={value_out}\n"
|
||||
|
||||
with rewrite(dotenv_path, encoding=encoding) as (source, dest):
|
||||
replaced = False
|
||||
missing_newline = False
|
||||
for mapping in with_warn_for_invalid_lines(parse_stream(source)):
|
||||
if mapping.key == key_to_set:
|
||||
dest.write(line_out)
|
||||
replaced = True
|
||||
else:
|
||||
dest.write(mapping.original.string)
|
||||
missing_newline = not mapping.original.string.endswith("\n")
|
||||
if not replaced:
|
||||
if missing_newline:
|
||||
dest.write("\n")
|
||||
dest.write(line_out)
|
||||
|
||||
return True, key_to_set, value_to_set
|
||||
|
||||
|
||||
def unset_key(
|
||||
dotenv_path: StrPath,
|
||||
key_to_unset: str,
|
||||
quote_mode: str = "always",
|
||||
encoding: Optional[str] = "utf-8",
|
||||
) -> Tuple[Optional[bool], str]:
|
||||
"""
|
||||
Removes a given key from the given `.env` file.
|
||||
|
||||
If the .env path given doesn't exist, fails.
|
||||
If the given key doesn't exist in the .env, fails.
|
||||
"""
|
||||
if not os.path.exists(dotenv_path):
|
||||
logger.warning("Can't delete from %s - it doesn't exist.", dotenv_path)
|
||||
return None, key_to_unset
|
||||
|
||||
removed = False
|
||||
with rewrite(dotenv_path, encoding=encoding) as (source, dest):
|
||||
for mapping in with_warn_for_invalid_lines(parse_stream(source)):
|
||||
if mapping.key == key_to_unset:
|
||||
removed = True
|
||||
else:
|
||||
dest.write(mapping.original.string)
|
||||
|
||||
if not removed:
|
||||
logger.warning("Key %s not removed from %s - key doesn't exist.", key_to_unset, dotenv_path)
|
||||
return None, key_to_unset
|
||||
|
||||
return removed, key_to_unset
|
||||
|
||||
|
||||
def resolve_variables(
|
||||
values: Iterable[Tuple[str, Optional[str]]],
|
||||
override: bool,
|
||||
) -> Mapping[str, Optional[str]]:
|
||||
new_values: Dict[str, Optional[str]] = {}
|
||||
|
||||
for (name, value) in values:
|
||||
if value is None:
|
||||
result = None
|
||||
else:
|
||||
atoms = parse_variables(value)
|
||||
env: Dict[str, Optional[str]] = {}
|
||||
if override:
|
||||
env.update(os.environ) # type: ignore
|
||||
env.update(new_values)
|
||||
else:
|
||||
env.update(new_values)
|
||||
env.update(os.environ) # type: ignore
|
||||
result = "".join(atom.resolve(env) for atom in atoms)
|
||||
|
||||
new_values[name] = result
|
||||
|
||||
return new_values
|
||||
|
||||
|
||||
def _walk_to_root(path: str) -> Iterator[str]:
|
||||
"""
|
||||
Yield directories starting from the given directory up to the root
|
||||
"""
|
||||
if not os.path.exists(path):
|
||||
raise IOError('Starting path not found')
|
||||
|
||||
if os.path.isfile(path):
|
||||
path = os.path.dirname(path)
|
||||
|
||||
last_dir = None
|
||||
current_dir = os.path.abspath(path)
|
||||
while last_dir != current_dir:
|
||||
yield current_dir
|
||||
parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir))
|
||||
last_dir, current_dir = current_dir, parent_dir
|
||||
|
||||
|
||||
def find_dotenv(
|
||||
filename: str = '.env',
|
||||
raise_error_if_not_found: bool = False,
|
||||
usecwd: bool = False,
|
||||
) -> str:
|
||||
"""
|
||||
Search in increasingly higher folders for the given file
|
||||
|
||||
Returns path to the file if found, or an empty string otherwise
|
||||
"""
|
||||
|
||||
def _is_interactive():
|
||||
""" Decide whether this is running in a REPL or IPython notebook """
|
||||
try:
|
||||
main = __import__('__main__', None, None, fromlist=['__file__'])
|
||||
except ModuleNotFoundError:
|
||||
return False
|
||||
return not hasattr(main, '__file__')
|
||||
|
||||
if usecwd or _is_interactive() or getattr(sys, 'frozen', False):
|
||||
# Should work without __file__, e.g. in REPL or IPython notebook.
|
||||
path = os.getcwd()
|
||||
else:
|
||||
# will work for .py files
|
||||
frame = sys._getframe()
|
||||
current_file = __file__
|
||||
|
||||
while frame.f_code.co_filename == current_file or not os.path.exists(
|
||||
frame.f_code.co_filename
|
||||
):
|
||||
assert frame.f_back is not None
|
||||
frame = frame.f_back
|
||||
frame_filename = frame.f_code.co_filename
|
||||
path = os.path.dirname(os.path.abspath(frame_filename))
|
||||
|
||||
for dirname in _walk_to_root(path):
|
||||
check_path = os.path.join(dirname, filename)
|
||||
if os.path.isfile(check_path):
|
||||
return check_path
|
||||
|
||||
if raise_error_if_not_found:
|
||||
raise IOError('File not found')
|
||||
|
||||
return ''
|
||||
|
||||
|
||||
def load_dotenv(
|
||||
dotenv_path: Optional[StrPath] = None,
|
||||
stream: Optional[IO[str]] = None,
|
||||
verbose: bool = False,
|
||||
override: bool = False,
|
||||
interpolate: bool = True,
|
||||
encoding: Optional[str] = "utf-8",
|
||||
) -> bool:
|
||||
"""Parse a .env file and then load all the variables found as environment variables.
|
||||
|
||||
Parameters:
|
||||
dotenv_path: Absolute or relative path to .env file.
|
||||
stream: Text stream (such as `io.StringIO`) with .env content, used if
|
||||
`dotenv_path` is `None`.
|
||||
verbose: Whether to output a warning if the .env file is missing.
|
||||
override: Whether to override the system environment variables with the variables
|
||||
from the `.env` file.
|
||||
encoding: Encoding to be used to read the file.
|
||||
Returns:
|
||||
Bool: True if at least one environment variable is set else False
|
||||
|
||||
If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the
|
||||
.env file.
|
||||
"""
|
||||
if dotenv_path is None and stream is None:
|
||||
dotenv_path = find_dotenv()
|
||||
|
||||
dotenv = DotEnv(
|
||||
dotenv_path=dotenv_path,
|
||||
stream=stream,
|
||||
verbose=verbose,
|
||||
interpolate=interpolate,
|
||||
override=override,
|
||||
encoding=encoding,
|
||||
)
|
||||
return dotenv.set_as_environment_variables()
|
||||
|
||||
|
||||
def dotenv_values(
|
||||
dotenv_path: Optional[StrPath] = None,
|
||||
stream: Optional[IO[str]] = None,
|
||||
verbose: bool = False,
|
||||
interpolate: bool = True,
|
||||
encoding: Optional[str] = "utf-8",
|
||||
) -> Dict[str, Optional[str]]:
|
||||
"""
|
||||
Parse a .env file and return its content as a dict.
|
||||
|
||||
The returned dict will have `None` values for keys without values in the .env file.
|
||||
For example, `foo=bar` results in `{"foo": "bar"}` whereas `foo` alone results in
|
||||
`{"foo": None}`
|
||||
|
||||
Parameters:
|
||||
dotenv_path: Absolute or relative path to the .env file.
|
||||
stream: `StringIO` object with .env content, used if `dotenv_path` is `None`.
|
||||
verbose: Whether to output a warning if the .env file is missing.
|
||||
encoding: Encoding to be used to read the file.
|
||||
|
||||
If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the
|
||||
.env file.
|
||||
"""
|
||||
if dotenv_path is None and stream is None:
|
||||
dotenv_path = find_dotenv()
|
||||
|
||||
return DotEnv(
|
||||
dotenv_path=dotenv_path,
|
||||
stream=stream,
|
||||
verbose=verbose,
|
||||
interpolate=interpolate,
|
||||
override=True,
|
||||
encoding=encoding,
|
||||
).dict()
|
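The functions above make up the module's public helpers; a small self-contained sketch of how they compose, run against a temporary directory so it cannot touch a real `.env` (the key names are made up for illustration):

```python
# Sketch of the python-dotenv helpers defined above, exercised in a temp dir.
import os
import tempfile

from dotenv import dotenv_values, get_key, load_dotenv, set_key, unset_key

with tempfile.TemporaryDirectory() as tmp:
    env_path = os.path.join(tmp, ".env")

    # set_key() writes through rewrite(), which touches the path first,
    # so the file is created on demand.
    set_key(env_path, "API_URL", "https://example.invalid")
    set_key(env_path, "GREETING", "hello ${USER:-world}")

    print(get_key(env_path, "API_URL"))   # https://example.invalid

    # dotenv_values() parses without touching os.environ; ${USER:-world}
    # is interpolated by resolve_variables().
    print(dotenv_values(env_path))

    # load_dotenv() copies the values into os.environ; override=False keeps
    # variables that are already set.
    load_dotenv(env_path, override=False)
    print(os.environ["API_URL"])

    unset_key(env_path, "GREETING")
```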
||||
@ -0,0 +1,175 @@
|
||||
import codecs
|
||||
import re
|
||||
from typing import (IO, Iterator, Match, NamedTuple, Optional, # noqa:F401
|
||||
Pattern, Sequence, Tuple)
|
||||
|
||||
|
||||
def make_regex(string: str, extra_flags: int = 0) -> Pattern[str]:
|
||||
return re.compile(string, re.UNICODE | extra_flags)
|
||||
|
||||
|
||||
_newline = make_regex(r"(\r\n|\n|\r)")
|
||||
_multiline_whitespace = make_regex(r"\s*", extra_flags=re.MULTILINE)
|
||||
_whitespace = make_regex(r"[^\S\r\n]*")
|
||||
_export = make_regex(r"(?:export[^\S\r\n]+)?")
|
||||
_single_quoted_key = make_regex(r"'([^']+)'")
|
||||
_unquoted_key = make_regex(r"([^=\#\s]+)")
|
||||
_equal_sign = make_regex(r"(=[^\S\r\n]*)")
|
||||
_single_quoted_value = make_regex(r"'((?:\\'|[^'])*)'")
|
||||
_double_quoted_value = make_regex(r'"((?:\\"|[^"])*)"')
|
||||
_unquoted_value = make_regex(r"([^\r\n]*)")
|
||||
_comment = make_regex(r"(?:[^\S\r\n]*#[^\r\n]*)?")
|
||||
_end_of_line = make_regex(r"[^\S\r\n]*(?:\r\n|\n|\r|$)")
|
||||
_rest_of_line = make_regex(r"[^\r\n]*(?:\r|\n|\r\n)?")
|
||||
_double_quote_escapes = make_regex(r"\\[\\'\"abfnrtv]")
|
||||
_single_quote_escapes = make_regex(r"\\[\\']")
|
||||
|
||||
|
||||
class Original(NamedTuple):
|
||||
string: str
|
||||
line: int
|
||||
|
||||
|
||||
class Binding(NamedTuple):
|
||||
key: Optional[str]
|
||||
value: Optional[str]
|
||||
original: Original
|
||||
error: bool
|
||||
|
||||
|
||||
class Position:
|
||||
def __init__(self, chars: int, line: int) -> None:
|
||||
self.chars = chars
|
||||
self.line = line
|
||||
|
||||
@classmethod
|
||||
def start(cls) -> "Position":
|
||||
return cls(chars=0, line=1)
|
||||
|
||||
def set(self, other: "Position") -> None:
|
||||
self.chars = other.chars
|
||||
self.line = other.line
|
||||
|
||||
def advance(self, string: str) -> None:
|
||||
self.chars += len(string)
|
||||
self.line += len(re.findall(_newline, string))
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class Reader:
|
||||
def __init__(self, stream: IO[str]) -> None:
|
||||
self.string = stream.read()
|
||||
self.position = Position.start()
|
||||
self.mark = Position.start()
|
||||
|
||||
def has_next(self) -> bool:
|
||||
return self.position.chars < len(self.string)
|
||||
|
||||
def set_mark(self) -> None:
|
||||
self.mark.set(self.position)
|
||||
|
||||
def get_marked(self) -> Original:
|
||||
return Original(
|
||||
string=self.string[self.mark.chars:self.position.chars],
|
||||
line=self.mark.line,
|
||||
)
|
||||
|
||||
def peek(self, count: int) -> str:
|
||||
return self.string[self.position.chars:self.position.chars + count]
|
||||
|
||||
def read(self, count: int) -> str:
|
||||
result = self.string[self.position.chars:self.position.chars + count]
|
||||
if len(result) < count:
|
||||
raise Error("read: End of string")
|
||||
self.position.advance(result)
|
||||
return result
|
||||
|
||||
def read_regex(self, regex: Pattern[str]) -> Sequence[str]:
|
||||
match = regex.match(self.string, self.position.chars)
|
||||
if match is None:
|
||||
raise Error("read_regex: Pattern not found")
|
||||
self.position.advance(self.string[match.start():match.end()])
|
||||
return match.groups()
|
||||
|
||||
|
||||
def decode_escapes(regex: Pattern[str], string: str) -> str:
|
||||
def decode_match(match: Match[str]) -> str:
|
||||
return codecs.decode(match.group(0), 'unicode-escape') # type: ignore
|
||||
|
||||
return regex.sub(decode_match, string)
|
||||
|
||||
|
||||
def parse_key(reader: Reader) -> Optional[str]:
|
||||
char = reader.peek(1)
|
||||
if char == "#":
|
||||
return None
|
||||
elif char == "'":
|
||||
(key,) = reader.read_regex(_single_quoted_key)
|
||||
else:
|
||||
(key,) = reader.read_regex(_unquoted_key)
|
||||
return key
|
||||
|
||||
|
||||
def parse_unquoted_value(reader: Reader) -> str:
|
||||
(part,) = reader.read_regex(_unquoted_value)
|
||||
return re.sub(r"\s+#.*", "", part).rstrip()
|
||||
|
||||
|
||||
def parse_value(reader: Reader) -> str:
|
||||
char = reader.peek(1)
|
||||
if char == u"'":
|
||||
(value,) = reader.read_regex(_single_quoted_value)
|
||||
return decode_escapes(_single_quote_escapes, value)
|
||||
elif char == u'"':
|
||||
(value,) = reader.read_regex(_double_quoted_value)
|
||||
return decode_escapes(_double_quote_escapes, value)
|
||||
elif char in (u"", u"\n", u"\r"):
|
||||
return u""
|
||||
else:
|
||||
return parse_unquoted_value(reader)
|
||||
|
||||
|
||||
def parse_binding(reader: Reader) -> Binding:
|
||||
reader.set_mark()
|
||||
try:
|
||||
reader.read_regex(_multiline_whitespace)
|
||||
if not reader.has_next():
|
||||
return Binding(
|
||||
key=None,
|
||||
value=None,
|
||||
original=reader.get_marked(),
|
||||
error=False,
|
||||
)
|
||||
reader.read_regex(_export)
|
||||
key = parse_key(reader)
|
||||
reader.read_regex(_whitespace)
|
||||
if reader.peek(1) == "=":
|
||||
reader.read_regex(_equal_sign)
|
||||
value: Optional[str] = parse_value(reader)
|
||||
else:
|
||||
value = None
|
||||
reader.read_regex(_comment)
|
||||
reader.read_regex(_end_of_line)
|
||||
return Binding(
|
||||
key=key,
|
||||
value=value,
|
||||
original=reader.get_marked(),
|
||||
error=False,
|
||||
)
|
||||
except Error:
|
||||
reader.read_regex(_rest_of_line)
|
||||
return Binding(
|
||||
key=None,
|
||||
value=None,
|
||||
original=reader.get_marked(),
|
||||
error=True,
|
||||
)
|
||||
|
||||
|
||||
def parse_stream(stream: IO[str]) -> Iterator[Binding]:
|
||||
reader = Reader(stream)
|
||||
while reader.has_next():
|
||||
yield parse_binding(reader)
|
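Note that the parser never raises on malformed input: bad lines come back as `Binding(error=True)`, which is what `with_warn_for_invalid_lines` in `main.py` relies on. A short sketch of feeding `parse_stream` an in-memory stream:

```python
# Sketch: run parse_stream() over an in-memory .env fragment and inspect
# the Binding tuples it yields (including the error flag on a bad line).
import io

from dotenv.parser import parse_stream

stream = io.StringIO(
    "export GREETING='hello world'\n"
    "# a comment-only line\n"
    "EMPTY\n"
    "this line cannot be parsed\n"
)

for binding in parse_stream(stream):
    print(binding.key, repr(binding.value), "error:", binding.error)

# Roughly expected output:
#   GREETING 'hello world' error: False
#   None None error: False    <- comment line: no key, no value
#   EMPTY None error: False   <- key without '=' parses with value None
#   None None error: True     <- unparseable line is flagged, not raised
```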
||||
@ -0,0 +1 @@
|
||||
# Marker file for PEP 561
|
||||
@ -0,0 +1,86 @@
|
||||
import re
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from typing import Iterator, Mapping, Optional, Pattern
|
||||
|
||||
_posix_variable: Pattern[str] = re.compile(
|
||||
r"""
|
||||
\$\{
|
||||
(?P<name>[^\}:]*)
|
||||
(?::-
|
||||
(?P<default>[^\}]*)
|
||||
)?
|
||||
\}
|
||||
""",
|
||||
re.VERBOSE,
|
||||
)
|
||||
|
||||
|
||||
class Atom(metaclass=ABCMeta):
|
||||
def __ne__(self, other: object) -> bool:
|
||||
result = self.__eq__(other)
|
||||
if result is NotImplemented:
|
||||
return NotImplemented
|
||||
return not result
|
||||
|
||||
@abstractmethod
|
||||
def resolve(self, env: Mapping[str, Optional[str]]) -> str: ...
|
||||
|
||||
|
||||
class Literal(Atom):
|
||||
def __init__(self, value: str) -> None:
|
||||
self.value = value
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"Literal(value={self.value})"
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, self.__class__):
|
||||
return NotImplemented
|
||||
return self.value == other.value
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash((self.__class__, self.value))
|
||||
|
||||
def resolve(self, env: Mapping[str, Optional[str]]) -> str:
|
||||
return self.value
|
||||
|
||||
|
||||
class Variable(Atom):
|
||||
def __init__(self, name: str, default: Optional[str]) -> None:
|
||||
self.name = name
|
||||
self.default = default
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"Variable(name={self.name}, default={self.default})"
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, self.__class__):
|
||||
return NotImplemented
|
||||
return (self.name, self.default) == (other.name, other.default)
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash((self.__class__, self.name, self.default))
|
||||
|
||||
def resolve(self, env: Mapping[str, Optional[str]]) -> str:
|
||||
default = self.default if self.default is not None else ""
|
||||
result = env.get(self.name, default)
|
||||
return result if result is not None else ""
|
||||
|
||||
|
||||
def parse_variables(value: str) -> Iterator[Atom]:
|
||||
cursor = 0
|
||||
|
||||
for match in _posix_variable.finditer(value):
|
||||
(start, end) = match.span()
|
||||
name = match["name"]
|
||||
default = match["default"]
|
||||
|
||||
if start > cursor:
|
||||
yield Literal(value=value[cursor:start])
|
||||
|
||||
yield Variable(name=name, default=default)
|
||||
cursor = end
|
||||
|
||||
length = len(value)
|
||||
if cursor < length:
|
||||
yield Literal(value=value[cursor:length])
|
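A quick sketch of how `parse_variables` tokenizes a value and how the resulting atoms resolve; this is the machinery that `resolve_variables` in `main.py` drives for each value:

```python
# Sketch: tokenize a value into Literal/Variable atoms and resolve them
# against a plain mapping (DB_HOST is absent, so its default applies).
from dotenv.variables import parse_variables

atoms = list(parse_variables("db://${DB_HOST:-localhost}:${DB_PORT}/app"))
print(atoms)
# [Literal(value=db://), Variable(name=DB_HOST, default=localhost),
#  Literal(value=:), Variable(name=DB_PORT, default=None), Literal(value=/app)]

env = {"DB_PORT": "5432"}
print("".join(atom.resolve(env) for atom in atoms))   # db://localhost:5432/app
```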
||||
@ -0,0 +1 @@
|
||||
__version__ = "1.0.1"
|
||||
@ -0,0 +1 @@
|
||||
pip
|
||||
@ -0,0 +1,28 @@
|
||||
Copyright 2010 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@ -0,0 +1,101 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Flask
|
||||
Version: 3.0.3
|
||||
Summary: A simple framework for building complex web applications.
|
||||
Maintainer-email: Pallets <contact@palletsprojects.com>
|
||||
Requires-Python: >=3.8
|
||||
Description-Content-Type: text/markdown
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Framework :: Flask
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
|
||||
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
|
||||
Classifier: Typing :: Typed
|
||||
Requires-Dist: Werkzeug>=3.0.0
|
||||
Requires-Dist: Jinja2>=3.1.2
|
||||
Requires-Dist: itsdangerous>=2.1.2
|
||||
Requires-Dist: click>=8.1.3
|
||||
Requires-Dist: blinker>=1.6.2
|
||||
Requires-Dist: importlib-metadata>=3.6.0; python_version < '3.10'
|
||||
Requires-Dist: asgiref>=3.2 ; extra == "async"
|
||||
Requires-Dist: python-dotenv ; extra == "dotenv"
|
||||
Project-URL: Changes, https://flask.palletsprojects.com/changes/
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Project-URL: Documentation, https://flask.palletsprojects.com/
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Source, https://github.com/pallets/flask/
|
||||
Provides-Extra: async
|
||||
Provides-Extra: dotenv
|
||||
|
||||
# Flask
|
||||
|
||||
Flask is a lightweight [WSGI][] web application framework. It is designed
|
||||
to make getting started quick and easy, with the ability to scale up to
|
||||
complex applications. It began as a simple wrapper around [Werkzeug][]
|
||||
and [Jinja][], and has become one of the most popular Python web
|
||||
application frameworks.
|
||||
|
||||
Flask offers suggestions, but doesn't enforce any dependencies or
|
||||
project layout. It is up to the developer to choose the tools and
|
||||
libraries they want to use. There are many extensions provided by the
|
||||
community that make adding new functionality easy.
|
||||
|
||||
[WSGI]: https://wsgi.readthedocs.io/
|
||||
[Werkzeug]: https://werkzeug.palletsprojects.com/
|
||||
[Jinja]: https://jinja.palletsprojects.com/
|
||||
|
||||
|
||||
## Installing
|
||||
|
||||
Install and update from [PyPI][] using an installer such as [pip][]:
|
||||
|
||||
```
|
||||
$ pip install -U Flask
|
||||
```
|
||||
|
||||
[PyPI]: https://pypi.org/project/Flask/
|
||||
[pip]: https://pip.pypa.io/en/stable/getting-started/
|
||||
|
||||
|
||||
## A Simple Example
|
||||
|
||||
```python
|
||||
# save this as app.py
|
||||
from flask import Flask
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
@app.route("/")
|
||||
def hello():
|
||||
return "Hello, World!"
|
||||
```
|
||||
|
||||
```
|
||||
$ flask run
|
||||
* Running on http://127.0.0.1:5000/ (Press CTRL+C to quit)
|
||||
```
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
For guidance on setting up a development environment and how to make a
|
||||
contribution to Flask, see the [contributing guidelines][].
|
||||
|
||||
[contributing guidelines]: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst
|
||||
|
||||
|
||||
## Donate
|
||||
|
||||
The Pallets organization develops and supports Flask and the libraries
|
||||
it uses. In order to grow the community of contributors and users, and
|
||||
allow the maintainers to devote more time to the projects, [please
|
||||
donate today][].
|
||||
|
||||
[please donate today]: https://palletsprojects.com/donate
|
||||
|
||||
@ -0,0 +1,58 @@
|
||||
../../../bin/flask,sha256=YPIKOeaeOxiaQIeYXHRMvLkNQJezmnZ1XRBCdYf9cFA,284
|
||||
flask-3.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
flask-3.0.3.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
|
||||
flask-3.0.3.dist-info/METADATA,sha256=exPahy4aahjV-mYqd9qb5HNP8haB_IxTuaotoSvCtag,3177
|
||||
flask-3.0.3.dist-info/RECORD,,
|
||||
flask-3.0.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
flask-3.0.3.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
|
||||
flask-3.0.3.dist-info/entry_points.txt,sha256=bBP7hTOS5fz9zLtC7sPofBZAlMkEvBxu7KqS6l5lvc4,40
|
||||
flask/__init__.py,sha256=6xMqdVA0FIQ2U1KVaGX3lzNCdXPzoHPaa0hvQCNcfSk,2625
|
||||
flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30
|
||||
flask/__pycache__/__init__.cpython-312.pyc,,
|
||||
flask/__pycache__/__main__.cpython-312.pyc,,
|
||||
flask/__pycache__/app.cpython-312.pyc,,
|
||||
flask/__pycache__/blueprints.cpython-312.pyc,,
|
||||
flask/__pycache__/cli.cpython-312.pyc,,
|
||||
flask/__pycache__/config.cpython-312.pyc,,
|
||||
flask/__pycache__/ctx.cpython-312.pyc,,
|
||||
flask/__pycache__/debughelpers.cpython-312.pyc,,
|
||||
flask/__pycache__/globals.cpython-312.pyc,,
|
||||
flask/__pycache__/helpers.cpython-312.pyc,,
|
||||
flask/__pycache__/logging.cpython-312.pyc,,
|
||||
flask/__pycache__/sessions.cpython-312.pyc,,
|
||||
flask/__pycache__/signals.cpython-312.pyc,,
|
||||
flask/__pycache__/templating.cpython-312.pyc,,
|
||||
flask/__pycache__/testing.cpython-312.pyc,,
|
||||
flask/__pycache__/typing.cpython-312.pyc,,
|
||||
flask/__pycache__/views.cpython-312.pyc,,
|
||||
flask/__pycache__/wrappers.cpython-312.pyc,,
|
||||
flask/app.py,sha256=7-lh6cIj27riTE1Q18Ok1p5nOZ8qYiMux4Btc6o6mNc,60143
|
||||
flask/blueprints.py,sha256=7INXPwTkUxfOQXOOv1yu52NpHPmPGI5fMTMFZ-BG9yY,4430
|
||||
flask/cli.py,sha256=OOaf_Efqih1i2in58j-5ZZZmQnPpaSfiUFbEjlL9bzw,35825
|
||||
flask/config.py,sha256=bLzLVAj-cq-Xotu9erqOFte0xSFaVXyfz0AkP4GbwmY,13312
|
||||
flask/ctx.py,sha256=4atDhJJ_cpV1VMq4qsfU4E_61M1oN93jlS2H9gjrl58,15120
|
||||
flask/debughelpers.py,sha256=PGIDhStW_efRjpaa3zHIpo-htStJOR41Ip3OJWPYBwo,6080
|
||||
flask/globals.py,sha256=XdQZmStBmPIs8t93tjx6pO7Bm3gobAaONWkFcUHaGas,1713
|
||||
flask/helpers.py,sha256=tYrcQ_73GuSZVEgwFr-eMmV69UriFQDBmt8wZJIAqvg,23084
|
||||
flask/json/__init__.py,sha256=hLNR898paqoefdeAhraa5wyJy-bmRB2k2dV4EgVy2Z8,5602
|
||||
flask/json/__pycache__/__init__.cpython-312.pyc,,
|
||||
flask/json/__pycache__/provider.cpython-312.pyc,,
|
||||
flask/json/__pycache__/tag.cpython-312.pyc,,
|
||||
flask/json/provider.py,sha256=q6iB83lSiopy80DZPrU-9mGcWwrD0mvLjiv9fHrRZgc,7646
|
||||
flask/json/tag.py,sha256=DhaNwuIOhdt2R74oOC9Y4Z8ZprxFYiRb5dUP5byyINw,9281
|
||||
flask/logging.py,sha256=8sM3WMTubi1cBb2c_lPkWpN0J8dMAqrgKRYLLi1dCVI,2377
|
||||
flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
flask/sansio/README.md,sha256=-0X1tECnilmz1cogx-YhNw5d7guK7GKrq_DEV2OzlU0,228
|
||||
flask/sansio/__pycache__/app.cpython-312.pyc,,
|
||||
flask/sansio/__pycache__/blueprints.cpython-312.pyc,,
|
||||
flask/sansio/__pycache__/scaffold.cpython-312.pyc,,
|
||||
flask/sansio/app.py,sha256=YG5Gf7JVf1c0yccWDZ86q5VSfJUidOVp27HFxFNxC7U,38053
|
||||
flask/sansio/blueprints.py,sha256=Tqe-7EkZ-tbWchm8iDoCfD848f0_3nLv6NNjeIPvHwM,24637
|
||||
flask/sansio/scaffold.py,sha256=WLV9TRQMMhGlXz-1OKtQ3lv6mtIBQZxdW2HezYrGxoI,30633
|
||||
flask/sessions.py,sha256=RU4lzm9MQW9CtH8rVLRTDm8USMJyT4LbvYe7sxM2__k,14807
|
||||
flask/signals.py,sha256=V7lMUww7CqgJ2ThUBn1PiatZtQanOyt7OZpu2GZI-34,750
|
||||
flask/templating.py,sha256=2TcXLT85Asflm2W9WOSFxKCmYn5e49w_Jkg9-NaaJWo,7537
|
||||
flask/testing.py,sha256=3BFXb3bP7R5r-XLBuobhczbxDu8-1LWRzYuhbr-lwaE,10163
|
||||
flask/typing.py,sha256=ZavK-wV28Yv8CQB7u73qZp_jLalpbWdrXS37QR1ftN0,3190
|
||||
flask/views.py,sha256=B66bTvYBBcHMYk4dA1ScZD0oTRTBl0I5smp1lRm9riI,6939
|
||||
flask/wrappers.py,sha256=m1j5tIJxIu8_sPPgTAB_G4TTh52Q-HoDuw_qHV5J59g,5831
|
||||
@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: flit 3.9.0
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
@ -0,0 +1,3 @@
|
||||
[console_scripts]
|
||||
flask=flask.cli:main
|
||||
|
||||
@ -0,0 +1,60 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
from . import json as json
|
||||
from .app import Flask as Flask
|
||||
from .blueprints import Blueprint as Blueprint
|
||||
from .config import Config as Config
|
||||
from .ctx import after_this_request as after_this_request
|
||||
from .ctx import copy_current_request_context as copy_current_request_context
|
||||
from .ctx import has_app_context as has_app_context
|
||||
from .ctx import has_request_context as has_request_context
|
||||
from .globals import current_app as current_app
|
||||
from .globals import g as g
|
||||
from .globals import request as request
|
||||
from .globals import session as session
|
||||
from .helpers import abort as abort
|
||||
from .helpers import flash as flash
|
||||
from .helpers import get_flashed_messages as get_flashed_messages
|
||||
from .helpers import get_template_attribute as get_template_attribute
|
||||
from .helpers import make_response as make_response
|
||||
from .helpers import redirect as redirect
|
||||
from .helpers import send_file as send_file
|
||||
from .helpers import send_from_directory as send_from_directory
|
||||
from .helpers import stream_with_context as stream_with_context
|
||||
from .helpers import url_for as url_for
|
||||
from .json import jsonify as jsonify
|
||||
from .signals import appcontext_popped as appcontext_popped
|
||||
from .signals import appcontext_pushed as appcontext_pushed
|
||||
from .signals import appcontext_tearing_down as appcontext_tearing_down
|
||||
from .signals import before_render_template as before_render_template
|
||||
from .signals import got_request_exception as got_request_exception
|
||||
from .signals import message_flashed as message_flashed
|
||||
from .signals import request_finished as request_finished
|
||||
from .signals import request_started as request_started
|
||||
from .signals import request_tearing_down as request_tearing_down
|
||||
from .signals import template_rendered as template_rendered
|
||||
from .templating import render_template as render_template
|
||||
from .templating import render_template_string as render_template_string
|
||||
from .templating import stream_template as stream_template
|
||||
from .templating import stream_template_string as stream_template_string
|
||||
from .wrappers import Request as Request
|
||||
from .wrappers import Response as Response
|
||||
|
||||
|
||||
def __getattr__(name: str) -> t.Any:
|
||||
if name == "__version__":
|
||||
import importlib.metadata
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"The '__version__' attribute is deprecated and will be removed in"
|
||||
" Flask 3.1. Use feature detection or"
|
||||
" 'importlib.metadata.version(\"flask\")' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return importlib.metadata.version("flask")
|
||||
|
||||
raise AttributeError(name)
|
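The module-level `__getattr__` above is the standard PEP 562 way to deprecate a module attribute lazily; a stripped-down sketch of the same pattern applied to a hypothetical attribute in a module of your own:

```python
# Sketch of the PEP 562 module-level __getattr__ deprecation pattern used
# above, applied to a hypothetical OLD_TIMEOUT attribute (illustrative only).
import warnings

NEW_TIMEOUT = 30

def __getattr__(name: str):
    if name == "OLD_TIMEOUT":
        warnings.warn(
            "'OLD_TIMEOUT' is deprecated; use 'NEW_TIMEOUT' instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return NEW_TIMEOUT
    raise AttributeError(name)
```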
||||
@ -0,0 +1,3 @@
|
||||
from .cli import main
|
||||
|
||||
main()
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
1498
syllable_game/christmasenv/lib/python3.12/site-packages/flask/app.py
Normal file
File diff suppressed because it is too large
@ -0,0 +1,129 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import typing as t
|
||||
from datetime import timedelta
|
||||
|
||||
from .cli import AppGroup
|
||||
from .globals import current_app
|
||||
from .helpers import send_from_directory
|
||||
from .sansio.blueprints import Blueprint as SansioBlueprint
|
||||
from .sansio.blueprints import BlueprintSetupState as BlueprintSetupState # noqa
|
||||
from .sansio.scaffold import _sentinel
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from .wrappers import Response
|
||||
|
||||
|
||||
class Blueprint(SansioBlueprint):
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
import_name: str,
|
||||
static_folder: str | os.PathLike[str] | None = None,
|
||||
static_url_path: str | None = None,
|
||||
template_folder: str | os.PathLike[str] | None = None,
|
||||
url_prefix: str | None = None,
|
||||
subdomain: str | None = None,
|
||||
url_defaults: dict[str, t.Any] | None = None,
|
||||
root_path: str | None = None,
|
||||
cli_group: str | None = _sentinel, # type: ignore
|
||||
) -> None:
|
||||
super().__init__(
|
||||
name,
|
||||
import_name,
|
||||
static_folder,
|
||||
static_url_path,
|
||||
template_folder,
|
||||
url_prefix,
|
||||
subdomain,
|
||||
url_defaults,
|
||||
root_path,
|
||||
cli_group,
|
||||
)
|
||||
|
||||
#: The Click command group for registering CLI commands for this
|
||||
#: object. The commands are available from the ``flask`` command
|
||||
#: once the application has been discovered and blueprints have
|
||||
#: been registered.
|
||||
self.cli = AppGroup()
|
||||
|
||||
# Set the name of the Click group in case someone wants to add
|
||||
# the app's commands to another CLI tool.
|
||||
self.cli.name = self.name
|
||||
|
||||
def get_send_file_max_age(self, filename: str | None) -> int | None:
|
||||
"""Used by :func:`send_file` to determine the ``max_age`` cache
|
||||
value for a given file path if it wasn't passed.
|
||||
|
||||
By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from
|
||||
the configuration of :data:`~flask.current_app`. This defaults
|
||||
to ``None``, which tells the browser to use conditional requests
|
||||
instead of a timed cache, which is usually preferable.
|
||||
|
||||
Note this is a duplicate of the same method in the Flask
|
||||
class.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
The default configuration is ``None`` instead of 12 hours.
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"]
|
||||
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
if isinstance(value, timedelta):
|
||||
return int(value.total_seconds())
|
||||
|
||||
return value # type: ignore[no-any-return]
|
||||
|
||||
def send_static_file(self, filename: str) -> Response:
|
||||
"""The view function used to serve files from
|
||||
:attr:`static_folder`. A route is automatically registered for
|
||||
this view at :attr:`static_url_path` if :attr:`static_folder` is
|
||||
set.
|
||||
|
||||
Note this is a duplicate of the same method in the Flask
|
||||
class.
|
||||
|
||||
.. versionadded:: 0.5
|
||||
|
||||
"""
|
||||
if not self.has_static_folder:
|
||||
raise RuntimeError("'static_folder' must be set to serve static files.")
|
||||
|
||||
# send_file only knows to call get_send_file_max_age on the app,
|
||||
# call it here so it works for blueprints too.
|
||||
max_age = self.get_send_file_max_age(filename)
|
||||
return send_from_directory(
|
||||
t.cast(str, self.static_folder), filename, max_age=max_age
|
||||
)
|
||||
|
||||
def open_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]:
|
||||
"""Open a resource file relative to :attr:`root_path` for
|
||||
reading.
|
||||
|
||||
For example, if the file ``schema.sql`` is next to the file
|
||||
``app.py`` where the ``Flask`` app is defined, it can be opened
|
||||
with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
with app.open_resource("schema.sql") as f:
|
||||
conn.executescript(f.read())
|
||||
|
||||
:param resource: Path to the resource relative to
|
||||
:attr:`root_path`.
|
||||
:param mode: Open the file in this mode. Only reading is
|
||||
supported, valid values are "r" (or "rt") and "rb".
|
||||
|
||||
Note this is a duplicate of the same method in the Flask
|
||||
class.
|
||||
|
||||
"""
|
||||
if mode not in {"r", "rt", "rb"}:
|
||||
raise ValueError("Resources can only be opened for reading.")
|
||||
|
||||
return open(os.path.join(self.root_path, resource), mode)
|
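A minimal sketch of wiring the Blueprint above into an application; the blueprint name, folder layout, and cache value are illustrative, not taken from this repository:

```python
# Sketch: register a Blueprint with static/template folders; static files
# are served by send_static_file() and cached per SEND_FILE_MAX_AGE_DEFAULT.
from flask import Blueprint, Flask

pages = Blueprint(
    "pages",
    __name__,
    static_folder="static",        # served at /pages/static/<filename>
    template_folder="templates",
    url_prefix="/pages",
)

@pages.route("/")
def index():
    return "Hello from the pages blueprint!"

app = Flask(__name__)
app.config["SEND_FILE_MAX_AGE_DEFAULT"] = 300   # read by get_send_file_max_age()
app.register_blueprint(pages)
```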
||||
1109
syllable_game/christmasenv/lib/python3.12/site-packages/flask/cli.py
Normal file
File diff suppressed because it is too large
@ -0,0 +1,370 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import errno
|
||||
import json
|
||||
import os
|
||||
import types
|
||||
import typing as t
|
||||
|
||||
from werkzeug.utils import import_string
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
|
||||
from .sansio.app import App
|
||||
|
||||
|
||||
T = t.TypeVar("T")
|
||||
|
||||
|
||||
class ConfigAttribute(t.Generic[T]):
|
||||
"""Makes an attribute forward to the config"""
|
||||
|
||||
def __init__(
|
||||
self, name: str, get_converter: t.Callable[[t.Any], T] | None = None
|
||||
) -> None:
|
||||
self.__name__ = name
|
||||
self.get_converter = get_converter
|
||||
|
||||
@t.overload
|
||||
def __get__(self, obj: None, owner: None) -> te.Self: ...
|
||||
|
||||
@t.overload
|
||||
def __get__(self, obj: App, owner: type[App]) -> T: ...
|
||||
|
||||
def __get__(self, obj: App | None, owner: type[App] | None = None) -> T | te.Self:
|
||||
if obj is None:
|
||||
return self
|
||||
|
||||
rv = obj.config[self.__name__]
|
||||
|
||||
if self.get_converter is not None:
|
||||
rv = self.get_converter(rv)
|
||||
|
||||
return rv # type: ignore[no-any-return]
|
||||
|
||||
def __set__(self, obj: App, value: t.Any) -> None:
|
||||
obj.config[self.__name__] = value
|
||||
|
||||
|
||||
class Config(dict): # type: ignore[type-arg]
|
||||
"""Works exactly like a dict but provides ways to fill it from files
|
||||
or special dictionaries. There are two common patterns to populate the
|
||||
config.
|
||||
|
||||
Either you can fill the config from a config file::
|
||||
|
||||
app.config.from_pyfile('yourconfig.cfg')
|
||||
|
||||
Or alternatively you can define the configuration options in the
|
||||
module that calls :meth:`from_object` or provide an import path to
|
||||
a module that should be loaded. It is also possible to tell it to
|
||||
use the same module and with that provide the configuration values
|
||||
just before the call::
|
||||
|
||||
DEBUG = True
|
||||
SECRET_KEY = 'development key'
|
||||
app.config.from_object(__name__)
|
||||
|
||||
In both cases (loading from any Python file or loading from modules),
|
||||
only uppercase keys are added to the config. This makes it possible to use
|
||||
lowercase values in the config file for temporary values that are not added
|
||||
to the config or to define the config keys in the same file that implements
|
||||
the application.
|
||||
|
||||
Probably the most interesting way to load configurations is from an
|
||||
environment variable pointing to a file::
|
||||
|
||||
app.config.from_envvar('YOURAPPLICATION_SETTINGS')
|
||||
|
||||
In this case before launching the application you have to set this
|
||||
environment variable to the file you want to use. On Linux and OS X
|
||||
use the export statement::
|
||||
|
||||
export YOURAPPLICATION_SETTINGS='/path/to/config/file'
|
||||
|
||||
On Windows use `set` instead.
|
||||
|
||||
:param root_path: path to which files are read relative from. When the
|
||||
config object is created by the application, this is
|
||||
the application's :attr:`~flask.Flask.root_path`.
|
||||
:param defaults: an optional dictionary of default values
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
root_path: str | os.PathLike[str],
|
||||
defaults: dict[str, t.Any] | None = None,
|
||||
) -> None:
|
||||
super().__init__(defaults or {})
|
||||
self.root_path = root_path
|
||||
|
||||
def from_envvar(self, variable_name: str, silent: bool = False) -> bool:
|
||||
"""Loads a configuration from an environment variable pointing to
|
||||
a configuration file. This is basically just a shortcut with nicer
|
||||
error messages for this line of code::
|
||||
|
||||
app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS'])
|
||||
|
||||
:param variable_name: name of the environment variable
|
||||
:param silent: set to ``True`` if you want silent failure for missing
|
||||
files.
|
||||
:return: ``True`` if the file was loaded successfully.
|
||||
"""
|
||||
rv = os.environ.get(variable_name)
|
||||
if not rv:
|
||||
if silent:
|
||||
return False
|
||||
raise RuntimeError(
|
||||
f"The environment variable {variable_name!r} is not set"
|
||||
" and as such configuration could not be loaded. Set"
|
||||
" this variable and make it point to a configuration"
|
||||
" file"
|
||||
)
|
||||
return self.from_pyfile(rv, silent=silent)
|
||||
|
||||
def from_prefixed_env(
|
||||
self, prefix: str = "FLASK", *, loads: t.Callable[[str], t.Any] = json.loads
|
||||
) -> bool:
|
||||
"""Load any environment variables that start with ``FLASK_``,
|
||||
dropping the prefix from the env key for the config key. Values
|
||||
are passed through a loading function to attempt to convert them
|
||||
to more specific types than strings.
|
||||
|
||||
Keys are loaded in :func:`sorted` order.
|
||||
|
||||
The default loading function attempts to parse values as any
|
||||
valid JSON type, including dicts and lists.
|
||||
|
||||
Specific items in nested dicts can be set by separating the
|
||||
keys with double underscores (``__``). If an intermediate key
|
||||
doesn't exist, it will be initialized to an empty dict.
|
||||
|
||||
:param prefix: Load env vars that start with this prefix,
|
||||
separated with an underscore (``_``).
|
||||
:param loads: Pass each string value to this function and use
|
||||
the returned value as the config value. If any error is
|
||||
raised it is ignored and the value remains a string. The
|
||||
default is :func:`json.loads`.
|
||||
|
||||
.. versionadded:: 2.1
|
||||
"""
|
||||
prefix = f"{prefix}_"
|
||||
len_prefix = len(prefix)
|
||||
|
||||
for key in sorted(os.environ):
|
||||
if not key.startswith(prefix):
|
||||
continue
|
||||
|
||||
value = os.environ[key]
|
||||
|
||||
try:
|
||||
value = loads(value)
|
||||
except Exception:
|
||||
# Keep the value as a string if loading failed.
|
||||
pass
|
||||
|
||||
# Change to key.removeprefix(prefix) on Python >= 3.9.
|
||||
key = key[len_prefix:]
|
||||
|
||||
if "__" not in key:
|
||||
# A non-nested key, set directly.
|
||||
self[key] = value
|
||||
continue
|
||||
|
||||
# Traverse nested dictionaries with keys separated by "__".
|
||||
current = self
|
||||
*parts, tail = key.split("__")
|
||||
|
||||
for part in parts:
|
||||
# If an intermediate dict does not exist, create it.
|
||||
if part not in current:
|
||||
current[part] = {}
|
||||
|
||||
current = current[part]
|
||||
|
||||
current[tail] = value
|
||||
|
||||
return True
|
||||
|
||||
def from_pyfile(
|
||||
self, filename: str | os.PathLike[str], silent: bool = False
|
||||
) -> bool:
|
||||
"""Updates the values in the config from a Python file. This function
|
||||
behaves as if the file was imported as module with the
|
||||
:meth:`from_object` function.
|
||||
|
||||
:param filename: the filename of the config. This can either be an
|
||||
absolute filename or a filename relative to the
|
||||
root path.
|
||||
:param silent: set to ``True`` if you want silent failure for missing
|
||||
files.
|
||||
:return: ``True`` if the file was loaded successfully.
|
||||
|
||||
.. versionadded:: 0.7
|
||||
`silent` parameter.
|
||||
"""
|
||||
filename = os.path.join(self.root_path, filename)
|
||||
d = types.ModuleType("config")
|
||||
d.__file__ = filename
|
||||
try:
|
||||
with open(filename, mode="rb") as config_file:
|
||||
exec(compile(config_file.read(), filename, "exec"), d.__dict__)
|
||||
except OSError as e:
|
||||
if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR):
|
||||
return False
|
||||
e.strerror = f"Unable to load configuration file ({e.strerror})"
|
||||
raise
|
||||
self.from_object(d)
|
||||
return True
|
||||
|
||||
def from_object(self, obj: object | str) -> None:
|
||||
"""Updates the values from the given object. An object can be of one
|
||||
of the following two types:
|
||||
|
||||
- a string: in this case the object with that name will be imported
|
||||
- an actual object reference: that object is used directly
|
||||
|
||||
Objects are usually either modules or classes. :meth:`from_object`
|
||||
loads only the uppercase attributes of the module/class. A ``dict``
|
||||
object will not work with :meth:`from_object` because the keys of a
|
||||
``dict`` are not attributes of the ``dict`` class.
|
||||
|
||||
Example of module-based configuration::
|
||||
|
||||
app.config.from_object('yourapplication.default_config')
|
||||
from yourapplication import default_config
|
||||
app.config.from_object(default_config)
|
||||
|
||||
Nothing is done to the object before loading. If the object is a
|
||||
class and has ``@property`` attributes, it needs to be
|
||||
instantiated before being passed to this method.
|
||||
|
||||
You should not use this function to load the actual configuration but
|
||||
rather configuration defaults. The actual config should be loaded
|
||||
with :meth:`from_pyfile` and ideally from a location not within the
|
||||
package because the package might be installed system wide.
|
||||
|
||||
See :ref:`config-dev-prod` for an example of class-based configuration
|
||||
using :meth:`from_object`.
|
||||
|
||||
:param obj: an import name or object
|
||||
"""
|
||||
if isinstance(obj, str):
|
||||
obj = import_string(obj)
|
||||
for key in dir(obj):
|
||||
if key.isupper():
|
||||
self[key] = getattr(obj, key)
|
||||
|
||||
def from_file(
|
||||
self,
|
||||
filename: str | os.PathLike[str],
|
||||
load: t.Callable[[t.IO[t.Any]], t.Mapping[str, t.Any]],
|
||||
silent: bool = False,
|
||||
text: bool = True,
|
||||
) -> bool:
|
||||
"""Update the values in the config from a file that is loaded
|
||||
using the ``load`` parameter. The loaded data is passed to the
|
||||
:meth:`from_mapping` method.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import json
|
||||
app.config.from_file("config.json", load=json.load)
|
||||
|
||||
import tomllib
|
||||
app.config.from_file("config.toml", load=tomllib.load, text=False)
|
||||
|
||||
:param filename: The path to the data file. This can be an
|
||||
absolute path or relative to the config root path.
|
||||
:param load: A callable that takes a file handle and returns a
|
||||
mapping of loaded data from the file.
|
||||
:type load: ``Callable[[Reader], Mapping]`` where ``Reader``
|
||||
implements a ``read`` method.
|
||||
:param silent: Ignore the file if it doesn't exist.
|
||||
:param text: Open the file in text or binary mode.
|
||||
:return: ``True`` if the file was loaded successfully.
|
||||
|
||||
.. versionchanged:: 2.3
|
||||
The ``text`` parameter was added.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
filename = os.path.join(self.root_path, filename)
|
||||
|
||||
try:
|
||||
with open(filename, "r" if text else "rb") as f:
|
||||
obj = load(f)
|
||||
except OSError as e:
|
||||
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
|
||||
return False
|
||||
|
||||
e.strerror = f"Unable to load configuration file ({e.strerror})"
|
||||
raise
|
||||
|
||||
return self.from_mapping(obj)
|
||||
|
||||
def from_mapping(
|
||||
self, mapping: t.Mapping[str, t.Any] | None = None, **kwargs: t.Any
|
||||
) -> bool:
|
||||
"""Updates the config like :meth:`update` ignoring items with
|
||||
non-upper keys.
|
||||
|
||||
:return: Always returns ``True``.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
mappings: dict[str, t.Any] = {}
|
||||
if mapping is not None:
|
||||
mappings.update(mapping)
|
||||
mappings.update(kwargs)
|
||||
for key, value in mappings.items():
|
||||
if key.isupper():
|
||||
self[key] = value
|
||||
return True
|
||||
|
||||
def get_namespace(
|
||||
self, namespace: str, lowercase: bool = True, trim_namespace: bool = True
|
||||
) -> dict[str, t.Any]:
|
||||
"""Returns a dictionary containing a subset of configuration options
|
||||
that match the specified namespace/prefix. Example usage::
|
||||
|
||||
app.config['IMAGE_STORE_TYPE'] = 'fs'
|
||||
app.config['IMAGE_STORE_PATH'] = '/var/app/images'
|
||||
app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com'
|
||||
image_store_config = app.config.get_namespace('IMAGE_STORE_')
|
||||
|
||||
The resulting dictionary `image_store_config` would look like::
|
||||
|
||||
{
|
||||
'type': 'fs',
|
||||
'path': '/var/app/images',
|
||||
'base_url': 'http://img.website.com'
|
||||
}
|
||||
|
||||
This is often useful when configuration options map directly to
|
||||
keyword arguments in functions or class constructors.
|
||||
|
||||
:param namespace: a configuration namespace
|
||||
:param lowercase: a flag indicating if the keys of the resulting
|
||||
dictionary should be lowercase
|
||||
:param trim_namespace: a flag indicating if the keys of the resulting
|
||||
dictionary should not include the namespace
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
rv = {}
|
||||
for k, v in self.items():
|
||||
if not k.startswith(namespace):
|
||||
continue
|
||||
if trim_namespace:
|
||||
key = k[len(namespace) :]
|
||||
else:
|
||||
key = k
|
||||
if lowercase:
|
||||
key = key.lower()
|
||||
rv[key] = v
|
||||
return rv
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{type(self).__name__} {dict.__repr__(self)}>"
|
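A short sketch pulling together the loading paths documented above (`from_mapping`, `from_prefixed_env`, `get_namespace`); the variable and key names are illustrative only:

```python
# Sketch of the Config loading paths documented above; key names are made up.
import os

from flask import Flask

app = Flask(__name__)

# from_mapping(): only uppercase keys are stored.
app.config.from_mapping(IMAGE_STORE_TYPE="fs", IMAGE_STORE_PATH="/var/app/images")

# from_prefixed_env(): FLASK_* variables are loaded, values are parsed as
# JSON when possible, and "__" builds nested dicts.
os.environ["FLASK_DEBUG_TOOLBAR"] = "true"
os.environ["FLASK_DB__PORT"] = "5432"
app.config.from_prefixed_env()
print(app.config["DEBUG_TOOLBAR"], app.config["DB"]["PORT"])   # True 5432

# get_namespace(): collect related keys into a lowercase dict.
print(app.config.get_namespace("IMAGE_STORE_"))
# {'type': 'fs', 'path': '/var/app/images'}
```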
||||
@ -0,0 +1,449 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import contextvars
|
||||
import sys
|
||||
import typing as t
|
||||
from functools import update_wrapper
|
||||
from types import TracebackType
|
||||
|
||||
from werkzeug.exceptions import HTTPException
|
||||
|
||||
from . import typing as ft
|
||||
from .globals import _cv_app
|
||||
from .globals import _cv_request
|
||||
from .signals import appcontext_popped
|
||||
from .signals import appcontext_pushed
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from _typeshed.wsgi import WSGIEnvironment
|
||||
|
||||
from .app import Flask
|
||||
from .sessions import SessionMixin
|
||||
from .wrappers import Request
|
||||
|
||||
|
||||
# a singleton sentinel value for parameter defaults
|
||||
_sentinel = object()
|
||||
|
||||
|
||||
class _AppCtxGlobals:
|
||||
"""A plain object. Used as a namespace for storing data during an
|
||||
application context.
|
||||
|
||||
Creating an app context automatically creates this object, which is
|
||||
made available as the :data:`g` proxy.
|
||||
|
||||
.. describe:: 'key' in g
|
||||
|
||||
Check whether an attribute is present.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
.. describe:: iter(g)
|
||||
|
||||
Return an iterator over the attribute names.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
"""
|
||||
|
||||
# Define attr methods to let mypy know this is a namespace object
|
||||
# that has arbitrary attributes.
|
||||
|
||||
def __getattr__(self, name: str) -> t.Any:
|
||||
try:
|
||||
return self.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError(name) from None
|
||||
|
||||
def __setattr__(self, name: str, value: t.Any) -> None:
|
||||
self.__dict__[name] = value
|
||||
|
||||
def __delattr__(self, name: str) -> None:
|
||||
try:
|
||||
del self.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError(name) from None
|
||||
|
||||
def get(self, name: str, default: t.Any | None = None) -> t.Any:
|
||||
"""Get an attribute by name, or a default value. Like
|
||||
:meth:`dict.get`.
|
||||
|
||||
:param name: Name of attribute to get.
|
||||
:param default: Value to return if the attribute is not present.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
"""
|
||||
return self.__dict__.get(name, default)
|
||||
|
||||
def pop(self, name: str, default: t.Any = _sentinel) -> t.Any:
|
||||
"""Get and remove an attribute by name. Like :meth:`dict.pop`.
|
||||
|
||||
:param name: Name of attribute to pop.
|
||||
:param default: Value to return if the attribute is not present,
|
||||
instead of raising a ``KeyError``.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
if default is _sentinel:
|
||||
return self.__dict__.pop(name)
|
||||
else:
|
||||
return self.__dict__.pop(name, default)
|
||||
|
||||
def setdefault(self, name: str, default: t.Any = None) -> t.Any:
|
||||
"""Get the value of an attribute if it is present, otherwise
|
||||
set and return a default value. Like :meth:`dict.setdefault`.
|
||||
|
||||
:param name: Name of attribute to get.
|
||||
:param default: Value to set and return if the attribute is not
|
||||
present.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
return self.__dict__.setdefault(name, default)
|
||||
|
||||
def __contains__(self, item: str) -> bool:
|
||||
return item in self.__dict__
|
||||
|
||||
def __iter__(self) -> t.Iterator[str]:
|
||||
return iter(self.__dict__)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
ctx = _cv_app.get(None)
|
||||
if ctx is not None:
|
||||
return f"<flask.g of '{ctx.app.name}'>"
|
||||
return object.__repr__(self)
|
||||
|
||||
|
||||
def after_this_request(
|
||||
f: ft.AfterRequestCallable[t.Any],
|
||||
) -> ft.AfterRequestCallable[t.Any]:
|
||||
"""Executes a function after this request. This is useful to modify
|
||||
response objects. The function is passed the response object and has
|
||||
to return the same or a new one.
|
||||
|
||||
Example::
|
||||
|
||||
@app.route('/')
|
||||
def index():
|
||||
@after_this_request
|
||||
def add_header(response):
|
||||
response.headers['X-Foo'] = 'Parachute'
|
||||
return response
|
||||
return 'Hello World!'
|
||||
|
||||
This is more useful if a function other than the view function wants to
|
||||
modify a response. For instance think of a decorator that wants to add
|
||||
some headers without converting the return value into a response object.
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
ctx = _cv_request.get(None)
|
||||
|
||||
if ctx is None:
|
||||
raise RuntimeError(
|
||||
"'after_this_request' can only be used when a request"
|
||||
" context is active, such as in a view function."
|
||||
)
|
||||
|
||||
ctx._after_request_functions.append(f)
|
||||
return f
|
||||
|
||||
|
||||
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
||||
|
||||
|
||||
def copy_current_request_context(f: F) -> F:
|
||||
"""A helper function that decorates a function to retain the current
|
||||
request context. This is useful when working with greenlets. The moment
|
||||
the function is decorated a copy of the request context is created and
|
||||
then pushed when the function is called. The current session is also
|
||||
included in the copied request context.
|
||||
|
||||
Example::
|
||||
|
||||
import gevent
|
||||
from flask import copy_current_request_context
|
||||
|
||||
@app.route('/')
|
||||
def index():
|
||||
@copy_current_request_context
|
||||
def do_some_work():
|
||||
# do some work here, it can access flask.request or
|
||||
# flask.session like you would otherwise in the view function.
|
||||
...
|
||||
gevent.spawn(do_some_work)
|
||||
return 'Regular response'
|
||||
|
||||
.. versionadded:: 0.10
|
||||
"""
|
||||
ctx = _cv_request.get(None)
|
||||
|
||||
if ctx is None:
|
||||
raise RuntimeError(
|
||||
"'copy_current_request_context' can only be used when a"
|
||||
" request context is active, such as in a view function."
|
||||
)
|
||||
|
||||
ctx = ctx.copy()
|
||||
|
||||
def wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any:
|
||||
with ctx: # type: ignore[union-attr]
|
||||
return ctx.app.ensure_sync(f)(*args, **kwargs) # type: ignore[union-attr]
|
||||
|
||||
return update_wrapper(wrapper, f) # type: ignore[return-value]
|
||||
|
||||
|
||||
def has_request_context() -> bool:
|
||||
"""If you have code that wants to test if a request context is there or
|
||||
not this function can be used. For instance, you may want to take advantage
|
||||
of request information if the request object is available, but fail
|
||||
silently if it is unavailable.
|
||||
|
||||
::
|
||||
|
||||
class User(db.Model):
|
||||
|
||||
def __init__(self, username, remote_addr=None):
|
||||
self.username = username
|
||||
if remote_addr is None and has_request_context():
|
||||
remote_addr = request.remote_addr
|
||||
self.remote_addr = remote_addr
|
||||
|
||||
Alternatively you can also just test any of the context bound objects
|
||||
(such as :class:`request` or :class:`g`) for truthiness::
|
||||
|
||||
class User(db.Model):
|
||||
|
||||
def __init__(self, username, remote_addr=None):
|
||||
self.username = username
|
||||
if remote_addr is None and request:
|
||||
remote_addr = request.remote_addr
|
||||
self.remote_addr = remote_addr
|
||||
|
||||
.. versionadded:: 0.7
|
||||
"""
|
||||
return _cv_request.get(None) is not None
|
||||
|
||||
|
||||
def has_app_context() -> bool:
|
||||
"""Works like :func:`has_request_context` but for the application
|
||||
context. You can also just do a boolean check on the
|
||||
:data:`current_app` object instead.
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
return _cv_app.get(None) is not None
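
# Illustrative sketch (not part of Flask): one way application code might use
# has_app_context() to degrade gracefully when no app context is active. The
# helper name below is hypothetical.
def _example_describe_runtime() -> str:
    from flask import current_app, has_app_context

    if has_app_context():
        # current_app resolves only while an app context is pushed
        return f"inside the app context of {current_app.name!r}"

    return "no application context is active"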
|
||||
|
||||
|
||||
class AppContext:
|
||||
"""The app context contains application-specific information. An app
|
||||
context is created and pushed at the beginning of each request if
|
||||
one is not already active. An app context is also pushed when
|
||||
running CLI commands.
|
||||
"""
|
||||
|
||||
def __init__(self, app: Flask) -> None:
|
||||
self.app = app
|
||||
self.url_adapter = app.create_url_adapter(None)
|
||||
self.g: _AppCtxGlobals = app.app_ctx_globals_class()
|
||||
self._cv_tokens: list[contextvars.Token[AppContext]] = []
|
||||
|
||||
def push(self) -> None:
|
||||
"""Binds the app context to the current context."""
|
||||
self._cv_tokens.append(_cv_app.set(self))
|
||||
appcontext_pushed.send(self.app, _async_wrapper=self.app.ensure_sync)
|
||||
|
||||
def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore
|
||||
"""Pops the app context."""
|
||||
try:
|
||||
if len(self._cv_tokens) == 1:
|
||||
if exc is _sentinel:
|
||||
exc = sys.exc_info()[1]
|
||||
self.app.do_teardown_appcontext(exc)
|
||||
finally:
|
||||
ctx = _cv_app.get()
|
||||
_cv_app.reset(self._cv_tokens.pop())
|
||||
|
||||
if ctx is not self:
|
||||
raise AssertionError(
|
||||
f"Popped wrong app context. ({ctx!r} instead of {self!r})"
|
||||
)
|
||||
|
||||
appcontext_popped.send(self.app, _async_wrapper=self.app.ensure_sync)
|
||||
|
||||
def __enter__(self) -> AppContext:
|
||||
self.push()
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
self.pop(exc_value)
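
# Illustrative sketch (not part of Flask): AppContext is normally created via
# Flask.app_context() and used as a context manager. The app and config key
# below are hypothetical.
def _example_use_app_context() -> None:
    from flask import Flask, current_app, g

    app = Flask(__name__)
    app.config["GREETING"] = "hello"

    with app.app_context():
        # entering pushes an AppContext, so current_app and g are usable
        g.call_count = 0
        print(current_app.config["GREETING"])
    # leaving the block pops the context and runs teardown_appcontext functions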
|
||||
|
||||
|
||||
class RequestContext:
|
||||
"""The request context contains per-request information. The Flask
|
||||
app creates and pushes it at the beginning of the request, then pops
|
||||
it at the end of the request. It will create the URL adapter and
|
||||
request object for the WSGI environment provided.
|
||||
|
||||
Do not attempt to use this class directly, instead use
|
||||
:meth:`~flask.Flask.test_request_context` and
|
||||
:meth:`~flask.Flask.request_context` to create this object.
|
||||
|
||||
When the request context is popped, it will evaluate all the
|
||||
functions registered on the application for teardown execution
|
||||
(:meth:`~flask.Flask.teardown_request`).
|
||||
|
||||
The request context is automatically popped at the end of the
|
||||
request. When using the interactive debugger, the context will be
|
||||
restored so ``request`` is still accessible. Similarly, the test
|
||||
client can preserve the context after the request ends. However,
|
||||
teardown functions may already have closed some resources such as
|
||||
database connections.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
app: Flask,
|
||||
environ: WSGIEnvironment,
|
||||
request: Request | None = None,
|
||||
session: SessionMixin | None = None,
|
||||
) -> None:
|
||||
self.app = app
|
||||
if request is None:
|
||||
request = app.request_class(environ)
|
||||
request.json_module = app.json
|
||||
self.request: Request = request
|
||||
self.url_adapter = None
|
||||
try:
|
||||
self.url_adapter = app.create_url_adapter(self.request)
|
||||
except HTTPException as e:
|
||||
self.request.routing_exception = e
|
||||
self.flashes: list[tuple[str, str]] | None = None
|
||||
self.session: SessionMixin | None = session
|
||||
# Functions that should be executed after the request on the response
|
||||
# object. These will be called before the regular "after_request"
|
||||
# functions.
|
||||
self._after_request_functions: list[ft.AfterRequestCallable[t.Any]] = []
|
||||
|
||||
self._cv_tokens: list[
|
||||
tuple[contextvars.Token[RequestContext], AppContext | None]
|
||||
] = []
|
||||
|
||||
def copy(self) -> RequestContext:
|
||||
"""Creates a copy of this request context with the same request object.
|
||||
This can be used to move a request context to a different greenlet.
|
||||
Because the actual request object is the same this cannot be used to
|
||||
move a request context to a different thread unless access to the
|
||||
request object is locked.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
.. versionchanged:: 1.1
|
||||
The current session object is used instead of reloading the original
|
||||
data. This prevents `flask.session` pointing to an out-of-date object.
|
||||
"""
|
||||
return self.__class__(
|
||||
self.app,
|
||||
environ=self.request.environ,
|
||||
request=self.request,
|
||||
session=self.session,
|
||||
)
|
||||
|
||||
def match_request(self) -> None:
|
||||
"""Can be overridden by a subclass to hook into the matching
|
||||
of the request.
|
||||
"""
|
||||
try:
|
||||
result = self.url_adapter.match(return_rule=True) # type: ignore
|
||||
self.request.url_rule, self.request.view_args = result # type: ignore
|
||||
except HTTPException as e:
|
||||
self.request.routing_exception = e
|
||||
|
||||
def push(self) -> None:
|
||||
# Before we push the request context we have to ensure that there
|
||||
# is an application context.
|
||||
app_ctx = _cv_app.get(None)
|
||||
|
||||
if app_ctx is None or app_ctx.app is not self.app:
|
||||
app_ctx = self.app.app_context()
|
||||
app_ctx.push()
|
||||
else:
|
||||
app_ctx = None
|
||||
|
||||
self._cv_tokens.append((_cv_request.set(self), app_ctx))
|
||||
|
||||
# Open the session at the moment that the request context is available.
|
||||
# This allows a custom open_session method to use the request context.
|
||||
# Only open a new session if this is the first time the request was
|
||||
# pushed, otherwise stream_with_context loses the session.
|
||||
if self.session is None:
|
||||
session_interface = self.app.session_interface
|
||||
self.session = session_interface.open_session(self.app, self.request)
|
||||
|
||||
if self.session is None:
|
||||
self.session = session_interface.make_null_session(self.app)
|
||||
|
||||
# Match the request URL after loading the session, so that the
|
||||
# session is available in custom URL converters.
|
||||
if self.url_adapter is not None:
|
||||
self.match_request()
|
||||
|
||||
def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore
|
||||
"""Pops the request context and unbinds it by doing that. This will
|
||||
also trigger the execution of functions registered by the
|
||||
:meth:`~flask.Flask.teardown_request` decorator.
|
||||
|
||||
.. versionchanged:: 0.9
|
||||
Added the `exc` argument.
|
||||
"""
|
||||
clear_request = len(self._cv_tokens) == 1
|
||||
|
||||
try:
|
||||
if clear_request:
|
||||
if exc is _sentinel:
|
||||
exc = sys.exc_info()[1]
|
||||
self.app.do_teardown_request(exc)
|
||||
|
||||
request_close = getattr(self.request, "close", None)
|
||||
if request_close is not None:
|
||||
request_close()
|
||||
finally:
|
||||
ctx = _cv_request.get()
|
||||
token, app_ctx = self._cv_tokens.pop()
|
||||
_cv_request.reset(token)
|
||||
|
||||
# get rid of circular dependencies at the end of the request
|
||||
# so that we don't require the GC to be active.
|
||||
if clear_request:
|
||||
ctx.request.environ["werkzeug.request"] = None
|
||||
|
||||
if app_ctx is not None:
|
||||
app_ctx.pop(exc)
|
||||
|
||||
if ctx is not self:
|
||||
raise AssertionError(
|
||||
f"Popped wrong request context. ({ctx!r} instead of {self!r})"
|
||||
)
|
||||
|
||||
def __enter__(self) -> RequestContext:
|
||||
self.push()
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
self.pop(exc_value)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return (
|
||||
f"<{type(self).__name__} {self.request.url!r}"
|
||||
f" [{self.request.method}] of {self.app.name}>"
|
||||
)
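
# Illustrative sketch (not part of Flask): in tests, Flask.test_request_context()
# builds a RequestContext from a fake WSGI environment so `request` works
# without a running server. The URL below is hypothetical.
def _example_use_request_context() -> None:
    from flask import Flask, request

    app = Flask(__name__)

    with app.test_request_context("/hello?name=World"):
        # a RequestContext (and, if needed, an AppContext) is pushed here
        assert request.path == "/hello"
        assert request.args["name"] == "World"
    # popping runs teardown_request functions and unbinds `request` again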
|
||||
@ -0,0 +1,178 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
from jinja2.loaders import BaseLoader
|
||||
from werkzeug.routing import RequestRedirect
|
||||
|
||||
from .blueprints import Blueprint
|
||||
from .globals import request_ctx
|
||||
from .sansio.app import App
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from .sansio.scaffold import Scaffold
|
||||
from .wrappers import Request
|
||||
|
||||
|
||||
class UnexpectedUnicodeError(AssertionError, UnicodeError):
|
||||
"""Raised in places where we want some better error reporting for
|
||||
unexpected unicode or binary data.
|
||||
"""
|
||||
|
||||
|
||||
class DebugFilesKeyError(KeyError, AssertionError):
|
||||
"""Raised from request.files during debugging. The idea is that it can
|
||||
provide a better error message than just a generic KeyError/BadRequest.
|
||||
"""
|
||||
|
||||
def __init__(self, request: Request, key: str) -> None:
|
||||
form_matches = request.form.getlist(key)
|
||||
buf = [
|
||||
f"You tried to access the file {key!r} in the request.files"
|
||||
" dictionary but it does not exist. The mimetype for the"
|
||||
f" request is {request.mimetype!r} instead of"
|
||||
" 'multipart/form-data' which means that no file contents"
|
||||
" were transmitted. To fix this error you should provide"
|
||||
' enctype="multipart/form-data" in your form.'
|
||||
]
|
||||
if form_matches:
|
||||
names = ", ".join(repr(x) for x in form_matches)
|
||||
buf.append(
|
||||
"\n\nThe browser instead transmitted some file names. "
|
||||
f"This was submitted: {names}"
|
||||
)
|
||||
self.msg = "".join(buf)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.msg
|
||||
|
||||
|
||||
class FormDataRoutingRedirect(AssertionError):
|
||||
"""This exception is raised in debug mode if a routing redirect
|
||||
would cause the browser to drop the method or body. This happens
|
||||
when method is not GET, HEAD or OPTIONS and the status code is not
|
||||
307 or 308.
|
||||
"""
|
||||
|
||||
def __init__(self, request: Request) -> None:
|
||||
exc = request.routing_exception
|
||||
assert isinstance(exc, RequestRedirect)
|
||||
buf = [
|
||||
f"A request was sent to '{request.url}', but routing issued"
|
||||
f" a redirect to the canonical URL '{exc.new_url}'."
|
||||
]
|
||||
|
||||
if f"{request.base_url}/" == exc.new_url.partition("?")[0]:
|
||||
buf.append(
|
||||
" The URL was defined with a trailing slash. Flask"
|
||||
" will redirect to the URL with a trailing slash if it"
|
||||
" was accessed without one."
|
||||
)
|
||||
|
||||
buf.append(
|
||||
" Send requests to the canonical URL, or use 307 or 308 for"
|
||||
" routing redirects. Otherwise, browsers will drop form"
|
||||
" data.\n\n"
|
||||
"This exception is only raised in debug mode."
|
||||
)
|
||||
super().__init__("".join(buf))
|
||||
|
||||
|
||||
def attach_enctype_error_multidict(request: Request) -> None:
|
||||
"""Patch ``request.files.__getitem__`` to raise a descriptive error
|
||||
about ``enctype=multipart/form-data``.
|
||||
|
||||
:param request: The request to patch.
|
||||
:meta private:
|
||||
"""
|
||||
oldcls = request.files.__class__
|
||||
|
||||
class newcls(oldcls): # type: ignore[valid-type, misc]
|
||||
def __getitem__(self, key: str) -> t.Any:
|
||||
try:
|
||||
return super().__getitem__(key)
|
||||
except KeyError as e:
|
||||
if key not in request.form:
|
||||
raise
|
||||
|
||||
raise DebugFilesKeyError(request, key).with_traceback(
|
||||
e.__traceback__
|
||||
) from None
|
||||
|
||||
newcls.__name__ = oldcls.__name__
|
||||
newcls.__module__ = oldcls.__module__
|
||||
request.files.__class__ = newcls
|
||||
|
||||
|
||||
def _dump_loader_info(loader: BaseLoader) -> t.Iterator[str]:
|
||||
yield f"class: {type(loader).__module__}.{type(loader).__name__}"
|
||||
for key, value in sorted(loader.__dict__.items()):
|
||||
if key.startswith("_"):
|
||||
continue
|
||||
if isinstance(value, (tuple, list)):
|
||||
if not all(isinstance(x, str) for x in value):
|
||||
continue
|
||||
yield f"{key}:"
|
||||
for item in value:
|
||||
yield f" - {item}"
|
||||
continue
|
||||
elif not isinstance(value, (str, int, float, bool)):
|
||||
continue
|
||||
yield f"{key}: {value!r}"
|
||||
|
||||
|
||||
def explain_template_loading_attempts(
|
||||
app: App,
|
||||
template: str,
|
||||
attempts: list[
|
||||
tuple[
|
||||
BaseLoader,
|
||||
Scaffold,
|
||||
tuple[str, str | None, t.Callable[[], bool] | None] | None,
|
||||
]
|
||||
],
|
||||
) -> None:
|
||||
"""This should help developers understand what failed"""
|
||||
info = [f"Locating template {template!r}:"]
|
||||
total_found = 0
|
||||
blueprint = None
|
||||
if request_ctx and request_ctx.request.blueprint is not None:
|
||||
blueprint = request_ctx.request.blueprint
|
||||
|
||||
for idx, (loader, srcobj, triple) in enumerate(attempts):
|
||||
if isinstance(srcobj, App):
|
||||
src_info = f"application {srcobj.import_name!r}"
|
||||
elif isinstance(srcobj, Blueprint):
|
||||
src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})"
|
||||
else:
|
||||
src_info = repr(srcobj)
|
||||
|
||||
info.append(f"{idx + 1:5}: trying loader of {src_info}")
|
||||
|
||||
for line in _dump_loader_info(loader):
|
||||
info.append(f" {line}")
|
||||
|
||||
if triple is None:
|
||||
detail = "no match"
|
||||
else:
|
||||
detail = f"found ({triple[1] or '<string>'!r})"
|
||||
total_found += 1
|
||||
info.append(f" -> {detail}")
|
||||
|
||||
seems_fishy = False
|
||||
if total_found == 0:
|
||||
info.append("Error: the template could not be found.")
|
||||
seems_fishy = True
|
||||
elif total_found > 1:
|
||||
info.append("Warning: multiple loaders returned a match for the template.")
|
||||
seems_fishy = True
|
||||
|
||||
if blueprint is not None and seems_fishy:
|
||||
info.append(
|
||||
" The template was looked up from an endpoint that belongs"
|
||||
f" to the blueprint {blueprint!r}."
|
||||
)
|
||||
info.append(" Maybe you did not place a template in the right folder?")
|
||||
info.append(" See https://flask.palletsprojects.com/blueprints/#templates")
|
||||
|
||||
app.logger.info("\n".join(info))
|
||||
@ -0,0 +1,51 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from contextvars import ContextVar
|
||||
|
||||
from werkzeug.local import LocalProxy
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from .app import Flask
|
||||
from .ctx import _AppCtxGlobals
|
||||
from .ctx import AppContext
|
||||
from .ctx import RequestContext
|
||||
from .sessions import SessionMixin
|
||||
from .wrappers import Request
|
||||
|
||||
|
||||
_no_app_msg = """\
|
||||
Working outside of application context.
|
||||
|
||||
This typically means that you attempted to use functionality that needed
|
||||
the current application. To solve this, set up an application context
|
||||
with app.app_context(). See the documentation for more information.\
|
||||
"""
|
||||
_cv_app: ContextVar[AppContext] = ContextVar("flask.app_ctx")
|
||||
app_ctx: AppContext = LocalProxy( # type: ignore[assignment]
|
||||
_cv_app, unbound_message=_no_app_msg
|
||||
)
|
||||
current_app: Flask = LocalProxy( # type: ignore[assignment]
|
||||
_cv_app, "app", unbound_message=_no_app_msg
|
||||
)
|
||||
g: _AppCtxGlobals = LocalProxy( # type: ignore[assignment]
|
||||
_cv_app, "g", unbound_message=_no_app_msg
|
||||
)
|
||||
|
||||
_no_req_msg = """\
|
||||
Working outside of request context.
|
||||
|
||||
This typically means that you attempted to use functionality that needed
|
||||
an active HTTP request. Consult the documentation on testing for
|
||||
information about how to avoid this problem.\
|
||||
"""
|
||||
_cv_request: ContextVar[RequestContext] = ContextVar("flask.request_ctx")
|
||||
request_ctx: RequestContext = LocalProxy( # type: ignore[assignment]
|
||||
_cv_request, unbound_message=_no_req_msg
|
||||
)
|
||||
request: Request = LocalProxy( # type: ignore[assignment]
|
||||
_cv_request, "request", unbound_message=_no_req_msg
|
||||
)
|
||||
session: SessionMixin = LocalProxy( # type: ignore[assignment]
|
||||
_cv_request, "session", unbound_message=_no_req_msg
|
||||
)
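
# Illustrative sketch (not part of Flask): these module-level names are
# LocalProxy objects, so they resolve only while the matching context is
# pushed; otherwise they raise RuntimeError with the messages defined above.
def _example_proxy_resolution() -> None:
    from flask import Flask, current_app

    app = Flask(__name__)

    try:
        current_app.name  # no app context pushed yet
    except RuntimeError as exc:
        print(exc)  # "Working outside of application context. ..."

    with app.app_context():
        print(current_app.name)  # the proxy now resolves to `app`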
|
||||
@ -0,0 +1,621 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib.util
|
||||
import os
|
||||
import sys
|
||||
import typing as t
|
||||
from datetime import datetime
|
||||
from functools import lru_cache
|
||||
from functools import update_wrapper
|
||||
|
||||
import werkzeug.utils
|
||||
from werkzeug.exceptions import abort as _wz_abort
|
||||
from werkzeug.utils import redirect as _wz_redirect
|
||||
from werkzeug.wrappers import Response as BaseResponse
|
||||
|
||||
from .globals import _cv_request
|
||||
from .globals import current_app
|
||||
from .globals import request
|
||||
from .globals import request_ctx
|
||||
from .globals import session
|
||||
from .signals import message_flashed
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from .wrappers import Response
|
||||
|
||||
|
||||
def get_debug_flag() -> bool:
|
||||
"""Get whether debug mode should be enabled for the app, indicated by the
|
||||
:envvar:`FLASK_DEBUG` environment variable. The default is ``False``.
|
||||
"""
|
||||
val = os.environ.get("FLASK_DEBUG")
|
||||
return bool(val and val.lower() not in {"0", "false", "no"})
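
# Illustrative sketch (not part of Flask): FLASK_DEBUG is parsed loosely, so
# any non-empty value other than "0", "false", or "no" enables debug mode.
def _example_debug_flag_parsing() -> None:
    os.environ["FLASK_DEBUG"] = "yes"
    assert get_debug_flag() is True

    os.environ["FLASK_DEBUG"] = "0"
    assert get_debug_flag() is False

    os.environ.pop("FLASK_DEBUG")
    assert get_debug_flag() is False  # unset defaults to False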
|
||||
|
||||
|
||||
def get_load_dotenv(default: bool = True) -> bool:
|
||||
"""Get whether the user has disabled loading default dotenv files by
|
||||
setting :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load
|
||||
the files.
|
||||
|
||||
:param default: What to return if the env var isn't set.
|
||||
"""
|
||||
val = os.environ.get("FLASK_SKIP_DOTENV")
|
||||
|
||||
if not val:
|
||||
return default
|
||||
|
||||
return val.lower() in ("0", "false", "no")
|
||||
|
||||
|
||||
def stream_with_context(
|
||||
generator_or_function: t.Iterator[t.AnyStr] | t.Callable[..., t.Iterator[t.AnyStr]],
|
||||
) -> t.Iterator[t.AnyStr]:
|
||||
"""Request contexts disappear when the response is started on the server.
|
||||
This is done for efficiency reasons and to make it less likely to encounter
|
||||
memory leaks with badly written WSGI middlewares. The downside is that if
|
||||
you are using streamed responses, the generator cannot access request bound
|
||||
information any more.
|
||||
|
||||
This function however can help you keep the context around for longer::
|
||||
|
||||
from flask import stream_with_context, request, Response
|
||||
|
||||
@app.route('/stream')
|
||||
def streamed_response():
|
||||
@stream_with_context
|
||||
def generate():
|
||||
yield 'Hello '
|
||||
yield request.args['name']
|
||||
yield '!'
|
||||
return Response(generate())
|
||||
|
||||
Alternatively it can also be used around a specific generator::
|
||||
|
||||
from flask import stream_with_context, request, Response
|
||||
|
||||
@app.route('/stream')
|
||||
def streamed_response():
|
||||
def generate():
|
||||
yield 'Hello '
|
||||
yield request.args['name']
|
||||
yield '!'
|
||||
return Response(stream_with_context(generate()))
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
try:
|
||||
gen = iter(generator_or_function) # type: ignore[arg-type]
|
||||
except TypeError:
|
||||
|
||||
def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any:
|
||||
gen = generator_or_function(*args, **kwargs) # type: ignore[operator]
|
||||
return stream_with_context(gen)
|
||||
|
||||
return update_wrapper(decorator, generator_or_function) # type: ignore[arg-type]
|
||||
|
||||
def generator() -> t.Iterator[t.AnyStr | None]:
|
||||
ctx = _cv_request.get(None)
|
||||
if ctx is None:
|
||||
raise RuntimeError(
|
||||
"'stream_with_context' can only be used when a request"
|
||||
" context is active, such as in a view function."
|
||||
)
|
||||
with ctx:
|
||||
# Dummy sentinel. Has to be inside the context block or we're
|
||||
# not actually keeping the context around.
|
||||
yield None
|
||||
|
||||
# The try/finally is here so that if someone passes a WSGI level
|
||||
# iterator in we're still running the cleanup logic. Generators
|
||||
# don't need that because they are closed on their destruction
|
||||
# automatically.
|
||||
try:
|
||||
yield from gen
|
||||
finally:
|
||||
if hasattr(gen, "close"):
|
||||
gen.close()
|
||||
|
||||
# The trick is to start the generator. Then the code execution runs until
|
||||
# the first dummy None is yielded at which point the context was already
|
||||
# pushed. This item is discarded. Then when the iteration continues the
|
||||
# real generator is executed.
|
||||
wrapped_g = generator()
|
||||
next(wrapped_g)
|
||||
return wrapped_g # type: ignore[return-value]
|
||||
|
||||
|
||||
def make_response(*args: t.Any) -> Response:
|
||||
"""Sometimes it is necessary to set additional headers in a view. Because
|
||||
views do not have to return response objects but can return a value that
|
||||
is converted into a response object by Flask itself, it becomes tricky to
|
||||
add headers to it. This function can be called instead of using a return
|
||||
and you will get a response object which you can use to attach headers.
|
||||
|
||||
If a view looked like this and you want to add a new header::
|
||||
|
||||
def index():
|
||||
return render_template('index.html', foo=42)
|
||||
|
||||
You can now do something like this::
|
||||
|
||||
def index():
|
||||
response = make_response(render_template('index.html', foo=42))
|
||||
response.headers['X-Parachutes'] = 'parachutes are cool'
|
||||
return response
|
||||
|
||||
This function accepts the very same arguments you can return from a
|
||||
view function. This for example creates a response with a 404 error
|
||||
code::
|
||||
|
||||
response = make_response(render_template('not_found.html'), 404)
|
||||
|
||||
The other use case of this function is to force the return value of a
|
||||
view function into a response which is helpful with view
|
||||
decorators::
|
||||
|
||||
response = make_response(view_function())
|
||||
response.headers['X-Parachutes'] = 'parachutes are cool'
|
||||
|
||||
Internally this function does the following things:
|
||||
|
||||
- if no arguments are passed, it creates a new response object.
|
||||
- if one argument is passed, :meth:`flask.Flask.make_response`
|
||||
is invoked with it.
|
||||
- if more than one argument is passed, the arguments are passed
|
||||
to the :meth:`flask.Flask.make_response` function as tuple.
|
||||
|
||||
.. versionadded:: 0.6
|
||||
"""
|
||||
if not args:
|
||||
return current_app.response_class()
|
||||
if len(args) == 1:
|
||||
args = args[0]
|
||||
return current_app.make_response(args)
|
||||
|
||||
|
||||
def url_for(
|
||||
endpoint: str,
|
||||
*,
|
||||
_anchor: str | None = None,
|
||||
_method: str | None = None,
|
||||
_scheme: str | None = None,
|
||||
_external: bool | None = None,
|
||||
**values: t.Any,
|
||||
) -> str:
|
||||
"""Generate a URL to the given endpoint with the given values.
|
||||
|
||||
This requires an active request or application context, and calls
|
||||
:meth:`current_app.url_for() <flask.Flask.url_for>`. See that method
|
||||
for full documentation.
|
||||
|
||||
:param endpoint: The endpoint name associated with the URL to
|
||||
generate. If this starts with a ``.``, the current blueprint
|
||||
name (if any) will be used.
|
||||
:param _anchor: If given, append this as ``#anchor`` to the URL.
|
||||
:param _method: If given, generate the URL associated with this
|
||||
method for the endpoint.
|
||||
:param _scheme: If given, the URL will have this scheme if it is
|
||||
external.
|
||||
:param _external: If given, prefer the URL to be internal (False) or
|
||||
require it to be external (True). External URLs include the
|
||||
scheme and domain. When not in an active request, URLs are
|
||||
external by default.
|
||||
:param values: Values to use for the variable parts of the URL rule.
|
||||
Unknown keys are appended as query string arguments, like
|
||||
``?a=b&c=d``.
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
Calls ``current_app.url_for``, allowing an app to override the
|
||||
behavior.
|
||||
|
||||
.. versionchanged:: 0.10
|
||||
The ``_scheme`` parameter was added.
|
||||
|
||||
.. versionchanged:: 0.9
|
||||
The ``_anchor`` and ``_method`` parameters were added.
|
||||
|
||||
.. versionchanged:: 0.9
|
||||
Calls ``app.handle_url_build_error`` on build errors.
|
||||
"""
|
||||
return current_app.url_for(
|
||||
endpoint,
|
||||
_anchor=_anchor,
|
||||
_method=_method,
|
||||
_scheme=_scheme,
|
||||
_external=_external,
|
||||
**values,
|
||||
)
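
# Illustrative sketch (not part of Flask): building URLs inside a request
# context. The route and endpoint below are hypothetical.
def _example_build_urls() -> None:
    from flask import Flask

    app = Flask(__name__)

    @app.route("/users/<int:user_id>")
    def user_detail(user_id: int) -> str:  # endpoint name defaults to "user_detail"
        return f"user {user_id}"

    with app.test_request_context():
        print(url_for("user_detail", user_id=42))          # /users/42
        print(url_for("user_detail", user_id=42, page=2))  # unknown keys -> ?page=2
        print(url_for("user_detail", user_id=42, _external=True, _anchor="top"))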
|
||||
|
||||
|
||||
def redirect(
|
||||
location: str, code: int = 302, Response: type[BaseResponse] | None = None
|
||||
) -> BaseResponse:
|
||||
"""Create a redirect response object.
|
||||
|
||||
If :data:`~flask.current_app` is available, it will use its
|
||||
:meth:`~flask.Flask.redirect` method, otherwise it will use
|
||||
:func:`werkzeug.utils.redirect`.
|
||||
|
||||
:param location: The URL to redirect to.
|
||||
:param code: The status code for the redirect.
|
||||
:param Response: The response class to use. Not used when
|
||||
``current_app`` is active, which uses ``app.response_class``.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
Calls ``current_app.redirect`` if available instead of always
|
||||
using Werkzeug's default ``redirect``.
|
||||
"""
|
||||
if current_app:
|
||||
return current_app.redirect(location, code=code)
|
||||
|
||||
return _wz_redirect(location, code=code, Response=Response)
|
||||
|
||||
|
||||
def abort(code: int | BaseResponse, *args: t.Any, **kwargs: t.Any) -> t.NoReturn:
|
||||
"""Raise an :exc:`~werkzeug.exceptions.HTTPException` for the given
|
||||
status code.
|
||||
|
||||
If :data:`~flask.current_app` is available, it will call its
|
||||
:attr:`~flask.Flask.aborter` object, otherwise it will use
|
||||
:func:`werkzeug.exceptions.abort`.
|
||||
|
||||
:param code: The status code for the exception, which must be
|
||||
registered in ``app.aborter``.
|
||||
:param args: Passed to the exception.
|
||||
:param kwargs: Passed to the exception.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
Calls ``current_app.aborter`` if available instead of always
|
||||
using Werkzeug's default ``abort``.
|
||||
"""
|
||||
if current_app:
|
||||
current_app.aborter(code, *args, **kwargs)
|
||||
|
||||
_wz_abort(code, *args, **kwargs)
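
# Illustrative sketch (not part of Flask): abort() is typically called inside a
# view for its side effect of raising an HTTPException. The route below is
# hypothetical.
def _example_abort_in_view() -> None:
    from flask import Flask

    app = Flask(__name__)

    @app.route("/items/<int:item_id>")
    def get_item(item_id: int) -> str:
        if item_id != 1:
            abort(404, description="item not found")  # raises NotFound
        return "the one item"

    with app.test_client() as client:
        assert client.get("/items/2").status_code == 404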
|
||||
|
||||
|
||||
def get_template_attribute(template_name: str, attribute: str) -> t.Any:
|
||||
"""Loads a macro (or variable) a template exports. This can be used to
|
||||
invoke a macro from within Python code. If you for example have a
|
||||
template named :file:`_cider.html` with the following contents:
|
||||
|
||||
.. sourcecode:: html+jinja
|
||||
|
||||
{% macro hello(name) %}Hello {{ name }}!{% endmacro %}
|
||||
|
||||
You can access this from Python code like this::
|
||||
|
||||
hello = get_template_attribute('_cider.html', 'hello')
|
||||
return hello('World')
|
||||
|
||||
.. versionadded:: 0.2
|
||||
|
||||
:param template_name: the name of the template
|
||||
:param attribute: the name of the variable or macro to access
|
||||
"""
|
||||
return getattr(current_app.jinja_env.get_template(template_name).module, attribute)
|
||||
|
||||
|
||||
def flash(message: str, category: str = "message") -> None:
|
||||
"""Flashes a message to the next request. In order to remove the
|
||||
flashed message from the session and to display it to the user,
|
||||
the template has to call :func:`get_flashed_messages`.
|
||||
|
||||
.. versionchanged:: 0.3
|
||||
`category` parameter added.
|
||||
|
||||
:param message: the message to be flashed.
|
||||
:param category: the category for the message. The following values
|
||||
are recommended: ``'message'`` for any kind of message,
|
||||
``'error'`` for errors, ``'info'`` for information
|
||||
messages and ``'warning'`` for warnings. However any
|
||||
kind of string can be used as category.
|
||||
"""
|
||||
# Original implementation:
|
||||
#
|
||||
# session.setdefault('_flashes', []).append((category, message))
|
||||
#
|
||||
# This assumed that changes made to mutable structures in the session are
|
||||
# always in sync with the session object, which is not true for session
|
||||
# implementations that use external storage for keeping their keys/values.
|
||||
flashes = session.get("_flashes", [])
|
||||
flashes.append((category, message))
|
||||
session["_flashes"] = flashes
|
||||
app = current_app._get_current_object() # type: ignore
|
||||
message_flashed.send(
|
||||
app,
|
||||
_async_wrapper=app.ensure_sync,
|
||||
message=message,
|
||||
category=category,
|
||||
)
|
||||
|
||||
|
||||
def get_flashed_messages(
|
||||
with_categories: bool = False, category_filter: t.Iterable[str] = ()
|
||||
) -> list[str] | list[tuple[str, str]]:
|
||||
"""Pulls all flashed messages from the session and returns them.
|
||||
Further calls in the same request to the function will return
|
||||
the same messages. By default just the messages are returned,
|
||||
but when `with_categories` is set to ``True``, the return value will
|
||||
be a list of tuples in the form ``(category, message)`` instead.
|
||||
|
||||
Filter the flashed messages to one or more categories by providing those
|
||||
categories in `category_filter`. This allows rendering categories in
|
||||
separate html blocks. The `with_categories` and `category_filter`
|
||||
arguments are distinct:
|
||||
|
||||
* `with_categories` controls whether categories are returned with message
|
||||
text (``True`` gives a tuple, where ``False`` gives just the message text).
|
||||
* `category_filter` filters the messages down to only those matching the
|
||||
provided categories.
|
||||
|
||||
See :doc:`/patterns/flashing` for examples.
|
||||
|
||||
.. versionchanged:: 0.3
|
||||
`with_categories` parameter added.
|
||||
|
||||
.. versionchanged:: 0.9
|
||||
`category_filter` parameter added.
|
||||
|
||||
:param with_categories: set to ``True`` to also receive categories.
|
||||
:param category_filter: filter of categories to limit return values. Only
|
||||
categories in the list will be returned.
|
||||
"""
|
||||
flashes = request_ctx.flashes
|
||||
if flashes is None:
|
||||
flashes = session.pop("_flashes") if "_flashes" in session else []
|
||||
request_ctx.flashes = flashes
|
||||
if category_filter:
|
||||
flashes = list(filter(lambda f: f[0] in category_filter, flashes))
|
||||
if not with_categories:
|
||||
return [x[1] for x in flashes]
|
||||
return flashes
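
# Illustrative sketch (not part of Flask): flashing stores messages in the
# session, so a secret key is required. The key and categories below are
# hypothetical.
def _example_flashing() -> None:
    from flask import Flask

    app = Flask(__name__)
    app.secret_key = "dev-only-secret"

    with app.test_request_context():
        flash("Saved.", "info")
        flash("Disk almost full.", "warning")

        # usually called from a template; category_filter narrows the result
        assert get_flashed_messages(category_filter=["warning"]) == ["Disk almost full."]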
|
||||
|
||||
|
||||
def _prepare_send_file_kwargs(**kwargs: t.Any) -> dict[str, t.Any]:
|
||||
if kwargs.get("max_age") is None:
|
||||
kwargs["max_age"] = current_app.get_send_file_max_age
|
||||
|
||||
kwargs.update(
|
||||
environ=request.environ,
|
||||
use_x_sendfile=current_app.config["USE_X_SENDFILE"],
|
||||
response_class=current_app.response_class,
|
||||
_root_path=current_app.root_path, # type: ignore
|
||||
)
|
||||
return kwargs
|
||||
|
||||
|
||||
def send_file(
|
||||
path_or_file: os.PathLike[t.AnyStr] | str | t.BinaryIO,
|
||||
mimetype: str | None = None,
|
||||
as_attachment: bool = False,
|
||||
download_name: str | None = None,
|
||||
conditional: bool = True,
|
||||
etag: bool | str = True,
|
||||
last_modified: datetime | int | float | None = None,
|
||||
max_age: None | (int | t.Callable[[str | None], int | None]) = None,
|
||||
) -> Response:
|
||||
"""Send the contents of a file to the client.
|
||||
|
||||
The first argument can be a file path or a file-like object. Paths
|
||||
are preferred in most cases because Werkzeug can manage the file and
|
||||
get extra information from the path. Passing a file-like object
|
||||
requires that the file is opened in binary mode, and is mostly
|
||||
useful when building a file in memory with :class:`io.BytesIO`.
|
||||
|
||||
Never pass file paths provided by a user. The path is assumed to be
|
||||
trusted, so a user could craft a path to access a file you didn't
|
||||
intend. Use :func:`send_from_directory` to safely serve
|
||||
user-requested paths from within a directory.
|
||||
|
||||
If the WSGI server sets a ``file_wrapper`` in ``environ``, it is
|
||||
used, otherwise Werkzeug's built-in wrapper is used. Alternatively,
|
||||
if the HTTP server supports ``X-Sendfile``, configuring Flask with
|
||||
``USE_X_SENDFILE = True`` will tell the server to send the given
|
||||
path, which is much more efficient than reading it in Python.
|
||||
|
||||
:param path_or_file: The path to the file to send, relative to the
|
||||
current working directory if a relative path is given.
|
||||
Alternatively, a file-like object opened in binary mode. Make
|
||||
sure the file pointer is seeked to the start of the data.
|
||||
:param mimetype: The MIME type to send for the file. If not
|
||||
provided, it will try to detect it from the file name.
|
||||
:param as_attachment: Indicate to a browser that it should offer to
|
||||
save the file instead of displaying it.
|
||||
:param download_name: The default name browsers will use when saving
|
||||
the file. Defaults to the passed file name.
|
||||
:param conditional: Enable conditional and range responses based on
|
||||
request headers. Requires passing a file path and ``environ``.
|
||||
:param etag: Calculate an ETag for the file, which requires passing
|
||||
a file path. Can also be a string to use instead.
|
||||
:param last_modified: The last modified time to send for the file,
|
||||
in seconds. If not provided, it will try to detect it from the
|
||||
file path.
|
||||
:param max_age: How long the client should cache the file, in
|
||||
seconds. If set, ``Cache-Control`` will be ``public``, otherwise
|
||||
it will be ``no-cache`` to prefer conditional caching.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
``download_name`` replaces the ``attachment_filename``
|
||||
parameter. If ``as_attachment=False``, it is passed with
|
||||
``Content-Disposition: inline`` instead.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
``max_age`` replaces the ``cache_timeout`` parameter.
|
||||
``conditional`` is enabled and ``max_age`` is not set by
|
||||
default.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
``etag`` replaces the ``add_etags`` parameter. It can be a
|
||||
string to use instead of generating one.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
Passing a file-like object that inherits from
|
||||
:class:`~io.TextIOBase` will raise a :exc:`ValueError` rather
|
||||
than sending an empty file.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
Moved the implementation to Werkzeug. This is now a wrapper to
|
||||
pass some Flask-specific arguments.
|
||||
|
||||
.. versionchanged:: 1.1
|
||||
``filename`` may be a :class:`~os.PathLike` object.
|
||||
|
||||
.. versionchanged:: 1.1
|
||||
Passing a :class:`~io.BytesIO` object supports range requests.
|
||||
|
||||
.. versionchanged:: 1.0.3
|
||||
Filenames are encoded with ASCII instead of Latin-1 for broader
|
||||
compatibility with WSGI servers.
|
||||
|
||||
.. versionchanged:: 1.0
|
||||
UTF-8 filenames as specified in :rfc:`2231` are supported.
|
||||
|
||||
.. versionchanged:: 0.12
|
||||
The filename is no longer automatically inferred from file
|
||||
objects. If you want to use automatic MIME and etag support,
|
||||
pass a filename via ``filename_or_fp`` or
|
||||
``attachment_filename``.
|
||||
|
||||
.. versionchanged:: 0.12
|
||||
``attachment_filename`` is preferred over ``filename`` for MIME
|
||||
detection.
|
||||
|
||||
.. versionchanged:: 0.9
|
||||
``cache_timeout`` defaults to
|
||||
:meth:`Flask.get_send_file_max_age`.
|
||||
|
||||
.. versionchanged:: 0.7
|
||||
MIME guessing and etag support for file-like objects was
|
||||
removed because it was unreliable. Pass a filename if you are
|
||||
able to, otherwise attach an etag yourself.
|
||||
|
||||
.. versionchanged:: 0.5
|
||||
The ``add_etags``, ``cache_timeout`` and ``conditional``
|
||||
parameters were added. The default behavior is to add etags.
|
||||
|
||||
.. versionadded:: 0.2
|
||||
"""
|
||||
return werkzeug.utils.send_file( # type: ignore[return-value]
|
||||
**_prepare_send_file_kwargs(
|
||||
path_or_file=path_or_file,
|
||||
environ=request.environ,
|
||||
mimetype=mimetype,
|
||||
as_attachment=as_attachment,
|
||||
download_name=download_name,
|
||||
conditional=conditional,
|
||||
etag=etag,
|
||||
last_modified=last_modified,
|
||||
max_age=max_age,
|
||||
)
|
||||
)
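
# Illustrative sketch (not part of Flask): sending an in-memory file with an
# explicit mimetype and download name. The route and CSV content below are
# hypothetical; path-only extras such as ETag generation are skipped for
# file objects.
def _example_send_generated_file() -> None:
    import io

    from flask import Flask

    app = Flask(__name__)

    @app.route("/report.csv")
    def report() -> Response:
        buf = io.BytesIO(b"id,name\n1,example\n")
        return send_file(
            buf,
            mimetype="text/csv",
            as_attachment=True,
            download_name="report.csv",
        )

    with app.test_client() as client:
        assert client.get("/report.csv").status_code == 200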
|
||||
|
||||
|
||||
def send_from_directory(
|
||||
directory: os.PathLike[str] | str,
|
||||
path: os.PathLike[str] | str,
|
||||
**kwargs: t.Any,
|
||||
) -> Response:
|
||||
"""Send a file from within a directory using :func:`send_file`.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@app.route("/uploads/<path:name>")
|
||||
def download_file(name):
|
||||
return send_from_directory(
|
||||
app.config['UPLOAD_FOLDER'], name, as_attachment=True
|
||||
)
|
||||
|
||||
This is a secure way to serve files from a folder, such as static
|
||||
files or uploads. Uses :func:`~werkzeug.security.safe_join` to
|
||||
ensure the path coming from the client is not maliciously crafted to
|
||||
point outside the specified directory.
|
||||
|
||||
If the final path does not point to an existing regular file,
|
||||
raises a 404 :exc:`~werkzeug.exceptions.NotFound` error.
|
||||
|
||||
:param directory: The directory that ``path`` must be located under,
|
||||
relative to the current application's root path.
|
||||
:param path: The path to the file to send, relative to
|
||||
``directory``.
|
||||
:param kwargs: Arguments to pass to :func:`send_file`.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
``path`` replaces the ``filename`` parameter.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
Moved the implementation to Werkzeug. This is now a wrapper to
|
||||
pass some Flask-specific arguments.
|
||||
|
||||
.. versionadded:: 0.5
|
||||
"""
|
||||
return werkzeug.utils.send_from_directory( # type: ignore[return-value]
|
||||
directory, path, **_prepare_send_file_kwargs(**kwargs)
|
||||
)
|
||||
|
||||
|
||||
def get_root_path(import_name: str) -> str:
|
||||
"""Find the root path of a package, or the path that contains a
|
||||
module. If it cannot be found, returns the current working
|
||||
directory.
|
||||
|
||||
Not to be confused with the value returned by :func:`find_package`.
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
# Module already imported and has a file attribute. Use that first.
|
||||
mod = sys.modules.get(import_name)
|
||||
|
||||
if mod is not None and hasattr(mod, "__file__") and mod.__file__ is not None:
|
||||
return os.path.dirname(os.path.abspath(mod.__file__))
|
||||
|
||||
# Next attempt: check the loader.
|
||||
try:
|
||||
spec = importlib.util.find_spec(import_name)
|
||||
|
||||
if spec is None:
|
||||
raise ValueError
|
||||
except (ImportError, ValueError):
|
||||
loader = None
|
||||
else:
|
||||
loader = spec.loader
|
||||
|
||||
# Loader does not exist or we're referring to an unloaded main
|
||||
# module or a main module without path (interactive sessions), go
|
||||
# with the current working directory.
|
||||
if loader is None:
|
||||
return os.getcwd()
|
||||
|
||||
if hasattr(loader, "get_filename"):
|
||||
filepath = loader.get_filename(import_name)
|
||||
else:
|
||||
# Fall back to imports.
|
||||
__import__(import_name)
|
||||
mod = sys.modules[import_name]
|
||||
filepath = getattr(mod, "__file__", None)
|
||||
|
||||
# If we don't have a file path it might be because it is a
|
||||
# namespace package. In this case pick the root path from the
|
||||
# first module that is contained in the package.
|
||||
if filepath is None:
|
||||
raise RuntimeError(
|
||||
"No root path can be found for the provided module"
|
||||
f" {import_name!r}. This can happen because the module"
|
||||
" came from an import hook that does not provide file"
|
||||
" name information or because it's a namespace package."
|
||||
" In this case the root path needs to be explicitly"
|
||||
" provided."
|
||||
)
|
||||
|
||||
# filepath is import_name.py for a module, or __init__.py for a package.
|
||||
return os.path.dirname(os.path.abspath(filepath)) # type: ignore[no-any-return]
|
||||
|
||||
|
||||
@lru_cache(maxsize=None)
|
||||
def _split_blueprint_path(name: str) -> list[str]:
|
||||
out: list[str] = [name]
|
||||
|
||||
if "." in name:
|
||||
out.extend(_split_blueprint_path(name.rpartition(".")[0]))
|
||||
|
||||
return out
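
# Illustrative sketch (not part of Flask): the helper expands a nested
# blueprint name into itself plus every ancestor, most specific first.
def _example_split_blueprint_path() -> None:
    assert _split_blueprint_path("admin.users.detail") == [
        "admin.users.detail",
        "admin.users",
        "admin",
    ]
    assert _split_blueprint_path("admin") == ["admin"]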
|
||||
@ -0,0 +1,170 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json as _json
|
||||
import typing as t
|
||||
|
||||
from ..globals import current_app
|
||||
from .provider import _default
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from ..wrappers import Response
|
||||
|
||||
|
||||
def dumps(obj: t.Any, **kwargs: t.Any) -> str:
|
||||
"""Serialize data as JSON.
|
||||
|
||||
If :data:`~flask.current_app` is available, it will use its
|
||||
:meth:`app.json.dumps() <flask.json.provider.JSONProvider.dumps>`
|
||||
method, otherwise it will use :func:`json.dumps`.
|
||||
|
||||
:param obj: The data to serialize.
|
||||
:param kwargs: Arguments passed to the ``dumps`` implementation.
|
||||
|
||||
.. versionchanged:: 2.3
|
||||
The ``app`` parameter was removed.
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
Calls ``current_app.json.dumps``, allowing an app to override
|
||||
the behavior.
|
||||
|
||||
.. versionchanged:: 2.0.2
|
||||
:class:`decimal.Decimal` is supported by converting to a string.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
``encoding`` will be removed in Flask 2.1.
|
||||
|
||||
.. versionchanged:: 1.0.3
|
||||
``app`` can be passed directly, rather than requiring an app
|
||||
context for configuration.
|
||||
"""
|
||||
if current_app:
|
||||
return current_app.json.dumps(obj, **kwargs)
|
||||
|
||||
kwargs.setdefault("default", _default)
|
||||
return _json.dumps(obj, **kwargs)
|
||||
|
||||
|
||||
def dump(obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None:
|
||||
"""Serialize data as JSON and write to a file.
|
||||
|
||||
If :data:`~flask.current_app` is available, it will use its
|
||||
:meth:`app.json.dump() <flask.json.provider.JSONProvider.dump>`
|
||||
method, otherwise it will use :func:`json.dump`.
|
||||
|
||||
:param obj: The data to serialize.
|
||||
:param fp: A file opened for writing text. Should use the UTF-8
|
||||
encoding to be valid JSON.
|
||||
:param kwargs: Arguments passed to the ``dump`` implementation.
|
||||
|
||||
.. versionchanged:: 2.3
|
||||
The ``app`` parameter was removed.
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
Calls ``current_app.json.dump``, allowing an app to override
|
||||
the behavior.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
Writing to a binary file, and the ``encoding`` argument, will be
|
||||
removed in Flask 2.1.
|
||||
"""
|
||||
if current_app:
|
||||
current_app.json.dump(obj, fp, **kwargs)
|
||||
else:
|
||||
kwargs.setdefault("default", _default)
|
||||
_json.dump(obj, fp, **kwargs)
|
||||
|
||||
|
||||
def loads(s: str | bytes, **kwargs: t.Any) -> t.Any:
|
||||
"""Deserialize data as JSON.
|
||||
|
||||
If :data:`~flask.current_app` is available, it will use its
|
||||
:meth:`app.json.loads() <flask.json.provider.JSONProvider.loads>`
|
||||
method, otherwise it will use :func:`json.loads`.
|
||||
|
||||
:param s: Text or UTF-8 bytes.
|
||||
:param kwargs: Arguments passed to the ``loads`` implementation.
|
||||
|
||||
.. versionchanged:: 2.3
|
||||
The ``app`` parameter was removed.
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
Calls ``current_app.json.loads``, allowing an app to override
|
||||
the behavior.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
``encoding`` will be removed in Flask 2.1. The data must be a
|
||||
string or UTF-8 bytes.
|
||||
|
||||
.. versionchanged:: 1.0.3
|
||||
``app`` can be passed directly, rather than requiring an app
|
||||
context for configuration.
|
||||
"""
|
||||
if current_app:
|
||||
return current_app.json.loads(s, **kwargs)
|
||||
|
||||
return _json.loads(s, **kwargs)
|
||||
|
||||
|
||||
def load(fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any:
|
||||
"""Deserialize data as JSON read from a file.
|
||||
|
||||
If :data:`~flask.current_app` is available, it will use its
|
||||
:meth:`app.json.load() <flask.json.provider.JSONProvider.load>`
|
||||
method, otherwise it will use :func:`json.load`.
|
||||
|
||||
:param fp: A file opened for reading text or UTF-8 bytes.
|
||||
:param kwargs: Arguments passed to the ``load`` implementation.
|
||||
|
||||
.. versionchanged:: 2.3
|
||||
The ``app`` parameter was removed.
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
Calls ``current_app.json.load``, allowing an app to override
|
||||
the behavior.
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
The ``app`` parameter will be removed in Flask 2.3.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
``encoding`` will be removed in Flask 2.1. The file must be text
|
||||
mode, or binary mode with UTF-8 bytes.
|
||||
"""
|
||||
if current_app:
|
||||
return current_app.json.load(fp, **kwargs)
|
||||
|
||||
return _json.load(fp, **kwargs)
|
||||
|
||||
|
||||
def jsonify(*args: t.Any, **kwargs: t.Any) -> Response:
|
||||
"""Serialize the given arguments as JSON, and return a
|
||||
:class:`~flask.Response` object with the ``application/json``
|
||||
mimetype. A dict or list returned from a view will be converted to a
|
||||
JSON response automatically without needing to call this.
|
||||
|
||||
This requires an active request or application context, and calls
|
||||
:meth:`app.json.response() <flask.json.provider.JSONProvider.response>`.
|
||||
|
||||
In debug mode, the output is formatted with indentation to make it
|
||||
easier to read. This may also be controlled by the provider.
|
||||
|
||||
Either positional or keyword arguments can be given, not both.
|
||||
If no arguments are given, ``None`` is serialized.
|
||||
|
||||
:param args: A single value to serialize, or multiple values to
|
||||
treat as a list to serialize.
|
||||
:param kwargs: Treat as a dict to serialize.
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
Calls ``current_app.json.response``, allowing an app to override
|
||||
the behavior.
|
||||
|
||||
.. versionchanged:: 2.0.2
|
||||
:class:`decimal.Decimal` is supported by converting to a string.
|
||||
|
||||
.. versionchanged:: 0.11
|
||||
Added support for serializing top-level arrays. This was a
|
||||
security risk in ancient browsers. See :ref:`security-json`.
|
||||
|
||||
.. versionadded:: 0.2
|
||||
"""
|
||||
return current_app.json.response(*args, **kwargs) # type: ignore[return-value]
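
# Illustrative sketch (not part of Flask): jsonify() in a view, next to the
# dict return value that Flask converts automatically. The routes below are
# hypothetical.
def _example_jsonify_view() -> None:
    from flask import Flask

    app = Flask(__name__)

    @app.route("/status")
    def status():
        return jsonify(status="ok", pending=0)  # kwargs become one JSON object

    @app.route("/status-implicit")
    def status_implicit():
        return {"status": "ok", "pending": 0}  # dicts are converted without jsonify

    with app.test_client() as client:
        assert client.get("/status").get_json() == {"status": "ok", "pending": 0}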
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,215 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import decimal
|
||||
import json
|
||||
import typing as t
|
||||
import uuid
|
||||
import weakref
|
||||
from datetime import date
|
||||
|
||||
from werkzeug.http import http_date
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from werkzeug.sansio.response import Response
|
||||
|
||||
from ..sansio.app import App
|
||||
|
||||
|
||||
class JSONProvider:
|
||||
"""A standard set of JSON operations for an application. Subclasses
|
||||
of this can be used to customize JSON behavior or use different
|
||||
JSON libraries.
|
||||
|
||||
To implement a provider for a specific library, subclass this base
|
||||
class and implement at least :meth:`dumps` and :meth:`loads`. All
|
||||
other methods have default implementations.
|
||||
|
||||
To use a different provider, either subclass ``Flask`` and set
|
||||
:attr:`~flask.Flask.json_provider_class` to a provider class, or set
|
||||
:attr:`app.json <flask.Flask.json>` to an instance of the class.
|
||||
|
||||
:param app: An application instance. This will be stored as a
|
||||
:class:`weakref.proxy` on the :attr:`_app` attribute.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
|
||||
def __init__(self, app: App) -> None:
|
||||
self._app: App = weakref.proxy(app)
|
||||
|
||||
def dumps(self, obj: t.Any, **kwargs: t.Any) -> str:
|
||||
"""Serialize data as JSON.
|
||||
|
||||
:param obj: The data to serialize.
|
||||
:param kwargs: May be passed to the underlying JSON library.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def dump(self, obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None:
|
||||
"""Serialize data as JSON and write to a file.
|
||||
|
||||
:param obj: The data to serialize.
|
||||
:param fp: A file opened for writing text. Should use the UTF-8
|
||||
encoding to be valid JSON.
|
||||
:param kwargs: May be passed to the underlying JSON library.
|
||||
"""
|
||||
fp.write(self.dumps(obj, **kwargs))
|
||||
|
||||
def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any:
|
||||
"""Deserialize data as JSON.
|
||||
|
||||
:param s: Text or UTF-8 bytes.
|
||||
:param kwargs: May be passed to the underlying JSON library.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def load(self, fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any:
|
||||
"""Deserialize data as JSON read from a file.
|
||||
|
||||
:param fp: A file opened for reading text or UTF-8 bytes.
|
||||
:param kwargs: May be passed to the underlying JSON library.
|
||||
"""
|
||||
return self.loads(fp.read(), **kwargs)
|
||||
|
||||
def _prepare_response_obj(
|
||||
self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any]
|
||||
) -> t.Any:
|
||||
if args and kwargs:
|
||||
raise TypeError("app.json.response() takes either args or kwargs, not both")
|
||||
|
||||
if not args and not kwargs:
|
||||
return None
|
||||
|
||||
if len(args) == 1:
|
||||
return args[0]
|
||||
|
||||
return args or kwargs
|
||||
|
||||
def response(self, *args: t.Any, **kwargs: t.Any) -> Response:
|
||||
"""Serialize the given arguments as JSON, and return a
|
||||
:class:`~flask.Response` object with the ``application/json``
|
||||
mimetype.
|
||||
|
||||
The :func:`~flask.json.jsonify` function calls this method for
|
||||
the current application.
|
||||
|
||||
Either positional or keyword arguments can be given, not both.
|
||||
If no arguments are given, ``None`` is serialized.
|
||||
|
||||
:param args: A single value to serialize, or multiple values to
|
||||
treat as a list to serialize.
|
||||
:param kwargs: Treat as a dict to serialize.
|
||||
"""
|
||||
obj = self._prepare_response_obj(args, kwargs)
|
||||
return self._app.response_class(self.dumps(obj), mimetype="application/json")
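
# Illustrative sketch (not part of Flask): a minimal provider built on the
# stdlib json module that also serializes sets as sorted lists. A provider for
# a third-party JSON library would implement dumps/loads with that library
# instead; the class name below is hypothetical.
class _ExampleSortedSetProvider(JSONProvider):
    def dumps(self, obj: t.Any, **kwargs: t.Any) -> str:
        kwargs.setdefault("default", self._convert)
        return json.dumps(obj, **kwargs)

    def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any:
        return json.loads(s, **kwargs)

    @staticmethod
    def _convert(o: t.Any) -> t.Any:
        if isinstance(o, (set, frozenset)):
            return sorted(o)

        raise TypeError(f"Object of type {type(o).__name__} is not JSON serializable")


# Hypothetical wiring, e.g. in an application factory:
#
#     app = Flask(__name__)
#     app.json = _ExampleSortedSetProvider(app)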
|
||||
|
||||
|
||||
def _default(o: t.Any) -> t.Any:
|
||||
if isinstance(o, date):
|
||||
return http_date(o)
|
||||
|
||||
if isinstance(o, (decimal.Decimal, uuid.UUID)):
|
||||
return str(o)
|
||||
|
||||
if dataclasses and dataclasses.is_dataclass(o):
|
||||
return dataclasses.asdict(o)
|
||||
|
||||
if hasattr(o, "__html__"):
|
||||
return str(o.__html__())
|
||||
|
||||
raise TypeError(f"Object of type {type(o).__name__} is not JSON serializable")
|
||||
|
||||
|
||||
class DefaultJSONProvider(JSONProvider):
|
||||
"""Provide JSON operations using Python's built-in :mod:`json`
|
||||
library. Serializes the following additional data types:
|
||||
|
||||
- :class:`datetime.datetime` and :class:`datetime.date` are
|
||||
serialized to :rfc:`822` strings. This is the same as the HTTP
|
||||
date format.
|
||||
- :class:`uuid.UUID` is serialized to a string.
|
||||
- :class:`dataclasses.dataclass` is passed to
|
||||
:func:`dataclasses.asdict`.
|
||||
- :class:`~markupsafe.Markup` (or any object with a ``__html__``
|
||||
method) will call the ``__html__`` method to get a string.
|
||||
"""
|
||||
|
||||
default: t.Callable[[t.Any], t.Any] = staticmethod(_default) # type: ignore[assignment]
|
||||
"""Apply this function to any object that :meth:`json.dumps` does
|
||||
not know how to serialize. It should return a valid JSON type or
|
||||
raise a ``TypeError``.
|
||||
"""
|
||||
|
||||
ensure_ascii = True
|
||||
"""Replace non-ASCII characters with escape sequences. This may be
|
||||
more compatible with some clients, but can be disabled for better
|
||||
performance and size.
|
||||
"""
|
||||
|
||||
sort_keys = True
|
||||
"""Sort the keys in any serialized dicts. This may be useful for
|
||||
some caching situations, but can be disabled for better performance.
|
||||
When enabled, keys must all be strings; they are not converted
|
||||
before sorting.
|
||||
"""
|
||||
|
||||
compact: bool | None = None
|
||||
"""If ``True``, or ``None`` out of debug mode, the :meth:`response`
|
||||
output will not add indentation, newlines, or spaces. If ``False``,
|
||||
or ``None`` in debug mode, it will use a non-compact representation.
|
||||
"""
|
||||
|
||||
mimetype = "application/json"
|
||||
"""The mimetype set in :meth:`response`."""
|
||||
|
||||
def dumps(self, obj: t.Any, **kwargs: t.Any) -> str:
|
||||
"""Serialize data as JSON to a string.
|
||||
|
||||
Keyword arguments are passed to :func:`json.dumps`. Sets some
|
||||
parameter defaults from the :attr:`default`,
|
||||
:attr:`ensure_ascii`, and :attr:`sort_keys` attributes.
|
||||
|
||||
:param obj: The data to serialize.
|
||||
:param kwargs: Passed to :func:`json.dumps`.
|
||||
"""
|
||||
kwargs.setdefault("default", self.default)
|
||||
kwargs.setdefault("ensure_ascii", self.ensure_ascii)
|
||||
kwargs.setdefault("sort_keys", self.sort_keys)
|
||||
return json.dumps(obj, **kwargs)
|
||||
|
||||
def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any:
|
||||
"""Deserialize data as JSON from a string or bytes.
|
||||
|
||||
:param s: Text or UTF-8 bytes.
|
||||
:param kwargs: Passed to :func:`json.loads`.
|
||||
"""
|
||||
return json.loads(s, **kwargs)
|
||||
|
||||
def response(self, *args: t.Any, **kwargs: t.Any) -> Response:
|
||||
"""Serialize the given arguments as JSON, and return a
|
||||
:class:`~flask.Response` object with it. The response mimetype
|
||||
will be "application/json" and can be changed with
|
||||
:attr:`mimetype`.
|
||||
|
||||
If :attr:`compact` is ``False`` or debug mode is enabled, the
|
||||
output will be formatted to be easier to read.
|
||||
|
||||
Either positional or keyword arguments can be given, not both.
|
||||
If no arguments are given, ``None`` is serialized.
|
||||
|
||||
:param args: A single value to serialize, or multiple values to
|
||||
treat as a list to serialize.
|
||||
:param kwargs: Treat as a dict to serialize.
|
||||
"""
|
||||
obj = self._prepare_response_obj(args, kwargs)
|
||||
dump_args: dict[str, t.Any] = {}
|
||||
|
||||
if (self.compact is None and self._app.debug) or self.compact is False:
|
||||
dump_args.setdefault("indent", 2)
|
||||
else:
|
||||
dump_args.setdefault("separators", (",", ":"))
|
||||
|
||||
return self._app.response_class(
|
||||
f"{self.dumps(obj, **dump_args)}\n", mimetype=self.mimetype
|
||||
)
|
||||
@ -0,0 +1,327 @@
|
||||
"""
|
||||
Tagged JSON
|
||||
~~~~~~~~~~~
|
||||
|
||||
A compact representation for lossless serialization of non-standard JSON
|
||||
types. :class:`~flask.sessions.SecureCookieSessionInterface` uses this
|
||||
to serialize the session data, but it may be useful in other places. It
|
||||
can be extended to support other types.
|
||||
|
||||
.. autoclass:: TaggedJSONSerializer
|
||||
:members:
|
||||
|
||||
.. autoclass:: JSONTag
|
||||
:members:
|
||||
|
||||
Let's see an example that adds support for
|
||||
:class:`~collections.OrderedDict`. Dicts don't have an order in JSON, so
|
||||
to handle this we will dump the items as a list of ``[key, value]``
|
||||
pairs. Subclass :class:`JSONTag` and give it the new key ``' od'`` to
|
||||
identify the type. The session serializer processes dicts first, so
|
||||
insert the new tag at the front of the order since ``OrderedDict`` must
|
||||
be processed before ``dict``.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from collections import OrderedDict

from flask.json.tag import JSONTag
|
||||
|
||||
class TagOrderedDict(JSONTag):
|
||||
__slots__ = ('serializer',)
|
||||
key = ' od'
|
||||
|
||||
def check(self, value):
|
||||
return isinstance(value, OrderedDict)
|
||||
|
||||
def to_json(self, value):
|
||||
return [[k, self.serializer.tag(v)] for k, v in value.items()]
|
||||
|
||||
def to_python(self, value):
|
||||
return OrderedDict(value)
|
||||
|
||||
app.session_interface.serializer.register(TagOrderedDict, index=0)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from base64 import b64decode
|
||||
from base64 import b64encode
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from markupsafe import Markup
|
||||
from werkzeug.http import http_date
|
||||
from werkzeug.http import parse_date
|
||||
|
||||
from ..json import dumps
|
||||
from ..json import loads
|
||||
|
||||
|
||||
class JSONTag:
|
||||
"""Base class for defining type tags for :class:`TaggedJSONSerializer`."""
|
||||
|
||||
__slots__ = ("serializer",)
|
||||
|
||||
#: The tag to mark the serialized object with. If empty, this tag is
|
||||
#: only used as an intermediate step during tagging.
|
||||
key: str = ""
|
||||
|
||||
def __init__(self, serializer: TaggedJSONSerializer) -> None:
|
||||
"""Create a tagger for the given serializer."""
|
||||
self.serializer = serializer
|
||||
|
||||
def check(self, value: t.Any) -> bool:
|
||||
"""Check if the given value should be tagged by this tag."""
|
||||
raise NotImplementedError
|
||||
|
||||
def to_json(self, value: t.Any) -> t.Any:
|
||||
"""Convert the Python object to an object that is a valid JSON type.
|
||||
The tag will be added later."""
|
||||
raise NotImplementedError
|
||||
|
||||
def to_python(self, value: t.Any) -> t.Any:
|
||||
"""Convert the JSON representation back to the correct type. The tag
|
||||
will already be removed."""
|
||||
raise NotImplementedError
|
||||
|
||||
def tag(self, value: t.Any) -> dict[str, t.Any]:
|
||||
"""Convert the value to a valid JSON type and add the tag structure
|
||||
around it."""
|
||||
return {self.key: self.to_json(value)}
|
||||
|
||||
|
||||
class TagDict(JSONTag):
|
||||
"""Tag for 1-item dicts whose only key matches a registered tag.
|
||||
|
||||
Internally, the dict key is suffixed with `__`, and the suffix is removed
|
||||
when deserializing.
|
||||
"""
|
||||
|
||||
__slots__ = ()
|
||||
key = " di"
|
||||
|
||||
def check(self, value: t.Any) -> bool:
|
||||
return (
|
||||
isinstance(value, dict)
|
||||
and len(value) == 1
|
||||
and next(iter(value)) in self.serializer.tags
|
||||
)
|
||||
|
||||
def to_json(self, value: t.Any) -> t.Any:
|
||||
key = next(iter(value))
|
||||
return {f"{key}__": self.serializer.tag(value[key])}
|
||||
|
||||
def to_python(self, value: t.Any) -> t.Any:
|
||||
key = next(iter(value))
|
||||
return {key[:-2]: value[key]}
|
||||
|
||||
|
||||
class PassDict(JSONTag):
|
||||
__slots__ = ()
|
||||
|
||||
def check(self, value: t.Any) -> bool:
|
||||
return isinstance(value, dict)
|
||||
|
||||
def to_json(self, value: t.Any) -> t.Any:
|
||||
# JSON objects may only have string keys, so don't bother tagging the
|
||||
# key here.
|
||||
return {k: self.serializer.tag(v) for k, v in value.items()}
|
||||
|
||||
tag = to_json
|
||||
|
||||
|
||||
class TagTuple(JSONTag):
|
||||
__slots__ = ()
|
||||
key = " t"
|
||||
|
||||
def check(self, value: t.Any) -> bool:
|
||||
return isinstance(value, tuple)
|
||||
|
||||
def to_json(self, value: t.Any) -> t.Any:
|
||||
return [self.serializer.tag(item) for item in value]
|
||||
|
||||
def to_python(self, value: t.Any) -> t.Any:
|
||||
return tuple(value)
|
||||
|
||||
|
||||
class PassList(JSONTag):
|
||||
__slots__ = ()
|
||||
|
||||
def check(self, value: t.Any) -> bool:
|
||||
return isinstance(value, list)
|
||||
|
||||
def to_json(self, value: t.Any) -> t.Any:
|
||||
return [self.serializer.tag(item) for item in value]
|
||||
|
||||
tag = to_json
|
||||
|
||||
|
||||
class TagBytes(JSONTag):
|
||||
__slots__ = ()
|
||||
key = " b"
|
||||
|
||||
def check(self, value: t.Any) -> bool:
|
||||
return isinstance(value, bytes)
|
||||
|
||||
def to_json(self, value: t.Any) -> t.Any:
|
||||
return b64encode(value).decode("ascii")
|
||||
|
||||
def to_python(self, value: t.Any) -> t.Any:
|
||||
return b64decode(value)
|
||||
|
||||
|
||||
class TagMarkup(JSONTag):
|
||||
"""Serialize anything matching the :class:`~markupsafe.Markup` API by
|
||||
providing an ``__html__`` method; such objects are serialized to the
result of calling that method. Always deserializes to an instance of
:class:`~markupsafe.Markup`."""
|
||||
|
||||
__slots__ = ()
|
||||
key = " m"
|
||||
|
||||
def check(self, value: t.Any) -> bool:
|
||||
return callable(getattr(value, "__html__", None))
|
||||
|
||||
def to_json(self, value: t.Any) -> t.Any:
|
||||
return str(value.__html__())
|
||||
|
||||
def to_python(self, value: t.Any) -> t.Any:
|
||||
return Markup(value)
|
||||
|
||||
|
||||
class TagUUID(JSONTag):
|
||||
__slots__ = ()
|
||||
key = " u"
|
||||
|
||||
def check(self, value: t.Any) -> bool:
|
||||
return isinstance(value, UUID)
|
||||
|
||||
def to_json(self, value: t.Any) -> t.Any:
|
||||
return value.hex
|
||||
|
||||
def to_python(self, value: t.Any) -> t.Any:
|
||||
return UUID(value)
|
||||
|
||||
|
||||
class TagDateTime(JSONTag):
|
||||
__slots__ = ()
|
||||
key = " d"
|
||||
|
||||
def check(self, value: t.Any) -> bool:
|
||||
return isinstance(value, datetime)
|
||||
|
||||
def to_json(self, value: t.Any) -> t.Any:
|
||||
return http_date(value)
|
||||
|
||||
def to_python(self, value: t.Any) -> t.Any:
|
||||
return parse_date(value)
|
||||
|
||||
|
||||
class TaggedJSONSerializer:
|
||||
"""Serializer that uses a tag system to compactly represent objects that
|
||||
are not JSON types. Passed as the intermediate serializer to
|
||||
:class:`itsdangerous.Serializer`.
|
||||
|
||||
The following extra types are supported:
|
||||
|
||||
* :class:`dict`
|
||||
* :class:`tuple`
|
||||
* :class:`bytes`
|
||||
* :class:`~markupsafe.Markup`
|
||||
* :class:`~uuid.UUID`
|
||||
* :class:`~datetime.datetime`
|
||||
"""
|
||||
|
||||
__slots__ = ("tags", "order")
|
||||
|
||||
#: Tag classes to bind when creating the serializer. Other tags can be
|
||||
#: added later using :meth:`~register`.
|
||||
default_tags = [
|
||||
TagDict,
|
||||
PassDict,
|
||||
TagTuple,
|
||||
PassList,
|
||||
TagBytes,
|
||||
TagMarkup,
|
||||
TagUUID,
|
||||
TagDateTime,
|
||||
]
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.tags: dict[str, JSONTag] = {}
|
||||
self.order: list[JSONTag] = []
|
||||
|
||||
for cls in self.default_tags:
|
||||
self.register(cls)
|
||||
|
||||
def register(
|
||||
self,
|
||||
tag_class: type[JSONTag],
|
||||
force: bool = False,
|
||||
index: int | None = None,
|
||||
) -> None:
|
||||
"""Register a new tag with this serializer.
|
||||
|
||||
:param tag_class: tag class to register. Will be instantiated with this
|
||||
serializer instance.
|
||||
:param force: overwrite an existing tag. If false (default), a
|
||||
:exc:`KeyError` is raised.
|
||||
:param index: index to insert the new tag in the tag order. Useful when
|
||||
the new tag is a special case of an existing tag. If ``None``
|
||||
(default), the tag is appended to the end of the order.
|
||||
|
||||
:raise KeyError: if the tag key is already registered and ``force`` is
|
||||
not true.
|
||||
"""
|
||||
tag = tag_class(self)
|
||||
key = tag.key
|
||||
|
||||
if key:
|
||||
if not force and key in self.tags:
|
||||
raise KeyError(f"Tag '{key}' is already registered.")
|
||||
|
||||
self.tags[key] = tag
|
||||
|
||||
if index is None:
|
||||
self.order.append(tag)
|
||||
else:
|
||||
self.order.insert(index, tag)
|
||||
|
||||
def tag(self, value: t.Any) -> t.Any:
|
||||
"""Convert a value to a tagged representation if necessary."""
|
||||
for tag in self.order:
|
||||
if tag.check(value):
|
||||
return tag.tag(value)
|
||||
|
||||
return value
|
||||
|
||||
def untag(self, value: dict[str, t.Any]) -> t.Any:
|
||||
"""Convert a tagged representation back to the original type."""
|
||||
if len(value) != 1:
|
||||
return value
|
||||
|
||||
key = next(iter(value))
|
||||
|
||||
if key not in self.tags:
|
||||
return value
|
||||
|
||||
return self.tags[key].to_python(value[key])
|
||||
|
||||
def _untag_scan(self, value: t.Any) -> t.Any:
|
||||
if isinstance(value, dict):
|
||||
# untag each item recursively
|
||||
value = {k: self._untag_scan(v) for k, v in value.items()}
|
||||
# untag the dict itself
|
||||
value = self.untag(value)
|
||||
elif isinstance(value, list):
|
||||
# untag each item recursively
|
||||
value = [self._untag_scan(item) for item in value]
|
||||
|
||||
return value
|
||||
|
||||
def dumps(self, value: t.Any) -> str:
|
||||
"""Tag the value and dump it to a compact JSON string."""
|
||||
return dumps(self.tag(value), separators=(",", ":"))
|
||||
|
||||
    def loads(self, value: str) -> t.Any:
        """Load data from a JSON string and deserialize any tagged objects."""
|
||||
return self._untag_scan(loads(value))
|
||||
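As a usage note (not part of the vendored file): the serializer above round-trips the extra types through the ``" u"``, ``" b"``, ``" d"`` and ``" t"`` tags defined earlier in this module. A minimal sketch, assuming ``flask.json.dumps``/``loads`` work without an application context, as in recent Flask releases:

.. code-block:: python

    from datetime import datetime, timezone
    from uuid import uuid4

    from flask.json.tag import TaggedJSONSerializer

    s = TaggedJSONSerializer()
    data = {
        "id": uuid4(),                                      # tagged " u"
        "raw": b"\x00\x01",                                 # tagged " b"
        "when": datetime(2024, 1, 1, tzinfo=timezone.utc),  # tagged " d"
        "point": (1, 2),                                    # tagged " t"
    }

    text = s.dumps(data)          # compact JSON with tag wrappers
    assert s.loads(text) == data  # whole-second datetime survives http_date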
@@ -0,0 +1,79 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sys
|
||||
import typing as t
|
||||
|
||||
from werkzeug.local import LocalProxy
|
||||
|
||||
from .globals import request
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from .sansio.app import App
|
||||
|
||||
|
||||
@LocalProxy
|
||||
def wsgi_errors_stream() -> t.TextIO:
|
||||
"""Find the most appropriate error stream for the application. If a request
|
||||
is active, log to ``wsgi.errors``, otherwise use ``sys.stderr``.
|
||||
|
||||
If you configure your own :class:`logging.StreamHandler`, you may want to
|
||||
use this for the stream. If you are using file or dict configuration and
|
||||
can't import this directly, you can refer to it as
|
||||
``ext://flask.logging.wsgi_errors_stream``.
|
||||
"""
|
||||
if request:
|
||||
return request.environ["wsgi.errors"] # type: ignore[no-any-return]
|
||||
|
||||
return sys.stderr
|
||||
|
||||
|
||||
def has_level_handler(logger: logging.Logger) -> bool:
|
||||
"""Check if there is a handler in the logging chain that will handle the
|
||||
given logger's :meth:`effective level <~logging.Logger.getEffectiveLevel>`.
|
||||
"""
|
||||
level = logger.getEffectiveLevel()
|
||||
current = logger
|
||||
|
||||
while current:
|
||||
if any(handler.level <= level for handler in current.handlers):
|
||||
return True
|
||||
|
||||
if not current.propagate:
|
||||
break
|
||||
|
||||
current = current.parent # type: ignore
|
||||
|
||||
return False
|
||||
|
||||
|
||||
#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format
|
||||
#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``.
|
||||
default_handler = logging.StreamHandler(wsgi_errors_stream) # type: ignore
|
||||
default_handler.setFormatter(
|
||||
logging.Formatter("[%(asctime)s] %(levelname)s in %(module)s: %(message)s")
|
||||
)
|
||||
|
||||
|
||||
def create_logger(app: App) -> logging.Logger:
|
||||
"""Get the Flask app's logger and configure it if needed.
|
||||
|
||||
The logger name will be the same as
|
||||
:attr:`app.import_name <flask.Flask.name>`.
|
||||
|
||||
When :attr:`~flask.Flask.debug` is enabled, set the logger level to
|
||||
:data:`logging.DEBUG` if it is not set.
|
||||
|
||||
If there is no handler for the logger's effective level, add a
|
||||
:class:`~logging.StreamHandler` for
|
||||
:func:`~flask.logging.wsgi_errors_stream` with a basic format.
|
||||
"""
|
||||
logger = logging.getLogger(app.name)
|
||||
|
||||
if app.debug and not logger.level:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
if not has_level_handler(logger):
|
||||
logger.addHandler(default_handler)
|
||||
|
||||
return logger
|
||||
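For reference, the ``ext://flask.logging.wsgi_errors_stream`` hook mentioned in the docstring above is typically wired up with :func:`logging.config.dictConfig` before the app is created, so ``create_logger`` finds a handler already in place. A sketch using the same format string as ``default_handler``:

.. code-block:: python

    from logging.config import dictConfig

    dictConfig({
        "version": 1,
        "formatters": {
            "default": {
                "format": "[%(asctime)s] %(levelname)s in %(module)s: %(message)s",
            }
        },
        "handlers": {
            "wsgi": {
                "class": "logging.StreamHandler",
                "stream": "ext://flask.logging.wsgi_errors_stream",
                "formatter": "default",
            }
        },
        "root": {"level": "INFO", "handlers": ["wsgi"]},
    })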
@@ -0,0 +1,6 @@
|
||||
# Sansio

This folder contains code that can be used by alternative Flask
implementations, for example Quart. The code therefore cannot perform
any I/O, nor be part of a likely I/O path. Finally, this code cannot
use the Flask globals.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,964 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import typing as t
|
||||
from datetime import timedelta
|
||||
from itertools import chain
|
||||
|
||||
from werkzeug.exceptions import Aborter
|
||||
from werkzeug.exceptions import BadRequest
|
||||
from werkzeug.exceptions import BadRequestKeyError
|
||||
from werkzeug.routing import BuildError
|
||||
from werkzeug.routing import Map
|
||||
from werkzeug.routing import Rule
|
||||
from werkzeug.sansio.response import Response
|
||||
from werkzeug.utils import cached_property
|
||||
from werkzeug.utils import redirect as _wz_redirect
|
||||
|
||||
from .. import typing as ft
|
||||
from ..config import Config
|
||||
from ..config import ConfigAttribute
|
||||
from ..ctx import _AppCtxGlobals
|
||||
from ..helpers import _split_blueprint_path
|
||||
from ..helpers import get_debug_flag
|
||||
from ..json.provider import DefaultJSONProvider
|
||||
from ..json.provider import JSONProvider
|
||||
from ..logging import create_logger
|
||||
from ..templating import DispatchingJinjaLoader
|
||||
from ..templating import Environment
|
||||
from .scaffold import _endpoint_from_view_func
|
||||
from .scaffold import find_package
|
||||
from .scaffold import Scaffold
|
||||
from .scaffold import setupmethod
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from werkzeug.wrappers import Response as BaseResponse
|
||||
|
||||
from ..testing import FlaskClient
|
||||
from ..testing import FlaskCliRunner
|
||||
from .blueprints import Blueprint
|
||||
|
||||
T_shell_context_processor = t.TypeVar(
|
||||
"T_shell_context_processor", bound=ft.ShellContextProcessorCallable
|
||||
)
|
||||
T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable)
|
||||
T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable)
|
||||
T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable)
|
||||
T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable)
|
||||
|
||||
|
||||
def _make_timedelta(value: timedelta | int | None) -> timedelta | None:
|
||||
if value is None or isinstance(value, timedelta):
|
||||
return value
|
||||
|
||||
return timedelta(seconds=value)
|
||||
|
||||
|
||||
class App(Scaffold):
|
||||
"""The flask object implements a WSGI application and acts as the central
|
||||
object. It is passed the name of the module or package of the
|
||||
application. Once it is created it will act as a central registry for
|
||||
the view functions, the URL rules, template configuration and much more.
|
||||
|
||||
The name of the package is used to resolve resources from inside the
|
||||
package or the folder the module is contained in depending on if the
|
||||
package parameter resolves to an actual python package (a folder with
|
||||
an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file).
|
||||
|
||||
For more information about resource loading, see :func:`open_resource`.
|
||||
|
||||
Usually you create a :class:`Flask` instance in your main module or
|
||||
in the :file:`__init__.py` file of your package like this::
|
||||
|
||||
from flask import Flask
|
||||
app = Flask(__name__)
|
||||
|
||||
.. admonition:: About the First Parameter
|
||||
|
||||
The idea of the first parameter is to give Flask an idea of what
|
||||
belongs to your application. This name is used to find resources
|
||||
on the filesystem, can be used by extensions to improve debugging
|
||||
information and a lot more.
|
||||
|
||||
So it's important what you provide there. If you are using a single
|
||||
module, `__name__` is always the correct value. If you however are
|
||||
using a package, it's usually recommended to hardcode the name of
|
||||
your package there.
|
||||
|
||||
For example if your application is defined in :file:`yourapplication/app.py`
|
||||
you should create it with one of the two versions below::
|
||||
|
||||
app = Flask('yourapplication')
|
||||
app = Flask(__name__.split('.')[0])
|
||||
|
||||
Why is that? The application will work even with `__name__`, thanks
|
||||
to how resources are looked up. However it will make debugging more
|
||||
painful. Certain extensions can make assumptions based on the
|
||||
import name of your application. For example the Flask-SQLAlchemy
|
||||
extension will look for the code in your application that triggered
|
||||
an SQL query in debug mode. If the import name is not properly set
|
||||
up, that debugging information is lost. (For example it would only
|
||||
pick up SQL queries in `yourapplication.app` and not
|
||||
`yourapplication.views.frontend`)
|
||||
|
||||
.. versionadded:: 0.7
|
||||
The `static_url_path`, `static_folder`, and `template_folder`
|
||||
parameters were added.
|
||||
|
||||
.. versionadded:: 0.8
|
||||
The `instance_path` and `instance_relative_config` parameters were
|
||||
added.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
The `root_path` parameter was added.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
The ``host_matching`` and ``static_host`` parameters were added.
|
||||
|
||||
.. versionadded:: 1.0
|
||||
The ``subdomain_matching`` parameter was added. Subdomain
|
||||
matching needs to be enabled manually now. Setting
|
||||
:data:`SERVER_NAME` does not implicitly enable it.
|
||||
|
||||
:param import_name: the name of the application package
|
||||
:param static_url_path: can be used to specify a different path for the
|
||||
static files on the web. Defaults to the name
|
||||
of the `static_folder` folder.
|
||||
:param static_folder: The folder with static files that is served at
|
||||
``static_url_path``. Relative to the application ``root_path``
|
||||
or an absolute path. Defaults to ``'static'``.
|
||||
:param static_host: the host to use when adding the static route.
|
||||
Defaults to None. Required when using ``host_matching=True``
|
||||
with a ``static_folder`` configured.
|
||||
:param host_matching: set ``url_map.host_matching`` attribute.
|
||||
Defaults to False.
|
||||
:param subdomain_matching: consider the subdomain relative to
|
||||
:data:`SERVER_NAME` when matching routes. Defaults to False.
|
||||
:param template_folder: the folder that contains the templates that should
|
||||
be used by the application. Defaults to
|
||||
``'templates'`` folder in the root path of the
|
||||
application.
|
||||
:param instance_path: An alternative instance path for the application.
|
||||
By default the folder ``'instance'`` next to the
|
||||
package or module is assumed to be the instance
|
||||
path.
|
||||
:param instance_relative_config: if set to ``True`` relative filenames
|
||||
for loading the config are assumed to
|
||||
be relative to the instance path instead
|
||||
of the application root.
|
||||
:param root_path: The path to the root of the application files.
|
||||
This should only be set manually when it can't be detected
|
||||
automatically, such as for namespace packages.
|
||||
"""
|
||||
|
||||
#: The class of the object assigned to :attr:`aborter`, created by
|
||||
#: :meth:`create_aborter`. That object is called by
|
||||
#: :func:`flask.abort` to raise HTTP errors, and can be
|
||||
#: called directly as well.
|
||||
#:
|
||||
#: Defaults to :class:`werkzeug.exceptions.Aborter`.
|
||||
#:
|
||||
#: .. versionadded:: 2.2
|
||||
aborter_class = Aborter
|
||||
|
||||
#: The class that is used for the Jinja environment.
|
||||
#:
|
||||
#: .. versionadded:: 0.11
|
||||
jinja_environment = Environment
|
||||
|
||||
#: The class that is used for the :data:`~flask.g` instance.
|
||||
#:
|
||||
#: Example use cases for a custom class:
|
||||
#:
|
||||
#: 1. Store arbitrary attributes on flask.g.
|
||||
#: 2. Add a property for lazy per-request database connectors.
|
||||
#: 3. Return None instead of AttributeError on unexpected attributes.
|
||||
#: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g.
|
||||
#:
|
||||
#: In Flask 0.9 this property was called `request_globals_class` but it
|
||||
#: was changed in 0.10 to :attr:`app_ctx_globals_class` because the
|
||||
#: flask.g object is now application context scoped.
|
||||
#:
|
||||
#: .. versionadded:: 0.10
|
||||
app_ctx_globals_class = _AppCtxGlobals
|
||||
|
||||
#: The class that is used for the ``config`` attribute of this app.
|
||||
#: Defaults to :class:`~flask.Config`.
|
||||
#:
|
||||
#: Example use cases for a custom class:
|
||||
#:
|
||||
#: 1. Default values for certain config options.
|
||||
#: 2. Access to config values through attributes in addition to keys.
|
||||
#:
|
||||
#: .. versionadded:: 0.11
|
||||
config_class = Config
|
||||
|
||||
#: The testing flag. Set this to ``True`` to enable the test mode of
|
||||
#: Flask extensions (and in the future probably also Flask itself).
|
||||
#: For example this might activate test helpers that have an
|
||||
#: additional runtime cost which should not be enabled by default.
|
||||
#:
|
||||
#: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the
|
||||
#: default it's implicitly enabled.
|
||||
#:
|
||||
#: This attribute can also be configured from the config with the
|
||||
#: ``TESTING`` configuration key. Defaults to ``False``.
|
||||
testing = ConfigAttribute[bool]("TESTING")
|
||||
|
||||
#: If a secret key is set, cryptographic components can use this to
|
||||
#: sign cookies and other things. Set this to a complex random value
|
||||
#: when you want to use the secure cookie for instance.
|
||||
#:
|
||||
#: This attribute can also be configured from the config with the
|
||||
#: :data:`SECRET_KEY` configuration key. Defaults to ``None``.
|
||||
secret_key = ConfigAttribute[t.Union[str, bytes, None]]("SECRET_KEY")
|
||||
|
||||
#: A :class:`~datetime.timedelta` which is used to set the expiration
|
||||
#: date of a permanent session. The default is 31 days which makes a
|
||||
#: permanent session survive for roughly one month.
|
||||
#:
|
||||
#: This attribute can also be configured from the config with the
|
||||
#: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to
|
||||
#: ``timedelta(days=31)``
|
||||
permanent_session_lifetime = ConfigAttribute[timedelta](
|
||||
"PERMANENT_SESSION_LIFETIME",
|
||||
get_converter=_make_timedelta, # type: ignore[arg-type]
|
||||
)
|
||||
|
||||
json_provider_class: type[JSONProvider] = DefaultJSONProvider
|
||||
"""A subclass of :class:`~flask.json.provider.JSONProvider`. An
|
||||
instance is created and assigned to :attr:`app.json` when creating
|
||||
the app.
|
||||
|
||||
The default, :class:`~flask.json.provider.DefaultJSONProvider`, uses
|
||||
Python's built-in :mod:`json` library. A different provider can use
|
||||
a different JSON library.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
|
||||
#: Options that are passed to the Jinja environment in
|
||||
#: :meth:`create_jinja_environment`. Changing these options after
|
||||
#: the environment is created (accessing :attr:`jinja_env`) will
|
||||
#: have no effect.
|
||||
#:
|
||||
#: .. versionchanged:: 1.1.0
|
||||
#: This is a ``dict`` instead of an ``ImmutableDict`` to allow
|
||||
#: easier configuration.
|
||||
#:
|
||||
jinja_options: dict[str, t.Any] = {}
|
||||
|
||||
#: The rule object to use for URL rules created. This is used by
|
||||
#: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`.
|
||||
#:
|
||||
#: .. versionadded:: 0.7
|
||||
url_rule_class = Rule
|
||||
|
||||
#: The map object to use for storing the URL rules and routing
|
||||
#: configuration parameters. Defaults to :class:`werkzeug.routing.Map`.
|
||||
#:
|
||||
#: .. versionadded:: 1.1.0
|
||||
url_map_class = Map
|
||||
|
||||
#: The :meth:`test_client` method creates an instance of this test
|
||||
#: client class. Defaults to :class:`~flask.testing.FlaskClient`.
|
||||
#:
|
||||
#: .. versionadded:: 0.7
|
||||
test_client_class: type[FlaskClient] | None = None
|
||||
|
||||
#: The :class:`~click.testing.CliRunner` subclass, by default
|
||||
#: :class:`~flask.testing.FlaskCliRunner` that is used by
|
||||
#: :meth:`test_cli_runner`. Its ``__init__`` method should take a
|
||||
#: Flask app object as the first argument.
|
||||
#:
|
||||
#: .. versionadded:: 1.0
|
||||
test_cli_runner_class: type[FlaskCliRunner] | None = None
|
||||
|
||||
default_config: dict[str, t.Any]
|
||||
response_class: type[Response]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
import_name: str,
|
||||
static_url_path: str | None = None,
|
||||
static_folder: str | os.PathLike[str] | None = "static",
|
||||
static_host: str | None = None,
|
||||
host_matching: bool = False,
|
||||
subdomain_matching: bool = False,
|
||||
template_folder: str | os.PathLike[str] | None = "templates",
|
||||
instance_path: str | None = None,
|
||||
instance_relative_config: bool = False,
|
||||
root_path: str | None = None,
|
||||
):
|
||||
super().__init__(
|
||||
import_name=import_name,
|
||||
static_folder=static_folder,
|
||||
static_url_path=static_url_path,
|
||||
template_folder=template_folder,
|
||||
root_path=root_path,
|
||||
)
|
||||
|
||||
if instance_path is None:
|
||||
instance_path = self.auto_find_instance_path()
|
||||
elif not os.path.isabs(instance_path):
|
||||
raise ValueError(
|
||||
"If an instance path is provided it must be absolute."
|
||||
" A relative path was given instead."
|
||||
)
|
||||
|
||||
#: Holds the path to the instance folder.
|
||||
#:
|
||||
#: .. versionadded:: 0.8
|
||||
self.instance_path = instance_path
|
||||
|
||||
#: The configuration dictionary as :class:`Config`. This behaves
|
||||
#: exactly like a regular dictionary but supports additional methods
|
||||
#: to load a config from files.
|
||||
self.config = self.make_config(instance_relative_config)
|
||||
|
||||
#: An instance of :attr:`aborter_class` created by
|
||||
#: :meth:`make_aborter`. This is called by :func:`flask.abort`
|
||||
#: to raise HTTP errors, and can be called directly as well.
|
||||
#:
|
||||
#: .. versionadded:: 2.2
|
||||
#: Moved from ``flask.abort``, which calls this object.
|
||||
self.aborter = self.make_aborter()
|
||||
|
||||
self.json: JSONProvider = self.json_provider_class(self)
|
||||
"""Provides access to JSON methods. Functions in ``flask.json``
|
||||
will call methods on this provider when the application context
|
||||
is active. Used for handling JSON requests and responses.
|
||||
|
||||
An instance of :attr:`json_provider_class`. Can be customized by
|
||||
changing that attribute on a subclass, or by assigning to this
|
||||
attribute afterwards.
|
||||
|
||||
The default, :class:`~flask.json.provider.DefaultJSONProvider`,
|
||||
uses Python's built-in :mod:`json` library. A different provider
|
||||
can use a different JSON library.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
|
||||
#: A list of functions that are called by
|
||||
#: :meth:`handle_url_build_error` when :meth:`.url_for` raises a
|
||||
#: :exc:`~werkzeug.routing.BuildError`. Each function is called
|
||||
#: with ``error``, ``endpoint`` and ``values``. If a function
|
||||
#: returns ``None`` or raises a ``BuildError``, it is skipped.
|
||||
#: Otherwise, its return value is returned by ``url_for``.
|
||||
#:
|
||||
#: .. versionadded:: 0.9
|
||||
self.url_build_error_handlers: list[
|
||||
t.Callable[[Exception, str, dict[str, t.Any]], str]
|
||||
] = []
|
||||
|
||||
#: A list of functions that are called when the application context
|
||||
#: is destroyed. Since the application context is also torn down
|
||||
#: if the request ends this is the place to store code that disconnects
|
||||
#: from databases.
|
||||
#:
|
||||
#: .. versionadded:: 0.9
|
||||
self.teardown_appcontext_funcs: list[ft.TeardownCallable] = []
|
||||
|
||||
#: A list of shell context processor functions that should be run
|
||||
#: when a shell context is created.
|
||||
#:
|
||||
#: .. versionadded:: 0.11
|
||||
self.shell_context_processors: list[ft.ShellContextProcessorCallable] = []
|
||||
|
||||
#: Maps registered blueprint names to blueprint objects. The
|
||||
#: dict retains the order the blueprints were registered in.
|
||||
#: Blueprints can be registered multiple times, this dict does
|
||||
#: not track how often they were attached.
|
||||
#:
|
||||
#: .. versionadded:: 0.7
|
||||
self.blueprints: dict[str, Blueprint] = {}
|
||||
|
||||
#: a place where extensions can store application specific state. For
|
||||
#: example this is where an extension could store database engines and
|
||||
#: similar things.
|
||||
#:
|
||||
#: The key must match the name of the extension module. For example in
|
||||
#: case of a "Flask-Foo" extension in `flask_foo`, the key would be
|
||||
#: ``'foo'``.
|
||||
#:
|
||||
#: .. versionadded:: 0.7
|
||||
self.extensions: dict[str, t.Any] = {}
|
||||
|
||||
#: The :class:`~werkzeug.routing.Map` for this instance. You can use
|
||||
#: this to change the routing converters after the class was created
|
||||
#: but before any routes are connected. Example::
|
||||
#:
|
||||
#: from werkzeug.routing import BaseConverter
|
||||
#:
|
||||
#: class ListConverter(BaseConverter):
|
||||
#: def to_python(self, value):
|
||||
#: return value.split(',')
|
||||
#: def to_url(self, values):
|
||||
#: return ','.join(super(ListConverter, self).to_url(value)
|
||||
#: for value in values)
|
||||
#:
|
||||
#: app = Flask(__name__)
|
||||
#: app.url_map.converters['list'] = ListConverter
|
||||
self.url_map = self.url_map_class(host_matching=host_matching)
|
||||
|
||||
self.subdomain_matching = subdomain_matching
|
||||
|
||||
# tracks internally if the application already handled at least one
|
||||
# request.
|
||||
self._got_first_request = False
|
||||
|
||||
def _check_setup_finished(self, f_name: str) -> None:
|
||||
if self._got_first_request:
|
||||
raise AssertionError(
|
||||
f"The setup method '{f_name}' can no longer be called"
|
||||
" on the application. It has already handled its first"
|
||||
" request, any changes will not be applied"
|
||||
" consistently.\n"
|
||||
"Make sure all imports, decorators, functions, etc."
|
||||
" needed to set up the application are done before"
|
||||
" running it."
|
||||
)
|
||||
|
||||
@cached_property
|
||||
def name(self) -> str: # type: ignore
|
||||
"""The name of the application. This is usually the import name
|
||||
with the difference that it's guessed from the run file if the
|
||||
import name is ``__main__``. This name is used as a display name when
|
||||
Flask needs the name of the application. It can be set and overridden
|
||||
to change the value.
|
||||
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
if self.import_name == "__main__":
|
||||
fn: str | None = getattr(sys.modules["__main__"], "__file__", None)
|
||||
if fn is None:
|
||||
return "__main__"
|
||||
return os.path.splitext(os.path.basename(fn))[0]
|
||||
return self.import_name
|
||||
|
||||
@cached_property
|
||||
def logger(self) -> logging.Logger:
|
||||
"""A standard Python :class:`~logging.Logger` for the app, with
|
||||
the same name as :attr:`name`.
|
||||
|
||||
In debug mode, the logger's :attr:`~logging.Logger.level` will
|
||||
be set to :data:`~logging.DEBUG`.
|
||||
|
||||
If there are no handlers configured, a default handler will be
|
||||
added. See :doc:`/logging` for more information.
|
||||
|
||||
.. versionchanged:: 1.1.0
|
||||
The logger takes the same name as :attr:`name` rather than
|
||||
hard-coding ``"flask.app"``.
|
||||
|
||||
.. versionchanged:: 1.0.0
|
||||
Behavior was simplified. The logger is always named
|
||||
``"flask.app"``. The level is only set during configuration,
|
||||
it doesn't check ``app.debug`` each time. Only one format is
|
||||
used, not different ones depending on ``app.debug``. No
|
||||
handlers are removed, and a handler is only added if no
|
||||
handlers are already configured.
|
||||
|
||||
.. versionadded:: 0.3
|
||||
"""
|
||||
return create_logger(self)
|
||||
|
||||
@cached_property
|
||||
def jinja_env(self) -> Environment:
|
||||
"""The Jinja environment used to load templates.
|
||||
|
||||
The environment is created the first time this property is
|
||||
accessed. Changing :attr:`jinja_options` after that will have no
|
||||
effect.
|
||||
"""
|
||||
return self.create_jinja_environment()
|
||||
|
||||
def create_jinja_environment(self) -> Environment:
|
||||
raise NotImplementedError()
|
||||
|
||||
def make_config(self, instance_relative: bool = False) -> Config:
|
||||
"""Used to create the config attribute by the Flask constructor.
|
||||
The `instance_relative` parameter is passed in from the constructor
|
||||
of Flask (there named `instance_relative_config`) and indicates if
|
||||
the config should be relative to the instance path or the root path
|
||||
of the application.
|
||||
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
root_path = self.root_path
|
||||
if instance_relative:
|
||||
root_path = self.instance_path
|
||||
defaults = dict(self.default_config)
|
||||
defaults["DEBUG"] = get_debug_flag()
|
||||
return self.config_class(root_path, defaults)
|
||||
|
||||
def make_aborter(self) -> Aborter:
|
||||
"""Create the object to assign to :attr:`aborter`. That object
|
||||
is called by :func:`flask.abort` to raise HTTP errors, and can
|
||||
be called directly as well.
|
||||
|
||||
By default, this creates an instance of :attr:`aborter_class`,
|
||||
which defaults to :class:`werkzeug.exceptions.Aborter`.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
return self.aborter_class()
|
||||
|
||||
def auto_find_instance_path(self) -> str:
|
||||
"""Tries to locate the instance path if it was not provided to the
|
||||
constructor of the application class. It will basically calculate
|
||||
the path to a folder named ``instance`` next to your main file or
|
||||
the package.
|
||||
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
prefix, package_path = find_package(self.import_name)
|
||||
if prefix is None:
|
||||
return os.path.join(package_path, "instance")
|
||||
return os.path.join(prefix, "var", f"{self.name}-instance")
|
||||
|
||||
def create_global_jinja_loader(self) -> DispatchingJinjaLoader:
|
||||
"""Creates the loader for the Jinja2 environment. Can be used to
|
||||
override just the loader while keeping the rest unchanged. Overriding
this function is discouraged; override the :meth:`jinja_loader`
function instead.
|
||||
|
||||
The global loader dispatches between the loaders of the application
|
||||
and the individual blueprints.
|
||||
|
||||
.. versionadded:: 0.7
|
||||
"""
|
||||
return DispatchingJinjaLoader(self)
|
||||
|
||||
def select_jinja_autoescape(self, filename: str) -> bool:
|
||||
"""Returns ``True`` if autoescaping should be active for the given
|
||||
template name. If no template name is given, returns `True`.
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
Autoescaping is now enabled by default for ``.svg`` files.
|
||||
|
||||
.. versionadded:: 0.5
|
||||
"""
|
||||
if filename is None:
|
||||
return True
|
||||
return filename.endswith((".html", ".htm", ".xml", ".xhtml", ".svg"))
|
||||
|
||||
@property
|
||||
def debug(self) -> bool:
|
||||
"""Whether debug mode is enabled. When using ``flask run`` to start the
|
||||
development server, an interactive debugger will be shown for unhandled
|
||||
exceptions, and the server will be reloaded when code changes. This maps to the
|
||||
:data:`DEBUG` config key. It may not behave as expected if set late.
|
||||
|
||||
**Do not enable debug mode when deploying in production.**
|
||||
|
||||
Default: ``False``
|
||||
"""
|
||||
return self.config["DEBUG"] # type: ignore[no-any-return]
|
||||
|
||||
@debug.setter
|
||||
def debug(self, value: bool) -> None:
|
||||
self.config["DEBUG"] = value
|
||||
|
||||
if self.config["TEMPLATES_AUTO_RELOAD"] is None:
|
||||
self.jinja_env.auto_reload = value
|
||||
|
||||
@setupmethod
|
||||
def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None:
|
||||
"""Register a :class:`~flask.Blueprint` on the application. Keyword
|
||||
arguments passed to this method will override the defaults set on the
|
||||
blueprint.
|
||||
|
||||
Calls the blueprint's :meth:`~flask.Blueprint.register` method after
|
||||
recording the blueprint in the application's :attr:`blueprints`.
|
||||
|
||||
:param blueprint: The blueprint to register.
|
||||
:param url_prefix: Blueprint routes will be prefixed with this.
|
||||
:param subdomain: Blueprint routes will match on this subdomain.
|
||||
:param url_defaults: Blueprint routes will use these default values for
|
||||
view arguments.
|
||||
:param options: Additional keyword arguments are passed to
|
||||
:class:`~flask.blueprints.BlueprintSetupState`. They can be
|
||||
accessed in :meth:`~flask.Blueprint.record` callbacks.
|
||||
|
||||
.. versionchanged:: 2.0.1
|
||||
The ``name`` option can be used to change the (pre-dotted)
|
||||
name the blueprint is registered with. This allows the same
|
||||
blueprint to be registered multiple times with unique names
|
||||
for ``url_for``.
|
||||
|
||||
.. versionadded:: 0.7
|
||||
"""
|
||||
blueprint.register(self, options)
|
||||
|
||||
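To illustrate the docstring above: a minimal sketch (not part of this diff) that registers a blueprint and overrides its ``url_prefix`` at registration time, assuming the standard ``flask.Flask``/``flask.Blueprint`` wrappers around these sansio classes:

.. code-block:: python

    from flask import Blueprint, Flask

    bp = Blueprint("admin", __name__, url_prefix="/admin")

    @bp.route("/dashboard")
    def dashboard():
        return "admin dashboard"

    app = Flask(__name__)
    # Keyword arguments override the defaults set on the blueprint, so this
    # route is served at /manage/dashboard instead of /admin/dashboard.
    app.register_blueprint(bp, url_prefix="/manage")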
    def iter_blueprints(self) -> t.ValuesView[Blueprint]:
        """Iterates over all blueprints in the order they were registered.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
return self.blueprints.values()
|
||||
|
||||
@setupmethod
|
||||
def add_url_rule(
|
||||
self,
|
||||
rule: str,
|
||||
endpoint: str | None = None,
|
||||
view_func: ft.RouteCallable | None = None,
|
||||
provide_automatic_options: bool | None = None,
|
||||
**options: t.Any,
|
||||
) -> None:
|
||||
if endpoint is None:
|
||||
endpoint = _endpoint_from_view_func(view_func) # type: ignore
|
||||
options["endpoint"] = endpoint
|
||||
methods = options.pop("methods", None)
|
||||
|
||||
# if the methods are not given and the view_func object knows its
|
||||
# methods we can use that instead. If neither exists, we go with
|
||||
# a tuple of only ``GET`` as default.
|
||||
if methods is None:
|
||||
methods = getattr(view_func, "methods", None) or ("GET",)
|
||||
if isinstance(methods, str):
|
||||
raise TypeError(
|
||||
"Allowed methods must be a list of strings, for"
|
||||
' example: @app.route(..., methods=["POST"])'
|
||||
)
|
||||
methods = {item.upper() for item in methods}
|
||||
|
||||
# Methods that should always be added
|
||||
required_methods = set(getattr(view_func, "required_methods", ()))
|
||||
|
||||
# starting with Flask 0.8 the view_func object can disable and
|
||||
# force-enable the automatic options handling.
|
||||
if provide_automatic_options is None:
|
||||
provide_automatic_options = getattr(
|
||||
view_func, "provide_automatic_options", None
|
||||
)
|
||||
|
||||
if provide_automatic_options is None:
|
||||
if "OPTIONS" not in methods:
|
||||
provide_automatic_options = True
|
||||
required_methods.add("OPTIONS")
|
||||
else:
|
||||
provide_automatic_options = False
|
||||
|
||||
# Add the required methods now.
|
||||
methods |= required_methods
|
||||
|
||||
rule_obj = self.url_rule_class(rule, methods=methods, **options)
|
||||
rule_obj.provide_automatic_options = provide_automatic_options # type: ignore[attr-defined]
|
||||
|
||||
self.url_map.add(rule_obj)
|
||||
if view_func is not None:
|
||||
old_func = self.view_functions.get(endpoint)
|
||||
if old_func is not None and old_func != view_func:
|
||||
raise AssertionError(
|
||||
"View function mapping is overwriting an existing"
|
||||
f" endpoint function: {endpoint}"
|
||||
)
|
||||
self.view_functions[endpoint] = view_func
|
||||
|
||||
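As a usage note for ``add_url_rule`` (a sketch, not part of the vendored file): the ``route`` decorator is a thin wrapper that ends up calling this method with the function name as the endpoint.

.. code-block:: python

    from flask import Flask

    app = Flask(__name__)

    def index():
        return "Hello!"

    # Explicit registration; equivalent to decorating index() with
    # @app.route("/", methods=["GET"]).
    app.add_url_rule("/", "index", index, methods=["GET"])

    @app.route("/hello", methods=["GET", "POST"])
    def hello():
        return "Hi!"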
@setupmethod
|
||||
def template_filter(
|
||||
self, name: str | None = None
|
||||
    ) -> t.Callable[[T_template_filter], T_template_filter]:
        """A decorator that is used to register a custom template filter.
|
||||
You can specify a name for the filter, otherwise the function
|
||||
name will be used. Example::
|
||||
|
||||
@app.template_filter()
|
||||
def reverse(s):
|
||||
return s[::-1]
|
||||
|
||||
:param name: the optional name of the filter, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
|
||||
def decorator(f: T_template_filter) -> T_template_filter:
|
||||
self.add_template_filter(f, name=name)
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
@setupmethod
|
||||
def add_template_filter(
|
||||
self, f: ft.TemplateFilterCallable, name: str | None = None
|
||||
) -> None:
|
||||
"""Register a custom template filter. Works exactly like the
|
||||
:meth:`template_filter` decorator.
|
||||
|
||||
:param name: the optional name of the filter, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
self.jinja_env.filters[name or f.__name__] = f
|
||||
|
||||
@setupmethod
|
||||
def template_test(
|
||||
self, name: str | None = None
|
||||
    ) -> t.Callable[[T_template_test], T_template_test]:
        """A decorator that is used to register a custom template test.
|
||||
You can specify a name for the test, otherwise the function
|
||||
name will be used. Example::
|
||||
|
||||
@app.template_test()
|
||||
def is_prime(n):
|
||||
if n == 2:
|
||||
return True
|
||||
for i in range(2, int(math.ceil(math.sqrt(n))) + 1):
|
||||
if n % i == 0:
|
||||
return False
|
||||
return True
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the test, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
|
||||
def decorator(f: T_template_test) -> T_template_test:
|
||||
self.add_template_test(f, name=name)
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
@setupmethod
|
||||
def add_template_test(
|
||||
self, f: ft.TemplateTestCallable, name: str | None = None
|
||||
) -> None:
|
||||
"""Register a custom template test. Works exactly like the
|
||||
:meth:`template_test` decorator.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the test, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
self.jinja_env.tests[name or f.__name__] = f
|
||||
|
||||
@setupmethod
|
||||
def template_global(
|
||||
self, name: str | None = None
|
||||
) -> t.Callable[[T_template_global], T_template_global]:
|
||||
"""A decorator that is used to register a custom template global function.
|
||||
You can specify a name for the global function, otherwise the function
|
||||
name will be used. Example::
|
||||
|
||||
@app.template_global()
|
||||
def double(n):
|
||||
return 2 * n
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the global function, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
|
||||
def decorator(f: T_template_global) -> T_template_global:
|
||||
self.add_template_global(f, name=name)
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
@setupmethod
|
||||
def add_template_global(
|
||||
self, f: ft.TemplateGlobalCallable, name: str | None = None
|
||||
) -> None:
|
||||
"""Register a custom template global function. Works exactly like the
|
||||
:meth:`template_global` decorator.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the global function, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
self.jinja_env.globals[name or f.__name__] = f
|
||||
|
||||
@setupmethod
|
||||
def teardown_appcontext(self, f: T_teardown) -> T_teardown:
|
||||
"""Registers a function to be called when the application
|
||||
context is popped. The application context is typically popped
|
||||
after the request context for each request, at the end of CLI
|
||||
commands, or after a manually pushed context ends.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
with app.app_context():
|
||||
...
|
||||
|
||||
When the ``with`` block exits (or ``ctx.pop()`` is called), the
|
||||
teardown functions are called just before the app context is
|
||||
made inactive. Since a request context typically also manages an
|
||||
application context it would also be called when you pop a
|
||||
request context.
|
||||
|
||||
When a teardown function was called because of an unhandled
|
||||
exception it will be passed an error object. If an
|
||||
:meth:`errorhandler` is registered, it will handle the exception
|
||||
and the teardown will not receive it.
|
||||
|
||||
Teardown functions must avoid raising exceptions. If they
|
||||
execute code that might fail they must surround that code with a
|
||||
``try``/``except`` block and log any errors.
|
||||
|
||||
The return values of teardown functions are ignored.
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
self.teardown_appcontext_funcs.append(f)
|
||||
return f
|
||||
|
||||
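A minimal sketch of the database-disconnect use case described above, assuming a hypothetical connection object stored on ``flask.g`` by the view code:

.. code-block:: python

    from flask import Flask, g

    app = Flask(__name__)

    @app.teardown_appcontext
    def close_db(exc):
        # exc is the unhandled exception, if any, otherwise None.
        # Return values are ignored; avoid raising here.
        db = g.pop("db", None)  # hypothetical connection set by a view
        if db is not None:
            db.close()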
@setupmethod
|
||||
def shell_context_processor(
|
||||
self, f: T_shell_context_processor
|
||||
) -> T_shell_context_processor:
|
||||
"""Registers a shell context processor function.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
self.shell_context_processors.append(f)
|
||||
return f
|
||||
|
||||
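A minimal sketch of a shell context processor, assuming an ``app`` created as in the earlier examples; objects in the returned dict are pre-imported into ``flask shell``:

.. code-block:: python

    @app.shell_context_processor
    def make_shell_context():
        # Everything returned here is available in the `flask shell`
        # session without manual imports.
        return {"app": app}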
def _find_error_handler(
|
||||
self, e: Exception, blueprints: list[str]
|
||||
) -> ft.ErrorHandlerCallable | None:
|
||||
"""Return a registered error handler for an exception in this order:
|
||||
blueprint handler for a specific code, app handler for a specific code,
|
||||
blueprint handler for an exception class, app handler for an exception
|
||||
class, or ``None`` if a suitable handler is not found.
|
||||
"""
|
||||
exc_class, code = self._get_exc_class_and_code(type(e))
|
||||
names = (*blueprints, None)
|
||||
|
||||
for c in (code, None) if code is not None else (None,):
|
||||
for name in names:
|
||||
handler_map = self.error_handler_spec[name][c]
|
||||
|
||||
if not handler_map:
|
||||
continue
|
||||
|
||||
for cls in exc_class.__mro__:
|
||||
handler = handler_map.get(cls)
|
||||
|
||||
if handler is not None:
|
||||
return handler
|
||||
return None
|
||||
|
||||
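To make the lookup order above concrete, a hedged sketch registering handlers at both levels: the blueprint handler for a specific code wins for errors raised from that blueprint's views, and the app-level class handler is the fallback.

.. code-block:: python

    from flask import Blueprint, Flask
    from werkzeug.exceptions import NotFound

    app = Flask(__name__)
    bp = Blueprint("api", __name__, url_prefix="/api")

    @bp.errorhandler(404)
    def api_not_found(e):
        # Specific code, blueprint level: checked first for errors
        # raised from this blueprint's views.
        return {"error": "api resource missing"}, 404

    @app.errorhandler(NotFound)
    def app_not_found(e):
        # Exception class, app level: used when nothing more specific matches.
        return {"error": "not found"}, 404

    app.register_blueprint(bp)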
def trap_http_exception(self, e: Exception) -> bool:
|
||||
"""Checks if an HTTP exception should be trapped or not. By default
|
||||
this will return ``False`` for all exceptions except for a bad request
|
||||
key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It
|
||||
also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``.
|
||||
|
||||
This is called for all HTTP exceptions raised by a view function.
|
||||
If it returns ``True`` for any exception the error handler for this
|
||||
exception is not called and it shows up as a regular exception in the
|
||||
traceback. This is helpful for debugging implicitly raised HTTP
|
||||
exceptions.
|
||||
|
||||
.. versionchanged:: 1.0
|
||||
Bad request errors are not trapped by default in debug mode.
|
||||
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
if self.config["TRAP_HTTP_EXCEPTIONS"]:
|
||||
return True
|
||||
|
||||
trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"]
|
||||
|
||||
# if unset, trap key errors in debug mode
|
||||
if (
|
||||
trap_bad_request is None
|
||||
and self.debug
|
||||
and isinstance(e, BadRequestKeyError)
|
||||
):
|
||||
return True
|
||||
|
||||
if trap_bad_request:
|
||||
return isinstance(e, BadRequest)
|
||||
|
||||
return False
|
||||
|
||||
def should_ignore_error(self, error: BaseException | None) -> bool:
|
||||
"""This is called to figure out if an error should be ignored
|
||||
or not as far as the teardown system is concerned. If this
|
||||
function returns ``True`` then the teardown handlers will not be
|
||||
passed the error.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
"""
|
||||
return False
|
||||
|
||||
def redirect(self, location: str, code: int = 302) -> BaseResponse:
|
||||
"""Create a redirect response object.
|
||||
|
||||
This is called by :func:`flask.redirect`, and can be called
|
||||
directly as well.
|
||||
|
||||
:param location: The URL to redirect to.
|
||||
:param code: The status code for the redirect.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
Moved from ``flask.redirect``, which calls this method.
|
||||
"""
|
||||
return _wz_redirect(
|
||||
location,
|
||||
code=code,
|
||||
Response=self.response_class, # type: ignore[arg-type]
|
||||
)
|
||||
|
||||
def inject_url_defaults(self, endpoint: str, values: dict[str, t.Any]) -> None:
|
||||
"""Injects the URL defaults for the given endpoint directly into
|
||||
the values dictionary passed. This is used internally and
|
||||
automatically called on URL building.
|
||||
|
||||
.. versionadded:: 0.7
|
||||
"""
|
||||
names: t.Iterable[str | None] = (None,)
|
||||
|
||||
# url_for may be called outside a request context, parse the
|
||||
# passed endpoint instead of using request.blueprints.
|
||||
if "." in endpoint:
|
||||
names = chain(
|
||||
names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0]))
|
||||
)
|
||||
|
||||
for name in names:
|
||||
if name in self.url_default_functions:
|
||||
for func in self.url_default_functions[name]:
|
||||
func(endpoint, values)
|
||||
|
||||
def handle_url_build_error(
|
||||
self, error: BuildError, endpoint: str, values: dict[str, t.Any]
|
||||
) -> str:
|
||||
"""Called by :meth:`.url_for` if a
|
||||
:exc:`~werkzeug.routing.BuildError` was raised. If this returns
|
||||
a value, it will be returned by ``url_for``, otherwise the error
|
||||
will be re-raised.
|
||||
|
||||
Each function in :attr:`url_build_error_handlers` is called with
|
||||
``error``, ``endpoint`` and ``values``. If a function returns
|
||||
``None`` or raises a ``BuildError``, it is skipped. Otherwise,
|
||||
its return value is returned by ``url_for``.
|
||||
|
||||
:param error: The active ``BuildError`` being handled.
|
||||
:param endpoint: The endpoint being built.
|
||||
:param values: The keyword arguments passed to ``url_for``.
|
||||
"""
|
||||
for handler in self.url_build_error_handlers:
|
||||
try:
|
||||
rv = handler(error, endpoint, values)
|
||||
except BuildError as e:
|
||||
# make error available outside except block
|
||||
error = e
|
||||
else:
|
||||
if rv is not None:
|
||||
return rv
|
||||
|
||||
# Re-raise if called with an active exception, otherwise raise
|
||||
# the passed in exception.
|
||||
if error is sys.exc_info()[1]:
|
||||
raise
|
||||
|
||||
raise error
|
||||
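A sketch of a build-error fallback as described in the docstring above, assuming the ``app`` from the earlier examples; the ``"docs"`` endpoint name is purely illustrative:

.. code-block:: python

    def docs_url_fallback(error, endpoint, values):
        # Each handler receives (error, endpoint, values). Returning a
        # string becomes the url_for() result; returning None (or raising
        # BuildError) skips this handler.
        if endpoint == "docs":  # hypothetical endpoint
            return "https://flask.palletsprojects.com/"
        return None

    app.url_build_error_handlers.append(docs_url_fallback)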
@@ -0,0 +1,632 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import typing as t
|
||||
from collections import defaultdict
|
||||
from functools import update_wrapper
|
||||
|
||||
from .. import typing as ft
|
||||
from .scaffold import _endpoint_from_view_func
|
||||
from .scaffold import _sentinel
|
||||
from .scaffold import Scaffold
|
||||
from .scaffold import setupmethod
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from .app import App
|
||||
|
||||
DeferredSetupFunction = t.Callable[["BlueprintSetupState"], None]
|
||||
T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable[t.Any])
|
||||
T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable)
|
||||
T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable)
|
||||
T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable)
|
||||
T_template_context_processor = t.TypeVar(
|
||||
"T_template_context_processor", bound=ft.TemplateContextProcessorCallable
|
||||
)
|
||||
T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable)
|
||||
T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable)
|
||||
T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable)
|
||||
T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable)
|
||||
T_url_value_preprocessor = t.TypeVar(
|
||||
"T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable
|
||||
)
|
||||
|
||||
|
||||
class BlueprintSetupState:
|
||||
"""Temporary holder object for registering a blueprint with the
|
||||
application. An instance of this class is created by the
|
||||
:meth:`~flask.Blueprint.make_setup_state` method and later passed
|
||||
to all register callback functions.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
blueprint: Blueprint,
|
||||
app: App,
|
||||
options: t.Any,
|
||||
first_registration: bool,
|
||||
) -> None:
|
||||
#: a reference to the current application
|
||||
self.app = app
|
||||
|
||||
#: a reference to the blueprint that created this setup state.
|
||||
self.blueprint = blueprint
|
||||
|
||||
#: a dictionary with all options that were passed to the
|
||||
#: :meth:`~flask.Flask.register_blueprint` method.
|
||||
self.options = options
|
||||
|
||||
#: as blueprints can be registered multiple times with the
|
||||
#: application and not everything wants to be registered
|
||||
#: multiple times on it, this attribute can be used to figure
|
||||
#: out if the blueprint was registered in the past already.
|
||||
self.first_registration = first_registration
|
||||
|
||||
subdomain = self.options.get("subdomain")
|
||||
if subdomain is None:
|
||||
subdomain = self.blueprint.subdomain
|
||||
|
||||
#: The subdomain that the blueprint should be active for, ``None``
|
||||
#: otherwise.
|
||||
self.subdomain = subdomain
|
||||
|
||||
url_prefix = self.options.get("url_prefix")
|
||||
if url_prefix is None:
|
||||
url_prefix = self.blueprint.url_prefix
|
||||
#: The prefix that should be used for all URLs defined on the
|
||||
#: blueprint.
|
||||
self.url_prefix = url_prefix
|
||||
|
||||
self.name = self.options.get("name", blueprint.name)
|
||||
self.name_prefix = self.options.get("name_prefix", "")
|
||||
|
||||
#: A dictionary with URL defaults that is added to each and every
|
||||
#: URL that was defined with the blueprint.
|
||||
self.url_defaults = dict(self.blueprint.url_values_defaults)
|
||||
self.url_defaults.update(self.options.get("url_defaults", ()))
|
||||
|
||||
def add_url_rule(
|
||||
self,
|
||||
rule: str,
|
||||
endpoint: str | None = None,
|
||||
view_func: ft.RouteCallable | None = None,
|
||||
**options: t.Any,
|
||||
) -> None:
|
||||
"""A helper method to register a rule (and optionally a view function)
|
||||
to the application. The endpoint is automatically prefixed with the
|
||||
blueprint's name.
|
||||
"""
|
||||
if self.url_prefix is not None:
|
||||
if rule:
|
||||
rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/")))
|
||||
else:
|
||||
rule = self.url_prefix
|
||||
options.setdefault("subdomain", self.subdomain)
|
||||
if endpoint is None:
|
||||
endpoint = _endpoint_from_view_func(view_func) # type: ignore
|
||||
defaults = self.url_defaults
|
||||
if "defaults" in options:
|
||||
defaults = dict(defaults, **options.pop("defaults"))
|
||||
|
||||
self.app.add_url_rule(
|
||||
rule,
|
||||
f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."),
|
||||
view_func,
|
||||
defaults=defaults,
|
||||
**options,
|
||||
)
|
||||
|
||||
|
||||
class Blueprint(Scaffold):
|
||||
"""Represents a blueprint, a collection of routes and other
|
||||
app-related functions that can be registered on a real application
|
||||
later.
|
||||
|
||||
A blueprint is an object that allows defining application functions
|
||||
without requiring an application object ahead of time. It uses the
|
||||
same decorators as :class:`~flask.Flask`, but defers the need for an
|
||||
application by recording them for later registration.
|
||||
|
||||
Decorating a function with a blueprint creates a deferred function
|
||||
that is called with :class:`~flask.blueprints.BlueprintSetupState`
|
||||
when the blueprint is registered on an application.
|
||||
|
||||
See :doc:`/blueprints` for more information.
|
||||
|
||||
:param name: The name of the blueprint. Will be prepended to each
|
||||
endpoint name.
|
||||
:param import_name: The name of the blueprint package, usually
|
||||
``__name__``. This helps locate the ``root_path`` for the
|
||||
blueprint.
|
||||
:param static_folder: A folder with static files that should be
|
||||
served by the blueprint's static route. The path is relative to
|
||||
the blueprint's root path. Blueprint static files are disabled
|
||||
by default.
|
||||
:param static_url_path: The url to serve static files from.
|
||||
Defaults to ``static_folder``. If the blueprint does not have
|
||||
a ``url_prefix``, the app's static route will take precedence,
|
||||
and the blueprint's static files won't be accessible.
|
||||
:param template_folder: A folder with templates that should be added
|
||||
to the app's template search path. The path is relative to the
|
||||
blueprint's root path. Blueprint templates are disabled by
|
||||
default. Blueprint templates have a lower precedence than those
|
||||
in the app's templates folder.
|
||||
:param url_prefix: A path to prepend to all of the blueprint's URLs,
|
||||
to make them distinct from the rest of the app's routes.
|
||||
:param subdomain: A subdomain that blueprint routes will match on by
|
||||
default.
|
||||
:param url_defaults: A dict of default values that blueprint routes
|
||||
will receive by default.
|
||||
:param root_path: By default, the blueprint will automatically set
|
||||
this based on ``import_name``. In certain situations this
|
||||
automatic detection can fail, so the path can be specified
|
||||
manually instead.
|
||||
|
||||
.. versionchanged:: 1.1.0
|
||||
Blueprints have a ``cli`` group to register nested CLI commands.
|
||||
The ``cli_group`` parameter controls the name of the group under
|
||||
the ``flask`` command.
|
||||
|
||||
.. versionadded:: 0.7
|
||||
"""
|
||||
|
||||
_got_registered_once = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
import_name: str,
|
||||
static_folder: str | os.PathLike[str] | None = None,
|
||||
static_url_path: str | None = None,
|
||||
template_folder: str | os.PathLike[str] | None = None,
|
||||
url_prefix: str | None = None,
|
||||
subdomain: str | None = None,
|
||||
url_defaults: dict[str, t.Any] | None = None,
|
||||
root_path: str | None = None,
|
||||
cli_group: str | None = _sentinel, # type: ignore[assignment]
|
||||
):
|
||||
super().__init__(
|
||||
import_name=import_name,
|
||||
static_folder=static_folder,
|
||||
static_url_path=static_url_path,
|
||||
template_folder=template_folder,
|
||||
root_path=root_path,
|
||||
)
|
||||
|
||||
if not name:
|
||||
raise ValueError("'name' may not be empty.")
|
||||
|
||||
if "." in name:
|
||||
raise ValueError("'name' may not contain a dot '.' character.")
|
||||
|
||||
self.name = name
|
||||
self.url_prefix = url_prefix
|
||||
self.subdomain = subdomain
|
||||
self.deferred_functions: list[DeferredSetupFunction] = []
|
||||
|
||||
if url_defaults is None:
|
||||
url_defaults = {}
|
||||
|
||||
self.url_values_defaults = url_defaults
|
||||
self.cli_group = cli_group
|
||||
self._blueprints: list[tuple[Blueprint, dict[str, t.Any]]] = []
|
||||
|
||||
def _check_setup_finished(self, f_name: str) -> None:
|
||||
if self._got_registered_once:
|
||||
raise AssertionError(
|
||||
f"The setup method '{f_name}' can no longer be called on the blueprint"
|
||||
f" '{self.name}'. It has already been registered at least once, any"
|
||||
" changes will not be applied consistently.\n"
|
||||
"Make sure all imports, decorators, functions, etc. needed to set up"
|
||||
" the blueprint are done before registering it."
|
||||
)
|
||||
|
||||
@setupmethod
|
||||
def record(self, func: DeferredSetupFunction) -> None:
|
||||
"""Registers a function that is called when the blueprint is
|
||||
registered on the application. This function is called with the
|
||||
state as argument as returned by the :meth:`make_setup_state`
|
||||
method.
|
||||
"""
|
||||
self.deferred_functions.append(func)
|
||||
|
||||
@setupmethod
|
||||
def record_once(self, func: DeferredSetupFunction) -> None:
|
||||
"""Works like :meth:`record` but wraps the function in another
|
||||
function that will ensure the function is only called once. If the
|
||||
blueprint is registered a second time on the application, the
|
||||
function passed is not called.
|
||||
"""
|
||||
|
||||
def wrapper(state: BlueprintSetupState) -> None:
|
||||
if state.first_registration:
|
||||
func(state)
|
||||
|
||||
self.record(update_wrapper(wrapper, func))
|
||||
|
||||
def make_setup_state(
|
||||
self, app: App, options: dict[str, t.Any], first_registration: bool = False
|
||||
) -> BlueprintSetupState:
|
||||
"""Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState`
|
||||
object that is later passed to the register callback functions.
|
||||
Subclasses can override this to return a subclass of the setup state.
|
||||
"""
|
||||
return BlueprintSetupState(self, app, options, first_registration)
|
||||
|
||||
@setupmethod
|
||||
def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None:
|
||||
"""Register a :class:`~flask.Blueprint` on this blueprint. Keyword
|
||||
arguments passed to this method will override the defaults set
|
||||
on the blueprint.
|
||||
|
||||
.. versionchanged:: 2.0.1
|
||||
The ``name`` option can be used to change the (pre-dotted)
|
||||
name the blueprint is registered with. This allows the same
|
||||
blueprint to be registered multiple times with unique names
|
||||
for ``url_for``.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if blueprint is self:
|
||||
raise ValueError("Cannot register a blueprint on itself")
|
||||
self._blueprints.append((blueprint, options))
|
||||
|
||||
def register(self, app: App, options: dict[str, t.Any]) -> None:
|
||||
"""Called by :meth:`Flask.register_blueprint` to register all
|
||||
views and callbacks registered on the blueprint with the
|
||||
application. Creates a :class:`.BlueprintSetupState` and calls
|
||||
each :meth:`record` callback with it.
|
||||
|
||||
:param app: The application this blueprint is being registered
|
||||
with.
|
||||
:param options: Keyword arguments forwarded from
|
||||
:meth:`~Flask.register_blueprint`.
|
||||
|
||||
.. versionchanged:: 2.3
|
||||
Nested blueprints now correctly apply subdomains.
|
||||
|
||||
.. versionchanged:: 2.1
|
||||
Registering the same blueprint with the same name multiple
|
||||
times is an error.
|
||||
|
||||
.. versionchanged:: 2.0.1
|
||||
Nested blueprints are registered with their dotted name.
|
||||
This allows different blueprints with the same name to be
|
||||
nested at different locations.
|
||||
|
||||
.. versionchanged:: 2.0.1
|
||||
The ``name`` option can be used to change the (pre-dotted)
|
||||
name the blueprint is registered with. This allows the same
|
||||
blueprint to be registered multiple times with unique names
|
||||
for ``url_for``.
|
||||
"""
|
||||
name_prefix = options.get("name_prefix", "")
|
||||
self_name = options.get("name", self.name)
|
||||
name = f"{name_prefix}.{self_name}".lstrip(".")
|
||||
|
||||
if name in app.blueprints:
|
||||
bp_desc = "this" if app.blueprints[name] is self else "a different"
|
||||
existing_at = f" '{name}'" if self_name != name else ""
|
||||
|
||||
raise ValueError(
|
||||
f"The name '{self_name}' is already registered for"
|
||||
f" {bp_desc} blueprint{existing_at}. Use 'name=' to"
|
||||
f" provide a unique name."
|
||||
)
|
||||
|
||||
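# The same blueprint object may be registered more than once under different
# names, so track separately whether this is the first registration of the
# object and the first registration of this particular name.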
first_bp_registration = not any(bp is self for bp in app.blueprints.values())
|
||||
first_name_registration = name not in app.blueprints
|
||||
|
||||
app.blueprints[name] = self
|
||||
self._got_registered_once = True
|
||||
state = self.make_setup_state(app, options, first_bp_registration)
|
||||
|
||||
if self.has_static_folder:
|
||||
state.add_url_rule(
|
||||
f"{self.static_url_path}/<path:filename>",
|
||||
view_func=self.send_static_file, # type: ignore[attr-defined]
|
||||
endpoint="static",
|
||||
)
|
||||
|
||||
# Merge blueprint data into parent.
|
||||
if first_bp_registration or first_name_registration:
|
||||
self._merge_blueprint_funcs(app, name)
|
||||
|
||||
for deferred in self.deferred_functions:
|
||||
deferred(state)
|
||||
|
||||
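# Resolve how the blueprint's CLI commands are exposed: ``None`` merges them
# directly into the app's ``flask`` command, the unset sentinel nests them
# under a group named after the blueprint, and any other value is used as a
# custom group name.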
cli_resolved_group = options.get("cli_group", self.cli_group)
|
||||
|
||||
if self.cli.commands:
|
||||
if cli_resolved_group is None:
|
||||
app.cli.commands.update(self.cli.commands)
|
||||
elif cli_resolved_group is _sentinel:
|
||||
self.cli.name = name
|
||||
app.cli.add_command(self.cli)
|
||||
else:
|
||||
self.cli.name = cli_resolved_group
|
||||
app.cli.add_command(self.cli)
|
||||
|
||||
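# Register nested blueprints, combining this registration's subdomain and
# url_prefix with each child's own values (e.g. a parent at "/api" and a
# child at "/v1" yield routes under "/api/v1").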
for blueprint, bp_options in self._blueprints:
|
||||
bp_options = bp_options.copy()
|
||||
bp_url_prefix = bp_options.get("url_prefix")
|
||||
bp_subdomain = bp_options.get("subdomain")
|
||||
|
||||
if bp_subdomain is None:
|
||||
bp_subdomain = blueprint.subdomain
|
||||
|
||||
if state.subdomain is not None and bp_subdomain is not None:
|
||||
bp_options["subdomain"] = bp_subdomain + "." + state.subdomain
|
||||
elif bp_subdomain is not None:
|
||||
bp_options["subdomain"] = bp_subdomain
|
||||
elif state.subdomain is not None:
|
||||
bp_options["subdomain"] = state.subdomain
|
||||
|
||||
if bp_url_prefix is None:
|
||||
bp_url_prefix = blueprint.url_prefix
|
||||
|
||||
if state.url_prefix is not None and bp_url_prefix is not None:
|
||||
bp_options["url_prefix"] = (
|
||||
state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/")
|
||||
)
|
||||
elif bp_url_prefix is not None:
|
||||
bp_options["url_prefix"] = bp_url_prefix
|
||||
elif state.url_prefix is not None:
|
||||
bp_options["url_prefix"] = state.url_prefix
|
||||
|
||||
bp_options["name_prefix"] = name
|
||||
blueprint.register(app, bp_options)
|
||||
|
||||
def _merge_blueprint_funcs(self, app: App, name: str) -> None:
|
||||
def extend(
|
||||
bp_dict: dict[ft.AppOrBlueprintKey, list[t.Any]],
|
||||
parent_dict: dict[ft.AppOrBlueprintKey, list[t.Any]],
|
||||
) -> None:
|
||||
for key, values in bp_dict.items():
|
||||
key = name if key is None else f"{name}.{key}"
|
||||
parent_dict[key].extend(values)
|
||||
|
||||
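# Copy the blueprint's error handlers into the app under the
# blueprint-prefixed scope key, so they only apply to requests handled by
# this blueprint.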
for key, value in self.error_handler_spec.items():
|
||||
key = name if key is None else f"{name}.{key}"
|
||||
value = defaultdict(
|
||||
dict,
|
||||
{
|
||||
code: {exc_class: func for exc_class, func in code_values.items()}
|
||||
for code, code_values in value.items()
|
||||
},
|
||||
)
|
||||
app.error_handler_spec[key] = value
|
||||
|
||||
for endpoint, func in self.view_functions.items():
|
||||
app.view_functions[endpoint] = func
|
||||
|
||||
extend(self.before_request_funcs, app.before_request_funcs)
|
||||
extend(self.after_request_funcs, app.after_request_funcs)
|
||||
extend(
|
||||
self.teardown_request_funcs,
|
||||
app.teardown_request_funcs,
|
||||
)
|
||||
extend(self.url_default_functions, app.url_default_functions)
|
||||
extend(self.url_value_preprocessors, app.url_value_preprocessors)
|
||||
extend(self.template_context_processors, app.template_context_processors)
|
||||
|
||||
@setupmethod
|
||||
def add_url_rule(
|
||||
self,
|
||||
rule: str,
|
||||
endpoint: str | None = None,
|
||||
view_func: ft.RouteCallable | None = None,
|
||||
provide_automatic_options: bool | None = None,
|
||||
**options: t.Any,
|
||||
) -> None:
|
||||
"""Register a URL rule with the blueprint. See :meth:`.Flask.add_url_rule` for
|
||||
full documentation.
|
||||
|
||||
The URL rule is prefixed with the blueprint's URL prefix. The endpoint name,
|
||||
used with :func:`url_for`, is prefixed with the blueprint's name.
|
||||
"""
|
||||
if endpoint and "." in endpoint:
|
||||
raise ValueError("'endpoint' may not contain a dot '.' character.")
|
||||
|
||||
if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__:
|
||||
raise ValueError("'view_func' name may not contain a dot '.' character.")
|
||||
|
||||
self.record(
|
||||
lambda s: s.add_url_rule(
|
||||
rule,
|
||||
endpoint,
|
||||
view_func,
|
||||
provide_automatic_options=provide_automatic_options,
|
||||
**options,
|
||||
)
|
||||
)
|
||||
|
||||
@setupmethod
|
||||
def app_template_filter(
|
||||
self, name: str | None = None
|
||||
) -> t.Callable[[T_template_filter], T_template_filter]:
|
||||
"""Register a template filter, available in any template rendered by the
|
||||
application. Equivalent to :meth:`.Flask.template_filter`.
|
||||
|
||||
:param name: the optional name of the filter, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
|
||||
def decorator(f: T_template_filter) -> T_template_filter:
|
||||
self.add_app_template_filter(f, name=name)
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
@setupmethod
|
||||
def add_app_template_filter(
|
||||
self, f: ft.TemplateFilterCallable, name: str | None = None
|
||||
) -> None:
|
||||
"""Register a template filter, available in any template rendered by the
|
||||
application. Works like the :meth:`app_template_filter` decorator. Equivalent to
|
||||
:meth:`.Flask.add_template_filter`.
|
||||
|
||||
:param name: the optional name of the filter, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
|
||||
def register_template(state: BlueprintSetupState) -> None:
|
||||
state.app.jinja_env.filters[name or f.__name__] = f
|
||||
|
||||
self.record_once(register_template)
|
||||
|
||||
@setupmethod
|
||||
def app_template_test(
|
||||
self, name: str | None = None
|
||||
) -> t.Callable[[T_template_test], T_template_test]:
|
||||
"""Register a template test, available in any template rendered by the
|
||||
application. Equivalent to :meth:`.Flask.template_test`.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the test, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
|
||||
def decorator(f: T_template_test) -> T_template_test:
|
||||
self.add_app_template_test(f, name=name)
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
@setupmethod
|
||||
def add_app_template_test(
|
||||
self, f: ft.TemplateTestCallable, name: str | None = None
|
||||
) -> None:
|
||||
"""Register a template test, available in any template rendered by the
|
||||
application. Works like the :meth:`app_template_test` decorator. Equivalent to
|
||||
:meth:`.Flask.add_template_test`.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the test, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
|
||||
def register_template(state: BlueprintSetupState) -> None:
|
||||
state.app.jinja_env.tests[name or f.__name__] = f
|
||||
|
||||
self.record_once(register_template)
|
||||
|
||||
@setupmethod
|
||||
def app_template_global(
|
||||
self, name: str | None = None
|
||||
) -> t.Callable[[T_template_global], T_template_global]:
|
||||
"""Register a template global, available in any template rendered by the
|
||||
application. Equivalent to :meth:`.Flask.template_global`.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the global, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
|
||||
def decorator(f: T_template_global) -> T_template_global:
|
||||
self.add_app_template_global(f, name=name)
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
@setupmethod
|
||||
def add_app_template_global(
|
||||
self, f: ft.TemplateGlobalCallable, name: str | None = None
|
||||
) -> None:
|
||||
"""Register a template global, available in any template rendered by the
|
||||
application. Works like the :meth:`app_template_global` decorator. Equivalent to
|
||||
:meth:`.Flask.add_template_global`.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the global, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
|
||||
def register_template(state: BlueprintSetupState) -> None:
|
||||
state.app.jinja_env.globals[name or f.__name__] = f
|
||||
|
||||
self.record_once(register_template)
|
||||
|
||||
@setupmethod
|
||||
def before_app_request(self, f: T_before_request) -> T_before_request:
|
||||
"""Like :meth:`before_request`, but before every request, not only those handled
|
||||
by the blueprint. Equivalent to :meth:`.Flask.before_request`.
|
||||
"""
|
||||
self.record_once(
|
||||
lambda s: s.app.before_request_funcs.setdefault(None, []).append(f)
|
||||
)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def after_app_request(self, f: T_after_request) -> T_after_request:
|
||||
"""Like :meth:`after_request`, but after every request, not only those handled
|
||||
by the blueprint. Equivalent to :meth:`.Flask.after_request`.
|
||||
"""
|
||||
self.record_once(
|
||||
lambda s: s.app.after_request_funcs.setdefault(None, []).append(f)
|
||||
)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def teardown_app_request(self, f: T_teardown) -> T_teardown:
|
||||
"""Like :meth:`teardown_request`, but after every request, not only those
|
||||
handled by the blueprint. Equivalent to :meth:`.Flask.teardown_request`.
|
||||
"""
|
||||
self.record_once(
|
||||
lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f)
|
||||
)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def app_context_processor(
|
||||
self, f: T_template_context_processor
|
||||
) -> T_template_context_processor:
|
||||
"""Like :meth:`context_processor`, but for templates rendered by every view, not
|
||||
only by the blueprint. Equivalent to :meth:`.Flask.context_processor`.
|
||||
"""
|
||||
self.record_once(
|
||||
lambda s: s.app.template_context_processors.setdefault(None, []).append(f)
|
||||
)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def app_errorhandler(
|
||||
self, code: type[Exception] | int
|
||||
) -> t.Callable[[T_error_handler], T_error_handler]:
|
||||
"""Like :meth:`errorhandler`, but for every request, not only those handled by
|
||||
the blueprint. Equivalent to :meth:`.Flask.errorhandler`.
|
||||
"""
|
||||
|
||||
def decorator(f: T_error_handler) -> T_error_handler:
|
||||
def from_blueprint(state: BlueprintSetupState) -> None:
|
||||
state.app.errorhandler(code)(f)
|
||||
|
||||
self.record_once(from_blueprint)
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
@setupmethod
|
||||
def app_url_value_preprocessor(
|
||||
self, f: T_url_value_preprocessor
|
||||
) -> T_url_value_preprocessor:
|
||||
"""Like :meth:`url_value_preprocessor`, but for every request, not only those
|
||||
handled by the blueprint. Equivalent to :meth:`.Flask.url_value_preprocessor`.
|
||||
"""
|
||||
self.record_once(
|
||||
lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f)
|
||||
)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def app_url_defaults(self, f: T_url_defaults) -> T_url_defaults:
|
||||
"""Like :meth:`url_defaults`, but for every request, not only those handled by
|
||||
the blueprint. Equivalent to :meth:`.Flask.url_defaults`.
|
||||
"""
|
||||
self.record_once(
|
||||
lambda s: s.app.url_default_functions.setdefault(None, []).append(f)
|
||||
)
|
||||
return f
|
||||
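# A minimal usage sketch (illustrative, not part of the vendored module): it
# shows how nested blueprints compose URL prefixes and dotted endpoint names,
# as implemented in Blueprint.register above. Guarded so it only runs when
# this file is executed directly.
if __name__ == "__main__":
    from flask import Flask, url_for

    api = Blueprint("api", __name__, url_prefix="/api")
    v1 = Blueprint("v1", __name__, url_prefix="/v1")

    @v1.get("/ping")
    def ping():
        return {"ok": True}

    # The child inherits the parent's prefix and name, so the rule becomes
    # /api/v1/ping and the endpoint becomes "api.v1.ping".
    api.register_blueprint(v1)

    app = Flask(__name__)
    app.register_blueprint(api)

    with app.test_request_context():
        assert url_for("api.v1.ping") == "/api/v1/ping"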
@ -0,0 +1,801 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib.util
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
import typing as t
|
||||
from collections import defaultdict
|
||||
from functools import update_wrapper
|
||||
|
||||
from jinja2 import BaseLoader
|
||||
from jinja2 import FileSystemLoader
|
||||
from werkzeug.exceptions import default_exceptions
|
||||
from werkzeug.exceptions import HTTPException
|
||||
from werkzeug.utils import cached_property
|
||||
|
||||
from .. import typing as ft
|
||||
from ..helpers import get_root_path
|
||||
from ..templating import _default_template_ctx_processor
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from click import Group
|
||||
|
||||
# a singleton sentinel value for parameter defaults
|
||||
_sentinel = object()
|
||||
|
||||
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
||||
T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable[t.Any])
|
||||
T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable)
|
||||
T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable)
|
||||
T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable)
|
||||
T_template_context_processor = t.TypeVar(
|
||||
"T_template_context_processor", bound=ft.TemplateContextProcessorCallable
|
||||
)
|
||||
T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable)
|
||||
T_url_value_preprocessor = t.TypeVar(
|
||||
"T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable
|
||||
)
|
||||
T_route = t.TypeVar("T_route", bound=ft.RouteCallable)
|
||||
|
||||
|
||||
def setupmethod(f: F) -> F:
|
||||
f_name = f.__name__
|
||||
|
||||
def wrapper_func(self: Scaffold, *args: t.Any, **kwargs: t.Any) -> t.Any:
|
||||
self._check_setup_finished(f_name)
|
||||
return f(self, *args, **kwargs)
|
||||
|
||||
return t.cast(F, update_wrapper(wrapper_func, f))
|
||||
|
||||
|
||||
class Scaffold:
|
||||
"""Common behavior shared between :class:`~flask.Flask` and
|
||||
:class:`~flask.blueprints.Blueprint`.
|
||||
|
||||
:param import_name: The import name of the module where this object
|
||||
is defined. Usually :attr:`__name__` should be used.
|
||||
:param static_folder: Path to a folder of static files to serve.
|
||||
If this is set, a static route will be added.
|
||||
:param static_url_path: URL prefix for the static route.
|
||||
:param template_folder: Path to a folder containing template files
|
||||
for rendering. If this is set, a Jinja loader will be added.
|
||||
:param root_path: The path that static, template, and resource files
|
||||
are relative to. Typically not set, it is discovered based on
|
||||
the ``import_name``.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
|
||||
cli: Group
|
||||
name: str
|
||||
_static_folder: str | None = None
|
||||
_static_url_path: str | None = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
import_name: str,
|
||||
static_folder: str | os.PathLike[str] | None = None,
|
||||
static_url_path: str | None = None,
|
||||
template_folder: str | os.PathLike[str] | None = None,
|
||||
root_path: str | None = None,
|
||||
):
|
||||
#: The name of the package or module that this object belongs
|
||||
#: to. Do not change this once it is set by the constructor.
|
||||
self.import_name = import_name
|
||||
|
||||
self.static_folder = static_folder # type: ignore
|
||||
self.static_url_path = static_url_path
|
||||
|
||||
#: The path to the templates folder, relative to
|
||||
#: :attr:`root_path`, to add to the template loader. ``None`` if
|
||||
#: templates should not be added.
|
||||
self.template_folder = template_folder
|
||||
|
||||
if root_path is None:
|
||||
root_path = get_root_path(self.import_name)
|
||||
|
||||
#: Absolute path to the package on the filesystem. Used to look
|
||||
#: up resources contained in the package.
|
||||
self.root_path = root_path
|
||||
|
||||
#: A dictionary mapping endpoint names to view functions.
|
||||
#:
|
||||
#: To register a view function, use the :meth:`route` decorator.
|
||||
#:
|
||||
#: This data structure is internal. It should not be modified
|
||||
#: directly and its format may change at any time.
|
||||
self.view_functions: dict[str, ft.RouteCallable] = {}
|
||||
|
||||
#: A data structure of registered error handlers, in the format
|
||||
#: ``{scope: {code: {class: handler}}}``. The ``scope`` key is
|
||||
#: the name of a blueprint the handlers are active for, or
|
||||
#: ``None`` for all requests. The ``code`` key is the HTTP
|
||||
#: status code for ``HTTPException``, or ``None`` for
|
||||
#: other exceptions. The innermost dictionary maps exception
|
||||
#: classes to handler functions.
|
||||
#:
|
||||
#: To register an error handler, use the :meth:`errorhandler`
|
||||
#: decorator.
|
||||
#:
|
||||
#: This data structure is internal. It should not be modified
|
||||
#: directly and its format may change at any time.
|
||||
self.error_handler_spec: dict[
|
||||
ft.AppOrBlueprintKey,
|
||||
dict[int | None, dict[type[Exception], ft.ErrorHandlerCallable]],
|
||||
] = defaultdict(lambda: defaultdict(dict))
|
||||
|
||||
#: A data structure of functions to call at the beginning of
|
||||
#: each request, in the format ``{scope: [functions]}``. The
|
||||
#: ``scope`` key is the name of a blueprint the functions are
|
||||
#: active for, or ``None`` for all requests.
|
||||
#:
|
||||
#: To register a function, use the :meth:`before_request`
|
||||
#: decorator.
|
||||
#:
|
||||
#: This data structure is internal. It should not be modified
|
||||
#: directly and its format may change at any time.
|
||||
self.before_request_funcs: dict[
|
||||
ft.AppOrBlueprintKey, list[ft.BeforeRequestCallable]
|
||||
] = defaultdict(list)
|
||||
|
||||
#: A data structure of functions to call at the end of each
|
||||
#: request, in the format ``{scope: [functions]}``. The
|
||||
#: ``scope`` key is the name of a blueprint the functions are
|
||||
#: active for, or ``None`` for all requests.
|
||||
#:
|
||||
#: To register a function, use the :meth:`after_request`
|
||||
#: decorator.
|
||||
#:
|
||||
#: This data structure is internal. It should not be modified
|
||||
#: directly and its format may change at any time.
|
||||
self.after_request_funcs: dict[
|
||||
ft.AppOrBlueprintKey, list[ft.AfterRequestCallable[t.Any]]
|
||||
] = defaultdict(list)
|
||||
|
||||
#: A data structure of functions to call at the end of each
|
||||
#: request even if an exception is raised, in the format
|
||||
#: ``{scope: [functions]}``. The ``scope`` key is the name of a
|
||||
#: blueprint the functions are active for, or ``None`` for all
|
||||
#: requests.
|
||||
#:
|
||||
#: To register a function, use the :meth:`teardown_request`
|
||||
#: decorator.
|
||||
#:
|
||||
#: This data structure is internal. It should not be modified
|
||||
#: directly and its format may change at any time.
|
||||
self.teardown_request_funcs: dict[
|
||||
ft.AppOrBlueprintKey, list[ft.TeardownCallable]
|
||||
] = defaultdict(list)
|
||||
|
||||
#: A data structure of functions to call to pass extra context
|
||||
#: values when rendering templates, in the format
|
||||
#: ``{scope: [functions]}``. The ``scope`` key is the name of a
|
||||
#: blueprint the functions are active for, or ``None`` for all
|
||||
#: requests.
|
||||
#:
|
||||
#: To register a function, use the :meth:`context_processor`
|
||||
#: decorator.
|
||||
#:
|
||||
#: This data structure is internal. It should not be modified
|
||||
#: directly and its format may change at any time.
|
||||
self.template_context_processors: dict[
|
||||
ft.AppOrBlueprintKey, list[ft.TemplateContextProcessorCallable]
|
||||
] = defaultdict(list, {None: [_default_template_ctx_processor]})
|
||||
|
||||
#: A data structure of functions to call to modify the keyword
|
||||
#: arguments passed to the view function, in the format
|
||||
#: ``{scope: [functions]}``. The ``scope`` key is the name of a
|
||||
#: blueprint the functions are active for, or ``None`` for all
|
||||
#: requests.
|
||||
#:
|
||||
#: To register a function, use the
|
||||
#: :meth:`url_value_preprocessor` decorator.
|
||||
#:
|
||||
#: This data structure is internal. It should not be modified
|
||||
#: directly and its format may change at any time.
|
||||
self.url_value_preprocessors: dict[
|
||||
ft.AppOrBlueprintKey,
|
||||
list[ft.URLValuePreprocessorCallable],
|
||||
] = defaultdict(list)
|
||||
|
||||
#: A data structure of functions to call to modify the keyword
|
||||
#: arguments when generating URLs, in the format
|
||||
#: ``{scope: [functions]}``. The ``scope`` key is the name of a
|
||||
#: blueprint the functions are active for, or ``None`` for all
|
||||
#: requests.
|
||||
#:
|
||||
#: To register a function, use the :meth:`url_defaults`
|
||||
#: decorator.
|
||||
#:
|
||||
#: This data structure is internal. It should not be modified
|
||||
#: directly and its format may change at any time.
|
||||
self.url_default_functions: dict[
|
||||
ft.AppOrBlueprintKey, list[ft.URLDefaultCallable]
|
||||
] = defaultdict(list)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{type(self).__name__} {self.name!r}>"
|
||||
|
||||
def _check_setup_finished(self, f_name: str) -> None:
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def static_folder(self) -> str | None:
|
||||
"""The absolute path to the configured static folder. ``None``
|
||||
if no static folder is set.
|
||||
"""
|
||||
if self._static_folder is not None:
|
||||
return os.path.join(self.root_path, self._static_folder)
|
||||
else:
|
||||
return None
|
||||
|
||||
@static_folder.setter
|
||||
def static_folder(self, value: str | os.PathLike[str] | None) -> None:
|
||||
if value is not None:
|
||||
value = os.fspath(value).rstrip(r"\/")
|
||||
|
||||
self._static_folder = value
|
||||
|
||||
@property
|
||||
def has_static_folder(self) -> bool:
|
||||
"""``True`` if :attr:`static_folder` is set.
|
||||
|
||||
.. versionadded:: 0.5
|
||||
"""
|
||||
return self.static_folder is not None
|
||||
|
||||
@property
|
||||
def static_url_path(self) -> str | None:
|
||||
"""The URL prefix that the static route will be accessible from.
|
||||
|
||||
If it was not configured during init, it is derived from
|
||||
:attr:`static_folder`.
|
||||
"""
|
||||
if self._static_url_path is not None:
|
||||
return self._static_url_path
|
||||
|
||||
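# Not configured explicitly: derive the URL prefix from the static folder's
# base name, e.g. a folder named "static" is served from "/static".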
if self.static_folder is not None:
|
||||
basename = os.path.basename(self.static_folder)
|
||||
return f"/{basename}".rstrip("/")
|
||||
|
||||
return None
|
||||
|
||||
@static_url_path.setter
|
||||
def static_url_path(self, value: str | None) -> None:
|
||||
if value is not None:
|
||||
value = value.rstrip("/")
|
||||
|
||||
self._static_url_path = value
|
||||
|
||||
@cached_property
|
||||
def jinja_loader(self) -> BaseLoader | None:
|
||||
"""The Jinja loader for this object's templates. By default this
|
||||
is a :class:`jinja2.loaders.FileSystemLoader` pointing at
|
||||
:attr:`template_folder` if it is set.
|
||||
|
||||
.. versionadded:: 0.5
|
||||
"""
|
||||
if self.template_folder is not None:
|
||||
return FileSystemLoader(os.path.join(self.root_path, self.template_folder))
|
||||
else:
|
||||
return None
|
||||
|
||||
def _method_route(
|
||||
self,
|
||||
method: str,
|
||||
rule: str,
|
||||
options: dict[str, t.Any],
|
||||
) -> t.Callable[[T_route], T_route]:
|
||||
if "methods" in options:
|
||||
raise TypeError("Use the 'route' decorator to use the 'methods' argument.")
|
||||
|
||||
return self.route(rule, methods=[method], **options)
|
||||
|
||||
@setupmethod
|
||||
def get(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
|
||||
"""Shortcut for :meth:`route` with ``methods=["GET"]``.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
return self._method_route("GET", rule, options)
|
||||
|
||||
@setupmethod
|
||||
def post(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
|
||||
"""Shortcut for :meth:`route` with ``methods=["POST"]``.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
return self._method_route("POST", rule, options)
|
||||
|
||||
@setupmethod
|
||||
def put(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
|
||||
"""Shortcut for :meth:`route` with ``methods=["PUT"]``.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
return self._method_route("PUT", rule, options)
|
||||
|
||||
@setupmethod
|
||||
def delete(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
|
||||
"""Shortcut for :meth:`route` with ``methods=["DELETE"]``.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
return self._method_route("DELETE", rule, options)
|
||||
|
||||
@setupmethod
|
||||
def patch(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
|
||||
"""Shortcut for :meth:`route` with ``methods=["PATCH"]``.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
return self._method_route("PATCH", rule, options)
|
||||
|
||||
@setupmethod
|
||||
def route(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
|
||||
"""Decorate a view function to register it with the given URL
|
||||
rule and options. Calls :meth:`add_url_rule`, which has more
|
||||
details about the implementation.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@app.route("/")
|
||||
def index():
|
||||
return "Hello, World!"
|
||||
|
||||
See :ref:`url-route-registrations`.
|
||||
|
||||
The endpoint name for the route defaults to the name of the view
|
||||
function if the ``endpoint`` parameter isn't passed.
|
||||
|
||||
The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` and
|
||||
``OPTIONS`` are added automatically.
|
||||
|
||||
:param rule: The URL rule string.
|
||||
:param options: Extra options passed to the
|
||||
:class:`~werkzeug.routing.Rule` object.
|
||||
"""
|
||||
|
||||
def decorator(f: T_route) -> T_route:
|
||||
endpoint = options.pop("endpoint", None)
|
||||
self.add_url_rule(rule, endpoint, f, **options)
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
@setupmethod
|
||||
def add_url_rule(
|
||||
self,
|
||||
rule: str,
|
||||
endpoint: str | None = None,
|
||||
view_func: ft.RouteCallable | None = None,
|
||||
provide_automatic_options: bool | None = None,
|
||||
**options: t.Any,
|
||||
) -> None:
|
||||
"""Register a rule for routing incoming requests and building
|
||||
URLs. The :meth:`route` decorator is a shortcut to call this
|
||||
with the ``view_func`` argument. These are equivalent:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@app.route("/")
|
||||
def index():
|
||||
...
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def index():
|
||||
...
|
||||
|
||||
app.add_url_rule("/", view_func=index)
|
||||
|
||||
See :ref:`url-route-registrations`.
|
||||
|
||||
The endpoint name for the route defaults to the name of the view
|
||||
function if the ``endpoint`` parameter isn't passed. An error
|
||||
will be raised if a function has already been registered for the
|
||||
endpoint.
|
||||
|
||||
The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` is
|
||||
always added automatically, and ``OPTIONS`` is added
|
||||
automatically by default.
|
||||
|
||||
``view_func`` does not necessarily need to be passed, but if the
|
||||
rule should participate in routing an endpoint name must be
|
||||
associated with a view function at some point with the
|
||||
:meth:`endpoint` decorator.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
app.add_url_rule("/", endpoint="index")
|
||||
|
||||
@app.endpoint("index")
|
||||
def index():
|
||||
...
|
||||
|
||||
If ``view_func`` has a ``required_methods`` attribute, those
|
||||
methods are added to the passed and automatic methods. If it
|
||||
has a ``provide_automatic_methods`` attribute, it is used as the
|
||||
default if the parameter is not passed.
|
||||
|
||||
:param rule: The URL rule string.
|
||||
:param endpoint: The endpoint name to associate with the rule
|
||||
and view function. Used when routing and building URLs.
|
||||
Defaults to ``view_func.__name__``.
|
||||
:param view_func: The view function to associate with the
|
||||
endpoint name.
|
||||
:param provide_automatic_options: Add the ``OPTIONS`` method and
|
||||
respond to ``OPTIONS`` requests automatically.
|
||||
:param options: Extra options passed to the
|
||||
:class:`~werkzeug.routing.Rule` object.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@setupmethod
|
||||
def endpoint(self, endpoint: str) -> t.Callable[[F], F]:
|
||||
"""Decorate a view function to register it for the given
|
||||
endpoint. Used if a rule is added without a ``view_func`` with
|
||||
:meth:`add_url_rule`.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
app.add_url_rule("/ex", endpoint="example")
|
||||
|
||||
@app.endpoint("example")
|
||||
def example():
|
||||
...
|
||||
|
||||
:param endpoint: The endpoint name to associate with the view
|
||||
function.
|
||||
"""
|
||||
|
||||
def decorator(f: F) -> F:
|
||||
self.view_functions[endpoint] = f
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
@setupmethod
|
||||
def before_request(self, f: T_before_request) -> T_before_request:
|
||||
"""Register a function to run before each request.
|
||||
|
||||
For example, this can be used to open a database connection, or
|
||||
to load the logged in user from the session.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@app.before_request
|
||||
def load_user():
|
||||
if "user_id" in session:
|
||||
g.user = db.session.get(session["user_id"])
|
||||
|
||||
The function will be called without any arguments. If it returns
|
||||
a non-``None`` value, the value is handled as if it was the
|
||||
return value from the view, and further request handling is
|
||||
stopped.
|
||||
|
||||
This is available on both app and blueprint objects. When used on an app, this
|
||||
executes before every request. When used on a blueprint, this executes before
|
||||
every request that the blueprint handles. To register with a blueprint and
|
||||
execute before every request, use :meth:`.Blueprint.before_app_request`.
|
||||
"""
|
||||
self.before_request_funcs.setdefault(None, []).append(f)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def after_request(self, f: T_after_request) -> T_after_request:
|
||||
"""Register a function to run after each request to this object.
|
||||
|
||||
The function is called with the response object, and must return
|
||||
a response object. This allows the functions to modify or
|
||||
replace the response before it is sent.
|
||||
|
||||
If a function raises an exception, any remaining
|
||||
``after_request`` functions will not be called. Therefore, this
|
||||
should not be used for actions that must execute, such as to
|
||||
close resources. Use :meth:`teardown_request` for that.
|
||||
|
||||
This is available on both app and blueprint objects. When used on an app, this
|
||||
executes after every request. When used on a blueprint, this executes after
|
||||
every request that the blueprint handles. To register with a blueprint and
|
||||
execute after every request, use :meth:`.Blueprint.after_app_request`.
|
||||
"""
|
||||
self.after_request_funcs.setdefault(None, []).append(f)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def teardown_request(self, f: T_teardown) -> T_teardown:
|
||||
"""Register a function to be called when the request context is
|
||||
popped. Typically this happens at the end of each request, but
|
||||
contexts may be pushed manually as well during testing.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
with app.test_request_context():
|
||||
...
|
||||
|
||||
When the ``with`` block exits (or ``ctx.pop()`` is called), the
|
||||
teardown functions are called just before the request context is
|
||||
made inactive.
|
||||
|
||||
When a teardown function was called because of an unhandled
|
||||
exception it will be passed an error object. If an
|
||||
:meth:`errorhandler` is registered, it will handle the exception
|
||||
and the teardown will not receive it.
|
||||
|
||||
Teardown functions must avoid raising exceptions. If they
|
||||
execute code that might fail they must surround that code with a
|
||||
``try``/``except`` block and log any errors.
|
||||
|
||||
The return values of teardown functions are ignored.
|
||||
|
||||
This is available on both app and blueprint objects. When used on an app, this
|
||||
executes after every request. When used on a blueprint, this executes after
|
||||
every request that the blueprint handles. To register with a blueprint and
|
||||
execute after every request, use :meth:`.Blueprint.teardown_app_request`.
|
||||
"""
|
||||
self.teardown_request_funcs.setdefault(None, []).append(f)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def context_processor(
|
||||
self,
|
||||
f: T_template_context_processor,
|
||||
) -> T_template_context_processor:
|
||||
"""Registers a template context processor function. These functions run before
|
||||
rendering a template. The keys of the returned dict are added as variables
|
||||
available in the template.
|
||||
|
||||
This is available on both app and blueprint objects. When used on an app, this
|
||||
is called for every rendered template. When used on a blueprint, this is called
|
||||
for templates rendered from the blueprint's views. To register with a blueprint
|
||||
and affect every template, use :meth:`.Blueprint.app_context_processor`.
|
||||
"""
|
||||
self.template_context_processors[None].append(f)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def url_value_preprocessor(
|
||||
self,
|
||||
f: T_url_value_preprocessor,
|
||||
) -> T_url_value_preprocessor:
|
||||
"""Register a URL value preprocessor function for all view
|
||||
functions in the application. These functions will be called before the
|
||||
:meth:`before_request` functions.
|
||||
|
||||
The function can modify the values captured from the matched url before
|
||||
they are passed to the view. For example, this can be used to pop a
|
||||
common language code value and place it in ``g`` rather than pass it to
|
||||
every view.
|
||||
|
||||
The function is passed the endpoint name and values dict. The return
|
||||
value is ignored.
|
||||
|
||||
This is available on both app and blueprint objects. When used on an app, this
|
||||
is called for every request. When used on a blueprint, this is called for
|
||||
requests that the blueprint handles. To register with a blueprint and affect
|
||||
every request, use :meth:`.Blueprint.app_url_value_preprocessor`.
|
||||
"""
|
||||
self.url_value_preprocessors[None].append(f)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def url_defaults(self, f: T_url_defaults) -> T_url_defaults:
|
||||
"""Callback function for URL defaults for all view functions of the
|
||||
application. It's called with the endpoint and values and should
|
||||
update the values passed in place.
|
||||
|
||||
This is available on both app and blueprint objects. When used on an app, this
|
||||
is called for every request. When used on a blueprint, this is called for
|
||||
requests that the blueprint handles. To register with a blueprint and affect
|
||||
every request, use :meth:`.Blueprint.app_url_defaults`.
|
||||
"""
|
||||
self.url_default_functions[None].append(f)
|
||||
return f
|
||||
|
||||
@setupmethod
|
||||
def errorhandler(
|
||||
self, code_or_exception: type[Exception] | int
|
||||
) -> t.Callable[[T_error_handler], T_error_handler]:
|
||||
"""Register a function to handle errors by code or exception class.
|
||||
|
||||
A decorator that is used to register a function given an
|
||||
error code. Example::
|
||||
|
||||
@app.errorhandler(404)
|
||||
def page_not_found(error):
|
||||
return 'This page does not exist', 404
|
||||
|
||||
You can also register handlers for arbitrary exceptions::
|
||||
|
||||
@app.errorhandler(DatabaseError)
|
||||
def special_exception_handler(error):
|
||||
return 'Database connection failed', 500
|
||||
|
||||
This is available on both app and blueprint objects. When used on an app, this
|
||||
can handle errors from every request. When used on a blueprint, this can handle
|
||||
errors from requests that the blueprint handles. To register with a blueprint
|
||||
and affect every request, use :meth:`.Blueprint.app_errorhandler`.
|
||||
|
||||
.. versionadded:: 0.7
|
||||
Use :meth:`register_error_handler` instead of modifying
|
||||
:attr:`error_handler_spec` directly, for application wide error
|
||||
handlers.
|
||||
|
||||
.. versionadded:: 0.7
|
||||
One can now additionally also register custom exception types
|
||||
that do not necessarily have to be a subclass of the
|
||||
:class:`~werkzeug.exceptions.HTTPException` class.
|
||||
|
||||
:param code_or_exception: the code as integer for the handler, or
|
||||
an arbitrary exception
|
||||
"""
|
||||
|
||||
def decorator(f: T_error_handler) -> T_error_handler:
|
||||
self.register_error_handler(code_or_exception, f)
|
||||
return f
|
||||
|
||||
return decorator
|
||||
|
||||
@setupmethod
|
||||
def register_error_handler(
|
||||
self,
|
||||
code_or_exception: type[Exception] | int,
|
||||
f: ft.ErrorHandlerCallable,
|
||||
) -> None:
|
||||
"""Alternative error attach function to the :meth:`errorhandler`
|
||||
decorator that is more straightforward to use for non-decorator
|
||||
usage.
|
||||
|
||||
.. versionadded:: 0.7
|
||||
"""
|
||||
exc_class, code = self._get_exc_class_and_code(code_or_exception)
|
||||
self.error_handler_spec[None][code][exc_class] = f
|
||||
|
||||
@staticmethod
|
||||
def _get_exc_class_and_code(
|
||||
exc_class_or_code: type[Exception] | int,
|
||||
) -> tuple[type[Exception], int | None]:
|
||||
"""Get the exception class being handled. For HTTP status codes
|
||||
or ``HTTPException`` subclasses, return both the exception and
|
||||
status code.
|
||||
|
||||
:param exc_class_or_code: Any exception class, or an HTTP status
|
||||
code as an integer.
|
||||
"""
|
||||
exc_class: type[Exception]
|
||||
|
||||
if isinstance(exc_class_or_code, int):
|
||||
try:
|
||||
exc_class = default_exceptions[exc_class_or_code]
|
||||
except KeyError:
|
||||
raise ValueError(
|
||||
f"'{exc_class_or_code}' is not a recognized HTTP"
|
||||
" error code. Use a subclass of HTTPException with"
|
||||
" that code instead."
|
||||
) from None
|
||||
else:
|
||||
exc_class = exc_class_or_code
|
||||
|
||||
if isinstance(exc_class, Exception):
|
||||
raise TypeError(
|
||||
f"{exc_class!r} is an instance, not a class. Handlers"
|
||||
" can only be registered for Exception classes or HTTP"
|
||||
" error codes."
|
||||
)
|
||||
|
||||
if not issubclass(exc_class, Exception):
|
||||
raise ValueError(
|
||||
f"'{exc_class.__name__}' is not a subclass of Exception."
|
||||
" Handlers can only be registered for Exception classes"
|
||||
" or HTTP error codes."
|
||||
)
|
||||
|
||||
if issubclass(exc_class, HTTPException):
|
||||
return exc_class, exc_class.code
|
||||
else:
|
||||
return exc_class, None
|
||||
|
||||
|
||||
def _endpoint_from_view_func(view_func: ft.RouteCallable) -> str:
|
||||
"""Internal helper that returns the default endpoint for a given
|
||||
function. This is always the function name.
|
||||
"""
|
||||
assert view_func is not None, "expected view func if endpoint is not provided."
|
||||
return view_func.__name__
|
||||
|
||||
|
||||
def _path_is_relative_to(path: pathlib.PurePath, base: str) -> bool:
|
||||
# Path.is_relative_to doesn't exist until Python 3.9
|
||||
try:
|
||||
path.relative_to(base)
|
||||
return True
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
|
||||
def _find_package_path(import_name: str) -> str:
|
||||
"""Find the path that contains the package or module."""
|
||||
root_mod_name, _, _ = import_name.partition(".")
|
||||
|
||||
try:
|
||||
root_spec = importlib.util.find_spec(root_mod_name)
|
||||
|
||||
if root_spec is None:
|
||||
raise ValueError("not found")
|
||||
except (ImportError, ValueError):
|
||||
# ImportError: the machinery told us it does not exist
|
||||
# ValueError:
|
||||
# - the module name was invalid
|
||||
# - the module name is __main__
|
||||
# - we raised `ValueError` due to `root_spec` being `None`
|
||||
return os.getcwd()
|
||||
|
||||
if root_spec.submodule_search_locations:
|
||||
if root_spec.origin is None or root_spec.origin == "namespace":
|
||||
# namespace package
|
||||
package_spec = importlib.util.find_spec(import_name)
|
||||
|
||||
if package_spec is not None and package_spec.submodule_search_locations:
|
||||
# Pick the path in the namespace that contains the submodule.
|
||||
package_path = pathlib.Path(
|
||||
os.path.commonpath(package_spec.submodule_search_locations)
|
||||
)
|
||||
search_location = next(
|
||||
location
|
||||
for location in root_spec.submodule_search_locations
|
||||
if _path_is_relative_to(package_path, location)
|
||||
)
|
||||
else:
|
||||
# Pick the first path.
|
||||
search_location = root_spec.submodule_search_locations[0]
|
||||
|
||||
return os.path.dirname(search_location)
|
||||
else:
|
||||
# package with __init__.py
|
||||
return os.path.dirname(os.path.dirname(root_spec.origin))
|
||||
else:
|
||||
# module
|
||||
return os.path.dirname(root_spec.origin) # type: ignore[type-var, return-value]
|
||||
|
||||
|
||||
def find_package(import_name: str) -> tuple[str | None, str]:
|
||||
"""Find the prefix that a package is installed under, and the path
|
||||
that it would be imported from.
|
||||
|
||||
The prefix is the directory containing the standard directory
|
||||
hierarchy (lib, bin, etc.). If the package is not installed to the
|
||||
system (:attr:`sys.prefix`) or a virtualenv (``site-packages``),
|
||||
``None`` is returned.
|
||||
|
||||
The path is the entry in :attr:`sys.path` that contains the package
|
||||
for import. If the package is not installed, it's assumed that the
|
||||
package was imported from the current working directory.
|
||||
"""
|
||||
package_path = _find_package_path(import_name)
|
||||
py_prefix = os.path.abspath(sys.prefix)
|
||||
|
||||
# installed to the system
|
||||
if _path_is_relative_to(pathlib.PurePath(package_path), py_prefix):
|
||||
return py_prefix, package_path
|
||||
|
||||
site_parent, site_folder = os.path.split(package_path)
|
||||
|
||||
# installed to a virtualenv
|
||||
if site_folder.lower() == "site-packages":
|
||||
parent, folder = os.path.split(site_parent)
|
||||
|
||||
# Windows (prefix/lib/site-packages)
|
||||
if folder.lower() == "lib":
|
||||
return parent, package_path
|
||||
|
||||
# Unix (prefix/lib/pythonX.Y/site-packages)
|
||||
if os.path.basename(parent).lower() == "lib":
|
||||
return os.path.dirname(parent), package_path
|
||||
|
||||
# something else (prefix/site-packages)
|
||||
return site_parent, package_path
|
||||
|
||||
# not installed
|
||||
return None, package_path
|
||||
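# A small illustrative check (not part of the vendored module) of how error
# handler registrations are keyed: an integer HTTP code resolves to the
# matching werkzeug HTTPException subclass, while a plain exception class
# gets a ``None`` code, per Scaffold._get_exc_class_and_code above.
if __name__ == "__main__":
    from werkzeug.exceptions import NotFound

    assert Scaffold._get_exc_class_and_code(404) == (NotFound, 404)
    assert Scaffold._get_exc_class_and_code(KeyError) == (KeyError, None)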
@ -0,0 +1,379 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import typing as t
|
||||
from collections.abc import MutableMapping
|
||||
from datetime import datetime
|
||||
from datetime import timezone
|
||||
|
||||
from itsdangerous import BadSignature
|
||||
from itsdangerous import URLSafeTimedSerializer
|
||||
from werkzeug.datastructures import CallbackDict
|
||||
|
||||
from .json.tag import TaggedJSONSerializer
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
import typing_extensions as te
|
||||
|
||||
from .app import Flask
|
||||
from .wrappers import Request
|
||||
from .wrappers import Response
|
||||
|
||||
|
||||
# TODO generic when Python > 3.8
|
||||
class SessionMixin(MutableMapping): # type: ignore[type-arg]
|
||||
"""Expands a basic dictionary with session attributes."""
|
||||
|
||||
@property
|
||||
def permanent(self) -> bool:
|
||||
"""This reflects the ``'_permanent'`` key in the dict."""
|
||||
return self.get("_permanent", False)
|
||||
|
||||
@permanent.setter
|
||||
def permanent(self, value: bool) -> None:
|
||||
self["_permanent"] = bool(value)
|
||||
|
||||
#: Some implementations can detect whether a session is newly
|
||||
#: created, but that is not guaranteed. Use with caution. The mixin
|
||||
#: default is hard coded to ``False``.
|
||||
new = False
|
||||
|
||||
#: Some implementations can detect changes to the session and set
|
||||
#: this when that happens. The mixin default is hard coded to
|
||||
#: ``True``.
|
||||
modified = True
|
||||
|
||||
#: Some implementations can detect when session data is read or
|
||||
#: written and set this when that happens. The mixin default is hard
|
||||
#: coded to ``True``.
|
||||
accessed = True
|
||||
|
||||
|
||||
# TODO generic when Python > 3.8
|
||||
class SecureCookieSession(CallbackDict, SessionMixin): # type: ignore[type-arg]
|
||||
"""Base class for sessions based on signed cookies.
|
||||
|
||||
This session backend will set the :attr:`modified` and
|
||||
:attr:`accessed` attributes. It cannot reliably track whether a
|
||||
session is new (vs. empty), so :attr:`new` remains hard coded to
|
||||
``False``.
|
||||
"""
|
||||
|
||||
#: When data is changed, this is set to ``True``. Only the session
|
||||
#: dictionary itself is tracked; if the session contains mutable
|
||||
#: data (for example a nested dict) then this must be set to
|
||||
#: ``True`` manually when modifying that data. The session cookie
|
||||
#: will only be written to the response if this is ``True``.
|
||||
modified = False
|
||||
|
||||
#: When data is read or written, this is set to ``True``. Used by
|
||||
#: :class:`.SecureCookieSessionInterface` to add a ``Vary: Cookie``
|
||||
#: header, which allows caching proxies to cache different pages for
|
||||
#: different users.
|
||||
accessed = False
|
||||
|
||||
def __init__(self, initial: t.Any = None) -> None:
|
||||
def on_update(self: te.Self) -> None:
|
||||
self.modified = True
|
||||
self.accessed = True
|
||||
|
||||
super().__init__(initial, on_update)
|
||||
|
||||
def __getitem__(self, key: str) -> t.Any:
|
||||
self.accessed = True
|
||||
return super().__getitem__(key)
|
||||
|
||||
def get(self, key: str, default: t.Any = None) -> t.Any:
|
||||
self.accessed = True
|
||||
return super().get(key, default)
|
||||
|
||||
def setdefault(self, key: str, default: t.Any = None) -> t.Any:
|
||||
self.accessed = True
|
||||
return super().setdefault(key, default)
|
||||
|
||||
|
||||
class NullSession(SecureCookieSession):
|
||||
"""Class used to generate nicer error messages if sessions are not
|
||||
available. Will still allow read-only access to the empty session
|
||||
but fail on setting.
|
||||
"""
|
||||
|
||||
def _fail(self, *args: t.Any, **kwargs: t.Any) -> t.NoReturn:
|
||||
raise RuntimeError(
|
||||
"The session is unavailable because no secret "
|
||||
"key was set. Set the secret_key on the "
|
||||
"application to something unique and secret."
|
||||
)
|
||||
|
||||
__setitem__ = __delitem__ = clear = pop = popitem = update = setdefault = _fail # type: ignore # noqa: B950
|
||||
del _fail
|
||||
|
||||
|
||||
class SessionInterface:
|
||||
"""The basic interface you have to implement in order to replace the
|
||||
default session interface which uses werkzeug's securecookie
|
||||
implementation. The only methods you have to implement are
|
||||
:meth:`open_session` and :meth:`save_session`; the others have
|
||||
useful defaults which you don't need to change.
|
||||
|
||||
The session object returned by the :meth:`open_session` method has to
|
||||
provide a dictionary like interface plus the properties and methods
|
||||
from the :class:`SessionMixin`. We recommend just subclassing a dict
|
||||
and adding that mixin::
|
||||
|
||||
class Session(dict, SessionMixin):
|
||||
pass
|
||||
|
||||
If :meth:`open_session` returns ``None`` Flask will call into
|
||||
:meth:`make_null_session` to create a session that acts as replacement
|
||||
if the session support cannot work because some requirement is not
|
||||
fulfilled. The default :class:`NullSession` class that is created
|
||||
will complain that the secret key was not set.
|
||||
|
||||
To replace the session interface on an application all you have to do
|
||||
is to assign :attr:`flask.Flask.session_interface`::
|
||||
|
||||
app = Flask(__name__)
|
||||
app.session_interface = MySessionInterface()
|
||||
|
||||
Multiple requests with the same session may be sent and handled
|
||||
concurrently. When implementing a new session interface, consider
|
||||
whether reads or writes to the backing store must be synchronized.
|
||||
There is no guarantee on the order in which the session for each
|
||||
request is opened or saved; it will occur in the order that requests
|
||||
begin and end processing.
|
||||
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
|
||||
#: :meth:`make_null_session` will look here for the class that should
|
||||
#: be created when a null session is requested. Likewise the
|
||||
#: :meth:`is_null_session` method will perform a typecheck against
|
||||
#: this type.
|
||||
null_session_class = NullSession
|
||||
|
||||
#: A flag that indicates if the session interface is pickle based.
|
||||
#: This can be used by Flask extensions to make a decision in regards
|
||||
#: to how to deal with the session object.
|
||||
#:
|
||||
#: .. versionadded:: 0.10
|
||||
pickle_based = False
|
||||
|
||||
def make_null_session(self, app: Flask) -> NullSession:
|
||||
"""Creates a null session which acts as a replacement object if the
|
||||
real session support could not be loaded due to a configuration
|
||||
error. This mainly aids the user experience because the job of the
|
||||
null session is to still support lookups without complaining, while
modifications are answered with a helpful error message about what
|
||||
failed.
|
||||
|
||||
This creates an instance of :attr:`null_session_class` by default.
|
||||
"""
|
||||
return self.null_session_class()
|
||||
|
||||
def is_null_session(self, obj: object) -> bool:
|
||||
"""Checks if a given object is a null session. Null sessions are
|
||||
not asked to be saved.
|
||||
|
||||
This checks if the object is an instance of :attr:`null_session_class`
|
||||
by default.
|
||||
"""
|
||||
return isinstance(obj, self.null_session_class)
|
||||
|
||||
def get_cookie_name(self, app: Flask) -> str:
|
||||
"""The name of the session cookie. Uses``app.config["SESSION_COOKIE_NAME"]``."""
|
||||
return app.config["SESSION_COOKIE_NAME"] # type: ignore[no-any-return]
|
||||
|
||||
def get_cookie_domain(self, app: Flask) -> str | None:
|
||||
"""The value of the ``Domain`` parameter on the session cookie. If not set,
|
||||
browsers will only send the cookie to the exact domain it was set from.
|
||||
Otherwise, they will send it to any subdomain of the given value as well.
|
||||
|
||||
Uses the :data:`SESSION_COOKIE_DOMAIN` config.
|
||||
|
||||
.. versionchanged:: 2.3
|
||||
Not set by default, does not fall back to ``SERVER_NAME``.
|
||||
"""
|
||||
return app.config["SESSION_COOKIE_DOMAIN"] # type: ignore[no-any-return]
|
||||
|
||||
def get_cookie_path(self, app: Flask) -> str:
|
||||
"""Returns the path for which the cookie should be valid. The
|
||||
default implementation uses the value from the ``SESSION_COOKIE_PATH``
|
||||
config var if it's set, and falls back to ``APPLICATION_ROOT`` or
|
||||
uses ``/`` if it's ``None``.
|
||||
"""
|
||||
return app.config["SESSION_COOKIE_PATH"] or app.config["APPLICATION_ROOT"] # type: ignore[no-any-return]
|
||||
|
||||
def get_cookie_httponly(self, app: Flask) -> bool:
|
||||
"""Returns True if the session cookie should be httponly. This
|
||||
currently just returns the value of the ``SESSION_COOKIE_HTTPONLY``
|
||||
config var.
|
||||
"""
|
||||
return app.config["SESSION_COOKIE_HTTPONLY"] # type: ignore[no-any-return]
|
||||
|
||||
def get_cookie_secure(self, app: Flask) -> bool:
|
||||
"""Returns True if the cookie should be secure. This currently
|
||||
just returns the value of the ``SESSION_COOKIE_SECURE`` setting.
|
||||
"""
|
||||
return app.config["SESSION_COOKIE_SECURE"] # type: ignore[no-any-return]
|
||||
|
||||
def get_cookie_samesite(self, app: Flask) -> str | None:
|
||||
"""Return ``'Strict'`` or ``'Lax'`` if the cookie should use the
|
||||
``SameSite`` attribute. This currently just returns the value of
|
||||
the :data:`SESSION_COOKIE_SAMESITE` setting.
|
||||
"""
|
||||
return app.config["SESSION_COOKIE_SAMESITE"] # type: ignore[no-any-return]
|
||||
|
||||
def get_expiration_time(self, app: Flask, session: SessionMixin) -> datetime | None:
|
||||
"""A helper method that returns an expiration date for the session
|
||||
or ``None`` if the session is linked to the browser session. The
|
||||
default implementation returns now + the permanent session
|
||||
lifetime configured on the application.
|
||||
"""
|
||||
if session.permanent:
|
||||
return datetime.now(timezone.utc) + app.permanent_session_lifetime
|
||||
return None
|
||||
|
||||
def should_set_cookie(self, app: Flask, session: SessionMixin) -> bool:
|
||||
"""Used by session backends to determine if a ``Set-Cookie`` header
|
||||
should be set for this session cookie for this response. If the session
|
||||
has been modified, the cookie is set. If the session is permanent and
|
||||
the ``SESSION_REFRESH_EACH_REQUEST`` config is true, the cookie is
|
||||
always set.
|
||||
|
||||
This check is usually skipped if the session was deleted.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
|
||||
return session.modified or (
|
||||
session.permanent and app.config["SESSION_REFRESH_EACH_REQUEST"]
|
||||
)
|
||||
|
||||
def open_session(self, app: Flask, request: Request) -> SessionMixin | None:
|
||||
"""This is called at the beginning of each request, after
|
||||
pushing the request context, before matching the URL.
|
||||
|
||||
This must return an object which implements a dictionary-like
|
||||
interface as well as the :class:`SessionMixin` interface.
|
||||
|
||||
This will return ``None`` to indicate that loading failed in
|
||||
some way that is not immediately an error. The request
|
||||
context will fall back to using :meth:`make_null_session`
|
||||
in this case.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def save_session(
|
||||
self, app: Flask, session: SessionMixin, response: Response
|
||||
) -> None:
|
||||
"""This is called at the end of each request, after generating
|
||||
a response, before removing the request context. It is skipped
|
||||
if :meth:`is_null_session` returns ``True``.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
session_json_serializer = TaggedJSONSerializer()
|
||||
|
||||
|
||||
def _lazy_sha1(string: bytes = b"") -> t.Any:
|
||||
"""Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include
|
||||
SHA-1, in which case the import and use as a default would fail before the
|
||||
developer can configure something else.
|
||||
"""
|
||||
return hashlib.sha1(string)
|
||||
|
||||
|
||||
class SecureCookieSessionInterface(SessionInterface):
|
||||
"""The default session interface that stores sessions in signed cookies
|
||||
through the :mod:`itsdangerous` module.
|
||||
"""
|
||||
|
||||
#: the salt that should be applied on top of the secret key for the
#: signing of cookie-based sessions.
salt = "cookie-session"
#: the hash function to use for the signature. The default is sha1.
digest_method = staticmethod(_lazy_sha1)
#: the name of the itsdangerous supported key derivation. The default
#: is hmac.
key_derivation = "hmac"
#: A Python serializer for the payload. The default is a compact
#: JSON derived serializer with support for some extra Python types
#: such as datetime objects or tuples.
serializer = session_json_serializer
session_class = SecureCookieSession
|
||||
|
||||
def get_signing_serializer(self, app: Flask) -> URLSafeTimedSerializer | None:
|
||||
if not app.secret_key:
|
||||
return None
|
||||
signer_kwargs = dict(
|
||||
key_derivation=self.key_derivation, digest_method=self.digest_method
|
||||
)
|
||||
return URLSafeTimedSerializer(
|
||||
app.secret_key,
|
||||
salt=self.salt,
|
||||
serializer=self.serializer,
|
||||
signer_kwargs=signer_kwargs,
|
||||
)
|
||||
|
||||
def open_session(self, app: Flask, request: Request) -> SecureCookieSession | None:
|
||||
s = self.get_signing_serializer(app)
|
||||
if s is None:
|
||||
return None
|
||||
val = request.cookies.get(self.get_cookie_name(app))
|
||||
if not val:
|
||||
return self.session_class()
|
||||
max_age = int(app.permanent_session_lifetime.total_seconds())
|
||||
try:
|
||||
data = s.loads(val, max_age=max_age)
|
||||
return self.session_class(data)
|
||||
except BadSignature:
|
||||
return self.session_class()
|
||||
|
||||
def save_session(
|
||||
self, app: Flask, session: SessionMixin, response: Response
|
||||
) -> None:
|
||||
name = self.get_cookie_name(app)
|
||||
domain = self.get_cookie_domain(app)
|
||||
path = self.get_cookie_path(app)
|
||||
secure = self.get_cookie_secure(app)
|
||||
samesite = self.get_cookie_samesite(app)
|
||||
httponly = self.get_cookie_httponly(app)
|
||||
|
||||
# Add a "Vary: Cookie" header if the session was accessed at all.
|
||||
if session.accessed:
|
||||
response.vary.add("Cookie")
|
||||
|
||||
# If the session is modified to be empty, remove the cookie.
|
||||
# If the session is empty, return without setting the cookie.
|
||||
if not session:
|
||||
if session.modified:
|
||||
response.delete_cookie(
|
||||
name,
|
||||
domain=domain,
|
||||
path=path,
|
||||
secure=secure,
|
||||
samesite=samesite,
|
||||
httponly=httponly,
|
||||
)
|
||||
response.vary.add("Cookie")
|
||||
|
||||
return
|
||||
|
||||
if not self.should_set_cookie(app, session):
|
||||
return
|
||||
|
||||
expires = self.get_expiration_time(app, session)
|
||||
val = self.get_signing_serializer(app).dumps(dict(session)) # type: ignore
|
||||
response.set_cookie(
|
||||
name,
|
||||
val, # type: ignore
|
||||
expires=expires,
|
||||
httponly=httponly,
|
||||
domain=domain,
|
||||
path=path,
|
||||
secure=secure,
|
||||
samesite=samesite,
|
||||
)
|
||||
response.vary.add("Cookie")
|
||||
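# --- Illustrative sketch (not part of Flask itself) ------------------------
# A minimal server-side session interface built on the SessionInterface API
# shown above, written as if it lived outside this module (hence the flask
# imports). The in-memory dict store, the "MemorySessionInterface" name and
# the "sid" attribute are assumptions for demonstration only; a real backend
# would also need synchronization and expiry, as the SessionInterface
# docstring notes.
import uuid

from flask import Flask, Request, Response
from flask.sessions import SecureCookieSession, SessionInterface, SessionMixin


class MemorySessionInterface(SessionInterface):
    def __init__(self) -> None:
        self.store: dict[str, dict] = {}

    def open_session(self, app: Flask, request: Request) -> SessionMixin:
        sid = request.cookies.get(self.get_cookie_name(app))
        session = SecureCookieSession(self.store.get(sid, {}) if sid else {})
        session.sid = sid or uuid.uuid4().hex  # hypothetical attribute
        return session

    def save_session(
        self, app: Flask, session: SessionMixin, response: Response
    ) -> None:
        self.store[session.sid] = dict(session)
        response.set_cookie(
            self.get_cookie_name(app),
            session.sid,
            expires=self.get_expiration_time(app, session),
            httponly=self.get_cookie_httponly(app),
            domain=self.get_cookie_domain(app),
            path=self.get_cookie_path(app),
            secure=self.get_cookie_secure(app),
            samesite=self.get_cookie_samesite(app),
        )


app = Flask(__name__)
app.session_interface = MemorySessionInterface()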
@ -0,0 +1,17 @@
from __future__ import annotations

from blinker import Namespace

# This namespace is only for signals provided by Flask itself.
_signals = Namespace()

template_rendered = _signals.signal("template-rendered")
before_render_template = _signals.signal("before-render-template")
request_started = _signals.signal("request-started")
request_finished = _signals.signal("request-finished")
request_tearing_down = _signals.signal("request-tearing-down")
got_request_exception = _signals.signal("got-request-exception")
appcontext_tearing_down = _signals.signal("appcontext-tearing-down")
appcontext_pushed = _signals.signal("appcontext-pushed")
appcontext_popped = _signals.signal("appcontext-popped")
message_flashed = _signals.signal("message-flashed")
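# --- Illustrative sketch (not part of Flask itself) ------------------------
# Connecting a receiver to one of the signals defined above. The
# "log_template" name is made up for the example; the connect() API comes
# from blinker, and Flask sends template/context as keyword arguments.
from flask import Flask, template_rendered

app = Flask(__name__)


def log_template(sender, template, context, **extra):
    print(f"rendered {template.name!r} with {sorted(context)}")


template_rendered.connect(log_template, app)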
@ -0,0 +1,219 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
from jinja2 import BaseLoader
|
||||
from jinja2 import Environment as BaseEnvironment
|
||||
from jinja2 import Template
|
||||
from jinja2 import TemplateNotFound
|
||||
|
||||
from .globals import _cv_app
|
||||
from .globals import _cv_request
|
||||
from .globals import current_app
|
||||
from .globals import request
|
||||
from .helpers import stream_with_context
|
||||
from .signals import before_render_template
|
||||
from .signals import template_rendered
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from .app import Flask
|
||||
from .sansio.app import App
|
||||
from .sansio.scaffold import Scaffold
|
||||
|
||||
|
||||
def _default_template_ctx_processor() -> dict[str, t.Any]:
|
||||
"""Default template context processor. Injects `request`,
|
||||
`session` and `g`.
|
||||
"""
|
||||
appctx = _cv_app.get(None)
|
||||
reqctx = _cv_request.get(None)
|
||||
rv: dict[str, t.Any] = {}
|
||||
if appctx is not None:
|
||||
rv["g"] = appctx.g
|
||||
if reqctx is not None:
|
||||
rv["request"] = reqctx.request
|
||||
rv["session"] = reqctx.session
|
||||
return rv
|
||||
|
||||
|
||||
class Environment(BaseEnvironment):
|
||||
"""Works like a regular Jinja2 environment but has some additional
|
||||
knowledge of how Flask's blueprint works so that it can prepend the
|
||||
name of the blueprint to referenced templates if necessary.
|
||||
"""
|
||||
|
||||
def __init__(self, app: App, **options: t.Any) -> None:
|
||||
if "loader" not in options:
|
||||
options["loader"] = app.create_global_jinja_loader()
|
||||
BaseEnvironment.__init__(self, **options)
|
||||
self.app = app
|
||||
|
||||
|
||||
class DispatchingJinjaLoader(BaseLoader):
|
||||
"""A loader that looks for templates in the application and all
|
||||
the blueprint folders.
|
||||
"""
|
||||
|
||||
def __init__(self, app: App) -> None:
|
||||
self.app = app
|
||||
|
||||
def get_source(
|
||||
self, environment: BaseEnvironment, template: str
|
||||
) -> tuple[str, str | None, t.Callable[[], bool] | None]:
|
||||
if self.app.config["EXPLAIN_TEMPLATE_LOADING"]:
|
||||
return self._get_source_explained(environment, template)
|
||||
return self._get_source_fast(environment, template)
|
||||
|
||||
def _get_source_explained(
|
||||
self, environment: BaseEnvironment, template: str
|
||||
) -> tuple[str, str | None, t.Callable[[], bool] | None]:
|
||||
attempts = []
|
||||
rv: tuple[str, str | None, t.Callable[[], bool] | None] | None
|
||||
trv: None | (tuple[str, str | None, t.Callable[[], bool] | None]) = None
|
||||
|
||||
for srcobj, loader in self._iter_loaders(template):
|
||||
try:
|
||||
rv = loader.get_source(environment, template)
|
||||
if trv is None:
|
||||
trv = rv
|
||||
except TemplateNotFound:
|
||||
rv = None
|
||||
attempts.append((loader, srcobj, rv))
|
||||
|
||||
from .debughelpers import explain_template_loading_attempts
|
||||
|
||||
explain_template_loading_attempts(self.app, template, attempts)
|
||||
|
||||
if trv is not None:
|
||||
return trv
|
||||
raise TemplateNotFound(template)
|
||||
|
||||
def _get_source_fast(
|
||||
self, environment: BaseEnvironment, template: str
|
||||
) -> tuple[str, str | None, t.Callable[[], bool] | None]:
|
||||
for _srcobj, loader in self._iter_loaders(template):
|
||||
try:
|
||||
return loader.get_source(environment, template)
|
||||
except TemplateNotFound:
|
||||
continue
|
||||
raise TemplateNotFound(template)
|
||||
|
||||
def _iter_loaders(self, template: str) -> t.Iterator[tuple[Scaffold, BaseLoader]]:
|
||||
loader = self.app.jinja_loader
|
||||
if loader is not None:
|
||||
yield self.app, loader
|
||||
|
||||
for blueprint in self.app.iter_blueprints():
|
||||
loader = blueprint.jinja_loader
|
||||
if loader is not None:
|
||||
yield blueprint, loader
|
||||
|
||||
def list_templates(self) -> list[str]:
|
||||
result = set()
|
||||
loader = self.app.jinja_loader
|
||||
if loader is not None:
|
||||
result.update(loader.list_templates())
|
||||
|
||||
for blueprint in self.app.iter_blueprints():
|
||||
loader = blueprint.jinja_loader
|
||||
if loader is not None:
|
||||
for template in loader.list_templates():
|
||||
result.add(template)
|
||||
|
||||
return list(result)
|
||||
|
||||
|
||||
def _render(app: Flask, template: Template, context: dict[str, t.Any]) -> str:
|
||||
app.update_template_context(context)
|
||||
before_render_template.send(
|
||||
app, _async_wrapper=app.ensure_sync, template=template, context=context
|
||||
)
|
||||
rv = template.render(context)
|
||||
template_rendered.send(
|
||||
app, _async_wrapper=app.ensure_sync, template=template, context=context
|
||||
)
|
||||
return rv
|
||||
|
||||
|
||||
def render_template(
|
||||
template_name_or_list: str | Template | list[str | Template],
|
||||
**context: t.Any,
|
||||
) -> str:
|
||||
"""Render a template by name with the given context.
|
||||
|
||||
:param template_name_or_list: The name of the template to render. If
|
||||
a list is given, the first name to exist will be rendered.
|
||||
:param context: The variables to make available in the template.
|
||||
"""
|
||||
app = current_app._get_current_object() # type: ignore[attr-defined]
|
||||
template = app.jinja_env.get_or_select_template(template_name_or_list)
|
||||
return _render(app, template, context)
|
||||
|
||||
|
||||
def render_template_string(source: str, **context: t.Any) -> str:
|
||||
"""Render a template from the given source string with the given
|
||||
context.
|
||||
|
||||
:param source: The source code of the template to render.
|
||||
:param context: The variables to make available in the template.
|
||||
"""
|
||||
app = current_app._get_current_object() # type: ignore[attr-defined]
|
||||
template = app.jinja_env.from_string(source)
|
||||
return _render(app, template, context)
|
||||
|
||||
|
||||
def _stream(
|
||||
app: Flask, template: Template, context: dict[str, t.Any]
|
||||
) -> t.Iterator[str]:
|
||||
app.update_template_context(context)
|
||||
before_render_template.send(
|
||||
app, _async_wrapper=app.ensure_sync, template=template, context=context
|
||||
)
|
||||
|
||||
def generate() -> t.Iterator[str]:
|
||||
yield from template.generate(context)
|
||||
template_rendered.send(
|
||||
app, _async_wrapper=app.ensure_sync, template=template, context=context
|
||||
)
|
||||
|
||||
rv = generate()
|
||||
|
||||
# If a request context is active, keep it while generating.
|
||||
if request:
|
||||
rv = stream_with_context(rv)
|
||||
|
||||
return rv
|
||||
|
||||
|
||||
def stream_template(
|
||||
template_name_or_list: str | Template | list[str | Template],
|
||||
**context: t.Any,
|
||||
) -> t.Iterator[str]:
|
||||
"""Render a template by name with the given context as a stream.
|
||||
This returns an iterator of strings, which can be used as a
|
||||
streaming response from a view.
|
||||
|
||||
:param template_name_or_list: The name of the template to render. If
|
||||
a list is given, the first name to exist will be rendered.
|
||||
:param context: The variables to make available in the template.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
app = current_app._get_current_object() # type: ignore[attr-defined]
|
||||
template = app.jinja_env.get_or_select_template(template_name_or_list)
|
||||
return _stream(app, template, context)
|
||||
|
||||
|
||||
def stream_template_string(source: str, **context: t.Any) -> t.Iterator[str]:
|
||||
"""Render a template from the given source string with the given
|
||||
context as a stream. This returns an iterator of strings, which can
|
||||
be used as a streaming response from a view.
|
||||
|
||||
:param source: The source code of the template to render.
|
||||
:param context: The variables to make available in the template.
|
||||
|
||||
.. versionadded:: 2.2
|
||||
"""
|
||||
app = current_app._get_current_object() # type: ignore[attr-defined]
|
||||
template = app.jinja_env.from_string(source)
|
||||
return _stream(app, template, context)
|
||||
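# --- Illustrative sketch (not part of Flask itself) ------------------------
# Returning the iterator from stream_template() directly from a view, as its
# docstring above describes; Flask streams each rendered chunk to the client.
# The "/timeline" route and "timeline.html" template are assumptions for the
# example.
from flask import Flask, stream_template

app = Flask(__name__)


@app.route("/timeline")
def timeline():
    return stream_template("timeline.html", items=range(100))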
@ -0,0 +1,298 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib.metadata
|
||||
import typing as t
|
||||
from contextlib import contextmanager
|
||||
from contextlib import ExitStack
|
||||
from copy import copy
|
||||
from types import TracebackType
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
import werkzeug.test
|
||||
from click.testing import CliRunner
|
||||
from werkzeug.test import Client
|
||||
from werkzeug.wrappers import Request as BaseRequest
|
||||
|
||||
from .cli import ScriptInfo
|
||||
from .sessions import SessionMixin
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from _typeshed.wsgi import WSGIEnvironment
|
||||
from werkzeug.test import TestResponse
|
||||
|
||||
from .app import Flask
|
||||
|
||||
|
||||
class EnvironBuilder(werkzeug.test.EnvironBuilder):
|
||||
"""An :class:`~werkzeug.test.EnvironBuilder`, that takes defaults from the
|
||||
application.
|
||||
|
||||
:param app: The Flask application to configure the environment from.
|
||||
:param path: URL path being requested.
|
||||
:param base_url: Base URL where the app is being served, which
|
||||
``path`` is relative to. If not given, built from
|
||||
:data:`PREFERRED_URL_SCHEME`, ``subdomain``,
|
||||
:data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`.
|
||||
:param subdomain: Subdomain name to append to :data:`SERVER_NAME`.
|
||||
:param url_scheme: Scheme to use instead of
|
||||
:data:`PREFERRED_URL_SCHEME`.
|
||||
:param json: If given, this is serialized as JSON and passed as
|
||||
``data``. Also defaults ``content_type`` to
|
||||
``application/json``.
|
||||
:param args: other positional arguments passed to
|
||||
:class:`~werkzeug.test.EnvironBuilder`.
|
||||
:param kwargs: other keyword arguments passed to
|
||||
:class:`~werkzeug.test.EnvironBuilder`.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
app: Flask,
|
||||
path: str = "/",
|
||||
base_url: str | None = None,
|
||||
subdomain: str | None = None,
|
||||
url_scheme: str | None = None,
|
||||
*args: t.Any,
|
||||
**kwargs: t.Any,
|
||||
) -> None:
|
||||
assert not (base_url or subdomain or url_scheme) or (
|
||||
base_url is not None
|
||||
) != bool(
|
||||
subdomain or url_scheme
|
||||
), 'Cannot pass "subdomain" or "url_scheme" with "base_url".'
|
||||
|
||||
if base_url is None:
|
||||
http_host = app.config.get("SERVER_NAME") or "localhost"
|
||||
app_root = app.config["APPLICATION_ROOT"]
|
||||
|
||||
if subdomain:
|
||||
http_host = f"{subdomain}.{http_host}"
|
||||
|
||||
if url_scheme is None:
|
||||
url_scheme = app.config["PREFERRED_URL_SCHEME"]
|
||||
|
||||
url = urlsplit(path)
|
||||
base_url = (
|
||||
f"{url.scheme or url_scheme}://{url.netloc or http_host}"
|
||||
f"/{app_root.lstrip('/')}"
|
||||
)
|
||||
path = url.path
|
||||
|
||||
if url.query:
|
||||
sep = b"?" if isinstance(url.query, bytes) else "?"
|
||||
path += sep + url.query
|
||||
|
||||
self.app = app
|
||||
super().__init__(path, base_url, *args, **kwargs)
|
||||
|
||||
def json_dumps(self, obj: t.Any, **kwargs: t.Any) -> str: # type: ignore
|
||||
"""Serialize ``obj`` to a JSON-formatted string.
|
||||
|
||||
The serialization will be configured according to the config associated
|
||||
with this EnvironBuilder's ``app``.
|
||||
"""
|
||||
return self.app.json.dumps(obj, **kwargs)
|
||||
|
||||
|
||||
_werkzeug_version = ""
|
||||
|
||||
|
||||
def _get_werkzeug_version() -> str:
|
||||
global _werkzeug_version
|
||||
|
||||
if not _werkzeug_version:
|
||||
_werkzeug_version = importlib.metadata.version("werkzeug")
|
||||
|
||||
return _werkzeug_version
|
||||
|
||||
|
||||
class FlaskClient(Client):
|
||||
"""Works like a regular Werkzeug test client but has knowledge about
|
||||
Flask's contexts to defer the cleanup of the request context until
|
||||
the end of a ``with`` block. For general information about how to
|
||||
use this class refer to :class:`werkzeug.test.Client`.
|
||||
|
||||
.. versionchanged:: 0.12
|
||||
``app.test_client()`` includes a preset default environment, which can
be customized after instantiation via ``client.environ_base``.
|
||||
|
||||
Basic usage is outlined in the :doc:`/testing` chapter.
|
||||
"""
|
||||
|
||||
application: Flask
|
||||
|
||||
def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.preserve_context = False
|
||||
self._new_contexts: list[t.ContextManager[t.Any]] = []
|
||||
self._context_stack = ExitStack()
|
||||
self.environ_base = {
|
||||
"REMOTE_ADDR": "127.0.0.1",
|
||||
"HTTP_USER_AGENT": f"Werkzeug/{_get_werkzeug_version()}",
|
||||
}
|
||||
|
||||
@contextmanager
|
||||
def session_transaction(
|
||||
self, *args: t.Any, **kwargs: t.Any
|
||||
) -> t.Iterator[SessionMixin]:
|
||||
"""When used in combination with a ``with`` statement this opens a
|
||||
session transaction. This can be used to modify the session that
|
||||
the test client uses. Once the ``with`` block is left the session is
|
||||
stored back.
|
||||
|
||||
::
|
||||
|
||||
with client.session_transaction() as session:
|
||||
session['value'] = 42
|
||||
|
||||
Internally this is implemented by going through a temporary test
request context, and since session handling could depend on request
variables, this function accepts the same arguments as
:meth:`~flask.Flask.test_request_context`, which are passed through
directly.
|
||||
"""
|
||||
if self._cookies is None:
|
||||
raise TypeError(
|
||||
"Cookies are disabled. Create a client with 'use_cookies=True'."
|
||||
)
|
||||
|
||||
app = self.application
|
||||
ctx = app.test_request_context(*args, **kwargs)
|
||||
self._add_cookies_to_wsgi(ctx.request.environ)
|
||||
|
||||
with ctx:
|
||||
sess = app.session_interface.open_session(app, ctx.request)
|
||||
|
||||
if sess is None:
|
||||
raise RuntimeError("Session backend did not open a session.")
|
||||
|
||||
yield sess
|
||||
resp = app.response_class()
|
||||
|
||||
if app.session_interface.is_null_session(sess):
|
||||
return
|
||||
|
||||
with ctx:
|
||||
app.session_interface.save_session(app, sess, resp)
|
||||
|
||||
self._update_cookies_from_response(
|
||||
ctx.request.host.partition(":")[0],
|
||||
ctx.request.path,
|
||||
resp.headers.getlist("Set-Cookie"),
|
||||
)
|
||||
|
||||
def _copy_environ(self, other: WSGIEnvironment) -> WSGIEnvironment:
|
||||
out = {**self.environ_base, **other}
|
||||
|
||||
if self.preserve_context:
|
||||
out["werkzeug.debug.preserve_context"] = self._new_contexts.append
|
||||
|
||||
return out
|
||||
|
||||
def _request_from_builder_args(
|
||||
self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any]
|
||||
) -> BaseRequest:
|
||||
kwargs["environ_base"] = self._copy_environ(kwargs.get("environ_base", {}))
|
||||
builder = EnvironBuilder(self.application, *args, **kwargs)
|
||||
|
||||
try:
|
||||
return builder.get_request()
|
||||
finally:
|
||||
builder.close()
|
||||
|
||||
def open(
|
||||
self,
|
||||
*args: t.Any,
|
||||
buffered: bool = False,
|
||||
follow_redirects: bool = False,
|
||||
**kwargs: t.Any,
|
||||
) -> TestResponse:
|
||||
if args and isinstance(
|
||||
args[0], (werkzeug.test.EnvironBuilder, dict, BaseRequest)
|
||||
):
|
||||
if isinstance(args[0], werkzeug.test.EnvironBuilder):
|
||||
builder = copy(args[0])
|
||||
builder.environ_base = self._copy_environ(builder.environ_base or {}) # type: ignore[arg-type]
|
||||
request = builder.get_request()
|
||||
elif isinstance(args[0], dict):
|
||||
request = EnvironBuilder.from_environ(
|
||||
args[0], app=self.application, environ_base=self._copy_environ({})
|
||||
).get_request()
|
||||
else:
|
||||
# isinstance(args[0], BaseRequest)
|
||||
request = copy(args[0])
|
||||
request.environ = self._copy_environ(request.environ)
|
||||
else:
|
||||
# request is None
|
||||
request = self._request_from_builder_args(args, kwargs)
|
||||
|
||||
# Pop any previously preserved contexts. This prevents contexts
|
||||
# from being preserved across redirects or multiple requests
|
||||
# within a single block.
|
||||
self._context_stack.close()
|
||||
|
||||
response = super().open(
|
||||
request,
|
||||
buffered=buffered,
|
||||
follow_redirects=follow_redirects,
|
||||
)
|
||||
response.json_module = self.application.json # type: ignore[assignment]
|
||||
|
||||
# Re-push contexts that were preserved during the request.
|
||||
while self._new_contexts:
|
||||
cm = self._new_contexts.pop()
|
||||
self._context_stack.enter_context(cm)
|
||||
|
||||
return response
|
||||
|
||||
def __enter__(self) -> FlaskClient:
|
||||
if self.preserve_context:
|
||||
raise RuntimeError("Cannot nest client invocations")
|
||||
self.preserve_context = True
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type | None,
|
||||
exc_value: BaseException | None,
|
||||
tb: TracebackType | None,
|
||||
) -> None:
|
||||
self.preserve_context = False
|
||||
self._context_stack.close()
|
||||
|
||||
|
||||
class FlaskCliRunner(CliRunner):
|
||||
"""A :class:`~click.testing.CliRunner` for testing a Flask app's
|
||||
CLI commands. Typically created using
|
||||
:meth:`~flask.Flask.test_cli_runner`. See :ref:`testing-cli`.
|
||||
"""
|
||||
|
||||
def __init__(self, app: Flask, **kwargs: t.Any) -> None:
|
||||
self.app = app
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def invoke( # type: ignore
|
||||
self, cli: t.Any = None, args: t.Any = None, **kwargs: t.Any
|
||||
) -> t.Any:
|
||||
"""Invokes a CLI command in an isolated environment. See
|
||||
:meth:`CliRunner.invoke <click.testing.CliRunner.invoke>` for
|
||||
full method documentation. See :ref:`testing-cli` for examples.
|
||||
|
||||
If the ``obj`` argument is not given, passes an instance of
|
||||
:class:`~flask.cli.ScriptInfo` that knows how to load the Flask
|
||||
app being tested.
|
||||
|
||||
:param cli: Command object to invoke. Default is the app's
|
||||
:attr:`~flask.app.Flask.cli` group.
|
||||
:param args: List of strings to invoke the command with.
|
||||
|
||||
:return: a :class:`~click.testing.Result` object.
|
||||
"""
|
||||
if cli is None:
|
||||
cli = self.app.cli
|
||||
|
||||
if "obj" not in kwargs:
|
||||
kwargs["obj"] = ScriptInfo(create_app=lambda: self.app)
|
||||
|
||||
return super().invoke(cli, args, **kwargs)
|
||||
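# --- Illustrative sketch (not part of Flask itself) ------------------------
# Exercising FlaskClient.session_transaction() from above: pre-populate the
# session, then assert a view sees it. The tiny app and the "user" key are
# assumptions for the example; any app with a secret key would do.
from flask import Flask, session

app = Flask(__name__)
app.secret_key = "test-only"


@app.get("/whoami")
def whoami():
    return session.get("user", "anonymous")


client = app.test_client()

with client.session_transaction() as sess:
    sess["user"] = "alice"

assert client.get("/whoami").text == "alice"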
@ -0,0 +1,90 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from _typeshed.wsgi import WSGIApplication # noqa: F401
|
||||
from werkzeug.datastructures import Headers # noqa: F401
|
||||
from werkzeug.sansio.response import Response # noqa: F401
|
||||
|
||||
# The possible types that are directly convertible or are a Response object.
|
||||
ResponseValue = t.Union[
|
||||
"Response",
|
||||
str,
|
||||
bytes,
|
||||
t.List[t.Any],
|
||||
# Only dict is actually accepted, but Mapping allows for TypedDict.
|
||||
t.Mapping[str, t.Any],
|
||||
t.Iterator[str],
|
||||
t.Iterator[bytes],
|
||||
]
|
||||
|
||||
# the possible types for an individual HTTP header
|
||||
# This should be a Union, but mypy doesn't pass unless it's a TypeVar.
|
||||
HeaderValue = t.Union[str, t.List[str], t.Tuple[str, ...]]
|
||||
|
||||
# the possible types for HTTP headers
|
||||
HeadersValue = t.Union[
|
||||
"Headers",
|
||||
t.Mapping[str, HeaderValue],
|
||||
t.Sequence[t.Tuple[str, HeaderValue]],
|
||||
]
|
||||
|
||||
# The possible types returned by a route function.
|
||||
ResponseReturnValue = t.Union[
|
||||
ResponseValue,
|
||||
t.Tuple[ResponseValue, HeadersValue],
|
||||
t.Tuple[ResponseValue, int],
|
||||
t.Tuple[ResponseValue, int, HeadersValue],
|
||||
"WSGIApplication",
|
||||
]
|
||||
|
||||
# Allow any subclass of werkzeug.Response, such as the one from Flask,
|
||||
# as a callback argument. Using werkzeug.Response directly makes a
|
||||
# callback annotated with flask.Response fail type checking.
|
||||
ResponseClass = t.TypeVar("ResponseClass", bound="Response")
|
||||
|
||||
AppOrBlueprintKey = t.Optional[str] # The App key is None, whereas blueprints are named
|
||||
AfterRequestCallable = t.Union[
|
||||
t.Callable[[ResponseClass], ResponseClass],
|
||||
t.Callable[[ResponseClass], t.Awaitable[ResponseClass]],
|
||||
]
|
||||
BeforeFirstRequestCallable = t.Union[
|
||||
t.Callable[[], None], t.Callable[[], t.Awaitable[None]]
|
||||
]
|
||||
BeforeRequestCallable = t.Union[
|
||||
t.Callable[[], t.Optional[ResponseReturnValue]],
|
||||
t.Callable[[], t.Awaitable[t.Optional[ResponseReturnValue]]],
|
||||
]
|
||||
ShellContextProcessorCallable = t.Callable[[], t.Dict[str, t.Any]]
|
||||
TeardownCallable = t.Union[
|
||||
t.Callable[[t.Optional[BaseException]], None],
|
||||
t.Callable[[t.Optional[BaseException]], t.Awaitable[None]],
|
||||
]
|
||||
TemplateContextProcessorCallable = t.Union[
|
||||
t.Callable[[], t.Dict[str, t.Any]],
|
||||
t.Callable[[], t.Awaitable[t.Dict[str, t.Any]]],
|
||||
]
|
||||
TemplateFilterCallable = t.Callable[..., t.Any]
|
||||
TemplateGlobalCallable = t.Callable[..., t.Any]
|
||||
TemplateTestCallable = t.Callable[..., bool]
|
||||
URLDefaultCallable = t.Callable[[str, t.Dict[str, t.Any]], None]
|
||||
URLValuePreprocessorCallable = t.Callable[
|
||||
[t.Optional[str], t.Optional[t.Dict[str, t.Any]]], None
|
||||
]
|
||||
|
||||
# This should take Exception, but that either breaks typing the argument
|
||||
# with a specific exception, or decorating multiple times with different
|
||||
# exceptions (and using a union type on the argument).
|
||||
# https://github.com/pallets/flask/issues/4095
|
||||
# https://github.com/pallets/flask/issues/4295
|
||||
# https://github.com/pallets/flask/issues/4297
|
||||
ErrorHandlerCallable = t.Union[
|
||||
t.Callable[[t.Any], ResponseReturnValue],
|
||||
t.Callable[[t.Any], t.Awaitable[ResponseReturnValue]],
|
||||
]
|
||||
|
||||
RouteCallable = t.Union[
|
||||
t.Callable[..., ResponseReturnValue],
|
||||
t.Callable[..., t.Awaitable[ResponseReturnValue]],
|
||||
]
|
||||
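# --- Illustrative sketch (not part of Flask itself) ------------------------
# Annotating a view with ResponseReturnValue and returning the
# (body, status, headers) tuple form listed above. The "/ping" route is an
# assumption for the example; dict bodies are serialized to JSON by Flask.
from flask import Flask
from flask.typing import ResponseReturnValue

app = Flask(__name__)


@app.get("/ping")
def ping() -> ResponseReturnValue:
    return {"ok": True}, 200, {"Cache-Control": "no-store"}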
@ -0,0 +1,191 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
from . import typing as ft
|
||||
from .globals import current_app
|
||||
from .globals import request
|
||||
|
||||
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
||||
|
||||
http_method_funcs = frozenset(
|
||||
["get", "post", "head", "options", "delete", "put", "trace", "patch"]
|
||||
)
|
||||
|
||||
|
||||
class View:
|
||||
"""Subclass this class and override :meth:`dispatch_request` to
|
||||
create a generic class-based view. Call :meth:`as_view` to create a
|
||||
view function that creates an instance of the class with the given
|
||||
arguments and calls its ``dispatch_request`` method with any URL
|
||||
variables.
|
||||
|
||||
See :doc:`views` for a detailed guide.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Hello(View):
|
||||
init_every_request = False
|
||||
|
||||
def dispatch_request(self, name):
|
||||
return f"Hello, {name}!"
|
||||
|
||||
app.add_url_rule(
|
||||
"/hello/<name>", view_func=Hello.as_view("hello")
|
||||
)
|
||||
|
||||
Set :attr:`methods` on the class to change what methods the view
|
||||
accepts.
|
||||
|
||||
Set :attr:`decorators` on the class to apply a list of decorators to
|
||||
the generated view function. Decorators applied to the class itself
|
||||
will not be applied to the generated view function!
|
||||
|
||||
Set :attr:`init_every_request` to ``False`` for efficiency, unless
|
||||
you need to store request-global data on ``self``.
|
||||
"""
|
||||
|
||||
#: The methods this view is registered for. Uses the same default
|
||||
#: (``["GET", "HEAD", "OPTIONS"]``) as ``route`` and
|
||||
#: ``add_url_rule`` by default.
|
||||
methods: t.ClassVar[t.Collection[str] | None] = None
|
||||
|
||||
#: Control whether the ``OPTIONS`` method is handled automatically.
|
||||
#: Uses the same default (``True``) as ``route`` and
|
||||
#: ``add_url_rule`` by default.
|
||||
provide_automatic_options: t.ClassVar[bool | None] = None
|
||||
|
||||
#: A list of decorators to apply, in order, to the generated view
|
||||
#: function. Remember that ``@decorator`` syntax is applied bottom
|
||||
#: to top, so the first decorator in the list would be the bottom
|
||||
#: decorator.
|
||||
#:
|
||||
#: .. versionadded:: 0.8
|
||||
decorators: t.ClassVar[list[t.Callable[[F], F]]] = []
|
||||
|
||||
#: Create a new instance of this view class for every request by
|
||||
#: default. If a view subclass sets this to ``False``, the same
|
||||
#: instance is used for every request.
|
||||
#:
|
||||
#: A single instance is more efficient, especially if complex setup
|
||||
#: is done during init. However, storing data on ``self`` is no
|
||||
#: longer safe across requests, and :data:`~flask.g` should be used
|
||||
#: instead.
|
||||
#:
|
||||
#: .. versionadded:: 2.2
|
||||
init_every_request: t.ClassVar[bool] = True
|
||||
|
||||
def dispatch_request(self) -> ft.ResponseReturnValue:
|
||||
"""The actual view function behavior. Subclasses must override
|
||||
this and return a valid response. Any variables from the URL
|
||||
rule are passed as keyword arguments.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
def as_view(
|
||||
cls, name: str, *class_args: t.Any, **class_kwargs: t.Any
|
||||
) -> ft.RouteCallable:
|
||||
"""Convert the class into a view function that can be registered
|
||||
for a route.
|
||||
|
||||
By default, the generated view will create a new instance of the
|
||||
view class for every request and call its
|
||||
:meth:`dispatch_request` method. If the view class sets
|
||||
:attr:`init_every_request` to ``False``, the same instance will
|
||||
be used for every request.
|
||||
|
||||
Except for ``name``, all other arguments passed to this method
|
||||
are forwarded to the view class ``__init__`` method.
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
Added the ``init_every_request`` class attribute.
|
||||
"""
|
||||
if cls.init_every_request:
|
||||
|
||||
def view(**kwargs: t.Any) -> ft.ResponseReturnValue:
|
||||
self = view.view_class( # type: ignore[attr-defined]
|
||||
*class_args, **class_kwargs
|
||||
)
|
||||
return current_app.ensure_sync(self.dispatch_request)(**kwargs) # type: ignore[no-any-return]
|
||||
|
||||
else:
|
||||
self = cls(*class_args, **class_kwargs)
|
||||
|
||||
def view(**kwargs: t.Any) -> ft.ResponseReturnValue:
|
||||
return current_app.ensure_sync(self.dispatch_request)(**kwargs) # type: ignore[no-any-return]
|
||||
|
||||
if cls.decorators:
|
||||
view.__name__ = name
|
||||
view.__module__ = cls.__module__
|
||||
for decorator in cls.decorators:
|
||||
view = decorator(view)
|
||||
|
||||
# We attach the view class to the view function for two reasons:
# first, it allows us to easily figure out what class-based view this
# function came from; second, it's used for instantiating the view
# class, so you can replace it with something else for testing and
# debugging purposes.
|
||||
view.view_class = cls # type: ignore
|
||||
view.__name__ = name
|
||||
view.__doc__ = cls.__doc__
|
||||
view.__module__ = cls.__module__
|
||||
view.methods = cls.methods # type: ignore
|
||||
view.provide_automatic_options = cls.provide_automatic_options # type: ignore
|
||||
return view
|
||||
|
||||
|
||||
class MethodView(View):
|
||||
"""Dispatches request methods to the corresponding instance methods.
|
||||
For example, if you implement a ``get`` method, it will be used to
|
||||
handle ``GET`` requests.
|
||||
|
||||
This can be useful for defining a REST API.
|
||||
|
||||
:attr:`methods` is automatically set based on the methods defined on
|
||||
the class.
|
||||
|
||||
See :doc:`views` for a detailed guide.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class CounterAPI(MethodView):
|
||||
def get(self):
|
||||
return str(session.get("counter", 0))
|
||||
|
||||
def post(self):
|
||||
session["counter"] = session.get("counter", 0) + 1
|
||||
return redirect(url_for("counter"))
|
||||
|
||||
app.add_url_rule(
|
||||
"/counter", view_func=CounterAPI.as_view("counter")
|
||||
)
|
||||
"""
|
||||
|
||||
def __init_subclass__(cls, **kwargs: t.Any) -> None:
|
||||
super().__init_subclass__(**kwargs)
|
||||
|
||||
if "methods" not in cls.__dict__:
|
||||
methods = set()
|
||||
|
||||
for base in cls.__bases__:
|
||||
if getattr(base, "methods", None):
|
||||
methods.update(base.methods) # type: ignore[attr-defined]
|
||||
|
||||
for key in http_method_funcs:
|
||||
if hasattr(cls, key):
|
||||
methods.add(key.upper())
|
||||
|
||||
if methods:
|
||||
cls.methods = methods
|
||||
|
||||
def dispatch_request(self, **kwargs: t.Any) -> ft.ResponseReturnValue:
|
||||
meth = getattr(self, request.method.lower(), None)
|
||||
|
||||
# If the request method is HEAD and we don't have a handler for it
|
||||
# retry with GET.
|
||||
if meth is None and request.method == "HEAD":
|
||||
meth = getattr(self, "get", None)
|
||||
|
||||
assert meth is not None, f"Unimplemented method {request.method!r}"
|
||||
return current_app.ensure_sync(meth)(**kwargs) # type: ignore[no-any-return]
|
||||
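# --- Illustrative sketch (not part of Flask itself) ------------------------
# Passing constructor arguments through View.as_view(), as its docstring
# above describes. The "ItemView" class and its items list are assumptions
# for the example; with init_every_request = False one instance serves every
# request.
from flask import Flask
from flask.views import View

app = Flask(__name__)


class ItemView(View):
    init_every_request = False

    def __init__(self, items):
        # Extra as_view() arguments are forwarded here.
        self.items = items

    def dispatch_request(self):
        return "\n".join(self.items)


app.add_url_rule("/items", view_func=ItemView.as_view("items", ["apple", "pear"]))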
@ -0,0 +1,174 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
from werkzeug.exceptions import BadRequest
|
||||
from werkzeug.exceptions import HTTPException
|
||||
from werkzeug.wrappers import Request as RequestBase
|
||||
from werkzeug.wrappers import Response as ResponseBase
|
||||
|
||||
from . import json
|
||||
from .globals import current_app
|
||||
from .helpers import _split_blueprint_path
|
||||
|
||||
if t.TYPE_CHECKING: # pragma: no cover
|
||||
from werkzeug.routing import Rule
|
||||
|
||||
|
||||
class Request(RequestBase):
|
||||
"""The request object used by default in Flask. Remembers the
|
||||
matched endpoint and view arguments.
|
||||
|
||||
It is what ends up as :class:`~flask.request`. If you want to replace
|
||||
the request object used you can subclass this and set
|
||||
:attr:`~flask.Flask.request_class` to your subclass.
|
||||
|
||||
The request object is a :class:`~werkzeug.wrappers.Request` subclass and
|
||||
provides all of the attributes Werkzeug defines plus a few Flask
|
||||
specific ones.
|
||||
"""
|
||||
|
||||
json_module: t.Any = json
|
||||
|
||||
#: The internal URL rule that matched the request. This can be
|
||||
#: useful to inspect which methods are allowed for the URL from
|
||||
#: a before/after handler (``request.url_rule.methods``) etc.
|
||||
#: Though if the request's method was invalid for the URL rule,
|
||||
#: the valid list is available in ``routing_exception.valid_methods``
|
||||
#: instead (an attribute of the Werkzeug exception
|
||||
#: :exc:`~werkzeug.exceptions.MethodNotAllowed`)
|
||||
#: because the request was never internally bound.
|
||||
#:
|
||||
#: .. versionadded:: 0.6
|
||||
url_rule: Rule | None = None
|
||||
|
||||
#: A dict of view arguments that matched the request. If an exception
|
||||
#: happened when matching, this will be ``None``.
|
||||
view_args: dict[str, t.Any] | None = None
|
||||
|
||||
#: If matching the URL failed, this is the exception that will be
|
||||
#: raised / was raised as part of the request handling. This is
|
||||
#: usually a :exc:`~werkzeug.exceptions.NotFound` exception or
|
||||
#: something similar.
|
||||
routing_exception: HTTPException | None = None
|
||||
|
||||
@property
|
||||
def max_content_length(self) -> int | None: # type: ignore[override]
|
||||
"""Read-only view of the ``MAX_CONTENT_LENGTH`` config key."""
|
||||
if current_app:
|
||||
return current_app.config["MAX_CONTENT_LENGTH"] # type: ignore[no-any-return]
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def endpoint(self) -> str | None:
|
||||
"""The endpoint that matched the request URL.
|
||||
|
||||
This will be ``None`` if matching failed or has not been
|
||||
performed yet.
|
||||
|
||||
This in combination with :attr:`view_args` can be used to
|
||||
reconstruct the same URL or a modified URL.
|
||||
"""
|
||||
if self.url_rule is not None:
|
||||
return self.url_rule.endpoint
|
||||
|
||||
return None
|
||||
|
||||
@property
|
||||
def blueprint(self) -> str | None:
|
||||
"""The registered name of the current blueprint.
|
||||
|
||||
This will be ``None`` if the endpoint is not part of a
|
||||
blueprint, or if URL matching failed or has not been performed
|
||||
yet.
|
||||
|
||||
This does not necessarily match the name the blueprint was
|
||||
created with. It may have been nested, or registered with a
|
||||
different name.
|
||||
"""
|
||||
endpoint = self.endpoint
|
||||
|
||||
if endpoint is not None and "." in endpoint:
|
||||
return endpoint.rpartition(".")[0]
|
||||
|
||||
return None
|
||||
|
||||
@property
|
||||
def blueprints(self) -> list[str]:
|
||||
"""The registered names of the current blueprint upwards through
|
||||
parent blueprints.
|
||||
|
||||
This will be an empty list if there is no current blueprint, or
|
||||
if URL matching failed.
|
||||
|
||||
.. versionadded:: 2.0.1
|
||||
"""
|
||||
name = self.blueprint
|
||||
|
||||
if name is None:
|
||||
return []
|
||||
|
||||
return _split_blueprint_path(name)
|
||||
|
||||
def _load_form_data(self) -> None:
|
||||
super()._load_form_data()
|
||||
|
||||
# In debug mode we're replacing the files multidict with an ad-hoc
|
||||
# subclass that raises a different error for key errors.
|
||||
if (
|
||||
current_app
|
||||
and current_app.debug
|
||||
and self.mimetype != "multipart/form-data"
|
||||
and not self.files
|
||||
):
|
||||
from .debughelpers import attach_enctype_error_multidict
|
||||
|
||||
attach_enctype_error_multidict(self)
|
||||
|
||||
def on_json_loading_failed(self, e: ValueError | None) -> t.Any:
|
||||
try:
|
||||
return super().on_json_loading_failed(e)
|
||||
except BadRequest as e:
|
||||
if current_app and current_app.debug:
|
||||
raise
|
||||
|
||||
raise BadRequest() from e
|
||||
|
||||
|
||||
class Response(ResponseBase):
|
||||
"""The response object that is used by default in Flask. Works like the
|
||||
response object from Werkzeug but is set to have an HTML mimetype by
|
||||
default. Quite often you don't have to create this object yourself because
|
||||
:meth:`~flask.Flask.make_response` will take care of that for you.
|
||||
|
||||
If you want to replace the response object used you can subclass this and
|
||||
set :attr:`~flask.Flask.response_class` to your subclass.
|
||||
|
||||
.. versionchanged:: 1.0
|
||||
JSON support is added to the response, like the request. This is useful
|
||||
when testing to get the test client response data as JSON.
|
||||
|
||||
.. versionchanged:: 1.0
|
||||
|
||||
Added :attr:`max_cookie_size`.
|
||||
"""
|
||||
|
||||
default_mimetype: str | None = "text/html"
|
||||
|
||||
json_module = json
|
||||
|
||||
autocorrect_location_header = False
|
||||
|
||||
@property
|
||||
def max_cookie_size(self) -> int: # type: ignore
|
||||
"""Read-only view of the :data:`MAX_COOKIE_SIZE` config key.
|
||||
|
||||
See :attr:`~werkzeug.wrappers.Response.max_cookie_size` in
|
||||
Werkzeug's docs.
|
||||
"""
|
||||
if current_app:
|
||||
return current_app.config["MAX_COOKIE_SIZE"] # type: ignore[no-any-return]
|
||||
|
||||
# return Werkzeug's default when not in an app context
|
||||
return super().max_cookie_size
|
||||
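# --- Illustrative sketch (not part of Flask itself) ------------------------
# Swapping in a Request subclass via Flask.request_class, as the Request
# docstring above describes. The "is_api" property is an assumption for the
# example.
from flask import Flask, Request


class ApiAwareRequest(Request):
    @property
    def is_api(self) -> bool:
        return self.path.startswith("/api/")


app = Flask(__name__)
app.request_class = ApiAwareRequest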
@ -0,0 +1 @@
pip
@ -0,0 +1,28 @@
|
||||
Copyright 2010 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@ -0,0 +1,109 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Flask-SQLAlchemy
|
||||
Version: 3.1.1
|
||||
Summary: Add SQLAlchemy support to your Flask application.
|
||||
Maintainer-email: Pallets <contact@palletsprojects.com>
|
||||
Requires-Python: >=3.8
|
||||
Description-Content-Type: text/x-rst
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Requires-Dist: flask>=2.2.5
|
||||
Requires-Dist: sqlalchemy>=2.0.16
|
||||
Project-URL: Changes, https://flask-sqlalchemy.palletsprojects.com/changes/
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Project-URL: Documentation, https://flask-sqlalchemy.palletsprojects.com
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Issue Tracker, https://github.com/pallets-eco/flask-sqlalchemy/issues/
|
||||
Project-URL: Source Code, https://github.com/pallets-eco/flask-sqlalchemy/
|
||||
|
||||
Flask-SQLAlchemy
|
||||
================
|
||||
|
||||
Flask-SQLAlchemy is an extension for `Flask`_ that adds support for
|
||||
`SQLAlchemy`_ to your application. It aims to simplify using SQLAlchemy
|
||||
with Flask by providing useful defaults and extra helpers that make it
|
||||
easier to accomplish common tasks.
|
||||
|
||||
.. _Flask: https://palletsprojects.com/p/flask/
|
||||
.. _SQLAlchemy: https://www.sqlalchemy.org
|
||||
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
||||
Install and update using `pip`_:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
$ pip install -U Flask-SQLAlchemy
|
||||
|
||||
.. _pip: https://pip.pypa.io/en/stable/getting-started/
|
||||
|
||||
|
||||
A Simple Example
|
||||
----------------
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from flask import Flask
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
|
||||
app = Flask(__name__)
|
||||
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///example.sqlite"
|
||||
|
||||
class Base(DeclarativeBase):
|
||||
pass
|
||||
|
||||
db = SQLAlchemy(app, model_class=Base)
|
||||
|
||||
class User(db.Model):
|
||||
id: Mapped[int] = mapped_column(db.Integer, primary_key=True)
|
||||
username: Mapped[str] = mapped_column(db.String, unique=True, nullable=False)
|
||||
|
||||
with app.app_context():
|
||||
db.create_all()
|
||||
|
||||
db.session.add(User(username="example"))
|
||||
db.session.commit()
|
||||
|
||||
users = db.session.execute(db.select(User)).scalars()
|
||||
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
For guidance on setting up a development environment and how to make a
|
||||
contribution to Flask-SQLAlchemy, see the `contributing guidelines`_.
|
||||
|
||||
.. _contributing guidelines: https://github.com/pallets-eco/flask-sqlalchemy/blob/main/CONTRIBUTING.rst
|
||||
|
||||
|
||||
Donate
|
||||
------
|
||||
|
||||
The Pallets organization develops and supports Flask-SQLAlchemy and
|
||||
other popular packages. In order to grow the community of contributors
|
||||
and users, and allow the maintainers to devote more time to the
|
||||
projects, `please donate today`_.
|
||||
|
||||
.. _please donate today: https://palletsprojects.com/donate
|
||||
|
||||
|
||||
Links
|
||||
-----
|
||||
|
||||
- Documentation: https://flask-sqlalchemy.palletsprojects.com/
|
||||
- Changes: https://flask-sqlalchemy.palletsprojects.com/changes/
|
||||
- PyPI Releases: https://pypi.org/project/Flask-SQLAlchemy/
|
||||
- Source Code: https://github.com/pallets-eco/flask-sqlalchemy/
|
||||
- Issue Tracker: https://github.com/pallets-eco/flask-sqlalchemy/issues/
|
||||
- Website: https://palletsprojects.com/
|
||||
- Twitter: https://twitter.com/PalletsTeam
|
||||
- Chat: https://discord.gg/pallets
|
||||
|
||||
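# --- Illustrative sketch (not part of Flask-SQLAlchemy itself) -------------
# Continuing the README example above (reusing its app, db and User names):
# querying with the 2.x-style select() API that its last line introduces.
with app.app_context():
    user = db.session.execute(
        db.select(User).filter_by(username="example")
    ).scalar_one_or_none()
    print(user.username if user else "not found")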
@ -0,0 +1,27 @@
|
||||
flask_sqlalchemy-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
flask_sqlalchemy-3.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
|
||||
flask_sqlalchemy-3.1.1.dist-info/METADATA,sha256=lBxR1akBt7n9XBjIVTL2OV52OhCfFrb-Mqtoe0DCbR8,3432
|
||||
flask_sqlalchemy-3.1.1.dist-info/RECORD,,
|
||||
flask_sqlalchemy-3.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
flask_sqlalchemy-3.1.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
|
||||
flask_sqlalchemy/__init__.py,sha256=he_w4qQQVS2Z1ms5GCTptDTXNOXBXw0n8zSuWCp8n6Y,653
|
||||
flask_sqlalchemy/__pycache__/__init__.cpython-312.pyc,,
|
||||
flask_sqlalchemy/__pycache__/cli.cpython-312.pyc,,
|
||||
flask_sqlalchemy/__pycache__/extension.cpython-312.pyc,,
|
||||
flask_sqlalchemy/__pycache__/model.cpython-312.pyc,,
|
||||
flask_sqlalchemy/__pycache__/pagination.cpython-312.pyc,,
|
||||
flask_sqlalchemy/__pycache__/query.cpython-312.pyc,,
|
||||
flask_sqlalchemy/__pycache__/record_queries.cpython-312.pyc,,
|
||||
flask_sqlalchemy/__pycache__/session.cpython-312.pyc,,
|
||||
flask_sqlalchemy/__pycache__/table.cpython-312.pyc,,
|
||||
flask_sqlalchemy/__pycache__/track_modifications.cpython-312.pyc,,
|
||||
flask_sqlalchemy/cli.py,sha256=pg3QDxP36GW2qnwe_CpPtkRhPchyVSGM6zlBNWuNCFE,484
|
||||
flask_sqlalchemy/extension.py,sha256=71tP_kNtb5VgZdafy_OH1sWdZOA6PaT7cJqX7tKgZ-k,38261
|
||||
flask_sqlalchemy/model.py,sha256=_mSisC2Eni0TgTyFWeN_O4LIexTeP_sVTdxh03yMK50,11461
|
||||
flask_sqlalchemy/pagination.py,sha256=JFpllrqkRkwacb8DAmQWaz9wsvQa0dypfSkhUDSC2ws,11119
|
||||
flask_sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
flask_sqlalchemy/query.py,sha256=Uls9qbmnpb9Vba43EDfsRP17eHJ0X4VG7SE22tH5R3g,3748
|
||||
flask_sqlalchemy/record_queries.py,sha256=ouS1ayj16h76LJprx13iYdoFZbm6m8OncrOgAVbG1Sk,3520
|
||||
flask_sqlalchemy/session.py,sha256=pBbtN8iDc8yuGVt0k18BvZHh2uEI7QPzZXO7eXrRi1g,3426
|
||||
flask_sqlalchemy/table.py,sha256=wAPOy8qwyAxpMwOIUJY4iMOultzz2W0D6xvBkQ7U2CE,859
|
||||
flask_sqlalchemy/track_modifications.py,sha256=yieyozj7IiVzwnAGZ-ZrgqrzjrUfG0kPrXBfW_hStSU,2755
|
||||
@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: flit 3.9.0
Root-Is-Purelib: true
Tag: py3-none-any
@ -0,0 +1,26 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
from .extension import SQLAlchemy
|
||||
|
||||
__all__ = [
|
||||
"SQLAlchemy",
|
||||
]
|
||||
|
||||
|
||||
def __getattr__(name: str) -> t.Any:
|
||||
if name == "__version__":
|
||||
import importlib.metadata
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"The '__version__' attribute is deprecated and will be removed in"
|
||||
" Flask-SQLAlchemy 3.2. Use feature detection or"
|
||||
" 'importlib.metadata.version(\"flask-sqlalchemy\")' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return importlib.metadata.version("flask-sqlalchemy")
|
||||
|
||||
raise AttributeError(name)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,16 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
from flask import current_app
|
||||
|
||||
|
||||
def add_models_to_shell() -> dict[str, t.Any]:
|
||||
"""Registered with :meth:`~flask.Flask.shell_context_processor` if
|
||||
``add_models_to_shell`` is enabled. Adds the ``db`` instance and all model classes
|
||||
to ``flask shell``.
|
||||
"""
|
||||
db = current_app.extensions["sqlalchemy"]
|
||||
out = {m.class_.__name__: m.class_ for m in db.Model._sa_registry.mappers}
|
||||
out["db"] = db
|
||||
return out
|
||||
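# --- Illustrative sketch (not part of Flask-SQLAlchemy itself) -------------
# The shell context hook above is registered when the extension is created
# with add_models_to_shell=True (the default), so "db" and every mapped model
# are available as names inside "flask shell" without imports. The URI below
# is an assumption for the example.
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///example.sqlite"
db = SQLAlchemy(app, add_models_to_shell=True)

# $ flask shell
# >>> db            # the SQLAlchemy extension instance
# >>> SomeModel     # any model class defined on db.Model (hypothetical name)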
File diff suppressed because it is too large
@ -0,0 +1,330 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
import typing as t
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.orm as sa_orm
|
||||
|
||||
from .query import Query
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from .extension import SQLAlchemy
|
||||
|
||||
|
||||
class _QueryProperty:
|
||||
"""A class property that creates a query object for a model.
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
|
||||
def __get__(self, obj: Model | None, cls: type[Model]) -> Query:
|
||||
return cls.query_class(
|
||||
cls, session=cls.__fsa__.session() # type: ignore[arg-type]
|
||||
)
|
||||
|
||||
|
||||
class Model:
|
||||
"""The base class of the :attr:`.SQLAlchemy.Model` declarative model class.
|
||||
|
||||
To define models, subclass :attr:`db.Model <.SQLAlchemy.Model>`, not this. To
|
||||
customize ``db.Model``, subclass this and pass it as ``model_class`` to
|
||||
:class:`.SQLAlchemy`. To customize ``db.Model`` at the metaclass level, pass an
|
||||
already created declarative model class as ``model_class``.
|
||||
"""
|
||||
|
||||
__fsa__: t.ClassVar[SQLAlchemy]
|
||||
"""Internal reference to the extension object.
|
||||
|
||||
:meta private:
|
||||
"""
|
||||
|
||||
query_class: t.ClassVar[type[Query]] = Query
|
||||
"""Query class used by :attr:`query`. Defaults to :attr:`.SQLAlchemy.Query`, which
|
||||
defaults to :class:`.Query`.
|
||||
"""
|
||||
|
||||
query: t.ClassVar[Query] = _QueryProperty() # type: ignore[assignment]
|
||||
"""A SQLAlchemy query for a model. Equivalent to ``db.session.query(Model)``. Can be
|
||||
customized per-model by overriding :attr:`query_class`.
|
||||
|
||||
.. warning::
|
||||
The query interface is considered legacy in SQLAlchemy. Prefer using
|
||||
``session.execute(select())`` instead.
|
||||
"""
|
||||
|
||||
def __repr__(self) -> str:
|
||||
state = sa.inspect(self)
|
||||
assert state is not None
|
||||
|
||||
if state.transient:
|
||||
pk = f"(transient {id(self)})"
|
||||
elif state.pending:
|
||||
pk = f"(pending {id(self)})"
|
||||
else:
|
||||
pk = ", ".join(map(str, state.identity))
|
||||
|
||||
return f"<{type(self).__name__} {pk}>"
|
||||
|
||||
|
||||
class BindMetaMixin(type):
|
||||
"""Metaclass mixin that sets a model's ``metadata`` based on its ``__bind_key__``.
|
||||
|
||||
If the model sets ``metadata`` or ``__table__`` directly, ``__bind_key__`` is
|
||||
ignored. If the ``metadata`` is the same as the parent model, it will not be set
|
||||
directly on the child model.
|
||||
"""
|
||||
|
||||
__fsa__: SQLAlchemy
|
||||
metadata: sa.MetaData
|
||||
|
||||
def __init__(
|
||||
cls, name: str, bases: tuple[type, ...], d: dict[str, t.Any], **kwargs: t.Any
|
||||
) -> None:
|
||||
if not ("metadata" in cls.__dict__ or "__table__" in cls.__dict__):
|
||||
bind_key = getattr(cls, "__bind_key__", None)
|
||||
parent_metadata = getattr(cls, "metadata", None)
|
||||
metadata = cls.__fsa__._make_metadata(bind_key)
|
||||
|
||||
if metadata is not parent_metadata:
|
||||
cls.metadata = metadata
|
||||
|
||||
super().__init__(name, bases, d, **kwargs)
|
||||
|
||||
|
||||
class BindMixin:
|
||||
"""DeclarativeBase mixin to set a model's ``metadata`` based on ``__bind_key__``.
|
||||
|
||||
If no ``__bind_key__`` is specified, the model will use the default metadata
|
||||
provided by ``DeclarativeBase`` or ``DeclarativeBaseNoMeta``.
|
||||
If the model doesn't set ``metadata`` or ``__table__`` directly
|
||||
and does set ``__bind_key__``, the model will use the metadata
|
||||
for the specified bind key.
|
||||
If the ``metadata`` is the same as the parent model, it will not be set
|
||||
directly on the child model.
|
||||
|
||||
.. versionchanged:: 3.1.0
|
||||
"""
|
||||
|
||||
__fsa__: SQLAlchemy
|
||||
metadata: sa.MetaData
|
||||
|
||||
@classmethod
|
||||
def __init_subclass__(cls: t.Type[BindMixin], **kwargs: t.Dict[str, t.Any]) -> None:
|
||||
if not ("metadata" in cls.__dict__ or "__table__" in cls.__dict__) and hasattr(
|
||||
cls, "__bind_key__"
|
||||
):
|
||||
bind_key = getattr(cls, "__bind_key__", None)
|
||||
parent_metadata = getattr(cls, "metadata", None)
|
||||
metadata = cls.__fsa__._make_metadata(bind_key)
|
||||
|
||||
if metadata is not parent_metadata:
|
||||
cls.metadata = metadata
|
||||
|
||||
super().__init_subclass__(**kwargs)
|
||||
|
||||
|
||||
class NameMetaMixin(type):
|
||||
"""Metaclass mixin that sets a model's ``__tablename__`` by converting the
|
||||
``CamelCase`` class name to ``snake_case``. A name is set for non-abstract models
|
||||
that do not otherwise define ``__tablename__``. If a model does not define a primary
|
||||
key, it will not generate a name or ``__table__``, for single-table inheritance.
|
||||
"""
|
||||
|
||||
metadata: sa.MetaData
|
||||
__tablename__: str
|
||||
__table__: sa.Table
|
||||
|
||||
def __init__(
|
||||
cls, name: str, bases: tuple[type, ...], d: dict[str, t.Any], **kwargs: t.Any
|
||||
) -> None:
|
||||
if should_set_tablename(cls):
|
||||
cls.__tablename__ = camel_to_snake_case(cls.__name__)
|
||||
|
||||
super().__init__(name, bases, d, **kwargs)
|
||||
|
||||
# __table_cls__ has run. If no table was created, use the parent table.
|
||||
if (
|
||||
"__tablename__" not in cls.__dict__
|
||||
and "__table__" in cls.__dict__
|
||||
and cls.__dict__["__table__"] is None
|
||||
):
|
||||
del cls.__table__
|
||||
|
||||
def __table_cls__(cls, *args: t.Any, **kwargs: t.Any) -> sa.Table | None:
|
||||
"""This is called by SQLAlchemy during mapper setup. It determines the final
|
||||
table object that the model will use.
|
||||
|
||||
If no primary key is found, that indicates single-table inheritance, so no table
|
||||
will be created and ``__tablename__`` will be unset.
|
||||
"""
|
||||
schema = kwargs.get("schema")
|
||||
|
||||
if schema is None:
|
||||
key = args[0]
|
||||
else:
|
||||
key = f"{schema}.{args[0]}"
|
||||
|
||||
# Check if a table with this name already exists. Allows reflected tables to be
|
||||
# applied to models by name.
|
||||
if key in cls.metadata.tables:
|
||||
return sa.Table(*args, **kwargs)
|
||||
|
||||
# If a primary key is found, create a table for joined-table inheritance.
|
||||
for arg in args:
|
||||
if (isinstance(arg, sa.Column) and arg.primary_key) or isinstance(
|
||||
arg, sa.PrimaryKeyConstraint
|
||||
):
|
||||
return sa.Table(*args, **kwargs)
|
||||
|
||||
# If no base classes define a table, return one that's missing a primary key
|
||||
# so SQLAlchemy shows the correct error.
|
||||
for base in cls.__mro__[1:-1]:
|
||||
if "__table__" in base.__dict__:
|
||||
break
|
||||
else:
|
||||
return sa.Table(*args, **kwargs)
|
||||
|
||||
# Single-table inheritance, use the parent table name. __init__ will unset
|
||||
# __table__ based on this.
|
||||
if "__tablename__" in cls.__dict__:
|
||||
del cls.__tablename__
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class NameMixin:
|
||||
"""DeclarativeBase mixin that sets a model's ``__tablename__`` by converting the
|
||||
``CamelCase`` class name to ``snake_case``. A name is set for non-abstract models
|
||||
that do not otherwise define ``__tablename__``. If a model does not define a primary
|
||||
key, it will not generate a name or ``__table__``, for single-table inheritance.
|
||||
|
||||
.. versionadded:: 3.1.0
|
||||
"""
|
||||
|
||||
metadata: sa.MetaData
|
||||
__tablename__: str
|
||||
__table__: sa.Table
|
||||
|
||||
@classmethod
|
||||
def __init_subclass__(cls: t.Type[NameMixin], **kwargs: t.Dict[str, t.Any]) -> None:
|
||||
if should_set_tablename(cls):
|
||||
cls.__tablename__ = camel_to_snake_case(cls.__name__)
|
||||
|
||||
super().__init_subclass__(**kwargs)
|
||||
|
||||
# __table_cls__ has run. If no table was created, use the parent table.
|
||||
if (
|
||||
"__tablename__" not in cls.__dict__
|
||||
and "__table__" in cls.__dict__
|
||||
and cls.__dict__["__table__"] is None
|
||||
):
|
||||
del cls.__table__
|
||||
|
||||
@classmethod
|
||||
def __table_cls__(cls, *args: t.Any, **kwargs: t.Any) -> sa.Table | None:
|
||||
"""This is called by SQLAlchemy during mapper setup. It determines the final
|
||||
table object that the model will use.
|
||||
|
||||
If no primary key is found, that indicates single-table inheritance, so no table
|
||||
will be created and ``__tablename__`` will be unset.
|
||||
"""
|
||||
schema = kwargs.get("schema")
|
||||
|
||||
if schema is None:
|
||||
key = args[0]
|
||||
else:
|
||||
key = f"{schema}.{args[0]}"
|
||||
|
||||
# Check if a table with this name already exists. Allows reflected tables to be
|
||||
# applied to models by name.
|
||||
if key in cls.metadata.tables:
|
||||
return sa.Table(*args, **kwargs)
|
||||
|
||||
# If a primary key is found, create a table for joined-table inheritance.
|
||||
for arg in args:
|
||||
if (isinstance(arg, sa.Column) and arg.primary_key) or isinstance(
|
||||
arg, sa.PrimaryKeyConstraint
|
||||
):
|
||||
return sa.Table(*args, **kwargs)
|
||||
|
||||
# If no base classes define a table, return one that's missing a primary key
|
||||
# so SQLAlchemy shows the correct error.
|
||||
for base in cls.__mro__[1:-1]:
|
||||
if "__table__" in base.__dict__:
|
||||
break
|
||||
else:
|
||||
return sa.Table(*args, **kwargs)
|
||||
|
||||
# Single-table inheritance, use the parent table name. __init__ will unset
|
||||
# __table__ based on this.
|
||||
if "__tablename__" in cls.__dict__:
|
||||
del cls.__tablename__
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def should_set_tablename(cls: type) -> bool:
|
||||
"""Determine whether ``__tablename__`` should be generated for a model.
|
||||
|
||||
- If no class in the MRO sets a name, one should be generated.
|
||||
- If a declared attr is found, it should be used instead.
|
||||
- If a name is found, it should be used if the class is a mixin, otherwise one
|
||||
should be generated.
|
||||
- Abstract models should not have one generated.
|
||||
|
||||
Later, ``__table_cls__`` will determine if the model looks like single or
|
||||
joined-table inheritance. If no primary key is found, the name will be unset.
|
||||
"""
|
||||
if (
|
||||
cls.__dict__.get("__abstract__", False)
|
||||
or (
|
||||
not issubclass(cls, (sa_orm.DeclarativeBase, sa_orm.DeclarativeBaseNoMeta))
|
||||
and not any(isinstance(b, sa_orm.DeclarativeMeta) for b in cls.__mro__[1:])
|
||||
)
|
||||
or any(
|
||||
(b is sa_orm.DeclarativeBase or b is sa_orm.DeclarativeBaseNoMeta)
|
||||
for b in cls.__bases__
|
||||
)
|
||||
):
|
||||
return False
|
||||
|
||||
for base in cls.__mro__:
|
||||
if "__tablename__" not in base.__dict__:
|
||||
continue
|
||||
|
||||
if isinstance(base.__dict__["__tablename__"], sa_orm.declared_attr):
|
||||
return False
|
||||
|
||||
return not (
|
||||
base is cls
|
||||
or base.__dict__.get("__abstract__", False)
|
||||
or not (
|
||||
# SQLAlchemy 1.x
|
||||
isinstance(base, sa_orm.DeclarativeMeta)
|
||||
# 2.x: DeclarativeBase uses this as metaclass
|
||||
or isinstance(base, sa_orm.decl_api.DeclarativeAttributeIntercept)
|
||||
# 2.x: DeclarativeBaseNoMeta doesn't use a metaclass
|
||||
or issubclass(base, sa_orm.DeclarativeBaseNoMeta)
|
||||
)
|
||||
)
|
||||
|
||||
return True
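# A sketch of the common cases, assuming hypothetical SQLAlchemy 2.x declarative
# classes:
#
#     class Base(sa_orm.DeclarativeBase): ...
#         -> False: direct DeclarativeBase subclasses never get a generated name.
#     class User(Base):
#         id: Mapped[int] = mapped_column(primary_key=True)
#         -> True: nothing in the MRO sets __tablename__, so "user" is generated.
#     class Admin(User): ...
#         -> True here as well, but __table_cls__ later finds no primary key,
#            unsets the name, and falls back to single-table inheritance.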
|
||||
|
||||
|
||||
def camel_to_snake_case(name: str) -> str:
|
||||
"""Convert a ``CamelCase`` name to ``snake_case``."""
|
||||
name = re.sub(r"((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))", r"_\1", name)
|
||||
return name.lower().lstrip("_")
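# For example, camel_to_snake_case("CamelCase") == "camel_case" and
# camel_to_snake_case("HTMLParser") == "html_parser".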
|
||||
|
||||
|
||||
class DefaultMeta(BindMetaMixin, NameMetaMixin, sa_orm.DeclarativeMeta):
|
||||
"""SQLAlchemy declarative metaclass that provides ``__bind_key__`` and
|
||||
``__tablename__`` support.
|
||||
"""
|
||||
|
||||
|
||||
class DefaultMetaNoName(BindMetaMixin, sa_orm.DeclarativeMeta):
|
||||
"""SQLAlchemy declarative metaclass that provides ``__bind_key__`` and
|
||||
``__tablename__`` support.
|
||||
"""
|
||||
@ -0,0 +1,364 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from math import ceil
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.orm as sa_orm
|
||||
from flask import abort
|
||||
from flask import request
|
||||
|
||||
|
||||
class Pagination:
|
||||
"""Apply an offset and limit to the query based on the current page and number of
|
||||
items per page.
|
||||
|
||||
Don't create pagination objects manually. They are created by
|
||||
:meth:`.SQLAlchemy.paginate` and :meth:`.Query.paginate`.
|
||||
|
||||
This is a base class, a subclass must implement :meth:`_query_items` and
|
||||
:meth:`_query_count`. Those methods will use arguments passed as ``kwargs`` to
|
||||
perform the queries.
|
||||
|
||||
:param page: The current page, used to calculate the offset. Defaults to the
|
||||
``page`` query arg during a request, or 1 otherwise.
|
||||
:param per_page: The maximum number of items on a page, used to calculate the
|
||||
offset and limit. Defaults to the ``per_page`` query arg during a request,
|
||||
or 20 otherwise.
|
||||
:param max_per_page: The maximum allowed value for ``per_page``, to limit a
|
||||
user-provided value. Use ``None`` for no limit. Defaults to 100.
|
||||
:param error_out: Abort with a ``404 Not Found`` error if no items are returned
|
||||
and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if
|
||||
either is not an int.
|
||||
:param count: Calculate the total number of values by issuing an extra count
|
||||
query. For very complex queries this may be inaccurate or slow, so it can be
|
||||
disabled and set manually if necessary.
|
||||
:param kwargs: Information about the query to paginate. Different subclasses will
|
||||
require different arguments.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Iterating over a pagination object iterates over its items.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Creating instances manually is not a public API.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
page: int | None = None,
|
||||
per_page: int | None = None,
|
||||
max_per_page: int | None = 100,
|
||||
error_out: bool = True,
|
||||
count: bool = True,
|
||||
**kwargs: t.Any,
|
||||
) -> None:
|
||||
self._query_args = kwargs
|
||||
page, per_page = self._prepare_page_args(
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
max_per_page=max_per_page,
|
||||
error_out=error_out,
|
||||
)
|
||||
|
||||
self.page: int = page
|
||||
"""The current page."""
|
||||
|
||||
self.per_page: int = per_page
|
||||
"""The maximum number of items on a page."""
|
||||
|
||||
self.max_per_page: int | None = max_per_page
|
||||
"""The maximum allowed value for ``per_page``."""
|
||||
|
||||
items = self._query_items()
|
||||
|
||||
if not items and page != 1 and error_out:
|
||||
abort(404)
|
||||
|
||||
self.items: list[t.Any] = items
|
||||
"""The items on the current page. Iterating over the pagination object is
|
||||
equivalent to iterating over the items.
|
||||
"""
|
||||
|
||||
if count:
|
||||
total = self._query_count()
|
||||
else:
|
||||
total = None
|
||||
|
||||
self.total: int | None = total
|
||||
"""The total number of items across all pages."""
|
||||
|
||||
@staticmethod
|
||||
def _prepare_page_args(
|
||||
*,
|
||||
page: int | None = None,
|
||||
per_page: int | None = None,
|
||||
max_per_page: int | None = None,
|
||||
error_out: bool = True,
|
||||
) -> tuple[int, int]:
|
||||
if request:
|
||||
if page is None:
|
||||
try:
|
||||
page = int(request.args.get("page", 1))
|
||||
except (TypeError, ValueError):
|
||||
if error_out:
|
||||
abort(404)
|
||||
|
||||
page = 1
|
||||
|
||||
if per_page is None:
|
||||
try:
|
||||
per_page = int(request.args.get("per_page", 20))
|
||||
except (TypeError, ValueError):
|
||||
if error_out:
|
||||
abort(404)
|
||||
|
||||
per_page = 20
|
||||
else:
|
||||
if page is None:
|
||||
page = 1
|
||||
|
||||
if per_page is None:
|
||||
per_page = 20
|
||||
|
||||
if max_per_page is not None:
|
||||
per_page = min(per_page, max_per_page)
|
||||
|
||||
if page < 1:
|
||||
if error_out:
|
||||
abort(404)
|
||||
else:
|
||||
page = 1
|
||||
|
||||
if per_page < 1:
|
||||
if error_out:
|
||||
abort(404)
|
||||
else:
|
||||
per_page = 20
|
||||
|
||||
return page, per_page
|
||||
|
||||
@property
|
||||
def _query_offset(self) -> int:
|
||||
"""The index of the first item to query, passed to ``offset()``.
|
||||
|
||||
:meta private:
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
return (self.page - 1) * self.per_page
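    # For example, page=3 with per_page=20 gives an offset of 40, so the page
    # covers items 41-60.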
|
||||
|
||||
def _query_items(self) -> list[t.Any]:
|
||||
"""Execute the query to get the items on the current page.
|
||||
|
||||
Uses init arguments stored in :attr:`_query_args`.
|
||||
|
||||
:meta private:
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def _query_count(self) -> int:
|
||||
"""Execute the query to get the total number of items.
|
||||
|
||||
Uses init arguments stored in :attr:`_query_args`.
|
||||
|
||||
:meta private:
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def first(self) -> int:
|
||||
"""The number of the first item on the page, starting from 1, or 0 if there are
|
||||
no items.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
if len(self.items) == 0:
|
||||
return 0
|
||||
|
||||
return (self.page - 1) * self.per_page + 1
|
||||
|
||||
@property
|
||||
def last(self) -> int:
|
||||
"""The number of the last item on the page, starting from 1, inclusive, or 0 if
|
||||
there are no items.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
first = self.first
|
||||
return max(first, first + len(self.items) - 1)
|
||||
|
||||
@property
|
||||
def pages(self) -> int:
|
||||
"""The total number of pages."""
|
||||
if self.total == 0 or self.total is None:
|
||||
return 0
|
||||
|
||||
return ceil(self.total / self.per_page)
|
||||
|
||||
@property
|
||||
def has_prev(self) -> bool:
|
||||
"""``True`` if this is not the first page."""
|
||||
return self.page > 1
|
||||
|
||||
@property
|
||||
def prev_num(self) -> int | None:
|
||||
"""The previous page number, or ``None`` if this is the first page."""
|
||||
if not self.has_prev:
|
||||
return None
|
||||
|
||||
return self.page - 1
|
||||
|
||||
def prev(self, *, error_out: bool = False) -> Pagination:
|
||||
"""Query the :class:`Pagination` object for the previous page.
|
||||
|
||||
:param error_out: Abort with a ``404 Not Found`` error if no items are returned
|
||||
and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if
|
||||
either is not an int.
|
||||
"""
|
||||
p = type(self)(
|
||||
page=self.page - 1,
|
||||
per_page=self.per_page,
|
||||
error_out=error_out,
|
||||
count=False,
|
||||
**self._query_args,
|
||||
)
|
||||
p.total = self.total
|
||||
return p
|
||||
|
||||
@property
|
||||
def has_next(self) -> bool:
|
||||
"""``True`` if this is not the last page."""
|
||||
return self.page < self.pages
|
||||
|
||||
@property
|
||||
def next_num(self) -> int | None:
|
||||
"""The next page number, or ``None`` if this is the last page."""
|
||||
if not self.has_next:
|
||||
return None
|
||||
|
||||
return self.page + 1
|
||||
|
||||
def next(self, *, error_out: bool = False) -> Pagination:
|
||||
"""Query the :class:`Pagination` object for the next page.
|
||||
|
||||
:param error_out: Abort with a ``404 Not Found`` error if no items are returned
|
||||
and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if
|
||||
either is not an int.
|
||||
"""
|
||||
p = type(self)(
|
||||
page=self.page + 1,
|
||||
per_page=self.per_page,
|
||||
max_per_page=self.max_per_page,
|
||||
error_out=error_out,
|
||||
count=False,
|
||||
**self._query_args,
|
||||
)
|
||||
p.total = self.total
|
||||
return p
|
||||
|
||||
def iter_pages(
|
||||
self,
|
||||
*,
|
||||
left_edge: int = 2,
|
||||
left_current: int = 2,
|
||||
right_current: int = 4,
|
||||
right_edge: int = 2,
|
||||
) -> t.Iterator[int | None]:
|
||||
"""Yield page numbers for a pagination widget. Skipped pages between the edges
|
||||
and middle are represented by a ``None``.
|
||||
|
||||
For example, if there are 20 pages and the current page is 7, the following
|
||||
values are yielded.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
1, 2, None, 5, 6, 7, 8, 9, 10, 11, None, 19, 20
|
||||
|
||||
:param left_edge: How many pages to show from the first page.
|
||||
:param left_current: How many pages to show left of the current page.
|
||||
:param right_current: How many pages to show right of the current page.
|
||||
:param right_edge: How many pages to show from the last page.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Improved efficiency of calculating what to yield.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
``right_current`` boundary is inclusive.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
All parameters are keyword-only.
|
||||
"""
|
||||
pages_end = self.pages + 1
|
||||
|
||||
if pages_end == 1:
|
||||
return
|
||||
|
||||
left_end = min(1 + left_edge, pages_end)
|
||||
yield from range(1, left_end)
|
||||
|
||||
if left_end == pages_end:
|
||||
return
|
||||
|
||||
mid_start = max(left_end, self.page - left_current)
|
||||
mid_end = min(self.page + right_current + 1, pages_end)
|
||||
|
||||
if mid_start - left_end > 0:
|
||||
yield None
|
||||
|
||||
yield from range(mid_start, mid_end)
|
||||
|
||||
if mid_end == pages_end:
|
||||
return
|
||||
|
||||
right_start = max(mid_end, pages_end - right_edge)
|
||||
|
||||
if right_start - mid_end > 0:
|
||||
yield None
|
||||
|
||||
yield from range(right_start, pages_end)
|
||||
|
||||
def __iter__(self) -> t.Iterator[t.Any]:
|
||||
yield from self.items
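    # A minimal usage sketch, assuming ``db = SQLAlchemy(app)`` and a hypothetical
    # ``User`` model:
    #
    #     page = db.paginate(db.select(User).order_by(User.id), per_page=10)
    #     users = page.items                    # or simply: for user in page: ...
    #     numbers = list(page.iter_pages())     # e.g. [1, 2, None, 5, 6, 7, ...]
    #
    # When called during a request, ``page`` and ``per_page`` default to the query
    # string arguments described above.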
|
||||
|
||||
|
||||
class SelectPagination(Pagination):
|
||||
"""Returned by :meth:`.SQLAlchemy.paginate`. Takes ``select`` and ``session``
|
||||
arguments in addition to the :class:`Pagination` arguments.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
|
||||
def _query_items(self) -> list[t.Any]:
|
||||
select = self._query_args["select"]
|
||||
select = select.limit(self.per_page).offset(self._query_offset)
|
||||
session = self._query_args["session"]
|
||||
return list(session.execute(select).unique().scalars())
|
||||
|
||||
def _query_count(self) -> int:
|
||||
select = self._query_args["select"]
|
||||
sub = select.options(sa_orm.lazyload("*")).order_by(None).subquery()
|
||||
session = self._query_args["session"]
|
||||
out = session.execute(sa.select(sa.func.count()).select_from(sub)).scalar()
|
||||
return out # type: ignore[no-any-return]
|
||||
|
||||
|
||||
class QueryPagination(Pagination):
|
||||
"""Returned by :meth:`.Query.paginate`. Takes a ``query`` argument in addition to
|
||||
the :class:`Pagination` arguments.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
|
||||
def _query_items(self) -> list[t.Any]:
|
||||
query = self._query_args["query"]
|
||||
out = query.limit(self.per_page).offset(self._query_offset).all()
|
||||
return out # type: ignore[no-any-return]
|
||||
|
||||
def _query_count(self) -> int:
|
||||
# Query.count automatically disables eager loads
|
||||
out = self._query_args["query"].order_by(None).count()
|
||||
return out # type: ignore[no-any-return]
|
||||
@ -0,0 +1,105 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import sqlalchemy.exc as sa_exc
|
||||
import sqlalchemy.orm as sa_orm
|
||||
from flask import abort
|
||||
|
||||
from .pagination import Pagination
|
||||
from .pagination import QueryPagination
|
||||
|
||||
|
||||
class Query(sa_orm.Query): # type: ignore[type-arg]
|
||||
"""SQLAlchemy :class:`~sqlalchemy.orm.query.Query` subclass with some extra methods
|
||||
useful for querying in a web application.
|
||||
|
||||
This is the default query class for :attr:`.Model.query`.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Renamed to ``Query`` from ``BaseQuery``.
|
||||
"""
|
||||
|
||||
def get_or_404(self, ident: t.Any, description: str | None = None) -> t.Any:
|
||||
"""Like :meth:`~sqlalchemy.orm.Query.get` but aborts with a ``404 Not Found``
|
||||
error instead of returning ``None``.
|
||||
|
||||
:param ident: The primary key to query.
|
||||
:param description: A custom message to show on the error page.
|
||||
"""
|
||||
rv = self.get(ident)
|
||||
|
||||
if rv is None:
|
||||
abort(404, description=description)
|
||||
|
||||
return rv
|
||||
|
||||
def first_or_404(self, description: str | None = None) -> t.Any:
|
||||
"""Like :meth:`~sqlalchemy.orm.Query.first` but aborts with a ``404 Not Found``
|
||||
error instead of returning ``None``.
|
||||
|
||||
:param description: A custom message to show on the error page.
|
||||
"""
|
||||
rv = self.first()
|
||||
|
||||
if rv is None:
|
||||
abort(404, description=description)
|
||||
|
||||
return rv
|
||||
|
||||
def one_or_404(self, description: str | None = None) -> t.Any:
|
||||
"""Like :meth:`~sqlalchemy.orm.Query.one` but aborts with a ``404 Not Found``
|
||||
error instead of raising ``NoResultFound`` or ``MultipleResultsFound``.
|
||||
|
||||
:param description: A custom message to show on the error page.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
try:
|
||||
return self.one()
|
||||
except (sa_exc.NoResultFound, sa_exc.MultipleResultsFound):
|
||||
abort(404, description=description)
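    # A usage sketch, assuming a hypothetical ``User`` model exposing ``Model.query``:
    #
    #     user = User.query.get_or_404(user_id)
    #     user = User.query.filter_by(name=name).first_or_404(description="No such user.")
    #
    # Both abort with ``404 Not Found`` via ``flask.abort`` instead of returning ``None``.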
|
||||
|
||||
def paginate(
|
||||
self,
|
||||
*,
|
||||
page: int | None = None,
|
||||
per_page: int | None = None,
|
||||
max_per_page: int | None = None,
|
||||
error_out: bool = True,
|
||||
count: bool = True,
|
||||
) -> Pagination:
|
||||
"""Apply an offset and limit to the query based on the current page and number
|
||||
of items per page, returning a :class:`.Pagination` object.
|
||||
|
||||
:param page: The current page, used to calculate the offset. Defaults to the
|
||||
``page`` query arg during a request, or 1 otherwise.
|
||||
:param per_page: The maximum number of items on a page, used to calculate the
|
||||
offset and limit. Defaults to the ``per_page`` query arg during a request,
|
||||
or 20 otherwise.
|
||||
:param max_per_page: The maximum allowed value for ``per_page``, to limit a
|
||||
user-provided value. Use ``None`` for no limit. Defaults to 100.
|
||||
:param error_out: Abort with a ``404 Not Found`` error if no items are returned
|
||||
and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if
|
||||
either is not an int.
|
||||
:param count: Calculate the total number of values by issuing an extra count
|
||||
query. For very complex queries this may be inaccurate or slow, so it can be
|
||||
disabled and set manually if necessary.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
All parameters are keyword-only.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
The ``count`` query is more efficient.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
``max_per_page`` defaults to 100.
|
||||
"""
|
||||
return QueryPagination(
|
||||
query=self,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
max_per_page=max_per_page,
|
||||
error_out=error_out,
|
||||
count=count,
|
||||
)
|
||||
@ -0,0 +1,117 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
import inspect
|
||||
import typing as t
|
||||
from time import perf_counter
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.event as sa_event
|
||||
from flask import current_app
|
||||
from flask import g
|
||||
from flask import has_app_context
|
||||
|
||||
|
||||
def get_recorded_queries() -> list[_QueryInfo]:
|
||||
"""Get the list of recorded query information for the current session. Queries are
|
||||
recorded if the config :data:`.SQLALCHEMY_RECORD_QUERIES` is enabled.
|
||||
|
||||
Each query info object has the following attributes:
|
||||
|
||||
``statement``
|
||||
The string of SQL generated by SQLAlchemy with parameter placeholders.
|
||||
``parameters``
|
||||
The parameters sent with the SQL statement.
|
||||
``start_time`` / ``end_time``
|
||||
Timing info about when the query started execution and when the results were
returned. Accuracy and value depend on the operating system.
|
||||
``duration``
|
||||
The time the query took in seconds.
|
||||
``location``
|
||||
A string description of where in your application code the query was executed.
|
||||
This may not be possible to calculate, and the format is not stable.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Renamed from ``get_debug_queries``.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
The info object is a dataclass instead of a tuple.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
The info object attribute ``context`` is renamed to ``location``.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Not enabled automatically in debug or testing mode.
|
||||
"""
|
||||
return g.get("_sqlalchemy_queries", []) # type: ignore[no-any-return]
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class _QueryInfo:
|
||||
"""Information about an executed query. Returned by :func:`get_recorded_queries`.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Renamed from ``_DebugQueryTuple``.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Changed to a dataclass instead of a tuple.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
``context`` is renamed to ``location``.
|
||||
"""
|
||||
|
||||
statement: str | None
|
||||
parameters: t.Any
|
||||
start_time: float
|
||||
end_time: float
|
||||
location: str
|
||||
|
||||
@property
|
||||
def duration(self) -> float:
|
||||
return self.end_time - self.start_time
|
||||
|
||||
|
||||
def _listen(engine: sa.engine.Engine) -> None:
|
||||
sa_event.listen(engine, "before_cursor_execute", _record_start, named=True)
|
||||
sa_event.listen(engine, "after_cursor_execute", _record_end, named=True)
|
||||
|
||||
|
||||
def _record_start(context: sa.engine.ExecutionContext, **kwargs: t.Any) -> None:
|
||||
if not has_app_context():
|
||||
return
|
||||
|
||||
context._fsa_start_time = perf_counter() # type: ignore[attr-defined]
|
||||
|
||||
|
||||
def _record_end(context: sa.engine.ExecutionContext, **kwargs: t.Any) -> None:
|
||||
if not has_app_context():
|
||||
return
|
||||
|
||||
if "_sqlalchemy_queries" not in g:
|
||||
g._sqlalchemy_queries = []
|
||||
|
||||
import_top = current_app.import_name.partition(".")[0]
|
||||
import_dot = f"{import_top}."
|
||||
frame = inspect.currentframe()
|
||||
|
||||
while frame:
|
||||
name = frame.f_globals.get("__name__")
|
||||
|
||||
if name and (name == import_top or name.startswith(import_dot)):
|
||||
code = frame.f_code
|
||||
location = f"{code.co_filename}:{frame.f_lineno} ({code.co_name})"
|
||||
break
|
||||
|
||||
frame = frame.f_back
|
||||
else:
|
||||
location = "<unknown>"
|
||||
|
||||
g._sqlalchemy_queries.append(
|
||||
_QueryInfo(
|
||||
statement=context.statement,
|
||||
parameters=context.parameters,
|
||||
start_time=context._fsa_start_time, # type: ignore[attr-defined]
|
||||
end_time=perf_counter(),
|
||||
location=location,
|
||||
)
|
||||
)
|
||||
@ -0,0 +1,111 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.exc as sa_exc
|
||||
import sqlalchemy.orm as sa_orm
|
||||
from flask.globals import app_ctx
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from .extension import SQLAlchemy
|
||||
|
||||
|
||||
class Session(sa_orm.Session):
|
||||
"""A SQLAlchemy :class:`~sqlalchemy.orm.Session` class that chooses what engine to
|
||||
use based on the bind key associated with the metadata associated with the thing
|
||||
being queried.
|
||||
|
||||
To customize ``db.session``, subclass this and pass it as the ``class_`` key in the
|
||||
``session_options`` to :class:`.SQLAlchemy`.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Renamed from ``SignallingSession``.
|
||||
"""
|
||||
|
||||
def __init__(self, db: SQLAlchemy, **kwargs: t.Any) -> None:
|
||||
super().__init__(**kwargs)
|
||||
self._db = db
|
||||
self._model_changes: dict[object, tuple[t.Any, str]] = {}
|
||||
|
||||
def get_bind(
|
||||
self,
|
||||
mapper: t.Any | None = None,
|
||||
clause: t.Any | None = None,
|
||||
bind: sa.engine.Engine | sa.engine.Connection | None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> sa.engine.Engine | sa.engine.Connection:
|
||||
"""Select an engine based on the ``bind_key`` of the metadata associated with
|
||||
the model or table being queried. If no bind key is set, uses the default bind.
|
||||
|
||||
.. versionchanged:: 3.0.3
|
||||
Fix finding the bind for a joined inheritance model.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
The implementation more closely matches the base SQLAlchemy implementation.
|
||||
|
||||
.. versionchanged:: 2.1
|
||||
Support joining an external transaction.
|
||||
"""
|
||||
if bind is not None:
|
||||
return bind
|
||||
|
||||
engines = self._db.engines
|
||||
|
||||
if mapper is not None:
|
||||
try:
|
||||
mapper = sa.inspect(mapper)
|
||||
except sa_exc.NoInspectionAvailable as e:
|
||||
if isinstance(mapper, type):
|
||||
raise sa_orm.exc.UnmappedClassError(mapper) from e
|
||||
|
||||
raise
|
||||
|
||||
engine = _clause_to_engine(mapper.local_table, engines)
|
||||
|
||||
if engine is not None:
|
||||
return engine
|
||||
|
||||
if clause is not None:
|
||||
engine = _clause_to_engine(clause, engines)
|
||||
|
||||
if engine is not None:
|
||||
return engine
|
||||
|
||||
if None in engines:
|
||||
return engines[None]
|
||||
|
||||
return super().get_bind(mapper=mapper, clause=clause, bind=bind, **kwargs)
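    # A routing sketch, assuming ``SQLALCHEMY_BINDS = {"analytics": "..."}`` is
    # configured and a hypothetical model declares a bind key:
    #
    #     class Log(db.Model):
    #         __bind_key__ = "analytics"
    #         id = db.Column(db.Integer, primary_key=True)
    #
    #     db.session.add(Log())
    #     db.session.commit()   # flushes through engines["analytics"], not the default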
|
||||
|
||||
|
||||
def _clause_to_engine(
|
||||
clause: sa.ClauseElement | None,
|
||||
engines: t.Mapping[str | None, sa.engine.Engine],
|
||||
) -> sa.engine.Engine | None:
|
||||
"""If the clause is a table, return the engine associated with the table's
|
||||
metadata's bind key.
|
||||
"""
|
||||
table = None
|
||||
|
||||
if clause is not None:
|
||||
if isinstance(clause, sa.Table):
|
||||
table = clause
|
||||
elif isinstance(clause, sa.UpdateBase) and isinstance(clause.table, sa.Table):
|
||||
table = clause.table
|
||||
|
||||
if table is not None and "bind_key" in table.metadata.info:
|
||||
key = table.metadata.info["bind_key"]
|
||||
|
||||
if key not in engines:
|
||||
raise sa_exc.UnboundExecutionError(
|
||||
f"Bind key '{key}' is not in 'SQLALCHEMY_BINDS' config."
|
||||
)
|
||||
|
||||
return engines[key]
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _app_ctx_id() -> int:
|
||||
"""Get the id of the current Flask application context for the session scope."""
|
||||
return id(app_ctx._get_current_object()) # type: ignore[attr-defined]
|
||||
@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.sql.schema as sa_sql_schema
|
||||
|
||||
|
||||
class _Table(sa.Table):
|
||||
@t.overload
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
*args: sa_sql_schema.SchemaItem,
|
||||
bind_key: str | None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> None:
|
||||
...
|
||||
|
||||
@t.overload
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
metadata: sa.MetaData,
|
||||
*args: sa_sql_schema.SchemaItem,
|
||||
**kwargs: t.Any,
|
||||
) -> None:
|
||||
...
|
||||
|
||||
@t.overload
|
||||
def __init__(
|
||||
self, name: str, *args: sa_sql_schema.SchemaItem, **kwargs: t.Any
|
||||
) -> None:
|
||||
...
|
||||
|
||||
def __init__(
|
||||
self, name: str, *args: sa_sql_schema.SchemaItem, **kwargs: t.Any
|
||||
) -> None:
|
||||
super().__init__(name, *args, **kwargs) # type: ignore[arg-type]
|
||||
@ -0,0 +1,88 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
import sqlalchemy as sa
|
||||
import sqlalchemy.event as sa_event
|
||||
import sqlalchemy.orm as sa_orm
|
||||
from flask import current_app
|
||||
from flask import has_app_context
|
||||
from flask.signals import Namespace # type: ignore[attr-defined]
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
from .session import Session
|
||||
|
||||
_signals = Namespace()
|
||||
|
||||
models_committed = _signals.signal("models-committed")
|
||||
"""This Blinker signal is sent after the session is committed if there were changed
|
||||
models in the session.
|
||||
|
||||
The sender is the application that emitted the changes. The receiver is passed the
|
||||
``changes`` argument with a list of tuples in the form ``(instance, operation)``.
|
||||
The operations are ``"insert"``, ``"update"``, and ``"delete"``.
|
||||
"""
|
||||
|
||||
before_models_committed = _signals.signal("before-models-committed")
|
||||
"""This signal works exactly like :data:`models_committed` but is emitted before the
|
||||
commit takes place.
|
||||
"""
|
||||
|
||||
|
||||
def _listen(session: sa_orm.scoped_session[Session]) -> None:
|
||||
sa_event.listen(session, "before_flush", _record_ops, named=True)
|
||||
sa_event.listen(session, "before_commit", _record_ops, named=True)
|
||||
sa_event.listen(session, "before_commit", _before_commit)
|
||||
sa_event.listen(session, "after_commit", _after_commit)
|
||||
sa_event.listen(session, "after_rollback", _after_rollback)
|
||||
|
||||
|
||||
def _record_ops(session: Session, **kwargs: t.Any) -> None:
|
||||
if not has_app_context():
|
||||
return
|
||||
|
||||
if not current_app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]:
|
||||
return
|
||||
|
||||
for targets, operation in (
|
||||
(session.new, "insert"),
|
||||
(session.dirty, "update"),
|
||||
(session.deleted, "delete"),
|
||||
):
|
||||
for target in targets:
|
||||
state = sa.inspect(target)
|
||||
key = state.identity_key if state.has_identity else id(target)
|
||||
session._model_changes[key] = (target, operation)
|
||||
|
||||
|
||||
def _before_commit(session: Session) -> None:
|
||||
if not has_app_context():
|
||||
return
|
||||
|
||||
app = current_app._get_current_object() # type: ignore[attr-defined]
|
||||
|
||||
if not app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]:
|
||||
return
|
||||
|
||||
if session._model_changes:
|
||||
changes = list(session._model_changes.values())
|
||||
before_models_committed.send(app, changes=changes)
|
||||
|
||||
|
||||
def _after_commit(session: Session) -> None:
|
||||
if not has_app_context():
|
||||
return
|
||||
|
||||
app = current_app._get_current_object() # type: ignore[attr-defined]
|
||||
|
||||
if not app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]:
|
||||
return
|
||||
|
||||
if session._model_changes:
|
||||
changes = list(session._model_changes.values())
|
||||
models_committed.send(app, changes=changes)
|
||||
session._model_changes.clear()
|
||||
|
||||
|
||||
def _after_rollback(session: Session) -> None:
|
||||
session._model_changes.clear()
|
||||
@ -0,0 +1 @@
|
||||
pip
|
||||
@ -0,0 +1,28 @@
|
||||
Copyright 2011 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@ -0,0 +1,60 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: itsdangerous
|
||||
Version: 2.2.0
|
||||
Summary: Safely pass data to untrusted environments and back.
|
||||
Maintainer-email: Pallets <contact@palletsprojects.com>
|
||||
Requires-Python: >=3.8
|
||||
Description-Content-Type: text/markdown
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Typing :: Typed
|
||||
Project-URL: Changes, https://itsdangerous.palletsprojects.com/changes/
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Project-URL: Documentation, https://itsdangerous.palletsprojects.com/
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Source, https://github.com/pallets/itsdangerous/
|
||||
|
||||
# ItsDangerous
|
||||
|
||||
... so better sign this
|
||||
|
||||
Various helpers to pass data to untrusted environments and to get it
|
||||
back safe and sound. Data is cryptographically signed to ensure that a
|
||||
token has not been tampered with.
|
||||
|
||||
It's possible to customize how data is serialized. Data is compressed as
|
||||
needed. A timestamp can be added and verified automatically while
|
||||
loading a token.
|
||||
|
||||
|
||||
## A Simple Example
|
||||
|
||||
Here's how you could generate a token for transmitting a user's id and
|
||||
name between web requests.
|
||||
|
||||
```python
|
||||
from itsdangerous import URLSafeSerializer
|
||||
auth_s = URLSafeSerializer("secret key", "auth")
|
||||
token = auth_s.dumps({"id": 5, "name": "itsdangerous"})
|
||||
|
||||
print(token)
|
||||
# eyJpZCI6NSwibmFtZSI6Iml0c2Rhbmdlcm91cyJ9.6YP6T0BaO67XP--9UzTrmurXSmg
|
||||
|
||||
data = auth_s.loads(token)
|
||||
print(data["name"])
|
||||
# itsdangerous
|
||||
```
|
||||
|
||||
|
||||
## Donate
|
||||
|
||||
The Pallets organization develops and supports ItsDangerous and other
|
||||
popular packages. In order to grow the community of contributors and
|
||||
users, and allow the maintainers to devote more time to the projects,
|
||||
[please donate today][].
|
||||
|
||||
[please donate today]: https://palletsprojects.com/donate
|
||||
|
||||
@ -0,0 +1,23 @@
|
||||
itsdangerous-2.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
itsdangerous-2.2.0.dist-info/LICENSE.txt,sha256=Y68JiRtr6K0aQlLtQ68PTvun_JSOIoNnvtfzxa4LCdc,1475
|
||||
itsdangerous-2.2.0.dist-info/METADATA,sha256=0rk0-1ZwihuU5DnwJVwPWoEI4yWOyCexih3JyZHblhE,1924
|
||||
itsdangerous-2.2.0.dist-info/RECORD,,
|
||||
itsdangerous-2.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
itsdangerous-2.2.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
|
||||
itsdangerous/__init__.py,sha256=4SK75sCe29xbRgQE1ZQtMHnKUuZYAf3bSpZOrff1IAY,1427
|
||||
itsdangerous/__pycache__/__init__.cpython-312.pyc,,
|
||||
itsdangerous/__pycache__/_json.cpython-312.pyc,,
|
||||
itsdangerous/__pycache__/encoding.cpython-312.pyc,,
|
||||
itsdangerous/__pycache__/exc.cpython-312.pyc,,
|
||||
itsdangerous/__pycache__/serializer.cpython-312.pyc,,
|
||||
itsdangerous/__pycache__/signer.cpython-312.pyc,,
|
||||
itsdangerous/__pycache__/timed.cpython-312.pyc,,
|
||||
itsdangerous/__pycache__/url_safe.cpython-312.pyc,,
|
||||
itsdangerous/_json.py,sha256=wPQGmge2yZ9328EHKF6gadGeyGYCJQKxtU-iLKE6UnA,473
|
||||
itsdangerous/encoding.py,sha256=wwTz5q_3zLcaAdunk6_vSoStwGqYWe307Zl_U87aRFM,1409
|
||||
itsdangerous/exc.py,sha256=Rr3exo0MRFEcPZltwecyK16VV1bE2K9_F1-d-ljcUn4,3201
|
||||
itsdangerous/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
itsdangerous/serializer.py,sha256=PmdwADLqkSyQLZ0jOKAgDsAW4k_H0TlA71Ei3z0C5aI,15601
|
||||
itsdangerous/signer.py,sha256=YO0CV7NBvHA6j549REHJFUjUojw2pHqwcUpQnU7yNYQ,9647
|
||||
itsdangerous/timed.py,sha256=6RvDMqNumGMxf0-HlpaZdN9PUQQmRvrQGplKhxuivUs,8083
|
||||
itsdangerous/url_safe.py,sha256=az4e5fXi_vs-YbWj8YZwn4wiVKfeD--GEKRT5Ueu4P4,2505
|
||||
@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: flit 3.9.0
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
@ -0,0 +1,38 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
|
||||
from .encoding import base64_decode as base64_decode
|
||||
from .encoding import base64_encode as base64_encode
|
||||
from .encoding import want_bytes as want_bytes
|
||||
from .exc import BadData as BadData
|
||||
from .exc import BadHeader as BadHeader
|
||||
from .exc import BadPayload as BadPayload
|
||||
from .exc import BadSignature as BadSignature
|
||||
from .exc import BadTimeSignature as BadTimeSignature
|
||||
from .exc import SignatureExpired as SignatureExpired
|
||||
from .serializer import Serializer as Serializer
|
||||
from .signer import HMACAlgorithm as HMACAlgorithm
|
||||
from .signer import NoneAlgorithm as NoneAlgorithm
|
||||
from .signer import Signer as Signer
|
||||
from .timed import TimedSerializer as TimedSerializer
|
||||
from .timed import TimestampSigner as TimestampSigner
|
||||
from .url_safe import URLSafeSerializer as URLSafeSerializer
|
||||
from .url_safe import URLSafeTimedSerializer as URLSafeTimedSerializer
|
||||
|
||||
|
||||
def __getattr__(name: str) -> t.Any:
|
||||
if name == "__version__":
|
||||
import importlib.metadata
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"The '__version__' attribute is deprecated and will be removed in"
|
||||
" ItsDangerous 2.3. Use feature detection or"
|
||||
" 'importlib.metadata.version(\"itsdangerous\")' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return importlib.metadata.version("itsdangerous")
|
||||
|
||||
raise AttributeError(name)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,18 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json as _json
|
||||
import typing as t
|
||||
|
||||
|
||||
class _CompactJSON:
|
||||
"""Wrapper around json module that strips whitespace."""
|
||||
|
||||
@staticmethod
|
||||
def loads(payload: str | bytes) -> t.Any:
|
||||
return _json.loads(payload)
|
||||
|
||||
@staticmethod
|
||||
def dumps(obj: t.Any, **kwargs: t.Any) -> str:
|
||||
kwargs.setdefault("ensure_ascii", False)
|
||||
kwargs.setdefault("separators", (",", ":"))
|
||||
return _json.dumps(obj, **kwargs)
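# For example, _CompactJSON.dumps({"id": 5}) == '{"id":5}', whereas the stdlib
# defaults would produce '{"id": 5}' with a space after the separator.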
|
||||
@ -0,0 +1,54 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import string
|
||||
import struct
|
||||
import typing as t
|
||||
|
||||
from .exc import BadData
|
||||
|
||||
|
||||
def want_bytes(
|
||||
s: str | bytes, encoding: str = "utf-8", errors: str = "strict"
|
||||
) -> bytes:
|
||||
if isinstance(s, str):
|
||||
s = s.encode(encoding, errors)
|
||||
|
||||
return s
|
||||
|
||||
|
||||
def base64_encode(string: str | bytes) -> bytes:
|
||||
"""Base64 encode a string of bytes or text. The resulting bytes are
|
||||
safe to use in URLs.
|
||||
"""
|
||||
string = want_bytes(string)
|
||||
return base64.urlsafe_b64encode(string).rstrip(b"=")
|
||||
|
||||
|
||||
def base64_decode(string: str | bytes) -> bytes:
|
||||
"""Base64 decode a URL-safe string of bytes or text. The result is
|
||||
bytes.
|
||||
"""
|
||||
string = want_bytes(string, encoding="ascii", errors="ignore")
|
||||
string += b"=" * (-len(string) % 4)
|
||||
|
||||
try:
|
||||
return base64.urlsafe_b64decode(string)
|
||||
except (TypeError, ValueError) as e:
|
||||
raise BadData("Invalid base64-encoded data") from e
|
||||
|
||||
|
||||
# The alphabet used by base64.urlsafe_*
|
||||
_base64_alphabet = f"{string.ascii_letters}{string.digits}-_=".encode("ascii")
|
||||
|
||||
_int64_struct = struct.Struct(">Q")
|
||||
_int_to_bytes = _int64_struct.pack
|
||||
_bytes_to_int = t.cast("t.Callable[[bytes], tuple[int]]", _int64_struct.unpack)
|
||||
|
||||
|
||||
def int_to_bytes(num: int) -> bytes:
|
||||
return _int_to_bytes(num).lstrip(b"\x00")
|
||||
|
||||
|
||||
def bytes_to_int(bytestr: bytes) -> int:
|
||||
return _bytes_to_int(bytestr.rjust(8, b"\x00"))[0]
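# For example: base64_encode(b"hello") == b"aGVsbG8" (padding stripped) and
# base64_decode(b"aGVsbG8") == b"hello"; int_to_bytes(255) == b"\xff" and
# bytes_to_int(b"\xff") == 255.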
|
||||
@ -0,0 +1,106 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing as t
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class BadData(Exception):
|
||||
"""Raised if bad data of any sort was encountered. This is the base
|
||||
for all exceptions that ItsDangerous defines.
|
||||
|
||||
.. versionadded:: 0.15
|
||||
"""
|
||||
|
||||
def __init__(self, message: str):
|
||||
super().__init__(message)
|
||||
self.message = message
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.message
|
||||
|
||||
|
||||
class BadSignature(BadData):
|
||||
"""Raised if a signature does not match."""
|
||||
|
||||
def __init__(self, message: str, payload: t.Any | None = None):
|
||||
super().__init__(message)
|
||||
|
||||
#: The payload that failed the signature test. In some
|
||||
#: situations you might still want to inspect this, even if
|
||||
#: you know it was tampered with.
|
||||
#:
|
||||
#: .. versionadded:: 0.14
|
||||
self.payload: t.Any | None = payload
|
||||
|
||||
|
||||
class BadTimeSignature(BadSignature):
|
||||
"""Raised if a time-based signature is invalid. This is a subclass
|
||||
of :class:`BadSignature`.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str,
|
||||
payload: t.Any | None = None,
|
||||
date_signed: datetime | None = None,
|
||||
):
|
||||
super().__init__(message, payload)
|
||||
|
||||
#: If the signature expired this exposes the date of when the
|
||||
#: signature was created. This can be helpful in order to
|
||||
#: tell the user how long a link has been stale.
|
||||
#:
|
||||
#: .. versionchanged:: 2.0
|
||||
#: The datetime value is timezone-aware rather than naive.
|
||||
#:
|
||||
#: .. versionadded:: 0.14
|
||||
self.date_signed = date_signed
|
||||
|
||||
|
||||
class SignatureExpired(BadTimeSignature):
|
||||
"""Raised if a signature timestamp is older than ``max_age``. This
|
||||
is a subclass of :exc:`BadTimeSignature`.
|
||||
"""
|
||||
|
||||
|
||||
class BadHeader(BadSignature):
|
||||
"""Raised if a signed header is invalid in some form. This only
|
||||
happens for serializers that have a header that goes with the
|
||||
signature.
|
||||
|
||||
.. versionadded:: 0.24
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str,
|
||||
payload: t.Any | None = None,
|
||||
header: t.Any | None = None,
|
||||
original_error: Exception | None = None,
|
||||
):
|
||||
super().__init__(message, payload)
|
||||
|
||||
#: If the header is actually available but just malformed it
|
||||
#: might be stored here.
|
||||
self.header: t.Any | None = header
|
||||
|
||||
#: If available, the error that indicates why the payload was
|
||||
#: not valid. This might be ``None``.
|
||||
self.original_error: Exception | None = original_error
|
||||
|
||||
|
||||
class BadPayload(BadData):
|
||||
"""Raised if a payload is invalid. This could happen if the payload
|
||||
is loaded despite an invalid signature, or if there is a mismatch
|
||||
between the serializer and deserializer. The original exception
|
||||
that occurred during loading is stored on the exception as :attr:`original_error`.
|
||||
|
||||
.. versionadded:: 0.15
|
||||
"""
|
||||
|
||||
def __init__(self, message: str, original_error: Exception | None = None):
|
||||
super().__init__(message)
|
||||
|
||||
#: If available, the error that indicates why the payload was
|
||||
#: not valid. This might be ``None``.
|
||||
self.original_error: Exception | None = original_error
|
||||
@ -0,0 +1,406 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as cabc
|
||||
import json
|
||||
import typing as t
|
||||
|
||||
from .encoding import want_bytes
|
||||
from .exc import BadPayload
|
||||
from .exc import BadSignature
|
||||
from .signer import _make_keys_list
|
||||
from .signer import Signer
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
|
||||
# This should either be str or bytes. To avoid having to specify the
|
||||
# bound type, it falls back to a union if structural matching fails.
|
||||
_TSerialized = te.TypeVar(
|
||||
"_TSerialized", bound=t.Union[str, bytes], default=t.Union[str, bytes]
|
||||
)
|
||||
else:
|
||||
# Still available at runtime on Python < 3.13, but without the default.
|
||||
_TSerialized = t.TypeVar("_TSerialized", bound=t.Union[str, bytes])
|
||||
|
||||
|
||||
class _PDataSerializer(t.Protocol[_TSerialized]):
|
||||
def loads(self, payload: _TSerialized, /) -> t.Any: ...
|
||||
# A signature with additional arguments is not handled correctly by type
|
||||
# checkers right now, so an overload is used below for serializers that
|
||||
# don't match this strict protocol.
|
||||
def dumps(self, obj: t.Any, /) -> _TSerialized: ...
|
||||
|
||||
|
||||
# Use TypeIs once it's available in typing_extensions or 3.13.
|
||||
def is_text_serializer(
|
||||
serializer: _PDataSerializer[t.Any],
|
||||
) -> te.TypeGuard[_PDataSerializer[str]]:
|
||||
"""Checks whether a serializer generates text or binary."""
|
||||
return isinstance(serializer.dumps({}), str)
|
||||
|
||||
|
||||
class Serializer(t.Generic[_TSerialized]):
|
||||
"""A serializer wraps a :class:`~itsdangerous.signer.Signer` to
|
||||
enable serializing and securely signing data other than bytes. It
|
||||
can unsign to verify that the data hasn't been changed.
|
||||
|
||||
The serializer provides :meth:`dumps` and :meth:`loads`, similar to
|
||||
:mod:`json`, and by default uses :mod:`json` internally to serialize
|
||||
the data to bytes.
|
||||
|
||||
The secret key should be a random string of ``bytes`` and should not
|
||||
be saved to code or version control. Different salts should be used
|
||||
to distinguish signing in different contexts. See :doc:`/concepts`
|
||||
for information about the security of the secret key and salt.
|
||||
|
||||
:param secret_key: The secret key to sign and verify with. Can be a
|
||||
list of keys, oldest to newest, to support key rotation.
|
||||
:param salt: Extra key to combine with ``secret_key`` to distinguish
|
||||
signatures in different contexts.
|
||||
:param serializer: An object that provides ``dumps`` and ``loads``
|
||||
methods for serializing data to a string. Defaults to
|
||||
:attr:`default_serializer`, which defaults to :mod:`json`.
|
||||
:param serializer_kwargs: Keyword arguments to pass when calling
|
||||
``serializer.dumps``.
|
||||
:param signer: A ``Signer`` class to instantiate when signing data.
|
||||
Defaults to :attr:`default_signer`, which defaults to
|
||||
:class:`~itsdangerous.signer.Signer`.
|
||||
:param signer_kwargs: Keyword arguments to pass when instantiating
|
||||
the ``Signer`` class.
|
||||
:param fallback_signers: List of signer parameters to try when
|
||||
unsigning with the default signer fails. Each item can be a dict
|
||||
of ``signer_kwargs``, a ``Signer`` class, or a tuple of
|
||||
``(signer, signer_kwargs)``. Defaults to
|
||||
:attr:`default_fallback_signers`.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
Added support for key rotation by passing a list to
|
||||
``secret_key``.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
Removed the default SHA-512 fallback signer from
|
||||
``default_fallback_signers``.
|
||||
|
||||
.. versionchanged:: 1.1
|
||||
Added support for ``fallback_signers`` and configured a default
|
||||
SHA-512 fallback. This fallback is for users who used the yanked
|
||||
1.0.0 release which defaulted to SHA-512.
|
||||
|
||||
.. versionchanged:: 0.14
|
||||
The ``signer`` and ``signer_kwargs`` parameters were added to
|
||||
the constructor.
|
||||
"""
|
||||
|
||||
#: The default serialization module to use to serialize data to a
|
||||
#: string internally. The default is :mod:`json`, but can be changed
|
||||
#: to any object that provides ``dumps`` and ``loads`` methods.
|
||||
default_serializer: _PDataSerializer[t.Any] = json
|
||||
|
||||
#: The default ``Signer`` class to instantiate when signing data.
|
||||
#: The default is :class:`itsdangerous.signer.Signer`.
|
||||
default_signer: type[Signer] = Signer
|
||||
|
||||
#: The default fallback signers to try when unsigning fails.
|
||||
default_fallback_signers: list[
|
||||
dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer]
|
||||
] = []
|
||||
|
||||
# Serializer[str] if no data serializer is provided, or if it returns str.
|
||||
@t.overload
|
||||
def __init__(
|
||||
self: Serializer[str],
|
||||
secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes],
|
||||
salt: str | bytes | None = b"itsdangerous",
|
||||
serializer: None | _PDataSerializer[str] = None,
|
||||
serializer_kwargs: dict[str, t.Any] | None = None,
|
||||
signer: type[Signer] | None = None,
|
||||
signer_kwargs: dict[str, t.Any] | None = None,
|
||||
fallback_signers: list[
|
||||
            dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer]
        ]
        | None = None,
    ): ...

    # Serializer[bytes] with a bytes data serializer positional argument.
    @t.overload
    def __init__(
        self: Serializer[bytes],
        secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes],
        salt: str | bytes | None,
        serializer: _PDataSerializer[bytes],
        serializer_kwargs: dict[str, t.Any] | None = None,
        signer: type[Signer] | None = None,
        signer_kwargs: dict[str, t.Any] | None = None,
        fallback_signers: list[
            dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer]
        ]
        | None = None,
    ): ...

    # Serializer[bytes] with a bytes data serializer keyword argument.
    @t.overload
    def __init__(
        self: Serializer[bytes],
        secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes],
        salt: str | bytes | None = b"itsdangerous",
        *,
        serializer: _PDataSerializer[bytes],
        serializer_kwargs: dict[str, t.Any] | None = None,
        signer: type[Signer] | None = None,
        signer_kwargs: dict[str, t.Any] | None = None,
        fallback_signers: list[
            dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer]
        ]
        | None = None,
    ): ...

    # Fall back with a positional argument. If the strict signature of
    # _PDataSerializer doesn't match, fall back to a union, requiring the user
    # to specify the type.
    @t.overload
    def __init__(
        self,
        secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes],
        salt: str | bytes | None,
        serializer: t.Any,
        serializer_kwargs: dict[str, t.Any] | None = None,
        signer: type[Signer] | None = None,
        signer_kwargs: dict[str, t.Any] | None = None,
        fallback_signers: list[
            dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer]
        ]
        | None = None,
    ): ...

    # Fall back with a keyword argument.
    @t.overload
    def __init__(
        self,
        secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes],
        salt: str | bytes | None = b"itsdangerous",
        *,
        serializer: t.Any,
        serializer_kwargs: dict[str, t.Any] | None = None,
        signer: type[Signer] | None = None,
        signer_kwargs: dict[str, t.Any] | None = None,
        fallback_signers: list[
            dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer]
        ]
        | None = None,
    ): ...

    def __init__(
        self,
        secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes],
        salt: str | bytes | None = b"itsdangerous",
        serializer: t.Any | None = None,
        serializer_kwargs: dict[str, t.Any] | None = None,
        signer: type[Signer] | None = None,
        signer_kwargs: dict[str, t.Any] | None = None,
        fallback_signers: list[
            dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer]
        ]
        | None = None,
    ):
        #: The list of secret keys to try for verifying signatures, from
        #: oldest to newest. The newest (last) key is used for signing.
        #:
        #: This allows a key rotation system to keep a list of allowed
        #: keys and remove expired ones.
        self.secret_keys: list[bytes] = _make_keys_list(secret_key)

        if salt is not None:
            salt = want_bytes(salt)
        # if salt is None then the signer's default is used

        self.salt = salt

        if serializer is None:
            serializer = self.default_serializer

        self.serializer: _PDataSerializer[_TSerialized] = serializer
        self.is_text_serializer: bool = is_text_serializer(serializer)

        if signer is None:
            signer = self.default_signer

        self.signer: type[Signer] = signer
        self.signer_kwargs: dict[str, t.Any] = signer_kwargs or {}

        if fallback_signers is None:
            fallback_signers = list(self.default_fallback_signers)

        self.fallback_signers: list[
            dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer]
        ] = fallback_signers
        self.serializer_kwargs: dict[str, t.Any] = serializer_kwargs or {}

    @property
    def secret_key(self) -> bytes:
        """The newest (last) entry in the :attr:`secret_keys` list. This
        is for compatibility from before key rotation support was added.
        """
        return self.secret_keys[-1]

    def load_payload(
        self, payload: bytes, serializer: _PDataSerializer[t.Any] | None = None
    ) -> t.Any:
        """Loads the encoded object. This function raises
        :class:`.BadPayload` if the payload is not valid. The
        ``serializer`` parameter can be used to override the serializer
        stored on the class. The encoded ``payload`` should always be
        bytes.
        """
        if serializer is None:
            use_serializer = self.serializer
            is_text = self.is_text_serializer
        else:
            use_serializer = serializer
            is_text = is_text_serializer(serializer)

        try:
            if is_text:
                return use_serializer.loads(payload.decode("utf-8"))  # type: ignore[arg-type]

            return use_serializer.loads(payload)  # type: ignore[arg-type]
        except Exception as e:
            raise BadPayload(
                "Could not load the payload because an exception"
                " occurred on unserializing the data.",
                original_error=e,
            ) from e

    def dump_payload(self, obj: t.Any) -> bytes:
        """Dumps the encoded object. The return value is always bytes.
        If the internal serializer returns text, the value will be
        encoded as UTF-8.
        """
        return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs))

    def make_signer(self, salt: str | bytes | None = None) -> Signer:
        """Creates a new instance of the signer to be used. The default
        implementation uses the :class:`.Signer` base class.
        """
        if salt is None:
            salt = self.salt

        return self.signer(self.secret_keys, salt=salt, **self.signer_kwargs)

    def iter_unsigners(self, salt: str | bytes | None = None) -> cabc.Iterator[Signer]:
        """Iterates over all signers to be tried for unsigning. Starts
        with the configured signer, then constructs each signer
        specified in ``fallback_signers``.
        """
        if salt is None:
            salt = self.salt

        yield self.make_signer(salt)

        for fallback in self.fallback_signers:
            if isinstance(fallback, dict):
                kwargs = fallback
                fallback = self.signer
            elif isinstance(fallback, tuple):
                fallback, kwargs = fallback
            else:
                kwargs = self.signer_kwargs

            for secret_key in self.secret_keys:
                yield fallback(secret_key, salt=salt, **kwargs)

    def dumps(self, obj: t.Any, salt: str | bytes | None = None) -> _TSerialized:
        """Returns a signed string serialized with the internal
        serializer. The return value can be either a byte or unicode
        string depending on the format of the internal serializer.
        """
        payload = want_bytes(self.dump_payload(obj))
        rv = self.make_signer(salt).sign(payload)

        if self.is_text_serializer:
            return rv.decode("utf-8")  # type: ignore[return-value]

        return rv  # type: ignore[return-value]

    def dump(self, obj: t.Any, f: t.IO[t.Any], salt: str | bytes | None = None) -> None:
        """Like :meth:`dumps` but dumps into a file. The file handle has
        to be compatible with what the internal serializer expects.
        """
        f.write(self.dumps(obj, salt))

    def loads(
        self, s: str | bytes, salt: str | bytes | None = None, **kwargs: t.Any
    ) -> t.Any:
        """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the
        signature validation fails.
        """
        s = want_bytes(s)
        last_exception = None

        for signer in self.iter_unsigners(salt):
            try:
                return self.load_payload(signer.unsign(s))
            except BadSignature as err:
                last_exception = err

        raise t.cast(BadSignature, last_exception)

    def load(self, f: t.IO[t.Any], salt: str | bytes | None = None) -> t.Any:
        """Like :meth:`loads` but loads from a file."""
        return self.loads(f.read(), salt)

    def loads_unsafe(
        self, s: str | bytes, salt: str | bytes | None = None
    ) -> tuple[bool, t.Any]:
        """Like :meth:`loads` but without verifying the signature. This
        is potentially very dangerous to use depending on how your
        serializer works. The return value is ``(signature_valid,
        payload)`` instead of just the payload. The first item will be a
        boolean that indicates if the signature is valid. This function
        never fails.

        Use it for debugging only and if you know that your serializer
        module is not exploitable (for example, do not use it with a
        pickle serializer).

        .. versionadded:: 0.15
        """
        return self._loads_unsafe_impl(s, salt)

    def _loads_unsafe_impl(
        self,
        s: str | bytes,
        salt: str | bytes | None,
        load_kwargs: dict[str, t.Any] | None = None,
        load_payload_kwargs: dict[str, t.Any] | None = None,
    ) -> tuple[bool, t.Any]:
        """Low level helper function to implement :meth:`loads_unsafe`
        in serializer subclasses.
        """
        if load_kwargs is None:
            load_kwargs = {}

        try:
            return True, self.loads(s, salt=salt, **load_kwargs)
        except BadSignature as e:
            if e.payload is None:
                return False, None

            if load_payload_kwargs is None:
                load_payload_kwargs = {}

            try:
                return (
                    False,
                    self.load_payload(e.payload, **load_payload_kwargs),
                )
            except BadPayload:
                return False, None

    def load_unsafe(
        self, f: t.IO[t.Any], salt: str | bytes | None = None
    ) -> tuple[bool, t.Any]:
        """Like :meth:`loads_unsafe` but loads from a file.

        .. versionadded:: 0.15
        """
        return self.loads_unsafe(f.read(), salt=salt)
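For reference, a minimal usage sketch of the Serializer API defined above (dumps/loads plus key rotation via a list of secret keys). The key strings and payload are illustrative placeholders, not values from this repository; the example assumes the vendored package is importable as itsdangerous:

from itsdangerous import BadSignature, Serializer

# Keys are listed oldest to newest: the last key signs, all keys are tried for verification.
s = Serializer(["old-secret-key", "new-secret-key"])

token = s.dumps({"user_id": 42})   # signed, serialized payload (str with the default JSON serializer)

try:
    data = s.loads(token)          # -> {"user_id": 42}
except BadSignature:
    data = None                    # signature did not match any configured key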
@ -0,0 +1,266 @@
from __future__ import annotations

import collections.abc as cabc
import hashlib
import hmac
import typing as t

from .encoding import _base64_alphabet
from .encoding import base64_decode
from .encoding import base64_encode
from .encoding import want_bytes
from .exc import BadSignature


class SigningAlgorithm:
    """Subclasses must implement :meth:`get_signature` to provide
    signature generation functionality.
    """

    def get_signature(self, key: bytes, value: bytes) -> bytes:
        """Returns the signature for the given key and value."""
        raise NotImplementedError()

    def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool:
        """Verifies the given signature matches the expected
        signature.
        """
        return hmac.compare_digest(sig, self.get_signature(key, value))


class NoneAlgorithm(SigningAlgorithm):
    """Provides an algorithm that does not perform any signing and
    returns an empty signature.
    """

    def get_signature(self, key: bytes, value: bytes) -> bytes:
        return b""


def _lazy_sha1(string: bytes = b"") -> t.Any:
    """Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include
    SHA-1, in which case the import and use as a default would fail before the
    developer can configure something else.
    """
    return hashlib.sha1(string)


class HMACAlgorithm(SigningAlgorithm):
    """Provides signature generation using HMACs."""

    #: The digest method to use with the MAC algorithm. This defaults to
    #: SHA1, but can be changed to any other function in the hashlib
    #: module.
    default_digest_method: t.Any = staticmethod(_lazy_sha1)

    def __init__(self, digest_method: t.Any = None):
        if digest_method is None:
            digest_method = self.default_digest_method

        self.digest_method: t.Any = digest_method

    def get_signature(self, key: bytes, value: bytes) -> bytes:
        mac = hmac.new(key, msg=value, digestmod=self.digest_method)
        return mac.digest()


def _make_keys_list(
    secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes],
) -> list[bytes]:
    if isinstance(secret_key, (str, bytes)):
        return [want_bytes(secret_key)]

    return [want_bytes(s) for s in secret_key]  # pyright: ignore


class Signer:
    """A signer securely signs bytes, then unsigns them to verify that
    the value hasn't been changed.

    The secret key should be a random string of ``bytes`` and should not
    be saved to code or version control. Different salts should be used
    to distinguish signing in different contexts. See :doc:`/concepts`
    for information about the security of the secret key and salt.

    :param secret_key: The secret key to sign and verify with. Can be a
        list of keys, oldest to newest, to support key rotation.
    :param salt: Extra key to combine with ``secret_key`` to distinguish
        signatures in different contexts.
    :param sep: Separator between the signature and value.
    :param key_derivation: How to derive the signing key from the secret
        key and salt. Possible values are ``concat``, ``django-concat``,
        or ``hmac``. Defaults to :attr:`default_key_derivation`, which
        defaults to ``django-concat``.
    :param digest_method: Hash function to use when generating the HMAC
        signature. Defaults to :attr:`default_digest_method`, which
        defaults to :func:`hashlib.sha1`. Note that the security of the
        hash alone doesn't apply when used intermediately in HMAC.
    :param algorithm: A :class:`SigningAlgorithm` instance to use
        instead of building a default :class:`HMACAlgorithm` with the
        ``digest_method``.

    .. versionchanged:: 2.0
        Added support for key rotation by passing a list to
        ``secret_key``.

    .. versionchanged:: 0.18
        ``algorithm`` was added as an argument to the class constructor.

    .. versionchanged:: 0.14
        ``key_derivation`` and ``digest_method`` were added as arguments
        to the class constructor.
    """

    #: The default digest method to use for the signer. The default is
    #: :func:`hashlib.sha1`, but can be changed to any :mod:`hashlib` or
    #: compatible object. Note that the security of the hash alone
    #: doesn't apply when used intermediately in HMAC.
    #:
    #: .. versionadded:: 0.14
    default_digest_method: t.Any = staticmethod(_lazy_sha1)

    #: The default scheme to use to derive the signing key from the
    #: secret key and salt. The default is ``django-concat``. Possible
    #: values are ``concat``, ``django-concat``, and ``hmac``.
    #:
    #: .. versionadded:: 0.14
    default_key_derivation: str = "django-concat"

    def __init__(
        self,
        secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes],
        salt: str | bytes | None = b"itsdangerous.Signer",
        sep: str | bytes = b".",
        key_derivation: str | None = None,
        digest_method: t.Any | None = None,
        algorithm: SigningAlgorithm | None = None,
    ):
        #: The list of secret keys to try for verifying signatures, from
        #: oldest to newest. The newest (last) key is used for signing.
        #:
        #: This allows a key rotation system to keep a list of allowed
        #: keys and remove expired ones.
        self.secret_keys: list[bytes] = _make_keys_list(secret_key)
        self.sep: bytes = want_bytes(sep)

        if self.sep in _base64_alphabet:
            raise ValueError(
                "The given separator cannot be used because it may be"
                " contained in the signature itself. ASCII letters,"
                " digits, and '-_=' must not be used."
            )

        if salt is not None:
            salt = want_bytes(salt)
        else:
            salt = b"itsdangerous.Signer"

        self.salt = salt

        if key_derivation is None:
            key_derivation = self.default_key_derivation

        self.key_derivation: str = key_derivation

        if digest_method is None:
            digest_method = self.default_digest_method

        self.digest_method: t.Any = digest_method

        if algorithm is None:
            algorithm = HMACAlgorithm(self.digest_method)

        self.algorithm: SigningAlgorithm = algorithm

    @property
    def secret_key(self) -> bytes:
        """The newest (last) entry in the :attr:`secret_keys` list. This
        is for compatibility from before key rotation support was added.
        """
        return self.secret_keys[-1]

    def derive_key(self, secret_key: str | bytes | None = None) -> bytes:
        """This method is called to derive the key. The default key
        derivation choices can be overridden here. Key derivation is not
        intended to be used as a security method to make a complex key
        out of a short password. Instead you should use large random
        secret keys.

        :param secret_key: A specific secret key to derive from.
            Defaults to the last item in :attr:`secret_keys`.

        .. versionchanged:: 2.0
            Added the ``secret_key`` parameter.
        """
        if secret_key is None:
            secret_key = self.secret_keys[-1]
        else:
            secret_key = want_bytes(secret_key)

        if self.key_derivation == "concat":
            return t.cast(bytes, self.digest_method(self.salt + secret_key).digest())
        elif self.key_derivation == "django-concat":
            return t.cast(
                bytes, self.digest_method(self.salt + b"signer" + secret_key).digest()
            )
        elif self.key_derivation == "hmac":
            mac = hmac.new(secret_key, digestmod=self.digest_method)
            mac.update(self.salt)
            return mac.digest()
        elif self.key_derivation == "none":
            return secret_key
        else:
            raise TypeError("Unknown key derivation method")

    def get_signature(self, value: str | bytes) -> bytes:
        """Returns the signature for the given value."""
        value = want_bytes(value)
        key = self.derive_key()
        sig = self.algorithm.get_signature(key, value)
        return base64_encode(sig)

    def sign(self, value: str | bytes) -> bytes:
        """Signs the given string."""
        value = want_bytes(value)
        return value + self.sep + self.get_signature(value)

    def verify_signature(self, value: str | bytes, sig: str | bytes) -> bool:
        """Verifies the signature for the given value."""
        try:
            sig = base64_decode(sig)
        except Exception:
            return False

        value = want_bytes(value)

        for secret_key in reversed(self.secret_keys):
            key = self.derive_key(secret_key)

            if self.algorithm.verify_signature(key, value, sig):
                return True

        return False

    def unsign(self, signed_value: str | bytes) -> bytes:
        """Unsigns the given string."""
        signed_value = want_bytes(signed_value)

        if self.sep not in signed_value:
            raise BadSignature(f"No {self.sep!r} found in value")

        value, sig = signed_value.rsplit(self.sep, 1)

        if self.verify_signature(value, sig):
            return value

        raise BadSignature(f"Signature {sig!r} does not match", payload=value)

    def validate(self, signed_value: str | bytes) -> bool:
        """Only validates the given signed value. Returns ``True`` if
        the signature exists and is valid.
        """
        try:
            self.unsign(signed_value)
            return True
        except BadSignature:
            return False
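A minimal sketch of how the Signer defined above is typically used; the secret key, salt, and value are placeholders, and the salt only serves to separate signing contexts:

from itsdangerous import BadSignature, Signer

signer = Signer("a-long-random-secret", salt="email-confirm")

signed = signer.sign(b"user@example.com")   # b"user@example.com.<base64 signature>"

try:
    original = signer.unsign(signed)        # b"user@example.com"
except BadSignature:
    original = None                         # value or signature was tampered with

ok = signer.validate(signed)                # boolean variant of unsign()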
@ -0,0 +1,228 @@
from __future__ import annotations

import collections.abc as cabc
import time
import typing as t
from datetime import datetime
from datetime import timezone

from .encoding import base64_decode
from .encoding import base64_encode
from .encoding import bytes_to_int
from .encoding import int_to_bytes
from .encoding import want_bytes
from .exc import BadSignature
from .exc import BadTimeSignature
from .exc import SignatureExpired
from .serializer import _TSerialized
from .serializer import Serializer
from .signer import Signer


class TimestampSigner(Signer):
    """Works like the regular :class:`.Signer` but also records the time
    of the signing and can be used to expire signatures. The
    :meth:`unsign` method can raise :exc:`.SignatureExpired` if the
    unsigning failed because the signature is expired.
    """

    def get_timestamp(self) -> int:
        """Returns the current timestamp. The function must return an
        integer.
        """
        return int(time.time())

    def timestamp_to_datetime(self, ts: int) -> datetime:
        """Convert the timestamp from :meth:`get_timestamp` into an
        aware :class`datetime.datetime` in UTC.

        .. versionchanged:: 2.0
            The timestamp is returned as a timezone-aware ``datetime``
            in UTC rather than a naive ``datetime`` assumed to be UTC.
        """
        return datetime.fromtimestamp(ts, tz=timezone.utc)

    def sign(self, value: str | bytes) -> bytes:
        """Signs the given string and also attaches time information."""
        value = want_bytes(value)
        timestamp = base64_encode(int_to_bytes(self.get_timestamp()))
        sep = want_bytes(self.sep)
        value = value + sep + timestamp
        return value + sep + self.get_signature(value)

    # Ignore overlapping signatures check, return_timestamp is the only
    # parameter that affects the return type.

    @t.overload
    def unsign(  # type: ignore[overload-overlap]
        self,
        signed_value: str | bytes,
        max_age: int | None = None,
        return_timestamp: t.Literal[False] = False,
    ) -> bytes: ...

    @t.overload
    def unsign(
        self,
        signed_value: str | bytes,
        max_age: int | None = None,
        return_timestamp: t.Literal[True] = True,
    ) -> tuple[bytes, datetime]: ...

    def unsign(
        self,
        signed_value: str | bytes,
        max_age: int | None = None,
        return_timestamp: bool = False,
    ) -> tuple[bytes, datetime] | bytes:
        """Works like the regular :meth:`.Signer.unsign` but can also
        validate the time. See the base docstring of the class for
        the general behavior. If ``return_timestamp`` is ``True`` the
        timestamp of the signature will be returned as an aware
        :class:`datetime.datetime` object in UTC.

        .. versionchanged:: 2.0
            The timestamp is returned as a timezone-aware ``datetime``
            in UTC rather than a naive ``datetime`` assumed to be UTC.
        """
        try:
            result = super().unsign(signed_value)
            sig_error = None
        except BadSignature as e:
            sig_error = e
            result = e.payload or b""

        sep = want_bytes(self.sep)

        # If there is no timestamp in the result there is something
        # seriously wrong. In case there was a signature error, we raise
        # that one directly, otherwise we have a weird situation in
        # which we shouldn't have come except someone uses a time-based
        # serializer on non-timestamp data, so catch that.
        if sep not in result:
            if sig_error:
                raise sig_error

            raise BadTimeSignature("timestamp missing", payload=result)

        value, ts_bytes = result.rsplit(sep, 1)
        ts_int: int | None = None
        ts_dt: datetime | None = None

        try:
            ts_int = bytes_to_int(base64_decode(ts_bytes))
        except Exception:
            pass

        # Signature is *not* okay. Raise a proper error now that we have
        # split the value and the timestamp.
        if sig_error is not None:
            if ts_int is not None:
                try:
                    ts_dt = self.timestamp_to_datetime(ts_int)
                except (ValueError, OSError, OverflowError) as exc:
                    # Windows raises OSError
                    # 32-bit raises OverflowError
                    raise BadTimeSignature(
                        "Malformed timestamp", payload=value
                    ) from exc

            raise BadTimeSignature(str(sig_error), payload=value, date_signed=ts_dt)

        # Signature was okay but the timestamp is actually not there or
        # malformed. Should not happen, but we handle it anyway.
        if ts_int is None:
            raise BadTimeSignature("Malformed timestamp", payload=value)

        # Check timestamp is not older than max_age
        if max_age is not None:
            age = self.get_timestamp() - ts_int

            if age > max_age:
                raise SignatureExpired(
                    f"Signature age {age} > {max_age} seconds",
                    payload=value,
                    date_signed=self.timestamp_to_datetime(ts_int),
                )

            if age < 0:
                raise SignatureExpired(
                    f"Signature age {age} < 0 seconds",
                    payload=value,
                    date_signed=self.timestamp_to_datetime(ts_int),
                )

        if return_timestamp:
            return value, self.timestamp_to_datetime(ts_int)

        return value

    def validate(self, signed_value: str | bytes, max_age: int | None = None) -> bool:
        """Only validates the given signed value. Returns ``True`` if
        the signature exists and is valid."""
        try:
            self.unsign(signed_value, max_age=max_age)
            return True
        except BadSignature:
            return False


class TimedSerializer(Serializer[_TSerialized]):
    """Uses :class:`TimestampSigner` instead of the default
    :class:`.Signer`.
    """

    default_signer: type[TimestampSigner] = TimestampSigner

    def iter_unsigners(
        self, salt: str | bytes | None = None
    ) -> cabc.Iterator[TimestampSigner]:
        return t.cast("cabc.Iterator[TimestampSigner]", super().iter_unsigners(salt))

    # TODO: Signature is incompatible because parameters were added
    # before salt.

    def loads(  # type: ignore[override]
        self,
        s: str | bytes,
        max_age: int | None = None,
        return_timestamp: bool = False,
        salt: str | bytes | None = None,
    ) -> t.Any:
        """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the
        signature validation fails. If a ``max_age`` is provided it will
        ensure the signature is not older than that time in seconds. In
        case the signature is outdated, :exc:`.SignatureExpired` is
        raised. All arguments are forwarded to the signer's
        :meth:`~TimestampSigner.unsign` method.
        """
        s = want_bytes(s)
        last_exception = None

        for signer in self.iter_unsigners(salt):
            try:
                base64d, timestamp = signer.unsign(
                    s, max_age=max_age, return_timestamp=True
                )
                payload = self.load_payload(base64d)

                if return_timestamp:
                    return payload, timestamp

                return payload
            except SignatureExpired:
                # The signature was unsigned successfully but was
                # expired. Do not try the next signer.
                raise
            except BadSignature as err:
                last_exception = err

        raise t.cast(BadSignature, last_exception)

    def loads_unsafe(  # type: ignore[override]
        self,
        s: str | bytes,
        max_age: int | None = None,
        salt: str | bytes | None = None,
    ) -> tuple[bool, t.Any]:
        return self._loads_unsafe_impl(s, salt, load_kwargs={"max_age": max_age})
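A minimal sketch of the TimestampSigner behaviour described above: max_age is in seconds, and SignatureExpired still carries the (untrusted) payload and signing time. The key and value are placeholders:

from itsdangerous import SignatureExpired, TimestampSigner

signer = TimestampSigner("a-long-random-secret")
signed = signer.sign("hello")                     # value + timestamp + signature

try:
    value = signer.unsign(signed, max_age=3600)   # raises if older than an hour
except SignatureExpired as exc:
    value = exc.payload                           # still readable, but NOT trustworthy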
@ -0,0 +1,83 @@
from __future__ import annotations

import typing as t
import zlib

from ._json import _CompactJSON
from .encoding import base64_decode
from .encoding import base64_encode
from .exc import BadPayload
from .serializer import _PDataSerializer
from .serializer import Serializer
from .timed import TimedSerializer


class URLSafeSerializerMixin(Serializer[str]):
    """Mixed in with a regular serializer it will attempt to zlib
    compress the string to make it shorter if necessary. It will also
    base64 encode the string so that it can safely be placed in a URL.
    """

    default_serializer: _PDataSerializer[str] = _CompactJSON

    def load_payload(
        self,
        payload: bytes,
        *args: t.Any,
        serializer: t.Any | None = None,
        **kwargs: t.Any,
    ) -> t.Any:
        decompress = False

        if payload.startswith(b"."):
            payload = payload[1:]
            decompress = True

        try:
            json = base64_decode(payload)
        except Exception as e:
            raise BadPayload(
                "Could not base64 decode the payload because of an exception",
                original_error=e,
            ) from e

        if decompress:
            try:
                json = zlib.decompress(json)
            except Exception as e:
                raise BadPayload(
                    "Could not zlib decompress the payload before decoding the payload",
                    original_error=e,
                ) from e

        return super().load_payload(json, *args, **kwargs)

    def dump_payload(self, obj: t.Any) -> bytes:
        json = super().dump_payload(obj)
        is_compressed = False
        compressed = zlib.compress(json)

        if len(compressed) < (len(json) - 1):
            json = compressed
            is_compressed = True

        base64d = base64_encode(json)

        if is_compressed:
            base64d = b"." + base64d

        return base64d


class URLSafeSerializer(URLSafeSerializerMixin, Serializer[str]):
    """Works like :class:`.Serializer` but dumps and loads into a URL
    safe string consisting of the upper and lowercase character of the
    alphabet as well as ``'_'``, ``'-'`` and ``'.'``.
    """


class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer[str]):
    """Works like :class:`.TimedSerializer` but dumps and loads into a
    URL safe string consisting of the upper and lowercase character of
    the alphabet as well as ``'_'``, ``'-'`` and ``'.'``.
    """
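A minimal sketch of the URLSafeTimedSerializer defined above, the variant most commonly used for tokens embedded in URLs; the key, salt, and e-mail address are placeholders:

from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer

ts = URLSafeTimedSerializer("a-long-random-secret", salt="activate-account")

token = ts.dumps("user@example.com")        # compact, URL-safe string

try:
    email = ts.loads(token, max_age=86400)  # valid for 24 hours
except SignatureExpired:
    email = None                            # token is genuine but too old
except BadSignature:
    email = None                            # token was tampered with or forged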
@ -0,0 +1 @@
pip
@ -0,0 +1,28 @@
Copyright 2007 Pallets

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

1.  Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.

2.  Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.

3.  Neither the name of the copyright holder nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.