This commit is contained in:
Kevin Muñoz 2024-11-19 15:19:23 -05:00
parent f40bd4e2ca
commit c21df591bd
Signed by: mrhacker
GPG Key ID: E5616555DD4EDAAE
1937 changed files with 306731 additions and 1 deletion

1
.gitignore vendored

@@ -5,6 +5,7 @@
/templates/result.html
/templates/scan.html
/templates/upload.html
/venv
app.py~
app2.py
.env

3
app.py

@@ -186,5 +186,6 @@ def format_scan_result(scan_result):
return formatted_result
if __name__ == '__main__':
socketio.run(app, debug=True)
socketio.run(app, host='0.0.0.0', port=5001, debug=os.getenv('FLASK_DEBUG', 'False').lower() == 'true')
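The replacement run line derives the debug flag from the `FLASK_DEBUG` environment variable instead of hard-coding `debug=True`, and binds explicitly to port 5001 on all interfaces; it relies on `os` being imported in app.py. A minimal standalone sketch of that pattern (only `FLASK_DEBUG`, the host, and the port are taken from the diff; the surrounding app is illustrative):
```py
import os

from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)

if __name__ == '__main__':
    # Debug stays off unless FLASK_DEBUG is explicitly set to "true" (any casing).
    debug_enabled = os.getenv('FLASK_DEBUG', 'False').lower() == 'true'
    # Binding to 0.0.0.0 exposes the server on all interfaces, so keep debug off there.
    socketio.run(app, host='0.0.0.0', port=5001, debug=debug_enabled)
```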

247
venv/bin/Activate.ps1 Normal file

@@ -0,0 +1,247 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
[Parameter(Mandatory = $false)]
[String]
$VenvDir,
[Parameter(Mandatory = $false)]
[String]
$Prompt
)
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
# Revert to original values
# The prior prompt:
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
}
# The prior PYTHONHOME:
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
}
# The prior PATH:
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
}
# Just remove the VIRTUAL_ENV altogether:
if (Test-Path -Path Env:VIRTUAL_ENV) {
Remove-Item -Path env:VIRTUAL_ENV
}
# Just remove VIRTUAL_ENV_PROMPT altogether.
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
}
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
}
# Leave deactivate function in the global namespace if requested:
if (-not $NonDestructive) {
Remove-Item -Path function:deactivate
}
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
[String]
$ConfigDir
) {
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
# An empty map will be returned if no config file is found.
$pyvenvConfig = @{ }
if ($pyvenvConfigPath) {
Write-Verbose "File exists, parse `key = value` lines"
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
$pyvenvConfigContent | ForEach-Object {
$keyval = $PSItem -split "\s*=\s*", 2
if ($keyval[0] -and $keyval[1]) {
$val = $keyval[1]
# Remove extraneous quotations around a string value.
if ("'""".Contains($val.Substring(0, 1))) {
$val = $val.Substring(1, $val.Length - 2)
}
$pyvenvConfig[$keyval[0]] = $val
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
}
}
}
return $pyvenvConfig
}
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
$Prompt = $pyvenvCfg['prompt'];
}
else {
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
$Prompt = Split-Path -Path $venvDir -Leaf
}
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
Write-Verbose "Setting prompt to '$Prompt'"
# Set the prompt to include the env name
# Make sure _OLD_VIRTUAL_PROMPT is global
function global:_OLD_VIRTUAL_PROMPT { "" }
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
function global:prompt {
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
_OLD_VIRTUAL_PROMPT
}
$env:VIRTUAL_ENV_PROMPT = $Prompt
}
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
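Get-PyVenvConfig above treats a `pyvenv.cfg` line as `key = value` when it splits into exactly two parts around the first `=`, and strips one pair of surrounding quotes from the value. A rough Python restatement of that parsing rule (the helper name and structure are illustrative; only the behaviour follows the docstring):
```py
from pathlib import Path

def read_pyvenv_cfg(config_dir):
    """Illustrative restatement of Get-PyVenvConfig's parsing rule."""
    config = {}
    path = Path(config_dir) / 'pyvenv.cfg'
    if not path.exists():
        # Mirror the PowerShell behaviour: return an empty map when the file is missing.
        return config
    for line in path.read_text().splitlines():
        key, sep, value = line.partition('=')
        if not sep:
            continue  # not a `key = value` line
        key, value = key.strip(), value.strip()
        if value[:1] in ('"', "'"):
            value = value[1:-1]  # drop one pair of surrounding quotes
        if key and value:
            config[key] = value
    return config
```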

70
venv/bin/activate Normal file

@@ -0,0 +1,70 @@
# This file must be used with "source bin/activate" *from bash*
# You cannot run it directly
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
unset VIRTUAL_ENV_PROMPT
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
# on Windows, a path can contain colons and backslashes and has to be converted:
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
# transform D:\path\to\venv to /d/path/to/venv on MSYS
# and to /cygdrive/d/path/to/venv on Cygwin
export VIRTUAL_ENV=$(cygpath "/home/mrhacker/Descargas/web2/venv")
else
# use the path as-is
export VIRTUAL_ENV="/home/mrhacker/Descargas/web2/venv"
fi
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1="(venv) ${PS1:-}"
export PS1
VIRTUAL_ENV_PROMPT="(venv) "
export VIRTUAL_ENV_PROMPT
fi
# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null
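Once the script above has been sourced, `VIRTUAL_ENV` is exported and the venv's `bin` directory sits at the front of `PATH`. A small, illustrative check from Python that activation took effect (none of this is part of the activate script itself):
```py
import os
import sys

# The activate script exports VIRTUAL_ENV; the venv's own interpreter reports it as sys.prefix.
venv = os.environ.get('VIRTUAL_ENV')
print('VIRTUAL_ENV =', venv)
print('sys.prefix  =', sys.prefix)
print('venv active =', venv is not None and sys.prefix.startswith(venv))
```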

27
venv/bin/activate.csh Normal file

@@ -0,0 +1,27 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV "/home/mrhacker/Descargas/web2/venv"
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
set prompt = "(venv) $prompt"
setenv VIRTUAL_ENV_PROMPT "(venv) "
endif
alias pydoc python -m pydoc
rehash

69
venv/bin/activate.fish Normal file

@@ -0,0 +1,69 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/). You cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
set -e _OLD_FISH_PROMPT_OVERRIDE
# prevents error when using nested fish instances (Issue #93858)
if functions -q _old_fish_prompt
functions -e fish_prompt
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
end
end
set -e VIRTUAL_ENV
set -e VIRTUAL_ENV_PROMPT
if test "$argv[1]" != "nondestructive"
# Self-destruct!
functions -e deactivate
end
end
# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV "/home/mrhacker/Descargas/web2/venv"
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
# Unset PYTHONHOME if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# Save the current fish_prompt function as the function _old_fish_prompt.
functions -c fish_prompt _old_fish_prompt
# With the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command.
set -l old_status $status
# Output the venv prompt; color taken from the blue of the Python logo.
printf "%s%s%s" (set_color 4B8BBE) "(venv) " (set_color normal)
# Restore the return status of the previous command.
echo "exit $old_status" | .
# Output the original/"old" prompt.
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
set -gx VIRTUAL_ENV_PROMPT "(venv) "
end

8
venv/bin/dotenv Executable file

@@ -0,0 +1,8 @@
#!/home/mrhacker/Descargas/web2/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from dotenv.__main__ import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli())

8
venv/bin/filetype Executable file

@@ -0,0 +1,8 @@
#!/home/mrhacker/Descargas/web2/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from filetype.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
venv/bin/flask Executable file

@@ -0,0 +1,8 @@
#!/home/mrhacker/Descargas/web2/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
venv/bin/normalizer Executable file

@@ -0,0 +1,8 @@
#!/home/mrhacker/Descargas/web2/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from charset_normalizer.cli import cli_detect
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli_detect())

8
venv/bin/pip Executable file

@@ -0,0 +1,8 @@
#!/home/mrhacker/Descargas/web2/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
venv/bin/pip3 Executable file

@@ -0,0 +1,8 @@
#!/home/mrhacker/Descargas/web2/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

8
venv/bin/pip3.12 Executable file

@@ -0,0 +1,8 @@
#!/home/mrhacker/Descargas/web2/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

1
venv/bin/python Symbolic link

@@ -0,0 +1 @@
/usr/bin/python

1
venv/bin/python3 Symbolic link

@@ -0,0 +1 @@
python

1
venv/bin/python3.12 Symbolic link

@@ -0,0 +1 @@
python


@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2014 Miguel Grinberg
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@@ -0,0 +1,77 @@
Metadata-Version: 2.1
Name: Flask-SocketIO
Version: 5.3.6
Summary: Socket.IO integration for Flask applications
Home-page: https://github.com/miguelgrinberg/flask-socketio
Author: Miguel Grinberg
Author-email: miguel.grinberg@gmail.com
Project-URL: Bug Tracker, https://github.com/miguelgrinberg/flask-socketio/issues
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Requires-Python: >=3.6
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: Flask >=0.9
Requires-Dist: python-socketio >=5.0.2
Provides-Extra: docs
Requires-Dist: sphinx ; extra == 'docs'
Flask-SocketIO
==============
[![Build status](https://github.com/miguelgrinberg/flask-socketio/workflows/build/badge.svg)](https://github.com/miguelgrinberg/Flask-SocketIO/actions) [![codecov](https://codecov.io/gh/miguelgrinberg/flask-socketio/branch/main/graph/badge.svg)](https://codecov.io/gh/miguelgrinberg/flask-socketio)
Socket.IO integration for Flask applications.
Sponsors
--------
The following organizations are funding this project:
![Socket.IO](https://images.opencollective.com/socketio/050e5eb/logo/64.png)<br>[Socket.IO](https://socket.io) | [Add your company here!](https://github.com/sponsors/miguelgrinberg)|
-|-
Many individual sponsors also support this project through small ongoing contributions. Why not [join them](https://github.com/sponsors/miguelgrinberg)?
Installation
------------
You can install this package as usual with pip:
pip install flask-socketio
Example
-------
```py
from flask import Flask, render_template
from flask_socketio import SocketIO, emit
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)
@app.route('/')
def index():
return render_template('index.html')
@socketio.event
def my_event(message):
emit('my response', {'data': 'got it!'})
if __name__ == '__main__':
socketio.run(app)
```
Resources
---------
- [Tutorial](http://blog.miguelgrinberg.com/post/easy-websockets-with-flask-and-gevent)
- [Documentation](http://flask-socketio.readthedocs.io/en/latest/)
- [PyPI](https://pypi.python.org/pypi/Flask-SocketIO)
- [Change Log](https://github.com/miguelgrinberg/Flask-SocketIO/blob/main/CHANGES.md)
- Questions? See the [questions](https://stackoverflow.com/questions/tagged/flask-socketio) others have asked on Stack Overflow, or [ask](https://stackoverflow.com/questions/ask?tags=python+flask-socketio+python-socketio) your own question.
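The README's handler can also be exercised without a browser through Flask-SocketIO's bundled test client; a minimal sketch, assuming the `app` and `socketio` objects from the example above are importable (the payload and assertion are illustrative):
```py
# Assumes `app` and `socketio` come from the README example above.
client = socketio.test_client(app)
client.emit('my_event', {'data': 'hello'})
received = client.get_received()
# The handler answers every my_event with a 'my response' event.
assert received[0]['name'] == 'my response'
assert received[0]['args'][0] == {'data': 'got it!'}
```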


@@ -0,0 +1,13 @@
Flask_SocketIO-5.3.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
Flask_SocketIO-5.3.6.dist-info/LICENSE,sha256=aNCWbkgKjS_T1cJtACyZbvCM36KxWnfQ0LWTuavuYKQ,1082
Flask_SocketIO-5.3.6.dist-info/METADATA,sha256=vmIOzjkNLXRjmocRXtso6hLV27aiJgH7_A55TVJyD4k,2631
Flask_SocketIO-5.3.6.dist-info/RECORD,,
Flask_SocketIO-5.3.6.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
Flask_SocketIO-5.3.6.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
Flask_SocketIO-5.3.6.dist-info/top_level.txt,sha256=C1ugzQBJ3HHUJsWGzyt70XRVOX-y4CUAR8MWKjwJOQ8,15
flask_socketio/__init__.py,sha256=ea3QXRYKBje4JQGcNSEOmj42qlf2peRNbCzZZWfD9DE,54731
flask_socketio/__pycache__/__init__.cpython-312.pyc,,
flask_socketio/__pycache__/namespace.cpython-312.pyc,,
flask_socketio/__pycache__/test_client.cpython-312.pyc,,
flask_socketio/namespace.py,sha256=b3oyXEemu2po-wpoy4ILTHQMVuVQqicogCDxfymfz_w,2020
flask_socketio/test_client.py,sha256=9_R1y_vP8yr8wzimQUEMAUyVqX12FMXurLj8t1ecDdc,11034


@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.41.2)
Root-Is-Purelib: true
Tag: py3-none-any


@@ -0,0 +1 @@
flask_socketio


@@ -0,0 +1,28 @@
Copyright 2010 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


@@ -0,0 +1,93 @@
Metadata-Version: 2.1
Name: MarkupSafe
Version: 2.1.5
Summary: Safely add untrusted strings to HTML/XML markup.
Home-page: https://palletsprojects.com/p/markupsafe/
Maintainer: Pallets
Maintainer-email: contact@palletsprojects.com
License: BSD-3-Clause
Project-URL: Donate, https://palletsprojects.com/donate
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
Project-URL: Source Code, https://github.com/pallets/markupsafe/
Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
Project-URL: Chat, https://discord.gg/pallets
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Text Processing :: Markup :: HTML
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE.rst
MarkupSafe
==========
MarkupSafe implements a text object that escapes characters so it is
safe to use in HTML and XML. Characters that have special meanings are
replaced so that they display as the actual characters. This mitigates
injection attacks, meaning untrusted user input can safely be displayed
on a page.
Installing
----------
Install and update using `pip`_:
.. code-block:: text
pip install -U MarkupSafe
.. _pip: https://pip.pypa.io/en/stable/getting-started/
Examples
--------
.. code-block:: pycon
>>> from markupsafe import Markup, escape
>>> # escape replaces special characters and wraps in Markup
>>> escape("<script>alert(document.cookie);</script>")
Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
>>> # wrap in Markup to mark text "safe" and prevent escaping
>>> Markup("<strong>Hello</strong>")
Markup('<strong>Hello</strong>')
>>> escape(Markup("<strong>Hello</strong>"))
Markup('<strong>Hello</strong>')
>>> # Markup is a str subclass
>>> # methods and operators escape their arguments
>>> template = Markup("Hello <em>{name}</em>")
>>> template.format(name='"World"')
Markup('Hello <em>&#34;World&#34;</em>')
Donate
------
The Pallets organization develops and supports MarkupSafe and other
popular packages. In order to grow the community of contributors and
users, and allow the maintainers to devote more time to the projects,
`please donate today`_.
.. _please donate today: https://palletsprojects.com/donate
Links
-----
- Documentation: https://markupsafe.palletsprojects.com/
- Changes: https://markupsafe.palletsprojects.com/changes/
- PyPI Releases: https://pypi.org/project/MarkupSafe/
- Source Code: https://github.com/pallets/markupsafe/
- Issue Tracker: https://github.com/pallets/markupsafe/issues/
- Chat: https://discord.gg/pallets
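One behaviour the examples above only hint at: `escape()` and `Markup()` treat any object exposing an `__html__` method as already-safe markup, which is how template engines pass pre-rendered fragments around without double-escaping. A small illustrative sketch (the `User` class is hypothetical):
```py
from markupsafe import Markup, escape

class User:
    def __init__(self, name):
        self.name = name

    def __html__(self):
        # Markup.format escapes the untrusted field, then we hand back safe markup.
        return Markup("<span class='user'>{}</span>").format(self.name)

print(escape(User("<Mallory>")))
# <span class='user'>&lt;Mallory&gt;</span>
```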


@@ -0,0 +1,15 @@
MarkupSafe-2.1.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
MarkupSafe-2.1.5.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
MarkupSafe-2.1.5.dist-info/METADATA,sha256=2dRDPam6OZLfpX0wg1JN5P3u9arqACxVSfdGmsJU7o8,3003
MarkupSafe-2.1.5.dist-info/RECORD,,
MarkupSafe-2.1.5.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
MarkupSafe-2.1.5.dist-info/WHEEL,sha256=vJMp7mUkE-fMIYyE5xJ9Q2cYPnWVgHf20clVdwMSXAg,152
MarkupSafe-2.1.5.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
markupsafe/__init__.py,sha256=r7VOTjUq7EMQ4v3p4R1LoVOGJg6ysfYRncLr34laRBs,10958
markupsafe/__pycache__/__init__.cpython-312.pyc,,
markupsafe/__pycache__/_native.cpython-312.pyc,,
markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713
markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083
markupsafe/_speedups.cpython-312-x86_64-linux-gnu.so,sha256=Y2jIPiSLPZlb82iRu9UUj27sbTui5o7SSoi-2SIXEUg,54072
markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229
markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0


@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: false
Tag: cp312-cp312-manylinux_2_17_x86_64
Tag: cp312-cp312-manylinux2014_x86_64


@@ -0,0 +1 @@
markupsafe


@@ -0,0 +1,376 @@
Mozilla Public License Version 2.0
==================================
Copyright 2009-2024 Joshua Bronson. All rights reserved.
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.


@@ -0,0 +1,260 @@
Metadata-Version: 2.1
Name: bidict
Version: 0.23.1
Summary: The bidirectional mapping library for Python.
Author-email: Joshua Bronson <jabronson@gmail.com>
License: MPL 2.0
Project-URL: Changelog, https://bidict.readthedocs.io/changelog.html
Project-URL: Documentation, https://bidict.readthedocs.io
Project-URL: Funding, https://bidict.readthedocs.io/#sponsoring
Project-URL: Repository, https://github.com/jab/bidict
Keywords: bidict,bimap,bidirectional,dict,dictionary,mapping,collections
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Typing :: Typed
Requires-Python: >=3.8
Description-Content-Type: text/x-rst
License-File: LICENSE
.. role:: doc
.. (Forward declaration for the "doc" role that Sphinx defines for interop with renderers that
are often used to show this doc and that are unaware of Sphinx (GitHub.com, PyPI.org, etc.).
Use :doc: rather than :ref: here for better interop as well.)
bidict
======
*The bidirectional mapping library for Python.*
Status
------
.. image:: https://img.shields.io/pypi/v/bidict.svg
:target: https://pypi.org/project/bidict
:alt: Latest release
.. image:: https://img.shields.io/readthedocs/bidict/main.svg
:target: https://bidict.readthedocs.io/en/main/
:alt: Documentation
.. image:: https://github.com/jab/bidict/actions/workflows/test.yml/badge.svg
:target: https://github.com/jab/bidict/actions/workflows/test.yml?query=branch%3Amain
:alt: GitHub Actions CI status
.. image:: https://img.shields.io/pypi/l/bidict.svg
:target: https://raw.githubusercontent.com/jab/bidict/main/LICENSE
:alt: License
.. image:: https://static.pepy.tech/badge/bidict
:target: https://pepy.tech/project/bidict
:alt: PyPI Downloads
.. image:: https://img.shields.io/badge/GitHub-sponsor-ff69b4
:target: https://github.com/sponsors/jab
:alt: Sponsor
Features
--------
- Mature: Depended on by
Google, Venmo, CERN, Baidu, Tencent,
and teams across the world since 2009
- Familiar, Pythonic APIs
that are carefully designed for
safety, simplicity, flexibility, and ergonomics
- Lightweight, with no runtime dependencies
outside Python's standard library
- Implemented in
concise, well-factored, fully type-hinted Python code
that is optimized for running efficiently
as well as for long-term maintenance and stability
(as well as `joy <#learning-from-bidict>`__)
- Extensively `documented <https://bidict.readthedocs.io>`__
- 100% test coverage
running continuously across all supported Python versions
(including property-based tests and benchmarks)
Installation
------------
``pip install bidict``
Quick Start
-----------
.. code:: python
>>> from bidict import bidict
>>> element_by_symbol = bidict({'H': 'hydrogen'})
>>> element_by_symbol['H']
'hydrogen'
>>> element_by_symbol.inverse['hydrogen']
'H'
For more usage documentation,
head to the :doc:`intro` [#fn-intro]_
and proceed from there.
Enterprise Support
------------------
Enterprise-level support for bidict can be obtained via the
`Tidelift subscription <https://tidelift.com/subscription/pkg/pypi-bidict?utm_source=pypi-bidict&utm_medium=referral&utm_campaign=readme>`__
or by `contacting me directly <mailto:jabronson@gmail.com>`__.
I have a US-based LLC set up for invoicing,
and I have 15+ years of professional experience
delivering software and support to companies successfully.
You can also sponsor my work through several platforms, including GitHub Sponsors.
See the `Sponsoring <#sponsoring>`__ section below for details,
including rationale and examples of companies
supporting the open source projects they depend on.
Voluntary Community Support
---------------------------
Please search through already-asked questions and answers
in `GitHub Discussions <https://github.com/jab/bidict/discussions>`__
and the `issue tracker <https://github.com/jab/bidict/issues?q=is%3Aissue>`__
in case your question has already been addressed.
Otherwise, please feel free to
`start a new discussion <https://github.com/jab/bidict/discussions>`__
or `create a new issue <https://github.com/jab/bidict/issues/new>`__ on GitHub
for voluntary community support.
Notice of Usage
---------------
If you use bidict,
and especially if your usage or your organization is significant in some way,
please let me know in any of the following ways:
- `star bidict on GitHub <https://github.com/jab/bidict>`__
- post in `GitHub Discussions <https://github.com/jab/bidict/discussions>`__
- `email me <mailto:jabronson@gmail.com>`__
Changelog
---------
For bidict release notes, see the :doc:`changelog`. [#fn-changelog]_
Release Notifications
---------------------
.. duplicated in CHANGELOG.rst:
(Would use `.. include::` but GitHub's renderer doesn't support it.)
Watch `bidict releases on GitHub <https://github.com/jab/bidict/releases>`__
to be notified when new versions of bidict are published.
Click the "Watch" dropdown, choose "Custom", and then choose "Releases".
Learning from bidict
--------------------
One of the best things about bidict
is that it touches a surprising number of
interesting Python corners,
especially given its small size and scope.
Check out :doc:`learning-from-bidict` [#fn-learning]_
if you're interested in learning more.
Contributing
------------
I have been bidict's sole maintainer
and `active contributor <https://github.com/jab/bidict/graphs/contributors>`__
since I started the project ~15 years ago.
Your help would be most welcome!
See the :doc:`contributors-guide` [#fn-contributing]_
for more information.
Sponsoring
----------
.. duplicated in CONTRIBUTING.rst
(Would use `.. include::` but GitHub's renderer doesn't support it.)
.. image:: https://img.shields.io/badge/GitHub-sponsor-ff69b4
:target: https://github.com/sponsors/jab
:alt: Sponsor through GitHub
Bidict is the product of thousands of hours of my unpaid work
over the 15+ years that I've been the sole maintainer.
If bidict has helped you or your company accomplish your work,
please sponsor my work through one of the following,
and/or ask your company to do the same:
- `GitHub <https://github.com/sponsors/jab>`__
- `PayPal <https://www.paypal.com/cgi-bin/webscr?cmd=_xclick&business=jabronson%40gmail%2ecom&lc=US&item_name=Sponsor%20bidict>`__
- `Tidelift <https://tidelift.com>`__
- `thanks.dev <https://thanks.dev>`__
- `Gumroad <https://gumroad.com/l/bidict>`__
- `a support engagement with my LLC <#enterprise-support>`__
If you're not sure which to use, GitHub is an easy option,
especially if you already have a GitHub account.
Just choose a monthly or one-time amount, and GitHub handles everything else.
Your bidict sponsorship on GitHub will automatically go
on the same regular bill as any other GitHub charges you pay for.
PayPal is another easy option for one-time contributions.
See the following for rationale and examples of companies
supporting the open source projects they depend on
in this manner:
- `<https://engineering.atspotify.com/2022/04/announcing-the-spotify-foss-fund/>`__
- `<https://blog.sentry.io/2021/10/21/we-just-gave-154-999-dollars-and-89-cents-to-open-source-maintainers>`__
- `<https://engineering.indeedblog.com/blog/2019/07/foss-fund-six-months-in/>`__
.. - `<https://sethmlarson.dev/blog/people-in-your-software-supply-chain>`__
.. - `<https://www.cognitect.com/blog/supporting-open-source-developers>`__
.. - `<https://vorpus.org/blog/the-unreasonable-effectiveness-of-investment-in-open-source-infrastructure/>`__
Finding Documentation
---------------------
If you're viewing this on `<https://bidict.readthedocs.io>`__,
note that multiple versions of the documentation are available,
and you can choose a different version using the popup menu at the bottom-right.
Please make sure you're viewing the version of the documentation
that corresponds to the version of bidict you'd like to use.
If you're viewing this on GitHub, PyPI, or some other place
that can't render and link this documentation properly
and are seeing broken links,
try these alternate links instead:
.. [#fn-intro] `<https://bidict.readthedocs.io/intro.html>`__ | `<docs/intro.rst>`__
.. [#fn-changelog] `<https://bidict.readthedocs.io/changelog.html>`__ | `<CHANGELOG.rst>`__
.. [#fn-learning] `<https://bidict.readthedocs.io/learning-from-bidict.html>`__ | `<docs/learning-from-bidict.rst>`__
.. [#fn-contributing] `<https://bidict.readthedocs.io/contributors-guide.html>`__ | `<CONTRIBUTING.rst>`__


@@ -0,0 +1,32 @@
bidict-0.23.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
bidict-0.23.1.dist-info/LICENSE,sha256=8_U63OyqSNc6ZuI4-lupBstBh2eDtF0ooTRrMULuvZo,16784
bidict-0.23.1.dist-info/METADATA,sha256=2ovIRm6Df8gdwAMekGqkeBSF5TWj2mv1jpmh4W4ks7o,8704
bidict-0.23.1.dist-info/RECORD,,
bidict-0.23.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
bidict-0.23.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
bidict-0.23.1.dist-info/top_level.txt,sha256=WuQO02jp0ODioS7sJoaHg3JJ5_3h6Sxo9RITvNGPYmc,7
bidict/__init__.py,sha256=pL87KsrDpBsl3AG09LQk1t1TSFt0hIJVYa2POMdErN8,4398
bidict/__pycache__/__init__.cpython-312.pyc,,
bidict/__pycache__/_abc.cpython-312.pyc,,
bidict/__pycache__/_base.cpython-312.pyc,,
bidict/__pycache__/_bidict.cpython-312.pyc,,
bidict/__pycache__/_dup.cpython-312.pyc,,
bidict/__pycache__/_exc.cpython-312.pyc,,
bidict/__pycache__/_frozen.cpython-312.pyc,,
bidict/__pycache__/_iter.cpython-312.pyc,,
bidict/__pycache__/_orderedbase.cpython-312.pyc,,
bidict/__pycache__/_orderedbidict.cpython-312.pyc,,
bidict/__pycache__/_typing.cpython-312.pyc,,
bidict/__pycache__/metadata.cpython-312.pyc,,
bidict/_abc.py,sha256=SMCNdCsmqSWg0OGnMZtnnXY8edjXcyZup5tva4HBm_c,3172
bidict/_base.py,sha256=YiauA0aj52fNB6cfZ4gBt6OV-CRQoZm7WVhuw1nT-Cg,24439
bidict/_bidict.py,sha256=Sr-RoEzWOaxpnDRbDJ7ngaGRIsyGnqZgzvR-NyT4jl4,6923
bidict/_dup.py,sha256=YAn5gWA6lwMBA5A6ebVF19UTZyambGS8WxmbK4TN1Ww,2079
bidict/_exc.py,sha256=HnD_WgteI5PrXa3zBx9RUiGlgnZTO6CF4nIU9p3-njk,1066
bidict/_frozen.py,sha256=p4TaRHKeyTs0KmlpwSnZiTlN_CR4J97kAgBpNdZHQMs,1771
bidict/_iter.py,sha256=zVUx-hJ1M4YuJROoFWRjPKlcaFnyo1AAuRpOaKAFhOQ,1530
bidict/_orderedbase.py,sha256=M7v5rHa7vrym9Z3DxQBFQDxjnrr39Z8p26V0c1PggoE,8942
bidict/_orderedbidict.py,sha256=pPnmC19mIISrj8_yjnb-4r_ti1B74tD5eTd08DETNuI,7080
bidict/_typing.py,sha256=AylMZpBhEFTQegfziPSxfKkKLk7oUsH6o3awDIg2z_k,1289
bidict/metadata.py,sha256=BMIKu6fBY_OKeV_q48EpumE7MdmFw8rFcdaUz8kcIYk,573
bidict/py.typed,sha256=RJao5SVFYIp8IfbxhL_SpZkBQYe3XXzPlobSRdh4B_c,16


@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: true
Tag: py3-none-any


@@ -0,0 +1,103 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# ============================================================================
# * Welcome to the bidict source code *
# ============================================================================
# Reading through the code? You'll find a "Code review nav" comment like the one
# below at the top and bottom of the key source files. Follow these cues to take
# a path through the code that's optimized for familiarizing yourself with it.
#
# If you're not reading this on https://github.com/jab/bidict already, go there
# to ensure you have the latest version of the code. While there, you can also
# star the project, watch it for updates, fork the code, and submit an issue or
# pull request with any proposed changes. More information can be found linked
# from README.rst, which is also shown on https://github.com/jab/bidict.
# * Code review nav *
# ============================================================================
# Current: __init__.py Next: _abc.py →
# ============================================================================
"""The bidirectional mapping library for Python.
----
bidict by example:
.. code-block:: python
>>> from bidict import bidict
>>> element_by_symbol = bidict({'H': 'hydrogen'})
>>> element_by_symbol['H']
'hydrogen'
>>> element_by_symbol.inverse['hydrogen']
'H'
Please see https://github.com/jab/bidict for the most up-to-date code and
https://bidict.readthedocs.io for the most up-to-date documentation
if you are reading this elsewhere.
----
.. :copyright: (c) 2009-2024 Joshua Bronson.
.. :license: MPLv2. See LICENSE for details.
"""
# Use private aliases to not re-export these publicly (for Sphinx automodule with imported-members).
from __future__ import annotations as _annotations
from contextlib import suppress as _suppress
from ._abc import BidirectionalMapping as BidirectionalMapping
from ._abc import MutableBidirectionalMapping as MutableBidirectionalMapping
from ._base import BidictBase as BidictBase
from ._base import BidictKeysView as BidictKeysView
from ._base import GeneratedBidictInverse as GeneratedBidictInverse
from ._bidict import MutableBidict as MutableBidict
from ._bidict import bidict as bidict
from ._dup import DROP_NEW as DROP_NEW
from ._dup import DROP_OLD as DROP_OLD
from ._dup import ON_DUP_DEFAULT as ON_DUP_DEFAULT
from ._dup import ON_DUP_DROP_OLD as ON_DUP_DROP_OLD
from ._dup import ON_DUP_RAISE as ON_DUP_RAISE
from ._dup import RAISE as RAISE
from ._dup import OnDup as OnDup
from ._dup import OnDupAction as OnDupAction
from ._exc import BidictException as BidictException
from ._exc import DuplicationError as DuplicationError
from ._exc import KeyAndValueDuplicationError as KeyAndValueDuplicationError
from ._exc import KeyDuplicationError as KeyDuplicationError
from ._exc import ValueDuplicationError as ValueDuplicationError
from ._frozen import frozenbidict as frozenbidict
from ._iter import inverted as inverted
from ._orderedbase import OrderedBidictBase as OrderedBidictBase
from ._orderedbidict import OrderedBidict as OrderedBidict
from .metadata import __author__ as __author__
from .metadata import __copyright__ as __copyright__
from .metadata import __description__ as __description__
from .metadata import __license__ as __license__
from .metadata import __url__ as __url__
from .metadata import __version__ as __version__
# Set __module__ of re-exported classes to the 'bidict' top-level module, so that e.g.
# 'bidict.bidict' shows up as 'bidict.bidict' rather than 'bidict._bidict.bidict'.
for _obj in tuple(locals().values()): # pragma: no cover
if not getattr(_obj, '__module__', '').startswith('bidict.'):
continue
with _suppress(AttributeError):
_obj.__module__ = 'bidict'
# * Code review nav *
# ============================================================================
# Current: __init__.py Next: _abc.py →
# ============================================================================
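A minimal usage sketch (not part of the vendored file), assuming a pure-Python install of the package above is importable; it shows the effect of the `__module__` rewrite loop at the end of `__init__.py`:

```python
import bidict

# After the loop above, re-exported classes report the top-level package as their
# module (on a pure-Python install; mypyc-compiled builds may leave this unchanged).
print(bidict.bidict.__module__)         # 'bidict', not 'bidict._bidict'
print(bidict.OrderedBidict.__module__)  # 'bidict', not 'bidict._orderedbidict'
print(bidict.frozenbidict.__module__)   # 'bidict', not 'bidict._frozen'
```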

@ -0,0 +1,79 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# * Code review nav *
# (see comments in __init__.py)
# ============================================================================
# ← Prev: __init__.py Current: _abc.py Next: _base.py →
# ============================================================================
"""Provide the :class:`BidirectionalMapping` abstract base class."""
from __future__ import annotations
import typing as t
from abc import abstractmethod
from ._typing import KT
from ._typing import VT
class BidirectionalMapping(t.Mapping[KT, VT]):
"""Abstract base class for bidirectional mapping types.
Extends :class:`collections.abc.Mapping` primarily by adding the
(abstract) :attr:`inverse` property,
which implementers of :class:`BidirectionalMapping`
should override to return a reference to the inverse
:class:`BidirectionalMapping` instance.
"""
__slots__ = ()
@property
@abstractmethod
def inverse(self) -> BidirectionalMapping[VT, KT]:
"""The inverse of this bidirectional mapping instance.
*See also* :attr:`bidict.BidictBase.inverse`, :attr:`bidict.BidictBase.inv`
:raises NotImplementedError: Meant to be overridden in subclasses.
"""
# The @abstractmethod decorator prevents subclasses from being instantiated unless they
# override this method. But an overriding implementation may merely return super().inverse,
# in which case this implementation is used. Raise NotImplementedError to indicate that
# subclasses must actually provide their own implementation.
raise NotImplementedError
def __inverted__(self) -> t.Iterator[tuple[VT, KT]]:
"""Get an iterator over the items in :attr:`inverse`.
This is functionally equivalent to iterating over the items in the
forward mapping and inverting each one on the fly, but this provides a
more efficient implementation: Assuming the already-inverted items
are stored in :attr:`inverse`, just return an iterator over them directly.
Providing this default implementation enables external functions,
particularly :func:`~bidict.inverted`, to use this optimized
implementation when available, instead of having to invert on the fly.
*See also* :func:`bidict.inverted`
"""
return iter(self.inverse.items())
class MutableBidirectionalMapping(BidirectionalMapping[KT, VT], t.MutableMapping[KT, VT]):
"""Abstract base class for mutable bidirectional mapping types."""
__slots__ = ()
# * Code review nav *
# ============================================================================
# ← Prev: __init__.py Current: _abc.py Next: _base.py →
# ============================================================================
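A minimal sketch (assuming bidict is installed) of a third-party, read-only implementation of this ABC: it overrides the abstract `inverse` property and inherits the default `__inverted__`. The `TwoWayDict` name and its eager re-inversion are illustrative only, not part of bidict.

```python
from __future__ import annotations
import typing as t
from bidict import BidirectionalMapping


class TwoWayDict(BidirectionalMapping[t.Any, t.Any]):
    """Read-only bidirectional view over a plain dict (illustrative only)."""

    def __init__(self, fwd: dict[t.Any, t.Any]) -> None:
        self._fwd = dict(fwd)

    @property
    def inverse(self) -> TwoWayDict:
        # Recomputed on each access; a real implementation would cache this.
        return TwoWayDict({v: k for k, v in self._fwd.items()})

    def __getitem__(self, key: t.Any) -> t.Any:
        return self._fwd[key]

    def __iter__(self) -> t.Iterator[t.Any]:
        return iter(self._fwd)

    def __len__(self) -> int:
        return len(self._fwd)


td = TwoWayDict({'H': 'hydrogen'})
print(td.inverse['hydrogen'])    # 'H'
print(list(td.__inverted__()))   # [('hydrogen', 'H')], via the default implementation above
```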

@ -0,0 +1,556 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# * Code review nav *
# (see comments in __init__.py)
# ============================================================================
# ← Prev: _abc.py Current: _base.py Next: _frozen.py →
# ============================================================================
"""Provide :class:`BidictBase`."""
from __future__ import annotations
import typing as t
import weakref
from itertools import starmap
from operator import eq
from types import MappingProxyType
from ._abc import BidirectionalMapping
from ._dup import DROP_NEW
from ._dup import DROP_OLD
from ._dup import ON_DUP_DEFAULT
from ._dup import RAISE
from ._dup import OnDup
from ._exc import DuplicationError
from ._exc import KeyAndValueDuplicationError
from ._exc import KeyDuplicationError
from ._exc import ValueDuplicationError
from ._iter import inverted
from ._iter import iteritems
from ._typing import KT
from ._typing import MISSING
from ._typing import OKT
from ._typing import OVT
from ._typing import VT
from ._typing import Maplike
from ._typing import MapOrItems
OldKV = t.Tuple[OKT[KT], OVT[VT]]
DedupResult = t.Optional[OldKV[KT, VT]]
Unwrites = t.List[t.Tuple[t.Any, ...]]
BT = t.TypeVar('BT', bound='BidictBase[t.Any, t.Any]')
class BidictKeysView(t.KeysView[KT], t.ValuesView[KT]):
"""Since the keys of a bidict are the values of its inverse (and vice versa),
the :class:`~collections.abc.ValuesView` result of calling *bi.values()*
is also a :class:`~collections.abc.KeysView` of *bi.inverse*.
"""
class BidictBase(BidirectionalMapping[KT, VT]):
"""Base class implementing :class:`BidirectionalMapping`."""
#: The default :class:`~bidict.OnDup`
#: that governs behavior when a provided item
#: duplicates the key or value of other item(s).
#:
#: *See also*
#: :ref:`basic-usage:Values Must Be Unique` (https://bidict.rtfd.io/basic-usage.html#values-must-be-unique),
#: :doc:`extending` (https://bidict.rtfd.io/extending.html)
on_dup = ON_DUP_DEFAULT
_fwdm: t.MutableMapping[KT, VT] #: the backing forward mapping (*key* → *val*)
_invm: t.MutableMapping[VT, KT] #: the backing inverse mapping (*val* → *key*)
# Use Any rather than KT/VT in the following to avoid "ClassVar cannot contain type variables" errors:
_fwdm_cls: t.ClassVar[type[t.MutableMapping[t.Any, t.Any]]] = dict #: class of the backing forward mapping
_invm_cls: t.ClassVar[type[t.MutableMapping[t.Any, t.Any]]] = dict #: class of the backing inverse mapping
#: The class of the inverse bidict instance.
_inv_cls: t.ClassVar[type[BidictBase[t.Any, t.Any]]]
def __init_subclass__(cls) -> None:
super().__init_subclass__()
cls._init_class()
@classmethod
def _init_class(cls) -> None:
cls._ensure_inv_cls()
cls._set_reversed()
__reversed__: t.ClassVar[t.Any]
@classmethod
def _set_reversed(cls) -> None:
"""Set __reversed__ for subclasses that do not set it explicitly
according to whether backing mappings are reversible.
"""
if cls is not BidictBase:
resolved = cls.__reversed__
overridden = resolved is not BidictBase.__reversed__
if overridden: # E.g. OrderedBidictBase, OrderedBidict
return
backing_reversible = all(issubclass(i, t.Reversible) for i in (cls._fwdm_cls, cls._invm_cls))
cls.__reversed__ = _fwdm_reversed if backing_reversible else None
@classmethod
def _ensure_inv_cls(cls) -> None:
"""Ensure :attr:`_inv_cls` is set, computing it dynamically if necessary.
All subclasses provided in :mod:`bidict` are their own inverse classes,
i.e., their backing forward and inverse mappings are both the same type,
but users may define subclasses where this is not the case.
This method ensures that the inverse class is computed correctly regardless.
See: :ref:`extending:Dynamic Inverse Class Generation`
(https://bidict.rtfd.io/extending.html#dynamic-inverse-class-generation)
"""
# This _ensure_inv_cls() method is (indirectly) corecursive with _make_inv_cls() below
# in the case that we need to dynamically generate the inverse class:
# 1. _ensure_inv_cls() calls cls._make_inv_cls()
# 2. cls._make_inv_cls() calls type(..., (cls, ...), ...) to dynamically generate inv_cls
# 3. Our __init_subclass__ hook (see above) is automatically called on inv_cls
# 4. inv_cls.__init_subclass__() calls inv_cls._ensure_inv_cls()
# 5. inv_cls._ensure_inv_cls() resolves to this implementation
# (inv_cls deliberately does not override this), so we're back where we started.
# But since the _make_inv_cls() call will have set inv_cls.__dict__._inv_cls,
# just check if it's already set before calling _make_inv_cls() to prevent infinite recursion.
if getattr(cls, '__dict__', {}).get('_inv_cls'): # Don't assume cls.__dict__ (e.g. mypyc native class)
return
cls._inv_cls = cls._make_inv_cls()
@classmethod
def _make_inv_cls(cls: type[BT]) -> type[BT]:
diff = cls._inv_cls_dict_diff()
cls_is_own_inv = all(getattr(cls, k, MISSING) == v for (k, v) in diff.items())
if cls_is_own_inv:
return cls
# Suppress auto-calculation of _inv_cls's _inv_cls since we know it already.
# Works with the guard in BidictBase._ensure_inv_cls() to prevent infinite recursion.
diff['_inv_cls'] = cls
inv_cls = type(f'{cls.__name__}Inv', (cls, GeneratedBidictInverse), diff)
inv_cls.__module__ = cls.__module__
return t.cast(t.Type[BT], inv_cls)
@classmethod
def _inv_cls_dict_diff(cls) -> dict[str, t.Any]:
return {
'_fwdm_cls': cls._invm_cls,
'_invm_cls': cls._fwdm_cls,
}
def __init__(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None:
"""Make a new bidirectional mapping.
The signature behaves like that of :class:`dict`.
Items passed via positional arg are processed first,
followed by any items passed via keyword argument.
Any duplication encountered along the way
is handled as per :attr:`on_dup`.
"""
self._fwdm = self._fwdm_cls()
self._invm = self._invm_cls()
self._update(arg, kw, rollback=False)
# If Python ever adds support for higher-kinded types, `inverse` could use them, e.g.
# def inverse(self: BT[KT, VT]) -> BT[VT, KT]:
# Ref: https://github.com/python/typing/issues/548#issuecomment-621571821
@property
def inverse(self) -> BidictBase[VT, KT]:
"""The inverse of this bidirectional mapping instance."""
# When `bi.inverse` is called for the first time, this method
# computes the inverse instance, stores it for subsequent use, and then
# returns it. It also stores a reference on `bi.inverse` back to `bi`,
# but uses a weakref to avoid creating a reference cycle. Strong references
# to inverse instances are stored in ._inv, and weak references are stored
# in ._invweak.
# First check if a strong reference is already stored.
inv: BidictBase[VT, KT] | None = getattr(self, '_inv', None)
if inv is not None:
return inv
# Next check if a weak reference is already stored.
invweak = getattr(self, '_invweak', None)
if invweak is not None:
inv = invweak() # Try to resolve a strong reference and return it.
if inv is not None:
return inv
# No luck. Compute the inverse reference and store it for subsequent use.
inv = self._make_inverse()
self._inv: BidictBase[VT, KT] | None = inv
self._invweak: weakref.ReferenceType[BidictBase[VT, KT]] | None = None
# Also store a weak reference back to `instance` on its inverse instance, so that
# the second `.inverse` access in `bi.inverse.inverse` hits the cached weakref.
inv._inv = None
inv._invweak = weakref.ref(self)
# In e.g. `bidict().inverse.inverse`, this design ensures that a strong reference
# back to the original instance is retained before its refcount drops to zero,
# avoiding an unintended potential deallocation.
return inv
def _make_inverse(self) -> BidictBase[VT, KT]:
inv: BidictBase[VT, KT] = self._inv_cls()
inv._fwdm = self._invm
inv._invm = self._fwdm
return inv
@property
def inv(self) -> BidictBase[VT, KT]:
"""Alias for :attr:`inverse`."""
return self.inverse
def __repr__(self) -> str:
"""See :func:`repr`."""
clsname = self.__class__.__name__
items = dict(self.items()) if self else ''
return f'{clsname}({items})'
def values(self) -> BidictKeysView[VT]:
"""A set-like object providing a view on the contained values.
Since the values of a bidict are equivalent to the keys of its inverse,
this method returns a set-like object for this bidict's values
rather than just a collections.abc.ValuesView.
This object supports set operations like union and difference,
and constant- rather than linear-time containment checks,
and is no more expensive to provide than the less capable
collections.abc.ValuesView would be.
See :meth:`keys` for more information.
"""
return t.cast(BidictKeysView[VT], self.inverse.keys())
def keys(self) -> t.KeysView[KT]:
"""A set-like object providing a view on the contained keys.
When *b._fwdm* is a :class:`dict`, *b.keys()* returns a
*dict_keys* object that behaves exactly the same as
*collections.abc.KeysView(b)*, except for
- offering better performance
- being reversible on Python 3.8+
- having a .mapping attribute in Python 3.10+
that exposes a mappingproxy to *b._fwdm*.
"""
fwdm, fwdm_cls = self._fwdm, self._fwdm_cls
return fwdm.keys() if fwdm_cls is dict else BidictKeysView(self)
def items(self) -> t.ItemsView[KT, VT]:
"""A set-like object providing a view on the contained items.
When *b._fwdm* is a :class:`dict`, *b.items()* returns a
*dict_items* object that behaves exactly the same as
*collections.abc.ItemsView(b)*, except for:
- offering better performance
- being reversible on Python 3.8+
- having a .mapping attribute in Python 3.10+
that exposes a mappingproxy to *b._fwdm*.
"""
return self._fwdm.items() if self._fwdm_cls is dict else super().items()
# The inherited collections.abc.Mapping.__contains__() method is implemented by doing a `try`
# `except KeyError` around `self[key]`. The following implementation is much faster,
# especially in the missing case.
def __contains__(self, key: t.Any) -> bool:
"""True if the mapping contains the specified key, else False."""
return key in self._fwdm
# The inherited collections.abc.Mapping.__eq__() method is implemented in terms of an inefficient
# `dict(self.items()) == dict(other.items())` comparison, so override it with a
# more efficient implementation.
def __eq__(self, other: object) -> bool:
"""*x.__eq__(other)  x == other*
Equivalent to *dict(x.items()) == dict(other.items())*
but more efficient.
Note that :meth:`bidict's __eq__() <bidict.BidictBase.__eq__>` implementation
is inherited by subclasses,
in particular by the ordered bidict subclasses,
so even with ordered bidicts,
:ref:`== comparison is order-insensitive <eq-order-insensitive>`
(https://bidict.rtfd.io/other-bidict-types.html#eq-is-order-insensitive).
*See also* :meth:`equals_order_sensitive`
"""
if isinstance(other, t.Mapping):
return self._fwdm.items() == other.items()
# Ref: https://docs.python.org/3/library/constants.html#NotImplemented
return NotImplemented
def equals_order_sensitive(self, other: object) -> bool:
"""Order-sensitive equality check.
*See also* :ref:`eq-order-insensitive`
(https://bidict.rtfd.io/other-bidict-types.html#eq-is-order-insensitive)
"""
if not isinstance(other, t.Mapping) or len(self) != len(other):
return False
return all(starmap(eq, zip(self.items(), other.items())))
def _dedup(self, key: KT, val: VT, on_dup: OnDup) -> DedupResult[KT, VT]:
"""Check *key* and *val* for any duplication in self.
Handle any duplication as per the passed in *on_dup*.
If (key, val) is already present, return None
since writing (key, val) would be a no-op.
If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
:attr:`~bidict.DROP_NEW`, return None.
If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
:attr:`~bidict.RAISE`, raise the appropriate exception.
If duplication is found and the corresponding :class:`~bidict.OnDupAction` is
:attr:`~bidict.DROP_OLD`, or if no duplication is found,
return *(oldkey, oldval)*.
"""
fwdm, invm = self._fwdm, self._invm
oldval: OVT[VT] = fwdm.get(key, MISSING)
oldkey: OKT[KT] = invm.get(val, MISSING)
isdupkey, isdupval = oldval is not MISSING, oldkey is not MISSING
if isdupkey and isdupval:
if key == oldkey:
assert val == oldval
# (key, val) duplicates an existing item -> no-op.
return None
# key and val each duplicate a different existing item.
if on_dup.val is RAISE:
raise KeyAndValueDuplicationError(key, val)
if on_dup.val is DROP_NEW:
return None
assert on_dup.val is DROP_OLD
# Fall through to the return statement on the last line.
elif isdupkey:
if on_dup.key is RAISE:
raise KeyDuplicationError(key)
if on_dup.key is DROP_NEW:
return None
assert on_dup.key is DROP_OLD
# Fall through to the return statement on the last line.
elif isdupval:
if on_dup.val is RAISE:
raise ValueDuplicationError(val)
if on_dup.val is DROP_NEW:
return None
assert on_dup.val is DROP_OLD
# Fall through to the return statement on the last line.
# else neither isdupkey nor isdupval.
return oldkey, oldval
def _write(self, newkey: KT, newval: VT, oldkey: OKT[KT], oldval: OVT[VT], unwrites: Unwrites | None) -> None:
"""Insert (newkey, newval), extending *unwrites* with associated inverse operations if provided.
*oldkey* and *oldval* are as returned by :meth:`_dedup`.
If *unwrites* is not None, it is extended with the inverse operations necessary to undo the write.
This design allows :meth:`_update` to roll back a partially applied update that fails part-way through
when necessary.
This design also allows subclasses that require additional operations to easily extend this implementation.
For example, :class:`bidict.OrderedBidictBase` calls this inherited implementation, and then extends *unwrites*
with additional operations needed to keep its internal linked list nodes consistent with its items' order
as changes are made.
"""
fwdm, invm = self._fwdm, self._invm
fwdm_set, invm_set = fwdm.__setitem__, invm.__setitem__
fwdm_del, invm_del = fwdm.__delitem__, invm.__delitem__
# Always perform the following writes regardless of duplication.
fwdm_set(newkey, newval)
invm_set(newval, newkey)
if oldval is MISSING and oldkey is MISSING: # no key or value duplication
# {0: 1, 2: 3} | {4: 5} => {0: 1, 2: 3, 4: 5}
if unwrites is not None:
unwrites.extend((
(fwdm_del, newkey),
(invm_del, newval),
))
elif oldval is not MISSING and oldkey is not MISSING: # key and value duplication across two different items
# {0: 1, 2: 3} | {0: 3} => {0: 3}
fwdm_del(oldkey)
invm_del(oldval)
if unwrites is not None:
unwrites.extend((
(fwdm_set, newkey, oldval),
(invm_set, oldval, newkey),
(fwdm_set, oldkey, newval),
(invm_set, newval, oldkey),
))
elif oldval is not MISSING: # just key duplication
# {0: 1, 2: 3} | {2: 4} => {0: 1, 2: 4}
invm_del(oldval)
if unwrites is not None:
unwrites.extend((
(fwdm_set, newkey, oldval),
(invm_set, oldval, newkey),
(invm_del, newval),
))
else:
assert oldkey is not MISSING # just value duplication
# {0: 1, 2: 3} | {4: 3} => {0: 1, 4: 3}
fwdm_del(oldkey)
if unwrites is not None:
unwrites.extend((
(fwdm_set, oldkey, newval),
(invm_set, newval, oldkey),
(fwdm_del, newkey),
))
def _update(
self,
arg: MapOrItems[KT, VT],
kw: t.Mapping[str, VT] = MappingProxyType({}),
*,
rollback: bool | None = None,
on_dup: OnDup | None = None,
) -> None:
"""Update with the items from *arg* and *kw*, maybe failing and rolling back as per *on_dup* and *rollback*."""
# Note: We must process input in a single pass, since arg may be a generator.
if not isinstance(arg, (t.Iterable, Maplike)):
raise TypeError(f"'{arg.__class__.__name__}' object is not iterable")
if not arg and not kw:
return
if on_dup is None:
on_dup = self.on_dup
if rollback is None:
rollback = RAISE in on_dup
# Fast path when we're empty and updating only from another bidict (i.e. no dup vals in new items).
if not self and not kw and isinstance(arg, BidictBase):
self._init_from(arg)
return
# Fast path when we're adding more items than we contain already and rollback is enabled:
# Update a copy of self with rollback disabled. Fail if that fails, otherwise become the copy.
if rollback and isinstance(arg, t.Sized) and len(arg) + len(kw) > len(self):
tmp = self.copy()
tmp._update(arg, kw, rollback=False, on_dup=on_dup)
self._init_from(tmp)
return
# In all other cases, benchmarking has indicated that the update is best implemented as follows:
# For each new item, perform a dup check (raising if necessary), and apply the associated writes we need to
# perform on our backing _fwdm and _invm mappings. If rollback is enabled, also compute the associated unwrites
# as we go. If the update results in a DuplicationError and rollback is enabled, apply the accumulated unwrites
# before raising, to ensure that we fail clean.
write = self._write
unwrites: Unwrites | None = [] if rollback else None
for key, val in iteritems(arg, **kw):
try:
dedup_result = self._dedup(key, val, on_dup)
except DuplicationError:
if unwrites is not None:
for fn, *args in reversed(unwrites):
fn(*args)
raise
if dedup_result is not None:
write(key, val, *dedup_result, unwrites=unwrites)
def __copy__(self: BT) -> BT:
"""Used for the copy protocol. See the :mod:`copy` module."""
return self.copy()
def copy(self: BT) -> BT:
"""Make a (shallow) copy of this bidict."""
# Could just `return self.__class__(self)` here, but the below is faster. The former
# would copy this bidict's items into a new instance one at a time (checking for duplication
# for each item), whereas the below copies from the backing mappings all at once, and foregoes
# item-by-item duplication checking since the backing mappings have been checked already.
return self._from_other(self.__class__, self)
@staticmethod
def _from_other(bt: type[BT], other: MapOrItems[KT, VT], inv: bool = False) -> BT:
"""Fast, private constructor based on :meth:`_init_from`.
If *inv* is true, return the inverse of the instance instead of the instance itself.
(Useful for pickling with dynamically-generated inverse classes -- see :meth:`__reduce__`.)
"""
inst = bt()
inst._init_from(other)
return t.cast(BT, inst.inverse) if inv else inst
def _init_from(self, other: MapOrItems[KT, VT]) -> None:
"""Fast init from *other*, bypassing item-by-item duplication checking."""
self._fwdm.clear()
self._invm.clear()
self._fwdm.update(other)
# If other is a bidict, use its existing backing inverse mapping, otherwise
# other could be a generator that's now exhausted, so invert self._fwdm on the fly.
inv = other.inverse if isinstance(other, BidictBase) else inverted(self._fwdm)
self._invm.update(inv)
# other's type is Mapping rather than Maplike since bidict() | SupportsKeysAndGetItem({})
# raises a TypeError, just like dict() | SupportsKeysAndGetItem({}) does.
def __or__(self: BT, other: t.Mapping[KT, VT]) -> BT:
"""Return self|other."""
if not isinstance(other, t.Mapping):
return NotImplemented
new = self.copy()
new._update(other, rollback=False)
return new
def __ror__(self: BT, other: t.Mapping[KT, VT]) -> BT:
"""Return other|self."""
if not isinstance(other, t.Mapping):
return NotImplemented
new = self.__class__(other)
new._update(self, rollback=False)
return new
def __len__(self) -> int:
"""The number of contained items."""
return len(self._fwdm)
def __iter__(self) -> t.Iterator[KT]:
"""Iterator over the contained keys."""
return iter(self._fwdm)
def __getitem__(self, key: KT) -> VT:
"""*x.__getitem__(key) ⟺ x[key]*"""
return self._fwdm[key]
def __reduce__(self) -> tuple[t.Any, ...]:
"""Return state information for pickling."""
cls = self.__class__
inst: t.Mapping[t.Any, t.Any] = self
# If this bidict's class is dynamically generated, pickle the inverse instead, whose (presumably not
# dynamically generated) class the caller is more likely to have a reference to somewhere in sys.modules
# that pickle can discover.
if should_invert := isinstance(self, GeneratedBidictInverse):
cls = self._inv_cls
inst = self.inverse
return self._from_other, (cls, dict(inst), should_invert)
# See BidictBase._set_reversed() above.
def _fwdm_reversed(self: BidictBase[KT, t.Any]) -> t.Iterator[KT]:
"""Iterator over the contained keys in reverse order."""
assert isinstance(self._fwdm, t.Reversible)
return reversed(self._fwdm)
BidictBase._init_class()
class GeneratedBidictInverse:
"""Base class for dynamically-generated inverse bidict classes."""
# * Code review nav *
# ============================================================================
# ← Prev: _abc.py Current: _base.py Next: _frozen.py →
# ============================================================================
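A short sketch of the duplication policy that `_dedup()` and `_write()` implement, exercised through the public API (assumes bidict is installed):

```python
from bidict import bidict, ValueDuplicationError

b = bidict({'H': 'hydrogen'})
b['H'] = 'hydrogen'         # exact item already present -> no-op
b['He'] = 'helium'          # no duplication -> plain insert

try:
    b['X'] = 'hydrogen'     # value duplication: on_dup.val is RAISE by default
except ValueDuplicationError as exc:
    print('rejected:', exc)

print(b)                    # bidict({'H': 'hydrogen', 'He': 'helium'})
print(b.inverse['helium'])  # 'He' -- _write() keeps the inverse in sync
print(b.keys() == b.inverse.values())  # True: the inverse's values are this bidict's keys
```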

@ -0,0 +1,194 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# * Code review nav *
# (see comments in __init__.py)
# ============================================================================
# ← Prev: _frozen.py Current: _bidict.py Next: _orderedbase.py →
# ============================================================================
"""Provide :class:`MutableBidict` and :class:`bidict`."""
from __future__ import annotations
import typing as t
from ._abc import MutableBidirectionalMapping
from ._base import BidictBase
from ._dup import ON_DUP_DROP_OLD
from ._dup import ON_DUP_RAISE
from ._dup import OnDup
from ._typing import DT
from ._typing import KT
from ._typing import MISSING
from ._typing import ODT
from ._typing import VT
from ._typing import MapOrItems
class MutableBidict(BidictBase[KT, VT], MutableBidirectionalMapping[KT, VT]):
"""Base class for mutable bidirectional mappings."""
if t.TYPE_CHECKING:
@property
def inverse(self) -> MutableBidict[VT, KT]: ...
@property
def inv(self) -> MutableBidict[VT, KT]: ...
def _pop(self, key: KT) -> VT:
val = self._fwdm.pop(key)
del self._invm[val]
return val
def __delitem__(self, key: KT) -> None:
"""*x.__delitem__(y)  del x[y]*"""
self._pop(key)
def __setitem__(self, key: KT, val: VT) -> None:
"""Set the value for *key* to *val*.
If *key* is already associated with *val*, this is a no-op.
If *key* is already associated with a different value,
the old value will be replaced with *val*,
as with dict's :meth:`__setitem__`.
If *val* is already associated with a different key,
an exception is raised
to protect against accidental removal of the key
that's currently associated with *val*.
Use :meth:`put` instead if you want to specify different behavior in
the case that the provided key or value duplicates an existing one.
Or use :meth:`forceput` to unconditionally associate *key* with *val*,
replacing any existing items as necessary to preserve uniqueness.
:raises bidict.ValueDuplicationError: if *val* duplicates that of an
existing item.
:raises bidict.KeyAndValueDuplicationError: if *key* duplicates the key of an
existing item and *val* duplicates the value of a different
existing item.
"""
self.put(key, val, on_dup=self.on_dup)
def put(self, key: KT, val: VT, on_dup: OnDup = ON_DUP_RAISE) -> None:
"""Associate *key* with *val*, honoring the :class:`OnDup` given in *on_dup*.
For example, if *on_dup* is :attr:`~bidict.ON_DUP_RAISE`,
then *key* will be associated with *val* if and only if
*key* is not already associated with an existing value and
*val* is not already associated with an existing key,
otherwise an exception will be raised.
If *key* is already associated with *val*, this is a no-op.
:raises bidict.KeyDuplicationError: if attempting to insert an item
whose key only duplicates an existing item's, and *on_dup.key* is
:attr:`~bidict.RAISE`.
:raises bidict.ValueDuplicationError: if attempting to insert an item
whose value only duplicates an existing item's, and *on_dup.val* is
:attr:`~bidict.RAISE`.
:raises bidict.KeyAndValueDuplicationError: if attempting to insert an
item whose key duplicates one existing item's, and whose value
duplicates another existing item's, and *on_dup.val* is
:attr:`~bidict.RAISE`.
"""
self._update(((key, val),), on_dup=on_dup)
def forceput(self, key: KT, val: VT) -> None:
"""Associate *key* with *val* unconditionally.
Replace any existing mappings containing key *key* or value *val*
as necessary to preserve uniqueness.
"""
self.put(key, val, on_dup=ON_DUP_DROP_OLD)
def clear(self) -> None:
"""Remove all items."""
self._fwdm.clear()
self._invm.clear()
@t.overload
def pop(self, key: KT, /) -> VT: ...
@t.overload
def pop(self, key: KT, default: DT = ..., /) -> VT | DT: ...
def pop(self, key: KT, default: ODT[DT] = MISSING, /) -> VT | DT:
"""*x.pop(k[, d]) → v*
Remove specified key and return the corresponding value.
:raises KeyError: if *key* is not found and no *default* is provided.
"""
try:
return self._pop(key)
except KeyError:
if default is MISSING:
raise
return default
def popitem(self) -> tuple[KT, VT]:
"""*x.popitem() → (k, v)*
Remove and return some item as a (key, value) pair.
:raises KeyError: if *x* is empty.
"""
key, val = self._fwdm.popitem()
del self._invm[val]
return key, val
def update(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None:
"""Like calling :meth:`putall` with *self.on_dup* passed for *on_dup*."""
self._update(arg, kw=kw)
def forceupdate(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None:
"""Like a bulk :meth:`forceput`."""
self._update(arg, kw=kw, on_dup=ON_DUP_DROP_OLD)
def putall(self, items: MapOrItems[KT, VT], on_dup: OnDup = ON_DUP_RAISE) -> None:
"""Like a bulk :meth:`put`.
If one of the given items causes an exception to be raised,
none of the items is inserted.
"""
self._update(items, on_dup=on_dup)
# other's type is Mapping rather than Maplike since bidict() |= SupportsKeysAndGetItem({})
# raises a TypeError, just like dict() |= SupportsKeysAndGetItem({}) does.
def __ior__(self, other: t.Mapping[KT, VT]) -> MutableBidict[KT, VT]:
"""Return self|=other."""
self.update(other)
return self
class bidict(MutableBidict[KT, VT]):
"""The main bidirectional mapping type.
See :ref:`intro:Introduction` and :ref:`basic-usage:Basic Usage`
to get started (also available at https://bidict.rtfd.io).
"""
if t.TYPE_CHECKING:
@property
def inverse(self) -> bidict[VT, KT]: ...
@property
def inv(self) -> bidict[VT, KT]: ...
# * Code review nav *
# ============================================================================
# ← Prev: _frozen.py Current: _bidict.py Next: _orderedbase.py →
# ============================================================================
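A sketch of the mutation API defined above, including the atomicity of `putall()` (assumes bidict is installed):

```python
from bidict import bidict, KeyDuplicationError, ON_DUP_RAISE

b = bidict({'H': 'hydrogen'})
b.put('He', 'helium')        # ON_DUP_RAISE by default: raises on any duplication
b.forceput('H', 'helium')    # unconditional: drops both conflicting items
print(b)                     # bidict({'H': 'helium'})

try:
    # putall() is atomic: if any item fails, none are inserted.
    b.putall([('Li', 'lithium'), ('H', 'oops')], on_dup=ON_DUP_RAISE)
except KeyDuplicationError:
    pass
print('Li' in b)             # False -- the partial update was rolled back
```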

@ -0,0 +1,61 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""Provide :class:`OnDup` and related functionality."""
from __future__ import annotations
import typing as t
from enum import Enum
class OnDupAction(Enum):
"""An action to take to prevent duplication from occurring."""
#: Raise a :class:`~bidict.DuplicationError`.
RAISE = 'RAISE'
#: Overwrite existing items with new items.
DROP_OLD = 'DROP_OLD'
#: Keep existing items and drop new items.
DROP_NEW = 'DROP_NEW'
def __repr__(self) -> str:
return f'{self.__class__.__name__}.{self.name}'
RAISE: t.Final[OnDupAction] = OnDupAction.RAISE
DROP_OLD: t.Final[OnDupAction] = OnDupAction.DROP_OLD
DROP_NEW: t.Final[OnDupAction] = OnDupAction.DROP_NEW
class OnDup(t.NamedTuple):
r"""A combination of :class:`~bidict.OnDupAction`\s specifying how to handle various types of duplication.
The :attr:`~OnDup.key` field specifies what action to take when a duplicate key is encountered.
The :attr:`~OnDup.val` field specifies what action to take when a duplicate value is encountered.
In the case of both key and value duplication across two different items,
only :attr:`~OnDup.val` is used.
*See also* :ref:`basic-usage:Values Must Be Unique`
(https://bidict.rtfd.io/basic-usage.html#values-must-be-unique)
"""
key: OnDupAction = DROP_OLD
val: OnDupAction = RAISE
#: Default :class:`OnDup` used for the
#: :meth:`~bidict.bidict.__init__`,
#: :meth:`~bidict.bidict.__setitem__`, and
#: :meth:`~bidict.bidict.update` methods.
ON_DUP_DEFAULT: t.Final[OnDup] = OnDup(key=DROP_OLD, val=RAISE)
#: An :class:`OnDup` whose members are all :obj:`RAISE`.
ON_DUP_RAISE: t.Final[OnDup] = OnDup(key=RAISE, val=RAISE)
#: An :class:`OnDup` whose members are all :obj:`DROP_OLD`.
ON_DUP_DROP_OLD: t.Final[OnDup] = OnDup(key=DROP_OLD, val=DROP_OLD)
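A sketch of composing `OnDupAction`s into a custom `OnDup` policy and passing it to `put()` (assumes bidict is installed):

```python
from bidict import bidict, OnDup, DROP_OLD, DROP_NEW

# Keep the existing item when a value collides, but overwrite on a plain key collision.
keep_first_value = OnDup(key=DROP_OLD, val=DROP_NEW)

b = bidict({'H': 'hydrogen'})
b.put('X', 'hydrogen', on_dup=keep_first_value)    # value already taken -> new item dropped
print(b)                                           # bidict({'H': 'hydrogen'})
b.put('H', 'hydrogen-1', on_dup=keep_first_value)  # key collision only -> overwritten
print(b)                                           # bidict({'H': 'hydrogen-1'})
```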

@ -0,0 +1,36 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""Provide all bidict exceptions."""
from __future__ import annotations
class BidictException(Exception):
"""Base class for bidict exceptions."""
class DuplicationError(BidictException):
"""Base class for exceptions raised when uniqueness is violated
as per the :attr:`~bidict.RAISE` :class:`~bidict.OnDupAction`.
"""
class KeyDuplicationError(DuplicationError):
"""Raised when a given key is not unique."""
class ValueDuplicationError(DuplicationError):
"""Raised when a given value is not unique."""
class KeyAndValueDuplicationError(KeyDuplicationError, ValueDuplicationError):
"""Raised when a given item's key and value are not unique.
That is, its key duplicates that of another item,
and its value duplicates that of a different other item.
"""

@ -0,0 +1,50 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# * Code review nav *
# (see comments in __init__.py)
# ============================================================================
# ← Prev: _base.py Current: _frozen.py Next: _bidict.py →
# ============================================================================
"""Provide :class:`frozenbidict`, an immutable, hashable bidirectional mapping type."""
from __future__ import annotations
import typing as t
from ._base import BidictBase
from ._typing import KT
from ._typing import VT
class frozenbidict(BidictBase[KT, VT]):
"""Immutable, hashable bidict type."""
_hash: int
if t.TYPE_CHECKING:
@property
def inverse(self) -> frozenbidict[VT, KT]: ...
@property
def inv(self) -> frozenbidict[VT, KT]: ...
def __hash__(self) -> int:
"""The hash of this bidict as determined by its items."""
if getattr(self, '_hash', None) is None:
# The following is like hash(frozenset(self.items()))
# but more memory efficient. See also: https://bugs.python.org/issue46684
self._hash = t.ItemsView(self)._hash()
return self._hash
# * Code review nav *
# ============================================================================
# ← Prev: _base.py Current: _frozen.py Next: _bidict.py →
# ============================================================================
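A sketch of why hashability matters: a frozenbidict can serve as a dict key or set member, and its hash is order-insensitive like its equality (assumes bidict is installed):

```python
from bidict import frozenbidict

fb = frozenbidict({'H': 'hydrogen', 'He': 'helium'})
same_items = frozenbidict({'He': 'helium', 'H': 'hydrogen'})

print(hash(fb) == hash(same_items))  # True: based on the item set, not insertion order
cache = {fb: 'light elements'}       # usable as a dict key, unlike a mutable bidict
print(cache[same_items])             # 'light elements'
```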

@ -0,0 +1,51 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""Functions for iterating over items in a mapping."""
from __future__ import annotations
import typing as t
from operator import itemgetter
from ._typing import KT
from ._typing import VT
from ._typing import ItemsIter
from ._typing import Maplike
from ._typing import MapOrItems
def iteritems(arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> ItemsIter[KT, VT]:
"""Yield the items from *arg* and *kw* in the order given."""
if isinstance(arg, t.Mapping):
yield from arg.items()
elif isinstance(arg, Maplike):
yield from ((k, arg[k]) for k in arg.keys())
else:
yield from arg
yield from t.cast(ItemsIter[KT, VT], kw.items())
swap: t.Final = itemgetter(1, 0)
def inverted(arg: MapOrItems[KT, VT]) -> ItemsIter[VT, KT]:
"""Yield the inverse items of the provided object.
If *arg* has a :func:`callable` ``__inverted__`` attribute,
return the result of calling it.
Otherwise, return an iterator over the items in `arg`,
inverting each item on the fly.
*See also* :attr:`bidict.BidirectionalMapping.__inverted__`
"""
invattr = getattr(arg, '__inverted__', None)
if callable(invattr):
inv: ItemsIter[VT, KT] = invattr()
return inv
return map(swap, iteritems(arg))
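A sketch of `inverted()` over a mapping, an iterable of pairs, and a bidict (which supplies `__inverted__`); assumes bidict is installed:

```python
from bidict import bidict, inverted

print(list(inverted({'H': 'hydrogen'})))         # [('hydrogen', 'H')] -- swapped on the fly
print(list(inverted([(1, 'one'), (2, 'two')])))  # [('one', 1), ('two', 2)]

b = bidict({'H': 'hydrogen'})
# bidict provides __inverted__, so inverted() just iterates the stored inverse items.
print(list(inverted(b)))                         # [('hydrogen', 'H')]
print(dict(inverted(b)) == dict(b.inverse))      # True
```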

@ -0,0 +1,238 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# * Code review nav *
# (see comments in __init__.py)
# ============================================================================
# ← Prev: _bidict.py Current: _orderedbase.py Next: _orderedbidict.py →
# ============================================================================
"""Provide :class:`OrderedBidictBase`."""
from __future__ import annotations
import typing as t
from weakref import ref as weakref
from ._base import BidictBase
from ._base import Unwrites
from ._bidict import bidict
from ._iter import iteritems
from ._typing import KT
from ._typing import MISSING
from ._typing import OKT
from ._typing import OVT
from ._typing import VT
from ._typing import MapOrItems
AT = t.TypeVar('AT') # attr type
class WeakAttr(t.Generic[AT]):
"""Descriptor to automatically manage (de)referencing the given slot as a weakref.
See https://docs.python.org/3/howto/descriptor.html#managed-attributes
for an intro to using descriptors like this for managed attributes.
"""
def __init__(self, *, slot: str) -> None:
self.slot = slot
def __set__(self, instance: t.Any, value: AT) -> None:
setattr(instance, self.slot, weakref(value))
def __get__(self, instance: t.Any, __owner: t.Any = None) -> AT:
return t.cast(AT, getattr(instance, self.slot)())
class Node:
"""A node in a circular doubly-linked list
used to encode the order of items in an ordered bidict.
A weak reference to the previous node is stored
to avoid creating strong reference cycles.
Referencing/dereferencing the weakref is handled automatically by :class:`WeakAttr`.
"""
prv: WeakAttr[Node] = WeakAttr(slot='_prv_weak')
__slots__ = ('__weakref__', '_prv_weak', 'nxt')
nxt: Node | WeakAttr[Node] # Allow subclasses to use a WeakAttr for nxt too (see SentinelNode)
def __init__(self, prv: Node, nxt: Node) -> None:
self.prv = prv
self.nxt = nxt
def unlink(self) -> None:
"""Remove self from in between prv and nxt.
Self's references to prv and nxt are retained so it can be relinked (see below).
"""
self.prv.nxt = self.nxt
self.nxt.prv = self.prv
def relink(self) -> None:
"""Restore self between prv and nxt after unlinking (see above)."""
self.prv.nxt = self.nxt.prv = self
class SentinelNode(Node):
"""Special node in a circular doubly-linked list
that links the first node with the last node.
When its next and previous references point back to itself
it represents an empty list.
"""
nxt: WeakAttr[Node] = WeakAttr(slot='_nxt_weak')
__slots__ = ('_nxt_weak',)
def __init__(self) -> None:
super().__init__(self, self)
def iternodes(self, *, reverse: bool = False) -> t.Iterator[Node]:
"""Iterator yielding nodes in the requested order."""
attr = 'prv' if reverse else 'nxt'
node = getattr(self, attr)
while node is not self:
yield node
node = getattr(node, attr)
def new_last_node(self) -> Node:
"""Create and return a new terminal node."""
old_last = self.prv
new_last = Node(old_last, self)
old_last.nxt = self.prv = new_last
return new_last
class OrderedBidictBase(BidictBase[KT, VT]):
"""Base class implementing an ordered :class:`BidirectionalMapping`."""
_node_by_korv: bidict[t.Any, Node]
_bykey: bool
def __init__(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None:
"""Make a new ordered bidirectional mapping.
The signature behaves like that of :class:`dict`.
Items passed in are added in the order they are passed,
respecting the :attr:`~bidict.BidictBase.on_dup`
class attribute in the process.
The order in which items are inserted is remembered,
similar to :class:`collections.OrderedDict`.
"""
self._sntl = SentinelNode()
self._node_by_korv = bidict()
self._bykey = True
super().__init__(arg, **kw)
if t.TYPE_CHECKING:
@property
def inverse(self) -> OrderedBidictBase[VT, KT]: ...
@property
def inv(self) -> OrderedBidictBase[VT, KT]: ...
def _make_inverse(self) -> OrderedBidictBase[VT, KT]:
inv = t.cast(OrderedBidictBase[VT, KT], super()._make_inverse())
inv._sntl = self._sntl
inv._node_by_korv = self._node_by_korv
inv._bykey = not self._bykey
return inv
def _assoc_node(self, node: Node, key: KT, val: VT) -> None:
korv = key if self._bykey else val
self._node_by_korv.forceput(korv, node)
def _dissoc_node(self, node: Node) -> None:
del self._node_by_korv.inverse[node]
node.unlink()
def _init_from(self, other: MapOrItems[KT, VT]) -> None:
"""See :meth:`BidictBase._init_from`."""
super()._init_from(other)
bykey = self._bykey
korv_by_node = self._node_by_korv.inverse
korv_by_node.clear()
korv_by_node_set = korv_by_node.__setitem__
self._sntl.nxt = self._sntl.prv = self._sntl
new_node = self._sntl.new_last_node
for k, v in iteritems(other):
korv_by_node_set(new_node(), k if bykey else v)
def _write(self, newkey: KT, newval: VT, oldkey: OKT[KT], oldval: OVT[VT], unwrites: Unwrites | None) -> None:
"""See :meth:`bidict.BidictBase._spec_write`."""
super()._write(newkey, newval, oldkey, oldval, unwrites)
assoc, dissoc = self._assoc_node, self._dissoc_node
node_by_korv, bykey = self._node_by_korv, self._bykey
if oldval is MISSING and oldkey is MISSING: # no key or value duplication
# {0: 1, 2: 3} | {4: 5} => {0: 1, 2: 3, 4: 5}
newnode = self._sntl.new_last_node()
assoc(newnode, newkey, newval)
if unwrites is not None:
unwrites.append((dissoc, newnode))
elif oldval is not MISSING and oldkey is not MISSING: # key and value duplication across two different items
# {0: 1, 2: 3} | {0: 3} => {0: 3}
# n1, n2 => n1 (collapse n1 and n2 into n1)
# oldkey: 2, oldval: 1, oldnode: n2, newkey: 0, newval: 3, newnode: n1
if bykey:
oldnode = node_by_korv[oldkey]
newnode = node_by_korv[newkey]
else:
oldnode = node_by_korv[newval]
newnode = node_by_korv[oldval]
dissoc(oldnode)
assoc(newnode, newkey, newval)
if unwrites is not None:
unwrites.extend((
(assoc, newnode, newkey, oldval),
(assoc, oldnode, oldkey, newval),
(oldnode.relink,),
))
elif oldval is not MISSING: # just key duplication
# {0: 1, 2: 3} | {2: 4} => {0: 1, 2: 4}
# oldkey: MISSING, oldval: 3, newkey: 2, newval: 4
node = node_by_korv[newkey if bykey else oldval]
assoc(node, newkey, newval)
if unwrites is not None:
unwrites.append((assoc, node, newkey, oldval))
else:
assert oldkey is not MISSING # just value duplication
# {0: 1, 2: 3} | {4: 3} => {0: 1, 4: 3}
# oldkey: 2, oldval: MISSING, newkey: 4, newval: 3
node = node_by_korv[oldkey if bykey else newval]
assoc(node, newkey, newval)
if unwrites is not None:
unwrites.append((assoc, node, oldkey, newval))
def __iter__(self) -> t.Iterator[KT]:
"""Iterator over the contained keys in insertion order."""
return self._iter(reverse=False)
def __reversed__(self) -> t.Iterator[KT]:
"""Iterator over the contained keys in reverse insertion order."""
return self._iter(reverse=True)
def _iter(self, *, reverse: bool = False) -> t.Iterator[KT]:
nodes = self._sntl.iternodes(reverse=reverse)
korv_by_node = self._node_by_korv.inverse
if self._bykey:
for node in nodes:
yield korv_by_node[node]
else:
key_by_val = self._invm
for node in nodes:
val = korv_by_node[node]
yield key_by_val[val]
# * Code review nav *
# ============================================================================
# ← Prev: _bidict.py Current: _orderedbase.py Next: _orderedbidict.py →
# ============================================================================
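A simplified, self-contained sketch of the structure described above: a circular doubly-linked list threaded through a sentinel node, giving O(1) append and removal. Unlike the real `Node`, it uses ordinary strong references and a `value` slot for brevity; the names here are illustrative only.

```python
class _Node:
    __slots__ = ('prv', 'nxt', 'value')

    def __init__(self, value=None):
        self.prv = self.nxt = self   # self-linked, like an empty SentinelNode
        self.value = value


def append(sntl: _Node, value) -> _Node:
    """O(1) insert just before the sentinel, i.e. at the end of the list."""
    node = _Node(value)
    last = sntl.prv
    node.prv, node.nxt = last, sntl
    last.nxt = sntl.prv = node
    return node


def unlink(node: _Node) -> None:
    """O(1) removal; mirrors Node.unlink() above."""
    node.prv.nxt = node.nxt
    node.nxt.prv = node.prv


sntl = _Node()
nodes = [append(sntl, v) for v in 'abc']
unlink(nodes[1])

out, cur = [], sntl.nxt
while cur is not sntl:               # same traversal as SentinelNode.iternodes()
    out.append(cur.value)
    cur = cur.nxt
print(out)                           # ['a', 'c']
```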

@ -0,0 +1,172 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# * Code review nav *
# (see comments in __init__.py)
# ============================================================================
# ← Prev: _orderedbase.py Current: _orderedbidict.py <FIN>
# ============================================================================
"""Provide :class:`OrderedBidict`."""
from __future__ import annotations
import typing as t
from collections.abc import Set
from ._base import BidictKeysView
from ._bidict import MutableBidict
from ._orderedbase import OrderedBidictBase
from ._typing import KT
from ._typing import VT
class OrderedBidict(OrderedBidictBase[KT, VT], MutableBidict[KT, VT]):
"""Mutable bidict type that maintains items in insertion order."""
if t.TYPE_CHECKING:
@property
def inverse(self) -> OrderedBidict[VT, KT]: ...
@property
def inv(self) -> OrderedBidict[VT, KT]: ...
def clear(self) -> None:
"""Remove all items."""
super().clear()
self._node_by_korv.clear()
self._sntl.nxt = self._sntl.prv = self._sntl
def _pop(self, key: KT) -> VT:
val = super()._pop(key)
node = self._node_by_korv[key if self._bykey else val]
self._dissoc_node(node)
return val
def popitem(self, last: bool = True) -> tuple[KT, VT]:
"""*b.popitem() → (k, v)*
If *last* is true,
remove and return the most recently added item as a (key, value) pair.
Otherwise, remove and return the least recently added item.
:raises KeyError: if *b* is empty.
"""
if not self:
raise KeyError('OrderedBidict is empty')
node = getattr(self._sntl, 'prv' if last else 'nxt')
korv = self._node_by_korv.inverse[node]
if self._bykey:
return korv, self._pop(korv)
return self.inverse._pop(korv), korv
def move_to_end(self, key: KT, last: bool = True) -> None:
"""Move the item with the given key to the end if *last* is true, else to the beginning.
:raises KeyError: if *key* is missing
"""
korv = key if self._bykey else self._fwdm[key]
node = self._node_by_korv[korv]
node.prv.nxt = node.nxt
node.nxt.prv = node.prv
sntl = self._sntl
if last:
lastnode = sntl.prv
node.prv = lastnode
node.nxt = sntl
sntl.prv = lastnode.nxt = node
else:
firstnode = sntl.nxt
node.prv = sntl
node.nxt = firstnode
sntl.nxt = firstnode.prv = node
# Override the keys() and items() implementations inherited from BidictBase,
# which may delegate to the backing _fwdm dict, since this is a mutable ordered bidict,
# and therefore the ordering of items can get out of sync with the backing mappings
# after mutation. (Need not override values() because it delegates to .inverse.keys().)
def keys(self) -> t.KeysView[KT]:
"""A set-like object providing a view on the contained keys."""
return _OrderedBidictKeysView(self)
def items(self) -> t.ItemsView[KT, VT]:
"""A set-like object providing a view on the contained items."""
return _OrderedBidictItemsView(self)
# The following MappingView implementations use the __iter__ implementations
# inherited from their superclass counterparts in collections.abc, so they
# continue to yield items in the correct order even after an OrderedBidict
# is mutated. They also provide a __reversed__ implementation, which is not
# provided by the collections.abc superclasses.
class _OrderedBidictKeysView(BidictKeysView[KT]):
_mapping: OrderedBidict[KT, t.Any]
def __reversed__(self) -> t.Iterator[KT]:
return reversed(self._mapping)
class _OrderedBidictItemsView(t.ItemsView[KT, VT]):
_mapping: OrderedBidict[KT, VT]
def __reversed__(self) -> t.Iterator[tuple[KT, VT]]:
ob = self._mapping
for key in reversed(ob):
yield key, ob[key]
# For better performance, make _OrderedBidictKeysView and _OrderedBidictItemsView delegate
# to backing dicts for the methods they inherit from collections.abc.Set. (Cannot delegate
# for __iter__ and __reversed__ since they are order-sensitive.) See also: https://bugs.python.org/issue46713
_OView = t.Union[t.Type[_OrderedBidictKeysView[KT]], t.Type[_OrderedBidictItemsView[KT, t.Any]]]
_setmethodnames: t.Iterable[str] = (
'__lt__ __le__ __gt__ __ge__ __eq__ __ne__ __sub__ __rsub__ '
'__or__ __ror__ __xor__ __rxor__ __and__ __rand__ isdisjoint'
).split()
def _override_set_methods_to_use_backing_dict(cls: _OView[KT], viewname: str) -> None:
def make_proxy_method(methodname: str) -> t.Any:
def method(self: _OrderedBidictKeysView[KT] | _OrderedBidictItemsView[KT, t.Any], *args: t.Any) -> t.Any:
fwdm = self._mapping._fwdm
if not isinstance(fwdm, dict): # dict view speedup not available, fall back to Set's implementation.
return getattr(Set, methodname)(self, *args)
fwdm_dict_view = getattr(fwdm, viewname)()
fwdm_dict_view_method = getattr(fwdm_dict_view, methodname)
if (
len(args) != 1
or not isinstance((arg := args[0]), self.__class__)
or not isinstance(arg._mapping._fwdm, dict)
):
return fwdm_dict_view_method(*args)
# self and arg are both _OrderedBidictKeysViews or _OrderedBidictItemsViews whose bidicts are backed by
# a dict. Use arg's backing dict's corresponding view instead of arg. Otherwise, e.g. `ob1.keys()
# < ob2.keys()` would give "TypeError: '<' not supported between instances of '_OrderedBidictKeysView' and
# '_OrderedBidictKeysView'", because both `dict_keys(ob1).__lt__(ob2.keys()) is NotImplemented` and
# `dict_keys(ob2).__gt__(ob1.keys()) is NotImplemented`.
arg_dict = arg._mapping._fwdm
arg_dict_view = getattr(arg_dict, viewname)()
return fwdm_dict_view_method(arg_dict_view)
method.__name__ = methodname
method.__qualname__ = f'{cls.__qualname__}.{methodname}'
return method
for name in _setmethodnames:
setattr(cls, name, make_proxy_method(name))
_override_set_methods_to_use_backing_dict(_OrderedBidictKeysView, 'keys')
_override_set_methods_to_use_backing_dict(_OrderedBidictItemsView, 'items')
# * Code review nav *
# ============================================================================
# ← Prev: _orderedbase.py Current: _orderedbidict.py <FIN>
# ============================================================================
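A sketch of the order-aware API above: `move_to_end()`, reversible views, end-selectable `popitem()`, and order-sensitive comparison (assumes bidict is installed):

```python
from bidict import OrderedBidict

ob = OrderedBidict([('H', 'hydrogen'), ('He', 'helium'), ('Li', 'lithium')])

ob.move_to_end('H')                  # order is now He, Li, H
print(list(ob))                      # ['He', 'Li', 'H']
print(list(reversed(ob.keys())))     # ['H', 'Li', 'He']

print(ob.popitem(last=False))        # ('He', 'helium') -- least recently ordered end
print(ob.inverse.popitem())          # ('hydrogen', 'H') -- the inverse shares the same order
print(ob.equals_order_sensitive(OrderedBidict({'Li': 'lithium'})))  # True
```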

@ -0,0 +1,49 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""Provide typing-related objects."""
from __future__ import annotations
import typing as t
from enum import Enum
KT = t.TypeVar('KT')
VT = t.TypeVar('VT')
VT_co = t.TypeVar('VT_co', covariant=True)
Items = t.Iterable[t.Tuple[KT, VT]]
@t.runtime_checkable
class Maplike(t.Protocol[KT, VT_co]):
"""Like typeshed's SupportsKeysAndGetItem, but usable at runtime."""
def keys(self) -> t.Iterable[KT]: ...
def __getitem__(self, __key: KT) -> VT_co: ...
MapOrItems = t.Union[Maplike[KT, VT], Items[KT, VT]]
MappingOrItems = t.Union[t.Mapping[KT, VT], Items[KT, VT]]
ItemsIter = t.Iterator[t.Tuple[KT, VT]]
class MissingT(Enum):
"""Sentinel used to represent none/missing when None itself can't be used."""
MISSING = 'MISSING'
MISSING: t.Final[t.Literal[MissingT.MISSING]] = MissingT.MISSING
OKT = t.Union[KT, MissingT] #: optional key type
OVT = t.Union[VT, MissingT] #: optional value type
DT = t.TypeVar('DT') #: for default arguments
ODT = t.Union[DT, MissingT] #: optional default arg type
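A standalone sketch of the sentinel pattern used above: an `Enum` member acts as a typed "missing" marker where `None` may be a legitimate value. The `get_or` helper below is illustrative only, not part of bidict.

```python
from __future__ import annotations
import typing as t
from enum import Enum


class _MissingT(Enum):
    MISSING = 'MISSING'


_MISSING: t.Final = _MissingT.MISSING


def get_or(d: dict[str, t.Any], key: str, default: t.Any = _MISSING) -> t.Any:
    """Return d[key], falling back to *default* only if one was actually given."""
    val = d.get(key, _MISSING)
    if val is _MISSING:
        if default is _MISSING:
            raise KeyError(key)
        return default
    return val


print(get_or({'a': 1}, 'a'))      # 1
print(get_or({'a': None}, 'a'))   # None is preserved; it is not mistaken for "missing"
print(get_or({'a': 1}, 'b', 0))   # 0
```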

@ -0,0 +1,14 @@
# Copyright 2009-2024 Joshua Bronson. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""Define bidict package metadata."""
__version__ = '0.23.1'
__author__ = {'name': 'Joshua Bronson', 'email': 'jabronson@gmail.com'}
__copyright__ = '© 2009-2024 Joshua Bronson'
__description__ = 'The bidirectional mapping library for Python.'
__license__ = 'MPL 2.0'
__url__ = 'https://bidict.readthedocs.io'

@ -0,0 +1 @@
PEP-561 marker.

@ -0,0 +1,20 @@
Copyright 2010 Jason Kirtland
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,60 @@
Metadata-Version: 2.1
Name: blinker
Version: 1.8.2
Summary: Fast, simple object-to-object and broadcast signaling
Author: Jason Kirtland
Maintainer-email: Pallets Ecosystem <contact@palletsprojects.com>
Requires-Python: >=3.8
Description-Content-Type: text/markdown
Classifier: Development Status :: 5 - Production/Stable
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python
Classifier: Typing :: Typed
Project-URL: Chat, https://discord.gg/pallets
Project-URL: Documentation, https://blinker.readthedocs.io
Project-URL: Source, https://github.com/pallets-eco/blinker/
# Blinker
Blinker provides a fast dispatching system that allows any number of
interested parties to subscribe to events, or "signals".
## Pallets Community Ecosystem
> [!IMPORTANT]\
> This project is part of the Pallets Community Ecosystem. Pallets is the open
> source organization that maintains Flask; Pallets-Eco enables community
> maintenance of related projects. If you are interested in helping maintain
> this project, please reach out on [the Pallets Discord server][discord].
>
> [discord]: https://discord.gg/pallets
## Example
Signal receivers can subscribe to specific senders or receive signals
sent by any sender.
```pycon
>>> from blinker import signal
>>> started = signal('round-started')
>>> def each(round):
... print(f"Round {round}")
...
>>> started.connect(each)
>>> def round_two(round):
... print("This is round two.")
...
>>> started.connect(round_two, sender=2)
>>> for round in range(1, 4):
... started.send(round)
...
Round 1!
Round 2!
This is round two.
Round 3!
```

@ -0,0 +1,13 @@
blinker-1.8.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
blinker-1.8.2.dist-info/LICENSE.txt,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054
blinker-1.8.2.dist-info/METADATA,sha256=3tEx40hm9IEofyFqDPJsDPE9MAIEhtifapoSp7FqzuA,1633
blinker-1.8.2.dist-info/RECORD,,
blinker-1.8.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
blinker-1.8.2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
blinker/__init__.py,sha256=ymyJY_PoTgBzaPgdr4dq-RRsGh7D-sYQIGMNp8Rx4qc,1577
blinker/__pycache__/__init__.cpython-312.pyc,,
blinker/__pycache__/_utilities.cpython-312.pyc,,
blinker/__pycache__/base.cpython-312.pyc,,
blinker/_utilities.py,sha256=0J7eeXXTUx0Ivf8asfpx0ycVkp0Eqfqnj117x2mYX9E,1675
blinker/base.py,sha256=nIZJEtXQ8LLZZJrwVp2wQcdfCzDixvAHR9VpSWiyVcQ,22574
blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: flit 3.9.0
Root-Is-Purelib: true
Tag: py3-none-any

@ -0,0 +1,60 @@
from __future__ import annotations
import typing as t
from .base import ANY
from .base import default_namespace
from .base import NamedSignal
from .base import Namespace
from .base import Signal
from .base import signal
__all__ = [
"ANY",
"default_namespace",
"NamedSignal",
"Namespace",
"Signal",
"signal",
]
def __getattr__(name: str) -> t.Any:
import warnings
if name == "__version__":
import importlib.metadata
warnings.warn(
"The '__version__' attribute is deprecated and will be removed in"
" Blinker 1.9.0. Use feature detection or"
" 'importlib.metadata.version(\"blinker\")' instead.",
DeprecationWarning,
stacklevel=2,
)
return importlib.metadata.version("blinker")
if name == "receiver_connected":
from .base import _receiver_connected
warnings.warn(
"The global 'receiver_connected' signal is deprecated and will be"
" removed in Blinker 1.9. Use 'Signal.receiver_connected' and"
" 'Signal.receiver_disconnected' instead.",
DeprecationWarning,
stacklevel=2,
)
return _receiver_connected
if name == "WeakNamespace":
from .base import _WeakNamespace
warnings.warn(
"'WeakNamespace' is deprecated and will be removed in Blinker 1.9."
" Use 'Namespace' instead.",
DeprecationWarning,
stacklevel=2,
)
return _WeakNamespace
raise AttributeError(name)
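A sketch of what the module-level `__getattr__` above does at runtime: deprecated names are resolved lazily and emit a `DeprecationWarning` (assumes blinker 1.8.x is installed):

```python
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    import blinker
    version = blinker.__version__         # routed through blinker.__getattr__
    print(version)                        # e.g. '1.8.2'
    print(caught[0].category.__name__)    # 'DeprecationWarning'
```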

@ -0,0 +1,64 @@
from __future__ import annotations
import collections.abc as c
import inspect
import typing as t
from weakref import ref
from weakref import WeakMethod
T = t.TypeVar("T")
class Symbol:
"""A constant symbol, nicer than ``object()``. Repeated calls return the
same instance.
>>> Symbol('foo') is Symbol('foo')
True
>>> Symbol('foo')
foo
"""
symbols: t.ClassVar[dict[str, Symbol]] = {}
def __new__(cls, name: str) -> Symbol:
if name in cls.symbols:
return cls.symbols[name]
obj = super().__new__(cls)
cls.symbols[name] = obj
return obj
def __init__(self, name: str) -> None:
self.name = name
def __repr__(self) -> str:
return self.name
def __getnewargs__(self) -> tuple[t.Any, ...]:
return (self.name,)
def make_id(obj: object) -> c.Hashable:
"""Get a stable identifier for a receiver or sender, to be used as a dict
key or in a set.
"""
if inspect.ismethod(obj):
# The id of a bound method is not stable, but the id of the unbound
# function and instance are.
return id(obj.__func__), id(obj.__self__)
if isinstance(obj, (str, int)):
# Instances with the same value always compare equal and have the same
# hash, even if the id may change.
return obj
# Assume other types are not hashable but will always be the same instance.
return id(obj)
def make_ref(obj: T, callback: c.Callable[[ref[T]], None] | None = None) -> ref[T]:
if inspect.ismethod(obj):
return WeakMethod(obj, callback) # type: ignore[arg-type, return-value]
return ref(obj, callback)

View File
@@ -0,0 +1,621 @@
from __future__ import annotations
import collections.abc as c
import typing as t
import warnings
import weakref
from collections import defaultdict
from contextlib import AbstractContextManager
from contextlib import contextmanager
from functools import cached_property
from inspect import iscoroutinefunction
from weakref import WeakValueDictionary
from ._utilities import make_id
from ._utilities import make_ref
from ._utilities import Symbol
if t.TYPE_CHECKING:
F = t.TypeVar("F", bound=c.Callable[..., t.Any])
ANY = Symbol("ANY")
"""Symbol for "any sender"."""
ANY_ID = 0
class Signal:
"""A notification emitter.
:param doc: The docstring for the signal.
"""
ANY = ANY
"""An alias for the :data:`~blinker.ANY` sender symbol."""
set_class: type[set[t.Any]] = set
"""The set class to use for tracking connected receivers and senders.
Python's ``set`` is unordered. If receivers must be dispatched in the order
they were connected, an ordered set implementation can be used.
.. versionadded:: 1.7
"""
@cached_property
def receiver_connected(self) -> Signal:
"""Emitted at the end of each :meth:`connect` call.
The signal sender is the signal instance, and the :meth:`connect`
arguments are passed through: ``receiver``, ``sender``, and ``weak``.
.. versionadded:: 1.2
"""
return Signal(doc="Emitted after a receiver connects.")
@cached_property
def receiver_disconnected(self) -> Signal:
"""Emitted at the end of each :meth:`disconnect` call.
The sender is the signal instance, and the :meth:`disconnect` arguments
are passed through: ``receiver`` and ``sender``.
This signal is emitted **only** when :meth:`disconnect` is called
explicitly. This signal cannot be emitted by an automatic disconnect
when a weakly referenced receiver or sender goes out of scope, as the
instance is no longer available to be used as the sender for this
signal.
An alternative approach is available by subscribing to
:attr:`receiver_connected` and setting up a custom weakref cleanup
callback on weak receivers and senders.
.. versionadded:: 1.2
"""
return Signal(doc="Emitted after a receiver disconnects.")
def __init__(self, doc: str | None = None) -> None:
if doc:
self.__doc__ = doc
self.receivers: dict[
t.Any, weakref.ref[c.Callable[..., t.Any]] | c.Callable[..., t.Any]
] = {}
"""The map of connected receivers. Useful to quickly check if any
receivers are connected to the signal: ``if s.receivers:``. The
structure and data is not part of the public API, but checking its
boolean value is.
"""
self.is_muted: bool = False
self._by_receiver: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
self._by_sender: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
self._weak_senders: dict[t.Any, weakref.ref[t.Any]] = {}
def connect(self, receiver: F, sender: t.Any = ANY, weak: bool = True) -> F:
"""Connect ``receiver`` to be called when the signal is sent by
``sender``.
:param receiver: The callable to call when :meth:`send` is called with
the given ``sender``, passing ``sender`` as a positional argument
along with any extra keyword arguments.
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
called when :meth:`send` is called with this sender. If ``ANY``, the
receiver will be called for any sender. A receiver may be connected
to multiple senders by calling :meth:`connect` multiple times.
:param weak: Track the receiver with a :mod:`weakref`. The receiver will
be automatically disconnected when it is garbage collected. When
connecting a receiver defined within a function, set to ``False``,
otherwise it will be disconnected when the function scope ends.
"""
receiver_id = make_id(receiver)
sender_id = ANY_ID if sender is ANY else make_id(sender)
if weak:
self.receivers[receiver_id] = make_ref(
receiver, self._make_cleanup_receiver(receiver_id)
)
else:
self.receivers[receiver_id] = receiver
self._by_sender[sender_id].add(receiver_id)
self._by_receiver[receiver_id].add(sender_id)
if sender is not ANY and sender_id not in self._weak_senders:
# store a cleanup for weakref-able senders
try:
self._weak_senders[sender_id] = make_ref(
sender, self._make_cleanup_sender(sender_id)
)
except TypeError:
pass
if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers:
try:
self.receiver_connected.send(
self, receiver=receiver, sender=sender, weak=weak
)
except TypeError:
# TODO no explanation or test for this
self.disconnect(receiver, sender)
raise
if _receiver_connected.receivers and self is not _receiver_connected:
try:
_receiver_connected.send(
self, receiver_arg=receiver, sender_arg=sender, weak_arg=weak
)
except TypeError:
self.disconnect(receiver, sender)
raise
return receiver
def connect_via(self, sender: t.Any, weak: bool = False) -> c.Callable[[F], F]:
"""Connect the decorated function to be called when the signal is sent
by ``sender``.
The decorated function will be called when :meth:`send` is called with
the given ``sender``, passing ``sender`` as a positional argument along
with any extra keyword arguments.
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
called when :meth:`send` is called with this sender. If ``ANY``, the
receiver will be called for any sender. A receiver may be connected
to multiple senders by calling :meth:`connect` multiple times.
:param weak: Track the receiver with a :mod:`weakref`. The receiver will
be automatically disconnected when it is garbage collected. When
connecting a receiver defined within a function, set to ``False``,
otherwise it will be disconnected when the function scope ends.
.. versionadded:: 1.1
"""
def decorator(fn: F) -> F:
self.connect(fn, sender, weak)
return fn
return decorator
@contextmanager
def connected_to(
self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY
) -> c.Generator[None, None, None]:
"""A context manager that temporarily connects ``receiver`` to the
signal while a ``with`` block executes. When the block exits, the
receiver is disconnected. Useful for tests.
:param receiver: The callable to call when :meth:`send` is called with
the given ``sender``, passing ``sender`` as a positional argument
along with any extra keyword arguments.
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
called when :meth:`send` is called with this sender. If ``ANY``, the
receiver will be called for any sender.
.. versionadded:: 1.1
"""
self.connect(receiver, sender=sender, weak=False)
try:
yield None
finally:
self.disconnect(receiver)
@contextmanager
def muted(self) -> c.Generator[None, None, None]:
"""A context manager that temporarily disables the signal. No receivers
will be called if the signal is sent, until the ``with`` block exits.
Useful for tests.
"""
self.is_muted = True
try:
yield None
finally:
self.is_muted = False
def temporarily_connected_to(
self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY
) -> AbstractContextManager[None]:
"""Deprecated alias for :meth:`connected_to`.
.. deprecated:: 1.1
Renamed to ``connected_to``. Will be removed in Blinker 1.9.
.. versionadded:: 0.9
"""
warnings.warn(
"'temporarily_connected_to' is renamed to 'connected_to'. The old name is"
" deprecated and will be removed in Blinker 1.9.",
DeprecationWarning,
stacklevel=2,
)
return self.connected_to(receiver, sender)
def send(
self,
sender: t.Any | None = None,
/,
*,
_async_wrapper: c.Callable[
[c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]], c.Callable[..., t.Any]
]
| None = None,
**kwargs: t.Any,
) -> list[tuple[c.Callable[..., t.Any], t.Any]]:
"""Call all receivers that are connected to the given ``sender``
or :data:`ANY`. Each receiver is called with ``sender`` as a positional
argument along with any extra keyword arguments. Return a list of
``(receiver, return value)`` tuples.
The order receivers are called is undefined, but can be influenced by
setting :attr:`set_class`.
If a receiver raises an exception, that exception will propagate up.
This makes debugging straightforward, with an assumption that correctly
implemented receivers will not raise.
:param sender: Call receivers connected to this sender, in addition to
those connected to :data:`ANY`.
:param _async_wrapper: Will be called on any receivers that are async
coroutines to turn them into sync callables. For example, could run
the receiver with an event loop.
:param kwargs: Extra keyword arguments to pass to each receiver.
.. versionchanged:: 1.7
Added the ``_async_wrapper`` argument.
"""
if self.is_muted:
return []
results = []
for receiver in self.receivers_for(sender):
if iscoroutinefunction(receiver):
if _async_wrapper is None:
raise RuntimeError("Cannot send to a coroutine function.")
result = _async_wrapper(receiver)(sender, **kwargs)
else:
result = receiver(sender, **kwargs)
results.append((receiver, result))
return results
async def send_async(
self,
sender: t.Any | None = None,
/,
*,
_sync_wrapper: c.Callable[
[c.Callable[..., t.Any]], c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]
]
| None = None,
**kwargs: t.Any,
) -> list[tuple[c.Callable[..., t.Any], t.Any]]:
"""Await all receivers that are connected to the given ``sender``
or :data:`ANY`. Each receiver is called with ``sender`` as a positional
argument along with any extra keyword arguments. Return a list of
``(receiver, return value)`` tuples.
The order receivers are called is undefined, but can be influenced by
setting :attr:`set_class`.
If a receiver raises an exception, that exception will propagate up.
This makes debugging straightforward, with an assumption that correctly
implemented receivers will not raise.
:param sender: Call receivers connected to this sender, in addition to
those connected to :data:`ANY`.
:param _sync_wrapper: Will be called on any receivers that are sync
callables to turn them into async coroutines. For example,
could call the receiver in a thread.
:param kwargs: Extra keyword arguments to pass to each receiver.
.. versionadded:: 1.7
"""
if self.is_muted:
return []
results = []
for receiver in self.receivers_for(sender):
if not iscoroutinefunction(receiver):
if _sync_wrapper is None:
raise RuntimeError("Cannot send to a non-coroutine function.")
result = await _sync_wrapper(receiver)(sender, **kwargs)
else:
result = await receiver(sender, **kwargs)
results.append((receiver, result))
return results
def has_receivers_for(self, sender: t.Any) -> bool:
"""Check if there is at least one receiver that will be called with the
given ``sender``. A receiver connected to :data:`ANY` will always be
called, regardless of sender. Does not check if weakly referenced
receivers are still live. See :meth:`receivers_for` for a stronger
search.
:param sender: Check for receivers connected to this sender, in addition
to those connected to :data:`ANY`.
"""
if not self.receivers:
return False
if self._by_sender[ANY_ID]:
return True
if sender is ANY:
return False
return make_id(sender) in self._by_sender
def receivers_for(
self, sender: t.Any
) -> c.Generator[c.Callable[..., t.Any], None, None]:
"""Yield each receiver to be called for ``sender``, in addition to those
to be called for :data:`ANY`. Weakly referenced receivers that are not
live will be disconnected and skipped.
:param sender: Yield receivers connected to this sender, in addition
to those connected to :data:`ANY`.
"""
# TODO: test receivers_for(ANY)
if not self.receivers:
return
sender_id = make_id(sender)
if sender_id in self._by_sender:
ids = self._by_sender[ANY_ID] | self._by_sender[sender_id]
else:
ids = self._by_sender[ANY_ID].copy()
for receiver_id in ids:
receiver = self.receivers.get(receiver_id)
if receiver is None:
continue
if isinstance(receiver, weakref.ref):
strong = receiver()
if strong is None:
self._disconnect(receiver_id, ANY_ID)
continue
yield strong
else:
yield receiver
def disconnect(self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY) -> None:
"""Disconnect ``receiver`` from being called when the signal is sent by
``sender``.
:param receiver: A connected receiver callable.
:param sender: Disconnect from only this sender. By default, disconnect
from all senders.
"""
sender_id: c.Hashable
if sender is ANY:
sender_id = ANY_ID
else:
sender_id = make_id(sender)
receiver_id = make_id(receiver)
self._disconnect(receiver_id, sender_id)
if (
"receiver_disconnected" in self.__dict__
and self.receiver_disconnected.receivers
):
self.receiver_disconnected.send(self, receiver=receiver, sender=sender)
def _disconnect(self, receiver_id: c.Hashable, sender_id: c.Hashable) -> None:
if sender_id == ANY_ID:
if self._by_receiver.pop(receiver_id, None) is not None:
for bucket in self._by_sender.values():
bucket.discard(receiver_id)
self.receivers.pop(receiver_id, None)
else:
self._by_sender[sender_id].discard(receiver_id)
self._by_receiver[receiver_id].discard(sender_id)
def _make_cleanup_receiver(
self, receiver_id: c.Hashable
) -> c.Callable[[weakref.ref[c.Callable[..., t.Any]]], None]:
"""Create a callback function to disconnect a weakly referenced
receiver when it is garbage collected.
"""
def cleanup(ref: weakref.ref[c.Callable[..., t.Any]]) -> None:
self._disconnect(receiver_id, ANY_ID)
return cleanup
def _make_cleanup_sender(
self, sender_id: c.Hashable
) -> c.Callable[[weakref.ref[t.Any]], None]:
"""Create a callback function to disconnect all receivers for a weakly
referenced sender when it is garbage collected.
"""
assert sender_id != ANY_ID
def cleanup(ref: weakref.ref[t.Any]) -> None:
self._weak_senders.pop(sender_id, None)
for receiver_id in self._by_sender.pop(sender_id, ()):
self._by_receiver[receiver_id].discard(sender_id)
return cleanup
def _cleanup_bookkeeping(self) -> None:
"""Prune unused sender/receiver bookkeeping. Not threadsafe.
Connecting & disconnecting leaves behind a small amount of bookkeeping
data. Typical workloads using Blinker, for example in most web apps,
Flask, CLI scripts, etc., are not adversely affected by this
bookkeeping.
With a long-running process performing dynamic signal routing with high
volume, e.g. connecting to function closures, senders are all unique
object instances. Doing all of this over and over may cause memory usage
to grow due to extraneous bookkeeping. (An empty ``set`` for each stale
sender/receiver pair.)
This method will prune that bookkeeping away, with the caveat that such
pruning is not threadsafe. The risk is that cleanup of a fully
disconnected receiver/sender pair occurs while another thread is
connecting that same pair. If you are in the highly dynamic, unique
receiver/sender situation that has led you to this method, that failure
mode is perhaps not a big deal for you.
"""
for mapping in (self._by_sender, self._by_receiver):
for ident, bucket in list(mapping.items()):
if not bucket:
mapping.pop(ident, None)
def _clear_state(self) -> None:
"""Disconnect all receivers and senders. Useful for tests."""
self._weak_senders.clear()
self.receivers.clear()
self._by_sender.clear()
self._by_receiver.clear()
_receiver_connected = Signal(
"""\
Sent by a :class:`Signal` after a receiver connects.
:argument: the Signal that was connected to
:keyword receiver_arg: the connected receiver
:keyword sender_arg: the sender to connect to
:keyword weak_arg: true if the connection to receiver_arg is a weak reference
.. deprecated:: 1.2
Individual signals have their own :attr:`~Signal.receiver_connected` and
:attr:`~Signal.receiver_disconnected` signals with a slightly simplified
call signature. This global signal will be removed in Blinker 1.9.
"""
)
class NamedSignal(Signal):
"""A named generic notification emitter. The name is not used by the signal
itself, but matches the key in the :class:`Namespace` that it belongs to.
:param name: The name of the signal within the namespace.
:param doc: The docstring for the signal.
"""
def __init__(self, name: str, doc: str | None = None) -> None:
super().__init__(doc)
#: The name of this signal.
self.name: str = name
def __repr__(self) -> str:
base = super().__repr__()
return f"{base[:-1]}; {self.name!r}>" # noqa: E702
if t.TYPE_CHECKING:
class PNamespaceSignal(t.Protocol):
def __call__(self, name: str, doc: str | None = None) -> NamedSignal: ...
# Python < 3.9
_NamespaceBase = dict[str, NamedSignal] # type: ignore[misc]
else:
_NamespaceBase = dict
class Namespace(_NamespaceBase):
"""A dict mapping names to signals."""
def signal(self, name: str, doc: str | None = None) -> NamedSignal:
"""Return the :class:`NamedSignal` for the given ``name``, creating it
if required. Repeated calls with the same name return the same signal.
:param name: The name of the signal.
:param doc: The docstring of the signal.
"""
if name not in self:
self[name] = NamedSignal(name, doc)
return self[name]
class _WeakNamespace(WeakValueDictionary): # type: ignore[type-arg]
"""A weak mapping of names to signals.
Automatically cleans up unused signals when the last reference goes out
of scope. This namespace implementation provides similar behavior to Blinker
<= 1.2.
.. deprecated:: 1.3
Will be removed in Blinker 1.9.
.. versionadded:: 1.3
"""
def __init__(self) -> None:
warnings.warn(
"'WeakNamespace' is deprecated and will be removed in Blinker 1.9."
" Use 'Namespace' instead.",
DeprecationWarning,
stacklevel=2,
)
super().__init__()
def signal(self, name: str, doc: str | None = None) -> NamedSignal:
"""Return the :class:`NamedSignal` for the given ``name``, creating it
if required. Repeated calls with the same name return the same signal.
:param name: The name of the signal.
:param doc: The docstring of the signal.
"""
if name not in self:
self[name] = NamedSignal(name, doc)
return self[name] # type: ignore[no-any-return]
default_namespace: Namespace = Namespace()
"""A default :class:`Namespace` for creating named signals. :func:`signal`
creates a :class:`NamedSignal` in this namespace.
"""
signal: PNamespaceSignal = default_namespace.signal
"""Return a :class:`NamedSignal` in :data:`default_namespace` with the given
``name``, creating it if required. Repeated calls with the same name return the
same signal.
"""
def __getattr__(name: str) -> t.Any:
if name == "receiver_connected":
warnings.warn(
"The global 'receiver_connected' signal is deprecated and will be"
" removed in Blinker 1.9. Use 'Signal.receiver_connected' and"
" 'Signal.receiver_disconnected' instead.",
DeprecationWarning,
stacklevel=2,
)
return _receiver_connected
if name == "WeakNamespace":
warnings.warn(
"'WeakNamespace' is deprecated and will be removed in Blinker 1.9."
" Use 'Namespace' instead.",
DeprecationWarning,
stacklevel=2,
)
return _WeakNamespace
raise AttributeError(name)

View File
@@ -0,0 +1,20 @@
This package contains a modified version of ca-bundle.crt:
ca-bundle.crt -- Bundle of CA Root Certificates
This is a bundle of X.509 certificates of public Certificate Authorities
(CA). These were automatically extracted from Mozilla's root certificates
file (certdata.txt). This file can be found in the mozilla source tree:
https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
It contains the certificates in PEM format and therefore
can be directly used with curl / libcurl / php_curl, or with
an Apache+mod_ssl webserver for SSL client authentication.
Just configure this file as the SSLCACertificateFile.
***** BEGIN LICENSE BLOCK *****
This Source Code Form is subject to the terms of the Mozilla Public License,
v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
one at http://mozilla.org/MPL/2.0/.
***** END LICENSE BLOCK *****
@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $

View File
@@ -0,0 +1,67 @@
Metadata-Version: 2.1
Name: certifi
Version: 2024.7.4
Summary: Python package for providing Mozilla's CA Bundle.
Home-page: https://github.com/certifi/python-certifi
Author: Kenneth Reitz
Author-email: me@kennethreitz.com
License: MPL-2.0
Project-URL: Source, https://github.com/certifi/python-certifi
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
Classifier: Natural Language :: English
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Requires-Python: >=3.6
License-File: LICENSE
Certifi: Python SSL Certificates
================================
Certifi provides Mozilla's carefully curated collection of Root Certificates for
validating the trustworthiness of SSL certificates while verifying the identity
of TLS hosts. It has been extracted from the `Requests`_ project.
Installation
------------
``certifi`` is available on PyPI. Simply install it with ``pip``::
$ pip install certifi
Usage
-----
To reference the installed certificate authority (CA) bundle, you can use the
built-in function::
>>> import certifi
>>> certifi.where()
'/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
Or from the command line::
$ python -m certifi
/usr/local/lib/python3.7/site-packages/certifi/cacert.pem
Enjoy!
.. _`Requests`: https://requests.readthedocs.io/en/master/
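A common way to consume the bundle (an illustrative sketch, not part of the upstream text) is to hand the path to the standard library's ``ssl`` module::
>>> import ssl
>>> import certifi
>>> context = ssl.create_default_context(cafile=certifi.where())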
Addition/Removal of Certificates
--------------------------------
Certifi does not support any addition/removal or other modification of the
CA trust store content. This project is intended to provide a reliable and
highly portable root of trust to python deployments. Look to upstream projects
for methods to use alternate trust.

View File
@@ -0,0 +1,15 @@
certifi-2024.7.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
certifi-2024.7.4.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989
certifi-2024.7.4.dist-info/METADATA,sha256=L9_EuPoQQvHFzxu03_ctaEZxhEty7inz569jGWjlLGo,2221
certifi-2024.7.4.dist-info/RECORD,,
certifi-2024.7.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
certifi-2024.7.4.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
certifi-2024.7.4.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
certifi/__init__.py,sha256=LHXz7E80YJYBzCBv6ZyidQ5-ciYSkSebpY2E5OM0l7o,94
certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
certifi/__pycache__/__init__.cpython-312.pyc,,
certifi/__pycache__/__main__.cpython-312.pyc,,
certifi/__pycache__/core.cpython-312.pyc,,
certifi/cacert.pem,sha256=SIupYGAr8HzGP073rsEIaS_sQYIPwzKKjj894DgUmu4,291528
certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426
certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

View File
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (70.2.0)
Root-Is-Purelib: true
Tag: py3-none-any

View File
@@ -0,0 +1,4 @@
from .core import contents, where
__all__ = ["contents", "where"]
__version__ = "2024.07.04"

View File
@@ -0,0 +1,12 @@
import argparse
from certifi import contents, where
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--contents", action="store_true")
args = parser.parse_args()
if args.contents:
print(contents())
else:
print(where())

File diff suppressed because it is too large
View File
@@ -0,0 +1,114 @@
"""
certifi.py
~~~~~~~~~~
This module returns the installation location of cacert.pem or its contents.
"""
import sys
import atexit
def exit_cacert_ctx() -> None:
_CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
if sys.version_info >= (3, 11):
from importlib.resources import as_file, files
_CACERT_CTX = None
_CACERT_PATH = None
def where() -> str:
# This is slightly terrible, but we want to delay extracting the file
# in cases where we're inside of a zipimport situation until someone
# actually calls where(), but we don't want to re-extract the file
# on every call of where(), so we'll do it once then store it in a
# global variable.
global _CACERT_CTX
global _CACERT_PATH
if _CACERT_PATH is None:
# This is slightly janky, the importlib.resources API wants you to
# manage the cleanup of this file, so it doesn't actually return a
# path, it returns a context manager that will give you the path
# when you enter it and will do any cleanup when you leave it. In
# the common case of not needing a temporary file, it will just
# return the file system location and the __exit__() is a no-op.
#
# We also have to hold onto the actual context manager, because
# it will do the cleanup whenever it gets garbage collected, so
# we will also store that at the global level as well.
_CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
_CACERT_PATH = str(_CACERT_CTX.__enter__())
atexit.register(exit_cacert_ctx)
return _CACERT_PATH
def contents() -> str:
return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
elif sys.version_info >= (3, 7):
from importlib.resources import path as get_path, read_text
_CACERT_CTX = None
_CACERT_PATH = None
def where() -> str:
# This is slightly terrible, but we want to delay extracting the
# file in cases where we're inside of a zipimport situation until
# someone actually calls where(), but we don't want to re-extract
# the file on every call of where(), so we'll do it once then store
# it in a global variable.
global _CACERT_CTX
global _CACERT_PATH
if _CACERT_PATH is None:
# This is slightly janky, the importlib.resources API wants you
# to manage the cleanup of this file, so it doesn't actually
# return a path, it returns a context manager that will give
# you the path when you enter it and will do any cleanup when
# you leave it. In the common case of not needing a temporary
# file, it will just return the file system location and the
# __exit__() is a no-op.
#
# We also have to hold onto the actual context manager, because
# it will do the cleanup whenever it gets garbage collected, so
# we will also store that at the global level as well.
_CACERT_CTX = get_path("certifi", "cacert.pem")
_CACERT_PATH = str(_CACERT_CTX.__enter__())
atexit.register(exit_cacert_ctx)
return _CACERT_PATH
def contents() -> str:
return read_text("certifi", "cacert.pem", encoding="ascii")
else:
import os
import types
from typing import Union
Package = Union[types.ModuleType, str]
Resource = Union[str, "os.PathLike"]
# This fallback will work for Python versions prior to 3.7 that lack the
# importlib.resources module but relies on the existing `where` function
# so won't address issues with environments like PyOxidizer that don't set
# __file__ on modules.
def read_text(
package: Package,
resource: Resource,
encoding: str = 'utf-8',
errors: str = 'strict'
) -> str:
with open(where(), encoding=encoding) as data:
return data.read()
# If we don't have importlib.resources, then we will just do the old logic
# of assuming we're on the filesystem and munge the path directly.
def where() -> str:
f = os.path.dirname(__file__)
return os.path.join(f, "cacert.pem")
def contents() -> str:
return read_text("certifi", "cacert.pem", encoding="ascii")

View File
@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2019 TAHRI Ahmed R.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File
@@ -0,0 +1,683 @@
Metadata-Version: 2.1
Name: charset-normalizer
Version: 3.3.2
Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
Home-page: https://github.com/Ousret/charset_normalizer
Author: Ahmed TAHRI
Author-email: ahmed.tahri@cloudnursery.dev
License: MIT
Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues
Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest
Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect
Classifier: Development Status :: 5 - Production/Stable
Classifier: License :: OSI Approved :: MIT License
Classifier: Intended Audience :: Developers
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Text Processing :: Linguistic
Classifier: Topic :: Utilities
Classifier: Typing :: Typed
Requires-Python: >=3.7.0
Description-Content-Type: text/markdown
License-File: LICENSE
Provides-Extra: unicode_backport
<h1 align="center">Charset Detection, for Everyone 👋</h1>
<p align="center">
<sup>The Real First Universal Charset Detector</sup><br>
<a href="https://pypi.org/project/charset-normalizer">
<img src="https://img.shields.io/pypi/pyversions/charset_normalizer.svg?orange=blue" />
</a>
<a href="https://pepy.tech/project/charset-normalizer/">
<img alt="Download Count Total" src="https://static.pepy.tech/badge/charset-normalizer/month" />
</a>
<a href="https://bestpractices.coreinfrastructure.org/projects/7297">
<img src="https://bestpractices.coreinfrastructure.org/projects/7297/badge">
</a>
</p>
<p align="center">
<sup><i>Featured Packages</i></sup><br>
<a href="https://github.com/jawah/niquests">
<img alt="Static Badge" src="https://img.shields.io/badge/Niquests-HTTP_1.1%2C%202%2C_and_3_Client-cyan">
</a>
<a href="https://github.com/jawah/wassima">
<img alt="Static Badge" src="https://img.shields.io/badge/Wassima-Certifi_Killer-cyan">
</a>
</p>
<p align="center">
<sup><i>In other language (unofficial port - by the community)</i></sup><br>
<a href="https://github.com/nickspring/charset-normalizer-rs">
<img alt="Static Badge" src="https://img.shields.io/badge/Rust-red">
</a>
</p>
> A library that helps you read text from an unknown charset encoding.<br /> Motivated by `chardet`,
> I'm trying to resolve the issue by taking a new approach.
> All IANA character set names for which the Python core library provides codecs are supported.
<p align="center">
>>>>> <a href="https://charsetnormalizerweb.ousret.now.sh" target="_blank">👉 Try Me Online Now, Then Adopt Me 👈 </a> <<<<<
</p>
This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|
| `Fast` | ❌ | ✅ | ✅ |
| `Universal**` | ❌ | ✅ | ❌ |
| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
| `License` | LGPL-2.1<br>_restrictive_ | MIT | MPL-1.1<br>_restrictive_ |
| `Native Python` | ✅ | ✅ | ❌ |
| `Detect spoken language` | ❌ | ✅ | N/A |
| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
<p align="center">
<img src="https://i.imgflip.com/373iay.gif" alt="Reading Normalized Text" width="226"/><img src="https://media.tenor.com/images/c0180f70732a18b4965448d33adba3d0/tenor.gif" alt="Cat Reading Text" width="200"/>
</p>
*\*\*: They are clearly using specific code for a specific encoding, even if it covers most of the encodings in use.*<br>
Did you get there because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
## ⚡ Performance
This package offers better performance than its counterpart, Chardet. Here are some numbers.
| Package | Accuracy | Mean per file (ms) | File per sec (est) |
|-----------------------------------------------|:--------:|:------------------:|:------------------:|
| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec |
| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
| Package | 99th percentile | 95th percentile | 50th percentile |
|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms |
| charset-normalizer | 100 ms | 50 ms | 5 ms |
Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
> Stats are generated using 400+ files with default parameters. For more details on the files used, see the GHA workflows.
> And yes, these results might change at any time. The dataset can be updated to include more files.
> The actual delays depend heavily on your CPU capabilities. The factors should remain the same.
> Keep in mind that the stats are generous and that Chardet's accuracy vs. ours is measured using Chardet's initial capability
> (e.g. supported encodings). Challenge them if you want.
## ✨ Installation
Using pip:
```sh
pip install charset-normalizer -U
```
## 🚀 Basic Usage
### CLI
This package comes with a CLI.
```
usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
file [file ...]
The Real First Universal Charset Detector. Discover originating encoding used
on text file. Normalize text to unicode.
positional arguments:
files File(s) to be analysed
optional arguments:
-h, --help show this help message and exit
-v, --verbose Display complementary information about file if any.
Stdout will contain logs about the detection process.
-a, --with-alternative
Output complementary possibilities if any. Top-level
JSON WILL be a list.
-n, --normalize Permit to normalize input file. If not set, program
does not write anything.
-m, --minimal Only output the charset detected to STDOUT. Disabling
JSON output.
-r, --replace Replace file when trying to normalize it instead of
creating a new one.
-f, --force Replace file without asking if you are sure, use this
flag with caution.
-t THRESHOLD, --threshold THRESHOLD
Define a custom maximum amount of chaos allowed in
decoded content. 0. <= chaos <= 1.
--version Show version information and exit.
```
```bash
normalizer ./data/sample.1.fr.srt
```
or
```bash
python -m charset_normalizer ./data/sample.1.fr.srt
```
🎉 Since version 1.4.0, the CLI produces an easily usable stdout result in JSON format.
```json
{
"path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
"encoding": "cp1252",
"encoding_aliases": [
"1252",
"windows_1252"
],
"alternative_encodings": [
"cp1254",
"cp1256",
"cp1258",
"iso8859_14",
"iso8859_15",
"iso8859_16",
"iso8859_3",
"iso8859_9",
"latin_1",
"mbcs"
],
"language": "French",
"alphabets": [
"Basic Latin",
"Latin-1 Supplement"
],
"has_sig_or_bom": false,
"chaos": 0.149,
"coherence": 97.152,
"unicode_path": null,
"is_preferred": true
}
```
### Python
*Just print out normalized text*
```python
from charset_normalizer import from_path
results = from_path('./my_subtitle.srt')
print(str(results.best()))
```
*Upgrade your code without effort*
```python
from charset_normalizer import detect
```
The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.
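For example (a minimal sketch, not taken from the upstream documentation), the drop-in `detect` helper returns the familiar chardet-style dictionary:
```python
from charset_normalizer import detect

# detect() mirrors chardet's API: it returns a dict with 'encoding',
# 'language' and 'confidence' keys for the given byte payload.
payload = "Où est passé l'été ?".encode("cp1252")
guess = detect(payload)
print(guess["encoding"], guess["confidence"])
```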
See the docs for advanced usage: [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
## 😇 Why
When I started using Chardet, I noticed that it did not meet my expectations, and I wanted to propose a
reliable alternative using a completely different method. Also! I never back down on a good challenge!
I **don't care** about the **originating charset** encoding, because **two different tables** can
produce **two identical rendered strings.**
What I want is to get readable text, the best I can.
In a way, **I'm brute-forcing text decoding.** How cool is that? 😎
Don't confuse the **ftfy** package with charset-normalizer or chardet. ftfy's goal is to repair Unicode strings, whereas charset-normalizer converts a raw file in an unknown encoding to Unicode.
## 🍰 How
- Discard all charset encoding tables that could not fit the binary content.
- Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.
- Extract matches with the lowest mess detected.
- Additionally, we measure coherence / probe for a language.
**Wait a minute**, what are noise/mess and coherence according to **YOU?**
*Noise:* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
**I established** some ground rules about **what is obvious** when **it seems like** a mess.
I know that my interpretation of what is noise is probably incomplete; feel free to contribute in order to
improve or rewrite it.
*Coherence:* For each language on earth, we have computed ranked letter-appearance occurrences (the best we can). That intel
is worth something here, so I use those records against decoded text to check whether I can detect intelligent design.
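As a rough illustration of those steps, here is a toy sketch with a made-up mess score; it is **not** the library's actual implementation:
```python
import unicodedata

def rank_candidates(payload: bytes, codecs=("utf_8", "cp1252", "latin_1")):
    """Toy version of the idea: discard codecs that cannot decode the
    payload at all, then rank the survivors by a crude 'mess' ratio."""
    scored = []
    for codec in codecs:
        try:
            text = payload.decode(codec)
        except UnicodeDecodeError:
            continue  # this encoding table cannot fit the binary content
        # 'mess' here is just the share of control/unassigned characters;
        # the real project uses far more elaborate detection plugins.
        noisy = sum(
            unicodedata.category(ch) in {"Cc", "Co", "Cn"}
            for ch in text
            if ch not in "\r\n\t"
        )
        scored.append((noisy / max(len(text), 1), codec))
    return sorted(scored)  # lowest mess first
```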
## ⚡ Known limitations
- Language detection is unreliable when text contains two or more languages sharing identical letters. (e.g. HTML (English tags) + Turkish content (sharing Latin characters))
- Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
## ⚠️ About Python EOLs
**If you are running:**
- Python >=2.7,<3.5: Unsupported
- Python 3.5: charset-normalizer < 2.1
- Python 3.6: charset-normalizer < 3.1
- Python 3.7: charset-normalizer < 4.0
Upgrade your Python interpreter as soon as possible.
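For instance, an environment stuck on Python 3.6 could pin the last compatible series until the interpreter is upgraded (an illustrative constraint, not an official recommendation):
```sh
pip install "charset-normalizer<3.1"
```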
## 👤 Contributing
Contributions, issues and feature requests are very much welcome.<br />
Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
## 📝 License
Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).<br />
This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.
Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/)
## 💼 For Enterprise
Professional support for charset-normalizer is available as part of the [Tidelift
Subscription][1]. Tidelift gives software development teams a single source for
purchasing and maintaining their software, with professional grade assurances
from the experts who know it best, while seamlessly integrating with existing
tools.
[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme
# Changelog
All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)
### Fixed
- Unintentional memory usage regression when using large payload that match several encoding (#376)
- Regression on some detection case showcased in the documentation (#371)
### Added
- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)
## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)
### Changed
- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8
- Improved the general detection reliability based on reports from the community
## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)
### Added
- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`
- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)
### Removed
- (internal) Redundant utils.is_ascii function and unused function is_private_use_only
- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant
### Changed
- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection
- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8
### Fixed
- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350)
## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)
### Changed
- Typehint for function `from_path` no longer enforce `PathLike` as its first argument
- Minor improvement over the global detection reliability
### Added
- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries
- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)
- Explicit support for Python 3.12
### Fixed
- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)
## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)
### Added
- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)
### Removed
- Support for Python 3.6 (PR #260)
### Changed
- Optional speedup provided by mypy/c 1.0.1
## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)
### Fixed
- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233)
### Changed
- Speedup provided by mypy/c 0.990 on Python >= 3.7
## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)
### Added
- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
### Changed
- Build with static metadata using 'build' frontend
- Make the language detection stricter
- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
### Fixed
- CLI with opt --normalize fail when using full path for files
- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
- Sphinx warnings when generating the documentation
### Removed
- Coherence detector no longer return 'Simple English' instead return 'English'
- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
- Breaking: Method `first()` and `best()` from CharsetMatch
- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
- Breaking: Top-level function `normalize`
- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
- Support for the backport `unicodedata2`
## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)
### Added
- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
### Changed
- Build with static metadata using 'build' frontend
- Make the language detection stricter
### Fixed
- CLI with opt --normalize fail when using full path for files
- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
### Removed
- Coherence detector no longer return 'Simple English' instead return 'English'
- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)
### Added
- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
### Removed
- Breaking: Method `first()` and `best()` from CharsetMatch
- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
### Fixed
- Sphinx warnings when generating the documentation
## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)
### Changed
- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
### Removed
- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
- Breaking: Top-level function `normalize`
- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
- Support for the backport `unicodedata2`
## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)
### Deprecated
- Function `normalize` scheduled for removal in 3.0
### Changed
- Removed useless call to decode in fn is_unprintable (#206)
### Fixed
- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)
## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)
### Added
- Output the Unicode table version when running the CLI with `--version` (PR #194)
### Changed
- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)
- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)
### Fixed
- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)
- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)
### Removed
- Support for Python 3.5 (PR #192)
### Deprecated
- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)
## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)
### Fixed
- ASCII miss-detection on rare cases (PR #170)
## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)
### Added
- Explicit support for Python 3.11 (PR #164)
### Changed
- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)
## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)
### Fixed
- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)
### Changed
- Skipping the language-detection (CD) on ASCII (PR #155)
## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)
### Changed
- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)
### Fixed
- Wrong logging level applied when setting kwarg `explain` to True (PR #146)
## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)
### Changed
- Improvement over Vietnamese detection (PR #126)
- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)
- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)
- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)
- Code style as refactored by Sourcery-AI (PR #131)
- Minor adjustment on the MD around european words (PR #133)
- Remove and replace SRTs from assets / tests (PR #139)
- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)
- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)
### Fixed
- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)
- Avoid using too insignificant chunk (PR #137)
### Added
- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)
- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)
## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)
### Added
- Add support for Kazakh (Cyrillic) language detection (PR #109)
### Changed
- Further, improve inferring the language from a given single-byte code page (PR #112)
- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)
- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)
- Various detection improvement (MD+CD) (PR #117)
### Removed
- Remove redundant logging entry about detected language(s) (PR #115)
### Fixed
- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)
## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)
### Fixed
- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)
- Fix CLI crash when using --minimal output in certain cases (PR #103)
### Changed
- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)
## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)
### Changed
- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)
- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)
- The Unicode detection is slightly improved (PR #93)
- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91)
### Removed
- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)
### Fixed
- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)
- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)
- The MANIFEST.in was not exhaustive (PR #78)
## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)
### Fixed
- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)
- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)
- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)
- Submatch factoring could be wrong in rare edge cases (PR #72)
- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)
- Fix line endings from CRLF to LF for certain project files (PR #67)
### Changed
- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)
- Allow fallback on specified encoding if any (PR #71)
## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)
### Changed
- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)
- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)
## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)
### Fixed
- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)
### Changed
- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)
## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)
### Fixed
- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)
- Using explain=False permanently disable the verbose output in the current runtime (PR #47)
- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)
- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)
### Changed
- Public function normalize default args values were not aligned with from_bytes (PR #53)
### Added
- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)
## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)
### Changed
- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.
- Accent has been made on UTF-8 detection, should perform rather instantaneous.
- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.
- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)
- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+
- utf_7 detection has been reinstated.
### Removed
- This package no longer require anything when used with Python 3.5 (Dropped cached_property)
- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.
- The exception hook on UnicodeDecodeError has been removed.
### Deprecated
- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0
### Fixed
- The CLI output used the relative path of the file(s). Should be absolute.
## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)
### Fixed
- Logger configuration/usage no longer conflict with others (PR #44)
## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)
### Removed
- Removed the loguru dependency in favor of the standard logging module.
- Dropped the nose test framework in favor of the maintained pytest.
- Chose not to use the dragonmapper package to help with gibberish Chinese/CJK text.
- cached_property is now required only for Python 3.5 (due to a constraint); it is dropped for every other interpreter version.
- Stopped supporting UTF-7 payloads that do not contain a SIG.
- Dropped PrettyTable; the CLI now emits pure JSON output instead.
### Fixed
- The BOM marker of a CharsetNormalizerMatch instance could be False in rare cases even when one was obviously present, due to the sub-match factoring process.
- The BOM was not searched for properly when trying the utf_32/utf_16 parent codecs.
### Changed
- Improved the final package size by compressing frequencies.json.
- Huge improvement when handling the largest payloads.
### Added
- The CLI now produces JSON-consumable output.
- Return ASCII when the given sequences fit it, with reasonable confidence.
## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)
### Fixed
- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)
## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)
### Fixed
- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)
## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)
### Fixed
- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)
## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)
### Changed
- Amend the previous release to allow prettytable 2.0 (PR #35)
## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)
### Fixed
- Fixed an error when using the package with a pre-release Python interpreter (PR #33)
### Changed
- Dependencies refactoring, constraints revised.
### Added
- Added Python 3.9 and 3.10 to the supported interpreters
MIT License
Copyright (c) 2019 TAHRI Ahmed R.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,36 @@
../../../bin/normalizer,sha256=IVMExHM8kCx7p1_QlEzEijpIvXFpAKHP9_HPcGpSR0k,262
charset_normalizer-3.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
charset_normalizer-3.3.2.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070
charset_normalizer-3.3.2.dist-info/METADATA,sha256=cfLhl5A6SI-F0oclm8w8ux9wshL1nipdeCdVnYb4AaA,33550
charset_normalizer-3.3.2.dist-info/RECORD,,
charset_normalizer-3.3.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
charset_normalizer-3.3.2.dist-info/WHEEL,sha256=4ZiCdXIWMxJyEClivrQv1QAHZpQh8kVYU92_ZAVwaok,152
charset_normalizer-3.3.2.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65
charset_normalizer-3.3.2.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577
charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73
charset_normalizer/__pycache__/__init__.cpython-312.pyc,,
charset_normalizer/__pycache__/__main__.cpython-312.pyc,,
charset_normalizer/__pycache__/api.cpython-312.pyc,,
charset_normalizer/__pycache__/cd.cpython-312.pyc,,
charset_normalizer/__pycache__/constant.cpython-312.pyc,,
charset_normalizer/__pycache__/legacy.cpython-312.pyc,,
charset_normalizer/__pycache__/md.cpython-312.pyc,,
charset_normalizer/__pycache__/models.cpython-312.pyc,,
charset_normalizer/__pycache__/utils.cpython-312.pyc,,
charset_normalizer/__pycache__/version.cpython-312.pyc,,
charset_normalizer/api.py,sha256=WOlWjy6wT8SeMYFpaGbXZFN1TMXa-s8vZYfkL4G29iQ,21097
charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560
charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100
charset_normalizer/cli/__main__.py,sha256=2F-xURZJzo063Ye-2RLJ2wcmURpbKeAzKwpiws65dAs,9744
charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc,,
charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc,,
charset_normalizer/constant.py,sha256=p0IsOVcEbPWYPOdWhnhRbjK1YVBy6fs05C5vKC-zoxU,40481
charset_normalizer/legacy.py,sha256=T-QuVMsMeDiQEk8WSszMrzVJg_14AMeSkmHdRYhdl1k,2071
charset_normalizer/md.cpython-312-x86_64-linux-gnu.so,sha256=W654QTU3QZI6eWJ0fanScAr0_O6sL0I61fyRSdC-39Y,16064
charset_normalizer/md.py,sha256=NkSuVLK13_a8c7BxZ4cGIQ5vOtGIWOdh22WZEvjp-7U,19624
charset_normalizer/md__mypyc.cpython-312-x86_64-linux-gnu.so,sha256=IlObIV4dmRhFV8V7H-zK4rTxPzTSi9JmrWZD26JQfxI,272640
charset_normalizer/models.py,sha256=I5i0s4aKCCgLPY2tUY3pwkgFA-BUbbNxQ7hVkVTt62s,11624
charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
charset_normalizer/utils.py,sha256=teiosMqzKjXyAHXnGdjSBOgnBZwx-SkBbCLrx0UXy8M,11894
charset_normalizer/version.py,sha256=iHKUfHD3kDRSyrh_BN2ojh43TA5-UZQjvbVIEFfpHDs,79

View File

@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.41.2)
Root-Is-Purelib: false
Tag: cp312-cp312-manylinux_2_17_x86_64
Tag: cp312-cp312-manylinux2014_x86_64

View File

@ -0,0 +1,2 @@
[console_scripts]
normalizer = charset_normalizer.cli:cli_detect

View File

@ -0,0 +1 @@
charset_normalizer

View File

@ -0,0 +1,46 @@
# -*- coding: utf-8 -*-
"""
Charset-Normalizer
~~~~~~~~~~~~~~~~~~
The Real First Universal Charset Detector.
A library that helps you read text from an unknown charset encoding.
Motivated by chardet, this package tries to resolve the issue by taking a new approach.
All IANA character set names for which the Python core library provides codecs are supported.
Basic usage:
>>> from charset_normalizer import from_bytes
>>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
>>> best_guess = results.best()
>>> str(best_guess)
'Bсеки човек има право на образование. Oбразованието!'
Other methods and usages are available - see the full documentation
at <https://github.com/Ousret/charset_normalizer>.
:copyright: (c) 2021 by Ahmed TAHRI
:license: MIT, see LICENSE for more details.
"""
import logging
from .api import from_bytes, from_fp, from_path, is_binary
from .legacy import detect
from .models import CharsetMatch, CharsetMatches
from .utils import set_logging_handler
from .version import VERSION, __version__
__all__ = (
"from_fp",
"from_path",
"from_bytes",
"is_binary",
"detect",
"CharsetMatch",
"CharsetMatches",
"__version__",
"VERSION",
"set_logging_handler",
)
# Attach a NullHandler to the top level logger by default
# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())

Some files were not shown because too many files have changed in this diff.