mirror of
https://github.com/jlengrand/checkoutCreate.git
synced 2026-03-10 08:11:18 +00:00
Removing venv from remote
This commit is contained in:
BIN
venv/.DS_Store
vendored
BIN
venv/.DS_Store
vendored
Binary file not shown.
@@ -1,76 +0,0 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# you cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r
|
||||
fi
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
if [ ! "$1" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
VIRTUAL_ENV="/Users/anamo/Desktop/Demos/Hackathon/CheckoutCreate/venv"
|
||||
export VIRTUAL_ENV
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
if [ "x(venv) " != x ] ; then
|
||||
PS1="(venv) ${PS1:-}"
|
||||
else
|
||||
if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
|
||||
# special case for Aspen magic directories
|
||||
# see http://www.zetadev.com/software/aspen/
|
||||
PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
|
||||
else
|
||||
PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
|
||||
fi
|
||||
fi
|
||||
export PS1
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r
|
||||
fi
|
||||
@@ -1,37 +0,0 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||
# You cannot run it directly.
|
||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||
|
||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
setenv VIRTUAL_ENV "/Users/anamo/Desktop/Demos/Hackathon/CheckoutCreate/venv"
|
||||
|
||||
set _OLD_VIRTUAL_PATH="$PATH"
|
||||
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
|
||||
|
||||
|
||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||
|
||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||
if ("venv" != "") then
|
||||
set env_name = "venv"
|
||||
else
|
||||
if (`basename "VIRTUAL_ENV"` == "__") then
|
||||
# special case for Aspen magic directories
|
||||
# see http://www.zetadev.com/software/aspen/
|
||||
set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
|
||||
else
|
||||
set env_name = `basename "$VIRTUAL_ENV"`
|
||||
endif
|
||||
endif
|
||||
set prompt = "[$env_name] $prompt"
|
||||
unset env_name
|
||||
endif
|
||||
|
||||
alias pydoc python -m pydoc
|
||||
|
||||
rehash
|
||||
@@ -1,75 +0,0 @@
|
||||
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
|
||||
# you cannot run it directly
|
||||
|
||||
function deactivate -d "Exit virtualenv and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
functions -e fish_prompt
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV "/Users/bradleyl/hackathon/checkoutCreate/venv"
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# save the current fish_prompt function as the function _old_fish_prompt
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# with the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command
|
||||
set -l old_status $status
|
||||
|
||||
# Prompt override?
|
||||
if test -n "(venv) "
|
||||
printf "%s%s" "(venv) " (set_color normal)
|
||||
else
|
||||
# ...Otherwise, prepend env
|
||||
set -l _checkbase (basename "$VIRTUAL_ENV")
|
||||
if test $_checkbase = "__"
|
||||
# special case for Aspen magic directories
|
||||
# see http://www.zetadev.com/software/aspen/
|
||||
printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
|
||||
else
|
||||
printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
|
||||
end
|
||||
end
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
end
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/Users/anamo/Desktop/Demos/WebCheckoutComponent/adyen-python-online-payments/venv/bin/python3
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
|
||||
from dotenv.cli import cli
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli())
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/Users/anamo/Desktop/Demos/WebCheckoutComponent/adyen-python-online-payments/venv/bin/python3
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
|
||||
from setuptools.command.easy_install import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/Users/anamo/Desktop/Demos/WebCheckoutComponent/adyen-python-online-payments/venv/bin/python3
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
|
||||
from setuptools.command.easy_install import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/Users/anamo/Desktop/Demos/WebCheckoutComponent/adyen-python-online-payments/venv/bin/python3
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
|
||||
from flask.cli import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/Users/anamo/Desktop/Demos/WebCheckoutComponent/adyen-python-online-payments/venv/bin/python3
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
|
||||
from charset_normalizer.cli.normalizer import cli_detect
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli_detect())
|
||||
11
venv/bin/pip
11
venv/bin/pip
@@ -1,11 +0,0 @@
|
||||
#!/Users/anamo/Desktop/Demos/WebCheckoutComponent/adyen-python-online-payments/venv/bin/python3
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
|
||||
from pip._internal.cli.main import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/Users/anamo/Desktop/Demos/WebCheckoutComponent/adyen-python-online-payments/venv/bin/python3
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
|
||||
from pip._internal.cli.main import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/Users/anamo/Desktop/Demos/WebCheckoutComponent/adyen-python-online-payments/venv/bin/python3
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
|
||||
from pip._internal.cli.main import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
@@ -1 +0,0 @@
|
||||
python3
|
||||
@@ -1 +0,0 @@
|
||||
/usr/local/adyen/python/bin/python3
|
||||
BIN
venv/lib/.DS_Store
vendored
BIN
venv/lib/.DS_Store
vendored
Binary file not shown.
BIN
venv/lib/python3.7/.DS_Store
vendored
BIN
venv/lib/python3.7/.DS_Store
vendored
Binary file not shown.
BIN
venv/lib/python3.7/site-packages/.DS_Store
vendored
BIN
venv/lib/python3.7/site-packages/.DS_Store
vendored
Binary file not shown.
@@ -1,19 +0,0 @@
|
||||
Metadata-Version: 1.2
|
||||
Name: Adyen
|
||||
Version: 6.0.0
|
||||
Summary: Adyen Python Api
|
||||
Home-page: https://github.com/Adyen/adyen-python-api-library
|
||||
Author: Adyen
|
||||
Author-email: support@adyen.com
|
||||
Maintainer: Adyen
|
||||
Maintainer-email: support@adyen.com
|
||||
License: UNKNOWN
|
||||
Description: A Python client library for accessing Adyen APIs
|
||||
Keywords: payments,adyen,fintech
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Topic :: Software Development :: Libraries
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
@@ -1,14 +0,0 @@
|
||||
README.md
|
||||
setup.cfg
|
||||
setup.py
|
||||
Adyen/__init__.py
|
||||
Adyen/client.py
|
||||
Adyen/exceptions.py
|
||||
Adyen/httpclient.py
|
||||
Adyen/services.py
|
||||
Adyen/settings.py
|
||||
Adyen/util.py
|
||||
Adyen.egg-info/PKG-INFO
|
||||
Adyen.egg-info/SOURCES.txt
|
||||
Adyen.egg-info/dependency_links.txt
|
||||
Adyen.egg-info/top_level.txt
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
../Adyen/__init__.py
|
||||
../Adyen/__pycache__/__init__.cpython-37.pyc
|
||||
../Adyen/__pycache__/client.cpython-37.pyc
|
||||
../Adyen/__pycache__/exceptions.cpython-37.pyc
|
||||
../Adyen/__pycache__/httpclient.cpython-37.pyc
|
||||
../Adyen/__pycache__/services.cpython-37.pyc
|
||||
../Adyen/__pycache__/settings.cpython-37.pyc
|
||||
../Adyen/__pycache__/util.cpython-37.pyc
|
||||
../Adyen/client.py
|
||||
../Adyen/exceptions.py
|
||||
../Adyen/httpclient.py
|
||||
../Adyen/services.py
|
||||
../Adyen/settings.py
|
||||
../Adyen/util.py
|
||||
PKG-INFO
|
||||
SOURCES.txt
|
||||
dependency_links.txt
|
||||
top_level.txt
|
||||
@@ -1 +0,0 @@
|
||||
Adyen
|
||||
@@ -1,46 +0,0 @@
|
||||
#!/bin/python
|
||||
|
||||
from __future__ import absolute_import, division, unicode_literals
|
||||
|
||||
from . import util
|
||||
from .util import generate_hpp_sig
|
||||
from .exceptions import (
|
||||
AdyenAPICommunicationError,
|
||||
AdyenAPIAuthenticationError,
|
||||
AdyenAPIInvalidPermission,
|
||||
AdyenAPIValidationError,
|
||||
AdyenInvalidRequestError,
|
||||
AdyenError
|
||||
)
|
||||
from .client import AdyenClient
|
||||
from .services import (
|
||||
AdyenBase,
|
||||
AdyenBinLookup,
|
||||
AdyenRecurring,
|
||||
AdyenPayment,
|
||||
AdyenThirdPartyPayout,
|
||||
AdyenHPP,
|
||||
AdyenCheckoutApi
|
||||
)
|
||||
|
||||
from .httpclient import HTTPClient
|
||||
|
||||
|
||||
class Adyen(AdyenBase):
|
||||
def __init__(self, **kwargs):
|
||||
self.client = AdyenClient(**kwargs)
|
||||
self.payment = AdyenPayment(client=self.client)
|
||||
self.binlookup = AdyenBinLookup(client=self.client)
|
||||
self.payout = AdyenThirdPartyPayout(client=self.client)
|
||||
self.hpp = AdyenHPP(client=self.client)
|
||||
self.recurring = AdyenRecurring(client=self.client)
|
||||
self.checkout = AdyenCheckoutApi(client=self.client)
|
||||
|
||||
|
||||
_base_adyen_obj = Adyen()
|
||||
recurring = _base_adyen_obj.recurring
|
||||
hpp = _base_adyen_obj.hpp
|
||||
payment = _base_adyen_obj.payment
|
||||
payout = _base_adyen_obj.payout
|
||||
checkout = _base_adyen_obj.checkout
|
||||
binlookup = _base_adyen_obj.binlookup
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,779 +0,0 @@
|
||||
#!/bin/python
|
||||
|
||||
from __future__ import absolute_import, division, unicode_literals
|
||||
|
||||
import json as json_lib
|
||||
import re
|
||||
|
||||
from . import util
|
||||
from .httpclient import HTTPClient
|
||||
from .exceptions import (
|
||||
AdyenAPICommunicationError,
|
||||
AdyenAPIAuthenticationError,
|
||||
AdyenAPIInvalidPermission,
|
||||
AdyenAPIValidationError,
|
||||
AdyenInvalidRequestError,
|
||||
AdyenAPIInvalidFormat,
|
||||
AdyenAPIInvalidAmount,
|
||||
AdyenEndpointInvalidFormat)
|
||||
from . import settings
|
||||
|
||||
|
||||
class AdyenResult(object):
|
||||
"""
|
||||
Args:
|
||||
message (dict, optional): Parsed message returned from API client.
|
||||
status_code (int, optional): Default 200. HTTP response code, ie 200,
|
||||
404, 500, etc.
|
||||
psp (str, optional): Psp reference returned by Adyen for a payment.
|
||||
raw_request (str, optional): Raw request placed to Adyen.
|
||||
raw_response (str, optional): Raw response returned by Adyen.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, message=None, status_code=200,
|
||||
psp="", raw_request="", raw_response=""):
|
||||
self.message = message
|
||||
self.status_code = status_code
|
||||
self.psp = psp
|
||||
self.raw_request = raw_request
|
||||
self.raw_response = raw_response
|
||||
self.details = {}
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.message)
|
||||
|
||||
|
||||
class AdyenClient(object):
|
||||
IDEMPOTENCY_HEADER_NAME = 'Idempotency-Key'
|
||||
"""A requesting client that interacts with Adyen. This class holds the
|
||||
adyen logic of Adyen HTTP API communication. This is the object that can
|
||||
maintain its own username, password, merchant_account, hmac and skin_code.
|
||||
When these values aren't within this object, the root adyen module
|
||||
variables will be used.
|
||||
|
||||
The public methods, call_api and call_hpp, only return AdyenResult objects.
|
||||
Otherwise raising various validation and communication errors.
|
||||
|
||||
Args:
|
||||
username (str, optional): Username of webservice user
|
||||
password (str, optional): Password of webservice user
|
||||
merchant_account (str, optional): Merchant account for requests to be
|
||||
placed through
|
||||
platform (str, optional): Defaults "test". The Adyen platform to make
|
||||
requests against.
|
||||
skin_code (str, optional): skin_code to place directory_lookup requests
|
||||
and generate hpp signatures with.
|
||||
hmac (str, optional): Hmac key that is used for signature calculation.
|
||||
http_timeout (int, optional): The timeout in seconds for HTTP calls,
|
||||
default 30.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
username=None,
|
||||
password=None,
|
||||
xapikey=None,
|
||||
review_payout_username=None,
|
||||
review_payout_password=None,
|
||||
store_payout_username=None, store_payout_password=None,
|
||||
platform="test", merchant_account=None,
|
||||
merchant_specific_url=None, skin_code=None,
|
||||
hmac=None,
|
||||
http_force=None,
|
||||
live_endpoint_prefix=None,
|
||||
http_timeout=30,
|
||||
api_bin_lookup_version=None,
|
||||
api_checkout_utility_version=None,
|
||||
api_checkout_version=None,
|
||||
api_payment_version=None,
|
||||
api_payout_version=None,
|
||||
api_recurring_version=None,
|
||||
):
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.xapikey = xapikey
|
||||
self.review_payout_username = review_payout_username
|
||||
self.review_payout_password = review_payout_password
|
||||
self.store_payout_username = store_payout_username
|
||||
self.store_payout_password = store_payout_password
|
||||
self.platform = platform
|
||||
self.merchant_specific_url = merchant_specific_url
|
||||
self.hmac = hmac
|
||||
self.merchant_account = merchant_account
|
||||
self.skin_code = skin_code
|
||||
self.psp_list = []
|
||||
self.LIB_VERSION = settings.LIB_VERSION
|
||||
self.USER_AGENT_SUFFIX = settings.LIB_NAME + "/"
|
||||
self.http_init = False
|
||||
self.http_force = http_force
|
||||
self.live_endpoint_prefix = live_endpoint_prefix
|
||||
self.http_timeout = http_timeout
|
||||
self.api_bin_lookup_version = api_bin_lookup_version or settings.API_BIN_LOOKUP_VERSION
|
||||
self.api_checkout_utility_version = api_checkout_utility_version or settings.API_CHECKOUT_UTILITY_VERSION
|
||||
self.api_checkout_version = api_checkout_version or settings.API_CHECKOUT_VERSION
|
||||
self.api_payment_version = api_payment_version or settings.API_PAYMENT_VERSION
|
||||
self.api_payout_version = api_payout_version or settings.API_PAYOUT_VERSION
|
||||
self.api_recurring_version = api_recurring_version or settings.API_RECURRING_VERSION
|
||||
|
||||
def _determine_api_url(self, platform, service, action):
|
||||
"""This returns the Adyen API endpoint based on the provided platform,
|
||||
service and action.
|
||||
|
||||
Args:
|
||||
platform (str): Adyen platform, ie 'live' or 'test'.
|
||||
service (str): API service to place request through.
|
||||
action (str): the API action to perform.
|
||||
"""
|
||||
if platform == "live" and self.live_endpoint_prefix:
|
||||
base_uri = settings.PAL_LIVE_ENDPOINT_URL_TEMPLATE.format(
|
||||
self.live_endpoint_prefix
|
||||
)
|
||||
else:
|
||||
base_uri = settings.BASE_PAL_URL.format(platform)
|
||||
|
||||
if service == "Recurring":
|
||||
api_version = self.api_recurring_version
|
||||
elif service == "Payout":
|
||||
api_version = self.api_payout_version
|
||||
elif service == "BinLookup":
|
||||
api_version = self.api_bin_lookup_version
|
||||
else:
|
||||
api_version = self.api_payment_version
|
||||
return '/'.join([base_uri, service, api_version, action])
|
||||
|
||||
@staticmethod
|
||||
def _determine_hpp_url(platform, action):
|
||||
"""This returns the Adyen HPP endpoint based on the provided platform,
|
||||
and action.
|
||||
|
||||
Args:
|
||||
platform (str): Adyen platform, ie 'live' or 'test'.
|
||||
action (str): the HPP action to perform.
|
||||
possible actions: select, pay, skipDetails, directory
|
||||
"""
|
||||
base_uri = settings.BASE_HPP_URL.format(platform)
|
||||
service = action + '.shtml'
|
||||
result = '/'.join([base_uri, service])
|
||||
return result
|
||||
|
||||
def _determine_checkout_url(self, platform, action):
|
||||
"""This returns the Adyen API endpoint based on the provided platform,
|
||||
service and action.
|
||||
|
||||
Args:
|
||||
platform (str): Adyen platform, ie 'live' or 'test'.
|
||||
action (str): the API action to perform.
|
||||
"""
|
||||
api_version = self.api_checkout_version
|
||||
if platform == "test":
|
||||
base_uri = settings.ENDPOINT_CHECKOUT_TEST
|
||||
elif self.live_endpoint_prefix is not None and platform == "live":
|
||||
base_uri = settings.ENDPOINT_CHECKOUT_LIVE_SUFFIX.format(
|
||||
self.live_endpoint_prefix)
|
||||
elif self.live_endpoint_prefix is None and platform == "live":
|
||||
errorstring = """Please set your live suffix. You can set it
|
||||
by running 'settings.
|
||||
ENDPOINT_CHECKOUT_LIVE_SUFFIX = 'Your live suffix'"""
|
||||
raise AdyenEndpointInvalidFormat(errorstring)
|
||||
else:
|
||||
raise AdyenEndpointInvalidFormat("invalid config")
|
||||
|
||||
if action == "paymentsDetails":
|
||||
action = "payments/details"
|
||||
if action == "paymentsResult":
|
||||
action = "payments/result"
|
||||
if action == "originKeys":
|
||||
api_version = self.api_checkout_utility_version
|
||||
if action == "paymentMethodsBalance":
|
||||
action = "paymentMethods/balance"
|
||||
if action == "ordersCancel":
|
||||
action = "orders/cancel"
|
||||
|
||||
return '/'.join([base_uri, api_version, action])
|
||||
|
||||
def _review_payout_username(self, **kwargs):
|
||||
if 'username' in kwargs:
|
||||
return kwargs['username']
|
||||
elif self.review_payout_username:
|
||||
return self.review_payout_username
|
||||
errorstring = """Please set your review payout
|
||||
webservice username. You can do this by running
|
||||
'Adyen.review_payout_username = 'Your payout username' """
|
||||
raise AdyenInvalidRequestError(errorstring)
|
||||
|
||||
def _review_payout_pass(self, **kwargs):
|
||||
if 'password' in kwargs:
|
||||
return kwargs["password"]
|
||||
elif self.review_payout_password:
|
||||
return self.review_payout_password
|
||||
errorstring = """Please set your review payout
|
||||
webservice password. You can do this by running
|
||||
'Adyen.review_payout_password = 'Your payout password'"""
|
||||
raise AdyenInvalidRequestError(errorstring)
|
||||
|
||||
def _store_payout_username(self, **kwargs):
|
||||
if 'username' in kwargs:
|
||||
return kwargs['username']
|
||||
elif self.store_payout_username:
|
||||
return self.store_payout_username
|
||||
errorstring = """Please set your store payout
|
||||
webservice username. You can do this by running
|
||||
'Adyen.store_payout_username = 'Your payout username'"""
|
||||
raise AdyenInvalidRequestError(errorstring)
|
||||
|
||||
def _store_payout_pass(self, **kwargs):
|
||||
if 'password' in kwargs:
|
||||
return kwargs["password"]
|
||||
elif self.store_payout_password:
|
||||
return self.store_payout_password
|
||||
errorstring = """Please set your store payout
|
||||
webservice password. You can do this by running
|
||||
'Adyen.store_payout_password = 'Your payout password'"""
|
||||
raise AdyenInvalidRequestError(errorstring)
|
||||
|
||||
def call_api(
|
||||
self,
|
||||
request_data,
|
||||
service,
|
||||
action,
|
||||
idempotency_key=None,
|
||||
**kwargs
|
||||
):
|
||||
"""This will call the adyen api. username, password, merchant_account,
|
||||
and platform are pulled from root module level and or self object.
|
||||
AdyenResult will be returned on 200 response. Otherwise, an exception
|
||||
is raised.
|
||||
|
||||
Args:
|
||||
idempotency_key: https://docs.adyen.com/development-resources
|
||||
/api-idempotency
|
||||
request_data (dict): The dictionary of the request to place. This
|
||||
should be in the structure of the Adyen API.
|
||||
https://docs.adyen.com/api-explorer
|
||||
service (str): This is the API service to be called.
|
||||
action (str): The specific action of the API service to be called
|
||||
idempotency (bool, optional): Whether the transaction should be
|
||||
processed idempotently.
|
||||
https://docs.adyen.com/development-resources/api-idempotency
|
||||
Returns:
|
||||
AdyenResult: The AdyenResult is returned when a request was
|
||||
successful.
|
||||
"""
|
||||
if not self.http_init:
|
||||
self._init_http_client()
|
||||
|
||||
# username at self object has highest priority. fallback to root module
|
||||
# and ensure that it is set.
|
||||
xapikey = None
|
||||
if self.xapikey:
|
||||
xapikey = self.xapikey
|
||||
elif 'xapikey' in kwargs:
|
||||
xapikey = kwargs.pop("xapikey")
|
||||
|
||||
username = None
|
||||
if self.username:
|
||||
username = self.username
|
||||
elif 'username' in kwargs:
|
||||
username = kwargs.pop("username")
|
||||
if service == "Payout":
|
||||
if any(substring in action for substring in
|
||||
["store", "submit"]):
|
||||
username = self._store_payout_username(**kwargs)
|
||||
else:
|
||||
username = self._review_payout_username(**kwargs)
|
||||
|
||||
if not username and not xapikey:
|
||||
errorstring = """Please set your webservice username.
|
||||
You can do this by running
|
||||
'Adyen.username = 'Your username'"""
|
||||
raise AdyenInvalidRequestError(errorstring)
|
||||
# password at self object has highest priority.
|
||||
# fallback to root module
|
||||
# and ensure that it is set.
|
||||
|
||||
password = None
|
||||
if self.password and not xapikey:
|
||||
password = self.password
|
||||
elif 'password' in kwargs:
|
||||
password = kwargs.pop("password")
|
||||
if service == "Payout":
|
||||
if any(substring in action for substring in
|
||||
["store", "submit"]):
|
||||
password = self._store_payout_pass(**kwargs)
|
||||
else:
|
||||
password = self._review_payout_pass(**kwargs)
|
||||
|
||||
if not password and not xapikey:
|
||||
errorstring = """Please set your webservice password.
|
||||
You can do this by running
|
||||
'Adyen.password = 'Your password'"""
|
||||
raise AdyenInvalidRequestError(errorstring)
|
||||
# xapikey at self object has highest priority.
|
||||
# fallback to root module
|
||||
# and ensure that it is set.
|
||||
|
||||
# platform at self object has highest priority. fallback to root module
|
||||
# and ensure that it is set to either 'live' or 'test'.
|
||||
platform = None
|
||||
if self.platform:
|
||||
platform = self.platform
|
||||
elif 'platform' in kwargs:
|
||||
platform = kwargs.pop('platform')
|
||||
|
||||
if not isinstance(platform, str):
|
||||
errorstring = "'platform' value must be type of string"
|
||||
raise TypeError(errorstring)
|
||||
elif platform.lower() not in ['live', 'test']:
|
||||
errorstring = "'platform' must be the value of 'live' or 'test'"
|
||||
raise ValueError(errorstring)
|
||||
|
||||
message = request_data
|
||||
|
||||
if not message.get('merchantAccount'):
|
||||
message['merchantAccount'] = self.merchant_account
|
||||
|
||||
# Add application info
|
||||
if 'applicationInfo' in request_data:
|
||||
request_data['applicationInfo'].update({
|
||||
"adyenLibrary": {
|
||||
"name": settings.LIB_NAME,
|
||||
"version": settings.LIB_VERSION
|
||||
}
|
||||
})
|
||||
else:
|
||||
request_data['applicationInfo'] = {
|
||||
"adyenLibrary": {
|
||||
"name": settings.LIB_NAME,
|
||||
"version": settings.LIB_VERSION
|
||||
}
|
||||
}
|
||||
# Adyen requires this header to be set and uses the combination of
|
||||
# merchant account and merchant reference to determine uniqueness.
|
||||
headers = {}
|
||||
if idempotency_key:
|
||||
headers[self.IDEMPOTENCY_HEADER_NAME] = idempotency_key
|
||||
|
||||
url = self._determine_api_url(platform, service, action)
|
||||
|
||||
if xapikey:
|
||||
raw_response, raw_request, status_code, headers = \
|
||||
self.http_client.request(url, json=request_data,
|
||||
xapikey=xapikey, headers=headers,
|
||||
**kwargs)
|
||||
else:
|
||||
raw_response, raw_request, status_code, headers = \
|
||||
self.http_client.request(url, json=message, username=username,
|
||||
password=password,
|
||||
headers=headers,
|
||||
**kwargs)
|
||||
|
||||
# Creates AdyenResponse if request was successful, raises error if not.
|
||||
adyen_result = self._handle_response(url, raw_response, raw_request,
|
||||
status_code, headers, message)
|
||||
|
||||
return adyen_result
|
||||
|
||||
def _init_http_client(self):
|
||||
self.http_client = HTTPClient(
|
||||
user_agent_suffix=self.USER_AGENT_SUFFIX,
|
||||
lib_version=self.LIB_VERSION,
|
||||
force_request=self.http_force,
|
||||
timeout=self.http_timeout,
|
||||
)
|
||||
self.http_init = True
|
||||
|
||||
def call_hpp(self, message, action, hmac_key="", **kwargs):
|
||||
"""This will call the adyen hpp. hmac_key and platform are pulled from
|
||||
root module level and or self object.
|
||||
AdyenResult will be returned on 200 response.
|
||||
Otherwise, an exception is raised.
|
||||
|
||||
Args:
|
||||
request_data (dict): The dictionary of the request to place. This
|
||||
should be in the structure of the Adyen API.
|
||||
https://docs.adyen.com/online-payments/classic-integrations/hosted-payment-pages/hosted-payment-pages-api
|
||||
service (str): This is the API service to be called.
|
||||
action (str): The specific action of the API service to be called
|
||||
Returns:
|
||||
AdyenResult: The AdyenResult is returned when a request was
|
||||
succesful.
|
||||
:param message:
|
||||
:param hmac_key:
|
||||
"""
|
||||
if not self.http_init:
|
||||
self._init_http_client()
|
||||
|
||||
# hmac provided in function has highest priority. fallback to self then
|
||||
# root module and ensure that it is set.
|
||||
hmac = hmac_key
|
||||
if self.hmac:
|
||||
hmac = self.hmac
|
||||
elif not hmac:
|
||||
errorstring = """Please set an hmac with your Adyen.Adyen
|
||||
class instance.
|
||||
'Adyen.hmac = \"!WR#F@...\"' or as an additional
|
||||
parameter in the function call ie.
|
||||
'Adyen.hpp.directory_lookup(hmac=\"!WR#F@...\"'. Please reach
|
||||
out to support@Adyen.com if the issue persists."""
|
||||
raise AdyenInvalidRequestError(errorstring)
|
||||
|
||||
# platform provided in self has highest priority,
|
||||
# fallback to root module and ensure that it is set.
|
||||
platform = self.platform
|
||||
if not isinstance(platform, str):
|
||||
errorstring = "'platform' must be type string"
|
||||
raise TypeError(errorstring)
|
||||
elif platform.lower() not in ['live', 'test']:
|
||||
errorstring = " 'platform' must be the value of 'live' or 'test' "
|
||||
raise ValueError(errorstring)
|
||||
|
||||
if 'skinCode' not in message:
|
||||
message['skinCode'] = self.skin_code
|
||||
|
||||
if 'merchantAccount' not in message:
|
||||
message['merchantAccount'] = self.merchant_account
|
||||
if message['merchantAccount'] == "":
|
||||
message['merchantAccount'] = self.merchant_account
|
||||
|
||||
message["merchantSig"] = util.generate_hpp_sig(message, hmac)
|
||||
|
||||
url = self._determine_hpp_url(platform, action)
|
||||
|
||||
raw_response, raw_request, status_code, headers = \
|
||||
self.http_client.request(url, data=message,
|
||||
username="", password="", **kwargs)
|
||||
|
||||
# Creates AdyenResponse if request was successful, raises error if not.
|
||||
adyen_result = self._handle_response(url, raw_response, raw_request,
|
||||
status_code, headers, message)
|
||||
return adyen_result
|
||||
|
||||
def call_checkout_api(self, request_data, action, idempotency_key=None,
|
||||
**kwargs):
|
||||
"""This will call the checkout adyen api. xapi key merchant_account,
|
||||
and platform are pulled from root module level and or self object.
|
||||
AdyenResult will be returned on 200 response. Otherwise, an exception
|
||||
is raised.
|
||||
|
||||
Args:
|
||||
idempotency_key: https://docs.adyen.com/development-resources
|
||||
/api-idempotency
|
||||
request_data (dict): The dictionary of the request to place. This
|
||||
should be in the structure of the Adyen API.
|
||||
https://docs.adyen.com/api-explorer/#/CheckoutService
|
||||
service (str): This is the API service to be called.
|
||||
action (str): The specific action of the API service to be called
|
||||
"""
|
||||
if not self.http_init:
|
||||
self._init_http_client()
|
||||
|
||||
# xapi at self object has highest priority. fallback to root module
|
||||
# and ensure that it is set.
|
||||
xapikey = False
|
||||
if self.xapikey:
|
||||
xapikey = self.xapikey
|
||||
elif 'xapikey' in kwargs:
|
||||
xapikey = kwargs.pop("xapikey")
|
||||
|
||||
if not xapikey:
|
||||
errorstring = """Please set your webservice xapikey.
|
||||
You can do this by running 'Adyen.xapikey = 'Your xapikey'"""
|
||||
raise AdyenInvalidRequestError(errorstring)
|
||||
|
||||
# platform at self object has highest priority. fallback to root module
|
||||
# and ensure that it is set to either 'live' or 'test'.
|
||||
platform = None
|
||||
if self.platform:
|
||||
platform = self.platform
|
||||
elif 'platform' in kwargs:
|
||||
platform = kwargs.pop('platform')
|
||||
|
||||
if not isinstance(platform, str):
|
||||
errorstring = "'platform' value must be type of string"
|
||||
raise TypeError(errorstring)
|
||||
elif platform.lower() not in ['live', 'test']:
|
||||
errorstring = "'platform' must be the value of 'live' or 'test'"
|
||||
raise ValueError(errorstring)
|
||||
|
||||
if not request_data.get('merchantAccount'):
|
||||
request_data['merchantAccount'] = self.merchant_account
|
||||
|
||||
with_app_info = [
|
||||
"authorise",
|
||||
"authorise3d",
|
||||
"authorise3ds2",
|
||||
"payments",
|
||||
"paymentSession",
|
||||
"paymentLinks",
|
||||
"paymentMethodsBalance",
|
||||
"sessions"
|
||||
]
|
||||
|
||||
if action in with_app_info:
|
||||
if 'applicationInfo' in request_data:
|
||||
request_data['applicationInfo'].update({
|
||||
"adyenLibrary": {
|
||||
"name": settings.LIB_NAME,
|
||||
"version": settings.LIB_VERSION
|
||||
}
|
||||
})
|
||||
else:
|
||||
request_data['applicationInfo'] = {
|
||||
"adyenLibrary": {
|
||||
"name": settings.LIB_NAME,
|
||||
"version": settings.LIB_VERSION
|
||||
}
|
||||
}
|
||||
# Adyen requires this header to be set and uses the combination of
|
||||
# merchant account and merchant reference to determine uniqueness.
|
||||
headers = {}
|
||||
if idempotency_key:
|
||||
headers[self.IDEMPOTENCY_HEADER_NAME] = idempotency_key
|
||||
url = self._determine_checkout_url(platform, action)
|
||||
|
||||
raw_response, raw_request, status_code, headers = \
|
||||
self.http_client.request(url, json=request_data,
|
||||
xapikey=xapikey, headers=headers,
|
||||
**kwargs)
|
||||
|
||||
# Creates AdyenResponse if request was successful, raises error if not.
|
||||
adyen_result = self._handle_response(url, raw_response, raw_request,
|
||||
status_code, headers,
|
||||
request_data)
|
||||
|
||||
return adyen_result
|
||||
|
||||
def hpp_payment(self, request_data, action, hmac_key="", **kwargs):
|
||||
if not self.http_init:
|
||||
self._init_http_client()
|
||||
|
||||
platform = self.platform
|
||||
if not isinstance(platform, str):
|
||||
errorstring = "'platform' must be type string"
|
||||
raise TypeError(errorstring)
|
||||
elif platform.lower() not in ['live', 'test']:
|
||||
errorstring = " 'platform' must be the value of 'live' or 'test' "
|
||||
raise ValueError(errorstring)
|
||||
|
||||
if 'skinCode' not in request_data:
|
||||
request_data['skinCode'] = self.skin_code
|
||||
|
||||
hmac = self.hmac
|
||||
|
||||
if 'merchantAccount' not in request_data:
|
||||
request_data['merchantAccount'] = self.merchant_account
|
||||
if request_data['merchantAccount'] == "":
|
||||
request_data['merchantAccount'] = self.merchant_account
|
||||
|
||||
request_data["merchantSig"] = util.generate_hpp_sig(request_data, hmac)
|
||||
|
||||
url = self._determine_hpp_url(platform, action)
|
||||
|
||||
adyen_result = {
|
||||
'url': url,
|
||||
'message': request_data
|
||||
}
|
||||
|
||||
return adyen_result
|
||||
|
||||
def _handle_response(self, url, raw_response, raw_request,
|
||||
status_code, headers, request_dict):
|
||||
"""This parses the content from raw communication, raising an error if
|
||||
anything other than 200 was returned.
|
||||
|
||||
Args:
|
||||
url (str): URL where request was made
|
||||
raw_response (str): The raw communication sent to Adyen
|
||||
raw_request (str): The raw response returned by Adyen
|
||||
status_code (int): The HTTP status code
|
||||
headers (dict): Key/Value of the headers.
|
||||
request_dict (dict): The original request dictionary that was given
|
||||
to the HTTPClient.
|
||||
|
||||
Returns:
|
||||
AdyenResult: Result object if successful.
|
||||
"""
|
||||
if (status_code != 200 and status_code != 201):
|
||||
response = {}
|
||||
# If the result can't be parsed into json, most likely is raw html.
|
||||
# Some response are neither json or raw html, handle them here:
|
||||
if raw_response:
|
||||
response = json_lib.loads(raw_response)
|
||||
# Pass raised error to error handler.
|
||||
self._handle_http_error(url, response, status_code,
|
||||
headers.get('pspReference'),
|
||||
raw_request, raw_response,
|
||||
headers, request_dict)
|
||||
|
||||
try:
|
||||
if response['errorCode']:
|
||||
raise AdyenAPICommunicationError(
|
||||
"Unexpected error while communicating with Adyen."
|
||||
" Received the response data:'{}', HTTP Code:'{}'. "
|
||||
"Please reach out to support@adyen.com if the "
|
||||
"problem persists with the psp:{}".format(
|
||||
raw_response,
|
||||
status_code,
|
||||
headers.get('pspReference')),
|
||||
status_code=status_code,
|
||||
raw_request=raw_request,
|
||||
raw_response=raw_response,
|
||||
url=url,
|
||||
psp=headers.get('pspReference'),
|
||||
headers=headers,
|
||||
error_code=response['errorCode'])
|
||||
except KeyError:
|
||||
erstr = 'KeyError: errorCode'
|
||||
raise AdyenAPICommunicationError(erstr)
|
||||
else:
|
||||
try:
|
||||
response = json_lib.loads(raw_response)
|
||||
psp = headers.get('pspReference', response.get('pspReference'))
|
||||
return AdyenResult(message=response, status_code=status_code,
|
||||
psp=psp, raw_request=raw_request,
|
||||
raw_response=raw_response)
|
||||
except ValueError:
|
||||
# Couldn't parse json so try to pull error from html.
|
||||
|
||||
error = self._error_from_hpp(raw_response)
|
||||
|
||||
message = request_dict
|
||||
|
||||
reference = message.get("reference",
|
||||
message.get("merchantReference"))
|
||||
|
||||
errorstring = """Unable to retrieve payment "
|
||||
list. Received the error: {}. Please verify your request "
|
||||
and try again. If the issue persists, please reach out to "
|
||||
support@adyen.com including the "
|
||||
merchantReference: {}""".format(error, reference),
|
||||
|
||||
raise AdyenInvalidRequestError(errorstring)
|
||||
|
||||
    def _handle_http_error(self, url, response_obj, status_code, psp_ref,
                           raw_request, raw_response, headers, message):
        """This function handles the non 200 responses from Adyen, raising an
        error that should provide more information.

        Note: status codes (or message sub-cases) not matched below simply
        return None; the caller is expected to raise a generic error then.

        Args:
            url (str): url of the request
            response_obj (dict): Dict containing the parsed JSON response from
                Adyen
            status_code (int): HTTP status code of the request
            psp_ref (str): Psp reference of the request attempt
            raw_request (str): The raw request placed to Adyen
            raw_response (str): The raw response(body) returned by Adyen
            headers(dict): headers of the response
            message (dict): the original request dictionary

        Returns:
            None
        """

        if status_code == 404:
            # A 404 on the configured merchant specific url almost certainly
            # means that url itself is wrong.
            if url == self.merchant_specific_url:
                erstr = "Received a 404 for url:'{}'. Please ensure that" \
                        " the custom merchant specific url is correct" \
                    .format(url)
                raise AdyenAPICommunicationError(erstr,
                                                 error_code=response_obj.get(
                                                     "errorCode"))
            else:
                erstr = "Unexpected error while communicating with Adyen." \
                        " Please reach out to support@adyen.com" \
                        " if the problem persists"
                raise AdyenAPICommunicationError(erstr,
                                                 raw_request=raw_request,
                                                 raw_response=raw_response,
                                                 url=url,
                                                 psp=psp_ref,
                                                 headers=headers,
                                                 error_code=response_obj.get(
                                                     "errorCode"))
        elif status_code == 400:
            # Validation failure: surface Adyen's errorCode and message.
            # NOTE(review): direct indexing assumes both keys exist in every
            # 400 body — confirm against the API.
            erstr = "Received validation error with errorCode: %s," \
                    " message: %s, HTTP Code: %s. Please verify" \
                    " the values provided. Please reach out" \
                    " to support@adyen.com if the problem persists," \
                    " providing the PSP reference: %s" % (
                        response_obj["errorCode"], response_obj["message"],
                        status_code, psp_ref)

            raise AdyenAPIValidationError(erstr, error_code=response_obj.get(
                "errorCode"))
        elif status_code == 401:
            # Credentials rejected.
            erstr = "Unable to authenticate with Adyen's Servers." \
                    " Please verify the credentials set with the Adyen base" \
                    " class. Please reach out to your Adyen Admin" \
                    " if the problem persists"
            raise AdyenAPIAuthenticationError(erstr,
                                              error_code=response_obj.get(
                                                  "errorCode"))
        elif status_code == 403:

            if response_obj.get("message") == "Invalid Merchant Account":
                # NOTE(review): raw_request is indexed as a dict here even
                # though the docstring calls it a raw (string) request —
                # confirm callers always pass the dict form for this path.
                erstr = ("You provided the merchant account:'%s' that"
                         " doesn't exist or you don't have access to it.\n"
                         "Please verify the merchant account provided. \n"
                         "Reach out to support@adyen.com"
                         " if the issue persists") \
                    % raw_request['merchantAccount']
                raise AdyenAPIInvalidPermission(erstr,
                                                error_code=response_obj.get(
                                                    "errorCode"))

            # Any other 403: generic permission error.
            erstr = "Unable to perform the requested action. message: %s." \
                    " If you think your webservice user: %s might not have" \
                    " the necessary permissions to perform this request." \
                    " Please reach out to support@adyen.com, providing" \
                    " the PSP reference: %s" % (
                        response_obj["message"], self.username, psp_ref)
            raise AdyenAPIInvalidPermission(erstr, error_code=response_obj.get(
                "errorCode"))
        elif status_code == 422:
            # Only the known invalid-amount message is mapped; any other 422
            # falls through and returns None.
            if response_obj.get("message") == "Invalid amount specified":
                raise AdyenAPIInvalidAmount(
                    "Invalid amount specified"
                    "Amount may be improperly formatted, too small or too big."
                    "If the issue persists, contact support@adyen.com",
                    error_code=response_obj.get("errorCode"))

        elif status_code == 500:
            if response_obj.get("errorType") == "validation":
                err_args = (response_obj.get("errorCode"),
                            response_obj.get("message"),
                            status_code)
                erstr = "Received validation error with errorCode: %s," \
                        " message: %s, HTTP Code: %s. Please verify" \
                        " the values provided." % err_args
                raise AdyenAPIValidationError(erstr,
                                              error_code=response_obj.get(
                                                  "errorCode"))

            # Specific serialization failure Adyen returns when an amount
            # is sent with a decimal point instead of in cents.
            if response_obj.get("message") == "Failed to serialize node " \
                                              "Failed to parse [123.34]" \
                                              " as a Long":
                raise AdyenAPIInvalidFormat(
                    "The payment amount must be set in cents,"
                    " and can not contain commas or points.",
                    error_code=response_obj.get("errorCode")
                )
        else:
            # Any other status code: raise a generic communication error
            # carrying the full exchange for debugging.
            raise AdyenAPICommunicationError(
                "Unexpected error while communicating with Adyen. Received the"
                " response data:'{}', HTTP Code:'{}'. Please reach out to "
                "support@adyen.com if the problem persists"
                " with the psp:{}".format(raw_response, status_code, psp_ref),
                status_code=status_code,
                raw_request=raw_request,
                raw_response=raw_response,
                url=url,
                psp=psp_ref,
                headers=headers, error_code=response_obj.get("errorCode"))
|
||||
|
||||
@staticmethod
|
||||
def _error_from_hpp(html):
|
||||
# Must be updated when Adyen response is changed:
|
||||
match_obj = re.search(r'>Error:\s*(.*?)<br', html)
|
||||
if match_obj:
|
||||
return match_obj.group(1)
|
||||
@@ -1,71 +0,0 @@
|
||||
from __future__ import absolute_import, division, unicode_literals
|
||||
|
||||
|
||||
class AdyenError(Exception):
    """Base exception for the Adyen package.

    Carries the request/response context of a failed API call so callers
    can log or inspect it.
    """

    def __init__(self,
                 message,
                 raw_request="",
                 raw_response="",
                 url="",
                 psp="",
                 headers="",
                 status_code="",
                 error_code=""):
        self.message = message            # human readable description
        self.raw_request = raw_request    # body sent to Adyen
        self.raw_response = raw_response  # body returned by Adyen
        self.url = url                    # endpoint that was called
        self.psp = psp                    # Adyen PSP reference, if any
        self.headers = headers            # response headers
        self.status_code = status_code    # HTTP status code
        self.error_code = error_code      # Adyen-specific error code

    def __str__(self):
        return repr("{}:{}".format(self.__class__.__name__, self.message))

    def debug(self):
        """Return a multi-line description of the failed call.

        Bug fix: the original format string was missing the newline between
        the url and request fields, fusing them onto one line.
        """
        return ("class: {}\nmessage: {}\nHTTP status_code:{}\nurl: {}\n"
                "request: {}\nresponse: {}\nheaders: {}"
                .format(self.__class__.__name__, self.message,
                        self.status_code, self.url, self.raw_request,
                        self.raw_response, self.headers))
|
||||
|
||||
|
||||
class AdyenInvalidRequestError(AdyenError):
    """Raised before a request is sent, when the client is missing required
    configuration (for example an hmac key or webservice xapikey)."""
    pass


class AdyenAPIResponseError(AdyenError):
    """Base class for errors derived from an Adyen API response."""

    def __init__(self,
                 message,
                 *args,
                 **kwargs):
        super(AdyenAPIResponseError, self).__init__(message, *args, **kwargs)


class AdyenAPIAuthenticationError(AdyenAPIResponseError):
    """Raised on HTTP 401: the credentials were rejected by Adyen."""
    pass


class AdyenAPIInvalidPermission(AdyenAPIResponseError):
    """Raised on HTTP 403: the webservice user lacks permission or the
    merchant account is invalid."""
    pass


class AdyenAPICommunicationError(AdyenAPIResponseError):
    """Raised for unexpected or unparsable responses from Adyen."""
    pass


class AdyenAPIValidationError(AdyenAPIResponseError):
    """Raised when Adyen reports a request validation failure."""
    pass


class AdyenAPIInvalidAmount(AdyenAPIResponseError):
    """Raised when Adyen rejects the amount that was specified."""
    pass


class AdyenAPIInvalidFormat(AdyenAPIResponseError):
    """Raised when a request value is serialized in the wrong format
    (for example an amount not expressed in cents)."""
    pass


class AdyenEndpointInvalidFormat(AdyenError):
    """Raised when an endpoint has an invalid format."""
    pass
|
||||
@@ -1,351 +0,0 @@
|
||||
#!/bin/python
|
||||
|
||||
from __future__ import absolute_import, division, unicode_literals
|
||||
import sys
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError:
|
||||
requests = None
|
||||
|
||||
try:
|
||||
import pycurl
|
||||
except ImportError:
|
||||
pycurl = None
|
||||
|
||||
try:
|
||||
# Python 3
|
||||
from urllib.parse import urlencode
|
||||
from urllib.request import Request, urlopen
|
||||
from urllib.error import HTTPError
|
||||
except ImportError:
|
||||
# Python 2
|
||||
from urllib import urlencode
|
||||
from urllib2 import Request, urlopen, HTTPError
|
||||
|
||||
try:
|
||||
# Python 2
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
# Python 3
|
||||
from io import BytesIO
|
||||
|
||||
import json as json_lib
|
||||
import base64
|
||||
|
||||
|
||||
class HTTPClient(object):
|
||||
def __init__(
|
||||
self,
|
||||
user_agent_suffix,
|
||||
lib_version,
|
||||
force_request=None,
|
||||
timeout=None,
|
||||
):
|
||||
# Check if requests already available, default to urllib
|
||||
self.user_agent = user_agent_suffix + lib_version
|
||||
if not force_request:
|
||||
if requests:
|
||||
self.request = self._requests_post
|
||||
elif pycurl:
|
||||
self.request = self._pycurl_post
|
||||
else:
|
||||
self.request = self._urllib_post
|
||||
else:
|
||||
if force_request == 'requests':
|
||||
self.request = self._requests_post
|
||||
elif force_request == 'pycurl':
|
||||
self.request = self._pycurl_post
|
||||
else:
|
||||
self.request = self._urllib_post
|
||||
|
||||
self.timeout = timeout
|
||||
|
||||
def _pycurl_post(
|
||||
self,
|
||||
url,
|
||||
json=None,
|
||||
data=None,
|
||||
username="",
|
||||
password="",
|
||||
xapikey="",
|
||||
headers=None
|
||||
):
|
||||
"""This function will POST to the url endpoint using pycurl. returning
|
||||
an AdyenResult object on 200 HTTP response. Either json or data has to
|
||||
be provided. If username and password are provided, basic auth will be
|
||||
used.
|
||||
|
||||
|
||||
Args:
|
||||
url (str): url to send the POST
|
||||
json (dict, optional): Dict of the JSON to POST
|
||||
data (dict, optional): Dict, presumed flat structure
|
||||
of key/value of request to place
|
||||
username (str, optional): Username for basic auth. Must be included
|
||||
as part of password.
|
||||
password (str, optional): Password for basic auth. Must be included
|
||||
as part of username.
|
||||
xapikey (str, optional): Adyen API key. Will be used for auth
|
||||
if username and password are absent.
|
||||
headers (dict, optional): Key/Value pairs of headers to include
|
||||
timeout (int, optional): Default 30. Timeout for the request.
|
||||
|
||||
Returns:
|
||||
str: Raw response received
|
||||
str: Raw request placed
|
||||
int: HTTP status code, eg 200,404,401
|
||||
dict: Key/Value pairs of the headers received.
|
||||
"""
|
||||
if headers is None:
|
||||
headers = {}
|
||||
|
||||
response_headers = {}
|
||||
|
||||
curl = pycurl.Curl()
|
||||
curl.setopt(curl.URL, url)
|
||||
if sys.version_info[0] >= 3:
|
||||
stringbuffer = BytesIO()
|
||||
else:
|
||||
stringbuffer = StringIO()
|
||||
|
||||
curl.setopt(curl.WRITEDATA, stringbuffer)
|
||||
|
||||
# Add User-Agent header to request so that the
|
||||
# request can be identified as coming from the Adyen Python library.
|
||||
headers['User-Agent'] = self.user_agent
|
||||
|
||||
if username and password:
|
||||
curl.setopt(curl.USERPWD, '%s:%s' % (username, password))
|
||||
elif xapikey:
|
||||
headers["X-API-KEY"] = xapikey
|
||||
|
||||
# Convert the header dict to formatted array as pycurl needs.
|
||||
if sys.version_info[0] >= 3:
|
||||
header_list = ["%s:%s" % (k, v) for k, v in headers.items()]
|
||||
else:
|
||||
header_list = ["%s:%s" % (k, v) for k, v in headers.iteritems()]
|
||||
# Ensure proper content-type when adding headers
|
||||
if json:
|
||||
header_list.append("Content-Type:application/json")
|
||||
|
||||
curl.setopt(pycurl.HTTPHEADER, header_list)
|
||||
|
||||
# Return regular dict instead of JSON encoded dict for request:
|
||||
raw_store = json
|
||||
|
||||
# Set the request body.
|
||||
raw_request = json_lib.dumps(json) if json else urlencode(data)
|
||||
curl.setopt(curl.POSTFIELDS, raw_request)
|
||||
|
||||
curl.setopt(curl.TIMEOUT, self.timeout)
|
||||
curl.perform()
|
||||
|
||||
# Grab the response content
|
||||
result = stringbuffer.getvalue()
|
||||
status_code = curl.getinfo(curl.RESPONSE_CODE)
|
||||
|
||||
curl.close()
|
||||
|
||||
# Return regular dict instead of JSON encoded dict for request:
|
||||
raw_request = raw_store
|
||||
|
||||
return result, raw_request, status_code, response_headers
|
||||
|
||||
def _requests_post(
|
||||
self,
|
||||
url,
|
||||
json=None,
|
||||
data=None,
|
||||
username="",
|
||||
password="",
|
||||
xapikey="",
|
||||
headers=None
|
||||
):
|
||||
"""This function will POST to the url endpoint using requests.
|
||||
Returning an AdyenResult object on 200 HTTP response.
|
||||
Either json or data has to be provided.
|
||||
If username and password are provided, basic auth will be used.
|
||||
|
||||
|
||||
Args:
|
||||
url (str): url to send the POST
|
||||
json (dict, optional): Dict of the JSON to POST
|
||||
data (dict, optional): Dict, presumed flat structure of key/value
|
||||
of request to place
|
||||
username (str, optionl): Username for basic auth. Must be included
|
||||
as part of password.
|
||||
password (str, optional): Password for basic auth. Must be included
|
||||
as part of username.
|
||||
xapikey (str, optional): Adyen API key. Will be used for auth
|
||||
if username and password are absent.
|
||||
headers (dict, optional): Key/Value pairs of headers to include
|
||||
timeout (int, optional): Default 30. Timeout for the request.
|
||||
|
||||
Returns:
|
||||
str: Raw response received
|
||||
str: Raw request placed
|
||||
int: HTTP status code, eg 200,404,401
|
||||
dict: Key/Value pairs of the headers received.
|
||||
"""
|
||||
if headers is None:
|
||||
headers = {}
|
||||
|
||||
# Adding basic auth if username and password provided.
|
||||
auth = None
|
||||
if username and password:
|
||||
auth = requests.auth.HTTPBasicAuth(username, password)
|
||||
elif xapikey:
|
||||
headers['x-api-key'] = xapikey
|
||||
|
||||
# Add User-Agent header to request so that the request
|
||||
# can be identified as coming from the Adyen Python library.
|
||||
headers['User-Agent'] = self.user_agent
|
||||
|
||||
request = requests.post(
|
||||
url=url,
|
||||
auth=auth,
|
||||
data=data,
|
||||
json=json,
|
||||
headers=headers,
|
||||
timeout=self.timeout
|
||||
)
|
||||
|
||||
# Ensure either json or data is returned for raw request
|
||||
# Updated: Only return regular dict,
|
||||
# don't switch out formats if this is not important.
|
||||
message = json
|
||||
|
||||
return request.text, message, request.status_code, request.headers
|
||||
|
||||
def _urllib_post(
|
||||
self,
|
||||
url,
|
||||
json=None,
|
||||
data=None,
|
||||
username="",
|
||||
password="",
|
||||
xapikey="",
|
||||
headers=None,
|
||||
):
|
||||
|
||||
"""This function will POST to the url endpoint using urllib2. returning
|
||||
an AdyenResult object on 200 HTTP responce. Either json or data has to
|
||||
be provided. If username and password are provided, basic auth will be
|
||||
used.
|
||||
|
||||
Args:
|
||||
url (str): url to send the POST
|
||||
json (dict, optional): Dict of the JSON to POST
|
||||
data (dict, optional): Dict, presumed flat structure of
|
||||
key/value of request to place as
|
||||
www-form
|
||||
username (str, optional): Username for basic auth. Must be
|
||||
uncluded as part of password.
|
||||
password (str, optional): Password for basic auth. Must be
|
||||
included as part of username.
|
||||
xapikey (str, optional): Adyen API key. Will be used for auth
|
||||
if username and password are absent.
|
||||
headers (dict, optional): Key/Value pairs of headers to include
|
||||
|
||||
Returns:
|
||||
str: Raw response received
|
||||
str: Raw request placed
|
||||
int: HTTP status code, eg 200,404,401
|
||||
dict: Key/Value pairs of the headers received.
|
||||
"""
|
||||
|
||||
if headers is None:
|
||||
headers = {}
|
||||
|
||||
# Store regular dict to return later:
|
||||
raw_store = json
|
||||
|
||||
raw_request = json_lib.dumps(json) if json else urlencode(data)
|
||||
url_request = Request(url, data=raw_request.encode('utf8'))
|
||||
if json:
|
||||
url_request.add_header('Content-Type', 'application/json')
|
||||
elif not data:
|
||||
raise ValueError("Please provide either a json or a data field.")
|
||||
|
||||
# Add User-Agent header to request so that the
|
||||
# request can be identified as coming from the Adyen Python library.
|
||||
headers['User-Agent'] = self.user_agent
|
||||
|
||||
# Set regular dict to return as raw_request:
|
||||
raw_request = raw_store
|
||||
|
||||
# Adding basic auth is username and password provided.
|
||||
if username and password:
|
||||
if sys.version_info[0] >= 3:
|
||||
basic_authstring = base64.encodebytes(('%s:%s' %
|
||||
(username, password))
|
||||
.encode()).decode(). \
|
||||
replace('\n', '')
|
||||
else:
|
||||
basic_authstring = base64.encodestring('%s:%s' % (username,
|
||||
password)). \
|
||||
replace('\n', '')
|
||||
url_request.add_header("Authorization",
|
||||
"Basic %s" % basic_authstring)
|
||||
elif xapikey:
|
||||
headers["X-API-KEY"] = xapikey
|
||||
|
||||
# Adding the headers to the request.
|
||||
for key, value in headers.items():
|
||||
url_request.add_header(key, str(value))
|
||||
|
||||
# URLlib raises all non 200 responses as en error.
|
||||
try:
|
||||
response = urlopen(url_request, timeout=self.timeout)
|
||||
except HTTPError as e:
|
||||
raw_response = e.read()
|
||||
|
||||
return raw_response, raw_request, e.getcode(), e.headers
|
||||
else:
|
||||
raw_response = response.read()
|
||||
response.close()
|
||||
|
||||
# The dict(response.info()) is the headers of the response
|
||||
# Raw response, raw request, status code and headers returned
|
||||
return (raw_response, raw_request,
|
||||
response.getcode(), dict(response.info()))
|
||||
|
||||
def request(
|
||||
self,
|
||||
url,
|
||||
json="",
|
||||
data="",
|
||||
username="",
|
||||
password="",
|
||||
headers=None,
|
||||
):
|
||||
"""This is overridden on module initialization. This function will make
|
||||
an HTTP POST to a given url. Either json/data will be what is posted to
|
||||
the end point. he HTTP request needs to be basicAuth when username and
|
||||
password are provided. a headers dict maybe provided,
|
||||
whatever the values are should be applied.
|
||||
|
||||
Args:
|
||||
url (str): url to send the POST
|
||||
json (dict, optional): Dict of the JSON to POST
|
||||
data (dict, optional): Dict, presumed flat structure of
|
||||
key/value of request to place as
|
||||
www-form
|
||||
username (str, optional): Username for basic auth. Must be
|
||||
included as part of password.
|
||||
password (str, optional): Password for basic auth. Must be
|
||||
included as part of username.
|
||||
xapikey (str, optional): Adyen API key. Will be used for auth
|
||||
if username and password are absent.
|
||||
headers (dict, optional): Key/Value pairs of headers to include
|
||||
Returns:
|
||||
str: Raw request placed
|
||||
str: Raw response received
|
||||
int: HTTP status code, eg 200,404,401
|
||||
dict: Key/Value pairs of the headers received.
|
||||
"""
|
||||
raise NotImplementedError('request of HTTPClient should have been '
|
||||
'overridden on initialization. '
|
||||
'Otherwise, can be overridden to '
|
||||
'supply your own post method')
|
||||
@@ -1,369 +0,0 @@
|
||||
from __future__ import absolute_import, division, unicode_literals
|
||||
|
||||
import datetime
|
||||
|
||||
from Adyen import AdyenClient
|
||||
|
||||
|
||||
class AdyenBase(object):
    """Attribute-forwarding base: a small set of credential attributes is
    stored on (and read from) the wrapped ``self.client`` instead of the
    object itself."""

    def __setattr__(self, attr, value):
        # These attributes live on the underlying client object.
        client_attr = ["username", "password", "platform"]
        if attr in client_attr:
            # NOTE(review): falsy values are silently discarded — neither
            # the client nor this object is updated. Confirm this is
            # intended.
            if value:
                self.client[attr] = value
        else:
            super(AdyenBase, self).__setattr__(attr, value)

    def __getattr__(self, attr):
        client_attr = ["username", "password", "platform"]
        if attr in client_attr:
            return self.client[attr]
        # NOTE(review): any other missing attribute resolves to None
        # (implicit return) instead of raising AttributeError — callers may
        # depend on this quirk, so it is documented rather than changed.
|
||||
|
||||
|
||||
class AdyenServiceBase(AdyenBase):
    """Shared base for the Adyen API service wrappers: holds the API client
    the service calls through.

    Args:
        client (AdyenAPIClient, optional): Client to use; a fresh
            AdyenClient is created when none is given.
    """

    def __init__(self, client=None):
        self.client = client if client else AdyenClient()
|
||||
|
||||
|
||||
class AdyenRecurring(AdyenServiceBase):
    """The Adyen API Recurring Service.

    Implements listRecurringDetails and disable. Please refer to the
    Recurring Manual for specifics around the API:
    https://docs.adyen.com/online-payments/tokenization

    Args:
        client (AdyenAPIClient, optional): An API client for the service to
            use. If not provided, a new API client will be created.
    """

    def __init__(self, client=None):
        super(AdyenRecurring, self).__init__(client=client)
        self.service = "Recurring"

    def list_recurring_details(self, request, **kwargs):
        """Fetch the recurring details stored for a shopper."""
        return self.client.call_api(request, self.service,
                                    "listRecurringDetails", **kwargs)

    def disable(self, request, **kwargs):
        """Disable a specific recurring contract."""
        if 'recurringDetailReference' not in request:
            raise ValueError("Include a 'recurringDetailReference'"
                             " to disable a specific recurring contract.")
        return self.client.call_api(request, self.service,
                                    "disable", **kwargs)
|
||||
|
||||
|
||||
class AdyenHPP(AdyenServiceBase):
    """The Adyen HPP (Hosted Payment Pages) Service.

    Implements the directory lookup request, which returns the payment
    methods available for a given shopper, and signed HPP payment setup.
    See the HPP manual, directory lookup section:
    https://docs.adyen.com/online-payments/classic-integrations/hosted-payment-pages/directory-lookup

    Args:
        client (AdyenAPIClient, optional): An API client for the service to
            use. If not provided, a new API client will be created.
    """

    def __init__(self, client=None):
        super(AdyenHPP, self).__init__(client=client)

    def directory_lookup(self, request, **kwargs):
        """Return the payment methods available for the shopper.

        The request must carry a 'sessionValidity' in the exact
        '%Y-%m-%dT%H:%M:%SZ' format.
        """
        try:
            datetime.datetime.strptime(request['sessionValidity'],
                                       '%Y-%m-%dT%H:%M:%SZ')
        except ValueError:
            raise ValueError(
                "Incorrect date format, should be Y-m-dH:M:SZ,"
                " use datetime.strftime('%Y-%m-%dT%H:%M:%SZ')"
                " to format a datetime object.")

        return self.client.call_hpp(request, "directory")

    def hpp_payment(self, request, skip_details=None, **kwargs):
        """Build a signed HPP payment ('skipDetails' or 'select')."""
        action = "skipDetails" if skip_details else "select"

        if action == "skipDetails" and "issuerId" not in request:
            request['issuerId'] = ""

        if type(request['sessionValidity']) is not str:
            raise TypeError(
                'HPP: sessionValidity must be type of str,'
                ' use datetime.strftime to convert and format.')

        recurring_keys = ("shopperEmail", "shopperReference",
                          "recurringContract")
        if all(key in request for key in recurring_keys):
            contract = request['recurringContract']
            if contract not in ('ONECLICK', 'RECURRING',
                                'ONECLICK,RECURRING'):
                raise ValueError(
                    "HPP: recurringContract must be on of the following"
                    " values: 'ONECLICK', 'RECURRING',"
                    " 'ONECLICK,RECURRING'")

        return self.client.hpp_payment(request, action)
|
||||
|
||||
|
||||
class AdyenPayment(AdyenServiceBase):
    """The Adyen API Payment Service.

    API calls implemented: authorise, authorise3d, adjustAuthorisation,
    cancel, capture, refund and cancelOrRefund. Please refer to the API
    Explorer for specifics around these APIs:
    https://docs.adyen.com/api-explorer/

    This class is accessible as adyen.payment.method(args).

    Args:
        client (AdyenAPIClient, optional): An API client for the service to
            use. If not provided, a new API client will be created.
    """

    def __init__(self, client=None):
        super(AdyenPayment, self).__init__(client=client)
        self.service = "Payment"

    def authorise(self, request, idempotency_key=None, **kwargs):
        """Authorise a payment; rejects empty recurring-contract fields."""
        if request.get('shopperEmail') == '':
            raise ValueError(
                'shopperEmail must contain the shopper email'
                ' when authorising recurring contracts.')
        if request.get('shopperReference') == '':
            raise ValueError(
                'shopperReference must contain the shopper'
                ' name when authorising recurring contracts.')

        return self.client.call_api(request, self.service,
                                    "authorise", idempotency_key, **kwargs)

    def authorise3d(self, request, idempotency_key=None, **kwargs):
        """Complete a 3D Secure authorisation."""
        return self.client.call_api(request, self.service,
                                    "authorise3d", idempotency_key, **kwargs)

    def adjustAuthorisation(self, request, **kwargs):
        """Adjust a previously given authorisation."""
        return self.client.call_api(request, self.service,
                                    "adjustAuthorisation", **kwargs)

    def cancel(self, request, idempotency_key=None, **kwargs):
        """Cancel an authorised payment."""
        return self.client.call_api(request, self.service,
                                    "cancel", idempotency_key, **kwargs)

    def capture(self, request, idempotency_key=None, **kwargs):
        """Capture an authorised payment after validating the request."""
        amount = request['modificationAmount']['value']
        if amount == "" or amount == "0":
            raise ValueError(
                "Set the 'modificationAmount' to the original transaction"
                " amount, or less for a partial capture. "
                "modificationAmount should be an object with the following"
                " keys: {'currency':,'value':}")
        if request['originalReference'] == "":
            raise ValueError("Set the 'originalReference' to the psp "
                             "reference of the transaction to be modified")

        return self.client.call_api(request, self.service,
                                    "capture", idempotency_key, **kwargs)

    def refund(self, request, idempotency_key=None, **kwargs):
        """Refund a captured payment after validating the amount."""
        amount = request['modificationAmount']['value']
        if amount == "" or amount == "0":
            raise ValueError(
                "To refund this payment, provide the original value. "
                "Set the value to less than the original amount, "
                "to partially refund this payment.")

        return self.client.call_api(request, self.service,
                                    "refund", idempotency_key, **kwargs)

    def cancel_or_refund(self, request, idempotency_key=None, **kwargs):
        """Cancel the payment if possible, otherwise refund it."""
        return self.client.call_api(request, self.service, "cancelOrRefund",
                                    idempotency_key, **kwargs)
|
||||
|
||||
|
||||
class AdyenThirdPartyPayout(AdyenServiceBase):
    """Client for the Adyen third-party Payout service.

    https://docs.adyen.com/api-explorer/#/Payout/overview

    The AdyenThirdPartyPayout class is accessible as
    adyen.payout.method(args).

    Args:
        client (AdyenAPIClient, optional): An API client for the service to
            use. If not provided, a new API client will be created.
    """

    def __init__(self, client=None):
        super(AdyenThirdPartyPayout, self).__init__(client=client)
        self.service = "Payout"

    def confirm(self, request=None, **kwargs):
        """Confirm a previously submitted payout."""
        return self.client.call_api(
            request, self.service, "confirmThirdParty", **kwargs)

    def decline(self, request=None, **kwargs):
        """Decline a previously submitted payout."""
        return self.client.call_api(
            request, self.service, "declineThirdParty", **kwargs)

    def store_detail(self, request=None, **kwargs):
        """Store payout details for later reuse."""
        return self.client.call_api(
            request, self.service, "storeDetail", **kwargs)

    def submit(self, request=None, **kwargs):
        """Submit a payout using previously stored details."""
        return self.client.call_api(
            request, self.service, "submitThirdParty", **kwargs)

    def store_detail_and_submit(self, request=None, **kwargs):
        """Store payout details and submit the payout in one call."""
        return self.client.call_api(
            request, self.service, "storeDetailAndSubmitThirdParty",
            **kwargs)
|
||||
|
||||
|
||||
class AdyenCheckoutApi(AdyenServiceBase):
    """Client for the Adyen Checkout API.

    API calls currently implemented: paymentMethods, payments,
    payments/details, paymentSession, paymentsResult, originKeys,
    sessions, paymentMethods/balance, orders and orders/cancel.

    Please refer to the checkout documentation for specifics around
    the API: https://docs.adyen.com/online-payments

    Args:
        client (AdyenAPIClient, optional): An API client for the service to
            use. If not provided, a new API client will be created.
    """

    def __init__(self, client=None):
        super(AdyenCheckoutApi, self).__init__(client=client)
        self.service = "Checkout"

    def payment_methods(self, request, **kwargs):
        """List payment methods available for a merchant account."""
        # An explicitly empty merchantAccount is an error; absent is fine.
        if request.get('merchantAccount') == '':
            raise ValueError(
                'merchantAccount must contain the merchant account'
                ' when retrieving payment methods.')
        return self.client.call_checkout_api(
            request, "paymentMethods", **kwargs)

    def payments(self, request, idempotency_key=None, **kwargs):
        """Start a payment through the Checkout API."""
        return self.client.call_checkout_api(
            request, "payments", idempotency_key, **kwargs)

    def payments_details(self, request=None, idempotency_key=None, **kwargs):
        """Submit additional payment details (e.g. after a redirect)."""
        return self.client.call_checkout_api(
            request, "paymentsDetails", idempotency_key, **kwargs)

    def payment_session(self, request=None, **kwargs):
        """Create a payment session."""
        return self.client.call_checkout_api(
            request, "paymentSession", **kwargs)

    def payment_result(self, request=None, **kwargs):
        """Fetch the result of a payment session."""
        return self.client.call_checkout_api(
            request, "paymentsResult", **kwargs)

    def origin_keys(self, request=None, **kwargs):
        """Generate origin keys for web integrations."""
        return self.client.call_checkout_api(
            request, "originKeys", **kwargs)

    def sessions(self, request=None, **kwargs):
        """Create a Checkout session."""
        return self.client.call_checkout_api(
            request, "sessions", **kwargs)

    # Orders endpoints

    def payment_methods_balance(self, request, **kwargs):
        """Check a payment method balance (/paymentMethods/balance)."""
        return self.client.call_checkout_api(
            request, "paymentMethodsBalance", **kwargs)

    def orders(self, request, **kwargs):
        """Create an order (/orders)."""
        return self.client.call_checkout_api(
            request, "orders", **kwargs)

    def orders_cancel(self, request, **kwargs):
        """Cancel an order (/orders/cancel)."""
        return self.client.call_checkout_api(
            request, "ordersCancel", **kwargs)
|
||||
|
||||
|
||||
class AdyenBinLookup(AdyenServiceBase):
    """Client for the Adyen BinLookup service.

    API call currently implemented: getCostEstimate.
    Please refer to the Bin Lookup Manual for specifics around the API:
    https://docs.adyen.com/api-explorer/#/BinLookup/

    Args:
        client (AdyenAPIClient, optional): An API client for the service to
            use. If not provided, a new API client will be created.
    """

    def __init__(self, client=None):
        super(AdyenBinLookup, self).__init__(client=client)
        self.service = "BinLookup"

    def get_cost_estimate(self, request="", **kwargs):
        """Estimate the cost of processing a card (getCostEstimate)."""
        return self.client.call_api(
            request, self.service, "getCostEstimate", **kwargs)
|
||||
@@ -1,16 +0,0 @@
|
||||
# Those constants are used from the library only

# Endpoint URL templates: the "{}" placeholder is filled in at runtime
# (presumably with the environment, e.g. "test"/"live", or with a
# merchant-specific live URL prefix -- confirm against the client code).
BASE_PAL_URL = "https://pal-{}.adyen.com/pal/servlet"
PAL_LIVE_ENDPOINT_URL_TEMPLATE = "https://{}-pal-live" \
                                 ".adyenpayments.com/pal/servlet"
BASE_HPP_URL = "https://{}.adyen.com/hpp"
ENDPOINT_CHECKOUT_TEST = "https://checkout-test.adyen.com"
ENDPOINT_CHECKOUT_LIVE_SUFFIX = "https://{}-checkout-live" \
                                ".adyenpayments.com/checkout"

# Pinned Adyen API versions, one per service.
API_BIN_LOOKUP_VERSION = "v50"
API_CHECKOUT_VERSION = "v68"
API_CHECKOUT_UTILITY_VERSION = "v1"
API_RECURRING_VERSION = "v49"
API_PAYMENT_VERSION = "v64"
API_PAYOUT_VERSION = "v64"

# Library identification constants (presumably reported to the API,
# e.g. in a user-agent header -- confirm where these are consumed).
LIB_VERSION = "6.0.0"
LIB_NAME = "adyen-python-api-library"
|
||||
@@ -1,96 +0,0 @@
|
||||
from __future__ import absolute_import, division, unicode_literals
|
||||
|
||||
from itertools import chain
|
||||
from collections import OrderedDict
|
||||
import base64
|
||||
import hmac
|
||||
import hashlib
|
||||
import binascii
|
||||
|
||||
|
||||
def generate_hpp_sig(dict_object, hmac_key):
    """Compute the base64-encoded HMAC-SHA256 signature of an HPP request.

    The entries of *dict_object* are sorted by key, then all keys followed
    by all values are joined with ':' (after escaping '\\' and ':' in
    string values) and signed with the binary key decoded from *hmac_key*.

    Args:
        dict_object (dict): HPP parameters to sign. Mutated in place: an
            entry 'issuerId' whose value is the empty string is removed.
        hmac_key (str): Hex-encoded HMAC key.

    Returns:
        bytes: Base64-encoded HMAC-SHA256 digest.

    Raises:
        ValueError: If dict_object is not a dict.
    """
    # Bug fix: validate the type BEFORE indexing into the object, so a
    # non-dict argument raises the intended ValueError, not a TypeError.
    if not isinstance(dict_object, dict):
        raise ValueError("Must Provide dictionary object")

    # An empty issuerId must not take part in the signed string.
    if 'issuerId' in dict_object:
        if dict_object['issuerId'] == "":
            del dict_object['issuerId']

    def escape_val(val):
        # Integers pass through unchanged; '\' and ':' are escaped in
        # strings so the ':' join separator stays unambiguous.
        if isinstance(val, int):
            return val
        return val.replace('\\', '\\\\').replace(':', '\\:')

    hmac_key = binascii.a2b_hex(hmac_key)

    ordered_request = OrderedDict(sorted(dict_object.items(),
                                         key=lambda t: t[0]))

    # Sign "key1:key2:...:val1:val2:..." over the sorted entries.
    signing_string = ':'.join(
        map(escape_val, chain(map(str, ordered_request.keys()),
                              map(str, ordered_request.values()))))

    hm = hmac.new(hmac_key, signing_string.encode('utf-8'), hashlib.sha256)
    return base64.b64encode(hm.digest())
|
||||
|
||||
|
||||
def is_valid_hmac(dict_object, hmac_key):
    """Check the HMAC signature embedded in an HPP result.

    Args:
        dict_object (dict): HPP result parameters including
            additionalData.hmacSignature. Mutated in place: the
            'additionalData' entry is removed before re-signing.
        hmac_key (str): Hex-encoded HMAC key.

    Returns:
        bool: True if the recomputed signature matches the embedded one.

    Raises:
        ValueError: If additionalData carries an empty hmacSignature.
        KeyError: If 'additionalData' is missing entirely (pre-existing
            behavior -- callers appear to rely on passing it; confirm).
    """
    if 'additionalData' in dict_object:
        if dict_object['additionalData']['hmacSignature'] == "":
            raise ValueError("Must Provide hmacSignature in additionalData")
        else:
            expected_sign = dict_object['additionalData']['hmacSignature']
            del dict_object['additionalData']
    merchant_sign = generate_hpp_sig(dict_object, hmac_key)
    merchant_sign_str = merchant_sign.decode("utf-8")
    # Security fix: constant-time comparison avoids leaking timing
    # information about how many leading characters of the signature match.
    return hmac.compare_digest(merchant_sign_str, expected_sign)
|
||||
|
||||
|
||||
def generate_notification_sig(dict_object, hmac_key):
|
||||
if 'issuerId' in dict_object:
|
||||
if dict_object['issuerId'] == "":
|
||||
del dict_object['issuerId']
|
||||
|
||||
if not isinstance(dict_object, dict):
|
||||
raise ValueError("Must Provide dictionary object")
|
||||
|
||||
def escape_val(val):
|
||||
if isinstance(val, int):
|
||||
return val
|
||||
return val.replace('\\', '\\\\')
|
||||
|
||||
hmac_key = binascii.a2b_hex(hmac_key)
|
||||
|
||||
request_dict = dict(dict_object)
|
||||
request_dict['value'] = request_dict['amount']['value']
|
||||
request_dict['currency'] = request_dict['amount']['currency']
|
||||
|
||||
element_orders = [
|
||||
'pspReference',
|
||||
'originalReference',
|
||||
'merchantAccountCode',
|
||||
'merchantReference',
|
||||
'value',
|
||||
'currency',
|
||||
'eventCode',
|
||||
'success',
|
||||
]
|
||||
|
||||
signing_string = ':'.join(
|
||||
map(escape_val, map(str, (
|
||||
request_dict.get(element, '') for element in element_orders))))
|
||||
|
||||
hm = hmac.new(hmac_key, signing_string.encode('utf-8'), hashlib.sha256)
|
||||
return base64.b64encode(hm.digest())
|
||||
|
||||
|
||||
def is_valid_hmac_notification(dict_object, hmac_key):
    """Check the HMAC signature embedded in a webhook notification.

    Args:
        dict_object (dict): Notification payload including
            additionalData.hmacSignature. Mutated in place: the
            'additionalData' entry is removed before re-signing.
        hmac_key (str): Hex-encoded HMAC key.

    Returns:
        bool: True if the recomputed signature matches the embedded one.

    Raises:
        ValueError: If additionalData carries an empty hmacSignature.
        KeyError: If 'additionalData' is missing entirely (pre-existing
            behavior -- callers appear to rely on passing it; confirm).
    """
    if 'additionalData' in dict_object:
        if dict_object['additionalData']['hmacSignature'] == "":
            raise ValueError("Must Provide hmacSignature in additionalData")
        else:
            expected_sign = dict_object['additionalData']['hmacSignature']
            del dict_object['additionalData']
    merchant_sign = generate_notification_sig(dict_object, hmac_key)
    merchant_sign_str = merchant_sign.decode("utf-8")
    # Security fix: constant-time comparison avoids leaking timing
    # information about how many leading characters of the signature match.
    return hmac.compare_digest(merchant_sign_str, expected_sign)
|
||||
@@ -1 +0,0 @@
|
||||
pip
|
||||
@@ -1,28 +0,0 @@
|
||||
Copyright 2010 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@@ -1,125 +0,0 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Flask
|
||||
Version: 2.0.3
|
||||
Summary: A simple framework for building complex web applications.
|
||||
Home-page: https://palletsprojects.com/p/flask
|
||||
Author: Armin Ronacher
|
||||
Author-email: armin.ronacher@active-4.com
|
||||
Maintainer: Pallets
|
||||
Maintainer-email: contact@palletsprojects.com
|
||||
License: BSD-3-Clause
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Documentation, https://flask.palletsprojects.com/
|
||||
Project-URL: Changes, https://flask.palletsprojects.com/changes/
|
||||
Project-URL: Source Code, https://github.com/pallets/flask/
|
||||
Project-URL: Issue Tracker, https://github.com/pallets/flask/issues/
|
||||
Project-URL: Twitter, https://twitter.com/PalletsTeam
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Framework :: Flask
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
|
||||
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
|
||||
Requires-Python: >=3.6
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE.rst
|
||||
Requires-Dist: Werkzeug (>=2.0)
|
||||
Requires-Dist: Jinja2 (>=3.0)
|
||||
Requires-Dist: itsdangerous (>=2.0)
|
||||
Requires-Dist: click (>=7.1.2)
|
||||
Provides-Extra: async
|
||||
Requires-Dist: asgiref (>=3.2) ; extra == 'async'
|
||||
Provides-Extra: dotenv
|
||||
Requires-Dist: python-dotenv ; extra == 'dotenv'
|
||||
|
||||
Flask
|
||||
=====
|
||||
|
||||
Flask is a lightweight `WSGI`_ web application framework. It is designed
|
||||
to make getting started quick and easy, with the ability to scale up to
|
||||
complex applications. It began as a simple wrapper around `Werkzeug`_
|
||||
and `Jinja`_ and has become one of the most popular Python web
|
||||
application frameworks.
|
||||
|
||||
Flask offers suggestions, but doesn't enforce any dependencies or
|
||||
project layout. It is up to the developer to choose the tools and
|
||||
libraries they want to use. There are many extensions provided by the
|
||||
community that make adding new functionality easy.
|
||||
|
||||
.. _WSGI: https://wsgi.readthedocs.io/
|
||||
.. _Werkzeug: https://werkzeug.palletsprojects.com/
|
||||
.. _Jinja: https://jinja.palletsprojects.com/
|
||||
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
||||
Install and update using `pip`_:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
$ pip install -U Flask
|
||||
|
||||
.. _pip: https://pip.pypa.io/en/stable/getting-started/
|
||||
|
||||
|
||||
A Simple Example
|
||||
----------------
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# save this as app.py
|
||||
from flask import Flask
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
@app.route("/")
|
||||
def hello():
|
||||
return "Hello, World!"
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
$ flask run
|
||||
* Running on http://127.0.0.1:5000/ (Press CTRL+C to quit)
|
||||
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
For guidance on setting up a development environment and how to make a
|
||||
contribution to Flask, see the `contributing guidelines`_.
|
||||
|
||||
.. _contributing guidelines: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst
|
||||
|
||||
|
||||
Donate
|
||||
------
|
||||
|
||||
The Pallets organization develops and supports Flask and the libraries
|
||||
it uses. In order to grow the community of contributors and users, and
|
||||
allow the maintainers to devote more time to the projects, `please
|
||||
donate today`_.
|
||||
|
||||
.. _please donate today: https://palletsprojects.com/donate
|
||||
|
||||
|
||||
Links
|
||||
-----
|
||||
|
||||
- Documentation: https://flask.palletsprojects.com/
|
||||
- Changes: https://flask.palletsprojects.com/changes/
|
||||
- PyPI Releases: https://pypi.org/project/Flask/
|
||||
- Source Code: https://github.com/pallets/flask/
|
||||
- Issue Tracker: https://github.com/pallets/flask/issues/
|
||||
- Website: https://palletsprojects.com/p/flask/
|
||||
- Twitter: https://twitter.com/PalletsTeam
|
||||
- Chat: https://discord.gg/pallets
|
||||
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
../../../bin/flask,sha256=sR4yhMrSywRbEyuOk0H-s-iJmN0WEkOx-nXnh3QFpg8,289
|
||||
Flask-2.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
Flask-2.0.3.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
|
||||
Flask-2.0.3.dist-info/METADATA,sha256=jK50YtxZfODLQP_GF1sNH6dOXRCI5bBLrAc7pWQwuXw,3839
|
||||
Flask-2.0.3.dist-info/RECORD,,
|
||||
Flask-2.0.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
||||
Flask-2.0.3.dist-info/entry_points.txt,sha256=s3MqQpduU25y4dq3ftBYD6bMVdVnbMpZP-sUNw0zw0k,41
|
||||
Flask-2.0.3.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6
|
||||
flask/__init__.py,sha256=ubQS5Xt6LMjPSwGO3Jksi5yx8AyuU0vT_VdHjt0j97A,2251
|
||||
flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30
|
||||
flask/__pycache__/__init__.cpython-37.pyc,,
|
||||
flask/__pycache__/__main__.cpython-37.pyc,,
|
||||
flask/__pycache__/app.cpython-37.pyc,,
|
||||
flask/__pycache__/blueprints.cpython-37.pyc,,
|
||||
flask/__pycache__/cli.cpython-37.pyc,,
|
||||
flask/__pycache__/config.cpython-37.pyc,,
|
||||
flask/__pycache__/ctx.cpython-37.pyc,,
|
||||
flask/__pycache__/debughelpers.cpython-37.pyc,,
|
||||
flask/__pycache__/globals.cpython-37.pyc,,
|
||||
flask/__pycache__/helpers.cpython-37.pyc,,
|
||||
flask/__pycache__/logging.cpython-37.pyc,,
|
||||
flask/__pycache__/scaffold.cpython-37.pyc,,
|
||||
flask/__pycache__/sessions.cpython-37.pyc,,
|
||||
flask/__pycache__/signals.cpython-37.pyc,,
|
||||
flask/__pycache__/templating.cpython-37.pyc,,
|
||||
flask/__pycache__/testing.cpython-37.pyc,,
|
||||
flask/__pycache__/typing.cpython-37.pyc,,
|
||||
flask/__pycache__/views.cpython-37.pyc,,
|
||||
flask/__pycache__/wrappers.cpython-37.pyc,,
|
||||
flask/app.py,sha256=ectBbi9hGmVHAse5TNcFQZIDRkDAxYUAnLgfuKD0Xws,81975
|
||||
flask/blueprints.py,sha256=AkAVXZ_MMkjwjklzCAMdBNowTiM0wVQPynnUnXjTL2M,23781
|
||||
flask/cli.py,sha256=9v7FDIwWZ3QZsR6ka-qMYzMxSThfmQ4PEA4lkI38R6c,32287
|
||||
flask/config.py,sha256=70Uyjh1Jzb9MfTCT7NDhuZWAzyIEu-TIyk6-22MP3zQ,11285
|
||||
flask/ctx.py,sha256=Rmw5VOFQdbomLoCQPbU_0FbQkuB56CtpnQVU4yzXYB8,17589
|
||||
flask/debughelpers.py,sha256=W82-xrRmodjopBngI9roYH-q08EbQwN2HEGfDAi6SA0,6184
|
||||
flask/globals.py,sha256=cWd-R2hUH3VqPhnmQNww892tQS6Yjqg_wg8UvW1M7NM,1723
|
||||
flask/helpers.py,sha256=kstplLDtD0Isobilp87Lfmwq1tk2spnHjUf_O5-EhoE,30618
|
||||
flask/json/__init__.py,sha256=_YIqOsy8YOSyoLbplFtNcKvF5kwNKenmJ87Ub2Myc0k,12104
|
||||
flask/json/__pycache__/__init__.cpython-37.pyc,,
|
||||
flask/json/__pycache__/tag.cpython-37.pyc,,
|
||||
flask/json/tag.py,sha256=fys3HBLssWHuMAIJuTcf2K0bCtosePBKXIWASZEEjnU,8857
|
||||
flask/logging.py,sha256=1o_hirVGqdj7SBdETnhX7IAjklG89RXlrwz_2CjzQQE,2273
|
||||
flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
flask/scaffold.py,sha256=fM9mRy7QBh9fhJ0VTogVx900dDa5oxz8FOw6OK5F-TU,32796
|
||||
flask/sessions.py,sha256=46jK4JlcdeBiYbDWTZJn_6u8EqDV-ByRdhlKrbgFi5M,15714
|
||||
flask/signals.py,sha256=H7QwDciK-dtBxinjKpexpglP0E6k0MJILiFWTItfmqU,2136
|
||||
flask/templating.py,sha256=l96VD39JQ0nue4Bcj7wZ4-FWWs-ppLxvgBCpwDQ4KAk,5626
|
||||
flask/testing.py,sha256=T3mr2PLQEkfxoftSTxmGfTtb_FSX3PgfGT8DUGNPWuk,10840
|
||||
flask/typing.py,sha256=L5JMltVjj8fovGS1hrMpb13IPfsFDESCCnpRN5CPT4U,1844
|
||||
flask/views.py,sha256=nhq31TRB5Z-z2mjFGZACaaB2Et5XPCmWhWxJxOvLWww,5948
|
||||
flask/wrappers.py,sha256=VndbHPRBSUUOejmd2Y3ydkoCVUtsS2OJIdJEVIkBVD8,5604
|
||||
@@ -1,5 +0,0 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.37.1)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
[console_scripts]
|
||||
flask = flask.cli:main
|
||||
@@ -1 +0,0 @@
|
||||
flask
|
||||
@@ -1 +0,0 @@
|
||||
pip
|
||||
@@ -1,28 +0,0 @@
|
||||
Copyright 2007 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@@ -1,113 +0,0 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Jinja2
|
||||
Version: 3.0.3
|
||||
Summary: A very fast and expressive template engine.
|
||||
Home-page: https://palletsprojects.com/p/jinja/
|
||||
Author: Armin Ronacher
|
||||
Author-email: armin.ronacher@active-4.com
|
||||
Maintainer: Pallets
|
||||
Maintainer-email: contact@palletsprojects.com
|
||||
License: BSD-3-Clause
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Documentation, https://jinja.palletsprojects.com/
|
||||
Project-URL: Changes, https://jinja.palletsprojects.com/changes/
|
||||
Project-URL: Source Code, https://github.com/pallets/jinja/
|
||||
Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/
|
||||
Project-URL: Twitter, https://twitter.com/PalletsTeam
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Classifier: Topic :: Text Processing :: Markup :: HTML
|
||||
Requires-Python: >=3.6
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE.rst
|
||||
Requires-Dist: MarkupSafe (>=2.0)
|
||||
Provides-Extra: i18n
|
||||
Requires-Dist: Babel (>=2.7) ; extra == 'i18n'
|
||||
|
||||
Jinja
|
||||
=====
|
||||
|
||||
Jinja is a fast, expressive, extensible templating engine. Special
|
||||
placeholders in the template allow writing code similar to Python
|
||||
syntax. Then the template is passed data to render the final document.
|
||||
|
||||
It includes:
|
||||
|
||||
- Template inheritance and inclusion.
|
||||
- Define and import macros within templates.
|
||||
- HTML templates can use autoescaping to prevent XSS from untrusted
|
||||
user input.
|
||||
- A sandboxed environment can safely render untrusted templates.
|
||||
- AsyncIO support for generating templates and calling async
|
||||
functions.
|
||||
- I18N support with Babel.
|
||||
- Templates are compiled to optimized Python code just-in-time and
|
||||
cached, or can be compiled ahead-of-time.
|
||||
- Exceptions point to the correct line in templates to make debugging
|
||||
easier.
|
||||
- Extensible filters, tests, functions, and even syntax.
|
||||
|
||||
Jinja's philosophy is that while application logic belongs in Python if
|
||||
possible, it shouldn't make the template designer's job difficult by
|
||||
restricting functionality too much.
|
||||
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
||||
Install and update using `pip`_:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
$ pip install -U Jinja2
|
||||
|
||||
.. _pip: https://pip.pypa.io/en/stable/getting-started/
|
||||
|
||||
|
||||
In A Nutshell
|
||||
-------------
|
||||
|
||||
.. code-block:: jinja
|
||||
|
||||
{% extends "base.html" %}
|
||||
{% block title %}Members{% endblock %}
|
||||
{% block content %}
|
||||
<ul>
|
||||
{% for user in users %}
|
||||
<li><a href="{{ user.url }}">{{ user.username }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endblock %}
|
||||
|
||||
|
||||
Donate
|
||||
------
|
||||
|
||||
The Pallets organization develops and supports Jinja and other popular
|
||||
packages. In order to grow the community of contributors and users, and
|
||||
allow the maintainers to devote more time to the projects, `please
|
||||
donate today`_.
|
||||
|
||||
.. _please donate today: https://palletsprojects.com/donate
|
||||
|
||||
|
||||
Links
|
||||
-----
|
||||
|
||||
- Documentation: https://jinja.palletsprojects.com/
|
||||
- Changes: https://jinja.palletsprojects.com/changes/
|
||||
- PyPI Releases: https://pypi.org/project/Jinja2/
|
||||
- Source Code: https://github.com/pallets/jinja/
|
||||
- Issue Tracker: https://github.com/pallets/jinja/issues/
|
||||
- Website: https://palletsprojects.com/p/jinja/
|
||||
- Twitter: https://twitter.com/PalletsTeam
|
||||
- Chat: https://discord.gg/pallets
|
||||
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
Jinja2-3.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
Jinja2-3.0.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
|
||||
Jinja2-3.0.3.dist-info/METADATA,sha256=uvKoBSMLvh0qHK-6khEqSe1yOV4jxFzbPSREOp-3BXk,3539
|
||||
Jinja2-3.0.3.dist-info/RECORD,,
|
||||
Jinja2-3.0.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
|
||||
Jinja2-3.0.3.dist-info/entry_points.txt,sha256=Qy_DkVo6Xj_zzOtmErrATe8lHZhOqdjpt3e4JJAGyi8,61
|
||||
Jinja2-3.0.3.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
|
||||
jinja2/__init__.py,sha256=V3JjnTV-nyIHN6rwj03N1M11fegjGvv-weiHMQwH1pk,2205
|
||||
jinja2/__pycache__/__init__.cpython-37.pyc,,
|
||||
jinja2/__pycache__/_identifier.cpython-37.pyc,,
|
||||
jinja2/__pycache__/async_utils.cpython-37.pyc,,
|
||||
jinja2/__pycache__/bccache.cpython-37.pyc,,
|
||||
jinja2/__pycache__/compiler.cpython-37.pyc,,
|
||||
jinja2/__pycache__/constants.cpython-37.pyc,,
|
||||
jinja2/__pycache__/debug.cpython-37.pyc,,
|
||||
jinja2/__pycache__/defaults.cpython-37.pyc,,
|
||||
jinja2/__pycache__/environment.cpython-37.pyc,,
|
||||
jinja2/__pycache__/exceptions.cpython-37.pyc,,
|
||||
jinja2/__pycache__/ext.cpython-37.pyc,,
|
||||
jinja2/__pycache__/filters.cpython-37.pyc,,
|
||||
jinja2/__pycache__/idtracking.cpython-37.pyc,,
|
||||
jinja2/__pycache__/lexer.cpython-37.pyc,,
|
||||
jinja2/__pycache__/loaders.cpython-37.pyc,,
|
||||
jinja2/__pycache__/meta.cpython-37.pyc,,
|
||||
jinja2/__pycache__/nativetypes.cpython-37.pyc,,
|
||||
jinja2/__pycache__/nodes.cpython-37.pyc,,
|
||||
jinja2/__pycache__/optimizer.cpython-37.pyc,,
|
||||
jinja2/__pycache__/parser.cpython-37.pyc,,
|
||||
jinja2/__pycache__/runtime.cpython-37.pyc,,
|
||||
jinja2/__pycache__/sandbox.cpython-37.pyc,,
|
||||
jinja2/__pycache__/tests.cpython-37.pyc,,
|
||||
jinja2/__pycache__/utils.cpython-37.pyc,,
|
||||
jinja2/__pycache__/visitor.cpython-37.pyc,,
|
||||
jinja2/_identifier.py,sha256=EdgGJKi7O1yvr4yFlvqPNEqV6M1qHyQr8Gt8GmVTKVM,1775
|
||||
jinja2/async_utils.py,sha256=jBcJSmLoQa2PjJdNcOpwaUmBxFNE9rZNwMF7Ob3dP9I,1947
|
||||
jinja2/bccache.py,sha256=v5rKAlYxIvfJEa0uGzAC6yCYSS3KuXT5Eqi-n9qvNi8,12670
|
||||
jinja2/compiler.py,sha256=v7zKz-mgSYXmfXD9mRmi2BU0B6Z-1RGZmOXCrsPKzc0,72209
|
||||
jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433
|
||||
jinja2/debug.py,sha256=r0JL0vfO7HPlyKZEdr6eVlg7HoIg2OQGmJ7SeUEyAeI,8494
|
||||
jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267
|
||||
jinja2/environment.py,sha256=Vz20npBX5-SUH_eguQuxrSQDEsLFjho0qcHLdMhY3hA,60983
|
||||
jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071
|
||||
jinja2/ext.py,sha256=44SjDjeYkkxQTpmC2BetOTxEFMgQ42p2dfSwXmPFcSo,32122
|
||||
jinja2/filters.py,sha256=jusKTZbd0ddZMaibZkxMUVKNsOsaYtOq_Il8Imtx4BE,52609
|
||||
jinja2/idtracking.py,sha256=WekexMql3u5n3vDxFsQ_i8HW0j24AtjWTjrPBLWrHww,10721
|
||||
jinja2/lexer.py,sha256=qNEQqDQw_zO5EaH6rFQsER7Qwn2du0o22prB-TR11HE,29930
|
||||
jinja2/loaders.py,sha256=1MjXJOU6p4VywFqtpDZhtvtT_vIlmHnZKMKHHw4SZzA,22754
|
||||
jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396
|
||||
jinja2/nativetypes.py,sha256=KCJl71MogrDih_BHBu6xV5p7Cr_jggAgu-shKTg6L28,3969
|
||||
jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550
|
||||
jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650
|
||||
jinja2/parser.py,sha256=kHnU8v92GwMYkfr0MVakWv8UlSf_kJPx8LUsgQMof70,39767
|
||||
jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
jinja2/runtime.py,sha256=wVRlkEmAgNU67AIQDqLvI6UkNLkzDqpLA-z4Mi3vl3g,35054
|
||||
jinja2/sandbox.py,sha256=-8zxR6TO9kUkciAVFsIKu8Oq-C7PTeYEdZ5TtA55-gw,14600
|
||||
jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905
|
||||
jinja2/utils.py,sha256=udQxWIKaq4QDCZiXN31ngKOaGGdaMA5fl0JMaM-F6fg,26971
|
||||
jinja2/visitor.py,sha256=ZmeLuTj66ic35-uFH-1m0EKXiw4ObDDb_WuE6h5vPFg,3572
|
||||
@@ -1,5 +0,0 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.37.0)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
[babel.extractors]
|
||||
jinja2 = jinja2.ext:babel_extract [i18n]
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
jinja2
|
||||
@@ -1 +0,0 @@
|
||||
pip
|
||||
@@ -1,28 +0,0 @@
|
||||
Copyright 2010 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@@ -1,101 +0,0 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: MarkupSafe
|
||||
Version: 2.1.0
|
||||
Summary: Safely add untrusted strings to HTML/XML markup.
|
||||
Home-page: https://palletsprojects.com/p/markupsafe/
|
||||
Author: Armin Ronacher
|
||||
Author-email: armin.ronacher@active-4.com
|
||||
Maintainer: Pallets
|
||||
Maintainer-email: contact@palletsprojects.com
|
||||
License: BSD-3-Clause
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
|
||||
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
|
||||
Project-URL: Source Code, https://github.com/pallets/markupsafe/
|
||||
Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/
|
||||
Project-URL: Twitter, https://twitter.com/PalletsTeam
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Classifier: Topic :: Text Processing :: Markup :: HTML
|
||||
Requires-Python: >=3.7
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE.rst
|
||||
|
||||
MarkupSafe
|
||||
==========
|
||||
|
||||
MarkupSafe implements a text object that escapes characters so it is
|
||||
safe to use in HTML and XML. Characters that have special meanings are
|
||||
replaced so that they display as the actual characters. This mitigates
|
||||
injection attacks, meaning untrusted user input can safely be displayed
|
||||
on a page.
|
||||
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
||||
Install and update using `pip`_:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
pip install -U MarkupSafe
|
||||
|
||||
.. _pip: https://pip.pypa.io/en/stable/getting-started/
|
||||
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> from markupsafe import Markup, escape
|
||||
|
||||
>>> # escape replaces special characters and wraps in Markup
|
||||
>>> escape("<script>alert(document.cookie);</script>")
|
||||
Markup('<script>alert(document.cookie);</script>')
|
||||
|
||||
>>> # wrap in Markup to mark text "safe" and prevent escaping
|
||||
>>> Markup("<strong>Hello</strong>")
|
||||
Markup('<strong>hello</strong>')
|
||||
|
||||
>>> escape(Markup("<strong>Hello</strong>"))
|
||||
Markup('<strong>hello</strong>')
|
||||
|
||||
>>> # Markup is a str subclass
|
||||
>>> # methods and operators escape their arguments
|
||||
>>> template = Markup("Hello <em>{name}</em>")
|
||||
>>> template.format(name='"World"')
|
||||
Markup('Hello <em>"World"</em>')
|
||||
|
||||
|
||||
Donate
|
||||
------
|
||||
|
||||
The Pallets organization develops and supports MarkupSafe and other
|
||||
popular packages. In order to grow the community of contributors and
|
||||
users, and allow the maintainers to devote more time to the projects,
|
||||
`please donate today`_.
|
||||
|
||||
.. _please donate today: https://palletsprojects.com/donate
|
||||
|
||||
|
||||
Links
|
||||
-----
|
||||
|
||||
- Documentation: https://markupsafe.palletsprojects.com/
|
||||
- Changes: https://markupsafe.palletsprojects.com/changes/
|
||||
- PyPI Releases: https://pypi.org/project/MarkupSafe/
|
||||
- Source Code: https://github.com/pallets/markupsafe/
|
||||
- Issue Tracker: https://github.com/pallets/markupsafe/issues/
|
||||
- Website: https://palletsprojects.com/p/markupsafe/
|
||||
- Twitter: https://twitter.com/PalletsTeam
|
||||
- Chat: https://discord.gg/pallets
|
||||
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
MarkupSafe-2.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
MarkupSafe-2.1.0.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
|
||||
MarkupSafe-2.1.0.dist-info/METADATA,sha256=585PQ3HNHmJeHpbdXckhscUSR9AaQnh5RWaaMtCB4_8,3242
|
||||
MarkupSafe-2.1.0.dist-info/RECORD,,
|
||||
MarkupSafe-2.1.0.dist-info/WHEEL,sha256=MTGYRPpheMM24WbmS90Rp7e0ky6cZJ8CkS_x1qgwoGk,110
|
||||
MarkupSafe-2.1.0.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
|
||||
markupsafe/__init__.py,sha256=7p5vza0YNtAtfancH4eGnJLe9V4uEFATInoX7Ko7aig,9130
|
||||
markupsafe/__pycache__/__init__.cpython-37.pyc,,
|
||||
markupsafe/__pycache__/_native.cpython-37.pyc,,
|
||||
markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713
|
||||
markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083
|
||||
markupsafe/_speedups.cpython-37m-darwin.so,sha256=bsPzxjMESIuCzC-OuQDjL_P579nPHCp-GkcwUbKjrYo,34984
|
||||
markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229
|
||||
markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
@@ -1,5 +0,0 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.37.0)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp37-cp37m-macosx_10_9_x86_64
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
markupsafe
|
||||
@@ -1 +0,0 @@
|
||||
pip
|
||||
@@ -1,28 +0,0 @@
|
||||
Copyright 2007 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@@ -1,129 +0,0 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Werkzeug
|
||||
Version: 2.0.3
|
||||
Summary: The comprehensive WSGI web application library.
|
||||
Home-page: https://palletsprojects.com/p/werkzeug/
|
||||
Author: Armin Ronacher
|
||||
Author-email: armin.ronacher@active-4.com
|
||||
Maintainer: Pallets
|
||||
Maintainer-email: contact@palletsprojects.com
|
||||
License: BSD-3-Clause
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Documentation, https://werkzeug.palletsprojects.com/
|
||||
Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/
|
||||
Project-URL: Source Code, https://github.com/pallets/werkzeug/
|
||||
Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/
|
||||
Project-URL: Twitter, https://twitter.com/PalletsTeam
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware
|
||||
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
|
||||
Requires-Python: >=3.6
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE.rst
|
||||
Requires-Dist: dataclasses ; python_version < "3.7"
|
||||
Provides-Extra: watchdog
|
||||
Requires-Dist: watchdog ; extra == 'watchdog'
|
||||
|
||||
Werkzeug
|
||||
========
|
||||
|
||||
*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff")
|
||||
|
||||
Werkzeug is a comprehensive `WSGI`_ web application library. It began as
|
||||
a simple collection of various utilities for WSGI applications and has
|
||||
become one of the most advanced WSGI utility libraries.
|
||||
|
||||
It includes:
|
||||
|
||||
- An interactive debugger that allows inspecting stack traces and
|
||||
source code in the browser with an interactive interpreter for any
|
||||
frame in the stack.
|
||||
- A full-featured request object with objects to interact with
|
||||
headers, query args, form data, files, and cookies.
|
||||
- A response object that can wrap other WSGI applications and handle
|
||||
streaming data.
|
||||
- A routing system for matching URLs to endpoints and generating URLs
|
||||
for endpoints, with an extensible system for capturing variables
|
||||
from URLs.
|
||||
- HTTP utilities to handle entity tags, cache control, dates, user
|
||||
agents, cookies, files, and more.
|
||||
- A threaded WSGI server for use while developing applications
|
||||
locally.
|
||||
- A test client for simulating HTTP requests during testing without
|
||||
requiring running a server.
|
||||
|
||||
Werkzeug doesn't enforce any dependencies. It is up to the developer to
|
||||
choose a template engine, database adapter, and even how to handle
|
||||
requests. It can be used to build all sorts of end user applications
|
||||
such as blogs, wikis, or bulletin boards.
|
||||
|
||||
`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while
|
||||
providing more structure and patterns for defining powerful
|
||||
applications.
|
||||
|
||||
.. _WSGI: https://wsgi.readthedocs.io/en/latest/
|
||||
.. _Flask: https://www.palletsprojects.com/p/flask/
|
||||
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
||||
Install and update using `pip`_:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
pip install -U Werkzeug
|
||||
|
||||
.. _pip: https://pip.pypa.io/en/stable/getting-started/
|
||||
|
||||
|
||||
A Simple Example
|
||||
----------------
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from werkzeug.wrappers import Request, Response
|
||||
|
||||
@Request.application
|
||||
def application(request):
|
||||
return Response('Hello, World!')
|
||||
|
||||
if __name__ == '__main__':
|
||||
from werkzeug.serving import run_simple
|
||||
run_simple('localhost', 4000, application)
|
||||
|
||||
|
||||
Donate
|
||||
------
|
||||
|
||||
The Pallets organization develops and supports Werkzeug and other
|
||||
popular packages. In order to grow the community of contributors and
|
||||
users, and allow the maintainers to devote more time to the projects,
|
||||
`please donate today`_.
|
||||
|
||||
.. _please donate today: https://palletsprojects.com/donate
|
||||
|
||||
|
||||
Links
|
||||
-----
|
||||
|
||||
- Documentation: https://werkzeug.palletsprojects.com/
|
||||
- Changes: https://werkzeug.palletsprojects.com/changes/
|
||||
- PyPI Releases: https://pypi.org/project/Werkzeug/
|
||||
- Source Code: https://github.com/pallets/werkzeug/
|
||||
- Issue Tracker: https://github.com/pallets/werkzeug/issues/
|
||||
- Website: https://palletsprojects.com/p/werkzeug/
|
||||
- Twitter: https://twitter.com/PalletsTeam
|
||||
- Chat: https://discord.gg/pallets
|
||||
|
||||
|
||||
@@ -1,111 +0,0 @@
|
||||
Werkzeug-2.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
Werkzeug-2.0.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
|
||||
Werkzeug-2.0.3.dist-info/METADATA,sha256=Rxzda7JFgpyr7oqR42Z57bNxRp-pjna_KYhcivqvXY4,4452
|
||||
Werkzeug-2.0.3.dist-info/RECORD,,
|
||||
Werkzeug-2.0.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
||||
Werkzeug-2.0.3.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9
|
||||
werkzeug/__init__.py,sha256=2frslFsD2EbmZUTfzZ5njDmic66S5f6XMdT24AOGYhk,188
|
||||
werkzeug/__pycache__/__init__.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/_internal.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/_reloader.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/datastructures.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/exceptions.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/filesystem.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/formparser.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/http.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/local.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/routing.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/security.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/serving.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/test.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/testapp.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/urls.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/user_agent.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/useragents.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/utils.cpython-37.pyc,,
|
||||
werkzeug/__pycache__/wsgi.cpython-37.pyc,,
|
||||
werkzeug/_internal.py,sha256=_0GZM3B6gE4eoRTp9K6T7spvY5qJQ9Od9GRIp4lZpzU,18572
|
||||
werkzeug/_reloader.py,sha256=B1hEfgsUOz2IginBQM5Zak_eaIF7gr3GS5-0x2OHvAE,13950
|
||||
werkzeug/datastructures.py,sha256=m79A8rHQEt5B7qVqyrjARXzHL66Katn8S92urGscTw4,97929
|
||||
werkzeug/datastructures.pyi,sha256=uFOqffFoaOEa-43IPlK9otu1X4lDOoqIgG4ULS0ObiE,34119
|
||||
werkzeug/debug/__init__.py,sha256=Vn0WQfD9w6DGg1j_2gWpSKKTaFlwxhbCBwi7QQMz1s8,17917
|
||||
werkzeug/debug/__pycache__/__init__.cpython-37.pyc,,
|
||||
werkzeug/debug/__pycache__/console.cpython-37.pyc,,
|
||||
werkzeug/debug/__pycache__/repr.cpython-37.pyc,,
|
||||
werkzeug/debug/__pycache__/tbtools.cpython-37.pyc,,
|
||||
werkzeug/debug/console.py,sha256=jJjid1dIlCNWbDHXTtjJW5XqNfPjSOKbtUmEX5weNdY,5976
|
||||
werkzeug/debug/repr.py,sha256=QCSHENKsChEZDCIApkVi_UNjhJ77v8BMXK1OfxO189M,9483
|
||||
werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673
|
||||
werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222
|
||||
werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507
|
||||
werkzeug/debug/shared/debugger.js,sha256=tg42SZs1SVmYWZ-_Fj5ELK5-FLHnGNQrei0K2By8Bw8,10521
|
||||
werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191
|
||||
werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200
|
||||
werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818
|
||||
werkzeug/debug/shared/style.css,sha256=h1ZSUVaKNpfbfcYzRb513WAhPySGDQom1uih3uEDxPw,6704
|
||||
werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220
|
||||
werkzeug/debug/tbtools.py,sha256=khUCWQcpbxzeOs5NlT-E9n99BI-ELH9K9RY5exc-X_o,19362
|
||||
werkzeug/exceptions.py,sha256=WLCqXBEHm5Xj2d2sfON9XIneeRS3MlNXKH85k1AQIJU,28776
|
||||
werkzeug/filesystem.py,sha256=JS2Dv2QF98WILxY4_thHl-WMcUcwluF_4igkDPaP1l4,1956
|
||||
werkzeug/formparser.py,sha256=X-p3Ek4ji8XrKrbmaWxr8StLSc6iuksbpIeweaabs4s,17400
|
||||
werkzeug/http.py,sha256=Xm3WhYKRQKh_J12514F8y8prILldXceOceeO8EiQEZI,45222
|
||||
werkzeug/local.py,sha256=5HbGdD0vVNJgXH3SXfkMjdxIpzy7iqkHJMGCNjljFNo,23664
|
||||
werkzeug/middleware/__init__.py,sha256=qfqgdT5npwG9ses3-FXQJf3aB95JYP1zchetH_T3PUw,500
|
||||
werkzeug/middleware/__pycache__/__init__.cpython-37.pyc,,
|
||||
werkzeug/middleware/__pycache__/dispatcher.cpython-37.pyc,,
|
||||
werkzeug/middleware/__pycache__/http_proxy.cpython-37.pyc,,
|
||||
werkzeug/middleware/__pycache__/lint.cpython-37.pyc,,
|
||||
werkzeug/middleware/__pycache__/profiler.cpython-37.pyc,,
|
||||
werkzeug/middleware/__pycache__/proxy_fix.cpython-37.pyc,,
|
||||
werkzeug/middleware/__pycache__/shared_data.cpython-37.pyc,,
|
||||
werkzeug/middleware/dispatcher.py,sha256=Fh_w-KyWnTSYF-Lfv5dimQ7THSS7afPAZMmvc4zF1gg,2580
|
||||
werkzeug/middleware/http_proxy.py,sha256=HE8VyhS7CR-E1O6_9b68huv8FLgGGR1DLYqkS3Xcp3Q,7558
|
||||
werkzeug/middleware/lint.py,sha256=sAg3GcOhICIkwYX5bJGG8n8iebX0Yipq_UH0HvrBvoU,13964
|
||||
werkzeug/middleware/profiler.py,sha256=QkXk7cqnaPnF8wQu-5SyPCIOT3_kdABUBorQOghVNOA,4899
|
||||
werkzeug/middleware/proxy_fix.py,sha256=l7LC_LDu0Yd4SvUxS5SFigAJMzcIOGm6LNKl9IXJBSU,6974
|
||||
werkzeug/middleware/shared_data.py,sha256=xydEqOhAGg0aQJEllPDVfz2-8jHwWvJpAxfPsfPCu7k,10960
|
||||
werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
werkzeug/routing.py,sha256=rATL0ZkbTBgvdgJp6WgihuwKyivCF8K4a8kQ4hFgY6A,84581
|
||||
werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
werkzeug/sansio/__pycache__/__init__.cpython-37.pyc,,
|
||||
werkzeug/sansio/__pycache__/multipart.cpython-37.pyc,,
|
||||
werkzeug/sansio/__pycache__/request.cpython-37.pyc,,
|
||||
werkzeug/sansio/__pycache__/response.cpython-37.pyc,,
|
||||
werkzeug/sansio/__pycache__/utils.cpython-37.pyc,,
|
||||
werkzeug/sansio/multipart.py,sha256=BRjBk_mCPjSJzwNVvBgmrJGk3QxA9pYfsgzFki28bxc,8751
|
||||
werkzeug/sansio/request.py,sha256=kt7fizz15HPuYKYU1_3TTEkNSuXeeaM4aLcjW84qvv4,20247
|
||||
werkzeug/sansio/response.py,sha256=zvCq9HSBBZGBd5Gg412BY9RZIwnKsJl5Kzfd3Kl9sSo,26098
|
||||
werkzeug/sansio/utils.py,sha256=V5v-UUnX8pm4RehP9Tt_NiUSOJGJGUvKjlW0eOIQldM,4164
|
||||
werkzeug/security.py,sha256=gPDRuCjkjWrcqj99tBMq8_nHFZLFQjgoW5Ga5XIw9jo,8158
|
||||
werkzeug/serving.py,sha256=6aV-RKbZm4rUHveQGuh4SY0wFZTmXyR43yD_kCQm8Wo,38287
|
||||
werkzeug/test.py,sha256=eUORFaeIDXcmncLdYxgFqYiVdolZkYRY67QV1_ATk20,48235
|
||||
werkzeug/testapp.py,sha256=f48prWSGJhbSrvYb8e1fnAah4BkrLb0enHSdChgsjBY,9471
|
||||
werkzeug/urls.py,sha256=Du2lreBHvgBh5c2_bcx72g3hzV2ZabXYZsp-picUIJs,41023
|
||||
werkzeug/user_agent.py,sha256=WclZhpvgLurMF45hsioSbS75H1Zb4iMQGKN3_yZ2oKo,1420
|
||||
werkzeug/useragents.py,sha256=G8tmv_6vxJaPrLQH3eODNgIYe0_V6KETROQlJI-WxDE,7264
|
||||
werkzeug/utils.py,sha256=D_dnCLUfodQ4k0GRSpnI6qDoVoaX7-Dza57bx7sabG0,37101
|
||||
werkzeug/wrappers/__init__.py,sha256=-s75nPbyXHzU_rwmLPDhoMuGbEUk0jZT_n0ZQAOFGf8,654
|
||||
werkzeug/wrappers/__pycache__/__init__.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/accept.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/auth.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/base_request.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/base_response.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/common_descriptors.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/cors.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/etag.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/json.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/request.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/response.cpython-37.pyc,,
|
||||
werkzeug/wrappers/__pycache__/user_agent.cpython-37.pyc,,
|
||||
werkzeug/wrappers/accept.py,sha256=NzyLfKH3qC5cSbkEc5azw5-lp_kU8JIrtc8AdGQ0HBs,413
|
||||
werkzeug/wrappers/auth.py,sha256=ArJiEn8HHzy1B7wUGuN7s3AHpnClKlaDY0F7N7QZSLA,824
|
||||
werkzeug/wrappers/base_request.py,sha256=saz9RyNQkvI_XLPYVm29KijNHmD1YzgxDqa0qHTbgss,1174
|
||||
werkzeug/wrappers/base_response.py,sha256=q_-TaYywT5G4zA-DWDRDJhJSat2_4O7gOPob6ye4_9A,1186
|
||||
werkzeug/wrappers/common_descriptors.py,sha256=aeVFTsTb0HJn5O8zF6WwELEDDULdOLFkWaUrvD1Huds,866
|
||||
werkzeug/wrappers/cors.py,sha256=9Ho7aXd64sB2Msz71jRXAdAI8UyqIJgv-CJsnlfUSzM,814
|
||||
werkzeug/wrappers/etag.py,sha256=7SI34rtlXJHyJlqe8B0dFu4ouo6L0DJmYyqwWoY79oc,814
|
||||
werkzeug/wrappers/json.py,sha256=h_XfBZV5ZETkHYgONuoSyB9KXR9W90mgBh_mFUysp6c,394
|
||||
werkzeug/wrappers/request.py,sha256=I77nwHgCzynmgwJVNw7bo7MfTU_CusNBO0b4TjpIRdQ,24790
|
||||
werkzeug/wrappers/response.py,sha256=c24tBeq8G5RwPCU5iCJvJPaKyUEIrfMiWO4yGtTOwmI,35214
|
||||
werkzeug/wrappers/user_agent.py,sha256=IMUJCFohZSMsBTmqyJZtjG5y4sB1zxQBE690bixb6uY,419
|
||||
werkzeug/wsgi.py,sha256=L7s5-Rlt7BRVEZ1m81MaenGfMDP7yL3p1Kxt9Yssqzg,33727
|
||||
@@ -1,5 +0,0 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.37.1)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
werkzeug
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1 +0,0 @@
|
||||
pip
|
||||
@@ -1,21 +0,0 @@
|
||||
This package contains a modified version of ca-bundle.crt:
|
||||
|
||||
ca-bundle.crt -- Bundle of CA Root Certificates
|
||||
|
||||
Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011#
|
||||
This is a bundle of X.509 certificates of public Certificate Authorities
|
||||
(CA). These were automatically extracted from Mozilla's root certificates
|
||||
file (certdata.txt). This file can be found in the mozilla source tree:
|
||||
http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1#
|
||||
It contains the certificates in PEM format and therefore
|
||||
can be directly used with curl / libcurl / php_curl, or with
|
||||
an Apache+mod_ssl webserver for SSL client authentication.
|
||||
Just configure this file as the SSLCACertificateFile.#
|
||||
|
||||
***** BEGIN LICENSE BLOCK *****
|
||||
This Source Code Form is subject to the terms of the Mozilla Public License,
|
||||
v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
|
||||
one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
***** END LICENSE BLOCK *****
|
||||
@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
|
||||
@@ -1,83 +0,0 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: certifi
|
||||
Version: 2021.10.8
|
||||
Summary: Python package for providing Mozilla's CA Bundle.
|
||||
Home-page: https://certifiio.readthedocs.io/en/latest/
|
||||
Author: Kenneth Reitz
|
||||
Author-email: me@kennethreitz.com
|
||||
License: MPL-2.0
|
||||
Project-URL: Documentation, https://certifiio.readthedocs.io/en/latest/
|
||||
Project-URL: Source, https://github.com/certifi/python-certifi
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
|
||||
Classifier: Natural Language :: English
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.3
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
|
||||
Certifi: Python SSL Certificates
|
||||
================================
|
||||
|
||||
`Certifi`_ provides Mozilla's carefully curated collection of Root Certificates for
|
||||
validating the trustworthiness of SSL certificates while verifying the identity
|
||||
of TLS hosts. It has been extracted from the `Requests`_ project.
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
``certifi`` is available on PyPI. Simply install it with ``pip``::
|
||||
|
||||
$ pip install certifi
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
To reference the installed certificate authority (CA) bundle, you can use the
|
||||
built-in function::
|
||||
|
||||
>>> import certifi
|
||||
|
||||
>>> certifi.where()
|
||||
'/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
|
||||
|
||||
Or from the command line::
|
||||
|
||||
$ python -m certifi
|
||||
/usr/local/lib/python3.7/site-packages/certifi/cacert.pem
|
||||
|
||||
Enjoy!
|
||||
|
||||
1024-bit Root Certificates
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Browsers and certificate authorities have concluded that 1024-bit keys are
|
||||
unacceptably weak for certificates, particularly root certificates. For this
|
||||
reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its
|
||||
bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key)
|
||||
certificate from the same CA. Because Mozilla removed these certificates from
|
||||
its bundle, ``certifi`` removed them as well.
|
||||
|
||||
In previous versions, ``certifi`` provided the ``certifi.old_where()`` function
|
||||
to intentionally re-add the 1024-bit roots back into your bundle. This was not
|
||||
recommended in production and therefore was removed at the end of 2018.
|
||||
|
||||
.. _`Certifi`: https://certifiio.readthedocs.io/en/latest/
|
||||
.. _`Requests`: https://requests.readthedocs.io/en/master/
|
||||
|
||||
Addition/Removal of Certificates
|
||||
--------------------------------
|
||||
|
||||
Certifi does not support any addition/removal or other modification of the
|
||||
CA trust store content. This project is intended to provide a reliable and
|
||||
highly portable root of trust to python deployments. Look to upstream projects
|
||||
for methods to use alternate trust.
|
||||
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
certifi-2021.10.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
certifi-2021.10.8.dist-info/LICENSE,sha256=vp2C82ES-Hp_HXTs1Ih-FGe7roh4qEAEoAEXseR1o-I,1049
|
||||
certifi-2021.10.8.dist-info/METADATA,sha256=iB_zbT1uX_8_NC7iGv0YEB-9b3idhQwHrFTSq8R1kD8,2994
|
||||
certifi-2021.10.8.dist-info/RECORD,,
|
||||
certifi-2021.10.8.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
|
||||
certifi-2021.10.8.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
|
||||
certifi/__init__.py,sha256=xWdRgntT3j1V95zkRipGOg_A1UfEju2FcpujhysZLRI,62
|
||||
certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
|
||||
certifi/__pycache__/__init__.cpython-37.pyc,,
|
||||
certifi/__pycache__/__main__.cpython-37.pyc,,
|
||||
certifi/__pycache__/core.cpython-37.pyc,,
|
||||
certifi/cacert.pem,sha256=-og4Keu4zSpgL5shwfhd4kz0eUnVILzrGCi0zRy2kGw,265969
|
||||
certifi/core.py,sha256=V0uyxKOYdz6ulDSusclrLmjbPgOXsD0BnEf0SQ7OnoE,2303
|
||||
@@ -1,6 +0,0 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.35.1)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py2-none-any
|
||||
Tag: py3-none-any
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
certifi
|
||||
@@ -1,3 +0,0 @@
|
||||
from .core import contents, where
|
||||
|
||||
__version__ = "2021.10.08"
|
||||
@@ -1,12 +0,0 @@
|
||||
import argparse
|
||||
|
||||
from certifi import contents, where
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("-c", "--contents", action="store_true")
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.contents:
|
||||
print(contents())
|
||||
else:
|
||||
print(where())
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large
Load Diff
@@ -1,60 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
certifi.py
|
||||
~~~~~~~~~~
|
||||
|
||||
This module returns the installation location of cacert.pem or its contents.
|
||||
"""
|
||||
import os
|
||||
|
||||
try:
|
||||
from importlib.resources import path as get_path, read_text
|
||||
|
||||
_CACERT_CTX = None
|
||||
_CACERT_PATH = None
|
||||
|
||||
def where():
|
||||
# This is slightly terrible, but we want to delay extracting the file
|
||||
# in cases where we're inside of a zipimport situation until someone
|
||||
# actually calls where(), but we don't want to re-extract the file
|
||||
# on every call of where(), so we'll do it once then store it in a
|
||||
# global variable.
|
||||
global _CACERT_CTX
|
||||
global _CACERT_PATH
|
||||
if _CACERT_PATH is None:
|
||||
# This is slightly janky, the importlib.resources API wants you to
|
||||
# manage the cleanup of this file, so it doesn't actually return a
|
||||
# path, it returns a context manager that will give you the path
|
||||
# when you enter it and will do any cleanup when you leave it. In
|
||||
# the common case of not needing a temporary file, it will just
|
||||
# return the file system location and the __exit__() is a no-op.
|
||||
#
|
||||
# We also have to hold onto the actual context manager, because
|
||||
# it will do the cleanup whenever it gets garbage collected, so
|
||||
# we will also store that at the global level as well.
|
||||
_CACERT_CTX = get_path("certifi", "cacert.pem")
|
||||
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
||||
|
||||
return _CACERT_PATH
|
||||
|
||||
|
||||
except ImportError:
|
||||
# This fallback will work for Python versions prior to 3.7 that lack the
|
||||
# importlib.resources module but relies on the existing `where` function
|
||||
# so won't address issues with environments like PyOxidizer that don't set
|
||||
# __file__ on modules.
|
||||
def read_text(_module, _path, encoding="ascii"):
|
||||
with open(where(), "r", encoding=encoding) as data:
|
||||
return data.read()
|
||||
|
||||
# If we don't have importlib.resources, then we will just do the old logic
|
||||
# of assuming we're on the filesystem and munge the path directly.
|
||||
def where():
|
||||
f = os.path.dirname(__file__)
|
||||
|
||||
return os.path.join(f, "cacert.pem")
|
||||
|
||||
|
||||
def contents():
|
||||
return read_text("certifi", "cacert.pem", encoding="ascii")
|
||||
@@ -1 +0,0 @@
|
||||
pip
|
||||
@@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2019 TAHRI Ahmed R.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -1,269 +0,0 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: charset-normalizer
|
||||
Version: 2.0.12
|
||||
Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
|
||||
Home-page: https://github.com/ousret/charset_normalizer
|
||||
Author: Ahmed TAHRI @Ousret
|
||||
Author-email: ahmed.tahri@cloudnursery.dev
|
||||
License: MIT
|
||||
Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues
|
||||
Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest
|
||||
Keywords: encoding,i18n,txt,text,charset,charset-detector,normalization,unicode,chardet
|
||||
Platform: UNKNOWN
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Topic :: Text Processing :: Linguistic
|
||||
Classifier: Topic :: Utilities
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Typing :: Typed
|
||||
Requires-Python: >=3.5.0
|
||||
Description-Content-Type: text/markdown
|
||||
License-File: LICENSE
|
||||
Provides-Extra: unicode_backport
|
||||
Requires-Dist: unicodedata2 ; extra == 'unicode_backport'
|
||||
|
||||
|
||||
<h1 align="center">Charset Detection, for Everyone 👋 <a href="https://twitter.com/intent/tweet?text=The%20Real%20First%20Universal%20Charset%20%26%20Language%20Detector&url=https://www.github.com/Ousret/charset_normalizer&hashtags=python,encoding,chardet,developers"><img src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"/></a></h1>
|
||||
|
||||
<p align="center">
|
||||
<sup>The Real First Universal Charset Detector</sup><br>
|
||||
<a href="https://pypi.org/project/charset-normalizer">
|
||||
<img src="https://img.shields.io/pypi/pyversions/charset_normalizer.svg?orange=blue" />
|
||||
</a>
|
||||
<a href="https://codecov.io/gh/Ousret/charset_normalizer">
|
||||
<img src="https://codecov.io/gh/Ousret/charset_normalizer/branch/master/graph/badge.svg" />
|
||||
</a>
|
||||
<a href="https://pepy.tech/project/charset-normalizer/">
|
||||
<img alt="Download Count Total" src="https://pepy.tech/badge/charset-normalizer/month" />
|
||||
</a>
|
||||
</p>
|
||||
|
||||
> A library that helps you read text from an unknown charset encoding.<br /> Motivated by `chardet`,
|
||||
> I'm trying to resolve the issue by taking a new approach.
|
||||
> All IANA character set names for which the Python core library provides codecs are supported.
|
||||
|
||||
<p align="center">
|
||||
>>>>> <a href="https://charsetnormalizerweb.ousret.now.sh" target="_blank">👉 Try Me Online Now, Then Adopt Me 👈 </a> <<<<<
|
||||
</p>
|
||||
|
||||
This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
|
||||
|
||||
| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
|
||||
| ------------- | :-------------: | :------------------: | :------------------: |
|
||||
| `Fast` | ❌<br> | ✅<br> | ✅ <br> |
|
||||
| `Universal**` | ❌ | ✅ | ❌ |
|
||||
| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
|
||||
| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
|
||||
| `Free & Open` | ✅ | ✅ | ✅ |
|
||||
| `License` | LGPL-2.1 | MIT | MPL-1.1
|
||||
| `Native Python` | ✅ | ✅ | ❌ |
|
||||
| `Detect spoken language` | ❌ | ✅ | N/A |
|
||||
| `Supported Encoding` | 30 | :tada: [93](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40
|
||||
|
||||
<p align="center">
|
||||
<img src="https://i.imgflip.com/373iay.gif" alt="Reading Normalized Text" width="226"/><img src="https://media.tenor.com/images/c0180f70732a18b4965448d33adba3d0/tenor.gif" alt="Cat Reading Text" width="200"/>
|
||||
|
||||
*\*\* : They are clearly using specific code for a specific encoding even if covering most of used one*<br>
|
||||
Did you got there because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
|
||||
|
||||
## ⭐ Your support
|
||||
|
||||
*Fork, test-it, star-it, submit your ideas! We do listen.*
|
||||
|
||||
## ⚡ Performance
|
||||
|
||||
This package offer better performance than its counterpart Chardet. Here are some numbers.
|
||||
|
||||
| Package | Accuracy | Mean per file (ms) | File per sec (est) |
|
||||
| ------------- | :-------------: | :------------------: | :------------------: |
|
||||
| [chardet](https://github.com/chardet/chardet) | 92 % | 220 ms | 5 file/sec |
|
||||
| charset-normalizer | **98 %** | **40 ms** | 25 file/sec |
|
||||
|
||||
| Package | 99th percentile | 95th percentile | 50th percentile |
|
||||
| ------------- | :-------------: | :------------------: | :------------------: |
|
||||
| [chardet](https://github.com/chardet/chardet) | 1115 ms | 300 ms | 27 ms |
|
||||
| charset-normalizer | 460 ms | 240 ms | 18 ms |
|
||||
|
||||
Chardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.
|
||||
|
||||
> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.
|
||||
> And yes, these results might change at any time. The dataset can be updated to include more files.
|
||||
> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.
|
||||
|
||||
[cchardet](https://github.com/PyYoshi/cChardet) is a non-native (cpp binding) and unmaintained faster alternative with
|
||||
a better accuracy than chardet but lower than this package. If speed is the most important factor, you should try it.
|
||||
|
||||
## ✨ Installation
|
||||
|
||||
Using PyPi for latest stable
|
||||
```sh
|
||||
pip install charset-normalizer -U
|
||||
```
|
||||
|
||||
If you want a more up-to-date `unicodedata` than the one available in your Python setup.
|
||||
```sh
|
||||
pip install charset-normalizer[unicode_backport] -U
|
||||
```
|
||||
|
||||
## 🚀 Basic Usage
|
||||
|
||||
### CLI
|
||||
This package comes with a CLI.
|
||||
|
||||
```
|
||||
usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
|
||||
file [file ...]
|
||||
|
||||
The Real First Universal Charset Detector. Discover originating encoding used
|
||||
on text file. Normalize text to unicode.
|
||||
|
||||
positional arguments:
|
||||
files File(s) to be analysed
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
-v, --verbose Display complementary information about file if any.
|
||||
Stdout will contain logs about the detection process.
|
||||
-a, --with-alternative
|
||||
Output complementary possibilities if any. Top-level
|
||||
JSON WILL be a list.
|
||||
-n, --normalize Permit to normalize input file. If not set, program
|
||||
does not write anything.
|
||||
-m, --minimal Only output the charset detected to STDOUT. Disabling
|
||||
JSON output.
|
||||
-r, --replace Replace file when trying to normalize it instead of
|
||||
creating a new one.
|
||||
-f, --force Replace file without asking if you are sure, use this
|
||||
flag with caution.
|
||||
-t THRESHOLD, --threshold THRESHOLD
|
||||
Define a custom maximum amount of chaos allowed in
|
||||
decoded content. 0. <= chaos <= 1.
|
||||
--version Show version information and exit.
|
||||
```
|
||||
|
||||
```bash
|
||||
normalizer ./data/sample.1.fr.srt
|
||||
```
|
||||
|
||||
:tada: Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.
|
||||
|
||||
```json
|
||||
{
|
||||
"path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
|
||||
"encoding": "cp1252",
|
||||
"encoding_aliases": [
|
||||
"1252",
|
||||
"windows_1252"
|
||||
],
|
||||
"alternative_encodings": [
|
||||
"cp1254",
|
||||
"cp1256",
|
||||
"cp1258",
|
||||
"iso8859_14",
|
||||
"iso8859_15",
|
||||
"iso8859_16",
|
||||
"iso8859_3",
|
||||
"iso8859_9",
|
||||
"latin_1",
|
||||
"mbcs"
|
||||
],
|
||||
"language": "French",
|
||||
"alphabets": [
|
||||
"Basic Latin",
|
||||
"Latin-1 Supplement"
|
||||
],
|
||||
"has_sig_or_bom": false,
|
||||
"chaos": 0.149,
|
||||
"coherence": 97.152,
|
||||
"unicode_path": null,
|
||||
"is_preferred": true
|
||||
}
|
||||
```
|
||||
|
||||
### Python
|
||||
*Just print out normalized text*
|
||||
```python
|
||||
from charset_normalizer import from_path
|
||||
|
||||
results = from_path('./my_subtitle.srt')
|
||||
|
||||
print(str(results.best()))
|
||||
```
|
||||
|
||||
*Normalize any text file*
|
||||
```python
|
||||
from charset_normalizer import normalize
|
||||
try:
|
||||
normalize('./my_subtitle.srt') # should write to disk my_subtitle-***.srt
|
||||
except IOError as e:
|
||||
print('Sadly, we are unable to perform charset normalization.', str(e))
|
||||
```
|
||||
|
||||
*Upgrade your code without effort*
|
||||
```python
|
||||
from charset_normalizer import detect
|
||||
```
|
||||
|
||||
The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.
|
||||
|
||||
See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
|
||||
|
||||
## 😇 Why
|
||||
|
||||
When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a
|
||||
reliable alternative using a completely different method. Also! I never back down on a good challenge!
|
||||
|
||||
I **don't care** about the **originating charset** encoding, because **two different tables** can
|
||||
produce **two identical rendered string.**
|
||||
What I want is to get readable text, the best I can.
|
||||
|
||||
In a way, **I'm brute forcing text decoding.** How cool is that ? 😎
|
||||
|
||||
Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.
|
||||
|
||||
## 🍰 How
|
||||
|
||||
- Discard all charset encoding table that could not fit the binary content.
|
||||
- Measure chaos, or the mess once opened (by chunks) with a corresponding charset encoding.
|
||||
- Extract matches with the lowest mess detected.
|
||||
- Additionally, we measure coherence / probe for a language.
|
||||
|
||||
**Wait a minute**, what is chaos/mess and coherence according to **YOU ?**
|
||||
|
||||
*Chaos :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then
|
||||
**I established** some ground rules about **what is obvious** when **it seems like** a mess.
|
||||
I know that my interpretation of what is chaotic is very subjective, feel free to contribute in order to
|
||||
improve or rewrite it.
|
||||
|
||||
*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought
|
||||
that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.
|
||||
|
||||
## ⚡ Known limitations
|
||||
|
||||
- Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))
|
||||
- Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content.
|
||||
|
||||
## 👤 Contributing
|
||||
|
||||
Contributions, issues and feature requests are very much welcome.<br />
|
||||
Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
|
||||
|
||||
## 📝 License
|
||||
|
||||
Copyright © 2019 [Ahmed TAHRI @Ousret](https://github.com/Ousret).<br />
|
||||
This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.
|
||||
|
||||
Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/)
|
||||
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
../../../bin/normalizer,sha256=cvQIDNDw9DOwJLsKITnYoAFW6rlHOLju_1jpmDfZL0s,325
|
||||
charset_normalizer-2.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
charset_normalizer-2.0.12.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070
|
||||
charset_normalizer-2.0.12.dist-info/METADATA,sha256=eX-U3s7nb6wcvXZFyM1mdBf1yz4I0msVBgNvLEscAbo,11713
|
||||
charset_normalizer-2.0.12.dist-info/RECORD,,
|
||||
charset_normalizer-2.0.12.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
||||
charset_normalizer-2.0.12.dist-info/entry_points.txt,sha256=5AJq_EPtGGUwJPgQLnBZfbVr-FYCIwT0xP7dIEZO3NI,77
|
||||
charset_normalizer-2.0.12.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
|
||||
charset_normalizer/__init__.py,sha256=x2A2OW29MBcqdxsvy6t1wzkUlH3ma0guxL6ZCfS8J94,1790
|
||||
charset_normalizer/__pycache__/__init__.cpython-37.pyc,,
|
||||
charset_normalizer/__pycache__/api.cpython-37.pyc,,
|
||||
charset_normalizer/__pycache__/cd.cpython-37.pyc,,
|
||||
charset_normalizer/__pycache__/constant.cpython-37.pyc,,
|
||||
charset_normalizer/__pycache__/legacy.cpython-37.pyc,,
|
||||
charset_normalizer/__pycache__/md.cpython-37.pyc,,
|
||||
charset_normalizer/__pycache__/models.cpython-37.pyc,,
|
||||
charset_normalizer/__pycache__/utils.cpython-37.pyc,,
|
||||
charset_normalizer/__pycache__/version.cpython-37.pyc,,
|
||||
charset_normalizer/api.py,sha256=r__Wz85F5pYOkRwEY5imXY_pCZ2Nil1DkdaAJY7T5o0,20303
|
||||
charset_normalizer/assets/__init__.py,sha256=FPnfk8limZRb8ZIUQcTvPEcbuM1eqOdWGw0vbWGycDs,25485
|
||||
charset_normalizer/assets/__pycache__/__init__.cpython-37.pyc,,
|
||||
charset_normalizer/cd.py,sha256=a9Kzzd9tHl_W08ExbCFMmRJqdo2k7EBQ8Z_3y9DmYsg,11076
|
||||
charset_normalizer/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
charset_normalizer/cli/__pycache__/__init__.cpython-37.pyc,,
|
||||
charset_normalizer/cli/__pycache__/normalizer.cpython-37.pyc,,
|
||||
charset_normalizer/cli/normalizer.py,sha256=LkeFIRc1l28rOgXpEby695x0bcKQv4D8z9FmA3Z2c3A,9364
|
||||
charset_normalizer/constant.py,sha256=51u_RS10I1vYVpBao__xHqf--HHNrR6me1A1se5r5Y0,19449
|
||||
charset_normalizer/legacy.py,sha256=XKeZOts_HdYQU_Jb3C9ZfOjY2CiUL132k9_nXer8gig,3384
|
||||
charset_normalizer/md.py,sha256=WEwnu2MyIiMeEaorRduqcTxGjIBclWIG3i-9_UL6LLs,18191
|
||||
charset_normalizer/models.py,sha256=XrGpVxfonhcilIWC1WeiP3-ZORGEe_RG3sgrfPLl9qM,13303
|
||||
charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
charset_normalizer/utils.py,sha256=AWSL0z1B42IwdLfjX4ZMASA9cTUsTp0PweCdW98SI-4,9308
|
||||
charset_normalizer/version.py,sha256=uxO2cT0YIavQv4dQlNGmHPIOOwOa-exspxXi3IR7dck,80
|
||||
@@ -1,5 +0,0 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.37.1)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
[console_scripts]
|
||||
normalizer = charset_normalizer.cli.normalizer:cli_detect
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
charset_normalizer
|
||||
@@ -1,56 +0,0 @@
|
||||
# -*- coding: utf_8 -*-
|
||||
"""
|
||||
Charset-Normalizer
|
||||
~~~~~~~~~~~~~~
|
||||
The Real First Universal Charset Detector.
|
||||
A library that helps you read text from an unknown charset encoding.
|
||||
Motivated by chardet, This package is trying to resolve the issue by taking a new approach.
|
||||
All IANA character set names for which the Python core library provides codecs are supported.
|
||||
|
||||
Basic usage:
|
||||
>>> from charset_normalizer import from_bytes
|
||||
>>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
|
||||
>>> best_guess = results.best()
|
||||
>>> str(best_guess)
|
||||
'Bсеки човек има право на образование. Oбразованието!'
|
||||
|
||||
Others methods and usages are available - see the full documentation
|
||||
at <https://github.com/Ousret/charset_normalizer>.
|
||||
:copyright: (c) 2021 by Ahmed TAHRI
|
||||
:license: MIT, see LICENSE for more details.
|
||||
"""
|
||||
import logging
|
||||
|
||||
from .api import from_bytes, from_fp, from_path, normalize
|
||||
from .legacy import (
|
||||
CharsetDetector,
|
||||
CharsetDoctor,
|
||||
CharsetNormalizerMatch,
|
||||
CharsetNormalizerMatches,
|
||||
detect,
|
||||
)
|
||||
from .models import CharsetMatch, CharsetMatches
|
||||
from .utils import set_logging_handler
|
||||
from .version import VERSION, __version__
|
||||
|
||||
__all__ = (
|
||||
"from_fp",
|
||||
"from_path",
|
||||
"from_bytes",
|
||||
"normalize",
|
||||
"detect",
|
||||
"CharsetMatch",
|
||||
"CharsetMatches",
|
||||
"CharsetNormalizerMatch",
|
||||
"CharsetNormalizerMatches",
|
||||
"CharsetDetector",
|
||||
"CharsetDoctor",
|
||||
"__version__",
|
||||
"VERSION",
|
||||
"set_logging_handler",
|
||||
)
|
||||
|
||||
# Attach a NullHandler to the top level logger by default
|
||||
# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
|
||||
|
||||
logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,608 +0,0 @@
|
||||
import logging
|
||||
from os.path import basename, splitext
|
||||
from typing import BinaryIO, List, Optional, Set
|
||||
|
||||
try:
|
||||
from os import PathLike
|
||||
except ImportError: # pragma: no cover
|
||||
PathLike = str # type: ignore
|
||||
|
||||
from .cd import (
|
||||
coherence_ratio,
|
||||
encoding_languages,
|
||||
mb_encoding_languages,
|
||||
merge_coherence_ratios,
|
||||
)
|
||||
from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE
|
||||
from .md import mess_ratio
|
||||
from .models import CharsetMatch, CharsetMatches
|
||||
from .utils import (
|
||||
any_specified_encoding,
|
||||
iana_name,
|
||||
identify_sig_or_bom,
|
||||
is_cp_similar,
|
||||
is_multi_byte_encoding,
|
||||
should_strip_sig_or_bom,
|
||||
)
|
||||
|
||||
# Will most likely be controversial
|
||||
# logging.addLevelName(TRACE, "TRACE")
|
||||
logger = logging.getLogger("charset_normalizer")
|
||||
explain_handler = logging.StreamHandler()
|
||||
explain_handler.setFormatter(
|
||||
logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
|
||||
)
|
||||
|
||||
|
||||
def from_bytes(
|
||||
sequences: bytes,
|
||||
steps: int = 5,
|
||||
chunk_size: int = 512,
|
||||
threshold: float = 0.2,
|
||||
cp_isolation: List[str] = None,
|
||||
cp_exclusion: List[str] = None,
|
||||
preemptive_behaviour: bool = True,
|
||||
explain: bool = False,
|
||||
) -> CharsetMatches:
|
||||
"""
|
||||
Given a raw bytes sequence, return the best possibles charset usable to render str objects.
|
||||
If there is no results, it is a strong indicator that the source is binary/not text.
|
||||
By default, the process will extract 5 blocs of 512o each to assess the mess and coherence of a given sequence.
|
||||
And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will.
|
||||
|
||||
The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page
|
||||
but never take it for granted. Can improve the performance.
|
||||
|
||||
You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that
|
||||
purpose.
|
||||
|
||||
This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32.
|
||||
By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain'
|
||||
toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging.
|
||||
Custom logging format and handler can be set manually.
|
||||
"""
|
||||
|
||||
if not isinstance(sequences, (bytearray, bytes)):
|
||||
raise TypeError(
|
||||
"Expected object of type bytes or bytearray, got: {0}".format(
|
||||
type(sequences)
|
||||
)
|
||||
)
|
||||
|
||||
if explain:
|
||||
previous_logger_level = logger.level # type: int
|
||||
logger.addHandler(explain_handler)
|
||||
logger.setLevel(TRACE)
|
||||
|
||||
length = len(sequences) # type: int
|
||||
|
||||
if length == 0:
|
||||
logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.")
|
||||
if explain:
|
||||
logger.removeHandler(explain_handler)
|
||||
logger.setLevel(previous_logger_level or logging.WARNING)
|
||||
return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")])
|
||||
|
||||
if cp_isolation is not None:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"cp_isolation is set. use this flag for debugging purpose. "
|
||||
"limited list of encoding allowed : %s.",
|
||||
", ".join(cp_isolation),
|
||||
)
|
||||
cp_isolation = [iana_name(cp, False) for cp in cp_isolation]
|
||||
else:
|
||||
cp_isolation = []
|
||||
|
||||
if cp_exclusion is not None:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"cp_exclusion is set. use this flag for debugging purpose. "
|
||||
"limited list of encoding excluded : %s.",
|
||||
", ".join(cp_exclusion),
|
||||
)
|
||||
cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion]
|
||||
else:
|
||||
cp_exclusion = []
|
||||
|
||||
if length <= (chunk_size * steps):
|
||||
logger.log(
|
||||
TRACE,
|
||||
"override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.",
|
||||
steps,
|
||||
chunk_size,
|
||||
length,
|
||||
)
|
||||
steps = 1
|
||||
chunk_size = length
|
||||
|
||||
if steps > 1 and length / steps < chunk_size:
|
||||
chunk_size = int(length / steps)
|
||||
|
||||
is_too_small_sequence = len(sequences) < TOO_SMALL_SEQUENCE # type: bool
|
||||
is_too_large_sequence = len(sequences) >= TOO_BIG_SEQUENCE # type: bool
|
||||
|
||||
if is_too_small_sequence:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"Trying to detect encoding from a tiny portion of ({}) byte(s).".format(
|
||||
length
|
||||
),
|
||||
)
|
||||
elif is_too_large_sequence:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"Using lazy str decoding because the payload is quite large, ({}) byte(s).".format(
|
||||
length
|
||||
),
|
||||
)
|
||||
|
||||
prioritized_encodings = [] # type: List[str]
|
||||
|
||||
specified_encoding = (
|
||||
any_specified_encoding(sequences) if preemptive_behaviour else None
|
||||
) # type: Optional[str]
|
||||
|
||||
if specified_encoding is not None:
|
||||
prioritized_encodings.append(specified_encoding)
|
||||
logger.log(
|
||||
TRACE,
|
||||
"Detected declarative mark in sequence. Priority +1 given for %s.",
|
||||
specified_encoding,
|
||||
)
|
||||
|
||||
tested = set() # type: Set[str]
|
||||
tested_but_hard_failure = [] # type: List[str]
|
||||
tested_but_soft_failure = [] # type: List[str]
|
||||
|
||||
fallback_ascii = None # type: Optional[CharsetMatch]
|
||||
fallback_u8 = None # type: Optional[CharsetMatch]
|
||||
fallback_specified = None # type: Optional[CharsetMatch]
|
||||
|
||||
results = CharsetMatches() # type: CharsetMatches
|
||||
|
||||
sig_encoding, sig_payload = identify_sig_or_bom(sequences)
|
||||
|
||||
if sig_encoding is not None:
|
||||
prioritized_encodings.append(sig_encoding)
|
||||
logger.log(
|
||||
TRACE,
|
||||
"Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.",
|
||||
len(sig_payload),
|
||||
sig_encoding,
|
||||
)
|
||||
|
||||
prioritized_encodings.append("ascii")
|
||||
|
||||
if "utf_8" not in prioritized_encodings:
|
||||
prioritized_encodings.append("utf_8")
|
||||
|
||||
for encoding_iana in prioritized_encodings + IANA_SUPPORTED:
|
||||
|
||||
if cp_isolation and encoding_iana not in cp_isolation:
|
||||
continue
|
||||
|
||||
if cp_exclusion and encoding_iana in cp_exclusion:
|
||||
continue
|
||||
|
||||
if encoding_iana in tested:
|
||||
continue
|
||||
|
||||
tested.add(encoding_iana)
|
||||
|
||||
decoded_payload = None # type: Optional[str]
|
||||
bom_or_sig_available = sig_encoding == encoding_iana # type: bool
|
||||
strip_sig_or_bom = bom_or_sig_available and should_strip_sig_or_bom(
|
||||
encoding_iana
|
||||
) # type: bool
|
||||
|
||||
if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"Encoding %s wont be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.",
|
||||
encoding_iana,
|
||||
)
|
||||
continue
|
||||
|
||||
try:
|
||||
is_multi_byte_decoder = is_multi_byte_encoding(encoding_iana) # type: bool
|
||||
except (ModuleNotFoundError, ImportError):
|
||||
logger.log(
|
||||
TRACE,
|
||||
"Encoding %s does not provide an IncrementalDecoder",
|
||||
encoding_iana,
|
||||
)
|
||||
continue
|
||||
|
||||
try:
|
||||
if is_too_large_sequence and is_multi_byte_decoder is False:
|
||||
str(
|
||||
sequences[: int(50e4)]
|
||||
if strip_sig_or_bom is False
|
||||
else sequences[len(sig_payload) : int(50e4)],
|
||||
encoding=encoding_iana,
|
||||
)
|
||||
else:
|
||||
decoded_payload = str(
|
||||
sequences
|
||||
if strip_sig_or_bom is False
|
||||
else sequences[len(sig_payload) :],
|
||||
encoding=encoding_iana,
|
||||
)
|
||||
except (UnicodeDecodeError, LookupError) as e:
|
||||
if not isinstance(e, LookupError):
|
||||
logger.log(
|
||||
TRACE,
|
||||
"Code page %s does not fit given bytes sequence at ALL. %s",
|
||||
encoding_iana,
|
||||
str(e),
|
||||
)
|
||||
tested_but_hard_failure.append(encoding_iana)
|
||||
continue
|
||||
|
||||
similar_soft_failure_test = False # type: bool
|
||||
|
||||
for encoding_soft_failed in tested_but_soft_failure:
|
||||
if is_cp_similar(encoding_iana, encoding_soft_failed):
|
||||
similar_soft_failure_test = True
|
||||
break
|
||||
|
||||
if similar_soft_failure_test:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"%s is deemed too similar to code page %s and was consider unsuited already. Continuing!",
|
||||
encoding_iana,
|
||||
encoding_soft_failed,
|
||||
)
|
||||
continue
|
||||
|
||||
r_ = range(
|
||||
0 if not bom_or_sig_available else len(sig_payload),
|
||||
length,
|
||||
int(length / steps),
|
||||
)
|
||||
|
||||
multi_byte_bonus = (
|
||||
is_multi_byte_decoder
|
||||
and decoded_payload is not None
|
||||
and len(decoded_payload) < length
|
||||
) # type: bool
|
||||
|
||||
if multi_byte_bonus:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"Code page %s is a multi byte encoding table and it appear that at least one character "
|
||||
"was encoded using n-bytes.",
|
||||
encoding_iana,
|
||||
)
|
||||
|
||||
max_chunk_gave_up = int(len(r_) / 4) # type: int
|
||||
|
||||
max_chunk_gave_up = max(max_chunk_gave_up, 2)
|
||||
early_stop_count = 0 # type: int
|
||||
lazy_str_hard_failure = False
|
||||
|
||||
md_chunks = [] # type: List[str]
|
||||
md_ratios = []
|
||||
|
||||
for i in r_:
|
||||
if i + chunk_size > length + 8:
|
||||
continue
|
||||
|
||||
cut_sequence = sequences[i : i + chunk_size]
|
||||
|
||||
if bom_or_sig_available and strip_sig_or_bom is False:
|
||||
cut_sequence = sig_payload + cut_sequence
|
||||
|
||||
try:
|
||||
chunk = cut_sequence.decode(
|
||||
encoding_iana,
|
||||
errors="ignore" if is_multi_byte_decoder else "strict",
|
||||
) # type: str
|
||||
except UnicodeDecodeError as e: # Lazy str loading may have missed something there
|
||||
logger.log(
|
||||
TRACE,
|
||||
"LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s",
|
||||
encoding_iana,
|
||||
str(e),
|
||||
)
|
||||
early_stop_count = max_chunk_gave_up
|
||||
lazy_str_hard_failure = True
|
||||
break
|
||||
|
||||
# multi-byte bad cutting detector and adjustment
|
||||
# not the cleanest way to perform that fix but clever enough for now.
|
||||
if is_multi_byte_decoder and i > 0 and sequences[i] >= 0x80:
|
||||
|
||||
chunk_partial_size_chk = min(chunk_size, 16) # type: int
|
||||
|
||||
if (
|
||||
decoded_payload
|
||||
and chunk[:chunk_partial_size_chk] not in decoded_payload
|
||||
):
|
||||
for j in range(i, i - 4, -1):
|
||||
cut_sequence = sequences[j : i + chunk_size]
|
||||
|
||||
if bom_or_sig_available and strip_sig_or_bom is False:
|
||||
cut_sequence = sig_payload + cut_sequence
|
||||
|
||||
chunk = cut_sequence.decode(encoding_iana, errors="ignore")
|
||||
|
||||
if chunk[:chunk_partial_size_chk] in decoded_payload:
|
||||
break
|
||||
|
||||
md_chunks.append(chunk)
|
||||
|
||||
md_ratios.append(mess_ratio(chunk, threshold))
|
||||
|
||||
if md_ratios[-1] >= threshold:
|
||||
early_stop_count += 1
|
||||
|
||||
if (early_stop_count >= max_chunk_gave_up) or (
|
||||
bom_or_sig_available and strip_sig_or_bom is False
|
||||
):
|
||||
break
|
||||
|
||||
# We might want to check the sequence again with the whole content
|
||||
# Only if initial MD tests passes
|
||||
if (
|
||||
not lazy_str_hard_failure
|
||||
and is_too_large_sequence
|
||||
and not is_multi_byte_decoder
|
||||
):
|
||||
try:
|
||||
sequences[int(50e3) :].decode(encoding_iana, errors="strict")
|
||||
except UnicodeDecodeError as e:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s",
|
||||
encoding_iana,
|
||||
str(e),
|
||||
)
|
||||
tested_but_hard_failure.append(encoding_iana)
|
||||
continue
|
||||
|
||||
mean_mess_ratio = (
|
||||
sum(md_ratios) / len(md_ratios) if md_ratios else 0.0
|
||||
) # type: float
|
||||
if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up:
|
||||
tested_but_soft_failure.append(encoding_iana)
|
||||
logger.log(
|
||||
TRACE,
|
||||
"%s was excluded because of initial chaos probing. Gave up %i time(s). "
|
||||
"Computed mean chaos is %f %%.",
|
||||
encoding_iana,
|
||||
early_stop_count,
|
||||
round(mean_mess_ratio * 100, ndigits=3),
|
||||
)
|
||||
# Preparing those fallbacks in case we got nothing.
|
||||
if (
|
||||
encoding_iana in ["ascii", "utf_8", specified_encoding]
|
||||
and not lazy_str_hard_failure
|
||||
):
|
||||
fallback_entry = CharsetMatch(
|
||||
sequences, encoding_iana, threshold, False, [], decoded_payload
|
||||
)
|
||||
if encoding_iana == specified_encoding:
|
||||
fallback_specified = fallback_entry
|
||||
elif encoding_iana == "ascii":
|
||||
fallback_ascii = fallback_entry
|
||||
else:
|
||||
fallback_u8 = fallback_entry
|
||||
continue
|
||||
|
||||
logger.log(
|
||||
TRACE,
|
||||
"%s passed initial chaos probing. Mean measured chaos is %f %%",
|
||||
encoding_iana,
|
||||
round(mean_mess_ratio * 100, ndigits=3),
|
||||
)
|
||||
|
||||
if not is_multi_byte_decoder:
|
||||
target_languages = encoding_languages(encoding_iana) # type: List[str]
|
||||
else:
|
||||
target_languages = mb_encoding_languages(encoding_iana)
|
||||
|
||||
if target_languages:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"{} should target any language(s) of {}".format(
|
||||
encoding_iana, str(target_languages)
|
||||
),
|
||||
)
|
||||
|
||||
cd_ratios = []
|
||||
|
||||
# We shall skip the CD when its about ASCII
|
||||
# Most of the time its not relevant to run "language-detection" on it.
|
||||
if encoding_iana != "ascii":
|
||||
for chunk in md_chunks:
|
||||
chunk_languages = coherence_ratio(
|
||||
chunk, 0.1, ",".join(target_languages) if target_languages else None
|
||||
)
|
||||
|
||||
cd_ratios.append(chunk_languages)
|
||||
|
||||
cd_ratios_merged = merge_coherence_ratios(cd_ratios)
|
||||
|
||||
if cd_ratios_merged:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"We detected language {} using {}".format(
|
||||
cd_ratios_merged, encoding_iana
|
||||
),
|
||||
)
|
||||
|
||||
results.append(
|
||||
CharsetMatch(
|
||||
sequences,
|
||||
encoding_iana,
|
||||
mean_mess_ratio,
|
||||
bom_or_sig_available,
|
||||
cd_ratios_merged,
|
||||
decoded_payload,
|
||||
)
|
||||
)
|
||||
|
||||
if (
|
||||
encoding_iana in [specified_encoding, "ascii", "utf_8"]
|
||||
and mean_mess_ratio < 0.1
|
||||
):
|
||||
logger.debug(
|
||||
"Encoding detection: %s is most likely the one.", encoding_iana
|
||||
)
|
||||
if explain:
|
||||
logger.removeHandler(explain_handler)
|
||||
logger.setLevel(previous_logger_level)
|
||||
return CharsetMatches([results[encoding_iana]])
|
||||
|
||||
if encoding_iana == sig_encoding:
|
||||
logger.debug(
|
||||
"Encoding detection: %s is most likely the one as we detected a BOM or SIG within "
|
||||
"the beginning of the sequence.",
|
||||
encoding_iana,
|
||||
)
|
||||
if explain:
|
||||
logger.removeHandler(explain_handler)
|
||||
logger.setLevel(previous_logger_level)
|
||||
return CharsetMatches([results[encoding_iana]])
|
||||
|
||||
if len(results) == 0:
|
||||
if fallback_u8 or fallback_ascii or fallback_specified:
|
||||
logger.log(
|
||||
TRACE,
|
||||
"Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.",
|
||||
)
|
||||
|
||||
if fallback_specified:
|
||||
logger.debug(
|
||||
"Encoding detection: %s will be used as a fallback match",
|
||||
fallback_specified.encoding,
|
||||
)
|
||||
results.append(fallback_specified)
|
||||
elif (
|
||||
(fallback_u8 and fallback_ascii is None)
|
||||
or (
|
||||
fallback_u8
|
||||
and fallback_ascii
|
||||
and fallback_u8.fingerprint != fallback_ascii.fingerprint
|
||||
)
|
||||
or (fallback_u8 is not None)
|
||||
):
|
||||
logger.debug("Encoding detection: utf_8 will be used as a fallback match")
|
||||
results.append(fallback_u8)
|
||||
elif fallback_ascii:
|
||||
logger.debug("Encoding detection: ascii will be used as a fallback match")
|
||||
results.append(fallback_ascii)
|
||||
|
||||
if results:
|
||||
logger.debug(
|
||||
"Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.",
|
||||
results.best().encoding, # type: ignore
|
||||
len(results) - 1,
|
||||
)
|
||||
else:
|
||||
logger.debug("Encoding detection: Unable to determine any suitable charset.")
|
||||
|
||||
if explain:
|
||||
logger.removeHandler(explain_handler)
|
||||
logger.setLevel(previous_logger_level)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def from_fp(
    fp: BinaryIO,
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.20,
    cp_isolation: List[str] = None,
    cp_exclusion: List[str] = None,
    preemptive_behaviour: bool = True,
    explain: bool = False,
) -> CharsetMatches:
    """
    Run charset detection on the remaining content of an already-opened binary stream.

    The stream is read to exhaustion but intentionally left open; closing it is the
    caller's responsibility. All tuning parameters are forwarded verbatim to from_bytes.
    """
    # Consume everything from the current stream position in one go.
    payload = fp.read()

    return from_bytes(
        payload,
        steps=steps,
        chunk_size=chunk_size,
        threshold=threshold,
        cp_isolation=cp_isolation,
        cp_exclusion=cp_exclusion,
        preemptive_behaviour=preemptive_behaviour,
        explain=explain,
    )
|
||||
|
||||
|
||||
def from_path(
    path: PathLike,
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.20,
    cp_isolation: List[str] = None,
    cp_exclusion: List[str] = None,
    preemptive_behaviour: bool = True,
    explain: bool = False,
) -> CharsetMatches:
    """
    Run charset detection on the file designated by *path*.

    The file is opened in binary mode and handed to from_fp; the handle is closed
    automatically once detection completes. Can raise IOError if the path cannot
    be opened or read.
    """
    with open(path, "rb") as fp:
        return from_fp(
            fp,
            steps=steps,
            chunk_size=chunk_size,
            threshold=threshold,
            cp_isolation=cp_isolation,
            cp_exclusion=cp_exclusion,
            preemptive_behaviour=preemptive_behaviour,
            explain=explain,
        )
|
||||
|
||||
|
||||
def normalize(
    path: PathLike,
    steps: int = 5,
    chunk_size: int = 512,
    threshold: float = 0.20,
    cp_isolation: List[str] = None,
    cp_exclusion: List[str] = None,
    preemptive_behaviour: bool = True,
) -> CharsetMatch:
    """
    Take a (text-based) file path and try to create another file next to it, this time using UTF-8.

    The sibling file keeps the original extension but gets "-<encoding>" appended
    to its stem (e.g. "data.txt" detected as cp1252 -> "data-cp1252.txt").
    Raises IOError when no plausible encoding could be found.
    Returns the best CharsetMatch used to produce the output.
    """
    # Local import: only needed here, and the module import block is not in view.
    from os.path import dirname, join

    results = from_path(
        path,
        steps,
        chunk_size,
        threshold,
        cp_isolation,
        cp_exclusion,
        preemptive_behaviour,
    )

    filename = basename(path)
    target_extensions = list(splitext(filename))

    if len(results) == 0:
        raise IOError(
            'Unable to normalize "{}", no encoding charset seems to fit.'.format(
                filename
            )
        )

    # Non-empty results guarantee best() is not None.
    result = results.best()

    target_extensions[0] += "-" + result.encoding  # type: ignore

    # Bug fix: the previous implementation used str(path).replace(filename, ...),
    # which rewrites the FIRST occurrence of the filename anywhere in the path —
    # a parent directory sharing the file's name (e.g. "/data/data") would get
    # mangled. Build the sibling path from the directory component instead.
    target_path = join(dirname(str(path)), "".join(target_extensions))

    with open(target_path, "wb") as fp:
        fp.write(result.output())  # type: ignore

    return result  # type: ignore
|
||||
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@@ -1,340 +0,0 @@
|
||||
import importlib
|
||||
from codecs import IncrementalDecoder
|
||||
from collections import Counter, OrderedDict
|
||||
from functools import lru_cache
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
|
||||
from .assets import FREQUENCIES
|
||||
from .constant import KO_NAMES, LANGUAGE_SUPPORTED_COUNT, TOO_SMALL_SEQUENCE, ZH_NAMES
|
||||
from .md import is_suspiciously_successive_range
|
||||
from .models import CoherenceMatches
|
||||
from .utils import (
|
||||
is_accentuated,
|
||||
is_latin,
|
||||
is_multi_byte_encoding,
|
||||
is_unicode_range_secondary,
|
||||
unicode_range,
|
||||
)
|
||||
|
||||
|
||||
def encoding_unicode_range(iana_name: str) -> List[str]:
    """
    Return associated unicode ranges in a single byte code page.

    Probes every upper-half byte value through the codec's incremental decoder
    and keeps the non-secondary ranges covering at least 15% of the decodable
    characters. Raises IOError for multi-byte encodings.
    """
    if is_multi_byte_encoding(iana_name):
        raise IOError("Function not supported on multi-byte code page")

    decoder = importlib.import_module("encodings.{}".format(iana_name)).IncrementalDecoder  # type: ignore

    stream_decoder = decoder(errors="ignore")  # type: IncrementalDecoder
    range_occurrences = {}  # type: Dict[str, int]
    decoded_count = 0  # type: int

    # Feed the decoder one byte at a time across the upper byte values.
    for byte_value in range(0x40, 0xFF):
        symbol = stream_decoder.decode(bytes([byte_value]))  # type: str

        if not symbol:
            continue

        symbol_range = unicode_range(symbol)  # type: Optional[str]

        if symbol_range is None:
            continue

        if is_unicode_range_secondary(symbol_range) is False:
            range_occurrences[symbol_range] = range_occurrences.get(symbol_range, 0) + 1
        # Counted even for secondary ranges: the 15% threshold below is
        # relative to every decodable, range-bearing character.
        decoded_count += 1

    return sorted(
        single_range
        for single_range in range_occurrences
        if range_occurrences[single_range] / decoded_count >= 0.15
    )
|
||||
|
||||
|
||||
def unicode_range_languages(primary_range: str) -> List[str]:
    """
    Infer which supported languages make use of a given unicode range.

    A language qualifies as soon as any one of its frequent characters
    belongs to *primary_range*.
    """
    return [
        language
        for language, characters in FREQUENCIES.items()
        if any(unicode_range(character) == primary_range for character in characters)
    ]
|
||||
|
||||
|
||||
@lru_cache()
def encoding_languages(iana_name: str) -> List[str]:
    """
    Single-byte encoding language association. Some code page are heavily linked to particular language(s).
    This function does the correspondence.
    """
    detected_ranges = encoding_unicode_range(iana_name)  # type: List[str]

    # The first non-Latin range, if any, drives the language association.
    primary_range = next(
        (candidate for candidate in detected_ranges if "Latin" not in candidate),
        None,
    )  # type: Optional[str]

    if primary_range is None:
        return ["Latin Based"]

    return unicode_range_languages(primary_range)
|
||||
|
||||
|
||||
@lru_cache()
|
||||
def mb_encoding_languages(iana_name: str) -> List[str]:
|
||||
"""
|
||||
Multi-byte encoding language association. Some code page are heavily linked to particular language(s).
|
||||
This function does the correspondence.
|
||||
"""
|
||||
if (
|
||||
iana_name.startswith("shift_")
|
||||
or iana_name.startswith("iso2022_jp")
|
||||
or iana_name.startswith("euc_j")
|
||||
or iana_name == "cp932"
|
||||
):
|
||||
return ["Japanese"]
|
||||
if iana_name.startswith("gb") or iana_name in ZH_NAMES:
|
||||
return ["Chinese", "Classical Chinese"]
|
||||
if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES:
|
||||
return ["Korean"]
|
||||
|
||||
return []
|
||||
|
||||
|
||||
@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT)
def get_target_features(language: str) -> Tuple[bool, bool]:
    """
    Determine main aspects from a supported language if it contains accents and if is pure Latin.
    """
    frequent_characters = FREQUENCIES[language]

    # (has accentuated characters, uses exclusively Latin characters)
    has_accents = any(is_accentuated(character) for character in frequent_characters)  # type: bool
    pure_latin = all(is_latin(character) for character in frequent_characters)  # type: bool

    return has_accents, pure_latin
|
||||
|
||||
|
||||
def alphabet_languages(
    characters: List[str], ignore_non_latin: bool = False
) -> List[str]:
    """
    Return the languages plausibly associated with the given characters, best
    match first.

    A language is retained when at least 20% of its frequent characters appear
    in the given set. Accent usage acts as a cheap pre-filter: accentuated
    input cannot match an accent-free language.
    """
    candidates = []  # type: List[Tuple[str, float]]

    input_has_accents = any(is_accentuated(character) for character in characters)

    for language, frequent_characters in FREQUENCIES.items():

        language_has_accents, language_is_pure_latin = get_target_features(language)

        if ignore_non_latin and language_is_pure_latin is False:
            continue

        if language_has_accents is False and input_has_accents:
            continue

        match_count = sum(1 for c in frequent_characters if c in characters)  # type: int

        match_ratio = match_count / len(frequent_characters)  # type: float

        if match_ratio >= 0.2:
            candidates.append((language, match_ratio))

    # Stable sort: ties keep FREQUENCIES insertion order, as before.
    candidates.sort(key=lambda entry: entry[1], reverse=True)

    return [language for language, _ in candidates]
|
||||
|
||||
|
||||
def characters_popularity_compare(
    language: str, ordered_characters: List[str]
) -> float:
    """
    Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language.
    The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit).
    Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.)
    """
    if language not in FREQUENCIES:
        raise ValueError("{} not available".format(language))

    character_approved_count = 0  # type: int
    # Hoisted: constant for the whole loop (the original re-indexed
    # FREQUENCIES[language] on every access).
    language_frequencies = FREQUENCIES[language]  # type: List[str]

    for character in ordered_characters:
        if character not in language_frequencies:
            continue

        # Each rank is computed once per character; the original called
        # .index() twice per list (an O(n) scan each time).
        frequency_rank = language_frequencies.index(character)  # type: int
        source_rank = ordered_characters.index(character)  # type: int

        # Split both rankings around the current character.
        characters_before_source = language_frequencies[0:frequency_rank]  # type: List[str]
        characters_after_source = language_frequencies[frequency_rank:]  # type: List[str]
        characters_before = ordered_characters[0:source_rank]  # type: List[str]
        characters_after = ordered_characters[source_rank:]  # type: List[str]

        # How many reference characters keep their relative side in the source.
        before_match_count = sum(
            1 for e in characters_before_source if e in characters_before
        )  # type: int
        after_match_count = sum(
            1 for e in characters_after_source if e in characters_after
        )  # type: int

        if len(characters_before_source) == 0 and before_match_count <= 4:
            character_approved_count += 1
            continue

        if len(characters_after_source) == 0 and after_match_count <= 4:
            character_approved_count += 1
            continue

        # A character is "approved" when at least 40% of its neighbours on
        # either side agree between the reference and observed orderings.
        if (
            before_match_count / len(characters_before_source) >= 0.4
            or after_match_count / len(characters_after_source) >= 0.4
        ):
            character_approved_count += 1
            continue

    return character_approved_count / len(ordered_characters)
|
||||
|
||||
|
||||
def alpha_unicode_split(decoded_sequence: str) -> List[str]:
    """
    Given a decoded text sequence, return a list of str. Unicode range / alphabet separation.
    Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list;
    One containing the latin letters and the other hebrew.
    """
    layers = OrderedDict()  # type: Dict[str, str]

    for character in decoded_sequence:
        if not character.isalpha():
            continue

        character_range = unicode_range(character)  # type: Optional[str]

        if character_range is None:
            continue

        # Reuse an existing layer unless pairing with it would be suspicious.
        layer_target_range = None  # type: Optional[str]
        for known_range in layers:
            if is_suspiciously_successive_range(known_range, character_range) is False:
                layer_target_range = known_range
                break

        if layer_target_range is None:
            layer_target_range = character_range

        # Append to the chosen layer, creating it on first use.
        layers[layer_target_range] = layers.get(layer_target_range, "") + character.lower()

    return list(layers.values())
|
||||
|
||||
|
||||
def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches:
    """
    This function merge results previously given by the function coherence_ratio.
    The return type is the same as coherence_ratio.

    Ratios reported for the same language across inputs are averaged (rounded
    to 4 decimals) and the merged list is returned best ratio first.
    """
    per_language_ratios = OrderedDict()  # type: Dict[str, List[float]]

    for partial_result in results:
        for language, ratio in partial_result:
            per_language_ratios.setdefault(language, []).append(ratio)

    merged = [
        (language, round(sum(ratios) / len(ratios), 4))
        for language, ratios in per_language_ratios.items()
    ]

    return sorted(merged, key=lambda entry: entry[1], reverse=True)
|
||||
|
||||
|
||||
@lru_cache(maxsize=2048)
def coherence_ratio(
    decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None
) -> CoherenceMatches:
    """
    Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers.
    A layer = Character extraction by alphabets/ranges.
    """

    matches = []  # type: List[Tuple[str, float]]
    strong_match_count = 0  # type: int

    # lg_inclusion is a comma-separated whitelist; "Latin Based" inside it is a
    # flag rather than a language name.
    included_languages = lg_inclusion.split(",") if lg_inclusion is not None else []
    ignore_non_latin = "Latin Based" in included_languages  # type: bool
    if ignore_non_latin:
        included_languages.remove("Latin Based")

    for layer in alpha_unicode_split(decoded_sequence):
        occurrences = Counter(layer)  # type: Counter
        ranked = occurrences.most_common()

        layer_character_count = sum(count for _, count in ranked)  # type: int

        if layer_character_count <= TOO_SMALL_SEQUENCE:
            continue

        ordered_characters = [character for character, _ in ranked]  # type: List[str]

        candidate_languages = included_languages or alphabet_languages(
            ordered_characters, ignore_non_latin
        )

        for language in candidate_languages:
            ratio = characters_popularity_compare(
                language, ordered_characters
            )  # type: float

            if ratio < threshold:
                continue
            if ratio >= 0.8:
                strong_match_count += 1

            matches.append((language, round(ratio, 4)))

            # Three strong hits are evidence enough; stop scanning this layer.
            if strong_match_count >= 3:
                break

    return sorted(matches, key=lambda entry: entry[1], reverse=True)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user