mirror of
https://github.com/pacnpal/thrillwiki_django_no_react.git
synced 2025-12-22 10:31:09 -05:00
okay fine
This commit is contained in:
@@ -0,0 +1,31 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Twisted Python: Utilities and Enhancements for Python.
|
||||
"""
|
||||
|
||||
|
||||
from .deprecate import deprecatedModuleAttribute
|
||||
|
||||
# Deprecating twisted.python.constants.
|
||||
from .versions import Version
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 16, 5, 0),
|
||||
"Please use constantly from PyPI instead.",
|
||||
"twisted.python",
|
||||
"constants",
|
||||
)
|
||||
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 17, 5, 0),
|
||||
"Please use hyperlink from PyPI instead.",
|
||||
"twisted.python",
|
||||
"url",
|
||||
)
|
||||
|
||||
|
||||
del Version
|
||||
del deprecatedModuleAttribute
|
||||
@@ -0,0 +1,32 @@
|
||||
# -*- test-case-name: twisted.python.test.test_appdirs -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Application data directory support.
|
||||
"""
|
||||
|
||||
import inspect
|
||||
from typing import cast
|
||||
|
||||
import appdirs # type: ignore[import-untyped]
|
||||
|
||||
from twisted.python.compat import currentframe
|
||||
|
||||
|
||||
def getDataDirectory(moduleName: str = "") -> str:
|
||||
"""
|
||||
Get a data directory for the caller function, or C{moduleName} if given.
|
||||
|
||||
@param moduleName: The module name if you don't wish to have the caller's
|
||||
module.
|
||||
|
||||
@returns: A directory for putting data in.
|
||||
"""
|
||||
if not moduleName:
|
||||
caller = currentframe(1)
|
||||
module = inspect.getmodule(caller)
|
||||
assert module is not None
|
||||
moduleName = module.__name__
|
||||
|
||||
return cast(str, appdirs.user_data_dir(moduleName))
|
||||
100
.venv/lib/python3.12/site-packages/twisted/python/_inotify.py
Normal file
100
.venv/lib/python3.12/site-packages/twisted/python/_inotify.py
Normal file
@@ -0,0 +1,100 @@
|
||||
# -*- test-case-name: twisted.internet.test.test_inotify -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Very low-level ctypes-based interface to Linux inotify(7).
|
||||
|
||||
ctypes and a version of libc which supports inotify system calls are
|
||||
required.
|
||||
"""
|
||||
|
||||
import ctypes
|
||||
import ctypes.util
|
||||
from typing import Any, cast
|
||||
|
||||
from twisted.python.filepath import FilePath
|
||||
|
||||
|
||||
class INotifyError(Exception):
|
||||
"""
|
||||
Unify all the possible exceptions that can be raised by the INotify API.
|
||||
"""
|
||||
|
||||
|
||||
def init() -> int:
|
||||
"""
|
||||
Create an inotify instance and return the associated file descriptor.
|
||||
"""
|
||||
fd = cast(int, libc.inotify_init())
|
||||
if fd < 0:
|
||||
raise INotifyError("INotify initialization error.")
|
||||
return fd
|
||||
|
||||
|
||||
def add(fd: int, path: FilePath[Any], mask: int) -> int:
|
||||
"""
|
||||
Add a watch for the given path to the inotify file descriptor, and return
|
||||
the watch descriptor.
|
||||
|
||||
@param fd: The file descriptor returned by C{libc.inotify_init}.
|
||||
@param path: The path to watch via inotify.
|
||||
@param mask: Bitmask specifying the events that inotify should monitor.
|
||||
"""
|
||||
wd = cast(int, libc.inotify_add_watch(fd, path.asBytesMode().path, mask))
|
||||
if wd < 0:
|
||||
raise INotifyError(f"Failed to add watch on '{path!r}' - ({wd!r})")
|
||||
return wd
|
||||
|
||||
|
||||
def remove(fd: int, wd: int) -> None:
|
||||
"""
|
||||
Remove the given watch descriptor from the inotify file descriptor.
|
||||
"""
|
||||
# When inotify_rm_watch returns -1 there's an error:
|
||||
# The errno for this call can be either one of the following:
|
||||
# EBADF: fd is not a valid file descriptor.
|
||||
# EINVAL: The watch descriptor wd is not valid; or fd is
|
||||
# not an inotify file descriptor.
|
||||
#
|
||||
# if we can't access the errno here we cannot even raise
|
||||
# an exception and we need to ignore the problem, one of
|
||||
# the most common cases is when you remove a directory from
|
||||
# the filesystem and that directory is observed. When inotify
|
||||
# tries to call inotify_rm_watch with a non existing directory
|
||||
# either of the 2 errors might come up because the files inside
|
||||
# it might have events generated way before they were handled.
|
||||
# Unfortunately only ctypes in Python 2.6 supports accessing errno:
|
||||
# http://bugs.python.org/issue1798 and in order to solve
|
||||
# the problem for previous versions we need to introduce
|
||||
# code that is quite complex:
|
||||
# http://stackoverflow.com/questions/661017/access-to-errno-from-python
|
||||
#
|
||||
# See #4310 for future resolution of this issue.
|
||||
libc.inotify_rm_watch(fd, wd)
|
||||
|
||||
|
||||
def initializeModule(libc: ctypes.CDLL) -> None:
|
||||
"""
|
||||
Initialize the module, checking if the expected APIs exist and setting the
|
||||
argtypes and restype for C{inotify_init}, C{inotify_add_watch}, and
|
||||
C{inotify_rm_watch}.
|
||||
"""
|
||||
for function in ("inotify_add_watch", "inotify_init", "inotify_rm_watch"):
|
||||
if getattr(libc, function, None) is None:
|
||||
raise ImportError("libc6 2.4 or higher needed")
|
||||
libc.inotify_init.argtypes = []
|
||||
libc.inotify_init.restype = ctypes.c_int
|
||||
|
||||
libc.inotify_rm_watch.argtypes = [ctypes.c_int, ctypes.c_int]
|
||||
libc.inotify_rm_watch.restype = ctypes.c_int
|
||||
|
||||
libc.inotify_add_watch.argtypes = [ctypes.c_int, ctypes.c_char_p, ctypes.c_uint32]
|
||||
libc.inotify_add_watch.restype = ctypes.c_int
|
||||
|
||||
|
||||
name = ctypes.util.find_library("c")
|
||||
if not name:
|
||||
raise ImportError("Can't find C library.")
|
||||
libc = ctypes.cdll.LoadLibrary(name)
|
||||
initializeModule(libc)
|
||||
@@ -0,0 +1,29 @@
|
||||
<div style="display: none" id="current-docs-container" class="container">
|
||||
<div class="col-sm-12">
|
||||
<a id="current-docs-link">
|
||||
Go to the latest version of this document.
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<!-- Google analytics, obviously. -->
|
||||
<script src="//www.google-analytics.com/urchin.js" type="text/javascript"></script>
|
||||
<script type="text/javascript">
|
||||
_uacct = "UA-99018-6";
|
||||
urchinTracker();
|
||||
</script>
|
||||
|
||||
<!-- If the documentation isn't current, insert a current link. -->
|
||||
<script type="text/javascript">
|
||||
if (window.location.pathname.indexOf('/current/') == -1) {
|
||||
<!-- Give the user a link to this page, but in the current version of the docs. -->
|
||||
var link = document.getElementById('current-docs-link');
|
||||
link.href = window.location.pathname.replace(/\/\d+\.\d+\.\d+\/api\//, '/current/api/');
|
||||
<!-- And make it visible -->
|
||||
var container = document.getElementById('current-docs-container');
|
||||
container.style.display = "";
|
||||
delete link;
|
||||
delete container;
|
||||
}
|
||||
</script>
|
||||
|
||||
</div>
|
||||
281
.venv/lib/python3.12/site-packages/twisted/python/_release.py
Normal file
281
.venv/lib/python3.12/site-packages/twisted/python/_release.py
Normal file
@@ -0,0 +1,281 @@
|
||||
# -*- test-case-name: twisted.python.test.test_release -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Twisted's automated release system.
|
||||
|
||||
This module is only for use within Twisted's release system. If you are anyone
|
||||
else, do not use it. The interface and behaviour will change without notice.
|
||||
|
||||
Only Linux is supported by this code. It should not be used by any tools
|
||||
which must run on multiple platforms (eg the setup.py script).
|
||||
"""
|
||||
|
||||
import os
|
||||
from subprocess import STDOUT, CalledProcessError, check_output
|
||||
from typing import Dict
|
||||
|
||||
from zope.interface import Interface, implementer
|
||||
|
||||
from twisted.python.compat import execfile
|
||||
|
||||
|
||||
def runCommand(args, **kwargs):
|
||||
"""Execute a vector of arguments.
|
||||
|
||||
This is a wrapper around L{subprocess.check_output}, so it takes
|
||||
the same arguments as L{subprocess.Popen} with one difference: all
|
||||
arguments after the vector must be keyword arguments.
|
||||
|
||||
@param args: arguments passed to L{subprocess.check_output}
|
||||
@param kwargs: keyword arguments passed to L{subprocess.check_output}
|
||||
@return: command output
|
||||
@rtype: L{bytes}
|
||||
"""
|
||||
kwargs["stderr"] = STDOUT
|
||||
return check_output(args, **kwargs)
|
||||
|
||||
|
||||
class IVCSCommand(Interface):
|
||||
"""
|
||||
An interface for VCS commands.
|
||||
"""
|
||||
|
||||
def ensureIsWorkingDirectory(path):
|
||||
"""
|
||||
Ensure that C{path} is a working directory of this VCS.
|
||||
|
||||
@type path: L{twisted.python.filepath.FilePath}
|
||||
@param path: The path to check.
|
||||
"""
|
||||
|
||||
def isStatusClean(path):
|
||||
"""
|
||||
Return the Git status of the files in the specified path.
|
||||
|
||||
@type path: L{twisted.python.filepath.FilePath}
|
||||
@param path: The path to get the status from (can be a directory or a
|
||||
file.)
|
||||
"""
|
||||
|
||||
def remove(path):
|
||||
"""
|
||||
Remove the specified path from a the VCS.
|
||||
|
||||
@type path: L{twisted.python.filepath.FilePath}
|
||||
@param path: The path to remove from the repository.
|
||||
"""
|
||||
|
||||
def exportTo(fromDir, exportDir):
|
||||
"""
|
||||
Export the content of the VCSrepository to the specified directory.
|
||||
|
||||
@type fromDir: L{twisted.python.filepath.FilePath}
|
||||
@param fromDir: The path to the VCS repository to export.
|
||||
|
||||
@type exportDir: L{twisted.python.filepath.FilePath}
|
||||
@param exportDir: The directory to export the content of the
|
||||
repository to. This directory doesn't have to exist prior to
|
||||
exporting the repository.
|
||||
"""
|
||||
|
||||
|
||||
@implementer(IVCSCommand)
|
||||
class GitCommand:
|
||||
"""
|
||||
Subset of Git commands to release Twisted from a Git repository.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def ensureIsWorkingDirectory(path):
|
||||
"""
|
||||
Ensure that C{path} is a Git working directory.
|
||||
|
||||
@type path: L{twisted.python.filepath.FilePath}
|
||||
@param path: The path to check.
|
||||
"""
|
||||
try:
|
||||
runCommand(["git", "rev-parse"], cwd=path.path)
|
||||
except (CalledProcessError, OSError):
|
||||
raise NotWorkingDirectory(
|
||||
f"{path.path} does not appear to be a Git repository."
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def isStatusClean(path):
|
||||
"""
|
||||
Return the Git status of the files in the specified path.
|
||||
|
||||
@type path: L{twisted.python.filepath.FilePath}
|
||||
@param path: The path to get the status from (can be a directory or a
|
||||
file.)
|
||||
"""
|
||||
status = runCommand(["git", "-C", path.path, "status", "--short"]).strip()
|
||||
return status == b""
|
||||
|
||||
@staticmethod
|
||||
def remove(path):
|
||||
"""
|
||||
Remove the specified path from a Git repository.
|
||||
|
||||
@type path: L{twisted.python.filepath.FilePath}
|
||||
@param path: The path to remove from the repository.
|
||||
"""
|
||||
runCommand(["git", "-C", path.dirname(), "rm", path.path])
|
||||
|
||||
@staticmethod
|
||||
def exportTo(fromDir, exportDir):
|
||||
"""
|
||||
Export the content of a Git repository to the specified directory.
|
||||
|
||||
@type fromDir: L{twisted.python.filepath.FilePath}
|
||||
@param fromDir: The path to the Git repository to export.
|
||||
|
||||
@type exportDir: L{twisted.python.filepath.FilePath}
|
||||
@param exportDir: The directory to export the content of the
|
||||
repository to. This directory doesn't have to exist prior to
|
||||
exporting the repository.
|
||||
"""
|
||||
runCommand(
|
||||
[
|
||||
"git",
|
||||
"-C",
|
||||
fromDir.path,
|
||||
"checkout-index",
|
||||
"--all",
|
||||
"--force",
|
||||
# prefix has to end up with a "/" so that files get copied
|
||||
# to a directory whose name is the prefix.
|
||||
"--prefix",
|
||||
exportDir.path + "/",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def getRepositoryCommand(directory):
|
||||
"""
|
||||
Detect the VCS used in the specified directory and return a L{GitCommand}
|
||||
if the directory is a Git repository. If the directory is not git, it
|
||||
raises a L{NotWorkingDirectory} exception.
|
||||
|
||||
@type directory: L{FilePath}
|
||||
@param directory: The directory to detect the VCS used from.
|
||||
|
||||
@rtype: L{GitCommand}
|
||||
|
||||
@raise NotWorkingDirectory: if no supported VCS can be found from the
|
||||
specified directory.
|
||||
"""
|
||||
try:
|
||||
GitCommand.ensureIsWorkingDirectory(directory)
|
||||
return GitCommand
|
||||
except (NotWorkingDirectory, OSError):
|
||||
# It's not Git, but that's okay, eat the error
|
||||
pass
|
||||
|
||||
raise NotWorkingDirectory(f"No supported VCS can be found in {directory.path}")
|
||||
|
||||
|
||||
class Project:
|
||||
"""
|
||||
A representation of a project that has a version.
|
||||
|
||||
@ivar directory: A L{twisted.python.filepath.FilePath} pointing to the base
|
||||
directory of a Twisted-style Python package. The package should contain
|
||||
a C{_version.py} file and a C{newsfragments} directory that contains a
|
||||
C{README} file.
|
||||
"""
|
||||
|
||||
def __init__(self, directory):
|
||||
self.directory = directory
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"{self.__class__.__name__}({self.directory!r})"
|
||||
|
||||
def getVersion(self):
|
||||
"""
|
||||
@return: A L{incremental.Version} specifying the version number of the
|
||||
project based on live python modules.
|
||||
"""
|
||||
namespace: Dict[str, object] = {}
|
||||
directory = self.directory
|
||||
while not namespace:
|
||||
if directory.path == "/":
|
||||
raise Exception("Not inside a Twisted project.")
|
||||
elif not directory.basename() == "twisted":
|
||||
directory = directory.parent()
|
||||
else:
|
||||
execfile(directory.child("_version.py").path, namespace)
|
||||
return namespace["__version__"]
|
||||
|
||||
|
||||
def findTwistedProjects(baseDirectory):
|
||||
"""
|
||||
Find all Twisted-style projects beneath a base directory.
|
||||
|
||||
@param baseDirectory: A L{twisted.python.filepath.FilePath} to look inside.
|
||||
@return: A list of L{Project}.
|
||||
"""
|
||||
projects = []
|
||||
for filePath in baseDirectory.walk():
|
||||
if filePath.basename() == "newsfragments":
|
||||
projectDirectory = filePath.parent()
|
||||
projects.append(Project(projectDirectory))
|
||||
return projects
|
||||
|
||||
|
||||
def replaceInFile(filename, oldToNew):
|
||||
"""
|
||||
I replace the text `oldstr' with `newstr' in `filename' using science.
|
||||
"""
|
||||
os.rename(filename, filename + ".bak")
|
||||
with open(filename + ".bak") as f:
|
||||
d = f.read()
|
||||
for k, v in oldToNew.items():
|
||||
d = d.replace(k, v)
|
||||
with open(filename + ".new", "w") as f:
|
||||
f.write(d)
|
||||
os.rename(filename + ".new", filename)
|
||||
os.unlink(filename + ".bak")
|
||||
|
||||
|
||||
class NoDocumentsFound(Exception):
|
||||
"""
|
||||
Raised when no input documents are found.
|
||||
"""
|
||||
|
||||
|
||||
def filePathDelta(origin, destination):
|
||||
"""
|
||||
Return a list of strings that represent C{destination} as a path relative
|
||||
to C{origin}.
|
||||
|
||||
It is assumed that both paths represent directories, not files. That is to
|
||||
say, the delta of L{twisted.python.filepath.FilePath} /foo/bar to
|
||||
L{twisted.python.filepath.FilePath} /foo/baz will be C{../baz},
|
||||
not C{baz}.
|
||||
|
||||
@type origin: L{twisted.python.filepath.FilePath}
|
||||
@param origin: The origin of the relative path.
|
||||
|
||||
@type destination: L{twisted.python.filepath.FilePath}
|
||||
@param destination: The destination of the relative path.
|
||||
"""
|
||||
commonItems = 0
|
||||
path1 = origin.path.split(os.sep)
|
||||
path2 = destination.path.split(os.sep)
|
||||
for elem1, elem2 in zip(path1, path2):
|
||||
if elem1 == elem2:
|
||||
commonItems += 1
|
||||
else:
|
||||
break
|
||||
path = [".."] * (len(path1) - commonItems)
|
||||
return path + path2[commonItems:]
|
||||
|
||||
|
||||
class NotWorkingDirectory(Exception):
|
||||
"""
|
||||
Raised when a directory does not appear to be a repository directory of a
|
||||
supported VCS.
|
||||
"""
|
||||
686
.venv/lib/python3.12/site-packages/twisted/python/_shellcomp.py
Normal file
686
.venv/lib/python3.12/site-packages/twisted/python/_shellcomp.py
Normal file
@@ -0,0 +1,686 @@
|
||||
# -*- test-case-name: twisted.python.test.test_shellcomp -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
No public APIs are provided by this module. Internal use only.
|
||||
|
||||
This module implements dynamic tab-completion for any command that uses
|
||||
twisted.python.usage. Currently, only zsh is supported. Bash support may
|
||||
be added in the future.
|
||||
|
||||
Maintainer: Eric P. Mangold - twisted AT teratorn DOT org
|
||||
|
||||
In order for zsh completion to take place the shell must be able to find an
|
||||
appropriate "stub" file ("completion function") that invokes this code and
|
||||
displays the results to the user.
|
||||
|
||||
The stub used for Twisted commands is in the file C{twisted-completion.zsh},
|
||||
which is also included in the official Zsh distribution at
|
||||
C{Completion/Unix/Command/_twisted}. Use this file as a basis for completion
|
||||
functions for your own commands. You should only need to change the first line
|
||||
to something like C{#compdef mycommand}.
|
||||
|
||||
The main public documentation exists in the L{twisted.python.usage.Options}
|
||||
docstring, the L{twisted.python.usage.Completions} docstring, and the
|
||||
Options howto.
|
||||
"""
|
||||
|
||||
import getopt
|
||||
import inspect
|
||||
import itertools
|
||||
from types import MethodType
|
||||
from typing import Dict, List, Set
|
||||
|
||||
from twisted.python import reflect, usage, util
|
||||
from twisted.python.compat import ioType
|
||||
|
||||
|
||||
def shellComplete(config, cmdName, words, shellCompFile):
|
||||
"""
|
||||
Perform shell completion.
|
||||
|
||||
A completion function (shell script) is generated for the requested
|
||||
shell and written to C{shellCompFile}, typically C{stdout}. The result
|
||||
is then eval'd by the shell to produce the desired completions.
|
||||
|
||||
@type config: L{twisted.python.usage.Options}
|
||||
@param config: The L{twisted.python.usage.Options} instance to generate
|
||||
completions for.
|
||||
|
||||
@type cmdName: C{str}
|
||||
@param cmdName: The name of the command we're generating completions for.
|
||||
In the case of zsh, this is used to print an appropriate
|
||||
"#compdef $CMD" line at the top of the output. This is
|
||||
not necessary for the functionality of the system, but it
|
||||
helps in debugging, since the output we produce is properly
|
||||
formed and may be saved in a file and used as a stand-alone
|
||||
completion function.
|
||||
|
||||
@type words: C{list} of C{str}
|
||||
@param words: The raw command-line words passed to use by the shell
|
||||
stub function. argv[0] has already been stripped off.
|
||||
|
||||
@type shellCompFile: C{file}
|
||||
@param shellCompFile: The file to write completion data to.
|
||||
"""
|
||||
|
||||
# If given a file with unicode semantics, such as sys.stdout on Python 3,
|
||||
# we must get at the the underlying buffer which has bytes semantics.
|
||||
if shellCompFile and ioType(shellCompFile) == str:
|
||||
shellCompFile = shellCompFile.buffer
|
||||
|
||||
# shellName is provided for forward-compatibility. It is not used,
|
||||
# since we currently only support zsh.
|
||||
shellName, position = words[-1].split(":")
|
||||
position = int(position)
|
||||
# zsh gives the completion position ($CURRENT) as a 1-based index,
|
||||
# and argv[0] has already been stripped off, so we subtract 2 to
|
||||
# get the real 0-based index.
|
||||
position -= 2
|
||||
cWord = words[position]
|
||||
|
||||
# since the user may hit TAB at any time, we may have been called with an
|
||||
# incomplete command-line that would generate getopt errors if parsed
|
||||
# verbatim. However, we must do *some* parsing in order to determine if
|
||||
# there is a specific subcommand that we need to provide completion for.
|
||||
# So, to make the command-line more sane we work backwards from the
|
||||
# current completion position and strip off all words until we find one
|
||||
# that "looks" like a subcommand. It may in fact be the argument to a
|
||||
# normal command-line option, but that won't matter for our purposes.
|
||||
while position >= 1:
|
||||
if words[position - 1].startswith("-"):
|
||||
position -= 1
|
||||
else:
|
||||
break
|
||||
words = words[:position]
|
||||
|
||||
subCommands = getattr(config, "subCommands", None)
|
||||
if subCommands:
|
||||
# OK, this command supports sub-commands, so lets see if we have been
|
||||
# given one.
|
||||
|
||||
# If the command-line arguments are not valid then we won't be able to
|
||||
# sanely detect the sub-command, so just generate completions as if no
|
||||
# sub-command was found.
|
||||
args = None
|
||||
try:
|
||||
opts, args = getopt.getopt(words, config.shortOpt, config.longOpt)
|
||||
except getopt.error:
|
||||
pass
|
||||
|
||||
if args:
|
||||
# yes, we have a subcommand. Try to find it.
|
||||
for cmd, short, parser, doc in config.subCommands:
|
||||
if args[0] == cmd or args[0] == short:
|
||||
subOptions = parser()
|
||||
subOptions.parent = config
|
||||
|
||||
gen: ZshBuilder = ZshSubcommandBuilder(
|
||||
subOptions, config, cmdName, shellCompFile
|
||||
)
|
||||
gen.write()
|
||||
return
|
||||
|
||||
# sub-command not given, or did not match any knowns sub-command names
|
||||
genSubs = True
|
||||
if cWord.startswith("-"):
|
||||
# optimization: if the current word being completed starts
|
||||
# with a hyphen then it can't be a sub-command, so skip
|
||||
# the expensive generation of the sub-command list
|
||||
genSubs = False
|
||||
gen = ZshBuilder(config, cmdName, shellCompFile)
|
||||
gen.write(genSubs=genSubs)
|
||||
else:
|
||||
gen = ZshBuilder(config, cmdName, shellCompFile)
|
||||
gen.write()
|
||||
|
||||
|
||||
class SubcommandAction(usage.Completer):
|
||||
def _shellCode(self, optName, shellType):
|
||||
if shellType == usage._ZSH:
|
||||
return "*::subcmd:->subcmd"
|
||||
raise NotImplementedError(f"Unknown shellType {shellType!r}")
|
||||
|
||||
|
||||
class ZshBuilder:
|
||||
"""
|
||||
Constructs zsh code that will complete options for a given usage.Options
|
||||
instance, possibly including a list of subcommand names.
|
||||
|
||||
Completions for options to subcommands won't be generated because this
|
||||
class will never be used if the user is completing options for a specific
|
||||
subcommand. (See L{ZshSubcommandBuilder} below)
|
||||
|
||||
@type options: L{twisted.python.usage.Options}
|
||||
@ivar options: The L{twisted.python.usage.Options} instance defined for this
|
||||
command.
|
||||
|
||||
@type cmdName: C{str}
|
||||
@ivar cmdName: The name of the command we're generating completions for.
|
||||
|
||||
@type file: C{file}
|
||||
@ivar file: The C{file} to write the completion function to. The C{file}
|
||||
must have L{bytes} I/O semantics.
|
||||
"""
|
||||
|
||||
def __init__(self, options, cmdName, file):
|
||||
self.options = options
|
||||
self.cmdName = cmdName
|
||||
self.file = file
|
||||
|
||||
def write(self, genSubs=True):
|
||||
"""
|
||||
Generate the completion function and write it to the output file
|
||||
@return: L{None}
|
||||
|
||||
@type genSubs: C{bool}
|
||||
@param genSubs: Flag indicating whether or not completions for the list
|
||||
of subcommand should be generated. Only has an effect
|
||||
if the C{subCommands} attribute has been defined on the
|
||||
L{twisted.python.usage.Options} instance.
|
||||
"""
|
||||
if genSubs and getattr(self.options, "subCommands", None) is not None:
|
||||
gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
|
||||
gen.extraActions.insert(0, SubcommandAction())
|
||||
gen.write()
|
||||
self.file.write(b"local _zsh_subcmds_array\n_zsh_subcmds_array=(\n")
|
||||
for cmd, short, parser, desc in self.options.subCommands:
|
||||
self.file.write(
|
||||
b'"' + cmd.encode("utf-8") + b":" + desc.encode("utf-8") + b'"\n'
|
||||
)
|
||||
self.file.write(b")\n\n")
|
||||
self.file.write(b'_describe "sub-command" _zsh_subcmds_array\n')
|
||||
else:
|
||||
gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
|
||||
gen.write()
|
||||
|
||||
|
||||
class ZshSubcommandBuilder(ZshBuilder):
|
||||
"""
|
||||
Constructs zsh code that will complete options for a given usage.Options
|
||||
instance, and also for a single sub-command. This will only be used in
|
||||
the case where the user is completing options for a specific subcommand.
|
||||
|
||||
@type subOptions: L{twisted.python.usage.Options}
|
||||
@ivar subOptions: The L{twisted.python.usage.Options} instance defined for
|
||||
the sub command.
|
||||
"""
|
||||
|
||||
def __init__(self, subOptions, *args):
|
||||
self.subOptions = subOptions
|
||||
ZshBuilder.__init__(self, *args)
|
||||
|
||||
def write(self):
|
||||
"""
|
||||
Generate the completion function and write it to the output file
|
||||
@return: L{None}
|
||||
"""
|
||||
gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
|
||||
gen.extraActions.insert(0, SubcommandAction())
|
||||
gen.write()
|
||||
|
||||
gen = ZshArgumentsGenerator(self.subOptions, self.cmdName, self.file)
|
||||
gen.write()
|
||||
|
||||
|
||||
class ZshArgumentsGenerator:
|
||||
"""
|
||||
Generate a call to the zsh _arguments completion function
|
||||
based on data in a usage.Options instance
|
||||
|
||||
The first three instance variables are populated based on constructor
|
||||
arguments. The remaining non-constructor variables are populated by this
|
||||
class with data gathered from the C{Options} instance passed in, and its
|
||||
base classes.
|
||||
|
||||
@type options: L{twisted.python.usage.Options}
|
||||
@ivar options: The L{twisted.python.usage.Options} instance to generate for
|
||||
|
||||
@type cmdName: C{str}
|
||||
@ivar cmdName: The name of the command we're generating completions for.
|
||||
|
||||
@type file: C{file}
|
||||
@ivar file: The C{file} to write the completion function to. The C{file}
|
||||
must have L{bytes} I/O semantics.
|
||||
|
||||
@type descriptions: C{dict}
|
||||
@ivar descriptions: A dict mapping long option names to alternate
|
||||
descriptions. When this variable is defined, the descriptions
|
||||
contained here will override those descriptions provided in the
|
||||
optFlags and optParameters variables.
|
||||
|
||||
@type multiUse: C{list}
|
||||
@ivar multiUse: An iterable containing those long option names which may
|
||||
appear on the command line more than once. By default, options will
|
||||
only be completed one time.
|
||||
|
||||
@type mutuallyExclusive: C{list} of C{tuple}
|
||||
@ivar mutuallyExclusive: A sequence of sequences, with each sub-sequence
|
||||
containing those long option names that are mutually exclusive. That is,
|
||||
those options that cannot appear on the command line together.
|
||||
|
||||
@type optActions: C{dict}
|
||||
@ivar optActions: A dict mapping long option names to shell "actions".
|
||||
These actions define what may be completed as the argument to the
|
||||
given option, and should be given as instances of
|
||||
L{twisted.python.usage.Completer}.
|
||||
|
||||
Callables may instead be given for the values in this dict. The
|
||||
callable should accept no arguments, and return a C{Completer}
|
||||
instance used as the action.
|
||||
|
||||
@type extraActions: C{list} of C{twisted.python.usage.Completer}
|
||||
@ivar extraActions: Extra arguments are those arguments typically
|
||||
appearing at the end of the command-line, which are not associated
|
||||
with any particular named option. That is, the arguments that are
|
||||
given to the parseArgs() method of your usage.Options subclass.
|
||||
"""
|
||||
|
||||
def __init__(self, options, cmdName, file):
|
||||
self.options = options
|
||||
self.cmdName = cmdName
|
||||
self.file = file
|
||||
|
||||
self.descriptions = {}
|
||||
self.multiUse = set()
|
||||
self.mutuallyExclusive = []
|
||||
self.optActions = {}
|
||||
self.extraActions = []
|
||||
|
||||
for cls in reversed(inspect.getmro(options.__class__)):
|
||||
data = getattr(cls, "compData", None)
|
||||
if data:
|
||||
self.descriptions.update(data.descriptions)
|
||||
self.optActions.update(data.optActions)
|
||||
self.multiUse.update(data.multiUse)
|
||||
|
||||
self.mutuallyExclusive.extend(data.mutuallyExclusive)
|
||||
|
||||
# I don't see any sane way to aggregate extraActions, so just
|
||||
# take the one at the top of the MRO (nearest the `options'
|
||||
# instance).
|
||||
if data.extraActions:
|
||||
self.extraActions = data.extraActions
|
||||
|
||||
aCL = reflect.accumulateClassList
|
||||
|
||||
optFlags: List[List[object]] = []
|
||||
optParams: List[List[object]] = []
|
||||
|
||||
aCL(options.__class__, "optFlags", optFlags)
|
||||
aCL(options.__class__, "optParameters", optParams)
|
||||
|
||||
for i, optList in enumerate(optFlags):
|
||||
if len(optList) != 3:
|
||||
optFlags[i] = util.padTo(3, optList)
|
||||
|
||||
for i, optList in enumerate(optParams):
|
||||
if len(optList) != 5:
|
||||
optParams[i] = util.padTo(5, optList)
|
||||
|
||||
self.optFlags = optFlags
|
||||
self.optParams = optParams
|
||||
|
||||
paramNameToDefinition = {}
|
||||
for optList in optParams:
|
||||
paramNameToDefinition[optList[0]] = optList[1:]
|
||||
self.paramNameToDefinition = paramNameToDefinition
|
||||
|
||||
flagNameToDefinition = {}
|
||||
for optList in optFlags:
|
||||
flagNameToDefinition[optList[0]] = optList[1:]
|
||||
self.flagNameToDefinition = flagNameToDefinition
|
||||
|
||||
allOptionsNameToDefinition = {}
|
||||
allOptionsNameToDefinition.update(paramNameToDefinition)
|
||||
allOptionsNameToDefinition.update(flagNameToDefinition)
|
||||
self.allOptionsNameToDefinition = allOptionsNameToDefinition
|
||||
|
||||
self.addAdditionalOptions()
|
||||
|
||||
# makes sure none of the Completions metadata references
|
||||
# option names that don't exist. (great for catching typos)
|
||||
self.verifyZshNames()
|
||||
|
||||
self.excludes = self.makeExcludesDict()
|
||||
|
||||
def write(self):
|
||||
"""
|
||||
Write the zsh completion code to the file given to __init__
|
||||
@return: L{None}
|
||||
"""
|
||||
self.writeHeader()
|
||||
self.writeExtras()
|
||||
self.writeOptions()
|
||||
self.writeFooter()
|
||||
|
||||
def writeHeader(self):
|
||||
"""
|
||||
This is the start of the code that calls _arguments
|
||||
@return: L{None}
|
||||
"""
|
||||
self.file.write(
|
||||
b"#compdef " + self.cmdName.encode("utf-8") + b"\n\n"
|
||||
b'_arguments -s -A "-*" \\\n'
|
||||
)
|
||||
|
||||
def writeOptions(self):
|
||||
"""
|
||||
Write out zsh code for each option in this command
|
||||
@return: L{None}
|
||||
"""
|
||||
optNames = list(self.allOptionsNameToDefinition.keys())
|
||||
optNames.sort()
|
||||
for longname in optNames:
|
||||
self.writeOpt(longname)
|
||||
|
||||
def writeExtras(self):
|
||||
"""
|
||||
Write out completion information for extra arguments appearing on the
|
||||
command-line. These are extra positional arguments not associated
|
||||
with a named option. That is, the stuff that gets passed to
|
||||
Options.parseArgs().
|
||||
|
||||
@return: L{None}
|
||||
|
||||
@raise ValueError: If C{Completer} with C{repeat=True} is found and
|
||||
is not the last item in the C{extraActions} list.
|
||||
"""
|
||||
for i, action in enumerate(self.extraActions):
|
||||
# a repeatable action must be the last action in the list
|
||||
if action._repeat and i != len(self.extraActions) - 1:
|
||||
raise ValueError(
|
||||
"Completer with repeat=True must be "
|
||||
"last item in Options.extraActions"
|
||||
)
|
||||
self.file.write(escape(action._shellCode("", usage._ZSH)).encode("utf-8"))
|
||||
self.file.write(b" \\\n")
|
||||
|
||||
def writeFooter(self):
|
||||
"""
|
||||
Write the last bit of code that finishes the call to _arguments
|
||||
@return: L{None}
|
||||
"""
|
||||
self.file.write(b"&& return 0\n")
|
||||
|
||||
def verifyZshNames(self):
|
||||
"""
|
||||
Ensure that none of the option names given in the metadata are typoed
|
||||
@return: L{None}
|
||||
@raise ValueError: If unknown option names have been found.
|
||||
"""
|
||||
|
||||
def err(name):
|
||||
raise ValueError(
|
||||
'Unknown option name "%s" found while\n'
|
||||
"examining Completions instances on %s" % (name, self.options)
|
||||
)
|
||||
|
||||
for name in itertools.chain(self.descriptions, self.optActions, self.multiUse):
|
||||
if name not in self.allOptionsNameToDefinition:
|
||||
err(name)
|
||||
|
||||
for seq in self.mutuallyExclusive:
|
||||
for name in seq:
|
||||
if name not in self.allOptionsNameToDefinition:
|
||||
err(name)
|
||||
|
||||
def excludeStr(self, longname, buildShort=False):
|
||||
"""
|
||||
Generate an "exclusion string" for the given option
|
||||
|
||||
@type longname: C{str}
|
||||
@param longname: The long option name (e.g. "verbose" instead of "v")
|
||||
|
||||
@type buildShort: C{bool}
|
||||
@param buildShort: May be True to indicate we're building an excludes
|
||||
string for the short option that corresponds to the given long opt.
|
||||
|
||||
@return: The generated C{str}
|
||||
"""
|
||||
if longname in self.excludes:
|
||||
exclusions = self.excludes[longname].copy()
|
||||
else:
|
||||
exclusions = set()
|
||||
|
||||
# if longname isn't a multiUse option (can't appear on the cmd line more
|
||||
# than once), then we have to exclude the short option if we're
|
||||
# building for the long option, and vice versa.
|
||||
if longname not in self.multiUse:
|
||||
if buildShort is False:
|
||||
short = self.getShortOption(longname)
|
||||
if short is not None:
|
||||
exclusions.add(short)
|
||||
else:
|
||||
exclusions.add(longname)
|
||||
|
||||
if not exclusions:
|
||||
return ""
|
||||
|
||||
strings = []
|
||||
for optName in exclusions:
|
||||
if len(optName) == 1:
|
||||
# short option
|
||||
strings.append("-" + optName)
|
||||
else:
|
||||
strings.append("--" + optName)
|
||||
strings.sort() # need deterministic order for reliable unit-tests
|
||||
return "(%s)" % " ".join(strings)
|
||||
|
||||
def makeExcludesDict(self) -> Dict[str, Set[str]]:
|
||||
"""
|
||||
@return: A C{dict} that maps each option name appearing in
|
||||
self.mutuallyExclusive to a set of those option names that is it
|
||||
mutually exclusive with (can't appear on the cmd line with).
|
||||
"""
|
||||
|
||||
# create a mapping of long option name -> single character name
|
||||
longToShort = {}
|
||||
for optList in itertools.chain(self.optParams, self.optFlags):
|
||||
if optList[1] != None:
|
||||
longToShort[optList[0]] = optList[1]
|
||||
|
||||
excludes: Dict[str, Set[str]] = {}
|
||||
for lst in self.mutuallyExclusive:
|
||||
for i, longname in enumerate(lst):
|
||||
tmp = set(lst[:i] + lst[i + 1 :])
|
||||
for name in tmp.copy():
|
||||
if name in longToShort:
|
||||
tmp.add(longToShort[name])
|
||||
|
||||
if longname in excludes:
|
||||
excludes[longname] = excludes[longname].union(tmp)
|
||||
else:
|
||||
excludes[longname] = tmp
|
||||
return excludes
|
||||
|
||||
def writeOpt(self, longname):
|
||||
"""
|
||||
Write out the zsh code for the given argument. This is just part of the
|
||||
one big call to _arguments
|
||||
|
||||
@type longname: C{str}
|
||||
@param longname: The long option name (e.g. "verbose" instead of "v")
|
||||
|
||||
@return: L{None}
|
||||
"""
|
||||
if longname in self.flagNameToDefinition:
|
||||
# It's a flag option. Not one that takes a parameter.
|
||||
longField = "--%s" % longname
|
||||
else:
|
||||
longField = "--%s=" % longname
|
||||
|
||||
short = self.getShortOption(longname)
|
||||
if short != None:
|
||||
shortField = "-" + short
|
||||
else:
|
||||
shortField = ""
|
||||
|
||||
descr = self.getDescription(longname)
|
||||
descriptionField = descr.replace("[", r"\[")
|
||||
descriptionField = descriptionField.replace("]", r"\]")
|
||||
descriptionField = "[%s]" % descriptionField
|
||||
|
||||
actionField = self.getAction(longname)
|
||||
if longname in self.multiUse:
|
||||
multiField = "*"
|
||||
else:
|
||||
multiField = ""
|
||||
|
||||
longExclusionsField = self.excludeStr(longname)
|
||||
|
||||
if short:
|
||||
# we have to write an extra line for the short option if we have one
|
||||
shortExclusionsField = self.excludeStr(longname, buildShort=True)
|
||||
self.file.write(
|
||||
escape(
|
||||
"%s%s%s%s%s"
|
||||
% (
|
||||
shortExclusionsField,
|
||||
multiField,
|
||||
shortField,
|
||||
descriptionField,
|
||||
actionField,
|
||||
)
|
||||
).encode("utf-8")
|
||||
)
|
||||
self.file.write(b" \\\n")
|
||||
|
||||
self.file.write(
|
||||
escape(
|
||||
"%s%s%s%s%s"
|
||||
% (
|
||||
longExclusionsField,
|
||||
multiField,
|
||||
longField,
|
||||
descriptionField,
|
||||
actionField,
|
||||
)
|
||||
).encode("utf-8")
|
||||
)
|
||||
self.file.write(b" \\\n")
|
||||
|
||||
def getAction(self, longname):
|
||||
"""
|
||||
Return a zsh "action" string for the given argument
|
||||
@return: C{str}
|
||||
"""
|
||||
if longname in self.optActions:
|
||||
if callable(self.optActions[longname]):
|
||||
action = self.optActions[longname]()
|
||||
else:
|
||||
action = self.optActions[longname]
|
||||
return action._shellCode(longname, usage._ZSH)
|
||||
|
||||
if longname in self.paramNameToDefinition:
|
||||
return f":{longname}:_files"
|
||||
return ""
|
||||
|
||||
def getDescription(self, longname):
|
||||
"""
|
||||
Return the description to be used for this argument
|
||||
@return: C{str}
|
||||
"""
|
||||
# check if we have an alternate descr for this arg, and if so use it
|
||||
if longname in self.descriptions:
|
||||
return self.descriptions[longname]
|
||||
|
||||
# otherwise we have to get it from the optFlags or optParams
|
||||
try:
|
||||
descr = self.flagNameToDefinition[longname][1]
|
||||
except KeyError:
|
||||
try:
|
||||
descr = self.paramNameToDefinition[longname][2]
|
||||
except KeyError:
|
||||
descr = None
|
||||
|
||||
if descr is not None:
|
||||
return descr
|
||||
|
||||
# let's try to get it from the opt_foo method doc string if there is one
|
||||
longMangled = longname.replace("-", "_") # this is what t.p.usage does
|
||||
obj = getattr(self.options, "opt_%s" % longMangled, None)
|
||||
if obj is not None:
|
||||
descr = descrFromDoc(obj)
|
||||
# On Python3.13 we have an empty string instead of None,
|
||||
# for missing description.
|
||||
if descr:
|
||||
return descr
|
||||
|
||||
return longname # we really ought to have a good description to use
|
||||
|
||||
def getShortOption(self, longname):
|
||||
"""
|
||||
Return the short option letter or None
|
||||
@return: C{str} or L{None}
|
||||
"""
|
||||
optList = self.allOptionsNameToDefinition[longname]
|
||||
return optList[0] or None
|
||||
|
||||
def addAdditionalOptions(self) -> None:
|
||||
"""
|
||||
Add additional options to the optFlags and optParams lists.
|
||||
These will be defined by 'opt_foo' methods of the Options subclass
|
||||
@return: L{None}
|
||||
"""
|
||||
methodsDict: Dict[str, MethodType] = {}
|
||||
reflect.accumulateMethods(self.options, methodsDict, "opt_")
|
||||
methodToShort = {}
|
||||
for name in methodsDict.copy():
|
||||
if len(name) == 1:
|
||||
methodToShort[methodsDict[name]] = name
|
||||
del methodsDict[name]
|
||||
|
||||
for methodName, methodObj in methodsDict.items():
|
||||
longname = methodName.replace("_", "-") # t.p.usage does this
|
||||
# if this option is already defined by the optFlags or
|
||||
# optParameters then we don't want to override that data
|
||||
if longname in self.allOptionsNameToDefinition:
|
||||
continue
|
||||
|
||||
descr = self.getDescription(longname)
|
||||
|
||||
short = None
|
||||
if methodObj in methodToShort:
|
||||
short = methodToShort[methodObj]
|
||||
|
||||
reqArgs = methodObj.__func__.__code__.co_argcount
|
||||
if reqArgs == 2:
|
||||
self.optParams.append([longname, short, None, descr])
|
||||
self.paramNameToDefinition[longname] = [short, None, descr]
|
||||
self.allOptionsNameToDefinition[longname] = [short, None, descr]
|
||||
else:
|
||||
# reqArgs must equal 1. self.options would have failed
|
||||
# to instantiate if it had opt_ methods with bad signatures.
|
||||
self.optFlags.append([longname, short, descr])
|
||||
self.flagNameToDefinition[longname] = [short, descr]
|
||||
self.allOptionsNameToDefinition[longname] = [short, None, descr]
|
||||
|
||||
|
||||
def descrFromDoc(obj):
|
||||
"""
|
||||
Generate an appropriate description from docstring of the given object
|
||||
"""
|
||||
if obj.__doc__ is None or obj.__doc__.isspace():
|
||||
return None
|
||||
|
||||
lines = [x.strip() for x in obj.__doc__.split("\n") if x and not x.isspace()]
|
||||
return " ".join(lines)
|
||||
|
||||
|
||||
def escape(x):
|
||||
"""
|
||||
Shell escape the given string
|
||||
|
||||
Implementation borrowed from now-deprecated commands.mkarg() in the stdlib
|
||||
"""
|
||||
if "'" not in x:
|
||||
return "'" + x + "'"
|
||||
s = '"'
|
||||
for c in x:
|
||||
if c in '\\$"`':
|
||||
s = s + "\\"
|
||||
s = s + c
|
||||
s = s + '"'
|
||||
return s
|
||||
@@ -0,0 +1,304 @@
|
||||
# -*- test-case-name: twisted.python.test.test_textattributes -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
This module provides some common functionality for the manipulation of
|
||||
formatting states.
|
||||
|
||||
Defining the mechanism by which text containing character attributes is
|
||||
constructed begins by subclassing L{CharacterAttributesMixin}.
|
||||
|
||||
Defining how a single formatting state is to be serialized begins by
|
||||
subclassing L{_FormattingStateMixin}.
|
||||
|
||||
Serializing a formatting structure is done with L{flatten}.
|
||||
|
||||
@see: L{twisted.conch.insults.helper._FormattingState}
|
||||
@see: L{twisted.conch.insults.text._CharacterAttributes}
|
||||
@see: L{twisted.words.protocols.irc._FormattingState}
|
||||
@see: L{twisted.words.protocols.irc._CharacterAttributes}
|
||||
"""
|
||||
|
||||
|
||||
from typing import ClassVar, List, Sequence
|
||||
|
||||
from twisted.python.util import FancyEqMixin
|
||||
|
||||
|
||||
class _Attribute(FancyEqMixin):
|
||||
"""
|
||||
A text attribute.
|
||||
|
||||
Indexing a text attribute with a C{str} or another text attribute adds that
|
||||
object as a child, indexing with a C{list} or C{tuple} adds the elements as
|
||||
children; in either case C{self} is returned.
|
||||
|
||||
@type children: C{list}
|
||||
@ivar children: Child attributes.
|
||||
"""
|
||||
|
||||
compareAttributes: ClassVar[Sequence[str]] = ("children",)
|
||||
|
||||
def __init__(self):
|
||||
self.children = []
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{type(self).__name__} {vars(self)!r}>"
|
||||
|
||||
def __getitem__(self, item):
|
||||
assert isinstance(item, (list, tuple, _Attribute, str))
|
||||
if isinstance(item, (list, tuple)):
|
||||
self.children.extend(item)
|
||||
else:
|
||||
self.children.append(item)
|
||||
return self
|
||||
|
||||
def serialize(self, write, attrs=None, attributeRenderer="toVT102"):
|
||||
"""
|
||||
Serialize the text attribute and its children.
|
||||
|
||||
@param write: C{callable}, taking one C{str} argument, called to output
|
||||
a single text attribute at a time.
|
||||
|
||||
@param attrs: A formatting state instance used to determine how to
|
||||
serialize the attribute children.
|
||||
|
||||
@type attributeRenderer: C{str}
|
||||
@param attributeRenderer: Name of the method on I{attrs} that should be
|
||||
called to render the attributes during serialization. Defaults to
|
||||
C{'toVT102'}.
|
||||
"""
|
||||
if attrs is None:
|
||||
attrs = DefaultFormattingState()
|
||||
for ch in self.children:
|
||||
if isinstance(ch, _Attribute):
|
||||
ch.serialize(write, attrs.copy(), attributeRenderer)
|
||||
else:
|
||||
renderMeth = getattr(attrs, attributeRenderer)
|
||||
write(renderMeth())
|
||||
write(ch)
|
||||
|
||||
|
||||
class _NormalAttr(_Attribute):
|
||||
"""
|
||||
A text attribute for normal text.
|
||||
"""
|
||||
|
||||
def serialize(self, write, attrs, attributeRenderer):
|
||||
attrs.__init__()
|
||||
_Attribute.serialize(self, write, attrs, attributeRenderer)
|
||||
|
||||
|
||||
class _OtherAttr(_Attribute):
|
||||
"""
|
||||
A text attribute for text with formatting attributes.
|
||||
|
||||
The unary minus operator returns the inverse of this attribute, where that
|
||||
makes sense.
|
||||
|
||||
@type attrname: C{str}
|
||||
@ivar attrname: Text attribute name.
|
||||
|
||||
@ivar attrvalue: Text attribute value.
|
||||
"""
|
||||
|
||||
compareAttributes = ("attrname", "attrvalue", "children")
|
||||
|
||||
def __init__(self, attrname, attrvalue):
|
||||
_Attribute.__init__(self)
|
||||
self.attrname = attrname
|
||||
self.attrvalue = attrvalue
|
||||
|
||||
def __neg__(self):
|
||||
result = _OtherAttr(self.attrname, not self.attrvalue)
|
||||
result.children.extend(self.children)
|
||||
return result
|
||||
|
||||
def serialize(self, write, attrs, attributeRenderer):
|
||||
attrs = attrs._withAttribute(self.attrname, self.attrvalue)
|
||||
_Attribute.serialize(self, write, attrs, attributeRenderer)
|
||||
|
||||
|
||||
class _ColorAttr(_Attribute):
|
||||
"""
|
||||
Generic color attribute.
|
||||
|
||||
@param color: Color value.
|
||||
|
||||
@param ground: Foreground or background attribute name.
|
||||
"""
|
||||
|
||||
compareAttributes = ("color", "ground", "children")
|
||||
|
||||
def __init__(self, color, ground):
|
||||
_Attribute.__init__(self)
|
||||
self.color = color
|
||||
self.ground = ground
|
||||
|
||||
def serialize(self, write, attrs, attributeRenderer):
|
||||
attrs = attrs._withAttribute(self.ground, self.color)
|
||||
_Attribute.serialize(self, write, attrs, attributeRenderer)
|
||||
|
||||
|
||||
class _ForegroundColorAttr(_ColorAttr):
|
||||
"""
|
||||
Foreground color attribute.
|
||||
"""
|
||||
|
||||
def __init__(self, color):
|
||||
_ColorAttr.__init__(self, color, "foreground")
|
||||
|
||||
|
||||
class _BackgroundColorAttr(_ColorAttr):
|
||||
"""
|
||||
Background color attribute.
|
||||
"""
|
||||
|
||||
def __init__(self, color):
|
||||
_ColorAttr.__init__(self, color, "background")
|
||||
|
||||
|
||||
class _ColorAttribute:
|
||||
"""
|
||||
A color text attribute.
|
||||
|
||||
Attribute access results in a color value lookup, by name, in
|
||||
I{_ColorAttribute.attrs}.
|
||||
|
||||
@type ground: L{_ColorAttr}
|
||||
@param ground: Foreground or background color attribute to look color names
|
||||
up from.
|
||||
|
||||
@param attrs: Mapping of color names to color values.
|
||||
@type attrs: Dict like object.
|
||||
"""
|
||||
|
||||
def __init__(self, ground, attrs):
|
||||
self.ground = ground
|
||||
self.attrs = attrs
|
||||
|
||||
def __getattr__(self, name):
|
||||
try:
|
||||
return self.ground(self.attrs[name])
|
||||
except KeyError:
|
||||
raise AttributeError(name)
|
||||
|
||||
|
||||
class CharacterAttributesMixin:
|
||||
"""
|
||||
Mixin for character attributes that implements a C{__getattr__} method
|
||||
returning a new C{_NormalAttr} instance when attempting to access
|
||||
a C{'normal'} attribute; otherwise a new C{_OtherAttr} instance is returned
|
||||
for names that appears in the C{'attrs'} attribute.
|
||||
"""
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name == "normal":
|
||||
return _NormalAttr()
|
||||
if name in self.attrs:
|
||||
return _OtherAttr(name, True)
|
||||
raise AttributeError(name)
|
||||
|
||||
|
||||
class DefaultFormattingState(FancyEqMixin):
|
||||
"""
|
||||
A character attribute that does nothing, thus applying no attributes to
|
||||
text.
|
||||
"""
|
||||
|
||||
compareAttributes: ClassVar[Sequence[str]] = ("_dummy",)
|
||||
|
||||
_dummy = 0
|
||||
|
||||
def copy(self):
|
||||
"""
|
||||
Make a copy of this formatting state.
|
||||
|
||||
@return: A formatting state instance.
|
||||
"""
|
||||
return type(self)()
|
||||
|
||||
def _withAttribute(self, name, value):
|
||||
"""
|
||||
Add a character attribute to a copy of this formatting state.
|
||||
|
||||
@param name: Attribute name to be added to formatting state.
|
||||
|
||||
@param value: Attribute value.
|
||||
|
||||
@return: A formatting state instance with the new attribute.
|
||||
"""
|
||||
return self.copy()
|
||||
|
||||
def toVT102(self):
|
||||
"""
|
||||
Emit a VT102 control sequence that will set up all the attributes this
|
||||
formatting state has set.
|
||||
|
||||
@return: A string containing VT102 control sequences that mimic this
|
||||
formatting state.
|
||||
"""
|
||||
return ""
|
||||
|
||||
|
||||
class _FormattingStateMixin(DefaultFormattingState):
|
||||
"""
|
||||
Mixin for the formatting state/attributes of a single character.
|
||||
"""
|
||||
|
||||
def copy(self):
|
||||
c = DefaultFormattingState.copy(self)
|
||||
c.__dict__.update(vars(self))
|
||||
return c
|
||||
|
||||
def _withAttribute(self, name, value):
|
||||
if getattr(self, name) != value:
|
||||
attr = self.copy()
|
||||
attr._subtracting = not value
|
||||
setattr(attr, name, value)
|
||||
return attr
|
||||
else:
|
||||
return self.copy()
|
||||
|
||||
|
||||
def flatten(output, attrs, attributeRenderer="toVT102"):
|
||||
"""
|
||||
Serialize a sequence of characters with attribute information
|
||||
|
||||
The resulting string can be interpreted by compatible software so that the
|
||||
contained characters are displayed and, for those attributes which are
|
||||
supported by the software, the attributes expressed. The exact result of
|
||||
the serialization depends on the behavior of the method specified by
|
||||
I{attributeRenderer}.
|
||||
|
||||
For example, if your terminal is VT102 compatible, you might run
|
||||
this for a colorful variation on the \"hello world\" theme::
|
||||
|
||||
from twisted.conch.insults.text import flatten, attributes as A
|
||||
from twisted.conch.insults.helper import CharacterAttribute
|
||||
print(flatten(
|
||||
A.normal[A.bold[A.fg.red['He'], A.fg.green['ll'], A.fg.magenta['o'], ' ',
|
||||
A.fg.yellow['Wo'], A.fg.blue['rl'], A.fg.cyan['d!']]],
|
||||
CharacterAttribute()))
|
||||
|
||||
@param output: Object returned by accessing attributes of the
|
||||
module-level attributes object.
|
||||
|
||||
@param attrs: A formatting state instance used to determine how to
|
||||
serialize C{output}.
|
||||
|
||||
@type attributeRenderer: C{str}
|
||||
@param attributeRenderer: Name of the method on I{attrs} that should be
|
||||
called to render the attributes during serialization. Defaults to
|
||||
C{'toVT102'}.
|
||||
|
||||
@return: A string expressing the text and display attributes specified by
|
||||
L{output}.
|
||||
"""
|
||||
flattened: List[str] = []
|
||||
output.serialize(flattened.append, attrs, attributeRenderer)
|
||||
return "".join(flattened)
|
||||
|
||||
|
||||
__all__ = ["flatten", "DefaultFormattingState", "CharacterAttributesMixin"]
|
||||
105
.venv/lib/python3.12/site-packages/twisted/python/_tzhelper.py
Normal file
105
.venv/lib/python3.12/site-packages/twisted/python/_tzhelper.py
Normal file
@@ -0,0 +1,105 @@
|
||||
# -*- test-case-name: twisted.python.test.test_tzhelper -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Time zone utilities.
|
||||
"""
|
||||
|
||||
from datetime import (
|
||||
datetime as DateTime,
|
||||
timedelta as TimeDelta,
|
||||
timezone,
|
||||
tzinfo as TZInfo,
|
||||
)
|
||||
from typing import Optional
|
||||
|
||||
__all__ = [
|
||||
"FixedOffsetTimeZone",
|
||||
"UTC",
|
||||
]
|
||||
|
||||
|
||||
class FixedOffsetTimeZone(TZInfo):
|
||||
"""
|
||||
Represents a fixed timezone offset (without daylight saving time).
|
||||
|
||||
@ivar name: A L{str} giving the name of this timezone; the name just
|
||||
includes how much time this offset represents.
|
||||
|
||||
@ivar offset: A L{TimeDelta} giving the amount of time this timezone is
|
||||
offset.
|
||||
"""
|
||||
|
||||
def __init__(self, offset: TimeDelta, name: Optional[str] = None) -> None:
|
||||
"""
|
||||
Construct a L{FixedOffsetTimeZone} with a fixed offset.
|
||||
|
||||
@param offset: a delta representing the offset from UTC.
|
||||
@param name: A name to be given for this timezone.
|
||||
"""
|
||||
self.offset = offset
|
||||
self.name = name
|
||||
|
||||
@classmethod
|
||||
def fromSignHoursMinutes(
|
||||
cls, sign: str, hours: int, minutes: int
|
||||
) -> "FixedOffsetTimeZone":
|
||||
"""
|
||||
Construct a L{FixedOffsetTimeZone} from an offset described by sign
|
||||
('+' or '-'), hours, and minutes.
|
||||
|
||||
@note: For protocol compatibility with AMP, this method never uses 'Z'
|
||||
|
||||
@param sign: A string describing the positive or negative-ness of the
|
||||
offset.
|
||||
@param hours: The number of hours in the offset.
|
||||
@param minutes: The number of minutes in the offset
|
||||
|
||||
@return: A time zone with the given offset, and a name describing the
|
||||
offset.
|
||||
"""
|
||||
name = "%s%02i:%02i" % (sign, hours, minutes)
|
||||
if sign == "-":
|
||||
hours = -hours
|
||||
minutes = -minutes
|
||||
elif sign != "+":
|
||||
raise ValueError(f"Invalid sign for timezone {sign!r}")
|
||||
return cls(TimeDelta(hours=hours, minutes=minutes), name)
|
||||
|
||||
@classmethod
|
||||
def fromLocalTimeStamp(cls, timeStamp: float) -> "FixedOffsetTimeZone":
|
||||
"""
|
||||
Create a time zone with a fixed offset corresponding to a time stamp in
|
||||
the system's locally configured time zone.
|
||||
"""
|
||||
offset = DateTime.fromtimestamp(timeStamp) - DateTime.fromtimestamp(
|
||||
timeStamp, timezone.utc
|
||||
).replace(tzinfo=None)
|
||||
return cls(offset)
|
||||
|
||||
def utcoffset(self, dt: Optional[DateTime]) -> TimeDelta:
|
||||
"""
|
||||
Return the given timezone's offset from UTC.
|
||||
"""
|
||||
return self.offset
|
||||
|
||||
def dst(self, dt: Optional[DateTime]) -> TimeDelta:
|
||||
"""
|
||||
Return a zero L{TimeDelta} for the daylight saving time
|
||||
offset, since there is never one.
|
||||
"""
|
||||
return TimeDelta(0)
|
||||
|
||||
def tzname(self, dt: Optional[DateTime]) -> str:
|
||||
"""
|
||||
Return a string describing this timezone.
|
||||
"""
|
||||
if self.name is not None:
|
||||
return self.name
|
||||
# XXX this is wrong; the tests are
|
||||
dt = DateTime.fromtimestamp(0, self)
|
||||
return dt.strftime("UTC%z")
|
||||
|
||||
|
||||
UTC = FixedOffsetTimeZone.fromSignHoursMinutes("+", 0, 0)
|
||||
11
.venv/lib/python3.12/site-packages/twisted/python/_url.py
Normal file
11
.venv/lib/python3.12/site-packages/twisted/python/_url.py
Normal file
@@ -0,0 +1,11 @@
|
||||
# -*- test-case-name: twisted.python.test.test_url -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
URL parsing, construction and rendering.
|
||||
"""
|
||||
|
||||
from hyperlink._url import URL
|
||||
|
||||
__all__ = ["URL"]
|
||||
619
.venv/lib/python3.12/site-packages/twisted/python/compat.py
Normal file
619
.venv/lib/python3.12/site-packages/twisted/python/compat.py
Normal file
@@ -0,0 +1,619 @@
|
||||
# -*- test-case-name: twisted.test.test_compat -*-
|
||||
#
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
|
||||
"""
|
||||
Compatibility module to provide backwards compatibility for useful Python
|
||||
features.
|
||||
|
||||
This is mainly for use of internal Twisted code. We encourage you to use
|
||||
the latest version of Python directly from your code, if possible.
|
||||
|
||||
@var unicode: The type of Unicode strings, C{unicode} on Python 2 and C{str}
|
||||
on Python 3.
|
||||
|
||||
@var NativeStringIO: An in-memory file-like object that operates on the native
|
||||
string type (bytes in Python 2, unicode in Python 3).
|
||||
|
||||
@var urllib_parse: a URL-parsing module (urlparse on Python 2, urllib.parse on
|
||||
Python 3)
|
||||
"""
|
||||
|
||||
|
||||
import inspect
|
||||
import os
|
||||
import platform
|
||||
import socket
|
||||
import urllib.parse as urllib_parse
|
||||
from collections.abc import Sequence
|
||||
from functools import reduce
|
||||
from html import escape
|
||||
from http import cookiejar as cookielib
|
||||
from io import IOBase, StringIO as NativeStringIO, TextIOBase
|
||||
from sys import intern
|
||||
from types import FrameType, MethodType as _MethodType
|
||||
from typing import Any, AnyStr, cast
|
||||
from urllib.parse import quote as urlquote, unquote as urlunquote
|
||||
|
||||
from incremental import Version
|
||||
|
||||
from twisted.python.deprecate import deprecated, deprecatedModuleAttribute
|
||||
|
||||
if platform.python_implementation() == "PyPy":
|
||||
_PYPY = True
|
||||
else:
|
||||
_PYPY = False
|
||||
|
||||
FileType = IOBase
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for io.IOBase",
|
||||
__name__,
|
||||
"FileType",
|
||||
)
|
||||
|
||||
frozenset = frozenset
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for frozenset builtin type",
|
||||
__name__,
|
||||
"frozenset",
|
||||
)
|
||||
|
||||
InstanceType = object
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Old-style classes don't exist in Python 3",
|
||||
__name__,
|
||||
"InstanceType",
|
||||
)
|
||||
|
||||
izip = zip
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for zip() builtin",
|
||||
__name__,
|
||||
"izip",
|
||||
)
|
||||
|
||||
long = int
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for int builtin type",
|
||||
__name__,
|
||||
"long",
|
||||
)
|
||||
|
||||
range = range
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for range() builtin",
|
||||
__name__,
|
||||
"range",
|
||||
)
|
||||
|
||||
raw_input = input
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for input() builtin",
|
||||
__name__,
|
||||
"raw_input",
|
||||
)
|
||||
|
||||
set = set
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for set builtin type",
|
||||
__name__,
|
||||
"set",
|
||||
)
|
||||
|
||||
StringType = str
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for str builtin type",
|
||||
__name__,
|
||||
"StringType",
|
||||
)
|
||||
|
||||
unichr = chr
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for chr() builtin",
|
||||
__name__,
|
||||
"unichr",
|
||||
)
|
||||
|
||||
unicode = str
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for str builtin type",
|
||||
__name__,
|
||||
"unicode",
|
||||
)
|
||||
|
||||
xrange = range
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Obsolete alias for range() builtin",
|
||||
__name__,
|
||||
"xrange",
|
||||
)
|
||||
|
||||
|
||||
@deprecated(Version("Twisted", 21, 2, 0), replacement="d.items()")
|
||||
def iteritems(d):
|
||||
"""
|
||||
Return an iterable of the items of C{d}.
|
||||
|
||||
@type d: L{dict}
|
||||
@rtype: iterable
|
||||
"""
|
||||
return d.items()
|
||||
|
||||
|
||||
@deprecated(Version("Twisted", 21, 2, 0), replacement="d.values()")
|
||||
def itervalues(d):
|
||||
"""
|
||||
Return an iterable of the values of C{d}.
|
||||
|
||||
@type d: L{dict}
|
||||
@rtype: iterable
|
||||
"""
|
||||
return d.values()
|
||||
|
||||
|
||||
@deprecated(Version("Twisted", 21, 2, 0), replacement="list(d.items())")
|
||||
def items(d):
|
||||
"""
|
||||
Return a list of the items of C{d}.
|
||||
|
||||
@type d: L{dict}
|
||||
@rtype: L{list}
|
||||
"""
|
||||
return list(d.items())
|
||||
|
||||
|
||||
def currentframe(n: int = 0) -> FrameType:
|
||||
"""
|
||||
In Python 3, L{inspect.currentframe} does not take a stack-level argument.
|
||||
Restore that functionality from Python 2 so we don't have to re-implement
|
||||
the C{f_back}-walking loop in places where it's called.
|
||||
|
||||
@param n: The number of stack levels above the caller to walk.
|
||||
|
||||
@return: a frame, n levels up the stack from the caller.
|
||||
"""
|
||||
f = inspect.currentframe()
|
||||
for x in range(n + 1):
|
||||
assert f is not None
|
||||
f = f.f_back
|
||||
assert f is not None
|
||||
return f
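As an illustration of currentframe() above, here is a hypothetical helper (not part of this module) that reports the module name of its caller:

import inspect

def callerModuleName() -> str:
    # currentframe(1) walks one level past this helper, to whoever called it.
    caller = currentframe(1)
    module = inspect.getmodule(caller)
    return module.__name__ if module is not None else "<unknown>"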
|
||||
|
||||
|
||||
def execfile(filename, globals, locals=None):
|
||||
"""
|
||||
Execute a Python script in the given namespaces.
|
||||
|
||||
Similar to the execfile builtin, but a namespace is mandatory, partly
|
||||
because that's a sensible thing to require, and because otherwise we'd
|
||||
have to do some frame hacking.
|
||||
|
||||
This is a compatibility implementation for Python 3 porting, to avoid the
|
||||
use of the deprecated builtin C{execfile} function.
|
||||
"""
|
||||
if locals is None:
|
||||
locals = globals
|
||||
with open(filename, "rb") as fin:
|
||||
source = fin.read()
|
||||
code = compile(source, filename, "exec")
|
||||
exec(code, globals, locals)
|
||||
|
||||
|
||||
# type note: Can't find a Comparable type, despite
|
||||
# https://github.com/python/typing/issues/59
|
||||
def cmp(a: object, b: object) -> int:
|
||||
"""
|
||||
Compare two objects.
|
||||
|
||||
Returns a negative number if C{a < b}, zero if they are equal, and a
|
||||
positive number if C{a > b}.
|
||||
"""
|
||||
if a < b: # type: ignore[operator]
|
||||
return -1
|
||||
elif a == b:
|
||||
return 0
|
||||
else:
|
||||
return 1
|
||||
|
||||
|
||||
def comparable(klass):
|
||||
"""
|
||||
Class decorator that ensures support for the special C{__cmp__} method.
|
||||
|
||||
C{__eq__}, C{__lt__}, etc. methods are added to the class, relying on
|
||||
C{__cmp__} to implement their comparisons.
|
||||
"""
|
||||
|
||||
def __eq__(self: Any, other: object) -> bool:
|
||||
c = cast(bool, self.__cmp__(other))
|
||||
if c is NotImplemented:
|
||||
return c
|
||||
return c == 0
|
||||
|
||||
def __ne__(self: Any, other: object) -> bool:
|
||||
c = cast(bool, self.__cmp__(other))
|
||||
if c is NotImplemented:
|
||||
return c
|
||||
return c != 0
|
||||
|
||||
def __lt__(self: Any, other: object) -> bool:
|
||||
c = cast(bool, self.__cmp__(other))
|
||||
if c is NotImplemented:
|
||||
return c
|
||||
return c < 0
|
||||
|
||||
def __le__(self: Any, other: object) -> bool:
|
||||
c = cast(bool, self.__cmp__(other))
|
||||
if c is NotImplemented:
|
||||
return c
|
||||
return c <= 0
|
||||
|
||||
def __gt__(self: Any, other: object) -> bool:
|
||||
c = cast(bool, self.__cmp__(other))
|
||||
if c is NotImplemented:
|
||||
return c
|
||||
return c > 0
|
||||
|
||||
def __ge__(self: Any, other: object) -> bool:
|
||||
c = cast(bool, self.__cmp__(other))
|
||||
if c is NotImplemented:
|
||||
return c
|
||||
return c >= 0
|
||||
|
||||
klass.__lt__ = __lt__
|
||||
klass.__gt__ = __gt__
|
||||
klass.__le__ = __le__
|
||||
klass.__ge__ = __ge__
|
||||
klass.__eq__ = __eq__
|
||||
klass.__ne__ = __ne__
|
||||
return klass
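A minimal sketch of the decorator in use; Rank is a made-up class that defines only __cmp__ and gains the rich comparison methods from @comparable:

@comparable
class Rank:
    def __init__(self, value):
        self.value = value

    def __cmp__(self, other):
        if not isinstance(other, Rank):
            return NotImplemented
        return cmp(self.value, other.value)

assert Rank(1) < Rank(2)
assert Rank(3) == Rank(3)
assert Rank(4) >= Rank(2)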
|
||||
|
||||
|
||||
def ioType(fileIshObject, default=str):
|
||||
"""
|
||||
Determine the type which will be returned from the given file object's
|
||||
read() and accepted by its write() method as an argument.
|
||||
|
||||
In other words, determine whether the given file is 'opened in text mode'.
|
||||
|
||||
@param fileIshObject: Any object, but ideally one which resembles a file.
|
||||
@type fileIshObject: L{object}
|
||||
|
||||
@param default: A default value to return when the type of C{fileIshObject}
|
||||
cannot be determined.
|
||||
@type default: L{type}
|
||||
|
||||
@return: There are 3 possible return values:
|
||||
|
||||
1. L{str}, if the file is unambiguously opened in text mode.
|
||||
|
||||
2. L{bytes}, if the file is unambiguously opened in binary mode.
|
||||
|
||||
3. The C{default} parameter, if the given type is not understood.
|
||||
|
||||
@rtype: L{type}
|
||||
"""
|
||||
if isinstance(fileIshObject, TextIOBase):
|
||||
# If it's for text I/O, then it's for text I/O.
|
||||
return str
|
||||
if isinstance(fileIshObject, IOBase):
|
||||
# If it's for I/O but it's _not_ for text I/O, it's for bytes I/O.
|
||||
return bytes
|
||||
encoding = getattr(fileIshObject, "encoding", None)
|
||||
import codecs
|
||||
|
||||
if isinstance(fileIshObject, (codecs.StreamReader, codecs.StreamWriter)):
|
||||
# On StreamReaderWriter, the 'encoding' attribute has special meaning;
|
||||
# it is unambiguously text.
|
||||
if encoding:
|
||||
return str
|
||||
else:
|
||||
return bytes
|
||||
return default
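For example (a sketch, assuming ioType() is in scope), in-memory text and binary files are classified as expected and unknown objects fall back to the default:

import io

assert ioType(io.StringIO()) is str           # text-mode I/O
assert ioType(io.BytesIO()) is bytes          # binary-mode I/O
assert ioType(object(), default=str) is str   # unrecognized object -> default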
|
||||
|
||||
|
||||
def nativeString(s: AnyStr) -> str:
|
||||
"""
|
||||
Convert C{bytes} or C{str} to C{str} type, using ASCII encoding if
|
||||
conversion is necessary.
|
||||
|
||||
@raise UnicodeError: The input string is not ASCII encodable/decodable.
|
||||
@raise TypeError: The input is neither C{bytes} nor C{str}.
|
||||
"""
|
||||
if not isinstance(s, (bytes, str)):
|
||||
raise TypeError("%r is neither bytes nor str" % s)
|
||||
if isinstance(s, bytes):
|
||||
return s.decode("ascii")
|
||||
else:
|
||||
# Ensure we're limited to ASCII subset:
|
||||
s.encode("ascii")
|
||||
return s
|
||||
|
||||
|
||||
def _matchingString(constantString, inputString):
|
||||
"""
|
||||
Some functions, such as C{os.path.join}, operate on string arguments which
|
||||
may be bytes or text, and wish to return a value of the same type. In
|
||||
those cases you may wish to have a string constant (in the case of
|
||||
C{os.path.join}, that constant would be C{os.path.sep}) involved in the
|
||||
parsing or processing, that must be of a matching type in order to use
|
||||
string operations on it. L{_matchingString} will take a constant string
|
||||
(either L{bytes} or L{str}) and convert it to the same type as the
|
||||
input string. C{constantString} should contain only characters from ASCII;
|
||||
to ensure this, it will be encoded or decoded regardless.
|
||||
|
||||
@param constantString: A string literal used in processing.
|
||||
@type constantString: L{str} or L{bytes}
|
||||
|
||||
@param inputString: A byte string or text string provided by the user.
|
||||
@type inputString: L{str} or L{bytes}
|
||||
|
||||
@return: C{constantString} converted into the same type as C{inputString}
|
||||
@rtype: the type of C{inputString}
|
||||
"""
|
||||
if isinstance(constantString, bytes):
|
||||
otherType = constantString.decode("ascii")
|
||||
else:
|
||||
otherType = constantString.encode("ascii")
|
||||
if type(constantString) == type(inputString):
|
||||
return constantString
|
||||
else:
|
||||
return otherType
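A quick sketch of _matchingString() with mixed argument types (the values are illustrative):

assert _matchingString("/", b"some/bytes/path") == b"/"  # bytes input -> bytes constant
assert _matchingString(b"/", "some/text/path") == "/"    # text input -> text constant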
|
||||
|
||||
|
||||
@deprecated(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
replacement="raise exception.with_traceback(traceback)",
|
||||
)
|
||||
def reraise(exception, traceback):
|
||||
"""
|
||||
Re-raise an exception, with an optional traceback.
|
||||
|
||||
Re-raised exceptions will be mutated, with their C{__traceback__} attribute
|
||||
being set.
|
||||
|
||||
@param exception: The exception instance.
|
||||
@param traceback: The traceback to use, or L{None} indicating a new
|
||||
traceback.
|
||||
"""
|
||||
raise exception.with_traceback(traceback)
|
||||
|
||||
|
||||
def iterbytes(originalBytes):
|
||||
"""
|
||||
Return an iterable wrapper for a C{bytes} object that provides the behavior
|
||||
of iterating over C{bytes} on Python 2.
|
||||
|
||||
In particular, the results of iteration are the individual bytes (rather
|
||||
than integers as on Python 3).
|
||||
|
||||
@param originalBytes: A C{bytes} object that will be wrapped.
|
||||
"""
|
||||
for i in range(len(originalBytes)):
|
||||
yield originalBytes[i : i + 1]
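For example, iterbytes() yields length-one bytes objects, unlike plain iteration on Python 3, which yields integers:

assert list(iterbytes(b"abc")) == [b"a", b"b", b"c"]
assert list(b"abc") == [97, 98, 99]  # plain iteration yields ints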
|
||||
|
||||
|
||||
@deprecated(Version("Twisted", 21, 2, 0), replacement="b'%d'")
|
||||
def intToBytes(i: int) -> bytes:
|
||||
"""
|
||||
Convert the given integer into C{bytes}, as ASCII-encoded Arabic numerals.
|
||||
|
||||
@param i: The C{int} to convert to C{bytes}.
|
||||
@rtype: C{bytes}
|
||||
"""
|
||||
return b"%d" % (i,)
|
||||
|
||||
|
||||
def lazyByteSlice(object, offset=0, size=None):
|
||||
"""
|
||||
Return a copy of the given bytes-like object.
|
||||
|
||||
If an offset is given, the copy starts at that offset. If a size is
|
||||
given, the copy will only be of that length.
|
||||
|
||||
@param object: C{bytes} to be copied.
|
||||
|
||||
@param offset: C{int}, starting index of copy.
|
||||
|
||||
@param size: Optional, if an C{int} is given limit the length of copy
|
||||
to this size.
|
||||
"""
|
||||
view = memoryview(object)
|
||||
if size is None:
|
||||
return view[offset:]
|
||||
else:
|
||||
return view[offset : (offset + size)]
|
||||
|
||||
|
||||
def networkString(s: str) -> bytes:
|
||||
"""
|
||||
Convert a string to L{bytes} using ASCII encoding.
|
||||
|
||||
This is useful for sending text-like bytes that are constructed using
|
||||
string interpolation. For example::
|
||||
|
||||
networkString("Hello %d" % (n,))
|
||||
|
||||
@param s: A string to convert to bytes.
|
||||
@type s: L{str}
|
||||
|
||||
@raise UnicodeError: The input string is not ASCII encodable.
|
||||
@raise TypeError: The input is not L{str}.
|
||||
|
||||
@rtype: L{bytes}
|
||||
"""
|
||||
if not isinstance(s, str):
|
||||
raise TypeError("Can only convert strings to bytes")
|
||||
return s.encode("ascii")
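A short sketch of nativeString() and networkString() handling ASCII-only data and rejecting anything else:

assert nativeString(b"GET") == "GET"
assert networkString("Content-Length: %d" % (42,)) == b"Content-Length: 42"
try:
    networkString("snowman \u2603")  # non-ASCII input raises UnicodeError
except UnicodeError:
    pass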
|
||||
|
||||
|
||||
@deprecated(Version("Twisted", 21, 2, 0), replacement="os***REMOVED***ironb")
|
||||
def bytesEnviron():
|
||||
"""
|
||||
Return a L{dict} of L{os.environ} where all text-strings are encoded into
|
||||
L{bytes}.
|
||||
|
||||
This function is POSIX only; environment variables are always text strings
|
||||
on Windows.
|
||||
"""
|
||||
encodekey = os.environ.encodekey
|
||||
encodevalue = os.environ.encodevalue
|
||||
|
||||
return {encodekey(x): encodevalue(y) for x, y in os.environ.items()}
|
||||
|
||||
|
||||
def _constructMethod(cls, name, self):
|
||||
"""
|
||||
Construct a bound method.
|
||||
|
||||
@param cls: The class that the method should be bound to.
|
||||
@type cls: L{type}
|
||||
|
||||
@param name: The name of the method.
|
||||
@type name: native L{str}
|
||||
|
||||
@param self: The object that the method is bound to.
|
||||
@type self: any object
|
||||
|
||||
@return: a bound method
|
||||
@rtype: L{_MethodType}
|
||||
"""
|
||||
func = cls.__dict__[name]
|
||||
return _MethodType(func, self)
|
||||
|
||||
|
||||
def _pypy3BlockingHack():
|
||||
"""
|
||||
Work around U{https://foss.heptapod.net/pypy/pypy/-/issues/3051}
|
||||
by replacing C{socket.fromfd} with a more conservative version.
|
||||
"""
|
||||
try:
|
||||
from fcntl import F_GETFL, F_SETFL, fcntl
|
||||
except ImportError:
|
||||
return
|
||||
if not _PYPY:
|
||||
return
|
||||
|
||||
def fromFDWithoutModifyingFlags(fd, family, type, proto=None):
|
||||
passproto = [proto] * (proto is not None)
|
||||
flags = fcntl(fd, F_GETFL)
|
||||
try:
|
||||
return realFromFD(fd, family, type, *passproto)
|
||||
finally:
|
||||
fcntl(fd, F_SETFL, flags)
|
||||
|
||||
realFromFD = socket.fromfd
|
||||
if realFromFD.__name__ == fromFDWithoutModifyingFlags.__name__:
|
||||
return
|
||||
socket.fromfd = fromFDWithoutModifyingFlags
|
||||
|
||||
|
||||
_pypy3BlockingHack()
|
||||
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Use functools.reduce() directly",
|
||||
__name__,
|
||||
"reduce",
|
||||
)
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Use io.StringIO directly",
|
||||
__name__,
|
||||
"NativeStringIO",
|
||||
)
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Import urllib.parse directly",
|
||||
__name__,
|
||||
"urllib_parse",
|
||||
)
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0), "Use html.escape directly", __name__, "escape"
|
||||
)
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Use urllib.parse.quote() directly",
|
||||
__name__,
|
||||
"urlquote",
|
||||
)
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Use urllib.parse.unquote() directly",
|
||||
__name__,
|
||||
"urlunquote",
|
||||
)
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Use http.cookiejar directly",
|
||||
__name__,
|
||||
"cookielib",
|
||||
)
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0), "Use sys.intern() directly", __name__, "intern"
|
||||
)
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Use collections.abc.Sequence directly",
|
||||
__name__,
|
||||
"Sequence",
|
||||
)
|
||||
|
||||
|
||||
__all__ = [
|
||||
"reraise",
|
||||
"execfile",
|
||||
"frozenset",
|
||||
"reduce",
|
||||
"set",
|
||||
"cmp",
|
||||
"comparable",
|
||||
"nativeString",
|
||||
"NativeStringIO",
|
||||
"networkString",
|
||||
"unicode",
|
||||
"iterbytes",
|
||||
"intToBytes",
|
||||
"lazyByteSlice",
|
||||
"StringType",
|
||||
"InstanceType",
|
||||
"FileType",
|
||||
"items",
|
||||
"iteritems",
|
||||
"itervalues",
|
||||
"range",
|
||||
"xrange",
|
||||
"urllib_parse",
|
||||
"bytesEnviron",
|
||||
"escape",
|
||||
"urlquote",
|
||||
"urlunquote",
|
||||
"cookielib",
|
||||
"intern",
|
||||
"unichr",
|
||||
"raw_input",
|
||||
"Sequence",
|
||||
]
|
||||
431
.venv/lib/python3.12/site-packages/twisted/python/components.py
Normal file
@@ -0,0 +1,431 @@
|
||||
# -*- test-case-name: twisted.python.test.test_components -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
|
||||
"""
|
||||
Component architecture for Twisted, based on Zope3 components.
|
||||
|
||||
Using the Zope3 API directly is strongly recommended. Everything
|
||||
you need is in the top-level of the zope.interface package, e.g.::
|
||||
|
||||
from zope.interface import Interface, implementer
|
||||
|
||||
class IFoo(Interface):
|
||||
pass
|
||||
|
||||
@implementer(IFoo)
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
print(IFoo.implementedBy(Foo)) # True
|
||||
print(IFoo.providedBy(Foo())) # True
|
||||
|
||||
L{twisted.python.components.registerAdapter} from this module may be used to
|
||||
add to Twisted's global adapter registry.
|
||||
|
||||
L{twisted.python.components.proxyForInterface} is a factory for classes
|
||||
which allow access to only the parts of another class defined by a specified
|
||||
interface.
|
||||
"""
|
||||
|
||||
|
||||
from io import StringIO
|
||||
from typing import Dict
|
||||
|
||||
# zope3 imports
|
||||
from zope.interface import declarations, interface
|
||||
from zope.interface.adapter import AdapterRegistry
|
||||
|
||||
# twisted imports
|
||||
from twisted.python import reflect
|
||||
|
||||
# Twisted's global adapter registry
|
||||
globalRegistry = AdapterRegistry()
|
||||
|
||||
# Attribute that registerAdapter looks at. Is this supposed to be public?
|
||||
ALLOW_DUPLICATES = 0
|
||||
|
||||
|
||||
def registerAdapter(adapterFactory, origInterface, *interfaceClasses):
|
||||
"""Register an adapter class.
|
||||
|
||||
An adapter class is expected to implement the given interface, by
|
||||
adapting instances implementing 'origInterface'. An adapter class's
|
||||
__init__ method should accept one parameter, an instance implementing
|
||||
'origInterface'.
|
||||
"""
|
||||
self = globalRegistry
|
||||
assert interfaceClasses, "You need to pass an Interface"
|
||||
global ALLOW_DUPLICATES
|
||||
|
||||
# deal with class->interface adapters:
|
||||
if not isinstance(origInterface, interface.InterfaceClass):
|
||||
origInterface = declarations.implementedBy(origInterface)
|
||||
|
||||
for interfaceClass in interfaceClasses:
|
||||
factory = self.registered([origInterface], interfaceClass)
|
||||
if factory is not None and not ALLOW_DUPLICATES:
|
||||
raise ValueError(f"an adapter ({factory}) was already registered.")
|
||||
for interfaceClass in interfaceClasses:
|
||||
self.register([origInterface], interfaceClass, "", adapterFactory)
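A hedged sketch of registerAdapter() in use; the interface and classes below are made up for illustration:

from zope.interface import Interface, implementer

class IName(Interface):
    """Something with a displayable name."""

class User:
    def __init__(self, login):
        self.login = login

@implementer(IName)
class UserToName:
    def __init__(self, user):
        self.name = user.login

registerAdapter(UserToName, User, IName)
# The adapter hook installed further down (via _addHook) makes IName(...) adapt:
assert IName(User("alice")).name == "alice"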
|
||||
|
||||
|
||||
def getAdapterFactory(fromInterface, toInterface, default):
|
||||
"""Return registered adapter for a given class and interface.
|
||||
|
||||
Note that this is tied to the *Twisted* global registry, and will
|
||||
thus not find adapters registered elsewhere.
|
||||
"""
|
||||
self = globalRegistry
|
||||
if not isinstance(fromInterface, interface.InterfaceClass):
|
||||
fromInterface = declarations.implementedBy(fromInterface)
|
||||
factory = self.lookup1(fromInterface, toInterface) # type: ignore[attr-defined]
|
||||
if factory is None:
|
||||
factory = default
|
||||
return factory
|
||||
|
||||
|
||||
def _addHook(registry):
|
||||
"""
|
||||
Add an adapter hook which will attempt to look up adapters in the given
|
||||
registry.
|
||||
|
||||
@type registry: L{zope.interface.adapter.AdapterRegistry}
|
||||
|
||||
@return: The hook which was added, for later use with L{_removeHook}.
|
||||
"""
|
||||
lookup = registry.lookup1
|
||||
|
||||
def _hook(iface, ob):
|
||||
factory = lookup(declarations.providedBy(ob), iface)
|
||||
if factory is None:
|
||||
return None
|
||||
else:
|
||||
return factory(ob)
|
||||
|
||||
interface.adapter_hooks.append(_hook)
|
||||
return _hook
|
||||
|
||||
|
||||
def _removeHook(hook):
|
||||
"""
|
||||
Remove a previously added adapter hook.
|
||||
|
||||
@param hook: An object previously returned by a call to L{_addHook}. This
|
||||
will be removed from the list of adapter hooks.
|
||||
"""
|
||||
interface.adapter_hooks.remove(hook)
|
||||
|
||||
|
||||
# add global adapter lookup hook for our newly created registry
|
||||
_addHook(globalRegistry)
|
||||
|
||||
|
||||
def getRegistry():
|
||||
"""Returns the Twisted global
|
||||
C{zope.interface.adapter.AdapterRegistry} instance.
|
||||
"""
|
||||
return globalRegistry
|
||||
|
||||
|
||||
# FIXME: deprecate attribute somehow?
|
||||
CannotAdapt = TypeError
|
||||
|
||||
|
||||
class Adapter:
|
||||
"""I am the default implementation of an Adapter for some interface.
|
||||
|
||||
This docstring contains a limerick, by popular demand::
|
||||
|
||||
Subclassing made Zope and TR
|
||||
much harder to work with by far.
|
||||
So before you inherit,
|
||||
be sure to declare it
|
||||
Adapter, not PyObject*
|
||||
|
||||
@cvar temporaryAdapter: If this is True, the adapter will not be
|
||||
persisted on the Componentized.
|
||||
@cvar multiComponent: If this adapter is persistent, should it be
|
||||
automatically registered for all appropriate interfaces.
|
||||
"""
|
||||
|
||||
# These attributes are used with Componentized.
|
||||
|
||||
temporaryAdapter = 0
|
||||
multiComponent = 1
|
||||
|
||||
def __init__(self, original):
|
||||
"""Set my 'original' attribute to be the object I am adapting."""
|
||||
self.original = original
|
||||
|
||||
def __conform__(self, interface):
|
||||
"""
|
||||
I forward __conform__ to self.original if it has it, otherwise I
|
||||
simply return None.
|
||||
"""
|
||||
if hasattr(self.original, "__conform__"):
|
||||
return self.original.__conform__(interface)
|
||||
return None
|
||||
|
||||
def isuper(self, iface, adapter):
|
||||
"""
|
||||
Forward isuper to self.original
|
||||
"""
|
||||
return self.original.isuper(iface, adapter)
|
||||
|
||||
|
||||
class Componentized:
|
||||
"""I am a mixin to allow you to be adapted in various ways persistently.
|
||||
|
||||
I define a list of persistent adapters. This is to allow adapter classes
|
||||
to store system-specific state, and to be initialized on demand. The
|
||||
getComponent method implements this. You must also register adapters for
|
||||
this class for the interfaces that you wish to pass to getComponent.
|
||||
|
||||
Many other classes and utilities listed here are present in Zope3; this one
|
||||
is specific to Twisted.
|
||||
"""
|
||||
|
||||
persistenceVersion = 1
|
||||
|
||||
def __init__(self):
|
||||
self._adapterCache = {}
|
||||
|
||||
def locateAdapterClass(self, klass, interfaceClass, default):
|
||||
return getAdapterFactory(klass, interfaceClass, default)
|
||||
|
||||
def setAdapter(self, interfaceClass, adapterClass):
|
||||
"""
|
||||
Cache a provider for the given interface, by adapting C{self} using
|
||||
the given adapter class.
|
||||
"""
|
||||
self.setComponent(interfaceClass, adapterClass(self))
|
||||
|
||||
def addAdapter(self, adapterClass, ignoreClass=0):
|
||||
"""Utility method that calls addComponent. I take an adapter class and
|
||||
instantiate it with myself as the first argument.
|
||||
|
||||
@return: The adapter instantiated.
|
||||
"""
|
||||
adapt = adapterClass(self)
|
||||
self.addComponent(adapt, ignoreClass)
|
||||
return adapt
|
||||
|
||||
def setComponent(self, interfaceClass, component):
|
||||
"""
|
||||
Cache a provider of the given interface.
|
||||
"""
|
||||
self._adapterCache[reflect.qual(interfaceClass)] = component
|
||||
|
||||
def addComponent(self, component, ignoreClass=0):
|
||||
"""
|
||||
Add a component to me, for all appropriate interfaces.
|
||||
|
||||
In order to determine which interfaces are appropriate, the component's
|
||||
provided interfaces will be scanned.
|
||||
|
||||
If the argument 'ignoreClass' is True, then all interfaces are
|
||||
considered appropriate.
|
||||
|
||||
Otherwise, an 'appropriate' interface is one for which its class has
|
||||
been registered as an adapter for my class according to the rules of
|
||||
getComponent.
|
||||
"""
|
||||
for iface in declarations.providedBy(component):
|
||||
if ignoreClass or (
|
||||
self.locateAdapterClass(self.__class__, iface, None)
|
||||
== component.__class__
|
||||
):
|
||||
self._adapterCache[reflect.qual(iface)] = component
|
||||
|
||||
def unsetComponent(self, interfaceClass):
|
||||
"""Remove my component specified by the given interface class."""
|
||||
del self._adapterCache[reflect.qual(interfaceClass)]
|
||||
|
||||
def removeComponent(self, component):
|
||||
"""
|
||||
Remove the given component from me entirely, for all interfaces for which
|
||||
it has been registered.
|
||||
|
||||
@return: a list of the interfaces that were removed.
|
||||
"""
|
||||
l = []
|
||||
for k, v in list(self._adapterCache.items()):
|
||||
if v is component:
|
||||
del self._adapterCache[k]
|
||||
l.append(reflect.namedObject(k))
|
||||
return l
|
||||
|
||||
def getComponent(self, interface, default=None):
|
||||
"""Create or retrieve an adapter for the given interface.
|
||||
|
||||
If such an adapter has already been created, retrieve it from the cache
|
||||
that this instance keeps of all its adapters. Adapters created through
|
||||
this mechanism may safely store system-specific state.
|
||||
|
||||
If you want to register an adapter that will be created through
|
||||
getComponent, but you don't require (or don't want) your adapter to be
|
||||
cached and kept alive for the lifetime of this Componentized object,
|
||||
set the attribute 'temporaryAdapter' to True on your adapter class.
|
||||
|
||||
If you want to automatically register an adapter for all appropriate
|
||||
interfaces (with addComponent), set the attribute 'multiComponent' to
|
||||
True on your adapter class.
|
||||
"""
|
||||
k = reflect.qual(interface)
|
||||
if k in self._adapterCache:
|
||||
return self._adapterCache[k]
|
||||
else:
|
||||
adapter = interface.__adapt__(self)
|
||||
if adapter is not None and not (
|
||||
hasattr(adapter, "temporaryAdapter") and adapter.temporaryAdapter
|
||||
):
|
||||
self._adapterCache[k] = adapter
|
||||
if hasattr(adapter, "multiComponent") and adapter.multiComponent:
|
||||
self.addComponent(adapter)
|
||||
if adapter is None:
|
||||
return default
|
||||
return adapter
|
||||
|
||||
def __conform__(self, interface):
|
||||
return self.getComponent(interface)
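A sketch of the caching behaviour, using made-up Store and Greeter classes together with registerAdapter() and Adapter from above:

from zope.interface import Interface, implementer

class IGreeter(Interface):
    """Can produce a greeting."""

class Store(Componentized):
    pass

@implementer(IGreeter)
class Greeter(Adapter):
    def greet(self):
        return f"hello from {self.original!r}"

registerAdapter(Greeter, Store, IGreeter)

store = Store()
greeter = store.getComponent(IGreeter)          # built via the registered adapter
assert store.getComponent(IGreeter) is greeter  # and cached for later lookups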
|
||||
|
||||
|
||||
class ReprableComponentized(Componentized):
|
||||
def __init__(self):
|
||||
Componentized.__init__(self)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
from pprint import pprint
|
||||
|
||||
sio = StringIO()
|
||||
pprint(self._adapterCache, sio)
|
||||
return sio.getvalue()
|
||||
|
||||
|
||||
def proxyForInterface(iface, originalAttribute="original"):
|
||||
"""
|
||||
Create a class which proxies all method calls which adhere to an interface
|
||||
to another provider of that interface.
|
||||
|
||||
This function is intended for creating specialized proxies. The typical way
|
||||
to use it is by subclassing the result::
|
||||
|
||||
class MySpecializedProxy(proxyForInterface(IFoo)):
|
||||
def someInterfaceMethod(self, arg):
|
||||
if arg == 3:
|
||||
return 3
|
||||
return self.original.someInterfaceMethod(arg)
|
||||
|
||||
@param iface: The Interface to which the resulting object will conform, and
|
||||
which the wrapped object must provide.
|
||||
|
||||
@param originalAttribute: name of the attribute used to save the original
|
||||
object in the resulting class. Defaults to C{original}.
|
||||
@type originalAttribute: C{str}
|
||||
|
||||
@return: A class whose constructor takes the original object as its only
|
||||
argument. Constructing the class creates the proxy.
|
||||
"""
|
||||
|
||||
def __init__(self, original):
|
||||
setattr(self, originalAttribute, original)
|
||||
|
||||
contents: Dict[str, object] = {"__init__": __init__}
|
||||
for name in iface:
|
||||
contents[name] = _ProxyDescriptor(name, originalAttribute)
|
||||
proxy = type(f"(Proxy for {reflect.qual(iface)})", (object,), contents)
|
||||
# mypy-zope declarations.classImplements only works when passing
|
||||
# a concrete class type
|
||||
declarations.classImplements(proxy, iface) # type: ignore[misc]
|
||||
return proxy
|
||||
|
||||
|
||||
class _ProxiedClassMethod:
|
||||
"""
|
||||
A proxied class method.
|
||||
|
||||
@ivar methodName: the name of the method which this should invoke when
|
||||
called.
|
||||
@type methodName: L{str}
|
||||
|
||||
@ivar __name__: The name of the method being proxied (the same as
|
||||
C{methodName}).
|
||||
@type __name__: L{str}
|
||||
|
||||
@ivar originalAttribute: name of the attribute of the proxy where the
|
||||
original object is stored.
|
||||
@type originalAttribute: L{str}
|
||||
"""
|
||||
|
||||
def __init__(self, methodName, originalAttribute):
|
||||
self.methodName = self.__name__ = methodName
|
||||
self.originalAttribute = originalAttribute
|
||||
|
||||
def __call__(self, oself, *args, **kw):
|
||||
"""
|
||||
Invoke the specified L{methodName} method of the C{original} attribute
|
||||
for proxyForInterface.
|
||||
|
||||
@param oself: an instance of a L{proxyForInterface} object.
|
||||
|
||||
@return: the result of the underlying method.
|
||||
"""
|
||||
original = getattr(oself, self.originalAttribute)
|
||||
actualMethod = getattr(original, self.methodName)
|
||||
return actualMethod(*args, **kw)
|
||||
|
||||
|
||||
class _ProxyDescriptor:
|
||||
"""
|
||||
A descriptor which will proxy attribute access, mutation, and
|
||||
deletion to the L{_ProxyDescriptor.originalAttribute} of the
|
||||
object it is being accessed from.
|
||||
|
||||
@ivar attributeName: the name of the attribute which this descriptor will
|
||||
retrieve from instances' C{original} attribute.
|
||||
@type attributeName: C{str}
|
||||
|
||||
@ivar originalAttribute: name of the attribute of the proxy where the
|
||||
original object is stored.
|
||||
@type originalAttribute: C{str}
|
||||
"""
|
||||
|
||||
def __init__(self, attributeName, originalAttribute):
|
||||
self.attributeName = attributeName
|
||||
self.originalAttribute = originalAttribute
|
||||
|
||||
def __get__(self, oself, type=None):
|
||||
"""
|
||||
Retrieve the C{self.attributeName} property from I{oself}.
|
||||
"""
|
||||
if oself is None:
|
||||
return _ProxiedClassMethod(self.attributeName, self.originalAttribute)
|
||||
original = getattr(oself, self.originalAttribute)
|
||||
return getattr(original, self.attributeName)
|
||||
|
||||
def __set__(self, oself, value):
|
||||
"""
|
||||
Set the C{self.attributeName} property of I{oself}.
|
||||
"""
|
||||
original = getattr(oself, self.originalAttribute)
|
||||
setattr(original, self.attributeName, value)
|
||||
|
||||
def __delete__(self, oself):
|
||||
"""
|
||||
Delete the C{self.attributeName} property of I{oself}.
|
||||
"""
|
||||
original = getattr(oself, self.originalAttribute)
|
||||
delattr(original, self.attributeName)
|
||||
|
||||
|
||||
__all__ = [
|
||||
"registerAdapter",
|
||||
"getAdapterFactory",
|
||||
"Adapter",
|
||||
"Componentized",
|
||||
"ReprableComponentized",
|
||||
"getRegistry",
|
||||
"proxyForInterface",
|
||||
]
|
||||
14
.venv/lib/python3.12/site-packages/twisted/python/constants.py
Normal file
@@ -0,0 +1,14 @@
|
||||
# -*- test-case-name: twisted.python.test.test_constants -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Symbolic constant support, including collections and constants with text,
|
||||
numeric, and bit flag values.
|
||||
"""
|
||||
|
||||
|
||||
# Import and re-export Constantly
|
||||
from constantly import FlagConstant, Flags, NamedConstant, Names, ValueConstant, Values
|
||||
|
||||
__all__ = ["NamedConstant", "ValueConstant", "FlagConstant", "Names", "Values", "Flags"]
|
||||
135
.venv/lib/python3.12/site-packages/twisted/python/context.py
Normal file
@@ -0,0 +1,135 @@
|
||||
# -*- test-case-name: twisted.test.test_context -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Dynamic pseudo-scoping for Python.
|
||||
|
||||
Call functions with context.call({key: value}, func); func and
|
||||
functions that it calls will be able to use 'context.get(key)' to
|
||||
retrieve 'value'.
|
||||
|
||||
This is thread-safe.
|
||||
"""
|
||||
|
||||
|
||||
from threading import local
|
||||
from typing import Dict, Type
|
||||
|
||||
defaultContextDict: Dict[Type[object], Dict[str, str]] = {}
|
||||
|
||||
setDefault = defaultContextDict.__setitem__
|
||||
|
||||
|
||||
class ContextTracker:
|
||||
"""
|
||||
A L{ContextTracker} provides a way to pass arbitrary key/value data up and
|
||||
down a call stack without passing them as parameters to the functions on
|
||||
that call stack.
|
||||
|
||||
This can be useful when functions on the top and bottom of the call stack
|
||||
need to cooperate but the functions in between them do not allow passing the
|
||||
necessary state. For example::
|
||||
|
||||
from twisted.python.context import call, get
|
||||
|
||||
def handleRequest(request):
|
||||
call({'request-id': request.id}, renderRequest, request.url)
|
||||
|
||||
def renderRequest(url):
|
||||
renderHeader(url)
|
||||
renderBody(url)
|
||||
|
||||
def renderHeader(url):
|
||||
return "the header"
|
||||
|
||||
def renderBody(url):
|
||||
return "the body (request id=%r)" % (get("request-id"),)
|
||||
|
||||
This should be used sparingly, since the lack of a clear connection between
|
||||
the two halves can result in code which is difficult to understand and
|
||||
maintain.
|
||||
|
||||
@ivar contexts: A C{list} of C{dict}s tracking the context state. Each new
|
||||
L{ContextTracker.callWithContext} pushes a new C{dict} onto this stack
|
||||
for the duration of the call, making the data available to the function
|
||||
called and restoring the previous data once it is complete.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.contexts = [defaultContextDict]
|
||||
|
||||
def callWithContext(self, newContext, func, *args, **kw):
|
||||
"""
|
||||
Call C{func(*args, **kw)} such that the contents of C{newContext} will
|
||||
be available for it to retrieve using L{getContext}.
|
||||
|
||||
@param newContext: A C{dict} of data to push onto the context for the
|
||||
duration of the call to C{func}.
|
||||
|
||||
@param func: A callable which will be called.
|
||||
|
||||
@param args: Any additional positional arguments to pass to C{func}.
|
||||
|
||||
@param kw: Any additional keyword arguments to pass to C{func}.
|
||||
|
||||
@return: Whatever is returned by C{func}
|
||||
|
||||
@raise Exception: Whatever is raised by C{func}.
|
||||
"""
|
||||
self.contexts.append(newContext)
|
||||
try:
|
||||
return func(*args, **kw)
|
||||
finally:
|
||||
self.contexts.pop()
|
||||
|
||||
def getContext(self, key, default=None):
|
||||
"""
|
||||
Retrieve the value for a key from the context.
|
||||
|
||||
@param key: The key to look up in the context.
|
||||
|
||||
@param default: The value to return if C{key} is not found in the
|
||||
context.
|
||||
|
||||
@return: The value most recently remembered in the context for C{key}.
|
||||
"""
|
||||
for ctx in reversed(self.contexts):
|
||||
try:
|
||||
return ctx[key]
|
||||
except KeyError:
|
||||
pass
|
||||
return default
|
||||
|
||||
|
||||
class ThreadedContextTracker:
|
||||
def __init__(self):
|
||||
self.storage = local()
|
||||
|
||||
def currentContext(self):
|
||||
try:
|
||||
return self.storage.ct
|
||||
except AttributeError:
|
||||
ct = self.storage.ct = ContextTracker()
|
||||
return ct
|
||||
|
||||
def callWithContext(self, ctx, func, *args, **kw):
|
||||
return self.currentContext().callWithContext(ctx, func, *args, **kw)
|
||||
|
||||
def getContext(self, key, default=None):
|
||||
return self.currentContext().getContext(key, default)
|
||||
|
||||
|
||||
theContextTracker = ThreadedContextTracker()
|
||||
call = theContextTracker.callWithContext
|
||||
get = theContextTracker.getContext
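A minimal, self-contained sketch of the call()/get() pair defined above:

def renderBody():
    return "request id: %r" % (get("request-id"),)

def handle(requestId):
    return call({"request-id": requestId}, renderBody)

assert handle(7) == "request id: 7"
assert get("request-id") is None  # outside of call(), the key is absent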
|
||||
|
||||
|
||||
def installContextTracker(ctr):
|
||||
global theContextTracker
|
||||
global call
|
||||
global get
|
||||
|
||||
theContextTracker = ctr
|
||||
call = theContextTracker.callWithContext
|
||||
get = theContextTracker.getContext
|
||||
833
.venv/lib/python3.12/site-packages/twisted/python/deprecate.py
Normal file
@@ -0,0 +1,833 @@
|
||||
# -*- test-case-name: twisted.python.test.test_deprecate -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Deprecation framework for Twisted.
|
||||
|
||||
To mark a method, function, or class as being deprecated do this::
|
||||
|
||||
from incremental import Version
|
||||
from twisted.python.deprecate import deprecated
|
||||
|
||||
@deprecated(Version("Twisted", 22, 10, 0))
|
||||
def badAPI(self, first, second):
|
||||
'''
|
||||
Docstring for badAPI.
|
||||
'''
|
||||
...
|
||||
|
||||
@deprecated(Version("Twisted", 22, 10, 0))
|
||||
class BadClass:
|
||||
'''
|
||||
Docstring for BadClass.
|
||||
'''
|
||||
|
||||
The newly-decorated badAPI will issue a warning when called, and BadClass will
|
||||
issue a warning when instantiated. Both will also have a deprecation notice
|
||||
appended to their docstring.
|
||||
|
||||
To deprecate properties you can use::
|
||||
|
||||
from incremental import Version
|
||||
from twisted.python.deprecate import deprecatedProperty
|
||||
|
||||
class OtherwiseUndeprecatedClass:
|
||||
|
||||
@deprecatedProperty(Version("Twisted", 22, 10, 0))
|
||||
def badProperty(self):
|
||||
'''
|
||||
Docstring for badProperty.
|
||||
'''
|
||||
|
||||
@badProperty.setter
|
||||
def badProperty(self, value):
|
||||
'''
|
||||
Setter will also raise the deprecation warning.
|
||||
'''
|
||||
|
||||
|
||||
While it's best to avoid this as it adds performance overhead to *any* usage of
|
||||
the module, to mark module-level attributes as being deprecated you can use::
|
||||
|
||||
badAttribute = "someValue"
|
||||
|
||||
...
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 22, 10, 0),
|
||||
"Use goodAttribute instead.",
|
||||
"your.full.module.name",
|
||||
"badAttribute")
|
||||
|
||||
The deprecated attributes will issue a warning whenever they are accessed. If
|
||||
the attributes being deprecated are in the same module as the
|
||||
L{deprecatedModuleAttribute} call is being made from, the C{__name__} global
|
||||
can be used as the C{moduleName} parameter.
|
||||
|
||||
|
||||
To mark an optional, keyword parameter of a function or method as deprecated
|
||||
without deprecating the function itself, you can use::
|
||||
|
||||
@deprecatedKeywordParameter(Version("Twisted", 22, 10, 0), "baz")
|
||||
def someFunction(foo, bar=0, baz=None):
|
||||
...
|
||||
|
||||
See also L{incremental.Version}.
|
||||
|
||||
@type DEPRECATION_WARNING_FORMAT: C{str}
|
||||
@var DEPRECATION_WARNING_FORMAT: The default deprecation warning string format
|
||||
to use when one is not provided by the user.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
__all__ = [
|
||||
"deprecated",
|
||||
"deprecatedProperty",
|
||||
"getDeprecationWarningString",
|
||||
"getWarningMethod",
|
||||
"setWarningMethod",
|
||||
"deprecatedModuleAttribute",
|
||||
"deprecatedKeywordParameter",
|
||||
]
|
||||
|
||||
|
||||
import inspect
|
||||
import sys
|
||||
from dis import findlinestarts
|
||||
from functools import wraps
|
||||
from types import ModuleType
|
||||
from typing import Any, Callable, Dict, Optional, TypeVar, cast
|
||||
from warnings import warn, warn_explicit
|
||||
|
||||
from incremental import Version, getVersionString
|
||||
from typing_extensions import ParamSpec
|
||||
|
||||
_P = ParamSpec("_P")
|
||||
_R = TypeVar("_R")
|
||||
|
||||
DEPRECATION_WARNING_FORMAT = "%(fqpn)s was deprecated in %(version)s"
|
||||
|
||||
# Notionally, part of twisted.python.reflect, but defining it there causes a
|
||||
# cyclic dependency between this module and that module. Define it here,
|
||||
# instead, and let reflect import it to re-expose to the public.
|
||||
|
||||
|
||||
def _fullyQualifiedName(obj):
|
||||
"""
|
||||
Return the fully qualified name of a module, class, method or function.
|
||||
Classes and functions need to be module level ones to be correctly
|
||||
qualified.
|
||||
|
||||
@rtype: C{str}.
|
||||
"""
|
||||
try:
|
||||
name = obj.__qualname__
|
||||
except AttributeError:
|
||||
name = obj.__name__
|
||||
|
||||
if inspect.isclass(obj) or inspect.isfunction(obj):
|
||||
moduleName = obj.__module__
|
||||
return f"{moduleName}.{name}"
|
||||
elif inspect.ismethod(obj):
|
||||
return f"{obj.__module__}.{obj.__qualname__}"
|
||||
return name
|
||||
|
||||
|
||||
# Try to keep it looking like something in twisted.python.reflect.
|
||||
_fullyQualifiedName.__module__ = "twisted.python.reflect"
|
||||
_fullyQualifiedName.__name__ = "fullyQualifiedName"
|
||||
_fullyQualifiedName.__qualname__ = "fullyQualifiedName"
|
||||
|
||||
|
||||
def _getReplacementString(replacement):
|
||||
"""
|
||||
Surround a replacement for a deprecated API with some polite text exhorting
|
||||
the user to consider it as an alternative.
|
||||
|
||||
@type replacement: C{str} or callable
|
||||
|
||||
@return: a string like "please use twisted.python.modules.getModule
|
||||
instead".
|
||||
"""
|
||||
if callable(replacement):
|
||||
replacement = _fullyQualifiedName(replacement)
|
||||
return f"please use {replacement} instead"
|
||||
|
||||
|
||||
def _getDeprecationDocstring(version, replacement=None):
|
||||
"""
|
||||
Generate an addition to a deprecated object's docstring that explains its
|
||||
deprecation.
|
||||
|
||||
@param version: the version it was deprecated.
|
||||
@type version: L{incremental.Version}
|
||||
|
||||
@param replacement: The replacement, if specified.
|
||||
@type replacement: C{str} or callable
|
||||
|
||||
@return: a string like "Deprecated in Twisted 27.2.0; please use
|
||||
twisted.timestream.tachyon.flux instead."
|
||||
"""
|
||||
doc = f"Deprecated in {getVersionString(version)}"
|
||||
if replacement:
|
||||
doc = f"{doc}; {_getReplacementString(replacement)}"
|
||||
return doc + "."
|
||||
|
||||
|
||||
def _getDeprecationWarningString(fqpn, version, format=None, replacement=None):
|
||||
"""
|
||||
Return a string indicating that the Python name was deprecated in the given
|
||||
version.
|
||||
|
||||
@param fqpn: Fully qualified Python name of the thing being deprecated
|
||||
@type fqpn: C{str}
|
||||
|
||||
@param version: Version that C{fqpn} was deprecated in.
|
||||
@type version: L{incremental.Version}
|
||||
|
||||
@param format: A user-provided format to interpolate warning values into, or
|
||||
L{DEPRECATION_WARNING_FORMAT
|
||||
<twisted.python.deprecate.DEPRECATION_WARNING_FORMAT>} if L{None} is
|
||||
given.
|
||||
@type format: C{str}
|
||||
|
||||
@param replacement: what should be used in place of C{fqpn}. Either pass in
|
||||
a string, which will be inserted into the warning message, or a
|
||||
callable, which will be expanded to its full import path.
|
||||
@type replacement: C{str} or callable
|
||||
|
||||
@return: A textual description of the deprecation
|
||||
@rtype: C{str}
|
||||
"""
|
||||
if format is None:
|
||||
format = DEPRECATION_WARNING_FORMAT
|
||||
warningString = format % {"fqpn": fqpn, "version": getVersionString(version)}
|
||||
if replacement:
|
||||
warningString = "{}; {}".format(
|
||||
warningString, _getReplacementString(replacement)
|
||||
)
|
||||
return warningString
|
||||
|
||||
|
||||
def getDeprecationWarningString(callableThing, version, format=None, replacement=None):
|
||||
"""
|
||||
Return a string indicating that the callable was deprecated in the given
|
||||
version.
|
||||
|
||||
@type callableThing: C{callable}
|
||||
@param callableThing: Callable object to be deprecated
|
||||
|
||||
@type version: L{incremental.Version}
|
||||
@param version: Version that C{callableThing} was deprecated in.
|
||||
|
||||
@type format: C{str}
|
||||
@param format: A user-provided format to interpolate warning values into,
|
||||
or L{DEPRECATION_WARNING_FORMAT
|
||||
<twisted.python.deprecate.DEPRECATION_WARNING_FORMAT>} if L{None} is
|
||||
given
|
||||
|
||||
@param replacement: what should be used in place of the callable. Either
|
||||
pass in a string, which will be inserted into the warning message,
|
||||
or a callable, which will be expanded to its full import path.
|
||||
@type replacement: C{str} or callable
|
||||
|
||||
@return: A string describing the deprecation.
|
||||
@rtype: C{str}
|
||||
"""
|
||||
return _getDeprecationWarningString(
|
||||
_fullyQualifiedName(callableThing), version, format, replacement
|
||||
)
|
||||
|
||||
|
||||
def _appendToDocstring(thingWithDoc, textToAppend):
|
||||
"""
|
||||
Append the given text to the docstring of C{thingWithDoc}.
|
||||
|
||||
If C{thingWithDoc} has no docstring, then the text just replaces the
|
||||
docstring. If it has a single-line docstring then it appends a blank line
|
||||
and the message text. If it has a multi-line docstring, then it appends a
|
||||
blank line and the message text, and also does the indentation correctly.
|
||||
"""
|
||||
if thingWithDoc.__doc__:
|
||||
docstringLines = thingWithDoc.__doc__.splitlines()
|
||||
else:
|
||||
docstringLines = []
|
||||
|
||||
if len(docstringLines) == 0:
|
||||
docstringLines.append(textToAppend)
|
||||
elif len(docstringLines) == 1:
|
||||
docstringLines.extend(["", textToAppend, ""])
|
||||
else:
|
||||
trailer = docstringLines[-1]
|
||||
spaces = ""
|
||||
if not trailer.strip():
|
||||
# Deal with differences between Python 3.13 and older versions.
|
||||
spaces = docstringLines.pop()
|
||||
docstringLines.extend(["", spaces + textToAppend, spaces])
|
||||
docstringLines = [l.lstrip(" ") for l in docstringLines]
|
||||
thingWithDoc.__doc__ = "\n".join(docstringLines)
|
||||
|
||||
|
||||
def deprecated(
|
||||
version: Version, replacement: str | Callable[..., object] | None = None
|
||||
) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]:
|
||||
"""
|
||||
Return a decorator that marks callables as deprecated. To deprecate a
|
||||
property, see L{deprecatedProperty}.
|
||||
|
||||
@type version: L{incremental.Version}
|
||||
@param version: The version in which the callable will be marked as
|
||||
having been deprecated. The decorated function will be annotated
|
||||
with this version, having it set as its C{deprecatedVersion}
|
||||
attribute.
|
||||
|
||||
@param replacement: what should be used in place of the callable. Either
|
||||
pass in a string, which will be inserted into the warning message,
|
||||
or a callable, which will be expanded to its full import path.
|
||||
@type replacement: C{str} or callable
|
||||
"""
|
||||
|
||||
def deprecationDecorator(function: Callable[_P, _R]) -> Callable[_P, _R]:
|
||||
"""
|
||||
Decorator that marks C{function} as deprecated.
|
||||
"""
|
||||
warningString = getDeprecationWarningString(
|
||||
function, version, None, replacement
|
||||
)
|
||||
|
||||
@wraps(function)
|
||||
def deprecatedFunction(*args: _P.args, **kwargs: _P.kwargs) -> _R:
|
||||
warn(warningString, DeprecationWarning, stacklevel=2)
|
||||
return function(*args, **kwargs)
|
||||
|
||||
_appendToDocstring(
|
||||
deprecatedFunction, _getDeprecationDocstring(version, replacement)
|
||||
)
|
||||
deprecatedFunction.deprecatedVersion = version # type: ignore[attr-defined]
|
||||
return deprecatedFunction
|
||||
|
||||
return deprecationDecorator
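A sketch of the decorator in action (the version number and names are illustrative):

import warnings
from incremental import Version

@deprecated(Version("Twisted", 21, 2, 0), replacement="newAPI")
def oldAPI():
    """Do the old thing."""
    return 42

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    assert oldAPI() == 42

assert issubclass(caught[0].category, DeprecationWarning)
assert "please use newAPI instead" in str(caught[0].message)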
|
||||
|
||||
|
||||
def deprecatedProperty(version, replacement=None):
|
||||
"""
|
||||
Return a decorator that marks a property as deprecated. To deprecate a
|
||||
regular callable or class, see L{deprecated}.
|
||||
|
||||
@type version: L{incremental.Version}
|
||||
@param version: The version in which the callable will be marked as
|
||||
having been deprecated. The decorated function will be annotated
|
||||
with this version, having it set as its C{deprecatedVersion}
|
||||
attribute.
|
||||
|
||||
@param replacement: what should be used in place of the callable.
|
||||
Either pass in a string, which will be inserted into the warning
|
||||
message, or a callable, which will be expanded to its full import
|
||||
path.
|
||||
@type replacement: C{str} or callable
|
||||
|
||||
@return: A new property with deprecated setter and getter.
|
||||
@rtype: C{property}
|
||||
|
||||
@since: 16.1.0
|
||||
"""
|
||||
|
||||
class _DeprecatedProperty(property):
|
||||
"""
|
||||
Extension of the built-in property to allow deprecated setters.
|
||||
"""
|
||||
|
||||
def _deprecatedWrapper(self, function):
|
||||
@wraps(function)
|
||||
def deprecatedFunction(*args, **kwargs):
|
||||
warn(
|
||||
self.warningString, # type: ignore[attr-defined]
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return function(*args, **kwargs)
|
||||
|
||||
return deprecatedFunction
|
||||
|
||||
def setter(self, function):
|
||||
return property.setter(self, self._deprecatedWrapper(function))
|
||||
|
||||
def deprecationDecorator(function):
|
||||
warningString = getDeprecationWarningString(
|
||||
function, version, None, replacement
|
||||
)
|
||||
|
||||
@wraps(function)
|
||||
def deprecatedFunction(*args, **kwargs):
|
||||
warn(warningString, DeprecationWarning, stacklevel=2)
|
||||
return function(*args, **kwargs)
|
||||
|
||||
_appendToDocstring(
|
||||
deprecatedFunction, _getDeprecationDocstring(version, replacement)
|
||||
)
|
||||
deprecatedFunction.deprecatedVersion = version # type: ignore[attr-defined]
|
||||
|
||||
result = _DeprecatedProperty(deprecatedFunction)
|
||||
result.warningString = warningString # type: ignore[attr-defined]
|
||||
return result
|
||||
|
||||
return deprecationDecorator
|
||||
|
||||
|
||||
def getWarningMethod():
|
||||
"""
|
||||
Return the warning method currently used to record deprecation warnings.
|
||||
"""
|
||||
return warn
|
||||
|
||||
|
||||
def setWarningMethod(newMethod):
|
||||
"""
|
||||
Set the warning method to use to record deprecation warnings.
|
||||
|
||||
The callable should take message, category and stacklevel. The return
|
||||
value is ignored.
|
||||
"""
|
||||
global warn
|
||||
warn = newMethod
|
||||
|
||||
|
||||
class _InternalState:
|
||||
"""
|
||||
An L{_InternalState} is a helper object for a L{_ModuleProxy}, so that it
|
||||
can easily access its own attributes, bypassing its logic for delegating to
|
||||
another object that it's proxying for.
|
||||
|
||||
@ivar proxy: a L{_ModuleProxy}
|
||||
"""
|
||||
|
||||
def __init__(self, proxy):
|
||||
object.__setattr__(self, "proxy", proxy)
|
||||
|
||||
def __getattribute__(self, name):
|
||||
return object.__getattribute__(object.__getattribute__(self, "proxy"), name)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
return object.__setattr__(object.__getattribute__(self, "proxy"), name, value)
|
||||
|
||||
|
||||
class _ModuleProxy:
|
||||
"""
|
||||
Python module wrapper to hook module-level attribute access.
|
||||
|
||||
Access to deprecated attributes first checks
|
||||
L{_ModuleProxy._deprecatedAttributes}, if the attribute does not appear
|
||||
there then access falls through to L{_ModuleProxy._module}, the wrapped
|
||||
module object.
|
||||
|
||||
@ivar _module: Module on which to hook attribute access.
|
||||
@type _module: C{module}
|
||||
|
||||
@ivar _deprecatedAttributes: Mapping of attribute names to objects that
|
||||
retrieve the module attribute's original value.
|
||||
@type _deprecatedAttributes: C{dict} mapping C{str} to
|
||||
L{_DeprecatedAttribute}
|
||||
|
||||
@ivar _lastWasPath: Heuristic guess as to whether warnings about this
|
||||
package should be ignored for the next call. If the last attribute
|
||||
access of this module was a C{getattr} of C{__path__}, we will assume
|
||||
that it was the import system doing it and we won't emit a warning for
|
||||
the next access, even if it is to a deprecated attribute. The CPython
|
||||
import system always tries to access C{__path__}, then the attribute
|
||||
itself, then the attribute itself again, in both successful and failed
|
||||
cases.
|
||||
@type _lastWasPath: C{bool}
|
||||
"""
|
||||
|
||||
def __init__(self, module):
|
||||
state = _InternalState(self)
|
||||
state._module = module
|
||||
state._deprecatedAttributes = {}
|
||||
state._lastWasPath = False
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""
|
||||
Get a string containing the type of the module proxy and a
|
||||
representation of the wrapped module object.
|
||||
"""
|
||||
state = _InternalState(self)
|
||||
return f"<{type(self).__name__} module={state._module!r}>"
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
"""
|
||||
Set an attribute on the wrapped module object.
|
||||
"""
|
||||
state = _InternalState(self)
|
||||
state._lastWasPath = False
|
||||
setattr(state._module, name, value)
|
||||
|
||||
def __getattribute__(self, name):
|
||||
"""
|
||||
Get an attribute from the module object, possibly emitting a warning.
|
||||
|
||||
If the specified name has been deprecated, then a warning is issued.
|
||||
(Unless certain obscure conditions are met; see
|
||||
L{_ModuleProxy._lastWasPath} for more information about what might quash
|
||||
such a warning.)
|
||||
"""
|
||||
state = _InternalState(self)
|
||||
if state._lastWasPath:
|
||||
deprecatedAttribute = None
|
||||
else:
|
||||
deprecatedAttribute = state._deprecatedAttributes.get(name)
|
||||
|
||||
if deprecatedAttribute is not None:
|
||||
# If we have a _DeprecatedAttribute object from the earlier lookup,
|
||||
# allow it to issue the warning.
|
||||
value = deprecatedAttribute.get()
|
||||
else:
|
||||
# Otherwise, just retrieve the underlying value directly; it's not
|
||||
# deprecated, there's no warning to issue.
|
||||
value = getattr(state._module, name)
|
||||
if name == "__path__":
|
||||
state._lastWasPath = True
|
||||
else:
|
||||
state._lastWasPath = False
|
||||
return value
|
||||
|
||||
|
||||
class _DeprecatedAttribute:
|
||||
"""
|
||||
Wrapper for deprecated attributes.
|
||||
|
||||
This is intended to be used by L{_ModuleProxy}. Calling
|
||||
L{_DeprecatedAttribute.get} will issue a warning and retrieve the
|
||||
underlying attribute's value.
|
||||
|
||||
@type module: C{module}
|
||||
@ivar module: The original module instance containing this attribute
|
||||
|
||||
@type fqpn: C{str}
|
||||
@ivar fqpn: Fully qualified Python name for the deprecated attribute
|
||||
|
||||
@type version: L{incremental.Version}
|
||||
@ivar version: Version that the attribute was deprecated in
|
||||
|
||||
@type message: C{str}
|
||||
@ivar message: Deprecation message
|
||||
"""
|
||||
|
||||
def __init__(self, module, name, version, message):
|
||||
"""
|
||||
Initialise a deprecated name wrapper.
|
||||
"""
|
||||
self.module = module
|
||||
self.__name__ = name
|
||||
self.fqpn = module.__name__ + "." + name
|
||||
self.version = version
|
||||
self.message = message
|
||||
|
||||
def get(self):
|
||||
"""
|
||||
Get the underlying attribute value and issue a deprecation warning.
|
||||
"""
|
||||
# This might fail if the deprecated thing is a module inside a package.
|
||||
# In that case, don't emit the warning this time. The import system
|
||||
# will come back again when it's not an AttributeError and we can emit
|
||||
# the warning then.
|
||||
result = getattr(self.module, self.__name__)
|
||||
message = _getDeprecationWarningString(
|
||||
self.fqpn, self.version, DEPRECATION_WARNING_FORMAT + ": " + self.message
|
||||
)
|
||||
warn(message, DeprecationWarning, stacklevel=3)
|
||||
return result
|
||||
|
||||
|
||||
def _deprecateAttribute(proxy, name, version, message):
|
||||
"""
|
||||
Mark a module-level attribute as being deprecated.
|
||||
|
||||
@type proxy: L{_ModuleProxy}
|
||||
@param proxy: The module proxy instance proxying the deprecated attributes
|
||||
|
||||
@type name: C{str}
|
||||
@param name: Attribute name
|
||||
|
||||
@type version: L{incremental.Version}
|
||||
@param version: Version that the attribute was deprecated in
|
||||
|
||||
@type message: C{str}
|
||||
@param message: Deprecation message
|
||||
"""
|
||||
_module = object.__getattribute__(proxy, "_module")
|
||||
attr = _DeprecatedAttribute(_module, name, version, message)
|
||||
# Add a deprecated attribute marker for this module's attribute. When this
|
||||
# attribute is accessed via _ModuleProxy a warning is emitted.
|
||||
_deprecatedAttributes = object.__getattribute__(proxy, "_deprecatedAttributes")
|
||||
_deprecatedAttributes[name] = attr
|
||||
|
||||
|
||||
def deprecatedModuleAttribute(version, message, moduleName, name):
|
||||
"""
|
||||
Declare a module-level attribute as being deprecated.
|
||||
|
||||
@type version: L{incremental.Version}
|
||||
@param version: Version that the attribute was deprecated in
|
||||
|
||||
@type message: C{str}
|
||||
@param message: Deprecation message
|
||||
|
||||
@type moduleName: C{str}
|
||||
@param moduleName: Fully-qualified Python name of the module containing
|
||||
the deprecated attribute; if called from the same module as the
|
||||
attributes are being deprecated in, using the C{__name__} global can
|
||||
be helpful
|
||||
|
||||
@type name: C{str}
|
||||
@param name: Attribute name to deprecate
|
||||
"""
|
||||
module = sys.modules[moduleName]
|
||||
if not isinstance(module, _ModuleProxy):
|
||||
module = cast(ModuleType, _ModuleProxy(module))
|
||||
sys.modules[moduleName] = module
|
||||
|
||||
_deprecateAttribute(module, name, version, message)
|
||||
|
||||
|
||||
def warnAboutFunction(offender, warningString):
|
||||
"""
|
||||
Issue a warning string, identifying C{offender} as the responsible code.
|
||||
|
||||
This function is used to deprecate some behavior of a function. It differs
|
||||
from L{warnings.warn} in that it is not limited to deprecating the behavior
|
||||
of a function currently on the call stack.
|
||||
|
||||
@param offender: The function that is being deprecated.
|
||||
|
||||
@param warningString: The string that should be emitted by this warning.
|
||||
@type warningString: C{str}
|
||||
|
||||
@since: 11.0
|
||||
"""
|
||||
# inspect.getmodule() is attractive, but somewhat
|
||||
# broken in Python < 2.6. See Python bug 4845.
|
||||
# In Python 3.13 line numbers returned by findlinestarts
|
||||
# can be None for bytecode that does not map to source
|
||||
# lines.
|
||||
offenderModule = sys.modules[offender.__module__]
|
||||
warn_explicit(
|
||||
warningString,
|
||||
category=DeprecationWarning,
|
||||
filename=inspect.getabsfile(offenderModule),
|
||||
lineno=max(
|
||||
lineNumber
|
||||
for _, lineNumber in findlinestarts(offender.__code__)
|
||||
if lineNumber is not None
|
||||
),
|
||||
module=offenderModule.__name__,
|
||||
registry=offender.__globals__.setdefault("__warningregistry__", {}),
|
||||
module_globals=None,
|
||||
)
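# Illustrative sketch (not part of Twisted): how warnAboutFunction might be used
# to attribute deprecated behaviour to a user-supplied callable that is not on
# the current call stack. The callback name below is hypothetical.
def _exampleWarnAboutFunction():  # pragma: no cover - documentation example
    def legacyCallback():
        return None

    warnAboutFunction(
        legacyCallback,
        "Returning None from legacyCallback is deprecated; return a Deferred.",
    )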
|
||||
|
||||
|
||||
def _passedArgSpec(argspec, positional, keyword):
|
||||
"""
|
||||
Take an I{inspect.ArgSpec}, a tuple of positional arguments, and a dict of
|
||||
keyword arguments, and return a mapping of arguments that were actually
|
||||
passed to their passed values.
|
||||
|
||||
@param argspec: The argument specification for the function to inspect.
|
||||
@type argspec: I{inspect.ArgSpec}
|
||||
|
||||
@param positional: The positional arguments that were passed.
|
||||
@type positional: L{tuple}
|
||||
|
||||
@param keyword: The keyword arguments that were passed.
|
||||
@type keyword: L{dict}
|
||||
|
||||
@return: A dictionary mapping argument names (those declared in C{argspec})
|
||||
to values that were passed explicitly by the user.
|
||||
@rtype: L{dict} mapping L{str} to L{object}
|
||||
"""
|
||||
result: Dict[str, object] = {}
|
||||
unpassed = len(argspec.args) - len(positional)
|
||||
if argspec.keywords is not None:
|
||||
kwargs = result[argspec.keywords] = {}
|
||||
if unpassed < 0:
|
||||
if argspec.varargs is None:
|
||||
raise TypeError("Too many arguments.")
|
||||
else:
|
||||
result[argspec.varargs] = positional[len(argspec.args) :]
|
||||
for name, value in zip(argspec.args, positional):
|
||||
result[name] = value
|
||||
for name, value in keyword.items():
|
||||
if name in argspec.args:
|
||||
if name in result:
|
||||
raise TypeError("Already passed.")
|
||||
result[name] = value
|
||||
elif argspec.keywords is not None:
|
||||
kwargs[name] = value
|
||||
else:
|
||||
raise TypeError("no such param")
|
||||
return result
|
||||
|
||||
|
||||
def _passedSignature(signature, positional, keyword):
|
||||
"""
|
||||
Take an L{inspect.Signature}, a tuple of positional arguments, and a dict of
|
||||
keyword arguments, and return a mapping of arguments that were actually
|
||||
passed to their passed values.
|
||||
|
||||
@param signature: The signature of the function to inspect.
|
||||
@type signature: L{inspect.Signature}
|
||||
|
||||
@param positional: The positional arguments that were passed.
|
||||
@type positional: L{tuple}
|
||||
|
||||
@param keyword: The keyword arguments that were passed.
|
||||
@type keyword: L{dict}
|
||||
|
||||
@return: A dictionary mapping argument names (those declared in
|
||||
C{signature}) to values that were passed explicitly by the user.
|
||||
@rtype: L{dict} mapping L{str} to L{object}
|
||||
"""
|
||||
result = {}
|
||||
kwargs = None
|
||||
numPositional = 0
|
||||
for n, (name, param) in enumerate(signature.parameters.items()):
|
||||
if param.kind == inspect.Parameter.VAR_POSITIONAL:
|
||||
# Varargs, for example: *args
|
||||
result[name] = positional[n:]
|
||||
numPositional = len(result[name]) + 1
|
||||
elif param.kind == inspect.Parameter.VAR_KEYWORD:
|
||||
# Variable keyword args, for example: **my_kwargs
|
||||
kwargs = result[name] = {}
|
||||
elif param.kind in (
|
||||
inspect.Parameter.POSITIONAL_OR_KEYWORD,
|
||||
inspect.Parameter.POSITIONAL_ONLY,
|
||||
):
|
||||
if n < len(positional):
|
||||
result[name] = positional[n]
|
||||
numPositional += 1
|
||||
elif param.kind == inspect.Parameter.KEYWORD_ONLY:
|
||||
if name not in keyword:
|
||||
if param.default == inspect.Parameter.empty:
|
||||
raise TypeError(f"missing keyword arg {name}")
|
||||
else:
|
||||
result[name] = param.default
|
||||
else:
|
||||
raise TypeError(f"'{name}' parameter is invalid kind: {param.kind}")
|
||||
|
||||
if len(positional) > numPositional:
|
||||
raise TypeError("Too many arguments.")
|
||||
for name, value in keyword.items():
|
||||
if name in signature.parameters.keys():
|
||||
if name in result:
|
||||
raise TypeError("Already passed.")
|
||||
result[name] = value
|
||||
elif kwargs is not None:
|
||||
kwargs[name] = value
|
||||
else:
|
||||
raise TypeError("no such param")
|
||||
return result
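# Illustrative sketch (not part of Twisted): what _passedSignature computes for
# a simple call. The function below is hypothetical.
def _examplePassedSignature():  # pragma: no cover - documentation example
    def f(a, b=0, *args, **kw):
        return a

    passed = _passedSignature(inspect.signature(f), (1,), {"b": 2})
    # Only explicitly supplied arguments appear, plus the (empty) star targets.
    assert passed == {"a": 1, "b": 2, "args": (), "kw": {}}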
|
||||
|
||||
|
||||
def _mutuallyExclusiveArguments(argumentPairs):
|
||||
"""
|
||||
Decorator which causes its decoratee to raise a L{TypeError} if two of the
|
||||
given arguments are passed at the same time.
|
||||
|
||||
@param argumentPairs: pairs of argument identifiers, each pair indicating
|
||||
an argument that may not be passed in conjunction with another.
|
||||
@type argumentPairs: sequence of 2-sequences of L{str}
|
||||
|
||||
@return: A decorator, used like so::
|
||||
|
||||
@_mutuallyExclusiveArguments([["tweedledum", "tweedledee"]])
|
||||
def function(tweedledum=1, tweedledee=2):
|
||||
"Don't pass tweedledum and tweedledee at the same time."
|
||||
|
||||
@rtype: 1-argument callable taking a callable and returning a callable.
|
||||
"""
|
||||
|
||||
def wrapper(wrappee):
|
||||
spec = inspect.signature(wrappee)
|
||||
_passed = _passedSignature
|
||||
|
||||
@wraps(wrappee)
|
||||
def wrapped(*args, **kwargs):
|
||||
arguments = _passed(spec, args, kwargs)
|
||||
for this, that in argumentPairs:
|
||||
if this in arguments and that in arguments:
|
||||
raise TypeError(
|
||||
("The %r and %r arguments to %s " "are mutually exclusive.")
|
||||
% (this, that, _fullyQualifiedName(wrappee))
|
||||
)
|
||||
return wrappee(*args, **kwargs)
|
||||
|
||||
return wrapped
|
||||
|
||||
return wrapper
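# Illustrative sketch (not part of Twisted): supplying both members of a
# mutually exclusive pair raises TypeError. The argument names follow the
# docstring example above.
def _exampleMutuallyExclusive():  # pragma: no cover - documentation example
    @_mutuallyExclusiveArguments([["tweedledum", "tweedledee"]])
    def function(tweedledum=1, tweedledee=2):
        return tweedledum + tweedledee

    function(tweedledum=3)  # Fine: only one of the pair is supplied.
    try:
        function(tweedledum=3, tweedledee=4)
    except TypeError:
        pass  # Both were supplied, which the decorator forbids.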
|
||||
|
||||
|
||||
_Tc = TypeVar("_Tc", bound=Callable[..., Any])
|
||||
|
||||
|
||||
def deprecatedKeywordParameter(
|
||||
version: Version, name: str, replacement: Optional[str] = None
|
||||
) -> Callable[[_Tc], _Tc]:
|
||||
"""
|
||||
Return a decorator that marks a keyword parameter of a callable
|
||||
as deprecated. A warning will be emitted if a caller supplies
|
||||
a value for the parameter, whether the caller uses a keyword or
|
||||
positional syntax.
|
||||
|
||||
@type version: L{incremental.Version}
|
||||
@param version: The version in which the parameter will be marked as
|
||||
having been deprecated.
|
||||
|
||||
@type name: L{str}
|
||||
@param name: The name of the deprecated parameter.
|
||||
|
||||
@type replacement: L{str}
|
||||
@param replacement: Optional text indicating what should be used in
|
||||
place of the deprecated parameter.
|
||||
|
||||
@since: Twisted 21.2.0
|
||||
"""
|
||||
|
||||
def wrapper(wrappee: _Tc) -> _Tc:
|
||||
warningString = _getDeprecationWarningString(
|
||||
f"The {name!r} parameter to {_fullyQualifiedName(wrappee)}",
|
||||
version,
|
||||
replacement=replacement,
|
||||
)
|
||||
|
||||
doc = "The {!r} parameter was deprecated in {}".format(
|
||||
name,
|
||||
getVersionString(version),
|
||||
)
|
||||
if replacement:
|
||||
doc = doc + "; " + _getReplacementString(replacement)
|
||||
doc += "."
|
||||
|
||||
params = inspect.signature(wrappee).parameters
|
||||
if (
|
||||
name in params
|
||||
and params[name].kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
|
||||
):
|
||||
parameterIndex = list(params).index(name)
|
||||
|
||||
def checkDeprecatedParameter(*args, **kwargs):
|
||||
if len(args) > parameterIndex or name in kwargs:
|
||||
warn(warningString, DeprecationWarning, stacklevel=2)
|
||||
return wrappee(*args, **kwargs)
|
||||
|
||||
else:
|
||||
|
||||
def checkDeprecatedParameter(*args, **kwargs):
|
||||
if name in kwargs:
|
||||
warn(warningString, DeprecationWarning, stacklevel=2)
|
||||
return wrappee(*args, **kwargs)
|
||||
|
||||
decorated = cast(_Tc, wraps(wrappee)(checkDeprecatedParameter))
|
||||
_appendToDocstring(decorated, doc)
|
||||
return decorated
|
||||
|
||||
return wrapper
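# Illustrative sketch (not part of Twisted): deprecating one keyword parameter
# of a callable. The function, parameter, and version below are hypothetical.
def _exampleDeprecatedKeywordParameter():  # pragma: no cover - documentation example
    @deprecatedKeywordParameter(Version("Twisted", 21, 2, 0), "style")
    def render(text, style=None):
        return text

    render("hello")                # No warning: the parameter is unused.
    render("hello", style="bold")  # Emits a DeprecationWarning.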
|
||||
712
.venv/lib/python3.12/site-packages/twisted/python/failure.py
Normal file
@@ -0,0 +1,712 @@
|
||||
# -*- test-case-name: twisted.test.test_failure -*-
|
||||
# See also test suite twisted.test.test_pbfailure
|
||||
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
|
||||
"""
|
||||
Asynchronous-friendly error mechanism.
|
||||
|
||||
See L{Failure}.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
# System Imports
|
||||
import builtins
|
||||
import copy
|
||||
import inspect
|
||||
import linecache
|
||||
import sys
|
||||
from functools import partial
|
||||
from inspect import getmro
|
||||
from io import StringIO
|
||||
from typing import Callable, NoReturn, TypeVar
|
||||
|
||||
from incremental import Version
|
||||
|
||||
from twisted.python import reflect
|
||||
from twisted.python.deprecate import deprecatedProperty
|
||||
|
||||
_T_Callable = TypeVar("_T_Callable", bound=Callable[..., object])
|
||||
|
||||
count = 0
|
||||
traceupLength = 4
|
||||
|
||||
|
||||
class DefaultException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def format_frames(frames, write, detail="default"):
|
||||
"""
|
||||
Format and write frames.
|
||||
|
||||
@param frames: is a list of frames as used by Failure.frames, with
|
||||
each frame being a list of
|
||||
(funcName, fileName, lineNumber, locals.items(), globals.items())
|
||||
@type frames: list
|
||||
@param write: this will be called with formatted strings.
|
||||
@type write: callable
|
||||
@param detail: Four detail levels are available:
|
||||
default, brief, verbose, and verbose-vars-not-captured.
|
||||
C{Failure.printDetailedTraceback} uses the latter when the caller asks
|
||||
for verbose, but no vars were captured, so that an explicit warning
|
||||
about the missing data is shown.
|
||||
@type detail: string
|
||||
"""
|
||||
if detail not in ("default", "brief", "verbose", "verbose-vars-not-captured"):
|
||||
raise ValueError(
|
||||
"Detail must be default, brief, verbose, or "
|
||||
"verbose-vars-not-captured. (not %r)" % (detail,)
|
||||
)
|
||||
w = write
|
||||
if detail == "brief":
|
||||
for method, filename, lineno, localVars, globalVars in frames:
|
||||
w(f"{filename}:{lineno}:{method}\n")
|
||||
elif detail == "default":
|
||||
for method, filename, lineno, localVars, globalVars in frames:
|
||||
w(f' File "{filename}", line {lineno}, in {method}\n')
|
||||
w(" %s\n" % linecache.getline(filename, lineno).strip())
|
||||
elif detail == "verbose-vars-not-captured":
|
||||
for method, filename, lineno, localVars, globalVars in frames:
|
||||
w("%s:%d: %s(...)\n" % (filename, lineno, method))
|
||||
w(" [Capture of Locals and Globals disabled (use captureVars=True)]\n")
|
||||
elif detail == "verbose":
|
||||
for method, filename, lineno, localVars, globalVars in frames:
|
||||
w("%s:%d: %s(...)\n" % (filename, lineno, method))
|
||||
w(" [ Locals ]\n")
|
||||
# Note: the repr(val) was (self.pickled and val) or repr(val)))
|
||||
for name, val in localVars:
|
||||
w(f" {name} : {repr(val)}\n")
|
||||
w(" ( Globals )\n")
|
||||
for name, val in globalVars:
|
||||
w(f" {name} : {repr(val)}\n")
|
||||
|
||||
|
||||
# Unused, here for backwards compatibility.
|
||||
EXCEPTION_CAUGHT_HERE = "--- <exception caught here> ---"
|
||||
|
||||
|
||||
class NoCurrentExceptionError(Exception):
|
||||
"""
|
||||
Raised when trying to create a Failure from the current interpreter
|
||||
exception state and there is no current exception state.
|
||||
"""
|
||||
|
||||
|
||||
def _Traceback(tbFrames):
|
||||
"""
|
||||
Construct a fake traceback object using a list of frames.
|
||||
|
||||
It should have the same API as stdlib to allow interaction with
|
||||
other tools.
|
||||
|
||||
@param tbFrames: [(methodname, filename, lineno, locals, globals), ...]
|
||||
"""
|
||||
assert len(tbFrames) > 0, "Must pass some frames"
|
||||
# We deliberately avoid using recursion here, as the frames list may be
|
||||
# long.
|
||||
|
||||
stack = None
|
||||
# 'tbFrames' is a list of frames from the point the exception was caught,
|
||||
# down to where it was thrown, with the oldest at the start. Add these to
|
||||
# the linked list of _Frames, but also wrap each one with a _Traceback
|
||||
# frame which is linked in the opposite direction (towards the newest
|
||||
# frame).
|
||||
stack = _Frame(tbFrames[0], stack)
|
||||
firstTb = tb = _TracebackFrame(stack)
|
||||
for sf in tbFrames[1:]:
|
||||
stack = _Frame(sf, stack)
|
||||
tb.tb_next = _TracebackFrame(stack)
|
||||
tb = tb.tb_next
|
||||
|
||||
# Return the first _TracebackFrame.
|
||||
return firstTb
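# Illustrative sketch (not part of Twisted): the synthetic traceback built by
# _Traceback is shaped so the stdlib traceback module can walk it, as the
# docstring above claims. The frame tuples below are made up.
def _exampleTraceback():  # pragma: no cover - documentation example
    import traceback

    fake = _Traceback(
        [("main", "app.py", 10, (), ()), ("helper", "lib.py", 3, (), ())]
    )
    summary = traceback.extract_tb(fake)
    assert [(f.filename, f.name) for f in summary] == [
        ("app.py", "main"),
        ("lib.py", "helper"),
    ]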
|
||||
|
||||
|
||||
# The set of attributes for _TracebackFrame, _Frame and _Code were taken from
|
||||
# https://docs.python.org/3.11/library/inspect.html Other Pythons may have a
|
||||
# few more attributes that should be added if needed.
|
||||
class _TracebackFrame:
|
||||
"""
|
||||
Fake traceback object which can be passed to functions in the standard
|
||||
library L{traceback} module.
|
||||
"""
|
||||
|
||||
def __init__(self, frame):
|
||||
"""
|
||||
@param frame: _Frame object
|
||||
"""
|
||||
self.tb_frame = frame
|
||||
self.tb_lineno = frame.f_lineno
|
||||
self.tb_lasti = frame.f_lasti
|
||||
self.tb_next = None
|
||||
|
||||
|
||||
class _Frame:
|
||||
"""
|
||||
A fake frame object, used by L{_Traceback}.
|
||||
|
||||
@ivar f_code: fake L{code<types.CodeType>} object
|
||||
@ivar f_lineno: line number
|
||||
@ivar f_globals: fake f_globals dictionary (usually empty)
|
||||
@ivar f_locals: fake f_locals dictionary (usually empty)
|
||||
@ivar f_back: previous stack frame (towards the caller)
|
||||
"""
|
||||
|
||||
def __init__(self, frameinfo, back):
|
||||
"""
|
||||
@param frameinfo: (methodname, filename, lineno, locals, globals)
|
||||
@param back: previous (older) stack frame
|
||||
@type back: C{frame}
|
||||
"""
|
||||
name, filename, lineno, localz, globalz = frameinfo
|
||||
self.f_code = _Code(name, filename)
|
||||
self.f_lineno = lineno
|
||||
self.f_globals = dict(globalz or {})
|
||||
self.f_locals = dict(localz or {})
|
||||
self.f_back = back
|
||||
self.f_lasti = 0
|
||||
self.f_builtins = vars(builtins).copy()
|
||||
self.f_trace = None
|
||||
|
||||
|
||||
class _Code:
|
||||
"""
|
||||
A fake code object, used by L{_Traceback} via L{_Frame}.
|
||||
|
||||
It is intended to have the same API as the stdlib code type to allow
|
||||
interoperation with other tools based on that interface.
|
||||
"""
|
||||
|
||||
def __init__(self, name, filename):
|
||||
self.co_name = name
|
||||
self.co_filename = filename
|
||||
self.co_lnotab = b""
|
||||
self.co_firstlineno = 0
|
||||
self.co_argcount = 0
|
||||
self.co_varnames = []
|
||||
self.co_code = b""
|
||||
self.co_cellvars = ()
|
||||
self.co_consts = ()
|
||||
self.co_flags = 0
|
||||
self.co_freevars = ()
|
||||
self.co_posonlyargcount = 0
|
||||
self.co_kwonlyargcount = 0
|
||||
self.co_names = ()
|
||||
self.co_nlocals = 0
|
||||
self.co_stacksize = 0
|
||||
|
||||
def co_positions(self):
|
||||
return ((None, None, None, None),)
|
||||
|
||||
|
||||
_inlineCallbacksExtraneous = []
|
||||
|
||||
|
||||
def _extraneous(f: _T_Callable) -> _T_Callable:
|
||||
"""
|
||||
Mark the given callable as extraneous to inlineCallbacks exception
|
||||
reporting; don't show these functions.
|
||||
|
||||
@param f: a function that you NEVER WANT TO SEE AGAIN in ANY TRACEBACK
|
||||
reported by Failure.
|
||||
|
||||
@type f: function
|
||||
|
||||
@return: f
|
||||
"""
|
||||
_inlineCallbacksExtraneous.append(f.__code__)
|
||||
return f
|
||||
|
||||
|
||||
class Failure(BaseException):
|
||||
"""
|
||||
A basic abstraction for an error that has occurred.
|
||||
|
||||
This is necessary because Python's built-in error mechanisms are
|
||||
inconvenient for asynchronous communication.
|
||||
|
||||
The C{frames} attribute contains the traceback frames. Each frame is a tuple
|
||||
of (funcName, fileName, lineNumber, localsItems, globalsItems), where
|
||||
localsItems and globalsItems are the contents of
|
||||
C{locals().items()}/C{globals().items()} for that frame, or an empty tuple
|
||||
if those details were not captured.
|
||||
|
||||
Local/global variables in C{frame} will only be captured if
|
||||
C{captureVars=True} when constructing the L{Failure}.
|
||||
|
||||
@ivar value: The exception instance responsible for this failure.
|
||||
|
||||
@ivar type: The exception's class.
|
||||
|
||||
@ivar stack: Deprecated, always an empty list. Equivalent information can
|
||||
be extracted from C{import traceback;
|
||||
traceback.extract_stack(your_failure.tb)}
|
||||
|
||||
@ivar frames: list of frames, innermost first.
|
||||
"""
|
||||
|
||||
pickled = 0
|
||||
_parents = None
|
||||
|
||||
def __init__(self, exc_value=None, exc_type=None, exc_tb=None, captureVars=False):
|
||||
"""
|
||||
Initialize me with an explanation of the error.
|
||||
|
||||
By default, this will use the current C{exception}
|
||||
(L{sys.exc_info}()). However, if you want to specify a
|
||||
particular kind of failure, you can pass an exception as an
|
||||
argument.
|
||||
|
||||
If no C{exc_value} is passed, then an "original" C{Failure} will
|
||||
be searched for. If the current exception handler that this
|
||||
C{Failure} is being constructed in is handling an exception
|
||||
raised by L{raiseException}, then this C{Failure} will act like
|
||||
the original C{Failure}.
|
||||
|
||||
For C{exc_tb} only L{traceback} instances or L{None} are allowed.
|
||||
If L{None} is supplied for C{exc_value}, the value of C{exc_tb} is
|
||||
ignored, otherwise if C{exc_tb} is L{None}, it will be found from
|
||||
execution context (ie, L{sys.exc_info}).
|
||||
|
||||
@param captureVars: if set, capture locals and globals of stack
|
||||
frames. This is pretty slow, and makes no difference unless you
|
||||
are going to use L{printDetailedTraceback}.
|
||||
"""
|
||||
global count
|
||||
count = count + 1
|
||||
self.count = count
|
||||
self.type = self.value = tb = None
|
||||
self.captureVars = captureVars
|
||||
|
||||
if exc_value is None:
|
||||
self.type, self.value, tb = sys.exc_info()
|
||||
if self.type is None:
|
||||
raise NoCurrentExceptionError()
|
||||
elif exc_type is None:
|
||||
if isinstance(exc_value, Exception):
|
||||
self.type = exc_value.__class__
|
||||
else:
|
||||
# Allow arbitrary objects.
|
||||
self.type = type(exc_value)
|
||||
self.value = exc_value
|
||||
else:
|
||||
self.type = exc_type
|
||||
self.value = exc_value
|
||||
|
||||
if isinstance(self.value, Failure):
|
||||
self._extrapolate(self.value)
|
||||
return
|
||||
|
||||
if tb is None:
|
||||
if exc_tb:
|
||||
tb = exc_tb
|
||||
elif getattr(self.value, "__traceback__", None):
|
||||
# Python 3
|
||||
tb = self.value.__traceback__
|
||||
self.tb = tb
|
||||
|
||||
@property
|
||||
def frames(self):
|
||||
if hasattr(self, "_frames"):
|
||||
return self._frames
|
||||
|
||||
frames = self._frames = []
|
||||
tb = self.tb
|
||||
|
||||
while tb is not None:
|
||||
f = tb.tb_frame
|
||||
if self.captureVars:
|
||||
localz = f.f_locals.copy()
|
||||
if f.f_locals is f.f_globals:
|
||||
globalz = {}
|
||||
else:
|
||||
globalz = f.f_globals.copy()
|
||||
for d in globalz, localz:
|
||||
if "__builtins__" in d:
|
||||
del d["__builtins__"]
|
||||
localz = list(localz.items())
|
||||
globalz = list(globalz.items())
|
||||
else:
|
||||
localz = globalz = ()
|
||||
frames.append(
|
||||
(
|
||||
f.f_code.co_name,
|
||||
f.f_code.co_filename,
|
||||
tb.tb_lineno,
|
||||
localz,
|
||||
globalz,
|
||||
)
|
||||
)
|
||||
tb = tb.tb_next
|
||||
return frames
|
||||
|
||||
@frames.setter
|
||||
def frames(self, frames):
|
||||
self._frames = frames
|
||||
|
||||
@deprecatedProperty(Version("Twisted", 24, 10, 0))
|
||||
def stack(self):
|
||||
return []
|
||||
|
||||
@stack.setter # type: ignore[no-redef]
|
||||
def stack(self, stack):
|
||||
del stack
|
||||
|
||||
@property
|
||||
def parents(self):
|
||||
if self._parents is not None:
|
||||
return self._parents
|
||||
|
||||
if inspect.isclass(self.type) and issubclass(self.type, Exception):
|
||||
parentCs = getmro(self.type)
|
||||
self._parents = list(map(reflect.qual, parentCs))
|
||||
else:
|
||||
self._parents = [self.type]
|
||||
return self._parents
|
||||
|
||||
@parents.setter
|
||||
def parents(self, parents):
|
||||
self._parents = parents
|
||||
|
||||
def _extrapolate(self, otherFailure):
|
||||
"""
|
||||
Extrapolate from one failure into another, copying its stack frames.
|
||||
|
||||
@param otherFailure: Another L{Failure}, whose traceback information,
|
||||
if any, should be preserved as part of the stack presented by this
|
||||
one.
|
||||
@type otherFailure: L{Failure}
|
||||
"""
|
||||
# Copy all infos from that failure (including self._frames).
|
||||
self.__dict__ = copy.copy(otherFailure.__dict__)
|
||||
|
||||
@staticmethod
|
||||
def _withoutTraceback(value: BaseException) -> Failure:
|
||||
"""
|
||||
Create a L{Failure} for an exception without a traceback.
|
||||
|
||||
By restricting the inputs significantly, this constructor runs much
|
||||
faster.
|
||||
"""
|
||||
result = Failure.__new__(Failure)
|
||||
global count
|
||||
count += 1
|
||||
result.captureVars = False
|
||||
result.count = count
|
||||
result.value = value
|
||||
result.type = value.__class__
|
||||
result.tb = None
|
||||
return result
|
||||
|
||||
def trap(self, *errorTypes):
|
||||
"""
|
||||
Trap this failure if its type is in a predetermined list.
|
||||
|
||||
This allows you to trap a Failure in an error callback. It will be
|
||||
automatically re-raised if it is not a type that you expect.
|
||||
|
||||
The reason for having this particular API is because it's very useful
|
||||
in Deferred errback chains::
|
||||
|
||||
def _ebFoo(self, failure):
|
||||
r = failure.trap(Spam, Eggs)
|
||||
print('The Failure is due to either Spam or Eggs!')
|
||||
if r == Spam:
|
||||
print('Spam did it!')
|
||||
elif r == Eggs:
|
||||
print('Eggs did it!')
|
||||
|
||||
If the failure is not a Spam or an Eggs, then the Failure will be
|
||||
'passed on' to the next errback. In Python 2 the Failure will be
|
||||
raised; in Python 3 the underlying exception will be re-raised.
|
||||
|
||||
@type errorTypes: L{Exception}
|
||||
"""
|
||||
error = self.check(*errorTypes)
|
||||
if not error:
|
||||
self.raiseException()
|
||||
return error
|
||||
|
||||
def check(self, *errorTypes):
|
||||
"""
|
||||
Check if this failure's type is in a predetermined list.
|
||||
|
||||
@type errorTypes: list of L{Exception} classes or
|
||||
fully-qualified class names.
|
||||
@returns: the matching L{Exception} type, or None if no match.
|
||||
"""
|
||||
for error in errorTypes:
|
||||
err = error
|
||||
if inspect.isclass(error) and issubclass(error, Exception):
|
||||
err = reflect.qual(error)
|
||||
if err in self.parents:
|
||||
return error
|
||||
return None
|
||||
|
||||
def raiseException(self) -> NoReturn:
|
||||
"""
|
||||
raise the original exception, preserving traceback
|
||||
information if available.
|
||||
"""
|
||||
raise self.value.with_traceback(self.tb)
|
||||
|
||||
@_extraneous
|
||||
def throwExceptionIntoGenerator(self, g):
|
||||
"""
|
||||
Throw the original exception into the given generator,
|
||||
preserving traceback information if available.
|
||||
|
||||
@return: The next value yielded from the generator.
|
||||
@raise StopIteration: If there are no more values in the generator.
|
||||
@raise anything else: Anything that the generator raises.
|
||||
"""
|
||||
return g.throw(self.value.with_traceback(self.tb))
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "<{} {}: {}>".format(
|
||||
reflect.qual(self.__class__),
|
||||
reflect.qual(self.type),
|
||||
self.getErrorMessage(),
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "[Failure instance: %s]" % self.getBriefTraceback()
|
||||
|
||||
def __setstate__(self, state):
|
||||
if "stack" in state:
|
||||
state.pop("stack")
|
||||
state["_parents"] = state.pop("parents")
|
||||
state["_frames"] = state.pop("frames")
|
||||
self.__dict__.update(state)
|
||||
|
||||
def __getstate__(self):
|
||||
"""
|
||||
Avoid pickling objects in the traceback.
|
||||
|
||||
This is not called directly by pickle, since C{BaseException}
|
||||
implements reduce; instead, pickle calls C{Failure.__reduce__} which
|
||||
then calls this API.
|
||||
"""
|
||||
# Make sure _parents field is populated:
|
||||
_ = self.parents
|
||||
|
||||
c = self.__dict__.copy()
|
||||
|
||||
# Backwards compatibility with old code, e.g. for Perspective Broker:
|
||||
c["parents"] = c.pop("_parents")
|
||||
c["stack"] = []
|
||||
|
||||
if "_frames" in c:
|
||||
c.pop("_frames")
|
||||
|
||||
if self.captureVars:
|
||||
c["frames"] = [
|
||||
[
|
||||
v[0],
|
||||
v[1],
|
||||
v[2],
|
||||
_safeReprVars(v[3]),
|
||||
_safeReprVars(v[4]),
|
||||
]
|
||||
for v in self.frames
|
||||
]
|
||||
else:
|
||||
c["frames"] = self.frames
|
||||
|
||||
# Added 2003-06-23. See comment above in __init__
|
||||
c["tb"] = None
|
||||
|
||||
c["pickled"] = 1
|
||||
return c
|
||||
|
||||
def __reduce__(self):
|
||||
# BaseException implements a __reduce__ (in C, technically), so we need
|
||||
# to override this to get pickling working.
|
||||
return (partial(Failure.__new__, Failure), (), self.__getstate__())
|
||||
|
||||
def cleanFailure(self):
|
||||
"""
|
||||
Remove references to other objects, replacing them with strings.
|
||||
|
||||
On Python 3, this will also set the C{__traceback__} attribute of the
|
||||
exception instance to L{None}.
|
||||
"""
|
||||
state = self.__getstate__()
|
||||
state["_frames"] = state.pop("frames")
|
||||
self.__dict__ = state
|
||||
if getattr(self.value, "__traceback__", None):
|
||||
# Python 3
|
||||
self.value.__traceback__ = None
|
||||
|
||||
def getTracebackObject(self):
|
||||
"""
|
||||
Get an object that represents this Failure's stack that can be passed
|
||||
to traceback.extract_tb.
|
||||
|
||||
If the original traceback object is still present, return that. If this
|
||||
traceback object has been lost but we still have the information,
|
||||
return a fake traceback object (see L{_Traceback}). If there is no
|
||||
traceback information at all, return None.
|
||||
"""
|
||||
if self.tb is not None:
|
||||
return self.tb
|
||||
elif len(self.frames) > 0:
|
||||
return _Traceback(self.frames)
|
||||
else:
|
||||
return None
|
||||
|
||||
def getErrorMessage(self) -> str:
|
||||
"""
|
||||
Get a string of the exception which caused this Failure.
|
||||
"""
|
||||
if isinstance(self.value, Failure):
|
||||
return self.value.getErrorMessage()
|
||||
return reflect.safe_str(self.value)
|
||||
|
||||
def getBriefTraceback(self) -> str:
|
||||
io = StringIO()
|
||||
self.printBriefTraceback(file=io)
|
||||
return io.getvalue()
|
||||
|
||||
def getTraceback(self, elideFrameworkCode: int = 0, detail: str = "default") -> str:
|
||||
io = StringIO()
|
||||
self.printTraceback(
|
||||
file=io, elideFrameworkCode=elideFrameworkCode, detail=detail
|
||||
)
|
||||
return io.getvalue()
|
||||
|
||||
def printTraceback(self, file=None, elideFrameworkCode=False, detail="default"):
|
||||
"""
|
||||
Emulate Python's standard error reporting mechanism.
|
||||
|
||||
@param file: If specified, a file-like object to which to write the
|
||||
traceback.
|
||||
|
||||
@param elideFrameworkCode: Deprecated, ignored.
|
||||
|
||||
@param detail: A string indicating how much information to include
|
||||
in the traceback. Must be one of C{'brief'}, C{'default'}, or
|
||||
C{'verbose'}.
|
||||
"""
|
||||
if file is None:
|
||||
from twisted.python import log
|
||||
|
||||
file = log.logerr
|
||||
|
||||
w = file.write
|
||||
|
||||
if detail == "verbose" and not self.captureVars:
|
||||
# We don't have any locals or globals, so rather than show them as
|
||||
# empty make the output explicitly say that we don't have them at
|
||||
# all.
|
||||
formatDetail = "verbose-vars-not-captured"
|
||||
else:
|
||||
formatDetail = detail
|
||||
|
||||
# Preamble
|
||||
if detail == "verbose":
|
||||
w(
|
||||
"*--- Failure #%d%s---\n"
|
||||
% (self.count, (self.pickled and " (pickled) ") or " ")
|
||||
)
|
||||
elif detail == "brief":
|
||||
if self.frames:
|
||||
hasFrames = "Traceback"
|
||||
else:
|
||||
hasFrames = "Traceback (failure with no frames)"
|
||||
w(
|
||||
"%s: %s: %s\n"
|
||||
% (hasFrames, reflect.safe_str(self.type), reflect.safe_str(self.value))
|
||||
)
|
||||
else:
|
||||
w("Traceback (most recent call last):\n")
|
||||
|
||||
# Frames, formatted in appropriate style
|
||||
if self.frames:
|
||||
format_frames(self.frames, w, formatDetail)
|
||||
elif not detail == "brief":
|
||||
# Yeah, it's not really a traceback, despite looking like one...
|
||||
w("Failure: ")
|
||||
|
||||
# Postamble, if any
|
||||
if not detail == "brief":
|
||||
w(f"{reflect.qual(self.type)}: {reflect.safe_str(self.value)}\n")
|
||||
|
||||
# Chaining
|
||||
if isinstance(self.value, Failure):
|
||||
# TODO: indentation for chained failures?
|
||||
file.write(" (chained Failure)\n")
|
||||
self.value.printTraceback(file, elideFrameworkCode, detail)
|
||||
if detail == "verbose":
|
||||
w("*--- End of Failure #%d ---\n" % self.count)
|
||||
|
||||
def printBriefTraceback(self, file=None, elideFrameworkCode=0):
|
||||
"""
|
||||
Print a traceback as densely as possible.
|
||||
"""
|
||||
self.printTraceback(file, elideFrameworkCode, detail="brief")
|
||||
|
||||
def printDetailedTraceback(self, file=None, elideFrameworkCode=0):
|
||||
"""
|
||||
Print a traceback with detailed locals and globals information.
|
||||
"""
|
||||
self.printTraceback(file, elideFrameworkCode, detail="verbose")
|
||||
|
||||
|
||||
def _safeReprVars(varsDictItems):
|
||||
"""
|
||||
Convert a list of (name, object) pairs into (name, repr) pairs.
|
||||
|
||||
L{twisted.python.reflect.safe_repr} is used to generate the repr, so no
|
||||
exceptions will be raised by faulty C{__repr__} methods.
|
||||
|
||||
@param varsDictItems: a sequence of (name, value) pairs as returned by e.g.
|
||||
C{locals().items()}.
|
||||
@returns: a sequence of (name, repr) pairs.
|
||||
"""
|
||||
return [(name, reflect.safe_repr(obj)) for (name, obj) in varsDictItems]
|
||||
|
||||
|
||||
# slyphon: make post-morteming exceptions tweakable
|
||||
|
||||
DO_POST_MORTEM = True
|
||||
|
||||
|
||||
def _debuginit(
|
||||
self,
|
||||
exc_value=None,
|
||||
exc_type=None,
|
||||
exc_tb=None,
|
||||
captureVars=False,
|
||||
Failure__init__=Failure.__init__,
|
||||
):
|
||||
"""
|
||||
Initialize failure object, possibly spawning pdb.
|
||||
"""
|
||||
if (exc_value, exc_type, exc_tb) == (None, None, None):
|
||||
exc = sys.exc_info()
|
||||
if not exc[0] == self.__class__ and DO_POST_MORTEM:
|
||||
try:
|
||||
strrepr = str(exc[1])
|
||||
except BaseException:
|
||||
strrepr = "broken str"
|
||||
print(
|
||||
"Jumping into debugger for post-mortem of exception '{}':".format(
|
||||
strrepr
|
||||
)
|
||||
)
|
||||
import pdb
|
||||
|
||||
pdb.post_mortem(exc[2])
|
||||
Failure__init__(self, exc_value, exc_type, exc_tb, captureVars)
|
||||
|
||||
|
||||
def startDebugMode():
|
||||
"""
|
||||
Enable debug hooks for Failures.
|
||||
"""
|
||||
Failure.__init__ = _debuginit
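# Illustrative sketch (not part of Twisted): opting in to post-mortem debugging.
def _exampleStartDebugMode():  # pragma: no cover - documentation example
    startDebugMode()
    # From now on, building a Failure while handling an unexpected exception
    # first drops into pdb.post_mortem() before initialising as usual.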
|
||||
263
.venv/lib/python3.12/site-packages/twisted/python/fakepwd.py
Normal file
@@ -0,0 +1,263 @@
|
||||
# -*- test-case-name: twisted.python.test.test_fakepwd -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
L{twisted.python.fakepwd} provides a fake implementation of the L{pwd} API.
|
||||
"""
|
||||
|
||||
from typing import List, Optional
|
||||
|
||||
__all__ = ["UserDatabase", "ShadowDatabase"]
|
||||
|
||||
|
||||
class _UserRecord:
|
||||
"""
|
||||
L{_UserRecord} holds the user data for a single user in L{UserDatabase}.
|
||||
It corresponds to the C{passwd} structure from the L{pwd} module.
|
||||
See that module for attribute documentation.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
password: str,
|
||||
uid: int,
|
||||
gid: int,
|
||||
gecos: str,
|
||||
home: str,
|
||||
shell: str,
|
||||
) -> None:
|
||||
self.pw_name = name
|
||||
self.pw_passwd = password
|
||||
self.pw_uid = uid
|
||||
self.pw_gid = gid
|
||||
self.pw_gecos = gecos
|
||||
self.pw_dir = home
|
||||
self.pw_shell = shell
|
||||
|
||||
def __len__(self) -> int:
|
||||
return 7
|
||||
|
||||
def __getitem__(self, index):
|
||||
return (
|
||||
self.pw_name,
|
||||
self.pw_passwd,
|
||||
self.pw_uid,
|
||||
self.pw_gid,
|
||||
self.pw_gecos,
|
||||
self.pw_dir,
|
||||
self.pw_shell,
|
||||
)[index]
|
||||
|
||||
|
||||
class UserDatabase:
|
||||
"""
|
||||
L{UserDatabase} holds traditional POSIX user data in memory and makes it
|
||||
available via the same API as L{pwd}.
|
||||
|
||||
@ivar _users: A C{list} of L{_UserRecord} instances holding all user data
|
||||
added to this database.
|
||||
"""
|
||||
|
||||
_users: List[_UserRecord]
|
||||
_lastUID: int = 10101
|
||||
_lastGID: int = 20202
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._users = []
|
||||
|
||||
def addUser(
|
||||
self,
|
||||
username: str,
|
||||
password: str = "password",
|
||||
uid: Optional[int] = None,
|
||||
gid: Optional[int] = None,
|
||||
gecos: str = "",
|
||||
home: str = "",
|
||||
shell: str = "/bin/sh",
|
||||
) -> None:
|
||||
"""
|
||||
Add a new user record to this database.
|
||||
|
||||
@param username: The value for the C{pw_name} field of the user
|
||||
record to add.
|
||||
|
||||
@param password: The value for the C{pw_passwd} field of the user
|
||||
record to add.
|
||||
|
||||
@param uid: The value for the C{pw_uid} field of the user record to
|
||||
add.
|
||||
|
||||
@param gid: The value for the C{pw_gid} field of the user record to
|
||||
add.
|
||||
|
||||
@param gecos: The value for the C{pw_gecos} field of the user record
|
||||
to add.
|
||||
|
||||
@param home: The value for the C{pw_dir} field of the user record to
|
||||
add.
|
||||
|
||||
@param shell: The value for the C{pw_shell} field of the user record to
|
||||
add.
|
||||
"""
|
||||
if uid is None:
|
||||
uid = self._lastUID
|
||||
self._lastUID += 1
|
||||
if gid is None:
|
||||
gid = self._lastGID
|
||||
self._lastGID += 1
|
||||
newUser = _UserRecord(username, password, uid, gid, gecos, home, shell)
|
||||
self._users.append(newUser)
|
||||
|
||||
def getpwuid(self, uid: int) -> _UserRecord:
|
||||
"""
|
||||
Return the user record corresponding to the given uid.
|
||||
"""
|
||||
for entry in self._users:
|
||||
if entry.pw_uid == uid:
|
||||
return entry
|
||||
raise KeyError()
|
||||
|
||||
def getpwnam(self, name: str) -> _UserRecord:
|
||||
"""
|
||||
Return the user record corresponding to the given username.
|
||||
"""
|
||||
if not isinstance(name, str):
|
||||
raise TypeError(f"getpwuam() argument must be str, not {type(name)}")
|
||||
for entry in self._users:
|
||||
if entry.pw_name == name:
|
||||
return entry
|
||||
raise KeyError()
|
||||
|
||||
def getpwall(self) -> List[_UserRecord]:
|
||||
"""
|
||||
Return a list of all user records.
|
||||
"""
|
||||
return self._users
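# Illustrative sketch (not part of Twisted): using UserDatabase as an in-memory
# stand-in for the pwd module in tests. The account details are made up.
def _exampleUserDatabase():  # pragma: no cover - documentation example
    db = UserDatabase()
    db.addUser("alice", "x", 1000, 1000, "Alice", "/home/alice", "/bin/sh")
    record = db.getpwnam("alice")
    assert record.pw_uid == 1000
    assert record[0] == "alice"  # Indexable, like pwd.struct_passwd.
    assert db.getpwuid(1000) is record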
|
||||
|
||||
|
||||
class _ShadowRecord:
|
||||
"""
|
||||
L{_ShadowRecord} holds the shadow user data for a single user in
|
||||
L{ShadowDatabase}. It corresponds to C{spwd.struct_spwd}. See that class
|
||||
for attribute documentation.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
username: str,
|
||||
password: str,
|
||||
lastChange: int,
|
||||
min: int,
|
||||
max: int,
|
||||
warn: int,
|
||||
inact: int,
|
||||
expire: int,
|
||||
flag: int,
|
||||
) -> None:
|
||||
self.sp_nam = username
|
||||
self.sp_pwd = password
|
||||
self.sp_lstchg = lastChange
|
||||
self.sp_min = min
|
||||
self.sp_max = max
|
||||
self.sp_warn = warn
|
||||
self.sp_inact = inact
|
||||
self.sp_expire = expire
|
||||
self.sp_flag = flag
|
||||
|
||||
def __len__(self) -> int:
|
||||
return 9
|
||||
|
||||
def __getitem__(self, index):
|
||||
return (
|
||||
self.sp_nam,
|
||||
self.sp_pwd,
|
||||
self.sp_lstchg,
|
||||
self.sp_min,
|
||||
self.sp_max,
|
||||
self.sp_warn,
|
||||
self.sp_inact,
|
||||
self.sp_expire,
|
||||
self.sp_flag,
|
||||
)[index]
|
||||
|
||||
|
||||
class ShadowDatabase:
|
||||
"""
|
||||
L{ShadowDatabase} holds a shadow user database in memory and makes it
|
||||
available via the same API as C{spwd}.
|
||||
|
||||
@ivar _users: A C{list} of L{_ShadowRecord} instances holding all user data
|
||||
added to this database.
|
||||
|
||||
@since: 12.0
|
||||
"""
|
||||
|
||||
_users: List[_ShadowRecord]
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._users = []
|
||||
|
||||
def addUser(
|
||||
self,
|
||||
username: str,
|
||||
password: str,
|
||||
lastChange: int,
|
||||
min: int,
|
||||
max: int,
|
||||
warn: int,
|
||||
inact: int,
|
||||
expire: int,
|
||||
flag: int,
|
||||
) -> None:
|
||||
"""
|
||||
Add a new user record to this database.
|
||||
|
||||
@param username: The value for the C{sp_nam} field of the user record to
|
||||
add.
|
||||
|
||||
@param password: The value for the C{sp_pwd} field of the user record to
|
||||
add.
|
||||
|
||||
@param lastChange: The value for the C{sp_lstchg} field of the user
|
||||
record to add.
|
||||
|
||||
@param min: The value for the C{sp_min} field of the user record to add.
|
||||
|
||||
@param max: The value for the C{sp_max} field of the user record to add.
|
||||
|
||||
@param warn: The value for the C{sp_warn} field of the user record to
|
||||
add.
|
||||
|
||||
@param inact: The value for the C{sp_inact} field of the user record to
|
||||
add.
|
||||
|
||||
@param expire: The value for the C{sp_expire} field of the user record
|
||||
to add.
|
||||
|
||||
@param flag: The value for the C{sp_flag} field of the user record to
|
||||
add.
|
||||
"""
|
||||
self._users.append(
|
||||
_ShadowRecord(
|
||||
username, password, lastChange, min, max, warn, inact, expire, flag
|
||||
)
|
||||
)
|
||||
|
||||
def getspnam(self, username: str) -> _ShadowRecord:
|
||||
"""
|
||||
Return the shadow user record corresponding to the given username.
|
||||
"""
|
||||
if not isinstance(username, str):
|
||||
raise TypeError(f"getspnam() argument must be str, not {type(username)}")
|
||||
for entry in self._users:
|
||||
if entry.sp_nam == username:
|
||||
return entry
|
||||
raise KeyError(username)
|
||||
|
||||
def getspall(self):
|
||||
"""
|
||||
Return a list of all shadow user records.
|
||||
"""
|
||||
return self._users
|
||||
1784
.venv/lib/python3.12/site-packages/twisted/python/filepath.py
Normal file
File diff suppressed because it is too large
446
.venv/lib/python3.12/site-packages/twisted/python/formmethod.py
Normal file
@@ -0,0 +1,446 @@
|
||||
# -*- test-case-name: twisted.test.test_formmethod -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
|
||||
"""
|
||||
Form-based method objects.
|
||||
|
||||
This module contains support for descriptive method signatures that can be used
|
||||
to format methods.
|
||||
"""
|
||||
|
||||
import calendar
|
||||
from typing import Any, Optional, Tuple
|
||||
|
||||
|
||||
class FormException(Exception):
|
||||
"""An error occurred calling the form method."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
Exception.__init__(self, *args)
|
||||
self.descriptions = kwargs
|
||||
|
||||
|
||||
class InputError(FormException):
|
||||
"""
|
||||
An error occurred with some input.
|
||||
"""
|
||||
|
||||
|
||||
class Argument:
|
||||
"""Base class for form arguments."""
|
||||
|
||||
# default value for argument, if no other default is given
|
||||
defaultDefault: Any = None
|
||||
|
||||
def __init__(
|
||||
self, name, default=None, shortDesc=None, longDesc=None, hints=None, allowNone=1
|
||||
):
|
||||
self.name = name
|
||||
self.allowNone = allowNone
|
||||
if default is None:
|
||||
default = self.defaultDefault
|
||||
self.default = default
|
||||
self.shortDesc = shortDesc
|
||||
self.longDesc = longDesc
|
||||
if not hints:
|
||||
hints = {}
|
||||
self.hints = hints
|
||||
|
||||
def addHints(self, **kwargs):
|
||||
self.hints.update(kwargs)
|
||||
|
||||
def getHint(self, name, default=None):
|
||||
return self.hints.get(name, default)
|
||||
|
||||
def getShortDescription(self):
|
||||
return self.shortDesc or self.name.capitalize()
|
||||
|
||||
def getLongDescription(self):
|
||||
return self.longDesc or "" # self.shortDesc or "The %s." % self.name
|
||||
|
||||
def coerce(self, val):
|
||||
"""Convert the value to the correct format."""
|
||||
raise NotImplementedError("implement in subclass")
|
||||
|
||||
|
||||
class String(Argument):
|
||||
"""A single string."""
|
||||
|
||||
defaultDefault: str = ""
|
||||
min = 0
|
||||
max = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
default=None,
|
||||
shortDesc=None,
|
||||
longDesc=None,
|
||||
hints=None,
|
||||
allowNone=1,
|
||||
min=0,
|
||||
max=None,
|
||||
):
|
||||
Argument.__init__(
|
||||
self,
|
||||
name,
|
||||
default=default,
|
||||
shortDesc=shortDesc,
|
||||
longDesc=longDesc,
|
||||
hints=hints,
|
||||
allowNone=allowNone,
|
||||
)
|
||||
self.min = min
|
||||
self.max = max
|
||||
|
||||
def coerce(self, val):
|
||||
s = str(val)
|
||||
if len(s) < self.min:
|
||||
raise InputError("Value must be at least %s characters long" % self.min)
|
||||
if self.max is not None and len(s) > self.max:
|
||||
raise InputError("Value must be at most %s characters long" % self.max)
|
||||
return str(val)
|
||||
|
||||
|
||||
class Text(String):
|
||||
"""A long string."""
|
||||
|
||||
|
||||
class Password(String):
|
||||
"""A string which should be obscured when input."""
|
||||
|
||||
|
||||
class VerifiedPassword(String):
|
||||
"""A string that should be obscured when input and needs verification."""
|
||||
|
||||
def coerce(self, vals):
|
||||
if len(vals) != 2 or vals[0] != vals[1]:
|
||||
raise InputError("Please enter the same password twice.")
|
||||
s = str(vals[0])
|
||||
if len(s) < self.min:
|
||||
raise InputError("Value must be at least %s characters long" % self.min)
|
||||
if self.max is not None and len(s) > self.max:
|
||||
raise InputError("Value must be at most %s characters long" % self.max)
|
||||
return s
|
||||
|
||||
|
||||
class Hidden(String):
|
||||
"""A string which is not displayed.
|
||||
|
||||
The passed default is used as the value.
|
||||
"""
|
||||
|
||||
|
||||
class Integer(Argument):
|
||||
"""A single integer."""
|
||||
|
||||
defaultDefault: Optional[int] = None
|
||||
|
||||
def __init__(
|
||||
self, name, allowNone=1, default=None, shortDesc=None, longDesc=None, hints=None
|
||||
):
|
||||
# although Argument now has allowNone, that was recently added, and
|
||||
# putting it at the end kept things which relied on argument order
|
||||
# from breaking. However, allowNone originally was in here, so
|
||||
# I have to keep the same order, to prevent breaking code that
|
||||
# depends on argument order only
|
||||
Argument.__init__(self, name, default, shortDesc, longDesc, hints, allowNone)
|
||||
|
||||
def coerce(self, val):
|
||||
if not val.strip() and self.allowNone:
|
||||
return None
|
||||
try:
|
||||
return int(val)
|
||||
except ValueError:
|
||||
raise InputError(
|
||||
"{} is not valid, please enter " "a whole number, e.g. 10".format(val)
|
||||
)
|
||||
|
||||
|
||||
class IntegerRange(Integer):
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
min,
|
||||
max,
|
||||
allowNone=1,
|
||||
default=None,
|
||||
shortDesc=None,
|
||||
longDesc=None,
|
||||
hints=None,
|
||||
):
|
||||
self.min = min
|
||||
self.max = max
|
||||
Integer.__init__(
|
||||
self,
|
||||
name,
|
||||
allowNone=allowNone,
|
||||
default=default,
|
||||
shortDesc=shortDesc,
|
||||
longDesc=longDesc,
|
||||
hints=hints,
|
||||
)
|
||||
|
||||
def coerce(self, val):
|
||||
result = Integer.coerce(self, val)
|
||||
if self.allowNone and result is None:
|
||||
return result
|
||||
if result < self.min:
|
||||
raise InputError(
|
||||
"Value {} is too small, it should be at least {}".format(
|
||||
result, self.min
|
||||
)
|
||||
)
|
||||
if result > self.max:
|
||||
raise InputError(
|
||||
"Value {} is too large, it should be at most {}".format(
|
||||
result, self.max
|
||||
)
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
class Float(Argument):
|
||||
defaultDefault: Optional[float] = None
|
||||
|
||||
def __init__(
|
||||
self, name, allowNone=1, default=None, shortDesc=None, longDesc=None, hints=None
|
||||
):
|
||||
# although Argument now has allowNone, that was recently added, and
|
||||
# putting it at the end kept things which relied on argument order
|
||||
# from breaking. However, allowNone originally was in here, so
|
||||
# I have to keep the same order, to prevent breaking code that
|
||||
# depends on argument order only
|
||||
Argument.__init__(self, name, default, shortDesc, longDesc, hints, allowNone)
|
||||
|
||||
def coerce(self, val):
|
||||
if not val.strip() and self.allowNone:
|
||||
return None
|
||||
try:
|
||||
return float(val)
|
||||
except ValueError:
|
||||
raise InputError("Invalid float: %s" % val)
|
||||
|
||||
|
||||
class Choice(Argument):
|
||||
"""
|
||||
The result of a choice between enumerated types. The choices should
|
||||
be a list of tuples of tag, value, and description. The tag will be
|
||||
the value returned if the user hits "Submit", and the description
|
||||
is the label for the enumerated type. default is a list of all the
|
||||
values (second element in choices). If no defaults are specified,
|
||||
initially the first item will be selected. Only one item can (should)
|
||||
be selected at once.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
choices=[],
|
||||
default=[],
|
||||
shortDesc=None,
|
||||
longDesc=None,
|
||||
hints=None,
|
||||
allowNone=1,
|
||||
):
|
||||
self.choices = choices
|
||||
if choices and not default:
|
||||
default.append(choices[0][1])
|
||||
Argument.__init__(
|
||||
self, name, default, shortDesc, longDesc, hints, allowNone=allowNone
|
||||
)
|
||||
|
||||
def coerce(self, inIdent):
|
||||
for ident, val, desc in self.choices:
|
||||
if ident == inIdent:
|
||||
return val
|
||||
else:
|
||||
raise InputError("Invalid Choice: %s" % inIdent)
|
||||
|
||||
|
||||
class Flags(Argument):
|
||||
"""
|
||||
The result of a checkbox group or multi-menu. The flags should be a
|
||||
list of tuples of tag, value, and description. The tag will be
|
||||
the value returned if the user hits "Submit", and the description
|
||||
is the label for the enumerated type. default is a list of all the
|
||||
values (second elements in flags). If no defaults are specified,
|
||||
initially nothing will be selected. Several items may be selected at
|
||||
once.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
flags=(),
|
||||
default=(),
|
||||
shortDesc=None,
|
||||
longDesc=None,
|
||||
hints=None,
|
||||
allowNone=1,
|
||||
):
|
||||
self.flags = flags
|
||||
Argument.__init__(
|
||||
self, name, default, shortDesc, longDesc, hints, allowNone=allowNone
|
||||
)
|
||||
|
||||
def coerce(self, inFlagKeys):
|
||||
if not inFlagKeys:
|
||||
return []
|
||||
outFlags = []
|
||||
for inFlagKey in inFlagKeys:
|
||||
for flagKey, flagVal, flagDesc in self.flags:
|
||||
if inFlagKey == flagKey:
|
||||
outFlags.append(flagVal)
|
||||
break
|
||||
else:
|
||||
raise InputError("Invalid Flag: %s" % inFlagKey)
|
||||
return outFlags
|
||||
|
||||
|
||||
class CheckGroup(Flags):
|
||||
pass
|
||||
|
||||
|
||||
class RadioGroup(Choice):
|
||||
pass
|
||||
|
||||
|
||||
class Boolean(Argument):
|
||||
def coerce(self, inVal):
|
||||
if not inVal:
|
||||
return 0
|
||||
lInVal = str(inVal).lower()
|
||||
if lInVal in ("no", "n", "f", "false", "0"):
|
||||
return 0
|
||||
return 1
|
||||
|
||||
|
||||
class File(Argument):
|
||||
def __init__(self, name, allowNone=1, shortDesc=None, longDesc=None, hints=None):
|
||||
Argument.__init__(
|
||||
self, name, None, shortDesc, longDesc, hints, allowNone=allowNone
|
||||
)
|
||||
|
||||
def coerce(self, file):
|
||||
if not file and self.allowNone:
|
||||
return None
|
||||
elif file:
|
||||
return file
|
||||
else:
|
||||
raise InputError("Invalid File")
|
||||
|
||||
|
||||
def positiveInt(x):
|
||||
x = int(x)
|
||||
if x <= 0:
|
||||
raise ValueError
|
||||
return x
|
||||
|
||||
|
||||
class Date(Argument):
|
||||
"""A date -- (year, month, day) tuple."""
|
||||
|
||||
defaultDefault: Optional[Tuple[int, int, int]] = None
|
||||
|
||||
def __init__(
|
||||
self, name, allowNone=1, default=None, shortDesc=None, longDesc=None, hints=None
|
||||
):
|
||||
Argument.__init__(self, name, default, shortDesc, longDesc, hints)
|
||||
self.allowNone = allowNone
|
||||
if not allowNone:
|
||||
self.defaultDefault = (1970, 1, 1)
|
||||
|
||||
def coerce(self, args):
|
||||
"""Return tuple of ints (year, month, day)."""
|
||||
if tuple(args) == ("", "", "") and self.allowNone:
|
||||
return None
|
||||
|
||||
try:
|
||||
year, month, day = map(positiveInt, args)
|
||||
except ValueError:
|
||||
raise InputError("Invalid date")
|
||||
if (month, day) == (2, 29):
|
||||
if not calendar.isleap(year):
|
||||
raise InputError("%d was not a leap year" % year)
|
||||
else:
|
||||
return year, month, day
|
||||
try:
|
||||
mdays = calendar.mdays[month]
|
||||
except IndexError:
|
||||
raise InputError("Invalid date")
|
||||
if day > mdays:
|
||||
raise InputError("Invalid date")
|
||||
return year, month, day
|
||||
|
||||
|
||||
class Submit(Choice):
|
||||
"""Submit button or a reasonable facsimile thereof."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
choices=[("Submit", "submit", "Submit form")],
|
||||
reset=0,
|
||||
shortDesc=None,
|
||||
longDesc=None,
|
||||
allowNone=0,
|
||||
hints=None,
|
||||
):
|
||||
Choice.__init__(
|
||||
self,
|
||||
name,
|
||||
choices=choices,
|
||||
shortDesc=shortDesc,
|
||||
longDesc=longDesc,
|
||||
hints=hints,
|
||||
)
|
||||
self.allowNone = allowNone
|
||||
self.reset = reset
|
||||
|
||||
def coerce(self, value):
|
||||
if self.allowNone and not value:
|
||||
return None
|
||||
else:
|
||||
return Choice.coerce(self, value)
|
||||
|
||||
|
||||
class PresentationHint:
|
||||
"""
|
||||
A hint to a particular system.
|
||||
"""
|
||||
|
||||
|
||||
class MethodSignature:
|
||||
"""
|
||||
A signature of a callable.
|
||||
"""
|
||||
|
||||
def __init__(self, *sigList):
|
||||
""""""
|
||||
self.methodSignature = sigList
|
||||
|
||||
def getArgument(self, name):
|
||||
for a in self.methodSignature:
|
||||
if a.name == name:
|
||||
return a
|
||||
|
||||
def method(self, callable, takesRequest=False):
|
||||
return FormMethod(self, callable, takesRequest)
|
||||
|
||||
|
||||
class FormMethod:
|
||||
"""A callable object with a signature."""
|
||||
|
||||
def __init__(self, signature, callable, takesRequest=False):
|
||||
self.signature = signature
|
||||
self.callable = callable
|
||||
self.takesRequest = takesRequest
|
||||
|
||||
def getArgs(self):
|
||||
return tuple(self.signature.methodSignature)
|
||||
|
||||
def call(self, *args, **kw):
|
||||
return self.callable(*args, **kw)
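# Illustrative sketch (not part of Twisted): describing a callable with a
# MethodSignature and coercing raw form input. The field names are made up.
def _exampleMethodSignature():  # pragma: no cover - documentation example
    signature = MethodSignature(
        String("name", min=1),
        Integer("age", allowNone=0),
    )
    method = signature.method(lambda name, age: f"{name} is {age}")
    age = signature.getArgument("age").coerce("30")
    assert age == 30
    assert method.call(name="Bob", age=age) == "Bob is 30"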
|
||||
133
.venv/lib/python3.12/site-packages/twisted/python/htmlizer.py
Normal file
@@ -0,0 +1,133 @@
|
||||
# -*- test-case-name: twisted.python.test.test_htmlizer -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
HTML rendering of Python source.
|
||||
"""
|
||||
|
||||
import keyword
|
||||
import tokenize
|
||||
from html import escape
|
||||
from typing import List
|
||||
|
||||
from . import reflect
|
||||
|
||||
|
||||
class TokenPrinter:
|
||||
"""
|
||||
Format a stream of tokens and intermediate whitespace, for pretty-printing.
|
||||
"""
|
||||
|
||||
currentCol, currentLine = 0, 1
|
||||
lastIdentifier = parameters = 0
|
||||
encoding = "utf-8"
|
||||
|
||||
def __init__(self, writer):
|
||||
"""
|
||||
@param writer: A file-like object, opened in bytes mode.
|
||||
"""
|
||||
self.writer = writer
|
||||
|
||||
def printtoken(self, type, token, sCoordinates, eCoordinates, line):
|
||||
if hasattr(tokenize, "ENCODING") and type == tokenize.ENCODING:
|
||||
self.encoding = token
|
||||
return
|
||||
|
||||
if not isinstance(token, bytes):
|
||||
token = token.encode(self.encoding)
|
||||
|
||||
(srow, scol) = sCoordinates
|
||||
(erow, ecol) = eCoordinates
|
||||
if self.currentLine < srow:
|
||||
self.writer(b"\n" * (srow - self.currentLine))
|
||||
self.currentLine, self.currentCol = srow, 0
|
||||
self.writer(b" " * (scol - self.currentCol))
|
||||
if self.lastIdentifier:
|
||||
type = "identifier"
|
||||
self.parameters = 1
|
||||
elif type == tokenize.NAME:
|
||||
if keyword.iskeyword(token):
|
||||
type = "keyword"
|
||||
else:
|
||||
if self.parameters:
|
||||
type = "parameter"
|
||||
else:
|
||||
type = "variable"
|
||||
else:
|
||||
type = tokenize.tok_name.get(type)
|
||||
assert type is not None
|
||||
type = type.lower()
|
||||
self.writer(token, type)
|
||||
self.currentCol = ecol
|
||||
self.currentLine += token.count(b"\n")
|
||||
if self.currentLine != erow:
|
||||
self.currentCol = 0
|
||||
self.lastIdentifier = token in (b"def", b"class")
|
||||
if token == b":":
|
||||
self.parameters = 0
|
||||
|
||||
|
||||
class HTMLWriter:
|
||||
"""
|
||||
Write the stream of tokens and whitespace from L{TokenPrinter}, formatting
|
||||
tokens as HTML spans.
|
||||
"""
|
||||
|
||||
noSpan: List[str] = []
|
||||
|
||||
def __init__(self, writer):
|
||||
self.writer = writer
|
||||
noSpan: List[str] = []
|
||||
reflect.accumulateClassList(self.__class__, "noSpan", noSpan)
|
||||
self.noSpan = noSpan
|
||||
|
||||
def write(self, token, type=None):
|
||||
if isinstance(token, bytes):
|
||||
token = token.decode("utf-8")
|
||||
token = escape(token)
|
||||
token = token.encode("utf-8")
|
||||
if (type is None) or (type in self.noSpan):
|
||||
self.writer(token)
|
||||
else:
|
||||
self.writer(
|
||||
b'<span class="py-src-'
|
||||
+ type.encode("utf-8")
|
||||
+ b'">'
|
||||
+ token
|
||||
+ b"</span>"
|
||||
)
|
||||
|
||||
|
||||
class SmallerHTMLWriter(HTMLWriter):
|
||||
"""
|
||||
HTMLWriter that doesn't generate spans for some junk.
|
||||
|
||||
Results in much smaller HTML output.
|
||||
"""
|
||||
|
||||
noSpan = ["endmarker", "indent", "dedent", "op", "newline", "nl"]
|
||||
|
||||
|
||||
def filter(inp, out, writer=HTMLWriter):
|
||||
out.write(b"<pre>")
|
||||
printer = TokenPrinter(writer(out.write).write).printtoken
|
||||
try:
|
||||
for token in tokenize.tokenize(inp.readline):
|
||||
(tokenType, string, start, end, line) = token
|
||||
printer(tokenType, string, start, end, line)
|
||||
except tokenize.TokenError:
|
||||
pass
|
||||
out.write(b"</pre>\n")
|
||||
|
||||
|
||||
def main():
|
||||
import sys
|
||||
|
||||
stdout = getattr(sys.stdout, "buffer", sys.stdout)
|
||||
with open(sys.argv[1], "rb") as f:
|
||||
filter(f, stdout)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
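A minimal sketch of driving the htmlizer from code rather than via main(); the input filename is illustrative.

import io

from twisted.python import htmlizer

# Render a source file to HTML in memory; filter() writes bytes to `output`.
with open("example.py", "rb") as source:
    output = io.BytesIO()
    htmlizer.filter(source, output)
    html = output.getvalue()  # b"<pre>...</pre>\n" with py-src-* spans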
241
.venv/lib/python3.12/site-packages/twisted/python/lockfile.py
Normal file
@@ -0,0 +1,241 @@
|
||||
# -*- test-case-name: twisted.test.test_lockfile -*-
|
||||
# Copyright (c) 2005 Divmod, Inc.
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Filesystem-based interprocess mutex.
|
||||
"""
|
||||
|
||||
|
||||
import errno
|
||||
import os
|
||||
from time import time as _uniquefloat
|
||||
|
||||
from twisted.python.runtime import platform
|
||||
|
||||
|
||||
def unique():
|
||||
return str(int(_uniquefloat() * 1000))
|
||||
|
||||
|
||||
from os import rename
|
||||
|
||||
if not platform.isWindows():
|
||||
from os import kill, readlink, remove as rmlink, symlink
|
||||
|
||||
_windows = False
|
||||
else:
|
||||
_windows = True
|
||||
|
||||
# On UNIX, a symlink can be made to a nonexistent location, and
|
||||
# FilesystemLock uses this by making the target of the symlink an
|
||||
# imaginary, non-existing file named that of the PID of the process with
|
||||
# the lock. This has some benefits on UNIX -- making and removing this
|
||||
# symlink is atomic. However, because Windows doesn't support symlinks (at
|
||||
# least as how we know them), we have to fake this and actually write a
|
||||
# file with the PID of the process holding the lock instead.
|
||||
# These functions below perform that unenviable, probably-fraught-with-
|
||||
# race-conditions duty. - hawkie
|
||||
|
||||
try:
|
||||
import pywintypes
|
||||
from win32api import OpenProcess
|
||||
except ImportError:
|
||||
kill = None # type: ignore[assignment]
|
||||
else:
|
||||
ERROR_ACCESS_DENIED = 5
|
||||
ERROR_INVALID_PARAMETER = 87
|
||||
|
||||
# typing ignored due to:
|
||||
# https://github.com/python/typeshed/issues/4249
|
||||
def kill(pid, signal): # type: ignore[misc]
|
||||
try:
|
||||
OpenProcess(0, 0, pid)
|
||||
except pywintypes.error as e:
|
||||
if e.args[0] == ERROR_ACCESS_DENIED:
|
||||
return
|
||||
elif e.args[0] == ERROR_INVALID_PARAMETER:
|
||||
raise OSError(errno.ESRCH, None)
|
||||
raise
|
||||
else:
|
||||
raise RuntimeError("OpenProcess is required to fail.")
|
||||
|
||||
# For monkeypatching in tests
|
||||
_open = open
|
||||
|
||||
# typing ignored due to:
|
||||
# https://github.com/python/typeshed/issues/4249
|
||||
def symlink(value, filename): # type: ignore[misc]
|
||||
"""
|
||||
Write a file at C{filename} with the contents of C{value}. See the
|
||||
above comment block as to why this is needed.
|
||||
"""
|
||||
# XXX Implement an atomic thingamajig for win32
|
||||
newlinkname = filename + "." + unique() + ".newlink"
|
||||
newvalname = os.path.join(newlinkname, "symlink")
|
||||
os.mkdir(newlinkname)
|
||||
|
||||
# Python 3 does not support the 'commit' flag of fopen in the MSVCRT
|
||||
# (http://msdn.microsoft.com/en-us/library/yeby3zcb%28VS.71%29.aspx)
|
||||
mode = "w"
|
||||
|
||||
with _open(newvalname, mode) as f:
|
||||
f.write(value)
|
||||
f.flush()
|
||||
|
||||
try:
|
||||
rename(newlinkname, filename)
|
||||
except BaseException:
|
||||
os.remove(newvalname)
|
||||
os.rmdir(newlinkname)
|
||||
raise
|
||||
|
||||
# typing ignored due to:
|
||||
# https://github.com/python/typeshed/issues/4249
|
||||
def readlink(filename): # type: ignore[misc]
|
||||
"""
|
||||
Read the contents of C{filename}. See the above comment block as to why
|
||||
this is needed.
|
||||
"""
|
||||
try:
|
||||
fObj = _open(os.path.join(filename, "symlink"), "r")
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT or e.errno == errno.EIO:
|
||||
raise OSError(e.errno, None)
|
||||
raise
|
||||
else:
|
||||
with fObj:
|
||||
result = fObj.read()
|
||||
return result
|
||||
|
||||
# typing ignored due to:
|
||||
# https://github.com/python/typeshed/issues/4249
|
||||
def rmlink(filename): # type: ignore[misc]
|
||||
os.remove(os.path.join(filename, "symlink"))
|
||||
os.rmdir(filename)
|
||||
|
||||
|
||||
class FilesystemLock:
|
||||
"""
|
||||
A mutex.
|
||||
|
||||
This relies on the filesystem property that creating
|
||||
a symlink is an atomic operation and that it will
|
||||
fail if the symlink already exists. Deleting the
|
||||
symlink will release the lock.
|
||||
|
||||
@ivar name: The name of the file associated with this lock.
|
||||
|
||||
@ivar clean: Indicates whether this lock was released cleanly by its
|
||||
last owner. Only meaningful after C{lock} has been called and
|
||||
returns True.
|
||||
|
||||
@ivar locked: Indicates whether the lock is currently held by this
|
||||
object.
|
||||
"""
|
||||
|
||||
clean = None
|
||||
locked = False
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def lock(self):
|
||||
"""
|
||||
Acquire this lock.
|
||||
|
||||
@rtype: C{bool}
|
||||
@return: True if the lock is acquired, false otherwise.
|
||||
|
||||
@raise OSError: Any exception L{os.symlink()} may raise,
|
||||
other than L{errno.EEXIST}.
|
||||
"""
|
||||
clean = True
|
||||
while True:
|
||||
try:
|
||||
symlink(str(os.getpid()), self.name)
|
||||
except OSError as e:
|
||||
if _windows and e.errno in (errno.EACCES, errno.EIO):
|
||||
# The lock is in the middle of being deleted because we're
|
||||
# on Windows where lock removal isn't atomic. Give up, we
|
||||
# don't know how long this is going to take.
|
||||
return False
|
||||
if e.errno == errno.EEXIST:
|
||||
try:
|
||||
pid = readlink(self.name)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
# The lock has vanished, try to claim it in the
|
||||
# next iteration through the loop.
|
||||
continue
|
||||
elif _windows and e.errno == errno.EACCES:
|
||||
# The lock is in the middle of being
|
||||
# deleted because we're on Windows where
|
||||
# lock removal isn't atomic. Give up, we
|
||||
# don't know how long this is going to
|
||||
# take.
|
||||
return False
|
||||
raise
|
||||
try:
|
||||
if kill is not None:
|
||||
kill(int(pid), 0)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ESRCH:
|
||||
# The owner has vanished, try to claim it in the
|
||||
# next iteration through the loop.
|
||||
try:
|
||||
rmlink(self.name)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
# Another process cleaned up the lock.
|
||||
# Race them to acquire it in the next
|
||||
# iteration through the loop.
|
||||
continue
|
||||
raise
|
||||
clean = False
|
||||
continue
|
||||
raise
|
||||
return False
|
||||
raise
|
||||
self.locked = True
|
||||
self.clean = clean
|
||||
return True
|
||||
|
||||
def unlock(self):
|
||||
"""
|
||||
Release this lock.
|
||||
|
||||
This deletes the directory with the given name.
|
||||
|
||||
@raise OSError: Any exception L{os.readlink()} may raise.
|
||||
@raise ValueError: If the lock is not owned by this process.
|
||||
"""
|
||||
pid = readlink(self.name)
|
||||
if int(pid) != os.getpid():
|
||||
raise ValueError(f"Lock {self.name!r} not owned by this process")
|
||||
rmlink(self.name)
|
||||
self.locked = False
|
||||
|
||||
|
||||
def isLocked(name):
|
||||
"""
|
||||
Determine if the lock of the given name is held or not.
|
||||
|
||||
@type name: C{str}
|
||||
@param name: The filesystem path to the lock to test
|
||||
|
||||
@rtype: C{bool}
|
||||
@return: True if the lock is held, False otherwise.
|
||||
"""
|
||||
l = FilesystemLock(name)
|
||||
result = None
|
||||
try:
|
||||
result = l.lock()
|
||||
finally:
|
||||
if result:
|
||||
l.unlock()
|
||||
return not result
|
||||
|
||||
|
||||
__all__ = ["FilesystemLock", "isLocked"]
|
||||
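A minimal sketch of guarding a critical section with FilesystemLock; the lock path is illustrative.

from twisted.python.lockfile import FilesystemLock

lock = FilesystemLock("myapp.lock")  # illustrative lock path
if lock.lock():
    try:
        # Exclusive work goes here; lock.clean reports whether the previous
        # owner released the lock cleanly.
        pass
    finally:
        lock.unlock()
else:
    print("another process holds the lock")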
738
.venv/lib/python3.12/site-packages/twisted/python/log.py
Normal file
@@ -0,0 +1,738 @@
|
||||
# -*- test-case-name: twisted.test.test_log -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Logging and metrics infrastructure.
|
||||
"""
|
||||
|
||||
|
||||
import sys
|
||||
import time
|
||||
import warnings
|
||||
from abc import ABC, abstractmethod
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, BinaryIO, Dict, Optional, cast
|
||||
|
||||
from zope.interface import Interface
|
||||
|
||||
from twisted.logger import (
|
||||
LegacyLogObserverWrapper,
|
||||
Logger as NewLogger,
|
||||
LoggingFile,
|
||||
LogLevel as NewLogLevel,
|
||||
LogPublisher as NewPublisher,
|
||||
STDLibLogObserver as NewSTDLibLogObserver,
|
||||
globalLogBeginner as newGlobalLogBeginner,
|
||||
globalLogPublisher as newGlobalLogPublisher,
|
||||
)
|
||||
from twisted.logger._global import LogBeginner
|
||||
from twisted.logger._legacy import publishToNewObserver as _publishNew
|
||||
from twisted.python import context, failure, reflect, util
|
||||
from twisted.python.threadable import synchronize
|
||||
|
||||
EventDict = Dict[str, Any]
|
||||
|
||||
|
||||
class ILogContext:
|
||||
"""
|
||||
Actually, this interface is just a synonym for the dictionary interface,
|
||||
but it serves as a key for the default information in a log.
|
||||
|
||||
I do not inherit from C{Interface} because the world is a cruel place.
|
||||
"""
|
||||
|
||||
|
||||
class ILogObserver(Interface):
|
||||
"""
|
||||
An observer which can do something with log events.
|
||||
|
||||
Given that most log observers are actually bound methods, it's okay to not
|
||||
explicitly declare provision of this interface.
|
||||
"""
|
||||
|
||||
def __call__(eventDict: EventDict) -> None:
|
||||
"""
|
||||
Log an event.
|
||||
|
||||
@param eventDict: A dictionary with arbitrary keys. However, these
|
||||
keys are often available:
|
||||
- C{message}: A C{tuple} of C{str} containing messages to be
|
||||
logged.
|
||||
- C{system}: A C{str} which indicates the "system" which is
|
||||
generating this event.
|
||||
- C{isError}: A C{bool} indicating whether this event represents
|
||||
an error.
|
||||
- C{failure}: A L{failure.Failure} instance
|
||||
- C{why}: Used as header of the traceback in case of errors.
|
||||
- C{format}: A string format used in place of C{message} to
|
||||
customize the event. The intent is for the observer to format
|
||||
a message by doing something like C{format % eventDict}.
|
||||
"""
|
||||
|
||||
|
||||
context.setDefault(ILogContext, {"system": "-"})
|
||||
|
||||
|
||||
def callWithContext(ctx, func, *args, **kw):
|
||||
newCtx = context.get(ILogContext).copy()
|
||||
newCtx.update(ctx)
|
||||
return context.call({ILogContext: newCtx}, func, *args, **kw)
|
||||
|
||||
|
||||
def callWithLogger(logger, func, *args, **kw):
|
||||
"""
|
||||
Utility method which wraps a function in a try:/except:, logs a failure if
|
||||
one occurs, and uses the system's logPrefix.
|
||||
"""
|
||||
try:
|
||||
lp = logger.logPrefix()
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except BaseException:
|
||||
lp = "(buggy logPrefix method)"
|
||||
err(system=lp)
|
||||
try:
|
||||
return callWithContext({"system": lp}, func, *args, **kw)
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except BaseException:
|
||||
err(system=lp)
|
||||
|
||||
|
||||
def err(_stuff=None, _why=None, **kw):
|
||||
"""
|
||||
Write a failure to the log.
|
||||
|
||||
The C{_stuff} and C{_why} parameters use an underscore prefix to lessen
|
||||
the chance of colliding with a keyword argument the application wishes
|
||||
to pass. It is intended that they be supplied with arguments passed
|
||||
positionally, not by keyword.
|
||||
|
||||
@param _stuff: The failure to log. If C{_stuff} is L{None} a new
|
||||
L{Failure} will be created from the current exception state. If
|
||||
C{_stuff} is an C{Exception} instance it will be wrapped in a
|
||||
L{Failure}.
|
||||
@type _stuff: L{None}, C{Exception}, or L{Failure}.
|
||||
|
||||
@param _why: The source of this failure. This will be logged along with
|
||||
C{_stuff} and should describe the context in which the failure
|
||||
occurred.
|
||||
@type _why: C{str}
|
||||
"""
|
||||
if _stuff is None:
|
||||
_stuff = failure.Failure()
|
||||
if isinstance(_stuff, failure.Failure):
|
||||
msg(failure=_stuff, why=_why, isError=1, **kw)
|
||||
elif isinstance(_stuff, Exception):
|
||||
msg(failure=failure.Failure(_stuff), why=_why, isError=1, **kw)
|
||||
else:
|
||||
msg(repr(_stuff), why=_why, isError=1, **kw)
|
||||
|
||||
|
||||
deferr = err
|
||||
|
||||
|
||||
class Logger:
|
||||
"""
|
||||
This represents a class which may 'own' a log. Used by subclassing.
|
||||
"""
|
||||
|
||||
def logPrefix(self):
|
||||
"""
|
||||
Override this method to insert custom logging behavior. Its
|
||||
return value will be inserted in front of every line. It may
|
||||
be called more times than the number of output lines.
|
||||
"""
|
||||
return "-"
|
||||
|
||||
|
||||
class LogPublisher:
|
||||
"""
|
||||
Class for singleton log message publishing.
|
||||
"""
|
||||
|
||||
synchronized = ["msg"]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
observerPublisher=None,
|
||||
publishPublisher=None,
|
||||
logBeginner=None,
|
||||
warningsModule=warnings,
|
||||
):
|
||||
if publishPublisher is None:
|
||||
publishPublisher = NewPublisher()
|
||||
if observerPublisher is None:
|
||||
observerPublisher = publishPublisher
|
||||
if observerPublisher is None:
|
||||
observerPublisher = NewPublisher()
|
||||
self._observerPublisher = observerPublisher
|
||||
self._publishPublisher = publishPublisher
|
||||
self._legacyObservers = []
|
||||
if logBeginner is None:
|
||||
# This default behavior is really only used for testing.
|
||||
beginnerPublisher = NewPublisher()
|
||||
beginnerPublisher.addObserver(observerPublisher)
|
||||
logBeginner = LogBeginner(
|
||||
beginnerPublisher, cast(BinaryIO, NullFile()), sys, warnings
|
||||
)
|
||||
self._logBeginner = logBeginner
|
||||
self._warningsModule = warningsModule
|
||||
self._oldshowwarning = warningsModule.showwarning
|
||||
self.showwarning = self._logBeginner.showwarning
|
||||
|
||||
@property
|
||||
def observers(self):
|
||||
"""
|
||||
Property returning all observers registered on this L{LogPublisher}.
|
||||
|
||||
@return: observers previously added with L{LogPublisher.addObserver}
|
||||
@rtype: L{list} of L{callable}
|
||||
"""
|
||||
return [x.legacyObserver for x in self._legacyObservers]
|
||||
|
||||
def _startLogging(self, other, setStdout):
|
||||
"""
|
||||
Begin logging to the L{LogBeginner} associated with this
|
||||
L{LogPublisher}.
|
||||
|
||||
@param other: the observer to log to.
|
||||
@type other: L{LogBeginner}
|
||||
|
||||
@param setStdout: if true, send standard I/O to the observer as well.
|
||||
@type setStdout: L{bool}
|
||||
"""
|
||||
wrapped = LegacyLogObserverWrapper(other)
|
||||
self._legacyObservers.append(wrapped)
|
||||
self._logBeginner.beginLoggingTo([wrapped], True, setStdout)
|
||||
|
||||
def _stopLogging(self):
|
||||
"""
|
||||
Clean-up hook for fixing potentially global state. Only for testing of
|
||||
this module itself. If you want less global state, use the new
|
||||
warnings system in L{twisted.logger}.
|
||||
"""
|
||||
if self._warningsModule.showwarning == self.showwarning:
|
||||
self._warningsModule.showwarning = self._oldshowwarning
|
||||
|
||||
def addObserver(self, other):
|
||||
"""
|
||||
Add a new observer.
|
||||
|
||||
@type other: Provider of L{ILogObserver}
|
||||
@param other: A callable object that will be called with each new log
|
||||
message (a dict).
|
||||
"""
|
||||
wrapped = LegacyLogObserverWrapper(other)
|
||||
self._legacyObservers.append(wrapped)
|
||||
self._observerPublisher.addObserver(wrapped)
|
||||
|
||||
def removeObserver(self, other):
|
||||
"""
|
||||
Remove an observer.
|
||||
"""
|
||||
for observer in self._legacyObservers:
|
||||
if observer.legacyObserver == other:
|
||||
self._legacyObservers.remove(observer)
|
||||
self._observerPublisher.removeObserver(observer)
|
||||
break
|
||||
|
||||
def msg(self, *message, **kw):
|
||||
"""
|
||||
Log a new message.
|
||||
|
||||
The message should be a native string, i.e. bytes on Python 2 and
|
||||
Unicode on Python 3. For compatibility with both use the native string
|
||||
syntax, for example::
|
||||
|
||||
>>> log.msg('Hello, world.')
|
||||
|
||||
You MUST avoid passing in Unicode on Python 2, and the form::
|
||||
|
||||
>>> log.msg('Hello ', 'world.')
|
||||
|
||||
This form only works (sometimes) by accident.
|
||||
|
||||
Keyword arguments will be converted into items in the event
|
||||
dict that is passed to L{ILogObserver} implementations.
|
||||
Each implementation, in turn, can define keys that are used
|
||||
by it specifically, in addition to common keys listed at
|
||||
L{ILogObserver.__call__}.
|
||||
|
||||
For example, to set the C{system} parameter while logging
|
||||
a message::
|
||||
|
||||
>>> log.msg('Started', system='Foo')
|
||||
|
||||
"""
|
||||
actualEventDict = cast(EventDict, (context.get(ILogContext) or {}).copy())
|
||||
actualEventDict.update(kw)
|
||||
actualEventDict["message"] = message
|
||||
actualEventDict["time"] = time.time()
|
||||
if "isError" not in actualEventDict:
|
||||
actualEventDict["isError"] = 0
|
||||
|
||||
_publishNew(self._publishPublisher, actualEventDict, textFromEventDict)
|
||||
|
||||
|
||||
synchronize(LogPublisher)
|
||||
|
||||
|
||||
if "theLogPublisher" not in globals():
|
||||
|
||||
def _actually(something):
|
||||
"""
|
||||
A decorator that returns its argument rather than the thing it is
|
||||
decorating.
|
||||
|
||||
This allows the documentation generator to see an alias for a method or
|
||||
constant as an object with a docstring and thereby document it and
|
||||
allow references to it statically.
|
||||
|
||||
@param something: An object to create an alias for.
|
||||
@type something: L{object}
|
||||
|
||||
@return: a 1-argument callable that returns C{something}
|
||||
@rtype: L{object}
|
||||
"""
|
||||
|
||||
def decorate(thingWithADocstring):
|
||||
return something
|
||||
|
||||
return decorate
|
||||
|
||||
theLogPublisher = LogPublisher(
|
||||
observerPublisher=newGlobalLogPublisher,
|
||||
publishPublisher=newGlobalLogPublisher,
|
||||
logBeginner=newGlobalLogBeginner,
|
||||
)
|
||||
|
||||
@_actually(theLogPublisher.addObserver)
|
||||
def addObserver(observer):
|
||||
"""
|
||||
Add a log observer to the global publisher.
|
||||
|
||||
@see: L{LogPublisher.addObserver}
|
||||
|
||||
@param observer: a log observer
|
||||
@type observer: L{callable}
|
||||
"""
|
||||
|
||||
@_actually(theLogPublisher.removeObserver)
|
||||
def removeObserver(observer):
|
||||
"""
|
||||
Remove a log observer from the global publisher.
|
||||
|
||||
@see: L{LogPublisher.removeObserver}
|
||||
|
||||
@param observer: a log observer previously added with L{addObserver}
|
||||
@type observer: L{callable}
|
||||
"""
|
||||
|
||||
@_actually(theLogPublisher.msg)
|
||||
def msg(*message, **event):
|
||||
"""
|
||||
Publish a message to the global log publisher.
|
||||
|
||||
@see: L{LogPublisher.msg}
|
||||
|
||||
@param message: the log message
|
||||
@type message: C{tuple} of L{str} (native string)
|
||||
|
||||
@param event: fields for the log event
|
||||
@type event: L{dict} mapping L{str} (native string) to L{object}
|
||||
"""
|
||||
|
||||
@_actually(theLogPublisher.showwarning)
|
||||
def showwarning():
|
||||
"""
|
||||
Publish a Python warning through the global log publisher.
|
||||
|
||||
@see: L{LogPublisher.showwarning}
|
||||
"""
|
||||
|
||||
|
||||
def _safeFormat(fmtString: str, fmtDict: Dict[str, Any]) -> str:
|
||||
"""
|
||||
Try to format a string, swallowing all errors to always return a string.
|
||||
|
||||
@note: For backward-compatibility reasons, this function ensures that it
|
||||
returns a native string, meaning L{bytes} in Python 2 and L{str} in
|
||||
Python 3.
|
||||
|
||||
@param fmtString: a C{%}-format string
|
||||
@param fmtDict: string formatting arguments for C{fmtString}
|
||||
|
||||
@return: A native string, formatted from C{fmtString} and C{fmtDict}.
|
||||
"""
|
||||
# There's a way we could make this if not safer at least more
|
||||
# informative: perhaps some sort of str/repr wrapper objects
|
||||
# could be wrapped around the things inside of C{fmtDict}. That way
|
||||
# if the event dict contains an object with a bad __repr__, we
|
||||
# can only cry about that individual object instead of the
|
||||
# entire event dict.
|
||||
try:
|
||||
text = fmtString % fmtDict
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except BaseException:
|
||||
try:
|
||||
text = (
|
||||
"Invalid format string or unformattable object in "
|
||||
"log message: %r, %s" % (fmtString, fmtDict)
|
||||
)
|
||||
except BaseException:
|
||||
try:
|
||||
text = (
|
||||
"UNFORMATTABLE OBJECT WRITTEN TO LOG with fmt %r, "
|
||||
"MESSAGE LOST" % (fmtString,)
|
||||
)
|
||||
except BaseException:
|
||||
text = (
|
||||
"PATHOLOGICAL ERROR IN BOTH FORMAT STRING AND "
|
||||
"MESSAGE DETAILS, MESSAGE LOST"
|
||||
)
|
||||
|
||||
return text
|
||||
|
||||
|
||||
def textFromEventDict(eventDict: EventDict) -> Optional[str]:
|
||||
"""
|
||||
Extract text from an event dict passed to a log observer. If it cannot
|
||||
handle the dict, it returns None.
|
||||
|
||||
The possible keys of eventDict are:
|
||||
- C{message}: by default, it holds the final text. It's required, but can
|
||||
be empty if either C{isError} or C{format} is provided (the first
|
||||
having the priority).
|
||||
- C{isError}: boolean indicating the nature of the event.
|
||||
- C{failure}: L{failure.Failure} instance, required if the event is an
|
||||
error.
|
||||
- C{why}: if defined, used as header of the traceback in case of errors.
|
||||
- C{format}: string format used in place of C{message} to customize
|
||||
the event. It uses all keys present in C{eventDict} to format
|
||||
the text.
|
||||
Other keys will be used when applying the C{format}, or ignored.
|
||||
"""
|
||||
edm = eventDict["message"]
|
||||
if not edm:
|
||||
if eventDict["isError"] and "failure" in eventDict:
|
||||
why = cast(str, eventDict.get("why"))
|
||||
if why:
|
||||
why = reflect.safe_str(why)
|
||||
else:
|
||||
why = "Unhandled Error"
|
||||
try:
|
||||
traceback = cast(failure.Failure, eventDict["failure"]).getTraceback()
|
||||
except Exception as e:
|
||||
traceback = "(unable to obtain traceback): " + str(e)
|
||||
text = why + "\n" + traceback
|
||||
elif "format" in eventDict:
|
||||
text = _safeFormat(eventDict["format"], eventDict)
|
||||
else:
|
||||
# We don't know how to log this
|
||||
return None
|
||||
else:
|
||||
text = " ".join(map(reflect.safe_str, edm))
|
||||
return text
|
||||
|
||||
|
||||
class _GlobalStartStopObserver(ABC):
|
||||
"""
|
||||
Mix-in for global log observers that can start and stop.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def emit(self, eventDict: EventDict) -> None:
|
||||
"""
|
||||
Emit the given log event.
|
||||
|
||||
@param eventDict: a log event
|
||||
"""
|
||||
|
||||
def start(self) -> None:
|
||||
"""
|
||||
Start observing log events.
|
||||
"""
|
||||
addObserver(self.emit)
|
||||
|
||||
def stop(self) -> None:
|
||||
"""
|
||||
Stop observing log events.
|
||||
"""
|
||||
removeObserver(self.emit)
|
||||
|
||||
|
||||
class FileLogObserver(_GlobalStartStopObserver):
|
||||
"""
|
||||
Log observer that writes to a file-like object.
|
||||
|
||||
@type timeFormat: C{str} or L{None}
|
||||
@ivar timeFormat: If not L{None}, the format string passed to strftime().
|
||||
"""
|
||||
|
||||
timeFormat: Optional[str] = None
|
||||
|
||||
def __init__(self, f):
|
||||
# Compatibility
|
||||
self.write = f.write
|
||||
self.flush = f.flush
|
||||
|
||||
def getTimezoneOffset(self, when):
|
||||
"""
|
||||
Return the current local timezone offset from UTC.
|
||||
|
||||
@type when: C{int}
|
||||
@param when: POSIX (ie, UTC) timestamp for which to find the offset.
|
||||
|
||||
@rtype: C{int}
|
||||
@return: The number of seconds offset from UTC. West is positive,
|
||||
east is negative.
|
||||
"""
|
||||
offset = datetime.fromtimestamp(when, timezone.utc).replace(
|
||||
tzinfo=None
|
||||
) - datetime.fromtimestamp(when)
|
||||
return offset.days * (60 * 60 * 24) + offset.seconds
|
||||
|
||||
def formatTime(self, when):
|
||||
"""
|
||||
Format the given UTC value as a string representing that time in the
|
||||
local timezone.
|
||||
|
||||
By default it's formatted as an ISO8601-like string (ISO8601 date and
|
||||
ISO8601 time separated by a space). It can be customized using the
|
||||
C{timeFormat} attribute, which will be used as input for the underlying
|
||||
L{datetime.datetime.strftime} call.
|
||||
|
||||
@type when: C{int}
|
||||
@param when: POSIX (ie, UTC) timestamp for which to find the offset.
|
||||
|
||||
@rtype: C{str}
|
||||
"""
|
||||
if self.timeFormat is not None:
|
||||
return datetime.fromtimestamp(when).strftime(self.timeFormat)
|
||||
|
||||
tzOffset = -self.getTimezoneOffset(when)
|
||||
when = datetime.fromtimestamp(when + tzOffset, timezone.utc).replace(
|
||||
tzinfo=None
|
||||
)
|
||||
tzHour = abs(int(tzOffset / 60 / 60))
|
||||
tzMin = abs(int(tzOffset / 60 % 60))
|
||||
if tzOffset < 0:
|
||||
tzSign = "-"
|
||||
else:
|
||||
tzSign = "+"
|
||||
return "%d-%02d-%02d %02d:%02d:%02d%s%02d%02d" % (
|
||||
when.year,
|
||||
when.month,
|
||||
when.day,
|
||||
when.hour,
|
||||
when.minute,
|
||||
when.second,
|
||||
tzSign,
|
||||
tzHour,
|
||||
tzMin,
|
||||
)
|
||||
|
||||
def emit(self, eventDict: EventDict) -> None:
|
||||
"""
|
||||
Format the given log event as text and write it to the output file.
|
||||
|
||||
@param eventDict: a log event
|
||||
"""
|
||||
text = textFromEventDict(eventDict)
|
||||
if text is None:
|
||||
return
|
||||
|
||||
timeStr = self.formatTime(eventDict["time"])
|
||||
fmtDict = {"system": eventDict["system"], "text": text.replace("\n", "\n\t")}
|
||||
msgStr = _safeFormat("[%(system)s] %(text)s\n", fmtDict)
|
||||
|
||||
util.untilConcludes(self.write, timeStr + " " + msgStr)
|
||||
util.untilConcludes(self.flush) # Hoorj!
|
||||
|
||||
|
||||
class PythonLoggingObserver(_GlobalStartStopObserver):
|
||||
"""
|
||||
Output twisted messages to Python standard library L{logging} module.
|
||||
|
||||
WARNING: specific logging configurations (example: network) can lead to
|
||||
a blocking system. Nothing is done here to prevent that, so be sure to not
|
||||
use this: code within Twisted, such as twisted.web, assumes that logging
|
||||
does not block.
|
||||
"""
|
||||
|
||||
def __init__(self, loggerName="twisted"):
|
||||
"""
|
||||
@param loggerName: identifier used for getting logger.
|
||||
@type loggerName: C{str}
|
||||
"""
|
||||
self._newObserver = NewSTDLibLogObserver(loggerName)
|
||||
|
||||
def emit(self, eventDict: EventDict) -> None:
|
||||
"""
|
||||
Receive a twisted log entry, format it and bridge it to python.
|
||||
|
||||
By default the logging level used is info; log.err produces error
|
||||
level, and you can customize the level by using the C{logLevel} key::
|
||||
|
||||
>>> log.msg('debugging', logLevel=logging.DEBUG)
|
||||
"""
|
||||
if "log_format" in eventDict:
|
||||
_publishNew(self._newObserver, eventDict, textFromEventDict)
|
||||
|
||||
|
||||
class StdioOnnaStick:
|
||||
"""
|
||||
Class that pretends to be stdout/err, and turns writes into log messages.
|
||||
|
||||
@ivar isError: boolean indicating whether this is stderr, in which cases
|
||||
log messages will be logged as errors.
|
||||
|
||||
@ivar encoding: unicode encoding used to encode any unicode strings
|
||||
written to this object.
|
||||
"""
|
||||
|
||||
closed = 0
|
||||
softspace = 0
|
||||
mode = "wb"
|
||||
name = "<stdio (log)>"
|
||||
|
||||
def __init__(self, isError=0, encoding=None):
|
||||
self.isError = isError
|
||||
if encoding is None:
|
||||
encoding = sys.getdefaultencoding()
|
||||
self.encoding = encoding
|
||||
self.buf = ""
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
def fileno(self):
|
||||
return -1
|
||||
|
||||
def flush(self):
|
||||
pass
|
||||
|
||||
def read(self):
|
||||
raise OSError("can't read from the log!")
|
||||
|
||||
readline = read
|
||||
readlines = read
|
||||
seek = read
|
||||
tell = read
|
||||
|
||||
def write(self, data):
|
||||
d = (self.buf + data).split("\n")
|
||||
self.buf = d[-1]
|
||||
messages = d[0:-1]
|
||||
for message in messages:
|
||||
msg(message, printed=1, isError=self.isError)
|
||||
|
||||
def writelines(self, lines):
|
||||
for line in lines:
|
||||
msg(line, printed=1, isError=self.isError)
|
||||
|
||||
|
||||
def startLogging(file, *a, **kw):
|
||||
"""
|
||||
Initialize logging to a specified file.
|
||||
|
||||
@return: A L{FileLogObserver} if a new observer is added, None otherwise.
|
||||
"""
|
||||
if isinstance(file, LoggingFile):
|
||||
return
|
||||
flo = FileLogObserver(file)
|
||||
startLoggingWithObserver(flo.emit, *a, **kw)
|
||||
return flo
|
||||
|
||||
|
||||
def startLoggingWithObserver(observer, setStdout=1):
|
||||
"""
|
||||
Initialize logging to a specified observer. If setStdout is true
|
||||
(defaults to yes), also redirect sys.stdout and sys.stderr
|
||||
to the specified file.
|
||||
"""
|
||||
theLogPublisher._startLogging(observer, setStdout)
|
||||
msg("Log opened.")
|
||||
|
||||
|
||||
class NullFile:
|
||||
"""
|
||||
A file-like object that discards everything.
|
||||
"""
|
||||
|
||||
softspace = 0
|
||||
|
||||
def read(self):
|
||||
"""
|
||||
Do nothing.
|
||||
"""
|
||||
|
||||
def write(self, bytes):
|
||||
"""
|
||||
Do nothing.
|
||||
|
||||
@param bytes: data
|
||||
@type bytes: L{bytes}
|
||||
"""
|
||||
|
||||
def flush(self):
|
||||
"""
|
||||
Do nothing.
|
||||
"""
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Do nothing.
|
||||
"""
|
||||
|
||||
|
||||
def discardLogs():
|
||||
"""
|
||||
Discard messages logged via the global C{logfile} object.
|
||||
"""
|
||||
global logfile
|
||||
logfile = NullFile()
|
||||
|
||||
|
||||
# Prevent logfile from being erased on reload. This only works in cpython.
|
||||
if "logfile" not in globals():
|
||||
logfile = LoggingFile(
|
||||
logger=NewLogger(),
|
||||
level=NewLogLevel.info,
|
||||
encoding=getattr(sys.stdout, "encoding", None),
|
||||
)
|
||||
logerr = LoggingFile(
|
||||
logger=NewLogger(),
|
||||
level=NewLogLevel.error,
|
||||
encoding=getattr(sys.stderr, "encoding", None),
|
||||
)
|
||||
|
||||
|
||||
class DefaultObserver(_GlobalStartStopObserver):
|
||||
"""
|
||||
Default observer.
|
||||
|
||||
Will ignore all non-error messages and send error messages to sys.stderr.
|
||||
Will be removed when startLogging() is called for the first time.
|
||||
"""
|
||||
|
||||
stderr = sys.stderr
|
||||
|
||||
def emit(self, eventDict: EventDict) -> None:
|
||||
"""
|
||||
Emit an event dict.
|
||||
|
||||
@param eventDict: an event dict
|
||||
"""
|
||||
if eventDict["isError"]:
|
||||
text = textFromEventDict(eventDict)
|
||||
if text is not None:
|
||||
self.stderr.write(text)
|
||||
self.stderr.flush()
|
||||
|
||||
|
||||
if "defaultObserver" not in globals():
|
||||
defaultObserver = DefaultObserver()
|
||||
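A minimal sketch of the legacy log API defined above: startLogging() installs a FileLogObserver, and err() captures the active exception as a Failure when called inside an except block.

import sys

from twisted.python import log

log.startLogging(sys.stdout)                # returns the FileLogObserver
log.msg("Server starting", system="myapp")
try:
    1 / 0
except ZeroDivisionError:
    log.err(None, "arithmetic went wrong")  # logs the current Failure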
341
.venv/lib/python3.12/site-packages/twisted/python/logfile.py
Normal file
@@ -0,0 +1,341 @@
|
||||
# -*- test-case-name: twisted.test.test_logfile -*-
|
||||
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
A rotating, browsable log file.
|
||||
"""
|
||||
|
||||
|
||||
# System Imports
|
||||
import glob
|
||||
import os
|
||||
import stat
|
||||
import time
|
||||
from typing import BinaryIO, Optional, cast
|
||||
|
||||
from twisted.python import threadable
|
||||
|
||||
|
||||
class BaseLogFile:
|
||||
"""
|
||||
The base class for a log file that can be rotated.
|
||||
"""
|
||||
|
||||
synchronized = ["write", "rotate"]
|
||||
|
||||
def __init__(
|
||||
self, name: str, directory: str, defaultMode: Optional[int] = None
|
||||
) -> None:
|
||||
"""
|
||||
Create a log file.
|
||||
|
||||
@param name: name of the file
|
||||
@param directory: directory holding the file
|
||||
@param defaultMode: permissions used to create the file. Defaults to
|
||||
current permissions of the file if the file exists.
|
||||
"""
|
||||
self.directory = directory
|
||||
self.name = name
|
||||
self.path = os.path.join(directory, name)
|
||||
if defaultMode is None and os.path.exists(self.path):
|
||||
self.defaultMode: Optional[int] = stat.S_IMODE(
|
||||
os.stat(self.path)[stat.ST_MODE]
|
||||
)
|
||||
else:
|
||||
self.defaultMode = defaultMode
|
||||
self._openFile()
|
||||
|
||||
@classmethod
|
||||
def fromFullPath(cls, filename, *args, **kwargs):
|
||||
"""
|
||||
Construct a log file from a full file path.
|
||||
"""
|
||||
logPath = os.path.abspath(filename)
|
||||
return cls(os.path.basename(logPath), os.path.dirname(logPath), *args, **kwargs)
|
||||
|
||||
def shouldRotate(self):
|
||||
"""
|
||||
Override with a method that returns true if the log
|
||||
should be rotated.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def _openFile(self):
|
||||
"""
|
||||
Open the log file.
|
||||
|
||||
The log file is always opened in binary mode.
|
||||
"""
|
||||
self.closed = False
|
||||
if os.path.exists(self.path):
|
||||
self._file = cast(BinaryIO, open(self.path, "rb+", 0))
|
||||
self._file.seek(0, 2)
|
||||
else:
|
||||
if self.defaultMode is not None:
|
||||
# Set the lowest permissions
|
||||
oldUmask = os.umask(0o777)
|
||||
try:
|
||||
self._file = cast(BinaryIO, open(self.path, "wb+", 0))
|
||||
finally:
|
||||
os.umask(oldUmask)
|
||||
else:
|
||||
self._file = cast(BinaryIO, open(self.path, "wb+", 0))
|
||||
if self.defaultMode is not None:
|
||||
try:
|
||||
os.chmod(self.path, self.defaultMode)
|
||||
except OSError:
|
||||
# Probably /dev/null or something?
|
||||
pass
|
||||
|
||||
def write(self, data):
|
||||
"""
|
||||
Write some data to the file.
|
||||
|
||||
@param data: The data to write. Text will be encoded as UTF-8.
|
||||
@type data: L{bytes} or L{unicode}
|
||||
"""
|
||||
if self.shouldRotate():
|
||||
self.flush()
|
||||
self.rotate()
|
||||
if isinstance(data, str):
|
||||
data = data.encode("utf8")
|
||||
self._file.write(data)
|
||||
|
||||
def flush(self):
|
||||
"""
|
||||
Flush the file.
|
||||
"""
|
||||
self._file.flush()
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Close the file.
|
||||
|
||||
The file cannot be used once it has been closed.
|
||||
"""
|
||||
self.closed = True
|
||||
self._file.close()
|
||||
del self._file
|
||||
|
||||
def reopen(self):
|
||||
"""
|
||||
Reopen the log file. This is mainly useful if you use an external log
|
||||
rotation tool, which moves the file out from under you.
|
||||
|
||||
Note that on Windows you probably need a specific API to rename the
|
||||
file, since simply using os.rename, for example, is not supported.
|
||||
"""
|
||||
self.close()
|
||||
self._openFile()
|
||||
|
||||
def getCurrentLog(self):
|
||||
"""
|
||||
Return a LogReader for the current log file.
|
||||
"""
|
||||
return LogReader(self.path)
|
||||
|
||||
|
||||
class LogFile(BaseLogFile):
|
||||
"""
|
||||
A log file that can be rotated.
|
||||
|
||||
A rotateLength of None disables automatic log rotation.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name,
|
||||
directory,
|
||||
rotateLength=1000000,
|
||||
defaultMode=None,
|
||||
maxRotatedFiles=None,
|
||||
):
|
||||
"""
|
||||
Create a log file rotating on length.
|
||||
|
||||
@param name: file name.
|
||||
@type name: C{str}
|
||||
@param directory: path of the log file.
|
||||
@type directory: C{str}
|
||||
@param rotateLength: size of the log file, in bytes, at which it rotates. Defaults to
|
||||
1M.
|
||||
@type rotateLength: C{int}
|
||||
@param defaultMode: mode used to create the file.
|
||||
@type defaultMode: C{int}
|
||||
@param maxRotatedFiles: if not None, max number of log files the class
|
||||
creates. Warning: it removes all log files above this number.
|
||||
@type maxRotatedFiles: C{int}
|
||||
"""
|
||||
BaseLogFile.__init__(self, name, directory, defaultMode)
|
||||
self.rotateLength = rotateLength
|
||||
self.maxRotatedFiles = maxRotatedFiles
|
||||
|
||||
def _openFile(self):
|
||||
BaseLogFile._openFile(self)
|
||||
self.size = self._file.tell()
|
||||
|
||||
def shouldRotate(self):
|
||||
"""
|
||||
Rotate when the log file size is larger than rotateLength.
|
||||
"""
|
||||
return self.rotateLength and self.size >= self.rotateLength
|
||||
|
||||
def getLog(self, identifier):
|
||||
"""
|
||||
Given an integer, return a LogReader for an old log file.
|
||||
"""
|
||||
filename = "%s.%d" % (self.path, identifier)
|
||||
if not os.path.exists(filename):
|
||||
raise ValueError("no such logfile exists")
|
||||
return LogReader(filename)
|
||||
|
||||
def write(self, data):
|
||||
"""
|
||||
Write some data to the file.
|
||||
"""
|
||||
BaseLogFile.write(self, data)
|
||||
self.size += len(data)
|
||||
|
||||
def rotate(self):
|
||||
"""
|
||||
Rotate the file and create a new one.
|
||||
|
||||
If it's not possible to open a new logfile, this will fail silently,
and continue logging to the old logfile.
|
||||
"""
|
||||
if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
|
||||
return
|
||||
logs = self.listLogs()
|
||||
logs.reverse()
|
||||
for i in logs:
|
||||
if self.maxRotatedFiles is not None and i >= self.maxRotatedFiles:
|
||||
os.remove("%s.%d" % (self.path, i))
|
||||
else:
|
||||
os.rename("%s.%d" % (self.path, i), "%s.%d" % (self.path, i + 1))
|
||||
self._file.close()
|
||||
os.rename(self.path, "%s.1" % self.path)
|
||||
self._openFile()
|
||||
|
||||
def listLogs(self):
|
||||
"""
|
||||
Return sorted list of integers - the old logs' identifiers.
|
||||
"""
|
||||
result = []
|
||||
for name in glob.glob("%s.*" % self.path):
|
||||
try:
|
||||
counter = int(name.split(".")[-1])
|
||||
if counter:
|
||||
result.append(counter)
|
||||
except ValueError:
|
||||
pass
|
||||
result.sort()
|
||||
return result
|
||||
|
||||
def __getstate__(self):
|
||||
state = BaseLogFile.__getstate__(self)
|
||||
del state["size"]
|
||||
return state
|
||||
|
||||
|
||||
threadable.synchronize(LogFile)
|
||||
|
||||
|
||||
class DailyLogFile(BaseLogFile):
|
||||
"""A log file that is rotated daily (at or after midnight localtime)"""
|
||||
|
||||
def _openFile(self):
|
||||
BaseLogFile._openFile(self)
|
||||
self.lastDate = self.toDate(os.stat(self.path)[8])
|
||||
|
||||
def shouldRotate(self):
|
||||
"""Rotate when the date has changed since last write"""
|
||||
return self.toDate() > self.lastDate
|
||||
|
||||
def toDate(self, *args):
|
||||
"""Convert a unixtime to (year, month, day) localtime tuple,
|
||||
or return the current (year, month, day) localtime tuple.
|
||||
|
||||
This function primarily exists so you may overload it with
|
||||
gmtime, or some cruft to make unit testing possible.
|
||||
"""
|
||||
# primarily so this can be unit tested easily
|
||||
return time.localtime(*args)[:3]
|
||||
|
||||
def suffix(self, tupledate):
|
||||
"""Return the suffix given a (year, month, day) tuple or unixtime"""
|
||||
try:
|
||||
return "_".join(map(str, tupledate))
|
||||
except BaseException:
|
||||
# try taking a float unixtime
|
||||
return "_".join(map(str, self.toDate(tupledate)))
|
||||
|
||||
def getLog(self, identifier):
|
||||
"""Given a unix time, return a LogReader for an old log file."""
|
||||
if self.toDate(identifier) == self.lastDate:
|
||||
return self.getCurrentLog()
|
||||
filename = f"{self.path}.{self.suffix(identifier)}"
|
||||
if not os.path.exists(filename):
|
||||
raise ValueError("no such logfile exists")
|
||||
return LogReader(filename)
|
||||
|
||||
def write(self, data):
|
||||
"""Write some data to the log file"""
|
||||
BaseLogFile.write(self, data)
|
||||
# Guard against a corner case where time.time()
|
||||
# could potentially run backwards to yesterday.
|
||||
# Primarily due to network time.
|
||||
self.lastDate = max(self.lastDate, self.toDate())
|
||||
|
||||
def rotate(self):
|
||||
"""Rotate the file and create a new one.
|
||||
|
||||
If it's not possible to open a new logfile, this will fail silently,
and continue logging to the old logfile.
|
||||
"""
|
||||
if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
|
||||
return
|
||||
newpath = f"{self.path}.{self.suffix(self.lastDate)}"
|
||||
if os.path.exists(newpath):
|
||||
return
|
||||
self._file.close()
|
||||
os.rename(self.path, newpath)
|
||||
self._openFile()
|
||||
|
||||
def __getstate__(self):
|
||||
state = BaseLogFile.__getstate__(self)
|
||||
del state["lastDate"]
|
||||
return state
|
||||
|
||||
|
||||
threadable.synchronize(DailyLogFile)
|
||||
|
||||
|
||||
class LogReader:
|
||||
"""Read from a log file."""
|
||||
|
||||
def __init__(self, name):
|
||||
"""
|
||||
Open the log file for reading.
|
||||
|
||||
The comments about binary-mode for L{BaseLogFile._openFile} also apply
|
||||
here.
|
||||
"""
|
||||
self._file = open(name) # Optional[BinaryIO]
|
||||
|
||||
def readLines(self, lines=10):
|
||||
"""Read a list of lines from the log file.
|
||||
|
||||
This doesn't return all of the file's lines - call it multiple times.
|
||||
"""
|
||||
result = []
|
||||
for i in range(lines):
|
||||
line = self._file.readline()
|
||||
if not line:
|
||||
break
|
||||
result.append(line)
|
||||
return result
|
||||
|
||||
def close(self):
|
||||
self._file.close()
|
||||
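A minimal sketch of a size-rotated log using LogFile; the directory and limits are illustrative.

from twisted.python.logfile import LogFile

f = LogFile("myapp.log", "/tmp", rotateLength=1024 * 1024, maxRotatedFiles=3)
f.write("started\n")   # text is encoded as UTF-8 before it is written
f.flush()
print(f.listLogs())    # identifiers of rotated files, e.g. [1, 2]
f.close()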
786
.venv/lib/python3.12/site-packages/twisted/python/modules.py
Normal file
@@ -0,0 +1,786 @@
|
||||
# -*- test-case-name: twisted.test.test_modules -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
This module aims to provide a unified, object-oriented view of Python's
|
||||
runtime hierarchy.
|
||||
|
||||
Python is a very dynamic language with a wide variety of introspection utilities.
|
||||
However, these utilities can be hard to use, because there is no consistent
|
||||
API. The introspection API in python is made up of attributes (__name__,
|
||||
__module__, func_name, etc) on instances, modules, classes and functions which
|
||||
vary between those four types, utility modules such as 'inspect' which provide
|
||||
some functionality, the 'imp' module, the "compiler" module, the semantics of
|
||||
PEP 302 support, and setuptools, among other things.
|
||||
|
||||
At the top, you have "PythonPath", an abstract representation of sys.path which
|
||||
includes methods to locate top-level modules, with or without loading them.
|
||||
The top-level exposed functions in this module for accessing the system path
|
||||
are "walkModules", "iterModules", and "getModule".
|
||||
|
||||
From most to least specific, here are the objects provided::
|
||||
|
||||
PythonPath # sys.path
|
||||
|
|
||||
v
|
||||
PathEntry # one entry on sys.path: an importer
|
||||
|
|
||||
v
|
||||
PythonModule # a module or package that can be loaded
|
||||
|
|
||||
v
|
||||
PythonAttribute # an attribute of a module (function or class)
|
||||
|
|
||||
v
|
||||
PythonAttribute # an attribute of a function or class
|
||||
|
|
||||
v
|
||||
...
|
||||
|
||||
Here's an example of idiomatic usage: this is what you would do to list all of
|
||||
the modules outside the standard library's python-files directory::
|
||||
|
||||
import os
|
||||
stdlibdir = os.path.dirname(os.__file__)
|
||||
|
||||
from twisted.python.modules import iterModules
|
||||
|
||||
for modinfo in iterModules():
|
||||
if (modinfo.pathEntry.filePath.path != stdlibdir
|
||||
and not modinfo.isPackage()):
|
||||
print('unpackaged: %s: %s' % (
|
||||
modinfo.name, modinfo.filePath.path))
|
||||
|
||||
@var theSystemPath: The very top of the Python object space.
|
||||
@type theSystemPath: L{PythonPath}
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import inspect
|
||||
import sys
|
||||
import warnings
|
||||
import zipimport
|
||||
|
||||
# let's try to keep path imports to a minimum...
|
||||
from os.path import dirname, split as splitpath
|
||||
|
||||
from zope.interface import Interface, implementer
|
||||
|
||||
from twisted.python.compat import nativeString
|
||||
from twisted.python.components import registerAdapter
|
||||
from twisted.python.filepath import FilePath, UnlistableError
|
||||
from twisted.python.reflect import namedAny
|
||||
from twisted.python.zippath import ZipArchive
|
||||
|
||||
_nothing = object()
|
||||
|
||||
PYTHON_EXTENSIONS = [".py"]
|
||||
OPTIMIZED_MODE = __doc__ is None
|
||||
if OPTIMIZED_MODE:
|
||||
PYTHON_EXTENSIONS.append(".pyo")
|
||||
else:
|
||||
PYTHON_EXTENSIONS.append(".pyc")
|
||||
|
||||
|
||||
def _isPythonIdentifier(string):
|
||||
"""
|
||||
cheezy fake test for proper identifier-ness.
|
||||
|
||||
@param string: a L{str} which might or might not be a valid python
|
||||
identifier.
|
||||
@return: True or False
|
||||
"""
|
||||
textString = nativeString(string)
|
||||
return " " not in textString and "." not in textString and "-" not in textString
|
||||
|
||||
|
||||
def _isPackagePath(fpath):
|
||||
# Determine if a FilePath-like object is a Python package. TODO: deal with
|
||||
# __init__module.(so|dll|pyd)?
|
||||
extless = fpath.splitext()[0]
|
||||
basend = splitpath(extless)[1]
|
||||
return basend == "__init__"
|
||||
|
||||
|
||||
class _ModuleIteratorHelper:
|
||||
"""
|
||||
This mixin provides common behavior between python module and path entries,
|
||||
since the mechanism for searching sys.path and __path__ attributes is
|
||||
remarkably similar.
|
||||
"""
|
||||
|
||||
def iterModules(self):
|
||||
"""
|
||||
Loop over the modules present below this entry or package on PYTHONPATH.
|
||||
|
||||
For modules which are not packages, this will yield nothing.
|
||||
|
||||
For packages and path entries, this will only yield modules one level
|
||||
down; i.e. if there is a package a.b.c, iterModules on a will only
|
||||
return a.b. If you want to descend deeply, use walkModules.
|
||||
|
||||
@return: a generator which yields PythonModule instances that describe
|
||||
modules which can be, or have been, imported.
|
||||
"""
|
||||
yielded = {}
|
||||
if not self.filePath.exists():
|
||||
return
|
||||
|
||||
for placeToLook in self._packagePaths():
|
||||
try:
|
||||
children = sorted(placeToLook.children())
|
||||
except UnlistableError:
|
||||
continue
|
||||
|
||||
for potentialTopLevel in children:
|
||||
ext = potentialTopLevel.splitext()[1]
|
||||
potentialBasename = potentialTopLevel.basename()[: -len(ext)]
|
||||
if ext in PYTHON_EXTENSIONS:
|
||||
# TODO: this should be a little choosier about which path entry
|
||||
# it selects first, and it should do all the .so checking and
|
||||
# crud
|
||||
if not _isPythonIdentifier(potentialBasename):
|
||||
continue
|
||||
modname = self._subModuleName(potentialBasename)
|
||||
if modname.split(".")[-1] == "__init__":
|
||||
# This marks the directory as a package so it can't be
|
||||
# a module.
|
||||
continue
|
||||
if modname not in yielded:
|
||||
yielded[modname] = True
|
||||
pm = PythonModule(modname, potentialTopLevel, self._getEntry())
|
||||
assert pm != self
|
||||
yield pm
|
||||
else:
|
||||
if (
|
||||
ext
|
||||
or not _isPythonIdentifier(potentialBasename)
|
||||
or not potentialTopLevel.isdir()
|
||||
):
|
||||
continue
|
||||
modname = self._subModuleName(potentialTopLevel.basename())
|
||||
for ext in PYTHON_EXTENSIONS:
|
||||
initpy = potentialTopLevel.child("__init__" + ext)
|
||||
if initpy.exists() and modname not in yielded:
|
||||
yielded[modname] = True
|
||||
pm = PythonModule(modname, initpy, self._getEntry())
|
||||
assert pm != self
|
||||
yield pm
|
||||
break
|
||||
|
||||
def walkModules(self, importPackages=False):
|
||||
"""
|
||||
Similar to L{iterModules}, this yields self, and then every module in my
|
||||
package or entry, and every submodule in each package or entry.
|
||||
|
||||
In other words, this is deep, and L{iterModules} is shallow.
|
||||
"""
|
||||
yield self
|
||||
for package in self.iterModules():
|
||||
yield from package.walkModules(importPackages=importPackages)
|
||||
|
||||
def _subModuleName(self, mn):
|
||||
"""
|
||||
This is a hook to provide packages with the ability to specify their names
|
||||
as a prefix to submodules here.
|
||||
"""
|
||||
return mn
|
||||
|
||||
def _packagePaths(self):
|
||||
"""
|
||||
Implement in subclasses to specify where to look for modules.
|
||||
|
||||
@return: iterable of FilePath-like objects.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def _getEntry(self):
|
||||
"""
|
||||
Implement in subclasses to specify what path entry submodules will come
|
||||
from.
|
||||
|
||||
@return: a PathEntry instance.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def __getitem__(self, modname):
|
||||
"""
|
||||
Retrieve a module from below this path or package.
|
||||
|
||||
@param modname: a str naming a module to be loaded. For entries, this
|
||||
is a top-level, undotted package name, and for packages it is the name
|
||||
of the module without the package prefix. For example, if you have a
|
||||
PythonModule representing the 'twisted' package, you could use::
|
||||
|
||||
twistedPackageObj['python']['modules']
|
||||
|
||||
to retrieve this module.
|
||||
|
||||
@raise KeyError: if the module is not found.
|
||||
|
||||
@return: a PythonModule.
|
||||
"""
|
||||
for module in self.iterModules():
|
||||
if module.name == self._subModuleName(modname):
|
||||
return module
|
||||
raise KeyError(modname)
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Implemented to raise NotImplementedError for clarity, so that attempting to
|
||||
loop over this object won't call __getitem__.
|
||||
|
||||
Note: in the future there might be some sensible default for iteration,
|
||||
like 'walkEverything', so this is deliberately untested and undefined
|
||||
behavior.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class PythonAttribute:
|
||||
"""
|
||||
I represent a function, class, or other object that is present.
|
||||
|
||||
@ivar name: the fully-qualified python name of this attribute.
|
||||
|
||||
@ivar onObject: a reference to a PythonModule or other PythonAttribute that
|
||||
is this attribute's logical parent.
|
||||
|
||||
@ivar name: the fully qualified python name of the attribute represented by
|
||||
this class.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, name: str, onObject: PythonAttribute, loaded: bool, pythonValue: object
|
||||
) -> None:
|
||||
"""
|
||||
Create a PythonAttribute. This is a private constructor. Do not construct
|
||||
me directly, use PythonModule.iterAttributes.
|
||||
|
||||
@param name: the FQPN
|
||||
@param onObject: see ivar
|
||||
@param loaded: always True, for now
|
||||
@param pythonValue: the value of the attribute we're pointing to.
|
||||
"""
|
||||
self.name: str = name
|
||||
self.onObject = onObject
|
||||
self._loaded = loaded
|
||||
self.pythonValue = pythonValue
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"PythonAttribute<{self.name!r}>"
|
||||
|
||||
def isLoaded(self):
|
||||
"""
|
||||
Return a boolean describing whether the attribute this describes has
|
||||
actually been loaded into memory by importing its module.
|
||||
|
||||
Note: this currently always returns true; there is no Python parser
|
||||
support in this module yet.
|
||||
"""
|
||||
return self._loaded
|
||||
|
||||
def load(self, default=_nothing):
|
||||
"""
|
||||
Load the value associated with this attribute.
|
||||
|
||||
@return: an arbitrary Python object, or 'default' if there is an error
|
||||
loading it.
|
||||
"""
|
||||
return self.pythonValue
|
||||
|
||||
def iterAttributes(self):
|
||||
for name, val in inspect.getmembers(self.load()):
|
||||
yield PythonAttribute(self.name + "." + name, self, True, val)
|
||||
|
||||
|
||||
class PythonModule(_ModuleIteratorHelper):
|
||||
"""
|
||||
Representation of a module which could be imported from sys.path.
|
||||
|
||||
@ivar name: the fully qualified python name of this module.
|
||||
|
||||
@ivar filePath: a FilePath-like object which points to the location of this
|
||||
module.
|
||||
|
||||
@ivar pathEntry: a L{PathEntry} instance which this module was located
|
||||
from.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, name: str, filePath: FilePath[str], pathEntry: PathEntry
|
||||
) -> None:
|
||||
"""
|
||||
Create a PythonModule. Do not construct this directly, instead inspect a
|
||||
PythonPath or other PythonModule instances.
|
||||
|
||||
@param name: see ivar
|
||||
@param filePath: see ivar
|
||||
@param pathEntry: see ivar
|
||||
"""
|
||||
_name = nativeString(name)
|
||||
assert not _name.endswith(".__init__")
|
||||
self.name: str = _name
|
||||
self.filePath = filePath
|
||||
self.parentPath = filePath.parent()
|
||||
self.pathEntry = pathEntry
|
||||
|
||||
def _getEntry(self):
|
||||
return self.pathEntry
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""
|
||||
Return a string representation including the module name.
|
||||
"""
|
||||
return f"PythonModule<{self.name!r}>"
|
||||
|
||||
def isLoaded(self):
|
||||
"""
|
||||
Determine if the module is loaded into sys.modules.
|
||||
|
||||
@return: a boolean: true if loaded, false if not.
|
||||
"""
|
||||
return self.pathEntry.pythonPath.moduleDict.get(self.name) is not None
|
||||
|
||||
def iterAttributes(self):
|
||||
"""
|
||||
List all the attributes defined in this module.
|
||||
|
||||
Note: Future work is planned here to make it possible to list python
|
||||
attributes on a module without loading the module by inspecting ASTs or
|
||||
bytecode, but currently any iteration of PythonModule objects insists
|
||||
they must be loaded, and will use inspect.getmodule.
|
||||
|
||||
@raise NotImplementedError: if this module is not loaded.
|
||||
|
||||
@return: a generator yielding PythonAttribute instances describing the
|
||||
attributes of this module.
|
||||
"""
|
||||
if not self.isLoaded():
|
||||
raise NotImplementedError(
|
||||
"You can't load attributes from non-loaded modules yet."
|
||||
)
|
||||
for name, val in inspect.getmembers(self.load()):
|
||||
yield PythonAttribute(self.name + "." + name, self, True, val)
|
||||
|
||||
def isPackage(self):
|
||||
"""
|
||||
Returns true if this module is also a package, and might yield something
|
||||
from iterModules.
|
||||
"""
|
||||
return _isPackagePath(self.filePath)
|
||||
|
||||
def load(self, default=_nothing):
|
||||
"""
|
||||
Load this module.
|
||||
|
||||
@param default: if specified, the value to return in case of an error.
|
||||
|
||||
@return: a genuine python module.
|
||||
|
||||
@raise Exception: Importing modules is a risky business;
|
||||
the erorrs of any code run at module scope may be raised from here, as
|
||||
well as ImportError if something bizarre happened to the system path
|
||||
between the discovery of this PythonModule object and the attempt to
|
||||
import it. If you specify a default, the error will be swallowed
|
||||
entirely, and not logged.
|
||||
|
||||
@rtype: types.ModuleType.
|
||||
"""
|
||||
try:
|
||||
return self.pathEntry.pythonPath.moduleLoader(self.name)
|
||||
except BaseException: # this needs more thought...
|
||||
if default is not _nothing:
|
||||
return default
|
||||
raise
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
"""
|
||||
PythonModules with the same name are equal.
|
||||
"""
|
||||
if isinstance(other, PythonModule):
|
||||
return other.name == self.name
|
||||
return NotImplemented
|
||||
|
||||
def walkModules(self, importPackages=False):
|
||||
if importPackages and self.isPackage():
|
||||
self.load()
|
||||
return super().walkModules(importPackages=importPackages)
|
||||
|
||||
def _subModuleName(self, mn):
|
||||
"""
|
||||
submodules of this module are prefixed with our name.
|
||||
"""
|
||||
return self.name + "." + mn
|
||||
|
||||
def _packagePaths(self):
|
||||
"""
|
||||
Yield a sequence of FilePath-like objects which represent path segments.
|
||||
"""
|
||||
if not self.isPackage():
|
||||
return
|
||||
if self.isLoaded():
|
||||
load = self.load()
|
||||
if hasattr(load, "__path__"):
|
||||
for fn in load.__path__:
|
||||
if fn == self.parentPath.path:
|
||||
# this should _really_ exist.
|
||||
assert self.parentPath.exists()
|
||||
yield self.parentPath
|
||||
else:
|
||||
smp = self.pathEntry.pythonPath._smartPath(fn)
|
||||
if smp.exists():
|
||||
yield smp
|
||||
else:
|
||||
yield self.parentPath
|
||||
|
||||
|
||||
class PathEntry(_ModuleIteratorHelper):
|
||||
"""
|
||||
I am a proxy for a single entry on sys.path.
|
||||
|
||||
@ivar filePath: a FilePath-like object pointing at the filesystem location
|
||||
or archive file where this path entry is stored.
|
||||
|
||||
@ivar pythonPath: a PythonPath instance.
|
||||
"""
|
||||
|
||||
def __init__(self, filePath, pythonPath):
|
||||
"""
|
||||
Create a PathEntry. This is a private constructor.
|
||||
"""
|
||||
self.filePath = filePath
|
||||
self.pythonPath = pythonPath
|
||||
|
||||
def _getEntry(self):
|
||||
return self
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"PathEntry<{self.filePath!r}>"
|
||||
|
||||
def _packagePaths(self):
|
||||
yield self.filePath
|
||||
|
||||
|
||||
class IPathImportMapper(Interface):
|
||||
"""
|
||||
This is an internal interface, used to map importers to factories for
|
||||
FilePath-like objects.
|
||||
"""
|
||||
|
||||
def mapPath(pathLikeString):
|
||||
"""
|
||||
Return a FilePath-like object.
|
||||
|
||||
@param pathLikeString: a path-like string, like one that might be
|
||||
passed to an import hook.
|
||||
|
||||
@return: a L{FilePath}, or something like it (currently only a
|
||||
L{ZipPath}, but more might be added later).
|
||||
"""
|
||||
|
||||
|
||||
@implementer(IPathImportMapper)
|
||||
class _DefaultMapImpl:
|
||||
"""Wrapper for the default importer, i.e. None."""
|
||||
|
||||
def mapPath(self, fsPathString):
|
||||
return FilePath(fsPathString)
|
||||
|
||||
|
||||
_theDefaultMapper = _DefaultMapImpl()
|
||||
|
||||
|
||||
@implementer(IPathImportMapper)
|
||||
class _ZipMapImpl:
|
||||
"""IPathImportMapper implementation for zipimport.ZipImporter."""
|
||||
|
||||
def __init__(self, importer):
|
||||
self.importer = importer
|
||||
|
||||
def mapPath(self, fsPathString):
|
||||
"""
|
||||
Map the given FS path to a ZipPath, by looking at the ZipImporter's
|
||||
"archive" attribute and using it as our ZipArchive root, then walking
|
||||
down into the archive from there.
|
||||
|
||||
@return: a L{zippath.ZipPath} or L{zippath.ZipArchive} instance.
|
||||
"""
|
||||
za = ZipArchive(self.importer.archive)
|
||||
myPath = FilePath(self.importer.archive)
|
||||
itsPath = FilePath(fsPathString)
|
||||
if myPath == itsPath:
|
||||
return za
|
||||
# This is NOT a general-purpose rule for sys.path or __file__:
|
||||
# zipimport specifically uses regular OS path syntax in its
|
||||
# pathnames, even though zip files specify that slashes are always
|
||||
# the separator, regardless of platform.
|
||||
segs = itsPath.segmentsFrom(myPath)
|
||||
zp = za
|
||||
for seg in segs:
|
||||
zp = zp.child(seg)
|
||||
return zp
|
||||
|
||||
|
||||
registerAdapter(_ZipMapImpl, zipimport.zipimporter, IPathImportMapper)
|
||||
|
||||
|
||||
def _defaultSysPathFactory():
|
||||
"""
|
||||
Provide the default behavior of PythonPath's sys.path factory, which is to
|
||||
return the current value of sys.path.
|
||||
|
||||
@return: L{sys.path}
|
||||
"""
|
||||
return sys.path
|
||||
|
||||
|
||||
class PythonPath:
|
||||
"""
|
||||
I represent the very top of the Python object-space, the module list in
|
||||
C{sys.path} and the modules list in C{sys.modules}.
|
||||
|
||||
@ivar _sysPath: A sequence of strings like C{sys.path}. This attribute is
|
||||
read-only.
|
||||
|
||||
@ivar sysPath: The current value of the module search path list.
|
||||
@type sysPath: C{list}
|
||||
|
||||
@ivar moduleDict: A dictionary mapping string module names to module
|
||||
objects, like C{sys.modules}.
|
||||
|
||||
@ivar sysPathHooks: A list of PEP-302 path hooks, like C{sys.path_hooks}.
|
||||
|
||||
@ivar moduleLoader: A function that takes a fully-qualified python name and
|
||||
returns a module, like L{twisted.python.reflect.namedAny}.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
sysPath=None,
|
||||
moduleDict=sys.modules,
|
||||
sysPathHooks=sys.path_hooks,
|
||||
importerCache=sys.path_importer_cache,
|
||||
moduleLoader=namedAny,
|
||||
sysPathFactory=None,
|
||||
):
|
||||
"""
|
||||
Create a PythonPath. You almost certainly want to use
|
||||
modules.theSystemPath, or its aliased methods, rather than creating a
|
||||
new instance yourself, though.
|
||||
|
||||
All parameters are optional, and if unspecified, will use 'system'
|
||||
equivalents that make this PythonPath like the global L{theSystemPath}
|
||||
instance.
|
||||
|
||||
@param sysPath: a sys.path-like list to use for this PythonPath, to
|
||||
specify where to load modules from.
|
||||
|
||||
@param moduleDict: a sys.modules-like dictionary to use for keeping
|
||||
track of what modules this PythonPath has loaded.
|
||||
|
||||
@param sysPathHooks: sys.path_hooks-like list of PEP-302 path hooks to
|
||||
be used for this PythonPath, to determine which importers should be
|
||||
used.
|
||||
|
||||
@param importerCache: a sys.path_importer_cache-like list of PEP-302
|
||||
importers. This will be used in conjunction with the given
|
||||
sysPathHooks.
|
||||
|
||||
@param moduleLoader: a module loader function which takes a string and
|
||||
returns a module. That is to say, it is like L{namedAny} - *not* like
|
||||
L{__import__}.
|
||||
|
||||
@param sysPathFactory: a 0-argument callable which returns the current
|
||||
value of a sys.path-like list of strings. Specify either this, or
|
||||
sysPath, not both. This alternative interface is provided because the
|
||||
way the Python import mechanism works, you can re-bind the 'sys.path'
|
||||
name and that is what is used for current imports, so it must be a
|
||||
factory rather than a value to deal with modification by rebinding
|
||||
rather than modification by mutation. Note: it is not recommended to
|
||||
rebind sys.path. Although this mechanism can deal with that, it is a
|
||||
subtle point which some tools that it is easy for tools which interact
|
||||
with sys.path to miss.
|
||||
"""
|
||||
if sysPath is not None:
|
||||
sysPathFactory = lambda: sysPath
|
||||
elif sysPathFactory is None:
|
||||
sysPathFactory = _defaultSysPathFactory
|
||||
self._sysPathFactory = sysPathFactory
|
||||
self._sysPath = sysPath
|
||||
self.moduleDict = moduleDict
|
||||
self.sysPathHooks = sysPathHooks
|
||||
self.importerCache = importerCache
|
||||
self.moduleLoader = moduleLoader
|
||||
|
||||
@property
|
||||
def sysPath(self):
|
||||
"""
|
||||
Retrieve the current value of the module search path list.
|
||||
"""
|
||||
return self._sysPathFactory()
|
||||
|
||||
def _findEntryPathString(self, modobj):
|
||||
"""
|
||||
Determine where a given Python module object came from by looking at path
|
||||
entries.
|
||||
"""
|
||||
topPackageObj = modobj
|
||||
while "." in topPackageObj.__name__:
|
||||
topPackageObj = self.moduleDict[
|
||||
".".join(topPackageObj.__name__.split(".")[:-1])
|
||||
]
|
||||
if _isPackagePath(FilePath(topPackageObj.__file__)):
|
||||
# if package 'foo' is on sys.path at /a/b/foo, package 'foo's
|
||||
# __file__ will be /a/b/foo/__init__.py, and we are looking for
|
||||
# /a/b here, the path-entry; so go up two steps.
|
||||
rval = dirname(dirname(topPackageObj.__file__))
|
||||
else:
|
||||
# the module is completely top-level, not within any packages. The
|
||||
# path entry it's on is just its dirname.
|
||||
rval = dirname(topPackageObj.__file__)
|
||||
|
||||
# There are probably some awful tricks that an importer could pull
|
||||
# which would break this, so let's just make sure... it's a loaded
|
||||
# module after all, which means that its path MUST be in
|
||||
# path_importer_cache according to PEP 302 -glyph
|
||||
if rval not in self.importerCache:
|
||||
warnings.warn(
|
||||
"%s (for module %s) not in path importer cache "
|
||||
"(PEP 302 violation - check your local configuration)."
|
||||
% (rval, modobj.__name__),
|
||||
stacklevel=3,
|
||||
)
|
||||
|
||||
return rval
|
||||
|
||||
def _smartPath(self, pathName):
|
||||
"""
|
||||
Given a path entry from sys.path which may refer to an importer,
|
||||
return the appropriate FilePath-like instance.
|
||||
|
||||
@param pathName: a str describing the path.
|
||||
|
||||
@return: a FilePath-like object.
|
||||
"""
|
||||
importr = self.importerCache.get(pathName, _nothing)
|
||||
if importr is _nothing:
|
||||
for hook in self.sysPathHooks:
|
||||
try:
|
||||
importr = hook(pathName)
|
||||
except ImportError:
|
||||
pass
|
||||
if importr is _nothing: # still
|
||||
importr = None
|
||||
return IPathImportMapper(importr, _theDefaultMapper).mapPath(pathName)
|
||||
|
||||
def iterEntries(self):
|
||||
"""
|
||||
Iterate the entries on my sysPath.
|
||||
|
||||
@return: a generator yielding PathEntry objects
|
||||
"""
|
||||
for pathName in self.sysPath:
|
||||
fp = self._smartPath(pathName)
|
||||
yield PathEntry(fp, self)
|
||||
|
||||
def __getitem__(self, modname):
|
||||
"""
|
||||
Get a python module by its given fully-qualified name.
|
||||
|
||||
@param modname: The fully-qualified Python module name to load.
|
||||
|
||||
@type modname: C{str}
|
||||
|
||||
@return: an object representing the module identified by C{modname}
|
||||
|
||||
@rtype: L{PythonModule}
|
||||
|
||||
@raise KeyError: if the module name is not a valid module name, or no
|
||||
such module can be identified as loadable.
|
||||
"""
|
||||
# See if the module is already somewhere in Python-land.
|
||||
moduleObject = self.moduleDict.get(modname)
|
||||
if moduleObject is not None:
|
||||
# we need 2 paths; one for the path entry and one for the module.
|
||||
pe = PathEntry(
|
||||
self._smartPath(self._findEntryPathString(moduleObject)), self
|
||||
)
|
||||
mp = self._smartPath(moduleObject.__file__)
|
||||
return PythonModule(modname, mp, pe)
|
||||
|
||||
# Recurse if we're trying to get a submodule.
|
||||
if "." in modname:
|
||||
pkg = self
|
||||
for name in modname.split("."):
|
||||
pkg = pkg[name]
|
||||
return pkg
|
||||
|
||||
# Finally do the slowest possible thing and iterate
|
||||
for module in self.iterModules():
|
||||
if module.name == modname:
|
||||
return module
|
||||
raise KeyError(modname)
|
||||
|
||||
def __contains__(self, module):
|
||||
"""
|
||||
Check to see whether or not a module exists on my import path.
|
||||
|
||||
@param module: The name of the module to look for on my import path.
|
||||
@type module: C{str}
|
||||
"""
|
||||
try:
|
||||
self.__getitem__(module)
|
||||
return True
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""
|
||||
Display my sysPath and moduleDict in a string representation.
|
||||
"""
|
||||
return f"PythonPath({self.sysPath!r},{self.moduleDict!r})"
|
||||
|
||||
def iterModules(self):
|
||||
"""
|
||||
Yield all top-level modules on my sysPath.
|
||||
"""
|
||||
for entry in self.iterEntries():
|
||||
yield from entry.iterModules()
|
||||
|
||||
def walkModules(self, importPackages=False):
|
||||
"""
|
||||
Similar to L{iterModules}, this yields every module on the path, then every
|
||||
submodule in each package or entry.
|
||||
"""
|
||||
for package in self.iterModules():
|
||||
yield from package.walkModules(importPackages=False)
|
||||
|
||||
|
||||
theSystemPath = PythonPath()
|
||||
|
||||
|
||||
def walkModules(importPackages=False):
|
||||
"""
|
||||
Deeply iterate all modules on the global python path.
|
||||
|
||||
@param importPackages: Import packages as they are seen.
|
||||
"""
|
||||
return theSystemPath.walkModules(importPackages=importPackages)
|
||||
|
||||
|
||||
def iterModules():
|
||||
"""
|
||||
Iterate all modules and top-level packages on the global Python path, but
|
||||
do not descend into packages.
|
||||
"""
|
||||
return theSystemPath.iterModules()
|
||||
|
||||
|
||||
def getModule(moduleName):
|
||||
"""
|
||||
Retrieve a module from the system path.
|
||||
"""
|
||||
return theSystemPath[moduleName]
|
||||
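A minimal usage sketch of the modules API above, assuming only that some importable module (the stdlib textwrap here) is available; names and output shapes are illustrative.

from twisted.python.modules import getModule, iterModules

wrap = getModule("textwrap")        # a PythonModule wrapper, not the module itself
print(wrap.filePath, wrap.isLoaded())
real = wrap.load()                  # the genuine module object (imports it)
names = [attr.name for attr in wrap.iterAttributes()]  # requires the module to be loaded

for top in iterModules():           # top-level modules/packages on sys.path
    if top.isPackage():
        print("first package found:", top.name)
        break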
73  .venv/lib/python3.12/site-packages/twisted/python/monkey.py  Normal file
@@ -0,0 +1,73 @@
|
||||
# -*- test-case-name: twisted.test.test_monkey -*-
|
||||
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
|
||||
class MonkeyPatcher:
|
||||
"""
|
||||
Cover up attributes with new objects. Neat for monkey-patching things for
|
||||
unit-testing purposes.
|
||||
"""
|
||||
|
||||
def __init__(self, *patches):
|
||||
# List of patches to apply in (obj, name, value).
|
||||
self._patchesToApply = []
|
||||
# List of the original values for things that have been patched.
|
||||
# (obj, name, value) format.
|
||||
self._originals = []
|
||||
for patch in patches:
|
||||
self.addPatch(*patch)
|
||||
|
||||
def addPatch(self, obj, name, value):
|
||||
"""
|
||||
Add a patch so that the attribute C{name} on C{obj} will be assigned to
|
||||
C{value} when C{patch} is called or during C{runWithPatches}.
|
||||
|
||||
You can restore the original values with a call to restore().
|
||||
"""
|
||||
self._patchesToApply.append((obj, name, value))
|
||||
|
||||
def _alreadyPatched(self, obj, name):
|
||||
"""
|
||||
Has the C{name} attribute of C{obj} already been patched by this
|
||||
patcher?
|
||||
"""
|
||||
for o, n, v in self._originals:
|
||||
if (o, n) == (obj, name):
|
||||
return True
|
||||
return False
|
||||
|
||||
def patch(self):
|
||||
"""
|
||||
Apply all of the patches that have been specified with L{addPatch}.
|
||||
Reverse this operation using L{restore}.
|
||||
"""
|
||||
for obj, name, value in self._patchesToApply:
|
||||
if not self._alreadyPatched(obj, name):
|
||||
self._originals.append((obj, name, getattr(obj, name)))
|
||||
setattr(obj, name, value)
|
||||
|
||||
__enter__ = patch
|
||||
|
||||
def restore(self):
|
||||
"""
|
||||
Restore all original values to any patched objects.
|
||||
"""
|
||||
while self._originals:
|
||||
obj, name, value = self._originals.pop()
|
||||
setattr(obj, name, value)
|
||||
|
||||
def __exit__(self, excType=None, excValue=None, excTraceback=None):
|
||||
self.restore()
|
||||
|
||||
def runWithPatches(self, f, *args, **kw):
|
||||
"""
|
||||
Apply each patch already specified. Then run the function f with the
|
||||
given args and kwargs. Restore everything when done.
|
||||
"""
|
||||
self.patch()
|
||||
try:
|
||||
return f(*args, **kw)
|
||||
finally:
|
||||
self.restore()
|
||||
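A minimal sketch of MonkeyPatcher in use; the _Config class and its timeout attribute are invented for illustration.

from twisted.python.monkey import MonkeyPatcher

class _Config:              # stand-in object to patch; not part of Twisted
    timeout = 30

config = _Config()

patcher = MonkeyPatcher((config, "timeout", 1))
patcher.patch()
assert config.timeout == 1
patcher.restore()
assert config.timeout == 30

# __enter__/__exit__ make it usable as a context manager as well:
with MonkeyPatcher((config, "timeout", 5)):
    assert config.timeout == 5

# runWithPatches applies the patches only for the duration of one call:
value = MonkeyPatcher((config, "timeout", 7)).runWithPatches(lambda: config.timeout)
assert value == 7 and config.timeout == 30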
@@ -0,0 +1,50 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Utilities for dealing with processes.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
|
||||
|
||||
def which(name, flags=os.X_OK):
|
||||
"""
|
||||
Search PATH for executable files with the given name.
|
||||
|
||||
On newer versions of MS-Windows, the PATHEXT environment variable will be
|
||||
set to the list of file extensions for files considered executable. This
|
||||
will normally include things like ".EXE". This function will also find files
|
||||
with the given name ending with any of these extensions.
|
||||
|
||||
On MS-Windows the only flag that has any meaning is os.F_OK. Any other
|
||||
flags will be ignored.
|
||||
|
||||
@type name: C{str}
|
||||
@param name: The name for which to search.
|
||||
|
||||
@type flags: C{int}
|
||||
@param flags: Arguments to L{os.access}.
|
||||
|
||||
@rtype: C{list}
|
||||
@return: A list of the full paths to files found, in the order in which they
|
||||
were found.
|
||||
"""
|
||||
result = []
|
||||
exts = list(filter(None, os.environ.get("PATHEXT", "").split(os.pathsep)))
|
||||
path = os.environ.get("PATH", None)
|
||||
|
||||
if path is None:
|
||||
return []
|
||||
|
||||
for p in os.environ.get("PATH", "").split(os.pathsep):
|
||||
p = os.path.join(p, name)
|
||||
if os.access(p, flags):
|
||||
result.append(p)
|
||||
for e in exts:
|
||||
pext = p + e
|
||||
if os.access(pext, flags):
|
||||
result.append(pext)
|
||||
|
||||
return result
|
||||
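The header above omits this file's name; assuming it is twisted.python.procutils (where which() is normally exposed), a minimal usage sketch:

from twisted.python.procutils import which

paths = which("python3")      # [] when PATH is unset or nothing matches
if paths:
    print("first match:", paths[0])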
128  .venv/lib/python3.12/site-packages/twisted/python/randbytes.py  Normal file
@@ -0,0 +1,128 @@
|
||||
# -*- test-case-name: twisted.test.test_randbytes -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Cryptographically secure random implementation, with fallback on normal random.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import random
|
||||
import warnings
|
||||
|
||||
getrandbits = getattr(random, "getrandbits", None)
|
||||
|
||||
_fromhex = bytes.fromhex
|
||||
|
||||
|
||||
class SecureRandomNotAvailable(RuntimeError):
|
||||
"""
|
||||
Exception raised when no secure random algorithm is found.
|
||||
"""
|
||||
|
||||
|
||||
class SourceNotAvailable(RuntimeError):
|
||||
"""
|
||||
Internal exception used when a specific random source is not available.
|
||||
"""
|
||||
|
||||
|
||||
class RandomFactory:
|
||||
"""
|
||||
Factory providing L{secureRandom} and L{insecureRandom} methods.
|
||||
|
||||
You shouldn't have to instantiate this class, use the module level
|
||||
functions instead: it is an implementation detail and could be removed or
|
||||
changed arbitrarily.
|
||||
"""
|
||||
|
||||
# This variable is no longer used, and will eventually be removed.
|
||||
randomSources = ()
|
||||
|
||||
getrandbits = getrandbits
|
||||
|
||||
def _osUrandom(self, nbytes: int) -> bytes:
|
||||
"""
|
||||
Wrapper around C{os.urandom} that cleanly manages its absence.
|
||||
"""
|
||||
try:
|
||||
return os.urandom(nbytes)
|
||||
except (AttributeError, NotImplementedError) as e:
|
||||
raise SourceNotAvailable(e)
|
||||
|
||||
def secureRandom(self, nbytes: int, fallback: bool = False) -> bytes:
|
||||
"""
|
||||
Return a number of secure random bytes.
|
||||
|
||||
@param nbytes: number of bytes to generate.
|
||||
@type nbytes: C{int}
|
||||
@param fallback: Whether the function should fall back on non-secure
|
||||
random or not. Defaults to C{False}.
|
||||
@type fallback: C{bool}
|
||||
|
||||
@return: a string of random bytes.
|
||||
@rtype: C{str}
|
||||
"""
|
||||
try:
|
||||
return self._osUrandom(nbytes)
|
||||
except SourceNotAvailable:
|
||||
pass
|
||||
|
||||
if fallback:
|
||||
warnings.warn(
|
||||
"urandom unavailable - "
|
||||
"proceeding with non-cryptographically secure random source",
|
||||
category=RuntimeWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return self.insecureRandom(nbytes)
|
||||
else:
|
||||
raise SecureRandomNotAvailable("No secure random source available")
|
||||
|
||||
def _randBits(self, nbytes: int) -> bytes:
|
||||
"""
|
||||
Wrapper around C{random.getrandbits}.
|
||||
"""
|
||||
if self.getrandbits is not None:
|
||||
n = self.getrandbits(nbytes * 8)
|
||||
hexBytes = ("%%0%dx" % (nbytes * 2)) % n
|
||||
return _fromhex(hexBytes)
|
||||
raise SourceNotAvailable("random.getrandbits is not available")
|
||||
|
||||
_maketrans = bytes.maketrans
|
||||
_BYTES = _maketrans(b"", b"")
|
||||
|
||||
def _randModule(self, nbytes: int) -> bytes:
|
||||
"""
|
||||
Wrapper around the C{random} module.
|
||||
"""
|
||||
return b"".join([bytes([random.choice(self._BYTES)]) for i in range(nbytes)])
|
||||
|
||||
def insecureRandom(self, nbytes: int) -> bytes:
|
||||
"""
|
||||
Return a number of non-secure random bytes.
|
||||
|
||||
@param nbytes: number of bytes to generate.
|
||||
@type nbytes: C{int}
|
||||
|
||||
@return: a string of random bytes.
|
||||
@rtype: C{str}
|
||||
"""
|
||||
try:
|
||||
return self._randBits(nbytes)
|
||||
except SourceNotAvailable:
|
||||
pass
|
||||
return self._randModule(nbytes)
|
||||
|
||||
|
||||
factory = RandomFactory()
|
||||
|
||||
secureRandom = factory.secureRandom
|
||||
|
||||
insecureRandom = factory.insecureRandom
|
||||
|
||||
del factory
|
||||
|
||||
|
||||
__all__ = ["secureRandom", "insecureRandom", "SecureRandomNotAvailable"]
|
||||
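A minimal sketch of the module-level helpers exported above:

from twisted.python.randbytes import secureRandom, insecureRandom

token = secureRandom(16)                 # 16 bytes from os.urandom, or raises
                                         # SecureRandomNotAvailable without it
weak = secureRandom(16, fallback=True)   # warns and falls back if urandom is missing
noise = insecureRandom(8)                # never suitable for cryptographic use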
250  .venv/lib/python3.12/site-packages/twisted/python/rebuild.py  Normal file
@@ -0,0 +1,250 @@
|
||||
# -*- test-case-name: twisted.test.test_rebuild -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
|
||||
"""
|
||||
*Real* reloading support for Python.
|
||||
"""
|
||||
|
||||
import linecache
|
||||
|
||||
# System Imports
|
||||
import sys
|
||||
import time
|
||||
import types
|
||||
from importlib import reload
|
||||
from types import ModuleType
|
||||
from typing import Dict
|
||||
|
||||
# Sibling Imports
|
||||
from twisted.python import log, reflect
|
||||
|
||||
lastRebuild = time.time()
|
||||
|
||||
|
||||
class Sensitive:
|
||||
"""
|
||||
A utility mixin that's sensitive to rebuilds.
|
||||
|
||||
This is a mixin for classes (usually those which represent collections of
|
||||
callbacks) to make sure that their code is up-to-date before running.
|
||||
"""
|
||||
|
||||
lastRebuild = lastRebuild
|
||||
|
||||
def needRebuildUpdate(self):
|
||||
yn = self.lastRebuild < lastRebuild
|
||||
return yn
|
||||
|
||||
def rebuildUpToDate(self):
|
||||
self.lastRebuild = time.time()
|
||||
|
||||
def latestVersionOf(self, anObject):
|
||||
"""
|
||||
Get the latest version of an object.
|
||||
|
||||
This can handle just about anything callable; instances, functions,
|
||||
methods, and classes.
|
||||
"""
|
||||
t = type(anObject)
|
||||
if t == types.FunctionType:
|
||||
return latestFunction(anObject)
|
||||
elif t == types.MethodType:
|
||||
if anObject.__self__ is None:
|
||||
return getattr(anObject.im_class, anObject.__name__)
|
||||
else:
|
||||
return getattr(anObject.__self__, anObject.__name__)
|
||||
else:
|
||||
log.msg("warning returning anObject!")
|
||||
return anObject
|
||||
|
||||
|
||||
_modDictIDMap: Dict[int, ModuleType] = {}
|
||||
|
||||
|
||||
def latestFunction(oldFunc):
|
||||
"""
|
||||
Get the latest version of a function.
|
||||
"""
|
||||
# This may be CPython specific, since I believe jython instantiates a new
|
||||
# module upon reload.
|
||||
dictID = id(oldFunc.__globals__)
|
||||
module = _modDictIDMap.get(dictID)
|
||||
if module is None:
|
||||
return oldFunc
|
||||
return getattr(module, oldFunc.__name__)
|
||||
|
||||
|
||||
def latestClass(oldClass):
|
||||
"""
|
||||
Get the latest version of a class.
|
||||
"""
|
||||
module = reflect.namedModule(oldClass.__module__)
|
||||
newClass = getattr(module, oldClass.__name__)
|
||||
newBases = [latestClass(base) for base in newClass.__bases__]
|
||||
|
||||
if newClass.__module__ == "builtins":
|
||||
# builtin members can't be reloaded sanely
|
||||
return newClass
|
||||
|
||||
try:
|
||||
# This makes old-style stuff work
|
||||
newClass.__bases__ = tuple(newBases)
|
||||
return newClass
|
||||
except TypeError:
|
||||
ctor = type(newClass)
|
||||
return ctor(newClass.__name__, tuple(newBases), dict(newClass.__dict__))
|
||||
|
||||
|
||||
class RebuildError(Exception):
|
||||
"""
|
||||
Exception raised when trying to rebuild a class that cannot be rebuilt.
|
||||
"""
|
||||
|
||||
|
||||
def updateInstance(self):
|
||||
"""
|
||||
Updates an instance to be current.
|
||||
"""
|
||||
self.__class__ = latestClass(self.__class__)
|
||||
|
||||
|
||||
def __injectedgetattr__(self, name):
|
||||
"""
|
||||
A getattr method to cause a class to be refreshed.
|
||||
"""
|
||||
if name == "__del__":
|
||||
raise AttributeError("Without this, Python segfaults.")
|
||||
updateInstance(self)
|
||||
log.msg(f"(rebuilding stale {reflect.qual(self.__class__)} instance ({name}))")
|
||||
result = getattr(self, name)
|
||||
return result
|
||||
|
||||
|
||||
def rebuild(module, doLog=1):
|
||||
"""
|
||||
Reload a module and do as much as possible to replace its references.
|
||||
"""
|
||||
global lastRebuild
|
||||
lastRebuild = time.time()
|
||||
if hasattr(module, "ALLOW_TWISTED_REBUILD"):
|
||||
# Is this module allowed to be rebuilt?
|
||||
if not module.ALLOW_TWISTED_REBUILD:
|
||||
raise RuntimeError("I am not allowed to be rebuilt.")
|
||||
if doLog:
|
||||
log.msg(f"Rebuilding {str(module.__name__)}...")
|
||||
|
||||
# Safely handle adapter re-registration
|
||||
from twisted.python import components
|
||||
|
||||
components.ALLOW_DUPLICATES = True
|
||||
|
||||
d = module.__dict__
|
||||
_modDictIDMap[id(d)] = module
|
||||
newclasses = {}
|
||||
classes = {}
|
||||
functions = {}
|
||||
values = {}
|
||||
if doLog:
|
||||
log.msg(f" (scanning {str(module.__name__)}): ")
|
||||
for k, v in d.items():
|
||||
if issubclass(type(v), types.FunctionType):
|
||||
if v.__globals__ is module.__dict__:
|
||||
functions[v] = 1
|
||||
if doLog:
|
||||
log.logfile.write("f")
|
||||
log.logfile.flush()
|
||||
elif isinstance(v, type):
|
||||
if v.__module__ == module.__name__:
|
||||
newclasses[v] = 1
|
||||
if doLog:
|
||||
log.logfile.write("o")
|
||||
log.logfile.flush()
|
||||
|
||||
values.update(classes)
|
||||
values.update(functions)
|
||||
fromOldModule = values.__contains__
|
||||
newclasses = newclasses.keys()
|
||||
classes = classes.keys()
|
||||
functions = functions.keys()
|
||||
|
||||
if doLog:
|
||||
log.msg("")
|
||||
log.msg(f" (reload {str(module.__name__)})")
|
||||
|
||||
# Boom.
|
||||
reload(module)
|
||||
# Make sure that my traceback printing will at least be recent...
|
||||
linecache.clearcache()
|
||||
|
||||
if doLog:
|
||||
log.msg(f" (cleaning {str(module.__name__)}): ")
|
||||
|
||||
for clazz in classes:
|
||||
if getattr(module, clazz.__name__) is clazz:
|
||||
log.msg(f"WARNING: class {reflect.qual(clazz)} not replaced by reload!")
|
||||
else:
|
||||
if doLog:
|
||||
log.logfile.write("x")
|
||||
log.logfile.flush()
|
||||
clazz.__bases__ = ()
|
||||
clazz.__dict__.clear()
|
||||
clazz.__getattr__ = __injectedgetattr__
|
||||
clazz.__module__ = module.__name__
|
||||
if newclasses:
|
||||
import gc
|
||||
for nclass in newclasses:
|
||||
ga = getattr(module, nclass.__name__)
|
||||
if ga is nclass:
|
||||
log.msg(
|
||||
"WARNING: new-class {} not replaced by reload!".format(
|
||||
reflect.qual(nclass)
|
||||
)
|
||||
)
|
||||
else:
|
||||
for r in gc.get_referrers(nclass):
|
||||
if getattr(r, "__class__", None) is nclass:
|
||||
r.__class__ = ga
|
||||
if doLog:
|
||||
log.msg("")
|
||||
log.msg(f" (fixing {str(module.__name__)}): ")
|
||||
modcount = 0
|
||||
for mk, mod in sys.modules.items():
|
||||
modcount = modcount + 1
|
||||
if mod == module or mod is None:
|
||||
continue
|
||||
|
||||
if not hasattr(mod, "__file__"):
|
||||
# It's a builtin module; nothing to replace here.
|
||||
continue
|
||||
|
||||
if hasattr(mod, "__bundle__"):
|
||||
# PyObjC has a few buggy objects which segfault if you hash() them.
|
||||
# It doesn't make sense to try rebuilding extension modules like
|
||||
# this anyway, so don't try.
|
||||
continue
|
||||
|
||||
changed = 0
|
||||
|
||||
for k, v in mod.__dict__.items():
|
||||
try:
|
||||
hash(v)
|
||||
except Exception:
|
||||
continue
|
||||
if fromOldModule(v):
|
||||
if doLog:
|
||||
log.logfile.write("f")
|
||||
log.logfile.flush()
|
||||
nv = latestFunction(v)
|
||||
changed = 1
|
||||
setattr(mod, k, nv)
|
||||
if doLog and not changed and ((modcount % 10) == 0):
|
||||
log.logfile.write(".")
|
||||
log.logfile.flush()
|
||||
|
||||
components.ALLOW_DUPLICATES = False
|
||||
if doLog:
|
||||
log.msg("")
|
||||
log.msg(f" Rebuilt {str(module.__name__)}.")
|
||||
return module
|
||||
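A hedged sketch of how rebuild() is typically driven; myproject.handlers is a made-up module name used only for illustration.

from twisted.python.rebuild import rebuild
import myproject.handlers                           # hypothetical module

myproject.handlers = rebuild(myproject.handlers)    # reload + fix up references
# A module can refuse this by setting ALLOW_TWISTED_REBUILD = False at top level.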
686  .venv/lib/python3.12/site-packages/twisted/python/reflect.py  Normal file
@@ -0,0 +1,686 @@
|
||||
# -*- test-case-name: twisted.test.test_reflect -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Standardized versions of various cool and/or strange things that you can do
|
||||
with Python's reflection capabilities.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import pickle
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
import types
|
||||
import weakref
|
||||
from collections import deque
|
||||
from io import IOBase, StringIO
|
||||
from typing import Type, Union
|
||||
|
||||
from twisted.python.compat import nativeString
|
||||
from twisted.python.deprecate import _fullyQualifiedName as fullyQualifiedName
|
||||
|
||||
RegexType = type(re.compile(""))
|
||||
|
||||
|
||||
def prefixedMethodNames(classObj, prefix):
|
||||
"""
|
||||
Given a class object C{classObj}, returns a list of method names that match
|
||||
the string C{prefix}.
|
||||
|
||||
@param classObj: A class object from which to collect method names.
|
||||
|
||||
@param prefix: A native string giving a prefix. Each method with a name
|
||||
which begins with this prefix will be returned.
|
||||
@type prefix: L{str}
|
||||
|
||||
@return: A list of the names of matching methods of C{classObj} (and base
|
||||
classes of C{classObj}).
|
||||
@rtype: L{list} of L{str}
|
||||
"""
|
||||
dct = {}
|
||||
addMethodNamesToDict(classObj, dct, prefix)
|
||||
return list(dct.keys())
|
||||
|
||||
|
||||
def addMethodNamesToDict(classObj, dict, prefix, baseClass=None):
|
||||
"""
|
||||
This goes through C{classObj} (and its bases) and puts method names
|
||||
starting with 'prefix' in 'dict' with a value of 1. If baseClass isn't
|
||||
None, methods will only be added if classObj is-a baseClass.
|
||||
|
||||
If the class in question has the methods 'prefix_methodname' and
|
||||
'prefix_methodname2', the resulting dict should look something like:
|
||||
{"methodname": 1, "methodname2": 1}.
|
||||
|
||||
@param classObj: A class object from which to collect method names.
|
||||
|
||||
@param dict: A L{dict} which will be updated with the results of the
|
||||
accumulation. Items are added to this dictionary, with method names as
|
||||
keys and C{1} as values.
|
||||
@type dict: L{dict}
|
||||
|
||||
@param prefix: A native string giving a prefix. Each method of C{classObj}
|
||||
(and base classes of C{classObj}) with a name which begins with this
|
||||
prefix will be returned.
|
||||
@type prefix: L{str}
|
||||
|
||||
@param baseClass: A class object at which to stop searching upwards for new
|
||||
methods. To collect all method names, do not pass a value for this
|
||||
parameter.
|
||||
|
||||
@return: L{None}
|
||||
"""
|
||||
for base in classObj.__bases__:
|
||||
addMethodNamesToDict(base, dict, prefix, baseClass)
|
||||
|
||||
if baseClass is None or baseClass in classObj.__bases__:
|
||||
for name, method in classObj.__dict__.items():
|
||||
optName = name[len(prefix) :]
|
||||
if (
|
||||
(type(method) is types.FunctionType)
|
||||
and (name[: len(prefix)] == prefix)
|
||||
and (len(optName))
|
||||
):
|
||||
dict[optName] = 1
|
||||
|
||||
|
||||
def prefixedMethods(obj, prefix=""):
|
||||
"""
|
||||
Given an object C{obj}, returns a list of method objects that match the
|
||||
string C{prefix}.
|
||||
|
||||
@param obj: An arbitrary object from which to collect methods.
|
||||
|
||||
@param prefix: A native string giving a prefix. Each method of C{obj} with
|
||||
a name which begins with this prefix will be returned.
|
||||
@type prefix: L{str}
|
||||
|
||||
@return: A list of the matching method objects.
|
||||
@rtype: L{list}
|
||||
"""
|
||||
dct = {}
|
||||
accumulateMethods(obj, dct, prefix)
|
||||
return list(dct.values())
|
||||
|
||||
|
||||
def accumulateMethods(obj, dict, prefix="", curClass=None):
|
||||
"""
|
||||
Given an object C{obj}, add all methods that begin with C{prefix}.
|
||||
|
||||
@param obj: An arbitrary object to collect methods from.
|
||||
|
||||
@param dict: A L{dict} which will be updated with the results of the
|
||||
accumulation. Items are added to this dictionary, with method names as
|
||||
keys and corresponding instance method objects as values.
|
||||
@type dict: L{dict}
|
||||
|
||||
@param prefix: A native string giving a prefix. Each method of C{obj} with
|
||||
a name which begins with this prefix will be returned.
|
||||
@type prefix: L{str}
|
||||
|
||||
@param curClass: The class in the inheritance hierarchy at which to start
|
||||
collecting methods. Collection proceeds up. To collect all methods
|
||||
from C{obj}, do not pass a value for this parameter.
|
||||
|
||||
@return: L{None}
|
||||
"""
|
||||
if not curClass:
|
||||
curClass = obj.__class__
|
||||
for base in curClass.__bases__:
|
||||
# The implementation of the object class is different on PyPy vs.
|
||||
# CPython. This has the side effect of making accumulateMethods()
|
||||
# pick up object methods from all new-style classes -
|
||||
# such as __getattribute__, etc.
|
||||
# If we ignore 'object' when accumulating methods, we can get
|
||||
# consistent behavior on Pypy and CPython.
|
||||
if base is not object:
|
||||
accumulateMethods(obj, dict, prefix, base)
|
||||
|
||||
for name, method in curClass.__dict__.items():
|
||||
optName = name[len(prefix) :]
|
||||
if (
|
||||
(type(method) is types.FunctionType)
|
||||
and (name[: len(prefix)] == prefix)
|
||||
and (len(optName))
|
||||
):
|
||||
dict[optName] = getattr(obj, name)
|
||||
|
||||
|
||||
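# Illustrative sketch (not part of this module): prefixedMethodNames() and
# accumulateMethods() collect methods by naming convention.  The "remote_"
# prefix and the Calculator class below are invented for the example.
class Calculator:
    def remote_add(self, a, b):
        return a + b

    def remote_mul(self, a, b):
        return a * b

calc = Calculator()
prefixedMethodNames(Calculator, "remote_")    # ['add', 'mul']
handlers = {}
accumulateMethods(calc, handlers, "remote_")  # {'add': <bound method>, 'mul': ...}
handlers["add"](2, 3)                         # 5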
def namedModule(name):
|
||||
"""
|
||||
Return a module given its name.
|
||||
"""
|
||||
topLevel = __import__(name)
|
||||
packages = name.split(".")[1:]
|
||||
m = topLevel
|
||||
for p in packages:
|
||||
m = getattr(m, p)
|
||||
return m
|
||||
|
||||
|
||||
def namedObject(name):
|
||||
"""
|
||||
Get a fully named module-global object.
|
||||
"""
|
||||
classSplit = name.split(".")
|
||||
module = namedModule(".".join(classSplit[:-1]))
|
||||
return getattr(module, classSplit[-1])
|
||||
|
||||
|
||||
namedClass = namedObject # backwards compat
|
||||
|
||||
|
||||
def requireModule(name, default=None):
|
||||
"""
|
||||
Try to import a module given its name, returning C{default} value if
|
||||
C{ImportError} is raised during import.
|
||||
|
||||
@param name: Module name as it would have been passed to C{import}.
|
||||
@type name: C{str}.
|
||||
|
||||
@param default: Value returned in case C{ImportError} is raised while
|
||||
importing the module.
|
||||
|
||||
@return: Module or default value.
|
||||
"""
|
||||
try:
|
||||
return namedModule(name)
|
||||
except ImportError:
|
||||
return default
|
||||
|
||||
|
||||
class _NoModuleFound(Exception):
|
||||
"""
|
||||
No module was found because none exists.
|
||||
"""
|
||||
|
||||
|
||||
class InvalidName(ValueError):
|
||||
"""
|
||||
The given name is not a dot-separated list of Python objects.
|
||||
"""
|
||||
|
||||
|
||||
class ModuleNotFound(InvalidName):
|
||||
"""
|
||||
The module associated with the given name doesn't exist and it can't be
|
||||
imported.
|
||||
"""
|
||||
|
||||
|
||||
class ObjectNotFound(InvalidName):
|
||||
"""
|
||||
The object associated with the given name doesn't exist and it can't be
|
||||
imported.
|
||||
"""
|
||||
|
||||
|
||||
def _importAndCheckStack(importName):
|
||||
"""
|
||||
Import the given name as a module, then walk the stack to determine whether
|
||||
the failure was the module not existing, or some code in the module (for
|
||||
example a dependent import) failing. This can be helpful to determine
|
||||
whether any actual application code was run. For example, to distinguish
|
||||
administrative error (entering the wrong module name), from programmer
|
||||
error (writing buggy code in a module that fails to import).
|
||||
|
||||
@param importName: The name of the module to import.
|
||||
@type importName: C{str}
|
||||
@raise Exception: if something bad happens. This can be any type of
|
||||
exception, since nobody knows what loading some arbitrary code might
|
||||
do.
|
||||
@raise _NoModuleFound: if no module was found.
|
||||
"""
|
||||
try:
|
||||
return __import__(importName)
|
||||
except ImportError:
|
||||
excType, excValue, excTraceback = sys.exc_info()
|
||||
while excTraceback:
|
||||
execName = excTraceback.tb_frame.f_globals["__name__"]
|
||||
if execName == importName:
|
||||
raise excValue.with_traceback(excTraceback)
|
||||
excTraceback = excTraceback.tb_next
|
||||
raise _NoModuleFound()
|
||||
|
||||
|
||||
def namedAny(name):
|
||||
"""
|
||||
Retrieve a Python object by its fully qualified name from the global Python
|
||||
module namespace. The first part of the name, that describes a module,
|
||||
will be discovered and imported. Each subsequent part of the name is
|
||||
treated as the name of an attribute of the object specified by all of the
|
||||
name which came before it. For example, the fully-qualified name of this
|
||||
object is 'twisted.python.reflect.namedAny'.
|
||||
|
||||
@type name: L{str}
|
||||
@param name: The name of the object to return.
|
||||
|
||||
@raise InvalidName: If the name is an empty string, starts or ends with
|
||||
a '.', or is otherwise syntactically incorrect.
|
||||
|
||||
@raise ModuleNotFound: If the name is syntactically correct but the
|
||||
module it specifies cannot be imported because it does not appear to
|
||||
exist.
|
||||
|
||||
@raise ObjectNotFound: If the name is syntactically correct, includes at
|
||||
least one '.', but the module it specifies cannot be imported because
|
||||
it does not appear to exist.
|
||||
|
||||
@raise AttributeError: If an attribute of an object along the way cannot be
|
||||
accessed, or a module along the way is not found.
|
||||
|
||||
@return: the Python object identified by 'name'.
|
||||
"""
|
||||
if not name:
|
||||
raise InvalidName("Empty module name")
|
||||
|
||||
names = name.split(".")
|
||||
|
||||
# if the name starts or ends with a '.' or contains '..', the __import__
|
||||
# will raise an 'Empty module name' error. This will provide a better error
|
||||
# message.
|
||||
if "" in names:
|
||||
raise InvalidName(
|
||||
"name must be a string giving a '.'-separated list of Python "
|
||||
"identifiers, not %r" % (name,)
|
||||
)
|
||||
|
||||
topLevelPackage = None
|
||||
moduleNames = names[:]
|
||||
while not topLevelPackage:
|
||||
if moduleNames:
|
||||
trialname = ".".join(moduleNames)
|
||||
try:
|
||||
topLevelPackage = _importAndCheckStack(trialname)
|
||||
except _NoModuleFound:
|
||||
moduleNames.pop()
|
||||
else:
|
||||
if len(names) == 1:
|
||||
raise ModuleNotFound(f"No module named {name!r}")
|
||||
else:
|
||||
raise ObjectNotFound(f"{name!r} does not name an object")
|
||||
|
||||
obj = topLevelPackage
|
||||
for n in names[1:]:
|
||||
obj = getattr(obj, n)
|
||||
|
||||
return obj
|
||||
|
||||
|
||||
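# Illustrative sketch (not part of this module): namedAny() resolves dotted
# names by importing the longest importable prefix and then using getattr.
maxsize = namedAny("sys.maxsize")                 # imports sys, then getattr
qualFn = namedAny("twisted.python.reflect.qual")  # same object as qual below

try:
    namedAny("no.such.module")        # dotted, nothing importable
except ObjectNotFound:
    pass
try:
    namedAny("nosuchtoplevelmodule")  # single segment, nothing importable
except ModuleNotFound:
    pass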
def filenameToModuleName(fn):
|
||||
"""
|
||||
Convert a name in the filesystem to the name of the Python module it is.
|
||||
|
||||
This is aggressive about getting a module name back from a file; it will
|
||||
always return a string. Aggressive means 'sometimes wrong'; it won't look
|
||||
at the Python path or try to do any error checking: don't use this method
|
||||
unless you already know that the filename you're talking about is a Python
|
||||
module.
|
||||
|
||||
@param fn: A filesystem path to a module or package; C{bytes} on Python 2,
|
||||
C{bytes} or C{unicode} on Python 3.
|
||||
|
||||
@return: A hopefully importable module name.
|
||||
@rtype: C{str}
|
||||
"""
|
||||
if isinstance(fn, bytes):
|
||||
initPy = b"__init__.py"
|
||||
else:
|
||||
initPy = "__init__.py"
|
||||
fullName = os.path.abspath(fn)
|
||||
base = os.path.basename(fn)
|
||||
if not base:
|
||||
# this happens when fn ends with a path separator, just skip it
|
||||
base = os.path.basename(fn[:-1])
|
||||
modName = nativeString(os.path.splitext(base)[0])
|
||||
while 1:
|
||||
fullName = os.path.dirname(fullName)
|
||||
if os.path.exists(os.path.join(fullName, initPy)):
|
||||
modName = "{}.{}".format(
|
||||
nativeString(os.path.basename(fullName)),
|
||||
nativeString(modName),
|
||||
)
|
||||
else:
|
||||
break
|
||||
return modName
|
||||
|
||||
|
||||
def qual(clazz: Type[object]) -> str:
|
||||
"""
|
||||
Return full import path of a class.
|
||||
"""
|
||||
return clazz.__module__ + "." + clazz.__name__
|
||||
|
||||
|
||||
def _determineClass(x):
|
||||
try:
|
||||
return x.__class__
|
||||
except BaseException:
|
||||
return type(x)
|
||||
|
||||
|
||||
def _determineClassName(x):
|
||||
c = _determineClass(x)
|
||||
try:
|
||||
return c.__name__
|
||||
except BaseException:
|
||||
try:
|
||||
return str(c)
|
||||
except BaseException:
|
||||
return "<BROKEN CLASS AT 0x%x>" % id(c)
|
||||
|
||||
|
||||
def _safeFormat(formatter: Union[types.FunctionType, Type[str]], o: object) -> str:
|
||||
"""
|
||||
Helper function for L{safe_repr} and L{safe_str}.
|
||||
|
||||
Called when C{repr} or C{str} fail. Returns a string containing info about
|
||||
C{o} and the latest exception.
|
||||
|
||||
@param formatter: C{str} or C{repr}.
|
||||
@type formatter: C{type}
|
||||
@param o: Any object.
|
||||
|
||||
@rtype: C{str}
|
||||
@return: A string containing information about C{o} and the raised
|
||||
exception.
|
||||
"""
|
||||
io = StringIO()
|
||||
traceback.print_exc(file=io)
|
||||
className = _determineClassName(o)
|
||||
tbValue = io.getvalue()
|
||||
return "<{} instance at 0x{:x} with {} error:\n {}>".format(
|
||||
className,
|
||||
id(o),
|
||||
formatter.__name__,
|
||||
tbValue,
|
||||
)
|
||||
|
||||
|
||||
def safe_repr(o):
|
||||
"""
|
||||
Returns a string representation of an object, or a string containing a
|
||||
traceback, if that object's __repr__ raised an exception.
|
||||
|
||||
@param o: Any object.
|
||||
|
||||
@rtype: C{str}
|
||||
"""
|
||||
try:
|
||||
return repr(o)
|
||||
except BaseException:
|
||||
return _safeFormat(repr, o)
|
||||
|
||||
|
||||
def safe_str(o: object) -> str:
|
||||
"""
|
||||
Returns a string representation of an object, or a string containing a
|
||||
traceback, if that object's __str__ raised an exception.
|
||||
|
||||
@param o: Any object.
|
||||
"""
|
||||
if isinstance(o, bytes):
|
||||
# If o is bytes and seems to hold a utf-8 encoded string,
|
||||
# convert it to str.
|
||||
try:
|
||||
return o.decode("utf-8")
|
||||
except BaseException:
|
||||
pass
|
||||
try:
|
||||
return str(o)
|
||||
except BaseException:
|
||||
return _safeFormat(str, o)
|
||||
|
||||
|
||||
class QueueMethod:
|
||||
"""
|
||||
I represent a method that doesn't exist yet.
|
||||
"""
|
||||
|
||||
def __init__(self, name, calls):
|
||||
self.name = name
|
||||
self.calls = calls
|
||||
|
||||
def __call__(self, *args):
|
||||
self.calls.append((self.name, args))
|
||||
|
||||
|
||||
def fullFuncName(func):
|
||||
qualName = str(pickle.whichmodule(func, func.__name__)) + "." + func.__name__
|
||||
if namedObject(qualName) is not func:
|
||||
raise Exception(f"Couldn't find {func} as {qualName}.")
|
||||
return qualName
|
||||
|
||||
|
||||
def getClass(obj):
|
||||
"""
|
||||
Return the class or type of object 'obj'.
|
||||
"""
|
||||
return type(obj)
|
||||
|
||||
|
||||
def accumulateClassDict(classObj, attr, adict, baseClass=None):
|
||||
"""
|
||||
Accumulate all attributes of a given name in a class hierarchy into a single dictionary.
|
||||
|
||||
Assuming all class attributes of this name are dictionaries.
|
||||
If any of the dictionaries being accumulated have the same key, the
|
||||
one highest in the class hierarchy wins.
|
||||
(XXX: If \"highest\" means \"closest to the starting class\".)
|
||||
|
||||
Ex::
|
||||
|
||||
class Soy:
|
||||
properties = {\"taste\": \"bland\"}
|
||||
|
||||
class Plant:
|
||||
properties = {\"colour\": \"green\"}
|
||||
|
||||
class Seaweed(Plant):
|
||||
pass
|
||||
|
||||
class Lunch(Soy, Seaweed):
|
||||
properties = {\"vegan\": 1 }
|
||||
|
||||
dct = {}
|
||||
|
||||
accumulateClassDict(Lunch, \"properties\", dct)
|
||||
|
||||
print(dct)
|
||||
|
||||
{\"taste\": \"bland\", \"colour\": \"green\", \"vegan\": 1}
|
||||
"""
|
||||
for base in classObj.__bases__:
|
||||
accumulateClassDict(base, attr, adict)
|
||||
if baseClass is None or baseClass in classObj.__bases__:
|
||||
adict.update(classObj.__dict__.get(attr, {}))
|
||||
|
||||
|
||||
def accumulateClassList(classObj, attr, listObj, baseClass=None):
|
||||
"""
|
||||
Accumulate all attributes of a given name in a class hierarchy into a single list.
|
||||
|
||||
Assuming all class attributes of this name are lists.
|
||||
"""
|
||||
for base in classObj.__bases__:
|
||||
accumulateClassList(base, attr, listObj)
|
||||
if baseClass is None or baseClass in classObj.__bases__:
|
||||
listObj.extend(classObj.__dict__.get(attr, []))
|
||||
|
||||
|
||||
def isSame(a, b):
|
||||
return a is b
|
||||
|
||||
|
||||
def isLike(a, b):
|
||||
return a == b
|
||||
|
||||
|
||||
def modgrep(goal):
|
||||
return objgrep(sys.modules, goal, isLike, "sys.modules")
|
||||
|
||||
|
||||
def isOfType(start, goal):
|
||||
return type(start) is goal
|
||||
|
||||
|
||||
def findInstances(start, t):
|
||||
return objgrep(start, t, isOfType)
|
||||
|
||||
|
||||
def objgrep(
|
||||
start,
|
||||
goal,
|
||||
eq=isLike,
|
||||
path="",
|
||||
paths=None,
|
||||
seen=None,
|
||||
showUnknowns=0,
|
||||
maxDepth=None,
|
||||
):
|
||||
"""
|
||||
L{objgrep} finds paths between C{start} and C{goal}.
|
||||
|
||||
Starting at the python object C{start}, we will loop over every reachable
|
||||
reference, trying to find the python object C{goal} (i.e. every object
|
||||
C{candidate} for whom C{eq(candidate, goal)} is truthy), and return a
|
||||
L{list} of L{str}, where each L{str} is Python syntax for a path between
|
||||
C{start} and C{goal}.
|
||||
|
||||
Since this can be slightly difficult to visualize, here's an example::
|
||||
|
||||
>>> class Holder:
|
||||
... def __init__(self, x):
|
||||
... self.x = x
|
||||
...
|
||||
>>> start = Holder({"irrelevant": "ignore",
|
||||
... "relevant": [7, 1, 3, 5, 7]})
|
||||
>>> for path in objgrep(start, 7):
|
||||
... print("start" + path)
|
||||
start.x['relevant'][0]
|
||||
start.x['relevant'][4]
|
||||
|
||||
This can be useful, for example, when debugging stateful graphs of objects
|
||||
attached to a socket, trying to figure out where a particular connection is
|
||||
attached.
|
||||
|
||||
@param start: The object to start looking at.
|
||||
|
||||
@param goal: The object to search for.
|
||||
|
||||
@param eq: A 2-argument predicate which takes an object found by traversing
|
||||
references starting at C{start}, as well as C{goal}, and returns a
|
||||
boolean.
|
||||
|
||||
@param path: The prefix of the path to include in every return value; empty
|
||||
by default.
|
||||
|
||||
@param paths: The result object to append values to; a list of strings.
|
||||
|
||||
@param seen: A dictionary mapping ints (object IDs) to objects already
|
||||
seen.
|
||||
|
||||
@param showUnknowns: if true, print a message to C{stdout} when
|
||||
encountering objects that C{objgrep} does not know how to traverse.
|
||||
|
||||
@param maxDepth: The maximum number of object references to attempt
|
||||
traversing before giving up. If an integer, limit to that many links,
|
||||
if C{None}, unlimited.
|
||||
|
||||
@return: A list of strings representing python object paths starting at
|
||||
C{start} and terminating at C{goal}.
|
||||
"""
|
||||
if paths is None:
|
||||
paths = []
|
||||
if seen is None:
|
||||
seen = {}
|
||||
if eq(start, goal):
|
||||
paths.append(path)
|
||||
if id(start) in seen:
|
||||
if seen[id(start)] is start:
|
||||
return
|
||||
if maxDepth is not None:
|
||||
if maxDepth == 0:
|
||||
return
|
||||
maxDepth -= 1
|
||||
seen[id(start)] = start
|
||||
# Make an alias for those arguments which are passed recursively to
|
||||
# objgrep for container objects.
|
||||
args = (paths, seen, showUnknowns, maxDepth)
|
||||
if isinstance(start, dict):
|
||||
for k, v in start.items():
|
||||
objgrep(k, goal, eq, path + "{" + repr(v) + "}", *args)
|
||||
objgrep(v, goal, eq, path + "[" + repr(k) + "]", *args)
|
||||
elif isinstance(start, (list, tuple, deque)):
|
||||
for idx, _elem in enumerate(start):
|
||||
objgrep(start[idx], goal, eq, path + "[" + str(idx) + "]", *args)
|
||||
elif isinstance(start, types.MethodType):
|
||||
objgrep(start.__self__, goal, eq, path + ".__self__", *args)
|
||||
objgrep(start.__func__, goal, eq, path + ".__func__", *args)
|
||||
objgrep(start.__self__.__class__, goal, eq, path + ".__self__.__class__", *args)
|
||||
elif hasattr(start, "__dict__"):
|
||||
for k, v in start.__dict__.items():
|
||||
objgrep(v, goal, eq, path + "." + k, *args)
|
||||
elif isinstance(start, weakref.ReferenceType):
|
||||
objgrep(start(), goal, eq, path + "()", *args)
|
||||
elif isinstance(
|
||||
start,
|
||||
(
|
||||
str,
|
||||
int,
|
||||
types.FunctionType,
|
||||
types.BuiltinMethodType,
|
||||
RegexType,
|
||||
float,
|
||||
type(None),
|
||||
IOBase,
|
||||
),
|
||||
) or type(start).__name__ in (
|
||||
"wrapper_descriptor",
|
||||
"method_descriptor",
|
||||
"member_descriptor",
|
||||
"getset_descriptor",
|
||||
):
|
||||
pass
|
||||
elif showUnknowns:
|
||||
print("unknown type", type(start), start)
|
||||
return paths
|
||||
|
||||
|
||||
__all__ = [
|
||||
"InvalidName",
|
||||
"ModuleNotFound",
|
||||
"ObjectNotFound",
|
||||
"QueueMethod",
|
||||
"namedModule",
|
||||
"namedObject",
|
||||
"namedClass",
|
||||
"namedAny",
|
||||
"requireModule",
|
||||
"safe_repr",
|
||||
"safe_str",
|
||||
"prefixedMethodNames",
|
||||
"addMethodNamesToDict",
|
||||
"prefixedMethods",
|
||||
"accumulateMethods",
|
||||
"fullFuncName",
|
||||
"qual",
|
||||
"getClass",
|
||||
"accumulateClassDict",
|
||||
"accumulateClassList",
|
||||
"isSame",
|
||||
"isLike",
|
||||
"modgrep",
|
||||
"isOfType",
|
||||
"findInstances",
|
||||
"objgrep",
|
||||
"filenameToModuleName",
|
||||
"fullyQualifiedName",
|
||||
]
|
||||
|
||||
|
||||
# This is to be removed when fixing #6986
|
||||
__all__.remove("objgrep")
|
||||
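A minimal sketch of the safe_repr()/safe_str() fallbacks defined above; the Broken class is invented for the example and the quoted output is abbreviated.

from twisted.python.reflect import safe_repr, safe_str

class Broken:
    def __repr__(self):
        raise RuntimeError("boom")

safe_repr(Broken())     # "<Broken instance at 0x... with repr error: ...>"
safe_str(b"\xc3\xa9")   # bytes that decode as UTF-8 come back as 'é'
safe_str(Broken())      # str() ends up calling the raising __repr__, so this
                        # returns a "<Broken instance at 0x... with str error: ...>" string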
63  .venv/lib/python3.12/site-packages/twisted/python/release.py  Normal file
@@ -0,0 +1,63 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
A release-automation toolkit.
|
||||
|
||||
Don't use this outside of Twisted.
|
||||
|
||||
Maintainer: Christopher Armstrong
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
|
||||
# errors
|
||||
|
||||
|
||||
class DirectoryExists(OSError):
|
||||
"""
|
||||
Some directory exists when it shouldn't.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class DirectoryDoesntExist(OSError):
|
||||
"""
|
||||
Some directory doesn't exist when it should.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class CommandFailed(OSError):
|
||||
pass
|
||||
|
||||
|
||||
# utilities
|
||||
|
||||
|
||||
def sh(command, null=True, prompt=False):
|
||||
"""
|
||||
I'll try to execute C{command}, and if C{prompt} is true, I'll
|
||||
ask before running it. If the command returns something other
|
||||
than 0, I'll raise C{CommandFailed(command)}.
|
||||
"""
|
||||
print("--$", command)
|
||||
|
||||
if prompt:
|
||||
if input("run ?? ").startswith("n"):
|
||||
return
|
||||
if null:
|
||||
command = "%s > /dev/null" % command
|
||||
if os.system(command) != 0:
|
||||
raise CommandFailed(command)
|
||||
|
||||
|
||||
def runChdirSafe(f, *args, **kw):
|
||||
origdir = os.path.abspath(".")
|
||||
try:
|
||||
return f(*args, **kw)
|
||||
finally:
|
||||
os.chdir(origdir)
|
||||
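A tiny sketch of runChdirSafe(); the /tmp directory is only illustrative.

import os
from twisted.python.release import runChdirSafe

def _inTmp():
    os.chdir("/tmp")        # hypothetical working directory
    return os.getcwd()

before = os.getcwd()
runChdirSafe(_inTmp)        # even if _inTmp raised, the cwd is restored
assert os.getcwd() == before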
242  .venv/lib/python3.12/site-packages/twisted/python/roots.py  Normal file
@@ -0,0 +1,242 @@
|
||||
# -*- test-case-name: twisted.test.test_roots -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Twisted Python Roots: an abstract hierarchy representation for Twisted.
|
||||
|
||||
Maintainer: Glyph Lefkowitz
|
||||
"""
|
||||
|
||||
|
||||
from twisted.python import reflect
|
||||
|
||||
|
||||
class NotSupportedError(NotImplementedError):
|
||||
"""
|
||||
An exception meaning that the tree-manipulation operation
|
||||
you're attempting to perform is not supported.
|
||||
"""
|
||||
|
||||
|
||||
class Request:
|
||||
"""I am an abstract representation of a request for an entity.
|
||||
|
||||
I also function as the response. The request is responded to by calling
|
||||
self.write(data) until there is no data left and then calling
|
||||
self.finish().
|
||||
"""
|
||||
|
||||
# This attribute should be set to the string name of the protocol being
|
||||
# responded to (e.g. HTTP or FTP)
|
||||
wireProtocol = None
|
||||
|
||||
def write(self, data):
|
||||
"""Add some data to the response to this request."""
|
||||
raise NotImplementedError("%s.write" % reflect.qual(self.__class__))
|
||||
|
||||
def finish(self):
|
||||
"""The response to this request is finished; flush all data to the network stream."""
|
||||
raise NotImplementedError("%s.finish" % reflect.qual(self.__class__))
|
||||
|
||||
|
||||
class Entity:
|
||||
"""I am a terminal object in a hierarchy, with no children.
|
||||
|
||||
I represent a null interface; certain non-instance objects (strings and
|
||||
integers, notably) are Entities.
|
||||
|
||||
Methods on this class are suggested to be implemented, but are not
|
||||
required, and will be emulated on a per-protocol basis for types which do
|
||||
not handle them.
|
||||
"""
|
||||
|
||||
def render(self, request):
|
||||
"""
|
||||
I produce a stream of bytes for the request, by calling request.write()
|
||||
and request.finish().
|
||||
"""
|
||||
raise NotImplementedError("%s.render" % reflect.qual(self.__class__))
|
||||
|
||||
|
||||
class Collection:
|
||||
"""I represent a static collection of entities.
|
||||
|
||||
I contain methods designed to represent collections that can be dynamically
|
||||
created.
|
||||
"""
|
||||
|
||||
def __init__(self, entities=None):
|
||||
"""Initialize me."""
|
||||
if entities is not None:
|
||||
self.entities = entities
|
||||
else:
|
||||
self.entities = {}
|
||||
|
||||
def getStaticEntity(self, name):
|
||||
"""Get an entity that was added to me using putEntity.
|
||||
|
||||
This method will return 'None' if it fails.
|
||||
"""
|
||||
return self.entities.get(name)
|
||||
|
||||
def getDynamicEntity(self, name, request):
|
||||
"""Subclass this to generate an entity on demand.
|
||||
|
||||
This method should return 'None' if it fails.
|
||||
"""
|
||||
|
||||
def getEntity(self, name, request):
|
||||
"""Retrieve an entity from me.
|
||||
|
||||
I will first attempt to retrieve an entity statically; static entities
|
||||
will obscure dynamic ones. If that fails, I will retrieve the entity
|
||||
dynamically.
|
||||
|
||||
If I cannot retrieve an entity, I will return 'None'.
|
||||
"""
|
||||
ent = self.getStaticEntity(name)
|
||||
if ent is not None:
|
||||
return ent
|
||||
ent = self.getDynamicEntity(name, request)
|
||||
if ent is not None:
|
||||
return ent
|
||||
return None
|
||||
|
||||
def putEntity(self, name, entity):
|
||||
"""Store a static reference on 'name' for 'entity'.
|
||||
|
||||
Raises a KeyError if the operation fails.
|
||||
"""
|
||||
self.entities[name] = entity
|
||||
|
||||
def delEntity(self, name):
|
||||
"""Remove a static reference for 'name'.
|
||||
|
||||
Raises a KeyError if the operation fails.
|
||||
"""
|
||||
del self.entities[name]
|
||||
|
||||
def storeEntity(self, name, request):
|
||||
"""Store an entity for 'name', based on the content of 'request'."""
|
||||
raise NotSupportedError("%s.storeEntity" % reflect.qual(self.__class__))
|
||||
|
||||
def removeEntity(self, name, request):
|
||||
"""Remove an entity for 'name', based on the content of 'request'."""
|
||||
raise NotSupportedError("%s.removeEntity" % reflect.qual(self.__class__))
|
||||
|
||||
def listStaticEntities(self):
|
||||
"""Retrieve a list of all name, entity pairs that I store references to.
|
||||
|
||||
See getStaticEntity.
|
||||
"""
|
||||
return self.entities.items()
|
||||
|
||||
def listDynamicEntities(self, request):
|
||||
"""A list of all name, entity that I can generate on demand.
|
||||
|
||||
See getDynamicEntity.
|
||||
"""
|
||||
return []
|
||||
|
||||
def listEntities(self, request):
|
||||
"""Retrieve a list of all name, entity pairs I contain.
|
||||
|
||||
See getEntity.
|
||||
"""
|
||||
return self.listStaticEntities() + self.listDynamicEntities(request)
|
||||
|
||||
def listStaticNames(self):
|
||||
"""Retrieve a list of the names of entities that I store references to.
|
||||
|
||||
See getStaticEntity.
|
||||
"""
|
||||
return self.entities.keys()
|
||||
|
||||
def listDynamicNames(self):
|
||||
"""Retrieve a list of the names of entities that I store references to.
|
||||
|
||||
See getDynamicEntity.
|
||||
"""
|
||||
return []
|
||||
|
||||
def listNames(self, request):
|
||||
"""Retrieve a list of all names for entities that I contain.
|
||||
|
||||
See getEntity.
|
||||
"""
|
||||
return self.listStaticNames()
|
||||
|
||||
|
||||
class ConstraintViolation(Exception):
|
||||
"""An exception raised when a constraint is violated."""
|
||||
|
||||
|
||||
class Constrained(Collection):
|
||||
"""A collection that has constraints on its names and/or entities."""
|
||||
|
||||
def nameConstraint(self, name):
|
||||
"""A method that determines whether an entity may be added to me with a given name.
|
||||
|
||||
If the constraint is satisfied, return 1; if the constraint is not
|
||||
satisfied, either return 0 or raise a descriptive ConstraintViolation.
|
||||
"""
|
||||
return 1
|
||||
|
||||
def entityConstraint(self, entity):
|
||||
"""A method that determines whether an entity may be added to me.
|
||||
|
||||
If the constraint is satisfied, return 1; if the constraint is not
|
||||
satisfied, either return 0 or raise a descriptive ConstraintViolation.
|
||||
"""
|
||||
return 1
|
||||
|
||||
def reallyPutEntity(self, name, entity):
|
||||
Collection.putEntity(self, name, entity)
|
||||
|
||||
def putEntity(self, name, entity):
|
||||
"""Store an entity if it meets both constraints.
|
||||
|
||||
Otherwise raise a ConstraintViolation.
|
||||
"""
|
||||
if self.nameConstraint(name):
|
||||
if self.entityConstraint(entity):
|
||||
self.reallyPutEntity(name, entity)
|
||||
else:
|
||||
raise ConstraintViolation("Entity constraint violated.")
|
||||
else:
|
||||
raise ConstraintViolation("Name constraint violated.")
|
||||
|
||||
|
||||
class Locked(Constrained):
|
||||
"""A collection that can be locked from adding entities."""
|
||||
|
||||
locked = 0
|
||||
|
||||
def lock(self):
|
||||
self.locked = 1
|
||||
|
||||
def entityConstraint(self, entity):
|
||||
return not self.locked
|
||||
|
||||
|
||||
class Homogenous(Constrained):
|
||||
"""A homogenous collection of entities.
|
||||
|
||||
I will only contain entities that are an instance of the class or type
|
||||
specified by my 'entityType' attribute.
|
||||
"""
|
||||
|
||||
entityType = object
|
||||
|
||||
def entityConstraint(self, entity):
|
||||
if isinstance(entity, self.entityType):
|
||||
return 1
|
||||
else:
|
||||
raise ConstraintViolation(f"{entity} of incorrect type ({self.entityType})")
|
||||
|
||||
def getNameType(self):
|
||||
return "Name"
|
||||
|
||||
def getEntityType(self):
|
||||
return self.entityType.__name__
|
||||
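A small usage sketch (illustrative only, not part of the vendored roots.py above; the IntCollection class and names are hypothetical) showing how the Collection/Constrained/Homogenous API fits together:

from twisted.python.roots import ConstraintViolation, Homogenous

# A Homogenous collection only accepts entities of its entityType.
class IntCollection(Homogenous):
    entityType = int

c = IntCollection()
c.putEntity("answer", 42)            # satisfies the entity constraint
print(c.getEntity("answer", None))   # -> 42 (the request is unused for static lookups)

try:
    c.putEntity("oops", "not an int")
except ConstraintViolation as e:
    print("rejected:", e)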
204
.venv/lib/python3.12/site-packages/twisted/python/runtime.py
Normal file
204
.venv/lib/python3.12/site-packages/twisted/python/runtime.py
Normal file
@@ -0,0 +1,204 @@
|
||||
# -*- test-case-name: twisted.python.test.test_runtime -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
__all__ = [
|
||||
"seconds",
|
||||
"shortPythonVersion",
|
||||
"Platform",
|
||||
"platform",
|
||||
"platformType",
|
||||
]
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
from time import time as seconds
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def shortPythonVersion() -> str:
|
||||
"""
|
||||
Returns the Python version as a dot-separated string.
|
||||
"""
|
||||
return "%s.%s.%s" % sys.version_info[:3]
|
||||
|
||||
|
||||
knownPlatforms = {
|
||||
"nt": "win32",
|
||||
"ce": "win32",
|
||||
"posix": "posix",
|
||||
"java": "java",
|
||||
"org.python.modules.os": "java",
|
||||
}
|
||||
|
||||
|
||||
class Platform:
|
||||
"""
|
||||
Gives us information about the platform we're running on.
|
||||
"""
|
||||
|
||||
type: Optional[str] = knownPlatforms.get(os.name)
|
||||
seconds = staticmethod(seconds)
|
||||
_platform = sys.platform
|
||||
|
||||
def __init__(
|
||||
self, name: Optional[str] = None, platform: Optional[str] = None
|
||||
) -> None:
|
||||
if name is not None:
|
||||
self.type = knownPlatforms.get(name)
|
||||
if platform is not None:
|
||||
self._platform = platform
|
||||
|
||||
def isKnown(self) -> bool:
|
||||
"""
|
||||
Do we know about this platform?
|
||||
|
||||
@return: Boolean indicating whether this is a known platform or not.
|
||||
"""
|
||||
return self.type is not None
|
||||
|
||||
def getType(self) -> Optional[str]:
|
||||
"""
|
||||
Get platform type.
|
||||
|
||||
@return: Either 'posix', 'win32' or 'java'
|
||||
"""
|
||||
return self.type
|
||||
|
||||
def isMacOSX(self) -> bool:
|
||||
"""
|
||||
Check if current platform is macOS.
|
||||
|
||||
@return: C{True} if the current platform has been detected as macOS.
|
||||
"""
|
||||
return self._platform == "darwin"
|
||||
|
||||
def isWinNT(self) -> bool:
|
||||
"""
|
||||
Are we running in Windows NT?
|
||||
|
||||
This is deprecated and always returns C{True} on win32 because
|
||||
Twisted only supports Windows NT-derived platforms at this point.
|
||||
|
||||
@return: C{True} if the current platform has been detected as
|
||||
Windows NT.
|
||||
"""
|
||||
warnings.warn(
|
||||
"twisted.python.runtime.Platform.isWinNT was deprecated in "
|
||||
"Twisted 13.0. Use Platform.isWindows instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return self.isWindows()
|
||||
|
||||
def isWindows(self) -> bool:
|
||||
"""
|
||||
Are we running in Windows?
|
||||
|
||||
@return: C{True} if the current platform has been detected as
|
||||
Windows.
|
||||
"""
|
||||
return self.getType() == "win32"
|
||||
|
||||
def isVista(self) -> bool:
|
||||
"""
|
||||
Check if current platform is Windows Vista or Windows Server 2008.
|
||||
|
||||
@return: C{True} if the current platform has been detected as Vista
|
||||
"""
|
||||
return sys.platform == "win32" and sys.getwindowsversion().major == 6
|
||||
|
||||
def isLinux(self) -> bool:
|
||||
"""
|
||||
Check if current platform is Linux.
|
||||
|
||||
@return: C{True} if the current platform has been detected as Linux.
|
||||
"""
|
||||
return self._platform.startswith("linux")
|
||||
|
||||
def isDocker(self, _initCGroupLocation: str = "/proc/1/cgroup") -> bool:
|
||||
"""
|
||||
Check if the current platform is Linux in a Docker container.
|
||||
|
||||
@return: C{True} if the current platform has been detected as Linux
|
||||
inside a Docker container.
|
||||
"""
|
||||
if not self.isLinux():
|
||||
return False
|
||||
|
||||
from twisted.python.filepath import FilePath
|
||||
|
||||
# Ask for the cgroups of init (pid 1)
|
||||
initCGroups = FilePath(_initCGroupLocation)
|
||||
if initCGroups.exists():
|
||||
# The cgroups file looks like "2:cpu:/". The third element will
|
||||
# begin with /docker if it is inside a Docker container.
|
||||
controlGroups = [
|
||||
x.split(b":") for x in initCGroups.getContent().split(b"\n")
|
||||
]
|
||||
|
||||
for group in controlGroups:
|
||||
if len(group) == 3 and group[2].startswith(b"/docker/"):
|
||||
# If it starts with /docker/, we're in a docker container
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def _supportsSymlinks(self) -> bool:
|
||||
"""
|
||||
Check for symlink support usable for Twisted's purposes.
|
||||
|
||||
@return: C{True} if symlinks are supported on the current platform,
|
||||
otherwise C{False}.
|
||||
"""
|
||||
if self.isWindows():
|
||||
# We do the isWindows() check as newer Pythons support the symlink
|
||||
# support in Vista+, but only if you have some obscure permission
|
||||
# (SeCreateSymbolicLinkPrivilege), which can only be given on
|
||||
# platforms with msc.exe (so, Business/Enterprise editions).
|
||||
# This uncommon requirement makes the Twisted test suite's tests fail
|
||||
# in 99.99% of cases as general users don't have permission to do
|
||||
# it, even if there is "symlink support".
|
||||
return False
|
||||
else:
|
||||
# If we're not on Windows, check for existence of os.symlink.
|
||||
try:
|
||||
os.symlink
|
||||
except AttributeError:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def supportsThreads(self) -> bool:
|
||||
"""
|
||||
Can threads be created?
|
||||
|
||||
@return: C{True} if threads are supported on the current platform.
|
||||
"""
|
||||
try:
|
||||
import threading
|
||||
|
||||
return threading is not None # shh pyflakes
|
||||
except ImportError:
|
||||
return False
|
||||
|
||||
def supportsINotify(self) -> bool:
|
||||
"""
|
||||
Return C{True} if we can use the inotify API on this platform.
|
||||
|
||||
@since: 10.1
|
||||
"""
|
||||
try:
|
||||
from twisted.python._inotify import INotifyError, init
|
||||
except ImportError:
|
||||
return False
|
||||
|
||||
try:
|
||||
os.close(init())
|
||||
except INotifyError:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
platform = Platform()
|
||||
platformType = platform.getType()
|
||||
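For orientation, a brief sketch (not part of the vendored runtime.py above) of how the Platform singleton is typically consulted:

from twisted.python.runtime import platform, platformType

# platformType is one of "posix", "win32", "java", or None for unknown platforms.
print(platformType)

if platform.isWindows():
    print("Windows NT family")
elif platform.isLinux():
    # isDocker() additionally inspects /proc/1/cgroup on Linux.
    print("Linux; in Docker:", platform.isDocker())

print("threads:", platform.supportsThreads())
print("inotify:", platform.supportsINotify())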
76
.venv/lib/python3.12/site-packages/twisted/python/sendmsg.py
Normal file
76
.venv/lib/python3.12/site-packages/twisted/python/sendmsg.py
Normal file
@@ -0,0 +1,76 @@
# -*- test-case-name: twisted.test.test_sendmsg -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
sendmsg(2) and recvmsg(2) support for Python.
"""


from collections import namedtuple
from socket import CMSG_SPACE, SCM_RIGHTS, socket as Socket
from typing import List, Tuple

__all__ = ["sendmsg", "recvmsg", "getSocketFamily", "SCM_RIGHTS"]


ReceivedMessage = namedtuple("ReceivedMessage", ["data", "ancillary", "flags"])


def sendmsg(
    socket: Socket,
    data: bytes,
    ancillary: List[Tuple[int, int, bytes]] = [],
    flags: int = 0,
) -> int:
    """
    Send a message on a socket.

    @param socket: The socket to send the message on.
    @param data: Bytes to write to the socket.
    @param ancillary: Extra data to send over the socket outside of the normal
        datagram or stream mechanism. By default no ancillary data is sent.
    @param flags: Flags to affect how the message is sent. See the C{MSG_}
        constants in the sendmsg(2) manual page. By default no flags are set.

    @return: The return value of the underlying syscall, if it succeeds.
    """
    return socket.sendmsg([data], ancillary, flags)


def recvmsg(
    socket: Socket, maxSize: int = 8192, cmsgSize: int = 4096, flags: int = 0
) -> ReceivedMessage:
    """
    Receive a message on a socket.

    @param socket: The socket to receive the message on.
    @param maxSize: The maximum number of bytes to receive from the socket using
        the datagram or stream mechanism. The default maximum is 8192.
    @param cmsgSize: The maximum number of bytes to receive from the socket
        outside of the normal datagram or stream mechanism. The default maximum
        is 4096.
    @param flags: Flags to affect how the message is received. See the C{MSG_}
        constants in the recvmsg(2) manual page. By default no flags are set.

    @return: A named 3-tuple of the bytes received using the datagram/stream
        mechanism, a L{list} of L{tuple}s giving ancillary received data, and
        flags as an L{int} describing the data received.
    """
    # In Twisted's _sendmsg.c, the cmsg_space was defined as:
    #     int cmsg_size = 4096;
    #     cmsg_space = CMSG_SPACE(cmsg_size);
    # Since the default in Python 3's socket is 0, we need to define our
    # own default of 4096. -hawkie
    data, ancillary, flags = socket.recvmsg(maxSize, CMSG_SPACE(cmsgSize), flags)[0:3]

    return ReceivedMessage(data=data, ancillary=ancillary, flags=flags)


def getSocketFamily(socket: Socket) -> int:
    """
    Return the family of the given socket.

    @param socket: The socket to get the family of.
    """
    return socket.family
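A minimal sketch (POSIX only, not part of the vendored sendmsg.py above) of passing a file descriptor over an AF_UNIX socket pair with these helpers:

import os
import socket
from struct import pack, unpack

from twisted.python.sendmsg import SCM_RIGHTS, recvmsg, sendmsg

left, right = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)

# Duplicate stdout and ship the descriptor as SCM_RIGHTS ancillary data.
fd = os.dup(1)
sendmsg(left, b"here is a descriptor", [(socket.SOL_SOCKET, SCM_RIGHTS, pack("i", fd))])

message = recvmsg(right)
[(level, cmsgType, packedFD)] = message.ancillary
[receivedFD] = unpack("i", packedFD)
os.write(receivedFD, b"hello via the passed descriptor\n")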
@@ -0,0 +1,88 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
|
||||
"""
|
||||
Creation of Windows shortcuts.
|
||||
|
||||
Requires win32all.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import pythoncom # type: ignore[import-untyped]
|
||||
from win32com.shell import shell
|
||||
|
||||
|
||||
def open(filename):
|
||||
"""
|
||||
Open an existing shortcut for reading.
|
||||
|
||||
@return: The shortcut object
|
||||
@rtype: Shortcut
|
||||
"""
|
||||
sc = Shortcut()
|
||||
sc.load(filename)
|
||||
return sc
|
||||
|
||||
|
||||
class Shortcut:
|
||||
"""
|
||||
A shortcut on Win32.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
path=None,
|
||||
arguments=None,
|
||||
description=None,
|
||||
workingdir=None,
|
||||
iconpath=None,
|
||||
iconidx=0,
|
||||
):
|
||||
"""
|
||||
@param path: Location of the target
|
||||
@param arguments: If path points to an executable, optional arguments
|
||||
to pass
|
||||
@param description: Human-readable description of target
|
||||
@param workingdir: Directory from which target is launched
|
||||
@param iconpath: Filename that contains an icon for the shortcut
|
||||
@param iconidx: If iconpath is set, optional index of the icon desired
|
||||
"""
|
||||
self._base = pythoncom.CoCreateInstance(
|
||||
shell.CLSID_ShellLink,
|
||||
None,
|
||||
pythoncom.CLSCTX_INPROC_SERVER,
|
||||
shell.IID_IShellLink,
|
||||
)
|
||||
if path is not None:
|
||||
self.SetPath(os.path.abspath(path))
|
||||
if arguments is not None:
|
||||
self.SetArguments(arguments)
|
||||
if description is not None:
|
||||
self.SetDescription(description)
|
||||
if workingdir is not None:
|
||||
self.SetWorkingDirectory(os.path.abspath(workingdir))
|
||||
if iconpath is not None:
|
||||
self.SetIconLocation(os.path.abspath(iconpath), iconidx)
|
||||
|
||||
def load(self, filename):
|
||||
"""
|
||||
Read a shortcut file from disk.
|
||||
"""
|
||||
self._base.QueryInterface(pythoncom.IID_IPersistFile).Load(
|
||||
os.path.abspath(filename)
|
||||
)
|
||||
|
||||
def save(self, filename):
|
||||
"""
|
||||
Write the shortcut to disk.
|
||||
|
||||
The file should be named something.lnk.
|
||||
"""
|
||||
self._base.QueryInterface(pythoncom.IID_IPersistFile).Save(
|
||||
os.path.abspath(filename), 0
|
||||
)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._base, name)
|
||||
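For orientation (illustrative only; requires Windows with win32all, and the paths shown are placeholders), creating and re-reading a shortcut with the class above looks roughly like:

from twisted.python.shortcut import Shortcut, open as openShortcut

sc = Shortcut(
    path=r"C:\Windows\notepad.exe",
    description="Notepad",
    workingdir=r"C:\Windows",
)
sc.save(r"C:\Temp\notepad.lnk")   # save() expects a .lnk filename

# load() / open() read an existing .lnk; other attribute access is
# delegated to the underlying IShellLink COM object via __getattr__.
existing = openShortcut(r"C:\Temp\notepad.lnk")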
106
.venv/lib/python3.12/site-packages/twisted/python/syslog.py
Normal file
106
.venv/lib/python3.12/site-packages/twisted/python/syslog.py
Normal file
@@ -0,0 +1,106 @@
|
||||
# -*- test-case-name: twisted.python.test.test_syslog -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Classes and utility functions for integrating Twisted and syslog.
|
||||
|
||||
You probably want to call L{startLogging}.
|
||||
"""
|
||||
|
||||
syslog = __import__("syslog")
|
||||
|
||||
from twisted.python import log
|
||||
|
||||
# These defaults come from the Python syslog docs.
|
||||
DEFAULT_OPTIONS = 0
|
||||
DEFAULT_FACILITY = syslog.LOG_USER
|
||||
|
||||
|
||||
class SyslogObserver:
|
||||
"""
|
||||
A log observer for logging to syslog.
|
||||
|
||||
See L{twisted.python.log} for context.
|
||||
|
||||
This logObserver will automatically use LOG_ALERT priority for logged
|
||||
failures (such as from C{log.err()}), but you can use any priority and
|
||||
facility by setting the 'C{syslogPriority}' and 'C{syslogFacility}' keys in
|
||||
the event dict.
|
||||
"""
|
||||
|
||||
openlog = syslog.openlog
|
||||
syslog = syslog.syslog
|
||||
|
||||
def __init__(self, prefix, options=DEFAULT_OPTIONS, facility=DEFAULT_FACILITY):
|
||||
"""
|
||||
@type prefix: C{str}
|
||||
@param prefix: The syslog prefix to use.
|
||||
|
||||
@type options: C{int}
|
||||
@param options: A bitvector represented as an integer of the syslog
|
||||
options to use.
|
||||
|
||||
@type facility: C{int}
|
||||
@param facility: An indication to the syslog daemon of what sort of
|
||||
program this is (essentially, an additional arbitrary metadata
|
||||
classification for messages sent to syslog by this observer).
|
||||
"""
|
||||
self.openlog(prefix, options, facility)
|
||||
|
||||
def emit(self, eventDict):
|
||||
"""
|
||||
Send a message event to the I{syslog}.
|
||||
|
||||
@param eventDict: The event to send. If it has no C{'message'} key, it
|
||||
will be ignored. Otherwise, if it has C{'syslogPriority'} and/or
|
||||
C{'syslogFacility'} keys, these will be used as the syslog priority
|
||||
and facility. If it has no C{'syslogPriority'} key but a true
|
||||
value for the C{'isError'} key, the B{LOG_ALERT} priority will be
|
||||
used; if it has a false value for C{'isError'}, B{LOG_INFO} will be
|
||||
used. If the C{'message'} key is multiline, each line will be sent
|
||||
to the syslog separately.
|
||||
"""
|
||||
# Figure out what the message-text is.
|
||||
text = log.textFromEventDict(eventDict)
|
||||
if text is None:
|
||||
return
|
||||
|
||||
# Figure out what syslog parameters we might need to use.
|
||||
priority = syslog.LOG_INFO
|
||||
facility = 0
|
||||
if eventDict["isError"]:
|
||||
priority = syslog.LOG_ALERT
|
||||
if "syslogPriority" in eventDict:
|
||||
priority = int(eventDict["syslogPriority"])
|
||||
if "syslogFacility" in eventDict:
|
||||
facility = int(eventDict["syslogFacility"])
|
||||
|
||||
# Break the message up into lines and send them.
|
||||
lines = text.split("\n")
|
||||
while lines[-1:] == [""]:
|
||||
lines.pop()
|
||||
|
||||
firstLine = True
|
||||
for line in lines:
|
||||
if firstLine:
|
||||
firstLine = False
|
||||
else:
|
||||
line = "\t" + line
|
||||
self.syslog(
|
||||
priority | facility, "[{}] {}".format(eventDict["system"], line)
|
||||
)
|
||||
|
||||
|
||||
def startLogging(
|
||||
prefix="Twisted", options=DEFAULT_OPTIONS, facility=DEFAULT_FACILITY, setStdout=1
|
||||
):
|
||||
"""
|
||||
Send all Twisted logging output to syslog from now on.
|
||||
|
||||
The prefix, options and facility arguments are passed to
|
||||
C{syslog.openlog()}, see the Python syslog documentation for details. For
|
||||
other parameters, see L{twisted.python.log.startLoggingWithObserver}.
|
||||
"""
|
||||
obs = SyslogObserver(prefix, options, facility)
|
||||
log.startLoggingWithObserver(obs.emit, setStdout=setStdout)
|
||||
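A short sketch (not part of the vendored syslog.py above; the "myapp" prefix is a placeholder) of routing Twisted's legacy log output to syslog with the helper above:

import syslog as stdsyslog

from twisted.python import log
from twisted.python.syslog import startLogging

# Open syslog with an application prefix and route all log events there.
startLogging(prefix="myapp", setStdout=0)

log.msg("service starting")                                  # LOG_INFO by default
log.msg("disk is full", syslogPriority=stdsyslog.LOG_ERR)    # explicit priority key
log.err(RuntimeError("boom"))                                # failures use LOG_ALERT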
154
.venv/lib/python3.12/site-packages/twisted/python/systemd.py
Normal file
154
.venv/lib/python3.12/site-packages/twisted/python/systemd.py
Normal file
@@ -0,0 +1,154 @@
|
||||
# -*- test-case-name: twisted.python.test.test_systemd -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Integration with systemd.
|
||||
|
||||
Currently only the minimum APIs necessary for using systemd's socket activation
|
||||
feature are supported.
|
||||
"""
|
||||
|
||||
|
||||
__all__ = ["ListenFDs"]
|
||||
|
||||
from os import getpid
|
||||
from typing import Dict, List, Mapping, Optional, Sequence
|
||||
|
||||
from attrs import Factory, define
|
||||
|
||||
|
||||
@define
|
||||
class ListenFDs:
|
||||
"""
|
||||
L{ListenFDs} provides access to file descriptors inherited from systemd.
|
||||
|
||||
Typically L{ListenFDs.fromEnvironment} should be used to construct a new
|
||||
instance of L{ListenFDs}.
|
||||
|
||||
@cvar _START: File descriptors inherited from systemd are always
|
||||
consecutively numbered, with a fixed lowest "starting" descriptor. This
|
||||
gives the default starting descriptor. Since this must agree with the
|
||||
value systemd is using, it typically should not be overridden.
|
||||
|
||||
@ivar _descriptors: A C{list} of C{int} giving the descriptors which were
|
||||
inherited.
|
||||
|
||||
@ivar _names: A L{Sequence} of C{str} giving the names of the descriptors
|
||||
which were inherited.
|
||||
"""
|
||||
|
||||
_descriptors: Sequence[int]
|
||||
_names: Sequence[str] = Factory(tuple)
|
||||
|
||||
_START = 3
|
||||
|
||||
@classmethod
|
||||
def fromEnvironment(
|
||||
cls,
|
||||
environ: Optional[Mapping[str, str]] = None,
|
||||
start: Optional[int] = None,
|
||||
) -> "ListenFDs":
|
||||
"""
|
||||
@param environ: A dictionary-like object to inspect to discover
|
||||
inherited descriptors. By default, L{None}, indicating that the
|
||||
real process environment should be inspected. The default is
|
||||
suitable for typical usage.
|
||||
|
||||
@param start: An integer giving the lowest value of an inherited
|
||||
descriptor systemd will give us. By default, L{None}, indicating
|
||||
the known correct (that is, in agreement with systemd) value will be
|
||||
used. The default is suitable for typical usage.
|
||||
|
||||
@return: A new instance of C{cls} which can be used to look up the
|
||||
descriptors which have been inherited.
|
||||
"""
|
||||
if environ is None:
|
||||
from os import environ as _environ
|
||||
|
||||
environ = _environ
|
||||
if start is None:
|
||||
start = cls._START
|
||||
|
||||
if str(getpid()) == environ.get("LISTEN_PID"):
|
||||
descriptors: List[int] = _parseDescriptors(start, environ)
|
||||
names: Sequence[str] = _parseNames(environ)
|
||||
else:
|
||||
descriptors = []
|
||||
names = ()
|
||||
|
||||
# They may both be missing (consistent with not running under systemd
|
||||
# at all) or they may both be present (consistent with running under
|
||||
# systemd 227 or newer). It is not allowed for only one to be present
|
||||
# or for the values to disagree with each other (per
|
||||
# systemd.socket(5), systemd will use a default value based on the
|
||||
# socket unit name if the socket unit doesn't explicitly define a name
|
||||
# with FileDescriptorName).
|
||||
if len(names) != len(descriptors):
|
||||
return cls([], ())
|
||||
|
||||
return cls(descriptors, names)
|
||||
|
||||
def inheritedDescriptors(self) -> List[int]:
|
||||
"""
|
||||
@return: The configured descriptors.
|
||||
"""
|
||||
return list(self._descriptors)
|
||||
|
||||
def inheritedNamedDescriptors(self) -> Dict[str, int]:
|
||||
"""
|
||||
@return: A mapping from the names of configured descriptors to
|
||||
their integer values.
|
||||
"""
|
||||
return dict(zip(self._names, self._descriptors))
|
||||
|
||||
|
||||
def _parseDescriptors(start: int, environ: Mapping[str, str]) -> List[int]:
|
||||
"""
|
||||
Parse the I{LISTEN_FDS} environment variable supplied by systemd.
|
||||
|
||||
@param start: systemd provides only a count of the number of descriptors
|
||||
that have been inherited. This is the integer value of the first
|
||||
inherited descriptor. Subsequent inherited descriptors are numbered
|
||||
counting up from here. See L{ListenFDs._START}.
|
||||
|
||||
@param environ: The environment variable mapping in which to look for the
|
||||
value to parse.
|
||||
|
||||
@return: The integer values of the inherited file descriptors, in order.
|
||||
"""
|
||||
try:
|
||||
count = int(environ["LISTEN_FDS"])
|
||||
except (KeyError, ValueError):
|
||||
return []
|
||||
else:
|
||||
descriptors = list(range(start, start + count))
|
||||
|
||||
# Remove the information from the environment so that a second
|
||||
# `ListenFDs` cannot find the same information. This is a precaution
|
||||
# against some application code accidentally trying to handle the same
|
||||
# inherited descriptor more than once - which probably wouldn't work.
|
||||
#
|
||||
# This precaution is perhaps somewhat questionable since it is up to
|
||||
# the application itself to know whether its handling of the file
|
||||
# descriptor will actually be safe. Also, nothing stops an
|
||||
# application from getting the same descriptor more than once using
|
||||
# multiple calls to `ListenFDs.inheritedDescriptors()` on the same
|
||||
# `ListenFDs` instance.
|
||||
del environ["LISTEN_PID"], environ["LISTEN_FDS"]
|
||||
return descriptors
|
||||
|
||||
|
||||
def _parseNames(environ: Mapping[str, str]) -> Sequence[str]:
|
||||
"""
|
||||
Parse the I{LISTEN_FDNAMES} environment variable supplied by systemd.
|
||||
|
||||
@param environ: The environment variable mapping in which to look for the
|
||||
value to parse.
|
||||
|
||||
@return: The names of the inherited descriptors, in order.
|
||||
"""
|
||||
names = environ.get("LISTEN_FDNAMES", "")
|
||||
if len(names) > 0:
|
||||
return tuple(names.split(":"))
|
||||
return ()
|
||||
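An illustrative sketch (not part of the vendored systemd.py above; the socket unit and names are hypothetical) of consuming systemd socket activation with L{ListenFDs}:

import socket

from twisted.python.systemd import ListenFDs

# Reads LISTEN_PID / LISTEN_FDS / LISTEN_FDNAMES from the real environment.
fds = ListenFDs.fromEnvironment()

for name, fd in fds.inheritedNamedDescriptors().items():
    # Adopt each inherited descriptor as a Python socket object.
    sock = socket.socket(fileno=fd)
    print(name, sock.getsockname())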
@@ -0,0 +1,3 @@
"""
Unit tests for L{twisted.python}.
"""
@@ -0,0 +1,23 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
A module that is deprecated, used by L{twisted.python.test.test_deprecate} for
testing purposes.
"""


from incremental import Version

from twisted.python.deprecate import deprecatedModuleAttribute

# Known module-level attributes.
DEPRECATED_ATTRIBUTE = 42
ANOTHER_ATTRIBUTE = "hello"


version = Version("Twisted", 8, 0, 0)
message = "Oh noes!"


deprecatedModuleAttribute(version, message, __name__, "DEPRECATED_ATTRIBUTE")
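To illustrate what that registration does (sketch only, not part of the vendored module; the import path is assumed from the docstring to be twisted.python.test.deprecatedattributes), accessing the attribute emits a DeprecationWarning carrying the version and message:

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    from twisted.python.test import deprecatedattributes

    deprecatedattributes.DEPRECATED_ATTRIBUTE  # access triggers the deprecation hook

print(caught[-1].category)   # <class 'DeprecationWarning'>
print(caught[-1].message)    # mentions Twisted 8.0.0 and "Oh noes!"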
@@ -0,0 +1,57 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Facilities for helping test code which interacts with Python's module system
to load code.
"""

import sys
from types import ModuleType
from typing import Iterable, List, Tuple

from twisted.python.filepath import FilePath


class TwistedModulesMixin:
    """
    A mixin for C{twisted.trial.unittest.SynchronousTestCase} providing useful
    methods for manipulating Python's module system.
    """

    def replaceSysPath(self, sysPath: List[str]) -> None:
        """
        Replace sys.path, for the duration of the test, with the given value.
        """
        originalSysPath = sys.path[:]

        def cleanUpSysPath() -> None:
            sys.path[:] = originalSysPath

        self.addCleanup(cleanUpSysPath)  # type: ignore[attr-defined]
        sys.path[:] = sysPath

    def replaceSysModules(self, sysModules: Iterable[Tuple[str, ModuleType]]) -> None:
        """
        Replace sys.modules, for the duration of the test, with the given value.
        """
        originalSysModules = sys.modules.copy()

        def cleanUpSysModules() -> None:
            sys.modules.clear()
            sys.modules.update(originalSysModules)

        self.addCleanup(cleanUpSysModules)  # type: ignore[attr-defined]
        sys.modules.clear()
        sys.modules.update(sysModules)

    def pathEntryWithOnePackage(self, pkgname: str = "test_package") -> FilePath[str]:
        """
        Generate a L{FilePath} with one package, named C{pkgname}, on it, and
        return the L{FilePath} of the path entry.
        """
        entry = FilePath(self.mktemp())  # type: ignore[attr-defined]
        pkg = entry.child("test_package")
        pkg.makedirs()
        pkg.child("__init__.py").setContent(b"")
        return entry
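A brief sketch (not part of the vendored module; the test class is hypothetical and the import path twisted.python.test.modules_helpers is assumed) of combining the mixin with trial's SynchronousTestCase:

import sys

from twisted.python.test.modules_helpers import TwistedModulesMixin
from twisted.trial.unittest import SynchronousTestCase


class PathManipulationTests(TwistedModulesMixin, SynchronousTestCase):
    def test_packageIsImportable(self):
        """
        A package created by pathEntryWithOnePackage becomes importable once
        its path entry is prepended to sys.path (restored via addCleanup).
        """
        entry = self.pathEntryWithOnePackage()
        self.replaceSysPath([entry.path] + sys.path)
        import test_package  # the temporary package created above
        self.assertTrue(test_package.__file__.startswith(entry.path))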
@@ -0,0 +1,40 @@
# -*- test-case-name: twisted.python.test.test_sendmsg -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

import os
import socket
import sys
from struct import unpack
from typing import Tuple

from twisted.python.sendmsg import recvmsg


def recvfd(socketfd: int) -> Tuple[int, bytes]:
    """
    Receive a file descriptor from a L{sendmsg} message on the given C{AF_UNIX}
    socket.

    @param socketfd: An C{AF_UNIX} socket, attached to another process waiting
        to send sockets via the ancillary data mechanism in L{send1msg}.
    @type socketfd: C{int}

    @return: a 2-tuple of (new file descriptor, description).
    @rtype: 2-tuple of (C{int}, C{bytes})
    """
    ourSocket = socket.fromfd(socketfd, socket.AF_UNIX, socket.SOCK_STREAM)
    data, ancillary, flags = recvmsg(ourSocket)
    [(cmsgLevel, cmsgType, packedFD)] = ancillary
    # cmsgLevel and cmsgType really need to be SOL_SOCKET / SCM_RIGHTS, but
    # since those are the *only* standard values, there's not much point in
    # checking.
    [unpackedFD] = unpack("i", packedFD)
    return (unpackedFD, data)


if __name__ == "__main__":
    fd, description = recvfd(int(sys.argv[1]))
    os.write(fd, b"Test fixture data: " + description + b".\n")
    os.close(fd)
@@ -0,0 +1,35 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Hypothesis strategies for values related to L{twisted.python}.
"""

from hypothesis.strategies import SearchStrategy, characters, text
from typing_extensions import Literal


def systemdDescriptorNames() -> SearchStrategy[str]:
    """
    Build strings that are legal values for the systemd
    I{FileDescriptorName} field.
    """
    # systemd.socket(5) says:
    #
    # > Names may contain any ASCII character, but must exclude control
    # > characters and ":", and must be at most 255 characters in length.
    control_characters: Literal["Cc"] = "Cc"
    return text(
        # The docs don't say there is a min size so I'm guessing...
        min_size=1,
        max_size=255,
        alphabet=characters(
            # These constraints restrict us to ASCII.
            min_codepoint=0,
            max_codepoint=127,
            # This one excludes control characters.
            blacklist_categories=(control_characters,),
            # And this excludes the separator.
            blacklist_characters=(":",),
        ),
    )
@@ -0,0 +1,40 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Tests for the data directory support.
"""


try:
    from twisted.python import _appdirs
except ImportError:
    _appdirs = None  # type: ignore[assignment]

from twisted.trial import unittest


class AppdirsTests(unittest.TestCase):
    """
    Tests for L{_appdirs}.
    """

    if not _appdirs:
        skip = "appdirs package not installed"

    def test_moduleName(self) -> None:
        """
        Calling L{appdirs.getDataDirectory} will return a user data directory
        in the system convention, with the module of the caller as the
        subdirectory.
        """
        res = _appdirs.getDataDirectory()
        self.assertTrue(res.endswith("twisted.python.test.test_appdirs"))

    def test_manual(self) -> None:
        """
        Calling L{appdirs.getDataDirectory} with a C{moduleName} argument will
        make a data directory with that name instead.
        """
        res = _appdirs.getDataDirectory("foo.bar.baz")
        self.assertTrue(res.endswith("foo.bar.baz"))
@@ -0,0 +1,878 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
|
||||
"""
|
||||
Test cases for Twisted component architecture.
|
||||
"""
|
||||
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from zope.interface import Attribute, Interface, implementer
|
||||
from zope.interface.adapter import AdapterRegistry
|
||||
|
||||
from twisted.python import components
|
||||
from twisted.python.compat import cmp, comparable
|
||||
from twisted.python.components import _addHook, _removeHook, proxyForInterface
|
||||
from twisted.trial import unittest
|
||||
|
||||
|
||||
class Compo(components.Componentized):
|
||||
num = 0
|
||||
|
||||
def inc(self):
|
||||
self.num = self.num + 1
|
||||
return self.num
|
||||
|
||||
|
||||
class IAdept(Interface):
|
||||
def adaptorFunc():
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class IElapsed(Interface):
|
||||
def elapsedFunc():
|
||||
"""
|
||||
1!
|
||||
"""
|
||||
|
||||
|
||||
@implementer(IAdept)
|
||||
class Adept(components.Adapter):
|
||||
def __init__(self, orig):
|
||||
self.original = orig
|
||||
self.num = 0
|
||||
|
||||
def adaptorFunc(self):
|
||||
self.num = self.num + 1
|
||||
return self.num, self.original.inc()
|
||||
|
||||
|
||||
@implementer(IElapsed)
|
||||
class Elapsed(components.Adapter):
|
||||
def elapsedFunc(self):
|
||||
return 1
|
||||
|
||||
|
||||
class AComp(components.Componentized):
|
||||
pass
|
||||
|
||||
|
||||
class BComp(AComp):
|
||||
pass
|
||||
|
||||
|
||||
class CComp(BComp):
|
||||
pass
|
||||
|
||||
|
||||
class ITest(Interface):
|
||||
pass
|
||||
|
||||
|
||||
class ITest2(Interface):
|
||||
pass
|
||||
|
||||
|
||||
class ITest3(Interface):
|
||||
pass
|
||||
|
||||
|
||||
class ITest4(Interface):
|
||||
pass
|
||||
|
||||
|
||||
@implementer(ITest, ITest3, ITest4)
|
||||
class Test(components.Adapter):
|
||||
def __init__(self, orig):
|
||||
pass
|
||||
|
||||
|
||||
@implementer(ITest2)
|
||||
class Test2:
|
||||
temporaryAdapter = 1
|
||||
|
||||
def __init__(self, orig):
|
||||
pass
|
||||
|
||||
|
||||
class RegistryUsingMixin:
|
||||
"""
|
||||
Mixin for test cases which modify the global registry somehow.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""
|
||||
Configure L{twisted.python.components.registerAdapter} to mutate an
|
||||
alternate registry to improve test isolation.
|
||||
"""
|
||||
# Create a brand new, empty registry and put it onto the components
|
||||
# module where registerAdapter will use it. Also ensure that it goes
|
||||
# away at the end of the test.
|
||||
scratchRegistry = AdapterRegistry()
|
||||
self.patch(components, "globalRegistry", scratchRegistry)
|
||||
# Hook the new registry up to the adapter lookup system and ensure that
|
||||
# association is also discarded after the test.
|
||||
hook = _addHook(scratchRegistry)
|
||||
self.addCleanup(_removeHook, hook)
|
||||
|
||||
|
||||
class ComponentizedTests(unittest.SynchronousTestCase, RegistryUsingMixin):
|
||||
"""
|
||||
Simple test case for caching in Componentized.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
RegistryUsingMixin.setUp(self)
|
||||
|
||||
components.registerAdapter(Test, AComp, ITest)
|
||||
components.registerAdapter(Test, AComp, ITest3)
|
||||
components.registerAdapter(Test2, AComp, ITest2)
|
||||
|
||||
def testComponentized(self):
|
||||
components.registerAdapter(Adept, Compo, IAdept)
|
||||
components.registerAdapter(Elapsed, Compo, IElapsed)
|
||||
|
||||
c = Compo()
|
||||
assert c.getComponent(IAdept).adaptorFunc() == (1, 1)
|
||||
assert c.getComponent(IAdept).adaptorFunc() == (2, 2)
|
||||
assert IElapsed(IAdept(c)).elapsedFunc() == 1
|
||||
|
||||
def testInheritanceAdaptation(self):
|
||||
c = CComp()
|
||||
co1 = c.getComponent(ITest)
|
||||
co2 = c.getComponent(ITest)
|
||||
co3 = c.getComponent(ITest2)
|
||||
co4 = c.getComponent(ITest2)
|
||||
assert co1 is co2
|
||||
assert co3 is not co4
|
||||
c.removeComponent(co1)
|
||||
co5 = c.getComponent(ITest)
|
||||
co6 = c.getComponent(ITest)
|
||||
assert co5 is co6
|
||||
assert co1 is not co5
|
||||
|
||||
def testMultiAdapter(self):
|
||||
c = CComp()
|
||||
co1 = c.getComponent(ITest)
|
||||
co3 = c.getComponent(ITest3)
|
||||
co4 = c.getComponent(ITest4)
|
||||
self.assertIsNone(co4)
|
||||
self.assertIs(co1, co3)
|
||||
|
||||
def test_getComponentDefaults(self):
|
||||
"""
|
||||
Test that the default value passed to Componentized.getComponent is returned when
|
||||
there is no component for the requested interface.
|
||||
"""
|
||||
componentized = components.Componentized()
|
||||
default = object()
|
||||
self.assertIs(componentized.getComponent(ITest, default), default)
|
||||
self.assertIs(componentized.getComponent(ITest, default=default), default)
|
||||
self.assertIs(componentized.getComponent(ITest), None)
|
||||
|
||||
def test_setAdapter(self):
|
||||
"""
|
||||
C{Componentized.setAdapter} sets a component for an interface by
|
||||
wrapping the instance with the given adapter class.
|
||||
"""
|
||||
componentized = components.Componentized()
|
||||
componentized.setAdapter(IAdept, Adept)
|
||||
component = componentized.getComponent(IAdept)
|
||||
self.assertEqual(component.original, componentized)
|
||||
self.assertIsInstance(component, Adept)
|
||||
|
||||
def test_addAdapter(self):
|
||||
"""
|
||||
C{Componentized.addAdapter} adapts the instance by wrapping it with the
|
||||
given adapter class, then stores it using C{addComponent}.
|
||||
"""
|
||||
componentized = components.Componentized()
|
||||
componentized.addAdapter(Adept, ignoreClass=True)
|
||||
component = componentized.getComponent(IAdept)
|
||||
self.assertEqual(component.original, componentized)
|
||||
self.assertIsInstance(component, Adept)
|
||||
|
||||
def test_setComponent(self):
|
||||
"""
|
||||
C{Componentized.setComponent} stores the given component using the
|
||||
given interface as the key.
|
||||
"""
|
||||
componentized = components.Componentized()
|
||||
obj = object()
|
||||
componentized.setComponent(ITest, obj)
|
||||
self.assertIs(componentized.getComponent(ITest), obj)
|
||||
|
||||
def test_unsetComponent(self):
|
||||
"""
|
||||
C{Componentized.unsetComponent} removes the cached component for the
|
||||
given interface.
|
||||
"""
|
||||
componentized = components.Componentized()
|
||||
obj = object()
|
||||
componentized.setComponent(ITest, obj)
|
||||
componentized.unsetComponent(ITest)
|
||||
self.assertIsNone(componentized.getComponent(ITest))
|
||||
|
||||
def test_reprableComponentized(self):
|
||||
"""
|
||||
C{ReprableComponentized} has a C{__repr__} that lists its cache.
|
||||
"""
|
||||
rc = components.ReprableComponentized()
|
||||
rc.setComponent(ITest, "hello")
|
||||
result = repr(rc)
|
||||
self.assertIn("ITest", result)
|
||||
self.assertIn("hello", result)
|
||||
|
||||
|
||||
class AdapterTests(unittest.SynchronousTestCase):
|
||||
"""Test adapters."""
|
||||
|
||||
def testAdapterGetComponent(self):
|
||||
o = object()
|
||||
a = Adept(o)
|
||||
self.assertRaises(components.CannotAdapt, ITest, a)
|
||||
self.assertIsNone(ITest(a, None))
|
||||
|
||||
|
||||
class IMeta(Interface):
|
||||
pass
|
||||
|
||||
|
||||
@implementer(IMeta)
|
||||
class MetaAdder(components.Adapter):
|
||||
def add(self, num):
|
||||
return self.original.num + num
|
||||
|
||||
|
||||
@implementer(IMeta)
|
||||
class BackwardsAdder(components.Adapter):
|
||||
def add(self, num):
|
||||
return self.original.num - num
|
||||
|
||||
|
||||
class MetaNumber:
|
||||
"""
|
||||
Integer wrapper for Interface adaptation tests.
|
||||
"""
|
||||
|
||||
def __init__(self, num):
|
||||
self.num = num
|
||||
|
||||
|
||||
class ComponentNumber(components.Componentized):
|
||||
def __init__(self):
|
||||
self.num = 0
|
||||
components.Componentized.__init__(self)
|
||||
|
||||
|
||||
@implementer(IMeta)
|
||||
class ComponentAdder(components.Adapter):
|
||||
"""
|
||||
Adder for componentized adapter tests.
|
||||
"""
|
||||
|
||||
def __init__(self, original):
|
||||
components.Adapter.__init__(self, original)
|
||||
self.num = self.original.num
|
||||
|
||||
def add(self, num):
|
||||
self.num += num
|
||||
return self.num
|
||||
|
||||
|
||||
class IAttrX(Interface):
|
||||
"""
|
||||
Base interface for test of adapter with C{__cmp__}.
|
||||
"""
|
||||
|
||||
def x():
|
||||
"""
|
||||
Return a value.
|
||||
"""
|
||||
|
||||
|
||||
class IAttrXX(Interface):
|
||||
"""
|
||||
Adapted interface for test of adapter with C{__cmp__}.
|
||||
"""
|
||||
|
||||
def xx():
|
||||
"""
|
||||
Return a tuple of values.
|
||||
"""
|
||||
|
||||
|
||||
@implementer(IAttrX)
|
||||
class Xcellent:
|
||||
"""
|
||||
L{IAttrX} implementation for test of adapter with C{__cmp__}.
|
||||
"""
|
||||
|
||||
def x(self):
|
||||
"""
|
||||
Return a value.
|
||||
|
||||
@return: a value
|
||||
"""
|
||||
return "x!"
|
||||
|
||||
|
||||
@comparable
|
||||
class DoubleXAdapter:
|
||||
"""
|
||||
Adapter with __cmp__.
|
||||
"""
|
||||
|
||||
num = 42
|
||||
|
||||
def __init__(self, original):
|
||||
self.original = original
|
||||
|
||||
def xx(self):
|
||||
return (self.original.x(), self.original.x())
|
||||
|
||||
def __cmp__(self, other):
|
||||
return cmp(self.num, other.num)
|
||||
|
||||
|
||||
class MetaInterfaceTests(RegistryUsingMixin, unittest.SynchronousTestCase):
|
||||
def test_basic(self):
|
||||
"""
|
||||
Registered adapters can be used to adapt classes to an interface.
|
||||
"""
|
||||
components.registerAdapter(MetaAdder, MetaNumber, IMeta)
|
||||
n = MetaNumber(1)
|
||||
self.assertEqual(IMeta(n).add(1), 2)
|
||||
|
||||
def testComponentizedInteraction(self):
|
||||
components.registerAdapter(ComponentAdder, ComponentNumber, IMeta)
|
||||
c = ComponentNumber()
|
||||
IMeta(c).add(1)
|
||||
IMeta(c).add(1)
|
||||
self.assertEqual(IMeta(c).add(1), 3)
|
||||
|
||||
def testAdapterWithCmp(self):
|
||||
# Make sure that a __cmp__ on an adapter doesn't break anything
|
||||
components.registerAdapter(DoubleXAdapter, IAttrX, IAttrXX)
|
||||
xx = IAttrXX(Xcellent())
|
||||
self.assertEqual(("x!", "x!"), xx.xx())
|
||||
|
||||
|
||||
class RegistrationTests(RegistryUsingMixin, unittest.SynchronousTestCase):
|
||||
"""
|
||||
Tests for adapter registration.
|
||||
"""
|
||||
|
||||
def _registerAdapterForClassOrInterface(self, original):
|
||||
"""
|
||||
Register an adapter with L{components.registerAdapter} for the given
|
||||
class or interface and verify that the adapter can be looked up with
|
||||
L{components.getAdapterFactory}.
|
||||
"""
|
||||
adapter = lambda o: None
|
||||
components.registerAdapter(adapter, original, ITest)
|
||||
self.assertIs(components.getAdapterFactory(original, ITest, None), adapter)
|
||||
|
||||
def test_registerAdapterForClass(self):
|
||||
"""
|
||||
Test that an adapter from a class can be registered and then looked
|
||||
up.
|
||||
"""
|
||||
|
||||
class TheOriginal:
|
||||
pass
|
||||
|
||||
return self._registerAdapterForClassOrInterface(TheOriginal)
|
||||
|
||||
def test_registerAdapterForInterface(self):
|
||||
"""
|
||||
Test that an adapter from an interface can be registered and then
|
||||
looked up.
|
||||
"""
|
||||
return self._registerAdapterForClassOrInterface(ITest2)
|
||||
|
||||
def _duplicateAdapterForClassOrInterface(self, original):
|
||||
"""
|
||||
Verify that L{components.registerAdapter} raises L{ValueError} if the
|
||||
from-type/interface and to-interface pair is not unique.
|
||||
"""
|
||||
firstAdapter = lambda o: False
|
||||
secondAdapter = lambda o: True
|
||||
components.registerAdapter(firstAdapter, original, ITest)
|
||||
self.assertRaises(
|
||||
ValueError, components.registerAdapter, secondAdapter, original, ITest
|
||||
)
|
||||
# Make sure that the original adapter is still around as well
|
||||
self.assertIs(components.getAdapterFactory(original, ITest, None), firstAdapter)
|
||||
|
||||
def test_duplicateAdapterForClass(self):
|
||||
"""
|
||||
Test that attempting to register a second adapter from a class
|
||||
raises the appropriate exception.
|
||||
"""
|
||||
|
||||
class TheOriginal:
|
||||
pass
|
||||
|
||||
return self._duplicateAdapterForClassOrInterface(TheOriginal)
|
||||
|
||||
def test_duplicateAdapterForInterface(self):
|
||||
"""
|
||||
Test that attempting to register a second adapter from an interface
|
||||
raises the appropriate exception.
|
||||
"""
|
||||
return self._duplicateAdapterForClassOrInterface(ITest2)
|
||||
|
||||
def _[AWS-SECRET-REMOVED]ed(self, original):
|
||||
"""
|
||||
Verify that when C{components.ALLOW_DUPLICATES} is set to C{True}, new
|
||||
adapter registrations for a particular from-type/interface and
|
||||
to-interface pair replace older registrations.
|
||||
"""
|
||||
firstAdapter = lambda o: False
|
||||
secondAdapter = lambda o: True
|
||||
|
||||
class TheInterface(Interface):
|
||||
pass
|
||||
|
||||
components.registerAdapter(firstAdapter, original, TheInterface)
|
||||
components.ALLOW_DUPLICATES = True
|
||||
try:
|
||||
components.registerAdapter(secondAdapter, original, TheInterface)
|
||||
self.assertIs(
|
||||
components.getAdapterFactory(original, TheInterface, None),
|
||||
secondAdapter,
|
||||
)
|
||||
finally:
|
||||
components.ALLOW_DUPLICATES = False
|
||||
|
||||
# It should be rejected again at this point
|
||||
self.assertRaises(
|
||||
ValueError, components.registerAdapter, firstAdapter, original, TheInterface
|
||||
)
|
||||
|
||||
self.assertIs(
|
||||
components.getAdapterFactory(original, TheInterface, None), secondAdapter
|
||||
)
|
||||
|
||||
def test_duplicateAdapterForClassAllowed(self):
|
||||
"""
|
||||
Test that when L{components.ALLOW_DUPLICATES} is set to a true
|
||||
value, duplicate registrations from classes are allowed to override
|
||||
the original registration.
|
||||
"""
|
||||
|
||||
class TheOriginal:
|
||||
pass
|
||||
|
||||
return self._[AWS-SECRET-REMOVED]ed(TheOriginal)
|
||||
|
||||
def test_duplicateAdapterForInterfaceAllowed(self):
|
||||
"""
|
||||
Test that when L{components.ALLOW_DUPLICATES} is set to a true
|
||||
value, duplicate registrations from interfaces are allowed to
|
||||
override the original registration.
|
||||
"""
|
||||
|
||||
class TheOriginal(Interface):
|
||||
pass
|
||||
|
||||
return self._[AWS-SECRET-REMOVED]ed(TheOriginal)
|
||||
|
||||
def _multipleInterfacesForClassOrInterface(self, original):
|
||||
"""
|
||||
Verify that an adapter can be registered for multiple to-interfaces at a
|
||||
time.
|
||||
"""
|
||||
adapter = lambda o: None
|
||||
components.registerAdapter(adapter, original, ITest, ITest2)
|
||||
self.assertIs(components.getAdapterFactory(original, ITest, None), adapter)
|
||||
self.assertIs(components.getAdapterFactory(original, ITest2, None), adapter)
|
||||
|
||||
def test_multipleInterfacesForClass(self):
|
||||
"""
|
||||
Test the registration of an adapter from a class to several
|
||||
interfaces at once.
|
||||
"""
|
||||
|
||||
class TheOriginal:
|
||||
pass
|
||||
|
||||
return self._multipleInterfacesForClassOrInterface(TheOriginal)
|
||||
|
||||
def test_multipleInterfacesForInterface(self):
|
||||
"""
|
||||
Test the registration of an adapter from an interface to several
|
||||
interfaces at once.
|
||||
"""
|
||||
return self._multipleInterfacesForClassOrInterface(ITest3)
|
||||
|
||||
def _[AWS-SECRET-REMOVED]erface(self, original):
|
||||
"""
|
||||
Verify that a new adapter can be registered for a particular
|
||||
to-interface from a subclass of a type or interface which already has an
|
||||
adapter registered to that interface and that the subclass adapter takes
|
||||
precedence over the base class adapter.
|
||||
"""
|
||||
firstAdapter = lambda o: True
|
||||
secondAdapter = lambda o: False
|
||||
|
||||
class TheSubclass(original):
|
||||
pass
|
||||
|
||||
components.registerAdapter(firstAdapter, original, ITest)
|
||||
components.registerAdapter(secondAdapter, TheSubclass, ITest)
|
||||
self.assertIs(components.getAdapterFactory(original, ITest, None), firstAdapter)
|
||||
self.assertIs(
|
||||
components.getAdapterFactory(TheSubclass, ITest, None), secondAdapter
|
||||
)
|
||||
|
||||
def test_subclassAdapterRegistrationForClass(self):
|
||||
"""
|
||||
Test that an adapter to a particular interface can be registered
|
||||
from both a class and its subclass.
|
||||
"""
|
||||
|
||||
class TheOriginal:
|
||||
pass
|
||||
|
||||
return self._[AWS-SECRET-REMOVED]erface(TheOriginal)
|
||||
|
||||
def test_subclassAdapterRegistrationForInterface(self):
|
||||
"""
|
||||
Test that an adapter to a particular interface can be registered
|
||||
from both an interface and its subclass.
|
||||
"""
|
||||
return self._[AWS-SECRET-REMOVED]erface(ITest2)
|
||||
|
||||
|
||||
class IProxiedInterface(Interface):
|
||||
"""
|
||||
An interface class for use by L{proxyForInterface}.
|
||||
"""
|
||||
|
||||
ifaceAttribute = Attribute(
|
||||
"""
|
||||
An example declared attribute, which should be proxied."""
|
||||
)
|
||||
|
||||
def yay(*a, **kw):
|
||||
"""
|
||||
A sample method which should be proxied.
|
||||
"""
|
||||
|
||||
|
||||
class IProxiedSubInterface(IProxiedInterface):
|
||||
"""
|
||||
An interface that derives from another for use with L{proxyForInterface}.
|
||||
"""
|
||||
|
||||
def boo():
|
||||
"""
|
||||
A different sample method which should be proxied.
|
||||
"""
|
||||
|
||||
|
||||
@implementer(IProxiedInterface)
|
||||
class Yayable: # type: ignore[misc]
|
||||
# class does not implement Attribute ifaceAttribute
|
||||
# so we need to turn off mypy warning
|
||||
"""
|
||||
A provider of L{IProxiedInterface} which increments a counter for
|
||||
every call to C{yay}.
|
||||
|
||||
@ivar yays: The number of times C{yay} has been called.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.yays = 0
|
||||
self.yayArgs = []
|
||||
|
||||
def yay(self, *a, **kw):
|
||||
"""
|
||||
Increment C{self.yays}.
|
||||
"""
|
||||
self.yays += 1
|
||||
self.yayArgs.append((a, kw))
|
||||
return self.yays
|
||||
|
||||
|
||||
@implementer(IProxiedSubInterface)
|
||||
class Booable: # type: ignore[misc]
|
||||
# class does not implement Attribute ifaceAttribute
|
||||
# so we need to turn off mypy warning
|
||||
"""
|
||||
An implementation of IProxiedSubInterface
|
||||
"""
|
||||
|
||||
yayed = False
|
||||
booed = False
|
||||
|
||||
def yay(self, *a, **kw):
|
||||
"""
|
||||
Mark the fact that 'yay' has been called.
|
||||
"""
|
||||
self.yayed = True
|
||||
|
||||
def boo(self):
|
||||
"""
|
||||
Mark the fact that 'boo' has been called.
|
||||
"""
|
||||
self.booed = True
|
||||
|
||||
|
||||
class IMultipleMethods(Interface):
|
||||
"""
|
||||
An interface with multiple methods.
|
||||
"""
|
||||
|
||||
def methodOne():
|
||||
"""
|
||||
The first method. Should return 1.
|
||||
"""
|
||||
|
||||
def methodTwo():
|
||||
"""
|
||||
The second method. Should return 2.
|
||||
"""
|
||||
|
||||
|
||||
class MultipleMethodImplementor:
|
||||
"""
|
||||
A precise implementation of L{IMultipleMethods}.
|
||||
"""
|
||||
|
||||
def methodOne(self):
|
||||
"""
|
||||
@return: 1
|
||||
"""
|
||||
return 1
|
||||
|
||||
def methodTwo(self):
|
||||
"""
|
||||
@return: 2
|
||||
"""
|
||||
return 2
|
||||
|
||||
|
||||
class ProxyForInterfaceTests(unittest.SynchronousTestCase):
|
||||
"""
|
||||
Tests for L{proxyForInterface}.
|
||||
"""
|
||||
|
||||
def test_original(self):
|
||||
"""
|
||||
Proxy objects should have an C{original} attribute which refers to the
|
||||
original object passed to the constructor.
|
||||
"""
|
||||
original = object()
|
||||
proxy = proxyForInterface(IProxiedInterface)(original)
|
||||
self.assertIs(proxy.original, original)
|
||||
|
||||
def test_proxyMethod(self):
|
||||
"""
|
||||
The class created from L{proxyForInterface} passes methods on an
|
||||
interface to the object which is passed to its constructor.
|
||||
"""
|
||||
klass = proxyForInterface(IProxiedInterface)
|
||||
yayable = Yayable()
|
||||
proxy = klass(yayable)
|
||||
proxy.yay()
|
||||
self.assertEqual(proxy.yay(), 2)
|
||||
self.assertEqual(yayable.yays, 2)
|
||||
|
||||
def test_decoratedProxyMethod(self):
|
||||
"""
|
||||
Methods of the class created from L{proxyForInterface} can be used with
|
||||
the decorator-helper L{functools.wraps}.
|
||||
"""
|
||||
base = proxyForInterface(IProxiedInterface)
|
||||
|
||||
class klass(base):
|
||||
@wraps(base.yay)
|
||||
def yay(self):
|
||||
self.original.yays += 1
|
||||
return base.yay(self)
|
||||
|
||||
original = Yayable()
|
||||
yayable = klass(original)
|
||||
yayable.yay()
|
||||
self.assertEqual(2, original.yays)
|
||||
|
||||
def test_proxyAttribute(self):
|
||||
"""
|
||||
Proxy objects should proxy declared attributes, but not other
|
||||
attributes.
|
||||
"""
|
||||
yayable = Yayable()
|
||||
yayable.ifaceAttribute = object()
|
||||
proxy = proxyForInterface(IProxiedInterface)(yayable)
|
||||
self.assertIs(proxy.ifaceAttribute, yayable.ifaceAttribute)
|
||||
self.assertRaises(AttributeError, lambda: proxy.yays)
|
||||
|
||||
def test_proxySetAttribute(self):
|
||||
"""
|
||||
The attributes that proxy objects proxy should be assignable and affect
|
||||
the original object.
|
||||
"""
|
||||
yayable = Yayable()
|
||||
proxy = proxyForInterface(IProxiedInterface)(yayable)
|
||||
thingy = object()
|
||||
proxy.ifaceAttribute = thingy
|
||||
self.assertIs(yayable.ifaceAttribute, thingy)
|
||||
|
||||
def test_proxyDeleteAttribute(self):
|
||||
"""
|
||||
The attributes that proxy objects proxy should be deletable and affect
|
||||
the original object.
|
||||
"""
|
||||
yayable = Yayable()
|
||||
yayable.ifaceAttribute = None
|
||||
proxy = proxyForInterface(IProxiedInterface)(yayable)
|
||||
del proxy.ifaceAttribute
|
||||
self.assertFalse(hasattr(yayable, "ifaceAttribute"))
|
||||
|
||||
def test_multipleMethods(self):
|
||||
"""
|
||||
[Regression test] The proxy should send its method calls to the correct
|
||||
method, not the incorrect one.
|
||||
"""
|
||||
multi = MultipleMethodImplementor()
|
||||
proxy = proxyForInterface(IMultipleMethods)(multi)
|
||||
self.assertEqual(proxy.methodOne(), 1)
|
||||
self.assertEqual(proxy.methodTwo(), 2)
|
||||
|
||||
def test_subclassing(self):
|
||||
"""
|
||||
It is possible to subclass the result of L{proxyForInterface}.
|
||||
"""
|
||||
|
||||
class SpecializedProxy(proxyForInterface(IProxiedInterface)):
|
||||
"""
|
||||
A specialized proxy which can decrement the number of yays.
|
||||
"""
|
||||
|
||||
def boo(self):
|
||||
"""
|
||||
Decrement the number of yays.
|
||||
"""
|
||||
self.original.yays -= 1
|
||||
|
||||
yayable = Yayable()
|
||||
special = SpecializedProxy(yayable)
|
||||
self.assertEqual(yayable.yays, 0)
|
||||
special.boo()
|
||||
self.assertEqual(yayable.yays, -1)
|
||||
|
||||
def test_proxyName(self):
|
||||
"""
|
||||
The name of a proxy class indicates which interface it proxies.
|
||||
"""
|
||||
proxy = proxyForInterface(IProxiedInterface)
|
||||
self.assertEqual(
|
||||
proxy.__name__,
|
||||
"(Proxy for " "twisted.python.test.test_components.IProxiedInterface)",
|
||||
)
|
||||
|
||||
def test_implements(self):
|
||||
"""
|
||||
The resulting proxy implements the interface that it proxies.
|
||||
"""
|
||||
proxy = proxyForInterface(IProxiedInterface)
|
||||
self.assertTrue(IProxiedInterface.implementedBy(proxy))
|
||||
|
||||
def test_proxyDescriptorGet(self):
|
||||
"""
|
||||
_ProxyDescriptor's __get__ method should return the appropriate
|
||||
attribute of its argument's 'original' attribute if it is invoked with
|
||||
an object. If it is invoked with None, it should return a false
|
||||
class-method emulator instead.
|
||||
|
||||
For some reason, Python's documentation recommends defining
|
||||
descriptors' __get__ methods with the 'type' parameter as optional,
|
||||
despite the fact that Python itself never actually calls the descriptor
|
||||
that way. This is probably to support 'foo.__get__(bar)' as an
|
||||
idiom. Let's make sure that the behavior is correct. Since we don't
|
||||
actually use the 'type' argument at all, this test calls it the
|
||||
idiomatic way to ensure that signature works; test_proxyInheritance
|
||||
verifies the how-Python-actually-calls-it signature.
|
||||
"""
|
||||
|
||||
class Sample:
|
||||
called = False
|
||||
|
||||
def hello(self):
|
||||
self.called = True
|
||||
|
||||
fakeProxy = Sample()
|
||||
testObject = Sample()
|
||||
fakeProxy.original = testObject
|
||||
pd = components._ProxyDescriptor("hello", "original")
|
||||
self.assertEqual(pd.__get__(fakeProxy), testObject.hello)
|
||||
fakeClassMethod = pd.__get__(None)
|
||||
fakeClassMethod(fakeProxy)
|
||||
self.assertTrue(testObject.called)
|
||||
|
||||
def test_proxyInheritance(self):
|
||||
"""
|
||||
Subclasses of the class returned from L{proxyForInterface} should be
|
||||
able to upcall methods by reference to their superclass, as any normal
|
||||
Python class can.
|
||||
"""
|
||||
|
||||
class YayableWrapper(proxyForInterface(IProxiedInterface)):
|
||||
"""
|
||||
This class does not override any functionality.
|
||||
"""
|
||||
|
||||
class EnhancedWrapper(YayableWrapper):
|
||||
"""
|
||||
This class overrides the 'yay' method.
|
||||
"""
|
||||
|
||||
wrappedYays = 1
|
||||
|
||||
def yay(self, *a, **k):
|
||||
self.wrappedYays += 1
|
||||
return YayableWrapper.yay(self, *a, **k) + 7
|
||||
|
||||
yayable = Yayable()
|
||||
wrapper = EnhancedWrapper(yayable)
|
||||
self.assertEqual(wrapper.yay(3, 4, x=5, y=6), 8)
|
||||
self.assertEqual(yayable.yayArgs, [((3, 4), dict(x=5, y=6))])
|
||||
|
||||
def test_interfaceInheritance(self):
|
||||
"""
|
||||
Proxies of subinterfaces generated with proxyForInterface should allow
|
||||
access to attributes of both the child and the base interfaces.
|
||||
"""
|
||||
proxyClass = proxyForInterface(IProxiedSubInterface)
|
||||
booable = Booable()
|
||||
proxy = proxyClass(booable)
|
||||
proxy.yay()
|
||||
proxy.boo()
|
||||
self.assertTrue(booable.yayed)
|
||||
self.assertTrue(booable.booed)
|
||||
|
||||
def test_attributeCustomization(self):
|
||||
"""
|
||||
The original attribute name can be customized via the
|
||||
C{originalAttribute} argument of L{proxyForInterface}: the attribute
|
||||
should change, but the methods of the original object should still be
|
||||
callable, and the attributes still accessible.
|
||||
"""
|
||||
yayable = Yayable()
|
||||
yayable.ifaceAttribute = object()
|
||||
proxy = proxyForInterface(IProxiedInterface, originalAttribute="foo")(yayable)
|
||||
self.assertIs(proxy.foo, yayable)
|
||||
|
||||
# Check the behavior
|
||||
self.assertEqual(proxy.yay(), 1)
|
||||
self.assertIs(proxy.ifaceAttribute, yayable.ifaceAttribute)
|
||||
thingy = object()
|
||||
proxy.ifaceAttribute = thingy
|
||||
self.assertIs(yayable.ifaceAttribute, thingy)
|
||||
del proxy.ifaceAttribute
|
||||
self.assertFalse(hasattr(yayable, "ifaceAttribute"))
|
||||
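# Illustrative sketch (not part of the test module above): the behaviour the
# proxyForInterface tests verify, shown end to end. IGreeter and Greeter are
# hypothetical names introduced only for this example.
from zope.interface import Attribute, Interface, implementer

from twisted.python.components import proxyForInterface


class IGreeter(Interface):
    greeting = Attribute("A greeting string.")

    def greet():
        """Return the greeting."""


@implementer(IGreeter)
class Greeter:
    greeting = "hello"

    def greet(self):
        return self.greeting


proxy = proxyForInterface(IGreeter)(Greeter())
assert proxy.greet() == "hello"        # declared methods are forwarded
proxy.greeting = "hi"                  # declared attribute writes reach the original
assert proxy.original.greeting == "hi"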
@@ -0,0 +1,453 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.fakepwd}.
|
||||
"""
|
||||
|
||||
try:
|
||||
import pwd as _pwd
|
||||
except ImportError:
|
||||
pwd = None
|
||||
else:
|
||||
pwd = _pwd
|
||||
|
||||
try:
|
||||
import spwd as _spwd
|
||||
except ImportError:
|
||||
spwd = None
|
||||
else:
|
||||
spwd = _spwd
|
||||
|
||||
import os
|
||||
from operator import getitem
|
||||
|
||||
from twisted.python.compat import _PYPY
|
||||
from twisted.python.fakepwd import ShadowDatabase, UserDatabase
|
||||
from twisted.trial.unittest import TestCase
|
||||
|
||||
SYSTEM_UID_MAX = 999
|
||||
|
||||
|
||||
def findInvalidUID():
|
||||
"""
|
||||
By convention, UIDs less than 1000 are reserved for the system. A system
|
||||
which allocated every single one of those UIDs would likely have practical
|
||||
problems with allocating new ones, so let's assume that we'll be able to
|
||||
find one. (If we don't, this will wrap around to negative values and
|
||||
I{eventually} find something.)
|
||||
|
||||
@return: a user ID which does not exist on the local system. Or, on
|
||||
systems without a L{pwd} module, return C{SYSTEM_UID_MAX}.
|
||||
"""
|
||||
guess = SYSTEM_UID_MAX
|
||||
if pwd is not None:
|
||||
while True:
|
||||
try:
|
||||
pwd.getpwuid(guess)
|
||||
except KeyError:
|
||||
break
|
||||
else:
|
||||
guess -= 1
|
||||
return guess
|
||||
|
||||
|
||||
INVALID_UID = findInvalidUID()
|
||||
|
||||
|
||||
class UserDatabaseTestsMixin:
|
||||
"""
|
||||
L{UserDatabaseTestsMixin} defines tests which apply to any user database
|
||||
implementation. Subclasses should mix it in, implement C{setUp} to create
|
||||
C{self.database} bound to a user database instance, and implement
|
||||
C{getExistingUserInfo} to return information about a user (such information
|
||||
should be unique per test method).
|
||||
"""
|
||||
|
||||
def test_getpwuid(self):
|
||||
"""
|
||||
I{getpwuid} accepts a uid and returns the user record associated with
|
||||
it.
|
||||
"""
|
||||
for i in range(2):
|
||||
# Get some user which exists in the database.
|
||||
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
|
||||
|
||||
# Now try to look it up and make sure the result is correct.
|
||||
entry = self.database.getpwuid(uid)
|
||||
self.assertEqual(entry.pw_name, username)
|
||||
self.assertEqual(entry.pw_passwd, password)
|
||||
self.assertEqual(entry.pw_uid, uid)
|
||||
self.assertEqual(entry.pw_gid, gid)
|
||||
self.assertEqual(entry.pw_gecos, gecos)
|
||||
self.assertEqual(entry.pw_dir, dir)
|
||||
self.assertEqual(entry.pw_shell, shell)
|
||||
|
||||
def test_noSuchUID(self):
|
||||
"""
|
||||
I{getpwuid} raises L{KeyError} when passed a uid which does not exist
|
||||
in the user database.
|
||||
"""
|
||||
self.assertRaises(KeyError, self.database.getpwuid, INVALID_UID)
|
||||
|
||||
def test_getpwnam(self):
|
||||
"""
|
||||
I{getpwnam} accepts a username and returns the user record associated
|
||||
with it.
|
||||
"""
|
||||
for i in range(2):
|
||||
# Get some user which exists in the database.
|
||||
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
|
||||
|
||||
# Now try to look it up and make sure the result is correct.
|
||||
entry = self.database.getpwnam(username)
|
||||
self.assertEqual(entry.pw_name, username)
|
||||
self.assertEqual(entry.pw_passwd, password)
|
||||
self.assertEqual(entry.pw_uid, uid)
|
||||
self.assertEqual(entry.pw_gid, gid)
|
||||
self.assertEqual(entry.pw_gecos, gecos)
|
||||
self.assertEqual(entry.pw_dir, dir)
|
||||
self.assertEqual(entry.pw_shell, shell)
|
||||
|
||||
def test_getpwnamRejectsBytes(self):
|
||||
"""
|
||||
L{getpwnam} rejects a non-L{str} username with an exception.
|
||||
"""
|
||||
exc_type = TypeError
|
||||
if _PYPY:
|
||||
# PyPy raises KeyError instead of TypeError. See
|
||||
# https://foss.heptapod.net/pypy/pypy/-/issues/3624
|
||||
exc_type = Exception
|
||||
self.assertRaises(exc_type, self.database.getpwnam, b"i-am-bytes")
|
||||
|
||||
def test_noSuchName(self):
|
||||
"""
|
||||
I{getpwnam} raises L{KeyError} when passed a username which does not
|
||||
exist in the user database.
|
||||
"""
|
||||
self.assertRaises(
|
||||
KeyError,
|
||||
self.database.getpwnam,
|
||||
"no"
|
||||
"such"
|
||||
"user"
|
||||
"exists"
|
||||
"the"
|
||||
"name"
|
||||
"is"
|
||||
"too"
|
||||
"long"
|
||||
"and"
|
||||
"has"
|
||||
"\1"
|
||||
"in"
|
||||
"it"
|
||||
"too",
|
||||
)
|
||||
|
||||
def test_recordLength(self):
|
||||
"""
|
||||
The user record returned by I{getpwuid}, I{getpwnam}, and I{getpwall}
|
||||
has a length.
|
||||
"""
|
||||
db = self.database
|
||||
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
|
||||
for entry in [db.getpwuid(uid), db.getpwnam(username), db.getpwall()[0]]:
|
||||
self.assertIsInstance(len(entry), int)
|
||||
self.assertEqual(len(entry), 7)
|
||||
|
||||
def test_recordIndexable(self):
|
||||
"""
|
||||
The user record returned by I{getpwuid}, I{getpwnam}, and I{getpwall}
|
||||
is indexable, with successive indexes starting from 0 corresponding to
|
||||
the values of the C{pw_name}, C{pw_passwd}, C{pw_uid}, C{pw_gid},
|
||||
C{pw_gecos}, C{pw_dir}, and C{pw_shell} attributes, respectively.
|
||||
"""
|
||||
db = self.database
|
||||
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
|
||||
for entry in [db.getpwuid(uid), db.getpwnam(username), db.getpwall()[0]]:
|
||||
self.assertEqual(entry[0], username)
|
||||
self.assertEqual(entry[1], password)
|
||||
self.assertEqual(entry[2], uid)
|
||||
self.assertEqual(entry[3], gid)
|
||||
self.assertEqual(entry[4], gecos)
|
||||
self.assertEqual(entry[5], dir)
|
||||
self.assertEqual(entry[6], shell)
|
||||
|
||||
self.assertEqual(len(entry), len(list(entry)))
|
||||
self.assertRaises(IndexError, getitem, entry, 7)
|
||||
|
||||
|
||||
class UserDatabaseTests(TestCase, UserDatabaseTestsMixin):
|
||||
"""
|
||||
Tests for L{UserDatabase}.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""
|
||||
Create a L{UserDatabase} with no user data in it.
|
||||
"""
|
||||
self.database = UserDatabase()
|
||||
self._counter = SYSTEM_UID_MAX + 1
|
||||
|
||||
def getExistingUserInfo(self):
|
||||
"""
|
||||
Add a new user to C{self.database} and return its information.
|
||||
"""
|
||||
self._counter += 1
|
||||
suffix = "_" + str(self._counter)
|
||||
username = "username" + suffix
|
||||
[PASSWORD-REMOVED]" + suffix
|
||||
uid = self._counter
|
||||
gid = self._counter + 1000
|
||||
gecos = "gecos" + suffix
|
||||
dir = "dir" + suffix
|
||||
shell = "shell" + suffix
|
||||
|
||||
self.database.addUser(username, password, uid, gid, gecos, dir, shell)
|
||||
return (username, password, uid, gid, gecos, dir, shell)
|
||||
|
||||
def test_addUser(self):
|
||||
"""
|
||||
L{UserDatabase.addUser} accepts seven arguments, one for each field of
|
||||
a L{pwd.struct_passwd}, and makes the new record available via
|
||||
L{UserDatabase.getpwuid}, L{UserDatabase.getpwnam}, and
|
||||
L{UserDatabase.getpwall}.
|
||||
"""
|
||||
username = "alice"
|
||||
password = "secr3t"
|
||||
uid = 123
|
||||
gid = 456
|
||||
gecos = "Alice,,,"
|
||||
home = "/users/alice"
|
||||
shell = "/usr/bin/foosh"
|
||||
|
||||
db = self.database
|
||||
db.addUser(username, password, uid, gid, gecos, home, shell)
|
||||
|
||||
for [entry] in [[db.getpwuid(uid)], [db.getpwnam(username)], db.getpwall()]:
|
||||
self.assertEqual(entry.pw_name, username)
|
||||
self.assertEqual(entry.pw_passwd, password)
|
||||
self.assertEqual(entry.pw_uid, uid)
|
||||
self.assertEqual(entry.pw_gid, gid)
|
||||
self.assertEqual(entry.pw_gecos, gecos)
|
||||
self.assertEqual(entry.pw_dir, home)
|
||||
self.assertEqual(entry.pw_shell, shell)
|
||||
|
||||
|
||||
class PwdModuleTests(TestCase, UserDatabaseTestsMixin):
|
||||
"""
|
||||
L{PwdModuleTests} runs the tests defined by L{UserDatabaseTestsMixin}
|
||||
against the built-in C{pwd} module. This serves to verify that
|
||||
L{UserDatabase} is really a fake of that API.
|
||||
"""
|
||||
|
||||
if pwd is None:
|
||||
skip = "Cannot verify UserDatabase against pwd without pwd"
|
||||
else:
|
||||
database = pwd
|
||||
|
||||
def setUp(self):
|
||||
self._users = iter(self.database.getpwall())
|
||||
self._uids = set()
|
||||
|
||||
def getExistingUserInfo(self):
|
||||
"""
|
||||
Read and return the next record from C{self._users}, filtering out
|
||||
any records with previously seen uid values (as these cannot be
|
||||
found with C{getpwuid} and only cause trouble).
|
||||
"""
|
||||
while True:
|
||||
entry = next(self._users)
|
||||
uid = entry.pw_uid
|
||||
if uid not in self._uids:
|
||||
self._uids.add(uid)
|
||||
return entry
|
||||
|
||||
|
||||
class ShadowDatabaseTestsMixin:
|
||||
"""
|
||||
L{ShadowDatabaseTestsMixin} defines tests which apply to any shadow user
|
||||
database implementation. Subclasses should mix it in, implement C{setUp} to
|
||||
create C{self.database} bound to a shadow user database instance, and
|
||||
implement C{getExistingUserInfo} to return information about a user (such
|
||||
information should be unique per test method).
|
||||
"""
|
||||
|
||||
def test_getspnam(self):
|
||||
"""
|
||||
L{getspnam} accepts a username and returns the user record associated
|
||||
with it.
|
||||
"""
|
||||
for i in range(2):
|
||||
# Get some user which exists in the database.
|
||||
(
|
||||
username,
|
||||
password,
|
||||
lastChange,
|
||||
min,
|
||||
max,
|
||||
warn,
|
||||
inact,
|
||||
expire,
|
||||
flag,
|
||||
) = self.getExistingUserInfo()
|
||||
|
||||
entry = self.database.getspnam(username)
|
||||
self.assertEqual(entry.sp_nam, username)
|
||||
self.assertEqual(entry.sp_pwd, password)
|
||||
self.assertEqual(entry.sp_lstchg, lastChange)
|
||||
self.assertEqual(entry.sp_min, min)
|
||||
self.assertEqual(entry.sp_max, max)
|
||||
self.assertEqual(entry.sp_warn, warn)
|
||||
self.assertEqual(entry.sp_inact, inact)
|
||||
self.assertEqual(entry.sp_expire, expire)
|
||||
self.assertEqual(entry.sp_flag, flag)
|
||||
|
||||
def test_noSuchName(self):
|
||||
"""
|
||||
I{getspnam} raises L{KeyError} when passed a username which does not
|
||||
exist in the user database.
|
||||
"""
|
||||
self.assertRaises(KeyError, self.database.getspnam, "alice")
|
||||
|
||||
def test_getspnamBytes(self):
|
||||
"""
|
||||
I{getspnam} raises L{TypeError} when passed a L{bytes}, just like
|
||||
L{spwd.getspnam}.
|
||||
"""
|
||||
self.assertRaises(TypeError, self.database.getspnam, b"i-am-bytes")
|
||||
|
||||
def test_recordLength(self):
|
||||
"""
|
||||
The shadow user record returned by I{getspnam} and I{getspall} has a
|
||||
length.
|
||||
"""
|
||||
db = self.database
|
||||
username = self.getExistingUserInfo()[0]
|
||||
for entry in [db.getspnam(username), db.getspall()[0]]:
|
||||
self.assertIsInstance(len(entry), int)
|
||||
self.assertEqual(len(entry), 9)
|
||||
|
||||
def test_recordIndexable(self):
|
||||
"""
|
||||
The shadow user record returned by I{getspnam} and I{getspall} is
|
||||
indexable, with successive indexes starting from 0 corresponding to the
|
||||
values of the C{sp_nam}, C{sp_pwd}, C{sp_lstchg}, C{sp_min}, C{sp_max},
|
||||
C{sp_warn}, C{sp_inact}, C{sp_expire}, and C{sp_flag} attributes,
|
||||
respectively.
|
||||
"""
|
||||
db = self.database
|
||||
(
|
||||
username,
|
||||
password,
|
||||
lastChange,
|
||||
min,
|
||||
max,
|
||||
warn,
|
||||
inact,
|
||||
expire,
|
||||
flag,
|
||||
) = self.getExistingUserInfo()
|
||||
for entry in [db.getspnam(username), db.getspall()[0]]:
|
||||
self.assertEqual(entry[0], username)
|
||||
self.assertEqual(entry[1], password)
|
||||
self.assertEqual(entry[2], lastChange)
|
||||
self.assertEqual(entry[3], min)
|
||||
self.assertEqual(entry[4], max)
|
||||
self.assertEqual(entry[5], warn)
|
||||
self.assertEqual(entry[6], inact)
|
||||
self.assertEqual(entry[7], expire)
|
||||
self.assertEqual(entry[8], flag)
|
||||
|
||||
self.assertEqual(len(entry), len(list(entry)))
|
||||
self.assertRaises(IndexError, getitem, entry, 9)
|
||||
|
||||
|
||||
class ShadowDatabaseTests(TestCase, ShadowDatabaseTestsMixin):
|
||||
"""
|
||||
Tests for L{ShadowDatabase}.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""
|
||||
Create a L{ShadowDatabase} with no user data in it.
|
||||
"""
|
||||
self.database = ShadowDatabase()
|
||||
self._counter = 0
|
||||
|
||||
def getExistingUserInfo(self):
|
||||
"""
|
||||
Add a new user to C{self.database} and return its information.
|
||||
"""
|
||||
self._counter += 1
|
||||
suffix = "_" + str(self._counter)
|
||||
username = "username" + suffix
|
||||
[PASSWORD-REMOVED]" + suffix
|
||||
lastChange = self._counter + 1
|
||||
min = self._counter + 2
|
||||
max = self._counter + 3
|
||||
warn = self._counter + 4
|
||||
inact = self._counter + 5
|
||||
expire = self._counter + 6
|
||||
flag = self._counter + 7
|
||||
|
||||
self.database.addUser(
|
||||
username, password, lastChange, min, max, warn, inact, expire, flag
|
||||
)
|
||||
return (username, password, lastChange, min, max, warn, inact, expire, flag)
|
||||
|
||||
def test_addUser(self):
|
||||
"""
|
||||
L{UserDatabase.addUser} accepts seven arguments, one for each field of
|
||||
a L{pwd.struct_passwd}, and makes the new record available via
|
||||
L{UserDatabase.getpwuid}, L{UserDatabase.getpwnam}, and
|
||||
L{UserDatabase.getpwall}.
|
||||
"""
|
||||
username = "alice"
|
||||
password = "secr3t"
|
||||
lastChange = 17
|
||||
min = 42
|
||||
max = 105
|
||||
warn = 12
|
||||
inact = 3
|
||||
expire = 400
|
||||
flag = 3
|
||||
|
||||
db = self.database
|
||||
db.addUser(username, password, lastChange, min, max, warn, inact, expire, flag)
|
||||
|
||||
for [entry] in [[db.getspnam(username)], db.getspall()]:
|
||||
self.assertEqual(entry.sp_nam, username)
|
||||
self.assertEqual(entry.sp_pwd, password)
|
||||
self.assertEqual(entry.sp_lstchg, lastChange)
|
||||
self.assertEqual(entry.sp_min, min)
|
||||
self.assertEqual(entry.sp_max, max)
|
||||
self.assertEqual(entry.sp_warn, warn)
|
||||
self.assertEqual(entry.sp_inact, inact)
|
||||
self.assertEqual(entry.sp_expire, expire)
|
||||
self.assertEqual(entry.sp_flag, flag)
|
||||
|
||||
|
||||
class SPwdModuleTests(TestCase, ShadowDatabaseTestsMixin):
|
||||
"""
|
||||
L{SPwdModuleTests} runs the tests defined by L{ShadowDatabaseTestsMixin}
|
||||
against the built-in C{spwd} module. This serves to verify that
|
||||
L{ShadowDatabase} is really a fake of that API.
|
||||
"""
|
||||
|
||||
if spwd is None:
|
||||
skip = "Cannot verify ShadowDatabase against spwd without spwd"
|
||||
elif os.getuid() != 0:
|
||||
skip = "Cannot access shadow user database except as root"
|
||||
else:
|
||||
database = spwd
|
||||
|
||||
def setUp(self):
|
||||
self._users = iter(self.database.getspall())
|
||||
|
||||
def getExistingUserInfo(self):
|
||||
"""
|
||||
Read and return the next record from C{self._users}.
|
||||
"""
|
||||
return next(self._users)
|
||||
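# Illustrative sketch (not part of the test module above): using the fake
# verified by UserDatabaseTestsMixin in place of the stdlib pwd module.
from twisted.python.fakepwd import UserDatabase

db = UserDatabase()
db.addUser("alice", "secr3t", 1000, 1000, "Alice,,,", "/home/alice", "/bin/sh")

entry = db.getpwnam("alice")
assert entry.pw_uid == 1000
assert entry[6] == "/bin/sh"    # records are indexable, like pwd's
assert len(db.getpwall()) == 1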
@@ -0,0 +1,45 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.htmlizer}.
|
||||
"""
|
||||
|
||||
from io import BytesIO
|
||||
|
||||
from twisted.python.htmlizer import filter
|
||||
from twisted.trial.unittest import TestCase
|
||||
|
||||
|
||||
class FilterTests(TestCase):
|
||||
"""
|
||||
Tests for L{twisted.python.htmlizer.filter}.
|
||||
"""
|
||||
|
||||
def test_empty(self) -> None:
|
||||
"""
|
||||
If passed an empty input file, L{filter} writes a I{pre} tag containing
|
||||
only an end marker to the output file.
|
||||
"""
|
||||
input = BytesIO(b"")
|
||||
output = BytesIO()
|
||||
filter(input, output)
|
||||
self.assertEqual(
|
||||
output.getvalue(), b'<pre><span class="py-src-endmarker"></span></pre>\n'
|
||||
)
|
||||
|
||||
def test_variable(self) -> None:
|
||||
"""
|
||||
If passed an input file containing a variable access, L{filter} writes
|
||||
a I{pre} tag containing a I{py-src-variable} span containing the
|
||||
variable.
|
||||
"""
|
||||
input = BytesIO(b"foo\n")
|
||||
output = BytesIO()
|
||||
filter(input, output)
|
||||
self.assertEqual(
|
||||
output.getvalue(),
|
||||
b'<pre><span class="py-src-variable">foo</span>'
|
||||
b'<span class="py-src-newline">\n'
|
||||
b'</span><span class="py-src-endmarker"></span></pre>\n',
|
||||
)
|
||||
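# Illustrative sketch (not part of the test module above): htmlizer.filter
# reads Python source from one file-like object and writes marked-up HTML
# spans to another, exactly as the in-memory BytesIO tests exercise it.
from io import BytesIO

from twisted.python.htmlizer import filter as htmlize

source = BytesIO(b"x = 1\n")
html = BytesIO()
htmlize(source, html)
print(html.getvalue().decode())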
@@ -0,0 +1,138 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python._inotify}.
|
||||
"""
|
||||
|
||||
from twisted.python.filepath import FilePath
|
||||
from twisted.python.runtime import platform
|
||||
from twisted.trial.unittest import TestCase
|
||||
|
||||
try:
|
||||
from twisted.python import _inotify
|
||||
except ImportError:
|
||||
inotify = None
|
||||
else:
|
||||
inotify = _inotify
|
||||
|
||||
if inotify and platform.supportsINotify():
|
||||
from ctypes import c_char_p, c_int, c_uint32
|
||||
|
||||
from twisted.python._inotify import INotifyError, add, init, initializeModule
|
||||
else:
|
||||
inotify = None
|
||||
|
||||
|
||||
class INotifyTests(TestCase):
|
||||
"""
|
||||
Tests for L{twisted.python._inotify}.
|
||||
"""
|
||||
|
||||
if inotify is None:
|
||||
skip = "This platform doesn't support INotify."
|
||||
|
||||
def test_missingInit(self):
|
||||
"""
|
||||
If the I{libc} object passed to L{initializeModule} has no
|
||||
C{inotify_init} attribute, L{ImportError} is raised.
|
||||
"""
|
||||
|
||||
class libc:
|
||||
def inotify_add_watch(self):
|
||||
pass
|
||||
|
||||
def inotify_rm_watch(self):
|
||||
pass
|
||||
|
||||
self.assertRaises(ImportError, initializeModule, libc())
|
||||
|
||||
def test_missingAdd(self):
|
||||
"""
|
||||
If the I{libc} object passed to L{initializeModule} has no
|
||||
C{inotify_add_watch} attribute, L{ImportError} is raised.
|
||||
"""
|
||||
|
||||
class libc:
|
||||
def inotify_init(self):
|
||||
pass
|
||||
|
||||
def inotify_rm_watch(self):
|
||||
pass
|
||||
|
||||
self.assertRaises(ImportError, initializeModule, libc())
|
||||
|
||||
def test_missingRemove(self):
|
||||
"""
|
||||
If the I{libc} object passed to L{initializeModule} has no
|
||||
C{inotify_rm_watch} attribute, L{ImportError} is raised.
|
||||
"""
|
||||
|
||||
class libc:
|
||||
def inotify_init(self):
|
||||
pass
|
||||
|
||||
def inotify_add_watch(self):
|
||||
pass
|
||||
|
||||
self.assertRaises(ImportError, initializeModule, libc())
|
||||
|
||||
def test_setTypes(self):
|
||||
"""
|
||||
If the I{libc} object passed to L{initializeModule} has all of the
|
||||
necessary attributes, it sets the C{argtypes} and C{restype} attributes
|
||||
of the three ctypes methods used from libc.
|
||||
"""
|
||||
|
||||
class libc:
|
||||
def inotify_init(self):
|
||||
pass
|
||||
|
||||
inotify_init = staticmethod(inotify_init)
|
||||
|
||||
def inotify_rm_watch(self):
|
||||
pass
|
||||
|
||||
inotify_rm_watch = staticmethod(inotify_rm_watch)
|
||||
|
||||
def inotify_add_watch(self):
|
||||
pass
|
||||
|
||||
inotify_add_watch = staticmethod(inotify_add_watch)
|
||||
|
||||
c = libc()
|
||||
initializeModule(c)
|
||||
self.assertEqual(c.inotify_init.argtypes, [])
|
||||
self.assertEqual(c.inotify_init.restype, c_int)
|
||||
|
||||
self.assertEqual(c.inotify_rm_watch.argtypes, [c_int, c_int])
|
||||
self.assertEqual(c.inotify_rm_watch.restype, c_int)
|
||||
|
||||
self.assertEqual(c.inotify_add_watch.argtypes, [c_int, c_char_p, c_uint32])
|
||||
self.assertEqual(c.inotify_add_watch.restype, c_int)
|
||||
|
||||
def test_failedInit(self):
|
||||
"""
|
||||
If C{inotify_init} returns a negative number, L{init} raises
|
||||
L{INotifyError}.
|
||||
"""
|
||||
|
||||
class libc:
|
||||
def inotify_init(self):
|
||||
return -1
|
||||
|
||||
self.patch(inotify, "libc", libc())
|
||||
self.assertRaises(INotifyError, init)
|
||||
|
||||
def test_failedAddWatch(self):
|
||||
"""
|
||||
If C{inotify_add_watch} returns a negative number, L{add}
|
||||
raises L{INotifyError}.
|
||||
"""
|
||||
|
||||
class libc:
|
||||
def inotify_add_watch(self, fd, path, mask):
|
||||
return -1
|
||||
|
||||
self.patch(inotify, "libc", libc())
|
||||
self.assertRaises(INotifyError, add, 3, FilePath("/foo"), 0)
|
||||
@@ -0,0 +1,503 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.release} and L{twisted.python._release}.
|
||||
|
||||
All of these tests are skipped on platforms other than Linux, as the release is
|
||||
only ever performed on Linux.
|
||||
"""
|
||||
import glob
|
||||
import operator
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from incremental import Version
|
||||
|
||||
from twisted.python import release
|
||||
from twisted.python._release import (
|
||||
GitCommand,
|
||||
IVCSCommand,
|
||||
NotWorkingDirectory,
|
||||
Project,
|
||||
filePathDelta,
|
||||
findTwistedProjects,
|
||||
getRepositoryCommand,
|
||||
replaceInFile,
|
||||
runCommand,
|
||||
)
|
||||
from twisted.python.filepath import FilePath
|
||||
from twisted.trial.unittest import TestCase
|
||||
|
||||
if sys.platform != "win32":
|
||||
skip = None
|
||||
else:
|
||||
skip = "Release toolchain only supported on POSIX."
|
||||
|
||||
# This should match the GitHub Actions environment used by pre-commit.ci to push changes to the auto-updated branches.
|
||||
PRECOMMIT_CI_ENVIRON = {
|
||||
"GITHUB_HEAD_REF": "pre-commit-ci-update-config",
|
||||
"PATH": os***REMOVED***iron["PATH"],
|
||||
}
|
||||
# This should match the GHA environment for non pre-commit.ci PRs.
|
||||
GENERIC_CI_ENVIRON = {
|
||||
"GITHUB_HEAD_REF": "1234-some-branch-name",
|
||||
"PATH": os***REMOVED***iron["PATH"],
|
||||
}
|
||||
|
||||
|
||||
class ExternalTempdirTestCase(TestCase):
|
||||
"""
|
||||
A test case whose mktemp makes directories outside of the usual spot, so
|
||||
that Git commands don't interfere with the Twisted checkout.
|
||||
"""
|
||||
|
||||
def mktemp(self):
|
||||
"""
|
||||
Make our own directory.
|
||||
"""
|
||||
newDir = tempfile.mkdtemp(dir=tempfile.gettempdir())
|
||||
self.addCleanup(shutil.rmtree, newDir)
|
||||
return newDir
|
||||
|
||||
|
||||
def _gitConfig(path):
|
||||
"""
|
||||
Set some config in the repo that Git requires to make commits. This isn't
|
||||
needed in real usage, just for tests.
|
||||
|
||||
@param path: The path to the Git repository.
|
||||
@type path: L{FilePath}
|
||||
"""
|
||||
runCommand(
|
||||
[
|
||||
"git",
|
||||
"config",
|
||||
"--file",
|
||||
path.child(".git").child("config").path,
|
||||
"user.name",
|
||||
'"someone"',
|
||||
]
|
||||
)
|
||||
runCommand(
|
||||
[
|
||||
"git",
|
||||
"config",
|
||||
"--file",
|
||||
path.child(".git").child("config").path,
|
||||
"user.email",
|
||||
'"someone@someplace.com"',
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def _gitInit(path):
|
||||
"""
|
||||
Run a git init, and set some config that git requires. This isn't needed in
|
||||
real usage.
|
||||
|
||||
@param path: The path to where the Git repo will be created.
|
||||
@type path: L{FilePath}
|
||||
"""
|
||||
runCommand(["git", "init", path.path])
|
||||
_gitConfig(path)
|
||||
|
||||
|
||||
def genVersion(*args, **kwargs):
|
||||
"""
|
||||
A convenience for generating _version.py data.
|
||||
|
||||
@param args: Arguments to pass to L{Version}.
|
||||
@param kwargs: Keyword arguments to pass to L{Version}.
|
||||
"""
|
||||
return "from incremental import Version\n__version__={!r}".format(
|
||||
Version(*args, **kwargs)
|
||||
)
|
||||
|
||||
|
||||
class StructureAssertingMixin:
|
||||
"""
|
||||
A mixin for L{TestCase} subclasses which provides some methods for
|
||||
asserting the structure and contents of directories and files on the
|
||||
filesystem.
|
||||
"""
|
||||
|
||||
def createStructure(self, root, dirDict):
|
||||
"""
|
||||
Create a set of directories and files given a dict defining their
|
||||
structure.
|
||||
|
||||
@param root: The directory in which to create the structure. It must
|
||||
already exist.
|
||||
@type root: L{FilePath}
|
||||
|
||||
@param dirDict: The dict defining the structure. Keys should be strings
|
||||
naming files, values should be strings describing file contents OR
|
||||
dicts describing subdirectories. All files are written in binary
|
||||
mode. Any string values are assumed to describe text files and
|
||||
will have their newlines replaced with the platform-native newline
|
||||
convention. For example::
|
||||
|
||||
{"foofile": "foocontents",
|
||||
"bardir": {"barfile": "bar\ncontents"}}
|
||||
@type dirDict: C{dict}
|
||||
"""
|
||||
for x in dirDict:
|
||||
child = root.child(x)
|
||||
if isinstance(dirDict[x], dict):
|
||||
child.createDirectory()
|
||||
self.createStructure(child, dirDict[x])
|
||||
else:
|
||||
child.setContent(dirDict[x].replace("\n", os.linesep).encode())
|
||||
|
||||
def assertStructure(self, root, dirDict):
|
||||
"""
|
||||
Assert that a directory is equivalent to one described by a dict.
|
||||
|
||||
@param root: The filesystem directory to compare.
|
||||
@type root: L{FilePath}
|
||||
@param dirDict: The dict that should describe the contents of the
|
||||
directory. It should be the same structure as the C{dirDict}
|
||||
parameter to L{createStructure}.
|
||||
@type dirDict: C{dict}
|
||||
"""
|
||||
children = [each.basename() for each in root.children()]
|
||||
for pathSegment, expectation in dirDict.items():
|
||||
child = root.child(pathSegment)
|
||||
if callable(expectation):
|
||||
self.assertTrue(expectation(child))
|
||||
elif isinstance(expectation, dict):
|
||||
self.assertTrue(child.isdir(), f"{child.path} is not a dir!")
|
||||
self.assertStructure(child, expectation)
|
||||
else:
|
||||
actual = child.getContent().decode().replace(os.linesep, "\n")
|
||||
self.assertEqual(actual, expectation)
|
||||
children.remove(pathSegment)
|
||||
if children:
|
||||
self.fail(f"There were extra children in {root.path}: {children}")
|
||||
|
||||
|
||||
class ProjectTests(ExternalTempdirTestCase):
|
||||
"""
|
||||
There is a first-class representation of a project.
|
||||
"""
|
||||
|
||||
def assertProjectsEqual(self, observedProjects, expectedProjects):
|
||||
"""
|
||||
Assert that two lists of L{Project}s are equal.
|
||||
"""
|
||||
self.assertEqual(len(observedProjects), len(expectedProjects))
|
||||
observedProjects = sorted(
|
||||
observedProjects, key=operator.attrgetter("directory")
|
||||
)
|
||||
expectedProjects = sorted(
|
||||
expectedProjects, key=operator.attrgetter("directory")
|
||||
)
|
||||
for observed, expected in zip(observedProjects, expectedProjects):
|
||||
self.assertEqual(observed.directory, expected.directory)
|
||||
|
||||
def makeProject(self, version, baseDirectory=None):
|
||||
"""
|
||||
Make a Twisted-style project in the given base directory.
|
||||
|
||||
@param baseDirectory: The directory to create files in
|
||||
(as a L{FilePath}).
|
||||
@param version: The version information for the project.
|
||||
@return: L{Project} pointing to the created project.
|
||||
"""
|
||||
if baseDirectory is None:
|
||||
baseDirectory = FilePath(self.mktemp())
|
||||
segments = version[0].split(".")
|
||||
directory = baseDirectory
|
||||
for segment in segments:
|
||||
directory = directory.child(segment)
|
||||
if not directory.exists():
|
||||
directory.createDirectory()
|
||||
directory.child("__init__.py").setContent(b"")
|
||||
directory.child("newsfragments").createDirectory()
|
||||
directory.child("_version.py").setContent(genVersion(*version).encode())
|
||||
return Project(directory)
|
||||
|
||||
def makeProjects(self, *versions):
|
||||
"""
|
||||
Create a series of projects underneath a temporary base directory.
|
||||
|
||||
@return: A L{FilePath} for the base directory.
|
||||
"""
|
||||
baseDirectory = FilePath(self.mktemp())
|
||||
for version in versions:
|
||||
self.makeProject(version, baseDirectory)
|
||||
return baseDirectory
|
||||
|
||||
def test_getVersion(self):
|
||||
"""
|
||||
Project objects know their version.
|
||||
"""
|
||||
version = ("twisted", 2, 1, 0)
|
||||
project = self.makeProject(version)
|
||||
self.assertEqual(project.getVersion(), Version(*version))
|
||||
|
||||
def test_repr(self):
|
||||
"""
|
||||
The representation of a Project is Project(directory).
|
||||
"""
|
||||
foo = Project(FilePath("bar"))
|
||||
self.assertEqual(repr(foo), "Project(%r)" % (foo.directory))
|
||||
|
||||
def test_findTwistedStyleProjects(self):
|
||||
"""
|
||||
findTwistedStyleProjects finds all projects underneath a particular
|
||||
directory. A 'project' is defined by the existence of a 'newsfragments'
|
||||
directory and is returned as a Project object.
|
||||
"""
|
||||
baseDirectory = self.makeProjects(("foo", 2, 3, 0), ("foo.bar", 0, 7, 4))
|
||||
projects = findTwistedProjects(baseDirectory)
|
||||
self.assertProjectsEqual(
|
||||
projects,
|
||||
[
|
||||
Project(baseDirectory.child("foo")),
|
||||
Project(baseDirectory.child("foo").child("bar")),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
class UtilityTests(ExternalTempdirTestCase):
|
||||
"""
|
||||
Tests for various utility functions for releasing.
|
||||
"""
|
||||
|
||||
def test_chdir(self):
|
||||
"""
|
||||
Test that runChdirSafe is actually safe, i.e., it still
|
||||
changes back to the original directory even if an error is
|
||||
raised.
|
||||
"""
|
||||
cwd = os.getcwd()
|
||||
|
||||
def chAndBreak():
|
||||
os.mkdir("releaseCh")
|
||||
os.chdir("releaseCh")
|
||||
1 // 0
|
||||
|
||||
self.assertRaises(ZeroDivisionError, release.runChdirSafe, chAndBreak)
|
||||
self.assertEqual(cwd, os.getcwd())
|
||||
|
||||
def test_replaceInFile(self):
|
||||
"""
|
||||
L{replaceInFile} replaces data in a file based on a dict. A key from
|
||||
the dict that is found in the file is replaced with the corresponding
|
||||
value.
|
||||
"""
|
||||
content = "foo\nhey hey $VER\nbar\n"
|
||||
with open("release.replace", "w") as outf:
|
||||
outf.write(content)
|
||||
|
||||
expected = content.replace("$VER", "2.0.0")
|
||||
replaceInFile("release.replace", {"$VER": "2.0.0"})
|
||||
with open("release.replace") as f:
|
||||
self.assertEqual(f.read(), expected)
|
||||
|
||||
expected = expected.replace("2.0.0", "3.0.0")
|
||||
replaceInFile("release.replace", {"2.0.0": "3.0.0"})
|
||||
with open("release.replace") as f:
|
||||
self.assertEqual(f.read(), expected)
|
||||
|
||||
|
||||
class FilePathDeltaTests(TestCase):
|
||||
"""
|
||||
Tests for L{filePathDelta}.
|
||||
"""
|
||||
|
||||
def test_filePathDeltaSubdir(self):
|
||||
"""
|
||||
L{filePathDelta} can create a simple relative path to a child path.
|
||||
"""
|
||||
self.assertEqual(
|
||||
filePathDelta(FilePath("/foo/bar"), FilePath("/foo/bar/baz")), ["baz"]
|
||||
)
|
||||
|
||||
def test_filePathDeltaSiblingDir(self):
|
||||
"""
|
||||
L{filePathDelta} can traverse upwards to create relative paths to
|
||||
siblings.
|
||||
"""
|
||||
self.assertEqual(
|
||||
filePathDelta(FilePath("/foo/bar"), FilePath("/foo/baz")), ["..", "baz"]
|
||||
)
|
||||
|
||||
def test_filePathNoCommonElements(self):
|
||||
"""
|
||||
L{filePathDelta} can create relative paths to totally unrelated paths
|
||||
for maximum portability.
|
||||
"""
|
||||
self.assertEqual(
|
||||
filePathDelta(FilePath("/foo/bar"), FilePath("/baz/quux")),
|
||||
["..", "..", "baz", "quux"],
|
||||
)
|
||||
|
||||
def test_filePathDeltaSimilarEndElements(self):
|
||||
"""
|
||||
L{filePathDelta} doesn't take into account final elements when
|
||||
comparing 2 paths, but stops at the first difference.
|
||||
"""
|
||||
self.assertEqual(
|
||||
filePathDelta(FilePath("/foo/bar/bar/spam"), FilePath("/foo/bar/baz/spam")),
|
||||
["..", "..", "baz", "spam"],
|
||||
)
|
||||
|
||||
|
||||
class CommandsTestMixin(StructureAssertingMixin):
|
||||
"""
|
||||
Test mixin for the VCS commands used by the release scripts.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.tmpDir = FilePath(self.mktemp())
|
||||
|
||||
def test_[AWS-SECRET-REMOVED]tory(self):
|
||||
"""
|
||||
Calling the C{ensureIsWorkingDirectory} VCS command's method on a valid
|
||||
working directory doesn't produce any error.
|
||||
"""
|
||||
reposDir = self.makeRepository(self.tmpDir)
|
||||
self.assertIsNone(self.createCommand.ensureIsWorkingDirectory(reposDir))
|
||||
|
||||
def test_[AWS-SECRET-REMOVED]rectory(self):
|
||||
"""
|
||||
Calling the C{ensureIsWorkingDirectory} VCS command's method on an
|
||||
invalid working directory raises a L{NotWorkingDirectory} exception.
|
||||
"""
|
||||
self.assertRaises(
|
||||
NotWorkingDirectory,
|
||||
self.createCommand.ensureIsWorkingDirectory,
|
||||
self.tmpDir,
|
||||
)
|
||||
|
||||
def test_statusClean(self):
|
||||
"""
|
||||
Calling the C{isStatusClean} VCS command's method on a repository with
|
||||
no pending modifications returns C{True}.
|
||||
"""
|
||||
reposDir = self.makeRepository(self.tmpDir)
|
||||
self.assertTrue(self.createCommand.isStatusClean(reposDir))
|
||||
|
||||
def test_statusNotClean(self):
|
||||
"""
|
||||
Calling the C{isStatusClean} VCS command's method on a repository with
|
||||
pending modifications returns C{False}.
|
||||
"""
|
||||
reposDir = self.makeRepository(self.tmpDir)
|
||||
reposDir.child("some-file").setContent(b"something")
|
||||
self.assertFalse(self.createCommand.isStatusClean(reposDir))
|
||||
|
||||
def test_remove(self):
|
||||
"""
|
||||
Calling the C{remove} VCS command's method removes the specified path
|
||||
from the directory.
|
||||
"""
|
||||
reposDir = self.makeRepository(self.tmpDir)
|
||||
testFile = reposDir.child("some-file")
|
||||
testFile.setContent(b"something")
|
||||
self.commitRepository(reposDir)
|
||||
self.assertTrue(testFile.exists())
|
||||
|
||||
self.createCommand.remove(testFile)
|
||||
testFile.restat(False) # Refresh the file information
|
||||
self.assertFalse(testFile.exists(), "File still exists")
|
||||
|
||||
def test_export(self):
|
||||
"""
|
||||
The C{exportTo} VCS command's method exports the content of the
repository into a specified directory as an identical copy.
|
||||
"""
|
||||
structure = {
|
||||
"README.rst": "Hi this is 1.0.0.",
|
||||
"twisted": {
|
||||
"newsfragments": {"README": "Hi this is 1.0.0"},
|
||||
"_version.py": genVersion("twisted", 1, 0, 0),
|
||||
"web": {
|
||||
"newsfragments": {"README": "Hi this is 1.0.0"},
|
||||
"_version.py": genVersion("twisted.web", 1, 0, 0),
|
||||
},
|
||||
},
|
||||
}
|
||||
reposDir = self.makeRepository(self.tmpDir)
|
||||
self.createStructure(reposDir, structure)
|
||||
self.commitRepository(reposDir)
|
||||
|
||||
exportDir = FilePath(self.mktemp()).child("export")
|
||||
self.createCommand.exportTo(reposDir, exportDir)
|
||||
self.assertStructure(exportDir, structure)
|
||||
|
||||
|
||||
class GitCommandTest(CommandsTestMixin, ExternalTempdirTestCase):
|
||||
"""
|
||||
Specific L{CommandsTestMixin} related to Git repositories through
|
||||
L{GitCommand}.
|
||||
"""
|
||||
|
||||
createCommand = GitCommand
|
||||
|
||||
def makeRepository(self, root):
|
||||
"""
|
||||
Create a Git repository in the specified path.
|
||||
|
||||
@type root: L{FilePath}
|
||||
@param root: The directory in which to create the Git repository.
|
||||
|
||||
@return: The path to the repository just created.
|
||||
@rtype: L{FilePath}
|
||||
"""
|
||||
_gitInit(root)
|
||||
return root
|
||||
|
||||
def commitRepository(self, repository):
|
||||
"""
|
||||
Add and commit all the files from the Git repository specified.
|
||||
|
||||
@type repository: L{FilePath}
|
||||
@param repository: The Git repository to commit to.
|
||||
"""
|
||||
runCommand(
|
||||
["git", "-C", repository.path, "add"] + glob.glob(repository.path + "/*")
|
||||
)
|
||||
runCommand(["git", "-C", repository.path, "commit", "-m", "hop"])
|
||||
|
||||
|
||||
class RepositoryCommandDetectionTest(ExternalTempdirTestCase):
|
||||
"""
|
||||
Test the L{getRepositoryCommand} to access the right set of VCS commands
|
||||
depending on the repository manipulated.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.repos = FilePath(self.mktemp())
|
||||
|
||||
def test_git(self):
|
||||
"""
|
||||
L{getRepositoryCommand} from a Git repository returns L{GitCommand}.
|
||||
"""
|
||||
_gitInit(self.repos)
|
||||
cmd = getRepositoryCommand(self.repos)
|
||||
self.assertIs(cmd, GitCommand)
|
||||
|
||||
def test_unknownRepository(self):
|
||||
"""
|
||||
L{getRepositoryCommand} from a directory which doesn't look like a Git
|
||||
repository produces a L{NotWorkingDirectory} exception.
|
||||
"""
|
||||
self.assertRaises(NotWorkingDirectory, getRepositoryCommand, self.repos)
|
||||
|
||||
|
||||
class VCSCommandInterfaceTests(TestCase):
|
||||
"""
|
||||
Test that the VCS command classes implement their interface.
|
||||
"""
|
||||
|
||||
def test_git(self):
|
||||
"""
|
||||
L{GitCommand} implements L{IVCSCommand}.
|
||||
"""
|
||||
self.assertTrue(IVCSCommand.implementedBy(GitCommand))
|
||||
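# Illustrative sketch (not part of the test module above): driving the VCS
# helpers verified here against a Git checkout. The paths are hypothetical.
from twisted.python._release import getRepositoryCommand
from twisted.python.filepath import FilePath

checkout = FilePath("/tmp/some-checkout")
export = FilePath("/tmp/some-export")

cmd = getRepositoryCommand(checkout)      # GitCommand for a Git working copy
cmd.ensureIsWorkingDirectory(checkout)    # raises NotWorkingDirectory otherwise
if cmd.isStatusClean(checkout):
    cmd.exportTo(checkout, export)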
@@ -0,0 +1,228 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.runtime}.
|
||||
"""
|
||||
|
||||
|
||||
import sys
|
||||
|
||||
from twisted.python.reflect import namedModule
|
||||
from twisted.python.runtime import Platform, shortPythonVersion
|
||||
from twisted.trial.unittest import SynchronousTestCase
|
||||
from twisted.trial.util import suppress as SUPRESS
|
||||
|
||||
|
||||
class PythonVersionTests(SynchronousTestCase):
|
||||
"""
|
||||
Tests the shortPythonVersion method.
|
||||
"""
|
||||
|
||||
def test_shortPythonVersion(self) -> None:
|
||||
"""
|
||||
Verify if the Python version is returned correctly.
|
||||
"""
|
||||
ver = shortPythonVersion().split(".")
|
||||
for i in range(3):
|
||||
self.assertEqual(int(ver[i]), sys.version_info[i])
|
||||
|
||||
|
||||
class PlatformTests(SynchronousTestCase):
|
||||
"""
|
||||
Tests for the default L{Platform} initializer.
|
||||
"""
|
||||
|
||||
isWinNTDeprecationMessage = (
|
||||
"twisted.python.runtime.Platform.isWinNT was "
|
||||
"deprecated in Twisted 13.0. Use Platform.isWindows instead."
|
||||
)
|
||||
|
||||
def test_isKnown(self) -> None:
|
||||
"""
|
||||
L{Platform.isKnown} returns a boolean indicating whether this is one of
|
||||
the L{runtime.knownPlatforms}.
|
||||
"""
|
||||
platform = Platform()
|
||||
self.assertTrue(platform.isKnown())
|
||||
|
||||
def test_isVistaConsistency(self) -> None:
|
||||
"""
|
||||
Verify consistency of L{Platform.isVista}: it can only be C{True} if
|
||||
L{Platform.isWinNT} and L{Platform.isWindows} are C{True}.
|
||||
"""
|
||||
platform = Platform()
|
||||
if platform.isVista():
|
||||
self.assertTrue(platform.isWinNT())
|
||||
self.assertTrue(platform.isWindows())
|
||||
self.assertFalse(platform.isMacOSX())
|
||||
|
||||
def test_isMacOSXConsistency(self) -> None:
|
||||
"""
|
||||
L{Platform.isMacOSX} can only return C{True} if L{Platform.getType}
|
||||
returns C{'posix'}.
|
||||
"""
|
||||
platform = Platform()
|
||||
if platform.isMacOSX():
|
||||
self.assertEqual(platform.getType(), "posix")
|
||||
|
||||
def test_isLinuxConsistency(self) -> None:
|
||||
"""
|
||||
L{Platform.isLinux} can only return C{True} if L{Platform.getType}
|
||||
returns C{'posix'} and L{sys.platform} starts with C{"linux"}.
|
||||
"""
|
||||
platform = Platform()
|
||||
if platform.isLinux():
|
||||
self.assertTrue(sys.platform.startswith("linux"))
|
||||
|
||||
def test_isWinNT(self) -> None:
|
||||
"""
|
||||
L{Platform.isWinNT} can return only C{False} or C{True} and can not
|
||||
return C{True} if L{Platform.getType} is not C{"win32"}.
|
||||
"""
|
||||
platform = Platform()
|
||||
isWinNT = platform.isWinNT()
|
||||
self.assertIn(isWinNT, (False, True))
|
||||
if platform.getType() != "win32":
|
||||
self.assertFalse(isWinNT)
|
||||
|
||||
test_isWinNT.suppress = [ # type: ignore[attr-defined]
|
||||
SUPRESS(
|
||||
category=DeprecationWarning,
|
||||
message=isWinNTDeprecationMessage,
|
||||
)
|
||||
]
|
||||
|
||||
def test_isWinNTDeprecated(self) -> None:
|
||||
"""
|
||||
L{Platform.isWinNT} is deprecated in favor of L{Platform.isWindows}.
|
||||
"""
|
||||
platform = Platform()
|
||||
platform.isWinNT()
|
||||
warnings = self.flushWarnings([self.test_isWinNTDeprecated])
|
||||
self.assertEqual(len(warnings), 1)
|
||||
self.assertEqual(warnings[0]["message"], self.isWinNTDeprecationMessage)
|
||||
|
||||
def test_supportsThreads(self) -> None:
|
||||
"""
|
||||
L{Platform.supportsThreads} returns C{True} if threads can be created in
|
||||
this runtime, C{False} otherwise.
|
||||
"""
|
||||
# It's difficult to test both cases of this without faking the threading
|
||||
# module. Perhaps an adequate test is to just test the behavior with
|
||||
# the current runtime, whatever that happens to be.
|
||||
try:
|
||||
namedModule("threading")
|
||||
except ImportError:
|
||||
self.assertFalse(Platform().supportsThreads())
|
||||
else:
|
||||
self.assertTrue(Platform().supportsThreads())
|
||||
|
||||
|
||||
class ForeignPlatformTests(SynchronousTestCase):
|
||||
"""
|
||||
Tests for L{Platform} based overridden initializer values.
|
||||
"""
|
||||
|
||||
def test_getType(self) -> None:
|
||||
"""
|
||||
If an operating system name is supplied to L{Platform}'s initializer,
|
||||
L{Platform.getType} returns the platform type which corresponds to that
|
||||
name.
|
||||
"""
|
||||
self.assertEqual(Platform("nt").getType(), "win32")
|
||||
self.assertEqual(Platform("ce").getType(), "win32")
|
||||
self.assertEqual(Platform("posix").getType(), "posix")
|
||||
self.assertEqual(Platform("java").getType(), "java")
|
||||
|
||||
def test_isMacOSX(self) -> None:
|
||||
"""
|
||||
If a system platform name is supplied to L{Platform}'s initializer, it
|
||||
is used to determine the result of L{Platform.isMacOSX}, which returns
|
||||
C{True} for C{"darwin"}, C{False} otherwise.
|
||||
"""
|
||||
self.assertTrue(Platform(None, "darwin").isMacOSX())
|
||||
self.assertFalse(Platform(None, "linux2").isMacOSX())
|
||||
self.assertFalse(Platform(None, "win32").isMacOSX())
|
||||
|
||||
def test_isLinux(self) -> None:
|
||||
"""
|
||||
If a system platform name is supplied to L{Platform}'s initializer, it
|
||||
is used to determine the result of L{Platform.isLinux}, which returns
|
||||
C{True} for values beginning with C{"linux"}, C{False} otherwise.
|
||||
"""
|
||||
self.assertFalse(Platform(None, "darwin").isLinux())
|
||||
self.assertTrue(Platform(None, "linux").isLinux())
|
||||
self.assertTrue(Platform(None, "linux2").isLinux())
|
||||
self.assertTrue(Platform(None, "linux3").isLinux())
|
||||
self.assertFalse(Platform(None, "win32").isLinux())
|
||||
|
||||
|
||||
class DockerPlatformTests(SynchronousTestCase):
|
||||
"""
|
||||
Tests for L{twisted.python.runtime.Platform.isDocker}.
|
||||
"""
|
||||
|
||||
def test_noChecksOnLinux(self) -> None:
|
||||
"""
|
||||
If the platform is not Linux, C{isDocker()} always returns L{False}.
|
||||
"""
|
||||
platform = Platform(None, "win32")
|
||||
self.assertFalse(platform.isDocker())
|
||||
|
||||
def test_noCGroups(self) -> None:
|
||||
"""
|
||||
If the platform is Linux, and the cgroups file in C{/proc} does not
|
||||
exist, C{isDocker()} returns L{False}
|
||||
"""
|
||||
platform = Platform(None, "linux")
|
||||
self.assertFalse(platform.isDocker(_initCGroupLocation="fakepath"))
|
||||
|
||||
def test_cgroupsSuggestsDocker(self) -> None:
|
||||
"""
|
||||
If the platform is Linux, and the cgroups file (faked out here) exists,
|
||||
and one of the paths starts with C{/docker/}, C{isDocker()} returns
|
||||
C{True}.
|
||||
"""
|
||||
cgroupsFile = self.mktemp()
|
||||
with open(cgroupsFile, "wb") as f:
|
||||
# real cgroups file from inside a Debian 7 docker container
|
||||
f.write(
|
||||
b"""10:debug:/
|
||||
9:net_prio:/
|
||||
8:perf_event:[AWS-SECRET-REMOVED]5a06a7508403c797bc89ea43adf8d35f
|
||||
7:net_cls:/
|
||||
6:freezer:[AWS-SECRET-REMOVED]5a06a7508403c797bc89ea43adf8d35f
|
||||
5:devices:[AWS-SECRET-REMOVED]5a06a7508403c797bc89ea43adf8d35f
|
||||
4:blkio:[AWS-SECRET-REMOVED]5a06a7508403c797bc89ea43adf8d35f
|
||||
3:cpuacct:[AWS-SECRET-REMOVED]5a06a7508403c797bc89ea43adf8d35f
|
||||
2:cpu:[AWS-SECRET-REMOVED]5a06a7508403c797bc89ea43adf8d35f
|
||||
1:cpuset:[AWS-SECRET-REMOVED]5a06a7508403c797bc89ea43adf8d35f"""
|
||||
)
|
||||
|
||||
platform = Platform(None, "linux")
|
||||
self.assertTrue(platform.isDocker(_initCGroupLocation=cgroupsFile))
|
||||
|
||||
def test_cgroupsSuggestsRealSystem(self) -> None:
|
||||
"""
|
||||
If the platform is Linux, and the cgroups file (faked out here) exists,
|
||||
and none of the paths starts with C{/docker/}, C{isDocker()} returns
|
||||
C{False}.
|
||||
"""
|
||||
cgroupsFile = self.mktemp()
|
||||
with open(cgroupsFile, "wb") as f:
|
||||
# real cgroups file from a Fedora 17 system
|
||||
f.write(
|
||||
b"""9:perf_event:/
|
||||
8:blkio:/
|
||||
7:net_cls:/
|
||||
6:freezer:/
|
||||
5:devices:/
|
||||
4:memory:/
|
||||
3:cpuacct,cpu:/
|
||||
2:cpuset:/
|
||||
1:name=systemd:/system"""
|
||||
)
|
||||
|
||||
platform = Platform(None, "linux")
|
||||
self.assertFalse(platform.isDocker(_initCGroupLocation=cgroupsFile))
|
||||
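# Illustrative sketch (not part of the test module above): the module-level
# Platform instance is how this API is normally consumed.
from twisted.python.runtime import platform

lineEnding = "\r\n" if platform.isWindows() else "\n"
kind = platform.getType()    # "win32", "posix", or "java"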
@@ -0,0 +1,337 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.sendmsg}.
|
||||
"""
|
||||
|
||||
import errno
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
from os import close, pathsep, pipe, read
|
||||
from socket import AF_INET, AF_INET6, SOL_SOCKET, error, socket
|
||||
from struct import pack
|
||||
|
||||
try:
|
||||
from socket import AF_UNIX, socketpair
|
||||
except ImportError:
|
||||
nonUNIXSkip = True
|
||||
else:
|
||||
nonUNIXSkip = False
|
||||
|
||||
from unittest import skipIf
|
||||
|
||||
from twisted.internet import reactor
|
||||
from twisted.internet.defer import Deferred, inlineCallbacks
|
||||
from twisted.internet.error import ProcessDone
|
||||
from twisted.internet.protocol import ProcessProtocol
|
||||
from twisted.python.filepath import FilePath
|
||||
from twisted.python.runtime import platform
|
||||
from twisted.trial.unittest import TestCase
|
||||
|
||||
if platform.isLinux():
|
||||
from socket import MSG_DONTWAIT
|
||||
|
||||
dontWaitSkip = False
|
||||
else:
|
||||
# It would be nice to be able to test flags on more platforms, but finding
|
||||
# a flag that works *at all* is somewhat challenging.
|
||||
dontWaitSkip = True
|
||||
|
||||
|
||||
try:
|
||||
from twisted.python.sendmsg import SCM_RIGHTS, getSocketFamily, recvmsg, sendmsg
|
||||
except ImportError:
|
||||
doImportSkip = True
|
||||
importSkipReason = "Platform doesn't support sendmsg."
|
||||
else:
|
||||
doImportSkip = False
|
||||
importSkipReason = ""
|
||||
|
||||
|
||||
class _FDHolder:
|
||||
"""
|
||||
A wrapper around a FD that will remember if it has been closed or not.
|
||||
"""
|
||||
|
||||
def __init__(self, fd):
|
||||
self._fd = fd
|
||||
|
||||
def fileno(self):
|
||||
"""
|
||||
Return the fileno of this FD.
|
||||
"""
|
||||
return self._fd
|
||||
|
||||
def close(self):
|
||||
"""
|
||||
Close the FD. If it's already been closed, do nothing.
|
||||
"""
|
||||
if self._fd:
|
||||
close(self._fd)
|
||||
self._fd = None
|
||||
|
||||
def __del__(self):
|
||||
"""
|
||||
If C{self._fd} is unclosed, raise a warning.
|
||||
"""
|
||||
if self._fd:
|
||||
warnings.warn(f"FD {self._fd} was not closed!", ResourceWarning)
|
||||
self.close()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
self.close()
|
||||
|
||||
|
||||
def _makePipe():
|
||||
"""
|
||||
Create a pipe, and return the two FDs wrapped in L{_FDHolders}.
|
||||
"""
|
||||
r, w = pipe()
|
||||
return (_FDHolder(r), _FDHolder(w))
|
||||
|
||||
|
||||
class ExitedWithStderr(Exception):
|
||||
"""
|
||||
A process exited with some stderr.
|
||||
"""
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""
|
||||
Dump the errors in a pretty way in the event of a subprocess traceback.
|
||||
"""
|
||||
result = b"\n".join([b""] + list(self.args))
|
||||
return repr(result)
|
||||
|
||||
|
||||
class StartStopProcessProtocol(ProcessProtocol):
|
||||
"""
|
||||
An L{IProcessProtocol} with a Deferred for events where the subprocess
|
||||
starts and stops.
|
||||
|
||||
@ivar started: A L{Deferred} which fires with this protocol's
|
||||
L{IProcessTransport} provider when it is connected to one.
|
||||
|
||||
@ivar stopped: A L{Deferred} which fires with the process output or a
|
||||
failure if the process produces output on standard error.
|
||||
|
||||
@ivar output: A C{str} used to accumulate standard output.
|
||||
|
||||
@ivar errors: A C{str} used to accumulate standard error.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.started = Deferred()
|
||||
self.stopped = Deferred()
|
||||
self.output = b""
|
||||
self.errors = b""
|
||||
|
||||
def connectionMade(self):
|
||||
self.started.callback(self.transport)
|
||||
|
||||
def outReceived(self, data):
|
||||
self.output += data
|
||||
|
||||
def errReceived(self, data):
|
||||
self.errors += data
|
||||
|
||||
def processEnded(self, reason):
|
||||
if reason.check(ProcessDone):
|
||||
self.stopped.callback(self.output)
|
||||
else:
|
||||
self.stopped.errback(ExitedWithStderr(self.errors, self.output))
|
||||
|
||||
|
||||
def _spawn(script, outputFD):
|
||||
"""
|
||||
Start a script that is a peer of this test as a subprocess.
|
||||
|
||||
@param script: the module name of the script in this directory (no
|
||||
package prefix, no '.py')
|
||||
@type script: C{str}
|
||||
|
||||
@rtype: L{StartStopProcessProtocol}
|
||||
"""
|
||||
pyExe = FilePath(sys.executable).asTextMode().path
|
||||
env = dict(os.environ)
|
||||
env["PYTHONPATH"] = FilePath(pathsep.join(sys.path)).asTextMode().path
|
||||
sspp = StartStopProcessProtocol()
|
||||
reactor.spawnProcess(
|
||||
sspp,
|
||||
pyExe,
|
||||
[
|
||||
pyExe,
|
||||
FilePath(__file__).sibling(script + ".py").asTextMode().path,
|
||||
b"17",
|
||||
],
|
||||
env=env,
|
||||
childFDs={0: "w", 1: "r", 2: "r", 17: outputFD},
|
||||
)
|
||||
return sspp
|
||||
|
||||
|
||||
@skipIf(doImportSkip, importSkipReason)
|
||||
class SendmsgTests(TestCase):
|
||||
"""
|
||||
Tests for the Python2/3 compatible L{sendmsg} interface.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""
|
||||
Create a pair of UNIX sockets.
|
||||
"""
|
||||
self.input, self.output = socketpair(AF_UNIX)
|
||||
|
||||
def tearDown(self):
|
||||
"""
|
||||
Close the sockets opened by setUp.
|
||||
"""
|
||||
self.input.close()
|
||||
self.output.close()
|
||||
|
||||
def test_syscallError(self):
|
||||
"""
|
||||
If the underlying C{sendmsg} call fails, L{send1msg} raises
|
||||
L{socket.error} with its errno set to the underlying errno value.
|
||||
"""
|
||||
self.input.close()
|
||||
exc = self.assertRaises(error, sendmsg, self.input, b"hello, world")
|
||||
self.assertEqual(exc.args[0], errno.EBADF)
|
||||
|
||||
def test_syscallErrorWithControlMessage(self):
|
||||
"""
|
||||
The behavior when the underlying C{sendmsg} call fails is the same
|
||||
whether L{sendmsg} is passed ancillary data or not.
|
||||
"""
|
||||
self.input.close()
|
||||
exc = self.assertRaises(
|
||||
error, sendmsg, self.input, b"hello, world", [(0, 0, b"0123")], 0
|
||||
)
|
||||
self.assertEqual(exc.args[0], errno.EBADF)
|
||||
|
||||
def test_roundtrip(self):
|
||||
"""
|
||||
L{recvmsg} will retrieve a message sent via L{sendmsg}.
|
||||
"""
|
||||
message = b"hello, world!"
|
||||
self.assertEqual(len(message), sendmsg(self.input, message))
|
||||
|
||||
result = recvmsg(self.output)
|
||||
self.assertEqual(result.data, b"hello, world!")
|
||||
self.assertEqual(result.flags, 0)
|
||||
self.assertEqual(result.ancillary, [])
|
||||
|
||||
def test_shortsend(self):
|
||||
"""
|
||||
L{sendmsg} returns the number of bytes which it was able to send.
|
||||
"""
|
||||
message = b"x" * 1024 * 1024 * 16
|
||||
self.input.setblocking(False)
|
||||
sent = sendmsg(self.input, message)
|
||||
# Sanity check: the amount of data sent should be less than the whole
# message, because the send buffer should have filled up first. This
# check won't hold if the send buffer is large enough for the entire
# message.
|
||||
self.assertTrue(sent < len(message))
|
||||
received = recvmsg(self.output, len(message))
|
||||
self.assertEqual(len(received[0]), sent)
|
||||
|
||||
def test_roundtripEmptyAncillary(self):
|
||||
"""
|
||||
L{sendmsg} treats an empty ancillary data list the same way it treats
|
||||
receiving no argument for the ancillary parameter at all.
|
||||
"""
|
||||
sendmsg(self.input, b"hello, world!", [], 0)
|
||||
|
||||
result = recvmsg(self.output)
|
||||
self.assertEqual(result, (b"hello, world!", [], 0))
|
||||
|
||||
@skipIf(dontWaitSkip, "MSG_DONTWAIT is only known to work as intended on Linux")
|
||||
def test_flags(self):
|
||||
"""
|
||||
The C{flags} argument to L{sendmsg} is passed on to the underlying
|
||||
C{sendmsg} call, to affect it in whatever way is defined by those
|
||||
flags.
|
||||
"""
|
||||
# Just exercise one flag with simple, well-known behavior. MSG_DONTWAIT
|
||||
# makes the send a non-blocking call, even if the socket is in blocking
|
||||
# mode. See also test_flags in RecvmsgTests
|
||||
for i in range(8 * 1024):
|
||||
try:
|
||||
sendmsg(self.input, b"x" * 1024, flags=MSG_DONTWAIT)
|
||||
except OSError as e:
|
||||
self.assertEqual(e.args[0], errno.EAGAIN)
|
||||
break
|
||||
else:
|
||||
self.fail(
|
||||
"Failed to fill up the send buffer, "
|
||||
"or maybe send1msg blocked for a while"
|
||||
)
|
||||
|
||||
@inlineCallbacks
|
||||
def test_sendSubProcessFD(self):
|
||||
"""
|
||||
Calling L{sendmsg} with SOL_SOCKET, SCM_RIGHTS, and a platform-endian
|
||||
packed file descriptor number should send that file descriptor to a
|
||||
        different process, where it can be retrieved by using L{recvmsg}.
|
||||
"""
|
||||
sspp = _spawn("pullpipe", self.output.fileno())
|
||||
yield sspp.started
|
||||
pipeOut, pipeIn = _makePipe()
|
||||
self.addCleanup(pipeOut.close)
|
||||
self.addCleanup(pipeIn.close)
|
||||
|
||||
with pipeIn:
|
||||
sendmsg(
|
||||
self.input,
|
||||
b"blonk",
|
||||
[(SOL_SOCKET, SCM_RIGHTS, pack("i", pipeIn.fileno()))],
|
||||
)
|
||||
|
||||
yield sspp.stopped
|
||||
self.assertEqual(read(pipeOut.fileno(), 1024), b"Test fixture data: blonk.\n")
|
||||
# Make sure that the pipe is actually closed now.
|
||||
self.assertEqual(read(pipeOut.fileno(), 1024), b"")
|
||||
|
||||
|
||||
@skipIf(doImportSkip, importSkipReason)
|
||||
class GetSocketFamilyTests(TestCase):
|
||||
"""
|
||||
Tests for L{getSocketFamily}.
|
||||
"""
|
||||
|
||||
def _socket(self, addressFamily):
|
||||
"""
|
||||
Create a new socket using the given address family and return that
|
||||
socket's file descriptor. The socket will automatically be closed when
|
||||
the test is torn down.
|
||||
"""
|
||||
s = socket(addressFamily)
|
||||
self.addCleanup(s.close)
|
||||
return s
|
||||
|
||||
def test_inet(self):
|
||||
"""
|
||||
When passed the file descriptor of a socket created with the C{AF_INET}
|
||||
address family, L{getSocketFamily} returns C{AF_INET}.
|
||||
"""
|
||||
self.assertEqual(AF_INET, getSocketFamily(self._socket(AF_INET)))
|
||||
|
||||
def test_inet6(self):
|
||||
"""
|
||||
When passed the file descriptor of a socket created with the
|
||||
C{AF_INET6} address family, L{getSocketFamily} returns C{AF_INET6}.
|
||||
"""
|
||||
self.assertEqual(AF_INET6, getSocketFamily(self._socket(AF_INET6)))
|
||||
|
||||
@skipIf(nonUNIXSkip, "Platform does not support AF_UNIX sockets")
|
||||
def test_unix(self):
|
||||
"""
|
||||
When passed the file descriptor of a socket created with the C{AF_UNIX}
|
||||
address family, L{getSocketFamily} returns C{AF_UNIX}.
|
||||
"""
|
||||
self.assertEqual(AF_UNIX, getSocketFamily(self._socket(AF_UNIX)))
|
||||
@@ -0,0 +1,635 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases for twisted.python._shellcomp
|
||||
"""
|
||||
|
||||
|
||||
import sys
|
||||
from io import BytesIO
|
||||
from typing import List, Optional
|
||||
|
||||
from twisted.python import _shellcomp, reflect, usage
|
||||
from twisted.python.usage import CompleteFiles, CompleteList, Completer, Completions
|
||||
from twisted.trial import unittest
|
||||
|
||||
|
||||
class ZshScriptTestMeta(type):
|
||||
"""
|
||||
Metaclass of ZshScriptTestMixin.
|
||||
"""
|
||||
|
||||
def __new__(cls, name, bases, attrs):
|
||||
def makeTest(cmdName, optionsFQPN):
|
||||
def runTest(self):
|
||||
return test_genZshFunction(self, cmdName, optionsFQPN)
|
||||
|
||||
return runTest
|
||||
|
||||
# add test_ methods to the class for each script
|
||||
# we are testing.
|
||||
if "generateFor" in attrs:
|
||||
for cmdName, optionsFQPN in attrs["generateFor"]:
|
||||
test = makeTest(cmdName, optionsFQPN)
|
||||
attrs["test_genZshFunction_" + cmdName] = test
|
||||
|
||||
return type.__new__(cls, name, bases, attrs)
|
||||
|
||||
|
||||
class ZshScriptTestMixin(metaclass=ZshScriptTestMeta):
|
||||
"""
|
||||
Integration test helper to show that C{usage.Options} classes can have zsh
|
||||
completion functions generated for them without raising errors.
|
||||
|
||||
In your subclasses set a class variable like so::
|
||||
|
||||
# | cmd name | Fully Qualified Python Name of Options class |
|
||||
#
|
||||
generateFor = [('conch', 'twisted.conch.scripts.conch.ClientOptions'),
|
||||
('twistd', 'twisted.scripts.twistd.ServerOptions'),
|
||||
]
|
||||
|
||||
Each package that contains Twisted scripts should contain one TestCase
|
||||
subclass which also inherits from this mixin, and contains a C{generateFor}
|
||||
list appropriate for the scripts in that package.
|
||||
"""
|
||||
|
||||
|
||||
def test_genZshFunction(self, cmdName, optionsFQPN):
|
||||
"""
|
||||
        Generate completion functions for the given Twisted command; no errors
        should be raised.
|
||||
|
||||
@type cmdName: C{str}
|
||||
@param cmdName: The name of the command-line utility e.g. 'twistd'
|
||||
|
||||
@type optionsFQPN: C{str}
|
||||
@param optionsFQPN: The Fully Qualified Python Name of the C{Options}
|
||||
class to be tested.
|
||||
"""
|
||||
outputFile = BytesIO()
|
||||
self.patch(usage.Options, "_shellCompFile", outputFile)
|
||||
|
||||
# some scripts won't import or instantiate because of missing
|
||||
# dependencies (pyOpenSSL, etc) so we have to skip them.
|
||||
try:
|
||||
o = reflect.namedAny(optionsFQPN)()
|
||||
except Exception as e:
|
||||
raise unittest.SkipTest(
|
||||
"Couldn't import or instantiate " "Options class: %s" % (e,)
|
||||
)
|
||||
|
||||
try:
|
||||
o.parseOptions(["", "--_shell-completion", "zsh:2"])
|
||||
except ImportError as e:
|
||||
            # This can happen for commands which don't have all the necessary
            # dependencies installed; skip the test.
|
||||
            raise unittest.SkipTest("ImportError calling parseOptions(): %s" % (e,))
|
||||
except SystemExit:
|
||||
pass # expected
|
||||
else:
|
||||
self.fail("SystemExit not raised")
|
||||
outputFile.seek(0)
|
||||
# test that we got some output
|
||||
self.assertEqual(1, len(outputFile.read(1)))
|
||||
outputFile.seek(0)
|
||||
outputFile.truncate()
|
||||
|
||||
# now, if it has sub commands, we have to test those too
|
||||
if hasattr(o, "subCommands"):
|
||||
for cmd, short, parser, doc in o.subCommands:
|
||||
try:
|
||||
o.parseOptions([cmd, "", "--_shell-completion", "zsh:3"])
|
||||
except ImportError as e:
|
||||
                    # This can happen for commands which don't have all the
                    # necessary dependencies installed; skip the test.
|
||||
                    raise unittest.SkipTest(
                        "ImportError calling parseOptions() on subcommand: %s" % (e,)
                    )
|
||||
except SystemExit:
|
||||
pass # expected
|
||||
else:
|
||||
self.fail("SystemExit not raised")
|
||||
|
||||
outputFile.seek(0)
|
||||
# test that we got some output
|
||||
self.assertEqual(1, len(outputFile.read(1)))
|
||||
outputFile.seek(0)
|
||||
outputFile.truncate()
|
||||
|
||||
# flushed because we don't want DeprecationWarnings to be printed when
|
||||
# running these test cases.
|
||||
self.flushWarnings()
|
||||
|
||||
|
||||
class ZshTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for zsh completion code
|
||||
"""
|
||||
|
||||
def test_accumulateMetadata(self):
|
||||
"""
|
||||
        C{compData} attributes placed on Options classes are picked up
        correctly.
|
||||
"""
|
||||
opts = FighterAceExtendedOptions()
|
||||
ag = _shellcomp.ZshArgumentsGenerator(opts, "ace", BytesIO())
|
||||
|
||||
descriptions = FighterAceOptions.compData.descriptions.copy()
|
||||
descriptions.update(FighterAceExtendedOptions.compData.descriptions)
|
||||
|
||||
self.assertEqual(ag.descriptions, descriptions)
|
||||
self.assertEqual(ag.multiUse, set(FighterAceOptions.compData.multiUse))
|
||||
self.assertEqual(
|
||||
ag.mutuallyExclusive, FighterAceOptions.compData.mutuallyExclusive
|
||||
)
|
||||
|
||||
optActions = FighterAceOptions.compData.optActions.copy()
|
||||
optActions.update(FighterAceExtendedOptions.compData.optActions)
|
||||
self.assertEqual(ag.optActions, optActions)
|
||||
|
||||
self.assertEqual(ag.extraActions, FighterAceOptions.compData.extraActions)
|
||||
|
||||
def test_mutuallyExclusiveCornerCase(self):
|
||||
"""
|
||||
Exercise a corner-case of ZshArgumentsGenerator.makeExcludesDict()
|
||||
where the long option name already exists in the `excludes` dict being
|
||||
built.
|
||||
"""
|
||||
|
||||
class OddFighterAceOptions(FighterAceExtendedOptions):
|
||||
# since "fokker", etc, are already defined as mutually-
|
||||
# exclusive on the super-class, defining them again here forces
|
||||
# the corner-case to be exercised.
|
||||
optFlags = [
|
||||
["anatra", None, "Select the Anatra DS as your dogfighter aircraft"]
|
||||
]
|
||||
compData = Completions(
|
||||
mutuallyExclusive=[["anatra", "fokker", "albatros", "spad", "bristol"]]
|
||||
)
|
||||
|
||||
opts = OddFighterAceOptions()
|
||||
ag = _shellcomp.ZshArgumentsGenerator(opts, "ace", BytesIO())
|
||||
|
||||
expected = {
|
||||
"albatros": {"anatra", "b", "bristol", "f", "fokker", "s", "spad"},
|
||||
"anatra": {"a", "albatros", "b", "bristol", "f", "fokker", "s", "spad"},
|
||||
"bristol": {"a", "albatros", "anatra", "f", "fokker", "s", "spad"},
|
||||
"fokker": {"a", "albatros", "anatra", "b", "bristol", "s", "spad"},
|
||||
"spad": {"a", "albatros", "anatra", "b", "bristol", "f", "fokker"},
|
||||
}
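        # A rough reading of the literal above (an editorial gloss, assuming the
        # generator's usual behaviour): each long option name maps to the set of
        # long names and single-letter aliases that zsh should stop offering
        # once that option is present on the command line.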
|
||||
|
||||
self.assertEqual(ag.excludes, expected)
|
||||
|
||||
def test_accumulateAdditionalOptions(self):
|
||||
"""
|
||||
We pick up options that are only defined by having an
|
||||
appropriately named method on your Options class,
|
||||
e.g. def opt_foo(self, foo)
|
||||
"""
|
||||
opts = FighterAceExtendedOptions()
|
||||
ag = _shellcomp.ZshArgumentsGenerator(opts, "ace", BytesIO())
|
||||
|
||||
self.assertIn("nocrash", ag.flagNameToDefinition)
|
||||
self.assertIn("nocrash", ag.allOptionsNameToDefinition)
|
||||
|
||||
self.assertIn("difficulty", ag.paramNameToDefinition)
|
||||
self.assertIn("difficulty", ag.allOptionsNameToDefinition)
|
||||
|
||||
def test_verifyZshNames(self):
|
||||
"""
|
||||
Using a parameter/flag name that doesn't exist
|
||||
will raise an error
|
||||
"""
|
||||
|
||||
class TmpOptions(FighterAceExtendedOptions):
|
||||
            # Note the deliberate typo of "detail"
|
||||
compData = Completions(optActions={"detaill": None})
|
||||
|
||||
self.assertRaises(
|
||||
ValueError, _shellcomp.ZshArgumentsGenerator, TmpOptions(), "ace", BytesIO()
|
||||
)
|
||||
|
||||
class TmpOptions2(FighterAceExtendedOptions):
|
||||
# Note that 'foo' and 'bar' are not real option
|
||||
# names defined in this class
|
||||
compData = Completions(mutuallyExclusive=[("foo", "bar")])
|
||||
|
||||
self.assertRaises(
|
||||
ValueError,
|
||||
_shellcomp.ZshArgumentsGenerator,
|
||||
TmpOptions2(),
|
||||
"ace",
|
||||
BytesIO(),
|
||||
)
|
||||
|
||||
def test_zshCode(self):
|
||||
"""
|
||||
Generate a completion function, and test the textual output
|
||||
against a known correct output
|
||||
"""
|
||||
outputFile = BytesIO()
|
||||
self.patch(usage.Options, "_shellCompFile", outputFile)
|
||||
self.patch(sys, "argv", ["silly", "", "--_shell-completion", "zsh:2"])
|
||||
opts = SimpleProgOptions()
|
||||
self.assertRaises(SystemExit, opts.parseOptions)
|
||||
self.assertEqual(testOutput1, outputFile.getvalue())
|
||||
|
||||
def test_zshCodeWithSubs(self):
|
||||
"""
|
||||
Generate a completion function with subcommands,
|
||||
and test the textual output against a known correct output
|
||||
"""
|
||||
outputFile = BytesIO()
|
||||
self.patch(usage.Options, "_shellCompFile", outputFile)
|
||||
self.patch(sys, "argv", ["silly2", "", "--_shell-completion", "zsh:2"])
|
||||
opts = SimpleProgWithSubcommands()
|
||||
self.assertRaises(SystemExit, opts.parseOptions)
|
||||
self.assertEqual(testOutput2, outputFile.getvalue())
|
||||
|
||||
def test_incompleteCommandLine(self):
|
||||
"""
|
||||
Completion still happens even if a command-line is given
|
||||
that would normally throw UsageError.
|
||||
"""
|
||||
outputFile = BytesIO()
|
||||
self.patch(usage.Options, "_shellCompFile", outputFile)
|
||||
opts = FighterAceOptions()
|
||||
|
||||
self.assertRaises(
|
||||
SystemExit,
|
||||
opts.parseOptions,
|
||||
[
|
||||
"--fokker",
|
||||
"server",
|
||||
"--unknown-option",
|
||||
"--unknown-option2",
|
||||
"--_shell-completion",
|
||||
"zsh:5",
|
||||
],
|
||||
)
|
||||
outputFile.seek(0)
|
||||
# test that we got some output
|
||||
self.assertEqual(1, len(outputFile.read(1)))
|
||||
|
||||
def test_incompleteCommandLine_case2(self):
|
||||
"""
|
||||
Completion still happens even if a command-line is given
|
||||
that would normally throw UsageError.
|
||||
|
||||
The existence of --unknown-option prior to the subcommand
|
||||
will break subcommand detection... but we complete anyway
|
||||
"""
|
||||
outputFile = BytesIO()
|
||||
self.patch(usage.Options, "_shellCompFile", outputFile)
|
||||
opts = FighterAceOptions()
|
||||
|
||||
self.assertRaises(
|
||||
SystemExit,
|
||||
opts.parseOptions,
|
||||
[
|
||||
"--fokker",
|
||||
"--unknown-option",
|
||||
"server",
|
||||
"--list-server",
|
||||
"--_shell-completion",
|
||||
"zsh:5",
|
||||
],
|
||||
)
|
||||
outputFile.seek(0)
|
||||
# test that we got some output
|
||||
self.assertEqual(1, len(outputFile.read(1)))
|
||||
|
||||
outputFile.seek(0)
|
||||
outputFile.truncate()
|
||||
|
||||
def test_incompleteCommandLine_case3(self):
|
||||
"""
|
||||
Completion still happens even if a command-line is given
|
||||
that would normally throw UsageError.
|
||||
|
||||
Break subcommand detection in a different way by providing
|
||||
an invalid subcommand name.
|
||||
"""
|
||||
outputFile = BytesIO()
|
||||
self.patch(usage.Options, "_shellCompFile", outputFile)
|
||||
opts = FighterAceOptions()
|
||||
|
||||
self.assertRaises(
|
||||
SystemExit,
|
||||
opts.parseOptions,
|
||||
[
|
||||
"--fokker",
|
||||
"unknown-subcommand",
|
||||
"--list-server",
|
||||
"--_shell-completion",
|
||||
"zsh:4",
|
||||
],
|
||||
)
|
||||
outputFile.seek(0)
|
||||
# test that we got some output
|
||||
self.assertEqual(1, len(outputFile.read(1)))
|
||||
|
||||
def test_skipSubcommandList(self):
|
||||
"""
|
||||
Ensure the optimization which skips building the subcommand list
|
||||
under certain conditions isn't broken.
|
||||
"""
|
||||
outputFile = BytesIO()
|
||||
self.patch(usage.Options, "_shellCompFile", outputFile)
|
||||
opts = FighterAceOptions()
|
||||
|
||||
self.assertRaises(
|
||||
SystemExit, opts.parseOptions, ["--alba", "--_shell-completion", "zsh:2"]
|
||||
)
|
||||
outputFile.seek(0)
|
||||
# test that we got some output
|
||||
self.assertEqual(1, len(outputFile.read(1)))
|
||||
|
||||
def test_poorlyDescribedOptMethod(self):
|
||||
"""
|
||||
        Test the corner case of fetching an option description from a method
        docstring.
|
||||
"""
|
||||
opts = FighterAceOptions()
|
||||
argGen = _shellcomp.ZshArgumentsGenerator(opts, "ace", None)
|
||||
|
||||
descr = argGen.getDescription("silly")
|
||||
|
||||
# docstring for opt_silly is useless so it should just use the
|
||||
# option name as the description
|
||||
self.assertEqual(descr, "silly")
|
||||
|
||||
def test_brokenActions(self):
|
||||
"""
|
||||
A C{Completer} with repeat=True may only be used as the
|
||||
last item in the extraActions list.
|
||||
"""
|
||||
|
||||
class BrokenActions(usage.Options):
|
||||
compData = usage.Completions(
|
||||
extraActions=[usage.Completer(repeat=True), usage.Completer()]
|
||||
)
|
||||
|
||||
outputFile = BytesIO()
|
||||
opts = BrokenActions()
|
||||
self.patch(opts, "_shellCompFile", outputFile)
|
||||
self.assertRaises(
|
||||
ValueError, opts.parseOptions, ["", "--_shell-completion", "zsh:2"]
|
||||
)
|
||||
|
||||
def test_optMethodsDontOverride(self):
|
||||
"""
|
||||
opt_* methods on Options classes should not override the
|
||||
data provided in optFlags or optParameters.
|
||||
"""
|
||||
|
||||
class Options(usage.Options):
|
||||
optFlags = [["flag", "f", "A flag"]]
|
||||
optParameters = [["param", "p", None, "A param"]]
|
||||
|
||||
def opt_flag(self):
|
||||
"""junk description"""
|
||||
|
||||
def opt_param(self, param):
|
||||
"""junk description"""
|
||||
|
||||
opts = Options()
|
||||
argGen = _shellcomp.ZshArgumentsGenerator(opts, "ace", None)
|
||||
|
||||
self.assertEqual(argGen.getDescription("flag"), "A flag")
|
||||
self.assertEqual(argGen.getDescription("param"), "A param")
|
||||
|
||||
|
||||
class EscapeTests(unittest.TestCase):
|
||||
def test_escape(self):
|
||||
"""
|
||||
Verify _shellcomp.escape() function
|
||||
"""
|
||||
esc = _shellcomp.escape
|
||||
|
||||
test = "$"
|
||||
self.assertEqual(esc(test), "'$'")
|
||||
|
||||
test = "A--'$\"\\`--B"
|
||||
self.assertEqual(esc(test), '"A--\'\\$\\"\\\\\\`--B"')
|
||||
|
||||
|
||||
class CompleterNotImplementedTests(unittest.TestCase):
|
||||
"""
|
||||
Test that using an unknown shell constant with SubcommandAction
|
||||
raises NotImplementedError
|
||||
|
||||
The other Completer() subclasses are tested in test_usage.py
|
||||
"""
|
||||
|
||||
def test_unknownShell(self):
|
||||
"""
|
||||
Using an unknown shellType should raise NotImplementedError
|
||||
"""
|
||||
action = _shellcomp.SubcommandAction()
|
||||
|
||||
self.assertRaises(
|
||||
NotImplementedError, action._shellCode, None, "bad_shell_type"
|
||||
)
|
||||
|
||||
|
||||
class FighterAceServerOptions(usage.Options):
|
||||
"""
|
||||
Options for FighterAce 'server' subcommand
|
||||
"""
|
||||
|
||||
optFlags = [
|
||||
["list-server", None, "List this server with the online FighterAce network"]
|
||||
]
|
||||
optParameters = [
|
||||
[
|
||||
"packets-per-second",
|
||||
None,
|
||||
"Number of update packets to send per second",
|
||||
"20",
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
class FighterAceOptions(usage.Options):
|
||||
"""
|
||||
Command-line options for an imaginary `Fighter Ace` game
|
||||
"""
|
||||
|
||||
optFlags: List[List[Optional[str]]] = [
|
||||
["fokker", "f", "Select the Fokker Dr.I as your dogfighter aircraft"],
|
||||
["albatros", "a", "Select the Albatros D-III as your dogfighter aircraft"],
|
||||
["spad", "s", "Select the SPAD S.VII as your dogfighter aircraft"],
|
||||
["bristol", "b", "Select the Bristol Scout as your dogfighter aircraft"],
|
||||
["physics", "p", "Enable secret Twisted physics engine"],
|
||||
["jam", "j", "Enable a small chance that your machine guns will jam!"],
|
||||
["verbose", "v", "Verbose logging (may be specified more than once)"],
|
||||
]
|
||||
|
||||
optParameters: List[List[Optional[str]]] = [
|
||||
["pilot-name", None, "What's your name, Ace?", "Manfred von Richthofen"],
|
||||
["detail", "d", "Select the level of rendering detail (1-5)", "3"],
|
||||
]
|
||||
|
||||
subCommands = [
|
||||
["server", None, FighterAceServerOptions, "Start FighterAce game-server."],
|
||||
]
|
||||
|
||||
compData = Completions(
|
||||
descriptions={"physics": "Twisted-Physics", "detail": "Rendering detail level"},
|
||||
multiUse=["verbose"],
|
||||
mutuallyExclusive=[["fokker", "albatros", "spad", "bristol"]],
|
||||
        optActions={"detail": CompleteList(["1", "2", "3", "4", "5"])},
|
||||
extraActions=[CompleteFiles(descr="saved game file to load")],
|
||||
)
|
||||
|
||||
def opt_silly(self):
|
||||
# A silly option which nobody can explain
|
||||
""" """
|
||||
|
||||
|
||||
class FighterAceExtendedOptions(FighterAceOptions):
|
||||
"""
|
||||
Extend the options and zsh metadata provided by FighterAceOptions.
|
||||
_shellcomp must accumulate options and metadata from all classes in the
|
||||
    hierarchy so this is important to test.
|
||||
"""
|
||||
|
||||
optFlags = [["no-stalls", None, "Turn off the ability to stall your aircraft"]]
|
||||
optParameters = [
|
||||
["reality-level", None, "Select the level of physics reality (1-5)", "5"]
|
||||
]
|
||||
|
||||
compData = Completions(
|
||||
descriptions={"no-stalls": "Can't stall your plane"},
|
||||
optActions={"reality-level": Completer(descr="Physics reality level")},
|
||||
)
|
||||
|
||||
def opt_nocrash(self):
|
||||
"""
|
||||
Select that you can't crash your plane
|
||||
"""
|
||||
|
||||
def opt_difficulty(self, difficulty):
|
||||
"""
|
||||
How tough are you? (1-10)
|
||||
"""
|
||||
|
||||
|
||||
def _accuracyAction():
|
||||
# add tick marks just to exercise quoting
|
||||
return CompleteList(["1", "2", "3"], descr="Accuracy'`?")
|
||||
|
||||
|
||||
class SimpleProgOptions(usage.Options):
|
||||
"""
|
||||
Command-line options for a `Silly` imaginary program
|
||||
"""
|
||||
|
||||
optFlags = [
|
||||
["color", "c", "Turn on color output"],
|
||||
["gray", "g", "Turn on gray-scale output"],
|
||||
["verbose", "v", "Verbose logging (may be specified more than once)"],
|
||||
]
|
||||
|
||||
optParameters = [
|
||||
["optimization", None, "5", "Select the level of optimization (1-5)"],
|
||||
["accuracy", "a", "3", "Select the level of accuracy (1-3)"],
|
||||
]
|
||||
|
||||
compData = Completions(
|
||||
descriptions={"color": "Color on", "optimization": "Optimization level"},
|
||||
multiUse=["verbose"],
|
||||
mutuallyExclusive=[["color", "gray"]],
|
||||
optActions={
|
||||
"optimization": CompleteList(
|
||||
["1", "2", "3", "4", "5"], descr="Optimization?"
|
||||
),
|
||||
"accuracy": _accuracyAction,
|
||||
},
|
||||
extraActions=[CompleteFiles(descr="output file")],
|
||||
)
|
||||
|
||||
def opt_X(self):
|
||||
"""
|
||||
usage.Options does not recognize single-letter opt_ methods
|
||||
"""
|
||||
|
||||
|
||||
class SimpleProgSub1(usage.Options):
|
||||
optFlags = [["sub-opt", "s", "Sub Opt One"]]
|
||||
|
||||
|
||||
class SimpleProgSub2(usage.Options):
|
||||
optFlags = [["sub-opt", "s", "Sub Opt Two"]]
|
||||
|
||||
|
||||
class SimpleProgWithSubcommands(SimpleProgOptions):
|
||||
optFlags = [["some-option"], ["other-option", "o"]]
|
||||
|
||||
optParameters = [
|
||||
["some-param"],
|
||||
["other-param", "p"],
|
||||
["another-param", "P", "Yet Another Param"],
|
||||
]
|
||||
|
||||
subCommands = [
|
||||
["sub1", None, SimpleProgSub1, "Sub Command 1"],
|
||||
["sub2", None, SimpleProgSub2, "Sub Command 2"],
|
||||
]
|
||||
|
||||
|
||||
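# A short gloss on the expected zsh output defined below, based on zsh's
# documented _arguments syntax rather than on anything in this file: each line
# is a spec of the form '(excluded)option[description]:message:action'. The
# parenthesised list names the options zsh stops offering once this option is
# on the command line (how the --color/--gray exclusion is expressed), and a
# leading '*' marks a repeatable option such as -v/--verbose.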
testOutput1 = b"""#compdef silly
|
||||
|
||||
_arguments -s -A "-*" \\
|
||||
':output file (*):_files -g "*"' \\
|
||||
"(--accuracy)-a[Select the level of accuracy (1-3)]:Accuracy'\\`?:(1 2 3)" \\
|
||||
"(-a)--accuracy=[Select the level of accuracy (1-3)]:Accuracy'\\`?:(1 2 3)" \\
|
||||
'(--color --gray -g)-c[Color on]' \\
|
||||
'(--gray -c -g)--color[Color on]' \\
|
||||
'(--color --gray -c)-g[Turn on gray-scale output]' \\
|
||||
'(--color -c -g)--gray[Turn on gray-scale output]' \\
|
||||
'--help[Display this help and exit.]' \\
|
||||
'--optimization=[Optimization level]:Optimization?:(1 2 3 4 5)' \\
|
||||
'*-v[Verbose logging (may be specified more than once)]' \\
|
||||
'*--verbose[Verbose logging (may be specified more than once)]' \\
|
||||
'--version[Display Twisted version and exit.]' \\
|
||||
&& return 0
|
||||
"""
|
||||
|
||||
# with sub-commands
|
||||
testOutput2 = b"""#compdef silly2
|
||||
|
||||
_arguments -s -A "-*" \\
|
||||
'*::subcmd:->subcmd' \\
|
||||
':output file (*):_files -g "*"' \\
|
||||
"(--accuracy)-a[Select the level of accuracy (1-3)]:Accuracy'\\`?:(1 2 3)" \\
|
||||
"(-a)--accuracy=[Select the level of accuracy (1-3)]:Accuracy'\\`?:(1 2 3)" \\
|
||||
'(--another-param)-P[another-param]:another-param:_files' \\
|
||||
'(-P)--another-param=[another-param]:another-param:_files' \\
|
||||
'(--color --gray -g)-c[Color on]' \\
|
||||
'(--gray -c -g)--color[Color on]' \\
|
||||
'(--color --gray -c)-g[Turn on gray-scale output]' \\
|
||||
'(--color -c -g)--gray[Turn on gray-scale output]' \\
|
||||
'--help[Display this help and exit.]' \\
|
||||
'--optimization=[Optimization level]:Optimization?:(1 2 3 4 5)' \\
|
||||
'(--other-option)-o[other-option]' \\
|
||||
'(-o)--other-option[other-option]' \\
|
||||
'(--other-param)-p[other-param]:other-param:_files' \\
|
||||
'(-p)--other-param=[other-param]:other-param:_files' \\
|
||||
'--some-option[some-option]' \\
|
||||
'--some-param=[some-param]:some-param:_files' \\
|
||||
'*-v[Verbose logging (may be specified more than once)]' \\
|
||||
'*--verbose[Verbose logging (may be specified more than once)]' \\
|
||||
'--version[Display Twisted version and exit.]' \\
|
||||
&& return 0
|
||||
local _zsh_subcmds_array
|
||||
_zsh_subcmds_array=(
|
||||
"sub1:Sub Command 1"
|
||||
"sub2:Sub Command 2"
|
||||
)
|
||||
|
||||
_describe "sub-command" _zsh_subcmds_array
|
||||
"""
|
||||
@@ -0,0 +1,158 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
from twisted.python.failure import Failure
|
||||
from twisted.trial.unittest import TestCase
|
||||
|
||||
try:
|
||||
import syslog as _stdsyslog
|
||||
except ImportError:
|
||||
stdsyslog = None
|
||||
else:
|
||||
stdsyslog = _stdsyslog
|
||||
from twisted.python import syslog
|
||||
|
||||
|
||||
class SyslogObserverTests(TestCase):
|
||||
"""
|
||||
Tests for L{SyslogObserver} which sends Twisted log events to the syslog.
|
||||
"""
|
||||
|
||||
events = None
|
||||
|
||||
if stdsyslog is None:
|
||||
skip = "syslog is not supported on this platform"
|
||||
|
||||
def setUp(self):
|
||||
self.patch(syslog.SyslogObserver, "openlog", self.openlog)
|
||||
self.patch(syslog.SyslogObserver, "syslog", self.syslog)
|
||||
self.observer = syslog.SyslogObserver("SyslogObserverTests")
|
||||
|
||||
def openlog(self, prefix, options, facility):
|
||||
self.logOpened = (prefix, options, facility)
|
||||
self.events = []
|
||||
|
||||
def syslog(self, options, message):
|
||||
self.events.append((options, message))
|
||||
|
||||
def test_emitWithoutMessage(self):
|
||||
"""
|
||||
L{SyslogObserver.emit} ignores events with an empty value for the
|
||||
C{'message'} key.
|
||||
"""
|
||||
self.observer.emit({"message": (), "isError": False, "system": "-"})
|
||||
self.assertEqual(self.events, [])
|
||||
|
||||
def test_emitCustomPriority(self):
|
||||
"""
|
||||
L{SyslogObserver.emit} uses the value of the C{'syslogPriority'} as the
|
||||
syslog priority, if that key is present in the event dictionary.
|
||||
"""
|
||||
self.observer.emit(
|
||||
{
|
||||
"message": ("hello, world",),
|
||||
"isError": False,
|
||||
"system": "-",
|
||||
"syslogPriority": stdsyslog.LOG_DEBUG,
|
||||
}
|
||||
)
|
||||
self.assertEqual(self.events, [(stdsyslog.LOG_DEBUG, "[-] hello, world")])
|
||||
|
||||
def test_emitErrorPriority(self):
|
||||
"""
|
||||
L{SyslogObserver.emit} uses C{LOG_ALERT} if the event represents an
|
||||
error.
|
||||
"""
|
||||
self.observer.emit(
|
||||
{
|
||||
"message": ("hello, world",),
|
||||
"isError": True,
|
||||
"system": "-",
|
||||
"failure": Failure(Exception("foo")),
|
||||
}
|
||||
)
|
||||
self.assertEqual(self.events, [(stdsyslog.LOG_ALERT, "[-] hello, world")])
|
||||
|
||||
def test_emitCustomPriorityOverridesError(self):
|
||||
"""
|
||||
L{SyslogObserver.emit} uses the value of the C{'syslogPriority'} key if
|
||||
it is specified even if the event dictionary represents an error.
|
||||
"""
|
||||
self.observer.emit(
|
||||
{
|
||||
"message": ("hello, world",),
|
||||
"isError": True,
|
||||
"system": "-",
|
||||
"syslogPriority": stdsyslog.LOG_NOTICE,
|
||||
"failure": Failure(Exception("bar")),
|
||||
}
|
||||
)
|
||||
self.assertEqual(self.events, [(stdsyslog.LOG_NOTICE, "[-] hello, world")])
|
||||
|
||||
def test_emitCustomFacility(self):
|
||||
"""
|
||||
        L{SyslogObserver.emit} uses the value of the C{'syslogFacility'} key as
        the syslog facility, if that key is present in the event dictionary.
|
||||
"""
|
||||
self.observer.emit(
|
||||
{
|
||||
"message": ("hello, world",),
|
||||
"isError": False,
|
||||
"system": "-",
|
||||
"syslogFacility": stdsyslog.LOG_CRON,
|
||||
}
|
||||
)
|
||||
self.assertEqual(
|
||||
self.events, [(stdsyslog.LOG_INFO | stdsyslog.LOG_CRON, "[-] hello, world")]
|
||||
)
|
||||
|
||||
def test_emitCustomSystem(self):
|
||||
"""
|
||||
L{SyslogObserver.emit} uses the value of the C{'system'} key to prefix
|
||||
the logged message.
|
||||
"""
|
||||
self.observer.emit(
|
||||
{
|
||||
"message": ("hello, world",),
|
||||
"isError": False,
|
||||
"system": "nonDefaultSystem",
|
||||
}
|
||||
)
|
||||
self.assertEqual(
|
||||
self.events, [(stdsyslog.LOG_INFO, "[nonDefaultSystem] hello, world")]
|
||||
)
|
||||
|
||||
def test_emitMessage(self):
|
||||
"""
|
||||
L{SyslogObserver.emit} logs the value of the C{'message'} key of the
|
||||
event dictionary it is passed to the syslog.
|
||||
"""
|
||||
self.observer.emit(
|
||||
{"message": ("hello, world",), "isError": False, "system": "-"}
|
||||
)
|
||||
self.assertEqual(self.events, [(stdsyslog.LOG_INFO, "[-] hello, world")])
|
||||
|
||||
def test_emitMultilineMessage(self):
|
||||
"""
|
||||
Each line of a multiline message is emitted separately to the syslog.
|
||||
"""
|
||||
self.observer.emit(
|
||||
{"message": ("hello,\nworld",), "isError": False, "system": "-"}
|
||||
)
|
||||
self.assertEqual(
|
||||
self.events,
|
||||
[(stdsyslog.LOG_INFO, "[-] hello,"), (stdsyslog.LOG_INFO, "[-] \tworld")],
|
||||
)
|
||||
|
||||
def test_emitStripsTrailingEmptyLines(self):
|
||||
"""
|
||||
Trailing empty lines of a multiline message are omitted from the
|
||||
messages sent to the syslog.
|
||||
"""
|
||||
self.observer.emit(
|
||||
{"message": ("hello,\nworld\n\n",), "isError": False, "system": "-"}
|
||||
)
|
||||
self.assertEqual(
|
||||
self.events,
|
||||
[(stdsyslog.LOG_INFO, "[-] hello,"), (stdsyslog.LOG_INFO, "[-] \tworld")],
|
||||
)
|
||||
@@ -0,0 +1,180 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.systemd}.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
from typing import Dict, Mapping, Sequence
|
||||
|
||||
from hamcrest import assert_that, equal_to, not_
|
||||
from hypothesis import given
|
||||
from hypothesis.strategies import dictionaries, integers, lists
|
||||
|
||||
from twisted.python.systemd import ListenFDs
|
||||
from twisted.trial.unittest import SynchronousTestCase
|
||||
from .strategies import systemdDescriptorNames
|
||||
|
||||
|
||||
def buildEnvironment(count: int, pid: object) -> Dict[str, str]:
|
||||
"""
|
||||
@param count: The number of file descriptors to indicate as inherited.
|
||||
|
||||
@param pid: The pid of the inheriting process to indicate.
|
||||
|
||||
@return: A copy of the current process environment with the I{systemd}
|
||||
file descriptor inheritance-related environment variables added to it.
|
||||
"""
|
||||
    result = os.environ.copy()
|
||||
result["LISTEN_FDS"] = str(count)
|
||||
result["LISTEN_FDNAMES"] = ":".join([f"{n}.socket" for n in range(count)])
|
||||
result["LISTEN_PID"] = str(pid)
|
||||
return result
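# As a concrete illustration derived from the code above: buildEnvironment(2, 1234)
# returns a copy of os.environ extended with LISTEN_FDS="2",
# LISTEN_FDNAMES="0.socket:1.socket" and LISTEN_PID="1234", i.e. the variables
# systemd sets for a socket-activated service.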
|
||||
|
||||
|
||||
class ListenFDsTests(SynchronousTestCase):
|
||||
"""
|
||||
Apply tests to L{ListenFDs}, constructed based on an environment dictionary.
|
||||
"""
|
||||
|
||||
@given(lists(systemdDescriptorNames(), min_size=0, max_size=10))
|
||||
def test_fromEnvironmentEquivalence(self, names: Sequence[str]) -> None:
|
||||
"""
|
||||
The L{ListenFDs} and L{ListenFDs.fromEnvironment} constructors are
|
||||
equivalent for their respective representations of the same
|
||||
information.
|
||||
|
||||
@param names: The names of the file descriptors to represent as
|
||||
inherited in the test environment given to the parser. The number
|
||||
of descriptors represented will equal the length of this list.
|
||||
"""
|
||||
numFDs = len(names)
|
||||
descriptors = list(range(ListenFDs._START, ListenFDs._START + numFDs))
|
||||
fds = ListenFDs.fromEnvironment(
|
||||
{
|
||||
"LISTEN_PID": str(os.getpid()),
|
||||
"LISTEN_FDS": str(numFDs),
|
||||
"LISTEN_FDNAMES": ":".join(names),
|
||||
}
|
||||
)
|
||||
assert_that(fds, equal_to(ListenFDs(descriptors, tuple(names))))
|
||||
|
||||
def test_defaultEnviron(self) -> None:
|
||||
"""
|
||||
If the process environment is not explicitly passed to
|
||||
L{ListenFDs.fromEnvironment}, the real process environment dictionary
|
||||
is used.
|
||||
"""
|
||||
self.patch(os, "environ", buildEnvironment(5, os.getpid()))
|
||||
sddaemon = ListenFDs.fromEnvironment()
|
||||
self.assertEqual(list(range(3, 3 + 5)), sddaemon.inheritedDescriptors())
|
||||
|
||||
def test_secondEnvironment(self) -> None:
|
||||
"""
|
||||
L{ListenFDs.fromEnvironment} removes information about the
|
||||
inherited file descriptors from the environment mapping so that the
|
||||
same inherited file descriptors cannot be handled repeatedly from
|
||||
multiple L{ListenFDs} instances.
|
||||
"""
|
||||
env = buildEnvironment(3, os.getpid())
|
||||
first = ListenFDs.fromEnvironment(environ=env)
|
||||
second = ListenFDs.fromEnvironment(environ=env)
|
||||
self.assertEqual(list(range(3, 6)), first.inheritedDescriptors())
|
||||
self.assertEqual([], second.inheritedDescriptors())
|
||||
|
||||
def test_mismatchedPID(self) -> None:
|
||||
"""
|
||||
If the current process PID does not match the PID in the
|
||||
environment then the systemd variables in the environment were set for
|
||||
a different process (perhaps our parent) and the inherited descriptors
|
||||
are not intended for this process so L{ListenFDs.inheritedDescriptors}
|
||||
returns an empty list.
|
||||
"""
|
||||
env = buildEnvironment(3, os.getpid() + 1)
|
||||
sddaemon = ListenFDs.fromEnvironment(environ=env)
|
||||
self.assertEqual([], sddaemon.inheritedDescriptors())
|
||||
|
||||
def test_missingPIDVariable(self) -> None:
|
||||
"""
|
||||
If the I{LISTEN_PID} environment variable is not present then
|
||||
there is no clear indication that any file descriptors were inherited
|
||||
by this process so L{ListenFDs.inheritedDescriptors} returns an empty
|
||||
list.
|
||||
"""
|
||||
env = buildEnvironment(3, os.getpid())
|
||||
del env["LISTEN_PID"]
|
||||
sddaemon = ListenFDs.fromEnvironment(environ=env)
|
||||
self.assertEqual([], sddaemon.inheritedDescriptors())
|
||||
|
||||
def test_nonIntegerPIDVariable(self) -> None:
|
||||
"""
|
||||
If the I{LISTEN_PID} environment variable is set to a string that cannot
|
||||
be parsed as an integer, no inherited descriptors are reported.
|
||||
"""
|
||||
env = buildEnvironment(3, "hello, world")
|
||||
sddaemon = ListenFDs.fromEnvironment(environ=env)
|
||||
self.assertEqual([], sddaemon.inheritedDescriptors())
|
||||
|
||||
def test_missingFDSVariable(self) -> None:
|
||||
"""
|
||||
If the I{LISTEN_FDS} and I{LISTEN_FDNAMES} environment variables
|
||||
are not present, no inherited descriptors are reported.
|
||||
"""
|
||||
env = buildEnvironment(3, os.getpid())
|
||||
del env["LISTEN_FDS"]
|
||||
del env["LISTEN_FDNAMES"]
|
||||
sddaemon = ListenFDs.fromEnvironment(environ=env)
|
||||
self.assertEqual([], sddaemon.inheritedDescriptors())
|
||||
|
||||
def test_nonIntegerFDSVariable(self) -> None:
|
||||
"""
|
||||
If the I{LISTEN_FDS} environment variable is set to a string that cannot
|
||||
be parsed as an integer, no inherited descriptors are reported.
|
||||
"""
|
||||
env = buildEnvironment(3, os.getpid())
|
||||
env["LISTEN_FDS"] = "hello, world"
|
||||
sddaemon = ListenFDs.fromEnvironment(environ=env)
|
||||
self.assertEqual([], sddaemon.inheritedDescriptors())
|
||||
|
||||
@given(lists(integers(min_value=0, max_value=10), unique=True))
|
||||
def test_inheritedDescriptors(self, descriptors: Sequence[int]) -> None:
|
||||
"""
|
||||
L{ListenFDs.inheritedDescriptors} returns a copy of the inherited
|
||||
descriptors list.
|
||||
"""
|
||||
names = tuple(map(str, descriptors))
|
||||
fds = ListenFDs(descriptors, names)
|
||||
fdsCopy = fds.inheritedDescriptors()
|
||||
assert_that(descriptors, equal_to(fdsCopy))
|
||||
fdsCopy.append(1)
|
||||
assert_that(descriptors, not_(equal_to(fdsCopy)))
|
||||
|
||||
@given(dictionaries(systemdDescriptorNames(), integers(min_value=0), max_size=10))
|
||||
def test_inheritedNamedDescriptors(self, expected: Mapping[str, int]) -> None:
|
||||
"""
|
||||
L{ListenFDs.inheritedNamedDescriptors} returns a mapping from the
|
||||
descriptor names to their integer values, with items formed by
|
||||
pairwise combination of the input descriptors and names.
|
||||
"""
|
||||
items = list(expected.items())
|
||||
names = [name for name, _ in items]
|
||||
descriptors = [fd for _, fd in items]
|
||||
fds = ListenFDs(descriptors, names)
|
||||
assert_that(fds.inheritedNamedDescriptors(), equal_to(expected))
|
||||
|
||||
@given(lists(integers(min_value=0, max_value=10), unique=True))
|
||||
def test_repeated(self, descriptors: Sequence[int]) -> None:
|
||||
"""
|
||||
Any subsequent calls to C{inheritedDescriptors} and
|
||||
C{inheritedNamedDescriptors} return the same list.
|
||||
"""
|
||||
names = tuple(map(str, descriptors))
|
||||
sddaemon = ListenFDs(descriptors, names)
|
||||
self.assertEqual(
|
||||
sddaemon.inheritedDescriptors(), sddaemon.inheritedDescriptors()
|
||||
)
|
||||
self.assertEqual(
|
||||
sddaemon.inheritedNamedDescriptors(), sddaemon.inheritedNamedDescriptors()
|
||||
)
|
||||
@@ -0,0 +1,23 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.textattributes}.
|
||||
"""
|
||||
|
||||
from twisted.python._textattributes import DefaultFormattingState
|
||||
from twisted.trial import unittest
|
||||
|
||||
|
||||
class DefaultFormattingStateTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{twisted.python._textattributes.DefaultFormattingState}.
|
||||
"""
|
||||
|
||||
def test_equality(self) -> None:
|
||||
"""
|
||||
L{DefaultFormattingState}s are always equal to other
|
||||
L{DefaultFormattingState}s.
|
||||
"""
|
||||
self.assertEqual(DefaultFormattingState(), DefaultFormattingState())
|
||||
self.assertNotEqual(DefaultFormattingState(), "hello")
|
||||
@@ -0,0 +1,145 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python._tzhelper}.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from os import environ
|
||||
|
||||
try:
|
||||
from time import tzset as _tzset
|
||||
except ImportError:
|
||||
tzset = None
|
||||
else:
|
||||
tzset = _tzset
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from time import mktime as mktime_real
|
||||
|
||||
from twisted.python._tzhelper import FixedOffsetTimeZone
|
||||
from twisted.trial.unittest import SkipTest, TestCase
|
||||
|
||||
# On some rare platforms (FreeBSD 8? I was not able to reproduce
|
||||
# on FreeBSD 9) 'mktime' seems to always fail once tzset() has been
|
||||
# called more than once in a process lifetime. I think this is
|
||||
# just a platform bug, so let's work around it. -glyph
|
||||
|
||||
|
||||
def mktime(t9: tuple[int, int, int, int, int, int, int, int, int]) -> float:
|
||||
"""
|
||||
Call L{mktime_real}, and if it raises L{OverflowError}, catch it and raise
|
||||
SkipTest instead.
|
||||
|
||||
@param t9: A time as a 9-item tuple.
|
||||
@type t9: L{tuple}
|
||||
|
||||
@return: A timestamp.
|
||||
@rtype: L{float}
|
||||
"""
|
||||
try:
|
||||
return mktime_real(t9)
|
||||
except OverflowError:
|
||||
raise SkipTest(f"Platform cannot construct time zone for {t9!r}")
|
||||
|
||||
|
||||
def setTZ(name: str | None) -> None:
|
||||
"""
|
||||
Set time zone.
|
||||
|
||||
@param name: a time zone name
|
||||
@type name: L{str}
|
||||
"""
|
||||
if tzset is None:
|
||||
return
|
||||
|
||||
if name is None:
|
||||
try:
|
||||
del environ["TZ"]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
environ["TZ"] = name
|
||||
tzset()
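# The TZ values used by the tests below are POSIX TZ strings; for example,
# "EST+05EDT,M4.1.0,M10.5.0" describes a zone five hours west of UTC whose DST
# period ("EDT") runs from the first Sunday of April to the last Sunday of
# October. That reading follows the POSIX TZ format, not anything defined here.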
|
||||
|
||||
|
||||
def addTZCleanup(testCase: TestCase) -> None:
|
||||
"""
|
||||
    Add cleanup hooks to a test case to reset the timezone to its original
    value.
|
||||
|
||||
@param testCase: the test case to add the cleanup to.
|
||||
@type testCase: L{unittest.TestCase}
|
||||
"""
|
||||
tzIn = environ.get("TZ", None)
|
||||
|
||||
@testCase.addCleanup
|
||||
def resetTZ() -> None:
|
||||
setTZ(tzIn)
|
||||
|
||||
|
||||
class FixedOffsetTimeZoneTests(TestCase):
|
||||
"""
|
||||
Tests for L{FixedOffsetTimeZone}.
|
||||
"""
|
||||
|
||||
def test_tzinfo(self) -> None:
|
||||
"""
|
||||
Test that timezone attributes respect the timezone as set by the
|
||||
standard C{TZ} environment variable and L{tzset} API.
|
||||
"""
|
||||
if tzset is None:
|
||||
raise SkipTest("Platform cannot change timezone; unable to verify offsets.")
|
||||
|
||||
def testForTimeZone(
|
||||
name: str, expectedOffsetDST: str, expectedOffsetSTD: str
|
||||
) -> None:
|
||||
setTZ(name)
|
||||
|
||||
localDST = mktime((2006, 6, 30, 0, 0, 0, 4, 181, 1))
|
||||
localDSTdt = datetime.fromtimestamp(localDST)
|
||||
localSTD = mktime((2007, 1, 31, 0, 0, 0, 2, 31, 0))
|
||||
localSTDdt = datetime.fromtimestamp(localSTD)
|
||||
|
||||
tzDST = FixedOffsetTimeZone.fromLocalTimeStamp(localDST)
|
||||
tzSTD = FixedOffsetTimeZone.fromLocalTimeStamp(localSTD)
|
||||
|
||||
self.assertEqual(tzDST.tzname(localDSTdt), f"UTC{expectedOffsetDST}")
|
||||
self.assertEqual(tzSTD.tzname(localSTDdt), f"UTC{expectedOffsetSTD}")
|
||||
|
||||
self.assertEqual(tzDST.dst(localDSTdt), timedelta(0))
|
||||
self.assertEqual(tzSTD.dst(localSTDdt), timedelta(0))
|
||||
|
||||
def timeDeltaFromOffset(offset: str) -> timedelta:
|
||||
assert len(offset) == 5
|
||||
|
||||
sign = offset[0]
|
||||
hours = int(offset[1:3])
|
||||
minutes = int(offset[3:5])
|
||||
|
||||
if sign == "-":
|
||||
hours = -hours
|
||||
minutes = -minutes
|
||||
else:
|
||||
assert sign == "+"
|
||||
|
||||
return timedelta(hours=hours, minutes=minutes)
|
||||
|
||||
self.assertEqual(
|
||||
tzDST.utcoffset(localDSTdt), timeDeltaFromOffset(expectedOffsetDST)
|
||||
)
|
||||
self.assertEqual(
|
||||
tzSTD.utcoffset(localSTDdt), timeDeltaFromOffset(expectedOffsetSTD)
|
||||
)
|
||||
|
||||
addTZCleanup(self)
|
||||
|
||||
# UTC
|
||||
testForTimeZone("UTC+00", "+0000", "+0000")
|
||||
# West of UTC
|
||||
testForTimeZone("EST+05EDT,M4.1.0,M10.5.0", "-0400", "-0500")
|
||||
# East of UTC
|
||||
testForTimeZone("CEST-01CEDT,M4.1.0,M10.5.0", "+0200", "+0100")
|
||||
# No DST
|
||||
testForTimeZone("CST+06", "-0600", "-0600")
|
||||
@@ -0,0 +1,825 @@
|
||||
# -*- test-case-name: twisted.python.test.test_url -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.url}.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Iterable
|
||||
|
||||
from typing_extensions import Protocol
|
||||
|
||||
from twisted.trial.unittest import SynchronousTestCase
|
||||
from ..url import URL
|
||||
|
||||
theurl = "http://www.foo.com/a/nice/path/?zot=23&zut"
|
||||
|
||||
# Examples from RFC 3986 section 5.4, Reference Resolution Examples
|
||||
relativeLinkBaseForRFC3986 = "http://a/b/c/d;p?q"
|
||||
relativeLinkTestsForRFC3986 = [
|
||||
# "Normal"
|
||||
# ('g:h', 'g:h'), # Not supported: scheme with relative path
|
||||
("g", "http://a/b/c/g"),
|
||||
("./g", "http://a/b/c/g"),
|
||||
("g/", "http://a/b/c/g/"),
|
||||
("/g", "http://a/g"),
|
||||
("//g", "http://g"),
|
||||
("?y", "http://a/b/c/d;p?y"),
|
||||
("g?y", "http://a/b/c/g?y"),
|
||||
("#s", "http://a/b/c/d;p?q#s"),
|
||||
("g#s", "http://a/b/c/g#s"),
|
||||
("g?y#s", "http://a/b/c/g?y#s"),
|
||||
(";x", "http://a/b/c/;x"),
|
||||
("g;x", "http://a/b/c/g;x"),
|
||||
("g;x?y#s", "http://a/b/c/g;x?y#s"),
|
||||
("", "http://a/b/c/d;p?q"),
|
||||
(".", "http://a/b/c/"),
|
||||
("./", "http://a/b/c/"),
|
||||
("..", "http://a/b/"),
|
||||
("../", "http://a/b/"),
|
||||
("../g", "http://a/b/g"),
|
||||
("../..", "http://a/"),
|
||||
("../../", "http://a/"),
|
||||
("../../g", "http://a/g"),
|
||||
# Abnormal examples
|
||||
# ".." cannot be used to change the authority component of a URI.
|
||||
("../../../g", "http://a/g"),
|
||||
("../../../../g", "http://a/g"),
|
||||
# Only include "." and ".." when they are only part of a larger segment,
|
||||
# not by themselves.
|
||||
("/./g", "http://a/g"),
|
||||
("/../g", "http://a/g"),
|
||||
("g.", "http://a/b/c/g."),
|
||||
(".g", "http://a/b/c/.g"),
|
||||
("g..", "http://a/b/c/g.."),
|
||||
("..g", "http://a/b/c/..g"),
|
||||
# Unnecessary or nonsensical forms of "." and "..".
|
||||
("./../g", "http://a/b/g"),
|
||||
("./g/.", "http://a/b/c/g/"),
|
||||
("g/./h", "http://a/b/c/g/h"),
|
||||
("g/../h", "http://a/b/c/h"),
|
||||
("g;x=1/./y", "http://a/b/c/g;x=1/y"),
|
||||
("g;x=1/../y", "http://a/b/c/y"),
|
||||
# Separating the reference's query and fragment components from the path.
|
||||
("g?y/./x", "http://a/b/c/g?y/./x"),
|
||||
("g?y/../x", "http://a/b/c/g?y/../x"),
|
||||
("g#s/./x", "http://a/b/c/g#s/./x"),
|
||||
("g#s/../x", "http://a/b/c/g#s/../x"),
|
||||
# Not supported: scheme with relative path
|
||||
# ("http:g", "http:g"), # strict
|
||||
# ("http:g", "http://a/b/c/g"), # non-strict
|
||||
]
|
||||
|
||||
|
||||
_percentenc = lambda s: "".join("%%%02X" % ord(c) for c in s)
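# For example, _percentenc("a b") evaluates to "%61%20%62": every character is
# replaced by a '%' followed by its two-digit uppercase hexadecimal ordinal.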
|
||||
|
||||
|
||||
class _HasException(Protocol):
|
||||
@property
|
||||
def exception(self) -> BaseException:
|
||||
...
|
||||
|
||||
|
||||
class TestURL(SynchronousTestCase):
|
||||
"""
|
||||
Tests for L{URL}.
|
||||
"""
|
||||
|
||||
def assertUnicoded(self, u: URL) -> None:
|
||||
"""
|
||||
The given L{URL}'s components should be L{unicode}.
|
||||
|
||||
@param u: The L{URL} to test.
|
||||
"""
|
||||
self.assertIsInstance(u.scheme, str, repr(u))
|
||||
self.assertIsInstance(u.host, str, repr(u))
|
||||
for seg in u.path:
|
||||
self.assertIsInstance(seg, str, repr(u))
|
||||
for k, v in u.query:
|
||||
self.assertIsInstance(k, str, repr(u))
|
||||
self.assertTrue(v is None or isinstance(v, str), repr(u))
|
||||
self.assertIsInstance(u.fragment, str, repr(u))
|
||||
|
||||
def assertURL(
|
||||
self,
|
||||
u: URL,
|
||||
scheme: str,
|
||||
host: str,
|
||||
path: Iterable[str],
|
||||
query: Iterable[tuple[str, str | None]],
|
||||
fragment: str,
|
||||
port: int | None,
|
||||
userinfo: str = "",
|
||||
) -> None:
|
||||
"""
|
||||
The given L{URL} should have the given components.
|
||||
|
||||
@param u: The actual L{URL} to examine.
|
||||
|
||||
@param scheme: The expected scheme.
|
||||
|
||||
@param host: The expected host.
|
||||
|
||||
@param path: The expected path.
|
||||
|
||||
@param query: The expected query.
|
||||
|
||||
@param fragment: The expected fragment.
|
||||
|
||||
@param port: The expected port.
|
||||
|
||||
@param userinfo: The expected userinfo.
|
||||
"""
|
||||
actual = (u.scheme, u.host, u.path, u.query, u.fragment, u.port, u.userinfo)
|
||||
expected = (scheme, host, tuple(path), tuple(query), fragment, port, u.userinfo)
|
||||
self.assertEqual(actual, expected)
|
||||
|
||||
def test_initDefaults(self) -> None:
|
||||
"""
|
||||
L{URL} should have appropriate default values.
|
||||
"""
|
||||
|
||||
def check(u: URL) -> None:
|
||||
self.assertUnicoded(u)
|
||||
self.assertURL(u, "http", "", [], [], "", 80, "")
|
||||
|
||||
check(URL("http", ""))
|
||||
check(URL("http", "", [], []))
|
||||
check(URL("http", "", [], [], ""))
|
||||
|
||||
def test_init(self) -> None:
|
||||
"""
|
||||
L{URL} should accept L{unicode} parameters.
|
||||
"""
|
||||
u = URL("s", "h", ["p"], [("k", "v"), ("k", None)], "f")
|
||||
self.assertUnicoded(u)
|
||||
self.assertURL(u, "s", "h", ["p"], [("k", "v"), ("k", None)], "f", None)
|
||||
|
||||
self.assertURL(
|
||||
URL("http", "\xe0", ["\xe9"], [("\u03bb", "\u03c0")], "\u22a5"),
|
||||
"http",
|
||||
"\xe0",
|
||||
["\xe9"],
|
||||
[("\u03bb", "\u03c0")],
|
||||
"\u22a5",
|
||||
80,
|
||||
)
|
||||
|
||||
def test_initPercent(self) -> None:
|
||||
"""
|
||||
L{URL} should accept (and not interpret) percent characters.
|
||||
"""
|
||||
u = URL("s", "%68", ["%70"], [("%6B", "%76"), ("%6B", None)], "%66")
|
||||
self.assertUnicoded(u)
|
||||
self.assertURL(
|
||||
u, "s", "%68", ["%70"], [("%6B", "%76"), ("%6B", None)], "%66", None
|
||||
)
|
||||
|
||||
def test_repr(self) -> None:
|
||||
"""
|
||||
L{URL.__repr__} will display the canonical form of the URL, wrapped in
|
||||
a L{URL.fromText} invocation, so that it is C{eval}-able but still easy
|
||||
to read.
|
||||
"""
|
||||
self.assertEqual(
|
||||
repr(
|
||||
URL(
|
||||
scheme="http",
|
||||
host="foo",
|
||||
path=["bar"],
|
||||
query=[("baz", None), ("k", "v")],
|
||||
fragment="frob",
|
||||
)
|
||||
),
|
||||
"URL.from_text({})".format(repr("http://foo/bar?baz&k=v#frob")),
|
||||
)
|
||||
|
||||
def test_fromText(self) -> None:
|
||||
"""
|
||||
Round-tripping L{URL.fromText} with C{str} results in an equivalent
|
||||
URL.
|
||||
"""
|
||||
urlpath = URL.fromText(theurl)
|
||||
self.assertEqual(theurl, urlpath.asText())
|
||||
|
||||
def test_roundtrip(self) -> None:
|
||||
"""
|
||||
L{URL.asText} should invert L{URL.fromText}.
|
||||
"""
|
||||
tests = (
|
||||
"http://localhost",
|
||||
"http://localhost/",
|
||||
"http://localhost/foo",
|
||||
"http://localhost/foo/",
|
||||
"http://localhost/foo!!bar/",
|
||||
"http://localhost/foo%20bar/",
|
||||
"http://localhost/foo%2Fbar/",
|
||||
"http://localhost/foo?n",
|
||||
"http://localhost/foo?n=v",
|
||||
"http://localhost/foo?n=/a/b",
|
||||
"http://example.com/foo!@$bar?b!@z=123",
|
||||
"http://localhost/asd?a=asd%20sdf/345",
|
||||
"http://(%2525)/(%2525)?(%2525)&(%2525)=(%2525)#(%2525)",
|
||||
"http://(%C3%A9)/(%C3%A9)?(%C3%A9)&(%C3%A9)=(%C3%A9)#(%C3%A9)",
|
||||
)
|
||||
for test in tests:
|
||||
result = URL.fromText(test).asText()
|
||||
self.assertEqual(test, result)
|
||||
|
||||
def test_equality(self) -> None:
|
||||
"""
|
||||
Two URLs decoded using L{URL.fromText} will be equal (C{==}) if they
|
||||
        decoded the same URL string, and unequal (C{!=}) if they decoded different
|
||||
strings.
|
||||
"""
|
||||
urlpath = URL.fromText(theurl)
|
||||
self.assertEqual(urlpath, URL.fromText(theurl))
|
||||
self.assertNotEqual(
|
||||
urlpath,
|
||||
URL.fromText(
|
||||
"ftp://www.anotherinvaliddomain.com/" "foo/bar/baz/?zot=21&zut"
|
||||
),
|
||||
)
|
||||
|
||||
def test_fragmentEquality(self) -> None:
|
||||
"""
|
||||
An URL created with the empty string for a fragment compares equal
|
||||
to an URL created with an unspecified fragment.
|
||||
"""
|
||||
self.assertEqual(URL(fragment=""), URL())
|
||||
self.assertEqual(
|
||||
URL.fromText("http://localhost/#"), URL.fromText("http://localhost/")
|
||||
)
|
||||
|
||||
def test_child(self) -> None:
|
||||
"""
|
||||
L{URL.child} appends a new path segment, but does not affect the query
|
||||
or fragment.
|
||||
"""
|
||||
urlpath = URL.fromText(theurl)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/gong?zot=23&zut",
|
||||
urlpath.child("gong").asText(),
|
||||
)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/gong%2F?zot=23&zut",
|
||||
urlpath.child("gong/").asText(),
|
||||
)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/gong%2Fdouble?zot=23&zut",
|
||||
urlpath.child("gong/double").asText(),
|
||||
)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/gong%2Fdouble%2F?zot=23&zut",
|
||||
urlpath.child("gong/double/").asText(),
|
||||
)
|
||||
|
||||
def test_multiChild(self) -> None:
|
||||
"""
|
||||
L{URL.child} receives multiple segments as C{*args} and appends each in
|
||||
turn.
|
||||
"""
|
||||
self.assertEqual(
|
||||
URL.fromText("http://example.com/a/b").child("c", "d", "e").asText(),
|
||||
"http://example.com/a/b/c/d/e",
|
||||
)
|
||||
|
||||
def test_childInitRoot(self) -> None:
|
||||
"""
|
||||
L{URL.child} of a L{URL} without a path produces a L{URL} with a single
|
||||
path segment.
|
||||
"""
|
||||
childURL = URL(host="www.foo.com").child("c")
|
||||
self.assertTrue(childURL.rooted)
|
||||
self.assertEqual("http://www.foo.com/c", childURL.asText())
|
||||
|
||||
def test_sibling(self) -> None:
|
||||
"""
|
||||
L{URL.sibling} of a L{URL} replaces the last path segment, but does not
|
||||
affect the query or fragment.
|
||||
"""
|
||||
urlpath = URL.fromText(theurl)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/sister?zot=23&zut",
|
||||
urlpath.sibling("sister").asText(),
|
||||
)
|
||||
        # Use a URL without a trailing '/' to check that the last path segment
        # is replaced.
|
||||
theurl2 = "http://www.foo.com/a/nice/path?zot=23&zut"
|
||||
urlpath = URL.fromText(theurl2)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/sister?zot=23&zut",
|
||||
urlpath.sibling("sister").asText(),
|
||||
)
|
||||
|
||||
def test_click(self) -> None:
|
||||
"""
|
||||
L{URL.click} interprets the given string as a relative URI-reference
|
||||
and returns a new L{URL} interpreting C{self} as the base absolute URI.
|
||||
"""
|
||||
urlpath = URL.fromText(theurl)
|
||||
        # A null URI reference is valid (clicking it returns the same URL).
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?zot=23&zut", urlpath.click("").asText()
|
||||
)
|
||||
        # A simple relative path removes the query.
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/click", urlpath.click("click").asText()
|
||||
)
|
||||
        # An absolute path replaces the path and the query.
|
||||
self.assertEqual("http://www.foo.com/click", urlpath.click("/click").asText())
|
||||
# Replace just the query.
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?burp", urlpath.click("?burp").asText()
|
||||
)
|
||||
        # Clicking from one full URL to another should not generate '//'
        # between the authority and the path.
|
||||
self.assertNotIn(
|
||||
"//foobar", urlpath.click("http://www.foo.com/foobar").asText()
|
||||
)
|
||||
|
||||
# From a url with no query clicking a url with a query, the query
|
||||
# should be handled properly.
|
||||
u = URL.fromText("http://www.foo.com/me/noquery")
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/me/17?spam=158", u.click("/me/17?spam=158").asText()
|
||||
)
|
||||
|
||||
# Check that everything from the path onward is removed when the click
|
||||
# link has no path.
|
||||
u = URL.fromText("http://localhost/foo?abc=def")
|
||||
self.assertEqual(
|
||||
u.click("http://www.python.org").asText(), "http://www.python.org"
|
||||
)
|
||||
|
||||
def test_clickRFC3986(self) -> None:
|
||||
"""
|
||||
L{URL.click} should correctly resolve the examples in RFC 3986.
|
||||
"""
|
||||
base = URL.fromText(relativeLinkBaseForRFC3986)
|
||||
for ref, expected in relativeLinkTestsForRFC3986:
|
||||
self.assertEqual(base.click(ref).asText(), expected)
|
||||
|
||||
def test_clickSchemeRelPath(self) -> None:
|
||||
"""
|
||||
L{URL.click} should not accept schemes with relative paths.
|
||||
"""
|
||||
base = URL.fromText(relativeLinkBaseForRFC3986)
|
||||
self.assertRaises(NotImplementedError, base.click, "g:h")
|
||||
self.assertRaises(NotImplementedError, base.click, "http:h")
|
||||
|
||||
def test_cloneUnchanged(self) -> None:
|
||||
"""
|
||||
Verify that L{URL.replace} doesn't change any of the arguments it
|
||||
is passed.
|
||||
"""
|
||||
urlpath = URL.fromText("https://x:1/y?z=1#A")
|
||||
self.assertEqual(
|
||||
urlpath.replace(
|
||||
urlpath.scheme,
|
||||
urlpath.host,
|
||||
urlpath.path,
|
||||
urlpath.query,
|
||||
urlpath.fragment,
|
||||
urlpath.port,
|
||||
),
|
||||
urlpath,
|
||||
)
|
||||
self.assertEqual(urlpath.replace(), urlpath)
|
||||
|
||||
def test_clickCollapse(self) -> None:
|
||||
"""
|
||||
L{URL.click} collapses C{.} and C{..} according to RFC 3986 section
|
||||
5.2.4.
|
||||
"""
|
||||
tests = [
|
||||
["http://localhost/", ".", "http://localhost/"],
|
||||
["http://localhost/", "..", "http://localhost/"],
|
||||
["http://localhost/a/b/c", ".", "http://localhost/a/b/"],
|
||||
["http://localhost/a/b/c", "..", "http://localhost/a/"],
|
||||
["http://localhost/a/b/c", "./d/e", "http://localhost/a/b/d/e"],
|
||||
["http://localhost/a/b/c", "../d/e", "http://localhost/a/d/e"],
|
||||
["http://localhost/a/b/c", "/./d/e", "http://localhost/d/e"],
|
||||
["http://localhost/a/b/c", "/../d/e", "http://localhost/d/e"],
|
||||
["http://localhost/a/b/c/", "../../d/e/", "http://localhost/a/d/e/"],
|
||||
["http://localhost/a/./c", "../d/e", "http://localhost/d/e"],
|
||||
["http://localhost/a/./c/", "../d/e", "http://localhost/a/d/e"],
|
||||
["http://localhost/a/b/c/d", "./e/../f/../g", "http://localhost/a/b/c/g"],
|
||||
["http://localhost/a/b/c", "d//e", "http://localhost/a/b/d//e"],
|
||||
]
|
||||
for start, click, expected in tests:
|
||||
actual = URL.fromText(start).click(click).asText()
|
||||
self.assertEqual(
|
||||
actual,
|
||||
expected,
|
||||
"{start}.click({click}) => {actual} not {expected}".format(
|
||||
start=start,
|
||||
click=repr(click),
|
||||
actual=actual,
|
||||
expected=expected,
|
||||
),
|
||||
)
|
||||
|
||||
def test_queryAdd(self) -> None:
|
||||
"""
|
||||
L{URL.add} adds query parameters.
|
||||
"""
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?foo=bar",
|
||||
URL.fromText("http://www.foo.com/a/nice/path/").add("foo", "bar").asText(),
|
||||
)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/?foo=bar",
|
||||
URL(host="www.foo.com").add("foo", "bar").asText(),
|
||||
)
|
||||
urlpath = URL.fromText(theurl)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?zot=23&zut&burp",
|
||||
urlpath.add("burp").asText(),
|
||||
)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?zot=23&zut&burp=xxx",
|
||||
urlpath.add("burp", "xxx").asText(),
|
||||
)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?zot=23&zut&burp=xxx&zing",
|
||||
urlpath.add("burp", "xxx").add("zing").asText(),
|
||||
)
|
||||
# Note the inversion!
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?zot=23&zut&zing&burp=xxx",
|
||||
urlpath.add("zing").add("burp", "xxx").asText(),
|
||||
)
|
||||
# Note the two values for the same name.
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?zot=23&zut&burp=xxx&zot=32",
|
||||
urlpath.add("burp", "xxx").add("zot", "32").asText(),
|
||||
)
|
||||
|
||||
def test_querySet(self) -> None:
|
||||
"""
|
||||
L{URL.set} replaces query parameters by name.
|
||||
"""
|
||||
urlpath = URL.fromText(theurl)
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?zot=32&zut",
|
||||
urlpath.set("zot", "32").asText(),
|
||||
)
|
||||
# Replace name without value with name/value and vice-versa.
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?zot&zut=itworked",
|
||||
urlpath.set("zot").set("zut", "itworked").asText(),
|
||||
)
|
||||
# Q: what happens when the query has two values and we replace?
|
||||
# A: we replace both values with a single one
|
||||
self.assertEqual(
|
||||
"http://www.foo.com/a/nice/path/?zot=32&zut",
|
||||
urlpath.add("zot", "xxx").set("zot", "32").asText(),
|
||||
)
|
||||
|
||||
def test_queryRemove(self) -> None:
|
||||
"""
|
||||
L{URL.remove} removes all instances of a query parameter.
|
||||
"""
|
||||
url = URL.fromText("https://example.com/a/b/?foo=1&bar=2&foo=3")
|
||||
self.assertEqual(
|
||||
url.remove("foo"), URL.fromText("https://example.com/a/b/?bar=2")
|
||||
)
|
||||
|
||||
def test_empty(self) -> None:
|
||||
"""
|
||||
An empty L{URL} should serialize as the empty string.
|
||||
"""
|
||||
self.assertEqual(URL().asText(), "")
|
||||
|
||||
def test_justQueryText(self) -> None:
|
||||
"""
|
||||
An L{URL} with query text should serialize as just query text.
|
||||
"""
|
||||
u = URL(query=[("hello", "world")])
|
||||
self.assertEqual(u.asText(), "?hello=world")
|
||||
|
||||
def test_identicalEqual(self) -> None:
|
||||
"""
|
||||
L{URL} compares equal to itself.
|
||||
"""
|
||||
u = URL.fromText("http://localhost/")
|
||||
self.assertEqual(u, u)
|
||||
|
||||
def test_similarEqual(self) -> None:
|
||||
"""
|
||||
URLs with equivalent components should compare equal.
|
||||
"""
|
||||
u1 = URL.fromText("http://localhost/")
|
||||
u2 = URL.fromText("http://localhost/")
|
||||
self.assertEqual(u1, u2)
|
||||
|
||||
def test_differentNotEqual(self) -> None:
|
||||
"""
|
||||
L{URL}s that refer to different resources are both unequal (C{!=}) and
|
||||
also not equal (not C{==}).
|
||||
"""
|
||||
u1 = URL.fromText("http://localhost/a")
|
||||
u2 = URL.fromText("http://localhost/b")
|
||||
self.assertFalse(u1 == u2, f"{u1!r} != {u2!r}")
|
||||
self.assertNotEqual(u1, u2)
|
||||
|
||||
def test_otherTypesNotEqual(self) -> None:
|
||||
"""
|
||||
L{URL} is not equal (C{==}) to other types.
|
||||
"""
|
||||
u = URL.fromText("http://localhost/")
|
||||
self.assertFalse(u == 42, "URL must not equal a number.")
|
||||
self.assertFalse(u == object(), "URL must not equal an object.")
|
||||
self.assertNotEqual(u, 42)
|
||||
self.assertNotEqual(u, object())
|
||||
|
||||
def test_identicalNotUnequal(self) -> None:
|
||||
"""
|
||||
Identical L{URL}s are not unequal (C{!=}) to each other.
|
||||
"""
|
||||
u = URL.fromText("http://localhost/")
|
||||
self.assertFalse(u != u, "%r == itself" % u)
|
||||
|
||||
def test_similarNotUnequal(self) -> None:
|
||||
"""
|
||||
Structurally similar L{URL}s are not unequal (C{!=}) to each other.
|
||||
"""
|
||||
u1 = URL.fromText("http://localhost/")
|
||||
u2 = URL.fromText("http://localhost/")
|
||||
self.assertFalse(u1 != u2, f"{u1!r} == {u2!r}")
|
||||
|
||||
def test_differentUnequal(self) -> None:
|
||||
"""
|
||||
Structurally different L{URL}s are unequal (C{!=}) to each other.
|
||||
"""
|
||||
u1 = URL.fromText("http://localhost/a")
|
||||
u2 = URL.fromText("http://localhost/b")
|
||||
self.assertTrue(u1 != u2, f"{u1!r} == {u2!r}")
|
||||
|
||||
def test_otherTypesUnequal(self) -> None:
|
||||
"""
|
||||
L{URL} is unequal (C{!=}) to other types.
|
||||
"""
|
||||
u = URL.fromText("http://localhost/")
|
||||
self.assertTrue(u != 42, "URL must differ from a number.")
|
||||
self.assertTrue(u != object(), "URL must differ from an object.")
|
||||
|
||||
def test_asURI(self) -> None:
|
||||
"""
|
||||
L{URL.asURI} produces a URI, encoding any non-ASCII text in the URL into
pure US-ASCII, and returns a new L{URL}.
|
||||
"""
|
||||
unicodey = (
|
||||
"http://\N{LATIN SMALL LETTER E WITH ACUTE}.com/"
|
||||
"\N{LATIN SMALL LETTER E}\N{COMBINING ACUTE ACCENT}"
|
||||
"?\N{LATIN SMALL LETTER A}\N{COMBINING ACUTE ACCENT}="
|
||||
"\N{LATIN SMALL LETTER I}\N{COMBINING ACUTE ACCENT}"
|
||||
"#\N{LATIN SMALL LETTER U}\N{COMBINING ACUTE ACCENT}"
|
||||
)
|
||||
iri = URL.fromText(unicodey)
|
||||
uri = iri.asURI()
|
||||
self.assertEqual(iri.host, "\N{LATIN SMALL LETTER E WITH ACUTE}.com")
|
||||
self.assertEqual(
|
||||
iri.path[0], "\N{LATIN SMALL LETTER E}\N{COMBINING ACUTE ACCENT}"
|
||||
)
|
||||
self.assertEqual(iri.asText(), unicodey)
|
||||
expectedURI = "http://xn--9ca.com/%C3%A9?%C3%A1=%C3%AD#%C3%BA"
|
||||
actualURI = uri.asText()
|
||||
self.assertEqual(actualURI, expectedURI, f"{actualURI!r} != {expectedURI!r}")
|
||||
|
||||
def test_asIRI(self) -> None:
|
||||
"""
|
||||
L{URL.asIRI} decodes any percent-encoded text in the URI, making it
|
||||
more suitable for reading by humans, and returns a new L{URL}.
|
||||
"""
|
||||
asciiish = "http://xn--9ca.com/%C3%A9?%C3%A1=%C3%AD#%C3%BA"
|
||||
uri = URL.fromText(asciiish)
|
||||
iri = uri.asIRI()
|
||||
self.assertEqual(uri.host, "xn--9ca.com")
|
||||
self.assertEqual(uri.path[0], "%C3%A9")
|
||||
self.assertEqual(uri.asText(), asciiish)
|
||||
expectedIRI = (
|
||||
"http://\N{LATIN SMALL LETTER E WITH ACUTE}.com/"
|
||||
"\N{LATIN SMALL LETTER E WITH ACUTE}"
|
||||
"?\N{LATIN SMALL LETTER A WITH ACUTE}="
|
||||
"\N{LATIN SMALL LETTER I WITH ACUTE}"
|
||||
"#\N{LATIN SMALL LETTER U WITH ACUTE}"
|
||||
)
|
||||
actualIRI = iri.asText()
|
||||
self.assertEqual(actualIRI, expectedIRI, f"{actualIRI!r} != {expectedIRI!r}")
|
||||
|
||||
def test_badUTF8AsIRI(self) -> None:
|
||||
"""
|
||||
Bad UTF-8 in a path segment, query parameter, or fragment results in
|
||||
that portion of the URI remaining percent-encoded in the IRI.
|
||||
"""
|
||||
urlWithBinary = "http://xn--9ca.com/%00%FF/%C3%A9"
|
||||
uri = URL.fromText(urlWithBinary)
|
||||
iri = uri.asIRI()
|
||||
expectedIRI = (
|
||||
"http://\N{LATIN SMALL LETTER E WITH ACUTE}.com/"
|
||||
"%00%FF/"
|
||||
"\N{LATIN SMALL LETTER E WITH ACUTE}"
|
||||
)
|
||||
actualIRI = iri.asText()
|
||||
self.assertEqual(actualIRI, expectedIRI, f"{actualIRI!r} != {expectedIRI!r}")
|
||||
|
||||
def test_alreadyIRIAsIRI(self) -> None:
|
||||
"""
|
||||
A L{URL} already composed of non-ASCII text is returned unchanged by L{URL.asIRI}.
|
||||
"""
|
||||
unicodey = (
|
||||
"http://\N{LATIN SMALL LETTER E WITH ACUTE}.com/"
|
||||
"\N{LATIN SMALL LETTER E}\N{COMBINING ACUTE ACCENT}"
|
||||
"?\N{LATIN SMALL LETTER A}\N{COMBINING ACUTE ACCENT}="
|
||||
"\N{LATIN SMALL LETTER I}\N{COMBINING ACUTE ACCENT}"
|
||||
"#\N{LATIN SMALL LETTER U}\N{COMBINING ACUTE ACCENT}"
|
||||
)
|
||||
iri = URL.fromText(unicodey)
|
||||
alsoIRI = iri.asIRI()
|
||||
self.assertEqual(alsoIRI.asText(), unicodey)
|
||||
|
||||
def test_alreadyURIAsURI(self) -> None:
|
||||
"""
|
||||
A L{URL} composed of encoded text will remain encoded.
|
||||
"""
|
||||
expectedURI = "http://xn--9ca.com/%C3%A9?%C3%A1=%C3%AD#%C3%BA"
|
||||
uri = URL.fromText(expectedURI)
|
||||
actualURI = uri.asURI().asText()
|
||||
self.assertEqual(actualURI, expectedURI)
|
||||
|
||||
def test_userinfo(self) -> None:
|
||||
"""
|
||||
L{URL.fromText} will parse the C{userinfo} portion of the URI
|
||||
separately from the host and port.
|
||||
"""
|
||||
url = URL.fromText(
|
||||
"http://someuser:somepassword@example.com/some-segment@ignore"
|
||||
)
|
||||
self.assertEqual(url.authority(True), "someuser:somepassword@example.com")
|
||||
self.assertEqual(url.authority(False), "someuser:@example.com")
|
||||
self.assertEqual(url.userinfo, "someuser:somepassword")
|
||||
self.assertEqual(url.user, "someuser")
|
||||
self.assertEqual(
|
||||
url.asText(), "http://someuser:@example.com/some-segment@ignore"
|
||||
)
|
||||
self.assertEqual(
|
||||
url.replace(userinfo="someuser").asText(),
|
||||
"http://someuser@example.com/some-segment@ignore",
|
||||
)
|
||||
|
||||
def test_portText(self) -> None:
|
||||
"""
|
||||
L{URL.fromText} parses custom port numbers as integers.
|
||||
"""
|
||||
portURL = URL.fromText("http://www.example.com:8080/")
|
||||
self.assertEqual(portURL.port, 8080)
|
||||
self.assertEqual(portURL.asText(), "http://www.example.com:8080/")
|
||||
|
||||
def test_mailto(self) -> None:
|
||||
"""
|
||||
Although L{URL} instances are mainly for dealing with HTTP, other
|
||||
schemes (such as C{mailto:}) should work as well. For example,
|
||||
L{URL.fromText}/L{URL.asText} round-trips cleanly for a C{mailto:} URL
|
||||
representing an email address.
|
||||
"""
|
||||
self.assertEqual(
|
||||
URL.fromText("mailto:user@example.com").asText(), "mailto:user@example.com"
|
||||
)
|
||||
|
||||
def test_queryIterable(self) -> None:
|
||||
"""
|
||||
When a L{URL} is created with a C{query} argument, the C{query}
|
||||
argument is converted into an N-tuple of 2-tuples.
|
||||
"""
|
||||
# note the type here is invalid as only 2-tuples are accepted
|
||||
url = URL(query=[["alpha", "beta"]]) # type: ignore[list-item]
|
||||
self.assertEqual(url.query, (("alpha", "beta"),))
|
||||
|
||||
def test_pathIterable(self) -> None:
|
||||
"""
|
||||
When a L{URL} is created with a C{path} argument, the C{path} is
|
||||
converted into a tuple.
|
||||
"""
|
||||
url = URL(path=["hello", "world"])
|
||||
self.assertEqual(url.path, ("hello", "world"))
|
||||
|
||||
def test_invalidArguments(self) -> None:
|
||||
"""
|
||||
Passing an argument of the wrong type to any of the constructor
|
||||
arguments of L{URL} will raise a descriptive L{TypeError}.
|
||||
|
||||
L{URL} typechecks very aggressively to ensure that its constituent
|
||||
parts are all properly immutable and to prevent confusing errors when
|
||||
bad data crops up in a method call long after the code that called the
|
||||
constructor is off the stack.
|
||||
"""
|
||||
|
||||
class Unexpected:
|
||||
def __str__(self) -> str:
|
||||
return "wrong"
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "<unexpected>"
|
||||
|
||||
defaultExpectation = "unicode" if bytes is str else "str"
|
||||
|
||||
def assertRaised(raised: _HasException, expectation: str, name: str) -> None:
|
||||
self.assertEqual(
|
||||
str(raised.exception),
|
||||
"expected {} for {}, got {}".format(expectation, name, "<unexpected>"),
|
||||
)
|
||||
|
||||
def check(param: str, expectation: str = defaultExpectation) -> None:
|
||||
with self.assertRaises(TypeError) as raised:
|
||||
URL(**{param: Unexpected()}) # type: ignore[arg-type]
|
||||
assertRaised(raised, expectation, param)
|
||||
|
||||
check("scheme")
|
||||
check("host")
|
||||
check("fragment")
|
||||
check("rooted", "bool")
|
||||
check("userinfo")
|
||||
check("port", "int or NoneType")
|
||||
|
||||
with self.assertRaises(TypeError) as raised:
|
||||
URL(
|
||||
path=[
|
||||
Unexpected(), # type: ignore[list-item]
|
||||
]
|
||||
)
|
||||
assertRaised(raised, defaultExpectation, "path segment")
|
||||
with self.assertRaises(TypeError) as raised:
|
||||
URL(
|
||||
query=[
|
||||
("name", Unexpected()), # type: ignore[list-item]
|
||||
]
|
||||
)
|
||||
assertRaised(
|
||||
raised, defaultExpectation + " or NoneType", "query parameter value"
|
||||
)
|
||||
with self.assertRaises(TypeError) as raised:
|
||||
URL(
|
||||
query=[
|
||||
(Unexpected(), "value"), # type: ignore[list-item]
|
||||
]
|
||||
)
|
||||
assertRaised(raised, defaultExpectation, "query parameter name")
|
||||
# No custom error message for this one, just want to make sure
|
||||
# non-2-tuples don't get through.
|
||||
with self.assertRaises(TypeError):
|
||||
URL(query=[Unexpected()]) # type: ignore[list-item]
|
||||
with self.assertRaises(ValueError):
|
||||
URL(query=[("k", "v", "vv")]) # type: ignore[list-item]
|
||||
with self.assertRaises(ValueError):
|
||||
URL(query=[("k",)]) # type: ignore[list-item]
|
||||
|
||||
url = URL.fromText("https://valid.example.com/")
|
||||
with self.assertRaises(TypeError) as raised:
|
||||
url.child(Unexpected()) # type: ignore[arg-type]
|
||||
assertRaised(raised, defaultExpectation, "path segment")
|
||||
with self.assertRaises(TypeError) as raised:
|
||||
url.sibling(Unexpected()) # type: ignore[arg-type]
|
||||
assertRaised(raised, defaultExpectation, "path segment")
|
||||
with self.assertRaises(TypeError) as raised:
|
||||
url.click(Unexpected()) # type: ignore[arg-type]
|
||||
assertRaised(raised, defaultExpectation, "relative URL")
|
||||
|
||||
def test_technicallyTextIsIterableBut(self) -> None:
|
||||
"""
|
||||
Technically, L{str} (or L{unicode}, as appropriate) is iterable, but
|
||||
C{URL(path="foo")} resulting in C{URL.fromText("f/o/o")} is never what
|
||||
you want.
|
||||
"""
|
||||
with self.assertRaises(TypeError) as raised:
|
||||
URL(path="foo")
|
||||
self.assertEqual(
|
||||
str(raised.exception),
|
||||
"expected iterable of text for path, not: {}".format(repr("foo")),
|
||||
)
|
||||
|
||||
|
||||
class URLDeprecationTests(SynchronousTestCase):
|
||||
"""
|
||||
L{twisted.python.url} is deprecated.
|
||||
"""
|
||||
|
||||
def test_urlDeprecation(self) -> None:
|
||||
"""
|
||||
L{twisted.python.url} is deprecated since Twisted 17.5.0.
|
||||
"""
|
||||
from twisted.python import url
|
||||
|
||||
url
|
||||
|
||||
warningsShown = self.flushWarnings([self.test_urlDeprecation])
|
||||
self.assertEqual(1, len(warningsShown))
|
||||
self.assertEqual(
|
||||
(
|
||||
"twisted.python.url was deprecated in Twisted 17.5.0:"
|
||||
" Please use hyperlink from PyPI instead."
|
||||
),
|
||||
warningsShown[0]["message"],
|
||||
)
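# A minimal usage sketch restating results already asserted above; importing
# URL from twisted.python.url still works but emits the DeprecationWarning
# checked by URLDeprecationTests (hyperlink on PyPI is the recommended source).
from twisted.python.url import URL

assert URL.fromText("http://localhost/a/b/c").click("../d/e").asText() == (
    "http://localhost/a/d/e"
)
assert URL.fromText("http://www.foo.com/a/nice/path/").add("foo", "bar").asText() == (
    "http://www.foo.com/a/nice/path/?foo=bar"
)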
@@ -0,0 +1,289 @@
|
||||
# -*- test-case-name: twisted.python.test.test_urlpath -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.urlpath}.
|
||||
"""
|
||||
|
||||
from twisted.python import urlpath
|
||||
from twisted.trial import unittest
|
||||
|
||||
|
||||
class _BaseURLPathTests:
|
||||
"""
|
||||
Tests for instantiated L{urlpath.URLPath}s.
|
||||
"""
|
||||
|
||||
def test_partsAreBytes(self):
|
||||
"""
|
||||
All of the attributes of L{urlpath.URLPath} should be L{bytes}.
|
||||
"""
|
||||
self.assertIsInstance(self.path.scheme, bytes)
|
||||
self.assertIsInstance(self.path.netloc, bytes)
|
||||
self.assertIsInstance(self.path.path, bytes)
|
||||
self.assertIsInstance(self.path.query, bytes)
|
||||
self.assertIsInstance(self.path.fragment, bytes)
|
||||
|
||||
def test_strReturnsStr(self):
|
||||
"""
|
||||
Calling C{str()} with a L{URLPath} will always return a L{str}.
|
||||
"""
|
||||
self.assertEqual(type(self.path.__str__()), str)
|
||||
|
||||
def test_mutabilityWithText(self, stringType=str):
|
||||
"""
|
||||
Setting attributes on L{urlpath.URLPath} should change the value
|
||||
returned by L{str}.
|
||||
|
||||
@param stringType: a callable to parameterize this test for different
|
||||
text types.
|
||||
@type stringType: 1-argument callable taking L{str} and returning
|
||||
L{str} or L{bytes}.
|
||||
"""
|
||||
self.path.scheme = stringType("https")
|
||||
self.assertEqual(
|
||||
str(self.path), "https://example.com/foo/bar?yes=no&no=yes#footer"
|
||||
)
|
||||
self.path.netloc = stringType("another.example.invalid")
|
||||
self.assertEqual(
|
||||
str(self.path),
|
||||
"https://another.example.invalid/foo/bar?yes=no&no=yes#footer",
|
||||
)
|
||||
self.path.path = stringType("/hello")
|
||||
self.assertEqual(
|
||||
str(self.path), "https://another.example.invalid/hello?yes=no&no=yes#footer"
|
||||
)
|
||||
self.path.query = stringType("alpha=omega&opposites=same")
|
||||
self.assertEqual(
|
||||
str(self.path),
|
||||
"https://another.example.invalid/hello?alpha=omega&opposites=same"
|
||||
"#footer",
|
||||
)
|
||||
self.path.fragment = stringType("header")
|
||||
self.assertEqual(
|
||||
str(self.path),
|
||||
"https://another.example.invalid/hello?alpha=omega&opposites=same"
|
||||
"#header",
|
||||
)
|
||||
|
||||
def test_mutabilityWithBytes(self):
|
||||
"""
|
||||
Same as L{test_mutabilityWithText} but for bytes.
|
||||
"""
|
||||
self.test_mutabilityWithText(lambda x: x.encode("ascii"))
|
||||
|
||||
def test_allAttributesAreBytes(self):
|
||||
"""
|
||||
A created L{URLPath} has bytes attributes.
|
||||
"""
|
||||
self.assertIsInstance(self.path.scheme, bytes)
|
||||
self.assertIsInstance(self.path.netloc, bytes)
|
||||
self.assertIsInstance(self.path.path, bytes)
|
||||
self.assertIsInstance(self.path.query, bytes)
|
||||
self.assertIsInstance(self.path.fragment, bytes)
|
||||
|
||||
def test_stringConversion(self):
|
||||
"""
|
||||
Calling C{str()} with a L{URLPath} will return the same URL that it was
|
||||
constructed with.
|
||||
"""
|
||||
self.assertEqual(
|
||||
str(self.path), "http://example.com/foo/bar?yes=no&no=yes#footer"
|
||||
)
|
||||
|
||||
def test_childString(self):
|
||||
"""
|
||||
Calling C{str()} with a C{URLPath.child()} will return a URL which is
|
||||
the child of the URL it was instantiated with.
|
||||
"""
|
||||
self.assertEqual(
|
||||
str(self.path.child(b"hello")), "http://example.com/foo/bar/hello"
|
||||
)
|
||||
self.assertEqual(
|
||||
str(self.path.child(b"hello").child(b"")),
|
||||
"http://example.com/foo/bar/hello/",
|
||||
)
|
||||
self.assertEqual(
|
||||
str(self.path.child(b"hello", keepQuery=True)),
|
||||
"http://example.com/foo/bar/hello?yes=no&no=yes",
|
||||
)
|
||||
|
||||
def test_siblingString(self):
|
||||
"""
|
||||
Calling C{str()} with a C{URLPath.sibling()} will return a URL which is
|
||||
the sibling of the URL it was instantiated with.
|
||||
"""
|
||||
self.assertEqual(str(self.path.sibling(b"baz")), "http://example.com/foo/baz")
|
||||
self.assertEqual(
|
||||
str(self.path.sibling(b"baz", keepQuery=True)),
|
||||
"http://example.com/foo/baz?yes=no&no=yes",
|
||||
)
|
||||
|
||||
# The sibling of http://example.com/foo/bar/
|
||||
# is http://example.com/foo/bar/baz
|
||||
# because really we are constructing a sibling of
|
||||
# http://example.com/foo/bar/index.html
|
||||
self.assertEqual(
|
||||
str(self.path.child(b"").sibling(b"baz")), "http://example.com/foo/bar/baz"
|
||||
)
|
||||
|
||||
def test_parentString(self):
|
||||
"""
|
||||
Calling C{str()} with a C{URLPath.parent()} will return a URL which is
|
||||
the parent of the URL it was instantiated with.
|
||||
"""
|
||||
# .parent() should be equivalent to '..'
|
||||
# 'foo' is the current directory, '/' is the parent directory
|
||||
self.assertEqual(str(self.path.parent()), "http://example.com/")
|
||||
self.assertEqual(
|
||||
str(self.path.parent(keepQuery=True)), "http://example.com/?yes=no&no=yes"
|
||||
)
|
||||
self.assertEqual(str(self.path.child(b"").parent()), "http://example.com/foo/")
|
||||
self.assertEqual(
|
||||
str(self.path.child(b"baz").parent()), "http://example.com/foo/"
|
||||
)
|
||||
self.assertEqual(
|
||||
str(self.path.parent().parent().parent().parent().parent()),
|
||||
"http://example.com/",
|
||||
)
|
||||
|
||||
def test_hereString(self):
|
||||
"""
|
||||
Calling C{str()} with a C{URLPath.here()} will return a URL which is
|
||||
the URL that it was instantiated with, without any file, query, or
|
||||
fragment.
|
||||
"""
|
||||
# .here() should be equivalent to '.'
|
||||
self.assertEqual(str(self.path.here()), "http://example.com/foo/")
|
||||
self.assertEqual(
|
||||
str(self.path.here(keepQuery=True)), "http://example.com/foo/?yes=no&no=yes"
|
||||
)
|
||||
self.assertEqual(
|
||||
str(self.path.child(b"").here()), "http://example.com/foo/bar/"
|
||||
)
|
||||
|
||||
def test_doubleSlash(self):
|
||||
"""
|
||||
Calling L{urlpath.URLPath.click} on a L{urlpath.URLPath} with a
|
||||
trailing slash with a relative URL containing a leading slash will
|
||||
result in a URL with a single slash at the start of the path portion.
|
||||
"""
|
||||
self.assertEqual(
|
||||
str(self.path.click(b"/hello/world")).encode("ascii"),
|
||||
b"http://example.com/hello/world",
|
||||
)
|
||||
|
||||
def test_pathList(self):
|
||||
"""
|
||||
L{urlpath.URLPath.pathList} returns a L{list} of L{bytes}.
|
||||
"""
|
||||
self.assertEqual(
|
||||
self.path.child(b"%00%01%02").pathList(),
|
||||
[b"", b"foo", b"bar", b"%00%01%02"],
|
||||
)
|
||||
|
||||
# Just testing that the 'copy' argument exists for compatibility; it
|
||||
# was originally provided for performance reasons, and its behavioral
|
||||
# contract is kind of nonsense (where is the state shared? who with?)
|
||||
# so it doesn't actually *do* anything any more.
|
||||
self.assertEqual(
|
||||
self.path.child(b"%00%01%02").pathList(copy=False),
|
||||
[b"", b"foo", b"bar", b"%00%01%02"],
|
||||
)
|
||||
self.assertEqual(
|
||||
self.path.child(b"%00%01%02").pathList(unquote=True),
|
||||
[b"", b"foo", b"bar", b"\x00\x01\x02"],
|
||||
)
|
||||
|
||||
|
||||
class BytesURLPathTests(_BaseURLPathTests, unittest.TestCase):
|
||||
"""
|
||||
Tests for interacting with a L{URLPath} created with C{fromBytes}.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.path = urlpath.URLPath.fromBytes(
|
||||
b"http://example.com/foo/bar?yes=no&no=yes#footer"
|
||||
)
|
||||
|
||||
def test_mustBeBytes(self):
|
||||
"""
|
||||
L{URLPath.fromBytes} must take a L{bytes} argument.
|
||||
"""
|
||||
with self.assertRaises(ValueError):
|
||||
urlpath.URLPath.fromBytes(None)
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
urlpath.URLPath.fromBytes("someurl")
|
||||
|
||||
def test_withoutArguments(self):
|
||||
"""
|
||||
An instantiation with no arguments creates a usable L{URLPath} with
|
||||
default arguments.
|
||||
"""
|
||||
url = urlpath.URLPath()
|
||||
self.assertEqual(str(url), "http://localhost/")
|
||||
|
||||
def test_partialArguments(self):
|
||||
"""
|
||||
Leaving some optional arguments unfilled makes a L{URLPath} with those
|
||||
optional arguments filled with defaults.
|
||||
"""
|
||||
# Not a "full" URL given to fromBytes, no /
|
||||
# / is filled in
|
||||
url = urlpath.URLPath.fromBytes(b"http://google.com")
|
||||
self.assertEqual(url.scheme, b"http")
|
||||
self.assertEqual(url.netloc, b"google.com")
|
||||
self.assertEqual(url.path, b"/")
|
||||
self.assertEqual(url.fragment, b"")
|
||||
self.assertEqual(url.query, b"")
|
||||
self.assertEqual(str(url), "http://google.com/")
|
||||
|
||||
def test_nonASCIIBytes(self):
|
||||
"""
|
||||
L{URLPath.fromBytes} can interpret non-ASCII bytes as percent-encoded.
|
||||
"""
|
||||
url = urlpath.URLPath.fromBytes(b"http://example.com/\xff\x00")
|
||||
self.assertEqual(str(url), "http://example.com/%FF%00")
|
||||
|
||||
|
||||
class StringURLPathTests(_BaseURLPathTests, unittest.TestCase):
|
||||
"""
|
||||
Tests for interacting with a L{URLPath} created with C{fromString} and a
|
||||
L{str} argument.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.path = urlpath.URLPath.fromString(
|
||||
"http://example.com/foo/bar?yes=no&no=yes#footer"
|
||||
)
|
||||
|
||||
def test_mustBeStr(self):
|
||||
"""
|
||||
C{URLPath.fromString} must take a L{str} argument.
|
||||
"""
|
||||
with self.assertRaises(ValueError):
|
||||
urlpath.URLPath.fromString(None)
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
urlpath.URLPath.fromString(b"someurl")
|
||||
|
||||
|
||||
class UnicodeURLPathTests(_BaseURLPathTests, unittest.TestCase):
|
||||
"""
|
||||
Tests for interacting with a L{URLPath} created with C{fromString} and a
|
||||
L{str} argument.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.path = urlpath.URLPath.fromString(
|
||||
"http://example.com/foo/bar?yes=no&no=yes#footer"
|
||||
)
|
||||
|
||||
def test_nonASCIICharacters(self):
|
||||
"""
|
||||
L{URLPath.fromString} can load non-ASCII characters.
|
||||
"""
|
||||
url = urlpath.URLPath.fromString("http://example.com/\xff\x00")
|
||||
self.assertEqual(str(url), "http://example.com/%C3%BF%00")
.venv/lib/python3.12/site-packages/twisted/python/test/test_util.py (1078 lines; diff suppressed because it is too large)
@@ -0,0 +1,64 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.win32}.
|
||||
"""
|
||||
|
||||
from twisted.python import reflect, win32
|
||||
from twisted.trial import unittest
|
||||
|
||||
|
||||
class CommandLineQuotingTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for L{cmdLineQuote}.
|
||||
"""
|
||||
|
||||
def test_argWithoutSpaces(self) -> None:
|
||||
"""
|
||||
Calling C{cmdLineQuote} with an argument with no spaces returns
|
||||
the argument unchanged.
|
||||
"""
|
||||
self.assertEqual(win32.cmdLineQuote("an_argument"), "an_argument")
|
||||
|
||||
def test_argWithSpaces(self) -> None:
|
||||
"""
|
||||
Calling C{cmdLineQuote} with an argument containing spaces returns
|
||||
the argument surrounded by quotes.
|
||||
"""
|
||||
self.assertEqual(win32.cmdLineQuote("An Argument"), '"An Argument"')
|
||||
|
||||
def test_emptyStringArg(self) -> None:
|
||||
"""
|
||||
Calling C{cmdLineQuote} with an empty string returns a quoted empty
|
||||
string.
|
||||
"""
|
||||
self.assertEqual(win32.cmdLineQuote(""), '""')
|
||||
|
||||
|
||||
class DeprecationTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for deprecated (Fake)WindowsError.
|
||||
"""
|
||||
|
||||
def test_deprecation_FakeWindowsError(self) -> None:
|
||||
"""Importing C{FakeWindowsError} triggers a L{DeprecationWarning}."""
|
||||
|
||||
self.assertWarns(
|
||||
DeprecationWarning,
|
||||
"twisted.python.win32.FakeWindowsError was deprecated in Twisted 21.2.0: "
|
||||
"Catch OSError and check presence of 'winerror' attribute.",
|
||||
reflect.__file__,
|
||||
lambda: reflect.namedAny("twisted.python.win32.FakeWindowsError"),
|
||||
)
|
||||
|
||||
def test_deprecation_WindowsError(self) -> None:
|
||||
"""Importing C{WindowsError} triggers a L{DeprecationWarning}."""
|
||||
|
||||
self.assertWarns(
|
||||
DeprecationWarning,
|
||||
"twisted.python.win32.WindowsError was deprecated in Twisted 21.2.0: "
|
||||
"Catch OSError and check presence of 'winerror' attribute.",
|
||||
reflect.__file__,
|
||||
lambda: reflect.namedAny("twisted.python.win32.WindowsError"),
|
||||
)
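# A minimal sketch of the quoting behaviour verified by CommandLineQuotingTests:
# arguments without spaces pass through unchanged, while arguments containing
# spaces and the empty string come back wrapped in double quotes.
from twisted.python import win32

assert win32.cmdLineQuote("an_argument") == "an_argument"
assert win32.cmdLineQuote("An Argument") == '"An Argument"'
assert win32.cmdLineQuote("") == '""'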
@@ -0,0 +1,109 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Test cases covering L{twisted.python.zippath}.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import zipfile
|
||||
from typing import Union
|
||||
|
||||
from twisted.python.filepath import _coerceToFilesystemEncoding
|
||||
from twisted.python.zippath import ZipArchive, ZipPath
|
||||
from twisted.test.test_paths import AbstractFilePathTests
|
||||
|
||||
|
||||
def zipit(dirname: str | bytes, zfname: str | bytes) -> None:
|
||||
"""
|
||||
Create a zipfile at zfname, containing the contents of dirname.
|
||||
"""
|
||||
coercedDirname = _coerceToFilesystemEncoding("", dirname)
|
||||
coercedZfname = _coerceToFilesystemEncoding("", zfname)
|
||||
|
||||
with zipfile.ZipFile(coercedZfname, "w") as zf:
|
||||
for (
|
||||
root,
|
||||
ignored,
|
||||
files,
|
||||
) in os.walk(coercedDirname):
|
||||
for fname in files:
|
||||
fspath = os.path.join(root, fname)
|
||||
arcpath = os.path.join(root, fname)[len(dirname) + 1 :]
|
||||
zf.write(fspath, arcpath)
|
||||
|
||||
|
||||
class ZipFilePathTests(AbstractFilePathTests):
|
||||
"""
|
||||
Test various L{ZipPath} path manipulations as well as reprs for L{ZipPath}
|
||||
and L{ZipArchive}.
|
||||
"""
|
||||
|
||||
path: ZipArchive[bytes] # type:ignore[assignment]
|
||||
root: ZipArchive[bytes] # type:ignore[assignment]
|
||||
|
||||
def setUp(self) -> None:
|
||||
AbstractFilePathTests.setUp(self)
|
||||
zipit(self.cmn, self.cmn + b".zip")
|
||||
self.nativecmn = _coerceToFilesystemEncoding("", self.cmn)
|
||||
self.path = ZipArchive(self.cmn + b".zip")
|
||||
self.root = self.path
|
||||
self.all = [x.replace(self.cmn, self.cmn + b".zip") for x in self.all]
|
||||
|
||||
def test_sibling(self) -> None:
|
||||
"""
|
||||
L{ZipPath.sibling} returns a path at the same level.
|
||||
"""
|
||||
self.assertEqual(self.path.child("one").sibling("two"), self.path.child("two"))
|
||||
|
||||
def test_zipPathRepr(self) -> None:
|
||||
"""
|
||||
Make sure that invoking ZipPath's repr prints the correct class name
|
||||
and an absolute path to the zip file.
|
||||
"""
|
||||
child: Union[ZipPath[str, bytes], ZipPath[str, str]] = self.path.child("foo")
|
||||
pathRepr = "ZipPath({!r})".format(
|
||||
os.path.abspath(self.nativecmn + ".zip" + os.sep + "foo"),
|
||||
)
|
||||
|
||||
# Check for an absolute path
|
||||
self.assertEqual(repr(child), pathRepr)
|
||||
|
||||
# Create a path to the file rooted in the current working directory
|
||||
relativeCommon = self.nativecmn.replace(os.getcwd() + os.sep, "", 1) + ".zip"
|
||||
relpath = ZipArchive(relativeCommon)
|
||||
child = relpath.child("foo")
|
||||
|
||||
# Check using a path without the cwd prepended
|
||||
self.assertEqual(repr(child), pathRepr)
|
||||
|
||||
def test_zipPathReprParentDirSegment(self) -> None:
|
||||
"""
|
||||
The repr of a ZipPath with C{".."} in the internal part of its path
|
||||
includes the C{".."} rather than applying the usual parent directory
|
||||
meaning.
|
||||
"""
|
||||
child = self.path.child("foo").child("..").child("bar")
|
||||
pathRepr = "ZipPath(%r)" % (
|
||||
self.nativecmn + ".zip" + os.sep.join(["", "foo", "..", "bar"])
|
||||
)
|
||||
self.assertEqual(repr(child), pathRepr)
|
||||
|
||||
def test_zipArchiveRepr(self) -> None:
|
||||
"""
|
||||
Make sure that invoking ZipArchive's repr prints the correct class
|
||||
name and an absolute path to the zip file.
|
||||
"""
|
||||
path = ZipArchive(self.nativecmn + ".zip")
|
||||
pathRepr = "ZipArchive({!r})".format(os.path.abspath(self.nativecmn + ".zip"))
|
||||
|
||||
# Check for an absolute path
|
||||
self.assertEqual(repr(path), pathRepr)
|
||||
|
||||
# Create a path to the file rooted in the current working directory
|
||||
relativeCommon = self.nativecmn.replace(os.getcwd() + os.sep, "", 1) + ".zip"
|
||||
relpath = ZipArchive(relativeCommon)
|
||||
|
||||
# Check using a path without the cwd prepended
|
||||
self.assertEqual(repr(relpath), pathRepr)
|
||||
@@ -0,0 +1,331 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Tests for L{twisted.python.zipstream}
|
||||
"""
|
||||
|
||||
import random
|
||||
import struct
|
||||
import zipfile
|
||||
from hashlib import md5
|
||||
|
||||
from twisted.python import filepath, zipstream
|
||||
from twisted.trial import unittest
|
||||
|
||||
|
||||
class FileEntryMixin:
|
||||
"""
|
||||
File entry classes should behave as file-like objects
|
||||
"""
|
||||
|
||||
def getFileEntry(self, contents):
|
||||
"""
|
||||
Return an appropriate zip file entry
|
||||
"""
|
||||
filename = self.mktemp()
|
||||
with zipfile.ZipFile(filename, "w", self.compression) as z:
|
||||
z.writestr("content", contents)
|
||||
z = zipstream.ChunkingZipFile(filename, "r")
|
||||
return z.readfile("content")
|
||||
|
||||
def test_isatty(self):
|
||||
"""
|
||||
zip files should not be ttys, so isatty() should be false
|
||||
"""
|
||||
with self.getFileEntry("") as fileEntry:
|
||||
self.assertFalse(fileEntry.isatty())
|
||||
|
||||
def test_closed(self):
|
||||
"""
|
||||
The C{closed} attribute should reflect whether C{close()} has been
|
||||
called.
|
||||
"""
|
||||
with self.getFileEntry("") as fileEntry:
|
||||
self.assertFalse(fileEntry.closed)
|
||||
self.assertTrue(fileEntry.closed)
|
||||
|
||||
def test_readline(self):
|
||||
"""
|
||||
C{readline()} should mirror L{file.readline} and return up to a single
|
||||
delimiter.
|
||||
"""
|
||||
with self.getFileEntry(b"hoho\nho") as fileEntry:
|
||||
self.assertEqual(fileEntry.readline(), b"hoho\n")
|
||||
self.assertEqual(fileEntry.readline(), b"ho")
|
||||
self.assertEqual(fileEntry.readline(), b"")
|
||||
|
||||
def test_next(self):
|
||||
"""
|
||||
Zip file entries should implement the iterator protocol as files do.
|
||||
"""
|
||||
with self.getFileEntry(b"ho\nhoho") as fileEntry:
|
||||
self.assertEqual(fileEntry.next(), b"ho\n")
|
||||
self.assertEqual(fileEntry.next(), b"hoho")
|
||||
self.assertRaises(StopIteration, fileEntry.next)
|
||||
|
||||
def test_readlines(self):
|
||||
"""
|
||||
C{readlines()} should return a list of all the lines.
|
||||
"""
|
||||
with self.getFileEntry(b"ho\nho\nho") as fileEntry:
|
||||
self.assertEqual(fileEntry.readlines(), [b"ho\n", b"ho\n", b"ho"])
|
||||
|
||||
def test_iteration(self):
|
||||
"""
|
||||
C{__iter__()} and C{xreadlines()} should return C{self}.
|
||||
"""
|
||||
with self.getFileEntry("") as fileEntry:
|
||||
self.assertIs(iter(fileEntry), fileEntry)
|
||||
self.assertIs(fileEntry.xreadlines(), fileEntry)
|
||||
|
||||
def test_readWhole(self):
|
||||
"""
|
||||
C{.read()} should read the entire file.
|
||||
"""
|
||||
contents = b"Hello, world!"
|
||||
with self.getFileEntry(contents) as entry:
|
||||
self.assertEqual(entry.read(), contents)
|
||||
|
||||
def test_readPartial(self):
|
||||
"""
|
||||
C{.read(num)} should read num bytes from the file.
|
||||
"""
|
||||
contents = "0123456789"
|
||||
with self.getFileEntry(contents) as entry:
|
||||
one = entry.read(4)
|
||||
two = entry.read(200)
|
||||
self.assertEqual(one, b"0123")
|
||||
self.assertEqual(two, b"456789")
|
||||
|
||||
def test_tell(self):
|
||||
"""
|
||||
C{.tell()} should return the number of bytes that have been read so
|
||||
far.
|
||||
"""
|
||||
contents = "x" * 100
|
||||
with self.getFileEntry(contents) as entry:
|
||||
entry.read(2)
|
||||
self.assertEqual(entry.tell(), 2)
|
||||
entry.read(4)
|
||||
self.assertEqual(entry.tell(), 6)
|
||||
|
||||
|
||||
class DeflatedZipFileEntryTests(FileEntryMixin, unittest.TestCase):
|
||||
"""
|
||||
DeflatedZipFileEntry should be file-like
|
||||
"""
|
||||
|
||||
compression = zipfile.ZIP_DEFLATED
|
||||
|
||||
|
||||
class ZipFileEntryTests(FileEntryMixin, unittest.TestCase):
|
||||
"""
|
||||
ZipFileEntry should be file-like
|
||||
"""
|
||||
|
||||
compression = zipfile.ZIP_STORED
|
||||
|
||||
|
||||
class ZipstreamTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for twisted.python.zipstream
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""
|
||||
Creates junk data that can be compressed and a test directory for any
|
||||
files that will be created
|
||||
"""
|
||||
self.testdir = filepath.FilePath(self.mktemp())
|
||||
self.testdir.makedirs()
|
||||
self.unzipdir = self.testdir.child("unzipped")
|
||||
self.unzipdir.makedirs()
|
||||
|
||||
def makeZipFile(self, contents, directory=""):
|
||||
"""
|
||||
Makes a zip file archive containing len(contents) files. Contents
|
||||
should be a list of strings, each string being the content of one file.
|
||||
"""
|
||||
zpfilename = self.testdir.child("zipfile.zip").path
|
||||
with zipfile.ZipFile(zpfilename, "w") as zpfile:
|
||||
for i, content in enumerate(contents):
|
||||
filename = str(i)
|
||||
if directory:
|
||||
filename = directory + "/" + filename
|
||||
zpfile.writestr(filename, content)
|
||||
return zpfilename
|
||||
|
||||
def test_invalidMode(self):
|
||||
"""
|
||||
A ChunkingZipFile opened in write-mode should not allow .readfile(),
|
||||
and raise a RuntimeError instead.
|
||||
"""
|
||||
with zipstream.ChunkingZipFile(self.mktemp(), "w") as czf:
|
||||
self.assertRaises(RuntimeError, czf.readfile, "something")
|
||||
|
||||
def test_closedArchive(self):
|
||||
"""
|
||||
A closed ChunkingZipFile should raise a L{RuntimeError} when
|
||||
.readfile() is invoked.
|
||||
"""
|
||||
czf = zipstream.ChunkingZipFile(self.makeZipFile(["something"]), "r")
|
||||
czf.close()
|
||||
self.assertRaises(RuntimeError, czf.readfile, "something")
|
||||
|
||||
def test_invalidHeader(self):
|
||||
"""
|
||||
A zipfile entry with the wrong magic number should raise BadZipFile for
|
||||
readfile(), but that should not affect other files in the archive.
|
||||
"""
|
||||
fn = self.makeZipFile(["test contents", "more contents"])
|
||||
with zipfile.ZipFile(fn, "r") as zf:
|
||||
zeroOffset = zf.getinfo("0").header_offset
|
||||
# Zero out just the one header.
|
||||
with open(fn, "r+b") as scribble:
|
||||
scribble.seek(zeroOffset, 0)
|
||||
scribble.write(b"0" * 4)
|
||||
with zipstream.ChunkingZipFile(fn) as czf:
|
||||
self.assertRaises(zipfile.BadZipFile, czf.readfile, "0")
|
||||
with czf.readfile("1") as zfe:
|
||||
self.assertEqual(zfe.read(), b"more contents")
|
||||
|
||||
def test_filenameMismatch(self):
|
||||
"""
|
||||
A zipfile entry with a different filename than is found in the central
|
||||
directory should raise BadZipFile.
|
||||
"""
|
||||
fn = self.makeZipFile([b"test contents", b"more contents"])
|
||||
with zipfile.ZipFile(fn, "r") as zf:
|
||||
info = zf.getinfo("0")
|
||||
info.filename = "not zero"
|
||||
with open(fn, "r+b") as scribble:
|
||||
scribble.seek(info.header_offset, 0)
|
||||
scribble.write(info.FileHeader())
|
||||
|
||||
with zipstream.ChunkingZipFile(fn) as czf:
|
||||
self.assertRaises(zipfile.BadZipFile, czf.readfile, "0")
|
||||
with czf.readfile("1") as zfe:
|
||||
self.assertEqual(zfe.read(), b"more contents")
|
||||
|
||||
def test_unsupportedCompression(self):
|
||||
"""
|
||||
A zipfile which describes an unsupported compression mechanism should
|
||||
raise BadZipFile.
|
||||
"""
|
||||
fn = self.mktemp()
|
||||
with zipfile.ZipFile(fn, "w") as zf:
|
||||
zi = zipfile.ZipInfo("0")
|
||||
zf.writestr(zi, "some data")
|
||||
# Mangle its compression type in the central directory; can't do
|
||||
# this before the writestr call or zipfile will (correctly) tell us
|
||||
# not to pass bad compression types :)
|
||||
zi.compress_type = 1234
|
||||
|
||||
with zipstream.ChunkingZipFile(fn) as czf:
|
||||
self.assertRaises(zipfile.BadZipFile, czf.readfile, "0")
|
||||
|
||||
def test_extraData(self):
|
||||
"""
|
||||
readfile() should skip over 'extra' data present in the zip metadata.
|
||||
"""
|
||||
fn = self.mktemp()
|
||||
with zipfile.ZipFile(fn, "w") as zf:
|
||||
zi = zipfile.ZipInfo("0")
|
||||
extra_data = b"hello, extra"
|
||||
zi.extra = struct.pack("<hh", 42, len(extra_data)) + extra_data
|
||||
zf.writestr(zi, b"the real data")
|
||||
with zipstream.ChunkingZipFile(fn) as czf, czf.readfile("0") as zfe:
|
||||
self.assertEqual(zfe.read(), b"the real data")
|
||||
|
||||
def test_unzipIterChunky(self):
|
||||
"""
|
||||
L{twisted.python.zipstream.unzipIterChunky} returns an iterator which
|
||||
must be exhausted to completely unzip the input archive.
|
||||
"""
|
||||
numfiles = 10
|
||||
contents = ["This is test file %d!" % i for i in range(numfiles)]
|
||||
contents = [i.encode("ascii") for i in contents]
|
||||
zpfilename = self.makeZipFile(contents)
|
||||
list(zipstream.unzipIterChunky(zpfilename, self.unzipdir.path))
|
||||
self.assertEqual(set(self.unzipdir.listdir()), set(map(str, range(numfiles))))
|
||||
|
||||
for child in self.unzipdir.children():
|
||||
num = int(child.basename())
|
||||
self.assertEqual(child.getContent(), contents[num])
|
||||
|
||||
def test_unzipIterChunkyDirectory(self):
|
||||
"""
|
||||
The path to which a file is extracted by L{zipstream.unzipIterChunky}
|
||||
is determined by joining the C{directory} argument to C{unzipIterChunky} with the
|
||||
path within the archive of the file being extracted.
|
||||
"""
|
||||
numfiles = 10
|
||||
contents = ["This is test file %d!" % i for i in range(numfiles)]
|
||||
contents = [i.encode("ascii") for i in contents]
|
||||
zpfilename = self.makeZipFile(contents, "foo")
|
||||
list(zipstream.unzipIterChunky(zpfilename, self.unzipdir.path))
|
||||
fileContents = {str(num).encode("ascii") for num in range(numfiles)}
|
||||
self.assertEqual(set(self.unzipdir.child(b"foo").listdir()), fileContents)
|
||||
|
||||
for child in self.unzipdir.child(b"foo").children():
|
||||
num = int(child.basename())
|
||||
self.assertEqual(child.getContent(), contents[num])
|
||||
|
||||
# XXX these tests are kind of gross and old, but I think unzipIterChunky is
|
||||
# kind of a gross function anyway. We should really write an abstract
|
||||
# copyTo/moveTo that operates on FilePath and make sure ZipPath can support
|
||||
# it, then just deprecate / remove this stuff.
|
||||
def _unzipIterChunkyTest(self, compression, chunksize, lower, upper):
|
||||
"""
|
||||
unzipIterChunky should unzip the given number of bytes per iteration.
|
||||
"""
|
||||
junk = b""
|
||||
for n in range(1000):
|
||||
num = round(random.random(), 12)
|
||||
numEncoded = str(num).encode("ascii")
|
||||
junk += b" " + numEncoded
|
||||
|
||||
junkmd5 = md5(junk).hexdigest()
|
||||
|
||||
tempdir = filepath.FilePath(self.mktemp())
|
||||
tempdir.makedirs()
|
||||
zfpath = tempdir.child("bigfile.zip").path
|
||||
self._makebigfile(zfpath, compression, junk)
|
||||
uziter = zipstream.unzipIterChunky(zfpath, tempdir.path, chunksize=chunksize)
|
||||
r = next(uziter)
|
||||
# test that the number of chunks is in the right ballpark;
|
||||
# this could theoretically be any number but statistically it
|
||||
# should always be in this range
|
||||
approx = lower < r < upper
|
||||
self.assertTrue(approx)
|
||||
for r in uziter:
|
||||
pass
|
||||
self.assertEqual(r, 0)
|
||||
with tempdir.child("zipstreamjunk").open() as f:
|
||||
newmd5 = md5(f.read()).hexdigest()
|
||||
self.assertEqual(newmd5, junkmd5)
|
||||
|
||||
def test_unzipIterChunkyStored(self):
|
||||
"""
|
||||
unzipIterChunky should unzip the given number of bytes per iteration on
|
||||
a stored archive.
|
||||
"""
|
||||
self._unzipIterChunkyTest(zipfile.ZIP_STORED, 500, 35, 45)
|
||||
|
||||
def test_chunkyDeflated(self):
|
||||
"""
|
||||
unzipIterChunky should unzip the given number of bytes per iteration on
|
||||
a deflated archive.
|
||||
"""
|
||||
self._unzipIterChunkyTest(zipfile.ZIP_DEFLATED, 972, 23, 27)
|
||||
|
||||
def _makebigfile(self, filename, compression, junk):
|
||||
"""
|
||||
Create a zip file with the given file name and compression scheme.
|
||||
"""
|
||||
with zipfile.ZipFile(filename, "w", compression) as zf:
|
||||
for i in range(10):
|
||||
fn = "zipstream%d" % i
|
||||
zf.writestr(fn, "")
|
||||
zf.writestr("zipstreamjunk", junk)
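# A minimal sketch of the chunked-unzip API exercised above: unzipIterChunky
# returns an iterator that must be exhausted to finish extraction, yielding a
# count of remaining chunks that ends at 0. The helper name and chunksize are
# illustrative, not part of the vendored module.
from twisted.python import zipstream

def extractAll(archivePath, destinationDir):
    for remaining in zipstream.unzipIterChunky(archivePath, destinationDir, chunksize=4096):
        pass  # a real caller could report progress from `remaining` here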
.venv/lib/python3.12/site-packages/twisted/python/text.py
@@ -0,0 +1,205 @@
|
||||
# -*- test-case-name: twisted.test.test_text -*-
|
||||
#
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Miscellany of text-munging functions.
|
||||
"""
|
||||
|
||||
|
||||
def stringyString(object, indentation=""):
|
||||
"""
|
||||
Expansive string formatting for sequence types.
|
||||
|
||||
C{list.__str__} and C{dict.__str__} use C{repr()} to display their
|
||||
elements. This function also turns these sequence types
|
||||
into strings, but uses C{str()} on their elements instead.
|
||||
|
||||
Sequence elements are also displayed on separate lines, and nested
|
||||
sequences have nested indentation.
|
||||
"""
|
||||
braces = ""
|
||||
sl = []
|
||||
|
||||
if type(object) is dict:
|
||||
braces = "{}"
|
||||
for key, value in object.items():
|
||||
value = stringyString(value, indentation + " ")
|
||||
if isMultiline(value):
|
||||
if endsInNewline(value):
|
||||
value = value[: -len("\n")]
|
||||
sl.append(f"{indentation} {key}:\n{value}")
|
||||
else:
|
||||
# Oops. Will have to move that indentation.
|
||||
sl.append(f"{indentation} {key}: {value[len(indentation) + 3 :]}")
|
||||
|
||||
elif type(object) is tuple or type(object) is list:
|
||||
if type(object) is tuple:
|
||||
braces = "()"
|
||||
else:
|
||||
braces = "[]"
|
||||
|
||||
for element in object:
|
||||
element = stringyString(element, indentation + " ")
|
||||
sl.append(element.rstrip() + ",")
|
||||
else:
|
||||
sl[:] = map(lambda s, i=indentation: i + s, str(object).split("\n"))
|
||||
|
||||
if not sl:
|
||||
sl.append(indentation)
|
||||
|
||||
if braces:
|
||||
sl[0] = indentation + braces[0] + sl[0][len(indentation) + 1 :]
|
||||
sl[-1] = sl[-1] + braces[-1]
|
||||
|
||||
s = "\n".join(sl)
|
||||
|
||||
if isMultiline(s) and not endsInNewline(s):
|
||||
s = s + "\n"
|
||||
|
||||
return s
|
||||
|
||||
|
||||
def isMultiline(s):
|
||||
"""
|
||||
Returns C{True} if this string has a newline in it.
|
||||
"""
|
||||
return s.find("\n") != -1
|
||||
|
||||
|
||||
def endsInNewline(s):
|
||||
"""
|
||||
Returns C{True} if this string ends in a newline.
|
||||
"""
|
||||
return s[-len("\n") :] == "\n"
|
||||
|
||||
|
||||
def greedyWrap(inString, width=80):
|
||||
"""
|
||||
Given a string and a column width, return a list of lines.
|
||||
|
||||
Caveat: I use a stupid greedy word-wrapping
algorithm. I won't put two spaces at the end
of a sentence. I don't do full justification.
And no, I've never even *heard* of hyphenation.
|
||||
"""
|
||||
|
||||
outLines = []
|
||||
|
||||
# eww, evil hacks to allow paragraphs delimited by two \ns :(
|
||||
if inString.find("\n\n") >= 0:
|
||||
paragraphs = inString.split("\n\n")
|
||||
for para in paragraphs:
|
||||
outLines.extend(greedyWrap(para, width) + [""])
|
||||
return outLines
|
||||
inWords = inString.split()
|
||||
|
||||
column = 0
|
||||
ptr_line = 0
|
||||
while inWords:
|
||||
column = column + len(inWords[ptr_line])
|
||||
ptr_line = ptr_line + 1
|
||||
|
||||
if column > width:
|
||||
if ptr_line == 1:
|
||||
# This single word is too long, it will be the whole line.
|
||||
pass
|
||||
else:
|
||||
# We've gone too far, stop the line one word back.
|
||||
ptr_line = ptr_line - 1
|
||||
(l, inWords) = (inWords[0:ptr_line], inWords[ptr_line:])
|
||||
outLines.append(" ".join(l))
|
||||
|
||||
ptr_line = 0
|
||||
column = 0
|
||||
elif not (len(inWords) > ptr_line):
|
||||
# Clean up the last bit.
|
||||
outLines.append(" ".join(inWords))
|
||||
del inWords[:]
|
||||
else:
|
||||
# Space
|
||||
column = column + 1
|
||||
# next word
|
||||
|
||||
return outLines
|
||||
|
||||
|
||||
wordWrap = greedyWrap
|
||||
|
||||
|
||||
def removeLeadingBlanks(lines):
|
||||
ret = []
|
||||
for line in lines:
|
||||
if ret or line.strip():
|
||||
ret.append(line)
|
||||
return ret
|
||||
|
||||
|
||||
def removeLeadingTrailingBlanks(s):
|
||||
lines = removeLeadingBlanks(s.split("\n"))
|
||||
lines.reverse()
|
||||
lines = removeLeadingBlanks(lines)
|
||||
lines.reverse()
|
||||
return "\n".join(lines) + "\n"
|
||||
|
||||
|
||||
def splitQuoted(s):
|
||||
"""
|
||||
Like a string split, but don't break substrings inside quotes.
|
||||
|
||||
>>> splitQuoted('the "hairy monkey" likes pie')
|
||||
['the', 'hairy monkey', 'likes', 'pie']
|
||||
|
||||
Another one of those "someone must have a better solution for
|
||||
this" things. This implementation is a VERY DUMB hack done too
|
||||
quickly.
|
||||
"""
|
||||
out = []
|
||||
quot = None
|
||||
phrase = None
|
||||
for word in s.split():
|
||||
if phrase is None:
|
||||
if word and (word[0] in ('"', "'")):
|
||||
quot = word[0]
|
||||
word = word[1:]
|
||||
phrase = []
|
||||
|
||||
if phrase is None:
|
||||
out.append(word)
|
||||
else:
|
||||
if word and (word[-1] == quot):
|
||||
word = word[:-1]
|
||||
phrase.append(word)
|
||||
out.append(" ".join(phrase))
|
||||
phrase = None
|
||||
else:
|
||||
phrase.append(word)
|
||||
|
||||
return out
|
||||
|
||||
|
||||
def strFile(p, f, caseSensitive=True):
|
||||
"""
|
||||
Find whether string C{p} occurs in a read()able object C{f}.
|
||||
|
||||
@rtype: C{bool}
|
||||
"""
|
||||
buf = type(p)()
|
||||
buf_len = max(len(p), 2**2**2**2)
|
||||
if not caseSensitive:
|
||||
p = p.lower()
|
||||
while 1:
|
||||
r = f.read(buf_len - len(p))
|
||||
if not caseSensitive:
|
||||
r = r.lower()
|
||||
bytes_read = len(r)
|
||||
if bytes_read == 0:
|
||||
return False
|
||||
l = len(buf) + bytes_read - buf_len
|
||||
if l <= 0:
|
||||
buf = buf + r
|
||||
else:
|
||||
buf = buf[l:] + r
|
||||
if buf.find(p) != -1:
|
||||
return True
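# A minimal sketch of the two most commonly used helpers above; the expected
# values follow from the greedy algorithm and the splitQuoted docstring.
from twisted.python.text import greedyWrap, splitQuoted

assert greedyWrap("one two three four five six seven", width=10) == [
    "one two", "three four", "five six", "seven"
]
assert splitQuoted('the "hairy monkey" likes pie') == ["the", "hairy monkey", "likes", "pie"]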
.venv/lib/python3.12/site-packages/twisted/python/threadable.py
@@ -0,0 +1,137 @@
|
||||
# -*- test-case-name: twisted.python.test_threadable -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
A module to provide some very basic threading primitives, such as
|
||||
synchronization.
|
||||
"""
|
||||
|
||||
|
||||
from functools import wraps
|
||||
|
||||
|
||||
class DummyLock:
|
||||
"""
|
||||
Hack to allow locks to be unpickled on an unthreaded system.
|
||||
"""
|
||||
|
||||
def __reduce__(self):
|
||||
return (unpickle_lock, ())
|
||||
|
||||
|
||||
def unpickle_lock():
|
||||
if threadingmodule is not None:
|
||||
return XLock()
|
||||
else:
|
||||
return DummyLock()
|
||||
|
||||
|
||||
unpickle_lock.__safe_for_unpickling__ = True # type: ignore[attr-defined]
|
||||
|
||||
|
||||
def _synchPre(self):
|
||||
if "_threadable_lock" not in self.__dict__:
|
||||
_synchLockCreator.acquire()
|
||||
if "_threadable_lock" not in self.__dict__:
|
||||
self.__dict__["_threadable_lock"] = XLock()
|
||||
_synchLockCreator.release()
|
||||
self._threadable_lock.acquire()
|
||||
|
||||
|
||||
def _synchPost(self):
|
||||
self._threadable_lock.release()
|
||||
|
||||
|
||||
def _sync(klass, function):
|
||||
@wraps(function)
|
||||
def sync(self, *args, **kwargs):
|
||||
_synchPre(self)
|
||||
try:
|
||||
return function(self, *args, **kwargs)
|
||||
finally:
|
||||
_synchPost(self)
|
||||
|
||||
return sync
|
||||
|
||||
|
||||
def synchronize(*klasses):
|
||||
"""
|
||||
Make all methods listed in each class' synchronized attribute synchronized.
|
||||
|
||||
The synchronized attribute should be a list of strings, consisting of the
|
||||
names of methods that must be synchronized. If we are running in threaded
|
||||
mode these methods will be wrapped with a lock.
|
||||
"""
|
||||
if threadingmodule is not None:
|
||||
for klass in klasses:
|
||||
for methodName in klass.synchronized:
|
||||
sync = _sync(klass, klass.__dict__[methodName])
|
||||
setattr(klass, methodName, sync)
|
||||
|
||||
|
||||
def init(with_threads=1):
|
||||
"""Initialize threading.
|
||||
|
||||
Don't bother calling this. If it needs to happen, it will happen.
|
||||
"""
|
||||
global threaded, _synchLockCreator, XLock
|
||||
|
||||
if with_threads:
|
||||
if not threaded:
|
||||
if threadingmodule is not None:
|
||||
threaded = True
|
||||
|
||||
class XLock(threadingmodule._RLock):
|
||||
def __reduce__(self):
|
||||
return (unpickle_lock, ())
|
||||
|
||||
_synchLockCreator = XLock()
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"Cannot initialize threading, platform lacks thread support"
|
||||
)
|
||||
else:
|
||||
if threaded:
|
||||
raise RuntimeError("Cannot uninitialize threads")
|
||||
else:
|
||||
pass
|
||||
|
||||
|
||||
_dummyID = object()
|
||||
|
||||
|
||||
def getThreadID():
|
||||
if threadingmodule is None:
|
||||
return _dummyID
|
||||
return threadingmodule.current_thread().ident
|
||||
|
||||
|
||||
def isInIOThread():
|
||||
"""Are we in the thread responsible for I/O requests (the event loop)?"""
|
||||
return ioThread == getThreadID()
|
||||
|
||||
|
||||
def registerAsIOThread():
|
||||
"""Mark the current thread as responsible for I/O requests."""
|
||||
global ioThread
|
||||
ioThread = getThreadID()
|
||||
|
||||
|
||||
ioThread = None
|
||||
threaded = False
|
||||
# Define these globals which might be overwritten in init().
|
||||
_synchLockCreator = None
|
||||
XLock = None
|
||||
|
||||
|
||||
try:
|
||||
import threading as _threadingmodule
|
||||
except ImportError:
|
||||
threadingmodule = None
|
||||
else:
|
||||
threadingmodule = _threadingmodule
|
||||
init(True)
|
||||
|
||||
|
||||
__all__ = ["isInIOThread", "registerAsIOThread", "getThreadID", "XLock"]
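# A minimal sketch of synchronize(), following its docstring above: method
# names listed in a class-level `synchronized` attribute are wrapped with a
# per-instance lock when the platform has threads. Counter is illustrative.
from twisted.python import threadable

class Counter:
    synchronized = ["increment"]

    def __init__(self):
        self.value = 0

    def increment(self):
        self.value = self.value + 1

threadable.synchronize(Counter)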
.venv/lib/python3.12/site-packages/twisted/python/threadpool.py
@@ -0,0 +1,340 @@
|
||||
# -*- test-case-name: twisted.test.test_threadpool -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
twisted.python.threadpool: a pool of threads to which we dispatch tasks.
|
||||
|
||||
In most cases you can just use C{reactor.callInThread} and friends
|
||||
instead of creating a thread pool directly.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from threading import Thread, current_thread
|
||||
from typing import Any, Callable, List, Optional, TypeVar
|
||||
|
||||
from typing_extensions import ParamSpec, Protocol, TypedDict
|
||||
|
||||
from twisted._threads import pool as _pool
|
||||
from twisted.python import context, log
|
||||
from twisted.python.deprecate import deprecated
|
||||
from twisted.python.failure import Failure
|
||||
from twisted.python.versions import Version
|
||||
|
||||
_P = ParamSpec("_P")
|
||||
_R = TypeVar("_R")
|
||||
|
||||
|
||||
class _SupportsQsize(Protocol):
|
||||
def qsize(self) -> int:
|
||||
...
|
||||
|
||||
|
||||
class _State(TypedDict):
|
||||
min: int
|
||||
max: int
|
||||
|
||||
|
||||
WorkerStop = object()
|
||||
|
||||
|
||||
class ThreadPool:
|
||||
"""
|
||||
This class (hopefully) generalizes the functionality of a pool of threads
|
||||
to which work can be dispatched.
|
||||
|
||||
L{callInThread} and L{stop} should only be called from a single thread.
|
||||
|
||||
@ivar started: Whether or not the thread pool is currently running.
|
||||
@type started: L{bool}
|
||||
|
||||
@ivar threads: List of workers currently running in this thread pool.
|
||||
@type threads: L{list}
|
||||
|
||||
@ivar _pool: A hook for testing.
|
||||
@type _pool: callable compatible with L{_pool}
|
||||
"""
|
||||
|
||||
min = 5
|
||||
max = 20
|
||||
joined = False
|
||||
started = False
|
||||
name = None
|
||||
|
||||
threadFactory = Thread
|
||||
currentThread = staticmethod(
|
||||
deprecated(
|
||||
version=Version("Twisted", 22, 1, 0),
|
||||
replacement="threading.current_thread",
|
||||
)(current_thread)
|
||||
)
|
||||
_pool = staticmethod(_pool)
|
||||
|
||||
def __init__(
|
||||
self, minthreads: int = 5, maxthreads: int = 20, name: Optional[str] = None
|
||||
):
|
||||
"""
|
||||
Create a new threadpool.
|
||||
|
||||
@param minthreads: minimum number of threads in the pool
|
||||
@type minthreads: L{int}
|
||||
|
||||
@param maxthreads: maximum number of threads in the pool
|
||||
@type maxthreads: L{int}
|
||||
|
||||
@param name: The name to give this threadpool; visible in log messages.
|
||||
@type name: native L{str}
|
||||
"""
|
||||
assert minthreads >= 0, "minimum is negative"
|
||||
assert minthreads <= maxthreads, "minimum is greater than maximum"
|
||||
self.min = minthreads
|
||||
self.max = maxthreads
|
||||
self.name = name
|
||||
self.threads: List[Thread] = []
|
||||
|
||||
def trackingThreadFactory(*a: Any, **kw: Any) -> Thread:
|
||||
thread = self.threadFactory( # type: ignore[misc]
|
||||
*a, name=self._generateName(), **kw
|
||||
)
|
||||
self.threads.append(thread)
|
||||
return thread
|
||||
|
||||
def currentLimit() -> int:
|
||||
if not self.started:
|
||||
return 0
|
||||
return self.max
|
||||
|
||||
self._team = self._pool(currentLimit, trackingThreadFactory)
|
||||
|
||||
@property
|
||||
def workers(self) -> int:
|
||||
"""
|
||||
For legacy compatibility purposes, return a total number of workers.
|
||||
|
||||
@return: the current number of workers, both idle and busy (but not
|
||||
those that have been quit by L{ThreadPool.adjustPoolsize})
|
||||
@rtype: L{int}
|
||||
"""
|
||||
stats = self._team.statistics()
|
||||
return stats.idleWorkerCount + stats.busyWorkerCount
|
||||
|
||||
@property
|
||||
def working(self) -> list[None]:
|
||||
"""
|
||||
For legacy compatibility purposes, return the number of busy workers as
|
||||
expressed by a list the length of that number.
|
||||
|
||||
@return: the number of workers currently processing a work item.
|
||||
@rtype: L{list} of L{None}
|
||||
"""
|
||||
return [None] * self._team.statistics().busyWorkerCount
|
||||
|
||||
@property
|
||||
def waiters(self) -> list[None]:
|
||||
"""
|
||||
For legacy compatibility purposes, return the number of idle workers as
|
||||
expressed by a list the length of that number.
|
||||
|
||||
@return: the number of workers currently alive (with an allocated
|
||||
thread) but waiting for new work.
|
||||
@rtype: L{list} of L{None}
|
||||
"""
|
||||
return [None] * self._team.statistics().idleWorkerCount
|
||||
|
||||
@property
|
||||
def _queue(self) -> _SupportsQsize:
|
||||
"""
|
||||
For legacy compatibility purposes, return an object with a C{qsize}
|
||||
method that indicates the amount of work not yet allocated to a worker.
|
||||
|
||||
@return: an object with a C{qsize} method.
|
||||
"""
|
||||
|
||||
class NotAQueue:
|
||||
def qsize(q) -> int:
|
||||
"""
|
||||
Pretend to be a Python threading Queue and return the
|
||||
number of as-yet-unconsumed tasks.
|
||||
|
||||
@return: the amount of backlogged work not yet dispatched to a
|
||||
worker.
|
||||
@rtype: L{int}
|
||||
"""
|
||||
return self._team.statistics().backloggedWorkCount
|
||||
|
||||
return NotAQueue()
|
||||
|
||||
q = _queue # Yes, twistedchecker, I want a single-letter
|
||||
# attribute name.
|
||||
|
||||
def start(self) -> None:
|
||||
"""
|
||||
Start the threadpool.
|
||||
"""
|
||||
self.joined = False
|
||||
self.started = True
|
||||
# Start some threads.
|
||||
self.adjustPoolsize()
|
||||
backlog = self._team.statistics().backloggedWorkCount
|
||||
if backlog:
|
||||
self._team.grow(backlog)
|
||||
|
||||
def startAWorker(self) -> None:
|
||||
"""
|
||||
Increase the number of available workers for the thread pool by 1, up
|
||||
to the maximum allowed by L{ThreadPool.max}.
|
||||
"""
|
||||
self._team.grow(1)
|
||||
|
||||
def _generateName(self) -> str:
|
||||
"""
|
||||
Generate a name for a new pool thread.
|
||||
|
||||
@return: A distinctive name for the thread.
|
||||
@rtype: native L{str}
|
||||
"""
|
||||
return f"PoolThread-{self.name or id(self)}-{self.workers}"
|
||||
|
||||
def stopAWorker(self) -> None:
|
||||
"""
|
||||
Decrease the number of available workers by 1, by quitting one as soon
|
||||
as it's idle.
|
||||
"""
|
||||
self._team.shrink(1)
|
||||
|
||||
def __setstate__(self, state: _State) -> None:
|
||||
setattr(self, "__dict__", state)
|
||||
ThreadPool.__init__(self, self.min, self.max)
|
||||
|
||||
def __getstate__(self) -> _State:
|
||||
return _State(min=self.min, max=self.max)
|
||||
|
||||
def callInThread(
|
||||
self, func: Callable[_P, object], *args: _P.args, **kw: _P.kwargs
|
||||
) -> None:
|
||||
"""
|
||||
Call a callable object in a separate thread.
|
||||
|
||||
@param func: callable object to be called in separate thread
|
||||
|
||||
@param args: positional arguments to be passed to C{func}
|
||||
|
||||
@param kw: keyword args to be passed to C{func}
|
||||
"""
|
||||
self.callInThreadWithCallback(None, func, *args, **kw)
|
||||
|
||||
def callInThreadWithCallback(
|
||||
self,
|
||||
onResult: Optional[Callable[[bool, _R], object]],
|
||||
func: Callable[_P, _R],
|
||||
*args: _P.args,
|
||||
**kw: _P.kwargs,
|
||||
) -> None:
|
||||
"""
|
||||
Call a callable object in a separate thread and call C{onResult} with
|
||||
the return value, or a L{twisted.python.failure.Failure} if the
|
||||
callable raises an exception.
|
||||
|
||||
The callable is allowed to block, but the C{onResult} function must not
|
||||
block and should perform as little work as possible.
|
||||
|
||||
A typical action for C{onResult} for a threadpool used with a Twisted
|
||||
reactor would be to schedule a L{twisted.internet.defer.Deferred} to
|
||||
fire in the main reactor thread using C{.callFromThread}. Note that
|
||||
C{onResult} is called inside the separate thread, not inside the
|
||||
reactor thread.
|
||||
|
||||
@param onResult: a callable with the signature C{(success, result)}.
|
||||
If the callable returns normally, C{onResult} is called with
|
||||
C{(True, result)} where C{result} is the return value of the
|
||||
callable. If the callable throws an exception, C{onResult} is
|
||||
called with C{(False, failure)}.
|
||||
|
||||
Optionally, C{onResult} may be L{None}, in which case it is not
|
||||
called at all.
|
||||
|
||||
@param func: callable object to be called in separate thread
|
||||
|
||||
@param args: positional arguments to be passed to C{func}
|
||||
|
||||
@param kw: keyword arguments to be passed to C{func}
|
||||
"""
|
||||
if self.joined:
|
||||
return
|
||||
ctx = context.theContextTracker.currentContext().contexts[-1]
|
||||
|
||||
def inContext() -> None:
|
||||
try:
|
||||
result = inContext.theWork() # type: ignore[attr-defined]
|
||||
ok = True
|
||||
except BaseException:
|
||||
result = Failure()
|
||||
ok = False
|
||||
|
||||
inContext.theWork = None # type: ignore[attr-defined]
|
||||
if inContext.onResult is not None: # type: ignore[attr-defined]
|
||||
inContext.onResult(ok, result) # type: ignore[attr-defined]
|
||||
inContext.onResult = None # type: ignore[attr-defined]
|
||||
elif not ok:
|
||||
log.err(result)
|
||||
|
||||
# Avoid closing over func, ctx, args, kw so that we can carefully
|
||||
# manage their lifecycle. See
|
||||
# test_[AWS-SECRET-REMOVED]allback.
|
||||
inContext.theWork = lambda: context.call( # type: ignore[attr-defined]
|
||||
ctx, func, *args, **kw
|
||||
)
|
||||
inContext.onResult = onResult # type: ignore[attr-defined]
|
||||
|
||||
self._team.do(inContext)
|
||||
|
||||
def stop(self) -> None:
|
||||
"""
|
||||
Shutdown the threads in the threadpool.
|
||||
"""
|
||||
self.joined = True
|
||||
self.started = False
|
||||
self._team.quit()
|
||||
for thread in self.threads:
|
||||
thread.join()
|
||||
|
||||
def adjustPoolsize(
|
||||
self, minthreads: Optional[int] = None, maxthreads: Optional[int] = None
|
||||
) -> None:
|
||||
"""
|
||||
Adjust the number of available threads by setting C{min} and C{max} to
|
||||
new values.
|
||||
|
||||
@param minthreads: The new value for L{ThreadPool.min}.
|
||||
|
||||
@param maxthreads: The new value for L{ThreadPool.max}.
|
||||
"""
|
||||
if minthreads is None:
|
||||
minthreads = self.min
|
||||
if maxthreads is None:
|
||||
maxthreads = self.max
|
||||
|
||||
assert minthreads >= 0, "minimum is negative"
|
||||
assert minthreads <= maxthreads, "minimum is greater than maximum"
|
||||
|
||||
self.min = minthreads
|
||||
self.max = maxthreads
|
||||
if not self.started:
|
||||
return
|
||||
|
||||
# Kill off some threads if we have too many.
|
||||
if self.workers > self.max:
|
||||
self._team.shrink(self.workers - self.max)
|
||||
# Start some threads if we have too few.
|
||||
if self.workers < self.min:
|
||||
self._team.grow(self.min - self.workers)
|
||||
|
||||
def dumpStats(self) -> None:
|
||||
"""
|
||||
Dump some plain-text informational messages to the log about the state
|
||||
of this L{ThreadPool}.
|
||||
"""
|
||||
log.msg(f"waiters: {self.waiters}")
|
||||
log.msg(f"workers: {self.working}")
|
||||
log.msg(f"total: {self.threads}")
|
||||
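The callInThreadWithCallback docstring above describes marshalling results back to the reactor thread with callFromThread. Below is a minimal sketch of that pattern; the helper name deferToPool is illustrative only, and Twisted already ships an equivalent as twisted.internet.threads.deferToThreadPool.

from twisted.internet import defer, reactor
from twisted.python.threadpool import ThreadPool

def deferToPool(pool, f, *args, **kwargs):
    d = defer.Deferred()

    def onResult(success, result):
        # Called in the worker thread; hand the result to the reactor thread.
        if success:
            reactor.callFromThread(d.callback, result)
        else:
            reactor.callFromThread(d.errback, result)

    pool.callInThreadWithCallback(onResult, f, *args, **kwargs)
    return d

pool = ThreadPool(minthreads=1, maxthreads=4, name="example")
pool.start()
reactor.addSystemEventTrigger("during", "shutdown", pool.stop)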
@@ -0,0 +1,33 @@
|
||||
#compdef twist twistd trial conch cftp ckeygen pyhtmlizer tkconch
|
||||
#
|
||||
# This is the ZSH completion file for Twisted commands. It calls the current
|
||||
# command-line with the special "--_shell-completion" option which is handled
|
||||
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
|
||||
# handle the completions for this particular command-line.
|
||||
#
|
||||
# 3rd parties that wish to provide zsh completion for commands that
|
||||
# use t.p.usage may copy this file and change the first line to reference
|
||||
# the name(s) of their command(s).
|
||||
#
|
||||
# This file is included in the official Zsh distribution as
|
||||
# Completion/Unix/Command/_twisted
|
||||
|
||||
# redirect stderr to /dev/null otherwise deprecation warnings may get puked all
|
||||
# over the user's terminal if completing options for a deprecated command.
|
||||
# Redirect stderr to a file to debug errors.
|
||||
local cmd output
|
||||
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
|
||||
output=$("$cmd[@]" 2>/dev/null)
|
||||
|
||||
if [[ $output == "#compdef "* ]]; then
|
||||
# Looks like we got a valid completion function - so eval it to produce
|
||||
# the completion matches.
|
||||
eval $output
|
||||
else
|
||||
echo "\nCompletion error running command:" ${(qqq)cmd}
|
||||
echo -n "If output below is unhelpful you may need to edit this file and "
|
||||
echo "redirect stderr to a file."
|
||||
echo "Expected completion function, but instead got:"
|
||||
echo $output
|
||||
return 1
|
||||
fi
|
||||
15
.venv/lib/python3.12/site-packages/twisted/python/url.py
Normal file
@@ -0,0 +1,15 @@
|
||||
# -*- test-case-name: twisted.python.test.test_url -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
URL parsing, construction and rendering.
|
||||
|
||||
@see: L{URL}
|
||||
"""
|
||||
|
||||
from hyperlink import URL
|
||||
|
||||
__all__ = [
|
||||
"URL",
|
||||
]
|
||||
278
.venv/lib/python3.12/site-packages/twisted/python/urlpath.py
Normal file
@@ -0,0 +1,278 @@
|
||||
# -*- test-case-name: twisted.python.test.test_urlpath -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
L{URLPath}, a representation of a URL.
|
||||
"""
|
||||
|
||||
from typing import cast
|
||||
from urllib.parse import quote as urlquote, unquote as urlunquote, urlunsplit
|
||||
|
||||
from hyperlink import URL as _URL
|
||||
|
||||
_allascii = b"".join([chr(x).encode("ascii") for x in range(1, 128)])
|
||||
|
||||
|
||||
def _rereconstituter(name):
|
||||
"""
|
||||
Attribute declaration to preserve mutability on L{URLPath}.
|
||||
|
||||
@param name: a public attribute name
|
||||
@type name: native L{str}
|
||||
|
||||
@return: a descriptor which retrieves the private version of the attribute
|
||||
on get and calls C{_reconstitute} on set.
|
||||
"""
|
||||
privateName = "_" + name
|
||||
return property(
|
||||
lambda self: getattr(self, privateName),
|
||||
lambda self, value: (
|
||||
setattr(
|
||||
self,
|
||||
privateName,
|
||||
value if isinstance(value, bytes) else value.encode("charmap"),
|
||||
)
|
||||
or self._reconstitute()
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class URLPath:
|
||||
"""
|
||||
A representation of a URL.
|
||||
|
||||
@ivar scheme: The scheme of the URL (e.g. 'http').
|
||||
@type scheme: L{bytes}
|
||||
|
||||
@ivar netloc: The network location ("host").
|
||||
@type netloc: L{bytes}
|
||||
|
||||
@ivar path: The path on the network location.
|
||||
@type path: L{bytes}
|
||||
|
||||
@ivar query: The query argument (the portion after ? in the URL).
|
||||
@type query: L{bytes}
|
||||
|
||||
@ivar fragment: The page fragment (the portion after # in the URL).
|
||||
@type fragment: L{bytes}
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, scheme=b"", netloc=b"localhost", path=b"", query=b"", fragment=b""
|
||||
):
|
||||
self._scheme = scheme or b"http"
|
||||
self._netloc = netloc
|
||||
self._path = path or b"/"
|
||||
self._query = query
|
||||
self._fragment = fragment
|
||||
self._reconstitute()
|
||||
|
||||
def _reconstitute(self):
|
||||
"""
|
||||
Reconstitute this L{URLPath} from all its given attributes.
|
||||
"""
|
||||
urltext = urlquote(
|
||||
urlunsplit(
|
||||
(self._scheme, self._netloc, self._path, self._query, self._fragment)
|
||||
),
|
||||
safe=_allascii,
|
||||
)
|
||||
self._url = _URL.fromText(urltext.encode("ascii").decode("ascii"))
|
||||
|
||||
scheme = _rereconstituter("scheme")
|
||||
netloc = _rereconstituter("netloc")
|
||||
path = _rereconstituter("path")
|
||||
query = _rereconstituter("query")
|
||||
fragment = _rereconstituter("fragment")
|
||||
|
||||
@classmethod
|
||||
def _fromURL(cls, urlInstance):
|
||||
"""
|
||||
Reconstruct all the public instance variables of this L{URLPath} from
|
||||
its underlying L{_URL}.
|
||||
|
||||
@param urlInstance: the object to base this L{URLPath} on.
|
||||
@type urlInstance: L{_URL}
|
||||
|
||||
@return: a new L{URLPath}
|
||||
"""
|
||||
self = cls.__new__(cls)
|
||||
self._url = urlInstance.replace(path=urlInstance.path or [""])
|
||||
self._scheme = self._url.scheme.encode("ascii")
|
||||
self._netloc = self._url.authority().encode("ascii")
|
||||
self._path = (
|
||||
_URL(path=self._url.path, rooted=True).asURI().asText().encode("ascii")
|
||||
)
|
||||
self._query = (_URL(query=self._url.query).asURI().asText().encode("ascii"))[1:]
|
||||
self._fragment = self._url.fragment.encode("ascii")
|
||||
return self
|
||||
|
||||
def pathList(self, unquote=False, copy=True):
|
||||
"""
|
||||
Split this URL's path into its components.
|
||||
|
||||
@param unquote: whether to remove %-encoding from the returned strings.
|
||||
|
||||
@param copy: (ignored, do not use)
|
||||
|
||||
@return: The components of C{self.path}
|
||||
@rtype: L{list} of L{bytes}
|
||||
"""
|
||||
segments = self._url.path
|
||||
mapper = lambda x: x.encode("ascii")
|
||||
if unquote:
|
||||
mapper = lambda x, m=mapper: m(urlunquote(x))
|
||||
return [b""] + [mapper(segment) for segment in segments]
|
||||
|
||||
@classmethod
|
||||
def fromString(klass, url):
|
||||
"""
|
||||
Make a L{URLPath} from a L{str} or L{unicode}.
|
||||
|
||||
@param url: A L{str} representation of a URL.
|
||||
@type url: L{str} or L{unicode}.
|
||||
|
||||
@return: a new L{URLPath} derived from the given string.
|
||||
@rtype: L{URLPath}
|
||||
"""
|
||||
if not isinstance(url, str):
|
||||
raise ValueError("'url' must be a str")
|
||||
return klass._fromURL(_URL.fromText(url))
|
||||
|
||||
@classmethod
|
||||
def fromBytes(klass, url):
|
||||
"""
|
||||
Make a L{URLPath} from a L{bytes}.
|
||||
|
||||
@param url: A L{bytes} representation of a URL.
|
||||
@type url: L{bytes}
|
||||
|
||||
@return: a new L{URLPath} derived from the given L{bytes}.
|
||||
@rtype: L{URLPath}
|
||||
|
||||
@since: 15.4
|
||||
"""
|
||||
if not isinstance(url, bytes):
|
||||
raise ValueError("'url' must be bytes")
|
||||
quoted = urlquote(url, safe=_allascii)
|
||||
return klass.fromString(quoted)
|
||||
|
||||
@classmethod
|
||||
def fromRequest(klass, request):
|
||||
"""
|
||||
Make a L{URLPath} from a L{twisted.web.http.Request}.
|
||||
|
||||
@param request: A L{twisted.web.http.Request} to make the L{URLPath}
|
||||
from.
|
||||
|
||||
@return: a new L{URLPath} derived from the given request.
|
||||
@rtype: L{URLPath}
|
||||
"""
|
||||
return klass.fromBytes(request.prePathURL())
|
||||
|
||||
def _mod(self, newURL, keepQuery):
|
||||
"""
|
||||
Return a modified copy of C{self} using C{newURL}, keeping the query
|
||||
string if C{keepQuery} is C{True}.
|
||||
|
||||
@param newURL: a L{URL} to derive a new L{URLPath} from
|
||||
@type newURL: L{URL}
|
||||
|
||||
@param keepQuery: if C{True}, preserve the query parameters from
|
||||
C{self} on the new L{URLPath}; if C{False}, give the new L{URLPath}
|
||||
no query parameters.
|
||||
@type keepQuery: L{bool}
|
||||
|
||||
@return: a new L{URLPath}
|
||||
"""
|
||||
return self._fromURL(
|
||||
newURL.replace(fragment="", query=self._url.query if keepQuery else ())
|
||||
)
|
||||
|
||||
def sibling(self, path, keepQuery=False):
|
||||
"""
|
||||
Get the sibling of the current L{URLPath}. A sibling is a file which
|
||||
is in the same directory as the current file.
|
||||
|
||||
@param path: The path of the sibling.
|
||||
@type path: L{bytes}
|
||||
|
||||
@param keepQuery: Whether to keep the query parameters on the returned
|
||||
L{URLPath}.
|
||||
@type keepQuery: L{bool}
|
||||
|
||||
@return: a new L{URLPath}
|
||||
"""
|
||||
return self._mod(self._url.sibling(path.decode("ascii")), keepQuery)
|
||||
|
||||
def child(self, path, keepQuery=False):
|
||||
"""
|
||||
Get the child of this L{URLPath}.
|
||||
|
||||
@param path: The path of the child.
|
||||
@type path: L{bytes}
|
||||
|
||||
@param keepQuery: Whether to keep the query parameters on the returned
|
||||
L{URLPath}.
|
||||
@type keepQuery: L{bool}
|
||||
|
||||
@return: a new L{URLPath}
|
||||
"""
|
||||
return self._mod(self._url.child(path.decode("ascii")), keepQuery)
|
||||
|
||||
def parent(self, keepQuery=False):
|
||||
"""
|
||||
Get the parent directory of this L{URLPath}.
|
||||
|
||||
@param keepQuery: Whether to keep the query parameters on the returned
|
||||
L{URLPath}.
|
||||
@type keepQuery: L{bool}
|
||||
|
||||
@return: a new L{URLPath}
|
||||
"""
|
||||
return self._mod(self._url.click(".."), keepQuery)
|
||||
|
||||
def here(self, keepQuery=False):
|
||||
"""
|
||||
Get the current directory of this L{URLPath}.
|
||||
|
||||
@param keepQuery: Whether to keep the query parameters on the returned
|
||||
L{URLPath}.
|
||||
@type keepQuery: L{bool}
|
||||
|
||||
@return: a new L{URLPath}
|
||||
"""
|
||||
return self._mod(self._url.click("."), keepQuery)
|
||||
|
||||
def click(self, st):
|
||||
"""
|
||||
Return a path which is the URL where a browser would presumably take
|
||||
you if you clicked on a link with an HREF as given.
|
||||
|
||||
@param st: A relative URL, to be interpreted relative to C{self} as the
|
||||
base URL.
|
||||
@type st: L{bytes}
|
||||
|
||||
@return: a new L{URLPath}
|
||||
"""
|
||||
return self._fromURL(self._url.click(st.decode("ascii")))
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""
|
||||
The L{str} of a L{URLPath} is its URL text.
|
||||
"""
|
||||
return cast(str, self._url.asURI().asText())
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""
|
||||
The L{repr} of a L{URLPath} is an eval-able expression which will
|
||||
construct a similar L{URLPath}.
|
||||
"""
|
||||
return "URLPath(scheme={!r}, netloc={!r}, path={!r}, query={!r}, fragment={!r})".format(
|
||||
self.scheme,
|
||||
self.netloc,
|
||||
self.path,
|
||||
self.query,
|
||||
self.fragment,
|
||||
)
|
||||
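A short, illustrative sketch of the navigation methods defined above (child, sibling, click); the example URL is arbitrary. Note that these methods expect bytes paths, and the query string is dropped unless keepQuery=True.

from twisted.python.urlpath import URLPath

base = URLPath.fromString("http://example.com/a/b?x=1")
child = base.child(b"c")            # path becomes /a/b/c; query dropped
sibling = base.sibling(b"c")        # path becomes /a/c
clicked = base.click(b"other?y=2")  # resolved the way a browser resolves a link
print(str(base), str(child), str(sibling), str(clicked))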
1013
.venv/lib/python3.12/site-packages/twisted/python/usage.py
Normal file
File diff suppressed because it is too large
984
.venv/lib/python3.12/site-packages/twisted/python/util.py
Normal file
@@ -0,0 +1,984 @@
|
||||
# -*- test-case-name: twisted.python.test.test_util -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
from __future__ import annotations
|
||||
|
||||
import errno
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
from typing import AnyStr
|
||||
|
||||
try:
|
||||
import grp as _grp
|
||||
import pwd as _pwd
|
||||
except ImportError:
|
||||
pwd = None
|
||||
grp = None
|
||||
else:
|
||||
grp = _grp
|
||||
pwd = _pwd
|
||||
|
||||
try:
|
||||
from os import getgroups as _getgroups, setgroups as _setgroups
|
||||
except ImportError:
|
||||
setgroups = None
|
||||
getgroups = None
|
||||
else:
|
||||
setgroups = _setgroups
|
||||
getgroups = _getgroups
|
||||
|
||||
# For backwards compatibility, some things import this, so just link it
|
||||
from collections import OrderedDict
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
ClassVar,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
Sequence,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
)
|
||||
|
||||
from incremental import Version
|
||||
|
||||
from twisted.python.deprecate import deprecatedModuleAttribute
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 15, 5, 0),
|
||||
"Use collections.OrderedDict instead.",
|
||||
"twisted.python.util",
|
||||
"OrderedDict",
|
||||
)
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
class InsensitiveDict(MutableMapping[str, _T]):
|
||||
"""
|
||||
A dictionary with case-insensitive keys.
|
||||
|
||||
Normally keys are retained in their original form when queried with
|
||||
.keys() or .items(). If initialized with preserveCase=0, keys are both
|
||||
looked up in lowercase and returned in lowercase by .keys() and .items().
|
||||
"""
|
||||
|
||||
"""
|
||||
Modified recipe at http://code.activestate.com/recipes/66315/ originally
|
||||
contributed by Sami Hangaslammi.
|
||||
"""
|
||||
|
||||
def __init__(self, dict=None, preserve=1):
|
||||
"""
|
||||
Create an empty dictionary, or update from 'dict'.
|
||||
"""
|
||||
super().__init__()
|
||||
self.data = {}
|
||||
self.preserve = preserve
|
||||
if dict:
|
||||
self.update(dict)
|
||||
|
||||
def __delitem__(self, key):
|
||||
k = self._lowerOrReturn(key)
|
||||
del self.data[k]
|
||||
|
||||
def _lowerOrReturn(self, key):
|
||||
if isinstance(key, bytes) or isinstance(key, str):
|
||||
return key.lower()
|
||||
else:
|
||||
return key
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""
|
||||
Retrieve the value associated with 'key' (in any case).
|
||||
"""
|
||||
k = self._lowerOrReturn(key)
|
||||
return self.data[k][1]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
"""
|
||||
Associate 'value' with 'key'. If 'key' already exists, but
|
||||
in different case, it will be replaced.
|
||||
"""
|
||||
k = self._lowerOrReturn(key)
|
||||
self.data[k] = (key, value)
|
||||
|
||||
def has_key(self, key):
|
||||
"""
|
||||
Case insensitive test whether 'key' exists.
|
||||
"""
|
||||
k = self._lowerOrReturn(key)
|
||||
return k in self.data
|
||||
|
||||
__contains__ = has_key
|
||||
|
||||
def _doPreserve(self, key):
|
||||
if not self.preserve and (isinstance(key, bytes) or isinstance(key, str)):
|
||||
return key.lower()
|
||||
else:
|
||||
return key
|
||||
|
||||
def keys(self):
|
||||
"""
|
||||
List of keys in their original case.
|
||||
"""
|
||||
return list(self.iterkeys())
|
||||
|
||||
def values(self):
|
||||
"""
|
||||
List of values.
|
||||
"""
|
||||
return list(self.itervalues())
|
||||
|
||||
def items(self):
|
||||
"""
|
||||
List of (key,value) pairs.
|
||||
"""
|
||||
return list(self.iteritems())
|
||||
|
||||
def get(self, key, default=None):
|
||||
"""
|
||||
Retrieve value associated with 'key' or return default value
|
||||
if 'key' doesn't exist.
|
||||
"""
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
return default
|
||||
|
||||
def setdefault(self, key, default):
|
||||
"""
|
||||
If 'key' doesn't exist, associate it with the 'default' value.
|
||||
Return value associated with 'key'.
|
||||
"""
|
||||
if not self.has_key(key):
|
||||
self[key] = default
|
||||
return self[key]
|
||||
|
||||
def update(self, dict):
|
||||
"""
|
||||
Copy (key,value) pairs from 'dict'.
|
||||
"""
|
||||
for k, v in dict.items():
|
||||
self[k] = v
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""
|
||||
String representation of the dictionary.
|
||||
"""
|
||||
items = ", ".join([(f"{k!r}: {v!r}") for k, v in self.items()])
|
||||
return "InsensitiveDict({%s})" % items
|
||||
|
||||
def iterkeys(self):
|
||||
for v in self.data.values():
|
||||
yield self._doPreserve(v[0])
|
||||
|
||||
__iter__ = iterkeys
|
||||
|
||||
def itervalues(self):
|
||||
for v in self.data.values():
|
||||
yield v[1]
|
||||
|
||||
def iteritems(self):
|
||||
for k, v in self.data.values():
|
||||
yield self._doPreserve(k), v
|
||||
|
||||
_notFound = object()
|
||||
|
||||
def pop(self, key, default=_notFound):
|
||||
"""
|
||||
@see: L{dict.pop}
|
||||
@since: Twisted 21.2.0
|
||||
"""
|
||||
try:
|
||||
return self.data.pop(self._lowerOrReturn(key))[1]
|
||||
except KeyError:
|
||||
if default is self._notFound:
|
||||
raise
|
||||
return default
|
||||
|
||||
def popitem(self):
|
||||
i = self.items()[0]
|
||||
del self[i[0]]
|
||||
return i
|
||||
|
||||
def clear(self):
|
||||
for k in self.keys():
|
||||
del self[k]
|
||||
|
||||
def copy(self):
|
||||
return InsensitiveDict(self, self.preserve)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.data)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if isinstance(other, Mapping):
|
||||
for k, v in self.items():
|
||||
if k not in other or other[k] != v:
|
||||
return False
|
||||
return len(self) == len(other)
|
||||
else:
|
||||
return NotImplemented
|
||||
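A quick illustration of the case-insensitive behaviour described in the class docstring above (not part of util.py itself):

from twisted.python.util import InsensitiveDict

headers = InsensitiveDict({"Content-Type": "text/html"})
assert headers["content-type"] == "text/html"
assert "CONTENT-TYPE" in headers
assert headers.keys() == ["Content-Type"]  # original case is preserved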
|
||||
|
||||
def uniquify(lst):
|
||||
"""
|
||||
Make the elements of a list unique by inserting them into a dictionary.
|
||||
This must not change the order of the input lst.
|
||||
"""
|
||||
seen = set()
|
||||
result = []
|
||||
for k in lst:
|
||||
if k not in seen:
|
||||
result.append(k)
|
||||
seen.add(k)
|
||||
return result
|
||||
|
||||
|
||||
def padTo(n, seq, default=None):
|
||||
"""
|
||||
Pads a sequence out to n elements, filling in with a default value if it
is not long enough.
|
||||
|
||||
If the input sequence is longer than n, raises ValueError.
|
||||
|
||||
Details, details:
|
||||
This returns a new list; it does not extend the original sequence.
|
||||
The new list contains the values of the original sequence, not copies.
|
||||
"""
|
||||
|
||||
if len(seq) > n:
|
||||
raise ValueError("%d elements is more than %d." % (len(seq), n))
|
||||
|
||||
blank = [default] * n
|
||||
|
||||
blank[: len(seq)] = list(seq)
|
||||
|
||||
return blank
|
||||
|
||||
|
||||
def getPluginDirs():
|
||||
warnings.warn(
|
||||
"twisted.python.util.getPluginDirs is deprecated since Twisted 12.2.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
import twisted
|
||||
|
||||
systemPlugins = os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.abspath(twisted.__file__))), "plugins"
|
||||
)
|
||||
userPlugins = os.path.expanduser("~/TwistedPlugins")
|
||||
confPlugins = os.path.expanduser("~/.twisted")
|
||||
allPlugins = filter(os.path.isdir, [systemPlugins, userPlugins, confPlugins])
|
||||
return allPlugins
|
||||
|
||||
|
||||
def addPluginDir():
|
||||
warnings.warn(
|
||||
"twisted.python.util.addPluginDir is deprecated since Twisted 12.2.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
sys.path.extend(getPluginDirs())
|
||||
|
||||
|
||||
def sibpath(
|
||||
path: os.PathLike[AnyStr] | AnyStr, sibling: os.PathLike[AnyStr] | AnyStr
|
||||
) -> AnyStr:
|
||||
"""
|
||||
Return the path to a sibling of a file in the filesystem.
|
||||
|
||||
This is useful in conjunction with the special C{__file__} attribute
|
||||
that Python provides for modules, so modules can load associated
|
||||
resource files.
|
||||
"""
|
||||
return os.path.join(os.path.dirname(os.path.abspath(path)), sibling)
|
||||
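For example, a module can locate a data file shipped alongside it; the file name used here is purely illustrative:

from twisted.python.util import sibpath

# Path of a (hypothetical) "defaults.json" sitting next to this module.
configPath = sibpath(__file__, "defaults.json")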
|
||||
|
||||
def _getpass(prompt):
|
||||
"""
|
||||
Helper to turn IOErrors into KeyboardInterrupts.
|
||||
"""
|
||||
import getpass
|
||||
|
||||
try:
|
||||
return getpass.getpass(prompt)
|
||||
except OSError as e:
|
||||
if e.errno == errno.EINTR:
|
||||
raise KeyboardInterrupt
|
||||
raise
|
||||
except EOFError:
|
||||
raise KeyboardInterrupt
|
||||
|
||||
|
||||
def getPassword(
|
||||
prompt="Password: ",
|
||||
confirm=0,
|
||||
forceTTY=0,
|
||||
confirmPrompt="Confirm password: ",
|
||||
mismatchMessage="Passwords don't match.",
|
||||
):
|
||||
"""
|
||||
Obtain a password by prompting or from stdin.
|
||||
|
||||
If stdin is a terminal, prompt for a new password, and confirm (if
|
||||
C{confirm} is true) by asking again to make sure the user typed the same
|
||||
thing, as keystrokes will not be echoed.
|
||||
|
||||
If stdin is not a terminal, and C{forceTTY} is not true, read in a line
|
||||
and use it as the password, less the trailing newline, if any. If
|
||||
C{forceTTY} is true, attempt to open a tty and prompt for the password
|
||||
using it. Raise a RuntimeError if this is not possible.
|
||||
|
||||
@returns: C{str}
|
||||
"""
|
||||
isaTTY = hasattr(sys.stdin, "isatty") and sys.stdin.isatty()
|
||||
|
||||
old = None
|
||||
try:
|
||||
if not isaTTY:
|
||||
if forceTTY:
|
||||
try:
|
||||
old = sys.stdin, sys.stdout
|
||||
sys.stdin = sys.stdout = open("/dev/tty", "r+")
|
||||
except BaseException:
|
||||
raise RuntimeError("Cannot obtain a TTY")
|
||||
else:
|
||||
password = sys.stdin.readline()
|
||||
if password[-1] == "\n":
|
||||
password = password[:-1]
|
||||
return password
|
||||
|
||||
while 1:
|
||||
try1 = _getpass(prompt)
|
||||
if not confirm:
|
||||
return try1
|
||||
try2 = _getpass(confirmPrompt)
|
||||
if try1 == try2:
|
||||
return try1
|
||||
else:
|
||||
sys.stderr.write(mismatchMessage + "\n")
|
||||
finally:
|
||||
if old:
|
||||
sys.stdin.close()
|
||||
sys.stdin, sys.stdout = old
|
||||
|
||||
|
||||
def println(*a):
|
||||
sys.stdout.write(" ".join(map(str, a)) + "\n")
|
||||
|
||||
|
||||
# XXX
|
||||
# This does not belong here
|
||||
# But where does it belong?
|
||||
|
||||
|
||||
def str_xor(s, b):
|
||||
return "".join([chr(ord(c) ^ b) for c in s])
|
||||
|
||||
|
||||
def makeStatBar(width, maxPosition, doneChar="=", undoneChar="-", currentChar=">"):
|
||||
"""
|
||||
Creates a function that will return a string representing a progress bar.
|
||||
"""
|
||||
aValue = width / float(maxPosition)
|
||||
|
||||
def statBar(position, force=0, last=[""]):
|
||||
assert len(last) == 1, "Don't mess with the last parameter."
|
||||
done = int(aValue * position)
|
||||
toDo = width - done - 2
|
||||
result = f"[{doneChar * done}{currentChar}{undoneChar * toDo}]"
|
||||
if force:
|
||||
last[0] = result
|
||||
return result
|
||||
if result == last[0]:
|
||||
return ""
|
||||
last[0] = result
|
||||
return result
|
||||
|
||||
statBar.__doc__ = """statBar(position, force = 0) -> '[%s%s%s]'-style progress bar
|
||||
|
||||
returned string is %d characters long, and the range goes from 0..%d.
|
||||
The 'position' argument is where the '%s' will be drawn. If force is false,
|
||||
'' will be returned instead if the resulting progress bar is identical to the
|
||||
previously returned progress bar.
|
||||
""" % (
|
||||
doneChar * 3,
|
||||
currentChar,
|
||||
undoneChar * 3,
|
||||
width,
|
||||
maxPosition,
|
||||
currentChar,
|
||||
)
|
||||
return statBar
|
||||
|
||||
|
||||
def spewer(frame, s, ignored):
|
||||
"""
|
||||
A trace function for sys.settrace that prints every function or method call.
|
||||
"""
|
||||
from twisted.python import reflect
|
||||
|
||||
if "self" in frame.f_locals:
|
||||
se = frame.f_locals["self"]
|
||||
if hasattr(se, "__class__"):
|
||||
k = reflect.qual(se.__class__)
|
||||
else:
|
||||
k = reflect.qual(type(se))
|
||||
print(f"method {frame.f_code.co_name} of {k} at {id(se)}")
|
||||
else:
|
||||
print(
|
||||
"function %s in %s, line %s"
|
||||
% (frame.f_code.co_name, frame.f_code.co_filename, frame.f_lineno)
|
||||
)
|
||||
|
||||
|
||||
def searchupwards(start, files=[], dirs=[]):
|
||||
"""
|
||||
Walk upwards from start, looking for a directory containing
|
||||
all files and directories given as arguments::
|
||||
>>> searchupwards('.', ['foo.txt'], ['bar', 'bam'])
|
||||
|
||||
If not found, return None
|
||||
"""
|
||||
start = os.path.abspath(start)
|
||||
parents = start.split(os.sep)
|
||||
exists = os.path.exists
|
||||
join = os.sep.join
|
||||
isdir = os.path.isdir
|
||||
while len(parents):
|
||||
candidate = join(parents) + os.sep
|
||||
allpresent = 1
|
||||
for f in files:
|
||||
if not exists(f"{candidate}{f}"):
|
||||
allpresent = 0
|
||||
break
|
||||
if allpresent:
|
||||
for d in dirs:
|
||||
if not isdir(f"{candidate}{d}"):
|
||||
allpresent = 0
|
||||
break
|
||||
if allpresent:
|
||||
return candidate
|
||||
parents.pop(-1)
|
||||
return None
|
||||
|
||||
|
||||
class LineLog:
|
||||
"""
|
||||
A limited-size line-based log, useful for logging line-based
|
||||
protocols such as SMTP.
|
||||
|
||||
When the log fills up, old entries drop off the end.
|
||||
"""
|
||||
|
||||
def __init__(self, size=10):
|
||||
"""
|
||||
Create a new log, with size lines of storage (default 10).
|
||||
A log size of 0 (or less) means an infinite log.
|
||||
"""
|
||||
if size < 0:
|
||||
size = 0
|
||||
self.log = [None] * size
|
||||
self.size = size
|
||||
|
||||
def append(self, line):
|
||||
if self.size:
|
||||
self.log[:-1] = self.log[1:]
|
||||
self.log[-1] = line
|
||||
else:
|
||||
self.log.append(line)
|
||||
|
||||
def str(self):
|
||||
return bytes(self)
|
||||
|
||||
def __bytes__(self):
|
||||
return b"\n".join(filter(None, self.log))
|
||||
|
||||
def __getitem__(self, item):
|
||||
return list(filter(None, self.log))[item]
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
Empty the log.
|
||||
"""
|
||||
self.log = [None] * self.size
|
||||
|
||||
|
||||
def raises(exception, f, *args, **kwargs):
|
||||
"""
|
||||
Determine whether the given call raises the given exception.
|
||||
"""
|
||||
try:
|
||||
f(*args, **kwargs)
|
||||
except exception:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
|
||||
class IntervalDifferential:
|
||||
"""
|
||||
Given a list of intervals, generate the amount of time to sleep between
|
||||
"instants".
|
||||
|
||||
For example, given 7, 11 and 13, the three (infinite) sequences::
|
||||
|
||||
7 14 21 28 35 ...
|
||||
11 22 33 44 ...
|
||||
13 26 39 52 ...
|
||||
|
||||
will be generated, merged, and used to produce::
|
||||
|
||||
(7, 0) (4, 1) (2, 2) (1, 0) (7, 0) (1, 1) (4, 2) (2, 0) (5, 1) (2, 0)
|
||||
|
||||
New intervals may be added or removed as iteration proceeds using the
|
||||
proper methods.
|
||||
"""
|
||||
|
||||
def __init__(self, intervals, default=60):
|
||||
"""
|
||||
@type intervals: C{list} of C{int}, C{long}, or C{float}
|
||||
@param intervals: The intervals between instants.
|
||||
|
||||
@type default: C{int}, C{long}, or C{float}
|
||||
@param default: The duration to generate if the intervals list
|
||||
becomes empty.
|
||||
"""
|
||||
self.intervals = intervals[:]
|
||||
self.default = default
|
||||
|
||||
def __iter__(self):
|
||||
return _IntervalDifferentialIterator(self.intervals, self.default)
|
||||
|
||||
|
||||
class _IntervalDifferentialIterator:
|
||||
def __init__(self, i, d):
|
||||
self.intervals = [[e, e, n] for (e, n) in zip(i, range(len(i)))]
|
||||
self.default = d
|
||||
self.last = 0
|
||||
|
||||
def __next__(self):
|
||||
if not self.intervals:
|
||||
return (self.default, None)
|
||||
last, index = self.intervals[0][0], self.intervals[0][2]
|
||||
self.intervals[0][0] += self.intervals[0][1]
|
||||
self.intervals.sort()
|
||||
result = last - self.last
|
||||
self.last = last
|
||||
return result, index
|
||||
|
||||
# Iterators on Python 2 use next(), not __next__()
|
||||
next = __next__
|
||||
|
||||
def addInterval(self, i):
|
||||
if self.intervals:
|
||||
delay = self.intervals[0][0] - self.intervals[0][1]
|
||||
self.intervals.append([delay + i, i, len(self.intervals)])
|
||||
self.intervals.sort()
|
||||
else:
|
||||
self.intervals.append([i, i, 0])
|
||||
|
||||
def removeInterval(self, interval):
|
||||
for i in range(len(self.intervals)):
|
||||
if self.intervals[i][1] == interval:
|
||||
index = self.intervals[i][2]
|
||||
del self.intervals[i]
|
||||
for i in self.intervals:
|
||||
if i[2] > index:
|
||||
i[2] -= 1
|
||||
return
|
||||
raise ValueError("Specified interval not in IntervalDifferential")
|
||||
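The first few (delay, index) pairs produced for the intervals 7, 11 and 13 match the sequence quoted in the IntervalDifferential docstring above:

from itertools import islice

from twisted.python.util import IntervalDifferential

it = iter(IntervalDifferential([7, 11, 13]))
print(list(islice(it, 5)))  # [(7, 0), (4, 1), (2, 2), (1, 0), (7, 0)]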
|
||||
|
||||
class FancyStrMixin:
|
||||
"""
|
||||
Mixin providing a flexible implementation of C{__str__}.
|
||||
|
||||
C{__str__} output will begin with the name of the class, or the contents
|
||||
of the attribute C{fancybasename} if it is set.
|
||||
|
||||
The body of C{__str__} can be controlled by overriding C{showAttributes} in
|
||||
a subclass. Set C{showAttributes} to a sequence of strings naming
|
||||
attributes, or sequences of C{(attributeName, callable)}, or sequences of
|
||||
C{(attributeName, displayName, formatCharacter)}. In the second case, the
|
||||
callable is passed the value of the attribute and its return value used in
|
||||
the output of C{__str__}. In the final case, the attribute is looked up
|
||||
using C{attributeName}, but the output uses C{displayName} instead, and
|
||||
renders the value of the attribute using C{formatCharacter}, e.g. C{"%.3f"}
|
||||
might be used for a float.
|
||||
"""
|
||||
|
||||
# Override in subclasses:
|
||||
showAttributes: Sequence[
|
||||
Union[str, Tuple[str, str, str], Tuple[str, Callable[[Any], str]]]
|
||||
] = ()
|
||||
|
||||
def __str__(self) -> str:
|
||||
r = ["<", getattr(self, "fancybasename", self.__class__.__name__)]
|
||||
# The casts help mypy understand which type from the Union applies
|
||||
# in each 'if' case.
|
||||
# https://github.com/python/mypy/issues/9171
|
||||
for attr in self.showAttributes:
|
||||
if isinstance(attr, str):
|
||||
r.append(f" {attr}={getattr(self, attr)!r}")
|
||||
elif len(attr) == 2:
|
||||
r.append((f" {attr[0]}=") + attr[1](getattr(self, attr[0])))
|
||||
else:
|
||||
r.append((" %s=" + attr[2]) % (attr[1], getattr(self, attr[0])))
|
||||
r.append(">")
|
||||
return "".join(r)
|
||||
|
||||
__repr__ = __str__
|
||||
|
||||
|
||||
class FancyEqMixin:
|
||||
"""
|
||||
Mixin that implements C{__eq__} and C{__ne__}.
|
||||
|
||||
Comparison is done using the list of attributes defined in
|
||||
C{compareAttributes}.
|
||||
"""
|
||||
|
||||
compareAttributes: ClassVar[Sequence[str]] = ()
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not self.compareAttributes:
|
||||
return self is other
|
||||
if isinstance(self, other.__class__):
|
||||
return all(
|
||||
getattr(self, name) == getattr(other, name)
|
||||
for name in self.compareAttributes
|
||||
)
|
||||
return NotImplemented
|
||||
|
||||
def __ne__(self, other: object) -> bool:
|
||||
result = self.__eq__(other)
|
||||
if result is NotImplemented:
|
||||
return result
|
||||
return not result
|
||||
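A small sketch of the three showAttributes forms described above, together with attribute-based equality; the Point class is invented for illustration:

from twisted.python.util import FancyEqMixin, FancyStrMixin

class Point(FancyStrMixin, FancyEqMixin):
    showAttributes = ("x", ("y", lambda v: f"<{v}>"), ("z", "depth", "%.2f"))
    compareAttributes = ("x", "y", "z")

    def __init__(self, x, y, z):
        self.x, self.y, self.z = x, y, z

print(Point(1, 2, 3.5))                      # <Point x=1 y=<2> depth=3.50>
assert Point(1, 2, 3.5) == Point(1, 2, 3.5)  # compared via compareAttributes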
|
||||
|
||||
try:
|
||||
# initgroups is available in Python 2.7+ on UNIX-likes
|
||||
from os import initgroups as __initgroups
|
||||
except ImportError:
|
||||
_initgroups = None
|
||||
else:
|
||||
_initgroups = __initgroups
|
||||
|
||||
|
||||
if _initgroups is None:
|
||||
|
||||
def initgroups(uid, primaryGid):
|
||||
"""
|
||||
Do nothing.
|
||||
|
||||
The underlying platform support required to manipulate groups is missing.
|
||||
"""
|
||||
|
||||
else:
|
||||
|
||||
def initgroups(uid, primaryGid):
|
||||
"""
|
||||
Initializes the group access list.
|
||||
|
||||
This uses the stdlib support which calls initgroups(3) under the hood.
|
||||
|
||||
If the given user is a member of more than C{NGROUPS}, arbitrary
|
||||
groups will be silently discarded to bring the number below that
|
||||
limit.
|
||||
|
||||
@type uid: C{int}
|
||||
@param uid: The UID for which to look up group information.
|
||||
|
||||
@type primaryGid: C{int}
|
||||
@param primaryGid: The GID to include when setting the groups.
|
||||
"""
|
||||
return _initgroups(pwd.getpwuid(uid).pw_name, primaryGid)
|
||||
|
||||
|
||||
def switchUID(uid, gid, euid=False):
|
||||
"""
|
||||
Attempts to switch the uid/euid and gid/egid for the current process.
|
||||
|
||||
If C{uid} is the same value as L{os.getuid} (or L{os.geteuid}),
|
||||
this function will issue a L{UserWarning} and not raise an exception.
|
||||
|
||||
@type uid: C{int} or L{None}
|
||||
@param uid: the UID (or EUID) to switch the current process to. This
|
||||
parameter will be ignored if the value is L{None}.
|
||||
|
||||
@type gid: C{int} or L{None}
|
||||
@param gid: the GID (or EGID) to switch the current process to. This
|
||||
parameter will be ignored if the value is L{None}.
|
||||
|
||||
@type euid: C{bool}
|
||||
@param euid: if True, set only effective user-id rather than real user-id.
|
||||
(This option has no effect unless the process is running
|
||||
as root, in which case it means not to shed all
|
||||
privileges, retaining the option to regain privileges
|
||||
in cases such as spawning processes. Use with caution.)
|
||||
"""
|
||||
if euid:
|
||||
setuid = os.seteuid
|
||||
setgid = os.setegid
|
||||
getuid = os.geteuid
|
||||
else:
|
||||
setuid = os.setuid
|
||||
setgid = os.setgid
|
||||
getuid = os.getuid
|
||||
if gid is not None:
|
||||
setgid(gid)
|
||||
if uid is not None:
|
||||
if uid == getuid():
|
||||
uidText = euid and "euid" or "uid"
|
||||
actionText = f"tried to drop privileges and set{uidText} {uid}"
|
||||
problemText = f"{uidText} is already {getuid()}"
|
||||
warnings.warn(
|
||||
"{} but {}; should we be root? Continuing.".format(
|
||||
actionText, problemText
|
||||
)
|
||||
)
|
||||
else:
|
||||
initgroups(uid, gid)
|
||||
setuid(uid)
|
||||
|
||||
|
||||
def untilConcludes(f, *a, **kw):
|
||||
"""
|
||||
Call C{f} with the given arguments, handling C{EINTR} by retrying.
|
||||
|
||||
@param f: A function to call.
|
||||
|
||||
@param a: Positional arguments to pass to C{f}.
|
||||
|
||||
@param kw: Keyword arguments to pass to C{f}.
|
||||
|
||||
@return: Whatever C{f} returns.
|
||||
|
||||
@raise Exception: Whatever C{f} raises, except for C{OSError} with
|
||||
C{errno} set to C{EINTR}.
|
||||
"""
|
||||
while True:
|
||||
try:
|
||||
return f(*a, **kw)
|
||||
except OSError as e:
|
||||
if e.args[0] == errno.EINTR:
|
||||
continue
|
||||
raise
|
||||
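For instance, a read that would otherwise need a hand-written EINTR retry loop:

import os

from twisted.python.util import untilConcludes

r, w = os.pipe()
os.write(w, b"hello")
data = untilConcludes(os.read, r, 5)  # retried automatically on EINTR
assert data == b"hello"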
|
||||
|
||||
def mergeFunctionMetadata(f, g):
|
||||
"""
|
||||
Overwrite C{g}'s name and docstring with values from C{f}. Update
|
||||
C{g}'s instance dictionary with C{f}'s.
|
||||
|
||||
@return: A function that has C{g}'s behavior and metadata merged from
|
||||
C{f}.
|
||||
"""
|
||||
try:
|
||||
g.__name__ = f.__name__
|
||||
except TypeError:
|
||||
pass
|
||||
try:
|
||||
g.__doc__ = f.__doc__
|
||||
except (TypeError, AttributeError):
|
||||
pass
|
||||
try:
|
||||
g.__dict__.update(f.__dict__)
|
||||
except (TypeError, AttributeError):
|
||||
pass
|
||||
try:
|
||||
g.__module__ = f.__module__
|
||||
except TypeError:
|
||||
pass
|
||||
return g
|
||||
|
||||
|
||||
def nameToLabel(mname):
|
||||
"""
|
||||
Convert a string like a variable name into a slightly more human-friendly
|
||||
string with spaces and capitalized letters.
|
||||
|
||||
@type mname: C{str}
|
||||
@param mname: The name to convert to a label. This must be a string
|
||||
which could be used as a Python identifier. Strings which do not take
|
||||
this form will result in unpredictable behavior.
|
||||
|
||||
@rtype: C{str}
|
||||
"""
|
||||
labelList = []
|
||||
word = ""
|
||||
lastWasUpper = False
|
||||
for letter in mname:
|
||||
if letter.isupper() == lastWasUpper:
|
||||
# Continuing a word.
|
||||
word += letter
|
||||
else:
|
||||
# breaking a word OR beginning a word
|
||||
if lastWasUpper:
|
||||
# could be either
|
||||
if len(word) == 1:
|
||||
# keep going
|
||||
word += letter
|
||||
else:
|
||||
# acronym
|
||||
# we're processing the lowercase letter after the acronym-then-capital
|
||||
lastWord = word[:-1]
|
||||
firstLetter = word[-1]
|
||||
labelList.append(lastWord)
|
||||
word = firstLetter + letter
|
||||
else:
|
||||
# definitely breaking: lower to upper
|
||||
labelList.append(word)
|
||||
word = letter
|
||||
lastWasUpper = letter.isupper()
|
||||
if labelList:
|
||||
labelList[0] = labelList[0].capitalize()
|
||||
else:
|
||||
return mname.capitalize()
|
||||
labelList.append(word)
|
||||
return " ".join(labelList)
|
||||
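Two representative conversions (inputs chosen here purely for illustration):

from twisted.python.util import nameToLabel

assert nameToLabel("someOption") == "Some Option"
assert nameToLabel("fooBarBaz") == "Foo Bar Baz"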
|
||||
|
||||
def uidFromString(uidString):
|
||||
"""
|
||||
Convert a user identifier, as a string, into an integer UID.
|
||||
|
||||
@type uidString: C{str}
|
||||
@param uidString: A string giving the base-ten representation of a UID or
|
||||
the name of a user which can be converted to a UID via L{pwd.getpwnam}.
|
||||
|
||||
@rtype: C{int}
|
||||
@return: The integer UID corresponding to the given string.
|
||||
|
||||
@raise ValueError: If the user name is supplied and L{pwd} is not
|
||||
available.
|
||||
"""
|
||||
try:
|
||||
return int(uidString)
|
||||
except ValueError:
|
||||
if pwd is None:
|
||||
raise
|
||||
return pwd.getpwnam(uidString)[2]
|
||||
|
||||
|
||||
def gidFromString(gidString):
|
||||
"""
|
||||
Convert a group identifier, as a string, into an integer GID.
|
||||
|
||||
@type gidString: C{str}
|
||||
@param gidString: A string giving the base-ten representation of a GID or
|
||||
the name of a group which can be converted to a GID via L{grp.getgrnam}.
|
||||
|
||||
@rtype: C{int}
|
||||
@return: The integer GID corresponding to the given string.
|
||||
|
||||
@raise ValueError: If the group name is supplied and L{grp} is not
|
||||
available.
|
||||
"""
|
||||
try:
|
||||
return int(gidString)
|
||||
except ValueError:
|
||||
if grp is None:
|
||||
raise
|
||||
return grp.getgrnam(gidString)[2]
|
||||
|
||||
|
||||
def runAsEffectiveUser(euid, egid, function, *args, **kwargs):
|
||||
"""
|
||||
Run the given function wrapped with seteuid/setegid calls.
|
||||
|
||||
This will try to minimize the number of seteuid/setegid calls, comparing
|
||||
current and wanted permissions
|
||||
|
||||
@param euid: effective UID used to call the function.
|
||||
@type euid: C{int}
|
||||
|
||||
@param egid: effective GID used to call the function.
@type egid: C{int}
|
||||
|
||||
@param function: the function run with the specific permission.
|
||||
@type function: any callable
|
||||
|
||||
@param args: arguments passed to C{function}
|
||||
@param kwargs: keyword arguments passed to C{function}
|
||||
"""
|
||||
uid, gid = os.geteuid(), os.getegid()
|
||||
if uid == euid and gid == egid:
|
||||
return function(*args, **kwargs)
|
||||
else:
|
||||
if uid != 0 and (uid != euid or gid != egid):
|
||||
os.seteuid(0)
|
||||
if gid != egid:
|
||||
os.setegid(egid)
|
||||
if euid != 0 and (euid != uid or gid != egid):
|
||||
os.seteuid(euid)
|
||||
try:
|
||||
return function(*args, **kwargs)
|
||||
finally:
|
||||
if euid != 0 and (uid != euid or gid != egid):
|
||||
os.seteuid(0)
|
||||
if gid != egid:
|
||||
os.setegid(gid)
|
||||
if uid != 0 and (uid != euid or gid != egid):
|
||||
os.seteuid(uid)
|
||||
|
||||
|
||||
def runWithWarningsSuppressed(suppressedWarnings, f, *args, **kwargs):
|
||||
"""
|
||||
Run C{f(*args, **kwargs)}, but with some warnings suppressed.
|
||||
|
||||
Unlike L{twisted.internet.utils.runWithWarningsSuppressed}, it has no
|
||||
special support for L{twisted.internet.defer.Deferred}.
|
||||
|
||||
@param suppressedWarnings: A list of arguments to pass to
|
||||
L{warnings.filterwarnings}. Must be a sequence of 2-tuples (args,
|
||||
kwargs).
|
||||
|
||||
@param f: A callable.
|
||||
|
||||
@param args: Arguments for C{f}.
|
||||
|
||||
@param kwargs: Keyword arguments for C{f}
|
||||
|
||||
@return: The result of C{f(*args, **kwargs)}.
|
||||
"""
|
||||
with warnings.catch_warnings():
|
||||
for a, kw in suppressedWarnings:
|
||||
warnings.filterwarnings(*a, **kw)
|
||||
return f(*args, **kwargs)
|
||||
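The suppressedWarnings argument is a list of (args, kwargs) pairs for warnings.filterwarnings; for example:

import warnings

from twisted.python.util import runWithWarningsSuppressed

def noisy():
    warnings.warn("old API", DeprecationWarning)
    return 42

result = runWithWarningsSuppressed(
    [(("ignore",), {"category": DeprecationWarning})], noisy
)
assert result == 42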
|
||||
|
||||
__all__ = [
|
||||
"uniquify",
|
||||
"padTo",
|
||||
"getPluginDirs",
|
||||
"addPluginDir",
|
||||
"sibpath",
|
||||
"getPassword",
|
||||
"println",
|
||||
"makeStatBar",
|
||||
"OrderedDict",
|
||||
"InsensitiveDict",
|
||||
"spewer",
|
||||
"searchupwards",
|
||||
"LineLog",
|
||||
"raises",
|
||||
"IntervalDifferential",
|
||||
"FancyStrMixin",
|
||||
"FancyEqMixin",
|
||||
"switchUID",
|
||||
"mergeFunctionMetadata",
|
||||
"nameToLabel",
|
||||
"uidFromString",
|
||||
"gidFromString",
|
||||
"runAsEffectiveUser",
|
||||
"untilConcludes",
|
||||
"runWithWarningsSuppressed",
|
||||
]
|
||||
@@ -0,0 +1,13 @@
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Versions for Python packages.
|
||||
|
||||
See L{incremental}.
|
||||
"""
|
||||
|
||||
|
||||
from incremental import IncomparableVersions, Version, getVersionString
|
||||
|
||||
__all__ = ["Version", "getVersionString", "IncomparableVersions"]
|
||||
163
.venv/lib/python3.12/site-packages/twisted/python/win32.py
Normal file
@@ -0,0 +1,163 @@
|
||||
# -*- test-case-name: twisted.python.test.test_win32 -*-
|
||||
# Copyright (c) Twisted Matrix Laboratories.
|
||||
# See LICENSE for details.
|
||||
|
||||
"""
|
||||
Win32 utilities.
|
||||
|
||||
See also twisted.python.shortcut.
|
||||
|
||||
@var O_BINARY: the 'binary' mode flag on Windows, or 0 on other platforms, so it
|
||||
may safely be OR'ed into a mask for os.open.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from incremental import Version
|
||||
|
||||
from twisted.python.deprecate import deprecatedModuleAttribute
|
||||
|
||||
# https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes
|
||||
ERROR_FILE_NOT_FOUND = 2
|
||||
ERROR_PATH_NOT_FOUND = 3
|
||||
ERROR_INVALID_NAME = 123
|
||||
ERROR_DIRECTORY = 267
|
||||
|
||||
O_BINARY = getattr(os, "O_BINARY", 0)
|
||||
|
||||
|
||||
class FakeWindowsError(OSError):
|
||||
"""
|
||||
Stand-in for sometimes-builtin exception on platforms for which it
|
||||
is missing.
|
||||
"""
|
||||
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Catch OSError and check presence of 'winerror' attribute.",
|
||||
"twisted.python.win32",
|
||||
"FakeWindowsError",
|
||||
)
|
||||
|
||||
|
||||
try:
|
||||
WindowsError: OSError = WindowsError
|
||||
except NameError:
|
||||
WindowsError = FakeWindowsError
|
||||
|
||||
deprecatedModuleAttribute(
|
||||
Version("Twisted", 21, 2, 0),
|
||||
"Catch OSError and check presence of 'winerror' attribute.",
|
||||
"twisted.python.win32",
|
||||
"WindowsError",
|
||||
)
|
||||
|
||||
|
||||
_cmdLineQuoteRe = re.compile(r'(\\*)"')
|
||||
_cmdLineQuoteRe2 = re.compile(r"(\\+)\Z")
|
||||
|
||||
|
||||
def cmdLineQuote(s):
|
||||
"""
|
||||
Internal method for quoting a single command-line argument.
|
||||
|
||||
@param s: an unquoted string that you want to quote so that something that
|
||||
does cmd.exe-style unquoting will interpret it as a single argument,
|
||||
even if it contains spaces.
|
||||
@type s: C{str}
|
||||
|
||||
@return: a quoted string.
|
||||
@rtype: C{str}
|
||||
"""
|
||||
quote = ((" " in s) or ("\t" in s) or ('"' in s) or s == "") and '"' or ""
|
||||
return (
|
||||
quote
|
||||
+ _cmdLineQuoteRe2.sub(r"\1\1", _cmdLineQuoteRe.sub(r'\1\1\\"', s))
|
||||
+ quote
|
||||
)
|
||||
|
||||
|
||||
def quoteArguments(arguments):
|
||||
"""
|
||||
Quote an iterable of command-line arguments for passing to CreateProcess or
|
||||
a similar API. This allows the list passed to C{reactor.spawnProcess} to
|
||||
match the child process's C{sys.argv} properly.
|
||||
|
||||
@param arguments: an iterable of C{str}, each unquoted.
|
||||
|
||||
@return: a single string, with the given sequence quoted as necessary.
|
||||
"""
|
||||
return " ".join([cmdLineQuote(a) for a in arguments])
|
||||
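An illustrative call (argument values invented); exact escaping follows cmdLineQuote above, so arguments containing spaces or quotes round-trip through the child process's sys.argv:

from twisted.python.win32 import quoteArguments

cmdline = quoteArguments(["C:\\Tools\\foo.exe", "--label", 'say "hi"'])
print(cmdline)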
|
||||
|
||||
class _ErrorFormatter:
|
||||
"""
|
||||
Formatter for Windows error messages.
|
||||
|
||||
@ivar winError: A callable which takes one integer error number argument
|
||||
and returns a L{WindowsError} instance for that error (like
|
||||
L{ctypes.WinError}).
|
||||
|
||||
@ivar formatMessage: A callable which takes one integer error number
|
||||
argument and returns a C{str} giving the message for that error (like
|
||||
U{win32api.FormatMessage<http://
|
||||
timgolden.me.uk/pywin32-docs/win32api__FormatMessage_meth.html>}).
|
||||
|
||||
@ivar errorTab: A mapping from integer error numbers to C{str} messages
|
||||
which correspond to those errors (like I{socket.errorTab}).
|
||||
"""
|
||||
|
||||
def __init__(self, WinError, FormatMessage, errorTab):
|
||||
self.winError = WinError
|
||||
self.formatMessage = FormatMessage
|
||||
self.errorTab = errorTab
|
||||
|
||||
@classmethod
|
||||
def fromEnvironment(cls):
|
||||
"""
|
||||
Get as many of the platform-specific error translation objects as
|
||||
possible and return an instance of C{cls} created with them.
|
||||
"""
|
||||
try:
|
||||
from ctypes import WinError
|
||||
except ImportError:
|
||||
WinError = None
|
||||
try:
|
||||
from win32api import FormatMessage
|
||||
except ImportError:
|
||||
FormatMessage = None
|
||||
try:
|
||||
from socket import errorTab
|
||||
except ImportError:
|
||||
errorTab = None
|
||||
return cls(WinError, FormatMessage, errorTab)
|
||||
|
||||
def formatError(self, errorcode):
|
||||
"""
|
||||
Returns the string associated with a Windows error message, such as the
|
||||
ones found in socket.error.
|
||||
|
||||
Attempts direct lookup against the win32 API (via ctypes and then
pywin32 if available), then in the error table in the socket module,
|
||||
then finally defaulting to C{os.strerror}.
|
||||
|
||||
@param errorcode: the Windows error code
|
||||
@type errorcode: C{int}
|
||||
|
||||
@return: The error message string
|
||||
@rtype: C{str}
|
||||
"""
|
||||
if self.winError is not None:
|
||||
return self.winError(errorcode).strerror
|
||||
if self.formatMessage is not None:
|
||||
return self.formatMessage(errorcode)
|
||||
if self.errorTab is not None:
|
||||
result = self.errorTab.get(errorcode)
|
||||
if result is not None:
|
||||
return result
|
||||
return os.strerror(errorcode)
|
||||
|
||||
|
||||
formatError = _ErrorFormatter.fromEnvironment().formatError
|
||||
352
.venv/lib/python3.12/site-packages/twisted/python/zippath.py
Normal file
@@ -0,0 +1,352 @@
# -*- test-case-name: twisted.python.test.test_zippath -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
This module contains implementations of L{IFilePath} for zip files.

See the constructor of L{ZipArchive} for use.
"""
from __future__ import annotations

import errno
import os
import time
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    AnyStr,
    Dict,
    Generic,
    Iterable,
    List,
    Tuple,
    TypeVar,
    Union,
)
from zipfile import ZipFile

from zope.interface import implementer

from typing_extensions import Literal, Self

from twisted.python.compat import cmp, comparable
from twisted.python.filepath import (
    AbstractFilePath,
    FilePath,
    IFilePath,
    OtherAnyStr,
    UnlistableError,
    _coerceToFilesystemEncoding,
)

ZIP_PATH_SEP = "/"  # In zipfiles, "/" is universally used as the
# path separator, regardless of platform.

_ArchiveStr = TypeVar("_ArchiveStr", bytes, str)
_ZipStr = TypeVar("_ZipStr", bytes, str)
_ZipSelf = TypeVar("_ZipSelf", bound="ZipPath[Any, Any]")


@comparable
@implementer(IFilePath)
class ZipPath(Generic[_ZipStr, _ArchiveStr], AbstractFilePath[_ZipStr]):
    """
    I represent a file or directory contained within a zip file.
    """

    path: _ZipStr

    def __init__(
        self, archive: ZipArchive[_ArchiveStr], pathInArchive: _ZipStr
    ) -> None:
        """
        Don't construct me directly. Use C{ZipArchive.child()}.

        @param archive: a L{ZipArchive} instance.

        @param pathInArchive: a ZIP_PATH_SEP-separated string.
        """
        self.archive: ZipArchive[_ArchiveStr] = archive
        self.pathInArchive: _ZipStr = pathInArchive
        self._nativePathInArchive: _ArchiveStr = _coerceToFilesystemEncoding(
            archive._zipfileFilename, pathInArchive
        )

        # self.path pretends to be os-specific because that's the way the
        # 'zipimport' module does it.
        sep = _coerceToFilesystemEncoding(pathInArchive, ZIP_PATH_SEP)
        archiveFilename: _ZipStr = _coerceToFilesystemEncoding(
            pathInArchive, archive._zipfileFilename
        )
        segments: List[_ZipStr] = self.pathInArchive.split(sep)
        fakePath: _ZipStr = os.path.join(archiveFilename, *segments)
        self.path: _ZipStr = fakePath

    def __cmp__(self, other: object) -> int:
        if not isinstance(other, ZipPath):
            return NotImplemented
        return cmp(
            (self.archive, self.pathInArchive), (other.archive, other.pathInArchive)
        )

    def __repr__(self) -> str:
        parts: List[_ZipStr]
        parts = [
            _coerceToFilesystemEncoding(self.sep, os.path.abspath(self.archive.path))
        ]
        parts.extend(self.pathInArchive.split(self.sep))
        ossep = _coerceToFilesystemEncoding(self.sep, os.sep)
        return f"ZipPath({ossep.join(parts)!r})"

    @property
    def sep(self) -> _ZipStr:
        """
        Return a zip directory separator.

        @return: The zip directory separator.
        @returntype: The same type as C{self.path}.
        """
        return _coerceToFilesystemEncoding(self.path, ZIP_PATH_SEP)

    def _nativeParent(
        self,
    ) -> Union[ZipPath[_ZipStr, _ArchiveStr], ZipArchive[_ArchiveStr]]:
        """
        Return parent, discarding our own encoding in favor of whatever the
        archive's is.
        """
        splitup = self.pathInArchive.split(self.sep)
        if len(splitup) == 1:
            return self.archive
        return ZipPath(self.archive, self.sep.join(splitup[:-1]))

    def parent(self) -> Union[ZipPath[_ZipStr, _ArchiveStr], ZipArchive[_ZipStr]]:
        parent = self._nativeParent()
        if isinstance(parent, ZipArchive):
            return ZipArchive(
                _coerceToFilesystemEncoding(self.path, self.archive._zipfileFilename)
            )
        return parent

    if TYPE_CHECKING:

        def parents(
            self,
        ) -> Iterable[Union[ZipPath[_ZipStr, _ArchiveStr], ZipArchive[_ZipStr]]]:
            ...

    def child(self, path: OtherAnyStr) -> ZipPath[OtherAnyStr, _ArchiveStr]:
        """
        Return a new ZipPath representing a path in C{self.archive} which is
        a child of this path.

        @note: Requesting the C{".."} (or other special name) child will not
            cause L{InsecurePath} to be raised since these names do not have
            any special meaning inside a zip archive. Be particularly
            careful with the C{path} attribute (if you absolutely must use
            it) as this means it may include special names with special
            meaning outside of the context of a zip archive.
        """
        joiner = _coerceToFilesystemEncoding(path, ZIP_PATH_SEP)
        pathInArchive = _coerceToFilesystemEncoding(path, self.pathInArchive)
        return ZipPath(self.archive, joiner.join([pathInArchive, path]))

    def sibling(self, path: OtherAnyStr) -> ZipPath[OtherAnyStr, _ArchiveStr]:
        parent: Union[ZipPath[_ZipStr, _ArchiveStr], ZipArchive[_ZipStr]]
        rightTypedParent: Union[ZipPath[_ZipStr, _ArchiveStr], ZipArchive[_ArchiveStr]]

        parent = self.parent()
        rightTypedParent = self.archive if isinstance(parent, ZipArchive) else parent
        child: ZipPath[OtherAnyStr, _ArchiveStr] = rightTypedParent.child(path)
        return child

    def exists(self) -> bool:
        return self.isdir() or self.isfile()

    def isdir(self) -> bool:
        return self.pathInArchive in self.archive.childmap

    def isfile(self) -> bool:
        return self.pathInArchive in self.archive.zipfile.NameToInfo

    def islink(self) -> bool:
        return False

    def listdir(self) -> List[_ZipStr]:
        if self.exists():
            if self.isdir():
                parentArchivePath: _ArchiveStr = _coerceToFilesystemEncoding(
                    self.archive._zipfileFilename, self.pathInArchive
                )
                return [
                    _coerceToFilesystemEncoding(self.path, each)
                    for each in self.archive.childmap[parentArchivePath].keys()
                ]
            else:
                raise UnlistableError(OSError(errno.ENOTDIR, "Leaf zip entry listed"))
        else:
            raise UnlistableError(
                OSError(errno.ENOENT, "Non-existent zip entry listed")
            )

    def splitext(self) -> Tuple[_ZipStr, _ZipStr]:
        """
        Return a value similar to that returned by C{os.path.splitext}.
        """
        # This happens to work out because of the fact that we use OS-specific
        # path separators in the constructor to construct our fake 'path'
        # attribute.
        return os.path.splitext(self.path)

    def basename(self) -> _ZipStr:
        return self.pathInArchive.split(self.sep)[-1]

    def dirname(self) -> _ZipStr:
        # XXX NOTE: This API isn't a very good idea on filepath, but it's even
        # less meaningful here.
        return self.parent().path

    def open(self, mode: Literal["r", "w"] = "r") -> IO[bytes]:  # type:ignore[override]
        # TODO: liskov substitutability is broken here because the stdlib
        # zipfile does not support appending to files within archives, only to
        # archives themselves; we could fix this by emulating append mode.
        pathInArchive = _coerceToFilesystemEncoding("", self.pathInArchive)
        return self.archive.zipfile.open(pathInArchive, mode=mode)

    def changed(self) -> None:
        pass

    def getsize(self) -> int:
        """
        Retrieve this file's size.

        @return: file size, in bytes
        """
        pathInArchive = _coerceToFilesystemEncoding("", self.pathInArchive)
        return self.archive.zipfile.NameToInfo[pathInArchive].file_size

    def getAccessTime(self) -> float:
        """
        Retrieve this file's last access-time. This is the same as the last
        access time for the archive.

        @return: a number of seconds since the epoch
        """
        return self.archive.getAccessTime()

    def getModificationTime(self) -> float:
        """
        Retrieve this file's last modification time. This is the time of
        modification recorded in the zipfile.

        @return: a number of seconds since the epoch.
        """
        pathInArchive = _coerceToFilesystemEncoding("", self.pathInArchive)
        return time.mktime(
            self.archive.zipfile.NameToInfo[pathInArchive].date_time + (0, 0, 0)
        )

    def getStatusChangeTime(self) -> float:
        """
        Retrieve this file's last modification time. This name is provided for
        compatibility, and returns the same value as getmtime.

        @return: a number of seconds since the epoch.
        """
        return self.getModificationTime()


class ZipArchive(ZipPath[AnyStr, AnyStr]):
    """
    I am a L{FilePath}-like object which can wrap a zip archive as if it were a
    directory.

    It works similarly to L{FilePath} in L{bytes} and L{unicode} handling --
    instantiating with a L{bytes} will return a "bytes mode" L{ZipArchive},
    and instantiating with a L{unicode} will return a "text mode"
    L{ZipArchive}. Methods that return new L{ZipArchive} or L{ZipPath}
    instances will be in the mode of the argument to the creator method,
    converting if required.
    """

    _zipfileFilename: AnyStr

    @property
    def archive(self) -> Self:  # type: ignore[override]
        return self

    def __init__(self, archivePathname: AnyStr) -> None:
        """
        Create a ZipArchive, treating the archive at archivePathname as a zip
        file.

        @param archivePathname: a L{bytes} or L{unicode}, naming a path in the
            filesystem.
        """
        self.path = archivePathname
        self.zipfile = ZipFile(_coerceToFilesystemEncoding("", archivePathname))
        zfname = self.zipfile.filename
        assert (
            zfname is not None
        ), "zipfile must have filename when initialized with a path"
        self._zipfileFilename = _coerceToFilesystemEncoding(archivePathname, zfname)
        self.pathInArchive = _coerceToFilesystemEncoding(archivePathname, "")
        # zipfile is already wasting O(N) memory on cached ZipInfo instances,
        # so there's no sense in trying to do this lazily or intelligently
        self.childmap: Dict[AnyStr, Dict[AnyStr, int]] = {}

        for name in self.zipfile.namelist():
            splitName = _coerceToFilesystemEncoding(self.path, name).split(self.sep)
            for x in range(len(splitName)):
                child = splitName[-x]
                parent = self.sep.join(splitName[:-x])
                if parent not in self.childmap:
                    self.childmap[parent] = {}
                self.childmap[parent][child] = 1
            parent = _coerceToFilesystemEncoding(archivePathname, "")

    def __cmp__(self, other: object) -> int:
        if not isinstance(other, ZipArchive):
            return NotImplemented
        return cmp(self.path, other.path)

    def child(self, path: OtherAnyStr) -> ZipPath[OtherAnyStr, AnyStr]:
        """
        Create a ZipPath pointing at a path within the archive.

        @param path: a L{bytes} or L{unicode} with no path separators in it
            (either '/' or the system path separator, if it's different).
        """
        return ZipPath(self, path)

    def exists(self) -> bool:
        """
        Returns C{True} if the underlying archive exists.
        """
        return FilePath(self._zipfileFilename).exists()

    def getAccessTime(self) -> float:
        """
        Return the archive file's last access time.
        """
        return FilePath(self._zipfileFilename).getAccessTime()

    def getModificationTime(self) -> float:
        """
        Return the archive file's modification time.
        """
        return FilePath(self._zipfileFilename).getModificationTime()

    def getStatusChangeTime(self) -> float:
        """
        Return the archive file's status change time.
        """
        return FilePath(self._zipfileFilename).getStatusChangeTime()

    def __repr__(self) -> str:
        return f"ZipArchive({os.path.abspath(self.path)!r})"


__all__ = ["ZipArchive", "ZipPath"]
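
A minimal usage sketch of the API above: wrap an archive in a ZipArchive, then
navigate it with listdir()/child() and read members through open(). This is an
illustration only, not part of the vendored module; the archive name
"example.zip" and its member layout are hypothetical.

    from twisted.python.zippath import ZipArchive

    archive = ZipArchive("example.zip")        # "example.zip" is a hypothetical path
    for name in archive.listdir():             # top-level entry names
        entry = archive.child(name)            # a ZipPath inside the archive
        if entry.isfile():
            with entry.open("r") as f:         # binary file-like object from zipfile
                data = f.read()
            print(name, entry.getsize(), len(data))
        elif entry.isdir():
            print(name, "->", entry.listdir()) # children of a subdirectory entry

As the ZipPath docstring notes, ZipPath objects are meant to be obtained from
ZipArchive.child() (or listdir() plus child()) rather than constructed directly.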
319
.venv/lib/python3.12/site-packages/twisted/python/zipstream.py
Normal file
@@ -0,0 +1,319 @@
# -*- test-case-name: twisted.python.test.test_zipstream -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
An incremental approach to unzipping files. This allows you to unzip a little
bit of a file at a time, which means you can report progress as a file unzips.
"""

import os.path
import struct
import zipfile
import zlib


class ChunkingZipFile(zipfile.ZipFile):
    """
    A L{zipfile.ZipFile} object which, with L{readfile}, also gives you access
    to a file-like object for each entry.
    """

    def readfile(self, name):
        """
        Return file-like object for name.
        """
        if self.mode not in ("r", "a"):
            raise RuntimeError('read() requires mode "r" or "a"')
        if not self.fp:
            raise RuntimeError("Attempt to read ZIP archive that was already closed")
        zinfo = self.getinfo(name)

        self.fp.seek(zinfo.header_offset, 0)

        fheader = self.fp.read(zipfile.sizeFileHeader)
        if fheader[0:4] != zipfile.stringFileHeader:
            raise zipfile.BadZipFile("Bad magic number for file header")

        fheader = struct.unpack(zipfile.structFileHeader, fheader)
        fname = self.fp.read(fheader[zipfile._FH_FILENAME_LENGTH])

        if fheader[zipfile._FH_EXTRA_FIELD_LENGTH]:
            self.fp.read(fheader[zipfile._FH_EXTRA_FIELD_LENGTH])

        if zinfo.flag_bits & 0x800:
            # UTF-8 filename
            fname_str = fname.decode("utf-8")
        else:
            fname_str = fname.decode("cp437")

        if fname_str != zinfo.orig_filename:
            raise zipfile.BadZipFile(
                'File name in directory "%s" and header "%s" differ.'
                % (zinfo.orig_filename, fname_str)
            )

        if zinfo.compress_type == zipfile.ZIP_STORED:
            return ZipFileEntry(self, zinfo.compress_size)
        elif zinfo.compress_type == zipfile.ZIP_DEFLATED:
            return DeflatedZipFileEntry(self, zinfo.compress_size)
        else:
            raise zipfile.BadZipFile(
                "Unsupported compression method %d for file %s"
                % (zinfo.compress_type, name)
            )


class _FileEntry:
    """
    Abstract superclass of both compressed and uncompressed variants of
    file-like objects within a zip archive.

    @ivar chunkingZipFile: a chunking zip file.
    @type chunkingZipFile: L{ChunkingZipFile}

    @ivar length: The number of bytes within the zip file that represent this
        file. (This is the size on disk, not the number of decompressed bytes
        which will result from reading it.)

    @ivar fp: the underlying file object (that contains pkzip data). Do not
        touch this, please. It will quite likely move or go away.

    @ivar closed: File-like 'closed' attribute; True before this file has been
        closed, False after.
    @type closed: L{bool}

    @ivar finished: An older, broken synonym for 'closed'. Do not touch this,
        please.
    @type finished: L{int}
    """

    def __init__(self, chunkingZipFile, length):
        """
        Create a L{_FileEntry} from a L{ChunkingZipFile}.
        """
        self.chunkingZipFile = chunkingZipFile
        self.fp = self.chunkingZipFile.fp
        self.length = length
        self.finished = 0
        self.closed = False

    def isatty(self):
        """
        Returns false because zip files should not be ttys
        """
        return False

    def close(self):
        """
        Close self (file-like object)
        """
        self.closed = True
        self.finished = 1
        del self.fp

    def readline(self):
        """
        Read a line.
        """
        line = b""
        for byte in iter(lambda: self.read(1), b""):
            line += byte
            if byte == b"\n":
                break
        return line

    def __next__(self):
        """
        Implement next as file does (like readline, except raises StopIteration
        at EOF)
        """
        nextline = self.readline()
        if nextline:
            return nextline
        raise StopIteration()

    # Iterators on Python 2 use next(), not __next__()
    next = __next__

    def readlines(self):
        """
        Returns a list of all the lines
        """
        return list(self)

    def xreadlines(self):
        """
        Returns an iterator (so self)
        """
        return self

    def __iter__(self):
        """
        Returns an iterator (so self)
        """
        return self

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()


class ZipFileEntry(_FileEntry):
    """
    File-like object used to read an uncompressed entry in a ZipFile
    """

    def __init__(self, chunkingZipFile, length):
        _FileEntry.__init__(self, chunkingZipFile, length)
        self.readBytes = 0

    def tell(self):
        return self.readBytes

    def read(self, n=None):
        if n is None:
            n = self.length - self.readBytes
        if n == 0 or self.finished:
            return b""
        data = self.chunkingZipFile.fp.read(min(n, self.length - self.readBytes))
        self.readBytes += len(data)
        if self.readBytes == self.length or len(data) < n:
            self.finished = 1
        return data


class DeflatedZipFileEntry(_FileEntry):
    """
    File-like object used to read a deflated entry in a ZipFile
    """

    def __init__(self, chunkingZipFile, length):
        _FileEntry.__init__(self, chunkingZipFile, length)
        self.returnedBytes = 0
        self.readBytes = 0
        self.decomp = zlib.decompressobj(-15)
        self.buffer = b""

    def tell(self):
        return self.returnedBytes

    def read(self, n=None):
        if self.finished:
            return b""
        if n is None:
            result = [
                self.buffer,
            ]
            result.append(
                self.decomp.decompress(
                    self.chunkingZipFile.fp.read(self.length - self.readBytes)
                )
            )
            result.append(self.decomp.decompress(b"Z"))
            result.append(self.decomp.flush())
            self.buffer = b""
            self.finished = 1
            result = b"".join(result)
            self.returnedBytes += len(result)
            return result
        else:
            while len(self.buffer) < n:
                data = self.chunkingZipFile.fp.read(
                    min(n, 1024, self.length - self.readBytes)
                )
                self.readBytes += len(data)
                if not data:
                    result = (
                        self.buffer + self.decomp.decompress(b"Z") + self.decomp.flush()
                    )
                    self.finished = 1
                    self.buffer = b""
                    self.returnedBytes += len(result)
                    return result
                else:
                    self.buffer += self.decomp.decompress(data)
            result = self.buffer[:n]
            self.buffer = self.buffer[n:]
            self.returnedBytes += len(result)
            return result


DIR_BIT = 16


def countZipFileChunks(filename, chunksize):
    """
    Predict the number of chunks that will be extracted from the entire
    zipfile, given chunksize blocks.
    """
    totalchunks = 0
    zf = ChunkingZipFile(filename)
    for info in zf.infolist():
        totalchunks += countFileChunks(info, chunksize)
    return totalchunks


def countFileChunks(zipinfo, chunksize):
    """
    Count the number of chunks that will result from the given C{ZipInfo}.

    @param zipinfo: a C{zipfile.ZipInfo} instance describing an entry in a zip
        archive to be counted.

    @return: the number of chunks present in the zip file. (Even an empty file
        counts as one chunk.)
    @rtype: L{int}
    """
    count, extra = divmod(zipinfo.file_size, chunksize)
    if extra > 0:
        count += 1
    return count or 1
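
# Illustrative arithmetic (assumed values, not part of the upstream module):
# for a 10,000-byte entry and the default chunksize of 4096,
# divmod(10000, 4096) == (2, 1808), so countFileChunks() reports 3 chunks;
# a zero-byte entry gives divmod(0, 4096) == (0, 0) and still counts as one
# chunk via ``count or 1``.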

def unzipIterChunky(filename, directory=".", overwrite=0, chunksize=4096):
    """
    Return a generator for the zipfile. This implementation will yield after
    every chunksize uncompressed bytes, or at the end of a file, whichever
    comes first.

    The value it yields is the number of chunks left to unzip.
    """
    czf = ChunkingZipFile(filename, "r")
    if not os.path.exists(directory):
        os.makedirs(directory)
    remaining = countZipFileChunks(filename, chunksize)
    names = czf.namelist()
    infos = czf.infolist()

    for entry, info in zip(names, infos):
        isdir = info.external_attr & DIR_BIT
        f = os.path.join(directory, entry)
        if isdir:
            # overwrite flag only applies to files
            if not os.path.exists(f):
                os.makedirs(f)
            remaining -= 1
            yield remaining
        else:
            # create the directory the file will be in first,
            # since we can't guarantee it exists
            fdir = os.path.split(f)[0]
            if not os.path.exists(fdir):
                os.makedirs(fdir)
            if overwrite or not os.path.exists(f):
                fp = czf.readfile(entry)
                if info.file_size == 0:
                    remaining -= 1
                    yield remaining
                with open(f, "wb") as outfile:
                    while fp.tell() < info.file_size:
                        hunk = fp.read(chunksize)
                        outfile.write(hunk)
                        remaining -= 1
                        yield remaining
            else:
                remaining -= countFileChunks(info, chunksize)
                yield remaining
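
A minimal sketch of driving unzipIterChunky() for progress reporting, assuming
a hypothetical archive "example.zip" and destination directory "extracted"
(illustration only, not part of the vendored module):

    from twisted.python.zipstream import countZipFileChunks, unzipIterChunky

    total = countZipFileChunks("example.zip", 4096)   # hypothetical archive path
    for remaining in unzipIterChunky("example.zip", directory="extracted"):
        done = total - remaining
        print(f"unzipped {done}/{total} chunks")      # one line per yielded chunk

Each value yielded is the count of chunks still to be written, so the loop body
runs roughly once per chunksize bytes and can update a progress bar or hand
control back to an event loop between chunks.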