-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Loading status checks…
Add venv to git
Showing
88 changed files
with
19,261 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,87 @@ | ||
# This file must be used with "source bin/activate" *from bash* | ||
# you cannot run it directly | ||
|
||
|
||
if [ "${BASH_SOURCE-}" = "$0" ]; then | ||
echo "You must source this script: \$ source $0" >&2 | ||
exit 33 | ||
fi | ||
|
||
deactivate () { | ||
unset -f pydoc >/dev/null 2>&1 || true | ||
|
||
# reset old environment variables | ||
# ! [ -z ${VAR+_} ] returns true if VAR is declared at all | ||
if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then | ||
PATH="$_OLD_VIRTUAL_PATH" | ||
export PATH | ||
unset _OLD_VIRTUAL_PATH | ||
fi | ||
if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then | ||
PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME" | ||
export PYTHONHOME | ||
unset _OLD_VIRTUAL_PYTHONHOME | ||
fi | ||
|
||
# The hash command must be called to get it to forget past | ||
# commands. Without forgetting past commands the $PATH changes | ||
# we made may not be respected | ||
hash -r 2>/dev/null | ||
|
||
if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then | ||
PS1="$_OLD_VIRTUAL_PS1" | ||
export PS1 | ||
unset _OLD_VIRTUAL_PS1 | ||
fi | ||
|
||
unset VIRTUAL_ENV | ||
unset VIRTUAL_ENV_PROMPT | ||
if [ ! "${1-}" = "nondestructive" ] ; then | ||
# Self destruct! | ||
unset -f deactivate | ||
fi | ||
} | ||
|
||
# unset irrelevant variables | ||
deactivate nondestructive | ||
|
||
VIRTUAL_ENV='/Users/anuraag/git/go-protoc-gen-mypy/internal/pysite/.venv' | ||
if ([ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ]) && $(command -v cygpath &> /dev/null) ; then | ||
VIRTUAL_ENV=$(cygpath -u "$VIRTUAL_ENV") | ||
fi | ||
export VIRTUAL_ENV | ||
|
||
_OLD_VIRTUAL_PATH="$PATH" | ||
PATH="$VIRTUAL_ENV/bin:$PATH" | ||
export PATH | ||
|
||
if [ "xgo-protoc-gen-mypy-py3.12" != x ] ; then | ||
VIRTUAL_ENV_PROMPT="go-protoc-gen-mypy-py3.12" | ||
else | ||
VIRTUAL_ENV_PROMPT=$(basename "$VIRTUAL_ENV") | ||
fi | ||
export VIRTUAL_ENV_PROMPT | ||
|
||
# unset PYTHONHOME if set | ||
if ! [ -z "${PYTHONHOME+_}" ] ; then | ||
_OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME" | ||
unset PYTHONHOME | ||
fi | ||
|
||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then | ||
_OLD_VIRTUAL_PS1="${PS1-}" | ||
PS1="(${VIRTUAL_ENV_PROMPT}) ${PS1-}" | ||
export PS1 | ||
fi | ||
|
||
# Make sure to unalias pydoc if it's already there | ||
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true | ||
|
||
pydoc () { | ||
python -m pydoc "$@" | ||
} | ||
|
||
# The hash command must be called to get it to forget past | ||
# commands. Without forgetting past commands the $PATH changes | ||
# we made may not be respected | ||
hash -r 2>/dev/null |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
# This file must be used with "source bin/activate.csh" *from csh*. | ||
# You cannot run it directly. | ||
# Created by Davide Di Blasi <[email protected]>. | ||
|
||
set newline='\ | ||
' | ||
|
||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH:q" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT:q" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc' | ||
|
||
# Unset irrelevant variables. | ||
deactivate nondestructive | ||
|
||
setenv VIRTUAL_ENV '/Users/anuraag/git/go-protoc-gen-mypy/internal/pysite/.venv' | ||
|
||
set _OLD_VIRTUAL_PATH="$PATH:q" | ||
setenv PATH "$VIRTUAL_ENV:q/bin:$PATH:q" | ||
|
||
|
||
|
||
if ('go-protoc-gen-mypy-py3.12' != "") then | ||
setenv VIRTUAL_ENV_PROMPT 'go-protoc-gen-mypy-py3.12' | ||
else | ||
setenv VIRTUAL_ENV_PROMPT "$VIRTUAL_ENV:t:q" | ||
endif | ||
|
||
if ( $?VIRTUAL_ENV_DISABLE_PROMPT ) then | ||
if ( $VIRTUAL_ENV_DISABLE_PROMPT == "" ) then | ||
set do_prompt = "1" | ||
else | ||
set do_prompt = "0" | ||
endif | ||
else | ||
set do_prompt = "1" | ||
endif | ||
|
||
if ( $do_prompt == "1" ) then | ||
# Could be in a non-interactive environment, | ||
# in which case, $prompt is undefined and we wouldn't | ||
# care about the prompt anyway. | ||
if ( $?prompt ) then | ||
set _OLD_VIRTUAL_PROMPT="$prompt:q" | ||
if ( "$prompt:q" =~ *"$newline:q"* ) then | ||
: | ||
else | ||
set prompt = '('"$VIRTUAL_ENV_PROMPT:q"') '"$prompt:q" | ||
endif | ||
endif | ||
endif | ||
|
||
unset env_name | ||
unset do_prompt | ||
|
||
alias pydoc python -m pydoc | ||
|
||
rehash |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,103 @@ | ||
# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*. | ||
# Do not run it directly. | ||
|
||
function _bashify_path -d "Converts a fish path to something bash can recognize" | ||
set fishy_path $argv | ||
set bashy_path $fishy_path[1] | ||
for path_part in $fishy_path[2..-1] | ||
set bashy_path "$bashy_path:$path_part" | ||
end | ||
echo $bashy_path | ||
end | ||
|
||
function _fishify_path -d "Converts a bash path to something fish can recognize" | ||
echo $argv | tr ':' '\n' | ||
end | ||
|
||
function deactivate -d 'Exit virtualenv mode and return to the normal environment.' | ||
# reset old environment variables | ||
if test -n "$_OLD_VIRTUAL_PATH" | ||
# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling | ||
if test (echo $FISH_VERSION | head -c 1) -lt 3 | ||
set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH") | ||
else | ||
set -gx PATH $_OLD_VIRTUAL_PATH | ||
end | ||
set -e _OLD_VIRTUAL_PATH | ||
end | ||
|
||
if test -n "$_OLD_VIRTUAL_PYTHONHOME" | ||
set -gx PYTHONHOME "$_OLD_VIRTUAL_PYTHONHOME" | ||
set -e _OLD_VIRTUAL_PYTHONHOME | ||
end | ||
|
||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE" | ||
and functions -q _old_fish_prompt | ||
# Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`. | ||
set -l fish_function_path | ||
|
||
# Erase virtualenv's `fish_prompt` and restore the original. | ||
functions -e fish_prompt | ||
functions -c _old_fish_prompt fish_prompt | ||
functions -e _old_fish_prompt | ||
set -e _OLD_FISH_PROMPT_OVERRIDE | ||
end | ||
|
||
set -e VIRTUAL_ENV | ||
set -e VIRTUAL_ENV_PROMPT | ||
|
||
if test "$argv[1]" != 'nondestructive' | ||
# Self-destruct! | ||
functions -e pydoc | ||
functions -e deactivate | ||
functions -e _bashify_path | ||
functions -e _fishify_path | ||
end | ||
end | ||
|
||
# Unset irrelevant variables. | ||
deactivate nondestructive | ||
|
||
set -gx VIRTUAL_ENV '/Users/anuraag/git/go-protoc-gen-mypy/internal/pysite/.venv' | ||
|
||
# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling | ||
if test (echo $FISH_VERSION | head -c 1) -lt 3 | ||
set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH) | ||
else | ||
set -gx _OLD_VIRTUAL_PATH $PATH | ||
end | ||
set -gx PATH "$VIRTUAL_ENV"'/bin' $PATH | ||
|
||
# Prompt override provided? | ||
# If not, just use the environment name. | ||
if test -n 'go-protoc-gen-mypy-py3.12' | ||
set -gx VIRTUAL_ENV_PROMPT 'go-protoc-gen-mypy-py3.12' | ||
else | ||
set -gx VIRTUAL_ENV_PROMPT (basename "$VIRTUAL_ENV") | ||
end | ||
|
||
# Unset `$PYTHONHOME` if set. | ||
if set -q PYTHONHOME | ||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME | ||
set -e PYTHONHOME | ||
end | ||
|
||
function pydoc | ||
python -m pydoc $argv | ||
end | ||
|
||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" | ||
# Copy the current `fish_prompt` function as `_old_fish_prompt`. | ||
functions -c fish_prompt _old_fish_prompt | ||
|
||
function fish_prompt | ||
# Run the user's prompt first; it might depend on (pipe)status. | ||
set -l prompt (_old_fish_prompt) | ||
|
||
printf '(%s) ' $VIRTUAL_ENV_PROMPT | ||
|
||
string join -- \n $prompt # handle multi-line prompts | ||
end | ||
|
||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" | ||
end |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,96 @@ | ||
# virtualenv activation module | ||
# Activate with `overlay use activate.nu` | ||
# Deactivate with `deactivate`, as usual | ||
# | ||
# To customize the overlay name, you can call `overlay use activate.nu as foo`, | ||
# but then simply `deactivate` won't work because it is just an alias to hide | ||
# the "activate" overlay. You'd need to call `overlay hide foo` manually. | ||
|
||
export-env { | ||
def is-string [x] { | ||
($x | describe) == 'string' | ||
} | ||
|
||
def has-env [...names] { | ||
$names | each {|n| | ||
$n in $env | ||
} | all {|i| $i == true} | ||
} | ||
|
||
# Emulates a `test -z`, but btter as it handles e.g 'false' | ||
def is-env-true [name: string] { | ||
if (has-env $name) { | ||
# Try to parse 'true', '0', '1', and fail if not convertible | ||
let parsed = (do -i { $env | get $name | into bool }) | ||
if ($parsed | describe) == 'bool' { | ||
$parsed | ||
} else { | ||
not ($env | get -i $name | is-empty) | ||
} | ||
} else { | ||
false | ||
} | ||
} | ||
|
||
let virtual_env = '/Users/anuraag/git/go-protoc-gen-mypy/internal/pysite/.venv' | ||
let bin = 'bin' | ||
|
||
let is_windows = ($nu.os-info.family) == 'windows' | ||
let path_name = (if (has-env 'Path') { | ||
'Path' | ||
} else { | ||
'PATH' | ||
} | ||
) | ||
|
||
let venv_path = ([$virtual_env $bin] | path join) | ||
let new_path = ($env | get $path_name | prepend $venv_path) | ||
|
||
# If there is no default prompt, then use the env name instead | ||
let virtual_env_prompt = (if ('go-protoc-gen-mypy-py3.12' | is-empty) { | ||
($virtual_env | path basename) | ||
} else { | ||
'go-protoc-gen-mypy-py3.12' | ||
}) | ||
|
||
let new_env = { | ||
$path_name : $new_path | ||
VIRTUAL_ENV : $virtual_env | ||
VIRTUAL_ENV_PROMPT : $virtual_env_prompt | ||
} | ||
|
||
let new_env = (if (is-env-true 'VIRTUAL_ENV_DISABLE_PROMPT') { | ||
$new_env | ||
} else { | ||
# Creating the new prompt for the session | ||
let virtual_prefix = $'(char lparen)($virtual_env_prompt)(char rparen) ' | ||
|
||
# Back up the old prompt builder | ||
let old_prompt_command = (if (has-env 'PROMPT_COMMAND') { | ||
$env.PROMPT_COMMAND | ||
} else { | ||
'' | ||
}) | ||
|
||
let new_prompt = (if (has-env 'PROMPT_COMMAND') { | ||
if 'closure' in ($old_prompt_command | describe) { | ||
{|| $'($virtual_prefix)(do $old_prompt_command)' } | ||
} else { | ||
{|| $'($virtual_prefix)($old_prompt_command)' } | ||
} | ||
} else { | ||
{|| $'($virtual_prefix)' } | ||
}) | ||
|
||
$new_env | merge { | ||
PROMPT_COMMAND : $new_prompt | ||
VIRTUAL_PREFIX : $virtual_prefix | ||
} | ||
}) | ||
|
||
# Environment variables that will be loaded as the virtual env | ||
load-env $new_env | ||
} | ||
|
||
export alias pydoc = python -m pydoc | ||
export alias deactivate = overlay hide activate |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,61 @@ | ||
$script:THIS_PATH = $myinvocation.mycommand.path | ||
$script:BASE_DIR = Split-Path (Resolve-Path "$THIS_PATH/..") -Parent | ||
|
||
function global:deactivate([switch] $NonDestructive) { | ||
if (Test-Path variable:_OLD_VIRTUAL_PATH) { | ||
$env:PATH = $variable:_OLD_VIRTUAL_PATH | ||
Remove-Variable "_OLD_VIRTUAL_PATH" -Scope global | ||
} | ||
|
||
if (Test-Path function:_old_virtual_prompt) { | ||
$function:prompt = $function:_old_virtual_prompt | ||
Remove-Item function:\_old_virtual_prompt | ||
} | ||
|
||
if ($env:VIRTUAL_ENV) { | ||
Remove-Item env:VIRTUAL_ENV -ErrorAction SilentlyContinue | ||
} | ||
|
||
if ($env:VIRTUAL_ENV_PROMPT) { | ||
Remove-Item env:VIRTUAL_ENV_PROMPT -ErrorAction SilentlyContinue | ||
} | ||
|
||
if (!$NonDestructive) { | ||
# Self destruct! | ||
Remove-Item function:deactivate | ||
Remove-Item function:pydoc | ||
} | ||
} | ||
|
||
function global:pydoc { | ||
python -m pydoc $args | ||
} | ||
|
||
# unset irrelevant variables | ||
deactivate -nondestructive | ||
|
||
$VIRTUAL_ENV = $BASE_DIR | ||
$env:VIRTUAL_ENV = $VIRTUAL_ENV | ||
|
||
if ("go-protoc-gen-mypy-py3.12" -ne "") { | ||
$env:VIRTUAL_ENV_PROMPT = "go-protoc-gen-mypy-py3.12" | ||
} | ||
else { | ||
$env:VIRTUAL_ENV_PROMPT = $( Split-Path $env:VIRTUAL_ENV -Leaf ) | ||
} | ||
|
||
New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH | ||
|
||
$env:PATH = "$env:VIRTUAL_ENV/bin:" + $env:PATH | ||
if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) { | ||
function global:_old_virtual_prompt { | ||
"" | ||
} | ||
$function:_old_virtual_prompt = $function:prompt | ||
|
||
function global:prompt { | ||
# Add the custom prefix to the existing prompt | ||
$previous_prompt_value = & $function:_old_virtual_prompt | ||
("(" + $env:VIRTUAL_ENV_PROMPT + ") " + $previous_prompt_value) | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,36 @@ | ||
""" | ||
Activate virtualenv for current interpreter: | ||
Use exec(open(this_file).read(), {'__file__': this_file}). | ||
This can be used when you must use an existing Python interpreter, not the virtualenv bin/python. | ||
""" # noqa: D415 | ||
from __future__ import annotations | ||
|
||
import os | ||
import site | ||
import sys | ||
|
||
try: | ||
abs_file = os.path.abspath(__file__) | ||
except NameError as exc: | ||
msg = "You must use exec(open(this_file).read(), {'__file__': this_file})" | ||
raise AssertionError(msg) from exc | ||
|
||
bin_dir = os.path.dirname(abs_file) | ||
base = bin_dir[: -len("bin") - 1] # strip away the bin part from the __file__, plus the path separator | ||
|
||
# prepend bin to PATH (this file is inside the bin directory) | ||
os.environ["PATH"] = os.pathsep.join([bin_dir, *os.environ.get("PATH", "").split(os.pathsep)]) | ||
os.environ["VIRTUAL_ENV"] = base # virtual env is right above bin directory | ||
os.environ["VIRTUAL_ENV_PROMPT"] = "go-protoc-gen-mypy-py3.12" or os.path.basename(base) # noqa: SIM222 | ||
|
||
# add the virtual environments libraries to the host python import mechanism | ||
prev_length = len(sys.path) | ||
for lib in "../lib/python3.12/site-packages".split(os.pathsep): | ||
path = os.path.realpath(os.path.join(bin_dir, lib)) | ||
site.addsitedir(path.decode("utf-8") if "" else path) | ||
sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length] | ||
|
||
sys.real_prefix = sys.prefix | ||
sys.prefix = base |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
#!/Users/anuraag/git/go-protoc-gen-mypy/internal/pysite/.venv/bin/python | ||
# -*- coding: utf-8 -*- | ||
import re | ||
import sys | ||
from pip._internal.cli.main import main | ||
if __name__ == '__main__': | ||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) | ||
sys.exit(main()) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
#!/Users/anuraag/git/go-protoc-gen-mypy/internal/pysite/.venv/bin/python | ||
# -*- coding: utf-8 -*- | ||
import re | ||
import sys | ||
from pip._internal.cli.main import main | ||
if __name__ == '__main__': | ||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) | ||
sys.exit(main()) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
#!/Users/anuraag/git/go-protoc-gen-mypy/internal/pysite/.venv/bin/python | ||
# -*- coding: utf-8 -*- | ||
import re | ||
import sys | ||
from pip._internal.cli.main import main | ||
if __name__ == '__main__': | ||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) | ||
sys.exit(main()) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
#!/Users/anuraag/git/go-protoc-gen-mypy/internal/pysite/.venv/bin/python | ||
# -*- coding: utf-8 -*- | ||
import re | ||
import sys | ||
from pip._internal.cli.main import main | ||
if __name__ == '__main__': | ||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) | ||
sys.exit(main()) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
#!/Users/anuraag/git/go-protoc-gen-mypy/internal/pysite/.venv/bin/python | ||
# -*- coding: utf-8 -*- | ||
import re | ||
import sys | ||
from mypy_protobuf.main import main | ||
if __name__ == "__main__": | ||
sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0]) | ||
sys.exit(main()) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
#!/Users/anuraag/git/go-protoc-gen-mypy/internal/pysite/.venv/bin/python | ||
# -*- coding: utf-8 -*- | ||
import re | ||
import sys | ||
from mypy_protobuf.main import grpc | ||
if __name__ == "__main__": | ||
sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0]) | ||
sys.exit(grpc()) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
/opt/homebrew/opt/python@3.12/bin/python3.12 |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
python |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
python |
1 change: 1 addition & 0 deletions
1
internal/pysite/.venv/lib/python3.12/site-packages/_virtualenv.pth
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
import _virtualenv |
102 changes: 102 additions & 0 deletions
102
internal/pysite/.venv/lib/python3.12/site-packages/_virtualenv.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,102 @@ | ||
"""Patches that are applied at runtime to the virtual environment.""" | ||
|
||
from __future__ import annotations | ||
|
||
import os | ||
import sys | ||
from contextlib import suppress | ||
|
||
VIRTUALENV_PATCH_FILE = os.path.join(__file__) | ||
|
||
|
||
def patch_dist(dist): | ||
""" | ||
Distutils allows user to configure some arguments via a configuration file: | ||
https://docs.python.org/3/install/index.html#distutils-configuration-files. | ||
Some of this arguments though don't make sense in context of the virtual environment files, let's fix them up. | ||
""" # noqa: D205 | ||
# we cannot allow some install config as that would get packages installed outside of the virtual environment | ||
old_parse_config_files = dist.Distribution.parse_config_files | ||
|
||
def parse_config_files(self, *args, **kwargs): | ||
result = old_parse_config_files(self, *args, **kwargs) | ||
install = self.get_option_dict("install") | ||
|
||
if "prefix" in install: # the prefix governs where to install the libraries | ||
install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix) | ||
for base in ("purelib", "platlib", "headers", "scripts", "data"): | ||
key = f"install_{base}" | ||
if key in install: # do not allow global configs to hijack venv paths | ||
install.pop(key, None) | ||
return result | ||
|
||
dist.Distribution.parse_config_files = parse_config_files | ||
|
||
|
||
# Import hook that patches some modules to ignore configuration values that break package installation in case | ||
# of virtual environments. | ||
_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist" | ||
# https://docs.python.org/3/library/importlib.html#setting-up-an-importer | ||
|
||
|
||
class _Finder: | ||
"""A meta path finder that allows patching the imported distutils modules.""" | ||
|
||
fullname = None | ||
|
||
# lock[0] is threading.Lock(), but initialized lazily to avoid importing threading very early at startup, | ||
# because there are gevent-based applications that need to be first to import threading by themselves. | ||
# See https://github.com/pypa/virtualenv/issues/1895 for details. | ||
lock = [] # noqa: RUF012 | ||
|
||
def find_spec(self, fullname, path, target=None): # noqa: ARG002 | ||
if fullname in _DISTUTILS_PATCH and self.fullname is None: | ||
# initialize lock[0] lazily | ||
if len(self.lock) == 0: | ||
import threading | ||
|
||
lock = threading.Lock() | ||
# there is possibility that two threads T1 and T2 are simultaneously running into find_spec, | ||
# observing .lock as empty, and further going into hereby initialization. However due to the GIL, | ||
# list.append() operation is atomic and this way only one of the threads will "win" to put the lock | ||
# - that every thread will use - into .lock[0]. | ||
# https://docs.python.org/3/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe | ||
self.lock.append(lock) | ||
|
||
from functools import partial | ||
from importlib.util import find_spec | ||
|
||
with self.lock[0]: | ||
self.fullname = fullname | ||
try: | ||
spec = find_spec(fullname, path) | ||
if spec is not None: | ||
# https://www.python.org/dev/peps/pep-0451/#how-loading-will-work | ||
is_new_api = hasattr(spec.loader, "exec_module") | ||
func_name = "exec_module" if is_new_api else "load_module" | ||
old = getattr(spec.loader, func_name) | ||
func = self.exec_module if is_new_api else self.load_module | ||
if old is not func: | ||
with suppress(AttributeError): # C-Extension loaders are r/o such as zipimporter with <3.7 | ||
setattr(spec.loader, func_name, partial(func, old)) | ||
return spec | ||
finally: | ||
self.fullname = None | ||
return None | ||
|
||
@staticmethod | ||
def exec_module(old, module): | ||
old(module) | ||
if module.__name__ in _DISTUTILS_PATCH: | ||
patch_dist(module) | ||
|
||
@staticmethod | ||
def load_module(old, name): | ||
module = old(name) | ||
if module.__name__ in _DISTUTILS_PATCH: | ||
patch_dist(module) | ||
return module | ||
|
||
|
||
sys.meta_path.insert(0, _Finder()) |
7 changes: 7 additions & 0 deletions
7
internal/pysite/.venv/lib/python3.12/site-packages/google-stubs/METADATA.toml
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
version = "4.24.*" | ||
upstream_repository = "https://github.com/protocolbuffers/protobuf" | ||
extra_description = "Generated using [mypy-protobuf==3.5.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.5.0) on protobuf==4.21.8" | ||
partial_stub = true | ||
|
||
[tool.stubtest] | ||
ignore_missing_stub = true |
1 change: 1 addition & 0 deletions
1
internal/pysite/.venv/lib/python3.12/site-packages/google-stubs/py.typed
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
partial |
10 changes: 10 additions & 0 deletions
10
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/__init__.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,10 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
# Copyright 2007 Google Inc. All Rights Reserved. | ||
|
||
__version__ = '4.25.2' |
27 changes: 27 additions & 0 deletions
27
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/any_pb2.py
Oops, something went wrong.
33 changes: 33 additions & 0 deletions
33
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/api_pb2.py
Oops, something went wrong.
Empty file.
36 changes: 36 additions & 0 deletions
36
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/compiler/plugin_pb2.py
Oops, something went wrong.
1,282 changes: 1,282 additions & 0 deletions
1,282
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/descriptor.py
Large diffs are not rendered by default.
Oops, something went wrong.
154 changes: 154 additions & 0 deletions
154
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/descriptor_database.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,154 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""Provides a container for DescriptorProtos.""" | ||
|
||
__author__ = 'matthewtoia@google.com (Matt Toia)' | ||
|
||
import warnings | ||
|
||
|
||
class Error(Exception): | ||
pass | ||
|
||
|
||
class DescriptorDatabaseConflictingDefinitionError(Error): | ||
"""Raised when a proto is added with the same name & different descriptor.""" | ||
|
||
|
||
class DescriptorDatabase(object):
  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""

  def __init__(self):
    # FileDescriptorProto keyed by its .name (the .proto file path).
    self._file_desc_protos_by_file = {}
    # FileDescriptorProto keyed by each fully qualified top-level symbol
    # (message, enum, enum value, extension, service) the file defines.
    self._file_desc_protos_by_symbol = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this database.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    Raises:
      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
        add a proto with the same name but different definition than an
        existing proto in the database.
    """
    proto_name = file_desc_proto.name
    registered = self._file_desc_protos_by_file.get(proto_name)
    if registered is None:
      self._file_desc_protos_by_file[proto_name] = file_desc_proto
    elif registered != file_desc_proto:
      raise DescriptorDatabaseConflictingDefinitionError(
          '%s already added, but with different descriptor.' % proto_name)
    else:
      # Identical definition already registered; symbols are indexed already.
      return

    # Index every top-level symbol defined in this file.
    package = file_desc_proto.package
    for message in file_desc_proto.message_type:
      for symbol in _ExtractSymbols(message, package):
        self._AddSymbol(symbol, file_desc_proto)
    for enum in file_desc_proto.enum_type:
      self._AddSymbol('.'.join((package, enum.name)), file_desc_proto)
      for enum_value in enum.value:
        # Enum values live directly in the enclosing package namespace.
        self._file_desc_protos_by_symbol[
            '.'.join((package, enum_value.name))] = file_desc_proto
    for extension in file_desc_proto.extension:
      self._AddSymbol('.'.join((package, extension.name)), file_desc_proto)
    for service in file_desc_proto.service:
      self._AddSymbol('.'.join((package, service.name)), file_desc_proto)

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending in .proto. The proto
    with the given name must previously have been added via Add().

    Args:
      name: The file name to find.
    Returns:
      The file descriptor proto matching the name.
    Raises:
      KeyError if no file by the given name was added.
    """
    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file
    descriptor's package and any containing messages, e.g.
    'some.package.name.Message.NestedEnum'. The containing file must have
    been added via Add().

    Args:
      symbol: The fully qualified symbol name.
    Returns:
      The file descriptor proto containing the symbol.
    Raises:
      KeyError if no file contains the specified symbol.
    """
    try:
      return self._file_desc_protos_by_symbol[symbol]
    except KeyError:
      # Fields, enum values, and nested extensions are not indexed in
      # _file_desc_protos_by_symbol, so fall back to the enclosing top-level
      # descriptor. A non-existent nested symbol under a valid top-level
      # descriptor is also found, matching the protobuf C++ behavior.
      enclosing, _, _ = symbol.rpartition('.')
      try:
        return self._file_desc_protos_by_symbol[enclosing]
      except KeyError:
        # Report the symbol the caller actually asked for.
        raise KeyError(symbol)

  def FindFileContainingExtension(self, extendee_name, extension_number):
    # TODO: implement this API.
    return None

  def FindAllExtensionNumbers(self, extendee_name):
    # TODO: implement this API.
    return []

  def _AddSymbol(self, name, file_desc_proto):
    # Warn (but still overwrite) when two files define the same symbol.
    if name in self._file_desc_protos_by_symbol:
      warnings.warn(
          'Conflict register for file "' + file_desc_proto.name +
          '": ' + name +
          ' is already defined in file "' +
          self._file_desc_protos_by_symbol[name].name + '"',
          RuntimeWarning)
    self._file_desc_protos_by_symbol[name] = file_desc_proto
|
||
|
||
def _ExtractSymbols(desc_proto, package): | ||
"""Pulls out all the symbols from a descriptor proto. | ||
Args: | ||
desc_proto: The proto to extract symbols from. | ||
package: The package containing the descriptor type. | ||
Yields: | ||
The fully qualified name found in the descriptor. | ||
""" | ||
message_name = package + '.' + desc_proto.name if package else desc_proto.name | ||
yield message_name | ||
for nested_type in desc_proto.nested_type: | ||
for symbol in _ExtractSymbols(nested_type, message_name): | ||
yield symbol | ||
for enum_type in desc_proto.enum_type: | ||
yield '.'.join((message_name, enum_type.name)) |
2,790 changes: 2,790 additions & 0 deletions
2,790
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/descriptor_pb2.py
Large diffs are not rendered by default.
Oops, something went wrong.
1,271 changes: 1,271 additions & 0 deletions
1,271
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/descriptor_pool.py
Large diffs are not rendered by default.
Oops, something went wrong.
27 changes: 27 additions & 0 deletions
27
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/duration_pb2.py
Oops, something went wrong.
27 changes: 27 additions & 0 deletions
27
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/empty_pb2.py
Oops, something went wrong.
27 changes: 27 additions & 0 deletions
27
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/field_mask_pb2.py
Oops, something went wrong.
7 changes: 7 additions & 0 deletions
7
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/__init__.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
420 changes: 420 additions & 0 deletions
420
...rnal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/_parameterized.py
Large diffs are not rendered by default.
Oops, something went wrong.
140 changes: 140 additions & 0 deletions
140
.../pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/api_implementation.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,140 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""Determine which implementation of the protobuf API is used in this process. | ||
""" | ||
|
||
import importlib | ||
import os | ||
import sys | ||
import warnings | ||
|
||
|
||
def _ApiVersionToImplementationType(api_version): | ||
if api_version == 2: | ||
return 'cpp' | ||
if api_version == 1: | ||
raise ValueError('api_version=1 is no longer supported.') | ||
if api_version == 0: | ||
return 'python' | ||
return None | ||
|
||
|
||
_implementation_type = None | ||
try: | ||
# pylint: disable=g-import-not-at-top | ||
from google.protobuf.internal import _api_implementation | ||
# The compile-time constants in the _api_implementation module can be used to | ||
# switch to a certain implementation of the Python API at build time. | ||
_implementation_type = _ApiVersionToImplementationType( | ||
_api_implementation.api_version) | ||
except ImportError: | ||
pass # Unspecified by compiler flags. | ||
|
||
|
||
def _CanImport(mod_name): | ||
try: | ||
mod = importlib.import_module(mod_name) | ||
# Work around a known issue in the classic bootstrap .par import hook. | ||
if not mod: | ||
raise ImportError(mod_name + ' import succeeded but was None') | ||
return True | ||
except ImportError: | ||
return False | ||
|
||
|
||
if _implementation_type is None: | ||
if _CanImport('google._upb._message'): | ||
_implementation_type = 'upb' | ||
elif _CanImport('google.protobuf.pyext._message'): | ||
_implementation_type = 'cpp' | ||
else: | ||
_implementation_type = 'python' | ||
|
||
|
||
# This environment variable can be used to switch to a certain implementation | ||
# of the Python API, overriding the compile-time constants in the | ||
# _api_implementation module. Right now only 'python', 'cpp' and 'upb' are | ||
# valid values. Any other value will raise error. | ||
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', | ||
_implementation_type) | ||
|
||
if _implementation_type not in ('python', 'cpp', 'upb'): | ||
raise ValueError('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION {0} is not ' | ||
'supported. Please set to \'python\', \'cpp\' or ' | ||
'\'upb\'.'.format(_implementation_type)) | ||
|
||
if 'PyPy' in sys.version and _implementation_type == 'cpp': | ||
warnings.warn('PyPy does not work yet with cpp protocol buffers. ' | ||
'Falling back to the python implementation.') | ||
_implementation_type = 'python' | ||
|
||
_c_module = None | ||
|
||
if _implementation_type == 'cpp': | ||
try: | ||
# pylint: disable=g-import-not-at-top | ||
from google.protobuf.pyext import _message | ||
sys.modules['google3.net.proto2.python.internal.cpp._message'] = _message | ||
_c_module = _message | ||
del _message | ||
except ImportError: | ||
# TODO: fail back to python | ||
warnings.warn( | ||
'Selected implementation cpp is not available.') | ||
pass | ||
|
||
if _implementation_type == 'upb': | ||
try: | ||
# pylint: disable=g-import-not-at-top | ||
from google._upb import _message | ||
_c_module = _message | ||
del _message | ||
except ImportError: | ||
warnings.warn('Selected implementation upb is not available. ' | ||
'Falling back to the python implementation.') | ||
_implementation_type = 'python' | ||
pass | ||
|
||
# Detect if serialization should be deterministic by default | ||
try: | ||
# The presence of this module in a build allows the proto implementation to | ||
# be upgraded merely via build deps. | ||
# | ||
# NOTE: Merely importing this automatically enables deterministic proto | ||
# serialization for C++ code, but we still need to export it as a boolean so | ||
# that we can do the same for `_implementation_type == 'python'`. | ||
# | ||
# NOTE2: It is possible for C++ code to enable deterministic serialization by | ||
# default _without_ affecting Python code, if the C++ implementation is not in | ||
# use by this module. That is intended behavior, so we don't actually expose | ||
# this boolean outside of this module. | ||
# | ||
# pylint: disable=g-import-not-at-top,unused-import | ||
from google.protobuf import enable_deterministic_proto_serialization | ||
_python_deterministic_proto_serialization = True | ||
except ImportError: | ||
_python_deterministic_proto_serialization = False | ||
|
||
|
||
# Usage of this function is discouraged. Clients shouldn't care which | ||
# implementation of the API is in use. Note that there is no guarantee | ||
# that differences between APIs will be maintained. | ||
# Please don't use this function if possible. | ||
def Type():
  """Returns the implementation in use: 'python', 'cpp' or 'upb'.

  Usage is discouraged: clients shouldn't care which implementation of the
  API is in use, and no guarantee is made that differences between the
  implementations will be maintained.
  """
  return _implementation_type
|
||
|
||
# See comment on 'Type' above. | ||
# TODO: Remove the API, it returns a constant. b/228102101 | ||
def Version():
  """Returns the API version (a constant). See the comment on Type()."""
  # TODO: Remove the API, it returns a constant. b/228102101
  return 2
|
||
|
||
# For internal use only | ||
def IsPythonDefaultSerializationDeterministic():
  """For internal use only.

  True when deterministic serialization was enabled at import time for the
  pure-Python implementation.
  """
  return _python_deterministic_proto_serialization
118 changes: 118 additions & 0 deletions
118
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/builder.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,118 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""Builds descriptors, message classes and services for generated _pb2.py. | ||
This file is only called in python generated _pb2.py files. It builds | ||
descriptors, message classes and services that users can directly use | ||
in generated code. | ||
""" | ||
|
||
__author__ = 'jieluo@google.com (Jie Luo)' | ||
|
||
from google.protobuf.internal import enum_type_wrapper | ||
from google.protobuf.internal import python_message | ||
from google.protobuf import message as _message | ||
from google.protobuf import reflection as _reflection | ||
from google.protobuf import symbol_database as _symbol_database | ||
|
||
_sym_db = _symbol_database.Default() | ||
|
||
|
||
def BuildMessageAndEnumDescriptors(file_des, module):
  """Builds message and enum descriptors.

  Args:
    file_des: FileDescriptor of the .proto file
    module: Generated _pb2 module
  """

  def _RegisterNested(msg_des, prefix):
    # Nested messages and enums are exposed under _OUTER_INNER style keys.
    for name, nested_msg in msg_des.nested_types_by_name.items():
      nested_key = prefix + name.upper()
      module[nested_key] = nested_msg
      _RegisterNested(nested_msg, nested_key + '_')
    for enum_des in msg_des.enum_types:
      module[prefix + enum_des.name.upper()] = enum_des

  for name, msg_des in file_des.message_types_by_name.items():
    key = '_' + name.upper()
    module[key] = msg_des
    _RegisterNested(msg_des, key + '_')
|
||
|
||
def BuildTopDescriptorsAndMessages(file_des, module_name, module):
  """Builds top level descriptors and message classes.

  Args:
    file_des: FileDescriptor of the .proto file
    module_name: str, the name of generated _pb2 module
    module: Generated _pb2 module
  """

  def _MakeMessageClass(msg_des):
    # Build nested message classes first so they become attributes of the
    # enclosing generated class.
    class_dict = {
        name: _MakeMessageClass(nested)
        for name, nested in msg_des.nested_types_by_name.items()
    }
    class_dict['DESCRIPTOR'] = msg_des
    class_dict['__module__'] = module_name
    message_class = _reflection.GeneratedProtocolMessageType(
        msg_des.name, (_message.Message,), class_dict)
    _sym_db.RegisterMessage(message_class)
    return message_class

  # Top level enums: descriptor, wrapper, and flattened value constants.
  for name, enum_des in file_des.enum_types_by_name.items():
    module['_' + name.upper()] = enum_des
    module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des)
    for enum_value in enum_des.values:
      module[enum_value.name] = enum_value.number

  # Top level extensions.
  for name, extension_des in file_des.extensions_by_name.items():
    module[name.upper() + '_FIELD_NUMBER'] = extension_des.number
    module[name] = extension_des

  # Service descriptors.
  for name, service in file_des.services_by_name.items():
    module['_' + name.upper()] = service

  # Message classes.
  for name, msg_des in file_des.message_types_by_name.items():
    module[name] = _MakeMessageClass(msg_des)
|
||
|
||
def AddHelpersToExtensions(file_des):
  """no-op to keep old generated code work with new runtime.

  Args:
    file_des: FileDescriptor of the .proto file
  """
  # TODO: Remove this no-op once old generated code no longer calls it.
  return
|
||
|
||
def BuildServices(file_des, module_name, module):
  """Builds services classes and services stub class.

  Args:
    file_des: FileDescriptor of the .proto file
    module_name: str, the name of generated _pb2 module
    module: Generated _pb2 module
  """
  # pylint: disable=g-import-not-at-top
  from google.protobuf import service as _service
  from google.protobuf import service_reflection
  # pylint: enable=g-import-not-at-top
  for name, service in file_des.services_by_name.items():
    service_class = service_reflection.GeneratedServiceType(
        name, (_service.Service,),
        dict(DESCRIPTOR=service, __module__=module_name))
    module[name] = service_class
    # The stub class derives from the freshly built service class.
    stub_name = name + '_Stub'
    module[stub_name] = service_reflection.GeneratedServiceStubType(
        stub_name, (service_class,),
        dict(DESCRIPTOR=service, __module__=module_name))
687 changes: 687 additions & 0 deletions
687
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/containers.py
Large diffs are not rendered by default.
Oops, something went wrong.
1,044 changes: 1,044 additions & 0 deletions
1,044
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/decoder.py
Large diffs are not rendered by default.
Oops, something went wrong.
806 changes: 806 additions & 0 deletions
806
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/encoder.py
Large diffs are not rendered by default.
Oops, something went wrong.
101 changes: 101 additions & 0 deletions
101
...l/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/enum_type_wrapper.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,101 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""A simple wrapper around enum types to expose utility functions. | ||
Instances are created as properties with the same name as the enum they wrap | ||
on proto classes. For usage, see: | ||
reflection_test.py | ||
""" | ||
|
||
__author__ = 'rabsatt@google.com (Kevin Rabsatt)' | ||
|
||
|
||
class EnumTypeWrapper(object):
  """A utility for finding the names of enum values."""

  DESCRIPTOR = None

  # Type alias which mypy stubs can narrow to a NewType constrained to int,
  # letting subclasses be typed more precisely in .pyi files, e.g.
  #   class MyGeneratedEnum(Message):
  #     ValueType = NewType('ValueType', int)
  #     def Name(self, number: MyGeneratedEnum.ValueType) -> str
  ValueType = int

  def __init__(self, enum_type):
    """Inits EnumTypeWrapper with an EnumDescriptor."""
    self._enum_type = enum_type
    self.DESCRIPTOR = enum_type  # pylint: disable=invalid-name

  def Name(self, number):  # pylint: disable=invalid-name
    """Returns a string containing the name of an enum value."""
    try:
      return self._enum_type.values_by_number[number].name
    except KeyError:
      pass  # fall out to break exception chaining

    if isinstance(number, int):
      # repr here to handle the odd case when you pass in a boolean.
      raise ValueError('Enum {} has no name defined for value {!r}'.format(
          self._enum_type.name, number))
    raise TypeError(
        'Enum value for {} must be an int, but got {} {!r}.'.format(
            self._enum_type.name, type(number), number))

  def Value(self, name):  # pylint: disable=invalid-name
    """Returns the value corresponding to the given enum name."""
    try:
      return self._enum_type.values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise ValueError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))

  def keys(self):
    """Returns the string names in the enum, in .proto definition order."""
    return [value_des.name for value_des in self._enum_type.values]

  def values(self):
    """Returns the integer values in the enum, in .proto definition order."""
    return [value_des.number for value_des in self._enum_type.values]

  def items(self):
    """Returns (name, value) pairs of the enum, in .proto definition order."""
    return [(value_des.name, value_des.number)
            for value_des in self._enum_type.values]

  def __getattr__(self, name):
    """Returns the value corresponding to the given enum name."""
    try:
      # Use __getattribute__ via super() to reach the wrapped descriptor
      # without re-entering this __getattr__.
      return super(
          EnumTypeWrapper,
          self).__getattribute__('_enum_type').values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise AttributeError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))
194 changes: 194 additions & 0 deletions
194
...rnal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/extension_dict.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,194 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""Contains _ExtensionDict class to represent extensions. | ||
""" | ||
|
||
from google.protobuf.internal import type_checkers | ||
from google.protobuf.descriptor import FieldDescriptor | ||
|
||
|
||
def _VerifyExtensionHandle(message, extension_handle):
  """Verify that the given extension handle is valid.

  Raises KeyError when the handle is not a FieldDescriptor, is not an
  extension, lacks a containing type, or extends a different message type
  than the given message.
  """
  if not isinstance(extension_handle, FieldDescriptor):
    raise KeyError('HasExtension() expects an extension handle, got: %s' %
                   extension_handle)

  if not extension_handle.is_extension:
    raise KeyError('"%s" is not an extension.' % extension_handle.full_name)

  if not extension_handle.containing_type:
    raise KeyError('"%s" is missing a containing_type.'
                   % extension_handle.full_name)

  # Identity comparison: the handle must extend exactly this descriptor.
  if extension_handle.containing_type is not message.DESCRIPTOR:
    raise KeyError('Extension "%s" extends message type "%s", but this '
                   'message is of type "%s".' %
                   (extension_handle.full_name,
                    extension_handle.containing_type.full_name,
                    message.DESCRIPTOR.full_name))
|
||
|
||
# TODO: Unify error handling of "unknown extension" crap. | ||
# TODO: Support iteritems()-style iteration over all | ||
# extensions with the "has" bits turned on? | ||
class _ExtensionDict(object): | ||
|
||
"""Dict-like container for Extension fields on proto instances. | ||
Note that in all cases we expect extension handles to be | ||
FieldDescriptors. | ||
""" | ||
|
||
def __init__(self, extended_message): | ||
""" | ||
Args: | ||
extended_message: Message instance for which we are the Extensions dict. | ||
""" | ||
self._extended_message = extended_message | ||
|
||
def __getitem__(self, extension_handle): | ||
"""Returns the current value of the given extension handle.""" | ||
|
||
_VerifyExtensionHandle(self._extended_message, extension_handle) | ||
|
||
result = self._extended_message._fields.get(extension_handle) | ||
if result is not None: | ||
return result | ||
|
||
if extension_handle.label == FieldDescriptor.LABEL_REPEATED: | ||
result = extension_handle._default_constructor(self._extended_message) | ||
elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: | ||
message_type = extension_handle.message_type | ||
if not hasattr(message_type, '_concrete_class'): | ||
# pylint: disable=g-import-not-at-top | ||
from google.protobuf import message_factory | ||
message_factory.GetMessageClass(message_type) | ||
if not hasattr(extension_handle.message_type, '_concrete_class'): | ||
from google.protobuf import message_factory | ||
message_factory.GetMessageClass(extension_handle.message_type) | ||
result = extension_handle.message_type._concrete_class() | ||
try: | ||
result._SetListener(self._extended_message._listener_for_children) | ||
except ReferenceError: | ||
pass | ||
else: | ||
# Singular scalar -- just return the default without inserting into the | ||
# dict. | ||
return extension_handle.default_value | ||
|
||
# Atomically check if another thread has preempted us and, if not, swap | ||
# in the new object we just created. If someone has preempted us, we | ||
# take that object and discard ours. | ||
# WARNING: We are relying on setdefault() being atomic. This is true | ||
# in CPython but we haven't investigated others. This warning appears | ||
# in several other locations in this file. | ||
result = self._extended_message._fields.setdefault( | ||
extension_handle, result) | ||
|
||
return result | ||
|
||
def __eq__(self, other): | ||
if not isinstance(other, self.__class__): | ||
return False | ||
|
||
my_fields = self._extended_message.ListFields() | ||
other_fields = other._extended_message.ListFields() | ||
|
||
# Get rid of non-extension fields. | ||
my_fields = [field for field in my_fields if field.is_extension] | ||
other_fields = [field for field in other_fields if field.is_extension] | ||
|
||
return my_fields == other_fields | ||
|
||
def __ne__(self, other): | ||
return not self == other | ||
|
||
def __len__(self): | ||
fields = self._extended_message.ListFields() | ||
# Get rid of non-extension fields. | ||
extension_fields = [field for field in fields if field[0].is_extension] | ||
return len(extension_fields) | ||
|
||
def __hash__(self): | ||
raise TypeError('unhashable object') | ||
|
||
# Note that this is only meaningful for non-repeated, scalar extension | ||
# fields. Note also that we may have to call _Modified() when we do | ||
# successfully set a field this way, to set any necessary "has" bits in the | ||
# ancestors of the extended message. | ||
def __setitem__(self, extension_handle, value): | ||
"""If extension_handle specifies a non-repeated, scalar extension | ||
field, sets the value of that field. | ||
""" | ||
|
||
_VerifyExtensionHandle(self._extended_message, extension_handle) | ||
|
||
if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or | ||
extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): | ||
raise TypeError( | ||
'Cannot assign to extension "%s" because it is a repeated or ' | ||
'composite type.' % extension_handle.full_name) | ||
|
||
# It's slightly wasteful to lookup the type checker each time, | ||
# but we expect this to be a vanishingly uncommon case anyway. | ||
type_checker = type_checkers.GetTypeChecker(extension_handle) | ||
# pylint: disable=protected-access | ||
self._extended_message._fields[extension_handle] = ( | ||
type_checker.CheckValue(value)) | ||
self._extended_message._Modified() | ||
|
||
def __delitem__(self, extension_handle): | ||
self._extended_message.ClearExtension(extension_handle) | ||
|
||
def _FindExtensionByName(self, name): | ||
"""Tries to find a known extension with the specified name. | ||
Args: | ||
name: Extension full name. | ||
Returns: | ||
Extension field descriptor. | ||
""" | ||
descriptor = self._extended_message.DESCRIPTOR | ||
extensions = descriptor.file.pool._extensions_by_name[descriptor] | ||
return extensions.get(name, None) | ||
|
||
def _FindExtensionByNumber(self, number): | ||
"""Tries to find a known extension with the field number. | ||
Args: | ||
number: Extension field number. | ||
Returns: | ||
Extension field descriptor. | ||
""" | ||
descriptor = self._extended_message.DESCRIPTOR | ||
extensions = descriptor.file.pool._extensions_by_number[descriptor] | ||
return extensions.get(number, None) | ||
|
||
def __iter__(self): | ||
# Return a generator over the populated extension fields | ||
return (f[0] for f in self._extended_message.ListFields() | ||
if f[0].is_extension) | ||
|
||
def __contains__(self, extension_handle): | ||
_VerifyExtensionHandle(self._extended_message, extension_handle) | ||
|
||
if extension_handle not in self._extended_message._fields: | ||
return False | ||
|
||
if extension_handle.label == FieldDescriptor.LABEL_REPEATED: | ||
return bool(self._extended_message._fields.get(extension_handle)) | ||
|
||
if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: | ||
value = self._extended_message._fields.get(extension_handle) | ||
# pylint: disable=protected-access | ||
return value is not None and value._is_present_in_parent | ||
|
||
return True |
310 changes: 310 additions & 0 deletions
310
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/field_mask.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,310 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""Contains FieldMask class.""" | ||
|
||
from google.protobuf.descriptor import FieldDescriptor | ||
|
||
|
||
class FieldMask(object):
  """Class for FieldMask message type."""

  __slots__ = ()

  def ToJsonString(self):
    """Converts FieldMask to string according to proto3 JSON spec."""
    return ','.join(_SnakeCaseToCamelCase(path) for path in self.paths)

  def FromJsonString(self, value):
    """Converts string to FieldMask according to proto3 JSON spec."""
    if not isinstance(value, str):
      raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
    self.Clear()
    if value:
      for path in value.split(','):
        self.paths.append(_CamelCaseToSnakeCase(path))

  def IsValidForDescriptor(self, message_descriptor):
    """Checks whether the FieldMask is valid for Message Descriptor."""
    return all(_IsValidPath(message_descriptor, path) for path in self.paths)

  def AllFieldsFromDescriptor(self, message_descriptor):
    """Gets all direct fields of Message Descriptor to FieldMask."""
    self.Clear()
    for field in message_descriptor.fields:
      self.paths.append(field.name)

  def CanonicalFormFromMask(self, mask):
    """Converts a FieldMask to the canonical form.

    Removes paths that are covered by another path ("foo.bar" is dropped
    when "foo" is also in the FieldMask), then sorts all paths
    alphabetically.

    Args:
      mask: The original FieldMask to be converted.
    """
    _FieldMaskTree(mask).ToFieldMask(self)

  def Union(self, mask1, mask2):
    """Merges mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    tree = _FieldMaskTree(mask1)
    tree.MergeFromFieldMask(mask2)
    tree.ToFieldMask(self)

  def Intersect(self, mask1, mask2):
    """Intersects mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    tree = _FieldMaskTree(mask1)
    intersection = _FieldMaskTree()
    for path in mask2.paths:
      tree.IntersectPath(path, intersection)
    intersection.ToFieldMask(self)

  def MergeMessage(
      self, source, destination,
      replace_message_field=False, replace_repeated_field=False):
    """Merges fields specified in FieldMask from source to destination.

    Args:
      source: Source message.
      destination: The destination message to be merged into.
      replace_message_field: Replace message field if True. Merge message
        field if False.
      replace_repeated_field: Replace repeated field if True. Append
        elements of repeated field if False.
    """
    _FieldMaskTree(self).MergeMessage(
        source, destination, replace_message_field, replace_repeated_field)
|
||
|
||
def _IsValidPath(message_descriptor, path): | ||
"""Checks whether the path is valid for Message Descriptor.""" | ||
parts = path.split('.') | ||
last = parts.pop() | ||
for name in parts: | ||
field = message_descriptor.fields_by_name.get(name) | ||
if (field is None or | ||
field.label == FieldDescriptor.LABEL_REPEATED or | ||
field.type != FieldDescriptor.TYPE_MESSAGE): | ||
return False | ||
message_descriptor = field.message_type | ||
return last in message_descriptor.fields_by_name | ||
|
||
|
||
def _CheckFieldMaskMessage(message):
  """Raises ValueError if message is not a FieldMask."""
  descriptor = message.DESCRIPTOR
  is_field_mask = (
      descriptor.name == 'FieldMask' and
      descriptor.file.name == 'google/protobuf/field_mask.proto')
  if not is_field_mask:
    raise ValueError('Message {0} is not a FieldMask.'.format(
        descriptor.full_name))
|
||
|
||
def _SnakeCaseToCamelCase(path_name):
  """Converts a path name from snake_case to camelCase.

  Raises:
    ValueError: if path_name contains uppercase letters, a character
      after "_" that is not a lowercase letter, or a trailing "_".
  """
  converted = []
  pending_upper = False  # True right after an '_' was consumed.
  for ch in path_name:
    if ch.isupper():
      raise ValueError(
          'Fail to print FieldMask to Json string: Path name '
          '{0} must not contain uppercase letters.'.format(path_name))
    if ch == '_':
      if pending_upper:
        # "__": the character after '_' is '_', not a lowercase letter.
        raise ValueError(
            'Fail to print FieldMask to Json string: The '
            'character after a "_" must be a lowercase letter '
            'in path name {0}.'.format(path_name))
      pending_upper = True
    elif pending_upper:
      if not ch.islower():
        raise ValueError(
            'Fail to print FieldMask to Json string: The '
            'character after a "_" must be a lowercase letter '
            'in path name {0}.'.format(path_name))
      converted.append(ch.upper())
      pending_upper = False
    else:
      converted.append(ch)

  if pending_upper:
    raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
                     'in path name {0}.'.format(path_name))
  return ''.join(converted)
|
||
|
||
def _CamelCaseToSnakeCase(path_name):
  """Converts a field name from camelCase to snake_case.

  Raises:
    ValueError: if path_name contains an underscore.
  """
  pieces = []
  for ch in path_name:
    if ch == '_':
      raise ValueError('Fail to parse FieldMask: Path name '
                       '{0} must not contain "_"s.'.format(path_name))
    if ch.isupper():
      pieces.append('_')
      pieces.append(ch.lower())
    else:
      pieces.append(ch)
  return ''.join(pieces)
|
||
|
||
class _FieldMaskTree(object):
  """Represents a FieldMask in a tree structure.

  For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
  the FieldMaskTree will be:
    [_root] -+- foo -+- bar
             |       |
             |       +- baz
             |
             +- bar --- baz
  Each leaf node (an empty dict) represents a complete field path.
  """

  __slots__ = ('_root',)

  def __init__(self, field_mask=None):
    """Initializes the tree, optionally merging in an existing FieldMask."""
    self._root = {}
    if field_mask:
      self.MergeFromFieldMask(field_mask)

  def MergeFromFieldMask(self, field_mask):
    """Merges every path of a FieldMask into the tree."""
    for path in field_mask.paths:
      self.AddPath(path)

  def AddPath(self, path):
    """Adds a field path into the tree.

    If the path is a sub-path of an existing leaf, the tree already covers
    it and nothing changes. If the path ends on an existing interior node,
    that node becomes a leaf (its children are dropped) because the new
    path covers all of them. Otherwise a new branch is grown.

    Args:
      path: The field path to add.
    """
    current = self._root
    for segment in path.split('.'):
      child = current.get(segment)
      if child is None:
        child = current[segment] = {}
      elif not child:
        # An existing leaf already covers this (longer) path.
        return
      current = child
    # The new path subsumes any previously added sub-paths.
    current.clear()

  def ToFieldMask(self, field_mask):
    """Converts the tree to a FieldMask."""
    field_mask.Clear()
    _AddFieldPaths(self._root, '', field_mask)

  def IntersectPath(self, path, intersection):
    """Calculates the intersection part of a field path with this tree.

    Args:
      path: The field path to intersect.
      intersection: The out tree to record the intersection part.
    """
    current = self._root
    for segment in path.split('.'):
      child = current.get(segment)
      if child is None:
        # Path diverges from the tree: nothing to record.
        return
      if not child:
        # Reached a leaf: the whole path is covered by this tree.
        intersection.AddPath(path)
        return
      current = child
    # Path ended on an interior node: keep every leaf below it.
    intersection.AddLeafNodes(path, current)

  def AddLeafNodes(self, prefix, node):
    """Adds all leaf paths beginning with prefix to this tree."""
    if not node:
      self.AddPath(prefix)
    for name, child in node.items():
      self.AddLeafNodes(prefix + '.' + name, child)

  def MergeMessage(
      self, source, destination,
      replace_message, replace_repeated):
    """Merge all fields specified by this tree from source to destination."""
    _MergeMessage(
        self._root, source, destination, replace_message, replace_repeated)
|
||
|
||
def _StrConvert(value):
  """Returns value unchanged if it is a str, else encodes it to UTF-8.

  This file is imported by the C extension, and some of its methods (for
  example ClearField) require a string for the field name; other text
  types are therefore converted.
  """
  if isinstance(value, str):
    return value
  return value.encode('utf-8')
|
||
|
||
def _MergeMessage(
    node, source, destination, replace_message, replace_repeated):
  """Merge all fields specified by a sub-tree from source to destination.

  Args:
    node: A dict tree node; an empty dict means "copy this whole field".
    source: Source message.
    destination: Destination message to merge into; assumed to be the same
      message type as source.
    replace_message: Replace message fields if True, merge them if False.
    replace_repeated: Replace repeated fields if True, append if False.

  Raises:
    ValueError: if the tree names a field that does not exist in source,
      or has sub-fields under a field that is not a singular message field.
  """
  source_descriptor = source.DESCRIPTOR
  for name in node:
    child = node[name]
    # Use .get() so an unknown field name raises the intended ValueError
    # below instead of an opaque KeyError from the descriptor mapping.
    field = source_descriptor.fields_by_name.get(name)
    if field is None:
      raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
          name, source_descriptor.full_name))
    if child:
      # Sub-paths are only allowed for singular message fields.
      if (field.label == FieldDescriptor.LABEL_REPEATED or
          field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
        raise ValueError('Error: Field {0} in message {1} is not a singular '
                         'message field and cannot have sub-fields.'.format(
                             name, source_descriptor.full_name))
      if source.HasField(name):
        _MergeMessage(
            child, getattr(source, name), getattr(destination, name),
            replace_message, replace_repeated)
      continue
    if field.label == FieldDescriptor.LABEL_REPEATED:
      if replace_repeated:
        destination.ClearField(_StrConvert(name))
      repeated_source = getattr(source, name)
      repeated_destination = getattr(destination, name)
      repeated_destination.MergeFrom(repeated_source)
    else:
      if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
        if replace_message:
          destination.ClearField(_StrConvert(name))
        if source.HasField(name):
          getattr(destination, name).MergeFrom(getattr(source, name))
      else:
        setattr(destination, name, getattr(source, name))
|
||
|
||
def _AddFieldPaths(node, prefix, field_mask):
  """Adds the field paths descended from node to field_mask.

  Paths are appended in sorted (alphabetical) order.
  """
  if not node and prefix:
    # Leaf reached: the accumulated prefix is a complete path.
    field_mask.paths.append(prefix)
    return
  for name in sorted(node):
    child_path = prefix + '.' + name if prefix else name
    _AddFieldPaths(node[name], child_path, field_mask)
55 changes: 55 additions & 0 deletions
55
...al/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/message_listener.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""Defines a listener interface for observing certain | ||
state transitions on Message objects. | ||
Also defines a null implementation of this interface. | ||
""" | ||
|
||
__author__ = 'robinson@google.com (Will Robinson)' | ||
|
||
|
||
class MessageListener(object):
  """Listens for modifications made to a message.

  Meant to be registered via Message._SetListener().

  Attributes:
    dirty: If True, then calling Modified() would be a no-op. This can be
      used to avoid these calls entirely in the common case.
  """

  def Modified(self):
    """Called every time the message is modified in such a way that the
    parent message may need to be updated. This currently means either:

    (a) The message was modified for the first time, so the parent message
        should henceforth mark the message as present.
    (b) The message's cached byte size became dirty -- i.e. the message was
        modified for the first time after a previous call to ByteSize().
        Therefore the parent should also mark its byte size as dirty.

    Note that (a) implies (b), since new objects start out with a client
    cached size (zero). However, we document (a) explicitly because it is
    important. Modified() will *only* be called in response to one of these
    two events -- not every time the sub-message is modified.

    Note that if the listener's |dirty| attribute is true, then calling
    Modified at the moment would be a no-op, so it can be skipped.
    Performance-sensitive callers should check this attribute directly
    before calling since it will be true most of the time.
    """
    raise NotImplementedError
|
||
|
||
class NullMessageListener(object):
  """No-op MessageListener implementation."""

  def Modified(self):
    # Intentionally does nothing: notifications are discarded.
    pass
1,546 changes: 1,546 additions & 0 deletions
1,546
...rnal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/python_message.py
Large diffs are not rendered by default.
Oops, something went wrong.
119 changes: 119 additions & 0 deletions
119
...al/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/testing_refleaks.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,119 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""A subclass of unittest.TestCase which checks for reference leaks. | ||
To use: | ||
- Use testing_refleak.BaseTestCase instead of unittest.TestCase | ||
- Configure and compile Python with --with-pydebug | ||
If sys.gettotalrefcount() is not available (because Python was built without | ||
the Py_DEBUG option), then this module is a no-op and tests will run normally. | ||
""" | ||
|
||
import copyreg | ||
import gc | ||
import sys | ||
import unittest | ||
|
||
|
||
class LocalTestResult(unittest.TestResult):
  """A TestResult which forwards events to a parent object, except for Skips."""

  def __init__(self, parent_result):
    super(LocalTestResult, self).__init__()
    self.parent_result = parent_result

  def addError(self, test, error):
    self.parent_result.addError(test, error)

  def addFailure(self, test, error):
    self.parent_result.addFailure(test, error)

  def addSkip(self, test, reason):
    # Skips are deliberately not forwarded to the parent result.
    pass
|
||
|
||
class ReferenceLeakCheckerMixin(object):
  """A mixin class for TestCase, which checks reference counts."""

  # Number of measured runs whose refcount deltas must all be zero.
  NB_RUNS = 3

  def run(self, result=None):
    """Runs the test several times, asserting the total refcount is stable.

    Tests marked as expected failures are not checked. The test is first run
    twice to warm up per-instance caches, then NB_RUNS more times while
    measuring sys.gettotalrefcount() before and after each run; every delta
    must be zero or the test is reported as an error on `result`.
    """
    testMethod = getattr(self, self._testMethodName)
    expecting_failure_method = getattr(testMethod, "__unittest_expecting_failure__", False)
    expecting_failure_class = getattr(self, "__unittest_expecting_failure__", False)
    if expecting_failure_class or expecting_failure_method:
      return

    # python_message.py registers all Message classes to some pickle global
    # registry, which makes the classes immortal.
    # We save a copy of this registry, and reset it before we count
    # references.
    self._saved_pickle_registry = copyreg.dispatch_table.copy()

    # Run the test twice, to warm up the instance attributes.
    super(ReferenceLeakCheckerMixin, self).run(result=result)
    super(ReferenceLeakCheckerMixin, self).run(result=result)

    oldrefcount = 0
    # Measured runs report through a LocalTestResult so that skips recorded
    # during warm-up are not double-counted on the real result.
    local_result = LocalTestResult(result)
    num_flakes = 0

    refcount_deltas = []
    while len(refcount_deltas) < self.NB_RUNS:
      oldrefcount = self._getRefcounts()
      super(ReferenceLeakCheckerMixin, self).run(result=local_result)
      newrefcount = self._getRefcounts()
      # If the GC was able to collect some objects after the call to run() that
      # it could not collect before the call, then the counts won't match.
      if newrefcount < oldrefcount and num_flakes < 2:
        # This result is (probably) a flake -- garbage collectors aren't very
        # predictable, but a lower ending refcount is the opposite of the
        # failure we are testing for. If the result is repeatable, then we will
        # eventually report it, but not after trying to eliminate it.
        num_flakes += 1
        continue
      num_flakes = 0
      refcount_deltas.append(newrefcount - oldrefcount)
    print(refcount_deltas, self)

    try:
      self.assertEqual(refcount_deltas, [0] * self.NB_RUNS)
    except Exception:  # pylint: disable=broad-except
      result.addError(self, sys.exc_info())

  def _getRefcounts(self):
    """Returns the interpreter-wide total refcount after restoring the
    pickle registry and forcing garbage collection."""
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(self._saved_pickle_registry)
    # It is sometimes necessary to gc.collect() multiple times, to ensure
    # that all objects can be collected.
    gc.collect()
    gc.collect()
    gc.collect()
    return sys.gettotalrefcount()
|
||
|
||
if not hasattr(sys, 'gettotalrefcount'):
  # Python was built without Py_DEBUG: refcount totals are unavailable,
  # so run the tests unmodified.

  def TestCase(test_class):
    return test_class

  def SkipReferenceLeakChecker(reason):
    del reason  # Don't skip, so don't need a reason.
    def Same(func):
      return func
    return Same

else:

  def TestCase(test_class):
    # Graft the leak-checking mixin in front of the class's original bases.
    new_bases = (ReferenceLeakCheckerMixin,) + test_class.__bases__
    return type(test_class)(
        test_class.__name__, new_bases, dict(test_class.__dict__))

  SkipReferenceLeakChecker = unittest.skip
408 changes: 408 additions & 0 deletions
408
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/type_checkers.py
Large diffs are not rendered by default.
Oops, something went wrong.
567 changes: 567 additions & 0 deletions
567
...al/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/well_known_types.py
Large diffs are not rendered by default.
Oops, something went wrong.
245 changes: 245 additions & 0 deletions
245
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/internal/wire_format.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,245 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""Constants and static functions to support protocol buffer wire format.""" | ||
|
||
__author__ = 'robinson@google.com (Will Robinson)' | ||
|
||
import struct | ||
from google.protobuf import descriptor | ||
from google.protobuf import message | ||
|
||
|
||
TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7

# These numbers identify the wire type of a protocol buffer value.
# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
# tag-and-type to store one of these WIRETYPE_* constants.
# These values must match WireType enum in //google/protobuf/wire_format.h.
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
WIRETYPE_END_GROUP = 4
WIRETYPE_FIXED32 = 5
_WIRETYPE_MAX = 5  # Highest valid wire type value; used by PackTag().


# Bounds for various integer types.
INT32_MAX = int((1 << 31) - 1)
INT32_MIN = int(-(1 << 31))
UINT32_MAX = (1 << 32) - 1

INT64_MAX = (1 << 63) - 1
INT64_MIN = -(1 << 63)
UINT64_MAX = (1 << 64) - 1

# "struct" format strings that will encode/decode the specified formats.
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'


# We'll have to provide alternate implementations of AppendLittleEndian*() on
# any architectures where these checks fail.
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
  raise AssertionError('Format "I" is not a 32-bit number.')
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
  raise AssertionError('Format "Q" is not a 64-bit number.')
|
||
|
||
def PackTag(field_number, wire_type):
  """Returns an unsigned 32-bit integer that encodes the field number and
  wire type information in standard protocol message wire format.

  Args:
    field_number: Expected to be an integer in the range [1, 1 << 29)
    wire_type: One of the WIRETYPE_* constants.

  Raises:
    message.EncodeError: if wire_type is outside the valid range.
  """
  if wire_type < 0 or wire_type > _WIRETYPE_MAX:
    raise message.EncodeError('Unknown wire type: %d' % wire_type)
  return (field_number << TAG_TYPE_BITS) | wire_type
|
||
|
||
def UnpackTag(tag):
  """The inverse of PackTag(). Given an unsigned 32-bit number,
  returns a (field_number, wire_type) tuple.
  """
  field_number = tag >> TAG_TYPE_BITS
  wire_type = tag & TAG_TYPE_MASK
  return field_number, wire_type
|
||
|
||
def ZigZagEncode(value):
  """ZigZag Transform: Encodes signed integers so that they can be
  effectively used with varint encoding. See wire_format.h for
  more details.
  """
  encoded = value << 1
  if value < 0:
    encoded ^= ~0
  return encoded
|
||
|
||
def ZigZagDecode(value):
  """Inverse of ZigZagEncode()."""
  decoded = value >> 1
  if value & 0x1:
    decoded ^= ~0
  return decoded
|
||
|
||
|
||
# The *ByteSize() functions below return the number of bytes required to | ||
# serialize "field number + type" information and then serialize the value. | ||
|
||
|
||
def Int32ByteSize(field_number, int32):
  """Returns byte size of a tagged int32 field; wire-identical to int64."""
  return Int64ByteSize(field_number, int32)
|
||
|
||
def Int32ByteSizeNoTag(int32):
  """Returns byte size of an int32 value without its tag, after masking the
  value to an unsigned 64-bit integer."""
  masked = 0xffffffffffffffff & int32
  return _VarUInt64ByteSizeNoTag(masked)
|
||
|
||
def Int64ByteSize(field_number, int64):
  """Returns byte size of a tagged int64 field."""
  # Have to convert to uint before calling UInt64ByteSize().
  as_uint64 = 0xffffffffffffffff & int64
  return UInt64ByteSize(field_number, as_uint64)
|
||
|
||
def UInt32ByteSize(field_number, uint32):
  """Returns byte size of a tagged uint32 field; wire-identical to uint64."""
  return UInt64ByteSize(field_number, uint32)
|
||
|
||
def UInt64ByteSize(field_number, uint64):
  """Returns byte size of a tagged uint64 field: tag + varint payload."""
  tag_size = TagByteSize(field_number)
  return tag_size + _VarUInt64ByteSizeNoTag(uint64)
|
||
|
||
def SInt32ByteSize(field_number, int32):
  """Returns byte size of a tagged sint32 field (zigzag + varint)."""
  zigzagged = ZigZagEncode(int32)
  return UInt32ByteSize(field_number, zigzagged)
|
||
|
||
def SInt64ByteSize(field_number, int64):
  """Returns byte size of a tagged sint64 field (zigzag + varint)."""
  zigzagged = ZigZagEncode(int64)
  return UInt64ByteSize(field_number, zigzagged)
|
||
|
||
def Fixed32ByteSize(field_number, fixed32):
  """Returns byte size of a tagged fixed32 field: tag + 4 payload bytes."""
  return 4 + TagByteSize(field_number)
|
||
|
||
def Fixed64ByteSize(field_number, fixed64):
  """Returns byte size of a tagged fixed64 field: tag + 8 payload bytes."""
  return 8 + TagByteSize(field_number)
|
||
|
||
def SFixed32ByteSize(field_number, sfixed32):
  """Returns byte size of a tagged sfixed32 field: tag + 4 payload bytes."""
  return 4 + TagByteSize(field_number)
|
||
|
||
def SFixed64ByteSize(field_number, sfixed64):
  """Returns byte size of a tagged sfixed64 field: tag + 8 payload bytes."""
  return 8 + TagByteSize(field_number)
|
||
|
||
def FloatByteSize(field_number, flt):
  """Returns byte size of a tagged float field: tag + 4 payload bytes."""
  return 4 + TagByteSize(field_number)
|
||
|
||
def DoubleByteSize(field_number, double):
  """Returns byte size of a tagged double field: tag + 8 payload bytes."""
  return 8 + TagByteSize(field_number)
|
||
|
||
def BoolByteSize(field_number, b):
  """Returns byte size of a tagged bool field: tag + a single payload byte."""
  return 1 + TagByteSize(field_number)
|
||
|
||
def EnumByteSize(field_number, enum):
  """Returns byte size of a tagged enum field; encoded like uint32."""
  return UInt32ByteSize(field_number, enum)
|
||
|
||
def StringByteSize(field_number, string):
  """Returns byte size of a tagged string field, measured as UTF-8 bytes."""
  encoded = string.encode('utf-8')
  return BytesByteSize(field_number, encoded)
|
||
|
||
def BytesByteSize(field_number, b):
  """Returns byte size of a tagged bytes field: tag + length varint + data."""
  payload_len = len(b)
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(payload_len)
          + payload_len)
|
||
|
||
def GroupByteSize(field_number, message):
  """Returns byte size of a group field: START and END tags + contents."""
  group_tags = 2 * TagByteSize(field_number)
  return group_tags + message.ByteSize()
|
||
|
||
def MessageByteSize(field_number, message):
  """Returns byte size of an embedded message field: tag + length + body."""
  body_size = message.ByteSize()
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(body_size)
          + body_size)
|
||
|
||
def MessageSetItemByteSize(field_number, msg):
  """Returns byte size of a MessageSet item.

  An item is a repeated group (field number 1) containing a type_id field
  (field number 2) and a message field (field number 3).
  """
  # START and END tags for the item group, plus the type_id and message tags.
  total_size = 2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)

  # The type_id varint.
  total_size += _VarUInt64ByteSizeNoTag(field_number)

  # Length prefix followed by the serialized message itself.
  message_size = msg.ByteSize()
  total_size += _VarUInt64ByteSizeNoTag(message_size)
  total_size += message_size

  return total_size
|
||
|
||
def TagByteSize(field_number):
  """Returns the bytes required to serialize a tag with this field number."""
  # Wire type 0 is used since the type bits never change the tag's size.
  packed = PackTag(field_number, 0)
  return _VarUInt64ByteSizeNoTag(packed)
|
||
|
||
# Private helper function for the *ByteSize() functions above. | ||
|
||
def _VarUInt64ByteSizeNoTag(uint64):
  """Returns the number of bytes required to serialize a single varint
  using boundary value comparisons. uint64 must be unsigned.

  Raises:
    message.EncodeError: if uint64 exceeds UINT64_MAX.
  """
  # Each varint byte carries 7 payload bits; compare against the largest
  # value representable in 1..9 bytes, in order.
  for size, limit in enumerate((0x7f, 0x3fff, 0x1fffff, 0xfffffff,
                                0x7ffffffff, 0x3ffffffffff,
                                0x1ffffffffffff, 0xffffffffffffff,
                                0x7fffffffffffffff), start=1):
    if uint64 <= limit:
      return size
  if uint64 > UINT64_MAX:
    raise message.EncodeError('Value out of range: %d' % uint64)
  return 10
|
||
|
||
# Field types that can never use packed encoding: the length-delimited
# types plus groups. Consulted by IsTypePackable() below.
NON_PACKABLE_TYPES = (
    descriptor.FieldDescriptor.TYPE_STRING,
    descriptor.FieldDescriptor.TYPE_GROUP,
    descriptor.FieldDescriptor.TYPE_MESSAGE,
    descriptor.FieldDescriptor.TYPE_BYTES
)
|
||
|
||
def IsTypePackable(field_type):
  """Return true iff packable = true is valid for fields of this type.

  Args:
    field_type: a FieldDescriptor::Type value.

  Returns:
    True iff fields of this type are packable.
  """
  return not (field_type in NON_PACKABLE_TYPES)
904 changes: 904 additions & 0 deletions
904
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/json_format.py
Large diffs are not rendered by default.
Oops, something went wrong.
399 changes: 399 additions & 0 deletions
399
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/message.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,399 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
# TODO: We should just make these methods all "pure-virtual" and move | ||
# all implementation out, into reflection.py for now. | ||
|
||
|
||
"""Contains an abstract base class for protocol messages.""" | ||
|
||
__author__ = 'robinson@google.com (Will Robinson)' | ||
|
||
class Error(Exception):
  """Base error type for this module."""
|
||
|
||
class DecodeError(Error):
  """Exception raised when deserializing messages."""
|
||
|
||
class EncodeError(Error):
  """Exception raised when serializing messages."""
|
||
|
||
class Message(object):

  """Abstract base class for protocol messages.

  Protocol message classes are almost always generated by the protocol
  compiler. These generated types subclass Message and implement the methods
  shown below.
  """

  # TODO: Link to an HTML document here.

  # TODO: Document that instances of this class will also
  # have an Extensions attribute with __getitem__ and __setitem__.
  # Again, not sure how to best convey this.

  # TODO: Document these fields and methods.

  # The abstract base carries no per-instance attributes of its own.
  __slots__ = []

  #: The :class:`google.protobuf.Descriptor` for this message type;
  #: overridden by each generated message class.
  DESCRIPTOR = None
|
||
def __deepcopy__(self, memo=None):
  """Returns a deep copy: a fresh message of the same concrete type with
  this message's content merged in. `memo` is accepted for copy.deepcopy
  compatibility but is not consulted here."""
  clone = type(self)()
  clone.MergeFrom(self)
  return clone
|
||
def __eq__(self, other_msg):
  """Recursively compares two messages by value and structure.

  Implemented by concrete subclasses.
  """
  raise NotImplementedError
|
||
def __ne__(self, other_msg):
  """Returns the negation of __eq__."""
  # Can't just say self != other_msg, since that would infinitely recurse. :)
  return not self == other_msg
|
||
def __hash__(self):
  """Messages are mutable and therefore deliberately unhashable."""
  raise TypeError('unhashable object')
|
||
def __str__(self):
  """Outputs a human-readable representation of the message.

  Implemented by concrete subclasses.
  """
  raise NotImplementedError
|
||
def __unicode__(self):
  """Outputs a human-readable representation of the message.

  Retained for Python 2 compatibility; implemented by concrete subclasses.
  """
  raise NotImplementedError
|
||
def MergeFrom(self, other_msg):
  """Merges the contents of the specified message into current message.

  This method merges the contents of the specified message into the current
  message. Singular fields that are set in the specified message overwrite
  the corresponding fields in the current message. Repeated fields are
  appended. Singular sub-messages and groups are recursively merged.

  Args:
    other_msg (Message): A message to merge into the current message.
  """
  raise NotImplementedError
|
||
def CopyFrom(self, other_msg):
  """Copies the content of the specified message into the current message.

  The method clears the current message and then merges the specified
  message using MergeFrom.

  Args:
    other_msg (Message): A message to copy into the current one.
  """
  # Copying a message onto itself is a no-op.
  if self is not other_msg:
    self.Clear()
    self.MergeFrom(other_msg)
|
||
def Clear(self):
  """Clears all data that was set in the message.

  Implemented by concrete subclasses.
  """
  raise NotImplementedError
|
||
def SetInParent(self):
  """Mark this as present in the parent.

  This normally happens automatically when you assign a field of a
  sub-message, but sometimes you want to make the sub-message
  present while keeping it empty. If you find yourself using this,
  you may want to reconsider your design.
  """
  raise NotImplementedError
|
||
def IsInitialized(self):
  """Checks if the message is initialized.

  Returns:
    bool: True if the message is initialized (i.e. all of its required
      fields are set).
  """
  raise NotImplementedError
|
||
# TODO: MergeFromString() should probably return None and be | ||
# implemented in terms of a helper that returns the # of bytes read. Our | ||
# deserialization routines would use the helper when recursively | ||
# deserializing, but the end user would almost always just want the no-return | ||
# MergeFromString(). | ||
|
||
def MergeFromString(self, serialized):
  """Merges serialized protocol buffer data into this message.

  When we find a field in `serialized` that is already present
  in this message:

  - If it's a "repeated" field, we append to the end of our list.
  - Else, if it's a scalar, we overwrite our field.
  - Else, (it's a nonrepeated composite), we recursively merge
    into the existing composite.

  Args:
    serialized (bytes): Any object that allows us to call
      ``memoryview(serialized)`` to access a string of bytes using the
      buffer interface.

  Returns:
    int: The number of bytes read from `serialized`.
    For non-group messages, this will always be `len(serialized)`,
    but for messages which are actually groups, this will
    generally be less than `len(serialized)`, since we must
    stop when we reach an ``END_GROUP`` tag. Note that if
    we *do* stop because of an ``END_GROUP`` tag, the number
    of bytes returned does not include the bytes
    for the ``END_GROUP`` tag information.

  Raises:
    DecodeError: if the input cannot be parsed.
  """
  # TODO: Document handling of unknown fields.
  # TODO: When we switch to a helper, this will return None.
  raise NotImplementedError
|
||
def ParseFromString(self, serialized):
  """Parse serialized protocol buffer data in binary form into this message.

  Like :func:`MergeFromString()`, except we clear the object first.

  Returns:
    int: The number of bytes read from `serialized`.

  Raises:
    message.DecodeError if the input cannot be parsed.
  """
  self.Clear()
  bytes_read = self.MergeFromString(serialized)
  return bytes_read
|
||
def SerializeToString(self, **kwargs):
  """Serializes the protocol message to a binary string.

  Keyword Args:
    deterministic (bool): If true, requests deterministic serialization
      of the protobuf, with predictable ordering of map keys.

  Returns:
    A binary string representation of the message if all of the required
    fields in the message are set (i.e. the message is initialized).

  Raises:
    EncodeError: if the message isn't initialized (see :func:`IsInitialized`).
  """
  raise NotImplementedError
|
||
def SerializePartialToString(self, **kwargs):
  """Serializes the protocol message to a binary string.

  This method is similar to SerializeToString but doesn't check if the
  message is initialized.

  Keyword Args:
    deterministic (bool): If true, requests deterministic serialization
      of the protobuf, with predictable ordering of map keys.

  Returns:
    bytes: A serialized representation of the partial message.
  """
  raise NotImplementedError
|
||
# TODO: Decide whether we like these better | ||
# than auto-generated has_foo() and clear_foo() methods | ||
# on the instances themselves. This way is less consistent | ||
# with C++, but it makes reflection-type access easier and | ||
# reduces the number of magically autogenerated things. | ||
# | ||
# TODO: Be sure to document (and test) exactly | ||
# which field names are accepted here. Are we case-sensitive? | ||
# What do we do with fields that share names with Python keywords | ||
# like 'lambda' and 'yield'? | ||
# | ||
# nnorwitz says: | ||
# """ | ||
# Typically (in python), an underscore is appended to names that are | ||
# keywords. So they would become lambda_ or yield_. | ||
# """ | ||
def ListFields(self):
  """Returns (FieldDescriptor, value) tuples for every present field.

  A message field counts as non-empty when HasField() would return true. A
  singular primitive field counts as non-empty when HasField() would return
  true in proto2, or when it is non-zero in proto3. A repeated field counts
  as non-empty when it holds at least one element. Results are ordered by
  field number.

  Returns:
    list[tuple(FieldDescriptor, value)]: field descriptors and values for
    all non-empty fields of the message. The value representation varies
    by field type.
  """
  raise NotImplementedError
|
||
def HasField(self, field_name):
  """Checks whether the named field (or oneof group) has been set.

  For a oneof group, reports whether any member field is set. A name not
  defined in the message descriptor triggers :exc:`ValueError`.

  Args:
    field_name (str): The name of the field to check for presence.

  Returns:
    bool: Whether a value has been set for the named field.

  Raises:
    ValueError: if the `field_name` is not a member of this message.
  """
  raise NotImplementedError
|
||
def ClearField(self, field_name):
  """Clears the contents of a given field.

  Inside a oneof group, clears the field that is set. If the name neither
  refers to a defined field nor a oneof group, :exc:`ValueError` is raised.

  Args:
    field_name (str): The name of the field (or oneof group) to clear.

  Raises:
    ValueError: if the `field_name` is not a member of this message.
  """
  raise NotImplementedError
|
||
def WhichOneof(self, oneof_group):
  """Names the field that is currently set inside a oneof group, if any.

  Args:
    oneof_group (str): the name of the oneof group to check.

  Returns:
    str or None: The name of the member field that is set, or None when
    no field in the group is set.

  Raises:
    ValueError: no group with the given name exists
  """
  raise NotImplementedError
|
||
def HasExtension(self, field_descriptor):
  """Checks if a certain extension is present for this message.

  Extensions are retrieved using the :attr:`Extensions` mapping (if present).

  Args:
    field_descriptor: The field descriptor for the extension to check.

  Returns:
    bool: Whether the extension is present for this message.

  Raises:
    KeyError: if the extension is repeated. As with repeated fields there
      is no separate notion of presence: a "not present" repeated
      extension is simply an empty list.
  """
  raise NotImplementedError
|
||
def ClearExtension(self, field_descriptor):
  """Removes any value stored for the given extension.

  Args:
    field_descriptor: The field descriptor for the extension to clear.
  """
  raise NotImplementedError
|
||
def UnknownFields(self):
  """Returns this message's UnknownFieldSet.

  Returns:
    UnknownFieldSet: The unknown fields stored in this message.
  """
  raise NotImplementedError
|
||
def DiscardUnknownFields(self):
  """Drops every entry of the :class:`UnknownFieldSet`.

  The operation recurses into nested messages.
  """
  raise NotImplementedError
|
||
def ByteSize(self):
  """Reports how many bytes the serialized form of this message occupies.

  Contained sub-messages are measured recursively via their own ByteSize().

  Returns:
    int: The number of bytes required to serialize this message.
  """
  raise NotImplementedError
|
||
@classmethod
def FromString(cls, s):
  """Parses a new message of this type from serialized wire-format data.

  Args:
    s (bytes): The serialized wire-format data to parse.

  Returns:
    A new message instance of type `cls` parsed from `s`.
  """
  raise NotImplementedError
|
||
# TODO: Remove it in OSS
@staticmethod
def RegisterExtension(field_descriptor):
  """Registers an extension field descriptor on this message class.

  NOTE(review): deprecated hook slated for removal (see TODO above);
  extension registration is normally driven by the descriptor pool.

  Args:
    field_descriptor: The field descriptor of the extension to register.
  """
  raise NotImplementedError
|
||
def _SetListener(self, message_listener):
  """Internal method used by the protocol message implementation.
  Clients should not call this directly.

  Installs `message_listener` as the callback fired on certain state
  transitions, discarding any listener installed by a previous
  _SetListener() call; passing None unregisters the current listener and
  disconnects this Message from the object tree. Listeners register
  back-edges from children to parents at runtime so that "has" bits and
  byte-size-dirty bits can be propagated to parent and ancestor objects
  whenever a child or descendant is modified. A non-None listener must
  implement the MessageListener interface in internal/message_listener.py.
  """
  raise NotImplementedError
|
||
def __getstate__(self):
  """Support the pickle protocol."""
  # Pickle the wire-format bytes rather than the live object graph.
  return {'serialized': self.SerializePartialToString()}
|
||
def __setstate__(self, state):
  """Support the pickle protocol."""
  self.__init__()
  payload = state['serialized']
  if isinstance(payload, bytes):
    self.ParseFromString(payload)
  else:
    # On Python 3, encoding='latin1' is required when unpickling protos
    # that were pickled by Python 2 (they arrive as str).
    self.ParseFromString(payload.encode('latin1'))
|
||
def __reduce__(self):
  """Support the pickle protocol for both top-level and nested types."""
  desc = self.DESCRIPTOR
  if desc.containing_type is None:
    # Top-level message: the class itself can be pickled by reference.
    return type(self), (), self.__getstate__()
  # Nested message type: Python does not pickle nested classes, so
  # reconstruct via the symbol database on the receiving end.
  return (_InternalConstructMessage, (desc.full_name,),
          self.__getstate__())
|
||
|
||
def _InternalConstructMessage(full_name):
  """Constructs a nested message by full name (pickle helper)."""
  from google.protobuf import symbol_database  # pylint:disable=g-import-not-at-top

  db = symbol_database.Default()
  return db.GetSymbol(full_name)()
233 changes: 233 additions & 0 deletions
233
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/message_factory.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,233 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""Provides a factory class for generating dynamic messages. | ||
The easiest way to use this class is if you have access to the FileDescriptor | ||
protos containing the messages you want to create you can just do the following: | ||
message_classes = message_factory.GetMessages(iterable_of_file_descriptors) | ||
my_proto_instance = message_classes['some.proto.package.MessageName']() | ||
""" | ||
|
||
__author__ = 'matthewtoia@google.com (Matt Toia)' | ||
|
||
import warnings | ||
|
||
from google.protobuf.internal import api_implementation | ||
from google.protobuf import descriptor_pool | ||
from google.protobuf import message | ||
|
||
if api_implementation.Type() == 'python': | ||
from google.protobuf.internal import python_message as message_impl | ||
else: | ||
from google.protobuf.pyext import cpp_message as message_impl # pylint: disable=g-import-not-at-top | ||
|
||
|
||
# The type of all Message classes. | ||
_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType | ||
|
||
|
||
def GetMessageClass(descriptor):
  """Obtains a proto2 message class based on the passed-in descriptor.

  Repeated invocations with descriptors sharing a fully qualified name
  yield the same class object.

  Args:
    descriptor: The descriptor to build from.

  Returns:
    A class describing the passed in descriptor.
  """
  cached = getattr(descriptor, '_concrete_class', None)
  return cached if cached else _InternalCreateMessageClass(descriptor)
|
||
|
||
def GetMessageClassesForFiles(files, pool):
  """Gets all the messages from the specified files.

  Dependencies are found and resolved through the pool; this fails if the
  descriptor pool cannot satisfy them.

  Args:
    files: The file names to extract messages from.
    pool: The descriptor pool holding the files and their dependencies.

  Returns:
    A dictionary mapping proto names to the message classes.
  """
  classes = {}
  for file_name in files:
    file_desc = pool.FindFileByName(file_name)
    for msg_desc in file_desc.message_types_by_name.values():
      classes[msg_desc.full_name] = GetMessageClass(msg_desc)

    # The descriptor pool creates the extension FieldDescriptors, but the
    # Python classes built by the factory still need them registered
    # explicitly; resolving the containing type here triggers that.
    for ext in file_desc.extensions_by_name.values():
      GetMessageClass(ext.containing_type)
      if api_implementation.Type() != 'python':
        # TODO: Remove this check here. Duplicate extension
        # register check should be in descriptor_pool.
        registered = pool.FindExtensionByNumber(
            ext.containing_type, ext.number)
        if ext is not registered:
          raise ValueError('Double registration of Extensions')
      # Recursively load protos for the extension's field type so the
      # extension can be fully represented, mirroring regular fields.
      if ext.message_type:
        GetMessageClass(ext.message_type)
  return classes
|
||
|
||
def _InternalCreateMessageClass(descriptor):
  """Builds a proto2 message class based on the passed in descriptor.

  Args:
    descriptor: The descriptor to build from.

  Returns:
    A class describing the passed in descriptor.
  """
  new_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
      descriptor.name,
      (message.Message,),
      {
          'DESCRIPTOR': descriptor,
          # If module not set, it wrongly points to message_factory module.
          '__module__': None,
      })
  # Resolve message-typed fields so their classes exist too.
  for field_desc in descriptor.fields:
    if field_desc.message_type:
      GetMessageClass(field_desc.message_type)

  for ext in new_class.DESCRIPTOR.extensions:
    GetMessageClass(ext.containing_type)
    if api_implementation.Type() != 'python':
      # TODO: Remove this check here. Duplicate extension
      # register check should be in descriptor_pool.
      ext_pool = ext.containing_type.file.pool
      if ext is not ext_pool.FindExtensionByNumber(
          ext.containing_type, ext.number):
        raise ValueError('Double registration of Extensions')
    if ext.message_type:
      GetMessageClass(ext.message_type)
  return new_class
|
||
|
||
# Deprecated. Please use GetMessageClass() or GetMessageClassesForFiles()
# method above instead.
class MessageFactory(object):
  """Factory for creating Proto2 messages from descriptors in a pool."""

  def __init__(self, pool=None):
    """Initializes a new factory."""
    # Fall back to a fresh private pool when the caller supplies none.
    self.pool = pool or descriptor_pool.DescriptorPool()

  def GetPrototype(self, descriptor):
    """Deprecated: obtains a proto2 message class for a descriptor.

    Descriptors with the same fully qualified name map to the same class.
    Use the module-level GetMessageClass() instead.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    warnings.warn(
        'MessageFactory class is deprecated. Please use '
        'GetMessageClass() instead of MessageFactory.GetPrototype. '
        'MessageFactory class will be removed after 2024.',
        stacklevel=2,
    )
    return GetMessageClass(descriptor)

  def CreatePrototype(self, descriptor):
    """Deprecated: builds a new proto2 message class for a descriptor.

    Do not call this directly — it always creates a new class. Use the
    module-level GetMessageClass() instead.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    warnings.warn(
        'Directly call CreatePrototype is wrong. Please use '
        'GetMessageClass() method instead. Directly use '
        'CreatePrototype will raise error after July 2023.',
        stacklevel=2,
    )
    return _InternalCreateMessageClass(descriptor)

  def GetMessages(self, files):
    """Deprecated: gets all the messages from the specified files.

    Dependencies are found and resolved through the factory's pool, failing
    if the pool cannot satisfy them. Use the module-level
    GetMessageClassesForFiles() instead.

    Args:
      files: The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes, including
      dependent messages and messages defined in the same file as any
      specified message.
    """
    warnings.warn(
        'MessageFactory class is deprecated. Please use '
        'GetMessageClassesForFiles() instead of '
        'MessageFactory.GetMessages(). MessageFactory class '
        'will be removed after 2024.',
        stacklevel=2,
    )
    return GetMessageClassesForFiles(files, self.pool)
|
||
|
||
def GetMessages(file_protos, pool=None):
  """Builds a dictionary of all the messages available in a set of files.

  Args:
    file_protos: Iterable of FileDescriptorProto to build messages out of.
    pool: The descriptor pool to add the file protos.

  Returns:
    A dictionary mapping proto names to the message classes, including
    dependent messages and messages defined in the same file as any
    specified message.
  """
  # The C++ implementation of the protocol buffer library requires files
  # to be added in topological order of the dependency graph.
  target_pool = pool or descriptor_pool.DescriptorPool()
  pending = {fp.name: fp for fp in file_protos}

  def _AddFile(file_proto):
    # Depth-first over dependencies; popping from `pending` both marks a
    # file visited and cuts cycles.
    for dep_name in file_proto.dependency:
      if dep_name in pending:
        _AddFile(pending.pop(dep_name))
    target_pool.Add(file_proto)

  while pending:
    _AddFile(pending.popitem()[1])
  return GetMessageClassesForFiles(
      [fp.name for fp in file_protos], target_pool)
111 changes: 111 additions & 0 deletions
111
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/proto_builder.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,111 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""Dynamic Protobuf class creator.""" | ||
|
||
from collections import OrderedDict | ||
import hashlib | ||
import os | ||
|
||
from google.protobuf import descriptor_pb2 | ||
from google.protobuf import descriptor | ||
from google.protobuf import descriptor_pool | ||
from google.protobuf import message_factory | ||
|
||
|
||
def _GetMessageFromFactory(pool, full_name):
  """Get a proto class from the MessageFactory by name.

  Args:
    pool: a descriptor pool.
    full_name: str, the fully qualified name of the proto type.

  Returns:
    A class, for the type identified by full_name.

  Raises:
    KeyError, if the proto is not found in the factory's descriptor pool.
  """
  return message_factory.GetMessageClass(
      pool.FindMessageTypeByName(full_name))
|
||
|
||
def MakeSimpleProtoClass(fields, full_name=None, pool=None):
  """Create a Protobuf class whose fields are basic types.

  Note: this doesn't validate field names!

  Args:
    fields: dict of {name: field_type} mappings for each field in the proto.
        If this is an OrderedDict the order will be maintained, otherwise
        the fields will be sorted by name.
    full_name: optional str, the fully-qualified name of the proto type.
    pool: optional DescriptorPool instance.

  Returns:
    a class, the new protobuf class with a FileDescriptor.
  """
  pool_instance = pool or descriptor_pool.DescriptorPool()
  if full_name is not None:
    try:
      return _GetMessageFromFactory(pool_instance, full_name)
    except KeyError:
      # The pool doesn't know about this class yet; fall through and
      # build it below.
      pass

  # Keep OrderedDict insertion order; otherwise sort by name so the field
  # layout (and hence the hash below) is deterministic.
  field_items = list(fields.items())
  if not isinstance(fields, OrderedDict):
    field_items.sort()

  # Hash the field layout into a file name that is unlikely to collide
  # with any imported proto file.
  digest = hashlib.sha1()
  for f_name, f_type in field_items:
    digest.update(f_name.encode('utf-8'))
    digest.update(str(f_type).encode('utf-8'))
  proto_file_name = digest.hexdigest() + '.proto'

  if full_name is None:
    # Anonymous proto: derive its name from the same hash.
    full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
                 digest.hexdigest())
    try:
      return _GetMessageFromFactory(pool_instance, full_name)
    except KeyError:
      # Not built before; fall through.
      pass

  # First time we see this proto: add a new descriptor to the pool.
  pool_instance.Add(
      _MakeFileDescriptorProto(proto_file_name, full_name, field_items))
  return _GetMessageFromFactory(pool_instance, full_name)
|
||
|
||
def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
  """Populate FileDescriptorProto for MessageFactory's DescriptorPool."""
  package, message_name = full_name.rsplit('.', 1)
  file_proto = descriptor_pb2.FileDescriptorProto()
  file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
  file_proto.package = package
  desc_proto = file_proto.message_type.add()
  desc_proto.name = message_name
  for field_no, (f_name, f_type) in enumerate(field_items, 1):
    field_proto = desc_proto.field.add()
    field_proto.name = f_name
    # If the number falls in the reserved range, reassign it to the
    # first number past the end of that range.
    if field_no >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER:
      field_no += (descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER -
                   descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1)
    field_proto.number = field_no
    field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
    field_proto.type = f_type
  return file_proto
Empty file.
49 changes: 49 additions & 0 deletions
49
internal/pysite/.venv/lib/python3.12/site-packages/google/protobuf/pyext/cpp_message.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
# Protocol Buffers - Google's data interchange format | ||
# Copyright 2008 Google Inc. All rights reserved. | ||
# | ||
# Use of this source code is governed by a BSD-style | ||
# license that can be found in the LICENSE file or at | ||
# https://developers.google.com/open-source/licenses/bsd | ||
|
||
"""Protocol message implementation hooks for C++ implementation. | ||
Contains helper functions used to create protocol message classes from | ||
Descriptor objects at runtime backed by the protocol buffer C++ API. | ||
""" | ||
|
||
__author__ = 'tibell@google.com (Johan Tibell)' | ||
|
||
from google.protobuf.internal import api_implementation | ||
|
||
|
||
# pylint: disable=protected-access | ||
_message = api_implementation._c_module | ||
# TODO: Remove this import after fix api_implementation | ||
if _message is None: | ||
from google.protobuf.pyext import _message | ||
|
||
|
||
class GeneratedProtocolMessageType(_message.MessageMeta):
  """Metaclass for protocol message classes created at runtime from Descriptors.

  The protocol compiler currently uses this metaclass to create protocol
  message classes at runtime. Clients can also manually create their own
  classes at runtime, as in this example:

    mydescriptor = Descriptor(.....)
    factory = symbol_database.Default()
    factory.pool.AddDescriptor(mydescriptor)
    MyProtoClass = factory.GetPrototype(mydescriptor)
    myproto_instance = MyProtoClass()
    myproto_instance.foo_field = 23
    ...

  The above example will not work for nested types. If you wish to include
  them, use reflection.MakeClass() instead of manually instantiating the
  class, in order to create the appropriate class structure.
  """

  # Must be consistent with the protocol-compiler code in
  # proto2/compiler/internal/generator.*.
  _DESCRIPTOR_KEY = 'DESCRIPTOR'
Oops, something went wrong.