Reset dev

This commit is contained in: parent 59f0a6bdab, commit f8c4accd54

60  .github/workflows/build.yml  vendored  Normal file
@@ -0,0 +1,60 @@
name: Build and Upload Artifacts

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  build:
    strategy:
      matrix:
        os: [macos-latest, ubuntu-latest, windows-latest]

    runs-on: ${{ matrix.os }}

    steps:
      - name: Check-out repository
        uses: actions/checkout@v4
        with:
          ref: main

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'
          cache: 'pip'
          cache-dependency-path: |
            **/pyproject*.toml

      - name: Install Dependencies
        run: |
          python -m pip install poetry==1.8.5
          poetry config virtualenvs.in-project true
          poetry lock --no-update
          poetry install

      - name: Build Executable with Nuitka
        uses: Nuitka/Nuitka-Action@main
        with:
          nuitka-version: main
          script-name: vinetrimmer1.py
          mode: onefile
          windows-console-mode: force
          include-data-dir: ./vinetrimmer/=vinetrimmer/
          follow-imports: true

      - name: Upload Artifacts
        uses: actions/upload-artifact@v4
        with:
          name: ${{ runner.os }} Build
          path: |
            build/*.exe
            build/*.bin
            build/*.app/**/*
          include-hidden-files: true
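Note: the Nuitka-Action step above roughly mirrors the local build commands collected in binary.txt further down in this commit. A minimal local equivalent (a sketch only, assuming vinetrimmer1.py as the entry point and the same data directory) would be:

    python -m nuitka --onefile --follow-imports --windows-console-mode=force --include-data-dir=./vinetrimmer/=vinetrimmer/ --output-dir=build vinetrimmer1.py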
197  .gitignore  vendored  Normal file
@@ -0,0 +1,197 @@
/misc/
/Temp/
/Downloads/
/vinetrimmer/Cache/
/vinetrimmer/Cookies/
/vinetrimmer/Logs/

# Created by https://www.toptal.com/developers/gitignore/api/python
# Edit at https://www.toptal.com/developers/gitignore?templates=python

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
Tests/
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml

# ruff
.ruff_cache/

# LSP config files
pyrightconfig.json

# End of https://www.toptal.com/developers/gitignore/api/python

devices/

scalable/43.xml
scalable/40.xml
pdm.lock
.pdm-python
/.idea/.gitignore
/license.xml
/.idea/misc.xml
/.idea/modules.xml
/.idea/inspectionProfiles/profiles_settings.xml
/.idea/pyplayready.iml
/.idea/vcs.xml
17  .idea/PlayReady-Amazon-Tool-main.iml  generated  Normal file
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$">
      <sourceFolder url="file://$MODULE_DIR$/scripts/protobuf3" isTestSource="false" />
      <sourceFolder url="file://$MODULE_DIR$/scripts/pyplayready" isTestSource="false" />
      <sourceFolder url="file://$MODULE_DIR$/scripts/pywidevine" isTestSource="false" />
      <excludeFolder url="file://$MODULE_DIR$/.venv" />
    </content>
    <orderEntry type="jdk" jdkName="Python 3.10" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
  <component name="PyDocumentationSettings">
    <option name="format" value="PLAIN" />
    <option name="myDocStringFormat" value="Plain" />
  </component>
</module>
14  .idea/inspectionProfiles/Project_Default.xml  generated  Normal file
@@ -0,0 +1,14 @@
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="PyCompatibilityInspection" enabled="true" level="WARNING" enabled_by_default="true">
      <option name="ourVersions">
        <value>
          <list size="1">
            <item index="0" class="java.lang.String" itemvalue="3.11" />
          </list>
        </value>
      </option>
    </inspection_tool>
  </profile>
</component>
18  .idea/material_theme_project_new.xml  generated  Normal file
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="MaterialThemeProjectNewConfig">
    <option name="metadata">
      <MTProjectMetadataState>
        <option name="migrated" value="true" />
        <option name="pristineConfig" value="false" />
        <option name="userId" value="-205910e0:18c638edec0:-8000" />
        <option name="version" value="8.13.2" />
      </MTProjectMetadataState>
    </option>
    <option name="titleBarState">
      <MTProjectTitleBarConfigState>
        <option name="overrideColor" value="false" />
      </MTProjectTitleBarConfigState>
    </option>
  </component>
</project>
26  .idea/runConfigurations/poetry.xml  generated  Normal file
@@ -0,0 +1,26 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="poetry" type="PythonConfigurationType" factoryName="Python">
    <module name="PlayReady-Amazon-Tool-main" />
    <option name="ENV_FILES" value="" />
    <option name="INTERPRETER_OPTIONS" value="" />
    <option name="PARENT_ENVS" value="true" />
    <envs>
      <env name="PYTHONUNBUFFERED" value="1" />
    </envs>
    <option name="SDK_HOME" value="" />
    <option name="SDK_NAME" value="Poetry (PlayReady-Amazon-Tool-main)" />
    <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/" />
    <option name="IS_MODULE_SDK" value="false" />
    <option name="ADD_CONTENT_ROOTS" value="true" />
    <option name="ADD_SOURCE_ROOTS" value="true" />
    <EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
    <option name="SCRIPT_NAME" value="vinetrimmer.py" />
    <option name="PARAMETERS" value="dl --no-cache --keys AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4" />
    <option name="SHOW_COMMAND_LINE" value="false" />
    <option name="EMULATE_TERMINAL" value="false" />
    <option name="MODULE_MODE" value="false" />
    <option name="REDIRECT_INPUT" value="false" />
    <option name="INPUT_FILE" value="" />
    <method v="2" />
  </configuration>
</component>
85  ATVP.txt  Normal file
@@ -0,0 +1,85 @@
https://tv.apple.com/us/show/ray-donovan/umc.cmc.hr7pnm1wbx98w1h3pg7dfbey
https://tv.apple.com/us/show/party-down/umc.cmc.6myol1kgcd19kerlujhtcr8kg
https://tv.apple.com/us/show/mythic-quest/umc.cmc.1nfdfd5zlk05fo1bwwetzldy3
https://tv.apple.com/us/show/the-completely-made-up-adventures-of-dick-turpin/umc.cmc.37r7vskzmm8hk2pfbzaxlcwzg
https://tv.apple.com/us/show/the-office-superfan-episodes/umc.cmc.3r3om9j6edlrnznl5pfassikv
https://tv.apple.com/us/show/trailer-park-boys-the-swearnet-show/umc.cmc.71tbyxchxiwotaysuuztm8p54
https://tv.apple.com/us/show/fridays/umc.cmc.ve44y99fmo41lok4mx7azvfi
https://tv.apple.com/us/show/utopia/umc.cmc.4uzbqvarwjrbkqz92796oelqj

https://tv.apple.com/us/movie/oceans-eleven/umc.cmc.4mt9j4jqou4mlup1pc9riyo63
https://tv.apple.com/us/movie/bullet-train/umc.cmc.5erhpztw3spfkfi0daabkmaq0
https://tv.apple.com/us/movie/burn-after-reading/umc.cmc.7jdvh85z66fxnfoxg7cpxjc7w
https://tv.apple.com/us/movie/snatch/umc.cmc.52ds3rgwysa2qq3f1ve5vsed5
https://tv.apple.com/us/movie/bad-education/umc.cmc.yu0edomvzp43frs48ppnbcn9
https://tv.apple.com/us/movie/american-psycho/umc.cmc.4k3idfzm0x9j5okdedo2s4l50
https://tv.apple.com/us/movie/reservoir-dogs/umc.cmc.7iuglffk5zzbfzcki63wclmdo
https://tv.apple.com/us/movie/matchstick-men/umc.cmc.lu6ko9g1d8pmr8uf3vagwbw5
https://tv.apple.com/us/movie/lock-stock-and-two-smoking-barrels/umc.cmc.6h9x0cz5gx347hod2h03icb4z
https://tv.apple.com/us/movie/american-hustle/umc.cmc.3tyc5zjamj769wybyabz90ln
https://tv.apple.com/us/movie/midnight-run/umc.cmc.1f02nu6ah611n1tsm7vrzmdnh
https://tv.apple.com/us/show/devs/umc.cmc.ggsldeofp8msuxqwfw7gevbd
https://tv.apple.com/us/show/legion/umc.cmc.1qs2k3j6y79dkp2muruy3tfgy
https://tv.apple.com/us/show/a-murder-at-the-end-of-the-world/umc.cmc.1lsxve21epalnb4me2cad6xel
https://tv.apple.com/us/movie/gattaca/umc.cmc.5ezqaeo0qqkflzp5ofr6s33ny
https://tv.apple.com/us/show/trying/umc.cmc.6muy4la7lj1omu5nci4bt2m66
https://tv.apple.com/us/show/this-way-up/umc.cmc.56948mmn5dgege1ql6ijmipu2
https://tv.apple.com/us/show/gavin-and-stacey/umc.cmc.238a52ehihka52ra3numzbsjs
https://tv.apple.com/us/show/breeders/umc.cmc.3en8wopxszlklshswogli6krr
https://tv.apple.com/us/movie/about-a-boy/umc.cmc.3gem0qakjpyuxutj6sobh3z39
https://tv.apple.com/us/movie/the-skeleton-twins/umc.cmc.2g8xdqv37e0sirwudfi41u7xa
https://tv.apple.com/us/movie/instant-family/umc.cmc.5ug70ylvyv69d6hpxevfh44fi
https://tv.apple.com/us/movie/about-time/umc.cmc.2vfd635fg0hxxlsqcetlzw8mp
https://tv.apple.com/us/show/youre-the-worst/umc.cmc.1tm22s9y05150wiwsk5e7mdqw
https://tv.apple.com/us/show/rain-dogs/umc.cmc.php72zujxqf4logv2q880aps
https://tv.apple.com/us/show/its-always-sunny-in-philadelphia/umc.cmc.ukor9ll2s04re94sh7689z5y
https://tv.apple.com/us/show/the-it-crowd/umc.cmc.14z35ywo1dbp5e81sh0yj6520
https://tv.apple.com/us/show/silicon-valley/umc.cmc.4v7y09m6sa22lpe3bqomh27a
https://tv.apple.com/us/show/dead-pixels/umc.cmc.1lwecm1x1e2c8ufo0hrkggq8q
https://tv.apple.com/us/show/superstore/umc.cmc.3kea4yvxqzaonxp9g7iigp1d5
https://tv.apple.com/us/show/what-we-do-in-the-shadows/umc.cmc.2bss05wnkfezkywhivjc7ikml
https://tv.apple.com/us/show/arrested-development/umc.cmc.2o9q1cdgu5880nfjtystky5pz
https://tv.apple.com/us/show/community/umc.cmc.4nernxq22u93vwrjpt6kfk3m5
https://tv.apple.com/us/show/parks-and-recreation/umc.cmc.4rhzbt3nmsycvgt4nilhbuyae
https://tv.apple.com/us/show/betas/umc.cmc.4u7ljqjpblmzxgnbhwnzh1a0r
https://tv.apple.com/us/show/the-league/umc.cmc.145fot332z5stngwfykfmx9st
https://tv.apple.com/us/show/difficult-people/umc.cmc.1xq8e6mjorh8a4fs5w2fi3o87
https://tv.apple.com/us/show/year-of-the-rabbit/umc.cmc.fnlixoorkhopjo3vy83c8wkk
https://tv.apple.com/us/show/bored-to-death/umc.cmc.rs4hlv86qqcxde2he91kk0wm
https://tv.apple.com/us/show/blackadder/umc.cmc.3qs9hyo5c66je1v5m2bdjcy09
https://tv.apple.com/us/movie/tom-jones/umc.cmc.34cb2q35xhn74pfkyho9kcbej
https://tv.apple.com/us/show/plebs/umc.cmc.9fff97cmmp6s6pv0bmrw83fe
https://tv.apple.com/us/show/absolutely-fabulous/umc.cmc.1h4vlcvvmw6tlhld1ylxonj6
https://tv.apple.com/us/movie/your-highness/umc.cmc.3bqdgohnxq3770rrlaur2zk40
https://tv.apple.com/us/show/upstart-crow/umc.cmc.3d1f8o7akwzpzubsb3mgc84i5
https://tv.apple.com/us/show/peep-show/umc.cmc.448lyp9cd81fpg1m2blz6hwau
https://tv.apple.com/us/show/drunk-history-uk/umc.cmc.36gdb51ghpvc33vfo8xbypg7r
https://tv.apple.com/us/show/wellington-paranormal/umc.cmc.233r2ier7l3vwggztv22z48u1
https://tv.apple.com/us/show/the-office/umc.cmc.6ll0rkl6kvx4fv6tgzmnubxe6
https://tv.apple.com/us/show/the-larry-sanders-show/umc.cmc.3w4wp4iasdpolyr0ph98cyiav
https://tv.apple.com/us/show/portlandia/umc.cmc.k20h4a8hmspbb1m0z3362sdi
https://tv.apple.com/us/show/the-state/umc.cmc.5af6lx6evkseyhotjzhr16oot
https://tv.apple.com/us/show/drunk-history/umc.cmc.2fai5tmqz2z6g9iy8er8ft11m
https://tv.apple.com/us/show/upright-citizens-brigade/umc.cmc.638n6gvt13rg3w8g24h1chmdr
https://tv.apple.com/us/show/kroll-show/umc.cmc.6wjdipahuh4bbuqn2lfrvkihd
https://tv.apple.com/us/show/comedy-bang-bang/umc.cmc.4gsknoov72z0j00yw5suel6jp
https://tv.apple.com/us/show/robot-chicken/umc.cmc.5vh2acwvaldmwdqj1qibtl02u
https://tv.apple.com/us/show/key--peele/umc.cmc.3zn0d5n33pl7buheotza1oyfs
https://tv.apple.com/us/show/umc.cmc.3p5hey1eeuk85gse9d27zaa5q
https://tv.apple.com/us/show/red-dwarf/umc.cmc.rdyyed48i59bca0cnr5rov5s
https://tv.apple.com/us/movie/the-trip/umc.cmc.dkvyuytopxriedof0rl378mk
https://tv.apple.com/us/show/this-fool/umc.cmc.2inku964u5u7ua55gw7k84jrz
https://tv.apple.com/us/show/sealab-2021/umc.cmc.58yrhf06pngkw7lnv9g1awh6o
https://tv.apple.com/us/movie/in-bruges/umc.cmc.3hfzzmfjj39s02vwynx3b91ih
https://tv.apple.com/us/show/get-shorty/umc.cmc.7gvn6fekgfpq5fc72pgi1c47o
https://tv.apple.com/us/season/season-43/umc.cmc.3ays0nyep4svqlqku2pjelspx?showId=umc.cmc.3p5hey1eeuk85gse9d27zaa5q#see-all/more-seasons-in-series | Saturday Night Live - Apple TV
https://tv.apple.com/us/show/the-larry-sanders-show/umc.cmc.3w4wp4iasdpolyr0ph98cyiav | The Larry Sanders Show - Apple TV
https://tv.apple.com/us/show/portlandia/umc.cmc.k20h4a8hmspbb1m0z3362sdi | Portlandia - Apple TV
https://tv.apple.com/us/show/kroll-show/umc.cmc.6wjdipahuh4bbuqn2lfrvkihd | Kroll Show - Apple TV
https://tv.apple.com/us/show/comedy-bang-bang/umc.cmc.4gsknoov72z0j00yw5suel6jp | Comedy Bang! Bang! - Apple TV
https://tv.apple.com/us/show/key--peele/umc.cmc.3zn0d5n33pl7buheotza1oyfs | Key & Peele - Apple TV
https://tv.apple.com/us/show/robot-chicken/umc.cmc.5vh2acwvaldmwdqj1qibtl02u | Robot Chicken - Apple TV
https://tv.apple.com/us/show/the-state/umc.cmc.5af6lx6evkseyhotjzhr16oot | The State - Apple TV
https://tv.apple.com/us/show/upright-citizens-brigade/umc.cmc.638n6gvt13rg3w8g24h1chmdr | Upright Citizens Brigade - Apple TV
https://tv.apple.com/us/show/fridays/umc.cmc.ve44y99fmo41lok4mx7azvfi | Fridays - Apple TV
https://tv.apple.com/us/show/drunk-history/umc.cmc.2fai5tmqz2z6g9iy8er8ft11m | Drunk History - Apple TV
149  How.to.use.txt  Normal file
@@ -0,0 +1,149 @@
1. Install Python 3.12.xx, 3.11.xx, 3.10.xx, 3.9.xx or 3.8.xx; be sure to add Python to PATH while installing it.

2. Install the Microsoft Visual C++ Redistributable:

https://aka.ms/vs/17/release/vc_redist.x64.exe

3. Run install.bat

4. For Netflix, go to the vinetrimmer folder and add your Netflix email and password in vinetrimmer\vinetrimmer.yml (open the file with Notepad++). You do not need to add credentials for Amazon, Peacock, GooglePlay and Hulu, just the cookies file; the cookies file name will always be default.txt.

5. Install this Firefox add-on:

https://addons.mozilla.org/en-US/firefox/addon/cookies-txt-one-click/

For Chrome, install this extension:

https://chrome.google.com/webstore/detail/open-cookiestxt/gdocmgbfkjnnpapoeobnolbbkoibbcif

Then go to netflix.com and download the cookies, rename the file to default.txt, and put it in

vinetrimmer\Cookies\Netflix

For Amazon, go to amazon.com and download the cookies, rename the file to default.txt, and put it in

vinetrimmer\Cookies\Amazon

For Peacock, go to peacocktv.com and download the cookies, rename the file to default.txt, and put it in

vinetrimmer\Cookies\Peacock

For Hulu, go to https://www.hulu.com and download the cookies, rename the file to default.txt, and put it in

vinetrimmer\Cookies\Hulu
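Taken together, the cookies layout from this step ends up looking like this (paths repeated from above):

    vinetrimmer\Cookies\Netflix\default.txt
    vinetrimmer\Cookies\Amazon\default.txt
    vinetrimmer\Cookies\Peacock\default.txt
    vinetrimmer\Cookies\Hulu\default.txt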
6. For Netflix you will need to add the esn in Netflix.yml, which is in the folder vinetrimmer\config\services;
use the examples from the Netflix.yml file in the example_configs folder to understand how to add the correct esn.
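A minimal sketch of what that entry in vinetrimmer\config\services\Netflix.yml might look like (the key name and value shape below are assumptions; the value is a placeholder, not a working esn. Copy the exact format from the example_configs file):

    esn: NFANDROID1-PRV-XXXXXXXXXXXXXXXX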
7. For downloading the video of an episode from Netflix use (to download in HIGH profile use -p HPL, and to download in MAIN profile use -p MPL)

poetry run vt dl -q 540 -al en -sl en -w S01E1 Netflix -p HPL 80189685

The script does not support decrypting videos with MAIN or HIGH profiles in 1080p or 720p using cdm l3 (only MAIN profile in 480p and HIGH profile in 540p will get decrypted); you will need cdm l1 for MAIN or HIGH in 1080p and 720p.

If you want to download the video for specific seasons or all the seasons use

poetry run vt dl -q 540 -al en -sl en -w S01 Netflix -p HPL 80189685

poetry run vt dl -q 540 -al en -sl en -w S01,S03 Netflix -p HPL 80189685

poetry run vt dl -q 540 -al en -sl en -w S01-S05 Netflix -p HPL 80189685

To download the video of a movie use

poetry run vt dl -q 540 -al en -sl en Netflix -p HPL 81252357

To download multiple audio languages use

poetry run vt dl -q 540 -al en,es,fr -sl en Netflix -p HPL 81252357

To download all the audio languages use

poetry run vt dl -q 540 -al all -sl en Netflix -p HPL 81252357

If you do not choose a language for the audio, the script will download the original audio language; if you do not choose a language for the subtitles, the script will download all the available subtitles.

8. To download video from amazon.com you will need to get the asin for the videos; use this userscript to get it:

https://greasyfork.org/en/scripts/381997-amazon-video-asin-display

For downloading the video of an episode from amazon.com use

poetry run vt dl -al en -sl en -w S06E1 Amazon -vq SD B09LGW8D3Q

The script supports only downloading the videos in SD with cdm l3.

If you want to download the video for a specific season use

poetry run vt dl -al en -sl en -w S06 Amazon -vq SD B09LGW8D3Q

9. For downloading the video of an episode from Peacock use

poetry run vt dl -q 1080 -al en -w S01E1 Peacock /tv/the-office/4902514835143843112

For downloading the video of a movie from Peacock use

poetry run vt dl Peacock /movies/action-and-adventure/4got10/3ae2c66c-2b1a-3a34-a84f-930d60612b95

10. For downloading the video of an episode from DisneyPlus use (cdm l3 is 720p only)

poetry run vt dl -q 720 -w S01E01 DisneyPlus -s browser 57TL7zLNu2wf

11. For downloading the video of a movie from Hulu in 1080p use

poetry run vt dl Hulu -m 4800d468-b587-44de-bad8-2646588bfa6b

12. If you get an error message about SAPISID while trying to download from Google Play, remove the extra lines containing the word SAPISID from the cookies file; keep only the first line with the word SAPISID.

13. For downloading a movie from ParamountPlus use

poetry run vt dl https://www.paramountplus.com/movies/video/7juDj4xQPZG1xtvD0pDPRQWgv1Vj6xox/

For downloading an episode from ParamountPlus use

poetry run vt dl -w S01E1 https://www.paramountplus.com/shows/1883/

For more options use

poetry run vt dl -h

For the extra options of a specific service use

poetry run vt dl Hulu -h

(Instead of Hulu, add the service name; the extra options are added after the service name.)

To use your cdm l3: in the folder vinetrimmer\devices create a new folder for the cdm l3 and copy the files device_private_key and device_client_id_blob to it, e.g. vinetrimmer\devices\generic_4464_l3. Then create a new text file, rename it to wv.json, and in wv.json add something like

{
    "name": "generic_4464_l3",
    "description": "android generic 4464 l3",
    "security_level": 3,
    "session_id_type": "android",
    "private_key_available": "True",
    "vmp": "False",
    "send_key_control_nonce": "True"
}

Change the name and the description to match the cdm l3 you have.

For cdm l1 use

"security_level": 1,

Open the file vinetrimmer\vinetrimmer.yml with Notepad++ and add your cdm l3 name like this:

cdm:
    default: 'generic_4464_l3'
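The README notes that the cdm can also be set per service or per profile; a sketch of a per-service override (the Amazon device name below is only an example copied from binary.txt in this commit; use whatever device folder you actually created):

cdm:
    default: 'generic_4464_l3'
    Amazon: 'hisense_smarttv_he55a7000euwts_sl3000'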
212  README.md
@@ -1,2 +1,210 @@
# VT-PR
VineTrimmer with PlayReady support, WIP
# VineTrimmer-PlayReady
A tool to download and remove DRM from streaming services. A version of an old fork of [devine](https://github.com/devine-dl/devine).
Modified to remove PlayReady DRM instead of Widevine.

## Features
- Progress Bars for decryption ([mp4decrypt](https://github.com/chu23465/bentoOldFork), Shaka)
- Refresh Token fixed for Amazon service
- Reprovision .prd after a week
- ISM manifest support (Microsoft Smooth Streaming) (a few features still to be added)
- N_m3u8DL-RE downloader support

## Usage

1. Run `install.bat`

2. Activate venv using `venv.cmd`.

### Config

`vinetrimmer.yml` located within the `/vinetrimmer/` folder.

`decryptor:` either `mp4decrypt` or `packager`

(shaka-packager fails to decrypt files downloaded from MSS manifests)

`tag:` tag for your release group
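A minimal sketch of those two keys together in `vinetrimmer.yml` (both values are illustrative, not documented defaults):

```
decryptor: mp4decrypt
tag: GROUP
```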
CDM can be configured per service or per profile.

```
cdm:
    default: {text}
    Amazon: {text}
```

All other options can be left at their defaults, unless you know what you are doing.

### General Options

Usage: vt.cmd [OPTIONS] COMMAND [ARGS]...

Options:
| Command line argument | Description | Default Value |
|----------------------------|-----------------------------------------------------------------------------------------------|-----------------------------------|
| -d, --debug | Flag to enable debug logging | False |
| -p, --profile | Profile to use when multiple profiles are defined for a service. | "default" |
| -q, --quality | Download Resolution | 1080 |
| -v, --vcodec | Video Codec | H264 |
| -a, --acodec | Audio Codec | None |
| -vb, --vbitrate | Video Bitrate | Max |
| -ab, --abitrate | Audio Bitrate | Max |
| -aa, --atmos | Prefer Atmos Audio | False |
| -r, --range | Video Color Range `HDR`, `HDR10`, `DV`, `SDR` | SDR |
| -w, --wanted | Wanted episodes, e.g. `S01-S05,S07`, `S01E01-S02E03`, `S02-S02E03` | Defaults to all |
| -al, --alang | Language wanted for audio. | Defaults to original language |
| -sl, --slang | Language wanted for subtitles. | Defaults to original language |
| --proxy | Proxy URI to use. If a 2-letter country is provided, it will try to get a proxy from the config. | None |
| -A, --audio-only | Only download audio tracks. | False |
| -S, --subs-only | Only download subtitle tracks. | False |
| -C, --chapters-only | Only download chapters. | False |
| -ns, --no-subs | Do not download subtitle tracks. | False |
| -na, --no-audio | Do not download audio tracks. | False |
| -nv, --no-video | Do not download video tracks. | False |
| -nc, --no-chapters | Do not download chapter tracks. | False |
| -ad, --audio-description | Download audio description tracks. | False |
| --list | Skip downloading and list available tracks and what tracks would have been downloaded. | False |
| --selected | List selected tracks and what tracks are downloaded. | False |
| --cdm | Override the CDM that will be used for decryption. | None |
| --keys | Skip downloading, retrieve the decryption keys (via CDM or Key Vaults) and print them. | False |
| --cache | Disable the use of the CDM and only retrieve decryption keys from Key Vaults. If a needed key is unable to be retrieved from any Key Vaults, the title is skipped. | False |
| --no-cache | Disable the use of Key Vaults and only retrieve decryption keys from the CDM. | False |
| --no-proxy | Force disable all proxy use. | False |
| -nm, --no-mux | Do not mux the downloaded and decrypted tracks. | False |
| --mux | Force muxing when using --audio-only/--subs-only/--chapters-only. | False |
| -?, -h, --help | Show this message and exit. | |

COMMAND :-

| Alias | Command | Service Link |
|--------|---------------|--------------------------------------------|
| AMZN | Amazon | https://amazon.com, https://primevideo.com |
| ATVP | AppleTVPlus | https://tv.apple.com |
| MAX | Max | https://max.com |
| NF | Netflix | https://netflix.com |

### Amazon Specific Options

Usage: vt.cmd AMZN [OPTIONS] [TITLE]

Service code for Amazon VOD (https://amazon.com) and Amazon Prime Video (https://primevideo.com).

Authorization: Cookies

Security:
```
UHD@L1/SL3000
FHD@L3(ChromeCDM)/SL2000
SD@L3

Certain SL2000 can do UHD
```
Amazon maintains its own license server, like Netflix; be cautious.

Region is chosen automatically based on the domain extension found in cookies.
Prime Video specific code will be run if the ASIN is detected to be a Prime Video variant.
Use the 'Amazon Video ASIN Display' Tampermonkey userscript to get the ASIN:
https://greasyfork.org/en/scripts/381997-amazon-video-asin-display

vt dl --list -z uk -q 1080 Amazon B09SLGYLK8

The flags below are passed after the `AMZN` or `Amazon` keyword in the command.

| Command Line Switch | Description |
|-------------------------------------|-----------------------------------------------------------------------------------------------------|
| -b, --bitrate | Video Bitrate Mode to download in. CVBR=Constrained Variable Bitrate, CBR=Constant Bitrate. (CVBR or CBR or CVBR+CBR) |
| -c, --cdn | CDN to download from, defaults to the CDN with the highest weight set by Amazon. |
| -vq, --vquality | Manifest quality to request. (SD or HD or UHD) |
| -s, --single | Force single episode/season instead of getting series ASIN. |
| -am, --amanifest | Manifest to use for audio. Defaults to H265 if the video manifest is missing 640k audio. (CVBR or CBR or H265) |
| -aq, --aquality | Manifest quality to request for audio. Defaults to the same as --quality. (SD or HD or UHD) |
| -ism, --ism | Set manifest override to SmoothStreaming. Defaults to DASH without this flag. |
| -?, -h, --help | Show this message and exit. |

To get UHD/4K with Amazon, navigate to:

```
https://www.primevideo.com/region/eu/ontv/code?ref_=atv_auth_red_aft
```

Login and get to the code pair page. Extract cookies from that page using [Open Cookies.txt](https://chromewebstore.google.com/detail/open-cookiestxt/gdocmgbfkjnnpapoeobnolbbkoibbcif).

Save it to the path `vinetrimmer/Cookies/Amazon/default.txt`.

When caching cookies, use a profile without a PIN; otherwise it causes errors.

### Peacock

- PCOK bans leaked certs quickly (for 4K), be cautious.

### Example Command

Amazon Example:

```bash
poetry run vt dl -al en -sl en --selected -q 2160 -r HDR -w S01E18-S01E25 AMZN -b CBR --ism 0IQZZIJ6W6TT2CXPT6ZOZYX396
```

The above command:
- gets English subtitles + audio,
- selects the HDR + 4K track,
- gets episodes from S01E18 to S01E25 from Amazon
- with CBR bitrate,
- tries to force ISM
- and the title-ID is 0IQZZIJ6W6TT2CXPT6ZOZYX396

AppleTV Example:

```bash
poetry run vt dl -al en -sl en --list -q 720 --proxy http://192.168.0.99:9766 -w S01E01 ATVP umc.cmc.1nfdfd5zlk05fo1bwwetzldy3
```

The above command:
- gets English subtitles + audio,
- lists all possible qualities,
- selects the 720p video track,
- uses the proxy for licensing,
- gets the first episode of the first season (i.e. S01E01)
- of the title umc.cmc.1nfdfd5zlk05fo1bwwetzldy3

## Proxy
I recommend [Windscribe](https://windscribe.com/). You can sign up and get 10 GB of traffic credit every month for free. We use the VPN for everything except downloading video/audio.
Tested so far on Amazon, AppleTVPlus, Max.

### Steps:
1. For each service, within the get_tracks() function we do the following.
```python
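# each track returned by get_tracks() is flagged, so the CLI --proxy value is never forwarded to the downloader for it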
for track in tracks:
    track.needs_proxy = False
```

This flag signals that the track does not need a proxy, and a proxy will not be passed to the downloader even if one is given in the CLI options.

2. Download the Windscribe app and install it.

3. Go to `Options` -> `Connection` -> `Split Tunneling`. Enable it.

Set `Mode` as `Inclusive`.

4. Go to `Options` -> `Connection` -> `Proxy Gateway`. Enable it. Select `Proxy Type` as `HTTP`.

Copy the `IP` field (will look something like `192.168.0.141:9766`).

Pass the copied value to Vinetrimmer with the proxy flag, like below.

```bash
...(other flags)... --proxy http://192.168.0.141:9766 .......
```

## Other
- For `--keys` to work with ATVP you also need to pass the `--no-subs` flag
- Nuitka compile is an option to run on various Linux distributions.
- Errors arise when running VT within Docker or Conda-like Python distributions. Make sure to use a proper python3.
- To use the programs in the `scripts` folder, first activate the venv, then:
```bash
poetry run python scripts/ParseKeybox.py
```
BIN  assets/icon.ico  Normal file
Binary file not shown. After Width: | Height: | Size: 117 KiB

1  binaries/N_m3u8DL-RE-samplefile.txt  Normal file
@@ -0,0 +1 @@
N_m3u8DL-RE.exe http://avodsls3ww-s.akamaihd.net/ondemand/iad_2/c5a2/7992/6e31/4ed5-8011-893c8d4e98a6/0bc9f599-85c7-450d-b829-b69fb27d4bd6.ism/manifest --thread-count 96 --log-level ERROR --write-meta-json False --http-request-timeout 8

BIN  binaries/N_m3u8DL-RE.exe  Normal file
Binary file not shown.

BIN  binaries/XstreamDL-CLI.zip  Normal file
Binary file not shown.

BIN  binaries/aria2c.exe  Normal file
Binary file not shown.

BIN  binaries/avcodec-57.dll  Normal file
Binary file not shown.

BIN  binaries/avformat-57.dll  Normal file
Binary file not shown.

BIN  binaries/avutil-55.dll  Normal file
Binary file not shown.

BIN  binaries/ccextractor.exe  Normal file
Binary file not shown.

BIN  binaries/ccxgui.exe  Normal file
Binary file not shown.

BIN  binaries/curl.exe  Normal file
Binary file not shown.

1  binaries/data/flutter_assets/AssetManifest.json  Normal file
@@ -0,0 +1 @@
{"assets/ccextractor":["assets/ccextractor"],"assets/ccx.svg":["assets/ccx.svg"],"packages/cupertino_icons/assets/CupertinoIcons.ttf":["packages/cupertino_icons/assets/CupertinoIcons.ttf"]}

1  binaries/data/flutter_assets/FontManifest.json  Normal file
@@ -0,0 +1 @@
[{"family":"MaterialIcons","fonts":[{"asset":"fonts/MaterialIcons-Regular.otf"}]},{"family":"packages/cupertino_icons/CupertinoIcons","fonts":[{"asset":"packages/cupertino_icons/assets/CupertinoIcons.ttf"}]}]

BIN  binaries/data/flutter_assets/NOTICES.Z  Normal file
Binary file not shown.

BIN  binaries/data/flutter_assets/assets/ccextractor  Normal file
Binary file not shown.
167  binaries/data/flutter_assets/assets/ccx.svg  Normal file
@@ -0,0 +1,167 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="65px" height="65px" viewBox="0 0 200 200" enable-background="new 0 0 200 200" xml:space="preserve"> <image id="image0" width="200" height="200" x="0" y="0"
href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAMgAAADICAYAAACtWK6e..." />
[roughly 160 lines of base64-encoded 200x200 PNG image data omitted]
</svg>
After Width: | Height: | Size: 13 KiB
BIN
binaries/data/flutter_assets/fonts/MaterialIcons-Regular.otf
Normal file
BIN
binaries/data/flutter_assets/fonts/MaterialIcons-Regular.otf
Normal file
Binary file not shown.
Binary file not shown.
BIN
binaries/data/icudtl.dat
Normal file
BIN
binaries/data/icudtl.dat
Normal file
Binary file not shown.
BIN
binaries/ffmpeg.exe
Normal file
BIN
binaries/ffmpeg.exe
Normal file
Binary file not shown.
BIN
binaries/ffplay.exe
Normal file
BIN
binaries/ffplay.exe
Normal file
Binary file not shown.
BIN
binaries/ffprobe.exe
Normal file
BIN
binaries/ffprobe.exe
Normal file
Binary file not shown.
BIN
binaries/file_selector_windows_plugin.dll
Normal file
BIN
binaries/file_selector_windows_plugin.dll
Normal file
Binary file not shown.
BIN
binaries/flutter_windows.dll
Normal file
BIN
binaries/flutter_windows.dll
Normal file
Binary file not shown.
BIN
binaries/mkvmerge.exe
Normal file
BIN
binaries/mkvmerge.exe
Normal file
Binary file not shown.
BIN
binaries/mp4box.exe
Normal file
BIN
binaries/mp4box.exe
Normal file
Binary file not shown.
BIN
binaries/mp4decrypt.exe
Normal file
BIN
binaries/mp4decrypt.exe
Normal file
Binary file not shown.
BIN
binaries/mp4decrypt_1.exe
Normal file
BIN
binaries/mp4decrypt_1.exe
Normal file
Binary file not shown.
BIN
binaries/mp4dump.exe
Normal file
BIN
binaries/mp4dump.exe
Normal file
Binary file not shown.
BIN
binaries/msvcp140.dll
Normal file
BIN
binaries/msvcp140.dll
Normal file
Binary file not shown.
1
binaries/mux_atmos.txt
Normal file
1
binaries/mux_atmos.txt
Normal file
@ -0,0 +1 @@
|
||||
ffmpeg -i correct_file.eac3 -map 0 -c:a copy correct_file.mp4
|
BIN
binaries/packager-old.exe
Normal file
BIN
binaries/packager-old.exe
Normal file
Binary file not shown.
BIN
binaries/packager.exe
Normal file
BIN
binaries/packager.exe
Normal file
Binary file not shown.
BIN
binaries/swresample-2.dll
Normal file
BIN
binaries/swresample-2.dll
Normal file
Binary file not shown.
BIN
binaries/swscale-4.dll
Normal file
BIN
binaries/swscale-4.dll
Normal file
Binary file not shown.
BIN
binaries/url_launcher_windows_plugin.dll
Normal file
BIN
binaries/url_launcher_windows_plugin.dll
Normal file
Binary file not shown.
BIN
binaries/vcruntime140.dll
Normal file
BIN
binaries/vcruntime140.dll
Normal file
Binary file not shown.
BIN
binaries/vcruntime140_1.dll
Normal file
BIN
binaries/vcruntime140_1.dll
Normal file
Binary file not shown.
BIN
binaries/window_size_plugin.dll
Normal file
BIN
binaries/window_size_plugin.dll
Normal file
Binary file not shown.
23  binary.txt  Normal file
@@ -0,0 +1,23 @@
dl -al en -sl en --keys -q 2160 --cdm hisense_smarttv_he55a7000euwts_sl3000 -r HDR --selected -w S05E08-S05E24 AMZN -b CBR -vq UHD 0IQZZIJ6W6TT2CXPT6ZOZYX396

--include-data-files=/path/to/scan=folder_name=**/*.txt
--include-data-files=/path/to/file/*.txt=folder_name/some.txt
--include-data-files="./vinetrimmer/services/*.py"=vinetrimmer/services/

--onefile --> if using this flag, figure out how to set the extraction directories to NOT the TEMP folder
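(Nuitka's --onefile-tempdir-spec option is likely the relevant knob for this, e.g. --onefile-tempdir-spec="{CACHE_DIR}/{PRODUCT}/{VERSION}"; an assumption, not something tested in this commit.)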
|
||||
|
||||
python -m nuitka --onefile --assume-yes-for-downloads --windows-console-mode=disable --show-progress --standalone --output-dir=dist --static-libpython=no vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer --include-data-dir=./binaries/=binaries --include-data-dir=./scripts/=scripts
|
||||
python -m nuitka --onefile --standalone --output-dir=dist vinetrimmer1.py --include-data-dir=./vinetrimmer/services/=vinetrimmer/services --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts
|
||||
python -m nuitka --onefile --standalone --windows-console-mode=attach --output-dir=dist vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-dir=./vinetrimmer/services/*.py=vinetrimmer/services/=**/*.py --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/
|
||||
python -m nuitka --onefile --standalone --windows-console-mode=attach --output-dir=dist vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-files=./vinetrimmer/services/*.py=vinetrimmer/services/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/
|
||||
python -m nuitka --mode=standalone --output-dir=dist --windows-console-mode=force vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-files="./vinetrimmer/services/*.py"=vinetrimmer/services/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/
|
||||
python -m nuitka --onefile --follow-imports --output-dir=dist --windows-console-mode=force vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-files="./vinetrimmer/services/*.py"=vinetrimmer/services/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/ --include-data-files="./vinetrimmer/config/*.py"=vinetrimmer/config/
|
||||
python -m nuitka --onefile --follow-imports --output-dir=dist --standalone --clang --windows-console-mode=force --show-memory vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-files="./vinetrimmer/services/*.py"=vinetrimmer/services/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/ --include-data-files="./vinetrimmer/config/*.py"=vinetrimmer/config/
|
||||
|
||||
python -m nuitka --follow-imports --output-dir=dist --standalone --clang --windows-console-mode=force --show-memory vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-files="./vinetrimmer/services/*.py"=vinetrimmer/services/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/ --include-data-files="./vinetrimmer/config/*.py"=vinetrimmer/config/
|
||||
python -m nuitka --onefile --follow-imports --output-dir=dist --standalone --clang --windows-console-mode=force --show-memory vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/ --include-data-files="./vinetrimmer/config/*.py"=vinetrimmer/config/
|
||||
nuitka --output-dir=dist --standalone --windows-console-mode=force vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/
|
||||
|
||||
|
||||
nuitka --onefile --output-dir=dist --windows-console-mode=force vt.py --include-data-dir=./vinetrimmer/=vinetrimmer/
|
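On the --onefile note above: Nuitka documents that in onefile mode sys.argv[0] keeps the path of the launched executable while __file__ points into the temporary unpack directory. A minimal sketch for anchoring runtime directories next to the executable instead of the Temp folder (the vinetrimmer directory layout here is an assumption, not part of this commit):

    import os
    import sys

    # Under Nuitka --onefile, __file__ lives in the temp unpack dir,
    # but sys.argv[0] still points at the executable that was launched.
    EXE_DIR = os.path.dirname(os.path.abspath(sys.argv[0]))
    CONFIG_DIR = os.path.join(EXE_DIR, "vinetrimmer")  # assumed layout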
36
commands.txt
Normal file
@ -0,0 +1,36 @@
https://www.primevideo.com/region/eu/storefront

poetry run vt dl -al en -sl en -r HDR --list AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4
poetry run vt dl -al en -sl en --list AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4
poetry run vt dl -al en --selected --keys AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4
poetry run vt dl -al en --selected AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4

poetry run vt dl -q 2160 -al en -sl en --list AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4
poetry run vt dl -q 2160 -al en -sl en --keys AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4 --bitrate CVBR+CBR
poetry run vt dl -al en -sl en --selected AMZN -b CBR https://www.primevideo.com/detail/0I1GTXP9ZKTV7AAD7E1LCWJCUX/

poetry run vt dl -al en -sl en -q 2160 --keys -r HDR AMZN -b CBR 0OSAJR8S2YWRSQCYS4J8MEGEXI
poetry run vt dl -al en -sl en -q 2160 -r HDR --selected -w S05E08-S05E24 AMZN -b CBR 0IQZZIJ6W6TT2CXPT6ZOZYX396

python vinetrimmer1.py dl -al en -sl en -q 2160 -r HDR --selected -w S05E09-S05E24 AMZN -b CBR 0IQZZIJ6W6TT2CXPT6ZOZYX396
poetry run vt dl -al en -sl en --selected -q 2160 -r HDR -w S01E18-S01E25 AMZN -b CBR --ism 0IQZZIJ6W6TT2CXPT6ZOZYX396

Atmos audio download AMZN to fix --> poetry run vt dl -al en -aa -sl en --selected --debug -w S01E01 -A AMZN -b CBR --ism 0HAQAA7JM43QWX0H6GUD3IOF70

http://ABHIRCQAAAAAAAAMCX3W7WLVKL54A.s3-bom-ww.cf.smooth.row.aiv-cdn.net/e5b0/2fe1/032c/4fae-b896-aca9d8bef3d4/170b36b1-856d-4c69-bbf6-feb6c979185a.ism/manifest
poetry run vt dl -al en -sl en -r HDR -w S01E01 --list -q 2160 AMZN https://www.primevideo.com/detail/0HU52DR3U1R0FGI3KSUL00XYY7
https://www.primevideo.com/detail/0HU52DR3U1R0FGI3KSUL00XYY7/
https://ABAKS6NAAAAAAAAMBIBDKKUP3ONNU.s3-iad-2.cf.smooth.row.aiv-cdn.net/357a/1bb0/c1f3/4a6b-b709-d6f2edf5b709/15eab8ec-d8ac-4c23-96fc-f5d89f459829.ism/manifest
http://ABHIRCQAAAAAAAAMHLTVNGLHRCITQ.s3-bom-ww.cf.smooth.row.aiv-cdn.net/e7ab/7c49/9743/4e53-ab5c-6d15516ecf15/52bf7e61-51cd-4e5d-bd68-834706f17789.ism/manifest
https://www.primevideo.com/region/eu/detail/0KYRVT4JDB957NXZO72E2MIFW5/
https://m-5884s3.ll.smooth.row.aiv-cdn.net/iad_2/3572/bbdc/73b4/404d-a100-802b1d9de4c6/862e2506-c20e-4ba7-bacc-d6b4775e7b62.ism/manifest

poetry run vt dl -al en -sl en -w S01E01 Max https://play.max.com/show/c8ea8e19-cae7-4683-9b62-cdbbed744784
UHD
poetry run vt dl -al en -sl en --keys Max https://play.max.com/show/5756c2bf-36f8-4890-b1f9-ef168f1d8e9c
poetry run vt dl -al en -sl en -w S02E05-S02E10 --selected --proxy http://192.168.0.99:9766 Max
poetry run vt dl -al en -sl en --list -w S01E01 --proxy http://192.168.0.99:9766 Max

poetry run vt dl -al all --selected --proxy http://192.168.0.99:9766 --debug -w S01E01 ATVP umc.cmc.7gvn6fekgfpq5fc72pgi1c47o
poetry run vt dl -al en -sl en --selected --debug -q 720 --proxy http://192.168.0.99:9766 -w S01E01 ATVP umc.cmc.1nfdfd5zlk05fo1bwwetzldy3
poetry run vt dl -al en -sl en --selected --proxy http://192.168.0.99:9766 -w S01E01 ATVP umc.cmc.1nfdfd5zlk05fo1bwwetzldy3
54
fix.txt
Normal file
@ -0,0 +1,54 @@
D:\PlayReady-Amazon-Tool-main>poetry run vt dl -al en -sl en --selected --keys --cdm hisense_smarttv_he55a7000euwts_sl3000 AMZN -vq UHD -b CVBR+CBR https://www.primevideo.com/detail/0I1GTXP9ZKTV7AAD7E1LCWJCUX/
2025-02-07 22:26:57 [I] vt : vinetrimmer - Widevine DRM downloader and decrypter
2025-02-07 22:26:57 [I] vt : [Root Config]     : D:\PlayReady-Amazon-Tool-main\vinetrimmer\vinetrimmer.yml
2025-02-07 22:26:57 [I] vt : [Service Configs] : D:\PlayReady-Amazon-Tool-main\vinetrimmer\Services
2025-02-07 22:26:57 [I] vt : [Cookies]         : D:\PlayReady-Amazon-Tool-main\vinetrimmer\Cookies
2025-02-07 22:26:57 [I] vt : [CDM Devices]     : D:\PlayReady-Amazon-Tool-main\vinetrimmer\devices
2025-02-07 22:26:57 [I] vt : [Cache]           : D:\PlayReady-Amazon-Tool-main\vinetrimmer\Cache
2025-02-07 22:26:57 [I] vt : [Logs]            : D:\PlayReady-Amazon-Tool-main\vinetrimmer\Logs
2025-02-07 22:26:57 [I] vt : [Temp Files]      : D:\PlayReady-Amazon-Tool-main\Temp
2025-02-07 22:26:57 [I] vt : [Downloads]       : D:\PlayReady-Amazon-Tool-main\Downloads
2025-02-07 22:26:57 [I] dl : + 1 Local Vault
2025-02-07 22:26:57 [I] dl : + 0 Remote Vaults
2025-02-07 22:26:57 [I] dl : + Loaded Device: hisense_smarttv_he55a7000euwts_sl3000 (L3000)
2025-02-07 22:26:57 [I] AMZN : Getting Account Region
2025-02-07 22:26:59 [I] AMZN : + Region: us
2025-02-07 22:26:59 [I] AMZN : + Using cached device bearer
2025-02-07 22:26:59 [I] AMZN : Retrieving Titles
2025-02-07 22:27:00 [I] Titles : Title: I Was Not Ready Da
2025-02-07 22:27:00 [I] AMZN : Getting tracks for I Was Not Ready Da (2020) [amzn1.dv.gti.30baee18-aa4c-1fc2-72cc-6e11d5e627d9]
2025-02-07 22:27:01 [I] AMZN : + Detected encodingVersion=2
2025-02-07 22:27:01 [I] AMZN : + Downloading CVBR MPD
2025-02-07 22:27:02 [I] AMZN : + Detected encodingVersion=2
2025-02-07 22:27:02 [I] AMZN : + Downloading CBR MPD
Traceback (most recent call last):
  File "<string>", line 1, in <module>
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1161, in __call__
    return self.main(*args, **kwargs)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1082, in main
    rv = self.invoke(ctx)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1443, in invoke
    return ctx.invoke(self.callback, **ctx.params)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 788, in invoke
    return __callback(*args, **kwargs)
  File "D:\PlayReady-Amazon-Tool-main\vinetrimmer\vinetrimmer.py", line 72, in main
    dl()
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1161, in __call__
    return self.main(*args, **kwargs)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1082, in main
    rv = self.invoke(ctx)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1697, in invoke
    return _process_result(sub_ctx.command.invoke(sub_ctx))
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1666, in _process_result
    value = ctx.invoke(self._result_callback, value, **ctx.params)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 788, in invoke
    return __callback(*args, **kwargs)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\decorators.py", line 33, in new_func
    return f(get_current_context(), *args, **kwargs)
  File "D:\PlayReady-Amazon-Tool-main\vinetrimmer\commands\dl.py", line 309, in result
    title.tracks.add(service.get_tracks(title), warn_only=True)
  File "D:\PlayReady-Amazon-Tool-main\vinetrimmer\services\amazon.py", line 321, in get_tracks
    manifest, chosen_manifest, tracks = self.get_best_quality(title)
  File "D:\PlayReady-Amazon-Tool-main\vinetrimmer\services\amazon.py", line 1051, in get_best_quality
    best_quality = max(track_list, key=lambda x: x['max_size'])
TypeError: '>' not supported between instances of 'NoneType' and 'NoneType'
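The crash is get_best_quality() asking max() to order manifests whose 'max_size' is None. A minimal guard, assuming the track_list entries are dicts as the traceback suggests (a sketch of a fix, not the shipped one):

    # vinetrimmer/services/amazon.py, get_best_quality():
    # treat manifests with an unknown size as 0 so max() has a total order
    best_quality = max(track_list, key=lambda x: x["max_size"] or 0)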
6
install.bat
Normal file
@ -0,0 +1,6 @@
@echo off
python -m pip install poetry==1.8.5
poetry config virtualenvs.in-project true
poetry lock --no-update
poetry install
pause
2732
poetry.lock
generated
Normal file
File diff suppressed because it is too large
64
pyproject.toml
Normal file
@ -0,0 +1,64 @@
[build-system]
requires = ['poetry-core>=1.0.0']
build-backend = 'poetry.core.masonry.api'

[tool.poetry]
name = 'vinetrimmer'
version = '0.1.0'
description = 'PlayReady DRM downloader and decrypter'
authors = []

[tool.poetry.dependencies]
python = "^3.8"
appdirs = "^1.4.4"
beautifulsoup4 = "~4.8.2"
click = "^8.0.1"
cffi = "^1.16.0"
coloredlogs = "^15.0"
construct = "2.8.8"
crccheck = "^1.0"
cryptography = "^43.0.3"
ecpy = "^1.2.5"
httpx = "^0.23.0"
isodate = "^0.6.1"
jsonpickle = "^2.0.0"
langcodes = { extras = ["data"], version = "^3.1.0" }
lxml = "^4.6.3"
m3u8 = "^0.9.0"
marisa-trie = "^1.1.0"
poetry = "1.8.5"
pproxy = "^2.7.7"
protobuf3 = { path = "./scripts/protobuf3", develop = true }
pycaption = "^2.1.1"
pycryptodome = "^3.21.0"
pycryptodomex = "^3.4.3"
pyhulu = "^1.1.2"
pymediainfo = "^5.0.3"
PyMySQL = { extras = ["rsa"], version = "^1.0.2" }
pymp4 = "^1.4.0"
pyplayready = { path = "./scripts/pyplayready", develop = true }
pywidevine = { path = "./scripts/pywidevine", develop = true }
pysubs2 = "^1.6.1"
PyYAML = "^6.0.1"
requests = { extras = ["socks"], version = "2.32.3" }
tldextract = "^3.1.0"
toml = "^0.10.2"
tqdm = "^4.67.0"
Unidecode = "^1.2.0"
validators = "^0.18.2"
websocket-client = "^1.1.0"
xmltodict = "^0.14.0"
yt-dlp = "^2022.11.11"

[tool.poetry.dev-dependencies]
flake8 = "^3.8.4"
isort = "^5.9.2"
pyinstaller = "^4.4"

[tool.poetry.scripts]
vt = 'vinetrimmer.vinetrimmer:main'

[tool.isort]
line_length = 120
classes = ['CTV', 'FPS', 'IO', 'iTunes', 'MP4', 'TVNOW']
extend_skip = ['vinetrimmer/vendor']
5
requirements.txt
Normal file
@ -0,0 +1,5 @@
requests
pycryptodome
ecpy
construct
click
100
scripts/AddKeysToKeyVault.py
Normal file
@ -0,0 +1,100 @@
#!/usr/bin/env python3

import argparse
import re
import sqlite3
import sys

from vinetrimmer.utils.AtomicSQL import AtomicSQL

"""
Add keys to key vault. File should have one KID:KEY per line.
Optionally you can also put `:<title here>` at the end (after `KEY`).
"""

parser = argparse.ArgumentParser(
    "Key Vault DB batch adder/updater",
    description="Simple script to add or update key information in a vinetrimmer key vault db"
)
parser.add_argument(
    "-t", "--table",
    help="table to store keys to. (e.g. amazon, netflix, disneyplus)",
    required=True)
parser.add_argument(
    "-i", "--input",
    help="data used to parse from",
    required=True)
parser.add_argument(
    "-o", "--output",
    help="key store db that will receive keys",
    required=True)
parser.add_argument(
    "-d", "--dry-run",
    help="execute it, but never actually save/commit changes.",
    action="store_true", required=False)
args = parser.parse_args()

output_db = AtomicSQL()
output_db_id = output_db.load(sqlite3.connect(args.output))

# counters for the summary printed at the end
add_count = 0
update_count = 0
existed_count = 0

if args.input == "-":
    input_ = sys.stdin.read()
else:
    with open(args.input, encoding="utf-8") as fd:
        input_ = fd.read()

for line in input_.splitlines(keepends=False):
    match = re.search(r"^(?P<kid>[0-9a-fA-F]{32}):(?P<key>[0-9a-fA-F]{32})(:(?P<title>[\w .:-]*))?$", line)
    if not match:
        continue
    kid = match.group("kid").lower()
    key = match.group("key").lower()
    title = match.group("title") or None

    exists = output_db.safe_execute(
        output_db_id,
        lambda db, cursor: cursor.execute(
            f"SELECT title FROM `{args.table}` WHERE `kid`=:kid",
            {"kid": kid}
        )
    ).fetchone()

    if exists:
        if title and not exists[0]:
            update_count += 1
            print(f"Updating {args.table} {kid}: {title}")
            output_db.safe_execute(
                output_db_id,
                lambda db, cursor: cursor.execute(
                    # scope the update to this KID; without the WHERE clause
                    # it would overwrite the title of every row in the table
                    f"UPDATE `{args.table}` SET `title`=:title WHERE `kid`=:kid",
                    {"title": title, "kid": kid}
                )
            )
        else:
            existed_count += 1
            print(f"Key {args.table} {kid} already exists in the db with no differences, skipping...")
    else:
        add_count += 1
        print(f"Adding {args.table} {kid} ({title}): {key}")
        output_db.safe_execute(
            output_db_id,
            lambda db, cursor: cursor.execute(
                f"INSERT INTO `{args.table}` (kid, key_, title) VALUES (:kid, :key, :title)",
                {"kid": kid, "key": key, "title": title}
            )
        )

if args.dry_run:
    print("--dry-run enabled, no changes have been committed.")
else:
    output_db.commit(output_db_id)

print(
    "Done!\n"
    f"{add_count} added, {update_count} updated in some way, {existed_count} already existed (skipped)"
)
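A hypothetical run, with made-up table/file names and KID:KEY values of the shape the regex above expects:

    python scripts/AddKeysToKeyVault.py -t amazon -i keys.txt -o vault.db --dry-run
    (keys.txt holds one entry per line, e.g.
    00000000000000000000000000000000:11111111111111111111111111111111:Some Title)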
79
scripts/ClientIDGen/ClientIDGen.py
Normal file
@ -0,0 +1,79 @@
#!/usr/bin/env python3

import argparse
import base64

import yaml

from vinetrimmer.utils.widevine.protos.widevine_pb2 import ClientIdentificationRaw

parser = argparse.ArgumentParser("Widevine Client ID building tool.")
parser.add_argument("-q", "--quiet",
                    help="do not print the generated client id",
                    action="store_true")
parser.add_argument("-c", "--config",
                    help="configuration yaml file",
                    default="config.yml")
parser.add_argument("-o", "--output",
                    default="device_client_id_blob",
                    help="output filename")
args = parser.parse_args()

with open(args.config) as fd:
    config = yaml.safe_load(fd)

with open(config["token"], "rb") as fd:
    token = fd.read()

ci = ClientIdentificationRaw()
ci.Type = ClientIdentificationRaw.DEVICE_CERTIFICATE
ci.Token = token

for name, value in config["client_info"].items():
    nv = ci.ClientInfo.add()
    nv.Name = name
    if name == "device_id":
        value = base64.b64decode(value)
    nv.Value = value

capabilities = ClientIdentificationRaw.ClientCapabilities()
caps = config["capabilities"]
if "client_token" in caps:
    capabilities.ClientToken = caps["client_token"]
if "session_token" in caps:
    capabilities.SessionToken = caps["session_token"]
if "video_resolution_constraints" in caps:
    capabilities.VideoResolutionConstraints = caps["video_resolution_constraints"]
if "max_hdcp_version" in caps:
    max_hdcp_version = caps["max_hdcp_version"]
    if str(max_hdcp_version).isdigit():
        max_hdcp_version = int(max_hdcp_version)
    else:
        max_hdcp_version = ClientIdentificationRaw.ClientCapabilities.HdcpVersion.Value(max_hdcp_version)
    capabilities.MaxHdcpVersion = max_hdcp_version
if "oem_crypto_api_version" in caps:
    capabilities.OemCryptoApiVersion = int(caps["oem_crypto_api_version"])
# I have not seen any of the following in use:
if "anti_rollback_usage_table" in caps:
    capabilities.AntiRollbackUsageTable = caps["anti_rollback_usage_table"]
if "srm_version" in caps:
    capabilities.SrmVersion = int(caps["srm_version"])
if "can_update_srm" in caps:
    # field name assumed from the proto's naming convention; the original
    # assigned ClientToken here, which looks like a copy-paste slip
    capabilities.CanUpdateSRM = caps["can_update_srm"]
# is it possible to refactor this?
if "supported_certificate_key_type" in caps:
    supported_certificate_key_type = caps["supported_certificate_key_type"]
    if str(supported_certificate_key_type).isdigit():
        supported_certificate_key_type = int(supported_certificate_key_type)
    else:
        supported_certificate_key_type = ClientIdentificationRaw.ClientCapabilities.CertificateKeyType.Value(
            supported_certificate_key_type
        )
    capabilities.SupportedCertificateKeyType.append(supported_certificate_key_type)
ci._ClientCapabilities.CopyFrom(capabilities)

if not args.quiet:
    print(ci)

with open(args.output, "wb") as fd:
    fd.write(ci.SerializeToString())
20
scripts/ClientIDGen/config.example.yml
Normal file
@ -0,0 +1,20 @@
# NOTE!
# This client id gen script may use outdated ClientIdentification values.
# Just letting you know, do whatever you wish, but yeah

token: 'token.bin'

client_info:
  company_name: 'motorola'
  model_name: 'Nexus 6'
  architecture_name: 'armeabi-v7a'
  device_name: 'shamu'
  product_name: 'shamu'
  build_info: 'google/shamu/shamu:5.1.1/LMY48M/2167285:user/release-keys'
  device_id: 'TU1JX0VGRkYwRkU2NUQ5OA=='
  os_version: '5.1.12'

capabilities:
  session_token: 1
  max_hdcp_version: 'HDCP_V2_2'
  oem_crypto_api_version: 11
35
scripts/GetVikiManifestFree.py
Normal file
@ -0,0 +1,35 @@
#!/usr/bin/env python3

import re
import sys

import requests
from Cryptodome.Cipher import AES

# create a session with a user agent
http = requests.Session()
http.headers.update({
    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:68.0) Gecko/20100101 Firefox/68.0"
})
# get player fragment page
fragment = http.get(sys.argv[1].replace("/videos/", "/player5_fragment/")).text
# get encrypted manifest.xml urls for both hls and dash
# findall with a single capture group returns plain strings, so take the first match directly
encrypted_manifests = {k: bytes.fromhex(re.findall(
    r'<source\s+type="application/' + v + r'"\s+src=".+?/e-stream-url\?stream=(.+?)"',
    fragment
)[0]) for k, v in {"hls": "x-mpegURL", "dash": r"dash\+xml"}.items()}

# decrypt all manifest.xml urls in manifests
m = re.search(r"^\s*chabi:\s*'(.+?)'", fragment, re.MULTILINE)
if not m:
    raise ValueError("Unable to get key")
key = m.group(1).encode()

m = re.search(r"^\s*ecta:\s*'(.+?)'", fragment, re.MULTILINE)
if not m:
    raise ValueError("Unable to get IV")
iv = m.group(1).encode()

manifests = {k: AES.new(key, AES.MODE_CBC, iv).decrypt(v).decode("utf-8") for k, v in encrypted_manifests.items()}
# print em out
print(manifests)
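A hypothetical invocation; the script swaps /videos/ for /player5_fragment/ in whatever Viki URL it is handed (the video ID below is made up):

    python scripts/GetVikiManifestFree.py https://www.viki.com/videos/1175561v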
112
scripts/MergeKeyStores.py
Normal file
@ -0,0 +1,112 @@
#!/usr/bin/env python3

import argparse
import json
import sqlite3

from vinetrimmer.utils.AtomicSQL import AtomicSQL

parser = argparse.ArgumentParser(
    "Key Store DB merger",
    description="Simple script to merge vinetrimmer key store db's into one"
)
parser.add_argument(
    "-i", "--input",
    help="key store db that will send keys",
    required=True)
parser.add_argument(
    "-o", "--output",
    help="key store db that will receive keys",
    required=True)
args = parser.parse_args()

add_count = 0
update_count = 0
existed_count = 0

input_db = AtomicSQL()
input_db_id = input_db.load(sqlite3.connect(args.input))

output_db = AtomicSQL()
output_db_id = output_db.load(sqlite3.connect(args.output))

# get all keys from input db
input_keys = input_db.safe_execute(
    input_db_id,
    lambda db, cursor: cursor.execute("SELECT * FROM `keys`")
).fetchall()

for i, service, title, pssh_b64, pssh_sha1, content_keys in input_keys:
    exists = output_db.safe_execute(
        output_db_id,
        lambda db, cursor: cursor.execute(
            """
            SELECT "id","service","title","pssh_b64","pssh_sha1","content_keys" FROM `keys` WHERE `service`=:service AND
            (`pssh_b64`=:pssh_b64 or `pssh_sha1`=:pssh_sha1)
            """,
            {
                "service": service,
                "pssh_b64": pssh_b64,
                "pssh_sha1": pssh_sha1
            }
        )
    ).fetchone()
    if exists:
        has_differences = (
            json.loads(exists[5]) != json.loads(content_keys) or
            title != exists[2] or
            pssh_b64 != exists[3] or
            pssh_sha1 != exists[4]
        )
        if has_differences:
            update_count += 1
            content_keys = list(set(json.loads(exists[5])) | set(json.loads(content_keys)))
            print(f"Updating {title} {service} {pssh_b64}: {content_keys}")
            output_db.safe_execute(
                output_db_id,
                lambda db, cursor: cursor.execute(
                    """
                    UPDATE `keys` SET `service`=:service, `title`=:title, `pssh_b64`=:new_pssh_b64,
                    `pssh_sha1`=:new_pssh_sha1, `content_keys`=:content_keys WHERE `service`=:service AND
                    (`pssh_b64`=:pssh_b64 or `pssh_sha1`=:pssh_sha1)
                    """,
                    {
                        "service": service,
                        "title": title or exists[2],
                        "pssh_b64": pssh_b64,
                        "new_pssh_b64": pssh_b64 or exists[3],
                        "pssh_sha1": pssh_sha1,
                        "new_pssh_sha1": pssh_sha1 or exists[4],
                        "content_keys": json.dumps(content_keys, separators=(",", ":"))
                    }
                )
            )
        else:
            existed_count += 1
            print(f"Key {title} {service} {pssh_b64} already exists in the db with no differences, skipping...")
    else:
        add_count += 1
        print(f"Adding {title} {service} {pssh_b64}: {content_keys}")
        output_db.safe_execute(
            output_db_id,
            lambda db, cursor: cursor.execute(
                """
                INSERT INTO `keys` (service, title, pssh_b64, pssh_sha1, content_keys)
                VALUES (:service, :title, :pssh_b64, :pssh_sha1, :content_keys)
                """,
                {
                    "service": service,
                    "title": title,
                    "pssh_b64": pssh_b64,
                    "pssh_sha1": pssh_sha1,
                    "content_keys": json.dumps(content_keys, separators=(",", ":"))
                }
            )
        )

output_db.commit(output_db_id)

print(
    "Done!\n"
    f"{add_count} added, {update_count} updated in some way, {existed_count} already existed (no difference)"
)
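A hypothetical merge of two vaults (file names made up); keys flow from -i into -o:

    python scripts/MergeKeyStores.py -i laptop_vault.db -o main_vault.db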
29
scripts/ParseClientID.py
Normal file
@ -0,0 +1,29 @@
#!/usr/bin/env python3

import argparse

from vinetrimmer.utils.widevine.device import LocalDevice
from vinetrimmer.utils.widevine.protos.widevine_pb2 import ClientIdentification

parser = argparse.ArgumentParser(
    "Client identification parser",
    description="Simple script to read a client id blob to see information about it"
)
parser.add_argument(
    "input",
    help="client id blob bin path or path to a wvd file",
)
args = parser.parse_args()

client_id = ClientIdentification()
is_wvd = args.input.lower().endswith(".wvd")

with open(args.input, "rb") as fd:
    data = fd.read()

if is_wvd:
    client_id = LocalDevice.load(data).client_id
else:
    client_id.ParseFromString(data)

print(client_id)
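It accepts either a raw client id blob or a .wvd file, e.g. (paths hypothetical):

    python scripts/ParseClientID.py device_client_id_blob
    python scripts/ParseClientID.py devices/some_device_l3_1234.wvd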
18
scripts/ParseKeybox.py
Normal file
@ -0,0 +1,18 @@
#!/usr/bin/env python3

import argparse

from vinetrimmer.utils.widevine.keybox import Keybox

parser = argparse.ArgumentParser(
    "Keybox parser",
    description="Simple script to read a keybox to see information about it"
)
parser.add_argument(
    "-k", "--keybox",
    help="keybox path",
    required=True)
args = parser.parse_args()

keybox = Keybox.load(args.keybox)
print(repr(keybox))
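Usage, with a hypothetical keybox path:

    python scripts/ParseKeybox.py -k keybox.bin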
30
scripts/ParsePSSH.py
Normal file
@ -0,0 +1,30 @@
#!/usr/bin/env python3

import argparse
import base64

from vinetrimmer.utils.widevine.protos.widevine_pb2 import WidevineCencHeader
from vinetrimmer.vendor.pymp4.parser import Box

parser = argparse.ArgumentParser(
    "PSSH parser",
    description="Simple script to read a PSSH to see information about it"
)
parser.add_argument(
    "input",
    help="base64-encoded PSSH box",
)
args = parser.parse_args()

args.input = base64.b64decode(args.input.encode("utf-8"))
box = Box.parse(args.input)
cenc_header = WidevineCencHeader()
cenc_header.ParseFromString(box.init_data)

print("pssh box:")
print(box)

print("init_data parsed as WidevineCencHeader:")
print(cenc_header)

print("init_data's key_id as hex:")
print(cenc_header.key_id[0].hex())
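Usage; the argument is the base64 text of a Widevine PSSH box (placeholder shown, not a real value):

    python scripts/ParsePSSH.py "<base64 pssh>"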
22
scripts/TOMLtoYAML.py
Normal file
@ -0,0 +1,22 @@
#!/usr/bin/env python3

import argparse
import json
import os

import toml
import yaml

parser = argparse.ArgumentParser()
parser.add_argument("path", help="directory containing .toml files to convert")
args = parser.parse_args()

for root, dirs, files in os.walk(args.path):
    for f in files:
        if f.endswith(".toml"):
            data = toml.load(os.path.join(root, f))
            # Convert to a real dict instead of weird toml object that pyyaml can't handle
            data = json.loads(json.dumps(data))
            with open(os.path.join(root, f"{os.path.splitext(f)[0]}.yml"), "w") as fd:
                print(f"Writing {os.path.realpath(fd.name)}")
                fd.write(yaml.safe_dump(data, sort_keys=False))
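A hypothetical conversion pass over a config directory; each foo.toml gains a foo.yml next to it:

    python scripts/TOMLtoYAML.py vinetrimmer/config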
99
scripts/UpdateLocalKeyVault.py
Normal file
@ -0,0 +1,99 @@
#!/usr/bin/env python3

import argparse
import json
import sqlite3

from vinetrimmer.utils.AtomicSQL import AtomicSQL


class LocalVault:
    def __init__(self, vault_path):
        """
        Update local key vault to the newer system.
        This should ONLY be run if you have the old structure with keys in a table named `keys`.
        It will move and update the structure of the items in `keys` to their respective new locations and structure.
        :param vault_path: sqlite db path
        """
        self.adb = AtomicSQL()
        self.ticket = self.adb.load(sqlite3.connect(vault_path))
        if not self.table_exists("keys"):
            return
        rows = self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute("SELECT `service`, `title`, `content_keys` FROM `keys`")
        ).fetchall()
        for service, title, content_keys in rows:
            service = service.lower()
            content_keys = json.loads(content_keys)
            if not self.table_exists(service):
                self.create_table(service)
            for kid, key in [x.split(":") for x in content_keys]:
                print(f"Inserting: {kid} {key} {title}")
                existing_row, existing_title = self.row_exists(service, kid, key)
                if existing_row:
                    if title and not existing_title:
                        print(" -- exists, but the title doesn't, so I'll merge")
                        self.adb.safe_execute(
                            self.ticket,
                            lambda db, cursor: cursor.execute(
                                f"UPDATE `{service}` SET `title`=? WHERE `kid`=? AND `key_`=?",
                                (title, kid, key)
                            )
                        )
                        continue
                    print(" -- skipping (exists already)")
                    continue
                self.adb.safe_execute(
                    self.ticket,
                    lambda db, cursor: cursor.execute(
                        f"INSERT INTO `{service}` (kid, key_, title) VALUES (?, ?, ?)",
                        (kid, key, title)
                    )
                )
        self.adb.commit(self.ticket)

    def row_exists(self, table, kid, key):
        return self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                f"SELECT count(id), title FROM `{table}` WHERE kid=? AND key_=?",
                [kid, key]
            )
        ).fetchone()

    def table_exists(self, name):
        return self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                "SELECT count(name) FROM sqlite_master WHERE type='table' AND name=?",
                [name.lower()]
            )
        ).fetchone()[0] == 1

    def create_table(self, name):
        self.adb.safe_execute(
            self.ticket,
            lambda db, cursor: cursor.execute(
                """
                CREATE TABLE {} (
                    "id" INTEGER NOT NULL UNIQUE,
                    "kid" TEXT NOT NULL COLLATE NOCASE,
                    "key_" TEXT NOT NULL COLLATE NOCASE,
                    "title" TEXT NULL,
                    PRIMARY KEY("id" AUTOINCREMENT),
                    UNIQUE("kid", "key_")
                );
                """.format(name.lower())
            )
        )


parser = argparse.ArgumentParser()
parser.add_argument(
    "-i", "--input",
    help="vault",
    required=True)
args = parser.parse_args()

LocalVault(args.input)
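Run once against an old-layout vault (path hypothetical); new-layout vaults are left untouched because the `keys` table check returns early:

    python scripts/UpdateLocalKeyVault.py -i key_vault.db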
8
scripts/VMPBlobGen/README.md
Normal file
@ -0,0 +1,8 @@
# VMPBlobGen

Notes on VMP:

- Android doesn't require (or use!) a VMP blob (the oemcrypto hardware backs it and HDCP controls the path)
- Chrome and the Widevine CDM both ship with signature files. The hashes of widevinecdm.dll and chrome.exe are
  covered by those signature files; the resulting blob is signed with the private key and injected into the
  license request as field 7, but you need a server cert to encrypt the challenge.
114
scripts/VMPBlobGen/VMPBlobGen.py
Normal file
@ -0,0 +1,114 @@
#!/usr/bin/env python3

import os
import sys
from hashlib import sha512

from vinetrimmer.utils.widevine.protos.widevine_pb2 import FileHashes
from vinetrimmer.utils.widevine.vmp import WidevineSignatureReader

"""
Script that generates a VMP blob for chromecdm
"""

WIN32_FILES = [
    "chrome.exe",
    "chrome.dll",
    "chrome_child.dll",
    "widevinecdmadapter.dll",
    "widevinecdm.dll"
]


def sha512file(filename):
    """Compute SHA-512 digest of a file."""
    sha = sha512()
    with open(filename, "rb") as fd:
        for b in iter(lambda: fd.read(0x10000), b''):
            sha.update(b)
    return sha.digest()


def build_vmp_field(filenames):
    """
    Create and fill out a FileHashes object.

    `filenames` is an array of triples (basename, file, file_signature)
    such as ("module.dll", ".../module.dll", ".../module.dll.sig"). This does not validate the signature
    against the codesign root CA, or even the sha512 hash against the current signature+signer.
    """
    file_hashes = FileHashes()

    for basename, file, sig in filenames:
        signature = WidevineSignatureReader.from_file(sig)
        s = file_hashes.signatures.add()
        s.filename = basename
        s.test_signing = False  # we can't check this without parsing the signer
        s.SHA512Hash = sha512file(file)
        s.main_exe = signature.mainexe
        s.signature = signature.signature

    # all files are expected to share one signer, so the last one read is used
    file_hashes.signer = signature.signer
    return file_hashes.SerializeToString()


def get_files_with_signatures(path, required_files=None, random_order=False, sig_ext="sig"):
    """
    Use on a Chrome dir (a given version).
    random_order would take any files it finds in the dir with sigs;
    it's not the right way to do it and the browser does not do this.
    This function can still fail (generate wrong output) in subtle ways if
    the Chrome dir has copies of the exe/sigs, especially if those copies are modified in some way.
    """
    if not required_files:
        required_files = WIN32_FILES

    all_files = []
    sig_files = []
    for dir_path, _, filenames in os.walk(path):
        for filename in filenames:
            full_path = os.path.join(dir_path, filename)
            all_files.append(full_path)
            if filename.endswith(sig_ext):
                sig_files.append(full_path)

    base_names = []
    for path in sig_files:
        orig_path = os.path.splitext(path)[0]
        if orig_path not in all_files:
            print("signature file {} lacks original file {}".format(path, orig_path))
        # record the signed file's basename so it can be checked against required_files
        base_names.append(os.path.basename(orig_path))

    if not set(base_names).issuperset(set(required_files)):
        # or should just make this warn as the next exception would be more specific
        raise ValueError("Missing a binary/signature pair from {}".format(required_files))

    files_to_hash = []
    if random_order:
        for path in sig_files:
            orig_path = os.path.splitext(path)[0]
            files_to_hash.append((os.path.basename(orig_path), orig_path, path))
    else:
        for basename in required_files:
            found_file = False
            for path in sig_files:
                orig_path = os.path.splitext(path)[0]
                if orig_path.endswith(basename):
                    files_to_hash.append((basename, orig_path, path))
                    found_file = True
                    break
            if not found_file:
                raise Exception("Failed to locate a file sig/pair for {}".format(basename))

    return files_to_hash


def make_vmp_buff(browser_dir, file_msg_out):
    with open(file_msg_out, "wb") as fd:
        fd.write(build_vmp_field(get_files_with_signatures(browser_dir)))


if len(sys.argv) < 3:
    print("Usage: {} BrowserPathWithVersion OutputPBMessage.bin".format(sys.argv[0]))
else:
    make_vmp_buff(sys.argv[1], sys.argv[2])
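Per the built-in usage line, a hypothetical Windows run against a versioned Chrome directory (the version folder below is made up):

    python scripts/VMPBlobGen/VMPBlobGen.py "C:\Program Files\Google\Chrome\Application\109.0.5414.120" vmp.bin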
65
scripts/WVD/JsonWVDtoStructWVD.py
Normal file
@ -0,0 +1,65 @@
#!/usr/bin/env python3

import argparse
import base64
import json
import os

from vinetrimmer.utils.widevine.device import LocalDevice

"""
Code to convert an old JSON-format WVD to a vinetrimmer struct WVD.
"""

parser = argparse.ArgumentParser(
    "JsonWVDtoStructWVD",
    description="Simple script to read cdm data from old wvd json and write it into a new WVD struct file."
)
parser.add_argument(
    "-i", "--input",
    help="path to wvd json file",
    required=False)
parser.add_argument(
    "-d", "--dir",
    help="path to MULTIPLE wvd json files",
    required=False)
args = parser.parse_args()

files = []
if args.dir:
    # os.listdir returns bare filenames, so join them back onto the directory
    files.extend(os.path.join(args.dir, f) for f in os.listdir(args.dir))
elif args.input:
    files.append(args.input)

for file in files:
    if not file.lower().endswith(".wvd") or os.path.splitext(file)[0].endswith(".struct"):
        continue

    if not os.path.isfile(file):
        raise ValueError("Not a file or doesn't exist...")

    print(f"Generating wvd struct file for {file}...")

    with open(file, encoding="utf-8") as fd:
        wvd_json = json.load(fd)

    device = LocalDevice(
        type=LocalDevice.Types[wvd_json["device_type"].upper()],
        security_level=wvd_json["security_level"],
        flags={
            "send_key_control_nonce": wvd_json["send_key_control_nonce"]
        },
        private_key=base64.b64decode(wvd_json["device_private_key"]),
        client_id=base64.b64decode(wvd_json["device_client_id_blob"]),
        vmp=base64.b64decode(wvd_json["device_vmp_blob"]) if wvd_json.get("device_vmp_blob") else None
    )

    out = os.path.join(os.path.dirname(file), "structs", os.path.basename(file))
    os.makedirs(os.path.dirname(out), exist_ok=True)

    device.dump(out)

    print(device)
    print(f"Done: {file}")

print("Done")
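A hypothetical run over a folder of old JSON-format .wvd files; the struct versions land in a structs/ subfolder next to them:

    python scripts/WVD/JsonWVDtoStructWVD.py -d devices_json/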
50
scripts/WVD/MakeWVD.py
Normal file
@ -0,0 +1,50 @@
#!/usr/bin/env python3

import argparse
import json
import os
import re
import sys

from vinetrimmer.utils.widevine.device import LocalDevice

"""
Code to convert the common folder/file device structure to a vinetrimmer WVD.
"""

parser = argparse.ArgumentParser()
parser.add_argument("dirs", metavar="DIR", nargs="+", help="Directory containing device files")
args = parser.parse_args()

configs = []
for d in args.dirs:
    for root, dirs, files in os.walk(d):
        for f in files:
            if f == "wv.json":
                configs.append(os.path.join(root, f))

if not configs:
    print("No wv.json file found in any of the specified directories.")
    sys.exit(1)

for f in configs:
    d = os.path.dirname(f)

    print(f"Generating WVD struct file for {os.path.abspath(d)}...")

    with open(f, encoding="utf-8") as fd:
        config = json.load(fd)

    device = LocalDevice.from_dir(d)

    # we cannot output to /data/CDM_Devices etc. as the CWD might not line up
    # also best to keep the security level and system id in the filename for easy referencing
    name = re.sub(r"_lvl\d$", "", config["name"])
    out_path = f"{name}_l{device.security_level}_{device.system_id}.wvd"

    device.dump(out_path)

    print(device)

    print(f"Done, saved to: {os.path.abspath(out_path)}")
    print()
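A hypothetical run; each directory scanned must contain a wv.json plus whatever device files LocalDevice.from_dir() expects:

    python scripts/WVD/MakeWVD.py devices/some_device/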
0
scripts/protobuf3/READEME.MD
Normal file
33
scripts/protobuf3/protobuf3/__init__.py
Normal file
@ -0,0 +1,33 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Copyright 2007 Google Inc. All Rights Reserved.

__version__ = '3.20.2'
26
scripts/protobuf3/protobuf3/any_pb2.py
Normal file
@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/any.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _ANY._serialized_start=46
  _ANY._serialized_end=84
# @@protoc_insertion_point(module_scope)
32
scripts/protobuf3/protobuf3/api_pb2.py
Normal file
@ -0,0 +1,32 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/api.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _API._serialized_start=113
  _API._serialized_end=370
  _METHOD._serialized_start=373
  _METHOD._serialized_end=586
  _MIXIN._serialized_start=588
  _MIXIN._serialized_end=623
# @@protoc_insertion_point(module_scope)
0
scripts/protobuf3/protobuf3/compiler/__init__.py
Normal file
35
scripts/protobuf3/protobuf3/compiler/plugin_pb2.py
Normal file
@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/compiler/plugin.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb'
  _VERSION._serialized_start=101
  _VERSION._serialized_end=171
  _CODEGENERATORREQUEST._serialized_start=174
  _CODEGENERATORREQUEST._serialized_end=360
  _CODEGENERATORRESPONSE._serialized_start=363
  _CODEGENERATORRESPONSE._serialized_end=684
  _CODEGENERATORRESPONSE_FILE._serialized_start=499
  _CODEGENERATORRESPONSE_FILE._serialized_end=626
  _CODEGENERATORRESPONSE_FEATURE._serialized_start=628
  _CODEGENERATORRESPONSE_FEATURE._serialized_end=684
# @@protoc_insertion_point(module_scope)
|
1224
scripts/protobuf3/protobuf3/descriptor.py
Normal file
1224
scripts/protobuf3/protobuf3/descriptor.py
Normal file
File diff suppressed because it is too large
Load Diff
177
scripts/protobuf3/protobuf3/descriptor_database.py
Normal file
177
scripts/protobuf3/protobuf3/descriptor_database.py
Normal file
@ -0,0 +1,177 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Provides a container for DescriptorProtos."""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
import warnings
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class DescriptorDatabaseConflictingDefinitionError(Error):
|
||||
"""Raised when a proto is added with the same name & different descriptor."""
|
||||
|
||||
|
||||
class DescriptorDatabase(object):
|
||||
"""A container accepting FileDescriptorProtos and maps DescriptorProtos."""
|
||||
|
||||
def __init__(self):
|
||||
self._file_desc_protos_by_file = {}
|
||||
self._file_desc_protos_by_symbol = {}
|
||||
|
||||
def Add(self, file_desc_proto):
|
||||
"""Adds the FileDescriptorProto and its types to this database.
|
||||
|
||||
Args:
|
||||
file_desc_proto: The FileDescriptorProto to add.
|
||||
Raises:
|
||||
DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
|
||||
add a proto with the same name but different definition than an
|
||||
existing proto in the database.
|
||||
"""
|
||||
proto_name = file_desc_proto.name
|
||||
if proto_name not in self._file_desc_protos_by_file:
|
||||
self._file_desc_protos_by_file[proto_name] = file_desc_proto
|
||||
elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
|
||||
raise DescriptorDatabaseConflictingDefinitionError(
|
||||
'%s already added, but with different descriptor.' % proto_name)
|
||||
else:
|
||||
return
|
||||
|
||||
# Add all the top-level descriptors to the index.
|
||||
package = file_desc_proto.package
|
||||
for message in file_desc_proto.message_type:
|
||||
for name in _ExtractSymbols(message, package):
|
||||
self._AddSymbol(name, file_desc_proto)
|
||||
for enum in file_desc_proto.enum_type:
|
||||
self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto)
|
||||
for enum_value in enum.value:
|
||||
self._file_desc_protos_by_symbol[
|
||||
'.'.join((package, enum_value.name))] = file_desc_proto
|
||||
for extension in file_desc_proto.extension:
|
||||
self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto)
|
    for service in file_desc_proto.service:
      self._AddSymbol(('.'.join((package, service.name))), file_desc_proto)

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending in a .proto file. The
    proto with the given name will have to have been added to this database
    using the Add method or else an error will be raised.

    Args:
      name: The file name to find.

    Returns:
      The file descriptor proto matching the name.

    Raises:
      KeyError if no file by the given name was added.
    """

    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file descriptor's
    package and any containing messages. Some examples:

    'some.package.name.Message'
    'some.package.name.Message.NestedEnum'
    'some.package.name.Message.some_field'

    The file descriptor proto containing the specified symbol must be added to
    this database using the Add method or else an error will be raised.

    Args:
      symbol: The fully qualified symbol name.

    Returns:
      The file descriptor proto containing the symbol.

    Raises:
      KeyError if no file contains the specified symbol.
    """
    try:
      return self._file_desc_protos_by_symbol[symbol]
    except KeyError:
      # Fields, enum values, and nested extensions are not in
      # _file_desc_protos_by_symbol. Try to find the top level
      # descriptor. Non-existent nested symbol under a valid top level
      # descriptor can also be found. The behavior is the same with
      # protobuf C++.
      top_level, _, _ = symbol.rpartition('.')
      try:
        return self._file_desc_protos_by_symbol[top_level]
      except KeyError:
        # Raise the original symbol as a KeyError for better diagnostics.
        raise KeyError(symbol)

  def FindFileContainingExtension(self, extendee_name, extension_number):
    # TODO(jieluo): implement this API.
    return None

  def FindAllExtensionNumbers(self, extendee_name):
    # TODO(jieluo): implement this API.
    return []

  def _AddSymbol(self, name, file_desc_proto):
    if name in self._file_desc_protos_by_symbol:
      warn_msg = ('Conflict register for file "' + file_desc_proto.name +
                  '": ' + name +
                  ' is already defined in file "' +
                  self._file_desc_protos_by_symbol[name].name + '"')
      warnings.warn(warn_msg, RuntimeWarning)
    self._file_desc_protos_by_symbol[name] = file_desc_proto


def _ExtractSymbols(desc_proto, package):
  """Pulls out all the symbols from a descriptor proto.

  Args:
    desc_proto: The proto to extract symbols from.
    package: The package containing the descriptor type.

  Yields:
    The fully qualified name found in the descriptor.
  """
  message_name = package + '.' + desc_proto.name if package else desc_proto.name
  yield message_name
  for nested_type in desc_proto.nested_type:
    for symbol in _ExtractSymbols(nested_type, message_name):
      yield symbol
  for enum_type in desc_proto.enum_type:
    yield '.'.join((message_name, enum_type.name))
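A minimal usage sketch for the database above (hedged: the import path `protobuf3.descriptor_database` is assumed from this repo's layout; upstream the module is `google.protobuf.descriptor_database`):

from google.protobuf import descriptor_pb2
from protobuf3 import descriptor_database

db = descriptor_database.DescriptorDatabase()
file_proto = descriptor_pb2.FileDescriptorProto(
    name='example.proto', package='some.package',
    message_type=[descriptor_pb2.DescriptorProto(name='Message')])
db.Add(file_proto)  # registers the file and every symbol it declares

assert db.FindFileByName('example.proto') is file_proto
# A nested symbol falls back to its top-level message, as implemented above:
assert db.FindFileContainingSymbol('some.package.Message.some_field') is file_proto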
1925
scripts/protobuf3/protobuf3/descriptor_pb2.py
Normal file
File diff suppressed because one or more lines are too long
1295
scripts/protobuf3/protobuf3/descriptor_pool.py
Normal file
File diff suppressed because it is too large
26
scripts/protobuf3/protobuf3/duration_pb2.py
Normal file
@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/duration.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _DURATION._serialized_start=51
  _DURATION._serialized_end=93
# @@protoc_insertion_point(module_scope)
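The generated module above yields a regular message class once the builder calls run. A short sketch (using the upstream import path; the vendored copy should behave the same):

from google.protobuf import duration_pb2

d = duration_pb2.Duration(seconds=3, nanos=500000000)
data = d.SerializeToString()
assert duration_pb2.Duration.FromString(data) == d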
26
scripts/protobuf3/protobuf3/empty_pb2.py
Normal file
@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/empty.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _EMPTY._serialized_start=48
  _EMPTY._serialized_end=55
# @@protoc_insertion_point(module_scope)
26
scripts/protobuf3/protobuf3/field_mask_pb2.py
Normal file
@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/field_mask.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _FIELDMASK._serialized_start=53
  _FIELDMASK._serialized_end=79
# @@protoc_insertion_point(module_scope)
0
scripts/protobuf3/protobuf3/internal/__init__.py
Normal file
443
scripts/protobuf3/protobuf3/internal/_parameterized.py
Normal file
@ -0,0 +1,443 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Adds support for parameterized tests to Python's unittest TestCase class.

A parameterized test is a method in a test case that is invoked with different
argument tuples.

A simple example:

  class AdditionExample(parameterized.TestCase):
    @parameterized.parameters(
        (1, 2, 3),
        (4, 5, 9),
        (1, 1, 3))
    def testAddition(self, op1, op2, result):
      self.assertEqual(result, op1 + op2)


Each invocation is a separate test case and properly isolated just
like a normal test method, with its own setUp/tearDown cycle. In the
example above, there are three separate testcases, one of which will
fail due to an assertion error (1 + 1 != 3).

Parameters for individual test cases can be tuples (with positional parameters)
or dictionaries (with named parameters):

  class AdditionExample(parameterized.TestCase):
    @parameterized.parameters(
        {'op1': 1, 'op2': 2, 'result': 3},
        {'op1': 4, 'op2': 5, 'result': 9},
    )
    def testAddition(self, op1, op2, result):
      self.assertEqual(result, op1 + op2)

If a parameterized test fails, the error message will show the
original test name (which is modified internally) and the arguments
for the specific invocation, which are part of the string returned by
the shortDescription() method on test cases.

The id method of the test, used internally by the unittest framework,
is also modified to show the arguments. To make sure that test names
stay the same across several invocations, object representations like

  >>> class Foo(object):
  ...   pass
  >>> repr(Foo())
  '<__main__.Foo object at 0x23d8610>'

are turned into '<__main__.Foo>'. For even more descriptive names,
especially in test logs, you can use the named_parameters decorator. In
this case, only tuples are supported, and the first parameter has to
be a string (or an object that returns an apt name when converted via
str()):

  class NamedExample(parameterized.TestCase):
    @parameterized.named_parameters(
        ('Normal', 'aa', 'aaa', True),
        ('EmptyPrefix', '', 'abc', True),
        ('BothEmpty', '', '', True))
    def testStartsWith(self, prefix, string, result):
      self.assertEqual(result, string.startswith(prefix))

Named tests also have the benefit that they can be run individually
from the command line:

  $ testmodule.py NamedExample.testStartsWithNormal
  .
  --------------------------------------------------------------------
  Ran 1 test in 0.000s

  OK

Parameterized Classes
=====================
If invocation arguments are shared across test methods in a single
TestCase class, instead of decorating all test methods
individually, the class itself can be decorated:

  @parameterized.parameters(
      (1, 2, 3),
      (4, 5, 9))
  class ArithmeticTest(parameterized.TestCase):
    def testAdd(self, arg1, arg2, result):
      self.assertEqual(arg1 + arg2, result)

    def testSubtract(self, arg1, arg2, result):
      self.assertEqual(result - arg1, arg2)

Inputs from Iterables
=====================
If parameters should be shared across several test cases, or are dynamically
created from other sources, a single non-tuple iterable can be passed into
the decorator. This iterable will be used to obtain the test cases:

  class AdditionExample(parameterized.TestCase):
    @parameterized.parameters(
        (c.op1, c.op2, c.result) for c in testcases
    )
    def testAddition(self, op1, op2, result):
      self.assertEqual(result, op1 + op2)


Single-Argument Test Methods
============================
If a test method takes only one argument, the single argument does not need to
be wrapped into a tuple:

  class NegativeNumberExample(parameterized.TestCase):
    @parameterized.parameters(
        -1, -3, -4, -5
    )
    def testIsNegative(self, arg):
      self.assertTrue(IsNegative(arg))
"""

__author__ = 'tmarek@google.com (Torsten Marek)'

import functools
import re
import types
import unittest
import uuid

try:
  # Since python 3
  import collections.abc as collections_abc
except ImportError:
  # Won't work after python 3.8
  import collections as collections_abc

ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
_SEPARATOR = uuid.uuid1().hex
_FIRST_ARG = object()
_ARGUMENT_REPR = object()


def _CleanRepr(obj):
  return ADDR_RE.sub(r'<\1>', repr(obj))


# Helper function formerly from the unittest module, removed from it in
# Python 2.7.
def _StrClass(cls):
  return '%s.%s' % (cls.__module__, cls.__name__)


def _NonStringIterable(obj):
  return (isinstance(obj, collections_abc.Iterable) and
          not isinstance(obj, str))


def _FormatParameterList(testcase_params):
  if isinstance(testcase_params, collections_abc.Mapping):
    return ', '.join('%s=%s' % (argname, _CleanRepr(value))
                     for argname, value in testcase_params.items())
  elif _NonStringIterable(testcase_params):
    return ', '.join(map(_CleanRepr, testcase_params))
  else:
    return _FormatParameterList((testcase_params,))


class _ParameterizedTestIter(object):
  """Callable and iterable class for producing new test cases."""

  def __init__(self, test_method, testcases, naming_type):
    """Returns concrete test functions for a test and a list of parameters.

    The naming_type is used to determine the name of the concrete
    functions as reported by the unittest framework. If naming_type is
    _FIRST_ARG, the testcases must be tuples, and the first element must
    have a string representation that is a valid Python identifier.

    Args:
      test_method: The decorated test method.
      testcases: (list of tuple/dict) A list of parameter
                 tuples/dicts for individual test invocations.
      naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR.
    """
    self._test_method = test_method
    self.testcases = testcases
    self._naming_type = naming_type

  def __call__(self, *args, **kwargs):
    raise RuntimeError('You appear to be running a parameterized test case '
                       'without having inherited from parameterized.'
                       'TestCase. This is bad because none of '
                       'your test cases are actually being run.')

  def __iter__(self):
    test_method = self._test_method
    naming_type = self._naming_type

    def MakeBoundParamTest(testcase_params):
      @functools.wraps(test_method)
      def BoundParamTest(self):
        if isinstance(testcase_params, collections_abc.Mapping):
          test_method(self, **testcase_params)
        elif _NonStringIterable(testcase_params):
          test_method(self, *testcase_params)
        else:
          test_method(self, testcase_params)

      if naming_type is _FIRST_ARG:
        # Signal the metaclass that the name of the test function is unique
        # and descriptive.
        BoundParamTest.__x_use_name__ = True
        BoundParamTest.__name__ += str(testcase_params[0])
        testcase_params = testcase_params[1:]
      elif naming_type is _ARGUMENT_REPR:
        # __x_extra_id__ is used to pass naming information to the __new__
        # method of TestGeneratorMetaclass.
        # The metaclass will make sure to create a unique, but nondescriptive
        # name for this test.
        BoundParamTest.__x_extra_id__ = '(%s)' % (
            _FormatParameterList(testcase_params),)
      else:
        raise RuntimeError('%s is not a valid naming type.' % (naming_type,))

      BoundParamTest.__doc__ = '%s(%s)' % (
          BoundParamTest.__name__, _FormatParameterList(testcase_params))
      if test_method.__doc__:
        BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
      return BoundParamTest
    return (MakeBoundParamTest(c) for c in self.testcases)


def _IsSingletonList(testcases):
  """True iff testcases contains only a single non-tuple element."""
  return len(testcases) == 1 and not isinstance(testcases[0], tuple)


def _ModifyClass(class_object, testcases, naming_type):
  assert not getattr(class_object, '_id_suffix', None), (
      'Cannot add parameters to %s,'
      ' which already has parameterized methods.' % (class_object,))
  class_object._id_suffix = id_suffix = {}
  # We change the size of __dict__ while we iterate over it,
  # which Python 3.x will complain about, so use copy().
  for name, obj in class_object.__dict__.copy().items():
    if (name.startswith(unittest.TestLoader.testMethodPrefix)
        and isinstance(obj, types.FunctionType)):
      delattr(class_object, name)
      methods = {}
      _UpdateClassDictForParamTestCase(
          methods, id_suffix, name,
          _ParameterizedTestIter(obj, testcases, naming_type))
      for name, meth in methods.items():
        setattr(class_object, name, meth)


def _ParameterDecorator(naming_type, testcases):
  """Implementation of the parameterization decorators.

  Args:
    naming_type: The naming type.
    testcases: Testcase parameters.

  Returns:
    A function for modifying the decorated object.
  """
  def _Apply(obj):
    if isinstance(obj, type):
      _ModifyClass(
          obj,
          list(testcases) if not isinstance(testcases, collections_abc.Sequence)
          else testcases,
          naming_type)
      return obj
    else:
      return _ParameterizedTestIter(obj, testcases, naming_type)

  if _IsSingletonList(testcases):
    assert _NonStringIterable(testcases[0]), (
        'Single parameter argument must be a non-string iterable')
    testcases = testcases[0]

  return _Apply


def parameters(*testcases):  # pylint: disable=invalid-name
  """A decorator for creating parameterized tests.

  See the module docstring for a usage example.
  Args:
    *testcases: Parameters for the decorated method, either a single
                iterable, or a list of tuples/dicts/objects (for tests
                with only one argument).

  Returns:
    A test generator to be handled by TestGeneratorMetaclass.
  """
  return _ParameterDecorator(_ARGUMENT_REPR, testcases)


def named_parameters(*testcases):  # pylint: disable=invalid-name
  """A decorator for creating parameterized tests.

  See the module docstring for a usage example. The first element of
  each parameter tuple should be a string and will be appended to the
  name of the test method.

  Args:
    *testcases: Parameters for the decorated method, either a single
                iterable, or a list of tuples.

  Returns:
    A test generator to be handled by TestGeneratorMetaclass.
  """
  return _ParameterDecorator(_FIRST_ARG, testcases)


class TestGeneratorMetaclass(type):
  """Metaclass for test cases with test generators.

  A test generator is an iterable in a testcase that produces callables. These
  callables must be single-argument methods. These methods are injected into
  the class namespace and the original iterable is removed. If the name of the
  iterable conforms to the test pattern, the injected methods will be picked
  up as tests by the unittest framework.

  In general, it is supposed to be used in conjunction with the
  parameters decorator.
  """

  def __new__(mcs, class_name, bases, dct):
    dct['_id_suffix'] = id_suffix = {}
    for name, obj in dct.copy().items():
      if (name.startswith(unittest.TestLoader.testMethodPrefix) and
          _NonStringIterable(obj)):
        iterator = iter(obj)
        dct.pop(name)
        _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)

    return type.__new__(mcs, class_name, bases, dct)


def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
  """Adds individual test cases to a dictionary.

  Args:
    dct: The target dictionary.
    id_suffix: The dictionary for mapping names to test IDs.
    name: The original name of the test case.
    iterator: The iterator generating the individual test cases.
  """
  for idx, func in enumerate(iterator):
    assert callable(func), 'Test generators must yield callables, got %r' % (
        func,)
    if getattr(func, '__x_use_name__', False):
      new_name = func.__name__
    else:
      new_name = '%s%s%d' % (name, _SEPARATOR, idx)
    assert new_name not in dct, (
        'Name of parameterized test case "%s" not unique' % (new_name,))
    dct[new_name] = func
    id_suffix[new_name] = getattr(func, '__x_extra_id__', '')


class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass):
  """Base class for test cases using the parameters decorator."""

  def _OriginalName(self):
    return self._testMethodName.split(_SEPARATOR)[0]

  def __str__(self):
    return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))

  def id(self):  # pylint: disable=invalid-name
    """Returns the descriptive ID of the test.

    This is used internally by the unittesting framework to get a name
    for the test to be used in reports.

    Returns:
      The test id.
    """
    return '%s.%s%s' % (_StrClass(self.__class__),
                        self._OriginalName(),
                        self._id_suffix.get(self._testMethodName, ''))


def CoopTestCase(other_base_class):
  """Returns a new base class with a cooperative metaclass base.

  This enables the TestCase to be used in combination
  with other base classes that have custom metaclasses, such as
  mox.MoxTestBase.

  Only works with metaclasses that do not override type.__new__.

  Example:

    import google3
    import mox

    from google3.testing.pybase import parameterized

    class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)):
      ...

  Args:
    other_base_class: (class) A test case base class.

  Returns:
    A new class object.
  """
  metaclass = type(
      'CoopMetaclass',
      (other_base_class.__metaclass__,
       TestGeneratorMetaclass), {})
  return metaclass(
      'CoopTestCase',
      (other_base_class, TestCase), {})
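A runnable sketch of the decorators defined above (the `protobuf3.internal._parameterized` import path is assumed from this repo's layout; upstream vendors the same module under `google.protobuf.internal`):

import unittest
from protobuf3.internal import _parameterized as parameterized

class SquareTest(parameterized.TestCase):
  @parameterized.parameters((2, 4), (3, 9))
  def testSquare(self, base, expected):
    # Runs twice, once per parameter tuple, each with its own setUp/tearDown.
    self.assertEqual(expected, base * base)

if __name__ == '__main__':
  unittest.main()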
112
scripts/protobuf3/protobuf3/internal/api_implementation.py
Normal file
@ -0,0 +1,112 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Determine which implementation of the protobuf API is used in this process.
"""

import os
import sys
import warnings

try:
  # pylint: disable=g-import-not-at-top
  from google.protobuf.internal import _api_implementation
  # The compile-time constants in the _api_implementation module can be used to
  # switch to a certain implementation of the Python API at build time.
  _api_version = _api_implementation.api_version
except ImportError:
  _api_version = -1  # Unspecified by compiler flags.

if _api_version == 1:
  raise ValueError('api_version=1 is no longer supported.')


_default_implementation_type = ('cpp' if _api_version > 0 else 'python')


# This environment variable can be used to switch to a certain implementation
# of the Python API, overriding the compile-time constants in the
# _api_implementation module. Right now only 'python' and 'cpp' are valid
# values. Any other value will be ignored.
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
                                 _default_implementation_type)

if _implementation_type != 'python':
  _implementation_type = 'cpp'

if 'PyPy' in sys.version and _implementation_type == 'cpp':
  warnings.warn('PyPy does not work yet with cpp protocol buffers. '
                'Falling back to the python implementation.')
  _implementation_type = 'python'


# Detect if serialization should be deterministic by default
try:
  # The presence of this module in a build allows the proto implementation to
  # be upgraded merely via build deps.
  #
  # NOTE: Merely importing this automatically enables deterministic proto
  # serialization for C++ code, but we still need to export it as a boolean so
  # that we can do the same for `_implementation_type == 'python'`.
  #
  # NOTE2: It is possible for C++ code to enable deterministic serialization by
  # default _without_ affecting Python code, if the C++ implementation is not in
  # use by this module. That is intended behavior, so we don't actually expose
  # this boolean outside of this module.
  #
  # pylint: disable=g-import-not-at-top,unused-import
  from google.protobuf import enable_deterministic_proto_serialization
  _python_deterministic_proto_serialization = True
except ImportError:
  _python_deterministic_proto_serialization = False


# Usage of this function is discouraged. Clients shouldn't care which
# implementation of the API is in use. Note that there is no guarantee
# that differences between APIs will be maintained.
# Please don't use this function if possible.
def Type():
  return _implementation_type


def _SetType(implementation_type):
  """Never use! Only for protobuf benchmark."""
  global _implementation_type
  _implementation_type = implementation_type


# See comment on 'Type' above.
def Version():
  return 2


# For internal use only
def IsPythonDefaultSerializationDeterministic():
  return _python_deterministic_proto_serialization
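Sketch of the override described above; the environment variable must be set before the first protobuf import for it to take effect:

import os
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python'

from google.protobuf.internal import api_implementation
assert api_implementation.Type() == 'python'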
130
scripts/protobuf3/protobuf3/internal/builder.py
Normal file
@ -0,0 +1,130 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Builds descriptors, message classes and services for generated _pb2.py.

This file is only called in python generated _pb2.py files. It builds
descriptors, message classes and services that users can directly use
in generated code.
"""

__author__ = 'jieluo@google.com (Jie Luo)'

from google.protobuf.internal import enum_type_wrapper
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database

_sym_db = _symbol_database.Default()


def BuildMessageAndEnumDescriptors(file_des, module):
  """Builds message and enum descriptors.

  Args:
    file_des: FileDescriptor of the .proto file
    module: Generated _pb2 module
  """

  def BuildNestedDescriptors(msg_des, prefix):
    for (name, nested_msg) in msg_des.nested_types_by_name.items():
      module_name = prefix + name.upper()
      module[module_name] = nested_msg
      BuildNestedDescriptors(nested_msg, module_name + '_')
    for enum_des in msg_des.enum_types:
      module[prefix + enum_des.name.upper()] = enum_des

  for (name, msg_des) in file_des.message_types_by_name.items():
    module_name = '_' + name.upper()
    module[module_name] = msg_des
    BuildNestedDescriptors(msg_des, module_name + '_')


def BuildTopDescriptorsAndMessages(file_des, module_name, module):
  """Builds top level descriptors and message classes.

  Args:
    file_des: FileDescriptor of the .proto file
    module_name: str, the name of generated _pb2 module
    module: Generated _pb2 module
  """

  def BuildMessage(msg_des):
    create_dict = {}
    for (name, nested_msg) in msg_des.nested_types_by_name.items():
      create_dict[name] = BuildMessage(nested_msg)
    create_dict['DESCRIPTOR'] = msg_des
    create_dict['__module__'] = module_name
    message_class = _reflection.GeneratedProtocolMessageType(
        msg_des.name, (_message.Message,), create_dict)
    _sym_db.RegisterMessage(message_class)
    return message_class

  # top level enums
  for (name, enum_des) in file_des.enum_types_by_name.items():
    module['_' + name.upper()] = enum_des
    module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des)
    for enum_value in enum_des.values:
      module[enum_value.name] = enum_value.number

  # top level extensions
  for (name, extension_des) in file_des.extensions_by_name.items():
    module[name.upper() + '_FIELD_NUMBER'] = extension_des.number
    module[name] = extension_des

  # services
  for (name, service) in file_des.services_by_name.items():
    module['_' + name.upper()] = service

  # Build messages.
  for (name, msg_des) in file_des.message_types_by_name.items():
    module[name] = BuildMessage(msg_des)


def BuildServices(file_des, module_name, module):
  """Builds services classes and services stub class.

  Args:
    file_des: FileDescriptor of the .proto file
    module_name: str, the name of generated _pb2 module
    module: Generated _pb2 module
  """
  # pylint: disable=g-import-not-at-top
  from google.protobuf import service as _service
  from google.protobuf import service_reflection
  # pylint: enable=g-import-not-at-top
  for (name, service) in file_des.services_by_name.items():
    module[name] = service_reflection.GeneratedServiceType(
        name, (_service.Service,),
        dict(DESCRIPTOR=service, __module__=module_name))
    stub_name = name + '_Stub'
    module[stub_name] = service_reflection.GeneratedServiceStubType(
        stub_name, (module[name],),
        dict(DESCRIPTOR=service, __module__=module_name))
710
scripts/protobuf3/protobuf3/internal/containers.py
Normal file
@ -0,0 +1,710 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Contains container classes to represent different protocol buffer types.

This file defines container classes which represent categories of protocol
buffer field types which need extra maintenance. Currently these categories
are:

- Repeated scalar fields - These are all repeated fields which aren't
  composite (e.g. they are of simple types like int32, string, etc).
- Repeated composite fields - Repeated fields which are composite. This
  includes groups and nested messages.
"""

import collections.abc
import copy
import pickle
from typing import (
    Any,
    Iterable,
    Iterator,
    List,
    MutableMapping,
    MutableSequence,
    NoReturn,
    Optional,
    Sequence,
    TypeVar,
    Union,
    overload,
)


_T = TypeVar('_T')
_K = TypeVar('_K')
_V = TypeVar('_V')


class BaseContainer(Sequence[_T]):
  """Base container class."""

  # Minimizes memory usage and disallows assignment to other attributes.
  __slots__ = ['_message_listener', '_values']

  def __init__(self, message_listener: Any) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []

  @overload
  def __getitem__(self, key: int) -> _T:
    ...

  @overload
  def __getitem__(self, key: slice) -> List[_T]:
    ...

  def __getitem__(self, key):
    """Retrieves item by the specified key."""
    return self._values[key]

  def __len__(self) -> int:
    """Returns the number of elements in the container."""
    return len(self._values)

  def __ne__(self, other: Any) -> bool:
    """Checks if another instance isn't equal to this one."""
    # The concrete classes should define __eq__.
    return not self == other

  __hash__ = None

  def __repr__(self) -> str:
    return repr(self._values)

  def sort(self, *args, **kwargs) -> None:
    # Continue to support the old sort_function keyword argument.
    # This is expected to be a rare occurrence, so use LBYL to avoid
    # the overhead of actually catching KeyError.
    if 'sort_function' in kwargs:
      kwargs['cmp'] = kwargs.pop('sort_function')
    self._values.sort(*args, **kwargs)

  def reverse(self) -> None:
    self._values.reverse()


# TODO(slebedev): Remove this. BaseContainer does *not* conform to
# MutableSequence, only its subclasses do.
collections.abc.MutableSequence.register(BaseContainer)

class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]):
  """Simple, type-checked, list-like container for holding repeated scalars."""

  # Disallows assignment to other attributes.
  __slots__ = ['_type_checker']

  def __init__(
      self,
      message_listener: Any,
      type_checker: Any,
  ) -> None:
    """Args:

      message_listener: A MessageListener implementation. The
        RepeatedScalarFieldContainer will call this object's Modified() method
        when it is modified.
      type_checker: A type_checkers.ValueChecker instance to run on elements
        inserted into this container.
    """
    super().__init__(message_listener)
    self._type_checker = type_checker

  def append(self, value: _T) -> None:
    """Appends an item to the list. Similar to list.append()."""
    self._values.append(self._type_checker.CheckValue(value))
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def insert(self, key: int, value: _T) -> None:
    """Inserts the item at the specified position. Similar to list.insert()."""
    self._values.insert(key, self._type_checker.CheckValue(value))
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def extend(self, elem_seq: Iterable[_T]) -> None:
    """Extends by appending the given iterable. Similar to list.extend()."""
    if elem_seq is None:
      return
    try:
      elem_seq_iter = iter(elem_seq)
    except TypeError:
      if not elem_seq:
        # silently ignore falsy inputs :-/.
        # TODO(ptucker): Deprecate this behavior. b/18413862
        return
      raise

    new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
    if new_values:
      self._values.extend(new_values)
    self._message_listener.Modified()

  def MergeFrom(
      self,
      other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]],
  ) -> None:
    """Appends the contents of another repeated field of the same type to this
    one. We do not check the types of the individual fields.
    """
    self._values.extend(other)
    self._message_listener.Modified()

  def remove(self, elem: _T):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key: Optional[int] = -1) -> _T:
    """Removes and returns an item at a given index. Similar to list.pop()."""
    value = self._values[key]
    self.__delitem__(key)
    return value

  @overload
  def __setitem__(self, key: int, value: _T) -> None:
    ...

  @overload
  def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
    ...

  def __setitem__(self, key, value) -> None:
    """Sets the item on the specified position."""
    if isinstance(key, slice):
      if key.step is not None:
        raise ValueError('Extended slices not supported')
      self._values[key] = map(self._type_checker.CheckValue, value)
      self._message_listener.Modified()
    else:
      self._values[key] = self._type_checker.CheckValue(value)
      self._message_listener.Modified()

  def __delitem__(self, key: Union[int, slice]) -> None:
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __eq__(self, other: Any) -> bool:
    """Compares the current instance with another one."""
    if self is other:
      return True
    # Special case for the same type which should be common and fast.
    if isinstance(other, self.__class__):
      return other._values == self._values
    # We are presumably comparing against some other sequence type.
    return other == self._values

  def __deepcopy__(
      self,
      unused_memo: Any = None,
  ) -> 'RepeatedScalarFieldContainer[_T]':
    clone = RepeatedScalarFieldContainer(
        copy.deepcopy(self._message_listener), self._type_checker)
    clone.MergeFrom(self)
    return clone

  def __reduce__(self, **kwargs) -> NoReturn:
    raise pickle.PickleError(
        "Can't pickle repeated scalar fields, convert to list first")

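Sketch of the type-checked behavior above, via a hypothetical generated message `Example` with `repeated int32 values = 1` (the message name and field are illustrative, not part of this repo):

msg = Example()
msg.values.append(7)       # RepeatedScalarFieldContainer.append
msg.values.extend([1, 2])  # each element is run through CheckValue
try:
  msg.values.append('not an int')
except TypeError:
  pass  # the type checker rejects mismatched values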
# TODO(slebedev): Constrain T to be a subtype of Message.
class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]):
  """Simple, list-like container for holding repeated composite fields."""

  # Disallows assignment to other attributes.
  __slots__ = ['_message_descriptor']

  def __init__(self, message_listener: Any, message_descriptor: Any) -> None:
    """
    Note that we pass in a descriptor instead of the generated class directly,
    since at the time we construct a _RepeatedCompositeFieldContainer we
    haven't yet necessarily initialized the type that will be contained in the
    container.

    Args:
      message_listener: A MessageListener implementation.
        The RepeatedCompositeFieldContainer will call this object's
        Modified() method when it is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        that should be present in this container. We'll use the
        _concrete_class field of this descriptor when the client calls add().
    """
    super().__init__(message_listener)
    self._message_descriptor = message_descriptor

  def add(self, **kwargs: Any) -> _T:
    """Adds a new element at the end of the list and returns it. Keyword
    arguments may be used to initialize the element.
    """
    new_element = self._message_descriptor._concrete_class(**kwargs)
    new_element._SetListener(self._message_listener)
    self._values.append(new_element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()
    return new_element

  def append(self, value: _T) -> None:
    """Appends one element by copying the message."""
    new_element = self._message_descriptor._concrete_class()
    new_element._SetListener(self._message_listener)
    new_element.CopyFrom(value)
    self._values.append(new_element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def insert(self, key: int, value: _T) -> None:
    """Inserts the item at the specified position by copying."""
    new_element = self._message_descriptor._concrete_class()
    new_element._SetListener(self._message_listener)
    new_element.CopyFrom(value)
    self._values.insert(key, new_element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def extend(self, elem_seq: Iterable[_T]) -> None:
    """Extends by appending the given sequence of elements of the same type

    as this one, copying each individual message.
    """
    message_class = self._message_descriptor._concrete_class
    listener = self._message_listener
    values = self._values
    for message in elem_seq:
      new_element = message_class()
      new_element._SetListener(listener)
      new_element.MergeFrom(message)
      values.append(new_element)
    listener.Modified()

  def MergeFrom(
      self,
      other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]],
  ) -> None:
    """Appends the contents of another repeated field of the same type to this
    one, copying each individual message.
    """
    self.extend(other)

  def remove(self, elem: _T) -> None:
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key: Optional[int] = -1) -> _T:
    """Removes and returns an item at a given index. Similar to list.pop()."""
    value = self._values[key]
    self.__delitem__(key)
    return value

  @overload
  def __setitem__(self, key: int, value: _T) -> None:
    ...

  @overload
  def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
    ...

  def __setitem__(self, key, value):
    # This method is implemented to make RepeatedCompositeFieldContainer
    # structurally compatible with typing.MutableSequence. It is
    # otherwise unsupported and will always raise an error.
    raise TypeError(
        f'{self.__class__.__name__} object does not support item assignment')

  def __delitem__(self, key: Union[int, slice]) -> None:
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __eq__(self, other: Any) -> bool:
    """Compares the current instance with another one."""
    if self is other:
      return True
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    return self._values == other._values

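Sketch using a hypothetical `Outer` message with `repeated Inner items = 1` (names are illustrative only):

outer = Outer()
item = outer.items.add(name='first')  # constructs an Inner and appends it
src = Inner(name='second')
outer.items.append(src)               # append() stores a *copy* of src
outer.items[1].name = 'patched'       # mutates the stored copy, not src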
class ScalarMap(MutableMapping[_K, _V]):
  """Simple, type-checked, dict-like container for holding repeated scalars."""

  # Disallows assignment to other attributes.
  __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
               '_entry_descriptor']

  def __init__(
      self,
      message_listener: Any,
      key_checker: Any,
      value_checker: Any,
      entry_descriptor: Any,
  ) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The ScalarMap will call this object's Modified() method when it
        is modified.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      value_checker: A type_checkers.ValueChecker instance to run on values
        inserted into this container.
      entry_descriptor: The MessageDescriptor of a map entry: key and value.
    """
    self._message_listener = message_listener
    self._key_checker = key_checker
    self._value_checker = value_checker
    self._entry_descriptor = entry_descriptor
    self._values = {}

  def __getitem__(self, key: _K) -> _V:
    try:
      return self._values[key]
    except KeyError:
      key = self._key_checker.CheckValue(key)
      val = self._value_checker.DefaultValue()
      self._values[key] = val
      return val

  def __contains__(self, item: _K) -> bool:
    # We check the key's type to match the strong-typing flavor of the API.
    # Also this makes it easier to match the behavior of the C++ implementation.
    self._key_checker.CheckValue(item)
    return item in self._values

  @overload
  def get(self, key: _K) -> Optional[_V]:
    ...

  @overload
  def get(self, key: _K, default: _T) -> Union[_V, _T]:
    ...

  # We need to override this explicitly, because our defaultdict-like behavior
  # will make the default implementation (from our base class) always insert
  # the key.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    else:
      return default

  def __setitem__(self, key: _K, value: _V) -> _T:
    checked_key = self._key_checker.CheckValue(key)
    checked_value = self._value_checker.CheckValue(value)
    self._values[checked_key] = checked_value
    self._message_listener.Modified()

  def __delitem__(self, key: _K) -> None:
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self) -> int:
    return len(self._values)

  def __iter__(self) -> Iterator[_K]:
    return iter(self._values)

  def __repr__(self) -> str:
    return repr(self._values)

  def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None:
    self._values.update(other._values)
    self._message_listener.Modified()

  def InvalidateIterators(self) -> None:
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    original = self._values
    self._values = original.copy()
    original[None] = None

  # This is defined in the abstract base, but we can do it much more cheaply.
  def clear(self) -> None:
    self._values.clear()
    self._message_listener.Modified()

  def GetEntryClass(self) -> Any:
    return self._entry_descriptor._concrete_class

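Sketch of the defaultdict-like behavior noted above, via a hypothetical message with `map<string, int32> counts = 1`:

assert msg.counts.get('missing') is None  # get() never inserts the key
value = msg.counts['missing']             # __getitem__ inserts the default
assert value == 0 and 'missing' in msg.counts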
class MessageMap(MutableMapping[_K, _V]):
  """Simple, type-checked, dict-like container with submessage values."""

  # Disallows assignment to other attributes.
  __slots__ = ['_key_checker', '_values', '_message_listener',
               '_message_descriptor', '_entry_descriptor']

  def __init__(
      self,
      message_listener: Any,
      message_descriptor: Any,
      key_checker: Any,
      entry_descriptor: Any,
  ) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The MessageMap will call this object's Modified() method when it
        is modified.
      message_descriptor: The MessageDescriptor of the submessage values
        stored in this container.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      entry_descriptor: The MessageDescriptor of a map entry: key and value.
    """
    self._message_listener = message_listener
    self._message_descriptor = message_descriptor
    self._key_checker = key_checker
    self._entry_descriptor = entry_descriptor
    self._values = {}

  def __getitem__(self, key: _K) -> _V:
    key = self._key_checker.CheckValue(key)
    try:
      return self._values[key]
    except KeyError:
      new_element = self._message_descriptor._concrete_class()
      new_element._SetListener(self._message_listener)
      self._values[key] = new_element
      self._message_listener.Modified()
      return new_element

  def get_or_create(self, key: _K) -> _V:
    """get_or_create() is an alias for getitem (ie. map[key]).

    Args:
      key: The key to get or create in the map.

    This is useful in cases where you want to be explicit that the call is
    mutating the map. This can avoid lint errors for statements like this
    that otherwise would appear to be pointless statements:

      msg.my_map[key]
    """
    return self[key]

  @overload
  def get(self, key: _K) -> Optional[_V]:
    ...

  @overload
  def get(self, key: _K, default: _T) -> Union[_V, _T]:
    ...

  # We need to override this explicitly, because our defaultdict-like behavior
  # will make the default implementation (from our base class) always insert
  # the key.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    else:
      return default

  def __contains__(self, item: _K) -> bool:
    item = self._key_checker.CheckValue(item)
    return item in self._values

  def __setitem__(self, key: _K, value: _V) -> NoReturn:
    raise ValueError('May not set values directly, call my_map[key].foo = 5')

  def __delitem__(self, key: _K) -> None:
    key = self._key_checker.CheckValue(key)
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self) -> int:
    return len(self._values)

  def __iter__(self) -> Iterator[_K]:
    return iter(self._values)

  def __repr__(self) -> str:
    return repr(self._values)

  def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None:
    # pylint: disable=protected-access
    for key in other._values:
      # According to documentation: "When parsing from the wire or when merging,
      # if there are duplicate map keys the last key seen is used".
      if key in self:
        del self[key]
      self[key].CopyFrom(other[key])
    # self._message_listener.Modified() not required here, because
    # mutations to submessages already propagate.

  def InvalidateIterators(self) -> None:
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    original = self._values
    self._values = original.copy()
    original[None] = None

  # This is defined in the abstract base, but we can do it much more cheaply.
  def clear(self) -> None:
    self._values.clear()
    self._message_listener.Modified()

  def GetEntryClass(self) -> Any:
    return self._entry_descriptor._concrete_class

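Sketch with a hypothetical `map<string, Inner> entries = 1` field:

msg.entries['a'].name = 'first'  # __getitem__ creates the submessage in place
try:
  msg.entries['b'] = Inner()     # direct assignment is rejected by design
except ValueError:
  msg.entries['b'].CopyFrom(Inner(name='second'))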
||||
class _UnknownField:
|
||||
"""A parsed unknown field."""
|
||||
|
||||
# Disallows assignment to other attributes.
|
||||
__slots__ = ['_field_number', '_wire_type', '_data']
|
||||
|
||||
def __init__(self, field_number, wire_type, data):
|
||||
self._field_number = field_number
|
||||
self._wire_type = wire_type
|
||||
self._data = data
|
||||
return
|
||||
|
||||
def __lt__(self, other):
|
||||
# pylint: disable=protected-access
|
||||
return self._field_number < other._field_number
|
||||
|
||||
def __eq__(self, other):
|
||||
if self is other:
|
||||
return True
|
||||
# pylint: disable=protected-access
|
||||
return (self._field_number == other._field_number and
|
||||
self._wire_type == other._wire_type and
|
||||
self._data == other._data)
|
||||
|
||||
|
||||
class UnknownFieldRef: # pylint: disable=missing-class-docstring
|
||||
|
||||
def __init__(self, parent, index):
|
||||
self._parent = parent
|
||||
self._index = index
|
||||
|
||||
def _check_valid(self):
|
||||
if not self._parent:
|
||||
raise ValueError('UnknownField does not exist. '
|
||||
'The parent message might be cleared.')
|
||||
if self._index >= len(self._parent):
|
||||
raise ValueError('UnknownField does not exist. '
|
||||
'The parent message might be cleared.')
|
||||
|
||||
@property
|
||||
def field_number(self):
|
||||
self._check_valid()
|
||||
# pylint: disable=protected-access
|
||||
return self._parent._internal_get(self._index)._field_number
|
||||
|
||||
@property
|
||||
def wire_type(self):
|
||||
self._check_valid()
|
||||
# pylint: disable=protected-access
|
||||
return self._parent._internal_get(self._index)._wire_type
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
self._check_valid()
|
||||
# pylint: disable=protected-access
|
||||
return self._parent._internal_get(self._index)._data
|
||||
|
||||
|
||||
class UnknownFieldSet:
|
||||
"""UnknownField container"""
|
||||
|
||||
# Disallows assignment to other attributes.
|
||||
__slots__ = ['_values']
|
||||
|
||||
def __init__(self):
|
||||
self._values = []
|
||||
|
||||
def __getitem__(self, index):
|
||||
if self._values is None:
|
||||
raise ValueError('UnknownFields does not exist. '
|
||||
'The parent message might be cleared.')
|
||||
size = len(self._values)
|
||||
if index < 0:
|
||||
index += size
|
||||
if index < 0 or index >= size:
|
||||
      raise IndexError('index %d out of range' % index)

    return UnknownFieldRef(self, index)

  def _internal_get(self, index):
    return self._values[index]

  def __len__(self):
    if self._values is None:
      raise ValueError('UnknownFields does not exist. '
                       'The parent message might be cleared.')
    return len(self._values)

  def _add(self, field_number, wire_type, data):
    unknown_field = _UnknownField(field_number, wire_type, data)
    self._values.append(unknown_field)
    return unknown_field

  def __iter__(self):
    for i in range(len(self)):
      yield UnknownFieldRef(self, i)

  def _extend(self, other):
    if other is None:
      return
    # pylint: disable=protected-access
    self._values.extend(other._values)

  def __eq__(self, other):
    if self is other:
      return True
    # Sort unknown fields because their order shouldn't
    # affect equality test.
    values = list(self._values)
    if other is None:
      return not values
    values.sort()
    # pylint: disable=protected-access
    other_values = sorted(other._values)
    return values == other_values

  def _clear(self):
    for value in self._values:
      # pylint: disable=protected-access
      if isinstance(value._data, UnknownFieldSet):
        value._data._clear()  # pylint: disable=protected-access
    self._values = None
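
  # Illustrative sketch (the payload is hypothetical): after a parse that hit
  # fields the schema does not know, the set can be walked through the
  # UnknownFieldRef views yielded by __iter__ above:
  #
  #   for ref in unknown_field_set:
  #     print(ref.field_number, ref.wire_type, ref.data)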
1029
scripts/protobuf3/protobuf3/internal/decoder.py
Normal file
File diff suppressed because it is too large
829
scripts/protobuf3/protobuf3/internal/encoder.py
Normal file
@@ -0,0 +1,829 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Code for encoding protocol message primitives.

Contains the logic for encoding every logical protocol field type
into one of the 5 physical wire types.

This code is designed to push the Python interpreter's performance to the
limits.

The basic idea is that at startup time, for every field (i.e. every
FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
sizer takes a value of this field's type and computes its byte size. The
encoder takes a writer function and a value. It encodes the value into byte
strings and invokes the writer function to write those strings. Typically the
writer function is the write() method of a BytesIO.

We try to do as much work as possible when constructing the writer and the
sizer rather than when calling them. In particular:
* We copy any needed global functions to local variables, so that we do not
  need to do costly global table lookups at runtime.
* Similarly, we try to do any attribute lookups at startup time if possible.
* Every field's tag is encoded to bytes at startup, since it can't change at
  runtime.
* Whatever component of the field size we can compute at startup, we do.
* We *avoid* sharing code if doing so would make the code slower and not
  sharing does not burden us too much. For example, encoders for repeated
  fields do not just call the encoders for singular fields in a loop because
  this would add an extra function call overhead for every loop iteration;
  instead, we manually inline the single-value encoder into the loop.
* If a Python function lacks a return statement, Python actually generates
  instructions to pop the result of the last statement off the stack, push
  None onto the stack, and then return that. If we really don't care what
  value is returned, then we can save two instructions by returning the
  result of the last statement. It looks funny but it helps.
* We assume that type and bounds checking has happened at a higher level.
"""
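
# A minimal sketch of how the sizer/encoder pairs built below are used. The
# field number 1 and value 150 are just classic illustration values, nothing
# this module requires; both lines assume the definitions that follow later
# in this file:
#
#   sizer = UInt32Sizer(1, False, False)    # sizer(150) == 3
#   encoder = UInt32Encoder(1, False, False)
#   out = []
#   encoder(out.append, 150, True)          # b''.join(out) == b'\x08\x96\x01'
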
__author__ = 'kenton@google.com (Kenton Varda)'

import struct

from google.protobuf.internal import wire_format


# This will overflow and thus become IEEE-754 "infinity". We would use
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
_POS_INF = 1e10000
_NEG_INF = -_POS_INF


def _VarintSize(value):
  """Compute the size of a varint value."""
  if value <= 0x7f: return 1
  if value <= 0x3fff: return 2
  if value <= 0x1fffff: return 3
  if value <= 0xfffffff: return 4
  if value <= 0x7ffffffff: return 5
  if value <= 0x3ffffffffff: return 6
  if value <= 0x1ffffffffffff: return 7
  if value <= 0xffffffffffffff: return 8
  if value <= 0x7fffffffffffffff: return 9
  return 10


def _SignedVarintSize(value):
  """Compute the size of a signed varint value."""
  if value < 0: return 10
  if value <= 0x7f: return 1
  if value <= 0x3fff: return 2
  if value <= 0x1fffff: return 3
  if value <= 0xfffffff: return 4
  if value <= 0x7ffffffff: return 5
  if value <= 0x3ffffffffff: return 6
  if value <= 0x1ffffffffffff: return 7
  if value <= 0xffffffffffffff: return 8
  if value <= 0x7fffffffffffffff: return 9
  return 10


def _TagSize(field_number):
  """Returns the number of bytes required to serialize a tag with this field
  number."""
  # Just pass in type 0, since the type won't affect the tag+type size.
  return _VarintSize(wire_format.PackTag(field_number, 0))
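
# For instance, each boundary above is the largest value that still fits in N
# 7-bit groups: _VarintSize(0x7f) == 1 but _VarintSize(0x80) == 2. Likewise
# fields 1-15 get a one-byte tag, since PackTag(15, 0) == 0x78 <= 0x7f.
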

# --------------------------------------------------------------------
# In this section we define some generic sizers. Each of these functions
# takes parameters specific to a particular field type, e.g. int32 or fixed64.
# It returns another function which in turn takes parameters specific to a
# particular field, e.g. the field number and whether it is repeated or packed.
# Look at the next section to see how these are used.


def _SimpleSizer(compute_value_size):
  """A sizer which uses the function compute_value_size to compute the size of
  each value. Typically compute_value_size is _VarintSize."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        result = 0
        for element in value:
          result += compute_value_size(element)
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(element)
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(value)
      return FieldSize

  return SpecificSizer


def _ModifiedSizer(compute_value_size, modify_value):
  """Like SimpleSizer, but modify_value is invoked on each value before it is
  passed to compute_value_size. modify_value is typically ZigZagEncode."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        result = 0
        for element in value:
          result += compute_value_size(modify_value(element))
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(modify_value(element))
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(modify_value(value))
      return FieldSize

  return SpecificSizer


def _FixedSizer(value_size):
  """Like _SimpleSizer except for a fixed-size field. The input is the size
  of one value."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        result = len(value) * value_size
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      element_size = value_size + tag_size
      def RepeatedFieldSize(value):
        return len(value) * element_size
      return RepeatedFieldSize
    else:
      field_size = value_size + tag_size
      def FieldSize(value):
        return field_size
      return FieldSize

  return SpecificSizer


# ====================================================================
# Here we declare a sizer constructor for each field type. Each "sizer
# constructor" is a function that takes (field_number, is_repeated, is_packed)
# as parameters and returns a sizer, which in turn takes a field value as
# a parameter and returns its encoded size.


Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)

UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)

SInt32Sizer = SInt64Sizer = _ModifiedSizer(
    _SignedVarintSize, wire_format.ZigZagEncode)
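
# ZigZag maps signed values of small magnitude to small unsigned varints
# (0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...), so for example
# SInt32Sizer(1, False, False)(-1) == 2, whereas the plain signed varint
# encoding used by Int32Sizer charges 10 bytes for any negative value.
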

Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)

BoolSizer = _FixedSizer(1)


def StringSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a string field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = local_len(element.encode('utf-8'))
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value.encode('utf-8'))
      return tag_size + local_VarintSize(l) + l
    return FieldSize


def BytesSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a bytes field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = local_len(element)
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value)
      return tag_size + local_VarintSize(l) + l
    return FieldSize


def GroupSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a group field."""

  tag_size = _TagSize(field_number) * 2
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        result += element.ByteSize()
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      return tag_size + value.ByteSize()
    return FieldSize


def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = element.ByteSize()
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = value.ByteSize()
      return tag_size + local_VarintSize(l) + l
    return FieldSize


# --------------------------------------------------------------------
# MessageSet is special: it needs custom logic to compute its size properly.


def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                 _TagSize(3))
  local_VarintSize = _VarintSize

  def FieldSize(value):
    l = value.ByteSize()
    return static_size + local_VarintSize(l) + l

  return FieldSize


# --------------------------------------------------------------------
# Map is special: it needs custom logic to compute its size properly.


def MapSizer(field_descriptor, is_message_map):
  """Returns a sizer for a map field."""

  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  message_sizer = MessageSizer(field_descriptor.number, False, False)

  def FieldSize(map_value):
    total = 0
    for key in map_value:
      value = map_value[key]
      # It's wasteful to create the messages and throw them away one second
      # later since we'll do the same for the actual encode. But there's not an
      # obvious way to avoid this within the current design without tons of
      # code duplication. For message map, value.ByteSize() should be called to
      # update the status.
      entry_msg = message_type._concrete_class(key=key, value=value)
      total += message_sizer(entry_msg)
      if is_message_map:
        value.ByteSize()
    return total

  return FieldSize


# ====================================================================
# Encoders!


def _VarintEncoder():
  """Return an encoder for a basic varint value (does not include tag)."""

  local_int2byte = struct.Struct('>B').pack

  def EncodeVarint(write, value, unused_deterministic=None):
    bits = value & 0x7f
    value >>= 7
    while value:
      write(local_int2byte(0x80|bits))
      bits = value & 0x7f
      value >>= 7
    return write(local_int2byte(bits))

  return EncodeVarint


def _SignedVarintEncoder():
  """Return an encoder for a basic signed varint value (does not include
  tag)."""

  local_int2byte = struct.Struct('>B').pack

  def EncodeSignedVarint(write, value, unused_deterministic=None):
    if value < 0:
      value += (1 << 64)
    bits = value & 0x7f
    value >>= 7
    while value:
      write(local_int2byte(0x80|bits))
      bits = value & 0x7f
      value >>= 7
    return write(local_int2byte(bits))

  return EncodeSignedVarint


_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()


def _VarintBytes(value):
  """Encode the given integer as a varint and return the bytes. This is only
  called at startup time so it doesn't need to be fast."""

  pieces = []
  _EncodeVarint(pieces.append, value, True)
  return b"".join(pieces)


def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes. Only called at startup."""

  return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type)))
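
# Worked example of the two helpers above (values chosen for illustration):
# _VarintBytes(300) == b'\xac\x02', because 300 = 0b10_0101100 is emitted as
# the 7-bit groups 0101100 then 10, low group first, with the high bit set on
# every byte but the last. TagBytes(1, 0) == b'\x08', since the tag varint is
# (field_number << 3) | wire_type == 8.
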

# --------------------------------------------------------------------
# As with sizers (see above), we have a number of common encoder
# implementations.


def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
      wire_type: The field's wire type, for encoding tags.
      encode_value: A function which encodes an individual value, e.g.
        _EncodeVarint().
      compute_value_size: A function which computes the size of an individual
        value, e.g. _VarintSize().
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        size = 0
        for element in value:
          size += compute_value_size(element)
        local_EncodeVarint(write, size, deterministic)
        for element in value:
          encode_value(write, element, deterministic)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for element in value:
          write(tag_bytes)
          encode_value(write, element, deterministic)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, deterministic):
        write(tag_bytes)
        return encode_value(write, value, deterministic)
      return EncodeField

  return SpecificEncoder


def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like SimpleEncoder but additionally invokes modify_value on every value
  before passing it to encode_value. Usually modify_value is ZigZagEncode."""

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        size = 0
        for element in value:
          size += compute_value_size(modify_value(element))
        local_EncodeVarint(write, size, deterministic)
        for element in value:
          encode_value(write, modify_value(element), deterministic)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for element in value:
          write(tag_bytes)
          encode_value(write, modify_value(element), deterministic)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, deterministic):
        write(tag_bytes)
        return encode_value(write, modify_value(value), deterministic)
      return EncodeField

  return SpecificEncoder


def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
      wire_type: The field's wire type, for encoding tags.
      format: The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          write(local_struct_pack(format, element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          write(local_struct_pack(format, element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        return write(local_struct_pack(format, value))
      return EncodeField

  return SpecificEncoder


def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.

  This is like StructPackEncoder, but catches errors that may be due to
  passing non-finite floating-point values to struct.pack, and makes a
  second attempt to encode those values.

  Args:
      wire_type: The field's wire type, for encoding tags.
      format: The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)
  if value_size == 4:
    def EncodeNonFiniteOrRaise(write, value):
      # Remember that the serialized form uses little-endian byte order.
      if value == _POS_INF:
        write(b'\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x80\xFF')
      elif value != value:  # NaN
        write(b'\x00\x00\xC0\x7F')
      else:
        raise
  elif value_size == 8:
    def EncodeNonFiniteOrRaise(write, value):
      if value == _POS_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:  # NaN
        write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          # This try/except block is going to be faster than any code that
          # we could write to check whether element is finite.
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        try:
          write(local_struct_pack(format, value))
        except SystemError:
          EncodeNonFiniteOrRaise(write, value)
      return EncodeField

  return SpecificEncoder
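
# Illustration (field number 1 chosen arbitrarily): non-finite floats come out
# as the literal IEEE-754 bit patterns listed in EncodeNonFiniteOrRaise, so a
# singular float field holding infinity serializes to tag 0x0d plus the
# 4-byte pattern:
#
#   enc = FloatEncoder(1, False, False)
#   out = []
#   enc(out.append, float('inf'))
#   b''.join(out) == b'\x0d\x00\x00\x80\x7f'
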

# ====================================================================
# Here we declare an encoder constructor for each field type. These work
# very similarly to sizer constructors, described earlier.


Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)

UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)

SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
    wire_format.ZigZagEncode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')


def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field."""

  false_byte = b'\x00'
  true_byte = b'\x01'
  if is_packed:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    local_EncodeVarint = _EncodeVarint
    def EncodePackedField(write, value, deterministic):
      write(tag_bytes)
      local_EncodeVarint(write, len(value), deterministic)
      for element in value:
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodePackedField
  elif is_repeated:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeRepeatedField(write, value, unused_deterministic=None):
      for element in value:
        write(tag_bytes)
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodeRepeatedField
  else:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeField(write, value, unused_deterministic=None):
      write(tag_bytes)
      if value:
        return write(true_byte)
      return write(false_byte)
    return EncodeField


def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  local_len = len
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        encoded = element.encode('utf-8')
        write(tag)
        local_EncodeVarint(write, local_len(encoded), deterministic)
        write(encoded)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      encoded = value.encode('utf-8')
      write(tag)
      local_EncodeVarint(write, local_len(encoded), deterministic)
      return write(encoded)
    return EncodeField


def BytesEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a bytes field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  local_len = len
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(tag)
        local_EncodeVarint(write, local_len(element), deterministic)
        write(element)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      write(tag)
      local_EncodeVarint(write, local_len(value), deterministic)
      return write(value)
    return EncodeField


def GroupEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a group field."""

  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(start_tag)
        element._InternalSerialize(write, deterministic)
        write(end_tag)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      write(start_tag)
      value._InternalSerialize(write, deterministic)
      return write(end_tag)
    return EncodeField


def MessageEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a message field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(tag)
        local_EncodeVarint(write, element.ByteSize(), deterministic)
        element._InternalSerialize(write, deterministic)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      write(tag)
      local_EncodeVarint(write, value.ByteSize(), deterministic)
      return value._InternalSerialize(write, deterministic)
    return EncodeField


# --------------------------------------------------------------------
# As before, MessageSet is special.


def MessageSetItemEncoder(field_number):
  """Encoder for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  start_bytes = b"".join([
      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
      TagBytes(2, wire_format.WIRETYPE_VARINT),
      _VarintBytes(field_number),
      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
  local_EncodeVarint = _EncodeVarint

  def EncodeField(write, value, deterministic):
    write(start_bytes)
    local_EncodeVarint(write, value.ByteSize(), deterministic)
    value._InternalSerialize(write, deterministic)
    return write(end_bytes)

  return EncodeField


# --------------------------------------------------------------------
# As before, Map is special.


def MapEncoder(field_descriptor):
  """Encoder for map fields.

  Maps always have a wire format like this:
    message MapEntry {
      key_type key = 1;
      value_type value = 2;
    }
    repeated MapEntry map = N;
  """
  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  encode_message = MessageEncoder(field_descriptor.number, False, False)

  def EncodeField(write, value, deterministic):
    value_keys = sorted(value.keys()) if deterministic else value
    for key in value_keys:
      entry_msg = message_type._concrete_class(key=key, value=value[key])
      encode_message(write, entry_msg, deterministic)

  return EncodeField
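
# Illustrative sketch of the map wire format above (the map type is
# hypothetical): a map<string, int32> entry {'a': 1} under field number N is
# serialized exactly like a repeated submessage with the key as field 1 and
# the value as field 2, i.e. tag(N, length-delimited), the entry length, then
# the entry body b'\x0a\x01a\x10\x01'.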
124
scripts/protobuf3/protobuf3/internal/enum_type_wrapper.py
Normal file
@@ -0,0 +1,124 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""A simple wrapper around enum types to expose utility functions.

Instances are created as properties with the same name as the enum they wrap
on proto classes. For usage, see:
  reflection_test.py
"""

__author__ = 'rabsatt@google.com (Kevin Rabsatt)'


class EnumTypeWrapper(object):
  """A utility for finding the names of enum values."""

  DESCRIPTOR = None

  # This is a type alias, which mypy typing stubs can type as
  # a genericized parameter constrained to an int, allowing subclasses
  # to be typed with more constraint in .pyi stubs
  # Eg.
  # def MyGeneratedEnum(Message):
  #   ValueType = NewType('ValueType', int)
  #   def Name(self, number: MyGeneratedEnum.ValueType) -> str
  ValueType = int

  def __init__(self, enum_type):
    """Inits EnumTypeWrapper with an EnumDescriptor."""
    self._enum_type = enum_type
    self.DESCRIPTOR = enum_type  # pylint: disable=invalid-name

  def Name(self, number):  # pylint: disable=invalid-name
    """Returns a string containing the name of an enum value."""
    try:
      return self._enum_type.values_by_number[number].name
    except KeyError:
      pass  # fall out to break exception chaining

    if not isinstance(number, int):
      raise TypeError(
          'Enum value for {} must be an int, but got {} {!r}.'.format(
              self._enum_type.name, type(number), number))
    else:
      # repr here to handle the odd case when you pass in a boolean.
      raise ValueError('Enum {} has no name defined for value {!r}'.format(
          self._enum_type.name, number))

  def Value(self, name):  # pylint: disable=invalid-name
    """Returns the value corresponding to the given enum name."""
    try:
      return self._enum_type.values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise ValueError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))

  def keys(self):
    """Return a list of the string names in the enum.

    Returns:
      A list of strs, in the order they were defined in the .proto file.
    """

    return [value_descriptor.name
            for value_descriptor in self._enum_type.values]

  def values(self):
    """Return a list of the integer values in the enum.

    Returns:
      A list of ints, in the order they were defined in the .proto file.
    """

    return [value_descriptor.number
            for value_descriptor in self._enum_type.values]

  def items(self):
    """Return a list of the (name, value) pairs of the enum.

    Returns:
      A list of (str, int) pairs, in the order they were defined
      in the .proto file.
    """
    return [(value_descriptor.name, value_descriptor.number)
            for value_descriptor in self._enum_type.values]

  def __getattr__(self, name):
    """Returns the value corresponding to the given enum name."""
    try:
      return super(
          EnumTypeWrapper,
          self).__getattribute__('_enum_type').values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise AttributeError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))
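
# Typical generated usage (the message and enum names are hypothetical):
#
#   my_pb2.Color.Name(0)       -> 'RED'
#   my_pb2.Color.Value('RED')  -> 0
#   my_pb2.Color.RED           -> 0, via __getattr__ above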
213
scripts/protobuf3/protobuf3/internal/extension_dict.py
Normal file
@@ -0,0 +1,213 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Contains _ExtensionDict class to represent extensions.
"""

from google.protobuf.internal import type_checkers
from google.protobuf.descriptor import FieldDescriptor


def _VerifyExtensionHandle(message, extension_handle):
  """Verify that the given extension handle is valid."""

  if not isinstance(extension_handle, FieldDescriptor):
    raise KeyError('HasExtension() expects an extension handle, got: %s' %
                   extension_handle)

  if not extension_handle.is_extension:
    raise KeyError('"%s" is not an extension.' % extension_handle.full_name)

  if not extension_handle.containing_type:
    raise KeyError('"%s" is missing a containing_type.'
                   % extension_handle.full_name)

  if extension_handle.containing_type is not message.DESCRIPTOR:
    raise KeyError('Extension "%s" extends message type "%s", but this '
                   'message is of type "%s".' %
                   (extension_handle.full_name,
                    extension_handle.containing_type.full_name,
                    message.DESCRIPTOR.full_name))


# TODO(robinson): Unify error handling of "unknown extension" crap.
# TODO(robinson): Support iteritems()-style iteration over all
# extensions with the "has" bits turned on?
class _ExtensionDict(object):

  """Dict-like container for Extension fields on proto instances.

  Note that in all cases we expect extension handles to be
  FieldDescriptors.
  """

  def __init__(self, extended_message):
    """
    Args:
      extended_message: Message instance for which we are the Extensions dict.
    """
    self._extended_message = extended_message

  def __getitem__(self, extension_handle):
    """Returns the current value of the given extension handle."""

    _VerifyExtensionHandle(self._extended_message, extension_handle)

    result = self._extended_message._fields.get(extension_handle)
    if result is not None:
      return result

    if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
      result = extension_handle._default_constructor(self._extended_message)
    elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
      message_type = extension_handle.message_type
      if not hasattr(message_type, '_concrete_class'):
        # pylint: disable=protected-access
        self._extended_message._FACTORY.GetPrototype(message_type)
      assert getattr(extension_handle.message_type, '_concrete_class', None), (
          'Uninitialized concrete class found for field %r (message type %r)'
          % (extension_handle.full_name,
             extension_handle.message_type.full_name))
      result = extension_handle.message_type._concrete_class()
      try:
        result._SetListener(self._extended_message._listener_for_children)
      except ReferenceError:
        pass
    else:
      # Singular scalar -- just return the default without inserting into the
      # dict.
      return extension_handle.default_value

    # Atomically check if another thread has preempted us and, if not, swap
    # in the new object we just created. If someone has preempted us, we
    # take that object and discard ours.
    # WARNING: We are relying on setdefault() being atomic. This is true
    # in CPython but we haven't investigated others. This warning appears
    # in several other locations in this file.
    result = self._extended_message._fields.setdefault(
        extension_handle, result)

    return result

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False

    my_fields = self._extended_message.ListFields()
    other_fields = other._extended_message.ListFields()

    # Get rid of non-extension fields. ListFields() returns (descriptor,
    # value) pairs, so the extension flag lives on the descriptor element.
    my_fields = [field for field in my_fields if field[0].is_extension]
    other_fields = [field for field in other_fields if field[0].is_extension]

    return my_fields == other_fields

  def __ne__(self, other):
    return not self == other

  def __len__(self):
    fields = self._extended_message.ListFields()
    # Get rid of non-extension fields.
    extension_fields = [field for field in fields if field[0].is_extension]
    return len(extension_fields)

  def __hash__(self):
    raise TypeError('unhashable object')

  # Note that this is only meaningful for non-repeated, scalar extension
  # fields. Note also that we may have to call _Modified() when we do
  # successfully set a field this way, to set any necessary "has" bits in the
  # ancestors of the extended message.
  def __setitem__(self, extension_handle, value):
    """If extension_handle specifies a non-repeated, scalar extension
    field, sets the value of that field.
    """

    _VerifyExtensionHandle(self._extended_message, extension_handle)

    if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or
        extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE):
      raise TypeError(
          'Cannot assign to extension "%s" because it is a repeated or '
          'composite type.' % extension_handle.full_name)

    # It's slightly wasteful to lookup the type checker each time,
    # but we expect this to be a vanishingly uncommon case anyway.
    type_checker = type_checkers.GetTypeChecker(extension_handle)
    # pylint: disable=protected-access
    self._extended_message._fields[extension_handle] = (
        type_checker.CheckValue(value))
    self._extended_message._Modified()

  def __delitem__(self, extension_handle):
    self._extended_message.ClearExtension(extension_handle)

  def _FindExtensionByName(self, name):
    """Tries to find a known extension with the specified name.

    Args:
      name: Extension full name.

    Returns:
      Extension field descriptor.
    """
    return self._extended_message._extensions_by_name.get(name, None)

  def _FindExtensionByNumber(self, number):
    """Tries to find a known extension with the field number.

    Args:
      number: Extension field number.

    Returns:
      Extension field descriptor.
    """
    return self._extended_message._extensions_by_number.get(number, None)

  def __iter__(self):
    # Return a generator over the populated extension fields
    return (f[0] for f in self._extended_message.ListFields()
            if f[0].is_extension)

  def __contains__(self, extension_handle):
    _VerifyExtensionHandle(self._extended_message, extension_handle)

    if extension_handle not in self._extended_message._fields:
      return False

    if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
      return bool(self._extended_message._fields.get(extension_handle))

    if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
      value = self._extended_message._fields.get(extension_handle)
      # pylint: disable=protected-access
      return value is not None and value._is_present_in_parent

    return True
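
# Typical access pattern (the proto names are hypothetical): the handle is the
# generated extension FieldDescriptor, not a string key:
#
#   msg.Extensions[my_pb2.my_extension] = 42
#   my_pb2.my_extension in msg.Extensions   # -> True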
78
scripts/protobuf3/protobuf3/internal/message_listener.py
Normal file
@@ -0,0 +1,78 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Defines a listener interface for observing certain
state transitions on Message objects.

Also defines a null implementation of this interface.
"""

__author__ = 'robinson@google.com (Will Robinson)'


class MessageListener(object):

  """Listens for modifications made to a message. Meant to be registered via
  Message._SetListener().

  Attributes:
    dirty: If True, then calling Modified() would be a no-op. This can be
      used to avoid these calls entirely in the common case.
  """

  def Modified(self):
    """Called every time the message is modified in such a way that the parent
    message may need to be updated. This currently means either:
    (a) The message was modified for the first time, so the parent message
        should henceforth mark the message as present.
    (b) The message's cached byte size became dirty -- i.e. the message was
        modified for the first time after a previous call to ByteSize().
        Therefore the parent should also mark its byte size as dirty.
    Note that (a) implies (b), since new objects start out with a client cached
    size (zero). However, we document (a) explicitly because it is important.

    Modified() will *only* be called in response to one of these two events --
    not every time the sub-message is modified.

    Note that if the listener's |dirty| attribute is true, then calling
    Modified at the moment would be a no-op, so it can be skipped. Performance-
    sensitive callers should check this attribute directly before calling since
    it will be true most of the time.
    """

    raise NotImplementedError


class NullMessageListener(object):

  """No-op MessageListener implementation."""

  def Modified(self):
    pass
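
# A minimal sketch of the contract above: a parent that wants change
# notifications registers a listener on the child (the class name here is
# illustrative only):
#
#   class DirtyFlagListener(MessageListener):
#     dirty = False
#     def Modified(self):
#       self.dirty = True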
@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/message_set_extensions.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  TestMessageSet.RegisterExtension(message_set_extension3)
  TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension'])
  TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension'])

  DESCRIPTOR._options = None
  _TESTMESSAGESET._options = None
  _TESTMESSAGESET._serialized_options = b'\010\001'
  _TESTMESSAGESET._serialized_start=83
  _TESTMESSAGESET._serialized_end=113
  _TESTMESSAGESETEXTENSION1._serialized_start=116
  _TESTMESSAGESETEXTENSION1._serialized_end=281
  _TESTMESSAGESETEXTENSION2._serialized_start=284
  _TESTMESSAGESETEXTENSION2._serialized_end=451
  _TESTMESSAGESETEXTENSION3._serialized_start=453
  _TESTMESSAGESETEXTENSION3._serialized_end=493
# @@protoc_insertion_point(module_scope)
@ -0,0 +1,37 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/missing_enum_values.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None
  _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001'
  _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None
  _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001'
  _TESTENUMVALUES._serialized_start=88
  _TESTENUMVALUES._serialized_end=409
  _TESTENUMVALUES_NESTEDENUM._serialized_start=378
  _TESTENUMVALUES_NESTEDENUM._serialized_end=409
  _TESTMISSINGENUMVALUES._serialized_start=412
  _TESTMISSINGENUMVALUES._serialized_end=751
  _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730
  _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751
  _JUSTSTRING._serialized_start=753
  _JUSTSTRING._serialized_end=780
# @@protoc_insertion_point(module_scope)
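The point of the pair of messages above is that they share field numbers but disagree on enum values: TestEnumValues.NestedEnum defines ZERO and ONE, while TestMissingEnumValues.NestedEnum defines only TWO. A minimal sketch of the proto2 behaviour this lets tests exercise:

from google.protobuf.internal import missing_enum_values_pb2 as pb2

src = pb2.TestEnumValues(optional_nested_enum=pb2.TestEnumValues.ONE)
data = src.SerializeToString()

# ONE (= 1) does not exist in TestMissingEnumValues.NestedEnum, so under
# proto2 semantics the parsed value lands in unknown fields instead.
dst = pb2.TestMissingEnumValues()
dst.ParseFromString(data)
assert not dst.HasField('optional_nested_enum')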
@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/more_extensions_dynamic.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension)
  google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension)
  google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension)

  DESCRIPTOR._options = None
  _DYNAMICMESSAGETYPE._serialized_start=132
  _DYNAMICMESSAGETYPE._serialized_end=163
# @@protoc_insertion_point(module_scope)
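This module's extensions attach to ExtendedMessage, which is defined in the sibling more_extensions_pb2 module imported above. A brief sketch of cross-module extension use (field values are illustrative):

from google.protobuf.internal import more_extensions_pb2
from google.protobuf.internal import more_extensions_dynamic_pb2 as dyn_pb2

msg = more_extensions_pb2.ExtendedMessage()
msg.Extensions[dyn_pb2.dynamic_int32_extension] = 7        # scalar extension
msg.Extensions[dyn_pb2.dynamic_message_extension].a = 1    # message extension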
41
scripts/protobuf3/protobuf3/internal/more_extensions_pb2.py
Normal file
@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/more_extensions.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  ExtendedMessage.RegisterExtension(optional_int_extension)
  ExtendedMessage.RegisterExtension(optional_message_extension)
  ExtendedMessage.RegisterExtension(repeated_int_extension)
  ExtendedMessage.RegisterExtension(repeated_message_extension)

  DESCRIPTOR._options = None
  _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None
  _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001'
  _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None
  _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001'
  _NESTEDMESSAGE.fields_by_name['submessage']._options = None
  _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001'
  _TOPLEVELMESSAGE._serialized_start=77
  _TOPLEVELMESSAGE._serialized_end=230
  _NESTEDMESSAGE._serialized_start=232
  _NESTEDMESSAGE._serialized_end=314
  _EXTENDEDMESSAGE._serialized_start=316
  _EXTENDEDMESSAGE._serialized_end=391
  _FOREIGNMESSAGE._serialized_start=393
  _FOREIGNMESSAGE._serialized_end=438
# @@protoc_insertion_point(module_scope)
556
scripts/protobuf3/protobuf3/internal/more_messages_pb2.py
Normal file
File diff suppressed because one or more lines are too long
27
scripts/protobuf3/protobuf3/internal/no_package_pb2.py
Normal file
@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/no_package.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  _NOPACKAGEENUM._serialized_start=106
  _NOPACKAGEENUM._serialized_end=169
  _NOPACKAGEMESSAGE._serialized_start=45
  _NOPACKAGEMESSAGE._serialized_end=104
# @@protoc_insertion_point(module_scope)
1539
scripts/protobuf3/protobuf3/internal/python_message.py
Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.