Upload the fork
This commit is contained in:
parent f8c4accd54
commit 15fd02e6ff
3 .gitignore vendored
@@ -1,3 +1,4 @@
@@ -1,197 +0,0 @@
/misc/
/Temp/
/Downloads/
@@ -194,4 +195,4 @@ pdm.lock
/.idea/modules.xml
/.idea/inspectionProfiles/profiles_settings.xml
/.idea/pyplayready.iml
/.idea/vcs.xml
/.idea/vcs.xml
3 .idea/PlayReady-Amazon-Tool-main.iml generated
@@ -2,9 +2,6 @@
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$">
      <sourceFolder url="file://$MODULE_DIR$/scripts/protobuf3" isTestSource="false" />
      <sourceFolder url="file://$MODULE_DIR$/scripts/pyplayready" isTestSource="false" />
      <sourceFolder url="file://$MODULE_DIR$/scripts/pywidevine" isTestSource="false" />
      <excludeFolder url="file://$MODULE_DIR$/.venv" />
    </content>
    <orderEntry type="jdk" jdkName="Python 3.10" jdkType="Python SDK" />
26 .idea/runConfigurations/poetry.xml generated
@@ -1,26 +0,0 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="poetry" type="PythonConfigurationType" factoryName="Python">
    <module name="PlayReady-Amazon-Tool-main" />
    <option name="ENV_FILES" value="" />
    <option name="INTERPRETER_OPTIONS" value="" />
    <option name="PARENT_ENVS" value="true" />
    <envs>
      <env name="PYTHONUNBUFFERED" value="1" />
    </envs>
    <option name="SDK_HOME" value="" />
    <option name="SDK_NAME" value="Poetry (PlayReady-Amazon-Tool-main)" />
    <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/" />
    <option name="IS_MODULE_SDK" value="false" />
    <option name="ADD_CONTENT_ROOTS" value="true" />
    <option name="ADD_SOURCE_ROOTS" value="true" />
    <EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
    <option name="SCRIPT_NAME" value="vinetrimmer.py" />
    <option name="PARAMETERS" value="dl --no-cache --keys AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4" />
    <option name="SHOW_COMMAND_LINE" value="false" />
    <option name="EMULATE_TERMINAL" value="false" />
    <option name="MODULE_MODE" value="false" />
    <option name="REDIRECT_INPUT" value="false" />
    <option name="INPUT_FILE" value="" />
    <method v="2" />
  </configuration>
</component>
14 ATVP.txt
@@ -1,11 +1,5 @@
https://tv.apple.com/us/show/ray-donovan/umc.cmc.hr7pnm1wbx98w1h3pg7dfbey
https://tv.apple.com/us/show/party-down/umc.cmc.6myol1kgcd19kerlujhtcr8kg
https://tv.apple.com/us/show/mythic-quest/umc.cmc.1nfdfd5zlk05fo1bwwetzldy3
https://tv.apple.com/us/show/the-completely-made-up-adventures-of-dick-turpin/umc.cmc.37r7vskzmm8hk2pfbzaxlcwzg
https://tv.apple.com/us/show/the-office-superfan-episodes/umc.cmc.3r3om9j6edlrnznl5pfassikv
https://tv.apple.com/us/show/trailer-park-boys-the-swearnet-show/umc.cmc.71tbyxchxiwotaysuuztm8p54
https://tv.apple.com/us/show/fridays/umc.cmc.ve44y99fmo41lok4mx7azvfi
https://tv.apple.com/us/show/utopia/umc.cmc.4uzbqvarwjrbkqz92796oelqj

https://tv.apple.com/us/movie/oceans-eleven/umc.cmc.4mt9j4jqou4mlup1pc9riyo63
https://tv.apple.com/us/movie/bullet-train/umc.cmc.5erhpztw3spfkfi0daabkmaq0
@@ -82,4 +76,10 @@ https://tv.apple.com/us/show/robot-chicken/umc.cmc.5vh2acwvaldmwdqj1qibtl02u | R
https://tv.apple.com/us/show/the-state/umc.cmc.5af6lx6evkseyhotjzhr16oot | The State - Apple TV
https://tv.apple.com/us/show/upright-citizens-brigade/umc.cmc.638n6gvt13rg3w8g24h1chmdr | Upright Citizens Brigade - Apple TV
https://tv.apple.com/us/show/fridays/umc.cmc.ve44y99fmo41lok4mx7azvfi | Fridays - Apple TV
https://tv.apple.com/us/show/drunk-history/umc.cmc.2fai5tmqz2z6g9iy8er8ft11m | Drunk History - Apple TV
https://tv.apple.com/us/show/drunk-history/umc.cmc.2fai5tmqz2z6g9iy8er8ft11m | Drunk History - Apple TV
https://tv.apple.com/us/show/ray-donovan/umc.cmc.hr7pnm1wbx98w1h3pg7dfbey
https://tv.apple.com/us/show/party-down/umc.cmc.6myol1kgcd19kerlujhtcr8kg
https://tv.apple.com/us/show/the-office-superfan-episodes/umc.cmc.3r3om9j6edlrnznl5pfassikv
https://tv.apple.com/us/show/trailer-park-boys-the-swearnet-show/umc.cmc.71tbyxchxiwotaysuuztm8p54
https://tv.apple.com/us/show/fridays/umc.cmc.ve44y99fmo41lok4mx7azvfi
https://tv.apple.com/us/show/utopia/umc.cmc.4uzbqvarwjrbkqz92796oelqj
BIN How to use VT.pdf Normal file
Binary file not shown.
221 README.md
@@ -1,210 +1,23 @@
# VineTrimmer-PlayReady

A tool to download and remove DRM from streaming services. A version of an old fork of [devine](https://github.com/devine-dl/devine).
Modified to remove PlayReady DRM instead of Widevine.

## Features

- Progress bars for decryption ([mp4decrypt](https://github.com/chu23465/bentoOldFork), Shaka)
- Refresh token fixed for the Amazon service
- Reprovision .prd after a week
- ISM manifest support (Microsoft Smooth Streaming) (a few features still to be added)
- N_m3u8DL-RE downloader support

Hi, I'm PlayReady

## Usage

This is me, Sofiya. I am posting this to show how we can use an SL2000 certificate to do Amazon using PlayReady DRM.
"---Always Work Hard and Trust the Process---"

1. Run `install.bat`

2. Activate the venv using `venv.cmd`.
Amazon demonstration using SL2000

We have all certificates, SL2000 & SL3000
We have all codes for Disney and all sites
This is posted to punish the people who are making PlayReady easily available

If you want to collaborate or need support, mail us at Playreadydrm@proton.me

Update: the API used in this is down due to DDoS
Command used:

poetry run vt dl -al en -sl en -q 1080 Amazon -b cbr -vq hd 0NRT15S2XG06SG5HBV5NQAW3E3

### Config

https://github.com/Playreadydrm/PlayReady-Amazon-Tool/assets/170321722/1fdacab6-d1db-41f4-82f6-a73b5e1286c8

`vinetrimmer.yml` is located within the `/vinetrimmer/` folder.

`decryptor:` either `mp4decrypt` or `packager`

(shaka-packager fails to decrypt files downloaded from MSS manifests)

`tag:` tag for your release group

The CDM can be configured per service or per profile.

```
cdm:
    default: {text}
    Amazon: {text}
```
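A filled-in sketch for illustration only (not a shipped default config): the device name below is the one that appears in the logs later in this commit, and `MYTAG` is a placeholder for your own group tag.

```
decryptor: mp4decrypt
tag: MYTAG
cdm:
    default: hisense_smarttv_he55a7000euwts_sl3000
    Amazon: hisense_smarttv_he55a7000euwts_sl3000
```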
All other options can be left at their defaults unless you know what you are doing.

### General Options

Usage: vt.cmd [OPTIONS] COMMAND [ARGS]...

Options:
| Command line argument | Description | Default Value |
|-----------------------|-------------|---------------|
| -d, --debug | Flag to enable debug logging | False |
| -p, --profile | Profile to use when multiple profiles are defined for a service. | "default" |
| -q, --quality | Download resolution | 1080 |
| -v, --vcodec | Video codec | H264 |
| -a, --acodec | Audio codec | None |
| -vb, --vbitrate | Video bitrate | Max |
| -ab, --abitrate | Audio bitrate | Max |
| -aa, --atmos | Prefer Atmos audio | False |
| -r, --range | Video color range: `HDR`, `HDR10`, `DV`, `SDR` | SDR |
| -w, --wanted | Wanted episodes, e.g. `S01-S05,S07`, `S01E01-S02E03`, `S02-S02E03` | Defaults to all |
| -al, --alang | Language wanted for audio. | Defaults to original language |
| -sl, --slang | Language wanted for subtitles. | Defaults to original language |
| --proxy | Proxy URI to use. If a 2-letter country is provided, it will try to get a proxy from the config. | None |
| -A, --audio-only | Only download audio tracks. | False |
| -S, --subs-only | Only download subtitle tracks. | False |
| -C, --chapters-only | Only download chapters. | False |
| -ns, --no-subs | Do not download subtitle tracks. | False |
| -na, --no-audio | Do not download audio tracks. | False |
| -nv, --no-video | Do not download video tracks. | False |
| -nc, --no-chapters | Do not download chapter tracks. | False |
| -ad, --audio-description | Download audio description tracks. | False |
| --list | Skip downloading and list available tracks and what tracks would have been downloaded. | False |
| --selected | List selected tracks and what tracks are downloaded. | False |
| --cdm | Override the CDM that will be used for decryption. | None |
| --keys | Skip downloading, retrieve the decryption keys (via CDM or Key Vaults) and print them. | False |
| --cache | Disable the use of the CDM and only retrieve decryption keys from Key Vaults. If a needed key cannot be retrieved from any Key Vault, the title is skipped. | False |
| --no-cache | Disable the use of Key Vaults and only retrieve decryption keys from the CDM. | False |
| --no-proxy | Force disable all proxy use. | False |
| -nm, --no-mux | Do not mux the downloaded and decrypted tracks. | False |
| --mux | Force muxing when using --audio-only/--subs-only/--chapters-only. | False |
| -?, -h, --help | Show this message and exit. | |

Commands:

| Alias | Command | Service Link |
|-------|---------------|--------------------------------------------|
| AMZN | Amazon | https://amazon.com, https://primevideo.com |
| ATVP | AppleTVPlus | https://tv.apple.com |
| MAX | Max | https://max.com |
| NF | Netflix | https://netflix.com |

### Amazon-Specific Options

Usage: vt.cmd AMZN [OPTIONS] [TITLE]

Service code for Amazon VOD (https://amazon.com) and Amazon Prime Video (https://primevideo.com).

Authorization: Cookies

Security:
```
UHD@L1/SL3000
FHD@L3(ChromeCDM)/SL2000
SD@L3

Certain SL2000 can do UHD
```
They maintain their own license server, like Netflix; be cautious.

Region is chosen automatically based on the domain extension found in the cookies.
Prime Video-specific code will run if the ASIN is detected to be a Prime Video variant.
Use the 'Amazon Video ASIN Display' Tampermonkey addon to find the ASIN:
https://greasyfork.org/en/scripts/381997-amazon-video-asin-display

vt dl --list -z uk -q 1080 Amazon B09SLGYLK8

The flags below are to be passed after the `AMZN` or `Amazon` keyword in the command.

| Command Line Switch | Description |
|---------------------|-------------|
| -b, --bitrate | Video bitrate mode to download in. CVBR=Constrained Variable Bitrate, CBR=Constant Bitrate. (CVBR or CBR or CVBR+CBR) |
| -c, --cdn | CDN to download from, defaults to the CDN with the highest weight set by Amazon. |
| -vq, --vquality | Manifest quality to request. (SD or HD or UHD) |
| -s, --single | Force single episode/season instead of getting the series ASIN. |
| -am, --amanifest | Manifest to use for audio. Defaults to H265 if the video manifest is missing 640k audio. (CVBR or CBR or H265) |
| -aq, --aquality | Manifest quality to request for audio. Defaults to the same as --quality. (SD or HD or UHD) |
| -ism, --ism | Set manifest override to SmoothStreaming. Defaults to DASH without this flag. |
| -?, -h, --help | Show this message and exit. |

To get UHD/4K with Amazon, navigate to:

```
https://www.primevideo.com/region/eu/ontv/code?ref_=atv_auth_red_aft
```

Log in and get to the code pair page. Extract cookies from that page using [Open Cookies.txt](https://chromewebstore.google.com/detail/open-cookiestxt/gdocmgbfkjnnpapoeobnolbbkoibbcif).

Save them to the path `vinetrimmer/Cookies/Amazon/default.txt`.

When caching cookies, use a profile without a PIN; otherwise it causes errors.

### Peacock

- PCOK bans leaked certs quickly (for 4K); be cautious.

### Example Command

Amazon example:

```bash
poetry run vt dl -al en -sl en --selected -q 2160 -r HDR -w S01E18-S01E25 AMZN -b CBR --ism 0IQZZIJ6W6TT2CXPT6ZOZYX396
```

The above command:
- gets English subtitles + audio,
- selects the HDR + 4K track,
- gets episodes S01E18 to S01E25 from Amazon,
- with CBR bitrate,
- tries to force ISM,
- and the title ID is 0IQZZIJ6W6TT2CXPT6ZOZYX396.

AppleTV example:

```bash
poetry run vt dl -al en -sl en --list -q 720 --proxy http://192.168.0.99:9766 -w S01E01 ATVP umc.cmc.1nfdfd5zlk05fo1bwwetzldy3
```

The above command:
- gets English subtitles + audio,
- lists all possible qualities,
- selects the 720p video track,
- uses the proxy for licensing,
- gets the first episode of the first season (i.e. S01E01)
- of the title umc.cmc.1nfdfd5zlk05fo1bwwetzldy3.

## Proxy

I recommend [Windscribe](https://windscribe.com/). You can sign up and get 10 GB of traffic credit every month for free. We use the VPN for everything except downloading video/audio.
Tested so far on Amazon, AppleTVPlus, Max.

### Steps:

1. For each service, within the get_tracks() function we do the following (see the fuller sketch below).

```python
for track in tracks:
    track.needs_proxy = False
```

This flag signals that the track does not need a proxy, and a proxy will not be passed to the downloader even if one is given in the CLI options.

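A self-contained sketch of the idea (`Track` here is a stand-in dataclass, not vinetrimmer's actual class; only the `needs_proxy` attribute is taken from the snippet above):

```python
from dataclasses import dataclass

@dataclass
class Track:
    url: str
    needs_proxy: bool = True  # downloader applies --proxy only when True

def get_tracks(title):
    # ... manifest parsing elided ...
    tracks = [Track("https://example.com/seg.mp4")]
    for track in tracks:
        # Media segments are fetched directly; licensing requests still
        # go through the proxy given on the CLI.
        track.needs_proxy = False
    return tracks
```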
2. Download the Windscribe app and install it.

3. Go to `Options` -> `Connection` -> `Split Tunneling`. Enable it.

Set `Mode` to `Inclusive`.

4. Go to `Options` -> `Connection` -> `Proxy Gateway`. Enable it. Select `Proxy Type` as `HTTP`.

Copy the `IP` field (it will look something like `192.168.0.141:9766`).

Pass the copied value to Vinetrimmer with the proxy flag, like below.

```bash
...(other flags)... --proxy http://192.168.0.141:9766 .......
```

## Other

- For `--keys` to work with ATVP you also need to pass the `--no-subs` flag.
- Nuitka compilation is an option for running on various Linux distributions.
- Errors arise when running VT within Docker or Conda-like Python distributions. Make sure to use a proper Python 3.
- To use the programs in the `scripts` folder, first activate the venv, then:

```bash
poetry run python scripts/ParseKeybox.py
```
1050 amazon_old.py Normal file
File diff suppressed because it is too large
@@ -1 +0,0 @@
N_m3u8DL-RE.exe http://avodsls3ww-s.akamaihd.net/ondemand/iad_2/c5a2/7992/6e31/4ed5-8011-893c8d4e98a6/0bc9f599-85c7-450d-b829-b69fb27d4bd6.ism/manifest --thread-count 96 --log-level ERROR --write-meta-json False --http-request-timeout 8
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1 +0,0 @@
ffmpeg -i correct_file.eac3 -map 0 -c:a copy correct_file.mp4
Binary file not shown.
Binary file not shown.
23 binary.txt
@@ -1,23 +0,0 @@
dl -al en -sl en --keys -q 2160 --cdm hisense_smarttv_he55a7000euwts_sl3000 -r HDR --selected -w S05E08-S05E24 AMZN -b CBR -vq UHD 0IQZZIJ6W6TT2CXPT6ZOZYX396


--include-data-files=/path/to/scan=folder_name=**/*.txt
--include-data-files=/path/to/file/*.txt=folder_name/some.txt
--include-data-files="./vinetrimmer/services/*.py"=vinetrimmer/services/

--onefile --> if this flag then figure out how to set the directories to NOT TEMP folder

python -m nuitka --onefile --assume-yes-for-downloads --windows-console-mode=disable --show-progress --standalone --output-dir=dist --static-libpython=no vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer --include-data-dir=./binaries/=binaries --include-data-dir=./scripts/=scripts
python -m nuitka --onefile --standalone --output-dir=dist vinetrimmer1.py --include-data-dir=./vinetrimmer/services/=vinetrimmer/services --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts
python -m nuitka --onefile --standalone --windows-console-mode=attach --output-dir=dist vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-dir=./vinetrimmer/services/*.py=vinetrimmer/services/=**/*.py --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/
python -m nuitka --onefile --standalone --windows-console-mode=attach --output-dir=dist vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-files=./vinetrimmer/services/*.py=vinetrimmer/services/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/
python -m nuitka --mode=standalone --output-dir=dist --windows-console-mode=force vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-files="./vinetrimmer/services/*.py"=vinetrimmer/services/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/
python -m nuitka --onefile --follow-imports --output-dir=dist --windows-console-mode=force vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-files="./vinetrimmer/services/*.py"=vinetrimmer/services/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/ --include-data-files="./vinetrimmer/config/*.py"=vinetrimmer/config/
python -m nuitka --onefile --follow-imports --output-dir=dist --standalone --clang --windows-console-mode=force --show-memory vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-files="./vinetrimmer/services/*.py"=vinetrimmer/services/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/ --include-data-files="./vinetrimmer/config/*.py"=vinetrimmer/config/

python -m nuitka --follow-imports --output-dir=dist --standalone --clang --windows-console-mode=force --show-memory vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-files="./vinetrimmer/services/*.py"=vinetrimmer/services/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/ --include-data-files="./vinetrimmer/config/*.py"=vinetrimmer/config/
python -m nuitka --onefile --follow-imports --output-dir=dist --standalone --clang --windows-console-mode=force --show-memory vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/ --include-data-dir=./vinetrimmer/config/=vinetrimmer/config/ --include-data-dir=./vinetrimmer/config/Services/=vinetrimmer/config/Services/ --include-data-dir=./scripts/=scripts/ --include-data-files="./vinetrimmer/config/*.py"=vinetrimmer/config/
nuitka --output-dir=dist --standalone --windows-console-mode=force vinetrimmer1.py --include-data-dir=./vinetrimmer/=vinetrimmer/


nuitka --onefile --output-dir=dist --windows-console-mode=force vt.py --include-data-dir=./vinetrimmer/=vinetrimmer/
38 commands.txt
@@ -1,36 +1,42 @@
https://www.primevideo.com/region/eu/storefront

poetry run vt dl -al en -sl en -r HDR --list AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4
poetry run vt dl -al en -sl en --list AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4
poetry run vt dl -al en --selected --keys AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4
poetry run vt dl -al en --selected AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4

poetry run vt dl -q 2160 -al en -sl en --list AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4
poetry run vt dl -q 2160 -al en -sl en --keys AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4 --bitrate CVBR+CBR
poetry run vt dl -al en -sl en --selected AMZN -b CBR https://www.primevideo.com/detail/0I1GTXP9ZKTV7AAD7E1LCWJCUX/
poetry run vt dl -al en -sl en -r HDR --list Amazon 0SGEGC629FCXQ5DJ9ORNE42PXK
poetry run vt dl -al en -sl en --list Amazon 0SGEGC629FCXQ5DJ9ORNE42PXK
poetry run vt dl -al en --selected --keys Amazon 0SGEGC629FCXQ5DJ9ORNE42PXK
poetry run vt dl -al en --selected Amazon 0SGEGC629FCXQ5DJ9ORNE42PXK

poetry run vt dl -al en -sl en -q 2160 --keys -r HDR AMZN -b CBR 0OSAJR8S2YWRSQCYS4J8MEGEXI
poetry run vt dl -al en -sl en -q 2160 -r HDR --selected -w S05E08-S05E24 AMZN -b CBR 0IQZZIJ6W6TT2CXPT6ZOZYX396
poetry run vt dl -q 2160 -al en -sl en -r HDR --list Amazon 0H7LY5ZKKBM1MIW0244WE9O2C4
poetry run vt dl -q 2160 -al en -sl en --selected --keys Amazon 0H7LY5ZKKBM1MIW0244WE9O2C4

python vinetrimmer1.py dl -al en -sl en -q 2160 -r HDR --selected -w S05E09-S05E24 AMZN -b CBR 0IQZZIJ6W6TT2CXPT6ZOZYX396
poetry run vt dl -al en -sl en --selected -q 2160 -r HDR -w S01E18-S01E25 AMZN -b CBR --ism 0IQZZIJ6W6TT2CXPT6ZOZYX396
poetry run vt dl -q 2160 -al en -sl en --keys --no-cache --vcodec H265 --selected AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4 --bitrate CBR
poetry run vt dl -q 2160 -al en -sl en --keys --no-cache --debug --vcodec H265 --selected AMZN 0H7LY5ZKKBM1MIW0244WE9O2C4 --bitrate CBR

Atmos audio download AMZN to fix --> poetry run vt dl -al en -aa -sl en --selected --debug -w S01E01 -A AMZN -b CBR --ism 0HAQAA7JM43QWX0H6GUD3IOF70

http://ABHIRCQAAAAAAAAMCX3W7WLVKL54A.s3-bom-ww.cf.smooth.row.aiv-cdn.net/e5b0/2fe1/032c/4fae-b896-aca9d8bef3d4/170b36b1-856d-4c69-bbf6-feb6c979185a.ism/manifest
poetry run vt dl -al en -sl en -r HDR -w S01E01 --list -q 2160 AMZN https://www.primevideo.com/detail/0HU52DR3U1R0FGI3KSUL00XYY7
poetry run vt dl -al en -sl en -r HDR -w S01E01 --list --debug -q 2160 Amazon https://www.primevideo.com/detail/0HU52DR3U1R0FGI3KSUL00XYY7/
https://www.primevideo.com/detail/0HU52DR3U1R0FGI3KSUL00XYY7/
https://ABAKS6NAAAAAAAAMBIBDKKUP3ONNU.s3-iad-2.cf.smooth.row.aiv-cdn.net/357a/1bb0/c1f3/4a6b-b709-d6f2edf5b709/15eab8ec-d8ac-4c23-96fc-f5d89f459829.ism/manifest

http://ABHIRCQAAAAAAAAMHLTVNGLHRCITQ.s3-bom-ww.cf.smooth.row.aiv-cdn.net/e7ab/7c49/9743/4e53-ab5c-6d15516ecf15/52bf7e61-51cd-4e5d-bd68-834706f17789.ism/manifest
https://www.primevideo.com/region/eu/detail/0KYRVT4JDB957NXZO72E2MIFW5/


https://m-5884s3.ll.smooth.row.aiv-cdn.net/iad_2/3572/bbdc/73b4/404d-a100-802b1d9de4c6/862e2506-c20e-4ba7-bacc-d6b4775e7b62.ism/manifest

Max show
poetry run vt dl -al en -sl en -w S01E01 Max https://play.max.com/show/c8ea8e19-cae7-4683-9b62-cdbbed744784

UHD
poetry run vt dl -al en -sl en --keys Max https://play.max.com/show/5756c2bf-36f8-4890-b1f9-ef168f1d8e9c
poetry run vt dl -al en -sl en -v H265 --keys Max https://play.max.com/show/5756c2bf-36f8-4890-b1f9-ef168f1d8e9c

poetry run vt dl -al en -sl en -w S02E05-S02E10 --selected --proxy http://192.168.0.99:9766 Max
poetry run vt dl -al en -sl en --list -w S01E01 --proxy http://192.168.0.99:9766 Max
poetry run vt dl -al en -sl en -v H265 --list -w S01E01 --proxy http://192.168.0.99:9766 Max

poetry run vt dl -al all --selected --proxy http://192.168.0.99:9766 --debug -w S01E01 ATVP umc.cmc.7gvn6fekgfpq5fc72pgi1c47o

poetry run vt dl -al en -sl en --selected --debug -q 720 --proxy http://192.168.0.99:9766 -w S01E01 ATVP umc.cmc.1nfdfd5zlk05fo1bwwetzldy3
poetry run vt dl -al en -sl en --selected --proxy http://192.168.0.99:9766 -w S01E01 ATVP umc.cmc.1nfdfd5zlk05fo1bwwetzldy3


poetry run vt dl -al en -sl en --cdm hisense_smarttv_hu50a6100uw_sl3000 --selected --proxy http://192.168.0.99:9766 --keys -w S01E02 ATVP umc.cmc.1nfdfd5zlk05fo1bwwetzldy3
poetry run vt dl -al en -sl en --cdm hisense_smarttv_hu50a6100uw_sl3000 --selected --proxy http://192.168.0.99:9766 --keys -q 2160 ATVP umc.cmc.apzybj6eqf6pzccd97kev7bs
54 fix.txt
@@ -1,54 +0,0 @@
D:\PlayReady-Amazon-Tool-main>poetry run vt dl -al en -sl en --selected --keys --cdm hisense_smarttv_he55a7000euwts_sl3000 AMZN -vq UHD -b CVBR+CBR https://www.primevideo.com/detail/0I1GTXP9ZKTV7AAD7E1LCWJCUX/
2025-02-07 22:26:57 [I] vt : vinetrimmer - Widevine DRM downloader and decrypter
2025-02-07 22:26:57 [I] vt : [Root Config] : D:\PlayReady-Amazon-Tool-main\vinetrimmer\vinetrimmer.yml
2025-02-07 22:26:57 [I] vt : [Service Configs] : D:\PlayReady-Amazon-Tool-main\vinetrimmer\Services
2025-02-07 22:26:57 [I] vt : [Cookies] : D:\PlayReady-Amazon-Tool-main\vinetrimmer\Cookies
2025-02-07 22:26:57 [I] vt : [CDM Devices] : D:\PlayReady-Amazon-Tool-main\vinetrimmer\devices
2025-02-07 22:26:57 [I] vt : [Cache] : D:\PlayReady-Amazon-Tool-main\vinetrimmer\Cache
2025-02-07 22:26:57 [I] vt : [Logs] : D:\PlayReady-Amazon-Tool-main\vinetrimmer\Logs
2025-02-07 22:26:57 [I] vt : [Temp Files] : D:\PlayReady-Amazon-Tool-main\Temp
2025-02-07 22:26:57 [I] vt : [Downloads] : D:\PlayReady-Amazon-Tool-main\Downloads
2025-02-07 22:26:57 [I] dl : + 1 Local Vault
2025-02-07 22:26:57 [I] dl : + 0 Remote Vaults
2025-02-07 22:26:57 [I] dl : + Loaded Device: hisense_smarttv_he55a7000euwts_sl3000 (L3000)
2025-02-07 22:26:57 [I] AMZN : Getting Account Region
2025-02-07 22:26:59 [I] AMZN : + Region: us
2025-02-07 22:26:59 [I] AMZN : + Using cached device bearer
2025-02-07 22:26:59 [I] AMZN : Retrieving Titles
2025-02-07 22:27:00 [I] Titles : Title: I Was Not Ready Da
2025-02-07 22:27:00 [I] AMZN : Getting tracks for I Was Not Ready Da (2020) [amzn1.dv.gti.30baee18-aa4c-1fc2-72cc-6e11d5e627d9]
2025-02-07 22:27:01 [I] AMZN : + Detected encodingVersion=2
2025-02-07 22:27:01 [I] AMZN : + Downloading CVBR MPD
2025-02-07 22:27:02 [I] AMZN : + Detected encodingVersion=2
2025-02-07 22:27:02 [I] AMZN : + Downloading CBR MPD
Traceback (most recent call last):
  File "<string>", line 1, in <module>
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1161, in __call__
    return self.main(*args, **kwargs)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1082, in main
    rv = self.invoke(ctx)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1443, in invoke
    return ctx.invoke(self.callback, **ctx.params)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 788, in invoke
    return __callback(*args, **kwargs)
  File "D:\PlayReady-Amazon-Tool-main\vinetrimmer\vinetrimmer.py", line 72, in main
    dl()
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1161, in __call__
    return self.main(*args, **kwargs)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1082, in main
    rv = self.invoke(ctx)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1697, in invoke
    return _process_result(sub_ctx.command.invoke(sub_ctx))
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 1666, in _process_result
    value = ctx.invoke(self._result_callback, value, **ctx.params)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\core.py", line 788, in invoke
    return __callback(*args, **kwargs)
  File "D:\PlayReady-Amazon-Tool-main\.venv\lib\site-packages\click\decorators.py", line 33, in new_func
    return f(get_current_context(), *args, **kwargs)
  File "D:\PlayReady-Amazon-Tool-main\vinetrimmer\commands\dl.py", line 309, in result
    title.tracks.add(service.get_tracks(title), warn_only=True)
  File "D:\PlayReady-Amazon-Tool-main\vinetrimmer\services\amazon.py", line 321, in get_tracks
    manifest, chosen_manifest, tracks = self.get_best_quality(title)
  File "D:\PlayReady-Amazon-Tool-main\vinetrimmer\services\amazon.py", line 1051, in get_best_quality
    best_quality = max(track_list, key=lambda x: x['max_size'])
TypeError: '>' not supported between instances of 'NoneType' and 'NoneType'
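The crash above comes from `max()` comparing entries whose `max_size` is `None`. A minimal guard, assuming `track_list` holds dicts with a `max_size` key (a hypothetical sketch, not the repo's actual fix):

```python
def get_best_quality(track_list):
    """Pick the entry with the largest max_size, skipping None values."""
    candidates = [t for t in track_list if t.get("max_size") is not None]
    if not candidates:
        raise ValueError("no manifest reported a max_size")
    return max(candidates, key=lambda t: t["max_size"])
```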
@@ -1,6 +1,5 @@
@echo off
python -m pip install poetry==1.8.5
python -m pip install poetry
poetry config virtualenvs.in-project true
poetry lock --no-update
poetry install
pause
2268 poetry.lock generated
File diff suppressed because it is too large
@@ -15,7 +15,7 @@ beautifulsoup4 = "~4.8.2"
click = "^8.0.1"
cffi = "^1.16.0"
coloredlogs = "^15.0"
construct = "2.8.8"
construct = "2.10.70"
crccheck = "^1.0"
cryptography = "^43.0.3"
ecpy = "^1.2.5"
@@ -26,28 +26,23 @@ langcodes = { extras = ["data"], version = "^3.1.0" }
lxml = "^4.6.3"
m3u8 = "^0.9.0"
marisa-trie = "^1.1.0"
poetry = "1.8.5"
pproxy = "^2.7.7"
protobuf3 = { path = "./scripts/protobuf3", develop = true }
protobuf = "^3.13.0"
pycaption = "^2.1.1"
pycryptodome = "^3.21.0"
pycryptodomex = "^3.4.3"
pyhulu = "^1.1.2"
pymediainfo = "^5.0.3"
PyMySQL = { extras = ["rsa"], version = "^1.0.2" }
pymp4 = "^1.4.0"
pyplayready = { path = "./scripts/pyplayready", develop = true }
pywidevine = { path = "./scripts/pywidevine", develop = true }
pysubs2 = "^1.6.1"
PyYAML = "^6.0.1"
requests = { extras = ["socks"], version = "2.32.3" }
requests = { extras = ["socks"], version = "2.29.0" }
tldextract = "^3.1.0"
toml = "^0.10.2"
tqdm = "^4.67.0"
Unidecode = "^1.2.0"
validators = "^0.18.2"
websocket-client = "^1.1.0"
xmltodict = "^0.14.0"
xmltodict = "^0.13.0"
yt-dlp = "^2022.11.11"

[tool.poetry.dev-dependencies]
@@ -13,13 +13,13 @@ http.headers.update({
})
# get player fragment page
fragment = http.get(sys.argv[1].replace("/videos/", "/player5_fragment/")).text
# get encrypted manifest.xml urls for both hls and dash
# get encrypted manifest urls for both hls and dash
encrypted_manifests = {k: bytes.fromhex(re.findall(
    r'<source\s+type="application/' + v + r'"\s+src=".+?/e-stream-url\?stream=(.+?)"',
    fragment
)[0][0]) for k, v in {"hls": "x-mpegURL", "dash": r"dash\+xml"}.items()}

# decrypt all manifest.xml urls in manifests
# decrypt all manifest urls in manifests
m = re.search(r"^\s*chabi:\s*'(.+?)'", fragment, re.MULTILINE)
if not m:
    raise ValueError("Unable to get key")
@@ -1,33 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Copyright 2007 Google Inc. All Rights Reserved.

__version__ = '3.20.2'
@@ -1,26 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/any.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _ANY._serialized_start=46
  _ANY._serialized_end=84
# @@protoc_insertion_point(module_scope)
@@ -1,32 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/api.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _API._serialized_start=113
  _API._serialized_end=370
  _METHOD._serialized_start=373
  _METHOD._serialized_end=586
  _MIXIN._serialized_start=588
  _MIXIN._serialized_end=623
# @@protoc_insertion_point(module_scope)
@@ -1,35 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/compiler/plugin.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb'
  _VERSION._serialized_start=101
  _VERSION._serialized_end=171
  _CODEGENERATORREQUEST._serialized_start=174
  _CODEGENERATORREQUEST._serialized_end=360
  _CODEGENERATORRESPONSE._serialized_start=363
  _CODEGENERATORRESPONSE._serialized_end=684
  _CODEGENERATORRESPONSE_FILE._serialized_start=499
  _CODEGENERATORRESPONSE_FILE._serialized_end=626
  _CODEGENERATORRESPONSE_FEATURE._serialized_start=628
  _CODEGENERATORRESPONSE_FEATURE._serialized_end=684
# @@protoc_insertion_point(module_scope)
File diff suppressed because it is too large
@@ -1,177 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Provides a container for DescriptorProtos."""

__author__ = 'matthewtoia@google.com (Matt Toia)'

import warnings


class Error(Exception):
  pass


class DescriptorDatabaseConflictingDefinitionError(Error):
  """Raised when a proto is added with the same name & different descriptor."""


class DescriptorDatabase(object):
  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""

  def __init__(self):
    self._file_desc_protos_by_file = {}
    self._file_desc_protos_by_symbol = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this database.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    Raises:
      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
        add a proto with the same name but different definition than an
        existing proto in the database.
    """
    proto_name = file_desc_proto.name
    if proto_name not in self._file_desc_protos_by_file:
      self._file_desc_protos_by_file[proto_name] = file_desc_proto
    elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
      raise DescriptorDatabaseConflictingDefinitionError(
          '%s already added, but with different descriptor.' % proto_name)
    else:
      return

    # Add all the top-level descriptors to the index.
    package = file_desc_proto.package
    for message in file_desc_proto.message_type:
      for name in _ExtractSymbols(message, package):
        self._AddSymbol(name, file_desc_proto)
    for enum in file_desc_proto.enum_type:
      self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto)
      for enum_value in enum.value:
        self._file_desc_protos_by_symbol[
            '.'.join((package, enum_value.name))] = file_desc_proto
    for extension in file_desc_proto.extension:
      self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto)
    for service in file_desc_proto.service:
      self._AddSymbol(('.'.join((package, service.name))), file_desc_proto)

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending to a .proto file. The
    proto with the given name will have to have been added to this database
    using the Add method or else an error will be raised.

    Args:
      name: The file name to find.

    Returns:
      The file descriptor proto matching the name.

    Raises:
      KeyError if no file by the given name was added.
    """

    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file descriptor's
    package and any containing messages. Some examples:

    'some.package.name.Message'
    'some.package.name.Message.NestedEnum'
    'some.package.name.Message.some_field'

    The file descriptor proto containing the specified symbol must be added to
    this database using the Add method or else an error will be raised.

    Args:
      symbol: The fully qualified symbol name.

    Returns:
      The file descriptor proto containing the symbol.

    Raises:
      KeyError if no file contains the specified symbol.
    """
    try:
      return self._file_desc_protos_by_symbol[symbol]
    except KeyError:
      # Fields, enum values, and nested extensions are not in
      # _file_desc_protos_by_symbol. Try to find the top level
      # descriptor. Non-existent nested symbol under a valid top level
      # descriptor can also be found. The behavior is the same with
      # protobuf C++.
      top_level, _, _ = symbol.rpartition('.')
      try:
        return self._file_desc_protos_by_symbol[top_level]
      except KeyError:
        # Raise the original symbol as a KeyError for better diagnostics.
        raise KeyError(symbol)

  def FindFileContainingExtension(self, extendee_name, extension_number):
    # TODO(jieluo): implement this API.
    return None

  def FindAllExtensionNumbers(self, extendee_name):
    # TODO(jieluo): implement this API.
    return []

  def _AddSymbol(self, name, file_desc_proto):
    if name in self._file_desc_protos_by_symbol:
      warn_msg = ('Conflict register for file "' + file_desc_proto.name +
                  '": ' + name +
                  ' is already defined in file "' +
                  self._file_desc_protos_by_symbol[name].name + '"')
      warnings.warn(warn_msg, RuntimeWarning)
    self._file_desc_protos_by_symbol[name] = file_desc_proto


def _ExtractSymbols(desc_proto, package):
  """Pulls out all the symbols from a descriptor proto.

  Args:
    desc_proto: The proto to extract symbols from.
    package: The package containing the descriptor type.

  Yields:
    The fully qualified name found in the descriptor.
  """
  message_name = package + '.' + desc_proto.name if package else desc_proto.name
  yield message_name
  for nested_type in desc_proto.nested_type:
    for symbol in _ExtractSymbols(nested_type, message_name):
      yield symbol
  for enum_type in desc_proto.enum_type:
    yield '.'.join((message_name, enum_type.name))
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@@ -1,26 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/duration.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _DURATION._serialized_start=51
  _DURATION._serialized_end=93
# @@protoc_insertion_point(module_scope)
@@ -1,26 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/empty.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _EMPTY._serialized_start=48
  _EMPTY._serialized_end=55
# @@protoc_insertion_point(module_scope)
@@ -1,26 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/field_mask.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _FIELDMASK._serialized_start=53
  _FIELDMASK._serialized_end=79
# @@protoc_insertion_point(module_scope)
@ -1,443 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Adds support for parameterized tests to Python's unittest TestCase class.
|
||||
|
||||
A parameterized test is a method in a test case that is invoked with different
|
||||
argument tuples.
|
||||
|
||||
A simple example:
|
||||
|
||||
class AdditionExample(parameterized.TestCase):
|
||||
@parameterized.parameters(
|
||||
(1, 2, 3),
|
||||
(4, 5, 9),
|
||||
(1, 1, 3))
|
||||
def testAddition(self, op1, op2, result):
|
||||
self.assertEqual(result, op1 + op2)
|
||||
|
||||
|
||||
Each invocation is a separate test case and properly isolated just
|
||||
like a normal test method, with its own setUp/tearDown cycle. In the
|
||||
example above, there are three separate testcases, one of which will
|
||||
fail due to an assertion error (1 + 1 != 3).
|
||||
|
||||
Parameters for individual test cases can be tuples (with positional parameters)
|
||||
or dictionaries (with named parameters):
|
||||
|
||||
class AdditionExample(parameterized.TestCase):
|
||||
@parameterized.parameters(
|
||||
{'op1': 1, 'op2': 2, 'result': 3},
|
||||
{'op1': 4, 'op2': 5, 'result': 9},
|
||||
)
|
||||
def testAddition(self, op1, op2, result):
|
||||
self.assertEqual(result, op1 + op2)
|
||||
|
||||
If a parameterized test fails, the error message will show the
|
||||
original test name (which is modified internally) and the arguments
|
||||
for the specific invocation, which are part of the string returned by
|
||||
the shortDescription() method on test cases.
|
||||
|
||||
The id method of the test, used internally by the unittest framework,
|
||||
is also modified to show the arguments. To make sure that test names
|
||||
stay the same across several invocations, object representations like
|
||||
|
||||
>>> class Foo(object):
|
||||
... pass
|
||||
>>> repr(Foo())
|
||||
'<__main__.Foo object at 0x23d8610>'
|
||||
|
||||
are turned into '<__main__.Foo>'. For even more descriptive names,
especially in test logs, you can use the named_parameters decorator. In
this case, only tuples are supported, and the first parameter has to
be a string (or an object that returns an apt name when converted via
str()):

  class NamedExample(parameterized.TestCase):
    @parameterized.named_parameters(
       ('Normal', 'aa', 'aaa', True),
       ('EmptyPrefix', '', 'abc', True),
       ('BothEmpty', '', '', True))
    def testStartsWith(self, prefix, string, result):
      self.assertEqual(result, string.startswith(prefix))
|
||||
|
||||
Named tests also have the benefit that they can be run individually
|
||||
from the command line:
|
||||
|
||||
$ testmodule.py NamedExample.testStartsWithNormal
|
||||
.
|
||||
--------------------------------------------------------------------
|
||||
Ran 1 test in 0.000s
|
||||
|
||||
OK
|
||||
|
||||
Parameterized Classes
|
||||
=====================
|
||||
If invocation arguments are shared across test methods in a single
|
||||
TestCase class, instead of decorating all test methods
|
||||
individually, the class itself can be decorated:
|
||||
|
||||
  @parameterized.parameters(
    (1, 2, 3),
    (4, 5, 9))
  class ArithmeticTest(parameterized.TestCase):
    def testAdd(self, arg1, arg2, result):
      self.assertEqual(arg1 + arg2, result)

    def testSubtract(self, arg1, arg2, result):
      self.assertEqual(result - arg1, arg2)
|
||||
|
||||
Inputs from Iterables
|
||||
=====================
|
||||
If parameters should be shared across several test cases, or are dynamically
|
||||
created from other sources, a single non-tuple iterable can be passed into
|
||||
the decorator. This iterable will be used to obtain the test cases:
|
||||
|
||||
  class AdditionExample(parameterized.TestCase):
    @parameterized.parameters(
      (c.op1, c.op2, c.result) for c in testcases
    )
    def testAddition(self, op1, op2, result):
      self.assertEqual(result, op1 + op2)
|
||||
|
||||
|
||||
Single-Argument Test Methods
|
||||
============================
|
||||
If a test method takes only one argument, the single argument does not need to
|
||||
be wrapped into a tuple:
|
||||
|
||||
class NegativeNumberExample(parameterized.TestCase):
|
||||
@parameterized.parameters(
|
||||
-1, -3, -4, -5
|
||||
)
|
||||
def testIsNegative(self, arg):
|
||||
self.assertTrue(IsNegative(arg))
|
||||
"""
|
||||
|
||||
__author__ = 'tmarek@google.com (Torsten Marek)'
|
||||
|
||||
import functools
|
||||
import re
|
||||
import types
|
||||
import unittest
|
||||
import uuid
|
||||
|
||||
try:
|
||||
# Since python 3
|
||||
import collections.abc as collections_abc
|
||||
except ImportError:
|
||||
# Won't work after python 3.8
|
||||
import collections as collections_abc
|
||||
|
||||
ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
|
||||
_SEPARATOR = uuid.uuid1().hex
|
||||
_FIRST_ARG = object()
|
||||
_ARGUMENT_REPR = object()
|
||||
|
||||
|
||||
def _CleanRepr(obj):
|
||||
return ADDR_RE.sub(r'<\1>', repr(obj))
|
||||
|
||||
|
||||
# Helper function formerly from the unittest module, removed from it in
|
||||
# Python 2.7.
|
||||
def _StrClass(cls):
|
||||
return '%s.%s' % (cls.__module__, cls.__name__)
|
||||
|
||||
|
||||
def _NonStringIterable(obj):
|
||||
return (isinstance(obj, collections_abc.Iterable) and
|
||||
not isinstance(obj, str))
|
||||
|
||||
|
||||
def _FormatParameterList(testcase_params):
|
||||
if isinstance(testcase_params, collections_abc.Mapping):
|
||||
return ', '.join('%s=%s' % (argname, _CleanRepr(value))
|
||||
for argname, value in testcase_params.items())
|
||||
elif _NonStringIterable(testcase_params):
|
||||
return ', '.join(map(_CleanRepr, testcase_params))
|
||||
else:
|
||||
return _FormatParameterList((testcase_params,))
|
||||
|
||||
|
||||
class _ParameterizedTestIter(object):
|
||||
"""Callable and iterable class for producing new test cases."""
|
||||
|
||||
def __init__(self, test_method, testcases, naming_type):
|
||||
"""Returns concrete test functions for a test and a list of parameters.
|
||||
|
||||
The naming_type is used to determine the name of the concrete
|
||||
functions as reported by the unittest framework. If naming_type is
|
||||
_FIRST_ARG, the testcases must be tuples, and the first element must
|
||||
have a string representation that is a valid Python identifier.
|
||||
|
||||
Args:
|
||||
test_method: The decorated test method.
|
||||
testcases: (list of tuple/dict) A list of parameter
|
||||
tuples/dicts for individual test invocations.
|
||||
naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR.
|
||||
"""
|
||||
self._test_method = test_method
|
||||
self.testcases = testcases
|
||||
self._naming_type = naming_type
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
raise RuntimeError('You appear to be running a parameterized test case '
|
||||
'without having inherited from parameterized.'
|
||||
'TestCase. This is bad because none of '
|
||||
'your test cases are actually being run.')
|
||||
|
||||
def __iter__(self):
|
||||
test_method = self._test_method
|
||||
naming_type = self._naming_type
|
||||
|
||||
def MakeBoundParamTest(testcase_params):
|
||||
@functools.wraps(test_method)
|
||||
def BoundParamTest(self):
|
||||
if isinstance(testcase_params, collections_abc.Mapping):
|
||||
test_method(self, **testcase_params)
|
||||
elif _NonStringIterable(testcase_params):
|
||||
test_method(self, *testcase_params)
|
||||
else:
|
||||
test_method(self, testcase_params)
|
||||
|
||||
if naming_type is _FIRST_ARG:
|
||||
# Signal the metaclass that the name of the test function is unique
|
||||
# and descriptive.
|
||||
BoundParamTest.__x_use_name__ = True
|
||||
BoundParamTest.__name__ += str(testcase_params[0])
|
||||
testcase_params = testcase_params[1:]
|
||||
elif naming_type is _ARGUMENT_REPR:
|
||||
# __x_extra_id__ is used to pass naming information to the __new__
|
||||
# method of TestGeneratorMetaclass.
|
||||
# The metaclass will make sure to create a unique, but nondescriptive
|
||||
# name for this test.
|
||||
BoundParamTest.__x_extra_id__ = '(%s)' % (
|
||||
_FormatParameterList(testcase_params),)
|
||||
else:
|
||||
raise RuntimeError('%s is not a valid naming type.' % (naming_type,))
|
||||
|
||||
BoundParamTest.__doc__ = '%s(%s)' % (
|
||||
BoundParamTest.__name__, _FormatParameterList(testcase_params))
|
||||
if test_method.__doc__:
|
||||
BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
|
||||
return BoundParamTest
|
||||
return (MakeBoundParamTest(c) for c in self.testcases)
|
||||
|
||||
|
||||
def _IsSingletonList(testcases):
|
||||
"""True iff testcases contains only a single non-tuple element."""
|
||||
return len(testcases) == 1 and not isinstance(testcases[0], tuple)
|
||||
|
||||
|
||||
def _ModifyClass(class_object, testcases, naming_type):
|
||||
assert not getattr(class_object, '_id_suffix', None), (
|
||||
'Cannot add parameters to %s,'
|
||||
' which already has parameterized methods.' % (class_object,))
|
||||
class_object._id_suffix = id_suffix = {}
|
||||
# We change the size of __dict__ while we iterate over it,
|
||||
# which Python 3.x will complain about, so use copy().
|
||||
for name, obj in class_object.__dict__.copy().items():
|
||||
if (name.startswith(unittest.TestLoader.testMethodPrefix)
|
||||
and isinstance(obj, types.FunctionType)):
|
||||
delattr(class_object, name)
|
||||
methods = {}
|
||||
_UpdateClassDictForParamTestCase(
|
||||
methods, id_suffix, name,
|
||||
_ParameterizedTestIter(obj, testcases, naming_type))
|
||||
for name, meth in methods.items():
|
||||
setattr(class_object, name, meth)
|
||||
|
||||
|
||||
def _ParameterDecorator(naming_type, testcases):
|
||||
"""Implementation of the parameterization decorators.
|
||||
|
||||
Args:
|
||||
naming_type: The naming type.
|
||||
testcases: Testcase parameters.
|
||||
|
||||
Returns:
|
||||
A function for modifying the decorated object.
|
||||
"""
|
||||
def _Apply(obj):
|
||||
if isinstance(obj, type):
|
||||
_ModifyClass(
|
||||
obj,
|
||||
list(testcases) if not isinstance(testcases, collections_abc.Sequence)
|
||||
else testcases,
|
||||
naming_type)
|
||||
return obj
|
||||
else:
|
||||
return _ParameterizedTestIter(obj, testcases, naming_type)
|
||||
|
||||
if _IsSingletonList(testcases):
|
||||
assert _NonStringIterable(testcases[0]), (
|
||||
'Single parameter argument must be a non-string iterable')
|
||||
testcases = testcases[0]
|
||||
|
||||
return _Apply
|
||||
|
||||
|
||||
def parameters(*testcases): # pylint: disable=invalid-name
|
||||
"""A decorator for creating parameterized tests.
|
||||
|
||||
See the module docstring for a usage example.
|
||||
Args:
|
||||
*testcases: Parameters for the decorated method, either a single
|
||||
iterable, or a list of tuples/dicts/objects (for tests
|
||||
with only one argument).
|
||||
|
||||
Returns:
|
||||
A test generator to be handled by TestGeneratorMetaclass.
|
||||
"""
|
||||
return _ParameterDecorator(_ARGUMENT_REPR, testcases)
|
||||
|
||||
|
||||
def named_parameters(*testcases): # pylint: disable=invalid-name
|
||||
"""A decorator for creating parameterized tests.
|
||||
|
||||
See the module docstring for a usage example. The first element of
|
||||
each parameter tuple should be a string and will be appended to the
|
||||
name of the test method.
|
||||
|
||||
Args:
|
||||
*testcases: Parameters for the decorated method, either a single
|
||||
iterable, or a list of tuples.
|
||||
|
||||
Returns:
|
||||
A test generator to be handled by TestGeneratorMetaclass.
|
||||
"""
|
||||
return _ParameterDecorator(_FIRST_ARG, testcases)
|
||||
|
||||
|
||||
class TestGeneratorMetaclass(type):
|
||||
"""Metaclass for test cases with test generators.
|
||||
|
||||
A test generator is an iterable in a testcase that produces callables. These
|
||||
callables must be single-argument methods. These methods are injected into
|
||||
the class namespace and the original iterable is removed. If the name of the
|
||||
iterable conforms to the test pattern, the injected methods will be picked
|
||||
up as tests by the unittest framework.
|
||||
|
||||
In general, it is supposed to be used in conjunction with the
|
||||
parameters decorator.
|
||||
"""
|
||||
|
||||
def __new__(mcs, class_name, bases, dct):
|
||||
dct['_id_suffix'] = id_suffix = {}
|
||||
for name, obj in dct.copy().items():
|
||||
if (name.startswith(unittest.TestLoader.testMethodPrefix) and
|
||||
_NonStringIterable(obj)):
|
||||
iterator = iter(obj)
|
||||
dct.pop(name)
|
||||
_UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)
|
||||
|
||||
return type.__new__(mcs, class_name, bases, dct)
|
||||
|
||||
|
||||
def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
|
||||
"""Adds individual test cases to a dictionary.
|
||||
|
||||
Args:
|
||||
dct: The target dictionary.
|
||||
id_suffix: The dictionary for mapping names to test IDs.
|
||||
name: The original name of the test case.
|
||||
iterator: The iterator generating the individual test cases.
|
||||
"""
|
||||
for idx, func in enumerate(iterator):
|
||||
assert callable(func), 'Test generators must yield callables, got %r' % (
|
||||
func,)
|
||||
if getattr(func, '__x_use_name__', False):
|
||||
new_name = func.__name__
|
||||
else:
|
||||
new_name = '%s%s%d' % (name, _SEPARATOR, idx)
|
||||
assert new_name not in dct, (
|
||||
'Name of parameterized test case "%s" not unique' % (new_name,))
|
||||
dct[new_name] = func
|
||||
id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
|
||||
|
||||
|
||||
class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass):
|
||||
"""Base class for test cases using the parameters decorator."""
|
||||
|
||||
def _OriginalName(self):
|
||||
return self._testMethodName.split(_SEPARATOR)[0]
|
||||
|
||||
def __str__(self):
|
||||
return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))
|
||||
|
||||
def id(self): # pylint: disable=invalid-name
|
||||
"""Returns the descriptive ID of the test.
|
||||
|
||||
This is used internally by the unittesting framework to get a name
|
||||
for the test to be used in reports.
|
||||
|
||||
Returns:
|
||||
The test id.
|
||||
"""
|
||||
return '%s.%s%s' % (_StrClass(self.__class__),
|
||||
self._OriginalName(),
|
||||
self._id_suffix.get(self._testMethodName, ''))
|
||||
|
||||
|
||||
def CoopTestCase(other_base_class):
|
||||
"""Returns a new base class with a cooperative metaclass base.
|
||||
|
||||
This enables the TestCase to be used in combination
|
||||
with other base classes that have custom metaclasses, such as
|
||||
mox.MoxTestBase.
|
||||
|
||||
Only works with metaclasses that do not override type.__new__.
|
||||
|
||||
Example:
|
||||
|
||||
import google3
|
||||
import mox
|
||||
|
||||
from google3.testing.pybase import parameterized
|
||||
|
||||
class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)):
|
||||
...
|
||||
|
||||
Args:
|
||||
other_base_class: (class) A test case base class.
|
||||
|
||||
Returns:
|
||||
A new class object.
|
||||
"""
|
||||
metaclass = type(
|
||||
'CoopMetaclass',
|
||||
(other_base_class.__metaclass__,
|
||||
TestGeneratorMetaclass), {})
|
||||
return metaclass(
|
||||
'CoopTestCase',
|
||||
(other_base_class, TestCase), {})
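

# A minimal, illustrative self-test (an addition, not part of the original
# module): each tuple passed to @parameters becomes its own isolated test
# case, expanded by TestGeneratorMetaclass when the class is created.
if __name__ == '__main__':

  class _AdditionExample(TestCase):

    @parameters((1, 2, 3), (2, 2, 4))
    def testAddition(self, op1, op2, result):
      self.assertEqual(result, op1 + op2)

  unittest.main()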
|
@ -1,112 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Determine which implementation of the protobuf API is used in this process.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
try:
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.protobuf.internal import _api_implementation
|
||||
# The compile-time constants in the _api_implementation module can be used to
|
||||
# switch to a certain implementation of the Python API at build time.
|
||||
_api_version = _api_implementation.api_version
|
||||
except ImportError:
|
||||
_api_version = -1 # Unspecified by compiler flags.
|
||||
|
||||
if _api_version == 1:
|
||||
raise ValueError('api_version=1 is no longer supported.')
|
||||
|
||||
|
||||
_default_implementation_type = ('cpp' if _api_version > 0 else 'python')
|
||||
|
||||
|
||||
# This environment variable can be used to switch to a certain implementation
|
||||
# of the Python API, overriding the compile-time constants in the
|
||||
# _api_implementation module. Right now only 'python' and 'cpp' are valid
|
||||
# values. Any other value will be ignored.
|
||||
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
|
||||
_default_implementation_type)
|
||||
|
||||
if _implementation_type != 'python':
|
||||
_implementation_type = 'cpp'
|
||||
|
||||
if 'PyPy' in sys.version and _implementation_type == 'cpp':
|
||||
warnings.warn('PyPy does not work yet with cpp protocol buffers. '
|
||||
'Falling back to the python implementation.')
|
||||
_implementation_type = 'python'
|
||||
|
||||
|
||||
# Detect if serialization should be deterministic by default
|
||||
try:
|
||||
# The presence of this module in a build allows the proto implementation to
|
||||
# be upgraded merely via build deps.
|
||||
#
|
||||
# NOTE: Merely importing this automatically enables deterministic proto
|
||||
# serialization for C++ code, but we still need to export it as a boolean so
|
||||
# that we can do the same for `_implementation_type == 'python'`.
|
||||
#
|
||||
# NOTE2: It is possible for C++ code to enable deterministic serialization by
|
||||
# default _without_ affecting Python code, if the C++ implementation is not in
|
||||
# use by this module. That is intended behavior, so we don't actually expose
|
||||
# this boolean outside of this module.
|
||||
#
|
||||
# pylint: disable=g-import-not-at-top,unused-import
|
||||
from google.protobuf import enable_deterministic_proto_serialization
|
||||
_python_deterministic_proto_serialization = True
|
||||
except ImportError:
|
||||
_python_deterministic_proto_serialization = False
|
||||
|
||||
|
||||
# Usage of this function is discouraged. Clients shouldn't care which
|
||||
# implementation of the API is in use. Note that there is no guarantee
|
||||
# that differences between APIs will be maintained.
|
||||
# Please don't use this function if possible.
|
||||
def Type():
|
||||
return _implementation_type
|
||||
|
||||
|
||||
def _SetType(implementation_type):
|
||||
"""Never use! Only for protobuf benchmark."""
|
||||
global _implementation_type
|
||||
_implementation_type = implementation_type
|
||||
|
||||
|
||||
# See comment on 'Type' above.
|
||||
def Version():
|
||||
return 2
|
||||
|
||||
|
||||
# For internal use only
|
||||
def IsPythonDefaultSerializationDeterministic():
|
||||
return _python_deterministic_proto_serialization
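

# Illustrative sketch (an addition, not part of the original module): the
# implementation is chosen once, at import time, so the environment variable
# documented above must be set before the first protobuf import.
#
#   import os
#   os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python'
#   from google.protobuf.internal import api_implementation
#   assert api_implementation.Type() == 'python'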
|
@ -1,130 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Builds descriptors, message classes and services for generated _pb2.py.
|
||||
|
||||
This file is only called in python generated _pb2.py files. It builds
|
||||
descriptors, message classes and services that users can directly use
|
||||
in generated code.
|
||||
"""
|
||||
|
||||
__author__ = 'jieluo@google.com (Jie Luo)'
|
||||
|
||||
from google.protobuf.internal import enum_type_wrapper
|
||||
from google.protobuf import message as _message
|
||||
from google.protobuf import reflection as _reflection
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
def BuildMessageAndEnumDescriptors(file_des, module):
|
||||
"""Builds message and enum descriptors.
|
||||
|
||||
Args:
|
||||
file_des: FileDescriptor of the .proto file
|
||||
module: Generated _pb2 module
|
||||
"""
|
||||
|
||||
def BuildNestedDescriptors(msg_des, prefix):
|
||||
for (name, nested_msg) in msg_des.nested_types_by_name.items():
|
||||
module_name = prefix + name.upper()
|
||||
module[module_name] = nested_msg
|
||||
BuildNestedDescriptors(nested_msg, module_name + '_')
|
||||
for enum_des in msg_des.enum_types:
|
||||
module[prefix + enum_des.name.upper()] = enum_des
|
||||
|
||||
for (name, msg_des) in file_des.message_types_by_name.items():
|
||||
module_name = '_' + name.upper()
|
||||
module[module_name] = msg_des
|
||||
BuildNestedDescriptors(msg_des, module_name + '_')
|
||||
|
||||
|
||||
def BuildTopDescriptorsAndMessages(file_des, module_name, module):
|
||||
"""Builds top level descriptors and message classes.
|
||||
|
||||
Args:
|
||||
file_des: FileDescriptor of the .proto file
|
||||
module_name: str, the name of generated _pb2 module
|
||||
module: Generated _pb2 module
|
||||
"""
|
||||
|
||||
def BuildMessage(msg_des):
|
||||
create_dict = {}
|
||||
for (name, nested_msg) in msg_des.nested_types_by_name.items():
|
||||
create_dict[name] = BuildMessage(nested_msg)
|
||||
create_dict['DESCRIPTOR'] = msg_des
|
||||
create_dict['__module__'] = module_name
|
||||
message_class = _reflection.GeneratedProtocolMessageType(
|
||||
msg_des.name, (_message.Message,), create_dict)
|
||||
_sym_db.RegisterMessage(message_class)
|
||||
return message_class
|
||||
|
||||
# top level enums
|
||||
for (name, enum_des) in file_des.enum_types_by_name.items():
|
||||
module['_' + name.upper()] = enum_des
|
||||
module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des)
|
||||
for enum_value in enum_des.values:
|
||||
module[enum_value.name] = enum_value.number
|
||||
|
||||
# top level extensions
|
||||
for (name, extension_des) in file_des.extensions_by_name.items():
|
||||
module[name.upper() + '_FIELD_NUMBER'] = extension_des.number
|
||||
module[name] = extension_des
|
||||
|
||||
# services
|
||||
for (name, service) in file_des.services_by_name.items():
|
||||
module['_' + name.upper()] = service
|
||||
|
||||
# Build messages.
|
||||
for (name, msg_des) in file_des.message_types_by_name.items():
|
||||
module[name] = BuildMessage(msg_des)
|
||||
|
||||
|
||||
def BuildServices(file_des, module_name, module):
|
||||
"""Builds services classes and services stub class.
|
||||
|
||||
Args:
|
||||
file_des: FileDescriptor of the .proto file
|
||||
module_name: str, the name of generated _pb2 module
|
||||
module: Generated _pb2 module
|
||||
"""
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.protobuf import service as _service
|
||||
from google.protobuf import service_reflection
|
||||
# pylint: enable=g-import-not-at-top
|
||||
for (name, service) in file_des.services_by_name.items():
|
||||
module[name] = service_reflection.GeneratedServiceType(
|
||||
name, (_service.Service,),
|
||||
dict(DESCRIPTOR=service, __module__=module_name))
|
||||
stub_name = name + '_Stub'
|
||||
module[stub_name] = service_reflection.GeneratedServiceStubType(
|
||||
stub_name, (module[name],),
|
||||
dict(DESCRIPTOR=service, __module__=module_name))
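

# How a generated _pb2 module drives this file (cf. the field_mask_pb2 tail
# earlier in this diff): it passes its own globals() so the built descriptors
# and message classes are injected directly as module attributes.
#
#   from google.protobuf.internal import builder as _builder
#   _builder.BuildTopDescriptorsAndMessages(
#       DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals())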
|
@ -1,710 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Contains container classes to represent different protocol buffer types.
|
||||
|
||||
This file defines container classes which represent categories of protocol
|
||||
buffer field types which need extra maintenance. Currently these categories
|
||||
are:
|
||||
|
||||
- Repeated scalar fields - These are all repeated fields which aren't
|
||||
composite (e.g. they are of simple types like int32, string, etc).
|
||||
- Repeated composite fields - Repeated fields which are composite. This
|
||||
includes groups and nested messages.
|
||||
"""
|
||||
|
||||
import collections.abc
|
||||
import copy
|
||||
import pickle
|
||||
from typing import (
|
||||
Any,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
MutableMapping,
|
||||
MutableSequence,
|
||||
NoReturn,
|
||||
Optional,
|
||||
Sequence,
|
||||
TypeVar,
|
||||
Union,
|
||||
overload,
|
||||
)
|
||||
|
||||
|
||||
_T = TypeVar('_T')
|
||||
_K = TypeVar('_K')
|
||||
_V = TypeVar('_V')
|
||||
|
||||
|
||||
class BaseContainer(Sequence[_T]):
|
||||
"""Base container class."""
|
||||
|
||||
# Minimizes memory usage and disallows assignment to other attributes.
|
||||
__slots__ = ['_message_listener', '_values']
|
||||
|
||||
def __init__(self, message_listener: Any) -> None:
|
||||
"""
|
||||
Args:
|
||||
message_listener: A MessageListener implementation.
|
||||
The RepeatedScalarFieldContainer will call this object's
|
||||
Modified() method when it is modified.
|
||||
"""
|
||||
self._message_listener = message_listener
|
||||
self._values = []
|
||||
|
||||
@overload
|
||||
def __getitem__(self, key: int) -> _T:
|
||||
...
|
||||
|
||||
@overload
|
||||
def __getitem__(self, key: slice) -> List[_T]:
|
||||
...
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""Retrieves item by the specified key."""
|
||||
return self._values[key]
|
||||
|
||||
def __len__(self) -> int:
|
||||
"""Returns the number of elements in the container."""
|
||||
return len(self._values)
|
||||
|
||||
def __ne__(self, other: Any) -> bool:
|
||||
"""Checks if another instance isn't equal to this one."""
|
||||
# The concrete classes should define __eq__.
|
||||
return not self == other
|
||||
|
||||
__hash__ = None
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(self._values)
|
||||
|
||||
def sort(self, *args, **kwargs) -> None:
|
||||
# Continue to support the old sort_function keyword argument.
|
||||
# This is expected to be a rare occurrence, so use LBYL to avoid
|
||||
# the overhead of actually catching KeyError.
|
||||
if 'sort_function' in kwargs:
|
||||
kwargs['cmp'] = kwargs.pop('sort_function')
|
||||
self._values.sort(*args, **kwargs)
|
||||
|
||||
def reverse(self) -> None:
|
||||
self._values.reverse()
|
||||
|
||||
|
||||
# TODO(slebedev): Remove this. BaseContainer does *not* conform to
|
||||
# MutableSequence, only its subclasses do.
|
||||
collections.abc.MutableSequence.register(BaseContainer)
|
||||
|
||||
|
||||
class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]):
|
||||
"""Simple, type-checked, list-like container for holding repeated scalars."""
|
||||
|
||||
# Disallows assignment to other attributes.
|
||||
__slots__ = ['_type_checker']
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message_listener: Any,
|
||||
type_checker: Any,
|
||||
) -> None:
|
||||
"""Args:
|
||||
|
||||
message_listener: A MessageListener implementation. The
|
||||
RepeatedScalarFieldContainer will call this object's Modified() method
|
||||
when it is modified.
|
||||
type_checker: A type_checkers.ValueChecker instance to run on elements
|
||||
inserted into this container.
|
||||
"""
|
||||
super().__init__(message_listener)
|
||||
self._type_checker = type_checker
|
||||
|
||||
def append(self, value: _T) -> None:
|
||||
"""Appends an item to the list. Similar to list.append()."""
|
||||
self._values.append(self._type_checker.CheckValue(value))
|
||||
if not self._message_listener.dirty:
|
||||
self._message_listener.Modified()
|
||||
|
||||
def insert(self, key: int, value: _T) -> None:
|
||||
"""Inserts the item at the specified position. Similar to list.insert()."""
|
||||
self._values.insert(key, self._type_checker.CheckValue(value))
|
||||
if not self._message_listener.dirty:
|
||||
self._message_listener.Modified()
|
||||
|
||||
def extend(self, elem_seq: Iterable[_T]) -> None:
|
||||
"""Extends by appending the given iterable. Similar to list.extend()."""
|
||||
if elem_seq is None:
|
||||
return
|
||||
try:
|
||||
elem_seq_iter = iter(elem_seq)
|
||||
except TypeError:
|
||||
if not elem_seq:
|
||||
# silently ignore falsy inputs :-/.
|
||||
# TODO(ptucker): Deprecate this behavior. b/18413862
|
||||
return
|
||||
raise
|
||||
|
||||
new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
|
||||
if new_values:
|
||||
self._values.extend(new_values)
|
||||
self._message_listener.Modified()
|
||||
|
||||
def MergeFrom(
|
||||
self,
|
||||
other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]],
|
||||
) -> None:
|
||||
"""Appends the contents of another repeated field of the same type to this
|
||||
one. We do not check the types of the individual fields.
|
||||
"""
|
||||
self._values.extend(other)
|
||||
self._message_listener.Modified()
|
||||
|
||||
def remove(self, elem: _T):
|
||||
"""Removes an item from the list. Similar to list.remove()."""
|
||||
self._values.remove(elem)
|
||||
self._message_listener.Modified()
|
||||
|
||||
def pop(self, key: Optional[int] = -1) -> _T:
|
||||
"""Removes and returns an item at a given index. Similar to list.pop()."""
|
||||
value = self._values[key]
|
||||
self.__delitem__(key)
|
||||
return value
|
||||
|
||||
@overload
|
||||
def __setitem__(self, key: int, value: _T) -> None:
|
||||
...
|
||||
|
||||
@overload
|
||||
def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
|
||||
...
|
||||
|
||||
def __setitem__(self, key, value) -> None:
|
||||
"""Sets the item on the specified position."""
|
||||
if isinstance(key, slice):
|
||||
if key.step is not None:
|
||||
raise ValueError('Extended slices not supported')
|
||||
self._values[key] = map(self._type_checker.CheckValue, value)
|
||||
self._message_listener.Modified()
|
||||
else:
|
||||
self._values[key] = self._type_checker.CheckValue(value)
|
||||
self._message_listener.Modified()
|
||||
|
||||
def __delitem__(self, key: Union[int, slice]) -> None:
|
||||
"""Deletes the item at the specified position."""
|
||||
del self._values[key]
|
||||
self._message_listener.Modified()
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
"""Compares the current instance with another one."""
|
||||
if self is other:
|
||||
return True
|
||||
# Special case for the same type which should be common and fast.
|
||||
if isinstance(other, self.__class__):
|
||||
return other._values == self._values
|
||||
# We are presumably comparing against some other sequence type.
|
||||
return other == self._values
|
||||
|
||||
def __deepcopy__(
|
||||
self,
|
||||
unused_memo: Any = None,
|
||||
) -> 'RepeatedScalarFieldContainer[_T]':
|
||||
clone = RepeatedScalarFieldContainer(
|
||||
copy.deepcopy(self._message_listener), self._type_checker)
|
||||
clone.MergeFrom(self)
|
||||
return clone
|
||||
|
||||
def __reduce__(self, **kwargs) -> NoReturn:
|
||||
raise pickle.PickleError(
|
||||
"Can't pickle repeated scalar fields, convert to list first")
|
||||
|
||||
|
||||
# TODO(slebedev): Constrain T to be a subtype of Message.
|
||||
class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]):
|
||||
"""Simple, list-like container for holding repeated composite fields."""
|
||||
|
||||
# Disallows assignment to other attributes.
|
||||
__slots__ = ['_message_descriptor']
|
||||
|
||||
def __init__(self, message_listener: Any, message_descriptor: Any) -> None:
|
||||
"""
|
||||
    Note that we pass in a descriptor instead of the generated class,
    since at the time we construct a _RepeatedCompositeFieldContainer we
    haven't yet necessarily initialized the type that will be contained in the
    container.
|
||||
|
||||
Args:
|
||||
message_listener: A MessageListener implementation.
|
||||
The RepeatedCompositeFieldContainer will call this object's
|
||||
Modified() method when it is modified.
|
||||
message_descriptor: A Descriptor instance describing the protocol type
|
||||
that should be present in this container. We'll use the
|
||||
_concrete_class field of this descriptor when the client calls add().
|
||||
"""
|
||||
super().__init__(message_listener)
|
||||
self._message_descriptor = message_descriptor
|
||||
|
||||
def add(self, **kwargs: Any) -> _T:
|
||||
"""Adds a new element at the end of the list and returns it. Keyword
|
||||
arguments may be used to initialize the element.
|
||||
"""
|
||||
new_element = self._message_descriptor._concrete_class(**kwargs)
|
||||
new_element._SetListener(self._message_listener)
|
||||
self._values.append(new_element)
|
||||
if not self._message_listener.dirty:
|
||||
self._message_listener.Modified()
|
||||
return new_element
|
||||
|
||||
def append(self, value: _T) -> None:
|
||||
"""Appends one element by copying the message."""
|
||||
new_element = self._message_descriptor._concrete_class()
|
||||
new_element._SetListener(self._message_listener)
|
||||
new_element.CopyFrom(value)
|
||||
self._values.append(new_element)
|
||||
if not self._message_listener.dirty:
|
||||
self._message_listener.Modified()
|
||||
|
||||
def insert(self, key: int, value: _T) -> None:
|
||||
"""Inserts the item at the specified position by copying."""
|
||||
new_element = self._message_descriptor._concrete_class()
|
||||
new_element._SetListener(self._message_listener)
|
||||
new_element.CopyFrom(value)
|
||||
self._values.insert(key, new_element)
|
||||
if not self._message_listener.dirty:
|
||||
self._message_listener.Modified()
|
||||
|
||||
  def extend(self, elem_seq: Iterable[_T]) -> None:
    """Extends by appending the given sequence of elements of the same type
    as this one, copying each individual message.
    """
|
||||
message_class = self._message_descriptor._concrete_class
|
||||
listener = self._message_listener
|
||||
values = self._values
|
||||
for message in elem_seq:
|
||||
new_element = message_class()
|
||||
new_element._SetListener(listener)
|
||||
new_element.MergeFrom(message)
|
||||
values.append(new_element)
|
||||
listener.Modified()
|
||||
|
||||
def MergeFrom(
|
||||
self,
|
||||
other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]],
|
||||
) -> None:
|
||||
"""Appends the contents of another repeated field of the same type to this
|
||||
one, copying each individual message.
|
||||
"""
|
||||
self.extend(other)
|
||||
|
||||
def remove(self, elem: _T) -> None:
|
||||
"""Removes an item from the list. Similar to list.remove()."""
|
||||
self._values.remove(elem)
|
||||
self._message_listener.Modified()
|
||||
|
||||
def pop(self, key: Optional[int] = -1) -> _T:
|
||||
"""Removes and returns an item at a given index. Similar to list.pop()."""
|
||||
value = self._values[key]
|
||||
self.__delitem__(key)
|
||||
return value
|
||||
|
||||
@overload
|
||||
def __setitem__(self, key: int, value: _T) -> None:
|
||||
...
|
||||
|
||||
@overload
|
||||
def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
|
||||
...
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
# This method is implemented to make RepeatedCompositeFieldContainer
|
||||
# structurally compatible with typing.MutableSequence. It is
|
||||
# otherwise unsupported and will always raise an error.
|
||||
raise TypeError(
|
||||
f'{self.__class__.__name__} object does not support item assignment')
|
||||
|
||||
def __delitem__(self, key: Union[int, slice]) -> None:
|
||||
"""Deletes the item at the specified position."""
|
||||
del self._values[key]
|
||||
self._message_listener.Modified()
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
"""Compares the current instance with another one."""
|
||||
if self is other:
|
||||
return True
|
||||
if not isinstance(other, self.__class__):
|
||||
raise TypeError('Can only compare repeated composite fields against '
|
||||
'other repeated composite fields.')
|
||||
return self._values == other._values
|
||||
|
||||
|
||||
class ScalarMap(MutableMapping[_K, _V]):
|
||||
"""Simple, type-checked, dict-like container for holding repeated scalars."""
|
||||
|
||||
# Disallows assignment to other attributes.
|
||||
__slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
|
||||
'_entry_descriptor']
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message_listener: Any,
|
||||
key_checker: Any,
|
||||
value_checker: Any,
|
||||
entry_descriptor: Any,
|
||||
) -> None:
|
||||
"""
|
||||
Args:
|
||||
message_listener: A MessageListener implementation.
|
||||
The ScalarMap will call this object's Modified() method when it
|
||||
is modified.
|
||||
key_checker: A type_checkers.ValueChecker instance to run on keys
|
||||
inserted into this container.
|
||||
value_checker: A type_checkers.ValueChecker instance to run on values
|
||||
inserted into this container.
|
||||
entry_descriptor: The MessageDescriptor of a map entry: key and value.
|
||||
"""
|
||||
self._message_listener = message_listener
|
||||
self._key_checker = key_checker
|
||||
self._value_checker = value_checker
|
||||
self._entry_descriptor = entry_descriptor
|
||||
self._values = {}
|
||||
|
||||
def __getitem__(self, key: _K) -> _V:
|
||||
try:
|
||||
return self._values[key]
|
||||
except KeyError:
|
||||
key = self._key_checker.CheckValue(key)
|
||||
val = self._value_checker.DefaultValue()
|
||||
self._values[key] = val
|
||||
return val
|
||||
|
||||
def __contains__(self, item: _K) -> bool:
|
||||
# We check the key's type to match the strong-typing flavor of the API.
|
||||
# Also this makes it easier to match the behavior of the C++ implementation.
|
||||
self._key_checker.CheckValue(item)
|
||||
return item in self._values
|
||||
|
||||
@overload
|
||||
def get(self, key: _K) -> Optional[_V]:
|
||||
...
|
||||
|
||||
@overload
|
||||
def get(self, key: _K, default: _T) -> Union[_V, _T]:
|
||||
...
|
||||
|
||||
# We need to override this explicitly, because our defaultdict-like behavior
|
||||
# will make the default implementation (from our base class) always insert
|
||||
# the key.
|
||||
def get(self, key, default=None):
|
||||
if key in self:
|
||||
return self[key]
|
||||
else:
|
||||
return default
|
||||
|
||||
  def __setitem__(self, key: _K, value: _V) -> None:
|
||||
checked_key = self._key_checker.CheckValue(key)
|
||||
checked_value = self._value_checker.CheckValue(value)
|
||||
self._values[checked_key] = checked_value
|
||||
self._message_listener.Modified()
|
||||
|
||||
def __delitem__(self, key: _K) -> None:
|
||||
del self._values[key]
|
||||
self._message_listener.Modified()
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self._values)
|
||||
|
||||
def __iter__(self) -> Iterator[_K]:
|
||||
return iter(self._values)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(self._values)
|
||||
|
||||
def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None:
|
||||
self._values.update(other._values)
|
||||
self._message_listener.Modified()
|
||||
|
||||
def InvalidateIterators(self) -> None:
|
||||
# It appears that the only way to reliably invalidate iterators to
|
||||
# self._values is to ensure that its size changes.
|
||||
original = self._values
|
||||
self._values = original.copy()
|
||||
original[None] = None
|
||||
|
||||
# This is defined in the abstract base, but we can do it much more cheaply.
|
||||
def clear(self) -> None:
|
||||
self._values.clear()
|
||||
self._message_listener.Modified()
|
||||
|
||||
def GetEntryClass(self) -> Any:
|
||||
return self._entry_descriptor._concrete_class
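

# Hedged behavioral sketch (`my_int_map` is a hypothetical map field):
# __getitem__ mirrors the C++ semantics and inserts the default value for a
# missing key, while get() is non-mutating.
#
#   msg.my_int_map['absent']       # inserts and returns 0
#   msg.my_int_map.get('other')    # returns None without inserting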
|
||||
|
||||
|
||||
class MessageMap(MutableMapping[_K, _V]):
|
||||
"""Simple, type-checked, dict-like container for with submessage values."""
|
||||
|
||||
# Disallows assignment to other attributes.
|
||||
__slots__ = ['_key_checker', '_values', '_message_listener',
|
||||
'_message_descriptor', '_entry_descriptor']
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message_listener: Any,
|
||||
message_descriptor: Any,
|
||||
key_checker: Any,
|
||||
entry_descriptor: Any,
|
||||
) -> None:
|
||||
"""
|
||||
Args:
|
||||
      message_listener: A MessageListener implementation.
        The MessageMap will call this object's Modified() method when it
        is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        of the messages held as values in this container.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      entry_descriptor: The MessageDescriptor of a map entry: key and value.
|
||||
"""
|
||||
self._message_listener = message_listener
|
||||
self._message_descriptor = message_descriptor
|
||||
self._key_checker = key_checker
|
||||
self._entry_descriptor = entry_descriptor
|
||||
self._values = {}
|
||||
|
||||
def __getitem__(self, key: _K) -> _V:
|
||||
key = self._key_checker.CheckValue(key)
|
||||
try:
|
||||
return self._values[key]
|
||||
except KeyError:
|
||||
new_element = self._message_descriptor._concrete_class()
|
||||
new_element._SetListener(self._message_listener)
|
||||
self._values[key] = new_element
|
||||
self._message_listener.Modified()
|
||||
return new_element
|
||||
|
||||
def get_or_create(self, key: _K) -> _V:
|
||||
"""get_or_create() is an alias for getitem (ie. map[key]).
|
||||
|
||||
Args:
|
||||
key: The key to get or create in the map.
|
||||
|
||||
This is useful in cases where you want to be explicit that the call is
|
||||
mutating the map. This can avoid lint errors for statements like this
|
||||
that otherwise would appear to be pointless statements:
|
||||
|
||||
msg.my_map[key]
|
||||
"""
|
||||
return self[key]
|
||||
|
||||
@overload
|
||||
def get(self, key: _K) -> Optional[_V]:
|
||||
...
|
||||
|
||||
@overload
|
||||
def get(self, key: _K, default: _T) -> Union[_V, _T]:
|
||||
...
|
||||
|
||||
# We need to override this explicitly, because our defaultdict-like behavior
|
||||
# will make the default implementation (from our base class) always insert
|
||||
# the key.
|
||||
def get(self, key, default=None):
|
||||
if key in self:
|
||||
return self[key]
|
||||
else:
|
||||
return default
|
||||
|
||||
def __contains__(self, item: _K) -> bool:
|
||||
item = self._key_checker.CheckValue(item)
|
||||
return item in self._values
|
||||
|
||||
def __setitem__(self, key: _K, value: _V) -> NoReturn:
|
||||
raise ValueError('May not set values directly, call my_map[key].foo = 5')
|
||||
|
||||
def __delitem__(self, key: _K) -> None:
|
||||
key = self._key_checker.CheckValue(key)
|
||||
del self._values[key]
|
||||
self._message_listener.Modified()
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self._values)
|
||||
|
||||
def __iter__(self) -> Iterator[_K]:
|
||||
return iter(self._values)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(self._values)
|
||||
|
||||
def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None:
|
||||
# pylint: disable=protected-access
|
||||
for key in other._values:
|
||||
# According to documentation: "When parsing from the wire or when merging,
|
||||
# if there are duplicate map keys the last key seen is used".
|
||||
if key in self:
|
||||
del self[key]
|
||||
self[key].CopyFrom(other[key])
|
||||
# self._message_listener.Modified() not required here, because
|
||||
# mutations to submessages already propagate.
|
||||
|
||||
def InvalidateIterators(self) -> None:
|
||||
# It appears that the only way to reliably invalidate iterators to
|
||||
# self._values is to ensure that its size changes.
|
||||
original = self._values
|
||||
self._values = original.copy()
|
||||
original[None] = None
|
||||
|
||||
# This is defined in the abstract base, but we can do it much more cheaply.
|
||||
def clear(self) -> None:
|
||||
self._values.clear()
|
||||
self._message_listener.Modified()
|
||||
|
||||
def GetEntryClass(self) -> Any:
|
||||
return self._entry_descriptor._concrete_class
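

# Hedged usage sketch (`my_msg_map` is a hypothetical map field): message
# values cannot be assigned directly; index the key to materialize the entry,
# then mutate or CopyFrom into it.
#
#   msg.my_msg_map['key'].foo = 5            # supported
#   msg.my_msg_map['key'] = other_message    # raises ValueError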
|
||||
|
||||
|
||||
class _UnknownField:
|
||||
"""A parsed unknown field."""
|
||||
|
||||
# Disallows assignment to other attributes.
|
||||
__slots__ = ['_field_number', '_wire_type', '_data']
|
||||
|
||||
def __init__(self, field_number, wire_type, data):
|
||||
self._field_number = field_number
|
||||
self._wire_type = wire_type
|
||||
self._data = data
|
||||
return
|
||||
|
||||
def __lt__(self, other):
|
||||
# pylint: disable=protected-access
|
||||
return self._field_number < other._field_number
|
||||
|
||||
def __eq__(self, other):
|
||||
if self is other:
|
||||
return True
|
||||
# pylint: disable=protected-access
|
||||
return (self._field_number == other._field_number and
|
||||
self._wire_type == other._wire_type and
|
||||
self._data == other._data)
|
||||
|
||||
|
||||
class UnknownFieldRef: # pylint: disable=missing-class-docstring
|
||||
|
||||
def __init__(self, parent, index):
|
||||
self._parent = parent
|
||||
self._index = index
|
||||
|
||||
def _check_valid(self):
|
||||
if not self._parent:
|
||||
raise ValueError('UnknownField does not exist. '
|
||||
'The parent message might be cleared.')
|
||||
if self._index >= len(self._parent):
|
||||
raise ValueError('UnknownField does not exist. '
|
||||
'The parent message might be cleared.')
|
||||
|
||||
@property
|
||||
def field_number(self):
|
||||
self._check_valid()
|
||||
# pylint: disable=protected-access
|
||||
return self._parent._internal_get(self._index)._field_number
|
||||
|
||||
@property
|
||||
def wire_type(self):
|
||||
self._check_valid()
|
||||
# pylint: disable=protected-access
|
||||
return self._parent._internal_get(self._index)._wire_type
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
self._check_valid()
|
||||
# pylint: disable=protected-access
|
||||
return self._parent._internal_get(self._index)._data
|
||||
|
||||
|
||||
class UnknownFieldSet:
|
||||
"""UnknownField container"""
|
||||
|
||||
# Disallows assignment to other attributes.
|
||||
__slots__ = ['_values']
|
||||
|
||||
def __init__(self):
|
||||
self._values = []
|
||||
|
||||
def __getitem__(self, index):
|
||||
if self._values is None:
|
||||
raise ValueError('UnknownFields does not exist. '
|
||||
'The parent message might be cleared.')
|
||||
size = len(self._values)
|
||||
if index < 0:
|
||||
index += size
|
||||
if index < 0 or index >= size:
|
||||
      raise IndexError('index %d out of range' % index)
|
||||
|
||||
return UnknownFieldRef(self, index)
|
||||
|
||||
def _internal_get(self, index):
|
||||
return self._values[index]
|
||||
|
||||
def __len__(self):
|
||||
if self._values is None:
|
||||
raise ValueError('UnknownFields does not exist. '
|
||||
'The parent message might be cleared.')
|
||||
return len(self._values)
|
||||
|
||||
def _add(self, field_number, wire_type, data):
|
||||
unknown_field = _UnknownField(field_number, wire_type, data)
|
||||
self._values.append(unknown_field)
|
||||
return unknown_field
|
||||
|
||||
def __iter__(self):
|
||||
for i in range(len(self)):
|
||||
yield UnknownFieldRef(self, i)
|
||||
|
||||
def _extend(self, other):
|
||||
if other is None:
|
||||
return
|
||||
# pylint: disable=protected-access
|
||||
self._values.extend(other._values)
|
||||
|
||||
def __eq__(self, other):
|
||||
if self is other:
|
||||
return True
|
||||
# Sort unknown fields because their order shouldn't
|
||||
# affect equality test.
|
||||
values = list(self._values)
|
||||
if other is None:
|
||||
return not values
|
||||
values.sort()
|
||||
# pylint: disable=protected-access
|
||||
other_values = sorted(other._values)
|
||||
return values == other_values
|
||||
|
||||
def _clear(self):
|
||||
for value in self._values:
|
||||
# pylint: disable=protected-access
|
||||
if isinstance(value._data, UnknownFieldSet):
|
||||
value._data._clear() # pylint: disable=protected-access
|
||||
self._values = None
|
File diff suppressed because it is too large
@ -1,829 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Code for encoding protocol message primitives.
|
||||
|
||||
Contains the logic for encoding every logical protocol field type
|
||||
into one of the 5 physical wire types.
|
||||
|
||||
This code is designed to push the Python interpreter's performance to the
|
||||
limits.
|
||||
|
||||
The basic idea is that at startup time, for every field (i.e. every
|
||||
FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
|
||||
sizer takes a value of this field's type and computes its byte size. The
|
||||
encoder takes a writer function and a value. It encodes the value into byte
|
||||
strings and invokes the writer function to write those strings. Typically the
|
||||
writer function is the write() method of a BytesIO.
|
||||
|
||||
We try to do as much work as possible when constructing the writer and the
|
||||
sizer rather than when calling them. In particular:
|
||||
* We copy any needed global functions to local variables, so that we do not need
|
||||
to do costly global table lookups at runtime.
|
||||
* Similarly, we try to do any attribute lookups at startup time if possible.
|
||||
* Every field's tag is encoded to bytes at startup, since it can't change at
|
||||
runtime.
|
||||
* Whatever component of the field size we can compute at startup, we do.
|
||||
* We *avoid* sharing code if doing so would make the code slower and not sharing
|
||||
does not burden us too much. For example, encoders for repeated fields do
|
||||
not just call the encoders for singular fields in a loop because this would
|
||||
add an extra function call overhead for every loop iteration; instead, we
|
||||
manually inline the single-value encoder into the loop.
|
||||
* If a Python function lacks a return statement, Python actually generates
|
||||
instructions to pop the result of the last statement off the stack, push
|
||||
None onto the stack, and then return that. If we really don't care what
|
||||
value is returned, then we can save two instructions by returning the
|
||||
result of the last statement. It looks funny but it helps.
|
||||
* We assume that type and bounds checking has happened at a higher level.
|
||||
"""
|
||||
|
||||
__author__ = 'kenton@google.com (Kenton Varda)'
|
||||
|
||||
import struct
|
||||
|
||||
from google.protobuf.internal import wire_format
|
||||
|
||||
|
||||
# This will overflow and thus become IEEE-754 "infinity". We would use
|
||||
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
|
||||
_POS_INF = 1e10000
|
||||
_NEG_INF = -_POS_INF
|
||||
|
||||
|
||||
def _VarintSize(value):
|
||||
"""Compute the size of a varint value."""
|
||||
if value <= 0x7f: return 1
|
||||
if value <= 0x3fff: return 2
|
||||
if value <= 0x1fffff: return 3
|
||||
if value <= 0xfffffff: return 4
|
||||
if value <= 0x7ffffffff: return 5
|
||||
if value <= 0x3ffffffffff: return 6
|
||||
if value <= 0x1ffffffffffff: return 7
|
||||
if value <= 0xffffffffffffff: return 8
|
||||
if value <= 0x7fffffffffffffff: return 9
|
||||
return 10
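
# Worked example: 300 (0x12C) is greater than 0x7f but fits in 0x3fff, so
# _VarintSize(300) == 2; on the wire it encodes as the two bytes 0xAC 0x02.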
|
||||
|
||||
|
||||
def _SignedVarintSize(value):
|
||||
"""Compute the size of a signed varint value."""
|
||||
if value < 0: return 10
|
||||
if value <= 0x7f: return 1
|
||||
if value <= 0x3fff: return 2
|
||||
if value <= 0x1fffff: return 3
|
||||
if value <= 0xfffffff: return 4
|
||||
if value <= 0x7ffffffff: return 5
|
||||
if value <= 0x3ffffffffff: return 6
|
||||
if value <= 0x1ffffffffffff: return 7
|
||||
if value <= 0xffffffffffffff: return 8
|
||||
if value <= 0x7fffffffffffffff: return 9
|
||||
return 10
|
||||
|
||||
|
||||
def _TagSize(field_number):
|
||||
"""Returns the number of bytes required to serialize a tag with this field
|
||||
number."""
|
||||
# Just pass in type 0, since the type won't affect the tag+type size.
|
||||
return _VarintSize(wire_format.PackTag(field_number, 0))
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
# In this section we define some generic sizers.  Each of these functions
# takes parameters specific to a particular field type, e.g. int32 or fixed64.
# It returns another function which in turn takes parameters specific to a
# particular field, e.g. the field number and whether it is repeated or packed.
# Look at the next section to see how these are used.


def _SimpleSizer(compute_value_size):
  """A sizer which uses the function compute_value_size to compute the size of
  each value.  Typically compute_value_size is _VarintSize."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        result = 0
        for element in value:
          result += compute_value_size(element)
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(element)
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(value)
      return FieldSize

  return SpecificSizer


def _ModifiedSizer(compute_value_size, modify_value):
  """Like SimpleSizer, but modify_value is invoked on each value before it is
  passed to compute_value_size.  modify_value is typically ZigZagEncode."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        result = 0
        for element in value:
          result += compute_value_size(modify_value(element))
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(modify_value(element))
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(modify_value(value))
      return FieldSize

  return SpecificSizer


def _FixedSizer(value_size):
  """Like _SimpleSizer except for a fixed-size field.  The input is the size
  of one value."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        result = len(value) * value_size
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      element_size = value_size + tag_size
      def RepeatedFieldSize(value):
        return len(value) * element_size
      return RepeatedFieldSize
    else:
      field_size = value_size + tag_size
      def FieldSize(value):
        return field_size
      return FieldSize

  return SpecificSizer


# ====================================================================
# Here we declare a sizer constructor for each field type.  Each "sizer
# constructor" is a function that takes (field_number, is_repeated, is_packed)
# as parameters and returns a sizer, which in turn takes a field value as
# a parameter and returns its encoded size.


Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)

UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)

SInt32Sizer = SInt64Sizer = _ModifiedSizer(
    _SignedVarintSize, wire_format.ZigZagEncode)

Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)

BoolSizer = _FixedSizer(1)


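# Usage sketch (editor's note, not in the original file): a sizer
# constructor is applied twice -- once at startup with the field's shape,
# then per call with a value.  Hypothetical field number 1, singular:
#
#   sizer = Int32Sizer(1, False, False)
#   sizer(150)  # == 3: one tag byte plus a two-byte varint
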
def StringSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a string field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = local_len(element.encode('utf-8'))
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value.encode('utf-8'))
      return tag_size + local_VarintSize(l) + l
    return FieldSize


def BytesSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a bytes field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = local_len(element)
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value)
      return tag_size + local_VarintSize(l) + l
    return FieldSize


def GroupSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a group field."""

  tag_size = _TagSize(field_number) * 2
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        result += element.ByteSize()
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      return tag_size + value.ByteSize()
    return FieldSize


def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = element.ByteSize()
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = value.ByteSize()
      return tag_size + local_VarintSize(l) + l
    return FieldSize


# --------------------------------------------------------------------
# MessageSet is special: it needs custom logic to compute its size properly.


def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                 _TagSize(3))
  local_VarintSize = _VarintSize

  def FieldSize(value):
    l = value.ByteSize()
    return static_size + local_VarintSize(l) + l

  return FieldSize


# --------------------------------------------------------------------
# Map is special: it needs custom logic to compute its size properly.


def MapSizer(field_descriptor, is_message_map):
  """Returns a sizer for a map field."""

  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  message_sizer = MessageSizer(field_descriptor.number, False, False)

  def FieldSize(map_value):
    total = 0
    for key in map_value:
      value = map_value[key]
      # It's wasteful to create the messages and throw them away a second
      # later since we'll do the same for the actual encode.  But there's not
      # an obvious way to avoid this within the current design without tons of
      # code duplication.  For message maps, value.ByteSize() must still be
      # called so that the value's cached size is updated.
      entry_msg = message_type._concrete_class(key=key, value=value)
      total += message_sizer(entry_msg)
      if is_message_map:
        value.ByteSize()
    return total

  return FieldSize

# ====================================================================
# Encoders!


def _VarintEncoder():
  """Return an encoder for a basic varint value (does not include tag)."""

  local_int2byte = struct.Struct('>B').pack

  def EncodeVarint(write, value, unused_deterministic=None):
    bits = value & 0x7f
    value >>= 7
    while value:
      write(local_int2byte(0x80|bits))
      bits = value & 0x7f
      value >>= 7
    return write(local_int2byte(bits))

  return EncodeVarint

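# Worked example (editor's note, not in the original file): encoding 300.
# 300 = 0b100101100; the low 7 bits (0101100) are written first with the
# continuation bit set, then the remaining bits (10):
#
#   pieces = []
#   _EncodeVarint(pieces.append, 300, True)   # using the instance built below
#   b"".join(pieces) == b'\xac\x02'
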
def _SignedVarintEncoder():
  """Return an encoder for a basic signed varint value (does not include
  tag)."""

  local_int2byte = struct.Struct('>B').pack

  def EncodeSignedVarint(write, value, unused_deterministic=None):
    if value < 0:
      value += (1 << 64)
    bits = value & 0x7f
    value >>= 7
    while value:
      write(local_int2byte(0x80|bits))
      bits = value & 0x7f
      value >>= 7
    return write(local_int2byte(bits))

  return EncodeSignedVarint


_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()


def _VarintBytes(value):
  """Encode the given integer as a varint and return the bytes.  This is only
  called at startup time so it doesn't need to be fast."""

  pieces = []
  _EncodeVarint(pieces.append, value, True)
  return b"".join(pieces)


def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes.  Only called at startup."""

  return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type)))

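# Example (editor's note, not in the original file): the tag for field 1
# with wire type 0 (varint) is (1 << 3) | 0 == 8, a single byte:
#
#   TagBytes(1, wire_format.WIRETYPE_VARINT) == b'\x08'
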
# --------------------------------------------------------------------
# As with sizers (see above), we have a number of common encoder
# implementations.


def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      encode_value:  A function which encodes an individual value, e.g.
        _EncodeVarint().
      compute_value_size:  A function which computes the size of an individual
        value, e.g. _VarintSize().
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        size = 0
        for element in value:
          size += compute_value_size(element)
        local_EncodeVarint(write, size, deterministic)
        for element in value:
          encode_value(write, element, deterministic)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for element in value:
          write(tag_bytes)
          encode_value(write, element, deterministic)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, deterministic):
        write(tag_bytes)
        return encode_value(write, value, deterministic)
      return EncodeField

  return SpecificEncoder


def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like SimpleEncoder but additionally invokes modify_value on every value
  before passing it to encode_value.  Usually modify_value is ZigZagEncode."""

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        size = 0
        for element in value:
          size += compute_value_size(modify_value(element))
        local_EncodeVarint(write, size, deterministic)
        for element in value:
          encode_value(write, modify_value(element), deterministic)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for element in value:
          write(tag_bytes)
          encode_value(write, modify_value(element), deterministic)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, deterministic):
        write(tag_bytes)
        return encode_value(write, modify_value(value), deterministic)
      return EncodeField

  return SpecificEncoder


def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          write(local_struct_pack(format, element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          write(local_struct_pack(format, element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        return write(local_struct_pack(format, value))
      return EncodeField

  return SpecificEncoder


def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.

  This is like StructPackEncoder, but catches errors that may be due to
  passing non-finite floating-point values to struct.pack, and makes a
  second attempt to encode those values.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)
  if value_size == 4:
    def EncodeNonFiniteOrRaise(write, value):
      # Remember that the serialized form uses little-endian byte order.
      if value == _POS_INF:
        write(b'\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x80\xFF')
      elif value != value:  # NaN
        write(b'\x00\x00\xC0\x7F')
      else:
        raise
  elif value_size == 8:
    def EncodeNonFiniteOrRaise(write, value):
      if value == _POS_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:  # NaN
        write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          # This try/except block is going to be faster than any code that
          # we could write to check whether element is finite.
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        try:
          write(local_struct_pack(format, value))
        except SystemError:
          EncodeNonFiniteOrRaise(write, value)
      return EncodeField

  return SpecificEncoder

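# Reference values (editor's note, not in the original file): the
# hard-coded byte strings above are the little-endian IEEE-754 encodings
# that struct.pack produces when it does accept non-finite inputs, e.g.:
#
#   struct.pack('<f', float('inf'))  == b'\x00\x00\x80\x7f'
#   struct.pack('<d', float('-inf')) == b'\x00\x00\x00\x00\x00\x00\xf0\xff'
#
# On modern Pythons struct.pack handles inf/nan directly, so the
# SystemError fallback above is rarely taken; it guards older interpreters.
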
# ====================================================================
# Here we declare an encoder constructor for each field type.  These work
# very similarly to sizer constructors, described earlier.


Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)

UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)

SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
    wire_format.ZigZagEncode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')


def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field."""

  false_byte = b'\x00'
  true_byte = b'\x01'
  if is_packed:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    local_EncodeVarint = _EncodeVarint
    def EncodePackedField(write, value, deterministic):
      write(tag_bytes)
      local_EncodeVarint(write, len(value), deterministic)
      for element in value:
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodePackedField
  elif is_repeated:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeRepeatedField(write, value, unused_deterministic=None):
      for element in value:
        write(tag_bytes)
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodeRepeatedField
  else:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeField(write, value, unused_deterministic=None):
      write(tag_bytes)
      if value:
        return write(true_byte)
      return write(false_byte)
    return EncodeField


def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  local_len = len
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        encoded = element.encode('utf-8')
        write(tag)
        local_EncodeVarint(write, local_len(encoded), deterministic)
        write(encoded)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      encoded = value.encode('utf-8')
      write(tag)
      local_EncodeVarint(write, local_len(encoded), deterministic)
      return write(encoded)
    return EncodeField


def BytesEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a bytes field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  local_len = len
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(tag)
        local_EncodeVarint(write, local_len(element), deterministic)
        write(element)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      write(tag)
      local_EncodeVarint(write, local_len(value), deterministic)
      return write(value)
    return EncodeField


def GroupEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a group field."""

  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(start_tag)
        element._InternalSerialize(write, deterministic)
        write(end_tag)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      write(start_tag)
      value._InternalSerialize(write, deterministic)
      return write(end_tag)
    return EncodeField


def MessageEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a message field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(tag)
        local_EncodeVarint(write, element.ByteSize(), deterministic)
        element._InternalSerialize(write, deterministic)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      write(tag)
      local_EncodeVarint(write, value.ByteSize(), deterministic)
      return value._InternalSerialize(write, deterministic)
    return EncodeField


# --------------------------------------------------------------------
# As before, MessageSet is special.


def MessageSetItemEncoder(field_number):
  """Encoder for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  start_bytes = b"".join([
      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
      TagBytes(2, wire_format.WIRETYPE_VARINT),
      _VarintBytes(field_number),
      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
  local_EncodeVarint = _EncodeVarint

  def EncodeField(write, value, deterministic):
    write(start_bytes)
    local_EncodeVarint(write, value.ByteSize(), deterministic)
    value._InternalSerialize(write, deterministic)
    return write(end_bytes)

  return EncodeField


# --------------------------------------------------------------------
# As before, Map is special.


def MapEncoder(field_descriptor):
  """Encoder for map fields.

  Maps always have a wire format like this:
    message MapEntry {
      key_type key = 1;
      value_type value = 2;
    }
    repeated MapEntry map = N;
  """
  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  encode_message = MessageEncoder(field_descriptor.number, False, False)

  def EncodeField(write, value, deterministic):
    value_keys = sorted(value.keys()) if deterministic else value
    for key in value_keys:
      entry_msg = message_type._concrete_class(key=key, value=value[key])
      encode_message(write, entry_msg, deterministic)

  return EncodeField
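

# Usage note (editor's sketch, not part of the original file): deterministic
# serialization sorts map keys so that equal maps always produce identical
# bytes; non-deterministic mode writes entries in dict iteration order.
# Each entry is framed like a singular embedded MapEntry message:
#
#   out = []
#   encode = MapEncoder(some_map_field_descriptor)  # hypothetical descriptor
#   encode(out.append, message.map_field, deterministic=True)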
@ -1,124 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""A simple wrapper around enum types to expose utility functions.

Instances are created as properties with the same name as the enum they wrap
on proto classes.  For usage, see:
  reflection_test.py
"""

__author__ = 'rabsatt@google.com (Kevin Rabsatt)'


class EnumTypeWrapper(object):
  """A utility for finding the names of enum values."""

  DESCRIPTOR = None

  # This is a type alias, which mypy typing stubs can type as
  # a genericized parameter constrained to an int, allowing subclasses
  # to be typed with more constraint in .pyi stubs
  # Eg.
  # def MyGeneratedEnum(Message):
  #   ValueType = NewType('ValueType', int)
  #   def Name(self, number: MyGeneratedEnum.ValueType) -> str
  ValueType = int

  def __init__(self, enum_type):
    """Inits EnumTypeWrapper with an EnumDescriptor."""
    self._enum_type = enum_type
    self.DESCRIPTOR = enum_type  # pylint: disable=invalid-name

  def Name(self, number):  # pylint: disable=invalid-name
    """Returns a string containing the name of an enum value."""
    try:
      return self._enum_type.values_by_number[number].name
    except KeyError:
      pass  # fall out to break exception chaining

    if not isinstance(number, int):
      raise TypeError(
          'Enum value for {} must be an int, but got {} {!r}.'.format(
              self._enum_type.name, type(number), number))
    else:
      # repr here to handle the odd case when you pass in a boolean.
      raise ValueError('Enum {} has no name defined for value {!r}'.format(
          self._enum_type.name, number))

  def Value(self, name):  # pylint: disable=invalid-name
    """Returns the value corresponding to the given enum name."""
    try:
      return self._enum_type.values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise ValueError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))

  def keys(self):
    """Return a list of the string names in the enum.

    Returns:
      A list of strs, in the order they were defined in the .proto file.
    """

    return [value_descriptor.name
            for value_descriptor in self._enum_type.values]

  def values(self):
    """Return a list of the integer values in the enum.

    Returns:
      A list of ints, in the order they were defined in the .proto file.
    """

    return [value_descriptor.number
            for value_descriptor in self._enum_type.values]

  def items(self):
    """Return a list of the (name, value) pairs of the enum.

    Returns:
      A list of (str, int) pairs, in the order they were defined
      in the .proto file.
    """
    return [(value_descriptor.name, value_descriptor.number)
            for value_descriptor in self._enum_type.values]

  def __getattr__(self, name):
    """Returns the value corresponding to the given enum name."""
    try:
      return super(
          EnumTypeWrapper,
          self).__getattribute__('_enum_type').values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise AttributeError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))
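

# Usage sketch (editor's note, not part of the original file), assuming a
# hypothetical generated enum named Color with a value RED = 0:
#
#   Color = EnumTypeWrapper(color_enum_descriptor)  # done by generated code
#   Color.Name(0)       # -> 'RED'
#   Color.Value('RED')  # -> 0
#   Color.RED           # -> 0, via __getattr__
#   Color.items()       # -> [('RED', 0), ...]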
@ -1,213 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Contains _ExtensionDict class to represent extensions.
"""

from google.protobuf.internal import type_checkers
from google.protobuf.descriptor import FieldDescriptor


def _VerifyExtensionHandle(message, extension_handle):
  """Verify that the given extension handle is valid."""

  if not isinstance(extension_handle, FieldDescriptor):
    raise KeyError('HasExtension() expects an extension handle, got: %s' %
                   extension_handle)

  if not extension_handle.is_extension:
    raise KeyError('"%s" is not an extension.' % extension_handle.full_name)

  if not extension_handle.containing_type:
    raise KeyError('"%s" is missing a containing_type.'
                   % extension_handle.full_name)

  if extension_handle.containing_type is not message.DESCRIPTOR:
    raise KeyError('Extension "%s" extends message type "%s", but this '
                   'message is of type "%s".' %
                   (extension_handle.full_name,
                    extension_handle.containing_type.full_name,
                    message.DESCRIPTOR.full_name))


# TODO(robinson): Unify error handling of "unknown extension" crap.
# TODO(robinson): Support iteritems()-style iteration over all
# extensions with the "has" bits turned on?
class _ExtensionDict(object):

  """Dict-like container for Extension fields on proto instances.

  Note that in all cases we expect extension handles to be
  FieldDescriptors.
  """

  def __init__(self, extended_message):
    """
    Args:
      extended_message: Message instance for which we are the Extensions dict.
    """
    self._extended_message = extended_message

  def __getitem__(self, extension_handle):
    """Returns the current value of the given extension handle."""

    _VerifyExtensionHandle(self._extended_message, extension_handle)

    result = self._extended_message._fields.get(extension_handle)
    if result is not None:
      return result

    if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
      result = extension_handle._default_constructor(self._extended_message)
    elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
      message_type = extension_handle.message_type
      if not hasattr(message_type, '_concrete_class'):
        # pylint: disable=protected-access
        self._extended_message._FACTORY.GetPrototype(message_type)
      assert getattr(extension_handle.message_type, '_concrete_class', None), (
          'Uninitialized concrete class found for field %r (message type %r)'
          % (extension_handle.full_name,
             extension_handle.message_type.full_name))
      result = extension_handle.message_type._concrete_class()
      try:
        result._SetListener(self._extended_message._listener_for_children)
      except ReferenceError:
        pass
    else:
      # Singular scalar -- just return the default without inserting into the
      # dict.
      return extension_handle.default_value

    # Atomically check if another thread has preempted us and, if not, swap
    # in the new object we just created.  If someone has preempted us, we
    # take that object and discard ours.
    # WARNING: We are relying on setdefault() being atomic.  This is true
    # in CPython but we haven't investigated others.  This warning appears
    # in several other locations in this file.
    result = self._extended_message._fields.setdefault(
        extension_handle, result)

    return result

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False

    my_fields = self._extended_message.ListFields()
    other_fields = other._extended_message.ListFields()

    # Get rid of non-extension fields.
    my_fields = [field for field in my_fields if field.is_extension]
    other_fields = [field for field in other_fields if field.is_extension]

    return my_fields == other_fields

  def __ne__(self, other):
    return not self == other

  def __len__(self):
    fields = self._extended_message.ListFields()
    # Get rid of non-extension fields.
    extension_fields = [field for field in fields if field[0].is_extension]
    return len(extension_fields)

  def __hash__(self):
    raise TypeError('unhashable object')

  # Note that this is only meaningful for non-repeated, scalar extension
  # fields.  Note also that we may have to call _Modified() when we do
  # successfully set a field this way, to set any necessary "has" bits in the
  # ancestors of the extended message.
  def __setitem__(self, extension_handle, value):
    """If extension_handle specifies a non-repeated, scalar extension
    field, sets the value of that field.
    """

    _VerifyExtensionHandle(self._extended_message, extension_handle)

    if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or
        extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE):
      raise TypeError(
          'Cannot assign to extension "%s" because it is a repeated or '
          'composite type.' % extension_handle.full_name)

    # It's slightly wasteful to lookup the type checker each time,
    # but we expect this to be a vanishingly uncommon case anyway.
    type_checker = type_checkers.GetTypeChecker(extension_handle)
    # pylint: disable=protected-access
    self._extended_message._fields[extension_handle] = (
        type_checker.CheckValue(value))
    self._extended_message._Modified()

  def __delitem__(self, extension_handle):
    self._extended_message.ClearExtension(extension_handle)

  def _FindExtensionByName(self, name):
    """Tries to find a known extension with the specified name.

    Args:
      name: Extension full name.

    Returns:
      Extension field descriptor.
    """
    return self._extended_message._extensions_by_name.get(name, None)

  def _FindExtensionByNumber(self, number):
    """Tries to find a known extension with the field number.

    Args:
      number: Extension field number.

    Returns:
      Extension field descriptor.
    """
    return self._extended_message._extensions_by_number.get(number, None)

  def __iter__(self):
    # Return a generator over the populated extension fields
    return (f[0] for f in self._extended_message.ListFields()
            if f[0].is_extension)

  def __contains__(self, extension_handle):
    _VerifyExtensionHandle(self._extended_message, extension_handle)

    if extension_handle not in self._extended_message._fields:
      return False

    if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
      return bool(self._extended_message._fields.get(extension_handle))

    if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
      value = self._extended_message._fields.get(extension_handle)
      # pylint: disable=protected-access
      return value is not None and value._is_present_in_parent

    return True
@ -1,78 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Defines a listener interface for observing certain
state transitions on Message objects.

Also defines a null implementation of this interface.
"""

__author__ = 'robinson@google.com (Will Robinson)'


class MessageListener(object):

  """Listens for modifications made to a message.  Meant to be registered via
  Message._SetListener().

  Attributes:
    dirty:  If True, then calling Modified() would be a no-op.  This can be
            used to avoid these calls entirely in the common case.
  """

  def Modified(self):
    """Called every time the message is modified in such a way that the parent
    message may need to be updated.  This currently means either:
    (a) The message was modified for the first time, so the parent message
        should henceforth mark the message as present.
    (b) The message's cached byte size became dirty -- i.e. the message was
        modified for the first time after a previous call to ByteSize().
        Therefore the parent should also mark its byte size as dirty.
    Note that (a) implies (b), since new objects start out with a client cached
    size (zero).  However, we document (a) explicitly because it is important.

    Modified() will *only* be called in response to one of these two events --
    not every time the sub-message is modified.

    Note that if the listener's |dirty| attribute is true, then calling
    Modified at the moment would be a no-op, so it can be skipped.  Performance-
    sensitive callers should check this attribute directly before calling since
    it will be true most of the time.
    """

    raise NotImplementedError


class NullMessageListener(object):

  """No-op MessageListener implementation."""

  def Modified(self):
    pass
@ -1,36 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: google/protobuf/internal/message_set_extensions.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  TestMessageSet.RegisterExtension(message_set_extension3)
  TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension'])
  TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension'])

  DESCRIPTOR._options = None
  _TESTMESSAGESET._options = None
  _TESTMESSAGESET._serialized_options = b'\010\001'
  _TESTMESSAGESET._serialized_start=83
  _TESTMESSAGESET._serialized_end=113
  _TESTMESSAGESETEXTENSION1._serialized_start=116
  _TESTMESSAGESETEXTENSION1._serialized_end=281
  _TESTMESSAGESETEXTENSION2._serialized_start=284
  _TESTMESSAGESETEXTENSION2._serialized_end=451
  _TESTMESSAGESETEXTENSION3._serialized_start=453
  _TESTMESSAGESETEXTENSION3._serialized_end=493
# @@protoc_insertion_point(module_scope)
@ -1,37 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: google/protobuf/internal/missing_enum_values.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None
  _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001'
  _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None
  _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001'
  _TESTENUMVALUES._serialized_start=88
  _TESTENUMVALUES._serialized_end=409
  _TESTENUMVALUES_NESTEDENUM._serialized_start=378
  _TESTENUMVALUES_NESTEDENUM._serialized_end=409
  _TESTMISSINGENUMVALUES._serialized_start=412
  _TESTMISSINGENUMVALUES._serialized_end=751
  _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730
  _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751
  _JUSTSTRING._serialized_start=753
  _JUSTSTRING._serialized_end=780
# @@protoc_insertion_point(module_scope)
@ -1,29 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: google/protobuf/internal/more_extensions_dynamic.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension)
  google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension)
  google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension)

  DESCRIPTOR._options = None
  _DYNAMICMESSAGETYPE._serialized_start=132
  _DYNAMICMESSAGETYPE._serialized_end=163
# @@protoc_insertion_point(module_scope)
@ -1,41 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: google/protobuf/internal/more_extensions.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  ExtendedMessage.RegisterExtension(optional_int_extension)
  ExtendedMessage.RegisterExtension(optional_message_extension)
  ExtendedMessage.RegisterExtension(repeated_int_extension)
  ExtendedMessage.RegisterExtension(repeated_message_extension)

  DESCRIPTOR._options = None
  _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None
  _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001'
  _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None
  _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001'
  _NESTEDMESSAGE.fields_by_name['submessage']._options = None
  _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001'
  _TOPLEVELMESSAGE._serialized_start=77
  _TOPLEVELMESSAGE._serialized_end=230
  _NESTEDMESSAGE._serialized_start=232
  _NESTEDMESSAGE._serialized_end=314
  _EXTENDEDMESSAGE._serialized_start=316
  _EXTENDEDMESSAGE._serialized_end=391
  _FOREIGNMESSAGE._serialized_start=393
  _FOREIGNMESSAGE._serialized_end=438
# @@protoc_insertion_point(module_scope)
File diff suppressed because one or more lines are too long
@ -1,27 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler.  DO NOT EDIT!
# source: google/protobuf/internal/no_package.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  _NOPACKAGEENUM._serialized_start=106
  _NOPACKAGEENUM._serialized_end=169
  _NOPACKAGEMESSAGE._serialized_start=45
  _NOPACKAGEMESSAGE._serialized_end=104
# @@protoc_insertion_point(module_scope)
File diff suppressed because it is too large
@ -1,435 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Provides type checking routines.

This module defines type checking utilities in the forms of dictionaries:

VALUE_CHECKERS: A dictionary of field types and a value validation object.
TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
  function.
TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
  function.
FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their
  corresponding wire types.
TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
  function.
"""

__author__ = 'robinson@google.com (Will Robinson)'

import ctypes
import numbers

from google.protobuf.internal import decoder
from google.protobuf.internal import encoder
from google.protobuf.internal import wire_format
from google.protobuf import descriptor

_FieldDescriptor = descriptor.FieldDescriptor


def TruncateToFourByteFloat(original):
  return ctypes.c_float(original).value


def ToShortestFloat(original):
  """Returns the shortest float that has the same value in wire."""
  # All 4-byte floats have between 6 and 9 significant digits, so we
  # start with 6 as the lower bound.
  # It has to be iterative because using '.9g' directly cannot get rid
  # of the noise for most values.  For example, if a float_field is set
  # to 0.9, '.9g' prints 0.899999976.
  precision = 6
  rounded = float('{0:.{1}g}'.format(original, precision))
  while TruncateToFourByteFloat(rounded) != original:
    precision += 1
    rounded = float('{0:.{1}g}'.format(original, precision))
  return rounded
|
||||
|
||||
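
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# The precision search above starts at 6 significant digits and widens until
# the rounded value survives a round-trip through a 4-byte float:
#
#   >>> TruncateToFourByteFloat(0.9)
#   0.8999999761581421
#   >>> ToShortestFloat(0.8999999761581421)
#   0.9
# --------------------------------------------------------------------------
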
def SupportsOpenEnums(field_descriptor):
  return field_descriptor.containing_type.syntax == 'proto3'


def GetTypeChecker(field):
  """Returns a type checker for a message field of the specified types.

  Args:
    field: FieldDescriptor object for this field.

  Returns:
    An instance of TypeChecker which can be used to verify the types
    of values assigned to a field of the specified type.
  """
  if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
      field.type == _FieldDescriptor.TYPE_STRING):
    return UnicodeValueChecker()
  if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
    if SupportsOpenEnums(field):
      # When open enums are supported, any int32 can be assigned.
      return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
    else:
      return EnumValueChecker(field.enum_type)
  return _VALUE_CHECKERS[field.cpp_type]
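
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# A checker returned by GetTypeChecker is used by the pure-Python message
# implementation roughly like this, for any FieldDescriptor `field`:
#
#   checker = GetTypeChecker(field)
#   value = checker.CheckValue(proposed_value)  # may raise TypeError/ValueError
#   default = checker.DefaultValue()
# --------------------------------------------------------------------------
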
# None of the typecheckers below make any attempt to guard against people
# subclassing builtin types and doing weird things.  We're not trying to
# protect against malicious clients here, just people accidentally shooting
# themselves in the foot in obvious ways.
class TypeChecker(object):

  """Type checker used to catch type errors as early as possible
  when the client is setting scalar fields in protocol messages.
  """

  def __init__(self, *acceptable_types):
    self._acceptable_types = acceptable_types

  def CheckValue(self, proposed_value):
    """Type check the provided value and return it.

    The returned value might have been normalized to another type.
    """
    if not isinstance(proposed_value, self._acceptable_types):
      message = ('%.1024r has type %s, but expected one of: %s' %
                 (proposed_value, type(proposed_value), self._acceptable_types))
      raise TypeError(message)
    return proposed_value


class TypeCheckerWithDefault(TypeChecker):

  def __init__(self, default_value, *acceptable_types):
    TypeChecker.__init__(self, *acceptable_types)
    self._default_value = default_value

  def DefaultValue(self):
    return self._default_value


class BoolValueChecker(object):
  """Type checker used for bool fields."""

  def CheckValue(self, proposed_value):
    if not hasattr(proposed_value, '__index__') or (
        type(proposed_value).__module__ == 'numpy' and
        type(proposed_value).__name__ == 'ndarray'):
      message = ('%.1024r has type %s, but expected one of: %s' %
                 (proposed_value, type(proposed_value), (bool, int)))
      raise TypeError(message)
    return bool(proposed_value)

  def DefaultValue(self):
    return False


# IntValueChecker and its subclasses perform integer type-checks
# and bounds-checks.
class IntValueChecker(object):

  """Checker used for integer fields.  Performs type-check and range check."""

  def CheckValue(self, proposed_value):
    if not hasattr(proposed_value, '__index__') or (
        type(proposed_value).__module__ == 'numpy' and
        type(proposed_value).__name__ == 'ndarray'):
      message = ('%.1024r has type %s, but expected one of: %s' %
                 (proposed_value, type(proposed_value), (int,)))
      raise TypeError(message)

    if not self._MIN <= int(proposed_value) <= self._MAX:
      raise ValueError('Value out of range: %d' % proposed_value)
    # We force all values to int to make alternate implementations where the
    # distinction is more significant (e.g. the C++ implementation) simpler.
    proposed_value = int(proposed_value)
    return proposed_value

  def DefaultValue(self):
    return 0


class EnumValueChecker(object):

  """Checker used for enum fields.  Performs type-check and range check."""

  def __init__(self, enum_type):
    self._enum_type = enum_type

  def CheckValue(self, proposed_value):
    if not isinstance(proposed_value, numbers.Integral):
      message = ('%.1024r has type %s, but expected one of: %s' %
                 (proposed_value, type(proposed_value), (int,)))
      raise TypeError(message)
    if int(proposed_value) not in self._enum_type.values_by_number:
      raise ValueError('Unknown enum value: %d' % proposed_value)
    return proposed_value

  def DefaultValue(self):
    return self._enum_type.values[0].number


class UnicodeValueChecker(object):

  """Checker used for string fields.

  Always returns a unicode value, even if the input is of type str.
  """

  def CheckValue(self, proposed_value):
    if not isinstance(proposed_value, (bytes, str)):
      message = ('%.1024r has type %s, but expected one of: %s' %
                 (proposed_value, type(proposed_value), (bytes, str)))
      raise TypeError(message)

    # If the value is of type 'bytes' make sure that it is valid UTF-8 data.
    if isinstance(proposed_value, bytes):
      try:
        proposed_value = proposed_value.decode('utf-8')
      except UnicodeDecodeError:
        raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
                         'encoding. Non-UTF-8 strings must be converted to '
                         'unicode objects before being added.' %
                         (proposed_value))
    else:
      try:
        proposed_value.encode('utf8')
      except UnicodeEncodeError:
        raise ValueError('%.1024r isn\'t a valid unicode string and '
                         'can\'t be encoded in UTF-8.' %
                         (proposed_value))

    return proposed_value

  def DefaultValue(self):
    return u""


class Int32ValueChecker(IntValueChecker):
  # We're sure to use ints instead of longs here since comparison may be more
  # efficient.
  _MIN = -2147483648
  _MAX = 2147483647


class Uint32ValueChecker(IntValueChecker):
  _MIN = 0
  _MAX = (1 << 32) - 1


class Int64ValueChecker(IntValueChecker):
  _MIN = -(1 << 63)
  _MAX = (1 << 63) - 1


class Uint64ValueChecker(IntValueChecker):
  _MIN = 0
  _MAX = (1 << 64) - 1


# The max 4 bytes float is about 3.4028234663852886e+38
_FLOAT_MAX = float.fromhex('0x1.fffffep+127')
_FLOAT_MIN = -_FLOAT_MAX
_INF = float('inf')
_NEG_INF = float('-inf')


class DoubleValueChecker(object):
  """Checker used for double fields.

  Performs type-check and range check.
  """

  def CheckValue(self, proposed_value):
    """Check and convert proposed_value to float."""
    if (not hasattr(proposed_value, '__float__') and
        not hasattr(proposed_value, '__index__')) or (
            type(proposed_value).__module__ == 'numpy' and
            type(proposed_value).__name__ == 'ndarray'):
      message = ('%.1024r has type %s, but expected one of: int, float' %
                 (proposed_value, type(proposed_value)))
      raise TypeError(message)
    return float(proposed_value)

  def DefaultValue(self):
    return 0.0


class FloatValueChecker(DoubleValueChecker):
  """Checker used for float fields.

  Performs type-check and range check.

  Values exceeding a 32-bit float will be converted to inf/-inf.
  """

  def CheckValue(self, proposed_value):
    """Check and convert proposed_value to float."""
    converted_value = super().CheckValue(proposed_value)
    # This inf rounding matches the C++ proto SafeDoubleToFloat logic.
    if converted_value > _FLOAT_MAX:
      return _INF
    if converted_value < _FLOAT_MIN:
      return _NEG_INF

    return TruncateToFourByteFloat(converted_value)


# Type-checkers for all scalar CPPTYPEs.
_VALUE_CHECKERS = {
    _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
    _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
    _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(),
    _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(),
    _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(),
    _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
}


# Map from field type to a function F, such that F(field_num, value)
# gives the total byte size for a value of the given type.  This
# byte size includes tag information and any other additional space
# associated with serializing "value".
TYPE_TO_BYTE_SIZE_FN = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
    _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
    _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
    _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
    _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
    _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
    _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
    _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
    _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
    _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
    _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
    _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
    _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
    _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
    _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
    _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
}


# Maps from field types to encoder constructors.
TYPE_TO_ENCODER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
    _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
}


# Maps from field types to sizer constructors.
TYPE_TO_SIZER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
    _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
}


# Maps from field type to a decoder constructor.
TYPE_TO_DECODER = {
    _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
    _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
    _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
    _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
    _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
    _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
    _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
    _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
    _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
    _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
    _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
    _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
    _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
    _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
    _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
    _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
    _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
    _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
}

# Maps from field type to expected wiretype.
FIELD_TYPE_TO_WIRE_TYPE = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_STRING:
        wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
    _FieldDescriptor.TYPE_MESSAGE:
        wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_BYTES:
        wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
}
@ -1,878 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc.  All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Contains well known classes.

This file defines well known classes which need extra maintenance including:
  - Any
  - Duration
  - FieldMask
  - Struct
  - Timestamp
"""

__author__ = 'jieluo@google.com (Jie Luo)'

import calendar
import collections.abc
import datetime

from google.protobuf.descriptor import FieldDescriptor

_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
_NANOS_PER_SECOND = 1000000000
_NANOS_PER_MILLISECOND = 1000000
_NANOS_PER_MICROSECOND = 1000
_MILLIS_PER_SECOND = 1000
_MICROS_PER_SECOND = 1000000
_SECONDS_PER_DAY = 24 * 3600
_DURATION_SECONDS_MAX = 315576000000


class Any(object):
  """Class for Any Message type."""

  __slots__ = ()

  def Pack(self, msg, type_url_prefix='type.googleapis.com/',
           deterministic=None):
    """Packs the specified message into current Any message."""
    if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/':
      self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
    else:
      self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
    self.value = msg.SerializeToString(deterministic=deterministic)

  def Unpack(self, msg):
    """Unpacks the current Any message into specified message."""
    descriptor = msg.DESCRIPTOR
    if not self.Is(descriptor):
      return False
    msg.ParseFromString(self.value)
    return True

  def TypeName(self):
    """Returns the protobuf type name of the inner message."""
    # Only last part is to be used: b/25630112
    return self.type_url.split('/')[-1]

  def Is(self, descriptor):
    """Checks if this Any represents the given protobuf type."""
    return '/' in self.type_url and self.TypeName() == descriptor.full_name
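
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# Typical round-trip through Any, assuming the stock generated modules
# any_pb2 and duration_pb2 are importable:
#
#   >>> from google.protobuf import any_pb2, duration_pb2
#   >>> any_msg = any_pb2.Any()
#   >>> any_msg.Pack(duration_pb2.Duration(seconds=30))
#   >>> any_msg.TypeName()
#   'google.protobuf.Duration'
#   >>> out = duration_pb2.Duration()
#   >>> any_msg.Unpack(out)
#   True
# --------------------------------------------------------------------------
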
_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0)
_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp(
    0, tz=datetime.timezone.utc)


class Timestamp(object):
  """Class for Timestamp message type."""

  __slots__ = ()

  def ToJsonString(self):
    """Converts Timestamp to RFC 3339 date string format.

    Returns:
      A string converted from timestamp. The string is always Z-normalized
      and uses 3, 6 or 9 fractional digits as required to represent the
      exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
    """
    nanos = self.nanos % _NANOS_PER_SECOND
    total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND
    seconds = total_sec % _SECONDS_PER_DAY
    days = (total_sec - seconds) // _SECONDS_PER_DAY
    dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds)

    result = dt.isoformat()
    if (nanos % 1e9) == 0:
      # If there are 0 fractional digits, the fractional
      # point '.' should be omitted when serializing.
      return result + 'Z'
    if (nanos % 1e6) == 0:
      # Serialize 3 fractional digits.
      return result + '.%03dZ' % (nanos / 1e6)
    if (nanos % 1e3) == 0:
      # Serialize 6 fractional digits.
      return result + '.%06dZ' % (nanos / 1e3)
    # Serialize 9 fractional digits.
    return result + '.%09dZ' % nanos

  def FromJsonString(self, value):
    """Parses an RFC 3339 date string into this Timestamp.

    Args:
      value: A date string. Any fractional digits (or none) and any offset are
          accepted as long as they fit into nano-seconds precision.
          Example of accepted format: '1972-01-01T10:00:20.021-05:00'

    Raises:
      ValueError: On parsing problems.
    """
    if not isinstance(value, str):
      raise ValueError('Timestamp JSON value not a string: {!r}'.format(value))
    timezone_offset = value.find('Z')
    if timezone_offset == -1:
      timezone_offset = value.find('+')
    if timezone_offset == -1:
      timezone_offset = value.rfind('-')
    if timezone_offset == -1:
      raise ValueError(
          'Failed to parse timestamp: missing valid timezone offset.')
    time_value = value[0:timezone_offset]
    # Parse datetime and nanos.
    point_position = time_value.find('.')
    if point_position == -1:
      second_value = time_value
      nano_value = ''
    else:
      second_value = time_value[:point_position]
      nano_value = time_value[point_position + 1:]
    if 't' in second_value:
      raise ValueError(
          'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', '
          'lowercase \'t\' is not accepted'.format(second_value))
    date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT)
    td = date_object - datetime.datetime(1970, 1, 1)
    seconds = td.seconds + td.days * _SECONDS_PER_DAY
    if len(nano_value) > 9:
      raise ValueError(
          'Failed to parse Timestamp: nanos {0} more than '
          '9 fractional digits.'.format(nano_value))
    if nano_value:
      nanos = round(float('0.' + nano_value) * 1e9)
    else:
      nanos = 0
    # Parse timezone offsets.
    if value[timezone_offset] == 'Z':
      if len(value) != timezone_offset + 1:
        raise ValueError('Failed to parse timestamp: invalid trailing'
                         ' data {0}.'.format(value))
    else:
      timezone = value[timezone_offset:]
      pos = timezone.find(':')
      if pos == -1:
        raise ValueError(
            'Invalid timezone offset value: {0}.'.format(timezone))
      if timezone[0] == '+':
        seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
      else:
        seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
    # Set seconds and nanos
    self.seconds = int(seconds)
    self.nanos = int(nanos)

  def GetCurrentTime(self):
    """Get the current UTC into Timestamp."""
    self.FromDatetime(datetime.datetime.utcnow())

  def ToNanoseconds(self):
    """Converts Timestamp to nanoseconds since epoch."""
    return self.seconds * _NANOS_PER_SECOND + self.nanos

  def ToMicroseconds(self):
    """Converts Timestamp to microseconds since epoch."""
    return (self.seconds * _MICROS_PER_SECOND +
            self.nanos // _NANOS_PER_MICROSECOND)

  def ToMilliseconds(self):
    """Converts Timestamp to milliseconds since epoch."""
    return (self.seconds * _MILLIS_PER_SECOND +
            self.nanos // _NANOS_PER_MILLISECOND)

  def ToSeconds(self):
    """Converts Timestamp to seconds since epoch."""
    return self.seconds

  def FromNanoseconds(self, nanos):
    """Converts nanoseconds since epoch to Timestamp."""
    self.seconds = nanos // _NANOS_PER_SECOND
    self.nanos = nanos % _NANOS_PER_SECOND

  def FromMicroseconds(self, micros):
    """Converts microseconds since epoch to Timestamp."""
    self.seconds = micros // _MICROS_PER_SECOND
    self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND

  def FromMilliseconds(self, millis):
    """Converts milliseconds since epoch to Timestamp."""
    self.seconds = millis // _MILLIS_PER_SECOND
    self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND

  def FromSeconds(self, seconds):
    """Converts seconds since epoch to Timestamp."""
    self.seconds = seconds
    self.nanos = 0

  def ToDatetime(self, tzinfo=None):
    """Converts Timestamp to a datetime.

    Args:
      tzinfo: A datetime.tzinfo subclass; defaults to None.

    Returns:
      If tzinfo is None, returns a timezone-naive UTC datetime (with no
      timezone information, i.e. not aware that it's UTC).

      Otherwise, returns a timezone-aware datetime in the input timezone.
    """
    delta = datetime.timedelta(
        seconds=self.seconds,
        microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND))
    if tzinfo is None:
      return _EPOCH_DATETIME_NAIVE + delta
    else:
      return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta

  def FromDatetime(self, dt):
    """Converts datetime to Timestamp.

    Args:
      dt: A datetime. If it's timezone-naive, it's assumed to be in UTC.
    """
    # Using this guide: http://wiki.python.org/moin/WorkingWithTime
    # And this conversion guide: http://docs.python.org/library/time.html

    # Turn the date parameter into a tuple (struct_time) that can then be
    # manipulated into a long value of seconds.  During the conversion from
    # struct_time to long, the source date is in UTC, and so it follows that
    # the correct transformation is calendar.timegm()
    self.seconds = calendar.timegm(dt.utctimetuple())
    self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND
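
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# The JSON round-trip implemented above always Z-normalizes on output:
#
#   >>> from google.protobuf import timestamp_pb2
#   >>> ts = timestamp_pb2.Timestamp()
#   >>> ts.FromJsonString('1972-01-01T10:00:20.021-05:00')
#   >>> ts.ToJsonString()
#   '1972-01-01T15:00:20.021Z'
# --------------------------------------------------------------------------
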
class Duration(object):
  """Class for Duration message type."""

  __slots__ = ()

  def ToJsonString(self):
    """Converts Duration to string format.

    Returns:
      A string converted from self. The string format will contain
      3, 6, or 9 fractional digits depending on the precision required to
      represent the exact Duration value. For example: "1s", "1.010s",
      "1.000000100s", "-3.100s"
    """
    _CheckDurationValid(self.seconds, self.nanos)
    if self.seconds < 0 or self.nanos < 0:
      result = '-'
      seconds = - self.seconds + int((0 - self.nanos) // 1e9)
      nanos = (0 - self.nanos) % 1e9
    else:
      result = ''
      seconds = self.seconds + int(self.nanos // 1e9)
      nanos = self.nanos % 1e9
    result += '%d' % seconds
    if (nanos % 1e9) == 0:
      # If there are 0 fractional digits, the fractional
      # point '.' should be omitted when serializing.
      return result + 's'
    if (nanos % 1e6) == 0:
      # Serialize 3 fractional digits.
      return result + '.%03ds' % (nanos / 1e6)
    if (nanos % 1e3) == 0:
      # Serialize 6 fractional digits.
      return result + '.%06ds' % (nanos / 1e3)
    # Serialize 9 fractional digits.
    return result + '.%09ds' % nanos

  def FromJsonString(self, value):
    """Converts a string to Duration.

    Args:
      value: A string to be converted. The string must end with 's'. Any
          fractional digits (or none) are accepted as long as they fit into
          nano-seconds precision. For example: "1s", "1.01s", "1.0000001s",
          "-3.100s"

    Raises:
      ValueError: On parsing problems.
    """
    if not isinstance(value, str):
      raise ValueError('Duration JSON value not a string: {!r}'.format(value))
    if len(value) < 1 or value[-1] != 's':
      raise ValueError(
          'Duration must end with letter "s": {0}.'.format(value))
    try:
      pos = value.find('.')
      if pos == -1:
        seconds = int(value[:-1])
        nanos = 0
      else:
        seconds = int(value[:pos])
        if value[0] == '-':
          nanos = int(round(float('-0{0}'.format(value[pos: -1])) * 1e9))
        else:
          nanos = int(round(float('0{0}'.format(value[pos: -1])) * 1e9))
      _CheckDurationValid(seconds, nanos)
      self.seconds = seconds
      self.nanos = nanos
    except ValueError as e:
      raise ValueError(
          'Couldn\'t parse duration: {0} : {1}.'.format(value, e))

  def ToNanoseconds(self):
    """Converts a Duration to nanoseconds."""
    return self.seconds * _NANOS_PER_SECOND + self.nanos

  def ToMicroseconds(self):
    """Converts a Duration to microseconds."""
    micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
    return self.seconds * _MICROS_PER_SECOND + micros

  def ToMilliseconds(self):
    """Converts a Duration to milliseconds."""
    millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
    return self.seconds * _MILLIS_PER_SECOND + millis

  def ToSeconds(self):
    """Converts a Duration to seconds."""
    return self.seconds

  def FromNanoseconds(self, nanos):
    """Converts nanoseconds to Duration."""
    self._NormalizeDuration(nanos // _NANOS_PER_SECOND,
                            nanos % _NANOS_PER_SECOND)

  def FromMicroseconds(self, micros):
    """Converts microseconds to Duration."""
    self._NormalizeDuration(
        micros // _MICROS_PER_SECOND,
        (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND)

  def FromMilliseconds(self, millis):
    """Converts milliseconds to Duration."""
    self._NormalizeDuration(
        millis // _MILLIS_PER_SECOND,
        (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND)

  def FromSeconds(self, seconds):
    """Converts seconds to Duration."""
    self.seconds = seconds
    self.nanos = 0

  def ToTimedelta(self):
    """Converts Duration to timedelta."""
    return datetime.timedelta(
        seconds=self.seconds, microseconds=_RoundTowardZero(
            self.nanos, _NANOS_PER_MICROSECOND))

  def FromTimedelta(self, td):
    """Converts timedelta to Duration."""
    self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY,
                            td.microseconds * _NANOS_PER_MICROSECOND)

  def _NormalizeDuration(self, seconds, nanos):
    """Set Duration by seconds and nanos."""
    # Force nanos to be negative if the duration is negative.
    if seconds < 0 and nanos > 0:
      seconds += 1
      nanos -= _NANOS_PER_SECOND
    self.seconds = seconds
    self.nanos = nanos
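
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# Seconds and nanos carry one shared sign, which _NormalizeDuration enforces:
#
#   >>> from google.protobuf import duration_pb2
#   >>> d = duration_pb2.Duration()
#   >>> d.FromTimedelta(datetime.timedelta(seconds=-3, milliseconds=-100))
#   >>> (d.seconds, d.nanos)
#   (-3, -100000000)
#   >>> d.ToJsonString()
#   '-3.100s'
# --------------------------------------------------------------------------
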
def _CheckDurationValid(seconds, nanos):
  if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX:
    raise ValueError(
        'Duration is not valid: Seconds {0} must be in range '
        '[-315576000000, 315576000000].'.format(seconds))
  if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND:
    raise ValueError(
        'Duration is not valid: Nanos {0} must be in range '
        '[-999999999, 999999999].'.format(nanos))
  if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0):
    raise ValueError(
        'Duration is not valid: Sign mismatch.')


def _RoundTowardZero(value, divider):
  """Truncates the remainder part after division."""
  # For some languages, the sign of the remainder is implementation
  # dependent if any of the operands is negative. Here we enforce
  # "rounded toward zero" semantics. For example, for (-5) / 2 an
  # implementation may give -3 as the result with the remainder being
  # 1. This function ensures we always return -2 (closer to zero).
  result = value // divider
  remainder = value % divider
  if result < 0 and remainder > 0:
    return result + 1
  else:
    return result
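
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# Python's // floors toward negative infinity, hence the correction above:
#
#   >>> -5 // 2
#   -3
#   >>> _RoundTowardZero(-5, 2)
#   -2
# --------------------------------------------------------------------------
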
class FieldMask(object):
  """Class for FieldMask message type."""

  __slots__ = ()

  def ToJsonString(self):
    """Converts FieldMask to string according to proto3 JSON spec."""
    camelcase_paths = []
    for path in self.paths:
      camelcase_paths.append(_SnakeCaseToCamelCase(path))
    return ','.join(camelcase_paths)

  def FromJsonString(self, value):
    """Converts string to FieldMask according to proto3 JSON spec."""
    if not isinstance(value, str):
      raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
    self.Clear()
    if value:
      for path in value.split(','):
        self.paths.append(_CamelCaseToSnakeCase(path))

  def IsValidForDescriptor(self, message_descriptor):
    """Checks whether the FieldMask is valid for Message Descriptor."""
    for path in self.paths:
      if not _IsValidPath(message_descriptor, path):
        return False
    return True

  def AllFieldsFromDescriptor(self, message_descriptor):
    """Gets all direct fields of Message Descriptor to FieldMask."""
    self.Clear()
    for field in message_descriptor.fields:
      self.paths.append(field.name)

  def CanonicalFormFromMask(self, mask):
    """Converts a FieldMask to the canonical form.

    Removes paths that are covered by another path. For example,
    "foo.bar" is covered by "foo" and will be removed if "foo"
    is also in the FieldMask. Then sorts all paths in alphabetical order.

    Args:
      mask: The original FieldMask to be converted.
    """
    tree = _FieldMaskTree(mask)
    tree.ToFieldMask(self)

  def Union(self, mask1, mask2):
    """Merges mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    tree = _FieldMaskTree(mask1)
    tree.MergeFromFieldMask(mask2)
    tree.ToFieldMask(self)

  def Intersect(self, mask1, mask2):
    """Intersects mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    tree = _FieldMaskTree(mask1)
    intersection = _FieldMaskTree()
    for path in mask2.paths:
      tree.IntersectPath(path, intersection)
    intersection.ToFieldMask(self)

  def MergeMessage(
      self, source, destination,
      replace_message_field=False, replace_repeated_field=False):
    """Merges fields specified in FieldMask from source to destination.

    Args:
      source: Source message.
      destination: The destination message to be merged into.
      replace_message_field: Replace message field if True. Merge message
          field if False.
      replace_repeated_field: Replace repeated field if True. Append
          elements of repeated field if False.
    """
    tree = _FieldMaskTree(self)
    tree.MergeMessage(
        source, destination, replace_message_field, replace_repeated_field)
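
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# Canonicalization drops covered paths and sorts, e.g. with the stock
# field_mask_pb2 module:
#
#   >>> from google.protobuf import field_mask_pb2
#   >>> src = field_mask_pb2.FieldMask(paths=['foo.bar', 'foo', 'bar.baz'])
#   >>> dst = field_mask_pb2.FieldMask()
#   >>> dst.CanonicalFormFromMask(src)
#   >>> list(dst.paths)
#   ['bar.baz', 'foo']
# --------------------------------------------------------------------------
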
def _IsValidPath(message_descriptor, path):
  """Checks whether the path is valid for Message Descriptor."""
  parts = path.split('.')
  last = parts.pop()
  for name in parts:
    field = message_descriptor.fields_by_name.get(name)
    if (field is None or
        field.label == FieldDescriptor.LABEL_REPEATED or
        field.type != FieldDescriptor.TYPE_MESSAGE):
      return False
    message_descriptor = field.message_type
  return last in message_descriptor.fields_by_name


def _CheckFieldMaskMessage(message):
  """Raises ValueError if message is not a FieldMask."""
  message_descriptor = message.DESCRIPTOR
  if (message_descriptor.name != 'FieldMask' or
      message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
    raise ValueError('Message {0} is not a FieldMask.'.format(
        message_descriptor.full_name))


def _SnakeCaseToCamelCase(path_name):
  """Converts a path name from snake_case to camelCase."""
  result = []
  after_underscore = False
  for c in path_name:
    if c.isupper():
      raise ValueError(
          'Fail to print FieldMask to Json string: Path name '
          '{0} must not contain uppercase letters.'.format(path_name))
    if after_underscore:
      if c.islower():
        result.append(c.upper())
        after_underscore = False
      else:
        raise ValueError(
            'Fail to print FieldMask to Json string: The '
            'character after a "_" must be a lowercase letter '
            'in path name {0}.'.format(path_name))
    elif c == '_':
      after_underscore = True
    else:
      result += c

  if after_underscore:
    raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
                     'in path name {0}.'.format(path_name))
  return ''.join(result)


def _CamelCaseToSnakeCase(path_name):
  """Converts a field name from camelCase to snake_case."""
  result = []
  for c in path_name:
    if c == '_':
      raise ValueError('Fail to parse FieldMask: Path name '
                       '{0} must not contain "_"s.'.format(path_name))
    if c.isupper():
      result += '_'
      result += c.lower()
    else:
      result += c
  return ''.join(result)
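
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# The two helpers are exact inverses on well-formed names:
#
#   >>> _SnakeCaseToCamelCase('foo_bar_baz')
#   'fooBarBaz'
#   >>> _CamelCaseToSnakeCase('fooBarBaz')
#   'foo_bar_baz'
# --------------------------------------------------------------------------
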
class _FieldMaskTree(object):
  """Represents a FieldMask in a tree structure.

  For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
  the FieldMaskTree will be:
      [_root] -+- foo -+- bar
               |       |
               |       +- baz
               |
               +- bar --- baz
  In the tree, each leaf node represents a field path.
  """

  __slots__ = ('_root',)

  def __init__(self, field_mask=None):
    """Initializes the tree by FieldMask."""
    self._root = {}
    if field_mask:
      self.MergeFromFieldMask(field_mask)

  def MergeFromFieldMask(self, field_mask):
    """Merges a FieldMask to the tree."""
    for path in field_mask.paths:
      self.AddPath(path)

  def AddPath(self, path):
    """Adds a field path into the tree.

    If the field path to add is a sub-path of an existing field path
    in the tree (i.e., a leaf node), it means the tree already matches
    the given path so nothing will be added to the tree. If the path
    matches an existing non-leaf node in the tree, that non-leaf node
    will be turned into a leaf node with all its children removed because
    the path matches all the node's children. Otherwise, a new path will
    be added.

    Args:
      path: The field path to add.
    """
    node = self._root
    for name in path.split('.'):
      if name not in node:
        node[name] = {}
      elif not node[name]:
        # Pre-existing empty node implies we already have this entire tree.
        return
      node = node[name]
    # Remove any sub-trees we might have had.
    node.clear()

  def ToFieldMask(self, field_mask):
    """Converts the tree to a FieldMask."""
    field_mask.Clear()
    _AddFieldPaths(self._root, '', field_mask)

  def IntersectPath(self, path, intersection):
    """Calculates the intersection part of a field path with this tree.

    Args:
      path: The field path to intersect.
      intersection: The out tree to record the intersection part.
    """
    node = self._root
    for name in path.split('.'):
      if name not in node:
        return
      elif not node[name]:
        intersection.AddPath(path)
        return
      node = node[name]
    intersection.AddLeafNodes(path, node)

  def AddLeafNodes(self, prefix, node):
    """Adds leaf nodes beginning with prefix to this tree."""
    if not node:
      self.AddPath(prefix)
    for name in node:
      child_path = prefix + '.' + name
      self.AddLeafNodes(child_path, node[name])

  def MergeMessage(
      self, source, destination,
      replace_message, replace_repeated):
    """Merge all fields specified by this tree from source to destination."""
    _MergeMessage(
        self._root, source, destination, replace_message, replace_repeated)


def _StrConvert(value):
  """Converts value to str if it is not."""
  # This file is imported by the C extension, and some methods like
  # ClearField require a string for the field name. py2/py3 have different
  # text types and may use unicode.
  if not isinstance(value, str):
    return value.encode('utf-8')
  return value


def _MergeMessage(
    node, source, destination, replace_message, replace_repeated):
  """Merge all fields specified by a sub-tree from source to destination."""
  source_descriptor = source.DESCRIPTOR
  for name in node:
    child = node[name]
    field = source_descriptor.fields_by_name[name]
    if field is None:
      raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
          name, source_descriptor.full_name))
    if child:
      # Sub-paths are only allowed for singular message fields.
      if (field.label == FieldDescriptor.LABEL_REPEATED or
          field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
        raise ValueError('Error: Field {0} in message {1} is not a singular '
                         'message field and cannot have sub-fields.'.format(
                             name, source_descriptor.full_name))
      if source.HasField(name):
        _MergeMessage(
            child, getattr(source, name), getattr(destination, name),
            replace_message, replace_repeated)
      continue
    if field.label == FieldDescriptor.LABEL_REPEATED:
      if replace_repeated:
        destination.ClearField(_StrConvert(name))
      repeated_source = getattr(source, name)
      repeated_destination = getattr(destination, name)
      repeated_destination.MergeFrom(repeated_source)
    else:
      if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
        if replace_message:
          destination.ClearField(_StrConvert(name))
        if source.HasField(name):
          getattr(destination, name).MergeFrom(getattr(source, name))
      else:
        setattr(destination, name, getattr(source, name))


def _AddFieldPaths(node, prefix, field_mask):
  """Adds the field paths descended from node to field_mask."""
  if not node and prefix:
    field_mask.paths.append(prefix)
    return
  for name in sorted(node):
    if prefix:
      child_path = prefix + '.' + name
    else:
      child_path = name
    _AddFieldPaths(node[name], child_path, field_mask)


def _SetStructValue(struct_value, value):
  if value is None:
    struct_value.null_value = 0
  elif isinstance(value, bool):
    # Note: this check must come before the number check because in Python
    # True and False are also considered numbers.
    struct_value.bool_value = value
  elif isinstance(value, str):
    struct_value.string_value = value
  elif isinstance(value, (int, float)):
    struct_value.number_value = value
  elif isinstance(value, (dict, Struct)):
    struct_value.struct_value.Clear()
    struct_value.struct_value.update(value)
  elif isinstance(value, (list, ListValue)):
    struct_value.list_value.Clear()
    struct_value.list_value.extend(value)
  else:
    raise ValueError('Unexpected type')


def _GetStructValue(struct_value):
  which = struct_value.WhichOneof('kind')
  if which == 'struct_value':
    return struct_value.struct_value
  elif which == 'null_value':
    return None
  elif which == 'number_value':
    return struct_value.number_value
  elif which == 'string_value':
    return struct_value.string_value
  elif which == 'bool_value':
    return struct_value.bool_value
  elif which == 'list_value':
    return struct_value.list_value
  elif which is None:
    raise ValueError('Value not set')


class Struct(object):
  """Class for Struct message type."""

  __slots__ = ()

  def __getitem__(self, key):
    return _GetStructValue(self.fields[key])

  def __contains__(self, item):
    return item in self.fields

  def __setitem__(self, key, value):
    _SetStructValue(self.fields[key], value)

  def __delitem__(self, key):
    del self.fields[key]

  def __len__(self):
    return len(self.fields)

  def __iter__(self):
    return iter(self.fields)

  def keys(self):  # pylint: disable=invalid-name
    return self.fields.keys()

  def values(self):  # pylint: disable=invalid-name
    return [self[key] for key in self]

  def items(self):  # pylint: disable=invalid-name
    return [(key, self[key]) for key in self]

  def get_or_create_list(self, key):
    """Returns a list for this key, creating if it didn't exist already."""
    if not self.fields[key].HasField('list_value'):
      # Clear will mark list_value modified which will indeed create a list.
      self.fields[key].list_value.Clear()
    return self.fields[key].list_value

  def get_or_create_struct(self, key):
    """Returns a struct for this key, creating if it didn't exist already."""
    if not self.fields[key].HasField('struct_value'):
      # Clear will mark struct_value modified which will indeed create a
      # struct.
      self.fields[key].struct_value.Clear()
    return self.fields[key].struct_value

  def update(self, dictionary):  # pylint: disable=invalid-name
    for key, value in dictionary.items():
      _SetStructValue(self.fields[key], value)

collections.abc.MutableMapping.register(Struct)


class ListValue(object):
  """Class for ListValue message type."""

  __slots__ = ()

  def __len__(self):
    return len(self.values)

  def append(self, value):
    _SetStructValue(self.values.add(), value)

  def extend(self, elem_seq):
    for value in elem_seq:
      self.append(value)

  def __getitem__(self, index):
    """Retrieves item by the specified index."""
    return _GetStructValue(self.values.__getitem__(index))

  def __setitem__(self, index, value):
    _SetStructValue(self.values.__getitem__(index), value)

  def __delitem__(self, key):
    del self.values[key]

  def items(self):
    for i in range(len(self)):
      yield self[i]

  def add_struct(self):
    """Appends and returns a struct value as the next value in the list."""
    struct_value = self.values.add().struct_value
    # Clear will mark struct_value modified which will indeed create a struct.
    struct_value.Clear()
    return struct_value

  def add_list(self):
    """Appends and returns a list value as the next value in the list."""
    list_value = self.values.add().list_value
    # Clear will mark list_value modified which will indeed create a list.
    list_value.Clear()
    return list_value

collections.abc.MutableSequence.register(ListValue)
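
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# Struct and ListValue give proto Struct messages a dict/list feel; note that
# all numbers are stored as doubles:
#
#   >>> from google.protobuf import struct_pb2
#   >>> s = struct_pb2.Struct()
#   >>> s.update({'name': 'example', 'size': 3, 'tags': ['a', 'b']})
#   >>> s['size']
#   3.0
#   >>> list(s['tags'])
#   ['a', 'b']
# --------------------------------------------------------------------------
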
WKTBASES = {
    'google.protobuf.Any': Any,
    'google.protobuf.Duration': Duration,
    'google.protobuf.FieldMask': FieldMask,
    'google.protobuf.ListValue': ListValue,
    'google.protobuf.Struct': Struct,
    'google.protobuf.Timestamp': Timestamp,
}
@ -1,268 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc.  All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Constants and static functions to support protocol buffer wire format."""

__author__ = 'robinson@google.com (Will Robinson)'

import struct
from google.protobuf import descriptor
from google.protobuf import message


TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7

# These numbers identify the wire type of a protocol buffer value.
# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
# tag-and-type to store one of these WIRETYPE_* constants.
# These values must match WireType enum in google/protobuf/wire_format.h.
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
WIRETYPE_END_GROUP = 4
WIRETYPE_FIXED32 = 5
_WIRETYPE_MAX = 5


# Bounds for various integer types.
INT32_MAX = int((1 << 31) - 1)
INT32_MIN = int(-(1 << 31))
UINT32_MAX = (1 << 32) - 1

INT64_MAX = (1 << 63) - 1
INT64_MIN = -(1 << 63)
UINT64_MAX = (1 << 64) - 1

# "struct" format strings that will encode/decode the specified formats.
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'


# We'll have to provide alternate implementations of AppendLittleEndian*() on
# any architectures where these checks fail.
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
  raise AssertionError('Format "I" is not a 32-bit number.')
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
  raise AssertionError('Format "Q" is not a 64-bit number.')


def PackTag(field_number, wire_type):
  """Returns an unsigned 32-bit integer that encodes the field number and
  wire type information in standard protocol message wire format.

  Args:
    field_number: Expected to be an integer in the range [1, 1 << 29)
    wire_type: One of the WIRETYPE_* constants.
  """
  if not 0 <= wire_type <= _WIRETYPE_MAX:
    raise message.EncodeError('Unknown wire type: %d' % wire_type)
  return (field_number << TAG_TYPE_BITS) | wire_type


def UnpackTag(tag):
  """The inverse of PackTag().  Given an unsigned 32-bit number,
  returns a (field_number, wire_type) tuple.
  """
  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
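
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# A tag is simply (field_number << 3) | wire_type:
#
#   >>> PackTag(1, WIRETYPE_VARINT)
#   8
#   >>> UnpackTag(8)
#   (1, 0)
# --------------------------------------------------------------------------
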
def ZigZagEncode(value):
  """ZigZag Transform:  Encodes signed integers so that they can be
  effectively used with varint encoding.  See wire_format.h for
  more details.
  """
  if value >= 0:
    return value << 1
  return (value << 1) ^ (~0)


def ZigZagDecode(value):
  """Inverse of ZigZagEncode()."""
  if not value & 0x1:
    return value >> 1
  return (value >> 1) ^ (~0)
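
# --- Editor's note (illustrative sketch; not part of the deleted file) ---
# ZigZag maps small-magnitude signed values to small unsigned values so they
# varint-encode compactly:
#
#   >>> [ZigZagEncode(v) for v in (0, -1, 1, -2, 2)]
#   [0, 1, 2, 3, 4]
#   >>> ZigZagDecode(ZigZagEncode(-64))
#   -64
# --------------------------------------------------------------------------
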
# The *ByteSize() functions below return the number of bytes required to
# serialize "field number + type" information and then serialize the value.


def Int32ByteSize(field_number, int32):
  return Int64ByteSize(field_number, int32)


def Int32ByteSizeNoTag(int32):
  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)


def Int64ByteSize(field_number, int64):
  # Have to convert to uint before calling UInt64ByteSize().
  return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)


def UInt32ByteSize(field_number, uint32):
  return UInt64ByteSize(field_number, uint32)


def UInt64ByteSize(field_number, uint64):
  return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)


def SInt32ByteSize(field_number, int32):
  return UInt32ByteSize(field_number, ZigZagEncode(int32))


def SInt64ByteSize(field_number, int64):
  return UInt64ByteSize(field_number, ZigZagEncode(int64))


def Fixed32ByteSize(field_number, fixed32):
  return TagByteSize(field_number) + 4


def Fixed64ByteSize(field_number, fixed64):
  return TagByteSize(field_number) + 8


def SFixed32ByteSize(field_number, sfixed32):
  return TagByteSize(field_number) + 4


def SFixed64ByteSize(field_number, sfixed64):
  return TagByteSize(field_number) + 8


def FloatByteSize(field_number, flt):
  return TagByteSize(field_number) + 4


def DoubleByteSize(field_number, double):
  return TagByteSize(field_number) + 8


def BoolByteSize(field_number, b):
  return TagByteSize(field_number) + 1


def EnumByteSize(field_number, enum):
  return UInt32ByteSize(field_number, enum)


def StringByteSize(field_number, string):
  return BytesByteSize(field_number, string.encode('utf-8'))


def BytesByteSize(field_number, b):
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(len(b))
          + len(b))


def GroupByteSize(field_number, message):
  return (2 * TagByteSize(field_number)  # START and END group.
          + message.ByteSize())


def MessageByteSize(field_number, message):
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(message.ByteSize())
          + message.ByteSize())


def MessageSetItemByteSize(field_number, msg):
  # First compute the sizes of the tags.
  # There are 2 tags for the beginning and ending of the repeated group, that
  # is field number 1, one with field number 2 (type_id) and one with field
  # number 3 (message).
  total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))

  # Add the number of bytes for type_id.
  total_size += _VarUInt64ByteSizeNoTag(field_number)

  message_size = msg.ByteSize()

  # The number of bytes for encoding the length of the message.
  total_size += _VarUInt64ByteSizeNoTag(message_size)

  # The size of the message.
  total_size += message_size
  return total_size


def TagByteSize(field_number):
  """Returns the bytes required to serialize a tag with this field number."""
  # Just pass in type 0, since the type won't affect the tag+type size.
  return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))


# Private helper function for the *ByteSize() functions above.


def _VarUInt64ByteSizeNoTag(uint64):
  """Returns the number of bytes required to serialize a single varint
  using boundary value comparisons. (unrolled loop optimization -WPierce)
  uint64 must be unsigned.
  """
  if uint64 <= 0x7f: return 1
  if uint64 <= 0x3fff: return 2
  if uint64 <= 0x1fffff: return 3
  if uint64 <= 0xfffffff: return 4
  if uint64 <= 0x7ffffffff: return 5
  if uint64 <= 0x3ffffffffff: return 6
  if uint64 <= 0x1ffffffffffff: return 7
  if uint64 <= 0xffffffffffffff: return 8
  if uint64 <= 0x7fffffffffffffff: return 9
|
||||
if uint64 > UINT64_MAX:
|
||||
raise message.EncodeError('Value out of range: %d' % uint64)
|
||||
return 10
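
# Editor's sketch (not part of the original module): each varint byte carries
# 7 payload bits, so the size steps up at every 7-bit boundary. The helper
# below is hypothetical and never called by the module.
def _VarintSizeExample():
  assert _VarUInt64ByteSizeNoTag(127) == 1      # fits in 7 bits
  assert _VarUInt64ByteSizeNoTag(128) == 2      # needs a continuation byte
  assert _VarUInt64ByteSizeNoTag(300) == 2
  assert _VarUInt64ByteSizeNoTag(1 << 63) == 10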


NON_PACKABLE_TYPES = (
    descriptor.FieldDescriptor.TYPE_STRING,
    descriptor.FieldDescriptor.TYPE_GROUP,
    descriptor.FieldDescriptor.TYPE_MESSAGE,
    descriptor.FieldDescriptor.TYPE_BYTES
)


def IsTypePackable(field_type):
  """Return true iff packable = true is valid for fields of this type.

  Args:
    field_type: a FieldDescriptor::Type value.

  Returns:
    True iff fields of this type are packable.
  """
  return field_type not in NON_PACKABLE_TYPES
@ -1,912 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc.  All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Contains routines for printing protocol messages in JSON format.

Simple usage example:

  # Create a proto object and serialize it to a json format string.
  message = my_proto_pb2.MyMessage(foo='bar')
  json_string = json_format.MessageToJson(message)

  # Parse a json format string to proto object.
  message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
"""

__author__ = 'jieluo@google.com (Jie Luo)'


import base64
from collections import OrderedDict
import json
import math
from operator import methodcaller
import re
import sys

from google.protobuf.internal import type_checkers
from google.protobuf import descriptor
from google.protobuf import symbol_database


_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32,
                        descriptor.FieldDescriptor.CPPTYPE_UINT32,
                        descriptor.FieldDescriptor.CPPTYPE_INT64,
                        descriptor.FieldDescriptor.CPPTYPE_UINT64])
_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64,
                          descriptor.FieldDescriptor.CPPTYPE_UINT64])
_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
                          descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
_INFINITY = 'Infinity'
_NEG_INFINITY = '-Infinity'
_NAN = 'NaN'

_UNPAIRED_SURROGATE_PATTERN = re.compile(
    u'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]')

_VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$')


class Error(Exception):
  """Top-level module error for json_format."""


class SerializeToJsonError(Error):
  """Thrown if serialization to JSON fails."""


class ParseError(Error):
  """Thrown in case of parsing error."""


def MessageToJson(
    message,
    including_default_value_fields=False,
    preserving_proto_field_name=False,
    indent=2,
    sort_keys=False,
    use_integers_for_enums=False,
    descriptor_pool=None,
    float_precision=None,
    ensure_ascii=True):
  """Converts protobuf message to JSON format.

  Args:
    message: The protocol buffers message instance to serialize.
    including_default_value_fields: If True, singular primitive fields,
        repeated fields, and map fields will always be serialized.  If
        False, only serialize non-empty fields.  Singular message fields
        and oneof fields are not affected by this option.
    preserving_proto_field_name: If True, use the original proto field
        names as defined in the .proto file.  If False, convert the field
        names to lowerCamelCase.
    indent: The JSON object will be pretty-printed with this indent level.
        An indent level of 0 or negative will only insert newlines.
    sort_keys: If True, then the output will be sorted by field names.
    use_integers_for_enums: If true, print integers instead of enum names.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
        default.
    float_precision: If set, use this to specify float field valid digits.
    ensure_ascii: If True, strings with non-ASCII characters are escaped.
        If False, Unicode strings are returned unchanged.

  Returns:
    A string containing the JSON formatted protocol buffer message.
  """
  printer = _Printer(
      including_default_value_fields,
      preserving_proto_field_name,
      use_integers_for_enums,
      descriptor_pool,
      float_precision=float_precision)
  return printer.ToJsonString(message, indent, sort_keys, ensure_ascii)


def MessageToDict(
    message,
    including_default_value_fields=False,
    preserving_proto_field_name=False,
    use_integers_for_enums=False,
    descriptor_pool=None,
    float_precision=None):
  """Converts protobuf message to a dictionary.

  When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.

  Args:
    message: The protocol buffers message instance to serialize.
    including_default_value_fields: If True, singular primitive fields,
        repeated fields, and map fields will always be serialized.  If
        False, only serialize non-empty fields.  Singular message fields
        and oneof fields are not affected by this option.
    preserving_proto_field_name: If True, use the original proto field
        names as defined in the .proto file.  If False, convert the field
        names to lowerCamelCase.
    use_integers_for_enums: If true, print integers instead of enum names.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
        default.
    float_precision: If set, use this to specify float field valid digits.

  Returns:
    A dict representation of the protocol buffer message.
  """
  printer = _Printer(
      including_default_value_fields,
      preserving_proto_field_name,
      use_integers_for_enums,
      descriptor_pool,
      float_precision=float_precision)
  # pylint: disable=protected-access
  return printer._MessageToJsonObject(message)
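
# Editor's sketch (illustrative, with a hypothetical generated class
# MyMessage that has a string field named my_field):
#   MessageToDict(MyMessage(my_field='x'))
#       -> {'myField': 'x'}            # field names become lowerCamelCase
#   MessageToDict(MyMessage(my_field='x'), preserving_proto_field_name=True)
#       -> {'my_field': 'x'}           # original .proto names preserved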


def _IsMapEntry(field):
  return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
          field.message_type.has_options and
          field.message_type.GetOptions().map_entry)


class _Printer(object):
  """JSON format printer for protocol message."""

  def __init__(
      self,
      including_default_value_fields=False,
      preserving_proto_field_name=False,
      use_integers_for_enums=False,
      descriptor_pool=None,
      float_precision=None):
    self.including_default_value_fields = including_default_value_fields
    self.preserving_proto_field_name = preserving_proto_field_name
    self.use_integers_for_enums = use_integers_for_enums
    self.descriptor_pool = descriptor_pool
    if float_precision:
      self.float_format = '.{}g'.format(float_precision)
    else:
      self.float_format = None

  def ToJsonString(self, message, indent, sort_keys, ensure_ascii):
    js = self._MessageToJsonObject(message)
    return json.dumps(
        js, indent=indent, sort_keys=sort_keys, ensure_ascii=ensure_ascii)

  def _MessageToJsonObject(self, message):
    """Converts message to an object according to Proto3 JSON Specification."""
    message_descriptor = message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if _IsWrapperMessage(message_descriptor):
      return self._WrapperMessageToJsonObject(message)
    if full_name in _WKTJSONMETHODS:
      return methodcaller(_WKTJSONMETHODS[full_name][0], message)(self)
    js = {}
    return self._RegularMessageToJsonObject(message, js)

  def _RegularMessageToJsonObject(self, message, js):
    """Converts normal message according to Proto3 JSON Specification."""
    fields = message.ListFields()

    try:
      for field, value in fields:
        if self.preserving_proto_field_name:
          name = field.name
        else:
          name = field.json_name
        if _IsMapEntry(field):
          # Convert a map field.
          v_field = field.message_type.fields_by_name['value']
          js_map = {}
          for key in value:
            if isinstance(key, bool):
              if key:
                recorded_key = 'true'
              else:
                recorded_key = 'false'
            else:
              recorded_key = str(key)
            js_map[recorded_key] = self._FieldToJsonObject(
                v_field, value[key])
          js[name] = js_map
        elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
          # Convert a repeated field.
          js[name] = [self._FieldToJsonObject(field, k)
                      for k in value]
        elif field.is_extension:
          name = '[%s]' % field.full_name
          js[name] = self._FieldToJsonObject(field, value)
        else:
          js[name] = self._FieldToJsonObject(field, value)

      # Serialize default value if including_default_value_fields is True.
      if self.including_default_value_fields:
        message_descriptor = message.DESCRIPTOR
        for field in message_descriptor.fields:
          # Singular message fields and oneof fields will not be affected.
          if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and
               field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or
              field.containing_oneof):
            continue
          if self.preserving_proto_field_name:
            name = field.name
          else:
            name = field.json_name
          if name in js:
            # Skip the field which has been serialized already.
            continue
          if _IsMapEntry(field):
            js[name] = {}
          elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
            js[name] = []
          else:
            js[name] = self._FieldToJsonObject(field, field.default_value)

    except ValueError as e:
      raise SerializeToJsonError(
          'Failed to serialize {0} field: {1}.'.format(field.name, e))

    return js

  def _FieldToJsonObject(self, field, value):
    """Converts field value according to Proto3 JSON Specification."""
    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
      return self._MessageToJsonObject(value)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
      if self.use_integers_for_enums:
        return value
      if field.enum_type.full_name == 'google.protobuf.NullValue':
        return None
      enum_value = field.enum_type.values_by_number.get(value, None)
      if enum_value is not None:
        return enum_value.name
      else:
        if field.file.syntax == 'proto3':
          return value
        raise SerializeToJsonError('Enum field contains an integer value '
                                   'which cannot be mapped to an enum value.')
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
      if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
        # Use base64 Data encoding for bytes
        return base64.b64encode(value).decode('utf-8')
      else:
        return value
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
      return bool(value)
    elif field.cpp_type in _INT64_TYPES:
      return str(value)
    elif field.cpp_type in _FLOAT_TYPES:
      if math.isinf(value):
        if value < 0.0:
          return _NEG_INFINITY
        else:
          return _INFINITY
      if math.isnan(value):
        return _NAN
      if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
        if self.float_format:
          return float(format(value, self.float_format))
        else:
          return type_checkers.ToShortestFloat(value)

    return value

  def _AnyMessageToJsonObject(self, message):
    """Converts Any message according to Proto3 JSON Specification."""
    if not message.ListFields():
      return {}
    # Must print @type first, use OrderedDict instead of {}
    js = OrderedDict()
    type_url = message.type_url
    js['@type'] = type_url
    sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
    sub_message.ParseFromString(message.value)
    message_descriptor = sub_message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if _IsWrapperMessage(message_descriptor):
      js['value'] = self._WrapperMessageToJsonObject(sub_message)
      return js
    if full_name in _WKTJSONMETHODS:
      js['value'] = methodcaller(_WKTJSONMETHODS[full_name][0],
                                 sub_message)(self)
      return js
    return self._RegularMessageToJsonObject(sub_message, js)

  def _GenericMessageToJsonObject(self, message):
    """Converts message according to Proto3 JSON Specification."""
    # Duration, Timestamp and FieldMask have a ToJsonString method to do the
    # conversion. Users can also call the method directly.
    return message.ToJsonString()

  def _ValueMessageToJsonObject(self, message):
    """Converts Value message according to Proto3 JSON Specification."""
    which = message.WhichOneof('kind')
    # If the Value message is not set, treat it as null_value when serializing
    # to JSON. The parsed-back result will differ from the original message.
    if which is None or which == 'null_value':
      return None
    if which == 'list_value':
      return self._ListValueMessageToJsonObject(message.list_value)
    if which == 'struct_value':
      value = message.struct_value
    else:
      value = getattr(message, which)
    oneof_descriptor = message.DESCRIPTOR.fields_by_name[which]
    return self._FieldToJsonObject(oneof_descriptor, value)

  def _ListValueMessageToJsonObject(self, message):
    """Converts ListValue message according to Proto3 JSON Specification."""
    return [self._ValueMessageToJsonObject(value)
            for value in message.values]

  def _StructMessageToJsonObject(self, message):
    """Converts Struct message according to Proto3 JSON Specification."""
    fields = message.fields
    ret = {}
    for key in fields:
      ret[key] = self._ValueMessageToJsonObject(fields[key])
    return ret

  def _WrapperMessageToJsonObject(self, message):
    return self._FieldToJsonObject(
        message.DESCRIPTOR.fields_by_name['value'], message.value)


def _IsWrapperMessage(message_descriptor):
  return message_descriptor.file.name == 'google/protobuf/wrappers.proto'


def _DuplicateChecker(js):
  result = {}
  for name, value in js:
    if name in result:
      raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name))
    result[name] = value
  return result
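
# Editor's sketch (not part of the original module): json.loads feeds every
# JSON object's key/value pairs to this hook, so duplicate keys raise instead
# of silently keeping the last value:
#   json.loads('{"a": 1, "a": 2}', object_pairs_hook=_DuplicateChecker)
#       -> ParseError: Failed to load JSON: duplicate key a.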


def _CreateMessageFromTypeUrl(type_url, descriptor_pool):
  """Creates a message from a type URL."""
  db = symbol_database.Default()
  pool = db.pool if descriptor_pool is None else descriptor_pool
  type_name = type_url.split('/')[-1]
  try:
    message_descriptor = pool.FindMessageTypeByName(type_name)
  except KeyError:
    raise TypeError(
        'Can not find message descriptor by type_url: {0}'.format(type_url))
  message_class = db.GetPrototype(message_descriptor)
  return message_class()
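
# Editor's sketch (not part of the original module): only the suffix after
# the final '/' is looked up, so the type URL
# 'type.googleapis.com/google.protobuf.Duration' resolves the descriptor
# named 'google.protobuf.Duration' and returns an empty Duration instance.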


def Parse(text,
          message,
          ignore_unknown_fields=False,
          descriptor_pool=None,
          max_recursion_depth=100):
  """Parses a JSON representation of a protocol message into a message.

  Args:
    text: Message JSON representation.
    message: A protocol buffer message to merge into.
    ignore_unknown_fields: If True, do not raise errors for unknown fields.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
        default.
    max_recursion_depth: max recursion depth of JSON message to be
        deserialized. JSON messages over this depth will fail to be
        deserialized. Default value is 100.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On JSON parsing problems.
  """
  if not isinstance(text, str):
    text = text.decode('utf-8')
  try:
    js = json.loads(text, object_pairs_hook=_DuplicateChecker)
  except ValueError as e:
    raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
  return ParseDict(js, message, ignore_unknown_fields, descriptor_pool,
                   max_recursion_depth)


def ParseDict(js_dict,
              message,
              ignore_unknown_fields=False,
              descriptor_pool=None,
              max_recursion_depth=100):
  """Parses a JSON dictionary representation into a message.

  Args:
    js_dict: Dict representation of a JSON message.
    message: A protocol buffer message to merge into.
    ignore_unknown_fields: If True, do not raise errors for unknown fields.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
        default.
    max_recursion_depth: max recursion depth of JSON message to be
        deserialized. JSON messages over this depth will fail to be
        deserialized. Default value is 100.

  Returns:
    The same message passed as argument.
  """
  parser = _Parser(ignore_unknown_fields, descriptor_pool, max_recursion_depth)
  parser.ConvertMessage(js_dict, message, '')
  return message


_INT_OR_FLOAT = (int, float)


class _Parser(object):
  """JSON format parser for protocol message."""

  def __init__(self, ignore_unknown_fields, descriptor_pool,
               max_recursion_depth):
    self.ignore_unknown_fields = ignore_unknown_fields
    self.descriptor_pool = descriptor_pool
    self.max_recursion_depth = max_recursion_depth
    self.recursion_depth = 0

  def ConvertMessage(self, value, message, path):
    """Convert a JSON object into a message.

    Args:
      value: A JSON object.
      message: A WKT or regular protocol message to record the data.
      path: parent path to log parse error info.

    Raises:
      ParseError: In case of convert problems.
    """
    self.recursion_depth += 1
    if self.recursion_depth > self.max_recursion_depth:
      raise ParseError('Message too deep. Max recursion depth is {0}'.format(
          self.max_recursion_depth))
    message_descriptor = message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if not path:
      path = message_descriptor.name
    if _IsWrapperMessage(message_descriptor):
      self._ConvertWrapperMessage(value, message, path)
    elif full_name in _WKTJSONMETHODS:
      methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self)
    else:
      self._ConvertFieldValuePair(value, message, path)
    self.recursion_depth -= 1

  def _ConvertFieldValuePair(self, js, message, path):
    """Convert field value pairs into regular message.

    Args:
      js: A JSON object to convert the field value pairs.
      message: A regular protocol message to record the data.
      path: parent path to log parse error info.

    Raises:
      ParseError: In case of problems converting.
    """
    names = []
    message_descriptor = message.DESCRIPTOR
    fields_by_json_name = dict((f.json_name, f)
                               for f in message_descriptor.fields)
    for name in js:
      try:
        field = fields_by_json_name.get(name, None)
        if not field:
          field = message_descriptor.fields_by_name.get(name, None)
        if not field and _VALID_EXTENSION_NAME.match(name):
          if not message_descriptor.is_extendable:
            raise ParseError(
                'Message type {0} does not have extensions at {1}'.format(
                    message_descriptor.full_name, path))
          identifier = name[1:-1]  # strip [] brackets
          # pylint: disable=protected-access
          field = message.Extensions._FindExtensionByName(identifier)
          # pylint: enable=protected-access
          if not field:
            # Try looking for extension by the message type name, dropping the
            # field name following the final . separator in full_name.
            identifier = '.'.join(identifier.split('.')[:-1])
            # pylint: disable=protected-access
            field = message.Extensions._FindExtensionByName(identifier)
            # pylint: enable=protected-access
        if not field:
          if self.ignore_unknown_fields:
            continue
          raise ParseError(
              ('Message type "{0}" has no field named "{1}" at "{2}".\n'
               ' Available Fields(except extensions): "{3}"').format(
                   message_descriptor.full_name, name, path,
                   [f.json_name for f in message_descriptor.fields]))
        if name in names:
          raise ParseError('Message type "{0}" should not have multiple '
                           '"{1}" fields at "{2}".'.format(
                               message.DESCRIPTOR.full_name, name, path))
        names.append(name)
        value = js[name]
        # Check no other oneof field is parsed.
        if field.containing_oneof is not None and value is not None:
          oneof_name = field.containing_oneof.name
          if oneof_name in names:
            raise ParseError('Message type "{0}" should not have multiple '
                             '"{1}" oneof fields at "{2}".'.format(
                                 message.DESCRIPTOR.full_name, oneof_name,
                                 path))
          names.append(oneof_name)

        if value is None:
          if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE
              and field.message_type.full_name == 'google.protobuf.Value'):
            sub_message = getattr(message, field.name)
            sub_message.null_value = 0
          elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM
                and field.enum_type.full_name == 'google.protobuf.NullValue'):
            setattr(message, field.name, 0)
          else:
            message.ClearField(field.name)
          continue

        # Parse field value.
        if _IsMapEntry(field):
          message.ClearField(field.name)
          self._ConvertMapFieldValue(value, message, field,
                                     '{0}.{1}'.format(path, name))
        elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
          message.ClearField(field.name)
          if not isinstance(value, list):
            raise ParseError('repeated field {0} must be in [] which is '
                             '{1} at {2}'.format(name, value, path))
          if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
            # Repeated message field.
            for index, item in enumerate(value):
              sub_message = getattr(message, field.name).add()
              # None is a null_value in Value.
              if (item is None and
                  sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'):
                raise ParseError('null is not allowed to be used as an element'
                                 ' in a repeated field at {0}.{1}[{2}]'.format(
                                     path, name, index))
              self.ConvertMessage(item, sub_message,
                                  '{0}.{1}[{2}]'.format(path, name, index))
          else:
            # Repeated scalar field.
            for index, item in enumerate(value):
              if item is None:
                raise ParseError('null is not allowed to be used as an element'
                                 ' in a repeated field at {0}.{1}[{2}]'.format(
                                     path, name, index))
              getattr(message, field.name).append(
                  _ConvertScalarFieldValue(
                      item, field, '{0}.{1}[{2}]'.format(path, name, index)))
        elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
          if field.is_extension:
            sub_message = message.Extensions[field]
          else:
            sub_message = getattr(message, field.name)
          sub_message.SetInParent()
          self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name))
        else:
          if field.is_extension:
            message.Extensions[field] = _ConvertScalarFieldValue(
                value, field, '{0}.{1}'.format(path, name))
          else:
            setattr(
                message, field.name,
                _ConvertScalarFieldValue(value, field,
                                         '{0}.{1}'.format(path, name)))
      except ParseError as e:
        if field and field.containing_oneof is None:
          raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
        else:
          raise ParseError(str(e))
      except ValueError as e:
        raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
      except TypeError as e:
        raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))

  def _ConvertAnyMessage(self, value, message, path):
    """Convert a JSON representation into Any message."""
    if isinstance(value, dict) and not value:
      return
    try:
      type_url = value['@type']
    except KeyError:
      raise ParseError(
          '@type is missing when parsing any message at {0}'.format(path))

    try:
      sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
    except TypeError as e:
      raise ParseError('{0} at {1}'.format(e, path))
    message_descriptor = sub_message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if _IsWrapperMessage(message_descriptor):
      self._ConvertWrapperMessage(value['value'], sub_message,
                                  '{0}.value'.format(path))
    elif full_name in _WKTJSONMETHODS:
      methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message,
                   '{0}.value'.format(path))(
                       self)
    else:
      del value['@type']
      self._ConvertFieldValuePair(value, sub_message, path)
      value['@type'] = type_url
    # Sets Any message
    message.value = sub_message.SerializeToString()
    message.type_url = type_url

  def _ConvertGenericMessage(self, value, message, path):
    """Convert a JSON representation into message with FromJsonString."""
    # Duration, Timestamp, FieldMask have a FromJsonString method to do the
    # conversion. Users can also call the method directly.
    try:
      message.FromJsonString(value)
    except ValueError as e:
      raise ParseError('{0} at {1}'.format(e, path))

  def _ConvertValueMessage(self, value, message, path):
    """Convert a JSON representation into Value message."""
    if isinstance(value, dict):
      self._ConvertStructMessage(value, message.struct_value, path)
    elif isinstance(value, list):
      self._ConvertListValueMessage(value, message.list_value, path)
    elif value is None:
      message.null_value = 0
    elif isinstance(value, bool):
      message.bool_value = value
    elif isinstance(value, str):
      message.string_value = value
    elif isinstance(value, _INT_OR_FLOAT):
      message.number_value = value
    else:
      raise ParseError('Value {0} has unexpected type {1} at {2}'.format(
          value, type(value), path))

  def _ConvertListValueMessage(self, value, message, path):
    """Convert a JSON representation into ListValue message."""
    if not isinstance(value, list):
      raise ParseError('ListValue must be in [] which is {0} at {1}'.format(
          value, path))
    message.ClearField('values')
    for index, item in enumerate(value):
      self._ConvertValueMessage(item, message.values.add(),
                                '{0}[{1}]'.format(path, index))

  def _ConvertStructMessage(self, value, message, path):
    """Convert a JSON representation into Struct message."""
    if not isinstance(value, dict):
      raise ParseError('Struct must be in a dict which is {0} at {1}'.format(
          value, path))
    # Clear will mark the struct as modified so it will be created even if
    # there are no values.
    message.Clear()
    for key in value:
      self._ConvertValueMessage(value[key], message.fields[key],
                                '{0}.{1}'.format(path, key))
    return

  def _ConvertWrapperMessage(self, value, message, path):
    """Convert a JSON representation into Wrapper message."""
    field = message.DESCRIPTOR.fields_by_name['value']
    setattr(
        message, 'value',
        _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path)))

  def _ConvertMapFieldValue(self, value, message, field, path):
    """Convert map field value for a message map field.

    Args:
      value: A JSON object to convert the map field value.
      message: A protocol message to record the converted data.
      field: The descriptor of the map field to be converted.
      path: parent path to log parse error info.

    Raises:
      ParseError: In case of convert problems.
    """
    if not isinstance(value, dict):
      raise ParseError(
          'Map field {0} must be in a dict which is {1} at {2}'.format(
              field.name, value, path))
    key_field = field.message_type.fields_by_name['key']
    value_field = field.message_type.fields_by_name['value']
    for key in value:
      key_value = _ConvertScalarFieldValue(key, key_field,
                                           '{0}.key'.format(path), True)
      if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
        self.ConvertMessage(value[key],
                            getattr(message, field.name)[key_value],
                            '{0}[{1}]'.format(path, key_value))
      else:
        getattr(message, field.name)[key_value] = _ConvertScalarFieldValue(
            value[key], value_field, path='{0}[{1}]'.format(path, key_value))


def _ConvertScalarFieldValue(value, field, path, require_str=False):
  """Convert a single scalar field value.

  Args:
    value: A scalar value to convert the scalar field value.
    field: The descriptor of the field to convert.
    path: parent path to log parse error info.
    require_str: If True, the field value must be a str.

  Returns:
    The converted scalar field value

  Raises:
    ParseError: In case of convert problems.
  """
  try:
    if field.cpp_type in _INT_TYPES:
      return _ConvertInteger(value)
    elif field.cpp_type in _FLOAT_TYPES:
      return _ConvertFloat(value, field)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
      return _ConvertBool(value, require_str)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
      if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
        if isinstance(value, str):
          encoded = value.encode('utf-8')
        else:
          encoded = value
        # Add extra padding '='
        padded_value = encoded + b'=' * (4 - len(encoded) % 4)
        return base64.urlsafe_b64decode(padded_value)
      else:
        # Checking for unpaired surrogates appears to be unreliable,
        # depending on the specific Python version, so we check manually.
        if _UNPAIRED_SURROGATE_PATTERN.search(value):
          raise ParseError('Unpaired surrogate')
        return value
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
      # Convert an enum value.
      enum_value = field.enum_type.values_by_name.get(value, None)
      if enum_value is None:
        try:
          number = int(value)
          enum_value = field.enum_type.values_by_number.get(number, None)
        except ValueError:
          raise ParseError('Invalid enum value {0} for enum type {1}'.format(
              value, field.enum_type.full_name))
        if enum_value is None:
          if field.file.syntax == 'proto3':
            # Proto3 accepts unknown enums.
            return number
          raise ParseError('Invalid enum value {0} for enum type {1}'.format(
              value, field.enum_type.full_name))
      return enum_value.number
  except ParseError as e:
    raise ParseError('{0} at {1}'.format(e, path))


def _ConvertInteger(value):
  """Convert an integer.

  Args:
    value: A scalar value to convert.

  Returns:
    The integer value.

  Raises:
    ParseError: If an integer couldn't be consumed.
  """
  if isinstance(value, float) and not value.is_integer():
    raise ParseError('Couldn\'t parse integer: {0}'.format(value))

  if isinstance(value, str) and value.find(' ') != -1:
    raise ParseError('Couldn\'t parse integer: "{0}"'.format(value))

  if isinstance(value, bool):
    raise ParseError('Bool value {0} is not acceptable for '
                     'integer field'.format(value))

  return int(value)
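
# Editor's sketch (not part of the original module):
#   _ConvertInteger(3)     -> 3
#   _ConvertInteger(3.0)   -> 3           # integral floats are accepted
#   _ConvertInteger('12')  -> 12          # proto3 JSON allows quoted ints
#   _ConvertInteger(1.5)   -> ParseError
#   _ConvertInteger(True)  -> ParseError  # bools are not integers here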


def _ConvertFloat(value, field):
  """Convert a floating point number."""
  if isinstance(value, float):
    if math.isnan(value):
      raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead')
    if math.isinf(value):
      if value > 0:
        raise ParseError('Couldn\'t parse Infinity or value too large, '
                         'use quoted "Infinity" instead')
      else:
        raise ParseError('Couldn\'t parse -Infinity or value too small, '
                         'use quoted "-Infinity" instead')
    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
      # pylint: disable=protected-access
      if value > type_checkers._FLOAT_MAX:
        raise ParseError('Float value too large')
      # pylint: disable=protected-access
      if value < type_checkers._FLOAT_MIN:
        raise ParseError('Float value too small')
  if value == 'nan':
    raise ParseError('Couldn\'t parse float "nan", use "NaN" instead')
  try:
    # Assume Python compatible syntax.
    return float(value)
  except ValueError:
    # Check alternative spellings.
    if value == _NEG_INFINITY:
      return float('-inf')
    elif value == _INFINITY:
      return float('inf')
    elif value == _NAN:
      return float('nan')
    else:
      raise ParseError('Couldn\'t parse float: {0}'.format(value))


def _ConvertBool(value, require_str):
  """Convert a boolean value.

  Args:
    value: A scalar value to convert.
    require_str: If True, value must be a str.

  Returns:
    The bool parsed.

  Raises:
    ParseError: If a boolean value couldn't be consumed.
  """
  if require_str:
    if value == 'true':
      return True
    elif value == 'false':
      return False
    else:
      raise ParseError('Expected "true" or "false", not {0}'.format(value))

  if not isinstance(value, bool):
    raise ParseError('Expected true or false without quotes')
  return value
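
# Editor's sketch (not part of the original module): require_str is only set
# for map keys, where proto3 JSON spells booleans as strings:
#   _ConvertBool(True, require_str=False)   -> True
#   _ConvertBool('true', require_str=True)  -> True
#   _ConvertBool('true', require_str=False) -> ParseError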


_WKTJSONMETHODS = {
    'google.protobuf.Any': ['_AnyMessageToJsonObject',
                            '_ConvertAnyMessage'],
    'google.protobuf.Duration': ['_GenericMessageToJsonObject',
                                 '_ConvertGenericMessage'],
    'google.protobuf.FieldMask': ['_GenericMessageToJsonObject',
                                  '_ConvertGenericMessage'],
    'google.protobuf.ListValue': ['_ListValueMessageToJsonObject',
                                  '_ConvertListValueMessage'],
    'google.protobuf.Struct': ['_StructMessageToJsonObject',
                               '_ConvertStructMessage'],
    'google.protobuf.Timestamp': ['_GenericMessageToJsonObject',
                                  '_ConvertGenericMessage'],
    'google.protobuf.Value': ['_ValueMessageToJsonObject',
                              '_ConvertValueMessage']
}
@ -1,424 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc.  All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# TODO(robinson): We should just make these methods all "pure-virtual" and move
# all implementation out, into reflection.py for now.


"""Contains an abstract base class for protocol messages."""

__author__ = 'robinson@google.com (Will Robinson)'


class Error(Exception):
  """Base error type for this module."""
  pass


class DecodeError(Error):
  """Exception raised when deserializing messages."""
  pass


class EncodeError(Error):
  """Exception raised when serializing messages."""
  pass


class Message(object):

  """Abstract base class for protocol messages.

  Protocol message classes are almost always generated by the protocol
  compiler.  These generated types subclass Message and implement the methods
  shown below.
  """

  # TODO(robinson): Link to an HTML document here.

  # TODO(robinson): Document that instances of this class will also
  # have an Extensions attribute with __getitem__ and __setitem__.
  # Again, not sure how to best convey this.

  # TODO(robinson): Document that the class must also have a static
  # RegisterExtension(extension_field) method.
  # Not sure how to best express at this point.

  # TODO(robinson): Document these fields and methods.

  __slots__ = []

  #: The :class:`google.protobuf.descriptor.Descriptor` for this message type.
  DESCRIPTOR = None

  def __deepcopy__(self, memo=None):
    clone = type(self)()
    clone.MergeFrom(self)
    return clone

  def __eq__(self, other_msg):
    """Recursively compares two messages by value and structure."""
    raise NotImplementedError

  def __ne__(self, other_msg):
    # Can't just say self != other_msg, since that would infinitely recurse. :)
    return not self == other_msg

  def __hash__(self):
    raise TypeError('unhashable object')

  def __str__(self):
    """Outputs a human-readable representation of the message."""
    raise NotImplementedError

  def __unicode__(self):
    """Outputs a human-readable representation of the message."""
    raise NotImplementedError

  def MergeFrom(self, other_msg):
    """Merges the contents of the specified message into current message.

    This method merges the contents of the specified message into the current
    message. Singular fields that are set in the specified message overwrite
    the corresponding fields in the current message. Repeated fields are
    appended. Singular sub-messages and groups are recursively merged.

    Args:
      other_msg (Message): A message to merge into the current message.
    """
    raise NotImplementedError

  def CopyFrom(self, other_msg):
    """Copies the content of the specified message into the current message.

    The method clears the current message and then merges the specified
    message using MergeFrom.

    Args:
      other_msg (Message): A message to copy into the current one.
    """
    if self is other_msg:
      return
    self.Clear()
    self.MergeFrom(other_msg)

  def Clear(self):
    """Clears all data that was set in the message."""
    raise NotImplementedError

  def SetInParent(self):
    """Mark this as present in the parent.

    This normally happens automatically when you assign a field of a
    sub-message, but sometimes you want to make the sub-message
    present while keeping it empty.  If you find yourself using this,
    you may want to reconsider your design.
    """
    raise NotImplementedError

  def IsInitialized(self):
    """Checks if the message is initialized.

    Returns:
      bool: The method returns True if the message is initialized (i.e. all
      of its required fields are set).
    """
    raise NotImplementedError

  # TODO(robinson): MergeFromString() should probably return None and be
  # implemented in terms of a helper that returns the # of bytes read.  Our
  # deserialization routines would use the helper when recursively
  # deserializing, but the end user would almost always just want the
  # no-return MergeFromString().

  def MergeFromString(self, serialized):
    """Merges serialized protocol buffer data into this message.

    When we find a field in `serialized` that is already present
    in this message:

    -   If it's a "repeated" field, we append to the end of our list.
    -   Else, if it's a scalar, we overwrite our field.
    -   Else, (it's a nonrepeated composite), we recursively merge
        into the existing composite.

    Args:
      serialized (bytes): Any object that allows us to call
        ``memoryview(serialized)`` to access a string of bytes using the
        buffer interface.

    Returns:
      int: The number of bytes read from `serialized`.
      For non-group messages, this will always be `len(serialized)`,
      but for messages which are actually groups, this will
      generally be less than `len(serialized)`, since we must
      stop when we reach an ``END_GROUP`` tag.  Note that if
      we *do* stop because of an ``END_GROUP`` tag, the number
      of bytes returned does not include the bytes
      for the ``END_GROUP`` tag information.

    Raises:
      DecodeError: if the input cannot be parsed.
    """
    # TODO(robinson): Document handling of unknown fields.
    # TODO(robinson): When we switch to a helper, this will return None.
    raise NotImplementedError

  def ParseFromString(self, serialized):
    """Parse serialized protocol buffer data into this message.

    Like :func:`MergeFromString()`, except we clear the object first.

    Raises:
      message.DecodeError if the input cannot be parsed.
    """
    self.Clear()
    return self.MergeFromString(serialized)
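
  # Editor's sketch (illustrative, with a hypothetical generated class
  # MyMessage): serialization and parsing round-trip through the wire format:
  #   msg = MyMessage(my_field='x')
  #   data = msg.SerializeToString()
  #   clone = MyMessage()
  #   clone.ParseFromString(data)   # clone now equals msg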

  def SerializeToString(self, **kwargs):
    """Serializes the protocol message to a binary string.

    Keyword Args:
      deterministic (bool): If true, requests deterministic serialization
        of the protobuf, with predictable ordering of map keys.

    Returns:
      A binary string representation of the message if all of the required
      fields in the message are set (i.e. the message is initialized).

    Raises:
      EncodeError: if the message isn't initialized (see
        :func:`IsInitialized`).
    """
    raise NotImplementedError

  def SerializePartialToString(self, **kwargs):
    """Serializes the protocol message to a binary string.

    This method is similar to SerializeToString but doesn't check if the
    message is initialized.

    Keyword Args:
      deterministic (bool): If true, requests deterministic serialization
        of the protobuf, with predictable ordering of map keys.

    Returns:
      bytes: A serialized representation of the partial message.
    """
    raise NotImplementedError

  # TODO(robinson): Decide whether we like these better
  # than auto-generated has_foo() and clear_foo() methods
  # on the instances themselves.  This way is less consistent
  # with C++, but it makes reflection-type access easier and
  # reduces the number of magically autogenerated things.
  #
  # TODO(robinson): Be sure to document (and test) exactly
  # which field names are accepted here.  Are we case-sensitive?
  # What do we do with fields that share names with Python keywords
  # like 'lambda' and 'yield'?
  #
  # nnorwitz says:
  # """
  # Typically (in python), an underscore is appended to names that are
  # keywords. So they would become lambda_ or yield_.
  # """
  def ListFields(self):
    """Returns a list of (FieldDescriptor, value) tuples for present fields.

    A message field is non-empty if HasField() would return true. A singular
    primitive field is non-empty if HasField() would return true in proto2
    or it is non zero in proto3. A repeated field is non-empty if it contains
    at least one element. The fields are ordered by field number.

    Returns:
      list[tuple(FieldDescriptor, value)]: field descriptors and values
      for all fields in the message which are not empty. The values vary by
      field type.
    """
    raise NotImplementedError

  def HasField(self, field_name):
    """Checks if a certain field is set for the message.

    For a oneof group, checks if any field inside is set. Note that if the
    field_name is not defined in the message descriptor, :exc:`ValueError`
    will be raised.

    Args:
      field_name (str): The name of the field to check for presence.

    Returns:
      bool: Whether a value has been set for the named field.

    Raises:
      ValueError: if the `field_name` is not a member of this message.
    """
    raise NotImplementedError

  def ClearField(self, field_name):
    """Clears the contents of a given field.

    Inside a oneof group, clears the field set. If the name refers neither to
    a defined field nor to a oneof group, :exc:`ValueError` is raised.

    Args:
      field_name (str): The name of the field to clear.

    Raises:
      ValueError: if the `field_name` is not a member of this message.
    """
    raise NotImplementedError

  def WhichOneof(self, oneof_group):
    """Returns the name of the field that is set inside a oneof group.

    If no field is set, returns None.

    Args:
      oneof_group (str): the name of the oneof group to check.

    Returns:
      str or None: The name of the group that is set, or None.

    Raises:
      ValueError: no group with the given name exists
    """
    raise NotImplementedError

  def HasExtension(self, extension_handle):
    """Checks if a certain extension is present for this message.

    Extensions are retrieved using the :attr:`Extensions` mapping (if
    present).

    Args:
      extension_handle: The handle for the extension to check.

    Returns:
      bool: Whether the extension is present for this message.

    Raises:
      KeyError: if the extension is repeated. Similar to repeated fields,
        there is no separate notion of presence: a "not present" repeated
        extension is an empty list.
    """
    raise NotImplementedError

  def ClearExtension(self, extension_handle):
    """Clears the contents of a given extension.

    Args:
      extension_handle: The handle for the extension to clear.
    """
    raise NotImplementedError

  def UnknownFields(self):
    """Returns the UnknownFieldSet.

    Returns:
      UnknownFieldSet: The unknown fields stored in this message.
    """
    raise NotImplementedError

  def DiscardUnknownFields(self):
    """Clears all fields in the :class:`UnknownFieldSet`.

    This operation is recursive for nested messages.
    """
    raise NotImplementedError

  def ByteSize(self):
    """Returns the serialized size of this message.

    Recursively calls ByteSize() on all contained messages.

    Returns:
      int: The number of bytes required to serialize this message.
    """
    raise NotImplementedError

  @classmethod
  def FromString(cls, s):
    raise NotImplementedError

  @staticmethod
  def RegisterExtension(extension_handle):
    raise NotImplementedError

  def _SetListener(self, message_listener):
    """Internal method used by the protocol message implementation.
    Clients should not call this directly.

    Sets a listener that this message will call on certain state transitions.

    The purpose of this method is to register back-edges from children to
    parents at runtime, for the purpose of setting "has" bits and
    byte-size-dirty bits in the parent and ancestor objects whenever a child
    or descendant object is modified.

    If the client wants to disconnect this Message from the object tree, she
    explicitly sets callback to None.

    If message_listener is None, unregisters any existing listener. Otherwise,
    message_listener must implement the MessageListener interface in
    internal/message_listener.py, and we discard any listener registered
    via a previous _SetListener() call.
    """
    raise NotImplementedError

  def __getstate__(self):
    """Support the pickle protocol."""
    return dict(serialized=self.SerializePartialToString())

  def __setstate__(self, state):
    """Support the pickle protocol."""
    self.__init__()
    serialized = state['serialized']
    # On Python 3, using encoding='latin1' is required for unpickling
    # protos pickled by Python 2.
    if not isinstance(serialized, bytes):
      serialized = serialized.encode('latin1')
    self.ParseFromString(serialized)
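
  # Editor's sketch (not part of the original module): because the pickled
  # state is just the serialized bytes, pickling round-trips through the
  # wire format, e.g. pickle.loads(pickle.dumps(msg)) == msg for an
  # initialized msg.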

  def __reduce__(self):
    message_descriptor = self.DESCRIPTOR
    if message_descriptor.containing_type is None:
      return type(self), (), self.__getstate__()
    # the message type must be nested.
    # Python does not pickle nested classes; use the symbol_database on the
    # receiving end.
    container = message_descriptor
    return (_InternalConstructMessage, (container.full_name,),
            self.__getstate__())


def _InternalConstructMessage(full_name):
  """Constructs a nested message."""
  from google.protobuf import symbol_database  # pylint:disable=g-import-not-at-top

  return symbol_database.Default().GetSymbol(full_name)()
@ -1,185 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Provides a factory class for generating dynamic messages.
|
||||
|
||||
If you have access to the FileDescriptor protos containing the messages you
|
||||
want to create, the easiest way to use this class is the following:
|
||||
|
||||
message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
|
||||
my_proto_instance = message_classes['some.proto.package.MessageName']()
|
||||
"""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf import descriptor_pool
|
||||
from google.protobuf import message
|
||||
|
||||
if api_implementation.Type() == 'cpp':
|
||||
from google.protobuf.pyext import cpp_message as message_impl
|
||||
else:
|
||||
from google.protobuf.internal import python_message as message_impl
|
||||
|
||||
|
||||
# The type of all Message classes.
|
||||
_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType
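# A quick way to check which backend was chosen (the
# PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION environment variable, set to 'python'
# or 'cpp' before the first protobuf import, influences this selection):
#
#   from google.protobuf.internal import api_implementation
#   print(api_implementation.Type())   # 'cpp' or 'python'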
|
||||
|
||||
|
||||
class MessageFactory(object):
|
||||
"""Factory for creating Proto2 messages from descriptors in a pool."""
|
||||
|
||||
def __init__(self, pool=None):
|
||||
"""Initializes a new factory."""
|
||||
self.pool = pool or descriptor_pool.DescriptorPool()
|
||||
|
||||
# local cache of all classes built from protobuf descriptors
|
||||
self._classes = {}
|
||||
|
||||
def GetPrototype(self, descriptor):
|
||||
"""Obtains a proto2 message class based on the passed in descriptor.
|
||||
|
||||
Passing a descriptor with a fully qualified name matching a previous
|
||||
invocation will cause the same class to be returned.
|
||||
|
||||
Args:
|
||||
descriptor: The descriptor to build from.
|
||||
|
||||
Returns:
|
||||
A class describing the passed in descriptor.
|
||||
"""
|
||||
if descriptor not in self._classes:
|
||||
result_class = self.CreatePrototype(descriptor)
|
||||
# The assignment to _classes is redundant for the base implementation, but
|
||||
# might avoid confusion in cases where CreatePrototype gets overridden and
|
||||
# does not call the base implementation.
|
||||
self._classes[descriptor] = result_class
|
||||
return result_class
|
||||
return self._classes[descriptor]
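# A short usage sketch ('some.proto.package.MessageName' stands in for any
# message type already registered in the pool):
#
#   from google.protobuf import descriptor_pool
#
#   pool = descriptor_pool.Default()
#   factory = MessageFactory(pool)
#   desc = pool.FindMessageTypeByName('some.proto.package.MessageName')
#   cls = factory.GetPrototype(desc)
#   assert cls is factory.GetPrototype(desc)   # cached on repeat calls
#   instance = cls()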
|
||||
|
||||
def CreatePrototype(self, descriptor):
|
||||
"""Builds a proto2 message class based on the passed in descriptor.
|
||||
|
||||
Don't call this function directly, it always creates a new class. Call
|
||||
GetPrototype() instead. This method is meant to be overridden in subclasses
|
||||
to perform additional operations on the newly constructed class.
|
||||
|
||||
Args:
|
||||
descriptor: The descriptor to build from.
|
||||
|
||||
Returns:
|
||||
A class describing the passed in descriptor.
|
||||
"""
|
||||
descriptor_name = descriptor.name
|
||||
result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
|
||||
descriptor_name,
|
||||
(message.Message,),
|
||||
{
|
||||
'DESCRIPTOR': descriptor,
|
||||
# If module not set, it wrongly points to message_factory module.
|
||||
'__module__': None,
|
||||
})
|
||||
result_class._FACTORY = self # pylint: disable=protected-access
|
||||
# Assign in _classes before doing recursive calls to avoid infinite
|
||||
# recursion.
|
||||
self._classes[descriptor] = result_class
|
||||
for field in descriptor.fields:
|
||||
if field.message_type:
|
||||
self.GetPrototype(field.message_type)
|
||||
for extension in result_class.DESCRIPTOR.extensions:
|
||||
if extension.containing_type not in self._classes:
|
||||
self.GetPrototype(extension.containing_type)
|
||||
extended_class = self._classes[extension.containing_type]
|
||||
extended_class.RegisterExtension(extension)
|
||||
return result_class
|
||||
|
||||
def GetMessages(self, files):
|
||||
"""Gets all the messages from a specified file.
|
||||
|
||||
This will find and resolve dependencies, failing if the descriptor
|
||||
pool cannot satisfy them.
|
||||
|
||||
Args:
|
||||
files: The file names to extract messages from.
|
||||
|
||||
Returns:
|
||||
A dictionary mapping proto names to the message classes. This will include
|
||||
any dependent messages as well as any messages defined in the same file as
|
||||
a specified message.
|
||||
"""
|
||||
result = {}
|
||||
for file_name in files:
|
||||
file_desc = self.pool.FindFileByName(file_name)
|
||||
for desc in file_desc.message_types_by_name.values():
|
||||
result[desc.full_name] = self.GetPrototype(desc)
|
||||
|
||||
# While the extension FieldDescriptors are created by the descriptor pool,
|
||||
# the python classes created in the factory need them to be registered
|
||||
# explicitly, which is done below.
|
||||
#
|
||||
# The call to RegisterExtension will specifically check if the
|
||||
# extension was already registered on the object and either
|
||||
# ignore the registration if the original was the same, or raise
|
||||
# an error if they were different.
|
||||
|
||||
for extension in file_desc.extensions_by_name.values():
|
||||
if extension.containing_type not in self._classes:
|
||||
self.GetPrototype(extension.containing_type)
|
||||
extended_class = self._classes[extension.containing_type]
|
||||
extended_class.RegisterExtension(extension)
|
||||
return result
|
||||
|
||||
|
||||
_FACTORY = MessageFactory()
|
||||
|
||||
|
||||
def GetMessages(file_protos):
|
||||
"""Builds a dictionary of all the messages available in a set of files.
|
||||
|
||||
Args:
|
||||
file_protos: Iterable of FileDescriptorProto to build messages out of.
|
||||
|
||||
Returns:
|
||||
A dictionary mapping proto names to the message classes. This will include
|
||||
any dependent messages as well as any messages defined in the same file as
|
||||
a specified message.
|
||||
"""
|
||||
# The cpp implementation of the protocol buffer library requires adding the
|
||||
# messages in topological order of the dependency graph.
|
||||
file_by_name = {file_proto.name: file_proto for file_proto in file_protos}
|
||||
def _AddFile(file_proto):
|
||||
for dependency in file_proto.dependency:
|
||||
if dependency in file_by_name:
|
||||
# Remove from elements to be visited, in order to cut cycles.
|
||||
_AddFile(file_by_name.pop(dependency))
|
||||
_FACTORY.pool.Add(file_proto)
|
||||
while file_by_name:
|
||||
_AddFile(file_by_name.popitem()[1])
|
||||
return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos])
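# A usage sketch for the module-level helper ('example.proto' and 'Ping' are
# hypothetical):
#
#   from google.protobuf import descriptor_pb2
#
#   file_proto = descriptor_pb2.FileDescriptorProto()
#   file_proto.name = 'example.proto'
#   file_proto.package = 'example'
#   file_proto.message_type.add().name = 'Ping'
#   classes = GetMessages([file_proto])
#   ping = classes['example.Ping']()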
|
@ -1,134 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Dynamic Protobuf class creator."""
|
||||
|
||||
from collections import OrderedDict
|
||||
import hashlib
|
||||
import os
|
||||
|
||||
from google.protobuf import descriptor_pb2
|
||||
from google.protobuf import descriptor
|
||||
from google.protobuf import message_factory
|
||||
|
||||
|
||||
def _GetMessageFromFactory(factory, full_name):
|
||||
"""Get a proto class from the MessageFactory by name.
|
||||
|
||||
Args:
|
||||
factory: a MessageFactory instance.
|
||||
full_name: str, the fully qualified name of the proto type.
|
||||
Returns:
|
||||
A class, for the type identified by full_name.
|
||||
Raises:
|
||||
KeyError, if the proto is not found in the factory's descriptor pool.
|
||||
"""
|
||||
proto_descriptor = factory.pool.FindMessageTypeByName(full_name)
|
||||
proto_cls = factory.GetPrototype(proto_descriptor)
|
||||
return proto_cls
|
||||
|
||||
|
||||
def MakeSimpleProtoClass(fields, full_name=None, pool=None):
|
||||
"""Create a Protobuf class whose fields are basic types.
|
||||
|
||||
Note: this doesn't validate field names!
|
||||
|
||||
Args:
|
||||
fields: dict of {name: field_type} mappings for each field in the proto. If
|
||||
this is an OrderedDict the order will be maintained, otherwise the
|
||||
fields will be sorted by name.
|
||||
full_name: optional str, the fully-qualified name of the proto type.
|
||||
pool: optional DescriptorPool instance.
|
||||
Returns:
|
||||
a class, the new protobuf class with a FileDescriptor.
|
||||
"""
|
||||
factory = message_factory.MessageFactory(pool=pool)
|
||||
|
||||
if full_name is not None:
|
||||
try:
|
||||
proto_cls = _GetMessageFromFactory(factory, full_name)
|
||||
return proto_cls
|
||||
except KeyError:
|
||||
# The factory's DescriptorPool doesn't know about this class yet.
|
||||
pass
|
||||
|
||||
# Get a list of (name, field_type) tuples from the fields dict. If fields was
|
||||
# an OrderedDict we keep the order, but otherwise we sort the fields to ensure
|
||||
# consistent ordering.
|
||||
field_items = fields.items()
|
||||
if not isinstance(fields, OrderedDict):
|
||||
field_items = sorted(field_items)
|
||||
|
||||
# Use a consistent file name that is unlikely to conflict with any imported
|
||||
# proto files.
|
||||
fields_hash = hashlib.sha1()
|
||||
for f_name, f_type in field_items:
|
||||
fields_hash.update(f_name.encode('utf-8'))
|
||||
fields_hash.update(str(f_type).encode('utf-8'))
|
||||
proto_file_name = fields_hash.hexdigest() + '.proto'
|
||||
|
||||
# If the proto is anonymous, use the same hash to name it.
|
||||
if full_name is None:
|
||||
full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
|
||||
fields_hash.hexdigest())
|
||||
try:
|
||||
proto_cls = _GetMessageFromFactory(factory, full_name)
|
||||
return proto_cls
|
||||
except KeyError:
|
||||
# The factory's DescriptorPool doesn't know about this class yet.
|
||||
pass
|
||||
|
||||
# This is the first time we see this proto: add a new descriptor to the pool.
|
||||
factory.pool.Add(
|
||||
_MakeFileDescriptorProto(proto_file_name, full_name, field_items))
|
||||
return _GetMessageFromFactory(factory, full_name)
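# A usage sketch (field names are illustrative; the type constants are real
# descriptor_pb2 values):
#
#   from collections import OrderedDict
#   from google.protobuf import descriptor_pb2
#
#   ProtoCls = MakeSimpleProtoClass(OrderedDict([
#       ('foo', descriptor_pb2.FieldDescriptorProto.TYPE_INT64),
#       ('bar', descriptor_pb2.FieldDescriptorProto.TYPE_STRING),
#   ]))
#   msg = ProtoCls(foo=42, bar='baz')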
|
||||
|
||||
|
||||
def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
|
||||
"""Populate FileDescriptorProto for MessageFactory's DescriptorPool."""
|
||||
package, name = full_name.rsplit('.', 1)
|
||||
file_proto = descriptor_pb2.FileDescriptorProto()
|
||||
file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
|
||||
file_proto.package = package
|
||||
desc_proto = file_proto.message_type.add()
|
||||
desc_proto.name = name
|
||||
for f_number, (f_name, f_type) in enumerate(field_items, 1):
|
||||
field_proto = desc_proto.field.add()
|
||||
field_proto.name = f_name
|
||||
# If the number falls in the reserved range, reassign it to the correct
|
||||
# number after the range.
|
||||
if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER:
|
||||
f_number += (
|
||||
descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER -
|
||||
descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1)
|
||||
field_proto.number = f_number
|
||||
field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
|
||||
field_proto.type = f_type
|
||||
return file_proto
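# Worked example of the renumbering above: field numbers 19000-19999 are
# reserved for the protobuf implementation, so the 19000th field would
# otherwise land on the reserved boundary and is shifted by
# 19999 - 19000 + 1 = 1000 slots, i.e. it is emitted as field number 20000.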
|
@ -1,65 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Protocol message implementation hooks for C++ implementation.
|
||||
|
||||
Contains helper functions used to create protocol message classes from
|
||||
Descriptor objects at runtime backed by the protocol buffer C++ API.
|
||||
"""
|
||||
|
||||
__author__ = 'tibell@google.com (Johan Tibell)'
|
||||
|
||||
from google.protobuf.pyext import _message
|
||||
|
||||
|
||||
class GeneratedProtocolMessageType(_message.MessageMeta):
|
||||
|
||||
"""Metaclass for protocol message classes created at runtime from Descriptors.
|
||||
|
||||
The protocol compiler currently uses this metaclass to create protocol
|
||||
message classes at runtime. Clients can also manually create their own
|
||||
classes at runtime, as in this example:
|
||||
|
||||
mydescriptor = Descriptor(.....)
|
||||
factory = symbol_database.Default()
|
||||
factory.pool.AddDescriptor(mydescriptor)
|
||||
MyProtoClass = factory.GetPrototype(mydescriptor)
|
||||
myproto_instance = MyProtoClass()
|
||||
myproto_instance.foo_field = 23
|
||||
...
|
||||
|
||||
The above example will not work for nested types. If you wish to include them,
|
||||
use reflection.MakeClass() instead of manually instantiating the class in
|
||||
order to create the appropriate class structure.
|
||||
"""
|
||||
|
||||
# Must be consistent with the protocol-compiler code in
|
||||
# proto2/compiler/internal/generator.*.
|
||||
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
@ -1,34 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: google/protobuf/pyext/python.proto
|
||||
"""Generated protocol buffer code."""
|
||||
from google.protobuf.internal import builder as _builder
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import descriptor_pool as _descriptor_pool
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01')
|
||||
|
||||
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
|
||||
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.pyext.python_pb2', globals())
|
||||
if _descriptor._USE_C_DESCRIPTORS == False:
|
||||
TestAllExtensions.RegisterExtension(optional_nested_message_extension)
|
||||
TestAllExtensions.RegisterExtension(repeated_nested_message_extension)
|
||||
|
||||
DESCRIPTOR._options = None
|
||||
DESCRIPTOR._serialized_options = b'H\001'
|
||||
_TESTALLTYPES._serialized_start=72
|
||||
_TESTALLTYPES._serialized_end=388
|
||||
_TESTALLTYPES_NESTEDMESSAGE._serialized_start=300
|
||||
_TESTALLTYPES_NESTEDMESSAGE._serialized_end=388
|
||||
_FOREIGNMESSAGE._serialized_start=390
|
||||
_FOREIGNMESSAGE._serialized_end=428
|
||||
_TESTALLEXTENSIONS._serialized_start=430
|
||||
_TESTALLEXTENSIONS._serialized_end=459
|
||||
# @@protoc_insertion_point(module_scope)
|
@ -1,95 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# This code is meant to work on Python 2.4 and above only.
|
||||
|
||||
"""Contains a metaclass and helper functions used to create
|
||||
protocol message classes from Descriptor objects at runtime.
|
||||
|
||||
Recall that a metaclass is the "type" of a class.
|
||||
(A class is to a metaclass what an instance is to a class.)
|
||||
|
||||
In this case, we use the GeneratedProtocolMessageType metaclass
|
||||
to inject all the useful functionality into the classes
|
||||
output by the protocol compiler at compile-time.
|
||||
|
||||
The upshot of all this is that the real implementation
|
||||
details for ALL pure-Python protocol buffers are *here in
|
||||
this file*.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
|
||||
from google.protobuf import message_factory
|
||||
from google.protobuf import symbol_database
|
||||
|
||||
# The type of all Message classes.
|
||||
# Part of the public interface, but normally only used by message factories.
|
||||
GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE
|
||||
|
||||
MESSAGE_CLASS_CACHE = {}
|
||||
|
||||
|
||||
# Deprecated. Please NEVER use reflection.ParseMessage().
|
||||
def ParseMessage(descriptor, byte_str):
|
||||
"""Generate a new Message instance from this Descriptor and a byte string.
|
||||
|
||||
DEPRECATED: ParseMessage is deprecated because it relies on MakeClass().
|
||||
Please use MessageFactory.GetPrototype() instead.
|
||||
|
||||
Args:
|
||||
descriptor: Protobuf Descriptor object
|
||||
byte_str: Serialized protocol buffer byte string
|
||||
|
||||
Returns:
|
||||
Newly created protobuf Message object.
|
||||
"""
|
||||
result_class = MakeClass(descriptor)
|
||||
new_msg = result_class()
|
||||
new_msg.ParseFromString(byte_str)
|
||||
return new_msg
|
||||
|
||||
|
||||
# Deprecated. Please NEVER use reflection.MakeClass().
|
||||
def MakeClass(descriptor):
|
||||
"""Construct a class object for a protobuf described by descriptor.
|
||||
|
||||
DEPRECATED: use MessageFactory.GetPrototype() instead.
|
||||
|
||||
Args:
|
||||
descriptor: A descriptor.Descriptor object describing the protobuf.
|
||||
Returns:
|
||||
The Message class object described by the descriptor.
|
||||
"""
|
||||
# Original implementation leads to duplicate message classes, which won't play
|
||||
# well with extensions. Message factory info is also missing.
|
||||
# Redirect to message_factory.
|
||||
return symbol_database.Default().GetPrototype(descriptor)
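# The non-deprecated equivalent of ParseMessage(descriptor, byte_str) is this
# two-step sketch (descriptor and byte_str are assumed to be in scope):
#
#   cls = symbol_database.Default().GetPrototype(descriptor)
#   msg = cls()
#   msg.ParseFromString(byte_str)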
|
@ -1,228 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""DEPRECATED: Declares the RPC service interfaces.
|
||||
|
||||
This module declares the abstract interfaces underlying proto2 RPC
|
||||
services. These are intended to be independent of any particular RPC
|
||||
implementation, so that proto2 services can be used on top of a variety
|
||||
of implementations. Starting with version 2.3.0, RPC implementations should
|
||||
not try to build on these, but should instead provide code generator plugins
|
||||
which generate code specific to the particular RPC implementation. This way
|
||||
the generated code can be more appropriate for the implementation in use
|
||||
and can avoid unnecessary layers of indirection.
|
||||
"""
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
|
||||
class RpcException(Exception):
|
||||
"""Exception raised on failed blocking RPC method call."""
|
||||
pass
|
||||
|
||||
|
||||
class Service(object):
|
||||
|
||||
"""Abstract base interface for protocol-buffer-based RPC services.
|
||||
|
||||
Services themselves are abstract classes (implemented either by servers or as
|
||||
stubs), but they subclass this base interface. The methods of this
|
||||
interface can be used to call the methods of the service without knowing
|
||||
its exact type at compile time (analogous to the Message interface).
|
||||
"""
|
||||
|
||||
def GetDescriptor():
|
||||
"""Retrieves this service's descriptor."""
|
||||
raise NotImplementedError
|
||||
|
||||
def CallMethod(self, method_descriptor, rpc_controller,
|
||||
request, done):
|
||||
"""Calls a method of the service specified by method_descriptor.
|
||||
|
||||
If "done" is None then the call is blocking and the response
|
||||
message will be returned directly. Otherwise the call is asynchronous
|
||||
and "done" will later be called with the response value.
|
||||
|
||||
In the blocking case, RpcException will be raised on error.
|
||||
|
||||
Preconditions:
|
||||
|
||||
* method_descriptor.service == GetDescriptor
|
||||
* request is of the exact same class as returned by
|
||||
GetRequestClass(method).
|
||||
* After the call has started, the request must not be modified.
|
||||
* "rpc_controller" is of the correct type for the RPC implementation being
|
||||
used by this Service. For stubs, the "correct type" depends on the
|
||||
RpcChannel which the stub is using.
|
||||
|
||||
Postconditions:
|
||||
|
||||
* "done" will be called when the method is complete. This may be
|
||||
before CallMethod() returns or it may be at some point in the future.
|
||||
* If the RPC failed, the response value passed to "done" will be None.
|
||||
Further details about the failure can be found by querying the
|
||||
RpcController.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def GetRequestClass(self, method_descriptor):
|
||||
"""Returns the class of the request message for the specified method.
|
||||
|
||||
CallMethod() requires that the request is of a particular subclass of
|
||||
Message. GetRequestClass() gets the default instance of this required
|
||||
type.
|
||||
|
||||
Example:
|
||||
method = service.GetDescriptor().FindMethodByName("Foo")
|
||||
request = stub.GetRequestClass(method)()
|
||||
request.ParseFromString(input)
|
||||
service.CallMethod(method, rpc_controller, request, callback)
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def GetResponseClass(self, method_descriptor):
|
||||
"""Returns the class of the response message for the specified method.
|
||||
|
||||
This method isn't really needed, as the RpcChannel's CallMethod constructs
|
||||
the response protocol message. It's provided anyway in case it is useful
|
||||
for the caller to know the response type in advance.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class RpcController(object):
|
||||
|
||||
"""An RpcController mediates a single method call.
|
||||
|
||||
The primary purpose of the controller is to provide a way to manipulate
|
||||
settings specific to the RPC implementation and to find out about RPC-level
|
||||
errors. The methods provided by the RpcController interface are intended
|
||||
to be a "least common denominator" set of features which we expect all
|
||||
implementations to support. Specific implementations may provide more
|
||||
advanced features (e.g. deadline propagation).
|
||||
"""
|
||||
|
||||
# Client-side methods below
|
||||
|
||||
def Reset(self):
|
||||
"""Resets the RpcController to its initial state.
|
||||
|
||||
After the RpcController has been reset, it may be reused in
|
||||
a new call. Must not be called while an RPC is in progress.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def Failed(self):
|
||||
"""Returns true if the call failed.
|
||||
|
||||
After a call has finished, returns true if the call failed. The possible
|
||||
reasons for failure depend on the RPC implementation. Failed() must not
|
||||
be called before a call has finished. If Failed() returns true, the
|
||||
contents of the response message are undefined.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def ErrorText(self):
|
||||
"""If Failed is true, returns a human-readable description of the error."""
|
||||
raise NotImplementedError
|
||||
|
||||
def StartCancel(self):
|
||||
"""Initiate cancellation.
|
||||
|
||||
Advises the RPC system that the caller desires that the RPC call be
|
||||
canceled. The RPC system may cancel it immediately, may wait awhile and
|
||||
then cancel it, or may not even cancel the call at all. If the call is
|
||||
canceled, the "done" callback will still be called and the RpcController
|
||||
will indicate that the call failed at that time.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
# Server-side methods below
|
||||
|
||||
def SetFailed(self, reason):
|
||||
"""Sets a failure reason.
|
||||
|
||||
Causes Failed() to return true on the client side. "reason" will be
|
||||
incorporated into the message returned by ErrorText(). If you find
|
||||
you need to return machine-readable information about failures, you
|
||||
should incorporate it into your response protocol buffer and should
|
||||
NOT call SetFailed().
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def IsCanceled(self):
|
||||
"""Checks if the client cancelled the RPC.
|
||||
|
||||
If true, indicates that the client canceled the RPC, so the server may
|
||||
as well give up on replying to it. The server should still call the
|
||||
final "done" callback.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def NotifyOnCancel(self, callback):
|
||||
"""Sets a callback to invoke on cancel.
|
||||
|
||||
Asks that the given callback be called when the RPC is canceled. The
|
||||
callback will always be called exactly once. If the RPC completes without
|
||||
being canceled, the callback will be called after completion. If the RPC
|
||||
has already been canceled when NotifyOnCancel() is called, the callback
|
||||
will be called immediately.
|
||||
|
||||
NotifyOnCancel() must be called no more than once per request.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class RpcChannel(object):
|
||||
|
||||
"""Abstract interface for an RPC channel.
|
||||
|
||||
An RpcChannel represents a communication line to a service which can be used
|
||||
to call that service's methods. The service may be running on another
|
||||
machine. Normally, you should not use an RpcChannel directly, but instead
|
||||
construct a stub Service wrapping it. Example:
|
||||
|
||||
RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
|
||||
RpcController controller = rpcImpl.Controller()
|
||||
MyService service = MyService_Stub(channel)
|
||||
service.MyMethod(controller, request, callback)
|
||||
"""
|
||||
|
||||
def CallMethod(self, method_descriptor, rpc_controller,
|
||||
request, response_class, done):
|
||||
"""Calls the method identified by the descriptor.
|
||||
|
||||
Call the given method of the remote service. The signature of this
|
||||
procedure looks the same as Service.CallMethod(), but the requirements
|
||||
are less strict in one important way: the request object doesn't have to
|
||||
be of any specific class as long as its descriptor is method.input_type.
|
||||
"""
|
||||
raise NotImplementedError
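# A minimal in-process channel sketch (illustrative only): it dispatches to a
# local Service instance instead of a remote machine, which is enough to
# satisfy the interface above.
#
#   class LocalChannel(RpcChannel):
#
#     def __init__(self, service):
#       self._service = service
#
#     def CallMethod(self, method_descriptor, rpc_controller,
#                    request, response_class, done):
#       # Delegate directly to the co-located service implementation.
#       return self._service.CallMethod(method_descriptor, rpc_controller,
#                                       request, done)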
|
@ -1,295 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Contains metaclasses used to create protocol service and service stub
|
||||
classes from ServiceDescriptor objects at runtime.
|
||||
|
||||
The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
|
||||
inject all useful functionality into the classes output by the protocol
|
||||
compiler at compile-time.
|
||||
"""
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
|
||||
class GeneratedServiceType(type):
|
||||
|
||||
"""Metaclass for service classes created at runtime from ServiceDescriptors.
|
||||
|
||||
Implementations for all methods described in the Service class are added here
|
||||
by this class. We also create properties to allow getting/setting all fields
|
||||
in the protocol message.
|
||||
|
||||
The protocol compiler currently uses this metaclass to create protocol service
|
||||
classes at runtime. Clients can also manually create their own classes at
|
||||
runtime, as in this example::
|
||||
|
||||
mydescriptor = ServiceDescriptor(.....)
|
||||
class MyProtoService(service.Service):
|
||||
__metaclass__ = GeneratedServiceType
|
||||
DESCRIPTOR = mydescriptor
|
||||
myservice_instance = MyProtoService()
|
||||
# ...
|
||||
"""
|
||||
|
||||
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
||||
|
||||
def __init__(cls, name, bases, dictionary):
|
||||
"""Creates a message service class.
|
||||
|
||||
Args:
|
||||
name: Name of the class (ignored, but required by the metaclass
|
||||
protocol).
|
||||
bases: Base classes of the class being constructed.
|
||||
dictionary: The class dictionary of the class being constructed.
|
||||
dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
|
||||
describing this protocol service type.
|
||||
"""
|
||||
# Don't do anything if this class doesn't have a descriptor. This happens
|
||||
# when a service class is subclassed.
|
||||
if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
|
||||
return
|
||||
|
||||
descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
|
||||
service_builder = _ServiceBuilder(descriptor)
|
||||
service_builder.BuildService(cls)
|
||||
cls.DESCRIPTOR = descriptor
|
||||
|
||||
|
||||
class GeneratedServiceStubType(GeneratedServiceType):
|
||||
|
||||
"""Metaclass for service stubs created at runtime from ServiceDescriptors.
|
||||
|
||||
This class has similar responsibilities as GeneratedServiceType, except that
|
||||
it creates the service stub classes.
|
||||
"""
|
||||
|
||||
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
||||
|
||||
def __init__(cls, name, bases, dictionary):
|
||||
"""Creates a message service stub class.
|
||||
|
||||
Args:
|
||||
name: Name of the class (ignored here).
|
||||
bases: Base classes of the class being constructed.
|
||||
dictionary: The class dictionary of the class being constructed.
|
||||
dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
|
||||
describing this protocol service type.
|
||||
"""
|
||||
super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
|
||||
# Don't do anything if this class doesn't have a descriptor. This happens
|
||||
# when a service stub is subclassed.
|
||||
if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
|
||||
return
|
||||
|
||||
descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
|
||||
service_stub_builder = _ServiceStubBuilder(descriptor)
|
||||
service_stub_builder.BuildServiceStub(cls)
|
||||
|
||||
|
||||
class _ServiceBuilder(object):
|
||||
|
||||
"""This class constructs a protocol service class using a service descriptor.
|
||||
|
||||
Given a service descriptor, this class constructs a class that represents
|
||||
the specified service descriptor. One service builder instance constructs
|
||||
exactly one service class. That means all instances of that class share the
|
||||
same builder.
|
||||
"""
|
||||
|
||||
def __init__(self, service_descriptor):
|
||||
"""Initializes an instance of the service class builder.
|
||||
|
||||
Args:
|
||||
service_descriptor: ServiceDescriptor to use when constructing the
|
||||
service class.
|
||||
"""
|
||||
self.descriptor = service_descriptor
|
||||
|
||||
def BuildService(builder, cls):
|
||||
"""Constructs the service class.
|
||||
|
||||
Args:
|
||||
cls: The class that will be constructed.
|
||||
"""
|
||||
|
||||
# CallMethod needs to operate with an instance of the Service class. This
|
||||
# internal wrapper function exists only to be able to pass the service
|
||||
# instance to the method that does the real CallMethod work.
|
||||
# Making sure to use exact argument names from the abstract interface in
|
||||
# service.py to match the type signature
|
||||
def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done):
|
||||
return builder._CallMethod(self, method_descriptor, rpc_controller,
|
||||
request, done)
|
||||
|
||||
def _WrapGetRequestClass(self, method_descriptor):
|
||||
return builder._GetRequestClass(method_descriptor)
|
||||
|
||||
def _WrapGetResponseClass(self, method_descriptor):
|
||||
return builder._GetResponseClass(method_descriptor)
|
||||
|
||||
builder.cls = cls
|
||||
cls.CallMethod = _WrapCallMethod
|
||||
cls.GetDescriptor = staticmethod(lambda: builder.descriptor)
|
||||
cls.GetDescriptor.__doc__ = 'Returns the service descriptor.'
|
||||
cls.GetRequestClass = _WrapGetRequestClass
|
||||
cls.GetResponseClass = _WrapGetResponseClass
|
||||
for method in builder.descriptor.methods:
|
||||
setattr(cls, method.name, builder._GenerateNonImplementedMethod(method))
|
||||
|
||||
def _CallMethod(self, srvc, method_descriptor,
|
||||
rpc_controller, request, callback):
|
||||
"""Calls the method described by a given method descriptor.
|
||||
|
||||
Args:
|
||||
srvc: Instance of the service for which this method is called.
|
||||
method_descriptor: Descriptor that represents the method to call.
|
||||
rpc_controller: RPC controller to use for this method's execution.
|
||||
request: Request protocol message.
|
||||
callback: A callback to invoke after the method has completed.
|
||||
"""
|
||||
if method_descriptor.containing_service != self.descriptor:
|
||||
raise RuntimeError(
|
||||
'CallMethod() given method descriptor for wrong service type.')
|
||||
method = getattr(srvc, method_descriptor.name)
|
||||
return method(rpc_controller, request, callback)
|
||||
|
||||
def _GetRequestClass(self, method_descriptor):
|
||||
"""Returns the class of the request protocol message.
|
||||
|
||||
Args:
|
||||
method_descriptor: Descriptor of the method for which to return the
|
||||
request protocol message class.
|
||||
|
||||
Returns:
|
||||
A class that represents the input protocol message of the specified
|
||||
method.
|
||||
"""
|
||||
if method_descriptor.containing_service != self.descriptor:
|
||||
raise RuntimeError(
|
||||
'GetRequestClass() given method descriptor for wrong service type.')
|
||||
return method_descriptor.input_type._concrete_class
|
||||
|
||||
def _GetResponseClass(self, method_descriptor):
|
||||
"""Returns the class of the response protocol message.
|
||||
|
||||
Args:
|
||||
method_descriptor: Descriptor of the method for which to return the
|
||||
response protocol message class.
|
||||
|
||||
Returns:
|
||||
A class that represents the output protocol message of the specified
|
||||
method.
|
||||
"""
|
||||
if method_descriptor.containing_service != self.descriptor:
|
||||
raise RuntimeError(
|
||||
'GetResponseClass() given method descriptor for wrong service type.')
|
||||
return method_descriptor.output_type._concrete_class
|
||||
|
||||
def _GenerateNonImplementedMethod(self, method):
|
||||
"""Generates and returns a method that can be set for a service methods.
|
||||
|
||||
Args:
|
||||
method: Descriptor of the service method for which a method is to be
|
||||
generated.
|
||||
|
||||
Returns:
|
||||
A method that can be added to the service class.
|
||||
"""
|
||||
return lambda inst, rpc_controller, request, callback: (
|
||||
self._NonImplementedMethod(method.name, rpc_controller, callback))
|
||||
|
||||
def _NonImplementedMethod(self, method_name, rpc_controller, callback):
|
||||
"""The body of all methods in the generated service class.
|
||||
|
||||
Args:
|
||||
method_name: Name of the method being executed.
|
||||
rpc_controller: RPC controller used to execute this method.
|
||||
callback: A callback which will be invoked when the method finishes.
|
||||
"""
|
||||
rpc_controller.SetFailed('Method %s not implemented.' % method_name)
|
||||
callback(None)
|
||||
|
||||
|
||||
class _ServiceStubBuilder(object):
|
||||
|
||||
"""Constructs a protocol service stub class using a service descriptor.
|
||||
|
||||
Given a service descriptor, this class constructs a suitable stub class.
|
||||
A stub is just a type-safe wrapper around an RpcChannel which emulates a
|
||||
local implementation of the service.
|
||||
|
||||
One service stub builder instance constructs exactly one class. That means all
|
||||
instances of that class share the same service stub builder.
|
||||
"""
|
||||
|
||||
def __init__(self, service_descriptor):
|
||||
"""Initializes an instance of the service stub class builder.
|
||||
|
||||
Args:
|
||||
service_descriptor: ServiceDescriptor to use when constructing the
|
||||
stub class.
|
||||
"""
|
||||
self.descriptor = service_descriptor
|
||||
|
||||
def BuildServiceStub(self, cls):
|
||||
"""Constructs the stub class.
|
||||
|
||||
Args:
|
||||
cls: The class that will be constructed.
|
||||
"""
|
||||
|
||||
def _ServiceStubInit(stub, rpc_channel):
|
||||
stub.rpc_channel = rpc_channel
|
||||
self.cls = cls
|
||||
cls.__init__ = _ServiceStubInit
|
||||
for method in self.descriptor.methods:
|
||||
setattr(cls, method.name, self._GenerateStubMethod(method))
|
||||
|
||||
def _GenerateStubMethod(self, method):
|
||||
return (lambda inst, rpc_controller, request, callback=None:
|
||||
self._StubMethod(inst, method, rpc_controller, request, callback))
|
||||
|
||||
def _StubMethod(self, stub, method_descriptor,
|
||||
rpc_controller, request, callback):
|
||||
"""The body of all service methods in the generated stub class.
|
||||
|
||||
Args:
|
||||
stub: Stub instance.
|
||||
method_descriptor: Descriptor of the invoked method.
|
||||
rpc_controller: Rpc controller to execute the method.
|
||||
request: Request protocol message.
|
||||
callback: A callback to execute when the method finishes.
|
||||
Returns:
|
||||
Response message (in case of blocking call).
|
||||
"""
|
||||
return stub.rpc_channel.CallMethod(
|
||||
method_descriptor, rpc_controller, request,
|
||||
method_descriptor.output_type._concrete_class, callback)
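# A usage sketch for a stub built this way (MyService_Stub, channel,
# controller and request are hypothetical):
#
#   stub = MyService_Stub(channel)      # __init__ set by BuildServiceStub
#   response = stub.MyMethod(controller, request)   # callback defaults to None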
|
@ -1,26 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: google/protobuf/source_context.proto
|
||||
"""Generated protocol buffer code."""
|
||||
from google.protobuf.internal import builder as _builder
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import descriptor_pool as _descriptor_pool
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
|
||||
|
||||
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
|
||||
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals())
|
||||
if _descriptor._USE_C_DESCRIPTORS == False:
|
||||
|
||||
DESCRIPTOR._options = None
|
||||
DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
|
||||
_SOURCECONTEXT._serialized_start=57
|
||||
_SOURCECONTEXT._serialized_end=91
|
||||
# @@protoc_insertion_point(module_scope)
|
@ -1,36 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: google/protobuf/struct.proto
|
||||
"""Generated protocol buffer code."""
|
||||
from google.protobuf.internal import builder as _builder
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import descriptor_pool as _descriptor_pool
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
|
||||
|
||||
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
|
||||
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals())
|
||||
if _descriptor._USE_C_DESCRIPTORS == False:
|
||||
|
||||
DESCRIPTOR._options = None
|
||||
DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
|
||||
_STRUCT_FIELDSENTRY._options = None
|
||||
_STRUCT_FIELDSENTRY._serialized_options = b'8\001'
|
||||
_NULLVALUE._serialized_start=474
|
||||
_NULLVALUE._serialized_end=501
|
||||
_STRUCT._serialized_start=50
|
||||
_STRUCT._serialized_end=182
|
||||
_STRUCT_FIELDSENTRY._serialized_start=113
|
||||
_STRUCT_FIELDSENTRY._serialized_end=182
|
||||
_VALUE._serialized_start=185
|
||||
_VALUE._serialized_end=419
|
||||
_LISTVALUE._serialized_start=421
|
||||
_LISTVALUE._serialized_end=472
|
||||
# @@protoc_insertion_point(module_scope)
|
@ -1,194 +0,0 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""A database of Python protocol buffer generated symbols.

SymbolDatabase is the MessageFactory for messages generated at compile time,
and makes it easy to create new instances of a registered type, given only the
type's protocol buffer symbol name.

Example usage::

  db = symbol_database.SymbolDatabase()

  # Register symbols of interest, from one or multiple files.
  db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
  db.RegisterMessage(my_proto_pb2.MyMessage)
  db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)

  # The database can be used as a MessageFactory, to generate types based on
  # their name:
  types = db.GetMessages(['my_proto.proto'])
  my_message_instance = types['MyMessage']()

  # The database's underlying descriptor pool can be queried, so it's not
  # necessary to know a type's filename to be able to generate it:
  filename = db.pool.FindFileContainingSymbol('MyMessage')
  my_message_instance = db.GetMessages([filename])['MyMessage']()

  # This functionality is also provided directly via a convenience method:
  my_message_instance = db.GetSymbol('MyMessage')()
"""

from google.protobuf.internal import api_implementation
from google.protobuf import descriptor_pool
from google.protobuf import message_factory


class SymbolDatabase(message_factory.MessageFactory):
  """A database of Python generated symbols."""

  def RegisterMessage(self, message):
    """Registers the given message type in the local database.

    Calls to GetSymbol() and GetMessages() will return messages registered here.

    Args:
      message: A :class:`google.protobuf.message.Message` subclass (or
        instance); its descriptor will be registered.

    Returns:
      The provided message.
    """
    desc = message.DESCRIPTOR
    self._classes[desc] = message
    self.RegisterMessageDescriptor(desc)
    return message

  def RegisterMessageDescriptor(self, message_descriptor):
    """Registers the given message descriptor in the local database.

    Args:
      message_descriptor (Descriptor): the message descriptor to add.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddDescriptor(message_descriptor)

  def RegisterEnumDescriptor(self, enum_descriptor):
    """Registers the given enum descriptor in the local database.

    Args:
      enum_descriptor (EnumDescriptor): The enum descriptor to register.

    Returns:
      EnumDescriptor: The provided descriptor.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddEnumDescriptor(enum_descriptor)
    return enum_descriptor

  def RegisterServiceDescriptor(self, service_descriptor):
    """Registers the given service descriptor in the local database.

    Args:
      service_descriptor (ServiceDescriptor): the service descriptor to
        register.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddServiceDescriptor(service_descriptor)

  def RegisterFileDescriptor(self, file_descriptor):
    """Registers the given file descriptor in the local database.

    Args:
      file_descriptor (FileDescriptor): The file descriptor to register.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._InternalAddFileDescriptor(file_descriptor)
  def GetSymbol(self, symbol):
    """Tries to find a symbol in the local database.

    Currently, this method only returns message.Message instances; however, it
    may be extended in future to support other symbol types.

    Args:
      symbol (str): a protocol buffer symbol.

    Returns:
      A Python class corresponding to the symbol.

    Raises:
      KeyError: if the symbol could not be found.
    """
    return self._classes[self.pool.FindMessageTypeByName(symbol)]
  def GetMessages(self, files):
    # TODO(amauryfa): Fix the differences with MessageFactory.
    """Gets all registered messages from a specified file.

    Only messages already created and registered will be returned (this is the
    case for imported _pb2 modules). But unlike MessageFactory, this version
    also returns already defined nested messages, though it does not register
    any message extensions.

    Args:
      files (list[str]): The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes.

    Raises:
      KeyError: if a file could not be found.
    """

    def _GetAllMessages(desc):
      """Walks a message Descriptor and recursively yields all nested message descriptors."""
      yield desc
      for msg_desc in desc.nested_types:
        for nested_desc in _GetAllMessages(msg_desc):
          yield nested_desc

    result = {}
    for file_name in files:
      file_desc = self.pool.FindFileByName(file_name)
      for msg_desc in file_desc.message_types_by_name.values():
        for desc in _GetAllMessages(msg_desc):
          try:
            result[desc.full_name] = self._classes[desc]
          except KeyError:
            # This descriptor has no registered class, skip it.
            pass
    return result


_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())


def Default():
  """Returns the default SymbolDatabase."""
  return _DEFAULT
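A minimal usage sketch for the module above, assuming a standard protobuf install where generated `_pb2` modules register their symbols with the default database on import (`timestamp_pb2` here is just an illustrative generated module):

```python
from google.protobuf import symbol_database
from google.protobuf import timestamp_pb2  # importing registers its symbols

db = symbol_database.Default()
Timestamp = db.GetSymbol('google.protobuf.Timestamp')  # look up the class by full proto name
print(Timestamp(seconds=1, nanos=500))
```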
@ -1,110 +0,0 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Encoding related utilities."""
import re

_cescape_chr_to_symbol_map = {}
_cescape_chr_to_symbol_map[9] = r'\t'  # optional escape
_cescape_chr_to_symbol_map[10] = r'\n'  # optional escape
_cescape_chr_to_symbol_map[13] = r'\r'  # optional escape
_cescape_chr_to_symbol_map[34] = r'\"'  # necessary escape
_cescape_chr_to_symbol_map[39] = r"\'"  # optional escape
_cescape_chr_to_symbol_map[92] = r'\\'  # necessary escape

# Lookup table for unicode
_cescape_unicode_to_str = [chr(i) for i in range(0, 256)]
for byte, string in _cescape_chr_to_symbol_map.items():
  _cescape_unicode_to_str[byte] = string

# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32)
_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] +
                        [chr(i) for i in range(32, 127)] +
                        [r'\%03o' % i for i in range(127, 256)])
for byte, string in _cescape_chr_to_symbol_map.items():
  _cescape_byte_to_str[byte] = string
del byte, string
def CEscape(text, as_utf8):
  # type: (...) -> str
  """Escape a bytes string for use in a text protocol buffer.

  Args:
    text: A byte string to be escaped.
    as_utf8: Specifies if result may contain non-ASCII characters.
        In Python 3 this allows unescaped non-ASCII Unicode characters.
        In Python 2 the return value will be valid UTF-8 rather than only ASCII.
  Returns:
    Escaped string (str).
  """
  # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not
  # satisfy our needs; they encode unprintable characters using two-digit hex
  # escapes whereas our C++ unescaping function allows hex escapes to be any
  # length. So, "\0011".encode('string_escape') ends up being "\\x011", which
  # will be decoded in C++ as a single-character string with char code 0x11.
  text_is_unicode = isinstance(text, str)
  if as_utf8 and text_is_unicode:
    # We're already unicode, no processing beyond control char escapes.
    return text.translate(_cescape_chr_to_symbol_map)
  ord_ = ord if text_is_unicode else lambda x: x  # bytes iterate as ints.
  if as_utf8:
    return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text)
  return ''.join(_cescape_byte_to_str[ord_(c)] for c in text)


_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')


def CUnescape(text):
  # type: (str) -> bytes
  """Unescape a text string with C-style escape sequences to UTF-8 bytes.

  Args:
    text: The data to parse in a str.
  Returns:
    A byte string.
  """

  def ReplaceHex(m):
    # Only replace the match if the number of leading backslashes is odd, i.e.
    # the slash itself is not escaped.
    if len(m.group(1)) & 1:
      return m.group(1) + 'x0' + m.group(2)
    return m.group(0)

  # This is required because the 'string_escape' encoding doesn't
  # allow single-digit hex escapes (like '\xf').
  result = _CUNESCAPE_HEX.sub(ReplaceHex, text)

  return (result.encode('utf-8')  # Make it bytes to allow decode.
          .decode('unicode_escape')
          # Make it bytes again to return the proper type.
          .encode('raw_unicode_escape'))
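A quick round-trip sketch of the two helpers above, with values worked out by hand from the escape tables:

```python
raw = b'\x00\x11"hi"\n'
escaped = CEscape(raw, as_utf8=False)  # escaped == r'\000\021\"hi\"\n'
assert CUnescape(escaped) == raw       # octal, quote and newline escapes all round-trip
```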
@ -1,26 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/timestamp.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _TIMESTAMP._serialized_start=52
  _TIMESTAMP._serialized_end=95
# @@protoc_insertion_point(module_scope)
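For reference, a short sketch of what the generated module above provides once imported (standard protobuf message API; nothing here is specific to this fork):

```python
from google.protobuf import timestamp_pb2

ts = timestamp_pb2.Timestamp(seconds=1, nanos=500_000_000)
data = ts.SerializeToString()                          # wire-format bytes
assert timestamp_pb2.Timestamp.FromString(data) == ts  # parse back and compare
```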
@ -1,42 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/type.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2

DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b \x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _SYNTAX._serialized_start=1413
  _SYNTAX._serialized_end=1459
  _TYPE._serialized_start=113
  _TYPE._serialized_end=328
  _FIELD._serialized_start=331
  _FIELD._serialized_end=1056
  _FIELD_KIND._serialized_start=610
  _FIELD_KIND._serialized_end=938
  _FIELD_CARDINALITY._serialized_start=940
  _FIELD_CARDINALITY._serialized_end=1056
  _ENUM._serialized_start=1059
  _ENUM._serialized_end=1265
  _ENUMVALUE._serialized_start=1267
  _ENUMVALUE._serialized_end=1350
  _OPTION._serialized_start=1352
  _OPTION._serialized_end=1411
# @@protoc_insertion_point(module_scope)
@ -1,72 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/util/json_format.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext'])

  DESCRIPTOR._options = None
  _TESTBOOLMAP_BOOLMAPENTRY._options = None
  _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001'
  _TESTSTRINGMAP_STRINGMAPENTRY._options = None
  _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001'
  _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None
  _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001'
  _ENUMVALUE._serialized_start=1607
  _ENUMVALUE._serialized_end=1657
  _TESTFLAGSANDSTRINGS._serialized_start=62
  _TESTFLAGSANDSTRINGS._serialized_end=199
  _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173
  _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199
  _TESTBASE64BYTEARRAYS._serialized_start=201
  _TESTBASE64BYTEARRAYS._serialized_end=234
  _TESTJAVASCRIPTJSON._serialized_start=236
  _TESTJAVASCRIPTJSON._serialized_end=307
  _TESTJAVASCRIPTORDERJSON1._serialized_start=309
  _TESTJAVASCRIPTORDERJSON1._serialized_end=390
  _TESTJAVASCRIPTORDERJSON2._serialized_start=393
  _TESTJAVASCRIPTORDERJSON2._serialized_end=530
  _TESTLARGEINT._serialized_start=532
  _TESTLARGEINT._serialized_end=568
  _TESTNUMBERS._serialized_start=571
  _TESTNUMBERS._serialized_end=731
  _TESTNUMBERS_MYTYPE._serialized_start=691
  _TESTNUMBERS_MYTYPE._serialized_end=731
  _TESTCAMELCASE._serialized_start=733
  _TESTCAMELCASE._serialized_end=817
  _TESTBOOLMAP._serialized_start=819
  _TESTBOOLMAP._serialized_end=943
  _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897
  _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943
  _TESTRECURSION._serialized_start=945
  _TESTRECURSION._serialized_end=1024
  _TESTSTRINGMAP._serialized_start=1027
  _TESTSTRINGMAP._serialized_end=1161
  _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113
  _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161
  _TESTSTRINGSERIALIZER._serialized_start=1164
  _TESTSTRINGSERIALIZER._serialized_end=1360
  _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113
  _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161
  _TESTMESSAGEWITHEXTENSION._serialized_start=1362
  _TESTMESSAGEWITHEXTENSION._serialized_end=1398
  _TESTEXTENSION._serialized_start=1400
  _TESTEXTENSION._serialized_end=1522
  _TESTDEFAULTENUMVALUE._serialized_start=1524
  _TESTDEFAULTENUMVALUE._serialized_end=1605
# @@protoc_insertion_point(module_scope)
@ -1,42 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/wrappers.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _DOUBLEVALUE._serialized_start=51
  _DOUBLEVALUE._serialized_end=79
  _FLOATVALUE._serialized_start=81
  _FLOATVALUE._serialized_end=108
  _INT64VALUE._serialized_start=110
  _INT64VALUE._serialized_end=137
  _UINT64VALUE._serialized_start=139
  _UINT64VALUE._serialized_end=167
  _INT32VALUE._serialized_start=169
  _INT32VALUE._serialized_end=196
  _UINT32VALUE._serialized_start=198
  _UINT32VALUE._serialized_end=226
  _BOOLVALUE._serialized_start=228
  _BOOLVALUE._serialized_end=254
  _STRINGVALUE._serialized_start=256
  _STRINGVALUE._serialized_end=284
  _BYTESVALUE._serialized_start=286
  _BYTESVALUE._serialized_end=313
# @@protoc_insertion_point(module_scope)
@ -1,19 +0,0 @@
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "protobuf3"
version = "3.20.2"
description = "protobuf3"
license = "CC BY-NC-ND 4.0"
authors = ["google"]
repository = "https://github.com/protocolbuffers/protobuf/"

[tool.poetry.urls]
"Issues" = "https://github.com/protocolbuffers/protobuf//issues"

[tool.poetry.dependencies]
python = ">=3.7,<4.0"
requests = "^2.32.3"
@ -1,90 +0,0 @@
# pyplayready
All of this is already public. 100% of this code has been derived from the mspr_toolkit.

## Installation
```shell
pip install pyplayready
```

Run `pyplayready --help` to view the available CLI functions.

## Devices
Run the command below to create a PlayReady Device (.prd) from a `bgroupcert.dat` and `zgpriv.dat`:
```shell
pyplayready create-device -c bgroupcert.dat -k zgpriv.dat
```

Test a PlayReady device:
```shell
pyplayready test DEVICE.prd
```

> [!IMPORTANT]
> There currently isn't a proper method of extracting Group Certificates/Keys. They can be found inside older Samsung phones/Smart TVs, Windows DLLs and set-top-boxes in encrypted form.

Export a provisioned device to its raw .dat files:
```shell
pyplayready export-device DEVICE.prd
```

## Usage
An example code snippet:

```python
from pyplayready.cdm import Cdm
from pyplayready.device import Device
from pyplayready.system.pssh import PSSH

import requests

device = Device.load("C:/Path/To/A/Device.prd")
cdm = Cdm.from_device(device)
session_id = cdm.open()

pssh = PSSH(
"AAADfHBzc2gAAAAAmgTweZhAQoarkuZb4IhflQAAA1xcAwAAAQABAFIDPABXAFIATQBIAEUAQQBEAEUAUgAgAHgAbQBsAG4AcwA9ACIAaAB0AH"
"QAcAA6AC8ALwBzAGMAaABlAG0AYQBzAC4AbQBpAGMAcgBvAHMAbwBmAHQALgBjAG8AbQAvAEQAUgBNAC8AMgAwADAANwAvADAAMwAvAFAAbABh"
"AHkAUgBlAGEAZAB5AEgAZQBhAGQAZQByACIAIAB2AGUAcgBzAGkAbwBuAD0AIgA0AC4AMAAuADAALgAwACIAPgA8AEQAQQBUAEEAPgA8AFAAUg"
"BPAFQARQBDAFQASQBOAEYATwA+ADwASwBFAFkATABFAE4APgAxADYAPAAvAEsARQBZAEwARQBOAD4APABBAEwARwBJAEQAPgBBAEUAUwBDAFQA"
"UgA8AC8AQQBMAEcASQBEAD4APAAvAFAAUgBPAFQARQBDAFQASQBOAEYATwA+ADwASwBJAEQAPgA0AFIAcABsAGIAKwBUAGIATgBFAFMAOAB0AE"
"cAawBOAEYAVwBUAEUASABBAD0APQA8AC8ASwBJAEQAPgA8AEMASABFAEMASwBTAFUATQA+AEsATABqADMAUQB6AFEAUAAvAE4AQQA9ADwALwBD"
"AEgARQBDAEsAUwBVAE0APgA8AEwAQQBfAFUAUgBMAD4AaAB0AHQAcABzADoALwAvAHAAcgBvAGYAZgBpAGMAaQBhAGwAcwBpAHQAZQAuAGsAZQ"
"B5AGQAZQBsAGkAdgBlAHIAeQAuAG0AZQBkAGkAYQBzAGUAcgB2AGkAYwBlAHMALgB3AGkAbgBkAG8AdwBzAC4AbgBlAHQALwBQAGwAYQB5AFIA"
"ZQBhAGQAeQAvADwALwBMAEEAXwBVAFIATAA+ADwAQwBVAFMAVABPAE0AQQBUAFQAUgBJAEIAVQBUAEUAUwA+ADwASQBJAFMAXwBEAFIATQBfAF"
"YARQBSAFMASQBPAE4APgA4AC4AMQAuADIAMwAwADQALgAzADEAPAAvAEkASQBTAF8ARABSAE0AXwBWAEUAUgBTAEkATwBOAD4APAAvAEMAVQBT"
"AFQATwBNAEEAVABUAFIASQBCAFUAVABFAFMAPgA8AC8ARABBAFQAQQA+ADwALwBXAFIATQBIAEUAQQBEAEUAUgA+AA=="
)

wrm_headers = pssh.get_wrm_headers()
request = cdm.get_license_challenge(session_id, wrm_headers[0])

response = requests.post(
    url="https://test.playready.microsoft.com/service/rightsmanager.asmx?cfg=(persist:false,sl:2000)",
    headers={
        'Content-Type': 'text/xml; charset=UTF-8',
    },
    data=request,
)

cdm.parse_license(session_id, response.text)

for key in cdm.get_keys(session_id):
    print(f"{key.key_id.hex}:{key.key.hex()}")

cdm.close(session_id)
```

## Disclaimer

1. This project requires a valid Microsoft Certificate and Group Key, which are not provided by this project.
2. Public test provisions are available and provided by Microsoft to use for testing projects such as this one.
3. This project does not condone piracy or any action against the terms of the DRM systems.
4. All efforts in this project have been the result of Reverse-Engineering, Publicly available research, and Trial & Error.
5. Do not use this program to decrypt or access any content for which you do not have the legal rights or explicit permission.
6. Unauthorized decryption or distribution of copyrighted materials is a violation of applicable laws and intellectual property rights.
7. This tool must not be used for any illegal activities, including but not limited to piracy, circumventing digital rights management (DRM), or unauthorized access to protected content.
8. The developers, contributors, and maintainers of this program are not responsible for any misuse or illegal activities performed using this software.
9. By using this program, you agree to comply with all applicable laws and regulations governing digital rights and copyright protections.

## Credits
+ [mspr_toolkit](https://security-explorations.com/materials/mspr_toolkit.zip)
@ -1,14 +0,0 @@
from pyplayready.cdm import *
from pyplayready.crypto.ecc_key import *
from pyplayready.crypto.elgamal import *
from pyplayready.device import *
from pyplayready.license.key import *
from pyplayready.license.xml_key import *
from pyplayready.license.xmrlicense import *
from pyplayready.remote.remotecdm import *
from pyplayready.system.bcert import *
from pyplayready.system.pssh import *
from pyplayready.system.session import *


__version__ = "0.5.0"
@ -1,96 +0,0 @@
from typing import Union, Tuple

from Crypto.Hash import SHA256
from Crypto.Hash.SHA256 import SHA256Hash
from Crypto.PublicKey.ECC import EccKey
from Crypto.Signature import DSS
from ecpy.curves import Point, Curve

from pyplayready.crypto.elgamal import ElGamal
from pyplayready.crypto.ecc_key import ECCKey


class Crypto:
    def __init__(self, curve: str = "secp256r1"):
        self.curve = Curve.get_curve(curve)
        self.elgamal = ElGamal(self.curve)

    def ecc256_encrypt(self, public_key: Union[ECCKey, Point], plaintext: Union[Point, bytes]) -> bytes:
        if isinstance(public_key, ECCKey):
            public_key = public_key.get_point(self.curve)
        if not isinstance(public_key, Point):
            raise ValueError(f"Expecting ECCKey or Point input, got {public_key!r}")

        if isinstance(plaintext, bytes):
            plaintext = Point(
                x=int.from_bytes(plaintext[:32], 'big'),
                y=int.from_bytes(plaintext[32:64], 'big'),
                curve=self.curve
            )
        if not isinstance(plaintext, Point):
            raise ValueError(f"Expecting Point or Bytes input, got {plaintext!r}")

        point1, point2 = self.elgamal.encrypt(
            message_point=plaintext,
            public_key=public_key
        )
        return b''.join([
            self.elgamal.to_bytes(point1.x),
            self.elgamal.to_bytes(point1.y),
            self.elgamal.to_bytes(point2.x),
            self.elgamal.to_bytes(point2.y)
        ])

    def ecc256_decrypt(self, private_key: ECCKey, ciphertext: Union[Tuple[Point, Point], bytes]) -> bytes:
        if isinstance(ciphertext, bytes):
            ciphertext = (
                Point(
                    x=int.from_bytes(ciphertext[:32], 'big'),
                    y=int.from_bytes(ciphertext[32:64], 'big'),
                    curve=self.curve
                ),
                Point(
                    x=int.from_bytes(ciphertext[64:96], 'big'),
                    y=int.from_bytes(ciphertext[96:128], 'big'),
                    curve=self.curve
                )
            )
        if not isinstance(ciphertext, Tuple):
            raise ValueError(f"Expecting Tuple[Point, Point] or Bytes input, got {ciphertext!r}")

        decrypted = self.elgamal.decrypt(ciphertext, int(private_key.key.d))
        return self.elgamal.to_bytes(decrypted.x)

    @staticmethod
    def ecc256_sign(private_key: Union[ECCKey, EccKey], data: Union[SHA256Hash, bytes]) -> bytes:
        if isinstance(private_key, ECCKey):
            private_key = private_key.key
        if not isinstance(private_key, EccKey):
            raise ValueError(f"Expecting ECCKey or EccKey input, got {private_key!r}")

        if isinstance(data, bytes):
            data = SHA256.new(data)
        if not isinstance(data, SHA256Hash):
            raise ValueError(f"Expecting SHA256Hash or Bytes input, got {data!r}")

        signer = DSS.new(private_key, 'fips-186-3')
        return signer.sign(data)

    @staticmethod
    def ecc256_verify(public_key: Union[ECCKey, EccKey], data: Union[SHA256Hash, bytes], signature: bytes) -> bool:
        if isinstance(public_key, ECCKey):
            public_key = public_key.key
        if not isinstance(public_key, EccKey):
            raise ValueError(f"Expecting ECCKey or EccKey input, got {public_key!r}")

        if isinstance(data, bytes):
            data = SHA256.new(data)
        if not isinstance(data, SHA256Hash):
            raise ValueError(f"Expecting SHA256Hash or Bytes input, got {data!r}")

        verifier = DSS.new(public_key, 'fips-186-3')
        try:
            verifier.verify(data, signature)
            return True
        except ValueError:
            return False
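A small self-contained sketch of the ECDSA helpers above; the throwaway key here is generated on the spot purely for illustration, not a real device key:

```python
from Crypto.PublicKey import ECC

key = ECC.generate(curve="P-256")           # ad-hoc P-256 key, illustration only
sig = Crypto.ecc256_sign(key, b"payload")   # hashes with SHA-256, signs per FIPS 186-3
assert Crypto.ecc256_verify(key.public_key(), b"payload", sig)
```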
@ -1,41 +0,0 @@
from construct import Struct, Const, Int8ub, Bytes, this, Int32ub, Switch, Embedded


class DeviceStructs:
    magic = Const(b"PRD")

    # was never in production
    v1 = Struct(
        "group_key_length" / Int32ub,
        "group_key" / Bytes(this.group_key_length),
        "group_certificate_length" / Int32ub,
        "group_certificate" / Bytes(this.group_certificate_length)
    )

    v2 = Struct(
        "group_certificate_length" / Int32ub,
        "group_certificate" / Bytes(this.group_certificate_length),
        "encryption_key" / Bytes(96),
        "signing_key" / Bytes(96),
    )

    v3 = Struct(
        "group_key" / Bytes(96),
        "encryption_key" / Bytes(96),
        "signing_key" / Bytes(96),
        "group_certificate_length" / Int32ub,
        "group_certificate" / Bytes(this.group_certificate_length),
    )

    prd = Struct(
        "signature" / magic,
        "version" / Int8ub,
        Embedded(Switch(
            lambda ctx: ctx.version,
            {
                1: v1,
                2: v2,
                3: v3
            }
        ))
    )
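For illustration, parsing a device blob with these declarations might look like the sketch below. Note that `Embedded` only exists in construct 2.9.x and older (it was removed in 2.10), so this assumes such a version; `DEVICE.prd` is a placeholder path:

```python
raw = open("DEVICE.prd", "rb").read()
parsed = DeviceStructs.prd.parse(raw)  # Switch dispatches on the version byte
print(parsed.version, len(parsed.group_certificate))
```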
@ -1,44 +0,0 @@
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "pyplayready"
version = "0.5.0"
description = "pyplayready CDM (Content Decryption Module) implementation in Python."
license = "CC BY-NC-ND 4.0"
authors = ["DevLARLEY, Erevoc", "DevataDev"]
readme = "README.md"
repository = "https://github.com/ready-dl/pyplayready"
keywords = ["python", "drm", "playready", "microsoft"]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "Intended Audience :: End Users/Desktop",
    "Natural Language :: English",
    "Operating System :: OS Independent",
    "Topic :: Multimedia :: Video",
    "Topic :: Security :: Cryptography",
    "Topic :: Software Development :: Libraries :: Python Modules"
]
include = [
    { path = "README.md", format = "sdist" },
    { path = "LICENSE", format = "sdist" },
]

[tool.poetry.urls]
"Issues" = "https://github.com/ready-dl/pyplayready/issues"

[tool.poetry.dependencies]
python = ">=3.8,<4.0"
requests = "^2.32.3"
pycryptodome = "^3.21.0"
construct = "^2.8.8"
ECPy = "^1.2.5"
click = "^8.1.7"
xmltodict = "^0.14.2"
PyYAML = "^6.0.1"
aiohttp = {version = "^3.9.1", optional = true}

[tool.poetry.scripts]
pyplayready = "pyplayready.main:main"
@ -1,166 +0,0 @@
<p align="center">
    <img src="docs/images/widevine_icon_24.png"> <a href="https://github.com/devine-dl/pywidevine">pywidevine</a>
    <br/>
    <sup><em>Python Widevine CDM implementation</em></sup>
</p>

<p align="center">
    <a href="https://github.com/devine-dl/pywidevine/actions/workflows/ci.yml">
        <img src="https://github.com/devine-dl/pywidevine/actions/workflows/ci.yml/badge.svg" alt="Build status">
    </a>
    <a href="https://pypi.org/project/pywidevine">
        <img src="https://img.shields.io/badge/python-3.8%2B-informational" alt="Python version">
    </a>
    <a href="https://deepsource.io/gh/devine-dl/pywidevine">
        <img src="https://deepsource.io/gh/devine-dl/pywidevine.svg/?label=active+issues" alt="DeepSource">
    </a>
</p>
<p align="center">
    <a href="https://github.com/astral-sh/ruff">
        <img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Linter: Ruff">
    </a>
    <a href="https://python-poetry.org">
        <img src="https://img.shields.io/endpoint?url=https://python-poetry.org/badge/v0.json" alt="Dependency management: Poetry">
    </a>
</p>

## Features

- 🚀 Seamless Installation via [pip](#installation)
- 🛡️ Robust Security with message signature verification
- 🙈 Privacy Mode with Service Certificates
- 🌐 Servable CDM API Server and Client with Authentication
- 📦 Custom provision serialization format (WVD v2)
- 🧰 Create, parse, or convert PSSH headers with ease
- 🗃️ User-friendly YAML configuration
- ❤️ Forever FOSS!

## Installation

```shell
$ pip install pywidevine
```

> **Note**
> If pip gives you a warning about a path not being in your PATH environment variable, promptly add that path and
> close all open command prompt/terminal windows, or the `pywidevine` CLI won't work as it will not be found.

Voilà 🎉 — You now have the `pywidevine` package installed!
You can now import pywidevine in scripts ([see below](#usage)).
A command-line interface is also available, try `pywidevine --help`.

## Usage

The following is a minimal example of using pywidevine in a script to get a License for Bitmovin's
Art of Motion Demo.

```py
from pywidevine.cdm import Cdm
from pywidevine.device import Device
from pywidevine.pssh import PSSH

import requests

# prepare pssh
pssh = PSSH("AAAAW3Bzc2gAAAAA7e+LqXnWSs6jyCfc1R0h7QAAADsIARIQ62dqu8s0Xpa"
            "7z2FmMPGj2hoNd2lkZXZpbmVfdGVzdCIQZmtqM2xqYVNkZmFsa3IzaioCSEQyAA==")

# load device
device = Device.load("C:/Path/To/A/Provision.wvd")

# load cdm
cdm = Cdm.from_device(device)

# open cdm session
session_id = cdm.open()

# get license challenge
challenge = cdm.get_license_challenge(session_id, pssh)

# send license challenge (assuming a generic license server SDK with no API front)
licence = requests.post("https://...", data=challenge)
licence.raise_for_status()

# parse the returned license
cdm.parse_license(session_id, licence.content)

# print keys
for key in cdm.get_keys(session_id):
    print(f"[{key.type}] {key.kid.hex}:{key.key.hex()}")

# close session, disposes of session data
cdm.close(session_id)
```
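A short follow-on sketch of the Privacy Mode feature mentioned in the note below, reusing the same session as above (treat the exact keyword argument names as an assumption about this Cdm version's API):

```py
# encrypt the Client ID in the challenge with a service certificate (Privacy Mode)
cdm.set_service_certificate(session_id, Cdm.staging_privacy_cert)
challenge = cdm.get_license_challenge(session_id, pssh, privacy_mode=True)
```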

> **Note**
> There are various features not shown in this specific example like:
>
> - Privacy Mode
> - Setting Service Certificates
> - Remote CDMs and Serving
> - Choosing a License Type to request
> - Creating WVD files
> - and much more!
>
> Take a look at the methods available in the [Cdm class](/pywidevine/cdm.py) and their doc-strings for
> further information. For more examples see the [CLI functions](/pywidevine/main.py) which use a lot
> of the previously mentioned features.

## Disclaimer

1. This project requires a valid Google-provisioned Private Key and Client Identification blob which are not
   provided by this project.
2. Public test provisions are available and provided by Google to use for testing projects such as this one.
3. License Servers have the ability to block requests from any provision, and are likely already blocking test
   provisions on production endpoints.
4. This project does not condone piracy or any action against the terms of the DRM systems.
5. All efforts in this project have been the result of Reverse-Engineering, Publicly available research, and Trial
   & Error.

## Key and Output Security

*Licenses, Content Keys, and Decrypted Data are not secure in this CDM implementation.*

The Content Decryption Module is meant to do all downloading, decrypting, and decoding of content, not just license
acquisition. This Python implementation only does License Acquisition within the CDM.

What would be a 'Decrypt Frame' call elsewhere would be more of a 'Decrypt File' in this implementation. Just
returning the original file in plain text defeats the point of the DRM. Even if 'Decrypt File' was somehow secure, the
Content Keys used to decrypt the files are already exposed to the caller anyway, allowing them to decrypt manually.

An attack on a 'Decrypt Frame' system would be analogous to doing an HDMI capture or similar attack. This is because it
would require re-encoding the video by splicing each individual frame with the right frame-rate, syncing to audio, and
more. A 'Decrypt Video' system, by contrast, would be analogous to downloading a video and passing it through a script:
not much of an attack, if at all. The only protection against a system like this would be monitoring the provision and
acquisition of licenses and preventing them. This can be done by revoking the device provision, or the user or their
authorization to the service.

There isn't any immediate way to secure either Key or Decrypted information within a Python environment that is not
hardware backed. Even if obfuscation or some other form of Security by Obscurity was used, this is a software-based
Content Protection Module (in Python no less) with no hardware-backed security. It would be incredibly trivial to break
any sort of protection against retrieving the original video data.

Though, it's not impossible. Google's Chrome Browser CDM is a simple library extension file programmed in C++ that has
been improving its security using math and obscurity for years. It's getting harder and harder to break, with its latest
versions only being beaten by brute-force style methods. However, they have a huge team of very skilled workers, and
making a CDM in C++ has immediate security benefits and a lot of methods to obscure and obfuscate the code.

## Contributors

<a href="https://github.com/rlaphoenix"><img src="https://images.weserv.nl/?url=avatars.githubusercontent.com/u/17136956?v=4&h=25&w=25&fit=cover&mask=circle&maxage=7d" alt=""/></a>
<a href="https://github.com/mediaminister"><img src="https://images.weserv.nl/?url=avatars.githubusercontent.com/u/45148099?v=4&h=25&w=25&fit=cover&mask=circle&maxage=7d" alt=""/></a>
<a href="https://github.com/sr0lle"><img src="https://images.weserv.nl/?url=avatars.githubusercontent.com/u/111277375?v=4&h=25&w=25&fit=cover&mask=circle&maxage=7d" alt=""/></a>

## Licensing

This software is licensed under the terms of [GNU General Public License, Version 3.0](LICENSE).
You can find a copy of the license in the LICENSE file in the root folder.

- Widevine Icon © Google.
- Props to the awesome community for their shared research and insight into the Widevine Protocol and Key Derivation.

* * *

© rlaphoenix 2022-2023
@ -1,86 +0,0 @@
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "pywidevine"
version = "1.8.0"
description = "Widevine CDM (Content Decryption Module) implementation in Python."
license = "GPL-3.0-only"
authors = ["rlaphoenix <rlaphoenix@pm.me>"]
readme = "README.md"
repository = "https://github.com/devine-dl/pywidevine"
keywords = ["python", "drm", "widevine", "google"]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "Intended Audience :: End Users/Desktop",
    "Natural Language :: English",
    "Operating System :: OS Independent",
    "Topic :: Multimedia :: Video",
    "Topic :: Security :: Cryptography",
    "Topic :: Software Development :: Libraries :: Python Modules"
]
include = [
    { path = "CHANGELOG.md", format = "sdist" },
    { path = "README.md", format = "sdist" },
    { path = "LICENSE", format = "sdist" },
]

[tool.poetry.urls]
"Issues" = "https://github.com/devine-dl/pywidevine/issues"
"Discussions" = "https://github.com/devine-dl/pywidevine/discussions"
"Changelog" = "https://github.com/devine-dl/pywidevine/blob/master/CHANGELOG.md"

[tool.poetry.dependencies]
python = ">=3.8,<4.0"
protobuf = "^4.25.1"
pycryptodome = "^3.19.0"
click = "^8.1.7"
requests = "^2.31.0"
Unidecode = "^1.3.7"
PyYAML = "^6.0.1"
aiohttp = {version = "^3.9.1", optional = true}

[tool.poetry.group.dev.dependencies]
pre-commit = "^3.5.0"
mypy = "^1.7.1"
mypy-protobuf = "^3.5.0"
types-protobuf = "^4.24.0.4"
types-requests = "^2.31.0.10"
types-PyYAML = "^6.0.12.12"
isort = "^5.12.0"
ruff = "~0.1.7"

[tool.poetry.extras]
serve = ["aiohttp"]

[tool.poetry.scripts]
pywidevine = "pywidevine.main:main"

[tool.ruff]
extend-exclude = [
    "*_pb2.py",
    "*.pyi",
]
force-exclude = true
line-length = 120
select = ["E4", "E7", "E9", "F", "W"]

[tool.ruff.extend-per-file-ignores]
"pywidevine/__init__.py" = ["F403"]

[tool.isort]
line_length = 118
extend_skip_glob = ["*_pb2.py", "*.pyi"]

[tool.mypy]
check_untyped_defs = true
disallow_incomplete_defs = true
disallow_untyped_defs = true
exclude = [
    '_pb2.pyi?$'  # generated protobuffer files
]
follow_imports = "silent"
ignore_missing_imports = true
no_implicit_optional = true
@ -1,8 +0,0 @@
from .cdm import *
from .device import *
from .key import *
from .pssh import *
from .remotecdm import *
from .session import *

__version__ = "1.8.0"
@ -1,658 +0,0 @@
from __future__ import annotations

import base64
import binascii
import random
import subprocess
import sys
import time
from pathlib import Path
from typing import Optional, Union
from uuid import UUID

from Crypto.Cipher import AES, PKCS1_OAEP
from Crypto.Hash import CMAC, HMAC, SHA1, SHA256
from Crypto.PublicKey import RSA
from Crypto.Random import get_random_bytes
from Crypto.Signature import pss
from Crypto.Util import Padding
from google.protobuf.message import DecodeError

from pywidevine.device import Device, DeviceTypes
from pywidevine.exceptions import (InvalidContext, InvalidInitData, InvalidLicenseMessage, InvalidLicenseType,
                                   InvalidSession, NoKeysLoaded, SignatureMismatch, TooManySessions)
from pywidevine.key import Key
from pywidevine.license_protocol_pb2 import (ClientIdentification, DrmCertificate, EncryptedClientIdentification,
                                             License, LicenseRequest, LicenseType, SignedDrmCertificate,
                                             SignedMessage)
from pywidevine.pssh import PSSH
from pywidevine.session import Session
from pywidevine.utils import get_binary_path


class Cdm:
    uuid = UUID(bytes=b"\xed\xef\x8b\xa9\x79\xd6\x4a\xce\xa3\xc8\x27\xdc\xd5\x1d\x21\xed")
    urn = f"urn:uuid:{uuid}"
    key_format = urn
    service_certificate_challenge = b"\x08\x04"
    common_privacy_cert = (
        # Used by Google's production license server (license.google.com)
        # Not publicly accessible directly, but a lot of services have their own gateways to it
"CAUSxwUKwQIIAxIQFwW5F8wSBIaLBjM6L3cqjBiCtIKSBSKOAjCCAQoCggEBAJntWzsyfateJO/DtiqVtZhSCtW8yzdQPgZFuBTYdrjfQFEE"
"Qa2M462xG7iMTnJaXkqeB5UpHVhYQCOn4a8OOKkSeTkwCGELbxWMh4x+Ib/7/up34QGeHleB6KRfRiY9FOYOgFioYHrc4E+shFexN6jWfM3r"
"M3BdmDoh+07svUoQykdJDKR+ql1DghjduvHK3jOS8T1v+2RC/THhv0CwxgTRxLpMlSCkv5fuvWCSmvzu9Vu69WTi0Ods18Vcc6CCuZYSC4NZ"
"7c4kcHCCaA1vZ8bYLErF8xNEkKdO7DevSy8BDFnoKEPiWC8La59dsPxebt9k+9MItHEbzxJQAZyfWgkCAwEAAToUbGljZW5zZS53aWRldmlu"
"ZS5jb20SgAOuNHMUtag1KX8nE4j7e7jLUnfSSYI83dHaMLkzOVEes8y96gS5RLknwSE0bv296snUE5F+bsF2oQQ4RgpQO8GVK5uk5M4PxL/C"
"CpgIqq9L/NGcHc/N9XTMrCjRtBBBbPneiAQwHL2zNMr80NQJeEI6ZC5UYT3wr8+WykqSSdhV5Cs6cD7xdn9qm9Nta/gr52u/DLpP3lnSq8x2"
"/rZCR7hcQx+8pSJmthn8NpeVQ/ypy727+voOGlXnVaPHvOZV+WRvWCq5z3CqCLl5+Gf2Ogsrf9s2LFvE7NVV2FvKqcWTw4PIV9Sdqrd+QLeF"
"Hd/SSZiAjjWyWOddeOrAyhb3BHMEwg2T7eTo/xxvF+YkPj89qPwXCYcOxF+6gjomPwzvofcJOxkJkoMmMzcFBDopvab5tDQsyN9UPLGhGC98"
"X/8z8QSQ+spbJTYLdgFenFoGq47gLwDS6NWYYQSqzE3Udf2W7pzk4ybyG4PHBYV3s4cyzdq8amvtE/sNSdOKReuHpfQ=")
    staging_privacy_cert = (
        # Used by Google's staging license server (staging.google.com)
        # This can be publicly accessed without authentication using https://cwip-shaka-proxy.appspot.com/no_auth
"CAUSxQUKvwIIAxIQKHA0VMAI9jYYredEPbbEyBiL5/mQBSKOAjCCAQoCggEBALUhErjQXQI/zF2V4sJRwcZJtBd82NK+7zVbsGdD3mYePSq8"
"MYK3mUbVX9wI3+lUB4FemmJ0syKix/XgZ7tfCsB6idRa6pSyUW8HW2bvgR0NJuG5priU8rmFeWKqFxxPZmMNPkxgJxiJf14e+baq9a1Nuip+"
"FBdt8TSh0xhbWiGKwFpMQfCB7/+Ao6BAxQsJu8dA7tzY8U1nWpGYD5LKfdxkagatrVEB90oOSYzAHwBTK6wheFC9kF6QkjZWt9/v70JIZ2fz"
"PvYoPU9CVKtyWJOQvuVYCPHWaAgNRdiTwryi901goMDQoJk87wFgRwMzTDY4E5SGvJ2vJP1noH+a2UMCAwEAAToSc3RhZ2luZy5nb29nbGUu"
"Y29tEoADmD4wNSZ19AunFfwkm9rl1KxySaJmZSHkNlVzlSlyH/iA4KrvxeJ7yYDa6tq/P8OG0ISgLIJTeEjMdT/0l7ARp9qXeIoA4qprhM19"
"ccB6SOv2FgLMpaPzIDCnKVww2pFbkdwYubyVk7jei7UPDe3BKTi46eA5zd4Y+oLoG7AyYw/pVdhaVmzhVDAL9tTBvRJpZjVrKH1lexjOY9Dv"
"1F/FJp6X6rEctWPlVkOyb/SfEJwhAa/K81uDLyiPDZ1Flg4lnoX7XSTb0s+Cdkxd2b9yfvvpyGH4aTIfat4YkF9Nkvmm2mU224R1hx0WjocL"
"sjA89wxul4TJPS3oRa2CYr5+DU4uSgdZzvgtEJ0lksckKfjAF0K64rPeytvDPD5fS69eFuy3Tq26/LfGcF96njtvOUA4P5xRFtICogySKe6W"
"nCUZcYMDtQ0BMMM1LgawFNg4VA+KDCJ8ABHg9bOOTimO0sswHrRWSWX1XF15dXolCk65yEqz5lOfa2/fVomeopkU")
    root_signed_cert = SignedDrmCertificate()
    root_signed_cert.ParseFromString(base64.b64decode(
"CpwDCAASAQAY3ZSIiwUijgMwggGKAoIBgQC0/jnDZZAD2zwRlwnoaM3yw16b8udNI7EQ24dl39z7nzWgVwNTTPZtNX2meNuzNtI/nECplSZy"
"f7i+Zt/FIZh4FRZoXS9GDkPLioQ5q/uwNYAivjQji6tTW3LsS7VIaVM+R1/9Cf2ndhOPD5LWTN+udqm62SIQqZ1xRdbX4RklhZxTmpfrhNfM"
"qIiCIHAmIP1+QFAn4iWTb7w+cqD6wb0ptE2CXMG0y5xyfrDpihc+GWP8/YJIK7eyM7l97Eu6iR8nuJuISISqGJIOZfXIbBH/azbkdDTKjDOx"
"+biOtOYS4AKYeVJeRTP/Edzrw1O6fGAaET0A+9K3qjD6T15Id1sX3HXvb9IZbdy+f7B4j9yCYEy/5CkGXmmMOROtFCXtGbLynwGCDVZEiMg1"
"7B8RsyTgWQ035Ec86kt/lzEcgXyUikx9aBWE/6UI/Rjn5yvkRycSEbgj7FiTPKwS0ohtQT3F/hzcufjUUT4H5QNvpxLoEve1zqaWVT94tGSC"
"UNIzX5ECAwEAARKAA1jx1k0ECXvf1+9dOwI5F/oUNnVKOGeFVxKnFO41FtU9v0KG9mkAds2T9Hyy355EzUzUrgkYU0Qy7OBhG+XaE9NVxd0a"
"y5AeflvG6Q8in76FAv6QMcxrA4S9IsRV+vXyCM1lQVjofSnaBFiC9TdpvPNaV4QXezKHcLKwdpyywxXRESYqI3WZPrl3IjINvBoZwdVlkHZV"
"dA8OaU1fTY8Zr9/WFjGUqJJfT7x6Mfiujq0zt+kw0IwKimyDNfiKgbL+HIisKmbF/73mF9BiC9yKRfewPlrIHkokL2yl4xyIFIPVxe9enz2F"
"RXPia1BSV0z7kmxmdYrWDRuu8+yvUSIDXQouY5OcCwEgqKmELhfKrnPsIht5rvagcizfB0fbiIYwFHghESKIrNdUdPnzJsKlVshWTwApHQh7"
"evuVicPumFSePGuUBRMS9nG5qxPDDJtGCHs9Mmpoyh6ckGLF7RC5HxclzpC5bc3ERvWjYhN0AqdipPpV2d7PouaAdFUGSdUCDA=="
    ))
    root_cert = DrmCertificate()
    root_cert.ParseFromString(root_signed_cert.drm_certificate)

    MAX_NUM_OF_SESSIONS = 16

    def __init__(
        self,
        device_type: Union[DeviceTypes, str],
        system_id: int,
        security_level: int,
        client_id: ClientIdentification,
        rsa_key: RSA.RsaKey
    ):
        """Initialize a Widevine Content Decryption Module (CDM)."""
        if not device_type:
            raise ValueError("Device Type must be provided")
        if isinstance(device_type, str):
            device_type = DeviceTypes[device_type]
        if not isinstance(device_type, DeviceTypes):
            raise TypeError(f"Expected device_type to be a {DeviceTypes!r} not {device_type!r}")

        if not system_id:
            raise ValueError("System ID must be provided")
        if not isinstance(system_id, int):
            raise TypeError(f"Expected system_id to be a {int} not {system_id!r}")

        if not security_level:
            raise ValueError("Security Level must be provided")
        if not isinstance(security_level, int):
            raise TypeError(f"Expected security_level to be a {int} not {security_level!r}")

        if not client_id:
            raise ValueError("Client ID must be provided")
        if not isinstance(client_id, ClientIdentification):
            raise TypeError(f"Expected client_id to be a {ClientIdentification} not {client_id!r}")

        if not rsa_key:
            raise ValueError("RSA Key must be provided")
        if not isinstance(rsa_key, RSA.RsaKey):
            raise TypeError(f"Expected rsa_key to be a {RSA.RsaKey} not {rsa_key!r}")

        self.device_type = device_type
        self.system_id = system_id
        self.security_level = security_level
        self.__client_id = client_id

        self.__signer = pss.new(rsa_key)
        self.__decrypter = PKCS1_OAEP.new(rsa_key)

        self.__sessions: dict[bytes, Session] = {}

    @classmethod
    def from_device(cls, device: Device) -> Cdm:
        """Initialize a Widevine CDM from a Widevine Device (.wvd) file."""
        return cls(
            device_type=device.type,
            system_id=device.system_id,
            security_level=device.security_level,
            client_id=device.client_id,
            rsa_key=device.private_key
        )

    def open(self) -> bytes:
        """
        Open a Widevine Content Decryption Module (CDM) session.

        Raises:
            TooManySessions: If the session cannot be opened as the limit has been reached.
        """
        if len(self.__sessions) > self.MAX_NUM_OF_SESSIONS:
            raise TooManySessions(f"Too many Sessions open ({self.MAX_NUM_OF_SESSIONS}).")

        session = Session(len(self.__sessions) + 1)
        self.__sessions[session.id] = session

        return session.id

    def close(self, session_id: bytes) -> None:
        """
        Close a Widevine Content Decryption Module (CDM) session.

        Parameters:
            session_id: Session identifier.

        Raises:
            InvalidSession: If the Session identifier is invalid.
        """
        session = self.__sessions.get(session_id)
        if not session:
            raise InvalidSession(f"Session identifier {session_id!r} is invalid.")
        del self.__sessions[session_id]

    def set_service_certificate(self, session_id: bytes, certificate: Optional[Union[bytes, str]]) -> Optional[str]:
        """
        Set a Service Privacy Certificate for Privacy Mode. (optional but recommended)

        The Service Certificate is used to encrypt Client IDs in Licenses. This is also
        known as Privacy Mode and may be required for some services or for some devices.
        Chrome CDM requires it as of the enforcement of VMP (Verified Media Path).

        We reject direct DrmCertificates as they do not have signature verification and
        cannot be verified. You must provide a SignedDrmCertificate or a SignedMessage
        containing a SignedDrmCertificate.

        Parameters:
            session_id: Session identifier.
            certificate: SignedDrmCertificate (or SignedMessage containing one) in Base64
                or Bytes form obtained from the Service. Some services have their own,
                but most use the common privacy cert (common_privacy_cert). If None, it
                will remove the current certificate.

        Raises:
            InvalidSession: If the Session identifier is invalid.
            DecodeError: If the certificate could not be parsed as a SignedDrmCertificate
                nor a SignedMessage containing a SignedDrmCertificate.
            SignatureMismatch: If the Signature of the SignedDrmCertificate does not
                match the underlying DrmCertificate.

        Returns the Service Provider ID of the verified DrmCertificate if successful.
        If certificate is None, it will return the now-unset certificate's Provider ID,
        or None if no certificate was set yet.
        """
        session = self.__sessions.get(session_id)
        if not session:
            raise InvalidSession(f"Session identifier {session_id!r} is invalid.")

        if certificate is None:
            if session.service_certificate:
                drm_certificate = DrmCertificate()
                drm_certificate.ParseFromString(session.service_certificate.drm_certificate)
                provider_id = drm_certificate.provider_id
            else:
                provider_id = None
            session.service_certificate = None
            return provider_id

        if isinstance(certificate, str):
            try:
                certificate = base64.b64decode(certificate)  # assuming base64
            except binascii.Error:
                raise DecodeError("Could not decode certificate string as Base64, expected bytes.")
        elif not isinstance(certificate, bytes):
            raise DecodeError(f"Expecting Certificate to be bytes, not {certificate!r}")

        signed_message = SignedMessage()
        signed_drm_certificate = SignedDrmCertificate()
        drm_certificate = DrmCertificate()

        try:
            signed_message.ParseFromString(certificate)
            if all(
                # See https://github.com/devine-dl/pywidevine/issues/41
                bytes(chunk) == signed_message.SerializeToString()
                for chunk in zip(*[iter(certificate)] * len(signed_message.SerializeToString()))
            ):
                signed_drm_certificate.ParseFromString(signed_message.msg)
            else:
                signed_drm_certificate.ParseFromString(certificate)
                if signed_drm_certificate.SerializeToString() != certificate:
                    raise DecodeError("partial parse")
        except DecodeError as e:
            # could be a direct unsigned DrmCertificate, but reject those anyway
            raise DecodeError(f"Could not parse certificate as a SignedDrmCertificate, {e}")

        try:
            pss. \
                new(RSA.import_key(self.root_cert.public_key)). \
                verify(
                    msg_hash=SHA1.new(signed_drm_certificate.drm_certificate),
                    signature=signed_drm_certificate.signature
                )
        except (ValueError, TypeError):
            raise SignatureMismatch("Signature Mismatch on SignedDrmCertificate, rejecting certificate")

        try:
            drm_certificate.ParseFromString(signed_drm_certificate.drm_certificate)
            if drm_certificate.SerializeToString() != signed_drm_certificate.drm_certificate:
                raise DecodeError("partial parse")
        except DecodeError as e:
            raise DecodeError(f"Could not parse signed certificate's message as a DrmCertificate, {e}")

        # must be stored as a SignedDrmCertificate as the signature needs to be kept for RemoteCdm
        # if we store as DrmCertificate (no signature) then RemoteCdm cannot verify the Certificate
        session.service_certificate = signed_drm_certificate
        return drm_certificate.provider_id
|
||||
|
||||
    def get_service_certificate(self, session_id: bytes) -> Optional[SignedDrmCertificate]:
        """
        Get the currently set Service Privacy Certificate of the Session.

        Parameters:
            session_id: Session identifier.

        Raises:
            InvalidSession: If the Session identifier is invalid.

        Returns the Service Certificate if one is set, otherwise None.
        """
        session = self.__sessions.get(session_id)
        if not session:
            raise InvalidSession(f"Session identifier {session_id!r} is invalid.")

        return session.service_certificate

    def get_license_challenge(
        self,
        session_id: bytes,
        pssh: PSSH,
        license_type: str = "STREAMING",
        privacy_mode: bool = True
    ) -> bytes:
        """
        Get a License Request (Challenge) to send to a License Server.

        Parameters:
            session_id: Session identifier.
            pssh: PSSH Object to get the init data from.
            license_type: Type of License you wish to exchange, often `STREAMING`.
                - "STREAMING": Normal one-time-use license.
                - "OFFLINE": Offline-use license, usually for downloaded content.
                - "AUTOMATIC": License type decision is left to the provider.
            privacy_mode: Encrypt the Client ID using the Privacy Certificate. If the
                privacy certificate is not set yet, this does nothing.

        Raises:
            InvalidSession: If the Session identifier is invalid.
            InvalidInitData: If the Init Data (or PSSH box) provided is invalid.
            InvalidLicenseType: If the license_type value is not a valid License Type.
                It must be one of the LicenseType enum's key names, as a string.

        Returns a SignedMessage containing a LicenseRequest message. It is signed
        with the Device's private key.
        """
        session = self.__sessions.get(session_id)
        if not session:
            raise InvalidSession(f"Session identifier {session_id!r} is invalid.")

        if not pssh:
            raise InvalidInitData("A pssh must be provided.")
        if not isinstance(pssh, PSSH):
            raise InvalidInitData(f"Expected pssh to be a {PSSH}, not {pssh!r}")

        if not isinstance(license_type, str):
            raise InvalidLicenseType(f"Expected license_type to be a {str}, not {license_type!r}")
        if license_type not in LicenseType.keys():
            raise InvalidLicenseType(
                f"Invalid license_type value of '{license_type}'. "
                f"Available values: {LicenseType.keys()}"
            )

        if self.device_type == DeviceTypes.ANDROID:
            # OEMCrypto's request_id seems to be in AES CTR Counter block form with no suffix
            # Bytes 5-8 do not seem random; in real tests they have been consecutive \x00 or \xFF
            # Real example: A0DCE548000000000500000000000000
            request_id = (get_random_bytes(4) + (b"\x00" * 4))  # (?)
            request_id += session.number.to_bytes(8, "little")  # counter
            # as you can see in the real example, it is stored as uppercase hex and re-encoded
            # it's really 16 bytes of data, but it's stored as a 32-char HEX string (32 bytes)
            request_id = request_id.hex().upper().encode()
        else:
            request_id = get_random_bytes(16)

        license_request = LicenseRequest(
            client_id=(
                self.__client_id
            ) if not (session.service_certificate and privacy_mode) else None,
            encrypted_client_id=self.encrypt_client_id(
                client_id=self.__client_id,
                service_certificate=session.service_certificate
            ) if session.service_certificate and privacy_mode else None,
            content_id=LicenseRequest.ContentIdentification(
                widevine_pssh_data=LicenseRequest.ContentIdentification.WidevinePsshData(
                    pssh_data=[pssh.init_data],  # either a WidevineCencHeader or custom data
                    license_type=license_type,
                    request_id=request_id
                )
            ),
            type="NEW",
            request_time=int(time.time()),
            protocol_version="VERSION_2_1",
            key_control_nonce=random.randrange(1, 2 ** 31),
        ).SerializeToString()

        signed_license_request = SignedMessage(
            type="LICENSE_REQUEST",
            msg=license_request,
            signature=self.__signer.sign(SHA1.new(license_request))
        ).SerializeToString()

        session.context[request_id] = self.derive_context(license_request)

        return signed_license_request

    def parse_license(self, session_id: bytes, license_message: Union[SignedMessage, bytes, str]) -> None:
        """
        Load Keys from a License Message from a License Server Response.

        License Messages can only be loaded a single time. An InvalidContext error will
        be raised if you attempt to parse a License Message more than once.

        Parameters:
            session_id: Session identifier.
            license_message: A SignedMessage containing a License message.

        Raises:
            InvalidSession: If the Session identifier is invalid.
            InvalidLicenseMessage: The License message could not be decoded as a Signed
                Message or License message.
            InvalidContext: If the Session has no Context Data. This is likely to happen
                if the License Challenge was not made by this CDM instance, or was not
                made by this CDM at all. It can also happen if the License Message was
                already parsed, as the Context Data is removed once used.
            SignatureMismatch: If the Signature of the License SignedMessage does not
                match the underlying License.
        """
        session = self.__sessions.get(session_id)
        if not session:
            raise InvalidSession(f"Session identifier {session_id!r} is invalid.")

        if not license_message:
            raise InvalidLicenseMessage("Cannot parse an empty license_message")

        if isinstance(license_message, str):
            try:
                license_message = base64.b64decode(license_message)
            except (binascii.Error, binascii.Incomplete) as e:
                raise InvalidLicenseMessage(f"Could not decode license_message as Base64, {e}")

        if isinstance(license_message, bytes):
            signed_message = SignedMessage()
            try:
                signed_message.ParseFromString(license_message)
                if signed_message.SerializeToString() != license_message:
                    raise DecodeError(license_message)
            except DecodeError as e:
                raise InvalidLicenseMessage(f"Could not parse license_message as a SignedMessage, {e}")
            license_message = signed_message

        if not isinstance(license_message, SignedMessage):
            raise InvalidLicenseMessage(f"Expecting license_response to be a SignedMessage, got {license_message!r}")

        if license_message.type != SignedMessage.MessageType.Value("LICENSE"):
            raise InvalidLicenseMessage(
                f"Expecting a LICENSE message, not a "
                f"'{SignedMessage.MessageType.Name(license_message.type)}' message."
            )

        licence = License()
        licence.ParseFromString(license_message.msg)

        context = session.context.get(licence.id.request_id)
        if not context:
            raise InvalidContext("Cannot parse a license message without first making a license request")

        enc_key, mac_key_server, _ = self.derive_keys(
            *context,
            key=self.__decrypter.decrypt(license_message.session_key)
        )

        # 1. Explicitly use the original `license_message.msg` instead of re-serializing from `licence`
        #    as some differences may end up in the output due to differences in the proto schema
        # 2. The oemcrypto_core_message (unknown purpose) is part of the signature algorithm starting
        #    with OEM Crypto API v16 and, if available, must be prefixed when HMAC'ing a signature.

        computed_signature = HMAC. \
            new(mac_key_server, digestmod=SHA256). \
            update(license_message.oemcrypto_core_message or b""). \
            update(license_message.msg). \
            digest()

        if license_message.signature != computed_signature:
            raise SignatureMismatch("Signature Mismatch on License Message, rejecting license")

        session.keys = [
            Key.from_key_container(key, enc_key)
            for key in licence.key
        ]

        del session.context[licence.id.request_id]

    def get_keys(self, session_id: bytes, type_: Optional[Union[int, str]] = None) -> list[Key]:
        """
        Get Keys from the loaded License message.

        Parameters:
            session_id: Session identifier.
            type_: (optional) Key Type to filter by and return.

        Raises:
            InvalidSession: If the Session identifier is invalid.
            TypeError: If the provided type_ is an unexpected value type.
            ValueError: If the provided type_ is not a valid Key Type.
        """
        session = self.__sessions.get(session_id)
        if not session:
            raise InvalidSession(f"Session identifier {session_id!r} is invalid.")

        try:
            if isinstance(type_, str):
                type_ = License.KeyContainer.KeyType.Value(type_)
            elif isinstance(type_, int):
                License.KeyContainer.KeyType.Name(type_)  # only test
            elif type_ is not None:
                raise TypeError(f"Expected type_ to be a {License.KeyContainer.KeyType} or int, not {type_!r}")
        except ValueError as e:
            raise ValueError(f"Could not parse type_ as a {License.KeyContainer.KeyType}, {e}")

        return [
            key
            for key in session.keys
            if not type_ or key.type == License.KeyContainer.KeyType.Name(type_)
        ]

    def decrypt(
        self,
        session_id: bytes,
        input_file: Union[Path, str],
        output_file: Union[Path, str],
        temp_dir: Optional[Union[Path, str]] = None,
        exists_ok: bool = False
    ) -> int:
        """
        Decrypt a Widevine-encrypted file using Shaka-packager.
        Shaka-packager is much more stable than mp4decrypt.

        Parameters:
            session_id: Session identifier.
            input_file: File to be decrypted with the Session's currently loaded keys.
            output_file: Location to save the decrypted file.
            temp_dir: Directory to store temporary data while decrypting.
            exists_ok: Allow overwriting the output_file if it exists.

        Raises:
            ValueError: If the input or output paths have not been supplied or are
                invalid.
            FileNotFoundError: If the input file path does not exist.
            FileExistsError: If the output file path already exists. Ignored if exists_ok
                is set to True.
            NoKeysLoaded: No License was parsed for this Session, no Keys available.
            EnvironmentError: If the shaka-packager executable could not be found.
            subprocess.CalledProcessError: If the shaka-packager call returned a non-zero
                exit code.
        """
        if not input_file:
            raise ValueError("Cannot decrypt nothing, specify an input path")
        if not output_file:
            raise ValueError("Cannot decrypt nowhere, specify an output path")

        if not isinstance(input_file, (Path, str)):
            raise ValueError(f"Expecting input_file to be a Path or str, got {input_file!r}")
        if not isinstance(output_file, (Path, str)):
            raise ValueError(f"Expecting output_file to be a Path or str, got {output_file!r}")
        if not isinstance(temp_dir, (Path, str)) and temp_dir is not None:
            raise ValueError(f"Expecting temp_dir to be a Path or str, got {temp_dir!r}")

        input_file = Path(input_file)
        output_file = Path(output_file)
        temp_dir_ = Path(temp_dir) if temp_dir else None

        if not input_file.is_file():
            raise FileNotFoundError(f"Input file does not exist, {input_file}")
        if output_file.is_file() and not exists_ok:
            raise FileExistsError(f"Output file already exists, {output_file}")

        session = self.__sessions.get(session_id)
        if not session:
            raise InvalidSession(f"Session identifier {session_id!r} is invalid.")

        if not session.keys:
            raise NoKeysLoaded("No Keys are loaded yet, cannot decrypt")

        platform = {"win32": "win", "darwin": "osx"}.get(sys.platform, sys.platform)
        executable = get_binary_path("shaka-packager", f"packager-{platform}", f"packager-{platform}-x64")
        if not executable:
            raise EnvironmentError("Shaka Packager executable not found but is required")

        args = [
            f"input={input_file},stream=0,output={output_file}",
            "--enable_raw_key_decryption",
            "--keys", ",".join([
                label
                for i, key in enumerate(session.keys)
                for label in [
                    f"label=1_{i}:key_id={key.kid.hex}:key={key.key.hex()}",
                    # some services need the KID blanked, e.g., Apple TV+
                    f"label=2_{i}:key_id={'0' * 32}:key={key.key.hex()}"
                ]
                if key.type == "CONTENT"
            ])
        ]

        if temp_dir_:
            temp_dir_.mkdir(parents=True, exist_ok=True)
            args.extend(["--temp_dir", str(temp_dir_)])

        return subprocess.check_call([executable, *args])

    @staticmethod
    def encrypt_client_id(
        client_id: ClientIdentification,
        service_certificate: Union[SignedDrmCertificate, DrmCertificate],
        key: Optional[bytes] = None,
        iv: Optional[bytes] = None
    ) -> EncryptedClientIdentification:
        """Encrypt the Client ID with the Service's Privacy Certificate."""
        privacy_key = key or get_random_bytes(16)
        privacy_iv = iv or get_random_bytes(16)

        if isinstance(service_certificate, SignedDrmCertificate):
            drm_certificate = DrmCertificate()
            drm_certificate.ParseFromString(service_certificate.drm_certificate)
            service_certificate = drm_certificate
        if not isinstance(service_certificate, DrmCertificate):
            raise ValueError(f"Expecting Service Certificate to be a DrmCertificate, not {service_certificate!r}")

        encrypted_client_id = EncryptedClientIdentification(
            provider_id=service_certificate.provider_id,
            service_certificate_serial_number=service_certificate.serial_number,
            encrypted_client_id=AES.
            new(privacy_key, AES.MODE_CBC, privacy_iv).
            encrypt(Padding.pad(client_id.SerializeToString(), 16)),
            encrypted_client_id_iv=privacy_iv,
            encrypted_privacy_key=PKCS1_OAEP.
            new(RSA.importKey(service_certificate.public_key)).
            encrypt(privacy_key)
        )

        return encrypted_client_id

    @staticmethod
    def derive_context(message: bytes) -> tuple[bytes, bytes]:
        """Returns 2 Context Data used for computing the AES Encryption and HMAC Keys."""

        def _get_enc_context(msg: bytes) -> bytes:
            label = b"ENCRYPTION"
            key_size = 16 * 8  # 128-bit
            return label + b"\x00" + msg + key_size.to_bytes(4, "big")

        def _get_mac_context(msg: bytes) -> bytes:
            label = b"AUTHENTICATION"
            key_size = 32 * 8 * 2  # 512-bit
            return label + b"\x00" + msg + key_size.to_bytes(4, "big")

        return _get_enc_context(message), _get_mac_context(message)

    @staticmethod
    def derive_keys(enc_context: bytes, mac_context: bytes, key: bytes) -> tuple[bytes, bytes, bytes]:
        """
        Returns 3 keys derived from the input message.
        Key can either be a pre-provision device AES key, a provision key, or a session key.

        For provisioning:
        - enc: AES key used for unwrapping the RSA key out of the response
        - mac_key_server: HMAC-SHA256 key used for verifying the provisioning response
        - mac_key_client: HMAC-SHA256 key used for signing the provisioning request

        When used with a session key:
        - enc: decrypting content and other keys
        - mac_key_server: verifying responses
        - mac_key_client: renewals

        With key as a pre-provision device key, it can be used to provision and get an
        RSA device key and token/cert. With key as a session key (OAEP-wrapped with the
        post-provision RSA device key), it can be used to decrypt content and signing
        keys and verify licenses.
        """

        def _derive(session_key: bytes, context: bytes, counter: int) -> bytes:
            return CMAC. \
                new(session_key, ciphermod=AES). \
                update(counter.to_bytes(1, "big") + context). \
                digest()

        enc_key = _derive(key, enc_context, 1)
        mac_key_server = _derive(key, mac_context, 1)
        mac_key_server += _derive(key, mac_context, 2)
        mac_key_client = _derive(key, mac_context, 3)
        mac_key_client += _derive(key, mac_context, 4)

        return enc_key, mac_key_server, mac_key_client


__all__ = ("Cdm",)
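
# --- Usage sketch (illustrative, not part of the module) ---
# A minimal end-to-end run of the Cdm above. The .wvd path, PSSH box value, and
# license server URL are hypothetical placeholders, and module paths assume the
# upstream pywidevine package layout; real services wrap the challenge/response
# differently (headers, JSON envelopes, etc.).
import requests

from pywidevine.cdm import Cdm
from pywidevine.device import Device
from pywidevine.pssh import PSSH

device = Device.load("device.wvd")  # hypothetical path
cdm = Cdm.from_device(device)

session_id = cdm.open()
try:
    # optional but recommended: enable Privacy Mode if the service provides a certificate
    # cdm.set_service_certificate(session_id, service_cert_b64)

    pssh = PSSH("AAAAW3Bzc2gAAAAA...")  # truncated example PSSH from a manifest
    challenge = cdm.get_license_challenge(session_id, pssh, license_type="STREAMING")

    response = requests.post("https://license.example.com/getlicense", data=challenge)
    cdm.parse_license(session_id, response.content)

    for key in cdm.get_keys(session_id, "CONTENT"):
        print(f"{key.kid.hex}:{key.key.hex()}")
finally:
    cdm.close(session_id)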
@@ -1,276 +0,0 @@
from __future__ import annotations

import json
import base64
import logging
from enum import Enum
from pathlib import Path
from typing import Any, Optional, Union

from construct import BitStruct, Bytes, Const, ConstructError, Container
from construct import Enum as CEnum
from construct import Int8ub, Int16ub
from construct import Optional as COptional
from construct import Padded, Padding, Struct, this
from Crypto.PublicKey import RSA
from google.protobuf.message import DecodeError

from pywidevine.license_protocol_pb2 import ClientIdentification, DrmCertificate, FileHashes, SignedDrmCertificate


class DeviceTypes(Enum):
    CHROME = 1
    ANDROID = 2
    PLAYREADY = 3


class _Structures:
    magic = Const(b"WVD")

    header = Struct(
        "signature" / magic,
        "version" / Int8ub
    )

    # - Removed vmp and vmp_len as it should already be within the Client ID
    v2 = Struct(
        "signature" / magic,
        "version" / Const(Int8ub, 2),
        "type_" / CEnum(
            Int8ub,
            **{t.name: t.value for t in DeviceTypes}
        ),
        "security_level" / Int8ub,
        "flags" / Padded(1, COptional(BitStruct(
            # no per-device flags yet
            Padding(8)
        ))),
        "private_key_len" / Int16ub,
        "private_key" / Bytes(this.private_key_len),
        "client_id_len" / Int16ub,
        "client_id" / Bytes(this.client_id_len)
    )

    # - Removed system_id as it can be retrieved from the Client ID's DRM Certificate
    v1 = Struct(
        "signature" / magic,
        "version" / Const(Int8ub, 1),
        "type_" / CEnum(
            Int8ub,
            **{t.name: t.value for t in DeviceTypes}
        ),
        "security_level" / Int8ub,
        "flags" / Padded(1, COptional(BitStruct(
            # no per-device flags yet
            Padding(8)
        ))),
        "private_key_len" / Int16ub,
        "private_key" / Bytes(this.private_key_len),
        "client_id_len" / Int16ub,
        "client_id" / Bytes(this.client_id_len),
        "vmp_len" / Int16ub,
        "vmp" / Bytes(this.vmp_len)
    )


class Device:
    Structures = _Structures
    supported_structure = Structures.v2

    def __init__(
        self,
        *_: Any,
        type_: DeviceTypes,
        security_level: int,
        flags: Optional[dict],
        private_key: Optional[bytes],
        client_id: Optional[bytes],
        **__: Any
    ):
        """
        This is the device key data that is needed for the CDM (Content Decryption Module).

        Parameters:
            type_: Device Type
            security_level: Security level from 1 (the highest ranking) to 3 (the lowest ranking)
            flags: Extra flags
            private_key: Device Private Key
            client_id: Device Client Identification Blob
        """
        # *_ and **__ are to ignore unwanted args, like signature and version from the struct

        if not client_id:
            raise ValueError("Client ID is required, the WVD does not contain one or is malformed.")
        if not private_key:
            raise ValueError("Private Key is required, the WVD does not contain one or is malformed.")

        self.type = DeviceTypes[type_] if isinstance(type_, str) else type_
        self.security_level = security_level
        self.flags = flags or {}
        self.private_key = RSA.importKey(private_key)
        self.client_id = ClientIdentification()
        try:
            self.client_id.ParseFromString(client_id)
            if self.client_id.SerializeToString() != client_id:
                raise DecodeError("partial parse")
        except DecodeError as e:
            raise DecodeError(f"Failed to parse client_id as a ClientIdentification, {e}")

        self.vmp = FileHashes()
        if self.client_id.vmp_data:
            try:
                self.vmp.ParseFromString(self.client_id.vmp_data)
                if self.vmp.SerializeToString() != self.client_id.vmp_data:
                    raise DecodeError("partial parse")
            except DecodeError as e:
                raise DecodeError(f"Failed to parse Client ID's VMP data as a FileHashes, {e}")

        signed_drm_certificate = SignedDrmCertificate()
        drm_certificate = DrmCertificate()

        try:
            signed_drm_certificate.ParseFromString(self.client_id.token)
            if signed_drm_certificate.SerializeToString() != self.client_id.token:
                raise DecodeError("partial parse")
        except DecodeError as e:
            raise DecodeError(f"Failed to parse the Signed DRM Certificate of the Client ID, {e}")

        try:
            drm_certificate.ParseFromString(signed_drm_certificate.drm_certificate)
            if drm_certificate.SerializeToString() != signed_drm_certificate.drm_certificate:
                raise DecodeError("partial parse")
        except DecodeError as e:
            raise DecodeError(f"Failed to parse the DRM Certificate of the Client ID, {e}")

        self.system_id = drm_certificate.system_id

    def __repr__(self) -> str:
        return "{name}({items})".format(
            name=self.__class__.__name__,
            items=", ".join([f"{k}={repr(v)}" for k, v in self.__dict__.items()])
        )

    @classmethod
    def loads(cls, data: Union[bytes, str]) -> Device:
        if isinstance(data, str):
            data = base64.b64decode(data)
        if not isinstance(data, bytes):
            raise ValueError(f"Expecting Bytes or Base64 input, got {data!r}")
        return cls(**cls.supported_structure.parse(data))

    @classmethod
    def from_dir(cls, dir: Union[Path, str]) -> Device:
        dir = Path(dir)
        try:
            with open(dir / "wv.json") as fd:
                config = json.load(fd)
        except FileNotFoundError:
            raise FileNotFoundError("wv.json file is required")

        try:
            with open(dir / "device_private_key", "rb") as fd:
                private_key = fd.read()
        except FileNotFoundError:
            private_key = None

        with open(dir / "device_client_id_blob", "rb") as fd:
            client_id = fd.read()

        try:
            with open(dir / "device_vmp_blob", "rb") as fd:
                vmp = fd.read()
        except FileNotFoundError:
            vmp = None

        return cls(
            type_=getattr(DeviceTypes, config["session_id_type"].upper()),
            security_level=int(config["security_level"]),
            flags={
                "send_key_control_nonce": config.get("send_key_control_nonce", config["session_id_type"] == "android"),
            },
            private_key=private_key,
            client_id=client_id,
            # vmp is absorbed by __init__'s **__ catch-all; v2 WVDs keep VMP data inside the Client ID
            vmp=vmp,
        )

    @classmethod
    def load(cls, path: Union[Path, str]) -> Device:
        if not isinstance(path, (Path, str)):
            raise ValueError(f"Expecting Path object or path string, got {path!r}")
        with Path(path).open(mode="rb") as f:
            return cls(**cls.supported_structure.parse_stream(f))

    def dumps(self) -> bytes:
        private_key = self.private_key.export_key("DER") if self.private_key else None
        return self.supported_structure.build(dict(
            version=2,
            type_=self.type.value,
            security_level=self.security_level,
            flags=self.flags,
            private_key_len=len(private_key) if private_key else 0,
            private_key=private_key,
            client_id_len=len(self.client_id.SerializeToString()) if self.client_id else 0,
            client_id=self.client_id.SerializeToString() if self.client_id else None
        ))

    def dump(self, path: Union[Path, str]) -> None:
        if not isinstance(path, (Path, str)):
            raise ValueError(f"Expecting Path object or path string, got {path!r}")
        path = Path(path)
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_bytes(self.dumps())

    @classmethod
    def migrate(cls, data: Union[bytes, str]) -> Device:
        if isinstance(data, str):
            data = base64.b64decode(data)
        if not isinstance(data, bytes):
            raise ValueError(f"Expecting Bytes or Base64 input, got {data!r}")

        header = _Structures.header.parse(data)
        if header.version == 2:
            raise ValueError("Device Data is already migrated to the latest version.")
        if header.version == 0 or header.version > 2:
            # we have never used version 0, likely data that just so happened to use the WVD magic
            raise ValueError("Device Data does not seem to be a WVD file (v0).")

        if header.version == 1:  # v1 to v2
            v1_struct = _Structures.v1.parse(data)
            v1_struct.version = 2  # update version to 2 to allow loading
            v1_struct.flags = Container()  # blank flags that may have been used in v1

            vmp = FileHashes()
            if v1_struct.vmp:
                try:
                    vmp.ParseFromString(v1_struct.vmp)
                    if vmp.SerializeToString() != v1_struct.vmp:
                        raise DecodeError("partial parse")
                except DecodeError as e:
                    raise DecodeError(f"Failed to parse VMP data as FileHashes, {e}")
            v1_struct.vmp = vmp

            client_id = ClientIdentification()
            try:
                client_id.ParseFromString(v1_struct.client_id)
                if client_id.SerializeToString() != v1_struct.client_id:
                    raise DecodeError("partial parse")
            except DecodeError as e:
                raise DecodeError(f"Failed to parse client_id as a ClientIdentification, {e}")

            new_vmp_data = v1_struct.vmp.SerializeToString()
            if client_id.vmp_data and client_id.vmp_data != new_vmp_data:
                logging.getLogger("migrate").warning("Client ID already has Verified Media Path data")
            client_id.vmp_data = new_vmp_data
            v1_struct.client_id = client_id.SerializeToString()

        try:
            data = _Structures.v2.build(v1_struct)
        except ConstructError as e:
            raise ValueError(f"Migration failed, {e}")

        try:
            return cls.loads(data)
        except ConstructError as e:
            raise ValueError(f"Device Data seems to be corrupt or invalid, or migration failed, {e}")


__all__ = ("Device", "DeviceTypes")
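
# --- Usage sketch (illustrative, not part of the module) ---
# Loading, dumping, and migrating .wvd files with the Device class above.
# File names are placeholders; the import path assumes the pywidevine layout.
from pywidevine.device import Device

device = Device.load("device.wvd")
print(device.type.name, device.security_level, device.system_id)

# dumps()/dump() always re-serialize to the v2 structure
device.dump("device_copy.wvd")

# one-time upgrade of an old v1 WVD (raises ValueError if already v2)
with open("old_v1.wvd", "rb") as fd:
    migrated = Device.migrate(fd.read())
migrated.dump("old_v1.migrated.wvd")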
@@ -1,38 +0,0 @@
class PyWidevineException(Exception):
    """Exceptions used by pywidevine."""


class TooManySessions(PyWidevineException):
    """Too many Sessions are open."""


class InvalidSession(PyWidevineException):
    """No Session is open with the specified identifier."""


class InvalidInitData(PyWidevineException):
    """The Widevine Cenc Header Data is invalid or empty."""


class InvalidLicenseType(PyWidevineException):
    """The License Type is an Invalid Value."""


class InvalidLicenseMessage(PyWidevineException):
    """The License Message is Invalid or Missing."""


class InvalidContext(PyWidevineException):
    """The Context is Invalid or Missing."""


class SignatureMismatch(PyWidevineException):
    """The Signature did not match."""


class NoKeysLoaded(PyWidevineException):
    """No License was parsed for this Session, no Keys available."""


class DeviceMismatch(PyWidevineException):
    """The Remote CDM's Device information and the API's Device information did not match."""
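
# --- Usage sketch (illustrative, not part of the module) ---
# Typical handling of the hierarchy above: every error raised by the Cdm
# derives from PyWidevineException, so a broad catch-all is safe, while the
# subclasses allow targeted handling. cdm, session_id, and license_b64 are
# from the earlier Cdm sketch; the import path is assumed.
from pywidevine.exceptions import InvalidLicenseMessage, PyWidevineException, SignatureMismatch

try:
    cdm.parse_license(session_id, license_b64)
except SignatureMismatch:
    print("License signature did not verify; refusing its keys.")
except InvalidLicenseMessage as e:
    print(f"License response could not be parsed: {e}")
except PyWidevineException as e:
    print(f"Other pywidevine error: {e}")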
@@ -1,66 +0,0 @@
from __future__ import annotations

import base64
from typing import Optional, Union
from uuid import UUID

from Crypto.Cipher import AES
from Crypto.Util import Padding

from pywidevine.license_protocol_pb2 import License


class Key:
    def __init__(self, type_: str, kid: UUID, key: bytes, permissions: Optional[list[str]] = None):
        self.type = type_
        self.kid = kid
        self.key = key
        self.permissions = permissions or []

    def __repr__(self) -> str:
        return "{name}({items})".format(
            name=self.__class__.__name__,
            items=", ".join([f"{k}={repr(v)}" for k, v in self.__dict__.items()])
        )

    @classmethod
    def from_key_container(cls, key: License.KeyContainer, enc_key: bytes) -> Key:
        """Load Key from a KeyContainer object."""
        permissions = []
        if key.type == License.KeyContainer.KeyType.Value("OPERATOR_SESSION"):
            for descriptor, value in key.operator_session_key_permissions.ListFields():
                if value == 1:
                    permissions.append(descriptor.name)

        return Key(
            type_=License.KeyContainer.KeyType.Name(key.type),
            kid=cls.kid_to_uuid(key.id),
            key=Padding.unpad(
                AES.new(enc_key, AES.MODE_CBC, iv=key.iv).decrypt(key.key),
                16
            ),
            permissions=permissions
        )

    @staticmethod
    def kid_to_uuid(kid: Union[str, bytes]) -> UUID:
        """
        Convert a Key ID from a string or bytes to a UUID object.
        At first this may seem very simple but some types of Key IDs
        may not be 16 bytes and some may be decimal vs. hex.
        """
        if isinstance(kid, str):
            kid = base64.b64decode(kid)
        if not kid:
            kid = b"\x00" * 16

        if kid.decode(errors="replace").isdigit():
            return UUID(int=int(kid.decode()))

        if len(kid) < 16:
            kid += b"\x00" * (16 - len(kid))

        return UUID(bytes=kid)


__all__ = ("Key",)
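
# --- Usage sketch (illustrative, not part of the module) ---
# How Key.kid_to_uuid() normalizes the odd Key ID shapes its docstring
# mentions; the sample values are made up and the import path is assumed.
from uuid import UUID

from pywidevine.key import Key

# 16-byte KIDs map straight to a UUID
assert Key.kid_to_uuid(bytes.fromhex("00000000000000000000000000000001")) == UUID(int=1)
# short KIDs are right-padded with NULs to 16 bytes
assert Key.kid_to_uuid(b"abcd") == UUID(bytes=b"abcd" + b"\x00" * 12)
# purely numeric KIDs are parsed as a decimal integer
assert Key.kid_to_uuid(b"12345") == UUID(int=12345)
# strings are treated as Base64 and decoded first ("YWJjZA==" -> b"abcd")
assert Key.kid_to_uuid("YWJjZA==") == UUID(bytes=b"abcd" + b"\x00" * 12)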
@@ -1,752 +0,0 @@
syntax = "proto2";

package pywidevine_license_protocol;

// need this if we are using libprotobuf-cpp-2.3.0-lite
option optimize_for = LITE_RUNTIME;

option java_package = "com.rlaphoenix.pywidevine.protos";

enum LicenseType {
  STREAMING = 1;
  OFFLINE = 2;
  // License type decision is left to provider.
  AUTOMATIC = 3;
}

enum PlatformVerificationStatus {
  // The platform is not verified.
  PLATFORM_UNVERIFIED = 0;
  // Tampering detected on the platform.
  PLATFORM_TAMPERED = 1;
  // The platform has been verified by means of software.
  PLATFORM_SOFTWARE_VERIFIED = 2;
  // The platform has been verified by means of hardware (e.g. secure boot).
  PLATFORM_HARDWARE_VERIFIED = 3;
  // Platform verification was not performed.
  PLATFORM_NO_VERIFICATION = 4;
  // Platform and secure storage capability have been verified by means of
  // software.
  PLATFORM_SECURE_STORAGE_SOFTWARE_VERIFIED = 5;
}

// LicenseIdentification is propagated from LicenseRequest to License,
// incrementing version with each iteration.
message LicenseIdentification {
  optional bytes request_id = 1;
  optional bytes session_id = 2;
  optional bytes purchase_id = 3;
  optional LicenseType type = 4;
  optional int32 version = 5;
  optional bytes provider_session_token = 6;
}

message License {
  message Policy {
    // Indicates that playback of the content is allowed.
    optional bool can_play = 1 [default = false];

    // Indicates that the license may be persisted to non-volatile
    // storage for offline use.
    optional bool can_persist = 2 [default = false];

    // Indicates that renewal of this license is allowed.
    optional bool can_renew = 3 [default = false];

    // For the |*duration*| fields, playback must halt when
    // license_start_time (seconds since the epoch (UTC)) +
    // license_duration_seconds is exceeded. A value of 0
    // indicates that there is no limit to the duration.

    // Indicates the rental window.
    optional int64 rental_duration_seconds = 4 [default = 0];

    // Indicates the viewing window, once playback has begun.
    optional int64 playback_duration_seconds = 5 [default = 0];

    // Indicates the time window for this specific license.
    optional int64 license_duration_seconds = 6 [default = 0];

    // The |renewal*| fields only apply if |can_renew| is true.

    // The window of time, in which playback is allowed to continue while
    // renewal is attempted, yet unsuccessful due to backend problems with
    // the license server.
    optional int64 renewal_recovery_duration_seconds = 7 [default = 0];

    // All renewal requests for this license shall be directed to the
    // specified URL.
    optional string renewal_server_url = 8;

    // How many seconds after license_start_time, before renewal is first
    // attempted.
    optional int64 renewal_delay_seconds = 9 [default = 0];

    // Specifies the delay in seconds between subsequent license
    // renewal requests, in case of failure.
    optional int64 renewal_retry_interval_seconds = 10 [default = 0];

    // Indicates that the license shall be sent for renewal when usage is
    // started.
    optional bool renew_with_usage = 11 [default = false];

    // Indicates to client that license renewal and release requests ought to
    // include ClientIdentification (client_id).
    optional bool always_include_client_id = 12 [default = false];

    // Duration of grace period before playback_duration_seconds (short window)
    // goes into effect. Optional.
    optional int64 play_start_grace_period_seconds = 13 [default = 0];

    // Enables "soft enforcement" of playback_duration_seconds, letting the user
    // finish playback even if short window expires. Optional.
    optional bool soft_enforce_playback_duration = 14 [default = false];

    // Enables "soft enforcement" of rental_duration_seconds. Initial playback
    // must always start before rental duration expires. In order to allow
    // subsequent playbacks to start after the rental duration expires,
    // soft_enforce_playback_duration must be true. Otherwise, subsequent
    // playbacks will not be allowed once rental duration expires. Optional.
    optional bool soft_enforce_rental_duration = 15 [default = true];
  }

  message KeyContainer {
    enum KeyType {
      SIGNING = 1;           // Exactly one key of this type must appear.
      CONTENT = 2;           // Content key.
      KEY_CONTROL = 3;       // Key control block for license renewals. No key.
      OPERATOR_SESSION = 4;  // wrapped keys for auxiliary crypto operations.
      ENTITLEMENT = 5;       // Entitlement keys.
      OEM_CONTENT = 6;       // Partner-specific content key.
    }

    // The SecurityLevel enumeration allows the server to communicate the level
    // of robustness required by the client, in order to use the key.
    enum SecurityLevel {
      // Software-based whitebox crypto is required.
      SW_SECURE_CRYPTO = 1;

      // Software crypto and an obfuscated decoder is required.
      SW_SECURE_DECODE = 2;

      // The key material and crypto operations must be performed within a
      // hardware backed trusted execution environment.
      HW_SECURE_CRYPTO = 3;

      // The crypto and decoding of content must be performed within a hardware
      // backed trusted execution environment.
      HW_SECURE_DECODE = 4;

      // The crypto, decoding and all handling of the media (compressed and
      // uncompressed) must be handled within a hardware backed trusted
      // execution environment.
      HW_SECURE_ALL = 5;
    }

    message KeyControl {
      // |key_control| is documented in:
      // Widevine Modular DRM Security Integration Guide for CENC
      // If present, the key control must be communicated to the secure
      // environment prior to any usage. This message is automatically generated
      // by the Widevine License Server SDK.
      optional bytes key_control_block = 1;
      optional bytes iv = 2;
    }

    message OutputProtection {
      // Indicates whether HDCP is required on digital outputs, and which
      // version should be used.
      enum HDCP {
        HDCP_NONE = 0;
        HDCP_V1 = 1;
        HDCP_V2 = 2;
        HDCP_V2_1 = 3;
        HDCP_V2_2 = 4;
        HDCP_V2_3 = 5;
        HDCP_NO_DIGITAL_OUTPUT = 0xff;
      }
      optional HDCP hdcp = 1 [default = HDCP_NONE];

      // Indicate the CGMS setting to be inserted on analog output.
      enum CGMS {
        CGMS_NONE = 42;
        COPY_FREE = 0;
        COPY_ONCE = 2;
        COPY_NEVER = 3;
      }
      optional CGMS cgms_flags = 2 [default = CGMS_NONE];

      enum HdcpSrmRule {
        HDCP_SRM_RULE_NONE = 0;
        // In 'required_protection', this means most current SRM is required.
        // Update the SRM on the device. If update cannot happen,
        // do not allow the key.
        // In 'requested_protection', this means most current SRM is requested.
        // Update the SRM on the device. If update cannot happen,
        // allow use of the key anyway.
        CURRENT_SRM = 1;
      }
      optional HdcpSrmRule hdcp_srm_rule = 3 [default = HDCP_SRM_RULE_NONE];
      // Optional requirement to indicate analog output is not allowed.
      optional bool disable_analog_output = 4 [default = false];
      // Optional requirement to indicate digital output is not allowed.
      optional bool disable_digital_output = 5 [default = false];
    }

    message VideoResolutionConstraint {
      // Minimum and maximum video resolutions in the range (height x width).
      optional uint32 min_resolution_pixels = 1;
      optional uint32 max_resolution_pixels = 2;
      // Optional output protection requirements for this range. If not
      // specified, the OutputProtection in the KeyContainer applies.
      optional OutputProtection required_protection = 3;
    }

    message OperatorSessionKeyPermissions {
      // Permissions/key usage flags for operator service keys
      // (type = OPERATOR_SESSION).
      optional bool allow_encrypt = 1 [default = false];
      optional bool allow_decrypt = 2 [default = false];
      optional bool allow_sign = 3 [default = false];
      optional bool allow_signature_verify = 4 [default = false];
    }

    optional bytes id = 1;
    optional bytes iv = 2;
    optional bytes key = 3;
    optional KeyType type = 4;
    optional SecurityLevel level = 5 [default = SW_SECURE_CRYPTO];
    optional OutputProtection required_protection = 6;
    // NOTE: Use of requested_protection is not recommended as it is only
    // supported on a small number of platforms.
    optional OutputProtection requested_protection = 7;
    optional KeyControl key_control = 8;
    optional OperatorSessionKeyPermissions operator_session_key_permissions = 9;
    // Optional video resolution constraints. If the video resolution of the
    // content being decrypted/decoded falls within one of the specified ranges,
    // the optional required_protections may be applied. Otherwise an error will
    // be reported.
    // NOTE: Use of this feature is not recommended, as it is only supported on
    // a small number of platforms.
    repeated VideoResolutionConstraint video_resolution_constraints = 10;
    // Optional flag to indicate the key must only be used if the client
    // supports anti rollback of the user table. Content provider can query the
    // client capabilities to determine if the client supports this feature.
    optional bool anti_rollback_usage_table = 11 [default = false];
    // Optional. Not limited to commonly known track types such as SD, HD.
    // It can be some provider defined label to identify the track.
    optional string track_label = 12;
  }

  optional LicenseIdentification id = 1;
  optional Policy policy = 2;
  repeated KeyContainer key = 3;
  // Time of the request in seconds (UTC) as set in
  // LicenseRequest.request_time. If this time is not set in the request,
  // the local time at the license service is used in this field.
  optional int64 license_start_time = 4;
  optional bool remote_attestation_verified = 5 [default = false];
  // Client token generated by the content provider. Optional.
  optional bytes provider_client_token = 6;
  // 4cc code specifying the CENC protection scheme as defined in the CENC 3.0
  // specification. Propagated from Widevine PSSH box. Optional.
  optional uint32 protection_scheme = 7;
  // 8 byte verification field "HDCPDATA" followed by unsigned 32 bit minimum
  // HDCP SRM version (whether the version is for HDCP1 SRM or HDCP2 SRM
  // depends on client max_hdcp_version).
  // Additional details can be found in Widevine Modular DRM Security
  // Integration Guide for CENC.
  optional bytes srm_requirement = 8;
  // If present this contains a signed SRM file (either HDCP1 SRM or HDCP2 SRM
  // depending on client max_hdcp_version) that should be installed on the
  // client device.
  optional bytes srm_update = 9;
  // Indicates the status of any type of platform verification performed by the
  // server.
  optional PlatformVerificationStatus platform_verification_status = 10
      [default = PLATFORM_NO_VERIFICATION];
  // IDs of the groups for which keys are delivered in this license, if any.
  repeated bytes group_ids = 11;
}

enum ProtocolVersion {
  VERSION_2_0 = 20;
  VERSION_2_1 = 21;
  VERSION_2_2 = 22;
}

message LicenseRequest {
  message ContentIdentification {
    message WidevinePsshData {
      repeated bytes pssh_data = 1;
      optional LicenseType license_type = 2;
      optional bytes request_id = 3;  // Opaque, client-specified.
    }

    message WebmKeyId {
      optional bytes header = 1;
      optional LicenseType license_type = 2;
      optional bytes request_id = 3;  // Opaque, client-specified.
    }

    message ExistingLicense {
      optional LicenseIdentification license_id = 1;
      optional int64 seconds_since_started = 2;
      optional int64 seconds_since_last_played = 3;
      optional bytes session_usage_table_entry = 4;
    }

    message InitData {
      enum InitDataType {
        CENC = 1;
        WEBM = 2;
      }

      optional InitDataType init_data_type = 1 [default = CENC];
      optional bytes init_data = 2;
      optional LicenseType license_type = 3;
      optional bytes request_id = 4;
    }

    oneof content_id_variant {
      // Exactly one of these must be present.
      WidevinePsshData widevine_pssh_data = 1;
      WebmKeyId webm_key_id = 2;
      ExistingLicense existing_license = 3;
      InitData init_data = 4;
    }
  }

  enum RequestType {
    NEW = 1;
    RENEWAL = 2;
    RELEASE = 3;
  }

  // The client_id provides information authenticating the calling device. It
  // contains the Widevine keybox token that was installed on the device at the
  // factory. This field or encrypted_client_id below is required for a valid
  // license request, but both should never be present in the same request.
  optional ClientIdentification client_id = 1;
  optional ContentIdentification content_id = 2;
  optional RequestType type = 3;
  // Time of the request in seconds (UTC) as set by the client.
  optional int64 request_time = 4;
  // Old-style decimal-encoded string key control nonce.
  optional bytes key_control_nonce_deprecated = 5;
  optional ProtocolVersion protocol_version = 6 [default = VERSION_2_0];
  // New-style uint32 key control nonce, please use instead of
  // key_control_nonce_deprecated.
  optional uint32 key_control_nonce = 7;
  // Encrypted ClientIdentification message, used for privacy purposes.
  optional EncryptedClientIdentification encrypted_client_id = 8;
}

message MetricData {
  enum MetricType {
    // The time spent in the 'stage', specified in microseconds.
    LATENCY = 1;
    // The UNIX epoch timestamp at which the 'stage' was first accessed in
    // microseconds.
    TIMESTAMP = 2;
  }

  message TypeValue {
    optional MetricType type = 1;
    // The value associated with 'type'. For example if type == LATENCY, the
    // value would be the time in microseconds spent in this 'stage'.
    optional int64 value = 2 [default = 0];
  }

  // 'stage' that is currently processing the SignedMessage. Required.
  optional string stage_name = 1;
  // metric and associated value.
  repeated TypeValue metric_data = 2;
}

message VersionInfo {
  // License SDK version reported by the Widevine License SDK. This field
  // is populated automatically by the SDK.
  optional string license_sdk_version = 1;
  // Version of the service hosting the license SDK. This field is optional.
  // It may be provided by the hosting service.
  optional string license_service_version = 2;
}

message SignedMessage {
  enum MessageType {
    LICENSE_REQUEST = 1;
    LICENSE = 2;
    ERROR_RESPONSE = 3;
    SERVICE_CERTIFICATE_REQUEST = 4;
    SERVICE_CERTIFICATE = 5;
    SUB_LICENSE = 6;
    CAS_LICENSE_REQUEST = 7;
    CAS_LICENSE = 8;
    EXTERNAL_LICENSE_REQUEST = 9;
    EXTERNAL_LICENSE = 10;
  }

  enum SessionKeyType {
    UNDEFINED = 0;
    WRAPPED_AES_KEY = 1;
    EPHERMERAL_ECC_PUBLIC_KEY = 2;
  }
  optional MessageType type = 1;
  optional bytes msg = 2;
  // Required field that contains the signature of the bytes of msg.
  // For license requests, the signing algorithm is determined by the
  // certificate contained in the request.
  // For license responses, the signing algorithm is HMAC with signing key based
  // on |session_key|.
  optional bytes signature = 3;
  // If populated, the contents of this field will be signaled by the
  // |session_key_type| type. If the |session_key_type| is WRAPPED_AES_KEY the
  // key is the bytes of an encrypted AES key. If the |session_key_type| is
  // EPHERMERAL_ECC_PUBLIC_KEY the field contains the bytes of an RFC5208 ASN1
  // serialized ECC public key.
  optional bytes session_key = 4;
  // Remote attestation data which will be present in the initial license
  // request for ChromeOS client devices operating in verified mode. Remote
  // attestation challenge data is |msg| field above. Optional.
  optional bytes remote_attestation = 5;

  repeated MetricData metric_data = 6;
  // Version information from the SDK and license service. This information is
  // provided in the license response.
  optional VersionInfo service_version_info = 7;
  // Optional field that contains the algorithm type used to generate the
  // session_key and signature in a LICENSE message.
  optional SessionKeyType session_key_type = 8 [default = WRAPPED_AES_KEY];
  // The core message is the simple serialization of fields used by OEMCrypto.
  // This field was introduced in OEMCrypto API v16.
  optional bytes oemcrypto_core_message = 9;
}

enum HashAlgorithmProto {
  // Unspecified hash algorithm: SHA_256 shall be used for ECC based algorithms
  // and SHA_1 shall be used otherwise.
  HASH_ALGORITHM_UNSPECIFIED = 0;
  HASH_ALGORITHM_SHA_1 = 1;
  HASH_ALGORITHM_SHA_256 = 2;
  HASH_ALGORITHM_SHA_384 = 3;
}

// ClientIdentification message used to authenticate the client device.
|
||||
message ClientIdentification {
|
||||
enum TokenType {
|
||||
KEYBOX = 0;
|
||||
DRM_DEVICE_CERTIFICATE = 1;
|
||||
REMOTE_ATTESTATION_CERTIFICATE = 2;
|
||||
OEM_DEVICE_CERTIFICATE = 3;
|
||||
}
|
||||
|
||||
message NameValue {
|
||||
optional string name = 1;
|
||||
optional string value = 2;
|
||||
}
|
||||
|
||||
// Capabilities which not all clients may support. Used for the license
|
||||
// exchange protocol only.
|
||||
message ClientCapabilities {
|
||||
enum HdcpVersion {
|
||||
HDCP_NONE = 0;
|
||||
HDCP_V1 = 1;
|
||||
HDCP_V2 = 2;
|
||||
HDCP_V2_1 = 3;
|
||||
HDCP_V2_2 = 4;
|
||||
HDCP_V2_3 = 5;
|
||||
HDCP_NO_DIGITAL_OUTPUT = 0xff;
|
||||
}
|
||||
|
||||
enum CertificateKeyType {
|
||||
RSA_2048 = 0;
|
||||
RSA_3072 = 1;
|
||||
ECC_SECP256R1 = 2;
|
||||
ECC_SECP384R1 = 3;
|
||||
ECC_SECP521R1 = 4;
|
||||
}
|
||||
|
||||
enum AnalogOutputCapabilities {
|
||||
ANALOG_OUTPUT_UNKNOWN = 0;
|
||||
ANALOG_OUTPUT_NONE = 1;
|
||||
ANALOG_OUTPUT_SUPPORTED = 2;
|
||||
ANALOG_OUTPUT_SUPPORTS_CGMS_A = 3;
|
||||
}
|
||||
|
||||
optional bool client_token = 1 [default = false];
|
||||
optional bool session_token = 2 [default = false];
|
||||
optional bool video_resolution_constraints = 3 [default = false];
|
||||
optional HdcpVersion max_hdcp_version = 4 [default = HDCP_NONE];
|
||||
optional uint32 oem_crypto_api_version = 5;
|
||||
// Client has hardware support for protecting the usage table, such as
|
||||
// storing the generation number in secure memory. For Details, see:
|
||||
// Widevine Modular DRM Security Integration Guide for CENC
|
||||
optional bool anti_rollback_usage_table = 6 [default = false];
|
||||
// The client shall report |srm_version| if available.
|
||||
optional uint32 srm_version = 7;
|
||||
// A device may have SRM data, and report a version, but may not be capable
|
||||
// of updating SRM data.
|
||||
optional bool can_update_srm = 8 [default = false];
|
||||
repeated CertificateKeyType supported_certificate_key_type = 9;
|
||||
optional AnalogOutputCapabilities analog_output_capabilities = 10
|
||||
[default = ANALOG_OUTPUT_UNKNOWN];
|
||||
optional bool can_disable_analog_output = 11 [default = false];
|
||||
// Clients can indicate a performance level supported by OEMCrypto.
|
||||
// This will allow applications and providers to choose an appropriate
|
||||
// quality of content to serve. Currently defined tiers are
|
||||
// 1 (low), 2 (medium) and 3 (high). Any other value indicates that
|
||||
// the resource rating is unavailable or reporting erroneous values
|
||||
// for that device. For details see,
|
||||
// Widevine Modular DRM Security Integration Guide for CENC
|
||||
optional uint32 resource_rating_tier = 12 [default = 0];
|
||||
}
|
||||
|
||||
message ClientCredentials {
|
||||
optional TokenType type = 1 [default = KEYBOX];
|
||||
optional bytes token = 2;
|
||||
}
|
||||
|
||||
// Type of factory-provisioned device root of trust. Optional.
|
||||
optional TokenType type = 1 [default = KEYBOX];
|
||||
// Factory-provisioned device root of trust. Required.
|
||||
optional bytes token = 2;
|
||||
// Optional client information name/value pairs.
|
||||
repeated NameValue client_info = 3;
|
||||
// Client token generated by the content provider. Optional.
|
||||
optional bytes provider_client_token = 4;
|
||||
// Number of licenses received by the client to which the token above belongs.
|
||||
// Only present if client_token is specified.
|
||||
optional uint32 license_counter = 5;
|
||||
// List of non-baseline client capabilities.
|
||||
optional ClientCapabilities client_capabilities = 6;
|
||||
// Serialized VmpData message. Optional.
|
||||
optional bytes vmp_data = 7;
|
||||
// Optional field that may contain additional provisioning credentials.
|
||||
repeated ClientCredentials device_credentials = 8;
|
||||
}
|
||||
|
||||
// EncryptedClientIdentification message used to hold ClientIdentification
|
||||
// messages encrypted for privacy purposes.
|
||||
message EncryptedClientIdentification {
|
||||
// Provider ID for which the ClientIdentifcation is encrypted (owner of
|
||||
// service certificate).
|
||||
optional string provider_id = 1;
|
||||
// Serial number for the service certificate for which ClientIdentification is
|
||||
// encrypted.
|
||||
optional bytes service_certificate_serial_number = 2;
|
||||
// Serialized ClientIdentification message, encrypted with the privacy key
|
||||
// using AES-128-CBC with PKCS#5 padding.
|
||||
optional bytes encrypted_client_id = 3;
|
||||
// Initialization vector needed to decrypt encrypted_client_id.
|
||||
optional bytes encrypted_client_id_iv = 4;
|
||||
// AES-128 privacy key, encrypted with the service public key using RSA-OAEP.
|
||||
optional bytes encrypted_privacy_key = 5;
|
||||
}

// DRM certificate definition for user devices, intermediate, service, and root
// certificates.
message DrmCertificate {
  enum Type {
    ROOT = 0;  // ProtoBestPractices: ignore.
    DEVICE_MODEL = 1;
    DEVICE = 2;
    SERVICE = 3;
    PROVISIONER = 4;
  }
  enum ServiceType {
    UNKNOWN_SERVICE_TYPE = 0;
    LICENSE_SERVER_SDK = 1;
    LICENSE_SERVER_PROXY_SDK = 2;
    PROVISIONING_SDK = 3;
    CAS_PROXY_SDK = 4;
  }
  enum Algorithm {
    UNKNOWN_ALGORITHM = 0;
    RSA = 1;
    ECC_SECP256R1 = 2;
    ECC_SECP384R1 = 3;
    ECC_SECP521R1 = 4;
  }

  message EncryptionKey {
    // Device public key. PKCS#1 ASN.1 DER-encoded. Required.
    optional bytes public_key = 1;
    // Required. The algorithm field contains the curve used to create the
    // |public_key| if algorithm is one of the ECC types.
    // The |algorithm| is used both to determine whether the certificate is
    // ECC or RSA. The |algorithm| also specifies the parameters that were used
    // to create |public_key| and are used to create an ephemeral session key.
    optional Algorithm algorithm = 2 [default = RSA];
  }

  // Type of certificate. Required.
  optional Type type = 1;
  // 128-bit globally unique serial number of certificate.
  // Value is 0 for root certificate. Required.
  optional bytes serial_number = 2;
  // POSIX time, in seconds, when the certificate was created. Required.
  optional uint32 creation_time_seconds = 3;
  // POSIX time, in seconds, when the certificate should expire. Value of zero
  // denotes indefinite expiry time. For more information on limited lifespan
  // DRM certificates, see (go/limited-lifespan-drm-certificates).
  optional uint32 expiration_time_seconds = 12;
  // Device public key. PKCS#1 ASN.1 DER-encoded. Required.
  optional bytes public_key = 4;
  // Widevine system ID for the device. Required for intermediate and
  // user device certificates.
  optional uint32 system_id = 5;
  // Deprecated field, which used to indicate whether the device was a test
  // (non-production) device. The test_device field in ProvisionedDeviceInfo
  // below should be observed instead.
  optional bool test_device_deprecated = 6 [deprecated = true];
  // Service identifier (web origin) for the provider which owns the
  // certificate. Required for service and provisioner certificates.
  optional string provider_id = 7;
  // This field is used only when type = SERVICE to specify which SDKs use the
  // service certificate. This repeated field is treated as a set. A certificate
  // may be used for the specified service SDK if the appropriate ServiceType
  // is specified in this field.
  repeated ServiceType service_types = 8;
  // Required. The algorithm field contains the curve used to create the
  // |public_key| if algorithm is one of the ECC types.
  // The |algorithm| is used both to determine whether the certificate is ECC
  // or RSA. The |algorithm| also specifies the parameters that were used to
  // create |public_key| and are used to create an ephemeral session key.
  optional Algorithm algorithm = 9 [default = RSA];
  // Optional. May be present in DEVICE certificate types. This is the root
  // of trust identifier that holds an encrypted value that identifies the
  // keybox or other root of trust that was used to provision a DEVICE DRM
  // certificate.
  optional bytes rot_id = 10;
  // Optional. May be present in devices that explicitly support dual keys. When
  // present, the |public_key| is used for verification of received license
  // request messages.
  optional EncryptionKey encryption_key = 11;
}

// DrmCertificate signed by a higher (CA) DRM certificate.
message SignedDrmCertificate {
  // Serialized certificate. Required.
  optional bytes drm_certificate = 1;
  // Signature of certificate. Signed with root or intermediate
  // certificate specified below. Required.
  optional bytes signature = 2;
  // SignedDrmCertificate used to sign this certificate.
  optional SignedDrmCertificate signer = 3;
  // Optional field that indicates the hash algorithm used in signature scheme.
  optional HashAlgorithmProto hash_algorithm = 4;
}
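
Because |drm_certificate| is itself a serialized DrmCertificate, reading a service certificate is a two-step parse. A short sketch using the generated bindings imported elsewhere in this commit (the cert_bytes input is an assumption, e.g. the msg field of a SERVICE_CERTIFICATE SignedMessage); verifying |signature| against the signer chain is a separate step that depends on |hash_algorithm|:

from pywidevine.license_protocol_pb2 import DrmCertificate, SignedDrmCertificate

signed_cert = SignedDrmCertificate()
signed_cert.ParseFromString(cert_bytes)  # cert_bytes: serialized SignedDrmCertificate

drm_cert = DrmCertificate()
drm_cert.ParseFromString(signed_cert.drm_certificate)
print(drm_cert.provider_id, drm_cert.system_id)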

message WidevinePsshData {
  enum Type {
    SINGLE = 0;        // Single PSSH to be used to retrieve content keys.
    ENTITLEMENT = 1;   // Primary PSSH used to retrieve entitlement keys.
    ENTITLED_KEY = 2;  // Secondary PSSH containing entitled key(s).
  }

  message EntitledKey {
    // ID of entitlement key used for wrapping |key|.
    optional bytes entitlement_key_id = 1;
    // ID of the entitled key.
    optional bytes key_id = 2;
    // Wrapped key. Required.
    optional bytes key = 3;
    // IV used for wrapping |key|. Required.
    optional bytes iv = 4;
    // Size of entitlement key used for wrapping |key|.
    optional uint32 entitlement_key_size_bytes = 5 [default = 32];
  }

  // Entitlement or content key IDs. Can only be present in SINGLE or ENTITLEMENT
  // PSSHs. May be repeated to facilitate delivery of multiple keys in a
  // single license. Cannot be used in conjunction with content_id or
  // group_ids, which are the preferred mechanism.
  repeated bytes key_ids = 2;

  // Content identifier which may map to multiple entitlement or content key
  // IDs to facilitate the delivery of multiple keys in a single license.
  // Cannot be present in conjunction with key_ids, but if used must be in all
  // PSSHs.
  optional bytes content_id = 4;

  // Crypto period index, for media using key rotation. Always corresponds to
  // the content key period. This means that if using entitlement licensing,
  // the ENTITLED_KEY PSSHs will have sequential crypto_period_index values,
  // whereas the ENTITLEMENT PSSHs will have gaps in the sequence. Required if
  // doing key rotation.
  optional uint32 crypto_period_index = 7;

  // Protection scheme identifying the encryption algorithm. The protection
  // scheme is represented as a uint32 value. The uint32 contains 4 bytes, each
  // representing a single ASCII character in one of the 4CC protection scheme
  // values. To be deprecated in favor of signaling from content.
  // 'cenc' (AES-CTR) protection_scheme = 0x63656E63,
  // 'cbc1' (AES-CBC) protection_scheme = 0x63626331,
  // 'cens' (AES-CTR pattern encryption) protection_scheme = 0x63656E73,
  // 'cbcs' (AES-CBC pattern encryption) protection_scheme = 0x63626373.
  optional uint32 protection_scheme = 9;
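
Since the packing above is just the four ASCII bytes in big-endian order, the listed constants can be derived rather than memorized. A small sketch in plain Python:

def fourcc_to_uint32(fourcc: str) -> int:
    # 'cenc' -> 0x63656E63, 'cbc1' -> 0x63626331, etc.
    return int.from_bytes(fourcc.encode("ascii"), "big")

assert fourcc_to_uint32("cenc") == 0x63656E63
assert fourcc_to_uint32("cbcs") == 0x63626373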

  // Optional. For media using key rotation, this represents the duration
  // of each crypto period in seconds.
  optional uint32 crypto_period_seconds = 10;

  // Type of PSSH. Required if not SINGLE.
  optional Type type = 11 [default = SINGLE];

  // Key sequence for Widevine-managed keys. Optional.
  optional uint32 key_sequence = 12;

  // Group identifiers for all groups to which the content belongs. This can
  // be used to deliver licenses to unlock multiple titles / channels.
  // Optional, and may only be present in ENTITLEMENT and ENTITLED_KEY PSSHs, and
  // not in conjunction with key_ids.
  repeated bytes group_ids = 13;

  // Copy/copies of the content key used to decrypt the media stream in which
  // the PSSH box is embedded, each wrapped with a different entitlement key.
  // May also contain sub-licenses to support devices with OEMCrypto 13 or
  // older. May be repeated if using group entitlement keys. Present only in
  // PSSHs of type ENTITLED_KEY.
  repeated EntitledKey entitled_keys = 14;

  // Video feature identifier, which is used in conjunction with |content_id|
  // to determine the set of keys to be returned in the license. Cannot be
  // present in conjunction with |key_ids|.
  // Current values are "HDR".
  optional string video_feature = 15;

  //////////////////////////// Deprecated Fields ////////////////////////////
  enum Algorithm {
    UNENCRYPTED = 0;
    AESCTR = 1;
  };
  optional Algorithm algorithm = 1 [deprecated = true];

  // Content provider name.
  optional string provider = 3 [deprecated = true];

  // Track type. Acceptable values are SD, HD and AUDIO. Used to
  // differentiate content keys used by an asset.
  optional string track_type = 5 [deprecated = true];

  // The name of a registered policy to be used for this asset.
  optional string policy = 6 [deprecated = true];

  // Optional protected context for group content. The grouped_license is a
  // serialized SignedMessage.
  optional bytes grouped_license = 8 [deprecated = true];
}
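
For the common non-entitlement case, a PSSH payload is just this message with key_ids (and usually protection_scheme) set, serialized into the body of the 'pssh' box. A sketch with the generated bindings (the key ID value is a hypothetical placeholder):

from pywidevine.license_protocol_pb2 import WidevinePsshData

pssh_data = WidevinePsshData()
pssh_data.key_ids.append(bytes.fromhex("00000000000000000000000000000001"))  # hypothetical KID
pssh_data.protection_scheme = 0x63656E63  # 'cenc'
init_data = pssh_data.SerializeToString()  # becomes the PSSH box payload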

// File Hashes for Verified Media Path (VMP) support.
message FileHashes {
  message Signature {
    optional string filename = 1;
    optional bool test_signing = 2;  // 0 - release, 1 - testing
    optional bytes SHA512Hash = 3;
    optional bool main_exe = 4;  // 0 for DLLs, 1 for the EXE; this is field 3 in the file
    optional bytes signature = 5;
  }
  optional bytes signer = 1;
  repeated Signature signatures = 2;
}
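
This is the message stored in ClientIdentification.vmp_data, and the create_device command later in this commit parses it the same way. A short sketch (vmp_bytes is an assumed input holding a serialized FileHashes blob):

from pywidevine.license_protocol_pb2 import FileHashes

file_hashes = FileHashes()
file_hashes.ParseFromString(vmp_bytes)
for sig in file_hashes.signatures:
    print(sig.filename, sig.main_exe, sig.test_signing)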
File diff suppressed because one or more lines are too long
@ -1,607 +0,0 @@
|
||||
# mypy: ignore-errors
|
||||
|
||||
from google.protobuf.internal import containers as _containers
|
||||
from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import message as _message
|
||||
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union
|
||||
|
||||
AUTOMATIC: LicenseType
|
||||
DESCRIPTOR: _descriptor.FileDescriptor
|
||||
HASH_ALGORITHM_SHA_1: HashAlgorithmProto
|
||||
HASH_ALGORITHM_SHA_256: HashAlgorithmProto
|
||||
HASH_ALGORITHM_SHA_384: HashAlgorithmProto
|
||||
HASH_ALGORITHM_UNSPECIFIED: HashAlgorithmProto
|
||||
OFFLINE: LicenseType
|
||||
PLATFORM_HARDWARE_VERIFIED: PlatformVerificationStatus
|
||||
PLATFORM_NO_VERIFICATION: PlatformVerificationStatus
|
||||
PLATFORM_SECURE_STORAGE_SOFTWARE_VERIFIED: PlatformVerificationStatus
|
||||
PLATFORM_SOFTWARE_VERIFIED: PlatformVerificationStatus
|
||||
PLATFORM_TAMPERED: PlatformVerificationStatus
|
||||
PLATFORM_UNVERIFIED: PlatformVerificationStatus
|
||||
STREAMING: LicenseType
|
||||
VERSION_2_0: ProtocolVersion
|
||||
VERSION_2_1: ProtocolVersion
|
||||
VERSION_2_2: ProtocolVersion
|
||||
|
||||
class ClientIdentification(_message.Message):
|
||||
__slots__ = ["client_capabilities", "client_info", "device_credentials", "license_counter", "provider_client_token", "token", "type", "vmp_data"]
|
||||
class TokenType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class ClientCapabilities(_message.Message):
|
||||
__slots__ = ["analog_output_capabilities", "anti_rollback_usage_table", "can_disable_analog_output", "can_update_srm", "client_token", "max_hdcp_version", "oem_crypto_api_version", "resource_rating_tier", "session_token", "srm_version", "supported_certificate_key_type", "video_resolution_constraints"]
|
||||
class AnalogOutputCapabilities(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class CertificateKeyType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class HdcpVersion(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
ANALOG_OUTPUT_CAPABILITIES_FIELD_NUMBER: _ClassVar[int]
|
||||
ANALOG_OUTPUT_NONE: ClientIdentification.ClientCapabilities.AnalogOutputCapabilities
|
||||
ANALOG_OUTPUT_SUPPORTED: ClientIdentification.ClientCapabilities.AnalogOutputCapabilities
|
||||
ANALOG_OUTPUT_SUPPORTS_CGMS_A: ClientIdentification.ClientCapabilities.AnalogOutputCapabilities
|
||||
ANALOG_OUTPUT_UNKNOWN: ClientIdentification.ClientCapabilities.AnalogOutputCapabilities
|
||||
ANTI_ROLLBACK_USAGE_TABLE_FIELD_NUMBER: _ClassVar[int]
|
||||
CAN_DISABLE_ANALOG_OUTPUT_FIELD_NUMBER: _ClassVar[int]
|
||||
CAN_UPDATE_SRM_FIELD_NUMBER: _ClassVar[int]
|
||||
CLIENT_TOKEN_FIELD_NUMBER: _ClassVar[int]
|
||||
ECC_SECP256R1: ClientIdentification.ClientCapabilities.CertificateKeyType
|
||||
ECC_SECP384R1: ClientIdentification.ClientCapabilities.CertificateKeyType
|
||||
ECC_SECP521R1: ClientIdentification.ClientCapabilities.CertificateKeyType
|
||||
HDCP_NONE: ClientIdentification.ClientCapabilities.HdcpVersion
|
||||
HDCP_NO_DIGITAL_OUTPUT: ClientIdentification.ClientCapabilities.HdcpVersion
|
||||
HDCP_V1: ClientIdentification.ClientCapabilities.HdcpVersion
|
||||
HDCP_V2: ClientIdentification.ClientCapabilities.HdcpVersion
|
||||
HDCP_V2_1: ClientIdentification.ClientCapabilities.HdcpVersion
|
||||
HDCP_V2_2: ClientIdentification.ClientCapabilities.HdcpVersion
|
||||
HDCP_V2_3: ClientIdentification.ClientCapabilities.HdcpVersion
|
||||
MAX_HDCP_VERSION_FIELD_NUMBER: _ClassVar[int]
|
||||
OEM_CRYPTO_API_VERSION_FIELD_NUMBER: _ClassVar[int]
|
||||
RESOURCE_RATING_TIER_FIELD_NUMBER: _ClassVar[int]
|
||||
RSA_2048: ClientIdentification.ClientCapabilities.CertificateKeyType
|
||||
RSA_3072: ClientIdentification.ClientCapabilities.CertificateKeyType
|
||||
SESSION_TOKEN_FIELD_NUMBER: _ClassVar[int]
|
||||
SRM_VERSION_FIELD_NUMBER: _ClassVar[int]
|
||||
SUPPORTED_CERTIFICATE_KEY_TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
VIDEO_RESOLUTION_CONSTRAINTS_FIELD_NUMBER: _ClassVar[int]
|
||||
analog_output_capabilities: ClientIdentification.ClientCapabilities.AnalogOutputCapabilities
|
||||
anti_rollback_usage_table: bool
|
||||
can_disable_analog_output: bool
|
||||
can_update_srm: bool
|
||||
client_token: bool
|
||||
max_hdcp_version: ClientIdentification.ClientCapabilities.HdcpVersion
|
||||
oem_crypto_api_version: int
|
||||
resource_rating_tier: int
|
||||
session_token: bool
|
||||
srm_version: int
|
||||
supported_certificate_key_type: _containers.RepeatedScalarFieldContainer[ClientIdentification.ClientCapabilities.CertificateKeyType]
|
||||
video_resolution_constraints: bool
|
||||
def __init__(self, client_token: bool = ..., session_token: bool = ..., video_resolution_constraints: bool = ..., max_hdcp_version: _Optional[_Union[ClientIdentification.ClientCapabilities.HdcpVersion, str]] = ..., oem_crypto_api_version: _Optional[int] = ..., anti_rollback_usage_table: bool = ..., srm_version: _Optional[int] = ..., can_update_srm: bool = ..., supported_certificate_key_type: _Optional[_Iterable[_Union[ClientIdentification.ClientCapabilities.CertificateKeyType, str]]] = ..., analog_output_capabilities: _Optional[_Union[ClientIdentification.ClientCapabilities.AnalogOutputCapabilities, str]] = ..., can_disable_analog_output: bool = ..., resource_rating_tier: _Optional[int] = ...) -> None: ...
|
||||
class ClientCredentials(_message.Message):
|
||||
__slots__ = ["token", "type"]
|
||||
TOKEN_FIELD_NUMBER: _ClassVar[int]
|
||||
TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
token: bytes
|
||||
type: ClientIdentification.TokenType
|
||||
def __init__(self, type: _Optional[_Union[ClientIdentification.TokenType, str]] = ..., token: _Optional[bytes] = ...) -> None: ...
|
||||
class NameValue(_message.Message):
|
||||
__slots__ = ["name", "value"]
|
||||
NAME_FIELD_NUMBER: _ClassVar[int]
|
||||
VALUE_FIELD_NUMBER: _ClassVar[int]
|
||||
name: str
|
||||
value: str
|
||||
def __init__(self, name: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ...
|
||||
CLIENT_CAPABILITIES_FIELD_NUMBER: _ClassVar[int]
|
||||
CLIENT_INFO_FIELD_NUMBER: _ClassVar[int]
|
||||
DEVICE_CREDENTIALS_FIELD_NUMBER: _ClassVar[int]
|
||||
DRM_DEVICE_CERTIFICATE: ClientIdentification.TokenType
|
||||
KEYBOX: ClientIdentification.TokenType
|
||||
LICENSE_COUNTER_FIELD_NUMBER: _ClassVar[int]
|
||||
OEM_DEVICE_CERTIFICATE: ClientIdentification.TokenType
|
||||
PROVIDER_CLIENT_TOKEN_FIELD_NUMBER: _ClassVar[int]
|
||||
REMOTE_ATTESTATION_CERTIFICATE: ClientIdentification.TokenType
|
||||
TOKEN_FIELD_NUMBER: _ClassVar[int]
|
||||
TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
VMP_DATA_FIELD_NUMBER: _ClassVar[int]
|
||||
client_capabilities: ClientIdentification.ClientCapabilities
|
||||
client_info: _containers.RepeatedCompositeFieldContainer[ClientIdentification.NameValue]
|
||||
device_credentials: _containers.RepeatedCompositeFieldContainer[ClientIdentification.ClientCredentials]
|
||||
license_counter: int
|
||||
provider_client_token: bytes
|
||||
token: bytes
|
||||
type: ClientIdentification.TokenType
|
||||
vmp_data: bytes
|
||||
def __init__(self, type: _Optional[_Union[ClientIdentification.TokenType, str]] = ..., token: _Optional[bytes] = ..., client_info: _Optional[_Iterable[_Union[ClientIdentification.NameValue, _Mapping]]] = ..., provider_client_token: _Optional[bytes] = ..., license_counter: _Optional[int] = ..., client_capabilities: _Optional[_Union[ClientIdentification.ClientCapabilities, _Mapping]] = ..., vmp_data: _Optional[bytes] = ..., device_credentials: _Optional[_Iterable[_Union[ClientIdentification.ClientCredentials, _Mapping]]] = ...) -> None: ...
|
||||
|
||||
class DrmCertificate(_message.Message):
|
||||
__slots__ = ["algorithm", "creation_time_seconds", "encryption_key", "expiration_time_seconds", "provider_id", "public_key", "rot_id", "serial_number", "service_types", "system_id", "test_device_deprecated", "type"]
|
||||
class Algorithm(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class ServiceType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class Type(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class EncryptionKey(_message.Message):
|
||||
__slots__ = ["algorithm", "public_key"]
|
||||
ALGORITHM_FIELD_NUMBER: _ClassVar[int]
|
||||
PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int]
|
||||
algorithm: DrmCertificate.Algorithm
|
||||
public_key: bytes
|
||||
def __init__(self, public_key: _Optional[bytes] = ..., algorithm: _Optional[_Union[DrmCertificate.Algorithm, str]] = ...) -> None: ...
|
||||
ALGORITHM_FIELD_NUMBER: _ClassVar[int]
|
||||
CAS_PROXY_SDK: DrmCertificate.ServiceType
|
||||
CREATION_TIME_SECONDS_FIELD_NUMBER: _ClassVar[int]
|
||||
DEVICE: DrmCertificate.Type
|
||||
DEVICE_MODEL: DrmCertificate.Type
|
||||
ECC_SECP256R1: DrmCertificate.Algorithm
|
||||
ECC_SECP384R1: DrmCertificate.Algorithm
|
||||
ECC_SECP521R1: DrmCertificate.Algorithm
|
||||
ENCRYPTION_KEY_FIELD_NUMBER: _ClassVar[int]
|
||||
EXPIRATION_TIME_SECONDS_FIELD_NUMBER: _ClassVar[int]
|
||||
LICENSE_SERVER_PROXY_SDK: DrmCertificate.ServiceType
|
||||
LICENSE_SERVER_SDK: DrmCertificate.ServiceType
|
||||
PROVIDER_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
PROVISIONER: DrmCertificate.Type
|
||||
PROVISIONING_SDK: DrmCertificate.ServiceType
|
||||
PUBLIC_KEY_FIELD_NUMBER: _ClassVar[int]
|
||||
ROOT: DrmCertificate.Type
|
||||
ROT_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
RSA: DrmCertificate.Algorithm
|
||||
SERIAL_NUMBER_FIELD_NUMBER: _ClassVar[int]
|
||||
SERVICE: DrmCertificate.Type
|
||||
SERVICE_TYPES_FIELD_NUMBER: _ClassVar[int]
|
||||
SYSTEM_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
TEST_DEVICE_DEPRECATED_FIELD_NUMBER: _ClassVar[int]
|
||||
TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
UNKNOWN_ALGORITHM: DrmCertificate.Algorithm
|
||||
UNKNOWN_SERVICE_TYPE: DrmCertificate.ServiceType
|
||||
algorithm: DrmCertificate.Algorithm
|
||||
creation_time_seconds: int
|
||||
encryption_key: DrmCertificate.EncryptionKey
|
||||
expiration_time_seconds: int
|
||||
provider_id: str
|
||||
public_key: bytes
|
||||
rot_id: bytes
|
||||
serial_number: bytes
|
||||
service_types: _containers.RepeatedScalarFieldContainer[DrmCertificate.ServiceType]
|
||||
system_id: int
|
||||
test_device_deprecated: bool
|
||||
type: DrmCertificate.Type
|
||||
def __init__(self, type: _Optional[_Union[DrmCertificate.Type, str]] = ..., serial_number: _Optional[bytes] = ..., creation_time_seconds: _Optional[int] = ..., expiration_time_seconds: _Optional[int] = ..., public_key: _Optional[bytes] = ..., system_id: _Optional[int] = ..., test_device_deprecated: bool = ..., provider_id: _Optional[str] = ..., service_types: _Optional[_Iterable[_Union[DrmCertificate.ServiceType, str]]] = ..., algorithm: _Optional[_Union[DrmCertificate.Algorithm, str]] = ..., rot_id: _Optional[bytes] = ..., encryption_key: _Optional[_Union[DrmCertificate.EncryptionKey, _Mapping]] = ...) -> None: ...
|
||||
|
||||
class EncryptedClientIdentification(_message.Message):
|
||||
__slots__ = ["encrypted_client_id", "encrypted_client_id_iv", "encrypted_privacy_key", "provider_id", "service_certificate_serial_number"]
|
||||
ENCRYPTED_CLIENT_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
ENCRYPTED_CLIENT_ID_IV_FIELD_NUMBER: _ClassVar[int]
|
||||
ENCRYPTED_PRIVACY_KEY_FIELD_NUMBER: _ClassVar[int]
|
||||
PROVIDER_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
SERVICE_CERTIFICATE_SERIAL_NUMBER_FIELD_NUMBER: _ClassVar[int]
|
||||
encrypted_client_id: bytes
|
||||
encrypted_client_id_iv: bytes
|
||||
encrypted_privacy_key: bytes
|
||||
provider_id: str
|
||||
service_certificate_serial_number: bytes
|
||||
def __init__(self, provider_id: _Optional[str] = ..., service_certificate_serial_number: _Optional[bytes] = ..., encrypted_client_id: _Optional[bytes] = ..., encrypted_client_id_iv: _Optional[bytes] = ..., encrypted_privacy_key: _Optional[bytes] = ...) -> None: ...
|
||||
|
||||
class FileHashes(_message.Message):
|
||||
__slots__ = ["signatures", "signer"]
|
||||
class Signature(_message.Message):
|
||||
__slots__ = ["SHA512Hash", "filename", "main_exe", "signature", "test_signing"]
|
||||
FILENAME_FIELD_NUMBER: _ClassVar[int]
|
||||
MAIN_EXE_FIELD_NUMBER: _ClassVar[int]
|
||||
SHA512HASH_FIELD_NUMBER: _ClassVar[int]
|
||||
SHA512Hash: bytes
|
||||
SIGNATURE_FIELD_NUMBER: _ClassVar[int]
|
||||
TEST_SIGNING_FIELD_NUMBER: _ClassVar[int]
|
||||
filename: str
|
||||
main_exe: bool
|
||||
signature: bytes
|
||||
test_signing: bool
|
||||
def __init__(self, filename: _Optional[str] = ..., test_signing: bool = ..., SHA512Hash: _Optional[bytes] = ..., main_exe: bool = ..., signature: _Optional[bytes] = ...) -> None: ...
|
||||
SIGNATURES_FIELD_NUMBER: _ClassVar[int]
|
||||
SIGNER_FIELD_NUMBER: _ClassVar[int]
|
||||
signatures: _containers.RepeatedCompositeFieldContainer[FileHashes.Signature]
|
||||
signer: bytes
|
||||
def __init__(self, signer: _Optional[bytes] = ..., signatures: _Optional[_Iterable[_Union[FileHashes.Signature, _Mapping]]] = ...) -> None: ...
|
||||
|
||||
class License(_message.Message):
|
||||
__slots__ = ["group_ids", "id", "key", "license_start_time", "platform_verification_status", "policy", "protection_scheme", "provider_client_token", "remote_attestation_verified", "srm_requirement", "srm_update"]
|
||||
class KeyContainer(_message.Message):
|
||||
__slots__ = ["anti_rollback_usage_table", "id", "iv", "key", "key_control", "level", "operator_session_key_permissions", "requested_protection", "required_protection", "track_label", "type", "video_resolution_constraints"]
|
||||
class KeyType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class SecurityLevel(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class KeyControl(_message.Message):
|
||||
__slots__ = ["iv", "key_control_block"]
|
||||
IV_FIELD_NUMBER: _ClassVar[int]
|
||||
KEY_CONTROL_BLOCK_FIELD_NUMBER: _ClassVar[int]
|
||||
iv: bytes
|
||||
key_control_block: bytes
|
||||
def __init__(self, key_control_block: _Optional[bytes] = ..., iv: _Optional[bytes] = ...) -> None: ...
|
||||
class OperatorSessionKeyPermissions(_message.Message):
|
||||
__slots__ = ["allow_decrypt", "allow_encrypt", "allow_sign", "allow_signature_verify"]
|
||||
ALLOW_DECRYPT_FIELD_NUMBER: _ClassVar[int]
|
||||
ALLOW_ENCRYPT_FIELD_NUMBER: _ClassVar[int]
|
||||
ALLOW_SIGNATURE_VERIFY_FIELD_NUMBER: _ClassVar[int]
|
||||
ALLOW_SIGN_FIELD_NUMBER: _ClassVar[int]
|
||||
allow_decrypt: bool
|
||||
allow_encrypt: bool
|
||||
allow_sign: bool
|
||||
allow_signature_verify: bool
|
||||
def __init__(self, allow_encrypt: bool = ..., allow_decrypt: bool = ..., allow_sign: bool = ..., allow_signature_verify: bool = ...) -> None: ...
|
||||
class OutputProtection(_message.Message):
|
||||
__slots__ = ["cgms_flags", "disable_analog_output", "disable_digital_output", "hdcp", "hdcp_srm_rule"]
|
||||
class CGMS(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class HDCP(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class HdcpSrmRule(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
CGMS_FLAGS_FIELD_NUMBER: _ClassVar[int]
|
||||
CGMS_NONE: License.KeyContainer.OutputProtection.CGMS
|
||||
COPY_FREE: License.KeyContainer.OutputProtection.CGMS
|
||||
COPY_NEVER: License.KeyContainer.OutputProtection.CGMS
|
||||
COPY_ONCE: License.KeyContainer.OutputProtection.CGMS
|
||||
CURRENT_SRM: License.KeyContainer.OutputProtection.HdcpSrmRule
|
||||
DISABLE_ANALOG_OUTPUT_FIELD_NUMBER: _ClassVar[int]
|
||||
DISABLE_DIGITAL_OUTPUT_FIELD_NUMBER: _ClassVar[int]
|
||||
HDCP_FIELD_NUMBER: _ClassVar[int]
|
||||
HDCP_NONE: License.KeyContainer.OutputProtection.HDCP
|
||||
HDCP_NO_DIGITAL_OUTPUT: License.KeyContainer.OutputProtection.HDCP
|
||||
HDCP_SRM_RULE_FIELD_NUMBER: _ClassVar[int]
|
||||
HDCP_SRM_RULE_NONE: License.KeyContainer.OutputProtection.HdcpSrmRule
|
||||
HDCP_V1: License.KeyContainer.OutputProtection.HDCP
|
||||
HDCP_V2: License.KeyContainer.OutputProtection.HDCP
|
||||
HDCP_V2_1: License.KeyContainer.OutputProtection.HDCP
|
||||
HDCP_V2_2: License.KeyContainer.OutputProtection.HDCP
|
||||
HDCP_V2_3: License.KeyContainer.OutputProtection.HDCP
|
||||
cgms_flags: License.KeyContainer.OutputProtection.CGMS
|
||||
disable_analog_output: bool
|
||||
disable_digital_output: bool
|
||||
hdcp: License.KeyContainer.OutputProtection.HDCP
|
||||
hdcp_srm_rule: License.KeyContainer.OutputProtection.HdcpSrmRule
|
||||
def __init__(self, hdcp: _Optional[_Union[License.KeyContainer.OutputProtection.HDCP, str]] = ..., cgms_flags: _Optional[_Union[License.KeyContainer.OutputProtection.CGMS, str]] = ..., hdcp_srm_rule: _Optional[_Union[License.KeyContainer.OutputProtection.HdcpSrmRule, str]] = ..., disable_analog_output: bool = ..., disable_digital_output: bool = ...) -> None: ...
|
||||
class VideoResolutionConstraint(_message.Message):
|
||||
__slots__ = ["max_resolution_pixels", "min_resolution_pixels", "required_protection"]
|
||||
MAX_RESOLUTION_PIXELS_FIELD_NUMBER: _ClassVar[int]
|
||||
MIN_RESOLUTION_PIXELS_FIELD_NUMBER: _ClassVar[int]
|
||||
REQUIRED_PROTECTION_FIELD_NUMBER: _ClassVar[int]
|
||||
max_resolution_pixels: int
|
||||
min_resolution_pixels: int
|
||||
required_protection: License.KeyContainer.OutputProtection
|
||||
def __init__(self, min_resolution_pixels: _Optional[int] = ..., max_resolution_pixels: _Optional[int] = ..., required_protection: _Optional[_Union[License.KeyContainer.OutputProtection, _Mapping]] = ...) -> None: ...
|
||||
ANTI_ROLLBACK_USAGE_TABLE_FIELD_NUMBER: _ClassVar[int]
|
||||
CONTENT: License.KeyContainer.KeyType
|
||||
ENTITLEMENT: License.KeyContainer.KeyType
|
||||
HW_SECURE_ALL: License.KeyContainer.SecurityLevel
|
||||
HW_SECURE_CRYPTO: License.KeyContainer.SecurityLevel
|
||||
HW_SECURE_DECODE: License.KeyContainer.SecurityLevel
|
||||
ID_FIELD_NUMBER: _ClassVar[int]
|
||||
IV_FIELD_NUMBER: _ClassVar[int]
|
||||
KEY_CONTROL: License.KeyContainer.KeyType
|
||||
KEY_CONTROL_FIELD_NUMBER: _ClassVar[int]
|
||||
KEY_FIELD_NUMBER: _ClassVar[int]
|
||||
LEVEL_FIELD_NUMBER: _ClassVar[int]
|
||||
OEM_CONTENT: License.KeyContainer.KeyType
|
||||
OPERATOR_SESSION: License.KeyContainer.KeyType
|
||||
OPERATOR_SESSION_KEY_PERMISSIONS_FIELD_NUMBER: _ClassVar[int]
|
||||
REQUESTED_PROTECTION_FIELD_NUMBER: _ClassVar[int]
|
||||
REQUIRED_PROTECTION_FIELD_NUMBER: _ClassVar[int]
|
||||
SIGNING: License.KeyContainer.KeyType
|
||||
SW_SECURE_CRYPTO: License.KeyContainer.SecurityLevel
|
||||
SW_SECURE_DECODE: License.KeyContainer.SecurityLevel
|
||||
TRACK_LABEL_FIELD_NUMBER: _ClassVar[int]
|
||||
TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
VIDEO_RESOLUTION_CONSTRAINTS_FIELD_NUMBER: _ClassVar[int]
|
||||
anti_rollback_usage_table: bool
|
||||
id: bytes
|
||||
iv: bytes
|
||||
key: bytes
|
||||
key_control: License.KeyContainer.KeyControl
|
||||
level: License.KeyContainer.SecurityLevel
|
||||
operator_session_key_permissions: License.KeyContainer.OperatorSessionKeyPermissions
|
||||
requested_protection: License.KeyContainer.OutputProtection
|
||||
required_protection: License.KeyContainer.OutputProtection
|
||||
track_label: str
|
||||
type: License.KeyContainer.KeyType
|
||||
video_resolution_constraints: _containers.RepeatedCompositeFieldContainer[License.KeyContainer.VideoResolutionConstraint]
|
||||
def __init__(self, id: _Optional[bytes] = ..., iv: _Optional[bytes] = ..., key: _Optional[bytes] = ..., type: _Optional[_Union[License.KeyContainer.KeyType, str]] = ..., level: _Optional[_Union[License.KeyContainer.SecurityLevel, str]] = ..., required_protection: _Optional[_Union[License.KeyContainer.OutputProtection, _Mapping]] = ..., requested_protection: _Optional[_Union[License.KeyContainer.OutputProtection, _Mapping]] = ..., key_control: _Optional[_Union[License.KeyContainer.KeyControl, _Mapping]] = ..., operator_session_key_permissions: _Optional[_Union[License.KeyContainer.OperatorSessionKeyPermissions, _Mapping]] = ..., video_resolution_constraints: _Optional[_Iterable[_Union[License.KeyContainer.VideoResolutionConstraint, _Mapping]]] = ..., anti_rollback_usage_table: bool = ..., track_label: _Optional[str] = ...) -> None: ...
|
||||
class Policy(_message.Message):
|
||||
__slots__ = ["always_include_client_id", "can_persist", "can_play", "can_renew", "license_duration_seconds", "play_start_grace_period_seconds", "playback_duration_seconds", "renew_with_usage", "renewal_delay_seconds", "renewal_recovery_duration_seconds", "renewal_retry_interval_seconds", "renewal_server_url", "rental_duration_seconds", "soft_enforce_playback_duration", "soft_enforce_rental_duration"]
|
||||
ALWAYS_INCLUDE_CLIENT_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
CAN_PERSIST_FIELD_NUMBER: _ClassVar[int]
|
||||
CAN_PLAY_FIELD_NUMBER: _ClassVar[int]
|
||||
CAN_RENEW_FIELD_NUMBER: _ClassVar[int]
|
||||
LICENSE_DURATION_SECONDS_FIELD_NUMBER: _ClassVar[int]
|
||||
PLAYBACK_DURATION_SECONDS_FIELD_NUMBER: _ClassVar[int]
|
||||
PLAY_START_GRACE_PERIOD_SECONDS_FIELD_NUMBER: _ClassVar[int]
|
||||
RENEWAL_DELAY_SECONDS_FIELD_NUMBER: _ClassVar[int]
|
||||
RENEWAL_RECOVERY_DURATION_SECONDS_FIELD_NUMBER: _ClassVar[int]
|
||||
RENEWAL_RETRY_INTERVAL_SECONDS_FIELD_NUMBER: _ClassVar[int]
|
||||
RENEWAL_SERVER_URL_FIELD_NUMBER: _ClassVar[int]
|
||||
RENEW_WITH_USAGE_FIELD_NUMBER: _ClassVar[int]
|
||||
RENTAL_DURATION_SECONDS_FIELD_NUMBER: _ClassVar[int]
|
||||
SOFT_ENFORCE_PLAYBACK_DURATION_FIELD_NUMBER: _ClassVar[int]
|
||||
SOFT_ENFORCE_RENTAL_DURATION_FIELD_NUMBER: _ClassVar[int]
|
||||
always_include_client_id: bool
|
||||
can_persist: bool
|
||||
can_play: bool
|
||||
can_renew: bool
|
||||
license_duration_seconds: int
|
||||
play_start_grace_period_seconds: int
|
||||
playback_duration_seconds: int
|
||||
renew_with_usage: bool
|
||||
renewal_delay_seconds: int
|
||||
renewal_recovery_duration_seconds: int
|
||||
renewal_retry_interval_seconds: int
|
||||
renewal_server_url: str
|
||||
rental_duration_seconds: int
|
||||
soft_enforce_playback_duration: bool
|
||||
soft_enforce_rental_duration: bool
|
||||
def __init__(self, can_play: bool = ..., can_persist: bool = ..., can_renew: bool = ..., rental_duration_seconds: _Optional[int] = ..., playback_duration_seconds: _Optional[int] = ..., license_duration_seconds: _Optional[int] = ..., renewal_recovery_duration_seconds: _Optional[int] = ..., renewal_server_url: _Optional[str] = ..., renewal_delay_seconds: _Optional[int] = ..., renewal_retry_interval_seconds: _Optional[int] = ..., renew_with_usage: bool = ..., always_include_client_id: bool = ..., play_start_grace_period_seconds: _Optional[int] = ..., soft_enforce_playback_duration: bool = ..., soft_enforce_rental_duration: bool = ...) -> None: ...
|
||||
GROUP_IDS_FIELD_NUMBER: _ClassVar[int]
|
||||
ID_FIELD_NUMBER: _ClassVar[int]
|
||||
KEY_FIELD_NUMBER: _ClassVar[int]
|
||||
LICENSE_START_TIME_FIELD_NUMBER: _ClassVar[int]
|
||||
PLATFORM_VERIFICATION_STATUS_FIELD_NUMBER: _ClassVar[int]
|
||||
POLICY_FIELD_NUMBER: _ClassVar[int]
|
||||
PROTECTION_SCHEME_FIELD_NUMBER: _ClassVar[int]
|
||||
PROVIDER_CLIENT_TOKEN_FIELD_NUMBER: _ClassVar[int]
|
||||
REMOTE_ATTESTATION_VERIFIED_FIELD_NUMBER: _ClassVar[int]
|
||||
SRM_REQUIREMENT_FIELD_NUMBER: _ClassVar[int]
|
||||
SRM_UPDATE_FIELD_NUMBER: _ClassVar[int]
|
||||
group_ids: _containers.RepeatedScalarFieldContainer[bytes]
|
||||
id: LicenseIdentification
|
||||
key: _containers.RepeatedCompositeFieldContainer[License.KeyContainer]
|
||||
license_start_time: int
|
||||
platform_verification_status: PlatformVerificationStatus
|
||||
policy: License.Policy
|
||||
protection_scheme: int
|
||||
provider_client_token: bytes
|
||||
remote_attestation_verified: bool
|
||||
srm_requirement: bytes
|
||||
srm_update: bytes
|
||||
def __init__(self, id: _Optional[_Union[LicenseIdentification, _Mapping]] = ..., policy: _Optional[_Union[License.Policy, _Mapping]] = ..., key: _Optional[_Iterable[_Union[License.KeyContainer, _Mapping]]] = ..., license_start_time: _Optional[int] = ..., remote_attestation_verified: bool = ..., provider_client_token: _Optional[bytes] = ..., protection_scheme: _Optional[int] = ..., srm_requirement: _Optional[bytes] = ..., srm_update: _Optional[bytes] = ..., platform_verification_status: _Optional[_Union[PlatformVerificationStatus, str]] = ..., group_ids: _Optional[_Iterable[bytes]] = ...) -> None: ...
|
||||
|
||||
class LicenseIdentification(_message.Message):
|
||||
__slots__ = ["provider_session_token", "purchase_id", "request_id", "session_id", "type", "version"]
|
||||
PROVIDER_SESSION_TOKEN_FIELD_NUMBER: _ClassVar[int]
|
||||
PURCHASE_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
REQUEST_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
SESSION_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
VERSION_FIELD_NUMBER: _ClassVar[int]
|
||||
provider_session_token: bytes
|
||||
purchase_id: bytes
|
||||
request_id: bytes
|
||||
session_id: bytes
|
||||
type: LicenseType
|
||||
version: int
|
||||
def __init__(self, request_id: _Optional[bytes] = ..., session_id: _Optional[bytes] = ..., purchase_id: _Optional[bytes] = ..., type: _Optional[_Union[LicenseType, str]] = ..., version: _Optional[int] = ..., provider_session_token: _Optional[bytes] = ...) -> None: ...
|
||||
|
||||
class LicenseRequest(_message.Message):
|
||||
__slots__ = ["client_id", "content_id", "encrypted_client_id", "key_control_nonce", "key_control_nonce_deprecated", "protocol_version", "request_time", "type"]
|
||||
class RequestType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class ContentIdentification(_message.Message):
|
||||
__slots__ = ["existing_license", "init_data", "webm_key_id", "widevine_pssh_data"]
|
||||
class ExistingLicense(_message.Message):
|
||||
__slots__ = ["license_id", "seconds_since_last_played", "seconds_since_started", "session_usage_table_entry"]
|
||||
LICENSE_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
SECONDS_SINCE_LAST_PLAYED_FIELD_NUMBER: _ClassVar[int]
|
||||
SECONDS_SINCE_STARTED_FIELD_NUMBER: _ClassVar[int]
|
||||
SESSION_USAGE_TABLE_ENTRY_FIELD_NUMBER: _ClassVar[int]
|
||||
license_id: LicenseIdentification
|
||||
seconds_since_last_played: int
|
||||
seconds_since_started: int
|
||||
session_usage_table_entry: bytes
|
||||
def __init__(self, license_id: _Optional[_Union[LicenseIdentification, _Mapping]] = ..., seconds_since_started: _Optional[int] = ..., seconds_since_last_played: _Optional[int] = ..., session_usage_table_entry: _Optional[bytes] = ...) -> None: ...
|
||||
class InitData(_message.Message):
|
||||
__slots__ = ["init_data", "init_data_type", "license_type", "request_id"]
|
||||
class InitDataType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
CENC: LicenseRequest.ContentIdentification.InitData.InitDataType
|
||||
INIT_DATA_FIELD_NUMBER: _ClassVar[int]
|
||||
INIT_DATA_TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
LICENSE_TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
REQUEST_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
WEBM: LicenseRequest.ContentIdentification.InitData.InitDataType
|
||||
init_data: bytes
|
||||
init_data_type: LicenseRequest.ContentIdentification.InitData.InitDataType
|
||||
license_type: LicenseType
|
||||
request_id: bytes
|
||||
def __init__(self, init_data_type: _Optional[_Union[LicenseRequest.ContentIdentification.InitData.InitDataType, str]] = ..., init_data: _Optional[bytes] = ..., license_type: _Optional[_Union[LicenseType, str]] = ..., request_id: _Optional[bytes] = ...) -> None: ...
|
||||
class WebmKeyId(_message.Message):
|
||||
__slots__ = ["header", "license_type", "request_id"]
|
||||
HEADER_FIELD_NUMBER: _ClassVar[int]
|
||||
LICENSE_TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
REQUEST_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
header: bytes
|
||||
license_type: LicenseType
|
||||
request_id: bytes
|
||||
def __init__(self, header: _Optional[bytes] = ..., license_type: _Optional[_Union[LicenseType, str]] = ..., request_id: _Optional[bytes] = ...) -> None: ...
|
||||
class WidevinePsshData(_message.Message):
|
||||
__slots__ = ["license_type", "pssh_data", "request_id"]
|
||||
LICENSE_TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
PSSH_DATA_FIELD_NUMBER: _ClassVar[int]
|
||||
REQUEST_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
license_type: LicenseType
|
||||
pssh_data: _containers.RepeatedScalarFieldContainer[bytes]
|
||||
request_id: bytes
|
||||
def __init__(self, pssh_data: _Optional[_Iterable[bytes]] = ..., license_type: _Optional[_Union[LicenseType, str]] = ..., request_id: _Optional[bytes] = ...) -> None: ...
|
||||
EXISTING_LICENSE_FIELD_NUMBER: _ClassVar[int]
|
||||
INIT_DATA_FIELD_NUMBER: _ClassVar[int]
|
||||
WEBM_KEY_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
WIDEVINE_PSSH_DATA_FIELD_NUMBER: _ClassVar[int]
|
||||
existing_license: LicenseRequest.ContentIdentification.ExistingLicense
|
||||
init_data: LicenseRequest.ContentIdentification.InitData
|
||||
webm_key_id: LicenseRequest.ContentIdentification.WebmKeyId
|
||||
widevine_pssh_data: LicenseRequest.ContentIdentification.WidevinePsshData
|
||||
def __init__(self, widevine_pssh_data: _Optional[_Union[LicenseRequest.ContentIdentification.WidevinePsshData, _Mapping]] = ..., webm_key_id: _Optional[_Union[LicenseRequest.ContentIdentification.WebmKeyId, _Mapping]] = ..., existing_license: _Optional[_Union[LicenseRequest.ContentIdentification.ExistingLicense, _Mapping]] = ..., init_data: _Optional[_Union[LicenseRequest.ContentIdentification.InitData, _Mapping]] = ...) -> None: ...
|
||||
CLIENT_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
CONTENT_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
ENCRYPTED_CLIENT_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
KEY_CONTROL_NONCE_DEPRECATED_FIELD_NUMBER: _ClassVar[int]
|
||||
KEY_CONTROL_NONCE_FIELD_NUMBER: _ClassVar[int]
|
||||
NEW: LicenseRequest.RequestType
|
||||
PROTOCOL_VERSION_FIELD_NUMBER: _ClassVar[int]
|
||||
RELEASE: LicenseRequest.RequestType
|
||||
RENEWAL: LicenseRequest.RequestType
|
||||
REQUEST_TIME_FIELD_NUMBER: _ClassVar[int]
|
||||
TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
client_id: ClientIdentification
|
||||
content_id: LicenseRequest.ContentIdentification
|
||||
encrypted_client_id: EncryptedClientIdentification
|
||||
key_control_nonce: int
|
||||
key_control_nonce_deprecated: bytes
|
||||
protocol_version: ProtocolVersion
|
||||
request_time: int
|
||||
type: LicenseRequest.RequestType
|
||||
def __init__(self, client_id: _Optional[_Union[ClientIdentification, _Mapping]] = ..., content_id: _Optional[_Union[LicenseRequest.ContentIdentification, _Mapping]] = ..., type: _Optional[_Union[LicenseRequest.RequestType, str]] = ..., request_time: _Optional[int] = ..., key_control_nonce_deprecated: _Optional[bytes] = ..., protocol_version: _Optional[_Union[ProtocolVersion, str]] = ..., key_control_nonce: _Optional[int] = ..., encrypted_client_id: _Optional[_Union[EncryptedClientIdentification, _Mapping]] = ...) -> None: ...
|
||||
|
||||
class MetricData(_message.Message):
|
||||
__slots__ = ["metric_data", "stage_name"]
|
||||
class MetricType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class TypeValue(_message.Message):
|
||||
__slots__ = ["type", "value"]
|
||||
TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
VALUE_FIELD_NUMBER: _ClassVar[int]
|
||||
type: MetricData.MetricType
|
||||
value: int
|
||||
def __init__(self, type: _Optional[_Union[MetricData.MetricType, str]] = ..., value: _Optional[int] = ...) -> None: ...
|
||||
LATENCY: MetricData.MetricType
|
||||
METRIC_DATA_FIELD_NUMBER: _ClassVar[int]
|
||||
STAGE_NAME_FIELD_NUMBER: _ClassVar[int]
|
||||
TIMESTAMP: MetricData.MetricType
|
||||
metric_data: _containers.RepeatedCompositeFieldContainer[MetricData.TypeValue]
|
||||
stage_name: str
|
||||
def __init__(self, stage_name: _Optional[str] = ..., metric_data: _Optional[_Iterable[_Union[MetricData.TypeValue, _Mapping]]] = ...) -> None: ...
|
||||
|
||||
class SignedDrmCertificate(_message.Message):
|
||||
__slots__ = ["drm_certificate", "hash_algorithm", "signature", "signer"]
|
||||
DRM_CERTIFICATE_FIELD_NUMBER: _ClassVar[int]
|
||||
HASH_ALGORITHM_FIELD_NUMBER: _ClassVar[int]
|
||||
SIGNATURE_FIELD_NUMBER: _ClassVar[int]
|
||||
SIGNER_FIELD_NUMBER: _ClassVar[int]
|
||||
drm_certificate: bytes
|
||||
hash_algorithm: HashAlgorithmProto
|
||||
signature: bytes
|
||||
signer: SignedDrmCertificate
|
||||
def __init__(self, drm_certificate: _Optional[bytes] = ..., signature: _Optional[bytes] = ..., signer: _Optional[_Union[SignedDrmCertificate, _Mapping]] = ..., hash_algorithm: _Optional[_Union[HashAlgorithmProto, str]] = ...) -> None: ...
|
||||
|
||||
class SignedMessage(_message.Message):
|
||||
__slots__ = ["metric_data", "msg", "oemcrypto_core_message", "remote_attestation", "service_version_info", "session_key", "session_key_type", "signature", "type"]
|
||||
class MessageType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class SessionKeyType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
CAS_LICENSE: SignedMessage.MessageType
|
||||
CAS_LICENSE_REQUEST: SignedMessage.MessageType
|
||||
EPHERMERAL_ECC_PUBLIC_KEY: SignedMessage.SessionKeyType
|
||||
ERROR_RESPONSE: SignedMessage.MessageType
|
||||
EXTERNAL_LICENSE: SignedMessage.MessageType
|
||||
EXTERNAL_LICENSE_REQUEST: SignedMessage.MessageType
|
||||
LICENSE: SignedMessage.MessageType
|
||||
LICENSE_REQUEST: SignedMessage.MessageType
|
||||
METRIC_DATA_FIELD_NUMBER: _ClassVar[int]
|
||||
MSG_FIELD_NUMBER: _ClassVar[int]
|
||||
OEMCRYPTO_CORE_MESSAGE_FIELD_NUMBER: _ClassVar[int]
|
||||
REMOTE_ATTESTATION_FIELD_NUMBER: _ClassVar[int]
|
||||
SERVICE_CERTIFICATE: SignedMessage.MessageType
|
||||
SERVICE_CERTIFICATE_REQUEST: SignedMessage.MessageType
|
||||
SERVICE_VERSION_INFO_FIELD_NUMBER: _ClassVar[int]
|
||||
SESSION_KEY_FIELD_NUMBER: _ClassVar[int]
|
||||
SESSION_KEY_TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
SIGNATURE_FIELD_NUMBER: _ClassVar[int]
|
||||
SUB_LICENSE: SignedMessage.MessageType
|
||||
TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
UNDEFINED: SignedMessage.SessionKeyType
|
||||
WRAPPED_AES_KEY: SignedMessage.SessionKeyType
|
||||
metric_data: _containers.RepeatedCompositeFieldContainer[MetricData]
|
||||
msg: bytes
|
||||
oemcrypto_core_message: bytes
|
||||
remote_attestation: bytes
|
||||
service_version_info: VersionInfo
|
||||
session_key: bytes
|
||||
session_key_type: SignedMessage.SessionKeyType
|
||||
signature: bytes
|
||||
type: SignedMessage.MessageType
|
||||
def __init__(self, type: _Optional[_Union[SignedMessage.MessageType, str]] = ..., msg: _Optional[bytes] = ..., signature: _Optional[bytes] = ..., session_key: _Optional[bytes] = ..., remote_attestation: _Optional[bytes] = ..., metric_data: _Optional[_Iterable[_Union[MetricData, _Mapping]]] = ..., service_version_info: _Optional[_Union[VersionInfo, _Mapping]] = ..., session_key_type: _Optional[_Union[SignedMessage.SessionKeyType, str]] = ..., oemcrypto_core_message: _Optional[bytes] = ...) -> None: ...
|
||||
|
||||
class VersionInfo(_message.Message):
|
||||
__slots__ = ["license_sdk_version", "license_service_version"]
|
||||
LICENSE_SDK_VERSION_FIELD_NUMBER: _ClassVar[int]
|
||||
LICENSE_SERVICE_VERSION_FIELD_NUMBER: _ClassVar[int]
|
||||
license_sdk_version: str
|
||||
license_service_version: str
|
||||
def __init__(self, license_sdk_version: _Optional[str] = ..., license_service_version: _Optional[str] = ...) -> None: ...
|
||||
|
||||
class WidevinePsshData(_message.Message):
|
||||
__slots__ = ["algorithm", "content_id", "crypto_period_index", "crypto_period_seconds", "entitled_keys", "group_ids", "grouped_license", "key_ids", "key_sequence", "policy", "protection_scheme", "provider", "track_type", "type", "video_feature"]
|
||||
class Algorithm(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class Type(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
class EntitledKey(_message.Message):
|
||||
__slots__ = ["entitlement_key_id", "entitlement_key_size_bytes", "iv", "key", "key_id"]
|
||||
ENTITLEMENT_KEY_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
ENTITLEMENT_KEY_SIZE_BYTES_FIELD_NUMBER: _ClassVar[int]
|
||||
IV_FIELD_NUMBER: _ClassVar[int]
|
||||
KEY_FIELD_NUMBER: _ClassVar[int]
|
||||
KEY_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
entitlement_key_id: bytes
|
||||
entitlement_key_size_bytes: int
|
||||
iv: bytes
|
||||
key: bytes
|
||||
key_id: bytes
|
||||
def __init__(self, entitlement_key_id: _Optional[bytes] = ..., key_id: _Optional[bytes] = ..., key: _Optional[bytes] = ..., iv: _Optional[bytes] = ..., entitlement_key_size_bytes: _Optional[int] = ...) -> None: ...
|
||||
AESCTR: WidevinePsshData.Algorithm
|
||||
ALGORITHM_FIELD_NUMBER: _ClassVar[int]
|
||||
CONTENT_ID_FIELD_NUMBER: _ClassVar[int]
|
||||
CRYPTO_PERIOD_INDEX_FIELD_NUMBER: _ClassVar[int]
|
||||
CRYPTO_PERIOD_SECONDS_FIELD_NUMBER: _ClassVar[int]
|
||||
ENTITLED_KEY: WidevinePsshData.Type
|
||||
ENTITLED_KEYS_FIELD_NUMBER: _ClassVar[int]
|
||||
ENTITLEMENT: WidevinePsshData.Type
|
||||
GROUPED_LICENSE_FIELD_NUMBER: _ClassVar[int]
|
||||
GROUP_IDS_FIELD_NUMBER: _ClassVar[int]
|
||||
KEY_IDS_FIELD_NUMBER: _ClassVar[int]
|
||||
KEY_SEQUENCE_FIELD_NUMBER: _ClassVar[int]
|
||||
POLICY_FIELD_NUMBER: _ClassVar[int]
|
||||
PROTECTION_SCHEME_FIELD_NUMBER: _ClassVar[int]
|
||||
PROVIDER_FIELD_NUMBER: _ClassVar[int]
|
||||
SINGLE: WidevinePsshData.Type
|
||||
TRACK_TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
TYPE_FIELD_NUMBER: _ClassVar[int]
|
||||
UNENCRYPTED: WidevinePsshData.Algorithm
|
||||
VIDEO_FEATURE_FIELD_NUMBER: _ClassVar[int]
|
||||
algorithm: WidevinePsshData.Algorithm
|
||||
content_id: bytes
|
||||
crypto_period_index: int
|
||||
crypto_period_seconds: int
|
||||
entitled_keys: _containers.RepeatedCompositeFieldContainer[WidevinePsshData.EntitledKey]
|
||||
group_ids: _containers.RepeatedScalarFieldContainer[bytes]
|
||||
grouped_license: bytes
|
||||
key_ids: _containers.RepeatedScalarFieldContainer[bytes]
|
||||
key_sequence: int
|
||||
policy: str
|
||||
protection_scheme: int
|
||||
provider: str
|
||||
track_type: str
|
||||
type: WidevinePsshData.Type
|
||||
video_feature: str
|
||||
def __init__(self, key_ids: _Optional[_Iterable[bytes]] = ..., content_id: _Optional[bytes] = ..., crypto_period_index: _Optional[int] = ..., protection_scheme: _Optional[int] = ..., crypto_period_seconds: _Optional[int] = ..., type: _Optional[_Union[WidevinePsshData.Type, str]] = ..., key_sequence: _Optional[int] = ..., group_ids: _Optional[_Iterable[bytes]] = ..., entitled_keys: _Optional[_Iterable[_Union[WidevinePsshData.EntitledKey, _Mapping]]] = ..., video_feature: _Optional[str] = ..., algorithm: _Optional[_Union[WidevinePsshData.Algorithm, str]] = ..., provider: _Optional[str] = ..., track_type: _Optional[str] = ..., policy: _Optional[str] = ..., grouped_license: _Optional[bytes] = ...) -> None: ...
|
||||
|
||||
class LicenseType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
|
||||
class PlatformVerificationStatus(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
|
||||
class ProtocolVersion(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
||||
|
||||
class HashAlgorithmProto(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
|
||||
__slots__ = []
|
@ -1,398 +0,0 @@
import logging
from datetime import datetime
from pathlib import Path
from typing import Optional
from zlib import crc32

import click
import requests
import yaml
from construct import ConstructError
from google.protobuf.json_format import MessageToDict
from unidecode import UnidecodeError, unidecode

from pywidevine import __version__
from pywidevine.cdm import Cdm
from pywidevine.device import Device, DeviceTypes
from pywidevine.license_protocol_pb2 import FileHashes, LicenseType
from pywidevine.pssh import PSSH


@click.group(invoke_without_command=True)
@click.option("-v", "--version", is_flag=True, default=False, help="Print version information.")
@click.option("-d", "--debug", is_flag=True, default=False, help="Enable DEBUG level logs.")
def main(version: bool, debug: bool) -> None:
    """pywidevine—Python Widevine CDM implementation."""
    logging.basicConfig(level=logging.DEBUG if debug else logging.INFO)
    log = logging.getLogger()

    current_year = datetime.now().year
    copyright_years = f"2022-{current_year}"

    log.info("pywidevine version %s Copyright (c) %s rlaphoenix", __version__, copyright_years)
    log.info("https://github.com/devine-dl/pywidevine")
    if version:
        return


@main.command(name="license")
@click.argument("device_path", type=Path)
@click.argument("pssh", type=PSSH)
@click.argument("server", type=str)
@click.option("-t", "--type", "license_type", type=click.Choice(LicenseType.keys(), case_sensitive=False),
              default="STREAMING",
              help="License Type to Request.")
@click.option("-p", "--privacy", is_flag=True, default=False,
              help="Use Privacy Mode, off by default.")
def license_(device_path: Path, pssh: PSSH, server: str, license_type: str, privacy: bool) -> None:
    """
    Make a License Request for PSSH to SERVER using DEVICE.
    It will return a list of all keys within the returned license.

    This expects the License Server to be a simple opaque interface where the Challenge
    is sent as is (as bytes), and the License response is returned as is (as bytes).
    This is a common behavior for some License Servers and is our only option for a generic
    licensing function.

    You may modify this function to change how it sends the Challenge and how it parses
    the License response. However, for non-generic license calls, I recommend creating a
    new script that imports and uses the pywidevine module instead. This generic function
    is only useful as a quick generic license call.

    This is also a great way of showing you how to use pywidevine in your own projects.
    """
    log = logging.getLogger("license")

    # load device
    device = Device.load(device_path)
    log.info("[+] Loaded Device (%s L%s)", device.system_id, device.security_level)
    log.debug(device)

    # load cdm
    cdm = Cdm.from_device(device)
    log.info("[+] Loaded CDM")
    log.debug(cdm)

    # open cdm session
    session_id = cdm.open()
    log.info("[+] Opened CDM Session: %s", session_id.hex())

    if privacy:
        # get service cert for license server via cert challenge
        service_cert_res = requests.post(
            url=server,
            data=cdm.service_certificate_challenge
        )
        if service_cert_res.status_code != 200:
            log.error(
                "[-] Failed to get Service Privacy Certificate: [%s] %s",
                service_cert_res.status_code,
                service_cert_res.text
            )
            return
        service_cert = service_cert_res.content
        provider_id = cdm.set_service_certificate(session_id, service_cert)
        log.info("[+] Set Service Privacy Certificate: %s", provider_id)
        log.debug(service_cert)

    # get license challenge
    challenge = cdm.get_license_challenge(session_id, pssh, license_type, privacy_mode=True)
    log.info("[+] Created License Request Message (Challenge)")
    log.debug(challenge)

    # send license challenge
    license_res = requests.post(
        url=server,
        data=challenge
    )
    if license_res.status_code != 200:
        log.error("[-] Failed to send challenge: [%s] %s", license_res.status_code, license_res.text)
        return
    licence = license_res.content
    log.info("[+] Got License Message")
    log.debug(licence)

    # parse the license message
    cdm.parse_license(session_id, licence)
    log.info("[+] License Parsed Successfully")

    # print keys
    for key in cdm.get_keys(session_id):
        log.info("[%s] %s:%s", key.type, key.kid.hex, key.key.hex())

    # close session, disposes of session data
    cdm.close(session_id)
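
    # Usage note (assumption: pywidevine is installed with its console script;
    # the device file, PSSH and server URL below are placeholders):
    #   pywidevine license device.wvd <PSSH> https://license.server.example/path -t STREAMING -p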


@main.command()
@click.argument("device", type=Path)
@click.option("-p", "--privacy", is_flag=True, default=False,
              help="Use Privacy Mode, off by default.")
@click.pass_context
def test(ctx: click.Context, device: Path, privacy: bool) -> None:
    """
    Test the CDM code by getting Content Keys for Bitmovin's Art of Motion example.
    https://bitmovin.com/demos/drm
    https://bitmovin-a.akamaihd.net/content/art-of-motion_drm/mpds/11331.mpd

    The device argument is a Path to a Widevine Device (.wvd) file which contains
    the device private key among other required information.
    """
    # The PSSH is the same for all tracks, both video and audio.
    # However, this might not be the case for all services/manifests.
    pssh = PSSH("AAAAW3Bzc2gAAAAA7e+LqXnWSs6jyCfc1R0h7QAAADsIARIQ62dqu8s0Xpa"
                "7z2FmMPGj2hoNd2lkZXZpbmVfdGVzdCIQZmtqM2xqYVNkZmFsa3IzaioCSEQyAA==")

    # This License Server requires no authorization at all, no cookies, no credentials,
    # nothing. This is often not the case for real services.
    license_server = "https://cwip-shaka-proxy.appspot.com/no_auth"

    # Specify OFFLINE if it's a PSSH for a download/offline mode title, e.g., the
    # Download feature on Netflix Apps. Otherwise, use STREAMING or AUTOMATIC.
    license_type = "STREAMING"

    # this runs the `license` CLI-command code with the data we set above
    # it will print information as it goes to the terminal
    ctx.invoke(
        license_,
        device_path=device,
        pssh=pssh,
        server=license_server,
        license_type=license_type,
        privacy=privacy
    )
|
||||
|
||||
|
||||
@main.command()
|
||||
@click.option("-t", "--type", "type_", type=click.Choice([x.name for x in DeviceTypes], case_sensitive=False),
|
||||
required=True, help="Device Type")
|
||||
@click.option("-l", "--level", type=click.IntRange(1, 3), required=True, help="Device Security Level")
|
||||
@click.option("-k", "--key", type=Path, required=True, help="Device RSA Private Key in PEM or DER format")
|
||||
@click.option("-c", "--client_id", type=Path, required=True, help="Widevine ClientIdentification Blob file")
|
||||
@click.option("-v", "--vmp", type=Path, default=None, help="Widevine FileHashes Blob file")
|
||||
@click.option("-o", "--output", type=Path, default=None, help="Output Path or Directory")
|
||||
@click.pass_context
|
||||
def create_device(
|
||||
ctx: click.Context,
|
||||
type_: str,
|
||||
level: int,
|
||||
key: Path,
|
||||
client_id: Path,
|
||||
vmp: Optional[Path] = None,
|
||||
output: Optional[Path] = None
|
||||
) -> None:
|
||||
"""
|
||||
Create a Widevine Device (.wvd) file from an RSA Private Key (PEM or DER) and Client ID Blob.
|
||||
Optionally also a VMP (Verified Media Path) Blob, which will be stored in the Client ID.
|
||||
"""
|
||||
if not key.is_file():
|
||||
raise click.UsageError("key: Not a path to a file, or it doesn't exist.", ctx)
|
||||
if not client_id.is_file():
|
||||
raise click.UsageError("client_id: Not a path to a file, or it doesn't exist.", ctx)
|
||||
if vmp and not vmp.is_file():
|
||||
raise click.UsageError("vmp: Not a path to a file, or it doesn't exist.", ctx)
|
||||
|
||||
log = logging.getLogger("create-device")
|
||||
|
||||
device = Device(
|
||||
type_=DeviceTypes[type_.upper()],
|
||||
security_level=level,
|
||||
flags=None,
|
||||
private_key=key.read_bytes(),
|
||||
client_id=client_id.read_bytes()
|
||||
)
|
||||
|
||||
if vmp:
|
||||
new_vmp_data = vmp.read_bytes()
|
||||
if device.client_id.vmp_data and device.client_id.vmp_data != new_vmp_data:
|
||||
log.warning("Client ID already has Verified Media Path data")
|
||||
device.client_id.vmp_data = new_vmp_data
|
||||
|
||||
client_info = {}
|
||||
for entry in device.client_id.client_info:
|
||||
client_info[entry.name] = entry.value
|
||||
|
||||
wvd_bin = device.dumps()
|
||||
|
||||
name = f"{client_info['company_name']} {client_info['model_name']}"
|
||||
if client_info.get("widevine_cdm_version"):
|
||||
name += f" {client_info['widevine_cdm_version']}"
|
||||
name += f" {crc32(wvd_bin).to_bytes(4, 'big').hex()}"
|
||||
|
||||
try:
|
||||
name = unidecode(name.strip().lower().replace(" ", "_"))
|
||||
except UnidecodeError as e:
|
||||
raise click.ClickException(f"Failed to sanitize name, {e}")
|
||||
|
||||
if output and output.suffix:
|
||||
if output.suffix.lower() != ".wvd":
|
||||
log.warning(f"Saving WVD with the file extension '{output.suffix}' but '.wvd' is recommended.")
|
||||
out_path = output
|
||||
else:
|
||||
out_dir = output or Path.cwd()
|
||||
out_path = out_dir / f"{name}_{device.system_id}_l{device.security_level}.wvd"
|
||||
|
||||
if out_path.exists():
|
||||
log.error(f"A file already exists at the path '{out_path}', cannot overwrite.")
|
||||
return
|
||||
|
||||
out_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
out_path.write_bytes(wvd_bin)
|
||||
|
||||
log.info("Created Widevine Device (.wvd) file, %s", out_path.name)
|
||||
log.info(" + Type: %s", device.type.name)
|
||||
log.info(" + System ID: %s", device.system_id)
|
||||
log.info(" + Security Level: %s", device.security_level)
|
||||
log.info(" + Flags: %s", device.flags)
|
||||
log.info(" + Private Key: %s (%s bit)", bool(device.private_key), device.private_key.size_in_bits())
|
||||
log.info(" + Client ID: %s (%s bytes)", bool(device.client_id), len(device.client_id.SerializeToString()))
|
||||
if device.client_id.vmp_data:
|
||||
file_hashes_ = FileHashes()
|
||||
file_hashes_.ParseFromString(device.client_id.vmp_data)
|
||||
log.info(" + VMP: True (%s signatures)", len(file_hashes_.signatures))
|
||||
else:
|
||||
log.info(" + VMP: False")
|
||||
log.info(" + Saved to: %s", out_path.absolute())
|
||||
|
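The same .wvd construction can be done programmatically with the Device class used above; a minimal sketch, assuming the DeviceTypes enum exposes an ANDROID member (file names are placeholders):

from pathlib import Path

from pywidevine.device import Device, DeviceTypes

device = Device(
    type_=DeviceTypes.ANDROID,  # assumed member name; see the --type choices above
    security_level=3,
    flags=None,
    private_key=Path("private_key.pem").read_bytes(),  # placeholder path
    client_id=Path("client_id.bin").read_bytes()       # placeholder path
)
Path("device.wvd").write_bytes(device.dumps())  # same bytes the CLI writes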
||||
|
||||
@main.command()
|
||||
@click.argument("wvd_path", type=Path)
|
||||
@click.option("-o", "--out_dir", type=Path, default=None, help="Output Directory")
|
||||
@click.pass_context
|
||||
def export_device(ctx: click.Context, wvd_path: Path, out_dir: Optional[Path] = None) -> None:
|
||||
"""
|
||||
Export a Widevine Device (.wvd) file to an RSA Private Key (PEM and DER) and Client ID Blob.
|
||||
Optionally also a VMP (Verified Media Path) Blob, if one is stored in the Client ID.
|
||||
|
||||
If an output directory is not specified, it will be stored in the current working directory.
|
||||
"""
|
||||
if not wvd_path.is_file():
|
||||
raise click.UsageError("wvd_path: Not a path to a file, or it doesn't exist.", ctx)
|
||||
|
||||
log = logging.getLogger("export-device")
|
||||
log.info("Exporting Widevine Device (.wvd) file, %s", wvd_path.stem)
|
||||
|
||||
if not out_dir:
|
||||
out_dir = Path.cwd()
|
||||
|
||||
out_path = out_dir / wvd_path.stem
|
||||
if out_path.exists():
|
||||
if any(out_path.iterdir()):
|
||||
log.error("Output directory is not empty, cannot overwrite.")
|
||||
return
|
||||
else:
|
||||
log.warning("Output directory already exists, but is empty.")
|
||||
else:
|
||||
out_path.mkdir(parents=True)
|
||||
|
||||
device = Device.load(wvd_path)
|
||||
|
||||
log.info(f"L{device.security_level} {device.system_id} {device.type.name}")
|
||||
log.info(f"Saving to: {out_path}")
|
||||
|
||||
device_meta = {
|
||||
"wvd": {
|
||||
"device_type": device.type.name,
|
||||
"security_level": device.security_level,
|
||||
**device.flags
|
||||
},
|
||||
"client_info": {},
|
||||
"capabilities": MessageToDict(device.client_id, preserving_proto_field_name=True)["client_capabilities"]
|
||||
}
|
||||
for client_info in device.client_id.client_info:
|
||||
device_meta["client_info"][client_info.name] = client_info.value
|
||||
|
||||
device_meta_path = out_path / "metadata.yml"
|
||||
device_meta_path.write_text(yaml.dump(device_meta), encoding="utf8")
|
||||
log.info("Exported Device Metadata as metadata.yml")
|
||||
|
||||
if device.private_key:
|
||||
private_key_path = out_path / "private_key.pem"
|
||||
private_key_path.write_text(
|
||||
data=device.private_key.export_key().decode(),
|
||||
encoding="utf8"
|
||||
)
|
||||
private_key_path.with_suffix(".der").write_bytes(
|
||||
device.private_key.export_key(format="DER")
|
||||
)
|
||||
log.info("Exported Private Key as private_key.der and private_key.pem")
|
||||
else:
|
||||
log.warning("No Private Key available")
|
||||
|
||||
if device.client_id:
|
||||
client_id_path = out_path / "client_id.bin"
|
||||
client_id_path.write_bytes(device.client_id.SerializeToString())
|
||||
log.info("Exported Client ID as client_id.bin")
|
||||
else:
|
||||
log.warning("No Client ID available")
|
||||
|
||||
if device.client_id.vmp_data:
|
||||
vmp_path = out_path / "vmp.bin"
|
||||
vmp_path.write_bytes(device.client_id.vmp_data)
|
||||
log.info("Exported VMP (File Hashes) as vmp.bin")
|
||||
else:
|
||||
log.info("No VMP (File Hashes) available")
|
||||
|
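The export steps above map directly onto the Device API; a minimal sketch (the paths are placeholders):

from pathlib import Path

from pywidevine.device import Device

device = Device.load("device.wvd")  # placeholder path
Path("private_key.pem").write_text(device.private_key.export_key().decode(), encoding="utf8")
Path("private_key.der").write_bytes(device.private_key.export_key(format="DER"))
Path("client_id.bin").write_bytes(device.client_id.SerializeToString())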
||||
|
||||
@main.command()
|
||||
@click.argument("path", type=Path)
|
||||
@click.pass_context
|
||||
def migrate(ctx: click.Context, path: Path) -> None:
|
||||
"""
|
||||
Upgrade from earlier versions of the Widevine Device (.wvd) format.
|
||||
|
||||
The path argument can be a direct path to a Widevine Device (.wvd) file, or a path
|
||||
to a folder of Widevine Device (.wvd) files.
|
||||
|
||||
Each migrated device is saved to its original location, overwriting the old version.
|
||||
"""
|
||||
if not path.exists():
|
||||
raise click.UsageError(f"path: The path '{path}' does not exist.", ctx)
|
||||
|
||||
log = logging.getLogger("migrate")
|
||||
|
||||
if path.is_dir():
|
||||
devices = list(path.glob("*.wvd"))
|
||||
else:
|
||||
devices = [path]
|
||||
|
||||
migrated = 0
|
||||
for device in devices:
|
||||
log.info("Migrating %s...", device.name)
|
||||
|
||||
try:
|
||||
new_device = Device.migrate(device.read_bytes())
|
||||
except (ConstructError, ValueError) as e:
|
||||
log.error(" - %s", e)
|
||||
continue
|
||||
|
||||
log.debug(new_device)
|
||||
new_device.dump(device)
|
||||
|
||||
log.info(" + Success")
|
||||
migrated += 1
|
||||
|
||||
log.info("Migrated %s/%s devices!", migrated, len(devices))
|
||||
|
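Programmatically, a single file is migrated exactly as the loop above does it; a minimal sketch (the path is a placeholder):

from pathlib import Path

from pywidevine.device import Device

path = Path("old_device.wvd")  # placeholder path
new_device = Device.migrate(path.read_bytes())  # raises ConstructError or ValueError on bad data
new_device.dump(path)  # saved back to its original location, as the CLI does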
||||
|
||||
@main.command("serve", short_help="Serve your local CDM and Widevine Devices Remotely.")
|
||||
@click.argument("config_path", type=Path)
|
||||
@click.option("-h", "--host", type=str, default="127.0.0.1", help="Host to serve from.")
|
||||
@click.option("-p", "--port", type=int, default=8786, help="Port to serve from.")
|
||||
def serve_(config_path: Path, host: str, port: int) -> None:
|
||||
"""
|
||||
Serve your local CDM and Widevine Devices Remotely.
|
||||
|
||||
\b
|
||||
[CONFIG] is a path to a serve config file.
|
||||
See `serve.example.yml` for an example config file.
|
||||
|
||||
\b
|
||||
Host as 127.0.0.1 may block remote access even if port-forwarded.
|
||||
Instead, use 0.0.0.0 and ensure the TCP port you choose is forwarded.
|
||||
"""
|
||||
from pywidevine import serve # isort:skip
|
||||
import yaml # isort:skip
|
||||
|
||||
config = yaml.safe_load(config_path.read_text(encoding="utf8"))
|
||||
serve.run(config, host, port)
|
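Judging by how _startup() and the route handlers further down read it, the config needs at least a devices list and a users mapping keyed by secret key, plus an optional force_privacy_mode flag; a minimal in-process sketch (all values are placeholders):

from pywidevine import serve

config = {
    "devices": ["/path/to/device.wvd"],  # placeholder; the filename stem becomes the device name
    "users": {
        "secret-key-1": {  # placeholder secret, sent by clients as X-Secret-Key
            "devices": ["device"]  # device names this user may open
        }
    },
    "force_privacy_mode": False
}
serve.run(config, host="127.0.0.1", port=8786)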
@ -1,442 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import binascii
|
||||
import string
|
||||
from io import BytesIO
|
||||
from typing import Optional, Union
|
||||
from uuid import UUID
|
||||
from xml.etree.ElementTree import XML
|
||||
|
||||
import construct
|
||||
from construct import Container
|
||||
from google.protobuf.message import DecodeError
|
||||
from pymp4.parser import Box
|
||||
|
||||
from pywidevine.license_protocol_pb2 import WidevinePsshData
|
||||
|
||||
|
||||
class PSSH:
|
||||
"""
|
||||
MP4 PSSH Box-related utilities.
|
||||
Allows you to load, create, and modify various kinds of DRM system headers.
|
||||
"""
|
||||
|
||||
class SystemId:
|
||||
Widevine = UUID(hex="edef8ba979d64acea3c827dcd51d21ed")
|
||||
PlayReady = UUID(hex="9a04f07998404286ab92e65be0885f95")
|
||||
|
||||
def __init__(self, data: Union[Container, str, bytes], strict: bool = False):
|
||||
"""
|
||||
Load a PSSH box, WidevineCencHeader, or PlayReadyHeader.
|
||||
|
||||
When loading a WidevineCencHeader or PlayReadyHeader, a new v0 PSSH box will be
|
||||
created and the header will be parsed and stored in the init_data field. However,
|
||||
PlayReadyHeaders (and PlayReadyObjects) are not currently parsed and are
|
||||
stored as bytes.
|
||||
|
||||
[Strict mode (strict=True)]
|
||||
|
||||
Supports the following forms of input data in either Base64 or Bytes form:
|
||||
- Full PSSH mp4 boxes (as defined by pymp4 Box).
|
||||
- Full Widevine Cenc Headers (as defined by WidevinePsshData proto).
|
||||
- Full PlayReady Objects and Headers (as defined by Microsoft Docs).
|
||||
|
||||
[Lenient mode (strict=False, default)]
|
||||
|
||||
If the data is not supported in Strict mode, and is assumed not to be corrupt or
|
||||
parsed incorrectly, the License Server likely accepts a custom init_data value
|
||||
during a License Request call. This is uncommon behavior, but not out of the
|
||||
realm of possibility. For example, Netflix does this with its MSL WidevineExchange
|
||||
scheme.
|
||||
|
||||
Lenient mode will craft a new v0 PSSH box with the init_data field set to
|
||||
the provided data as-is. The data will first be base64 decoded. This behavior
|
||||
may not work in your scenario; if that's the case, please manually craft
|
||||
your own PSSH box with the init_data field to be used in License Requests.
|
||||
|
||||
Raises:
|
||||
ValueError: If the data is empty.
|
||||
TypeError: If the data is an unexpected type.
|
||||
binascii.Error: If the data could not be decoded as Base64 if provided as a
|
||||
string.
|
||||
DecodeError: If the data could not be parsed as a PSSH mp4 box nor a Widevine
|
||||
Cenc Header and strict mode is enabled.
|
||||
"""
|
||||
if not data:
|
||||
raise ValueError("Data must not be empty.")
|
||||
|
||||
if isinstance(data, Container):
|
||||
box = data
|
||||
else:
|
||||
if isinstance(data, str):
|
||||
try:
|
||||
data = base64.b64decode(data)
|
||||
except (binascii.Error, binascii.Incomplete) as e:
|
||||
raise binascii.Error(f"Could not decode data as Base64, {e}")
|
||||
|
||||
if not isinstance(data, bytes):
|
||||
raise TypeError(f"Expected data to be a {Container}, bytes, or base64, not {data!r}")
|
||||
|
||||
try:
|
||||
box = Box.parse(data)
|
||||
except (IOError, construct.ConstructError): # not a box
|
||||
try:
|
||||
widevine_pssh_data = WidevinePsshData()
|
||||
widevine_pssh_data.ParseFromString(data)
|
||||
data_serialized = widevine_pssh_data.SerializeToString()
|
||||
if data_serialized != data: # not actually a WidevinePsshData
|
||||
raise DecodeError()
|
||||
box = Box.parse(Box.build(dict(
|
||||
type=b"pssh",
|
||||
version=0,
|
||||
flags=0,
|
||||
system_ID=PSSH.SystemId.Widevine,
|
||||
init_data=data_serialized
|
||||
)))
|
||||
except DecodeError: # not a widevine cenc header
|
||||
if "</WRMHEADER>".encode("utf-16-le") in data:
|
||||
# TODO: Actually parse `data` as a PlayReadyHeader object and store that instead
|
||||
box = Box.parse(Box.build(dict(
|
||||
type=b"pssh",
|
||||
version=0,
|
||||
flags=0,
|
||||
system_ID=PSSH.SystemId.PlayReady,
|
||||
init_data=data
|
||||
)))
|
||||
elif strict:
|
||||
raise DecodeError(f"Could not parse data as a {Container} nor a {WidevinePsshData}.")
|
||||
else:
|
||||
# Data is not a WidevineCencHeader nor a PlayReadyHeader.
|
||||
# The license server likely has something custom to parse it.
|
||||
# See doc-string about Lenient mode for more information.
|
||||
box = Box.parse(Box.build(dict(
|
||||
type=b"pssh",
|
||||
version=0,
|
||||
flags=0,
|
||||
system_ID=PSSH.SystemId.Widevine,
|
||||
init_data=data
|
||||
)))
|
||||
|
||||
self.version = box.version
|
||||
self.flags = box.flags
|
||||
self.system_id = box.system_ID
|
||||
self.__key_ids = box.key_IDs
|
||||
self.init_data = box.init_data
|
||||
|
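In practice, loading is a single constructor call for each supported form; a brief sketch (the Base64 values are placeholders):

from pywidevine.pssh import PSSH

pssh = PSSH("AAAAW3Bzc2gAAAAA...")  # placeholder Base64 of a full PSSH box
print(pssh.system_id, pssh.version)

# strict=True raises DecodeError for unrecognized data instead of wrapping it
pssh_strict = PSSH("AAAAW3Bzc2gAAAAA...", strict=True)  # placeholder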
||||
def __repr__(self) -> str:
|
||||
return f"PSSH<{self.system_id}>(v{self.version}; {self.flags}, {self.key_ids}, {self.init_data})"
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.dumps()
|
||||
|
||||
@classmethod
|
||||
def new(
|
||||
cls,
|
||||
system_id: UUID,
|
||||
key_ids: Optional[list[Union[UUID, str, bytes]]] = None,
|
||||
init_data: Optional[Union[WidevinePsshData, str, bytes]] = None,
|
||||
version: int = 0,
|
||||
flags: int = 0
|
||||
) -> PSSH:
|
||||
"""Craft a new version 0 or 1 PSSH Box."""
|
||||
if not system_id:
|
||||
raise ValueError("A System ID must be specified.")
|
||||
if not isinstance(system_id, UUID):
|
||||
raise TypeError(f"Expected system_id to be a UUID, not {system_id!r}")
|
||||
|
||||
if key_ids is not None and not isinstance(key_ids, list):
|
||||
raise TypeError(f"Expected key_ids to be a list not {key_ids!r}")
|
||||
|
||||
if init_data is not None and not isinstance(init_data, (WidevinePsshData, str, bytes)):
|
||||
raise TypeError(f"Expected init_data to be a {WidevinePsshData}, base64, or bytes, not {init_data!r}")
|
||||
|
||||
if not isinstance(version, int):
|
||||
raise TypeError(f"Expected version to be an int not {version!r}")
|
||||
if version not in (0, 1):
|
||||
raise ValueError(f"Invalid version, must be either 0 or 1, not {version}.")
|
||||
|
||||
if not isinstance(flags, int):
|
||||
raise TypeError(f"Expected flags to be an int not {flags!r}")
|
||||
if flags < 0:
|
||||
raise ValueError("Invalid flags, cannot be less than 0.")
|
||||
|
||||
if version == 0 and key_ids is not None and init_data is not None:
|
||||
# v0 boxes use only init_data in the pssh field, but we can use the key_ids within the init_data
|
||||
raise ValueError("Version 0 PSSH boxes must use only init_data, not init_data and key_ids.")
|
||||
elif version == 1:
|
||||
# TODO: I cannot tell if they need either init_data or key_ids exclusively, or if both are fine
|
||||
# So for now I will just make sure at least one is supplied
|
||||
if init_data is None and key_ids is None:
|
||||
raise ValueError("Version 1 PSSH boxes must use either init_data or key_ids but neither were provided")
|
||||
|
||||
if init_data is not None:
|
||||
if isinstance(init_data, WidevinePsshData):
|
||||
init_data = init_data.SerializeToString()
|
||||
elif isinstance(init_data, str):
|
||||
if all(c in string.hexdigits for c in init_data):
|
||||
init_data = bytes.fromhex(init_data)
|
||||
else:
|
||||
init_data = base64.b64decode(init_data)
|
||||
elif not isinstance(init_data, bytes):
|
||||
raise TypeError(
|
||||
f"Expecting init_data to be {WidevinePsshData}, hex, base64, or bytes, not {init_data!r}"
|
||||
)
|
||||
|
||||
pssh = cls(Box.parse(Box.build(dict(
|
||||
type=b"pssh",
|
||||
version=version,
|
||||
flags=flags,
|
||||
system_ID=system_id,
|
||||
init_data=[init_data, b""][init_data is None]
|
||||
# key_IDs should not be set yet
|
||||
))))
|
||||
|
||||
if key_ids:
|
||||
# We must reinforce the version because pymp4 forces v0 if key_IDs is not set.
|
||||
# The set_key_ids() func will set it efficiently in both init_data and the box where needed.
|
||||
# The version must be reinforced ONLY if we have key_id data or there's a possibility of making
|
||||
# a v1 PSSH box that did not have key_IDs set in the box itself.
|
||||
pssh.version = version
|
||||
pssh.set_key_ids(key_ids)
|
||||
|
||||
return pssh
|
||||
|
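A brief sketch of crafting a fresh v1 Widevine box with new(); per the checks above, a v1 box may be built from key_ids alone (the Key ID is a placeholder):

from uuid import UUID

from pywidevine.pssh import PSSH

pssh = PSSH.new(
    system_id=PSSH.SystemId.Widevine,
    key_ids=[UUID("00000000-0000-0000-0000-000000000001")],  # placeholder Key ID
    version=1
)
print(pssh.dumps())  # the new box as Base64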
||||
@property
|
||||
def key_ids(self) -> list[UUID]:
|
||||
"""
|
||||
Get all Key IDs from within the Box or Init Data, wherever possible.
|
||||
|
||||
Supports:
|
||||
- Version 1 PSSH Boxes
|
||||
- WidevineCencHeaders
|
||||
- PlayReadyHeaders (4.0.0.0->4.3.0.0)
|
||||
"""
|
||||
if self.version == 1 and self.__key_ids:
|
||||
return self.__key_ids
|
||||
|
||||
if self.system_id == PSSH.SystemId.Widevine:
|
||||
# TODO: What if it's not a Widevine Cenc Header but the System ID is set as Widevine?
|
||||
cenc_header = WidevinePsshData()
|
||||
cenc_header.ParseFromString(self.init_data)
|
||||
return [
|
||||
# each underlying key_id value may be stored as raw bytes, hex, or a number
|
||||
(
|
||||
UUID(bytes=key_id) if len(key_id) == 16 else # normal
|
||||
UUID(hex=key_id.decode()) if len(key_id) == 32 else # stored as hex
|
||||
UUID(int=int.from_bytes(key_id, "big")) # assuming as number
|
||||
)
|
||||
for key_id in cenc_header.key_ids
|
||||
]
|
||||
|
||||
if self.system_id == PSSH.SystemId.PlayReady:
|
||||
# Assuming init data is a PRO (PlayReadyObject)
|
||||
# https://learn.microsoft.com/en-us/playready/specifications/playready-header-specification
|
||||
pro_data = BytesIO(self.init_data)
|
||||
pro_length = int.from_bytes(pro_data.read(4), "little")
|
||||
if pro_length != len(self.init_data):
|
||||
raise ValueError("The PlayReadyObject seems to be corrupt (too big or small, or missing data).")
|
||||
pro_record_count = int.from_bytes(pro_data.read(2), "little")
|
||||
|
||||
for _ in range(pro_record_count):
|
||||
prr_type = int.from_bytes(pro_data.read(2), "little")
|
||||
prr_length = int.from_bytes(pro_data.read(2), "little")
|
||||
prr_value = pro_data.read(prr_length)
|
||||
if prr_type != 0x01:
|
||||
# No PlayReady Header, skip and hope for something else
|
||||
# TODO: Add support for Embedded License Stores (0x03)
|
||||
continue
|
||||
|
||||
wrm_ns = {"wrm": "http://schemas.microsoft.com/DRM/2007/03/PlayReadyHeader"}
|
||||
prr_header = XML(prr_value.decode("utf-16-le"))
|
||||
prr_header_version = prr_header.get("version")
|
||||
if prr_header_version == "4.0.0.0":
|
||||
key_ids = [
|
||||
x.text
|
||||
for x in prr_header.findall("./wrm:DATA/wrm:KID", wrm_ns)
|
||||
if x.text
|
||||
]
|
||||
elif prr_header_version == "4.1.0.0":
|
||||
key_ids = [
|
||||
x.attrib["VALUE"]
|
||||
for x in prr_header.findall("./wrm:DATA/wrm:PROTECTINFO/wrm:KID", wrm_ns)
|
||||
]
|
||||
elif prr_header_version in ("4.2.0.0", "4.3.0.0"):
|
||||
# TODO: Retain the Encryption Scheme information in v4.3.0.0
|
||||
# This is because some Key IDs can be AES-CTR while some are AES-CBC.
|
||||
# Conversion to WidevineCencHeader could use this information.
|
||||
key_ids = [
|
||||
x.attrib["VALUE"]
|
||||
for x in prr_header.findall("./wrm:DATA/wrm:PROTECTINFO/wrm:KIDS/wrm:KID", wrm_ns)
|
||||
]
|
||||
else:
|
||||
raise ValueError(f"Unsupported PlayReadyHeader version {prr_header_version}")
|
||||
|
||||
return [
|
||||
UUID(bytes=base64.b64decode(key_id))
|
||||
for key_id in key_ids
|
||||
]
|
||||
|
||||
raise ValueError("Unsupported PlayReadyObject, no PlayReadyHeader within the object.")
|
||||
|
||||
raise ValueError(f"This PSSH is not supported by key_ids() property, {self.dumps()}")
|
||||
|
||||
def dump(self) -> bytes:
|
||||
"""Export the PSSH object as a full PSSH box in bytes form."""
|
||||
return Box.build(dict(
|
||||
type=b"pssh",
|
||||
version=self.version,
|
||||
flags=self.flags,
|
||||
system_ID=self.system_id,
|
||||
key_IDs=self.key_ids if self.version == 1 and self.key_ids else None,
|
||||
init_data=self.init_data
|
||||
))
|
||||
|
||||
def dumps(self) -> str:
|
||||
"""Export the PSSH object as a full PSSH box in base64 form."""
|
||||
return base64.b64encode(self.dump()).decode()
|
||||
|
||||
def to_widevine(self) -> None:
|
||||
"""
|
||||
Convert PlayReady PSSH data to Widevine PSSH data.
|
||||
|
||||
There's only a limited amount of information within a PlayReady PSSH header that
|
||||
can be used in a Widevine PSSH Header. The converted data may or may not result
|
||||
in an accepted PSSH. It depends on what the License Server is expecting.
|
||||
"""
|
||||
if self.system_id == PSSH.SystemId.Widevine:
|
||||
raise ValueError("This is already a Widevine PSSH")
|
||||
|
||||
widevine_pssh_data = WidevinePsshData(
|
||||
key_ids=[x.bytes for x in self.key_ids],
|
||||
algorithm="AESCTR"
|
||||
)
|
||||
|
||||
if self.version == 1:
|
||||
# ensure both the cenc header and the box have the same Key IDs
|
||||
# v1 boxes store the Key IDs both in the box and within the init data
|
||||
self.__key_ids = self.key_ids
|
||||
|
||||
self.init_data = widevine_pssh_data.SerializeToString()
|
||||
self.system_id = PSSH.SystemId.Widevine
|
||||
|
||||
def to_playready(
|
||||
self,
|
||||
la_url: Optional[str] = None,
|
||||
lui_url: Optional[str] = None,
|
||||
ds_id: Optional[bytes] = None,
|
||||
decryptor_setup: Optional[str] = None,
|
||||
custom_data: Optional[str] = None
|
||||
) -> None:
|
||||
"""
|
||||
Convert Widevine PSSH data to PlayReady v4.3.0.0 PSSH data.
|
||||
|
||||
Note that it is impossible to create the CHECKSUM values for AES-CTR Key IDs
|
||||
as you must encrypt the Key ID with the Content Encryption Key using AES-ECB.
|
||||
This may cause software incompatibilities.
|
||||
|
||||
Parameters:
|
||||
la_url: Contains the URL for the license acquisition Web service.
|
||||
Only absolute URLs are allowed.
|
||||
lui_url: Contains the URL for the non-silent license acquisition Web page.
|
||||
Only absolute URLs are allowed.
|
||||
ds_id: Service ID for the domain service.
|
||||
decryptor_setup: This tag may only contain the value "ONDEMAND". It
|
||||
indicates to an application that it should not expect the full
|
||||
license chain for the content to be available for acquisition, or
|
||||
already present on the client machine, prior to setting up the
|
||||
media graph. If this tag is not set then it indicates that an
|
||||
application can enforce the license to be acquired, or already
|
||||
present on the client machine, prior to setting up the media graph.
|
||||
custom_data: The content author can add custom XML inside this
|
||||
element. Microsoft code does not act on any data contained inside
|
||||
this element. The syntax of this custom XML is not validated.
|
||||
"""
|
||||
if self.system_id == PSSH.SystemId.PlayReady:
|
||||
raise ValueError("This is already a PlayReady PSSH")
|
||||
|
||||
key_ids_xml = ""
|
||||
for key_id in self.key_ids:
|
||||
# Note that it's impossible to create the CHECKSUM value without the Key for the KID
|
||||
key_ids_xml += f"""
|
||||
<KID ALGID="AESCTR" VALUE="{base64.b64encode(key_id.bytes).decode()}"></KID>
|
||||
"""
|
||||
|
||||
prr_value = f"""
|
||||
<WRMHEADER xmlns="http://schemas.microsoft.com/DRM/2007/03/PlayReadyHeader" version="4.3.0.0">
|
||||
<DATA>
|
||||
<PROTECTINFO>
|
||||
<KIDS>{key_ids_xml}</KIDS>
|
||||
</PROTECTINFO>
|
||||
{'<LA_URL>%s</LA_URL>' % la_url if la_url else ''}
|
||||
{'<LUI_URL>%s</LUI_URL>' % lui_url if lui_url else ''}
|
||||
{'<DS_ID>%s</DS_ID>' % base64.b64encode(ds_id).decode() if ds_id else ''}
|
||||
{'<DECRYPTORSETUP>%s</DECRYPTORSETUP>' % decryptor_setup if decryptor_setup else ''}
|
||||
{'<CUSTOMATTRIBUTES xmlns="">%s</CUSTOMATTRIBUTES>' % custom_data if custom_data else ''}
|
||||
</DATA>
|
||||
</WRMHEADER>
|
||||
""".encode("utf-16-le")
|
||||
|
||||
prr_length = len(prr_value).to_bytes(2, "little")
|
||||
prr_type = (1).to_bytes(2, "little") # Has PlayReadyHeader
|
||||
pro_record_count = (1).to_bytes(2, "little")
|
||||
pro = pro_record_count + prr_type + prr_length + prr_value
|
||||
pro = (len(pro) + 4).to_bytes(4, "little") + pro
|
||||
|
||||
self.init_data = pro
|
||||
self.system_id = PSSH.SystemId.PlayReady
|
||||
|
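A brief sketch of converting between the two systems with the methods above (the PSSH value and LA_URL are placeholders):

from pywidevine.pssh import PSSH

pssh = PSSH("AAAAW3Bzc2gAAAAA...")  # placeholder Widevine PSSH in Base64
pssh.to_playready(la_url="https://example.com/rightsmanager.asmx")  # placeholder URL
assert pssh.system_id == PSSH.SystemId.PlayReady
pssh.to_widevine()  # and back; CHECKSUM values cannot be recreated, as noted above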
||||
def set_key_ids(self, key_ids: list[Union[UUID, str, bytes]]) -> None:
|
||||
"""Overwrite all Key IDs with the specified Key IDs."""
|
||||
if self.system_id != PSSH.SystemId.Widevine:
|
||||
# TODO: Add support for setting the Key IDs in a PlayReady Header
|
||||
raise ValueError(f"Only Widevine PSSH Boxes are supported, not {self.system_id}.")
|
||||
|
||||
key_id_uuids = self.parse_key_ids(key_ids)
|
||||
|
||||
if self.version == 1 or self.__key_ids:
|
||||
# only use v1 box key_ids if version is 1, or it's already being used
|
||||
# this is in case the service stupidly expects it for version 0
|
||||
self.__key_ids = key_id_uuids
|
||||
|
||||
cenc_header = WidevinePsshData()
|
||||
cenc_header.ParseFromString(self.init_data)
|
||||
|
||||
cenc_header.key_ids[:] = [
|
||||
key_id.bytes
|
||||
for key_id in key_id_uuids
|
||||
]
|
||||
|
||||
self.init_data = cenc_header.SerializeToString()
|
||||
|
||||
@staticmethod
|
||||
def parse_key_ids(key_ids: list[Union[UUID, str, bytes]]) -> list[UUID]:
|
||||
"""
|
||||
Parse a list of Key IDs in hex, base64, or bytes to UUIDs.
|
||||
|
||||
Raises TypeError if `key_ids` is not a list, or the list contains one
|
||||
or more items that are not a UUID, str, or bytes object.
|
||||
"""
|
||||
if not isinstance(key_ids, list):
|
||||
raise TypeError(f"Expected key_ids to be a list, not {key_ids!r}")
|
||||
|
||||
if not all(isinstance(x, (UUID, str, bytes)) for x in key_ids):
|
||||
raise TypeError("Some items of key_ids are not a UUID, str, or bytes. Unsure how to continue...")
|
||||
|
||||
uuids = [
|
||||
UUID(bytes=key_id_b)
|
||||
for key_id in key_ids
|
||||
for key_id_b in [
|
||||
key_id.bytes if isinstance(key_id, UUID) else
|
||||
(
|
||||
bytes.fromhex(key_id) if all(c in string.hexdigits for c in key_id) else
|
||||
base64.b64decode(key_id)
|
||||
) if isinstance(key_id, str) else
|
||||
key_id
|
||||
]
|
||||
]
|
||||
|
||||
return uuids
|
||||
|
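A brief sketch of the input forms parse_key_ids() accepts, all normalized to UUIDs (the values are placeholders):

from uuid import UUID

from pywidevine.pssh import PSSH

uuids = PSSH.parse_key_ids([
    UUID("00000000-0000-0000-0000-000000000001"),  # already a UUID
    "00000000000000000000000000000002",            # 32-char hex string
    "AAAAAAAAAAAAAAAAAAAAAw==",                    # Base64 string
    b"\x00" * 15 + b"\x04"                         # raw 16 bytes
])
assert all(isinstance(x, UUID) for x in uuids)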
||||
|
||||
__all__ = ("PSSH",)
|
@ -1,300 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import binascii
|
||||
import re
|
||||
from typing import Optional, Union
|
||||
|
||||
import requests
|
||||
from Crypto.Hash import SHA1
|
||||
from Crypto.PublicKey import RSA
|
||||
from Crypto.Signature import pss
|
||||
from google.protobuf.message import DecodeError
|
||||
|
||||
from pywidevine.cdm import Cdm
|
||||
from pywidevine.device import Device, DeviceTypes
|
||||
from pywidevine.exceptions import (DeviceMismatch, InvalidInitData, InvalidLicenseMessage, InvalidLicenseType,
|
||||
SignatureMismatch)
|
||||
from pywidevine.key import Key
|
||||
from pywidevine.license_protocol_pb2 import (ClientIdentification, License, LicenseType, SignedDrmCertificate,
|
||||
SignedMessage)
|
||||
from pywidevine.pssh import PSSH
|
||||
|
||||
|
||||
class RemoteCdm(Cdm):
|
||||
"""Remote Accessible CDM using pywidevine's serve schema."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
device_type: Union[DeviceTypes, str],
|
||||
system_id: int,
|
||||
security_level: int,
|
||||
host: str,
|
||||
secret: str,
|
||||
device_name: str
|
||||
):
|
||||
"""Initialize a Widevine Content Decryption Module (CDM)."""
|
||||
if not device_type:
|
||||
raise ValueError("Device Type must be provided")
|
||||
if isinstance(device_type, str):
|
||||
device_type = DeviceTypes[device_type]
|
||||
if not isinstance(device_type, DeviceTypes):
|
||||
raise TypeError(f"Expected device_type to be a {DeviceTypes!r} not {device_type!r}")
|
||||
|
||||
if not system_id:
|
||||
raise ValueError("System ID must be provided")
|
||||
if not isinstance(system_id, int):
|
||||
raise TypeError(f"Expected system_id to be a {int} not {system_id!r}")
|
||||
|
||||
if not security_level:
|
||||
raise ValueError("Security Level must be provided")
|
||||
if not isinstance(security_level, int):
|
||||
raise TypeError(f"Expected security_level to be a {int} not {security_level!r}")
|
||||
|
||||
if not host:
|
||||
raise ValueError("API Host must be provided")
|
||||
if not isinstance(host, str):
|
||||
raise TypeError(f"Expected host to be a {str} not {host!r}")
|
||||
|
||||
if not secret:
|
||||
raise ValueError("API Secret must be provided")
|
||||
if not isinstance(secret, str):
|
||||
raise TypeError(f"Expected secret to be a {str} not {secret!r}")
|
||||
|
||||
if not device_name:
|
||||
raise ValueError("API Device name must be provided")
|
||||
if not isinstance(device_name, str):
|
||||
raise TypeError(f"Expected device_name to be a {str} not {device_name!r}")
|
||||
|
||||
self.device_type = device_type
|
||||
self.system_id = system_id
|
||||
self.security_level = security_level
|
||||
self.host = host
|
||||
self.device_name = device_name
|
||||
|
||||
# spoof client_id and rsa_key just so we can construct via super call
|
||||
super().__init__(device_type, system_id, security_level, ClientIdentification(), RSA.generate(2048))
|
||||
|
||||
self.__session = requests.Session()
|
||||
self.__session.headers.update({
|
||||
"X-Secret-Key": secret
|
||||
})
|
||||
|
||||
r = requests.head(self.host)
|
||||
if r.status_code != 200:
|
||||
raise ValueError(f"Could not test Remote API version [{r.status_code}]")
|
||||
server = r.headers.get("Server")
|
||||
if not server or "pywidevine serve" not in server.lower():
|
||||
raise ValueError(f"This Remote CDM API does not seem to be a pywidevine serve API ({server}).")
|
||||
server_version_re = re.search(r"pywidevine serve v([\d.]+)", server, re.IGNORECASE)
|
||||
if not server_version_re:
|
||||
raise ValueError("The pywidevine server API is not stating the version correctly, cannot continue.")
|
||||
server_version = tuple(map(int, server_version_re.group(1).split(".")))
|
||||
if server_version < (1, 4, 3):  # compare as a numeric tuple, not lexicographically
|
||||
raise ValueError(f"This pywidevine serve API version ({'.'.join(map(str, server_version))}) is not supported.")
|
||||
|
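A brief sketch of driving a running serve instance through RemoteCdm; the module path is assumed from this file's location, and the host, secret, system_id, and device name are placeholders:

from pywidevine.pssh import PSSH
from pywidevine.remotecdm import RemoteCdm

cdm = RemoteCdm(
    device_type="ANDROID",  # coerced to DeviceTypes above; assumed member name
    system_id=4464,         # placeholder System ID; must match the served device
    security_level=3,
    host="http://127.0.0.1:8786",  # placeholder serve host
    secret="secret-key-1",         # placeholder X-Secret-Key value
    device_name="device"           # placeholder device name
)
session_id = cdm.open()
challenge = cdm.get_license_challenge(session_id, PSSH("AAAAW3Bzc2gAAAAA..."))  # placeholder PSSH
# ...POST the challenge to the license server, then:
# cdm.parse_license(session_id, licence)
# keys = cdm.get_keys(session_id)
cdm.close(session_id)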
||||
@classmethod
|
||||
def from_device(cls, device: Device) -> RemoteCdm:
|
||||
raise NotImplementedError("You cannot load a RemoteCdm from a local Device file.")
|
||||
|
||||
def open(self) -> bytes:
|
||||
r = self.__session.get(
|
||||
url=f"{self.host}/{self.device_name}/open"
|
||||
).json()
|
||||
if r['status'] != 200:
|
||||
raise ValueError(f"Cannot Open CDM Session, {r['message']} [{r['status']}]")
|
||||
r = r["data"]
|
||||
|
||||
if int(r["device"]["system_id"]) != self.system_id:
|
||||
raise DeviceMismatch("The System ID specified does not match the one specified in the API response.")
|
||||
|
||||
if int(r["device"]["security_level"]) != self.security_level:
|
||||
raise DeviceMismatch("The Security Level specified does not match the one specified in the API response.")
|
||||
|
||||
return bytes.fromhex(r["session_id"])
|
||||
|
||||
def close(self, session_id: bytes) -> None:
|
||||
r = self.__session.get(
|
||||
url=f"{self.host}/{self.device_name}/close/{session_id.hex()}"
|
||||
).json()
|
||||
if r["status"] != 200:
|
||||
raise ValueError(f"Cannot Close CDM Session, {r['message']} [{r['status']}]")
|
||||
|
||||
def set_service_certificate(self, session_id: bytes, certificate: Optional[Union[bytes, str]]) -> str:
|
||||
if certificate is None:
|
||||
certificate_b64 = None
|
||||
elif isinstance(certificate, str):
|
||||
certificate_b64 = certificate # assuming base64
|
||||
elif isinstance(certificate, bytes):
|
||||
certificate_b64 = base64.b64encode(certificate).decode()
|
||||
else:
|
||||
raise DecodeError(f"Expecting Certificate to be base64 or bytes, not {certificate!r}")
|
||||
|
||||
r = self.__session.post(
|
||||
url=f"{self.host}/{self.device_name}/set_service_certificate",
|
||||
json={
|
||||
"session_id": session_id.hex(),
|
||||
"certificate": certificate_b64
|
||||
}
|
||||
).json()
|
||||
if r["status"] != 200:
|
||||
raise ValueError(f"Cannot Set CDMs Service Certificate, {r['message']} [{r['status']}]")
|
||||
r = r["data"]
|
||||
|
||||
return r["provider_id"]
|
||||
|
||||
def get_service_certificate(self, session_id: bytes) -> Optional[SignedDrmCertificate]:
|
||||
r = self.__session.post(
|
||||
url=f"{self.host}/{self.device_name}/get_service_certificate",
|
||||
json={
|
||||
"session_id": session_id.hex()
|
||||
}
|
||||
).json()
|
||||
if r["status"] != 200:
|
||||
raise ValueError(f"Cannot Get CDMs Service Certificate, {r['message']} [{r['status']}]")
|
||||
r = r["data"]
|
||||
|
||||
service_certificate = r["service_certificate"]
|
||||
if not service_certificate:
|
||||
return None
|
||||
|
||||
service_certificate = base64.b64decode(service_certificate)
|
||||
signed_drm_certificate = SignedDrmCertificate()
|
||||
|
||||
try:
|
||||
signed_drm_certificate.ParseFromString(service_certificate)
|
||||
if signed_drm_certificate.SerializeToString() != service_certificate:
|
||||
raise DecodeError("partial parse")
|
||||
except DecodeError as e:
|
||||
# could be a direct unsigned DrmCertificate, but reject those anyway
|
||||
raise DecodeError(f"Could not parse certificate as a SignedDrmCertificate, {e}")
|
||||
|
||||
try:
|
||||
pss.new(RSA.import_key(self.root_cert.public_key)).verify(
|
||||
msg_hash=SHA1.new(signed_drm_certificate.drm_certificate),
|
||||
signature=signed_drm_certificate.signature
|
||||
)
|
||||
except (ValueError, TypeError):
|
||||
raise SignatureMismatch("Signature Mismatch on SignedDrmCertificate, rejecting certificate")
|
||||
|
||||
return signed_drm_certificate
|
||||
|
||||
def get_license_challenge(
|
||||
self,
|
||||
session_id: bytes,
|
||||
pssh: PSSH,
|
||||
license_type: str = "STREAMING",
|
||||
privacy_mode: bool = True
|
||||
) -> bytes:
|
||||
if not pssh:
|
||||
raise InvalidInitData("A pssh must be provided.")
|
||||
if not isinstance(pssh, PSSH):
|
||||
raise InvalidInitData(f"Expected pssh to be a {PSSH}, not {pssh!r}")
|
||||
|
||||
if not isinstance(license_type, str):
|
||||
raise InvalidLicenseType(f"Expected license_type to be a {str}, not {license_type!r}")
|
||||
if license_type not in LicenseType.keys():
|
||||
raise InvalidLicenseType(
|
||||
f"Invalid license_type value of '{license_type}'. "
|
||||
f"Available values: {LicenseType.keys()}"
|
||||
)
|
||||
|
||||
r = self.__session.post(
|
||||
url=f"{self.host}/{self.device_name}/get_license_challenge/{license_type}",
|
||||
json={
|
||||
"session_id": session_id.hex(),
|
||||
"init_data": pssh.dumps(),
|
||||
"privacy_mode": privacy_mode
|
||||
}
|
||||
).json()
|
||||
if r["status"] != 200:
|
||||
raise ValueError(f"Cannot get Challenge, {r['message']} [{r['status']}]")
|
||||
r = r["data"]
|
||||
|
||||
try:
|
||||
challenge = base64.b64decode(r["challenge_b64"])
|
||||
license_message = SignedMessage()
|
||||
license_message.ParseFromString(challenge)
|
||||
if license_message.SerializeToString() != challenge:
|
||||
raise DecodeError("partial parse")
|
||||
except DecodeError as e:
|
||||
raise InvalidLicenseMessage(f"Failed to parse license request, {e}")
|
||||
|
||||
return license_message.SerializeToString()
|
||||
|
||||
def parse_license(self, session_id: bytes, license_message: Union[SignedMessage, bytes, str]) -> None:
|
||||
if not license_message:
|
||||
raise InvalidLicenseMessage("Cannot parse an empty license_message")
|
||||
|
||||
if isinstance(license_message, str):
|
||||
try:
|
||||
license_message = base64.b64decode(license_message)
|
||||
except (binascii.Error, binascii.Incomplete) as e:
|
||||
raise InvalidLicenseMessage(f"Could not decode license_message as Base64, {e}")
|
||||
|
||||
if isinstance(license_message, bytes):
|
||||
signed_message = SignedMessage()
|
||||
try:
|
||||
signed_message.ParseFromString(license_message)
|
||||
if signed_message.SerializeToString() != license_message:
|
||||
raise DecodeError("partial parse")
|
||||
except DecodeError as e:
|
||||
raise InvalidLicenseMessage(f"Could not parse license_message as a SignedMessage, {e}")
|
||||
license_message = signed_message
|
||||
|
||||
if not isinstance(license_message, SignedMessage):
|
||||
raise InvalidLicenseMessage(f"Expecting license_response to be a SignedMessage, got {license_message!r}")
|
||||
|
||||
if license_message.type != SignedMessage.MessageType.Value("LICENSE"):
|
||||
raise InvalidLicenseMessage(
|
||||
f"Expecting a LICENSE message, not a "
|
||||
f"'{SignedMessage.MessageType.Name(license_message.type)}' message."
|
||||
)
|
||||
|
||||
r = self.__session.post(
|
||||
url=f"{self.host}/{self.device_name}/parse_license",
|
||||
json={
|
||||
"session_id": session_id.hex(),
|
||||
"license_message": base64.b64encode(license_message.SerializeToString()).decode()
|
||||
}
|
||||
).json()
|
||||
if r["status"] != 200:
|
||||
raise ValueError(f"Cannot parse License, {r['message']} [{r['status']}]")
|
||||
|
||||
def get_keys(self, session_id: bytes, type_: Optional[Union[int, str]] = None) -> list[Key]:
|
||||
try:
|
||||
if isinstance(type_, str):
|
||||
License.KeyContainer.KeyType.Value(type_) # only test
|
||||
elif isinstance(type_, int):
|
||||
type_ = License.KeyContainer.KeyType.Name(type_)
|
||||
elif type_ is None:
|
||||
type_ = "ALL"
|
||||
else:
|
||||
raise TypeError(f"Expected type_ to be a {License.KeyContainer.KeyType} or int, not {type_!r}")
|
||||
except ValueError as e:
|
||||
raise ValueError(f"Could not parse type_ as a {License.KeyContainer.KeyType}, {e}")
|
||||
|
||||
r = self.__session.post(
|
||||
url=f"{self.host}/{self.device_name}/get_keys/{type_}",
|
||||
json={
|
||||
"session_id": session_id.hex()
|
||||
}
|
||||
).json()
|
||||
if r["status"] != 200:
|
||||
raise ValueError(f"Could not get {type_} Keys, {r['message']} [{r['status']}]")
|
||||
r = r["data"]
|
||||
|
||||
return [
|
||||
Key(
|
||||
type_=key["type"],
|
||||
kid=Key.kid_to_uuid(bytes.fromhex(key["key_id"])),
|
||||
key=bytes.fromhex(key["key"]),
|
||||
permissions=key["permissions"]
|
||||
)
|
||||
for key in r["keys"]
|
||||
]
|
||||
|
||||
|
||||
__all__ = ("RemoteCdm",)
|
@ -1,458 +0,0 @@
|
||||
import base64
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from aiohttp.typedefs import Handler
|
||||
from google.protobuf.message import DecodeError
|
||||
|
||||
from pywidevine.pssh import PSSH
|
||||
|
||||
try:
|
||||
from aiohttp import web
|
||||
except ImportError:
|
||||
print(
|
||||
"Missing the extra dependencies for serve functionality. "
|
||||
"You may install them under poetry with `poetry install -E serve`, "
|
||||
"or under pip with `pip install pywidevine[serve]`."
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from pywidevine import __version__
|
||||
from pywidevine.cdm import Cdm
|
||||
from pywidevine.device import Device
|
||||
from pywidevine.exceptions import (InvalidContext, InvalidInitData, InvalidLicenseMessage, InvalidLicenseType,
|
||||
InvalidSession, SignatureMismatch, TooManySessions)
|
||||
|
||||
routes = web.RouteTableDef()
|
||||
|
||||
|
||||
async def _startup(app: web.Application) -> None:
|
||||
app["cdms"] = {}
|
||||
app["config"]["devices"] = {
|
||||
path.stem: path
|
||||
for x in app["config"]["devices"]
|
||||
for path in [Path(x)]
|
||||
}
|
||||
for device in app["config"]["devices"].values():
|
||||
if not device.is_file():
|
||||
raise FileNotFoundError(f"Device file does not exist: {device}")
|
||||
|
||||
|
||||
async def _cleanup(app: web.Application) -> None:
|
||||
app["cdms"].clear()
|
||||
del app["cdms"]
|
||||
app["config"].clear()
|
||||
del app["config"]
|
||||
|
||||
|
||||
@routes.get("/")
|
||||
async def ping(_: Any) -> web.Response:
|
||||
return web.json_response({
|
||||
"status": 200,
|
||||
"message": "Pong!"
|
||||
})
|
||||
|
||||
|
||||
@routes.get("/{device}/open")
|
||||
async def open_(request: web.Request) -> web.Response:
|
||||
secret_key = request.headers["X-Secret-Key"]
|
||||
device_name = request.match_info["device"]
|
||||
user = request.app["config"]["users"][secret_key]
|
||||
|
||||
if device_name not in user["devices"] or device_name not in request.app["config"]["devices"]:
|
||||
# we don't want to be verbose with the error, so as not to reveal device names
|
||||
# via trial and error to users who are not authorized to use them
|
||||
return web.json_response({
|
||||
"status": 403,
|
||||
"message": f"Device '{device_name}' is not found or you are not authorized to use it."
|
||||
}, status=403)
|
||||
|
||||
cdm: Optional[Cdm] = request.app["cdms"].get((secret_key, device_name))
|
||||
if not cdm:
|
||||
device = Device.load(request.app["config"]["devices"][device_name])
|
||||
cdm = request.app["cdms"][(secret_key, device_name)] = Cdm.from_device(device)
|
||||
|
||||
try:
|
||||
session_id = cdm.open()
|
||||
except TooManySessions as e:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": str(e)
|
||||
}, status=400)
|
||||
|
||||
return web.json_response({
|
||||
"status": 200,
|
||||
"message": "Success",
|
||||
"data": {
|
||||
"session_id": session_id.hex(),
|
||||
"device": {
|
||||
"system_id": cdm.system_id,
|
||||
"security_level": cdm.security_level
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@routes.get("/{device}/close/{session_id}")
|
||||
async def close(request: web.Request) -> web.Response:
|
||||
secret_key = request.headers["X-Secret-Key"]
|
||||
device_name = request.match_info["device"]
|
||||
session_id = bytes.fromhex(request.match_info["session_id"])
|
||||
|
||||
cdm: Optional[Cdm] = request.app["cdms"].get((secret_key, device_name))
|
||||
if not cdm:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"No Cdm session for {device_name} has been opened yet. No session to close."
|
||||
}, status=400)
|
||||
|
||||
try:
|
||||
cdm.close(session_id)
|
||||
except InvalidSession:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid Session ID '{session_id.hex()}', it may have expired."
|
||||
}, status=400)
|
||||
|
||||
return web.json_response({
|
||||
"status": 200,
|
||||
"message": f"Successfully closed Session '{session_id.hex()}'."
|
||||
})
|
||||
|
||||
|
||||
@routes.post("/{device}/set_service_certificate")
|
||||
async def set_service_certificate(request: web.Request) -> web.Response:
|
||||
secret_key = request.headers["X-Secret-Key"]
|
||||
device_name = request.match_info["device"]
|
||||
|
||||
body = await request.json()
|
||||
for required_field in ("session_id", "certificate"):
|
||||
if required_field == "certificate":
|
||||
has_field = required_field in body # it needs the key, but can be empty/null
|
||||
else:
|
||||
has_field = body.get(required_field)
|
||||
if not has_field:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Missing required field '{required_field}' in JSON body."
|
||||
}, status=400)
|
||||
|
||||
# get session id
|
||||
session_id = bytes.fromhex(body["session_id"])
|
||||
|
||||
# get cdm
|
||||
cdm: Optional[Cdm] = request.app["cdms"].get((secret_key, device_name))
|
||||
if not cdm:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"No Cdm session for {device_name} has been opened yet. No session to use."
|
||||
}, status=400)
|
||||
|
||||
# set service certificate
|
||||
certificate = body.get("certificate")
|
||||
try:
|
||||
provider_id = cdm.set_service_certificate(session_id, certificate)
|
||||
except InvalidSession:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid Session ID '{session_id.hex()}', it may have expired."
|
||||
}, status=400)
|
||||
except DecodeError as e:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid Service Certificate, {e}"
|
||||
}, status=400)
|
||||
except SignatureMismatch:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": "Signature Validation failed on the Service Certificate, rejecting."
|
||||
}, status=400)
|
||||
|
||||
return web.json_response({
|
||||
"status": 200,
|
||||
"message": f"Successfully {['set', 'unset'][not certificate]} the Service Certificate.",
|
||||
"data": {
|
||||
"provider_id": provider_id
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@routes.post("/{device}/get_service_certificate")
|
||||
async def get_service_certificate(request: web.Request) -> web.Response:
|
||||
secret_key = request.headers["X-Secret-Key"]
|
||||
device_name = request.match_info["device"]
|
||||
|
||||
body = await request.json()
|
||||
for required_field in ("session_id",):
|
||||
if not body.get(required_field):
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Missing required field '{required_field}' in JSON body."
|
||||
}, status=400)
|
||||
|
||||
# get session id
|
||||
session_id = bytes.fromhex(body["session_id"])
|
||||
|
||||
# get cdm
|
||||
cdm: Optional[Cdm] = request.app["cdms"].get((secret_key, device_name))
|
||||
if not cdm:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"No Cdm session for {device_name} has been opened yet. No session to use."
|
||||
}, status=400)
|
||||
|
||||
# get service certificate
|
||||
try:
|
||||
service_certificate = cdm.get_service_certificate(session_id)
|
||||
except InvalidSession:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid Session ID '{session_id.hex()}', it may have expired."
|
||||
}, status=400)
|
||||
|
||||
if service_certificate:
|
||||
service_certificate_b64 = base64.b64encode(service_certificate.SerializeToString()).decode()
|
||||
else:
|
||||
service_certificate_b64 = None
|
||||
|
||||
return web.json_response({
|
||||
"status": 200,
|
||||
"message": "Successfully got the Service Certificate.",
|
||||
"data": {
|
||||
"service_certificate": service_certificate_b64
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@routes.post("/{device}/get_license_challenge/{license_type}")
|
||||
async def get_license_challenge(request: web.Request) -> web.Response:
|
||||
secret_key = request.headers["X-Secret-Key"]
|
||||
device_name = request.match_info["device"]
|
||||
license_type = request.match_info["license_type"]
|
||||
|
||||
body = await request.json()
|
||||
for required_field in ("session_id", "init_data"):
|
||||
if not body.get(required_field):
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Missing required field '{required_field}' in JSON body."
|
||||
}, status=400)
|
||||
|
||||
# get session id
|
||||
session_id = bytes.fromhex(body["session_id"])
|
||||
|
||||
# get privacy mode flag
|
||||
privacy_mode = body.get("privacy_mode", True)
|
||||
|
||||
# get cdm
|
||||
cdm: Optional[Cdm] = request.app["cdms"].get((secret_key, device_name))
|
||||
if not cdm:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"No Cdm session for {device_name} has been opened yet. No session to use."
|
||||
}, status=400)
|
||||
|
||||
# enforce service certificate (opt-in)
|
||||
if request.app["config"].get("force_privacy_mode"):
|
||||
privacy_mode = True
|
||||
if not cdm.get_service_certificate(session_id):
|
||||
return web.json_response({
|
||||
"status": 403,
|
||||
"message": "No Service Certificate set but Privacy Mode is Enforced."
|
||||
}, status=403)
|
||||
|
||||
# get init data
|
||||
init_data = PSSH(body["init_data"])
|
||||
|
||||
# get challenge
|
||||
try:
|
||||
license_request = cdm.get_license_challenge(
|
||||
session_id=session_id,
|
||||
pssh=init_data,
|
||||
license_type=license_type,
|
||||
privacy_mode=privacy_mode
|
||||
)
|
||||
except InvalidSession:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid Session ID '{session_id.hex()}', it may have expired."
|
||||
}, status=400)
|
||||
except InvalidInitData as e:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid Init Data, {e}"
|
||||
}, status=400)
|
||||
except InvalidLicenseType:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid License Type '{license_type}'"
|
||||
}, status=400)
|
||||
|
||||
return web.json_response({
|
||||
"status": 200,
|
||||
"message": "Success",
|
||||
"data": {
|
||||
"challenge_b64": base64.b64encode(license_request).decode()
|
||||
}
|
||||
}, status=200)
|
||||
|
||||
|
||||
@routes.post("/{device}/parse_license")
|
||||
async def parse_license(request: web.Request) -> web.Response:
|
||||
secret_key = request.headers["X-Secret-Key"]
|
||||
device_name = request.match_info["device"]
|
||||
|
||||
body = await request.json()
|
||||
for required_field in ("session_id", "license_message"):
|
||||
if not body.get(required_field):
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Missing required field '{required_field}' in JSON body."
|
||||
}, status=400)
|
||||
|
||||
# get session id
|
||||
session_id = bytes.fromhex(body["session_id"])
|
||||
|
||||
# get cdm
|
||||
cdm: Optional[Cdm] = request.app["cdms"].get((secret_key, device_name))
|
||||
if not cdm:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"No Cdm session for {device_name} has been opened yet. No session to use."
|
||||
}, status=400)
|
||||
|
||||
# parse the license message
|
||||
try:
|
||||
cdm.parse_license(session_id, body["license_message"])
|
||||
except InvalidSession:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid Session ID '{session_id.hex()}', it may have expired."
|
||||
}, status=400)
|
||||
except InvalidLicenseMessage as e:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid License Message, {e}"
|
||||
}, status=400)
|
||||
except InvalidContext as e:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid Context, {e}"
|
||||
}, status=400)
|
||||
except SignatureMismatch:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": "Signature Validation failed on the License Message, rejecting."
|
||||
}, status=400)
|
||||
|
||||
return web.json_response({
|
||||
"status": 200,
|
||||
"message": "Successfully parsed and loaded the Keys from the License message."
|
||||
})
|
||||
|
||||
|
||||
@routes.post("/{device}/get_keys/{key_type}")
|
||||
async def get_keys(request: web.Request) -> web.Response:
|
||||
secret_key = request.headers["X-Secret-Key"]
|
||||
device_name = request.match_info["device"]
|
||||
|
||||
body = await request.json()
|
||||
for required_field in ("session_id",):
|
||||
if not body.get(required_field):
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Missing required field '{required_field}' in JSON body."
|
||||
}, status=400)
|
||||
|
||||
# get session id
|
||||
session_id = bytes.fromhex(body["session_id"])
|
||||
|
||||
# get key type
|
||||
key_type: Optional[str] = request.match_info["key_type"]
|
||||
if key_type == "ALL":
|
||||
key_type = None
|
||||
|
||||
# get cdm
|
||||
cdm = request.app["cdms"].get((secret_key, device_name))
|
||||
if not cdm:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"No Cdm session for {device_name} has been opened yet. No session to use."
|
||||
}, status=400)
|
||||
|
||||
# get keys
|
||||
try:
|
||||
keys = cdm.get_keys(session_id, key_type)
|
||||
except InvalidSession:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"Invalid Session ID '{session_id.hex()}', it may have expired."
|
||||
}, status=400)
|
||||
except ValueError as e:
|
||||
return web.json_response({
|
||||
"status": 400,
|
||||
"message": f"The Key Type value '{key_type}' is invalid, {e}"
|
||||
}, status=400)
|
||||
|
||||
# get the keys in json form
|
||||
keys_json = [
|
||||
{
|
||||
"key_id": key.kid.hex,
|
||||
"key": key.key.hex(),
|
||||
"type": key.type,
|
||||
"permissions": key.permissions,
|
||||
}
|
||||
for key in keys
|
||||
if not key_type or key.type == key_type
|
||||
]
|
||||
|
||||
return web.json_response({
|
||||
"status": 200,
|
||||
"message": "Success",
|
||||
"data": {
|
||||
"keys": keys_json
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@web.middleware
|
||||
async def authentication(request: web.Request, handler: Handler) -> web.Response:
|
||||
secret_key = request.headers.get("X-Secret-Key")
|
||||
|
||||
if request.path != "/" and not secret_key:
|
||||
request.app.logger.debug(f"{request.remote} did not provide authorization.")
|
||||
response = web.json_response({
|
||||
"status": "401",
|
||||
"message": "Secret Key is Empty."
|
||||
}, status=401)
|
||||
elif request.path != "/" and secret_key not in request.app["config"]["users"]:
|
||||
request.app.logger.debug(f"{request.remote} failed authentication with '{secret_key}'.")
|
||||
response = web.json_response({
|
||||
"status": "401",
|
||||
"message": "Secret Key is Invalid, the Key is case-sensitive."
|
||||
}, status=401)
|
||||
else:
|
||||
try:
|
||||
response = await handler(request) # type: ignore[assignment]
|
||||
except web.HTTPException as e:
|
||||
request.app.logger.error(f"An unexpected error has occurred, {e}")
|
||||
response = web.json_response({
|
||||
"status": 500,
|
||||
"message": e.reason
|
||||
}, status=500)
|
||||
|
||||
response.headers.update({
|
||||
"Server": f"https://github.com/devine-dl/pywidevine serve v{__version__}"
|
||||
})
|
||||
|
||||
return response
|
||||
|
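Every route above sits behind the X-Secret-Key check and answers with the same {"status", "message", "data"} envelope; a brief sketch of pinging the server and opening a session with requests (host, secret, and device name are placeholders):

import requests

session = requests.Session()
session.headers["X-Secret-Key"] = "secret-key-1"  # placeholder secret

host = "http://127.0.0.1:8786"  # placeholder host; "/" is exempt from authentication
print(session.get(f"{host}/").json())  # {'status': 200, 'message': 'Pong!'}

r = session.get(f"{host}/device/open").json()  # 'device' is a placeholder device name
session_id = r["data"]["session_id"]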
||||
|
||||
def run(config: dict, host: Optional[Union[str, web.HostSequence]] = None, port: Optional[int] = None) -> None:
|
||||
app = web.Application(middlewares=[authentication])
|
||||
app.on_startup.append(_startup)
|
||||
app.on_cleanup.append(_cleanup)
|
||||
app.add_routes(routes)
|
||||
app["config"] = config
|
||||
web.run_app(app, host=host, port=port)
|