merge fix conflict

pull/11/head
luky 2024-04-22 20:36:25 +02:00
commit 43e672a0f5
9 changed files with 99 additions and 64 deletions

View File

@@ -3,8 +3,12 @@ import shutil
from typing import Optional
import click
from rich.padding import Padding
from rich.table import Table
from rich.tree import Tree
from devine.core.config import config, config_path
from devine.core.config import POSSIBLE_CONFIG_PATHS, config, config_path
from devine.core.console import console
from devine.core.constants import context_settings
from devine.core.services import Services
@@ -18,13 +22,32 @@ def env() -> None:
def info() -> None:
"""Displays information about the current environment."""
log = logging.getLogger("env")
log.info(f"[Config] : {config_path or '--'}")
log.info(f"[Cookies] : {config.directories.cookies}")
log.info(f"[WVDs] : {config.directories.wvds}")
log.info(f"[Cache] : {config.directories.cache}")
log.info(f"[Logs] : {config.directories.logs}")
log.info(f"[Temp Files] : {config.directories.temp}")
log.info(f"[Downloads] : {config.directories.downloads}")
if config_path:
log.info(f"Config loaded from {config_path}")
else:
tree = Tree("No config file found, you can use any of the following locations:")
for i, path in enumerate(POSSIBLE_CONFIG_PATHS, start=1):
tree.add(f"[repr.number]{i}.[/] [text2]{path.resolve()}[/]")
console.print(Padding(
tree,
(0, 5)
))
table = Table(title="Directories", expand=True)
table.add_column("Name", no_wrap=True)
table.add_column("Path")
for name in sorted(dir(config.directories)):
if name.startswith("__") or name == "app_dirs":
continue
path = getattr(config.directories, name).resolve()
table.add_row(name.title(), str(path))
console.print(Padding(
table,
(1, 5)
))
@env.group(name="clear", short_help="Clear an environment directory.", context_settings=context_settings)

View File

@@ -77,29 +77,27 @@ class Config:
return cls(**yaml.safe_load(path.read_text(encoding="utf8")) or {})
# noinspection PyProtectedMember
POSSIBLE_CONFIG_PATHS = (
# The Devine Namespace Folder (e.g., %appdata%/Python/Python311/site-packages/devine)
Config._Directories.namespace_dir / Config._Filenames.root_config,
# The Parent Folder to the Devine Namespace Folder (e.g., %appdata%/Python/Python311/site-packages)
Config._Directories.namespace_dir.parent / Config._Filenames.root_config,
# The AppDirs User Config Folder (e.g., %localappdata%/devine)
Config._Directories.user_configs / Config._Filenames.root_config
)
def get_config_path() -> Optional[Path]:
"""
Get Path to Config from various locations.
Looks for a config file in the following folders in order:
1. The Devine Namespace Folder (e.g., %appdata%/Python/Python311/site-packages/devine)
2. The Parent Folder to the Devine Namespace Folder (e.g., %appdata%/Python/Python311/site-packages)
3. The AppDirs User Config Folder (e.g., %localappdata%/devine)
Get Path to Config from any one of the possible locations.
Returns None if no config file could be found.
"""
# noinspection PyProtectedMember
path = Config._Directories.namespace_dir / Config._Filenames.root_config
if not path.exists():
# noinspection PyProtectedMember
path = Config._Directories.namespace_dir.parent / Config._Filenames.root_config
if not path.exists():
# noinspection PyProtectedMember
path = Config._Directories.user_configs / Config._Filenames.root_config
if not path.exists():
path = None
return path
for path in POSSIBLE_CONFIG_PATHS:
if path.exists():
return path
return None
config_path = get_config_path()

View File

@@ -6,7 +6,6 @@ from http.cookiejar import CookieJar
from pathlib import Path
from typing import Any, Generator, MutableMapping, Optional, Union
from curl_cffi import CurlOpt
from curl_cffi.requests import Session
from rich import filesize
@@ -18,7 +17,7 @@ MAX_ATTEMPTS = 5
RETRY_WAIT = 2
CHUNK_SIZE = 1024
PROGRESS_WINDOW = 5
BROWSER = config.curl_impersonate.get("browser", "chrome120")
BROWSER = config.curl_impersonate.get("browser", "chrome124")
def download(
@@ -53,11 +52,6 @@ def download(
for one-time request changes like a header, cookie, or proxy. For example,
to request Byte-ranges use e.g., `headers={"Range": "bytes=0-128"}`.
"""
# https://github.com/yifeikong/curl_cffi/issues/6#issuecomment-2028518677
# must be applied here since the `session.curl` is thread-localized
# noinspection PyProtectedMember
session.curl.setopt(CurlOpt.PROXY_CAINFO, session.curl._cacert)
save_dir = save_path.parent
control_file = save_path.with_name(f"{save_path.name}.!dev")

View File

@@ -302,7 +302,9 @@ class ALL4(Service):
if web_assets is not None:
try:
b_manifest, b_token, b_subtitle, data = web_assets
web_tracks = DASH.from_url(b_manifest, self.session).to_tracks("en")
session = self.session
session.headers.update(self.config["headers"])
web_tracks = DASH.from_url(b_manifest, session).to_tracks("en")
web_heights = sorted([int(track.height) for track in web_tracks.videos], reverse=True)
except Exception:
web_heights = None
@@ -312,9 +314,6 @@ class ALL4(Service):
sys.exit(1)
if not android_heights or android_heights[0] < 1080:
self.log.warning(
"ANDROID data returned None or is missing full quality profile, falling back to WEB data..."
)
lic_token = self.decrypt_token(b_token, client="WEB")
return b_manifest, lic_token, b_subtitle, data
else:
@@ -322,7 +321,6 @@ class ALL4(Service):
return a_manifest, lic_token, a_subtitle, data
def android_playlist(self, video_id: str) -> tuple:
self.log.info("Requesting ANDROID assets...")
url = self.config["android"]["vod"].format(video_id=video_id)
headers = {"authorization": self.authorization}
@@ -342,9 +340,8 @@ class ALL4(Service):
return manifest, token, subtitle, data
def web_playlist(self, video_id: str) -> tuple:
self.log.info("Requesting WEB assets...")
url = self.config["web"]["vod"].format(programmeId=video_id)
r = self.session.get(url)
r = self.session.get(url, headers=self.config["headers"])
if not r.ok:
self.log.warning("Request for WEB endpoint returned %s", r)
return

View File

@@ -1,3 +1,7 @@
headers:
Accept-Language: en-US,en;q=0.8
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.75 Safari/537.36
endpoints:
login: https://api.channel4.com/online/v2/auth/token
title: https://api.channel4.com/online/v1/views/content-hubs/{title}.json
@@ -9,7 +13,7 @@ android:
iv: MURDRDAzODNES0RGU0w4Mg=="
auth: MzZVVUN0OThWTVF2QkFnUTI3QXU4ekdIbDMxTjlMUTE6Sllzd3lIdkdlNjJWbGlrVw==
vod: https://api.channel4.com/online/v1/vod/stream/{video_id}?client=android-mod
web:
key: bjljTGllWWtxd3pOQ3F2aQ==
iv: b2R6Y1UzV2RVaVhMdWNWZA==

View File

@@ -36,6 +36,7 @@ class iP(Service):
\b
Tips:
- Use full title URL as input for best results.
- Use --list-titles before anything, iPlayer's listings are often messed up.
\b
- An SSL certificate (PEM) is required for accessing the UHD endpoint.
Specify its path using the service configuration data in the root config:
@@ -45,7 +46,7 @@ class iP(Service):
cert: path/to/cert
\b
- Use -v H.265 to request UHD tracks
- See which titles are available in UHD:
- See which titles are available in UHD:
https://www.bbc.co.uk/iplayer/help/questions/programme-availability/uhd-content
"""
@@ -121,18 +122,29 @@ class iP(Service):
)
else:
seasons = [self.get_data(pid, x["id"]) for x in data["slices"] or [{"id": None}]]
episodes = [self.create_episode(episode) for season in seasons for episode in season["entities"]["results"]]
episodes = [self.create_episode(episode, data) for season in seasons for episode in season["entities"]["results"]]
return Series(episodes)
def get_tracks(self, title: Union[Movie, Episode]) -> Tracks:
r = self.session.get(url=self.config["endpoints"]["playlist"].format(pid=title.id))
r.raise_for_status()
if not r.ok:
self.log.error(r.text)
sys.exit(1)
versions = r.json().get("allAvailableVersions")
if not versions:
r = self.session.get(self.config["base_url"].format(type="episode", pid=title.id))
redux = re.search("window.__IPLAYER_REDUX_STATE__ = (.*?);</script>", r.text).group(1)
data = json.loads(redux)
versions = [
{"pid": x.get("id") for x in data["versions"] if not x.get("kind") == "audio-described"}
]
quality = [
connection.get("height")
for i in (
self.check_all_versions(version)
for version in (x.get("pid") for x in r.json()["allAvailableVersions"])
for version in (x.get("pid") for x in versions)
)
for connection in i
if connection.get("height")
@@ -140,9 +152,9 @@ class iP(Service):
max_quality = max((h for h in quality if h < "1080"), default=None)
media = next((i for i in (self.check_all_versions(version)
for version in (x.get("pid") for x in r.json()["allAvailableVersions"]))
for version in (x.get("pid") for x in versions))
if any(connection.get("height") == max_quality for connection in i)), None)
connection = {}
for video in [x for x in media if x["kind"] == "video"]:
connections = sorted(video["connection"], key=lambda x: x["priority"])
@@ -244,7 +256,7 @@ class iP(Service):
r.raise_for_status()
return r.json()["data"]["programme"]
def check_all_versions(self, vpid: str) -> list:
if self.config.get("cert"):
url = self.config["endpoints"]["manifest_"].format(
@@ -274,11 +286,13 @@ class iP(Service):
return manifest["media"]
def create_episode(self, episode):
def create_episode(self, episode: dict, data: dict) -> Episode:
title = episode["episode"]["title"]["default"].strip()
subtitle = episode["episode"]["subtitle"]
series = re.finditer(r"Series (\d+):|Season (\d+):|(\d{4}/\d{2}): Episode \d+", subtitle.get("default") or "")
season_num = int(next((m.group(1) or m.group(2) or m.group(3).replace("/", "") for m in series), 0))
if season_num == 0 and not data.get("slices"):
season_num = 1
number = re.finditer(r"(\d+)\.|Episode (\d+)", subtitle.get("slice") or subtitle.get("default") or "")
ep_num = int(next((m.group(1) or m.group(2) for m in number), 0))
@@ -296,6 +310,7 @@ class iP(Service):
number=ep_num,
name=ep_name,
language="en",
data=episode,
)
def get_single_episode(self, url: str) -> Series:

View File

@@ -1,3 +1,4 @@
base_url: https://www.bbc.co.uk/iplayer/{type}/{pid}
user_agent: 'smarttv_AFTMM_Build_0003255372676_Chromium_41.0.2250.2'
api_key: 'D2FgtcTxGqqIgLsfBWTJdrQh2tVdeaAp'

29
poetry.lock generated
View File

@@ -523,28 +523,31 @@ testing = ["cssselect", "importlib-resources", "jaraco.test (>=5.1)", "lxml", "p
[[package]]
name = "curl-cffi"
version = "0.6.2"
description = "libcurl ffi bindings for Python, with impersonation support"
version = "0.7.0b4"
description = "libcurl ffi bindings for Python, with impersonation support."
optional = false
python-versions = ">=3.8"
files = [
{file = "curl_cffi-0.6.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:23b8a2872b160718c04b06b1f8aa4fb1a2f4f94bce7040493515e081a27cad19"},
{file = "curl_cffi-0.6.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ad3c1cf5360810825ec4bc3da425f26ee4098878a615dab9d309a99afd883ba9"},
{file = "curl_cffi-0.6.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d01de6ed737ad1924aaa0198195b9020c38e77ce90ea3d72b9eacf4938c7adf"},
{file = "curl_cffi-0.6.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37e513cc149d024a2d625e202f2cc9d4423d2937343ea2e06f797d99779e62dc"},
{file = "curl_cffi-0.6.2-cp38-abi3-win32.whl", hash = "sha256:12e829af97cbf7c1d5afef177e786f6f404ddf163b08897a1ed087cadbeb4837"},
{file = "curl_cffi-0.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:3791b7a9ae4cb1298165300f2dc2d60a86779f055570ae83163fc2d8a74bf714"},
{file = "curl_cffi-0.6.2.tar.gz", hash = "sha256:9ee519e960b5fc6e0bbf13d0ecba9ce5f6306cb929354504bf03cc30f59a8f63"},
{file = "curl_cffi-0.7.0b4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:694d88f7065c59c651970f14bc415431f65ac601a9ba537463d70f432a48ccfc"},
{file = "curl_cffi-0.7.0b4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6faf01aa8d98d322b877d3d801544692c73729ea6eb4a45af83514a4ecd1c8fe"},
{file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d39849371bbf3eab048113693715a8da5c729c494cccfa1128d768d96fdc31e"},
{file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3a5099b98c4bf12cc1afecb3409a9c57e7ebce9447a03c96dfb661ad8fa5e79"},
{file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e3616141a2a0be7896e7dc5da1ed3965e1a78aa2e563d8aba7a641135aeaf1b"},
{file = "curl_cffi-0.7.0b4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd16cccc0d3e93c2fbc4f4cb7cce0e10cb2ef7f8957352f3f0d770f0d6e05702"},
{file = "curl_cffi-0.7.0b4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d65aa649abb24020c2ad7b3ce45e2816d1ffe25df06f1a6b0f52fbf353af82e0"},
{file = "curl_cffi-0.7.0b4-cp38-abi3-win32.whl", hash = "sha256:b55c53bb6dff713cb63f76e2f147e2d54c984b1b09df66b08f52f3acae1aeca0"},
{file = "curl_cffi-0.7.0b4-cp38-abi3-win_amd64.whl", hash = "sha256:449ab07e07335558997cd62296b5c4f16ce27630de7830e4ad22441049a0ef1e"},
{file = "curl_cffi-0.7.0b4.tar.gz", hash = "sha256:c09a062b8aac93d4890d2c33b7053c0e1a5cf275328b80c1fb1a950310df75f2"},
]
[package.dependencies]
certifi = "*"
certifi = ">=2024.2.2"
cffi = ">=1.12.0"
[package.extras]
build = ["cibuildwheel", "wheel"]
dev = ["autoflake (==1.4)", "coverage (==6.4.1)", "cryptography (==38.0.3)", "flake8 (==6.0.0)", "flake8-bugbear (==22.7.1)", "flake8-pie (==0.15.0)", "httpx (==0.23.1)", "mypy (==0.971)", "nest-asyncio (==1.6.0)", "pytest (==7.1.2)", "pytest-asyncio (==0.19.0)", "pytest-trio (==0.7.0)", "ruff (==0.1.14)", "trio (==0.21.0)", "trio-typing (==0.7.0)", "trustme (==0.9.0)", "types-certifi (==2021.10.8.2)", "uvicorn (==0.18.3)", "websockets (==11.0.3)"]
test = ["cryptography (==38.0.3)", "fastapi (==0.100.0)", "httpx (==0.23.1)", "nest-asyncio (==1.6.0)", "proxy.py (==2.4.3)", "pytest (==7.1.2)", "pytest-asyncio (==0.19.0)", "pytest-trio (==0.7.0)", "python-multipart (==0.0.6)", "trio (==0.21.0)", "trio-typing (==0.7.0)", "trustme (==0.9.0)", "types-certifi (==2021.10.8.2)", "uvicorn (==0.18.3)", "websockets (==11.0.3)"]
dev = ["charset-normalizer (>=3.3.2,<4.0)", "coverage (>=6.4.1,<7.0)", "cryptography (>=42.0.5,<43.0)", "httpx (==0.23.1)", "mypy (>=1.9.0,<2.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "ruff (>=0.3.5,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"]
test = ["charset-normalizer (>=3.3.2,<4.0)", "cryptography (>=42.0.5,<43.0)", "fastapi (==0.110.0)", "httpx (==0.23.1)", "proxy.py (>=2.4.3,<3.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "python-multipart (>=0.0.9,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"]
[[package]]
name = "distlib"
@@ -2004,4 +2007,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<4.0"
content-hash = "db110c0b1b9e30309fcd4e0f80e4369f20b055651f1bef81d0f5e6153a250dec"
content-hash = "4d40927d8e7b25e21de0f56910d6dce05ba2b7c8ce71c465d9e23463f74bc94a"

View File

@@ -61,7 +61,7 @@ subtitle-filter = "^1.4.9"
Unidecode = "^1.3.8"
urllib3 = "^2.2.1"
chardet = "^5.2.0"
curl-cffi = "^0.6.2"
curl-cffi = "^0.7.0b4"
language-data = "^1.2.0"
marisa-trie = "^1.1.0"