release: 1.6.0

acgnhik 2022-04-09 23:01:32 +08:00
parent 5c9887a2a8
commit adf6e36b8f
96 changed files with 4258 additions and 1004 deletions

.github/workflows/docker-hub.yml (new file)

@ -0,0 +1,47 @@
name: CI to Docker Hub
on:
push:
tags:
- v*.*.*
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: ./
file: ./Dockerfile
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ secrets.DOCKER_HUB_USERNAME }}/blrec:latest,${{ secrets.DOCKER_HUB_USERNAME }}/blrec:${{ github.ref_name }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}

.github/workflows/ghcr.yml (new file)

@ -0,0 +1,48 @@
name: CI to GHCR
on:
push:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to ghcr
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: ./
file: ./Dockerfile
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ghcr.io/${{ github.repository_owner }}/blrec:latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}

.github/workflows/portable.yml (new file)

@ -0,0 +1,105 @@
name: Windows portable
on:
push:
tags:
- v*.*.*
env:
FFMPEG_ARCHIVE_URL: https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-n5.0-latest-win64-lgpl-shared-5.0.zip
FFMPEG_ARCHIVE_NAME: ffmpeg-n5.0-latest-win64-lgpl-shared-5.0.zip
PYTHON_ARCHIVE_URL: https://www.python.org/ftp/python/3.10.4/python-3.10.4-embed-amd64.zip
jobs:
build:
name: Build Windows portable distributions
runs-on: windows-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python
uses: actions/setup-python@v3
with:
python-version: "3.10"
- name: Download ffmpeg archive
run: Invoke-WebRequest -Uri $($env:FFMPEG_ARCHIVE_URL) -OutFile ffmpeg.zip
- name: Download python archive
run: Invoke-WebRequest -Uri $($env:PYTHON_ARCHIVE_URL) -OutFile python.zip
- name: Create build directory and dist directory
run: New-Item -Path @("build", "dist") -ItemType Directory
- name: Unzip ffmpeg archive
run: Expand-Archive -LiteralPath "ffmpeg.zip" -DestinationPath "build"
- name: Unzip Python archive
run: Expand-Archive -LiteralPath "python.zip" -DestinationPath "build\python"
- name: Enter build directory
run: |
Set-Location -Path "build"
ls
- name: Rename ffmpeg directory
working-directory: build
run: Rename-Item -Path $($env:FFMPEG_ARCHIVE_NAME).Substring(0, $($env:FFMPEG_ARCHIVE_NAME).Length - 4) "ffmpeg"
- name: Slimming ffmpeg
working-directory: build
run: |
Get-ChildItem -Path "ffmpeg" -Exclude @("LICENSE.txt", "bin") | Remove-Item -Recurse
ls ffmpeg
- name: Create venv
working-directory: build
run: python -m venv venv
- name: Install packages
working-directory: build
run: |
ls ${{ github.workspace }}
.\venv\Scripts\activate
pip install ${{ github.workspace }}
ls venv\Lib\site-packages
- name: Copy site-packages
shell: cmd
working-directory: build
run: (robocopy venv\Lib\site-packages python\Lib\site-packages /mir /xd __pycache__* pip* setuptools*) ^& IF %ERRORLEVEL% LSS 8 SET ERRORLEVEL = 0
# https://ss64.com/nt/robocopy-exit.html
# https://superuser.com/questions/280425/getting-robocopy-to-return-a-proper-exit-code
# https://social.msdn.microsoft.com/Forums/en-US/d599833c-dcea-46f5-85e9-b1f028a0fefe/robocopy-exits-with-error-code-1
- name: Add search path
working-directory: build
run: Add-Content -Path "python\python310._pth" "Lib\site-packages"
- name: Copy run.bat
working-directory: build
run: Copy-Item "${{ github.workspace }}\run.bat" -Destination ".\run.bat"
- name: Exit build directory
working-directory: build
run: |
ls
Set-Location -Path ".."
- name: Zip files
run: |
ls build
Compress-Archive -Path @("build\run.bat", "build\python", "build\ffmpeg") -DestinationPath "dist\blrec-${{ github.ref_name }}-win64.zip"
ls dist
- name: Upload distributions to release
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: dist\*
tag: ${{ github.ref }}
overwrite: true
file_glob: true
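
The robocopy step above relies on robocopy's exit-code convention: codes below 8 mean success (0 = nothing copied, 1 = files copied, and so on), while 8 and above indicate failures, so the step masks anything below 8. A hedged Python equivalent of that check (illustrative only, not part of this workflow):

import subprocess

# Mirror the workflow's robocopy invocation and treat exit codes < 8 as success,
# per https://ss64.com/nt/robocopy-exit.html (referenced in the workflow above).
result = subprocess.run([
    'robocopy', r'venv\Lib\site-packages', r'python\Lib\site-packages',
    '/mir', '/xd', '__pycache__*', 'pip*', 'setuptools*',
])
if result.returncode >= 8:
    raise SystemExit(f'robocopy failed with exit code {result.returncode}')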

.github/workflows/pypi.yml (new file)

@ -0,0 +1,31 @@
name: CI to PyPI
on:
push:
tags:
- v*.*.*
jobs:
build-and-publish:
name: Build and publish Python 🐍 distributions 📦 to PyPI
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Python
uses: actions/setup-python@v3
with:
python-version: 3.8
- name: Install pypa/build
run: python -m pip install build --user
- name: Build a binary wheel and a source tarball
run: python -m build --sdist --wheel --outdir dist/ .
- name: Publish distribution 📦 to PyPI
uses: pypa/gh-action-pypi-publish@v1.5.0
with:
password: ${{ secrets.PYPI_API_TOKEN }}


@ -1,5 +1,24 @@
# Changelog
## 1.6.0
- Updated the Pushplus push notification URL (issue #26)
- Email notifications support STARTTLS (issue #35)
- Start recording automatically based on danmaku when no stream-push event is received before the timeout (issues #31, #36)
- Added a new source file deletion strategy
- Added the app API and prefer it over the web API (to reduce the chance of getting banned)
- Improved task loading at startup (the app is accessible without waiting for all tasks to finish loading)
- Support recording HLS live streams (experimental)
- Removed the limit of at most 3 task cards per row (the grid layout is now adaptive)
- Show recording details on task cards (opened from the menu at the bottom-right corner of a card)
- Added network details and charts to the task detail page
### P.S.
Recording HLS live streams requires ffmpeg; probing live stream information requires ffprobe.
When running from the command line, install ffmpeg and ffprobe yourself; the Docker image and the Windows portable package already bundle them, so no extra installation is needed (a quick availability check is sketched below).
## 1.6.0-alpha
- The REST API supports getting the path and metadata of the flv file currently being recorded
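
A minimal availability check for these external tools, as referenced in the P.S. above (an illustrative sketch, not part of this release; blrec itself does not ship this helper):

import shutil

def check_external_tools() -> None:
    # HLS recording needs ffmpeg; stream probing needs ffprobe.
    for tool in ('ffmpeg', 'ffprobe'):
        path = shutil.which(tool)
        if path is None:
            print(f'{tool} not found on PATH')
        else:
            print(f'{tool}: {path}')

check_external_tools()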


@ -6,13 +6,13 @@ WORKDIR /app
VOLUME ["/cfg", "/log", "/rec"]
COPY src src/
COPY setup.py setup.cfg .
COPY setup.py setup.cfg ./
RUN apt-get update \
&& apt-get install -y --no-install-recommends build-essential python3-dev \
&& rm -rf /var/lib/apt/lists/* \
&& pip3 install --no-cache-dir -e . \
&& apt-get purge -y --auto-remove build-essential python3-dev
RUN apt-get update && \
apt-get install -y --no-install-recommends ffmpeg build-essential python3-dev && \
rm -rf /var/lib/apt/lists/* && \
pip3 install --no-cache-dir -e . && \
apt-get purge -y --auto-remove build-essential python3-dev
# ref: https://github.com/docker-library/python/issues/60#issuecomment-134322383
ENV DEFAULT_SETTINGS_FILE=/cfg/settings.toml


@ -6,15 +6,15 @@ WORKDIR /app
VOLUME ["/cfg", "/log", "/rec"]
COPY src src/
COPY setup.py setup.cfg .
COPY setup.py setup.cfg ./
RUN sed -i "s/deb.debian.org/mirrors.aliyun.com/g" /etc/apt/sources.list \
&& sed -i "s/security.debian.org/mirrors.aliyun.com/g" /etc/apt/sources.list \
&& apt-get update \
&& apt-get install -y --no-install-recommends build-essential python3-dev \
&& rm -rf /var/lib/apt/lists/* \
&& pip3 install -i https://mirrors.aliyun.com/pypi/simple --no-cache-dir -e . \
&& apt-get purge -y --auto-remove build-essential python3-dev
RUN sed -i "s/deb.debian.org/mirrors.tuna.tsinghua.edu.cn/g" /etc/apt/sources.list && \
sed -i "s/security.debian.org/mirrors.tuna.tsinghua.edu.cn/g" /etc/apt/sources.list && \
apt-get update && \
apt-get install -y --no-install-recommends ffmpeg build-essential python3-dev && \
rm -rf /var/lib/apt/lists/* && \
pip3 install -i https://mirrors.aliyun.com/pypi/simple --no-cache-dir -e . && \
apt-get purge -y --auto-remove build-essential python3-dev
# ref: https://github.com/docker-library/python/issues/60#issuecomment-134322383
ENV DEFAULT_SETTINGS_FILE=/cfg/settings.toml


@ -29,7 +29,7 @@
## Prerequisites
Python 3.8+
ffmpeg (if conversion from flv to mp4 is needed)
ffmpeg and ffprobe
## Installation

run.bat (new file)

@ -0,0 +1,22 @@
@echo off
chcp 65001
set PATH=.\ffmpeg\bin;.\python;%PATH%
REM Do not use a proxy
set no_proxy=*
REM Default local host and port binding
set host=localhost
set port=2233
REM Server host and port binding; uncomment and adjust to your own setup.
REM set host=0.0.0.0
REM set port=80
set DEFAULT_LOG_DIR=日志文件
set DEFAULT_OUT_DIR=录播文件
python -m blrec -c settings.toml --open --host %host% --port %port%
pause


@ -38,6 +38,7 @@ install_requires =
typing-extensions >= 3.10.0.0
fastapi >= 0.70.0, < 0.71.0
email_validator >= 1.1.3, < 2.0.0
click < 8.1.0
typer >= 0.4.0, < 0.5.0
aiohttp >= 3.8.1, < 4.0.0
requests >= 2.24.0, < 3.0.0
@ -49,6 +50,8 @@ install_requires =
attrs >= 21.2.0, < 22.0.0
lxml >= 4.6.4, < 5.0.0
toml >= 0.10.2, < 0.11.0
m3u8 >= 1.0.0, < 2.0.0
jsonpath == 0.82
psutil >= 5.8.0, < 6.0.0
rx >= 3.2.0, < 4.0.0
bitarray >= 2.2.5, < 3.0.0


@ -1,4 +1,4 @@
__prog__ = 'blrec'
__version__ = '1.6.0-alpha'
__version__ = '1.6.0'
__github__ = 'https://github.com/acgnhiki/blrec'


@ -8,6 +8,7 @@ import psutil
from . import __prog__, __version__
from .flv.data_analyser import MetaData
from .core.stream_analyzer import StreamProfile
from .disk_space import SpaceMonitor, SpaceReclaimer
from .bili.helpers import ensure_room_id
from .task import (
@ -17,7 +18,7 @@ from .task import (
VideoFileDetail,
DanmakuFileDetail,
)
from .exception import ExistsError, ExceptionHandler
from .exception import ExistsError, ExceptionHandler, exception_callback
from .event.event_submitters import SpaceEventSubmitter
from .setting import (
SettingsManager,
@ -103,8 +104,9 @@ class Application:
async def launch(self) -> None:
self._setup()
await self._task_manager.load_all_tasks()
logger.info(f'Launched Application v{__version__}')
task = asyncio.create_task(self._task_manager.load_all_tasks())
task.add_done_callback(exception_callback)
async def exit(self) -> None:
await self._exit()
@ -130,68 +132,104 @@ class Application:
async def add_task(self, room_id: int) -> int:
room_id = await ensure_room_id(room_id)
if self._settings_manager.has_task_settings(room_id):
if self._task_manager.has_task(room_id):
raise ExistsError(
f"a task for the room {room_id} is already existed"
f'a task for the room {room_id} already exists'
)
settings = await self._settings_manager.add_task_settings(room_id)
settings = self._settings_manager.find_task_settings(room_id)
if not settings:
settings = await self._settings_manager.add_task_settings(room_id)
await self._task_manager.add_task(settings)
logger.info(f'Added task: {room_id}')
return room_id
async def remove_task(self, room_id: int) -> None:
logger.info(f'Removing task {room_id}...')
await self._task_manager.remove_task(room_id)
await self._settings_manager.remove_task_settings(room_id)
logger.info(f'Removed task: {room_id}')
logger.info(f'Successfully removed task {room_id}')
async def remove_all_tasks(self) -> None:
logger.info('Removing all tasks...')
await self._task_manager.remove_all_tasks()
await self._settings_manager.remove_all_task_settings()
logger.info('Removed all tasks')
logger.info('Successfully removed all tasks')
async def start_task(self, room_id: int) -> None:
logger.info(f'Starting task {room_id}...')
await self._task_manager.start_task(room_id)
await self._settings_manager.mark_task_enabled(room_id)
logger.info(f'Started task: {room_id}')
logger.info(f'Successfully started task {room_id}')
async def stop_task(self, room_id: int, force: bool = False) -> None:
logger.info(f'Stopping task {room_id}...')
await self._task_manager.stop_task(room_id, force)
await self._settings_manager.mark_task_disabled(room_id)
logger.info(f'Stopped task: {room_id}')
logger.info(f'Successfully stopped task {room_id}')
async def start_all_tasks(self) -> None:
logger.info('Starting all tasks...')
await self._task_manager.start_all_tasks()
await self._settings_manager.mark_all_tasks_enabled()
logger.info('Started all tasks')
logger.info('Successfully started all tasks')
async def stop_all_tasks(self, force: bool = False) -> None:
logger.info('Stopping all tasks...')
await self._task_manager.stop_all_tasks(force)
await self._settings_manager.mark_all_tasks_disabled()
logger.info('Stopped all tasks')
logger.info('Successfully stopped all tasks')
async def enable_task_monitor(self, room_id: int) -> None:
logger.info(f'Enabling monitor for task {room_id}...')
await self._task_manager.enable_task_monitor(room_id)
await self._settings_manager.mark_task_monitor_enabled(room_id)
logger.info(f'Successfully enabled monitor for task {room_id}')
async def disable_task_monitor(self, room_id: int) -> None:
logger.info(f'Disabling monitor for task {room_id}...')
await self._task_manager.disable_task_monitor(room_id)
await self._settings_manager.mark_task_monitor_disabled(room_id)
logger.info(f'Successfully disabled monitor for task {room_id}')
async def enable_all_task_monitors(self) -> None:
logger.info('Enabling monitors for all tasks...')
await self._task_manager.enable_all_task_monitors()
await self._settings_manager.mark_all_task_monitors_enabled()
logger.info('Successfully enabled monitors for all tasks')
async def disable_all_task_monitors(self) -> None:
logger.info('Disabling monitors for all tasks...')
await self._task_manager.disable_all_task_monitors()
await self._settings_manager.mark_all_task_monitors_disabled()
logger.info('Successfully disabled monitors for all tasks')
async def enable_task_recorder(self, room_id: int) -> None:
logger.info(f'Enabling recorder for task {room_id}...')
await self._task_manager.enable_task_recorder(room_id)
await self._settings_manager.mark_task_recorder_enabled(room_id)
logger.info(f'Enabled task recorder: {room_id}')
logger.info(f'Successfully enabled recorder for task {room_id}')
async def disable_task_recorder(
self, room_id: int, force: bool = False
) -> None:
logger.info(f'Disabling recorder for task {room_id}...')
await self._task_manager.disable_task_recorder(room_id, force)
await self._settings_manager.mark_task_recorder_disabled(room_id)
logger.info(f'Disabled task recorder: {room_id}')
logger.info(f'Successfully disabled recorder for task {room_id}')
async def enable_all_task_recorders(self) -> None:
logger.info('Enabling recorders for all tasks...')
await self._task_manager.enable_all_task_recorders()
await self._settings_manager.mark_all_task_recorders_enabled()
logger.info('Enabled all task recorders')
logger.info('Successfully enabled recorders for all tasks')
async def disable_all_task_recorders(self, force: bool = False) -> None:
logger.info('Disabling recorders for all tasks...')
await self._task_manager.disable_all_task_recorders(force)
await self._settings_manager.mark_all_task_recorders_disabled()
logger.info('Disabled all task recorders')
logger.info('Successfully disabled recorders for all tasks')
def get_task_data(self, room_id: int) -> TaskData:
return self._task_manager.get_task_data(room_id)
@ -205,6 +243,9 @@ class Application:
def get_task_metadata(self, room_id: int) -> Optional[MetaData]:
return self._task_manager.get_task_metadata(room_id)
def get_task_stream_profile(self, room_id: int) -> StreamProfile:
return self._task_manager.get_task_stream_profile(room_id)
def get_task_video_file_details(
self, room_id: int
) -> Iterator[VideoFileDetail]:
@ -222,10 +263,14 @@ class Application:
return self._task_manager.cut_stream(room_id)
async def update_task_info(self, room_id: int) -> None:
logger.info(f'Updating info for task {room_id}...')
await self._task_manager.update_task_info(room_id)
logger.info(f'Successfully updated info for task {room_id}')
async def update_all_task_infos(self) -> None:
logger.info('Updating info for all tasks...')
await self._task_manager.update_all_task_infos()
logger.info('Successfully updated info for all tasks')
def get_settings(
self,


@ -1,4 +1,8 @@
from typing import Any, Final
from abc import ABC
import hashlib
from urllib.parse import urlencode
from datetime import datetime
from typing import Mapping, Dict, Any, Final
import aiohttp
from tenacity import (
@ -11,26 +15,10 @@ from .typing import QualityNumber, JsonResponse, ResponseData
from .exceptions import ApiRequestError
__all__ = 'WebApi',
__all__ = 'AppApi', 'WebApi'
class WebApi:
BASE_API_URL: Final[str] = 'https://api.bilibili.com'
BASE_LIVE_API_URL: Final[str] = 'https://api.live.bilibili.com'
GET_USER_INFO_URL: Final[str] = BASE_API_URL + '/x/space/acc/info'
GET_DANMU_INFO_URL: Final[str] = BASE_LIVE_API_URL + \
'/xlive/web-room/v1/index/getDanmuInfo'
ROOM_INIT_URL: Final[str] = BASE_LIVE_API_URL + '/room/v1/Room/room_init'
GET_INFO_URL: Final[str] = BASE_LIVE_API_URL + '/room/v1/Room/get_info'
GET_INFO_BY_ROOM_URL: Final[str] = BASE_LIVE_API_URL + \
'/xlive/web-room/v1/index/getInfoByRoom'
GET_ROOM_PLAY_INFO_URL: Final[str] = BASE_LIVE_API_URL + \
'/xlive/web-room/v2/index/getRoomPlayInfo'
GET_TIMESTAMP_URL: Final[str] = BASE_LIVE_API_URL + \
'/av/v1/Time/getTimestamp?platform=pc'
class BaseApi(ABC):
def __init__(self, session: aiohttp.ClientSession):
self._session = session
self.timeout = 10
@ -58,6 +46,135 @@ class WebApi:
self._check_response(json_res)
return json_res
class AppApi(BaseApi):
# taken from https://github.com/SocialSisterYi/bilibili-API-collect/blob/master/other/API_sign.md # noqa
_appkey = '1d8b6e7d45233436'
_appsec = '560c52ccd288fed045859ed18bffd973'
_headers = {
'User-Agent': 'Mozilla/5.0 BiliDroid/6.64.0 (bbcallen@gmail.com) os/android model/Unknown mobi_app/android build/6640400 channel/bili innerVer/6640400 osVer/6.0.1 network/2', # noqa
'Connection': 'Keep-Alive',
'Accept-Encoding': 'gzip',
}
@classmethod
def signed(cls, params: Mapping[str, Any]) -> Dict[str, Any]:
if isinstance(params, Mapping):
params = dict(sorted({**params, 'appkey': cls._appkey}.items()))
else:
raise ValueError(type(params))
query = urlencode(params, doseq=True)
sign = hashlib.md5((query + cls._appsec).encode()).hexdigest()
params.update(sign=sign)
return params
async def get_room_play_info(
self,
room_id: int,
qn: QualityNumber = 10000,
*,
only_video: bool = False,
only_audio: bool = False,
) -> ResponseData:
url = 'https://api.live.bilibili.com/xlive/app-room/v2/index/getRoomPlayInfo' # noqa
params = self.signed({
'actionKey': 'appkey',
'build': '6640400',
'channel': 'bili',
'codec': '0,1', # 0: avc, 1: hevc
'device': 'android',
'device_name': 'Unknown',
'disable_rcmd': '0',
'dolby': '1',
'format': '0,1,2', # 0: flv, 1: ts, 2: fmp4
'free_type': '0',
'http': '1',
'mask': '0',
'mobi_app': 'android',
'need_hdr': '0',
'no_playurl': '0',
'only_audio': '1' if only_audio else '0',
'only_video': '1' if only_video else '0',
'platform': 'android',
'play_type': '0',
'protocol': '0,1',
'qn': qn,
'room_id': room_id,
'ts': int(datetime.utcnow().timestamp()),
})
r = await self._get(url, params=params, headers=self._headers)
return r['data']
async def get_info_by_room(self, room_id: int) -> ResponseData:
url = 'https://api.live.bilibili.com/xlive/app-room/v1/index/getInfoByRoom' # noqa
params = self.signed({
'actionKey': 'appkey',
'build': '6640400',
'channel': 'bili',
'device': 'android',
'mobi_app': 'android',
'platform': 'android',
'room_id': room_id,
'ts': int(datetime.utcnow().timestamp()),
})
r = await self._get(url, params=params)
return r['data']
async def get_user_info(self, uid: int) -> ResponseData:
url = 'https://app.bilibili.com/x/v2/space'
params = self.signed({
'build': '6640400',
'channel': 'bili',
'mobi_app': 'android',
'platform': 'android',
'ts': int(datetime.utcnow().timestamp()),
'vmid': uid,
})
r = await self._get(url, params=params)
return r['data']
async def get_danmu_info(self, room_id: int) -> ResponseData:
url = 'https://api.live.bilibili.com/xlive/app-room/v1/index/getDanmuInfo' # noqa
params = self.signed({
'actionKey': 'appkey',
'build': '6640400',
'channel': 'bili',
'device': 'android',
'mobi_app': 'android',
'platform': 'android',
'room_id': room_id,
'ts': int(datetime.utcnow().timestamp()),
})
r = await self._get(url, params=params)
return r['data']
class WebApi(BaseApi):
BASE_API_URL: Final[str] = 'https://api.bilibili.com'
BASE_LIVE_API_URL: Final[str] = 'https://api.live.bilibili.com'
GET_USER_INFO_URL: Final[str] = BASE_API_URL + '/x/space/acc/info'
GET_DANMU_INFO_URL: Final[str] = BASE_LIVE_API_URL + \
'/xlive/web-room/v1/index/getDanmuInfo'
ROOM_INIT_URL: Final[str] = BASE_LIVE_API_URL + '/room/v1/Room/room_init'
GET_INFO_URL: Final[str] = BASE_LIVE_API_URL + '/room/v1/Room/get_info'
GET_INFO_BY_ROOM_URL: Final[str] = BASE_LIVE_API_URL + \
'/xlive/web-room/v1/index/getInfoByRoom'
GET_ROOM_PLAY_INFO_URL: Final[str] = BASE_LIVE_API_URL + \
'/xlive/web-room/v2/index/getRoomPlayInfo'
GET_TIMESTAMP_URL: Final[str] = BASE_LIVE_API_URL + \
'/av/v1/Time/getTimestamp?platform=pc'
async def room_init(self, room_id: int) -> ResponseData:
r = await self._get(self.ROOM_INIT_URL, params={'id': room_id})
return r['data']
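
For reference, the request signing implemented by AppApi.signed above reduces to: add the appkey, sort the parameters, urlencode them, and take the MD5 of the query string plus the app secret. A standalone sketch using the same appkey/appsec (the example parameter values are arbitrary):

import hashlib
from urllib.parse import urlencode

APPKEY = '1d8b6e7d45233436'
APPSEC = '560c52ccd288fed045859ed18bffd973'

def sign_params(params: dict) -> dict:
    # sort keys with appkey included, urlencode, then md5(query + appsec) as 'sign'
    signed = dict(sorted({**params, 'appkey': APPKEY}.items()))
    query = urlencode(signed, doseq=True)
    signed['sign'] = hashlib.md5((query + APPSEC).encode()).hexdigest()
    return signed

print(sign_params({'room_id': 23058, 'ts': 1649491200}))  # arbitrary example values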


@ -15,7 +15,7 @@ from tenacity import (
retry_if_exception_type,
)
from .api import WebApi
from .api import AppApi, WebApi
from .typing import Danmaku
from ..event.event_emitter import EventListener, EventEmitter
from ..exception import exception_callback
@ -52,14 +52,16 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
def __init__(
self,
session: ClientSession,
api: WebApi,
appapi: AppApi,
webapi: WebApi,
room_id: int,
*,
max_retries: int = 10,
) -> None:
super().__init__()
self.session = session
self.api = api
self.appapi = appapi
self.webapi = webapi
self._room_id = room_id
self._host_index: int = 0
@ -151,7 +153,10 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
raise ValueError(f'Unexpected code: {code}')
async def _update_danmu_info(self) -> None:
self._danmu_info = await self.api.get_danmu_info(self._room_id)
try:
self._danmu_info = await self.appapi.get_danmu_info(self._room_id)
except Exception:
self._danmu_info = await self.webapi.get_danmu_info(self._room_id)
logger.debug('Danmu info updated')
async def _disconnect(self) -> None:
@ -177,7 +182,10 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
async def _send_heartbeat(self) -> None:
data = Frame.encode(WS.OP_HEARTBEAT, '')
while True:
await self._ws.send_bytes(data)
try:
await self._ws.send_bytes(data)
except Exception as exc:
logger.debug(f'Failed to send heartbeat due to: {repr(exc)}')
await asyncio.sleep(self._HEARTBEAT_INTERVAL)
async def _create_message_loop(self) -> None:


@ -20,5 +20,17 @@ class LiveRoomEncrypted(Exception):
pass
class NoStreamUrlAvailable(Exception):
class NoStreamAvailable(Exception):
pass
class NoStreamFormatAvailable(Exception):
pass
class NoStreamCodecAvailable(Exception):
pass
class NoStreamQualityAvailable(Exception):
pass


@ -2,7 +2,7 @@
import aiohttp
from .api import WebApi
from .typing import ResponseData
from .typing import ResponseData, QualityNumber
from .exceptions import ApiRequestError
from ..exception import NotFoundError
@ -27,3 +27,16 @@ async def ensure_room_id(room_id: int) -> int:
raise
else:
return result['room_id']
def get_quality_name(qn: QualityNumber) -> str:
QUALITY_MAPPING = {
20000: '4K',
10000: '原画',
401: '蓝光(杜比)',
400: '蓝光',
250: '超清',
150: '高清',
80: '流畅',
}
return QUALITY_MAPPING.get(qn, '')
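
Usage of the helper added above is straightforward; unknown quality numbers fall back to an empty string (the import path is assumed from the package layout):

from blrec.bili.helpers import get_quality_name  # path assumed

print(get_quality_name(10000))  # '原画'
print(get_quality_name(123))    # '' for unknown quality numbers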


@ -1,10 +1,10 @@
import asyncio
import re
import json
import asyncio
from typing import Dict, List, cast
import aiohttp
from jsonpath import jsonpath
from tenacity import (
retry,
wait_exponential,
@ -13,11 +13,12 @@ from tenacity import (
)
from .api import WebApi
from .api import AppApi, WebApi
from .models import LiveStatus, RoomInfo, UserInfo
from .typing import QualityNumber, StreamFormat, ResponseData
from .typing import StreamFormat, QualityNumber, StreamCodec, ResponseData
from .exceptions import (
LiveRoomHidden, LiveRoomLocked, LiveRoomEncrypted, NoStreamUrlAvailable
LiveRoomHidden, LiveRoomLocked, LiveRoomEncrypted, NoStreamAvailable,
NoStreamFormatAvailable, NoStreamCodecAvailable, NoStreamQualityAvailable,
)
@ -63,8 +64,9 @@ class Live:
return {
'Referer': 'https://live.bilibili.com/',
'Connection': 'Keep-Alive',
'Accept-Encoding': 'gzip',
'User-Agent': self._user_agent,
'cookie': self._cookie,
'Cookie': self._cookie,
}
@property
@ -72,8 +74,12 @@ class Live:
return self._session
@property
def api(self) -> WebApi:
return self._api
def appapi(self) -> AppApi:
return self._appapi
@property
def webapi(self) -> WebApi:
return self._webapi
@property
def room_id(self) -> int:
@ -89,11 +95,13 @@ class Live:
async def init(self) -> None:
self._session = aiohttp.ClientSession(
connector=aiohttp.TCPConnector(limit=200),
headers=self.headers,
raise_for_status=True,
trust_env=True,
)
self._api = WebApi(self._session)
self._appapi = AppApi(self._session)
self._webapi = WebApi(self._session)
self._room_info = await self.get_room_info()
self._user_info = await self.get_user_info(self._room_info.uid)
@ -125,28 +133,19 @@ class Live:
async def update_info(self) -> None:
await asyncio.wait([self.update_user_info(), self.update_room_info()])
@retry(
reraise=True,
retry=retry_if_exception_type((
asyncio.TimeoutError, aiohttp.ClientError,
)),
wait=wait_exponential(max=10),
stop=stop_after_delay(60),
)
async def update_user_info(self) -> None:
self._user_info = await self.get_user_info(self._room_info.uid)
async def update_room_info(self) -> None:
self._room_info = await self.get_room_info()
@retry(
reraise=True,
retry=retry_if_exception_type((
asyncio.TimeoutError, aiohttp.ClientError,
)),
wait=wait_exponential(max=10),
stop=stop_after_delay(60),
)
async def update_room_info(self) -> None:
self._room_info = await self.get_room_info()
async def get_room_info(self) -> RoomInfo:
try:
# frequent requests will be intercepted by the server's firewall!
@ -154,44 +153,54 @@ class Live:
except Exception:
# more cpu consumption
room_info_data = await self._get_room_info_via_html_page()
return RoomInfo.from_data(room_info_data)
@retry(
retry=retry_if_exception_type((
asyncio.TimeoutError, aiohttp.ClientError,
)),
wait=wait_exponential(max=10),
stop=stop_after_delay(60),
)
async def get_user_info(self, uid: int) -> UserInfo:
user_info_data = await self._api.get_user_info(uid)
return UserInfo.from_data(user_info_data)
try:
user_info_data = await self._appapi.get_user_info(uid)
return UserInfo.from_app_api_data(user_info_data)
except Exception:
user_info_data = await self._webapi.get_user_info(uid)
return UserInfo.from_web_api_data(user_info_data)
async def get_server_timestamp(self) -> int:
# the timestamp on the server at the moment in seconds
return await self._api.get_timestamp()
return await self._webapi.get_timestamp()
async def get_live_stream_urls(
self,
qn: QualityNumber = 10000,
format: StreamFormat = 'flv',
stream_format: StreamFormat = 'flv',
stream_codec: StreamCodec = 'avc',
) -> List[str]:
try:
data = await self._api.get_room_play_info(self._room_id, qn)
info = await self._appapi.get_room_play_info(self._room_id, qn)
except Exception:
# fallback to the html page global info
data = await self._get_room_play_info_via_html_page()
self._check_room_play_info(data)
stream = data['playurl_info']['playurl']['stream']
if stream[0]['format'][0]['codec'][0]['current_qn'] != qn:
raise
else:
self._check_room_play_info(data)
info = await self._webapi.get_room_play_info(self._room_id, qn)
self._check_room_play_info(info)
streams = jsonpath(info, '$.playurl_info.playurl.stream[*]')
if not streams:
raise NoStreamAvailable(qn, stream_format, stream_codec)
formats = jsonpath(streams, f'$[*].format[?(@.format_name == "{stream_format}")]') # noqa
if not formats:
raise NoStreamFormatAvailable(qn, stream_format, stream_codec)
codecs = jsonpath(formats, f'$[*].codec[?(@.codec_name == "{stream_codec}")]') # noqa
if not codecs:
raise NoStreamCodecAvailable(qn, stream_format, stream_codec)
codec = codecs[0]
streams = list(filter(
lambda s: s['format'][0]['format_name'] == format,
data['playurl_info']['playurl']['stream']
))
codec = streams[0]['format'][0]['codec'][0]
accept_qn = cast(List[QualityNumber], codec['accept_qn'])
if qn not in accept_qn:
return []
assert codec['current_qn'] == qn
if qn not in accept_qn or codec['current_qn'] != qn:
raise NoStreamQualityAvailable(qn, stream_format, stream_codec)
return [
i['host'] + codec['base_url'] + i['extra']
@ -199,16 +208,12 @@ class Live:
]
def _check_room_play_info(self, data: ResponseData) -> None:
if data['is_hidden']:
if data.get('is_hidden'):
raise LiveRoomHidden()
if data['is_locked']:
if data.get('is_locked'):
raise LiveRoomLocked()
if data['encrypted'] and not data['pwd_verified']:
if data.get('encrypted') and not data.get('pwd_verified'):
raise LiveRoomEncrypted()
try:
data['playurl_info']['playurl']['stream'][0]
except Exception:
raise NoStreamUrlAvailable()
async def _get_live_status_via_api(self) -> int:
room_info_data = await self._get_room_info_via_api()
@ -216,10 +221,14 @@ class Live:
async def _get_room_info_via_api(self) -> ResponseData:
try:
room_info_data = await self._api.get_info(self._room_id)
except Exception:
info_data = await self._api.get_info_by_room(self._room_id)
info_data = await self._appapi.get_info_by_room(self._room_id)
room_info_data = info_data['room_info']
except Exception:
try:
info_data = await self._webapi.get_info_by_room(self._room_id)
room_info_data = info_data['room_info']
except Exception:
room_info_data = await self._webapi.get_info(self._room_id)
return room_info_data
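
The jsonpath-based selection in get_live_stream_urls above can be exercised against a hand-made play-info structure; a sketch of how the three expressions narrow it down (the data is illustrative, shaped like the API response used above):

from jsonpath import jsonpath  # jsonpath == 0.82, as pinned in setup.cfg

info = {
    'playurl_info': {'playurl': {'stream': [
        {'format': [
            {'format_name': 'flv',
             'codec': [{'codec_name': 'avc', 'current_qn': 10000,
                        'accept_qn': [10000, 250], 'base_url': '/live.flv?x'}]},
        ]},
    ]}},
}

streams = jsonpath(info, '$.playurl_info.playurl.stream[*]')
formats = jsonpath(streams, '$[*].format[?(@.format_name == "flv")]')
codecs = jsonpath(formats, '$[*].codec[?(@.codec_name == "avc")]')
print(codecs[0]['base_url'])  # -> '/live.flv?x'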


@ -86,7 +86,7 @@ class UserInfo:
sign: str
@staticmethod
def from_data(data: ResponseData) -> 'UserInfo':
def from_web_api_data(data: ResponseData) -> 'UserInfo':
return UserInfo(
name=data['name'],
gender=data['sex'],
@ -95,3 +95,15 @@ class UserInfo:
level=data['level'],
sign=data['sign'],
)
@staticmethod
def from_app_api_data(data: ResponseData) -> 'UserInfo':
card = data['card']
return UserInfo(
name=card['name'],
gender=card['sex'],
face=ensure_scheme(card['face'], 'https'),
uid=card['mid'],
level=card['level_info']['current_level'],
sign=card['sign'],
)


@ -19,5 +19,10 @@ StreamFormat = Literal[
'fmp4',
]
StreamCodec = Literal[
'avc',
'hevc',
]
JsonResponse = Dict[str, Any]
ResponseData = Dict[str, Any]


@ -2,17 +2,16 @@ import io
import os
import re
import time
import errno
import asyncio
import logging
from threading import Thread
from abc import ABC, abstractmethod
from threading import Thread, Event
from datetime import datetime, timezone, timedelta
from collections import OrderedDict
from typing import Any, BinaryIO, Dict, Iterator, Optional, Tuple
import aiohttp
import requests
import urllib3
from tqdm import tqdm
from rx.subject import Subject
@ -25,30 +24,29 @@ from tenacity import (
wait_exponential,
stop_after_delay,
stop_after_attempt,
retry_if_result,
retry_if_exception_type,
retry_if_not_exception_type,
Retrying,
TryAgain,
)
from .. import __version__, __prog__, __github__
from .retry import wait_exponential_for_same_exceptions, before_sleep_log
from .stream_remuxer import StreamRemuxer
from .stream_analyzer import StreamProfile
from .statistics import StatisticsCalculator
from ..event.event_emitter import EventListener, EventEmitter
from ..bili.live import Live
from ..bili.typing import QualityNumber
from ..bili.typing import StreamFormat, QualityNumber
from ..bili.helpers import get_quality_name
from ..flv.data_analyser import MetaData
from ..flv.stream_processor import StreamProcessor, BaseOutputFileManager
from ..utils.mixins import AsyncCooperationMix, AsyncStoppableMixin
from ..utils.mixins import AsyncCooperationMixin, AsyncStoppableMixin
from ..path import escape_path
from ..flv.exceptions import FlvDataError, FlvStreamCorruptedError
from ..logging.room_id import aio_task_with_room_id
from ..bili.exceptions import (
LiveRoomHidden, LiveRoomLocked, LiveRoomEncrypted, NoStreamUrlAvailable
NoStreamFormatAvailable, NoStreamCodecAvailable, NoStreamQualityAvailable,
)
__all__ = 'StreamRecorderEventListener', 'StreamRecorder'
__all__ = 'BaseStreamRecorder', 'StreamRecorderEventListener', 'StreamProxy'
logger = logging.getLogger(__name__)
@ -68,10 +66,11 @@ class StreamRecorderEventListener(EventListener):
...
class StreamRecorder(
class BaseStreamRecorder(
EventEmitter[StreamRecorderEventListener],
AsyncCooperationMix,
AsyncCooperationMixin,
AsyncStoppableMixin,
ABC,
):
def __init__(
self,
@ -79,6 +78,7 @@ class StreamRecorder(
out_dir: str,
path_template: str,
*,
stream_format: StreamFormat = 'flv',
quality_number: QualityNumber = 10000,
buffer_size: Optional[int] = None,
read_timeout: Optional[int] = None,
@ -90,14 +90,19 @@ class StreamRecorder(
self._live = live
self._progress_bar: Optional[tqdm] = None
self._stream_remuxer: Optional[StreamRemuxer] = None
self._stream_processor: Optional[StreamProcessor] = None
self._calculator = StatisticsCalculator()
self._dl_calculator = StatisticsCalculator()
self._rec_calculator = StatisticsCalculator()
self._file_manager = OutputFileManager(
live, out_dir, path_template, buffer_size
)
self._stream_format = stream_format
self._quality_number = quality_number
self._real_stream_format: Optional[StreamFormat] = None
self._real_quality_number: Optional[QualityNumber] = None
self._use_candidate_stream: bool = False
self.buffer_size = buffer_size or io.DEFAULT_BUFFER_SIZE # bytes
self.read_timeout = read_timeout or 3 # seconds
self.disconnection_timeout = disconnection_timeout or 600 # seconds
@ -105,6 +110,12 @@ class StreamRecorder(
self._filesize_limit = filesize_limit or 0
self._duration_limit = duration_limit or 0
self._stream_url: str = ''
self._stream_host: str = ''
self._stream_profile: StreamProfile = {}
self._connection_recovered = Event()
def on_file_created(args: Tuple[str, int]) -> None:
logger.info(f"Video file created: '{args[0]}'")
self._emit_event('video_file_created', *args)
@ -117,16 +128,32 @@ class StreamRecorder(
self._file_manager.file_closes.subscribe(on_file_closed)
@property
def data_count(self) -> int:
return self._calculator.count
def stream_url(self) -> str:
return self._stream_url
@property
def data_rate(self) -> float:
return self._calculator.rate
def stream_host(self) -> str:
return self._stream_host
@property
def elapsed(self) -> float:
return self._calculator.elapsed
def dl_total(self) -> int:
return self._dl_calculator.count
@property
def dl_rate(self) -> float:
return self._dl_calculator.rate
@property
def rec_elapsed(self) -> float:
return self._rec_calculator.elapsed
@property
def rec_total(self) -> int:
return self._rec_calculator.count
@property
def rec_rate(self) -> float:
return self._rec_calculator.rate
@property
def out_dir(self) -> str:
@ -144,6 +171,15 @@ class StreamRecorder(
def path_template(self, value: str) -> None:
self._file_manager.path_template = value
@property
def stream_format(self) -> StreamFormat:
return self._stream_format
@stream_format.setter
def stream_format(self, value: StreamFormat) -> None:
self._stream_format = value
self._real_stream_format = None
@property
def quality_number(self) -> QualityNumber:
return self._quality_number
@ -153,9 +189,13 @@ class StreamRecorder(
self._quality_number = value
self._real_quality_number = None
@property
def real_stream_format(self) -> StreamFormat:
return self._real_stream_format or self.stream_format
@property
def real_quality_number(self) -> QualityNumber:
return self._real_quality_number or 10000
return self._real_quality_number or self.quality_number
@property
def filesize_limit(self) -> int:
@ -194,6 +234,10 @@ class StreamRecorder(
else:
return None
@property
def stream_profile(self) -> StreamProfile:
return self._stream_profile
def has_file(self) -> bool:
return self._file_manager.has_file()
@ -218,6 +262,14 @@ class StreamRecorder(
self._progress_bar.set_postfix_str(self._make_pbar_postfix())
async def _do_start(self) -> None:
logger.debug('Starting stream recorder...')
self._dl_calculator.reset()
self._rec_calculator.reset()
self._stream_url = ''
self._stream_host = ''
self._stream_profile = {}
self._use_candidate_stream = False
self._connection_recovered.clear()
self._thread = Thread(
target=self._run, name=f'StreamRecorder::{self._live.room_id}'
)
@ -231,147 +283,9 @@ class StreamRecorder(
await self._loop.run_in_executor(None, self._thread.join)
logger.debug('Stopped stream recorder')
@abstractmethod
def _run(self) -> None:
self._calculator.reset()
self._use_candidate_stream: bool = False
try:
with tqdm(
desc='Recording',
unit='B',
unit_scale=True,
unit_divisor=1024,
postfix=self._make_pbar_postfix(),
) as progress_bar:
self._progress_bar = progress_bar
def update_size(size: int) -> None:
progress_bar.update(size)
self._calculator.submit(size)
self._stream_processor = StreamProcessor(
self._file_manager,
filesize_limit=self._filesize_limit,
duration_limit=self._duration_limit,
metadata=self._make_metadata(),
analyse_data=True,
dedup_join=True,
save_extra_metadata=True,
)
self._stream_processor.size_updates.subscribe(update_size)
with requests.Session() as self._session:
self._main_loop()
except TryAgain:
pass
except Exception as e:
self._handle_exception(e)
finally:
if self._stream_processor is not None:
self._stream_processor.finalize()
self._stream_processor = None
self._progress_bar = None
self._calculator.freeze()
self._emit_event('stream_recording_stopped')
def _main_loop(self) -> None:
for attempt in Retrying(
reraise=True,
retry=(
retry_if_result(lambda r: not self._stopped) |
retry_if_not_exception_type((OSError, NotImplementedError))
),
wait=wait_exponential_for_same_exceptions(max=60),
before_sleep=before_sleep_log(logger, logging.DEBUG, 'main_loop'),
):
with attempt:
try:
self._streaming_loop()
except NoStreamUrlAvailable:
logger.debug('No stream url available')
if not self._stopped:
raise TryAgain
except OSError as e:
if e.errno == errno.ENOSPC:
# OSError(28, 'No space left on device')
raise
logger.critical(repr(e), exc_info=e)
raise TryAgain
except LiveRoomHidden:
logger.error('The live room has been hidden!')
self._stopped = True
except LiveRoomLocked:
logger.error('The live room has been locked!')
self._stopped = True
except LiveRoomEncrypted:
logger.error('The live room has been encrypted!')
self._stopped = True
except Exception as e:
logger.exception(e)
self._handle_exception(e)
raise
def _streaming_loop(self) -> None:
url = self._get_live_stream_url()
while not self._stopped:
try:
self._streaming(url)
except requests.exceptions.HTTPError as e:
# frequently occurred when the live just started or ended.
logger.debug(repr(e))
self._defer_retry(1, 'streaming_loop')
# the url may has been forbidden or expired
# when the status code is 404 or 403
if e.response.status_code in (403, 404):
url = self._get_live_stream_url()
except requests.exceptions.Timeout as e:
logger.warning(repr(e))
except urllib3.exceptions.TimeoutError as e:
logger.warning(repr(e))
except urllib3.exceptions.ProtocolError as e:
# ProtocolError('Connection broken: IncompleteRead(
logger.warning(repr(e))
except requests.exceptions.ConnectionError as e:
logger.warning(repr(e))
logger.info(
f'Waiting {self.disconnection_timeout} seconds '
'for connection recovery... '
)
try:
self._wait_connection_recovered(self.disconnection_timeout)
except TimeoutError as e:
logger.error(repr(e))
self._stopped = True
else:
logger.debug('Connection recovered')
except FlvDataError as e:
logger.warning(repr(e))
self._use_candidate_stream = not self._use_candidate_stream
url = self._get_live_stream_url()
except FlvStreamCorruptedError as e:
logger.warning(repr(e))
url = self._get_live_stream_url()
def _streaming(self, url: str) -> None:
logger.debug('Getting the live stream...')
with self._session.get(
url,
headers=self._live.headers,
stream=True,
timeout=self.read_timeout,
) as response:
logger.debug('Response received')
response.raise_for_status()
if self._stopped:
return
assert self._stream_processor is not None
self._stream_processor.process_stream(
io.BufferedReader(
ResponseProxy(response.raw), buffer_size=8192
)
)
raise NotImplementedError()
@retry(
reraise=True,
@ -383,23 +297,47 @@ class StreamRecorder(
)
def _get_live_stream_url(self) -> str:
qn = self._real_quality_number or self.quality_number
logger.debug(
'Getting the live stream url... '
f'qn: {qn}, use_candidate_stream: {self._use_candidate_stream}'
fmt = self._real_stream_format or self.stream_format
logger.info(
f'Getting the live stream url... qn: {qn}, format: {fmt}, '
f'use_candidate_stream: {self._use_candidate_stream}'
)
urls = self._run_coroutine(self._live.get_live_stream_urls(qn, 'flv'))
if self._real_quality_number is None:
if not urls:
try:
urls = self._run_coroutine(
self._live.get_live_stream_urls(qn, fmt)
)
except NoStreamQualityAvailable:
logger.info(
f'The specified stream quality ({qn}) is not available, '
'will use the original stream quality (10000) instead.'
)
self._real_quality_number = 10000
raise TryAgain
except NoStreamFormatAvailable:
if fmt == 'fmp4':
logger.info(
f'The specified video quality ({qn}) is not available, '
'using the original video quality (10000) instead.'
'The specified stream format (fmp4) is not available, '
'falling back to stream format (ts).'
)
self._real_quality_number = 10000
raise TryAgain
self._real_stream_format = 'ts'
elif fmt == 'ts':
logger.info(
'The specified stream format (ts) is not available, '
'falling back to stream format (flv).'
)
self._real_stream_format = 'flv'
else:
logger.info(f'The specified video quality ({qn}) is available')
self._real_quality_number = self.quality_number
raise NotImplementedError(fmt)
raise TryAgain
except NoStreamCodecAvailable as e:
logger.warning(repr(e))
raise TryAgain
else:
logger.info(
f'Adopted the stream format ({fmt}) and quality ({qn})'
)
self._real_quality_number = qn
self._real_stream_format = fmt
if not self._use_candidate_stream:
url = urls[0]
@ -407,12 +345,12 @@ class StreamRecorder(
try:
url = urls[1]
except IndexError:
logger.debug(
'no candidate stream url available, '
'using the primary stream url instead.'
logger.info(
'No candidate stream url available, '
'will use the primary stream url instead.'
)
url = urls[0]
logger.debug(f"Got live stream url: '{url}'")
logger.info(f"Got live stream url: '{url}'")
return url
@ -422,16 +360,28 @@ class StreamRecorder(
logger.debug(f'Retry {name} after {seconds} seconds')
time.sleep(seconds)
def _wait_connection_recovered(
self, timeout: Optional[int] = None, check_interval: int = 3
) -> None:
def _wait_for_connection_error(self) -> None:
Thread(
target=self._connectivity_checker,
name=f'ConnectivityChecker::{self._live.room_id}',
daemon=True,
).start()
self._connection_recovered.wait()
self._connection_recovered.clear()
def _connectivity_checker(self, check_interval: int = 3) -> None:
timeout = self.disconnection_timeout
logger.info(f'Waiting {timeout} seconds for connection recovery... ')
timebase = time.monotonic()
while not self._run_coroutine(self._live.check_connectivity()):
if timeout is not None and time.monotonic() - timebase > timeout:
raise TimeoutError(
f'Connection not recovered in {timeout} seconds'
)
logger.error(f'Connection not recovered in {timeout} seconds')
self._stopped = True
self._connection_recovered.set()
time.sleep(check_interval)
else:
logger.info('Connection recovered')
self._connection_recovered.set()
def _make_pbar_postfix(self) -> str:
return '{room_id} - {user_name}: {room_title}'.format(
@ -445,6 +395,13 @@ class StreamRecorder(
self._live.room_info.live_start_time, timezone(timedelta(hours=8))
)
assert self._real_quality_number is not None
stream_quality = '{} ({}{})'.format(
get_quality_name(self._real_quality_number),
self._real_quality_number,
', bluray' if '_bluray' in self._stream_url else '',
)
return {
'Title': self._live.room_info.title,
'Artist': self._live.user_info.name,
@ -456,6 +413,9 @@ B站直播录像
分区{self._live.room_info.parent_area_name} - {self._live.room_info.area_name}
房间号{self._live.room_info.room_id}
开播时间{live_start_time}
流主机: {self._stream_host}
流格式{self._real_stream_format}
流画质{stream_quality}
录制程序{__prog__} v{__version__} {__github__}''',
'description': OrderedDict({
'UserId': str(self._live.user_info.uid),
@ -465,17 +425,30 @@ B站直播录像
'Area': self._live.room_info.area_name,
'ParentArea': self._live.room_info.parent_area_name,
'LiveStartTime': str(live_start_time),
'StreamHost': self._stream_host,
'StreamFormat': self._real_stream_format,
'StreamQuality': stream_quality,
'Recorder': f'{__prog__} v{__version__} {__github__}',
})
}
def _emit_event(self, name: str, *args: Any, **kwds: Any) -> None:
self._run_coroutine(super()._emit(name, *args, **kwds))
self._run_coroutine(self._emit(name, *args, **kwds))
@aio_task_with_room_id
async def _emit(self, *args: Any, **kwds: Any) -> None: # type: ignore
await super()._emit(*args, **kwds)
class ResponseProxy(io.RawIOBase):
def __init__(self, response: urllib3.HTTPResponse) -> None:
self._response = response
class StreamProxy(io.RawIOBase):
def __init__(self, stream: io.BufferedIOBase) -> None:
self._stream = stream
self._offset = 0
self._size_updates = Subject()
@property
def size_updates(self) -> Observable:
return self._size_updates
@property
def closed(self) -> bool:
@ -485,23 +458,32 @@ class ResponseProxy(io.RawIOBase):
# ref: `https://github.com/python/cpython/blob/63298930fb531ba2bb4f23bc3b915dbf1e17e9e1/Modules/_io/bufferedio.c#L882` # noqa
return False
def fileno(self) -> int:
return self._stream.fileno()
def readable(self) -> bool:
return True
def read(self, size: int = -1) -> bytes:
return self._response.read(size)
data = self._stream.read(size)
self._offset += len(data)
self._size_updates.on_next(len(data))
return data
def tell(self) -> int:
return self._response.tell()
return self._offset
def readinto(self, b: Any) -> int:
return self._response.readinto(b)
n = self._stream.readinto(b)
self._offset += n
self._size_updates.on_next(n)
return n
def close(self) -> None:
self._response.close()
self._stream.close()
class OutputFileManager(BaseOutputFileManager, AsyncCooperationMix):
class OutputFileManager(BaseOutputFileManager, AsyncCooperationMixin):
def __init__(
self,
live: Live,

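As a small illustration of the StreamProxy added above: it wraps a readable stream and publishes per-read byte counts through size_updates, which the recorders use to feed the download statistics. A sketch with an in-memory stream (the import path is assumed from the package layout):

import io
from blrec.core.base_stream_recorder import StreamProxy  # path assumed

source = io.BytesIO(b'\x00' * 4096)   # stands in for the HTTP response body
proxy = StreamProxy(source)
proxy.size_updates.subscribe(lambda n: print(f'read {n} bytes'))

reader = io.BufferedReader(proxy, buffer_size=1024)
while reader.read(1024):
    pass
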

@ -12,7 +12,9 @@ from tenacity import (
from .. import __version__, __prog__, __github__
from .danmaku_receiver import DanmakuReceiver, DanmuMsg
from .stream_recorder import StreamRecorder, StreamRecorderEventListener
from .base_stream_recorder import (
BaseStreamRecorder, StreamRecorderEventListener
)
from .statistics import StatisticsCalculator
from ..bili.live import Live
from ..exception import exception_callback, submit_exception
@ -49,7 +51,7 @@ class DanmakuDumper(
def __init__(
self,
live: Live,
stream_recorder: StreamRecorder,
stream_recorder: BaseStreamRecorder,
danmaku_receiver: DanmakuReceiver,
*,
danmu_uname: bool = False,
@ -75,7 +77,7 @@ class DanmakuDumper(
self._calculator = StatisticsCalculator(interval=60)
@property
def danmu_count(self) -> int:
def danmu_total(self) -> int:
return self._calculator.count
@property
@ -90,6 +92,14 @@ class DanmakuDumper(
def dumping_path(self) -> Optional[str]:
return self._path
def change_stream_recorder(
self, stream_recorder: BaseStreamRecorder
) -> None:
self._stream_recorder.remove_listener(self)
self._stream_recorder = stream_recorder
self._stream_recorder.add_listener(self)
logger.debug('Changed stream recorder')
def _do_enable(self) -> None:
self._stream_recorder.add_listener(self)
logger.debug('Enabled danmaku dumper')


@ -0,0 +1,214 @@
import io
import errno
import logging
from urllib.parse import urlparse
from typing import Optional
import urllib3
import requests
from tqdm import tqdm
from tenacity import (
retry_if_result,
retry_if_not_exception_type,
Retrying,
TryAgain,
)
from .stream_analyzer import StreamProfile
from .base_stream_recorder import BaseStreamRecorder, StreamProxy
from .retry import wait_exponential_for_same_exceptions, before_sleep_log
from ..bili.live import Live
from ..bili.typing import StreamFormat, QualityNumber
from ..flv.stream_processor import StreamProcessor
from ..utils.mixins import AsyncCooperationMixin, AsyncStoppableMixin
from ..flv.exceptions import FlvDataError, FlvStreamCorruptedError
from ..bili.exceptions import (
LiveRoomHidden, LiveRoomLocked, LiveRoomEncrypted, NoStreamAvailable,
)
__all__ = 'FLVStreamRecorder',
logger = logging.getLogger(__name__)
class FLVStreamRecorder(
BaseStreamRecorder,
AsyncCooperationMixin,
AsyncStoppableMixin,
):
def __init__(
self,
live: Live,
out_dir: str,
path_template: str,
*,
stream_format: StreamFormat = 'flv',
quality_number: QualityNumber = 10000,
buffer_size: Optional[int] = None,
read_timeout: Optional[int] = None,
disconnection_timeout: Optional[int] = None,
filesize_limit: int = 0,
duration_limit: int = 0,
) -> None:
super().__init__(
live=live,
out_dir=out_dir,
path_template=path_template,
stream_format=stream_format,
quality_number=quality_number,
buffer_size=buffer_size,
read_timeout=read_timeout,
disconnection_timeout=disconnection_timeout,
filesize_limit=filesize_limit,
duration_limit=duration_limit,
)
def _run(self) -> None:
logger.debug('Stream recorder thread started')
try:
with tqdm(
desc='Recording',
unit='B',
unit_scale=True,
postfix=self._make_pbar_postfix(),
) as progress_bar:
self._progress_bar = progress_bar
self._stream_processor = StreamProcessor(
self._file_manager,
filesize_limit=self._filesize_limit,
duration_limit=self._duration_limit,
analyse_data=True,
dedup_join=True,
save_extra_metadata=True,
)
def update_size(size: int) -> None:
progress_bar.update(size)
self._rec_calculator.submit(size)
def update_stream_profile(profile: StreamProfile) -> None:
self._stream_profile = profile
self._stream_processor.size_updates.subscribe(update_size)
self._stream_processor.stream_profile_updates.subscribe(
update_stream_profile
)
with requests.Session() as self._session:
self._main_loop()
except TryAgain:
pass
except Exception as e:
self._handle_exception(e)
finally:
if self._stream_processor is not None:
self._stream_processor.finalize()
self._stream_processor = None
self._progress_bar = None
self._dl_calculator.freeze()
self._rec_calculator.freeze()
self._emit_event('stream_recording_stopped')
logger.debug('Stream recorder thread stopped')
def _main_loop(self) -> None:
for attempt in Retrying(
reraise=True,
retry=(
retry_if_result(lambda r: not self._stopped) |
retry_if_not_exception_type((OSError, NotImplementedError))
),
wait=wait_exponential_for_same_exceptions(max=60),
before_sleep=before_sleep_log(logger, logging.DEBUG, 'main_loop'),
):
with attempt:
try:
self._streaming_loop()
except NoStreamAvailable as e:
logger.warning(f'No stream available: {repr(e)}')
if not self._stopped:
raise TryAgain
except OSError as e:
logger.critical(repr(e), exc_info=e)
if e.errno == errno.ENOSPC:
# OSError(28, 'No space left on device')
self._handle_exception(e)
self._stopped = True
raise TryAgain
except LiveRoomHidden:
logger.error('The live room has been hidden!')
self._stopped = True
except LiveRoomLocked:
logger.error('The live room has been locked!')
self._stopped = True
except LiveRoomEncrypted:
logger.error('The live room has been encrypted!')
self._stopped = True
except Exception as e:
logger.exception(e)
self._handle_exception(e)
self._stopped = True
def _streaming_loop(self) -> None:
url = self._get_live_stream_url()
while not self._stopped:
try:
self._streaming(url)
except requests.exceptions.HTTPError as e:
# frequently occurs when the live has just started or ended.
logger.warning(repr(e))
self._defer_retry(1, 'streaming_loop')
# the url may have been forbidden or expired
# when the status code is 404 or 403
if e.response.status_code in (403, 404):
url = self._get_live_stream_url()
except requests.exceptions.Timeout as e:
logger.warning(repr(e))
except urllib3.exceptions.TimeoutError as e:
logger.warning(repr(e))
except urllib3.exceptions.ProtocolError as e:
# ProtocolError('Connection broken: IncompleteRead(
logger.warning(repr(e))
except requests.exceptions.ConnectionError as e:
logger.warning(repr(e))
self._wait_for_connection_error()
except FlvDataError as e:
logger.warning(repr(e))
self._use_candidate_stream = not self._use_candidate_stream
url = self._get_live_stream_url()
except FlvStreamCorruptedError as e:
logger.warning(repr(e))
url = self._get_live_stream_url()
def _streaming(self, url: str) -> None:
logger.debug(f'Requesting live stream... {url}')
self._stream_url = url
self._stream_host = urlparse(url).hostname or ''
with self._session.get(
url,
stream=True,
headers=self._live.headers,
timeout=self.read_timeout,
) as response:
logger.debug('Response received')
response.raise_for_status()
if self._stopped:
return
assert self._stream_processor is not None
self._stream_processor.set_metadata(self._make_metadata())
stream_proxy = StreamProxy(response.raw)
stream_proxy.size_updates.subscribe(
lambda n: self._dl_calculator.submit(n)
)
self._stream_processor.process_stream(
io.BufferedReader(stream_proxy, buffer_size=8192)
)


@ -0,0 +1,442 @@
import io
import time
import errno
import logging
from queue import Queue, Empty
from threading import Thread, Event, Lock
from datetime import datetime
from contextlib import suppress
from urllib.parse import urlparse
from typing import Set, Optional
import urllib3
import requests
import m3u8
from m3u8.model import Segment
from tqdm import tqdm
from tenacity import (
retry,
wait_exponential,
stop_after_delay,
retry_if_result,
retry_if_exception_type,
retry_if_not_exception_type,
Retrying,
TryAgain,
RetryError,
)
from .stream_remuxer import StreamRemuxer
from .stream_analyzer import ffprobe, StreamProfile
from .base_stream_recorder import BaseStreamRecorder, StreamProxy
from .retry import wait_exponential_for_same_exceptions, before_sleep_log
from ..bili.live import Live
from ..bili.typing import StreamFormat, QualityNumber
from ..flv.stream_processor import StreamProcessor
from ..utils.mixins import (
AsyncCooperationMixin, AsyncStoppableMixin, SupportDebugMixin
)
from ..bili.exceptions import (
LiveRoomHidden, LiveRoomLocked, LiveRoomEncrypted, NoStreamAvailable,
)
__all__ = 'HLSStreamRecorder',
logger = logging.getLogger(__name__)
class HLSStreamRecorder(
BaseStreamRecorder,
AsyncCooperationMixin,
AsyncStoppableMixin,
SupportDebugMixin,
):
def __init__(
self,
live: Live,
out_dir: str,
path_template: str,
*,
stream_format: StreamFormat = 'flv',
quality_number: QualityNumber = 10000,
buffer_size: Optional[int] = None,
read_timeout: Optional[int] = None,
disconnection_timeout: Optional[int] = None,
filesize_limit: int = 0,
duration_limit: int = 0,
) -> None:
super().__init__(
live=live,
out_dir=out_dir,
path_template=path_template,
stream_format=stream_format,
quality_number=quality_number,
buffer_size=buffer_size,
read_timeout=read_timeout,
disconnection_timeout=disconnection_timeout,
filesize_limit=filesize_limit,
duration_limit=duration_limit,
)
self._init_for_debug(self._live.room_id)
self._stream_analysed_lock = Lock()
self._last_segment_uris: Set[str] = set()
def _run(self) -> None:
logger.debug('Stream recorder thread started')
try:
if self._debug:
path = '{}/playlist-{}-{}.m3u8'.format(
self._debug_dir,
self._live.room_id,
datetime.now().strftime('%Y-%m-%d-%H%M%S-%f'),
)
self._playlist_debug_file = open(path, 'wt', encoding='utf-8')
with StreamRemuxer(self._live.room_id) as self._stream_remuxer:
with requests.Session() as self._session:
self._session.headers.update(self._live.headers)
self._segment_queue: Queue[Segment] = Queue(maxsize=1000)
self._segment_data_queue: Queue[bytes] = Queue(maxsize=100)
self._stream_host_available = Event()
self._segment_fetcher_thread = Thread(
target=self._run_segment_fetcher,
name=f'SegmentFetcher::{self._live.room_id}',
daemon=True,
)
self._segment_fetcher_thread.start()
self._segment_data_feeder_thread = Thread(
target=self._run_segment_data_feeder,
name=f'SegmentDataFeeder::{self._live.room_id}',
daemon=True,
)
self._segment_data_feeder_thread.start()
self._stream_processor_thread = Thread(
target=self._run_stream_processor,
name=f'StreamProcessor::{self._live.room_id}',
daemon=True,
)
self._stream_processor_thread.start()
try:
self._main_loop()
finally:
if self._stream_processor is not None:
self._stream_processor.cancel()
self._segment_fetcher_thread.join(timeout=10)
self._segment_data_feeder_thread.join(timeout=10)
self._last_segment_uris.clear()
del self._segment_queue
del self._segment_data_queue
except TryAgain:
pass
except Exception as e:
self._handle_exception(e)
finally:
with suppress(Exception):
self._stream_processor_thread.join(timeout=10)
with suppress(Exception):
self._playlist_debug_file.close()
self._emit_event('stream_recording_stopped')
logger.debug('Stream recorder thread stopped')
def _main_loop(self) -> None:
for attempt in Retrying(
reraise=True,
retry=(
retry_if_result(lambda r: not self._stopped) |
retry_if_not_exception_type((OSError, NotImplementedError))
),
wait=wait_exponential_for_same_exceptions(max=60),
before_sleep=before_sleep_log(logger, logging.DEBUG, 'main_loop'),
):
with attempt:
try:
self._streaming_loop()
except NoStreamAvailable as e:
logger.warning(f'No stream available: {repr(e)}')
if not self._stopped:
raise TryAgain
except OSError as e:
logger.critical(repr(e), exc_info=e)
if e.errno == errno.ENOSPC:
# OSError(28, 'No space left on device')
self._handle_exception(e)
self._stopped = True
raise TryAgain
except LiveRoomHidden:
logger.error('The live room has been hidden!')
self._stopped = True
except LiveRoomLocked:
logger.error('The live room has been locked!')
self._stopped = True
except LiveRoomEncrypted:
logger.error('The live room has been encrypted!')
self._stopped = True
except Exception as e:
logger.exception(e)
self._handle_exception(e)
self._stopped = True
def _streaming_loop(self) -> None:
url = self._get_live_stream_url()
while not self._stopped:
try:
self._playlist_fetcher(url)
except requests.exceptions.HTTPError as e:
                # frequently occurs when the live has just started or ended.
logger.warning(repr(e))
self._defer_retry(1, 'streaming_loop')
                # the url may have been forbidden or may have expired
                # when the status code is 403 or 404
if e.response.status_code in (403, 404):
url = self._get_live_stream_url()
except requests.exceptions.ConnectionError as e:
logger.warning(repr(e))
self._wait_for_connection_error()
except RetryError as e:
logger.warning(repr(e))
def _playlist_fetcher(self, url: str) -> None:
self._stream_url = url
self._stream_host = urlparse(url).hostname or ''
self._stream_host_available.set()
with self._stream_analysed_lock:
self._stream_analysed = False
while not self._stopped:
content = self._fetch_playlist(url)
playlist = m3u8.loads(content, uri=url)
if self._debug:
self._playlist_debug_file.write(content + '\n')
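            # A variant (master) playlist only lists renditions; switch to
            # the media playlist with the highest bandwidth and poll that
            # one instead.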
if playlist.is_variant:
url = sorted(
playlist.playlists,
key=lambda p: p.stream_info.bandwidth
)[-1].absolute_uri
logger.debug(f'playlist changed to variant playlist: {url}')
self._stream_url = url
self._stream_host = urlparse(url).hostname or ''
with self._stream_analysed_lock:
self._stream_analysed = False
continue
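            # Queue only segments not seen in the previous refresh; if the
            # new playlist shares no URI with the last one, the stream was
            # most likely cut and needs to be re-analysed.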
uris: Set[str] = set()
for seg in playlist.segments:
uris.add(seg.uri)
if seg.uri not in self._last_segment_uris:
self._segment_queue.put(seg, timeout=60)
if (
self._last_segment_uris and
not uris.intersection(self._last_segment_uris)
):
logger.debug(
'segments broken!\n'
f'last segments: {self._last_segment_uris}\n'
f'current segments: {uris}'
)
with self._stream_analysed_lock:
self._stream_analysed = False
self._last_segment_uris = uris
if playlist.is_endlist:
logger.debug('playlist ended')
self._stopped = True
break
time.sleep(1)
def _run_segment_fetcher(self) -> None:
logger.debug('Segment fetcher thread started')
try:
self._segment_fetcher()
except Exception as e:
logger.exception(e)
self._handle_exception(e)
finally:
self._dl_calculator.freeze()
logger.debug('Segment fetcher thread stopped')
def _segment_fetcher(self) -> None:
assert self._stream_remuxer is not None
init_section = None
self._init_section_data = None
while not self._stopped:
try:
seg = self._segment_queue.get(timeout=1)
except Empty:
continue
for attempt in Retrying(
reraise=True,
retry=(
retry_if_result(lambda r: not self._stopped) |
retry_if_not_exception_type((OSError, NotImplementedError))
),
):
if attempt.retry_state.attempt_number > 3:
break
with attempt:
try:
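                        # fMP4 streams carry an EXT-X-MAP init section;
                        # fetch it once (or whenever its URI changes) and
                        # feed it ahead of the media segments.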
if (
getattr(seg, 'init_section', None) and
(
not init_section or
seg.init_section.uri != init_section.uri
)
):
data = self._fetch_segment(
seg.init_section.absolute_uri
)
init_section = seg.init_section
self._init_section_data = data
self._segment_data_queue.put(data, timeout=60)
data = self._fetch_segment(seg.absolute_uri)
self._segment_data_queue.put(data, timeout=60)
except requests.exceptions.HTTPError as e:
logger.warning(f'Failed to fetch segment: {repr(e)}')
if e.response.status_code in (403, 404, 599):
break
except requests.exceptions.ConnectionError as e:
logger.warning(repr(e))
self._connection_recovered.wait()
except RetryError as e:
logger.warning(repr(e))
break
else:
break
def _run_segment_data_feeder(self) -> None:
logger.debug('Segment data feeder thread started')
try:
self._segment_data_feeder()
except Exception as e:
logger.exception(e)
self._handle_exception(e)
finally:
logger.debug('Segment data feeder thread stopped')
def _segment_data_feeder(self) -> None:
assert self._stream_remuxer is not None
bytes_io = io.BytesIO()
segment_count = 0
def on_next(profile: StreamProfile) -> None:
self._stream_profile = profile
def on_error(e: Exception) -> None:
logger.warning(f'Failed to analyse stream: {repr(e)}')
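        # Buffer the init section plus roughly three media segments, run
        # ffprobe over the buffer to publish a stream profile, and keep
        # piping every chunk into the ffmpeg remuxer's stdin.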
while not self._stopped:
try:
data = self._segment_data_queue.get(timeout=1)
except Empty:
continue
else:
with self._stream_analysed_lock:
if not self._stream_analysed:
if self._init_section_data and not bytes_io.getvalue():
bytes_io.write(self._init_section_data)
else:
bytes_io.write(data)
segment_count += 1
if segment_count >= 3:
ffprobe(bytes_io.getvalue()).subscribe(
on_next, on_error
)
bytes_io = io.BytesIO()
segment_count = 0
self._stream_analysed = True
try:
self._stream_remuxer.input.write(data)
except BrokenPipeError:
return
def _run_stream_processor(self) -> None:
logger.debug('Stream processor thread started')
assert self._stream_remuxer is not None
with tqdm(
desc='Recording',
unit='B',
unit_scale=True,
postfix=self._make_pbar_postfix(),
) as progress_bar:
self._progress_bar = progress_bar
def update_size(size: int) -> None:
progress_bar.update(size)
self._rec_calculator.submit(size)
self._stream_processor = StreamProcessor(
self._file_manager,
filesize_limit=self._filesize_limit,
duration_limit=self._duration_limit,
analyse_data=True,
dedup_join=True,
save_extra_metadata=True,
)
self._stream_processor.size_updates.subscribe(update_size)
try:
self._stream_host_available.wait()
self._stream_processor.set_metadata(self._make_metadata())
self._stream_processor.process_stream(
StreamProxy(self._stream_remuxer.output), # type: ignore
)
except Exception as e:
if not self._stopped:
logger.exception(e)
self._handle_exception(e)
finally:
self._stream_processor.finalize()
self._progress_bar = None
self._rec_calculator.freeze()
logger.debug('Stream processor thread stopped')
@retry(
retry=retry_if_exception_type((
requests.exceptions.Timeout,
urllib3.exceptions.TimeoutError,
urllib3.exceptions.ProtocolError,
)),
wait=wait_exponential(multiplier=0.1, max=1),
stop=stop_after_delay(10),
)
def _fetch_playlist(self, url: str) -> str:
response = self._session.get(url, timeout=3)
response.raise_for_status()
response.encoding = 'utf-8'
return response.text
@retry(
retry=retry_if_exception_type((
requests.exceptions.Timeout,
urllib3.exceptions.TimeoutError,
urllib3.exceptions.ProtocolError,
)),
wait=wait_exponential(multiplier=0.1, max=5),
stop=stop_after_delay(60),
)
def _fetch_segment(self, url: str) -> bytes:
with self._session.get(url, stream=True, timeout=10) as response:
response.raise_for_status()
bytes_io = io.BytesIO()
for chunk in response:
bytes_io.write(chunk)
self._dl_calculator.submit(len(chunk))
return bytes_io.getvalue()

View File

@ -12,7 +12,10 @@ from tenacity import (
)
from .raw_danmaku_receiver import RawDanmakuReceiver
from .stream_recorder import StreamRecorder, StreamRecorderEventListener
from .base_stream_recorder import (
BaseStreamRecorder, StreamRecorderEventListener
)
from ..bili.live import Live
from ..exception import exception_callback, submit_exception
from ..event.event_emitter import EventListener, EventEmitter
from ..path import raw_danmaku_path
@ -41,13 +44,23 @@ class RawDanmakuDumper(
):
def __init__(
self,
stream_recorder: StreamRecorder,
live: Live,
stream_recorder: BaseStreamRecorder,
danmaku_receiver: RawDanmakuReceiver,
) -> None:
super().__init__()
self._live = live # @aio_task_with_room_id
self._stream_recorder = stream_recorder
self._receiver = danmaku_receiver
def change_stream_recorder(
self, stream_recorder: BaseStreamRecorder
) -> None:
self._stream_recorder.remove_listener(self)
self._stream_recorder = stream_recorder
self._stream_recorder.add_listener(self)
logger.debug('Changed stream recorder')
def _do_enable(self) -> None:
self._stream_recorder.add_listener(self)
logger.debug('Enabled raw danmaku dumper')

View File

@ -1,7 +1,8 @@
from __future__ import annotations
import asyncio
import logging
from datetime import datetime
from typing import Iterator, Optional
from typing import Iterator, Optional, Type
import aiohttp
import aiofiles
@ -12,14 +13,19 @@ from .danmaku_receiver import DanmakuReceiver
from .danmaku_dumper import DanmakuDumper, DanmakuDumperEventListener
from .raw_danmaku_receiver import RawDanmakuReceiver
from .raw_danmaku_dumper import RawDanmakuDumper, RawDanmakuDumperEventListener
from .stream_recorder import StreamRecorder, StreamRecorderEventListener
from .base_stream_recorder import (
BaseStreamRecorder, StreamRecorderEventListener
)
from .stream_analyzer import StreamProfile
from .flv_stream_recorder import FLVStreamRecorder
from .hls_stream_recorder import HLSStreamRecorder
from ..event.event_emitter import EventListener, EventEmitter
from ..flv.data_analyser import MetaData
from ..bili.live import Live
from ..bili.models import RoomInfo
from ..bili.danmaku_client import DanmakuClient
from ..bili.live_monitor import LiveMonitor, LiveEventListener
from ..bili.typing import QualityNumber
from ..bili.typing import StreamFormat, QualityNumber
from ..utils.mixins import AsyncStoppableMixin
from ..path import cover_path
from ..logging.room_id import aio_task_with_room_id
@ -88,9 +94,13 @@ class Recorder(
out_dir: str,
path_template: str,
*,
stream_format: StreamFormat = 'flv',
quality_number: QualityNumber = 10000,
buffer_size: Optional[int] = None,
read_timeout: Optional[int] = None,
disconnection_timeout: Optional[int] = None,
filesize_limit: int = 0,
duration_limit: int = 0,
danmu_uname: bool = False,
record_gift_send: bool = False,
record_free_gifts: bool = False,
@ -98,8 +108,6 @@ class Recorder(
record_super_chat: bool = False,
save_cover: bool = False,
save_raw_danmaku: bool = False,
filesize_limit: int = 0,
duration_limit: int = 0,
) -> None:
super().__init__()
@ -110,11 +118,19 @@ class Recorder(
self.save_raw_danmaku = save_raw_danmaku
self._recording: bool = False
self._stream_available: bool = False
self._stream_recorder = StreamRecorder(
cls: Type[BaseStreamRecorder]
if stream_format == 'flv':
cls = FLVStreamRecorder
else:
cls = HLSStreamRecorder
self._stream_recorder = cls(
self._live,
out_dir=out_dir,
path_template=path_template,
stream_format=stream_format,
quality_number=quality_number,
buffer_size=buffer_size,
read_timeout=read_timeout,
disconnection_timeout=disconnection_timeout,
@ -135,6 +151,7 @@ class Recorder(
)
self._raw_danmaku_receiver = RawDanmakuReceiver(danmaku_client)
self._raw_danmaku_dumper = RawDanmakuDumper(
self._live,
self._stream_recorder,
self._raw_danmaku_receiver,
)
@ -147,6 +164,14 @@ class Recorder(
def recording(self) -> bool:
return self._recording
@property
def stream_format(self) -> StreamFormat:
return self._stream_recorder.stream_format
@stream_format.setter
def stream_format(self, value: StreamFormat) -> None:
self._stream_recorder.stream_format = value
@property
def quality_number(self) -> QualityNumber:
return self._stream_recorder.quality_number
@ -155,6 +180,10 @@ class Recorder(
def quality_number(self, value: QualityNumber) -> None:
self._stream_recorder.quality_number = value
@property
def real_stream_format(self) -> StreamFormat:
return self._stream_recorder.real_stream_format
@property
def real_quality_number(self) -> QualityNumber:
return self._stream_recorder.real_quality_number
@ -224,20 +253,36 @@ class Recorder(
self._danmaku_dumper.record_super_chat = value
@property
def elapsed(self) -> float:
return self._stream_recorder.elapsed
def stream_url(self) -> str:
return self._stream_recorder.stream_url
@property
def data_count(self) -> int:
return self._stream_recorder.data_count
def stream_host(self) -> str:
return self._stream_recorder.stream_host
@property
def data_rate(self) -> float:
return self._stream_recorder.data_rate
def dl_total(self) -> int:
return self._stream_recorder.dl_total
@property
def danmu_count(self) -> int:
return self._danmaku_dumper.danmu_count
def dl_rate(self) -> float:
return self._stream_recorder.dl_rate
@property
def rec_elapsed(self) -> float:
return self._stream_recorder.rec_elapsed
@property
def rec_total(self) -> int:
return self._stream_recorder.rec_total
@property
def rec_rate(self) -> float:
return self._stream_recorder.rec_rate
@property
def danmu_total(self) -> int:
return self._danmaku_dumper.danmu_total
@property
def danmu_rate(self) -> float:
@ -283,26 +328,9 @@ class Recorder(
def metadata(self) -> Optional[MetaData]:
return self._stream_recorder.metadata
async def _do_start(self) -> None:
self._live_monitor.add_listener(self)
self._danmaku_dumper.add_listener(self)
self._raw_danmaku_dumper.add_listener(self)
self._stream_recorder.add_listener(self)
logger.debug('Started recorder')
self._print_live_info()
if self._live.is_living():
await self._start_recording(stream_available=True)
else:
self._print_waiting_message()
async def _do_stop(self) -> None:
await self._stop_recording()
self._live_monitor.remove_listener(self)
self._danmaku_dumper.remove_listener(self)
self._raw_danmaku_dumper.remove_listener(self)
self._stream_recorder.remove_listener(self)
logger.debug('Stopped recorder')
@property
def stream_profile(self) -> StreamProfile:
return self._stream_recorder.stream_profile
def get_recording_files(self) -> Iterator[str]:
if self._stream_recorder.recording_path is not None:
@ -329,17 +357,19 @@ class Recorder(
async def on_live_ended(self, live: Live) -> None:
logger.info('The live has ended')
self._stream_available = False
await self._stop_recording()
self._print_waiting_message()
async def on_live_stream_available(self, live: Live) -> None:
logger.debug('The live stream becomes available')
self._stream_available = True
await self._stream_recorder.start()
async def on_live_stream_reset(self, live: Live) -> None:
logger.warning('The live stream has been reset')
if not self._recording:
await self._start_recording(stream_available=True)
await self._start_recording()
async def on_room_changed(self, room_info: RoomInfo) -> None:
self._print_changed_room_info(room_info)
@ -371,9 +401,32 @@ class Recorder(
logger.debug('Stream recording stopped')
await self._stop_recording()
async def _start_recording(self, stream_available: bool = False) -> None:
async def _do_start(self) -> None:
self._live_monitor.add_listener(self)
self._danmaku_dumper.add_listener(self)
self._raw_danmaku_dumper.add_listener(self)
self._stream_recorder.add_listener(self)
logger.debug('Started recorder')
self._print_live_info()
if self._live.is_living():
self._stream_available = True
await self._start_recording()
else:
self._print_waiting_message()
async def _do_stop(self) -> None:
await self._stop_recording()
self._live_monitor.remove_listener(self)
self._danmaku_dumper.remove_listener(self)
self._raw_danmaku_dumper.remove_listener(self)
self._stream_recorder.remove_listener(self)
logger.debug('Stopped recorder')
async def _start_recording(self) -> None:
if self._recording:
return
self._change_stream_recorder()
self._recording = True
if self.save_raw_danmaku:
@ -383,8 +436,10 @@ class Recorder(
self._danmaku_receiver.start()
await self._prepare()
if stream_available:
if self._stream_available:
await self._stream_recorder.start()
else:
asyncio.create_task(self._guard())
logger.info('Started recording')
await self._emit('recording_started', self)
@ -414,23 +469,82 @@ class Recorder(
self._danmaku_dumper.clear_files()
self._stream_recorder.clear_files()
@retry(wait=wait_fixed(1), stop=stop_after_attempt(3))
@aio_task_with_room_id
async def _guard(self, timeout: float = 60) -> None:
await asyncio.sleep(timeout)
if not self._recording:
return
if self._stream_available:
return
logger.debug(
            f'Stream not available in {timeout} seconds, the event may be lost.'
)
await self._live.update_info()
if self._live.is_living():
logger.debug('The live is living now')
self._stream_available = True
if self._stream_recorder.stopped:
await self._stream_recorder.start()
else:
logger.debug('The live has ended before streaming')
self._stream_available = False
if not self._stream_recorder.stopped:
await self.stop()
def _change_stream_recorder(self) -> None:
if self._recording:
            logger.debug('Cannot change stream recorder while recording')
return
cls: Type[BaseStreamRecorder]
if self.stream_format == 'flv':
cls = FLVStreamRecorder
else:
cls = HLSStreamRecorder
if self._stream_recorder.__class__ == cls:
return
self._stream_recorder.remove_listener(self)
self._stream_recorder = cls(
self._live,
out_dir=self.out_dir,
path_template=self.path_template,
stream_format=self.stream_format,
quality_number=self.quality_number,
buffer_size=self.buffer_size,
read_timeout=self.read_timeout,
disconnection_timeout=self.disconnection_timeout,
filesize_limit=self.filesize_limit,
duration_limit=self.duration_limit,
)
self._stream_recorder.add_listener(self)
self._danmaku_dumper.change_stream_recorder(self._stream_recorder)
self._raw_danmaku_dumper.change_stream_recorder(self._stream_recorder)
logger.debug(f'Changed stream recorder to {cls.__name__}')
@aio_task_with_room_id
async def _save_cover_image(self, video_path: str) -> None:
await self._live.update_info()
try:
await self._live.update_info()
url = self._live.room_info.cover
ext = url.rsplit('.', 1)[-1]
path = cover_path(video_path, ext)
await self._save_file(url, path)
except Exception as e:
logger.error(f'Failed to save cover image: {repr(e)}')
else:
logger.info(f'Saved cover image: {path}')
@retry(reraise=True, wait=wait_fixed(1), stop=stop_after_attempt(3))
async def _save_file(self, url: str, path: str) -> None:
async with aiohttp.ClientSession(raise_for_status=True) as session:
try:
url = self._live.room_info.cover
async with session.get(url) as response:
ext = url.rsplit('.', 1)[-1]
path = cover_path(video_path, ext)
async with aiofiles.open(path, 'wb') as file:
await file.write(await response.read())
except Exception as e:
logger.error(f'Failed to save cover image: {repr(e)}')
raise
else:
logger.info(f'Saved cover image: {path}')
async with session.get(url) as response:
async with aiofiles.open(path, 'wb') as file:
await file.write(await response.read())
def _print_waiting_message(self) -> None:
logger.info('Waiting... until the live starts')

View File

@ -0,0 +1,54 @@
import json
import logging
from subprocess import Popen, PIPE
from typing import Dict, Any, Optional
from rx import create
from rx.core import Observable
from rx.core.typing import Observer, Scheduler, Disposable
from rx.scheduler.newthreadscheduler import NewThreadScheduler
logger = logging.getLogger(__name__)
__all__ = 'ffprobe', 'StreamProfile'
StreamProfile = Dict[str, Any]
def ffprobe(data: bytes) -> Observable:
def subscribe(
observer: Observer[StreamProfile],
scheduler: Optional[Scheduler] = None,
) -> Disposable:
_scheduler = scheduler or NewThreadScheduler()
def action(scheduler, state): # type: ignore
args = [
'ffprobe',
'-show_streams',
'-show_format',
'-print_format',
'json',
'pipe:0',
]
with Popen(
args, stdin=PIPE, stdout=PIPE, stderr=PIPE
) as process:
try:
stdout, stderr = process.communicate(data, timeout=10)
except Exception as e:
process.kill()
process.wait()
observer.on_error(e)
else:
profile = json.loads(stdout)
observer.on_next(profile)
observer.on_completed()
return _scheduler.schedule(action)
return create(subscribe)
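# A minimal usage sketch (illustrative only, with made-up variable names):
# subscribe to the returned Observable to receive the parsed ffprobe output
# as a dict, e.g.
#
#     ffprobe(first_segments_data).subscribe(
#         on_next=lambda profile: print(profile['format']['format_name']),
#         on_error=lambda e: logger.warning(f'ffprobe failed: {e!r}'),
#     )
#
# ffprobe must be available on PATH; the subprocess is killed if it does
# not finish within 10 seconds.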

View File

@ -0,0 +1,142 @@
import os
import io
import errno
import shlex
import logging
from threading import Thread, Event
from subprocess import Popen, PIPE, CalledProcessError
from typing import List, Optional, cast
from ..utils.mixins import StoppableMixin, SupportDebugMixin
logger = logging.getLogger(__name__)
__all__ = 'StreamRemuxer',
class StreamRemuxer(StoppableMixin, SupportDebugMixin):
def __init__(self, room_id: int, bufsize: int = 1024 * 1024) -> None:
super().__init__()
self._room_id = room_id
self._bufsize = bufsize
self._exception: Optional[Exception] = None
self._subprocess_setup = Event()
self._MAX_ERROR_MESSAGES = 10
self._error_messages: List[str] = []
self._env = None
self._init_for_debug(room_id)
if self._debug:
self._env = os.environ.copy()
path = os.path.join(self._debug_dir, f'ffreport-{room_id}-%t.log')
self._env['FFREPORT'] = f'file={path}:level=48'
@property
def input(self) -> io.BufferedWriter:
assert self._subprocess.stdin is not None
return cast(io.BufferedWriter, self._subprocess.stdin)
@property
def output(self) -> io.BufferedReader:
assert self._subprocess.stdout is not None
return cast(io.BufferedReader, self._subprocess.stdout)
@property
def exception(self) -> Optional[Exception]:
return self._exception
def __enter__(self): # type: ignore
self.start()
self.wait_for_subprocess()
return self
def __exit__(self, exc_type, value, traceback): # type: ignore
self.stop()
self.raise_for_exception()
def wait_for_subprocess(self) -> None:
self._subprocess_setup.wait()
def raise_for_exception(self) -> None:
if not self.exception:
return
raise self.exception
def _do_start(self) -> None:
logger.debug('Starting stream remuxer...')
self._thread = Thread(
target=self._run,
name=f'StreamRemuxer::{self._room_id}',
daemon=True,
)
self._thread.start()
def _do_stop(self) -> None:
logger.debug('Stopping stream remuxer...')
if hasattr(self, '_subprocess'):
self._subprocess.kill()
self._subprocess.wait(timeout=10)
if hasattr(self, '_thread'):
self._thread.join(timeout=10)
def _run(self) -> None:
logger.debug('Started stream remuxer')
self._exception = None
self._error_messages.clear()
self._subprocess_setup.clear()
try:
self._run_subprocess()
except BrokenPipeError:
pass
except Exception as e:
# OSError: [Errno 22] Invalid argument
# https://stackoverflow.com/questions/23688492/oserror-errno-22-invalid-argument-in-subprocess
if isinstance(e, OSError) and e.errno == errno.EINVAL:
pass
else:
self._exception = e
logger.exception(e)
finally:
logger.debug('Stopped stream remuxer')
def _run_subprocess(self) -> None:
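        # Copy (remux) whatever container ffmpeg detects on stdin into FLV
        # on stdout without re-encoding, so downstream code can reuse the
        # existing FLV processing pipeline.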
cmd = 'ffmpeg -i pipe:0 -c copy -f flv pipe:1'
args = shlex.split(cmd)
with Popen(
args, stdin=PIPE, stdout=PIPE, stderr=PIPE,
bufsize=self._bufsize, env=self._env,
) as self._subprocess:
self._subprocess_setup.set()
assert self._subprocess.stderr is not None
while not self._stopped:
data = self._subprocess.stderr.readline()
if not data:
if self._subprocess.poll() is not None:
break
else:
continue
line = data.decode('utf-8', errors='backslashreplace')
if self._debug:
logger.debug('ffmpeg: %s', line)
self._check_error(line)
if not self._stopped and self._subprocess.returncode not in (0, 255):
# 255: Exiting normally, received signal 2.
raise CalledProcessError(
self._subprocess.returncode,
cmd=cmd,
output='\n'.join(self._error_messages),
)
def _check_error(self, line: str) -> None:
if 'error' not in line.lower() and 'failed' not in line.lower():
return
logger.warning(f'ffmpeg error: {line}')
self._error_messages.append(line)
if len(self._error_messages) > self._MAX_ERROR_MESSAGES:
self._error_messages.remove(self._error_messages[0])
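# A rough usage sketch (illustrative only; the room id is a made-up value).
# In the recorder the two pipe ends are driven from separate threads:
#
#     with StreamRemuxer(room_id=123) as remuxer:
#         remuxer.input.write(segment_data)      # one thread feeds raw data
#         flv_chunk = remuxer.output.read(8192)  # another reads FLV out
#
# __exit__ stops the subprocess and re-raises any exception it captured.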

View File

@ -84,6 +84,232 @@ The above copyright notice and this permission notice shall be included in all c
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
echarts
Apache-2.0
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
========================================================================
Apache ECharts Subcomponents:
The Apache ECharts project contains subcomponents with separate copyright
notices and license terms. Your use of the source code for these
subcomponents is also subject to the terms and conditions of the following
licenses.
BSD 3-Clause (d3.js):
The following files embed [d3.js](https://github.com/d3/d3) BSD 3-Clause:
`/src/chart/treemap/treemapLayout.ts`,
`/src/chart/tree/layoutHelper.ts`,
`/src/chart/graph/forceHelper.ts`,
`/src/util/number.ts`
See `/licenses/LICENSE-d3` for details of the license.
filesize
BSD-3-Clause
Copyright (c) 2021, Jason Mulligan
@ -170,6 +396,31 @@ terms above.
ng-zorro-antd
MIT
ngx-echarts
MIT
MIT License
Copyright (c) 2017 Xie, Ziyu
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
ngx-logger
MIT
The MIT License
@ -450,3 +701,36 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
zrender
BSD-3-Clause
BSD 3-Clause License
Copyright (c) 2017, Baidu Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -10,6 +10,6 @@
<body>
<app-root></app-root>
<noscript>Please enable JavaScript to continue using this application.</noscript>
<script src="runtime.dbc624475730f362.js" type="module"></script><script src="polyfills.4b08448aee19bb22.js" type="module"></script><script src="main.042620305008901b.js" type="module"></script>
<script src="runtime.23c91f03d62c595a.js" type="module"></script><script src="polyfills.4b08448aee19bb22.js" type="module"></script><script src="main.8a8c73fae6ff9291.js" type="module"></script>
</body></html>

View File

@ -1,6 +1,6 @@
{
"configVersion": 1,
"timestamp": 1645421972060,
"timestamp": 1649386979751,
"index": "/index.html",
"assetGroups": [
{
@ -13,15 +13,16 @@
"urls": [
"/103.5b5d2a6e5a8a7479.js",
"/146.92e3b29c4c754544.js",
"/66.97582e026891bf70.js",
"/694.d4844204c9f8d279.js",
"/853.84ee7e1d7cff8913.js",
"/45.c90c3cea2bf1a66e.js",
"/66.d8b06f1fef317761.js",
"/694.92a3e0c2fc842a42.js",
"/869.95d68b28a4188d76.js",
"/common.858f777e9296e6f2.js",
"/index.html",
"/main.042620305008901b.js",
"/main.8a8c73fae6ff9291.js",
"/manifest.webmanifest",
"/polyfills.4b08448aee19bb22.js",
"/runtime.dbc624475730f362.js",
"/runtime.23c91f03d62c595a.js",
"/styles.1f581691b230dc4d.css"
],
"patterns": []
@ -1635,9 +1636,10 @@
"hashTable": {
"/103.5b5d2a6e5a8a7479.js": "cc0240f217015b6d4ddcc14f31fcc42e1c1c282a",
"/146.92e3b29c4c754544.js": "3824de681dd1f982ea69a065cdf54d7a1e781f4d",
"/66.97582e026891bf70.js": "11cfd8acd3399fef42f0cf77d64aafc62c7e6994",
"/694.d4844204c9f8d279.js": "513c6b68a84ad47494a7397a06194c5136da3adc",
"/853.84ee7e1d7cff8913.js": "6281853ef474fc543ac39fb47ec4a0a61ca875fa",
"/45.c90c3cea2bf1a66e.js": "e5bfb8cf3803593e6b8ea14c90b3d3cb6a066764",
"/66.d8b06f1fef317761.js": "43676d9dc886b5624dadecc50f17d4972b183d2d",
"/694.92a3e0c2fc842a42.js": "f8f093029b9996b3db0c4e738bf9f8573fba8392",
"/869.95d68b28a4188d76.js": "cd1add38c89b1df3c0783b74c931b51839f1c530",
"/assets/animal/panda.js": "fec2868bb3053dd2da45f96bbcb86d5116ed72b1",
"/assets/animal/panda.svg": "bebd302cdc601e0ead3a6d2710acf8753f3d83b1",
"/assets/fill/.gitkeep": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
@ -3232,11 +3234,11 @@
"/assets/twotone/warning.js": "fb2d7ea232f3a99bf8f080dbc94c65699232ac01",
"/assets/twotone/warning.svg": "8c7a2d3e765a2e7dd58ac674870c6655cecb0068",
"/common.858f777e9296e6f2.js": "b68ca68e1e214a2537d96935c23410126cc564dd",
"/index.html": "4a8198a30590a4863ef700f1c541a0fce551e8c1",
"/main.042620305008901b.js": "03d1b5d12f588193841fdc44913ec20625404c7c",
"/index.html": "114f00ffcd1f7fa5aaaa7f2fcf3109f26c77c715",
"/main.8a8c73fae6ff9291.js": "41a5a5a8fb5cda4cfa0e28532812594816257122",
"/manifest.webmanifest": "62c1cb8c5ad2af551a956b97013ab55ce77dd586",
"/polyfills.4b08448aee19bb22.js": "8e73f2d42cc13ca353cea5c886d930bd6da08d0d",
"/runtime.dbc624475730f362.js": "030dff9e7d735e03d87257f33e8167d467d99adb",
"/runtime.23c91f03d62c595a.js": "0819f1120ed1e37c2ad069ef949147450c951069",
"/styles.1f581691b230dc4d.css": "6f5befbbad57c2b2e80aae855139744b8010d150"
},
"navigationUrls": [

View File

@ -0,0 +1 @@
(()=>{"use strict";var e,v={},m={};function r(e){var i=m[e];if(void 0!==i)return i.exports;var t=m[e]={exports:{}};return v[e].call(t.exports,t,t.exports,r),t.exports}r.m=v,e=[],r.O=(i,t,f,o)=>{if(!t){var a=1/0;for(n=0;n<e.length;n++){for(var[t,f,o]=e[n],c=!0,l=0;l<t.length;l++)(!1&o||a>=o)&&Object.keys(r.O).every(b=>r.O[b](t[l]))?t.splice(l--,1):(c=!1,o<a&&(a=o));if(c){e.splice(n--,1);var d=f();void 0!==d&&(i=d)}}return i}o=o||0;for(var n=e.length;n>0&&e[n-1][2]>o;n--)e[n]=e[n-1];e[n]=[t,f,o]},r.n=e=>{var i=e&&e.__esModule?()=>e.default:()=>e;return r.d(i,{a:i}),i},r.d=(e,i)=>{for(var t in i)r.o(i,t)&&!r.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:i[t]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce((i,t)=>(r.f[t](e,i),i),[])),r.u=e=>(592===e?"common":e)+"."+{45:"c90c3cea2bf1a66e",66:"d8b06f1fef317761",103:"5b5d2a6e5a8a7479",146:"92e3b29c4c754544",592:"858f777e9296e6f2",694:"92a3e0c2fc842a42",869:"95d68b28a4188d76"}[e]+".js",r.miniCssF=e=>{},r.o=(e,i)=>Object.prototype.hasOwnProperty.call(e,i),(()=>{var e={},i="blrec:";r.l=(t,f,o,n)=>{if(e[t])e[t].push(f);else{var a,c;if(void 0!==o)for(var l=document.getElementsByTagName("script"),d=0;d<l.length;d++){var u=l[d];if(u.getAttribute("src")==t||u.getAttribute("data-webpack")==i+o){a=u;break}}a||(c=!0,(a=document.createElement("script")).type="module",a.charset="utf-8",a.timeout=120,r.nc&&a.setAttribute("nonce",r.nc),a.setAttribute("data-webpack",i+o),a.src=r.tu(t)),e[t]=[f];var s=(g,b)=>{a.onerror=a.onload=null,clearTimeout(p);var _=e[t];if(delete e[t],a.parentNode&&a.parentNode.removeChild(a),_&&_.forEach(h=>h(b)),g)return g(b)},p=setTimeout(s.bind(null,void 0,{type:"timeout",target:a}),12e4);a.onerror=s.bind(null,a.onerror),a.onload=s.bind(null,a.onload),c&&document.head.appendChild(a)}}})(),r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{var e;r.tu=i=>(void 0===e&&(e={createScriptURL:t=>t},"undefined"!=typeof trustedTypes&&trustedTypes.createPolicy&&(e=trustedTypes.createPolicy("angular#bundler",e))),e.createScriptURL(i))})(),r.p="",(()=>{var e={666:0};r.f.j=(f,o)=>{var n=r.o(e,f)?e[f]:void 0;if(0!==n)if(n)o.push(n[2]);else if(666!=f){var a=new Promise((u,s)=>n=e[f]=[u,s]);o.push(n[2]=a);var c=r.p+r.u(f),l=new Error;r.l(c,u=>{if(r.o(e,f)&&(0!==(n=e[f])&&(e[f]=void 0),n)){var s=u&&("load"===u.type?"missing":u.type),p=u&&u.target&&u.target.src;l.message="Loading chunk "+f+" failed.\n("+s+": "+p+")",l.name="ChunkLoadError",l.type=s,l.request=p,n[1](l)}},"chunk-"+f,f)}else e[f]=0},r.O.j=f=>0===e[f];var i=(f,o)=>{var l,d,[n,a,c]=o,u=0;if(n.some(p=>0!==e[p])){for(l in a)r.o(a,l)&&(r.m[l]=a[l]);if(c)var s=c(r)}for(f&&f(o);u<n.length;u++)r.o(e,d=n[u])&&e[d]&&e[d][0](),e[n[u]]=0;return r.O(s)},t=self.webpackChunkblrec=self.webpackChunkblrec||[];t.forEach(i.bind(null,0)),t.push=i.bind(null,t.push.bind(t))})()})();

View File

@ -1 +0,0 @@
(()=>{"use strict";var e,v={},m={};function r(e){var i=m[e];if(void 0!==i)return i.exports;var t=m[e]={exports:{}};return v[e].call(t.exports,t,t.exports,r),t.exports}r.m=v,e=[],r.O=(i,t,o,f)=>{if(!t){var a=1/0;for(n=0;n<e.length;n++){for(var[t,o,f]=e[n],c=!0,d=0;d<t.length;d++)(!1&f||a>=f)&&Object.keys(r.O).every(p=>r.O[p](t[d]))?t.splice(d--,1):(c=!1,f<a&&(a=f));if(c){e.splice(n--,1);var u=o();void 0!==u&&(i=u)}}return i}f=f||0;for(var n=e.length;n>0&&e[n-1][2]>f;n--)e[n]=e[n-1];e[n]=[t,o,f]},r.n=e=>{var i=e&&e.__esModule?()=>e.default:()=>e;return r.d(i,{a:i}),i},r.d=(e,i)=>{for(var t in i)r.o(i,t)&&!r.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:i[t]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce((i,t)=>(r.f[t](e,i),i),[])),r.u=e=>(592===e?"common":e)+"."+{66:"97582e026891bf70",103:"5b5d2a6e5a8a7479",146:"92e3b29c4c754544",592:"858f777e9296e6f2",694:"d4844204c9f8d279",853:"84ee7e1d7cff8913"}[e]+".js",r.miniCssF=e=>{},r.o=(e,i)=>Object.prototype.hasOwnProperty.call(e,i),(()=>{var e={},i="blrec:";r.l=(t,o,f,n)=>{if(e[t])e[t].push(o);else{var a,c;if(void 0!==f)for(var d=document.getElementsByTagName("script"),u=0;u<d.length;u++){var l=d[u];if(l.getAttribute("src")==t||l.getAttribute("data-webpack")==i+f){a=l;break}}a||(c=!0,(a=document.createElement("script")).type="module",a.charset="utf-8",a.timeout=120,r.nc&&a.setAttribute("nonce",r.nc),a.setAttribute("data-webpack",i+f),a.src=r.tu(t)),e[t]=[o];var s=(g,p)=>{a.onerror=a.onload=null,clearTimeout(b);var _=e[t];if(delete e[t],a.parentNode&&a.parentNode.removeChild(a),_&&_.forEach(h=>h(p)),g)return g(p)},b=setTimeout(s.bind(null,void 0,{type:"timeout",target:a}),12e4);a.onerror=s.bind(null,a.onerror),a.onload=s.bind(null,a.onload),c&&document.head.appendChild(a)}}})(),r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{var e;r.tu=i=>(void 0===e&&(e={createScriptURL:t=>t},"undefined"!=typeof trustedTypes&&trustedTypes.createPolicy&&(e=trustedTypes.createPolicy("angular#bundler",e))),e.createScriptURL(i))})(),r.p="",(()=>{var e={666:0};r.f.j=(o,f)=>{var n=r.o(e,o)?e[o]:void 0;if(0!==n)if(n)f.push(n[2]);else if(666!=o){var a=new Promise((l,s)=>n=e[o]=[l,s]);f.push(n[2]=a);var c=r.p+r.u(o),d=new Error;r.l(c,l=>{if(r.o(e,o)&&(0!==(n=e[o])&&(e[o]=void 0),n)){var s=l&&("load"===l.type?"missing":l.type),b=l&&l.target&&l.target.src;d.message="Loading chunk "+o+" failed.\n("+s+": "+b+")",d.name="ChunkLoadError",d.type=s,d.request=b,n[1](d)}},"chunk-"+o,o)}else e[o]=0},r.O.j=o=>0===e[o];var i=(o,f)=>{var d,u,[n,a,c]=f,l=0;if(n.some(b=>0!==e[b])){for(d in a)r.o(a,d)&&(r.m[d]=a[d]);if(c)var s=c(r)}for(o&&o(f);l<n.length;l++)r.o(e,u=n[l])&&e[u]&&e[u][0](),e[n[l]]=0;return r.O(s)},t=self.webpackChunkblrec=self.webpackChunkblrec||[];t.forEach(i.bind(null,0)),t.push=i.bind(null,t.push.bind(t))})()})();

View File

@ -81,6 +81,10 @@ class DataAnalyser:
self._audio_analysed = False
self._video_analysed = False
@property
def last_timestamp(self) -> int:
return self._last_timestamp
def analyse_header(self, header: FlvHeader) -> None:
assert not self._header_analysed
self._header_analysed = True

View File

@ -130,8 +130,15 @@ def inject_metadata(
observer.on_error(e)
else:
logger.info(f"Successfully inject metadata for '{path}'")
os.replace(out_path, path)
observer.on_completed()
try:
os.replace(out_path, path)
except Exception as e:
logger.error(
f"Failed to replace file {path} with '{out_path}'"
)
observer.on_error(e)
else:
observer.on_completed()
if room_id is not None:
return _scheduler.schedule(with_room_id(room_id)(action))

View File

@ -41,14 +41,14 @@ class ParametersChecker:
if is_audio_sequence_header(tag):
if self._last_audio_header_tag is not None:
if not tag.is_the_same_as(self._last_audio_header_tag):
logger.warning('Audio parameters changed')
logger.debug(f'Audio parameters changed: {tag}')
self._last_audio_header_tag = tag
raise AudioParametersChanged()
self._last_audio_header_tag = tag
elif is_video_sequence_header(tag):
if self._last_video_header_tag is not None:
if not tag.is_the_same_as(self._last_video_header_tag):
logger.warning('Video parameters changed')
logger.debug(f'Video parameters changed: {tag}')
self._last_video_header_tag = tag
raise VideoParametersChanged()
self._last_video_header_tag = tag

View File

@ -6,7 +6,7 @@ import json
import logging
from typing import (
Any, BinaryIO, Dict, List, Final, Iterable, Iterator, Optional, Tuple,
Protocol, TypedDict, Union, cast
Protocol, TypedDict, Union, cast, TYPE_CHECKING
)
import attr
@ -31,11 +31,13 @@ from .exceptions import (
CutStream,
)
from .common import (
is_audio_tag, is_metadata_tag, is_video_tag, parse_metadata,
is_audio_data_tag, is_video_data_tag, is_sequence_header,
enrich_metadata, update_metadata, is_data_tag, read_tags_in_duration,
is_audio_tag, is_video_tag, is_metadata_tag, parse_metadata,
is_audio_data_tag, is_video_data_tag, enrich_metadata, update_metadata,
is_data_tag, read_tags_in_duration,
)
from ..path import extra_metadata_path
if TYPE_CHECKING:
from ..core.stream_analyzer import StreamProfile
__all__ = 'StreamProcessor', 'BaseOutputFileManager', 'JoinPoint'
@ -83,6 +85,7 @@ class StreamProcessor:
self._stream_count: int = 0
self._size_updates = Subject()
self._time_updates = Subject()
self._stream_profile_updates = Subject()
self._delta: int = 0
self._has_audio: bool = False
@ -123,6 +126,8 @@ class StreamProcessor:
return None
try:
return self._data_analyser.make_metadata()
except AssertionError:
return None
except Exception as e:
            logger.debug(f'Failed to make metadata, due to: {repr(e)}')
return None
@ -135,6 +140,10 @@ class StreamProcessor:
def time_updates(self) -> Observable:
return self._time_updates
@property
def stream_profile_updates(self) -> Observable:
return self._stream_profile_updates
@property
def cancelled(self) -> bool:
return self._cancelled
@ -146,6 +155,9 @@ class StreamProcessor:
def cancel(self) -> None:
self._cancelled = True
def set_metadata(self, metadata: Dict[str, Any]) -> None:
self._metadata = metadata.copy()
def process_stream(self, stream: RandomIO) -> None:
assert not self._cancelled and not self._finalized, \
'should not be called after the processing cancelled or finalized'
@ -241,6 +253,7 @@ class StreamProcessor:
self._write_header(self._ensure_header_correct(flv_header))
self._transfer_meta_tags()
self._transfer_first_data_tag(first_data_tag)
self._update_stream_profile(flv_header, first_data_tag)
except Exception:
self._last_tags = []
self._resetting_file = True
@ -346,6 +359,33 @@ class StreamProcessor:
        logger.debug('Meta tags have been transferred')
def _update_stream_profile(
self, flv_header: FlvHeader, first_data_tag: FlvTag
) -> None:
from ..core.stream_analyzer import ffprobe
if self._parameters_checker.last_metadata_tag is None:
return
if self._parameters_checker.last_video_header_tag is None:
return
bytes_io = io.BytesIO()
writer = FlvWriter(bytes_io)
writer.write_header(flv_header)
writer.write_tag(self._parameters_checker.last_metadata_tag)
writer.write_tag(self._parameters_checker.last_video_header_tag)
if self._parameters_checker.last_audio_header_tag is not None:
writer.write_tag(self._parameters_checker.last_audio_header_tag)
writer.write_tag(first_data_tag)
def on_next(profile: StreamProfile) -> None:
self._stream_profile_updates.on_next(profile)
def on_error(e: Exception) -> None:
logger.warning(f'Failed to analyse stream: {repr(e)}')
ffprobe(bytes_io.getvalue()).subscribe(on_next, on_error)
def _transfer_first_data_tag(self, tag: FlvTag) -> None:
logger.debug(f'Transfer the first data tag: {tag}')
self._delta = -tag.timestamp
@ -385,6 +425,18 @@ class StreamProcessor:
except EOFError:
logger.debug('The input stream exhausted')
break
except AudioParametersChanged:
if self._analyse_data:
logger.warning('Audio parameters changed at {}'.format(
format_timestamp(self._data_analyser.last_timestamp),
))
yield tag
except VideoParametersChanged:
if self._analyse_data:
logger.warning('Video parameters changed at {}'.format(
format_timestamp(self._data_analyser.last_timestamp),
))
raise
except Exception as e:
logger.debug(f'Failed to read data, due to: {repr(e)}')
raise
@ -396,7 +448,7 @@ class StreamProcessor:
try:
count: int = 0
for tag in filter(lambda t: not is_sequence_header(t), tags):
for tag in tags:
self._ensure_ts_correct(tag)
self._write_tag(self._correct_ts(tag, self._delta))
count += 1
@ -741,13 +793,13 @@ class FlvReaderWithTimestampFix(RobustFlvReader):
tag = super().read_tag(no_body=no_body)
if self._last_tag is None:
if is_data_tag(tag):
if is_video_tag(tag) or is_audio_tag(tag):
self._update_last_tags(tag)
elif is_metadata_tag(tag):
self._update_parameters(tag)
return tag
if not is_data_tag(tag):
if not is_video_tag(tag) and not is_audio_tag(tag):
return tag
if self._is_ts_rebounded(tag):
@ -790,11 +842,11 @@ class FlvReaderWithTimestampFix(RobustFlvReader):
if is_video_tag(tag):
if self._last_video_tag is None:
return False
return tag.timestamp < self._last_video_tag.timestamp
return tag.timestamp <= self._last_video_tag.timestamp
elif is_audio_tag(tag):
if self._last_audio_tag is None:
return False
return tag.timestamp < self._last_audio_tag.timestamp
return tag.timestamp <= self._last_audio_tag.timestamp
else:
return False

View File

@ -36,8 +36,11 @@ def aio_task_with_room_id(
curr_task = asyncio.current_task()
assert curr_task is not None
old_name = curr_task.get_name()
curr_task.set_name(f'{func.__qualname__}::{room_id}')
return await func(obj, *arg, **kwargs)
try:
return await func(obj, *arg, **kwargs)
finally:
curr_task.set_name(old_name)
return wrapper

View File

@ -1,3 +1,4 @@
import ssl
import logging
import asyncio
import smtplib
@ -62,10 +63,18 @@ class EmailService(MessagingProvider):
msg['To'] = self.dst_addr
msg.set_content(content, subtype=msg_type, charset='utf-8')
with smtplib.SMTP_SSL(self.smtp_host, self.smtp_port) as smtp:
# smtp.set_debuglevel(1)
smtp.login(self.src_addr, self.auth_code)
smtp.send_message(msg, self.src_addr, self.dst_addr)
try:
with smtplib.SMTP_SSL(self.smtp_host, self.smtp_port) as smtp:
# smtp.set_debuglevel(1)
smtp.login(self.src_addr, self.auth_code)
smtp.send_message(msg, self.src_addr, self.dst_addr)
except ssl.SSLError:
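            # Implicit TLS failed, e.g. the server expects a plain
            # connection upgraded via STARTTLS on this port; retry with an
            # explicit STARTTLS upgrade.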
with smtplib.SMTP(self.smtp_host, self.smtp_port) as smtp:
# smtp.set_debuglevel(1)
context = ssl.create_default_context()
smtp.starttls(context=context)
smtp.login(self.src_addr, self.auth_code)
smtp.send_message(msg, self.src_addr, self.dst_addr)
def _check_parameters(self) -> None:
if not self.src_addr:
@ -105,7 +114,7 @@ class PushplusResponse(TypedDict):
class Pushplus(MessagingProvider):
url = 'http://pushplus.hxtrip.com/send'
url = 'http://www.pushplus.plus/send'
def __init__(self, token: str = '', topic: str = '') -> None:
super().__init__()

View File

@ -13,6 +13,7 @@ class PostprocessorStatus(Enum):
class DeleteStrategy(Enum):
AUTO = 'auto'
SAFE = 'safe'
NEVER = 'never'
def __str__(self) -> str:

View File

@ -17,7 +17,7 @@ from ..event.event_emitter import EventListener, EventEmitter
from ..bili.live import Live
from ..core import Recorder, RecorderEventListener
from ..exception import submit_exception
from ..utils.mixins import AsyncStoppableMixin, AsyncCooperationMix
from ..utils.mixins import AsyncStoppableMixin, AsyncCooperationMixin
from ..path import extra_metadata_path
from ..flv.metadata_injector import inject_metadata, InjectProgress
from ..flv.helpers import is_valid_flv_file
@ -46,7 +46,7 @@ class Postprocessor(
EventEmitter[PostprocessorEventListener],
RecorderEventListener,
AsyncStoppableMixin,
AsyncCooperationMix,
AsyncCooperationMixin,
):
def __init__(
self,
@ -162,8 +162,11 @@ class Postprocessor(
self._queue.task_done()
async def _inject_extra_metadata(self, path: str) -> str:
metadata = await get_extra_metadata(path)
await self._inject_metadata(path, metadata, self._scheduler)
try:
metadata = await get_extra_metadata(path)
await self._inject_metadata(path, metadata, self._scheduler)
except Exception as e:
logger.error(f"Failed to inject metadata for '{path}': {repr(e)}")
return path
async def _remux_flv_to_mp4(self, in_path: str) -> str:
@ -260,6 +263,9 @@ class Postprocessor(
if self.delete_source == DeleteStrategy.AUTO:
if not remux_result.is_failed():
return True
elif self.delete_source == DeleteStrategy.SAFE:
if not remux_result.is_failed() and not remux_result.is_warned():
return True
elif self.delete_source == DeleteStrategy.NEVER:
return False

View File

@ -18,7 +18,7 @@ from pydantic import BaseModel as PydanticBaseModel
from pydantic import Field, BaseSettings, validator, PrivateAttr
from pydantic.networks import HttpUrl, EmailStr
from ..bili.typing import QualityNumber
from ..bili.typing import StreamFormat, QualityNumber
from ..postprocess import DeleteStrategy
from ..logging.typing import LOG_LEVEL
from ..utils.string import camel_case
@ -138,6 +138,7 @@ class DanmakuSettings(DanmakuOptions):
class RecorderOptions(BaseModel):
stream_format: Optional[StreamFormat]
quality_number: Optional[QualityNumber]
read_timeout: Optional[int] # seconds
disconnection_timeout: Optional[int] # seconds
@ -164,6 +165,7 @@ class RecorderOptions(BaseModel):
class RecorderSettings(RecorderOptions):
stream_format: StreamFormat = 'flv'
quality_number: QualityNumber = 20000 # 4K, the highest quality.
read_timeout: int = 3
disconnection_timeout: int = 600
@ -240,7 +242,7 @@ class OutputOptions(BaseModel):
def out_dir_factory() -> str:
path = os.path.expanduser(DEFAULT_OUT_DIR)
path = os.path.normpath(os.path.expanduser(DEFAULT_OUT_DIR))
os.makedirs(path, exist_ok=True)
return path
@ -285,7 +287,7 @@ class TaskSettings(TaskOptions):
def log_dir_factory() -> str:
path = os.path.expanduser(DEFAULT_LOG_DIR)
path = os.path.normpath(os.path.expanduser(DEFAULT_LOG_DIR))
os.makedirs(path, exist_ok=True)
return path
@ -444,7 +446,7 @@ class Settings(BaseModel):
@validator('tasks')
def _validate_tasks(cls, tasks: List[TaskSettings]) -> List[TaskSettings]:
if len(tasks) >= cls._MAX_TASKS:
if len(tasks) > cls._MAX_TASKS:
raise ValueError(f'Out of max tasks limits: {cls._MAX_TASKS}')
return tasks
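
With the comparison relaxed from >= to >, a list of exactly _MAX_TASKS tasks now validates and only one task beyond the limit raises. A quick check, assuming a hypothetical limit of 100:

MAX_TASKS = 100  # assumed stand-in for Settings._MAX_TASKS


def validate_tasks(tasks: list) -> list:
    if len(tasks) > MAX_TASKS:
        raise ValueError(f'Out of max tasks limits: {MAX_TASKS}')
    return tasks


validate_tasks([object()] * 100)      # exactly at the limit: accepted
# validate_tasks([object()] * 101)    # one over the limit: raises ValueError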

View File

@ -164,6 +164,28 @@ class SettingsManager:
settings.enable_recorder = False
await self.dump_settings()
async def mark_task_monitor_enabled(self, room_id: int) -> None:
settings = self.find_task_settings(room_id)
assert settings is not None
settings.enable_monitor = True
await self.dump_settings()
async def mark_task_monitor_disabled(self, room_id: int) -> None:
settings = self.find_task_settings(room_id)
assert settings is not None
settings.enable_monitor = False
await self.dump_settings()
async def mark_all_task_monitors_enabled(self) -> None:
for settings in self._settings.tasks:
settings.enable_monitor = True
await self.dump_settings()
async def mark_all_task_monitors_disabled(self) -> None:
for settings in self._settings.tasks:
settings.enable_monitor = False
await self.dump_settings()
async def mark_task_recorder_enabled(self, room_id: int) -> None:
settings = self.find_task_settings(room_id)
assert settings is not None

View File

@ -5,7 +5,7 @@ from typing import Optional
import attr
from ..bili.models import RoomInfo, UserInfo
from ..bili.typing import QualityNumber
from ..bili.typing import StreamFormat, QualityNumber
from ..postprocess import DeleteStrategy, PostprocessorStatus
from ..postprocess.typing import Progress
@ -23,11 +23,16 @@ class TaskStatus:
monitor_enabled: bool
recorder_enabled: bool
running_status: RunningStatus
elapsed: float # time elapsed
data_count: int # Number of Bytes in total
data_rate: float # Number of Bytes per second
danmu_count: int # Number of Danmu in total
stream_url: str
stream_host: str
dl_total: int # Number of Bytes in total
dl_rate: float # Number of Bytes per second
rec_elapsed: float # time elapsed
rec_total: int # Number of Bytes in total
rec_rate: float # Number of Bytes per second
danmu_total: int # Number of Danmu in total
danmu_rate: float # Number of Danmu per minutes
real_stream_format: StreamFormat
real_quality_number: QualityNumber
recording_path: Optional[str] = None
postprocessor_status: PostprocessorStatus = PostprocessorStatus.WAITING
@ -53,6 +58,7 @@ class TaskParam:
record_super_chat: bool
save_raw_danmaku: bool
# RecorderSettings
stream_format: StreamFormat
quality_number: QualityNumber
read_timeout: int
disconnection_timeout: Optional[int]

View File

@ -16,8 +16,9 @@ from ..bili.live import Live
from ..bili.models import RoomInfo, UserInfo
from ..bili.danmaku_client import DanmakuClient
from ..bili.live_monitor import LiveMonitor
from ..bili.typing import QualityNumber
from ..bili.typing import StreamFormat, QualityNumber
from ..core import Recorder
from ..core.stream_analyzer import StreamProfile
from ..postprocess import Postprocessor, PostprocessorStatus, DeleteStrategy
from ..postprocess.remuxer import RemuxProgress
from ..flv.metadata_injector import InjectProgress
@ -43,18 +44,6 @@ class RecordTask:
path_template: str = '',
cookie: str = '',
user_agent: str = '',
danmu_uname: bool = False,
record_gift_send: bool = False,
record_free_gifts: bool = False,
record_guard_buy: bool = False,
record_super_chat: bool = False,
save_cover: bool = False,
save_raw_danmaku: bool = False,
buffer_size: Optional[int] = None,
read_timeout: Optional[int] = None,
disconnection_timeout: Optional[int] = None,
filesize_limit: int = 0,
duration_limit: int = 0,
remux_to_mp4: bool = False,
inject_extra_metadata: bool = True,
delete_source: DeleteStrategy = DeleteStrategy.AUTO,
@ -68,18 +57,6 @@ class RecordTask:
self._path_template = path_template
self._cookie = cookie
self._user_agent = user_agent
self._danmu_uname = danmu_uname
self._record_gift_send = record_gift_send
self._record_free_gifts = record_free_gifts
self._record_guard_buy = record_guard_buy
self._record_super_chat = record_super_chat
self._save_cover = save_cover
self._save_raw_danmaku = save_raw_danmaku
self._buffer_size = buffer_size
self._read_timeout = read_timeout
self._disconnection_timeout = disconnection_timeout
self._filesize_limit = filesize_limit
self._duration_limit = duration_limit
self._remux_to_mp4 = remux_to_mp4
self._inject_extra_metadata = inject_extra_metadata
self._delete_source = delete_source
@ -127,11 +104,16 @@ class RecordTask:
monitor_enabled=self.monitor_enabled,
recorder_enabled=self.recorder_enabled,
running_status=self.running_status,
elapsed=self._recorder.elapsed,
data_count=self._recorder.data_count,
data_rate=self._recorder.data_rate,
danmu_count=self._recorder.danmu_count,
stream_url=self._recorder.stream_url,
stream_host=self._recorder.stream_host,
dl_total=self._recorder.dl_total,
dl_rate=self._recorder.dl_rate,
rec_elapsed=self._recorder.rec_elapsed,
rec_total=self._recorder.rec_total,
rec_rate=self._recorder.rec_rate,
danmu_total=self._recorder.danmu_total,
danmu_rate=self._recorder.danmu_rate,
real_stream_format=self._recorder.real_stream_format,
real_quality_number=self._recorder.real_quality_number,
recording_path=self.recording_path,
postprocessor_status=self._postprocessor.status,
@ -283,6 +265,14 @@ class RecordTask:
def save_raw_danmaku(self, value: bool) -> None:
self._recorder.save_raw_danmaku = value
@property
def stream_format(self) -> StreamFormat:
return self._recorder.stream_format
@stream_format.setter
def stream_format(self, value: StreamFormat) -> None:
self._recorder.stream_format = value
@property
def quality_number(self) -> QualityNumber:
return self._recorder.quality_number
@ -291,6 +281,10 @@ class RecordTask:
def quality_number(self, value: QualityNumber) -> None:
self._recorder.quality_number = value
@property
def real_stream_format(self) -> StreamFormat:
return self._recorder.real_stream_format
@property
def real_quality_number(self) -> QualityNumber:
return self._recorder.real_quality_number
@ -359,6 +353,10 @@ class RecordTask:
def metadata(self) -> Optional[MetaData]:
return self._recorder.metadata
@property
def stream_profile(self) -> StreamProfile:
return self._recorder.stream_profile
@property
def remux_to_mp4(self) -> bool:
return self._postprocessor.remux_to_mp4
@ -452,7 +450,8 @@ class RecordTask:
await self._live.deinit()
await self._live.init()
self._danmaku_client.session = self._live.session
self._danmaku_client.api = self._live.api
self._danmaku_client.appapi = self._live.appapi
self._danmaku_client.webapi = self._live.webapi
if self._monitor_enabled:
await self._danmaku_client.start()
@ -468,7 +467,10 @@ class RecordTask:
def _setup_danmaku_client(self) -> None:
self._danmaku_client = DanmakuClient(
self._live.session, self._live.api, self._live.room_id
self._live.session,
self._live.appapi,
self._live.webapi,
self._live.room_id
)
def _setup_live_monitor(self) -> None:
@ -484,18 +486,6 @@ class RecordTask:
self._live_monitor,
self._out_dir,
self._path_template,
buffer_size=self._buffer_size,
read_timeout=self._read_timeout,
disconnection_timeout=self._disconnection_timeout,
danmu_uname=self._danmu_uname,
record_gift_send=self._record_gift_send,
record_free_gifts=self._record_free_gifts,
record_guard_buy=self._record_guard_buy,
record_super_chat=self._record_super_chat,
save_cover=self._save_cover,
save_raw_danmaku=self._save_raw_danmaku,
filesize_limit=self._filesize_limit,
duration_limit=self._duration_limit,
)
def _setup_recorder_event_submitter(self) -> None:

View File

@ -1,12 +1,22 @@
from __future__ import annotations
import asyncio
import logging
from typing import Dict, Iterator, Optional, TYPE_CHECKING
import aiohttp
from tenacity import (
retry,
wait_exponential,
stop_after_delay,
retry_if_exception_type,
)
from .task import RecordTask
from .models import TaskData, TaskParam, VideoFileDetail, DanmakuFileDetail
from ..flv.data_analyser import MetaData
from ..exception import NotFoundError
from ..core.stream_analyzer import StreamProfile
from ..exception import submit_exception, NotFoundError
from ..bili.exceptions import ApiRequestError
if TYPE_CHECKING:
from ..setting import SettingsManager
from ..setting import (
@ -22,76 +32,109 @@ from ..setting import (
__all__ = 'RecordTaskManager',
logger = logging.getLogger(__name__)
class RecordTaskManager:
def __init__(self, settings_manager: SettingsManager) -> None:
self._settings_manager = settings_manager
self._tasks: Dict[int, RecordTask] = {}
import threading
self._lock = threading.Lock()
async def load_all_tasks(self) -> None:
logger.info('Loading all tasks...')
settings_list = self._settings_manager.get_settings({'tasks'}).tasks
assert settings_list is not None
for settings in settings_list:
await self.add_task(settings)
try:
await self.add_task(settings)
except Exception as e:
submit_exception(e)
logger.info('Load all tasks complete')
async def destroy_all_tasks(self) -> None:
logger.info('Destroying all tasks...')
if not self._tasks:
return
await asyncio.wait([t.destroy() for t in self._tasks.values()])
await asyncio.wait([
t.destroy() for t in self._tasks.values() if t.ready
])
self._tasks.clear()
        logger.info('Successfully destroyed all tasks')
def has_task(self, room_id: int) -> bool:
return room_id in self._tasks
@retry(
reraise=True,
retry=retry_if_exception_type((
asyncio.TimeoutError, aiohttp.ClientError, ApiRequestError,
)),
wait=wait_exponential(max=10),
stop=stop_after_delay(60),
)
async def add_task(self, settings: TaskSettings) -> None:
logger.info(f'Adding task {settings.room_id}...')
task = RecordTask(settings.room_id)
self._tasks[settings.room_id] = task
await self._settings_manager.apply_task_header_settings(
settings.room_id, settings.header, update_session=False
)
await task.setup()
try:
await self._settings_manager.apply_task_header_settings(
settings.room_id, settings.header, update_session=False
)
await task.setup()
self._settings_manager.apply_task_output_settings(
settings.room_id, settings.output
)
self._settings_manager.apply_task_danmaku_settings(
settings.room_id, settings.danmaku
)
self._settings_manager.apply_task_recorder_settings(
settings.room_id, settings.recorder
)
self._settings_manager.apply_task_postprocessing_settings(
settings.room_id, settings.postprocessing
)
self._settings_manager.apply_task_output_settings(
settings.room_id, settings.output
)
self._settings_manager.apply_task_danmaku_settings(
settings.room_id, settings.danmaku
)
self._settings_manager.apply_task_recorder_settings(
settings.room_id, settings.recorder
)
self._settings_manager.apply_task_postprocessing_settings(
settings.room_id, settings.postprocessing
)
if settings.enable_monitor:
await task.enable_monitor()
if settings.enable_recorder:
await task.enable_recorder()
if settings.enable_monitor:
await task.enable_monitor()
if settings.enable_recorder:
await task.enable_recorder()
except Exception as e:
logger.error(
f'Failed to add task {settings.room_id} due to: {repr(e)}'
)
del self._tasks[settings.room_id]
raise
logger.info(f'Successfully added task {settings.room_id}')
async def remove_task(self, room_id: int) -> None:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
await task.disable_recorder(force=True)
await task.disable_monitor()
await task.destroy()
del self._tasks[room_id]
async def remove_all_tasks(self) -> None:
coros = [self.remove_task(i) for i in self._tasks]
coros = [
self.remove_task(i) for i, t in self._tasks.items() if t.ready
]
if coros:
await asyncio.wait(coros)
async def start_task(self, room_id: int) -> None:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
await task.update_info()
await task.enable_monitor()
await task.enable_recorder()
async def stop_task(self, room_id: int, force: bool = False) -> None:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
await task.disable_recorder(force)
await task.disable_monitor()
@ -105,46 +148,47 @@ class RecordTaskManager:
await self.disable_all_task_monitors()
async def enable_task_monitor(self, room_id: int) -> None:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
await task.enable_monitor()
async def disable_task_monitor(self, room_id: int) -> None:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
await task.disable_monitor()
async def enable_all_task_monitors(self) -> None:
coros = [t.enable_monitor() for t in self._tasks.values()]
coros = [t.enable_monitor() for t in self._tasks.values() if t.ready]
if coros:
await asyncio.wait(coros)
async def disable_all_task_monitors(self) -> None:
coros = [t.disable_monitor() for t in self._tasks.values()]
coros = [t.disable_monitor() for t in self._tasks.values() if t.ready]
if coros:
await asyncio.wait(coros)
async def enable_task_recorder(self, room_id: int) -> None:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
await task.enable_recorder()
async def disable_task_recorder(
self, room_id: int, force: bool = False
) -> None:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
await task.disable_recorder(force)
async def enable_all_task_recorders(self) -> None:
coros = [t.enable_recorder() for t in self._tasks.values()]
coros = [t.enable_recorder() for t in self._tasks.values() if t.ready]
if coros:
await asyncio.wait(coros)
async def disable_all_task_recorders(self, force: bool = False) -> None:
coros = [t.disable_recorder(force) for t in self._tasks.values()]
coros = [
t.disable_recorder(force) for t in self._tasks.values() if t.ready
]
if coros:
await asyncio.wait(coros)
def get_task_data(self, room_id: int) -> TaskData:
task = self._get_task(room_id)
assert task.ready, "the task isn't ready yet, couldn't get task data!"
task = self._get_task(room_id, check_ready=True)
return self._make_task_data(task)
def get_all_task_data(self) -> Iterator[TaskData]:
@ -152,39 +196,43 @@ class RecordTaskManager:
yield self._make_task_data(task)
def get_task_param(self, room_id: int) -> TaskParam:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
return self._make_task_param(task)
def get_task_metadata(self, room_id: int) -> Optional[MetaData]:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
return task.metadata
def get_task_stream_profile(self, room_id: int) -> StreamProfile:
task = self._get_task(room_id, check_ready=True)
return task.stream_profile
def get_task_video_file_details(
self, room_id: int
) -> Iterator[VideoFileDetail]:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
yield from task.video_file_details
def get_task_danmaku_file_details(
self, room_id: int
) -> Iterator[DanmakuFileDetail]:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
yield from task.danmaku_file_details
def can_cut_stream(self, room_id: int) -> bool:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
return task.can_cut_stream()
def cut_stream(self, room_id: int) -> bool:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
return task.cut_stream()
async def update_task_info(self, room_id: int) -> None:
task = self._get_task(room_id)
task = self._get_task(room_id, check_ready=True)
await task.update_info()
async def update_all_task_infos(self) -> None:
coros = [t.update_info() for t in self._tasks.values()]
coros = [t.update_info() for t in self._tasks.values() if t.ready]
if coros:
await asyncio.wait(coros)
@ -235,6 +283,7 @@ class RecordTaskManager:
self, room_id: int, settings: RecorderSettings
) -> None:
task = self._get_task(room_id)
task.stream_format = settings.stream_format
task.quality_number = settings.quality_number
task.read_timeout = settings.read_timeout
task.disconnection_timeout = settings.disconnection_timeout
@ -249,11 +298,15 @@ class RecordTaskManager:
task.inject_extra_metadata = settings.inject_extra_metadata
task.delete_source = settings.delete_source
def _get_task(self, room_id: int) -> RecordTask:
def _get_task(self, room_id: int, check_ready: bool = False) -> RecordTask:
try:
return self._tasks[room_id]
task = self._tasks[room_id]
except KeyError:
raise NotFoundError(f'no task for the room {room_id}')
else:
if check_ready and not task.ready:
raise NotFoundError(f'the task {room_id} is not ready yet')
return task
def _make_task_param(self, task: RecordTask) -> TaskParam:
return TaskParam(
@ -270,6 +323,7 @@ class RecordTaskManager:
record_super_chat=task.record_super_chat,
save_cover=task.save_cover,
save_raw_danmaku=task.save_raw_danmaku,
stream_format=task.stream_format,
quality_number=task.quality_number,
read_timeout=task.read_timeout,
disconnection_timeout=task.disconnection_timeout,
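
The add_task path in this file is wrapped with tenacity so that transient network or API failures during task creation are retried with exponential backoff for up to a minute before the last exception is re-raised. A standalone sketch of the same decorator, with a placeholder coroutine and URL instead of the real per-task setup:

import asyncio

import aiohttp
from tenacity import (
    retry,
    retry_if_exception_type,
    stop_after_delay,
    wait_exponential,
)


@retry(
    reraise=True,  # surface the last exception once the time budget is spent
    retry=retry_if_exception_type((asyncio.TimeoutError, aiohttp.ClientError)),
    wait=wait_exponential(max=10),   # 1s, 2s, 4s, ... capped at 10s between attempts
    stop=stop_after_delay(60),       # give up after roughly one minute
)
async def fetch_room_info(session: aiohttp.ClientSession, room_id: int) -> dict:
    # Placeholder for the real per-task setup; any URL would do here.
    async with session.get(f'https://example.com/rooms/{room_id}') as resp:
        resp.raise_for_status()
        return await resp.json()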

View File

@ -1,3 +1,4 @@
import os
from abc import ABC, abstractmethod
import asyncio
from typing import Awaitable, TypeVar, final
@ -102,18 +103,38 @@ class AsyncStoppableMixin(ABC):
_T = TypeVar('_T')
class AsyncCooperationMix(ABC):
class AsyncCooperationMixin(ABC):
def __init__(self) -> None:
super().__init__()
self._loop = asyncio.get_running_loop()
def _handle_exception(self, exc: BaseException) -> None:
from ..exception import submit_exception # XXX circular import
from ..exception import submit_exception
async def wrapper() -> None:
# call submit_exception in a coroutine
# workaround for `RuntimeError: no running event loop`
submit_exception(exc)
self._run_coroutine(wrapper())
def _run_coroutine(self, coro: Awaitable[_T]) -> _T:
future = asyncio.run_coroutine_threadsafe(coro, self._loop)
return future.result()
class SupportDebugMixin(ABC):
def __init__(self) -> None:
super().__init__()
def _init_for_debug(self, room_id: int) -> None:
if (
(value := os.environ.get('DEBUG')) and
            (value == '*' or str(room_id) in value.split(','))
):
self._debug = True
self._debug_dir = os.path.expanduser(f'~/.blrec/debug/{room_id}')
self._debug_dir = os.path.normpath(self._debug_dir)
os.makedirs(self._debug_dir, exist_ok=True)
else:
self._debug = False
self._debug_dir = ''
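
_init_for_debug turns the DEBUG environment variable into a per-room switch: '*' enables debugging everywhere, otherwise the value is read as a comma-separated list of room ids. A small sketch of that parsing (note the room id must be compared as a string):

import os


def debug_enabled(room_id: int) -> bool:
    value = os.environ.get('DEBUG')
    if not value:
        return False
    return value == '*' or str(room_id) in value.split(',')


# DEBUG='*'        -> every room is in debug mode
# DEBUG='123,456'  -> only rooms 123 and 456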

View File

@ -1,6 +1,7 @@
from typing import Any, Dict, List
import attr
from pydantic import PositiveInt, conint
from fastapi import (
APIRouter,
status,
@ -31,17 +32,30 @@ router = APIRouter(
@router.get('/data')
async def get_all_task_data(
async def get_task_data(
page: PositiveInt = 1,
size: conint(ge=10, lt=100) = 100, # type: ignore
filter: TaskDataFilter = Depends(task_data_filter)
) -> List[Dict[str, Any]]:
return [attr.asdict(d) for d in filter(app.get_all_task_data())]
start = (page - 1) * size
stop = page * size
task_data = []
for index, data in enumerate(filter(app.get_all_task_data())):
if index < start:
continue
if index >= stop:
break
task_data.append(attr.asdict(data))
return task_data
@router.get(
'/{room_id}/data',
responses={**not_found_responses},
)
async def get_task_data(room_id: int) -> Dict[str, Any]:
async def get_one_task_data(room_id: int) -> Dict[str, Any]:
return attr.asdict(app.get_task_data(room_id))
@ -64,6 +78,14 @@ async def get_task_metadata(room_id: int) -> Dict[str, Any]:
return attr.asdict(metadata)
@router.get(
'/{room_id}/profile',
responses={**not_found_responses},
)
async def get_task_stream_profile(room_id: int) -> Dict[str, Any]:
return app.get_task_stream_profile(room_id)
@router.get(
'/{room_id}/videos',
responses={**not_found_responses},
@ -276,7 +298,7 @@ async def add_task(room_id: int) -> ResponseMessage:
"""
real_room_id = await app.add_task(room_id)
return ResponseMessage(
message='Added Task Successfully',
message='Successfully Added Task',
data={'room_id': real_room_id},
)
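
The reworked /data endpoint pages through the task data instead of returning everything at once; the index loop above is equivalent to slicing the stream with itertools.islice. A sketch of that equivalence, using plain dicts in place of the attrs task-data objects:

from itertools import islice
from typing import Any, Dict, Iterable, Iterator, List


def page_items(items: Iterable[Dict[str, Any]],
               page: int, size: int) -> List[Dict[str, Any]]:
    start = (page - 1) * size
    stop = page * size
    return list(islice(items, start, stop))


def all_task_data() -> Iterator[Dict[str, Any]]:  # stand-in for app.get_all_task_data()
    for room_id in (123, 456, 789):
        yield {'room_id': room_id}


assert page_items(all_task_data(), page=1, size=2) == [
    {'room_id': 123}, {'room_id': 456},
]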

View File

@ -18,9 +18,11 @@
"@angular/platform-browser-dynamic": "^13.1.3",
"@angular/router": "^13.1.3",
"@angular/service-worker": "~13.1.3",
"echarts": "^5.3.1",
"filesize": "^6.4.0",
"lodash-es": "^4.17.21",
"ng-zorro-antd": "^13.0.1",
"ngx-echarts": "^8.0.1",
"ngx-logger": "^4.2.2",
"rxjs": "~6.6.0",
"tslib": "^2.3.0",
@ -5410,6 +5412,20 @@
"integrity": "sha1-Or5DrvODX4rgd9E23c4PJ2sEAOY=",
"dev": true
},
"node_modules/echarts": {
"version": "5.3.1",
"resolved": "https://registry.npmmirror.com/echarts/-/echarts-5.3.1.tgz",
"integrity": "sha512-nWdlbgX3OVY0hpqncSvp0gDt1FRSKWn7lsWEH+PHmfCuvE0QmSw17pczQvm8AvawnLEkmf1Cts7YwQJZNC0AEQ==",
"dependencies": {
"tslib": "2.3.0",
"zrender": "5.3.1"
}
},
"node_modules/echarts/node_modules/tslib": {
"version": "2.3.0",
"resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.3.0.tgz",
"integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg=="
},
"node_modules/ee-first": {
"version": "1.1.1",
"resolved": "https://registry.npmmirror.com/ee-first/download/ee-first-1.1.1.tgz",
@ -8789,6 +8805,17 @@
"@angular/router": "^13.0.1"
}
},
"node_modules/ngx-echarts": {
"version": "8.0.1",
"resolved": "https://registry.npmmirror.com/ngx-echarts/-/ngx-echarts-8.0.1.tgz",
"integrity": "sha512-CP+WnCcnMCNpCL9BVmDIZmhGSVPnkJhhFbQEKt0nrwV0L6d4QTAGZ+e4y6G1zTTFKkIMPHpaO0nhtDRgSXAW/w==",
"dependencies": {
"tslib": "^2.3.0"
},
"peerDependencies": {
"echarts": ">=5.0.0"
}
},
"node_modules/ngx-logger": {
"version": "4.3.3",
"resolved": "https://registry.npmmirror.com/ngx-logger/download/ngx-logger-4.3.3.tgz",
@ -12601,6 +12628,19 @@
"dependencies": {
"tslib": "^2.0.0"
}
},
"node_modules/zrender": {
"version": "5.3.1",
"resolved": "https://registry.npmmirror.com/zrender/-/zrender-5.3.1.tgz",
"integrity": "sha512-7olqIjy0gWfznKr6vgfnGBk7y4UtdMvdwFmK92vVQsQeDPyzkHW1OlrLEKg6GHz1W5ePf0FeN1q2vkl/HFqhXw==",
"dependencies": {
"tslib": "2.3.0"
}
},
"node_modules/zrender/node_modules/tslib": {
"version": "2.3.0",
"resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.3.0.tgz",
"integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg=="
}
},
"dependencies": {
@ -16612,6 +16652,22 @@
"integrity": "sha1-Or5DrvODX4rgd9E23c4PJ2sEAOY=",
"dev": true
},
"echarts": {
"version": "5.3.1",
"resolved": "https://registry.npmmirror.com/echarts/-/echarts-5.3.1.tgz",
"integrity": "sha512-nWdlbgX3OVY0hpqncSvp0gDt1FRSKWn7lsWEH+PHmfCuvE0QmSw17pczQvm8AvawnLEkmf1Cts7YwQJZNC0AEQ==",
"requires": {
"tslib": "2.3.0",
"zrender": "5.3.1"
},
"dependencies": {
"tslib": {
"version": "2.3.0",
"resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.3.0.tgz",
"integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg=="
}
}
},
"ee-first": {
"version": "1.1.1",
"resolved": "https://registry.npmmirror.com/ee-first/download/ee-first-1.1.1.tgz",
@ -19223,6 +19279,14 @@
"tslib": "^2.3.0"
}
},
"ngx-echarts": {
"version": "8.0.1",
"resolved": "https://registry.npmmirror.com/ngx-echarts/-/ngx-echarts-8.0.1.tgz",
"integrity": "sha512-CP+WnCcnMCNpCL9BVmDIZmhGSVPnkJhhFbQEKt0nrwV0L6d4QTAGZ+e4y6G1zTTFKkIMPHpaO0nhtDRgSXAW/w==",
"requires": {
"tslib": "^2.3.0"
}
},
"ngx-logger": {
"version": "4.3.3",
"resolved": "https://registry.npmmirror.com/ngx-logger/download/ngx-logger-4.3.3.tgz",
@ -22140,6 +22204,21 @@
"requires": {
"tslib": "^2.0.0"
}
},
"zrender": {
"version": "5.3.1",
"resolved": "https://registry.npmmirror.com/zrender/-/zrender-5.3.1.tgz",
"integrity": "sha512-7olqIjy0gWfznKr6vgfnGBk7y4UtdMvdwFmK92vVQsQeDPyzkHW1OlrLEKg6GHz1W5ePf0FeN1q2vkl/HFqhXw==",
"requires": {
"tslib": "2.3.0"
},
"dependencies": {
"tslib": {
"version": "2.3.0",
"resolved": "https://registry.npmmirror.com/tslib/-/tslib-2.3.0.tgz",
"integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg=="
}
}
}
}
}

View File

@ -20,9 +20,11 @@
"@angular/platform-browser-dynamic": "^13.1.3",
"@angular/router": "^13.1.3",
"@angular/service-worker": "~13.1.3",
"echarts": "^5.3.1",
"filesize": "^6.4.0",
"lodash-es": "^4.17.21",
"ng-zorro-antd": "^13.0.1",
"ngx-echarts": "^8.0.1",
"ngx-logger": "^4.2.2",
"rxjs": "~6.6.0",
"tslib": "^2.3.0",

View File

@ -43,8 +43,9 @@
>
<ng-template #deleteSourceTip>
<p>
自动: 转换成功才删除源文件<br />
从不: 转换后总是保留源文件<br />
自动: 没出错就删除源文件<br />
谨慎: 没出错且没警告才删除源文件<br />
从不: 总是保留源文件<br />
</p>
</ng-template>
<nz-form-control

View File

@ -1,4 +1,38 @@
<form nz-form [formGroup]="settingsForm">
<nz-form-item class="setting-item">
<nz-form-label
class="setting-label"
nzNoColon
[nzTooltipTitle]="streamFormatTip"
>直播流格式</nz-form-label
>
<ng-template #streamFormatTip>
<p>
选择要录制的直播流格式<br />
<br />
FLV 网络不稳定容易中断丢失数据 <br />
HLS (ts) 基本不受本地网络影响 <br />
HLS (fmp4) 只有少数直播间支持 <br />
<br />
P.S.<br />
非 FLV 格式需要 ffmpeg<br />
HLS (fmp4) 不支持会自动切换到 HLS (ts)<br />
</p>
</ng-template>
<nz-form-control
class="setting-control select"
[nzWarningTip]="syncFailedWarningTip"
[nzValidateStatus]="
syncStatus.streamFormat ? streamFormatControl : 'warning'
"
>
<nz-select
formControlName="streamFormat"
[nzOptions]="streamFormatOptions"
>
</nz-select>
</nz-form-control>
</nz-form-item>
<nz-form-item class="setting-item">
<nz-form-label
class="setting-label"

View File

@ -15,6 +15,7 @@ import { Observable } from 'rxjs';
import type { Mutable } from '../../shared/utility-types';
import {
BUFFER_OPTIONS,
STREAM_FORMAT_OPTIONS,
QUALITY_OPTIONS,
TIMEOUT_OPTIONS,
DISCONNECTION_TIMEOUT_OPTIONS,
@ -39,6 +40,9 @@ export class RecorderSettingsComponent implements OnInit, OnChanges {
readonly settingsForm: FormGroup;
readonly syncFailedWarningTip = SYNC_FAILED_WARNING_TIP;
readonly streamFormatOptions = cloneDeep(STREAM_FORMAT_OPTIONS) as Mutable<
typeof STREAM_FORMAT_OPTIONS
>;
readonly qualityOptions = cloneDeep(QUALITY_OPTIONS) as Mutable<
typeof QUALITY_OPTIONS
>;
@ -58,6 +62,7 @@ export class RecorderSettingsComponent implements OnInit, OnChanges {
private settingsSyncService: SettingsSyncService
) {
this.settingsForm = formBuilder.group({
streamFormat: [''],
qualityNumber: [''],
readTimeout: [''],
disconnectionTimeout: [''],
@ -66,6 +71,10 @@ export class RecorderSettingsComponent implements OnInit, OnChanges {
});
}
get streamFormatControl() {
return this.settingsForm.get('streamFormat') as FormControl;
}
get qualityNumberControl() {
return this.settingsForm.get('qualityNumber') as FormControl;
}

View File

@ -48,9 +48,16 @@ export const DURATION_LIMIT_OPTIONS = [
export const DELETE_STRATEGIES = [
{ label: '自动', value: DeleteStrategy.AUTO },
{ label: '谨慎', value: DeleteStrategy.SAFE },
{ label: '从不', value: DeleteStrategy.NEVER },
] as const;
export const STREAM_FORMAT_OPTIONS = [
{ label: 'FLV', value: 'flv' },
{ label: 'HLS (ts)', value: 'ts' },
{ label: 'HLS (fmp4)', value: 'fmp4' },
] as const;
export const QUALITY_OPTIONS = [
{ label: '4K', value: 20000 },
{ label: '原画', value: 10000 },

View File

@ -18,6 +18,8 @@ export interface DanmakuSettings {
export type DanmakuOptions = Nullable<DanmakuSettings>;
export type StreamFormat = 'flv' | 'ts' | 'fmp4';
export type QualityNumber =
| 20000 // 4K
| 10000 // 原画
@ -28,6 +30,7 @@ export type QualityNumber =
| 80; // 流畅
export interface RecorderSettings {
streamFormat: StreamFormat;
qualityNumber: QualityNumber;
readTimeout: number;
disconnectionTimeout: number;
@ -39,6 +42,7 @@ export type RecorderOptions = Nullable<RecorderSettings>;
export enum DeleteStrategy {
AUTO = 'auto',
SAFE = 'safe',
NEVER = 'never',
}

View File

@ -0,0 +1,8 @@
import { DataratePipe } from './datarate.pipe';
describe('DataratePipe', () => {
it('create an instance', () => {
const pipe = new DataratePipe();
expect(pipe).toBeTruthy();
});
});

View File

@ -0,0 +1,38 @@
import { Pipe, PipeTransform } from '@angular/core';
import { toBitRateString, toByteRateString } from '../utils';
@Pipe({
name: 'datarate',
})
export class DataratePipe implements PipeTransform {
transform(
rate: number | string,
options?: {
bitrate?: boolean;
precision?: number;
spacer?: string;
}
): string {
if (typeof rate === 'string') {
rate = parseFloat(rate);
} else if (typeof rate === 'number' && !isNaN(rate)) {
// pass
} else {
return 'N/A';
}
options = Object.assign(
{
bitrate: false,
precision: 3,
spacer: ' ',
},
options
);
if (options.bitrate) {
return toBitRateString(rate, options.spacer, options.precision);
} else {
return toByteRateString(rate, options.spacer, options.precision);
}
}
}

View File

@ -1,8 +0,0 @@
import { SpeedPipe } from './speed.pipe';
describe('SpeedPipe', () => {
it('create an instance', () => {
const pipe = new SpeedPipe();
expect(pipe).toBeTruthy();
});
});

View File

@ -1,34 +0,0 @@
import { Pipe, PipeTransform } from '@angular/core';
@Pipe({
name: 'speed',
})
export class SpeedPipe implements PipeTransform {
transform(rate: number, precision: number = 3): string {
let num: number;
let unit: string;
if (rate <= 0) {
return '0B/s';
}
if (rate < 1e3) {
num = rate;
unit = 'B';
} else if (rate < 1e6) {
num = rate / 1e3;
unit = 'kB';
} else if (rate < 1e9) {
num = rate / 1e6;
unit = 'MB';
} else if (rate < 1e12) {
num = rate / 1e9;
unit = 'GB';
} else {
throw RangeError(`the rate argument ${rate} out of range`);
}
const digits = precision - Math.floor(Math.abs(Math.log10(num))) - 1;
return num.toFixed(digits) + unit + '/s';
}
}

View File

@ -6,7 +6,7 @@ import { NzPageHeaderModule } from 'ng-zorro-antd/page-header';
import { DataurlPipe } from './pipes/dataurl.pipe';
import { DurationPipe } from './pipes/duration.pipe';
import { SpeedPipe } from './pipes/speed.pipe';
import { DataratePipe } from './pipes/datarate.pipe';
import { FilesizePipe } from './pipes/filesize.pipe';
import { QualityPipe } from './pipes/quality.pipe';
import { ProgressPipe } from './pipes/progress.pipe';
@ -20,7 +20,7 @@ import { FilestatusPipe } from './pipes/filestatus.pipe';
declarations: [
DataurlPipe,
DurationPipe,
SpeedPipe,
DataratePipe,
FilesizePipe,
QualityPipe,
SubPageComponent,
@ -30,15 +30,11 @@ import { FilestatusPipe } from './pipes/filestatus.pipe';
FilenamePipe,
FilestatusPipe,
],
imports: [
CommonModule,
NzSpinModule,
NzPageHeaderModule,
],
imports: [CommonModule, NzSpinModule, NzPageHeaderModule],
exports: [
DataurlPipe,
DurationPipe,
SpeedPipe,
DataratePipe,
FilesizePipe,
QualityPipe,
ProgressPipe,
@ -48,6 +44,6 @@ import { FilestatusPipe } from './pipes/filestatus.pipe';
SubPageContentDirective,
PageSectionComponent,
FilestatusPipe,
]
],
})
export class SharedModule { }
export class SharedModule {}

View File

@ -18,3 +18,70 @@ export function difference(object: object, base: object): object {
}
return diff(object, base);
}
export function toBitRateString(
bitrate: number,
spacer: string = ' ',
precision: number = 3
): string {
let num: number;
let unit: string;
if (bitrate <= 0) {
    return '0 kbps';
}
if (bitrate < 1e6) {
num = bitrate / 1e3;
unit = 'kbps';
} else if (bitrate < 1e9) {
num = bitrate / 1e6;
unit = 'Mbps';
} else if (bitrate < 1e12) {
num = bitrate / 1e9;
unit = 'Gbps';
} else if (bitrate < 1e15) {
num = bitrate / 1e12;
unit = 'Tbps';
} else {
throw RangeError(`the rate argument ${bitrate} out of range`);
}
const digits = precision - Math.floor(Math.abs(Math.log10(num))) - 1;
return num.toFixed(digits < 0 ? 0 : digits) + spacer + unit;
}
export function toByteRateString(
rate: number,
spacer: string = ' ',
precision: number = 3
): string {
let num: number;
let unit: string;
if (rate <= 0) {
return '0B/s';
}
if (rate < 1e3) {
num = rate;
unit = 'B/s';
} else if (rate < 1e6) {
num = rate / 1e3;
unit = 'KB/s';
} else if (rate < 1e9) {
num = rate / 1e6;
unit = 'MB/s';
} else if (rate < 1e12) {
num = rate / 1e9;
unit = 'GB/s';
} else if (rate < 1e15) {
num = rate / 1e12;
unit = 'TB/s';
} else {
throw RangeError(`the rate argument ${rate} out of range`);
}
const digits = precision - Math.floor(Math.abs(Math.log10(num))) - 1;
return num.toFixed(digits < 0 ? 0 : digits) + spacer + unit;
}
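
toBitRateString and toByteRateString keep a fixed number of significant digits by deriving the decimal places from log10 of the scaled value. For reference, the same arithmetic rendered in Python (units and thresholds as in the byte-rate helper above):

import math


def to_byte_rate_string(rate: float, spacer: str = ' ', precision: int = 3) -> str:
    if rate <= 0:
        return '0B/s'
    for threshold, unit in ((1e3, 'B/s'), (1e6, 'KB/s'), (1e9, 'MB/s'),
                            (1e12, 'GB/s'), (1e15, 'TB/s')):
        if rate < threshold:
            num = rate / (threshold / 1e3)
            break
    else:
        raise ValueError(f'the rate argument {rate} out of range')
    # precision significant digits, e.g. 3 -> 123, 12.3, 1.23
    digits = precision - math.floor(abs(math.log10(num))) - 1
    return f'{num:.{max(digits, 0)}f}{spacer}{unit}'


assert to_byte_rate_string(1536) == '1.54 KB/s'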

View File

@ -0,0 +1,100 @@
<div class="info-panel">
<button class="close-panel" (click)="closePanel($event)" title="关闭">
[x]
</button>
<ul
class="info-list"
*ngIf="
data.task_status.running_status === RunningStatus.RECORDING &&
profile &&
profile.streams &&
profile.format &&
metadata
"
>
<li class="info-item">
<span class="label">视频信息</span>
<span class="value">
<span>
{{ profile.streams[0]?.codec_name }}
<!-- <ng-container *ngIf="profile.streams[0]?.profile">
({{ profile.streams[0]?.profile }})
</ng-container> -->
</span>
<span>
{{ profile.streams[0]?.width }}x{{ profile.streams[0]?.height }}
</span>
<span> {{ profile.streams[0]?.r_frame_rate!.split("/")[0] }} fps</span>
<!-- <span
*ngIf="
profile.streams[0]?.bit_rate && profile.streams[0]?.bit_rate !== '1'
"
>
{{ profile.streams[0]?.bit_rate! | datarate: { bitrate: true } }}
</span> -->
<span>
{{ metadata.videodatarate * 1000 | datarate: { bitrate: true } }}
</span>
</span>
</li>
<li class="info-item">
<span class="label">音频信息</span>
<span class="value">
<span>
{{ profile.streams[1]?.codec_name }}
<!-- <ng-container *ngIf="profile.streams[1]?.profile">
({{ profile.streams[1]?.profile }})
</ng-container> -->
</span>
<span> {{ profile.streams[1]?.sample_rate }} HZ</span>
<span>
{{ profile.streams[1]?.channel_layout }}
</span>
<!-- <span *ngIf="profile.streams[1]?.bit_rate">
{{ profile.streams[1]?.bit_rate! | datarate: { bitrate: true } }}
</span> -->
<span>
{{ metadata.audiodatarate * 1000 | datarate: { bitrate: true } }}
</span>
</span>
</li>
<li class="info-item">
<span class="label">格式画质</span
><span class="value">
<span>
{{ data.task_status.real_stream_format }}
</span>
<span>
{{ data.task_status.real_quality_number | quality }}
({{ data.task_status.real_quality_number
}}<ng-container *ngIf="isBlurayStreamQuality()">, bluray</ng-container
>)
</span>
</span>
</li>
<li class="info-item" *ngIf="profile.streams[0]?.tags?.encoder">
<span class="label">流编码器</span>
<span class="value">{{ profile.streams[0]?.tags?.encoder }}</span>
</li>
<li class="info-item">
<span class="label">流主机名</span
><span class="value">
{{ data.task_status.stream_host }}
</span>
</li>
<li class="info-item">
<span class="label">下载速度</span>
<app-wave-graph [value]="data.task_status.dl_rate"></app-wave-graph>
<span class="value">
{{ data.task_status.dl_rate | datarate: { bitrate: true } }}
</span>
</li>
<li class="info-item">
<span class="label">录制速度</span>
<app-wave-graph [value]="data.task_status.rec_rate"></app-wave-graph>
<span class="value">
{{ data.task_status.rec_rate | datarate }}
</span>
</li>
</ul>
</div>

View File

@ -0,0 +1,85 @@
@use "../../shared/styles/layout";
@use "../../shared/styles/list";
@use "../../shared/styles/text";
.info-panel {
position: absolute;
top: 2.55rem;
bottom: 2rem;
left: 0rem;
right: 0rem;
width: 100%;
font-size: 1rem;
@extend %osd-text;
@include text.elide-text-overflow;
overflow: auto;
&::-webkit-scrollbar {
background-color: transparent;
width: 4px;
}
&::-webkit-scrollbar-track {
background: transparent;
}
&::-webkit-scrollbar-thumb {
background: #eee;
border-radius: 2px;
}
&::-webkit-scrollbar-thumb:hover {
background: #fff;
}
.close-panel {
position: absolute;
top: 0rem;
right: 0rem;
width: 2rem;
height: 2rem;
padding: 0;
color: white;
background: transparent;
border: none;
font-size: 1rem;
@include layout.center-content;
cursor: pointer;
}
.info-list {
@include list.reset-list;
width: 100%;
height: 100%;
.info-item {
.label {
display: inline-block;
margin: 0;
width: 5rem;
text-align: right;
&::after {
content: "";
}
}
.value {
display: inline-block;
margin: 0;
text-align: left;
span:not(:first-child) {
&::before {
content: ", ";
}
}
}
}
}
}
app-wave-graph {
margin-right: 1rem;
}

View File

@ -0,0 +1,25 @@
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { InfoPanelComponent } from './info-panel.component';
describe('InfoPanelComponent', () => {
let component: InfoPanelComponent;
let fixture: ComponentFixture<InfoPanelComponent>;
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [ InfoPanelComponent ]
})
.compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(InfoPanelComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});

View File

@ -0,0 +1,95 @@
import {
Component,
OnInit,
ChangeDetectionStrategy,
Input,
OnDestroy,
ChangeDetectorRef,
Output,
EventEmitter,
} from '@angular/core';
import { HttpErrorResponse } from '@angular/common/http';
import { NzNotificationService } from 'ng-zorro-antd/notification';
import { interval, of, Subscription, zip } from 'rxjs';
import { catchError, concatAll, switchMap } from 'rxjs/operators';
import { retry } from 'src/app/shared/rx-operators';
import { Metadata, RunningStatus, TaskData } from '../shared/task.model';
import { TaskService } from '../shared/services/task.service';
import { StreamProfile } from '../shared/task.model';
@Component({
selector: 'app-info-panel',
templateUrl: './info-panel.component.html',
styleUrls: ['./info-panel.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class InfoPanelComponent implements OnInit, OnDestroy {
@Input() data!: TaskData;
@Input() profile!: StreamProfile;
@Input() metadata: Metadata | null = null;
@Output() close = new EventEmitter<undefined>();
readonly RunningStatus = RunningStatus;
private dataSubscription?: Subscription;
constructor(
private changeDetector: ChangeDetectorRef,
private notification: NzNotificationService,
private taskService: TaskService
) {}
ngOnInit(): void {
this.syncData();
}
ngOnDestroy(): void {
this.desyncData();
}
isBlurayStreamQuality(): boolean {
return /_bluray/.test(this.data.task_status.stream_url);
}
closePanel(event: Event): void {
event.preventDefault();
event.stopPropagation();
this.close.emit();
}
private syncData(): void {
this.dataSubscription = of(of(0), interval(1000))
.pipe(
concatAll(),
switchMap(() =>
zip(
this.taskService.getStreamProfile(this.data.room_info.room_id),
this.taskService.getMetadata(this.data.room_info.room_id)
)
),
catchError((error: HttpErrorResponse) => {
this.notification.error('获取数据出错', error.message);
throw error;
}),
retry(3, 1000)
)
.subscribe(
([profile, metadata]) => {
this.profile = profile;
this.metadata = metadata;
this.changeDetector.markForCheck();
},
(error: HttpErrorResponse) => {
this.notification.error(
'获取数据出错',
'网络连接异常, 请待网络正常后刷新。',
{ nzDuration: 0 }
);
}
);
}
private desyncData(): void {
this.dataSubscription?.unsubscribe();
}
}

View File

@ -0,0 +1,4 @@
:host {
position: relative;
top: 2px;
}

View File

@ -0,0 +1,25 @@
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { WaveGraphComponent } from './wave-graph.component';
describe('WaveGraphComponent', () => {
let component: WaveGraphComponent;
let fixture: ComponentFixture<WaveGraphComponent>;
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [ WaveGraphComponent ]
})
.compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(WaveGraphComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});

View File

@ -0,0 +1,7 @@
<svg [attr.width]="width" [attr.height]="height">
<polyline
[attr.stroke]="stroke"
fill="none"
[attr.points]="polylinePoints"
></polyline>
</svg>


View File

@ -0,0 +1,62 @@
import {
Component,
OnInit,
ChangeDetectionStrategy,
Input,
ChangeDetectorRef,
OnDestroy,
} from '@angular/core';
import { interval, Subscription } from 'rxjs';
interface Point {
x: number;
y: number;
}
@Component({
selector: 'app-wave-graph',
templateUrl: './wave-graph.component.svg',
styleUrls: ['./wave-graph.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class WaveGraphComponent implements OnInit, OnDestroy {
@Input() value: number = 0;
@Input() width: number = 200;
@Input() height: number = 16;
@Input() stroke: string = 'white';
private data: number[] = [];
private points: Point[] = [];
private subscription?: Subscription;
constructor(private changeDetector: ChangeDetectorRef) {
for (let x = 0; x <= this.width; x += 2) {
this.data.push(0);
this.points.push({ x: x, y: this.height });
}
}
get polylinePoints(): string {
return this.points.map((p) => `${p.x},${p.y}`).join(' ');
}
ngOnInit(): void {
this.subscription = interval(1000).subscribe(() => {
this.data.push(this.value || 0);
this.data.shift();
let maximum = Math.max(...this.data);
this.points = this.data.map((value, index) => ({
x: Math.min(index * 2, this.width),
y: (1 - value / (maximum || 1)) * this.height,
}));
this.changeDetector.markForCheck();
});
}
ngOnDestroy(): void {
this.subscription?.unsubscribe();
}
}
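
The wave graph keeps one sample per second and rescales the polyline so the current maximum touches the top of the SVG; the y mapping is simply (1 - value / max) * height. A tiny Python rendering of that scaling, for illustration only:

from typing import List


def polyline_points(data: List[float], width: int = 200, height: int = 16) -> str:
    maximum = max(data) or 1  # avoid division by zero when all samples are 0
    return ' '.join(
        f'{min(i * 2, width)},{(1 - value / maximum) * height:g}'
        for i, value in enumerate(data)
    )


# Three samples: the largest maps to y=0 (top), zero maps to y=16 (bottom).
print(polyline_points([0, 50, 100]))  # '0,16 2,8 4,0'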

View File

@ -0,0 +1,16 @@
import { TestBed } from '@angular/core/testing';
import { TaskSettingsService } from './task-settings.service';
describe('TaskSettingsService', () => {
let service: TaskSettingsService;
beforeEach(() => {
TestBed.configureTestingModule({});
service = TestBed.inject(TaskSettingsService);
});
it('should be created', () => {
expect(service).toBeTruthy();
});
});

View File

@ -0,0 +1,33 @@
import { Injectable } from '@angular/core';
import { StorageService } from 'src/app/core/services/storage.service';
export interface TaskSettings {
showInfoPanel?: boolean;
}
@Injectable({
providedIn: 'root',
})
export class TaskSettingsService {
constructor(private storage: StorageService) {}
getSettings(roomId: number): TaskSettings {
const settingsString = this.storage.getData(this.getStorageKey(roomId));
if (settingsString) {
return JSON.parse(settingsString) ?? {};
} else {
return {};
}
}
updateSettings(roomId: number, settings: TaskSettings): void {
settings = Object.assign(this.getSettings(roomId), settings);
const settingsString = JSON.stringify(settings);
this.storage.setData(this.getStorageKey(roomId), settingsString);
}
private getStorageKey(roomId: number): string {
return `app-tasks-${roomId}`;
}
}

View File

@ -9,6 +9,8 @@ import {
TaskData,
DataSelection,
TaskParam,
Metadata,
StreamProfile,
AddTaskResult,
VideoFileDetail,
DanmakuFileDetail,
@ -49,6 +51,16 @@ export class TaskService {
return this.http.get<TaskParam>(url);
}
getMetadata(roomId: number): Observable<Metadata | null> {
const url = apiUrl + `/api/v1/tasks/${roomId}/metadata`;
return this.http.get<Metadata | null>(url);
}
getStreamProfile(roomId: number): Observable<StreamProfile> {
const url = apiUrl + `/api/v1/tasks/${roomId}/profile`;
return this.http.get<StreamProfile>(url);
}
updateAllTaskInfos(): Observable<ResponseMessage> {
const url = apiUrl + '/api/v1/tasks/info';
return this.http.post<ResponseMessage>(url, null);

View File

@ -1,6 +1,7 @@
import { ResponseMessage } from '../../shared/api.models';
import {
DeleteStrategy,
StreamFormat,
QualityNumber,
} from '../../settings/shared/setting.model';
@ -81,12 +82,18 @@ export interface TaskStatus {
readonly monitor_enabled: boolean;
readonly recorder_enabled: boolean;
readonly running_status: RunningStatus;
readonly elapsed: number;
readonly data_count: number;
readonly data_rate: number;
readonly danmu_count: number;
readonly stream_url: string;
readonly stream_host: string;
readonly dl_total: number;
readonly dl_rate: number;
readonly rec_elapsed: number;
readonly rec_total: number;
readonly rec_rate: number;
readonly danmu_total: number;
readonly danmu_rate: number;
readonly real_stream_format: StreamFormat;
readonly real_quality_number: QualityNumber;
readonly recording_path: string | null;
readonly postprocessor_status: PostprocessorStatus;
readonly postprocessing_path: string | null;
readonly postprocessing_progress: Progress | null;
@ -102,15 +109,185 @@ export interface TaskParam {
readonly cookie: string;
readonly danmu_uname: boolean;
readonly record_gift_send: boolean;
readonly record_free_gifts: boolean;
readonly record_guard_buy: boolean;
readonly record_super_chat: boolean;
readonly save_raw_danmaku: boolean;
readonly stream_format: StreamFormat;
readonly quality_number: QualityNumber;
readonly read_timeout: number;
readonly disconnection_timeout: number;
readonly buffer_size: number;
readonly save_cover: boolean;
readonly remux_to_mp4: boolean;
readonly inject_extra_metadata: boolean;
readonly delete_source: DeleteStrategy;
}
export interface VideoStreamProfile {
index?: number;
codec_name?: string;
codec_long_name?: string;
profile?: string;
codec_type?: string;
codec_tag_string?: string;
codec_tag?: string;
width?: number;
height?: number;
coded_width?: number;
coded_height?: number;
closed_captions?: number;
film_grain?: number;
has_b_frames?: number;
pix_fmt?: string;
level?: number;
chroma_location?: string;
field_order?: string;
refs?: number;
is_avc?: string;
nal_length_size?: string;
r_frame_rate?: string;
avg_frame_rate?: string;
time_base?: string;
start_pts?: number;
start_time?: string;
bit_rate?: string;
bits_per_raw_sample?: string;
extradata_size?: number;
disposition?: {
default?: number;
dub?: number;
original?: number;
comment?: number;
lyrics?: number;
karaoke?: number;
forced?: number;
hearing_impaired?: number;
visual_impaired?: number;
clean_effects?: number;
attached_pic?: number;
timed_thumbnails?: number;
captions?: number;
descriptions?: number;
metadata?: number;
dependent?: number;
still_image?: number;
};
tags?: {
language?: string;
handler_name?: string;
vendor_id?: string;
encoder?: string;
};
}
export interface AudioStreamProfile {
index?: number;
codec_name?: string;
codec_long_name?: string;
profile?: string;
codec_type?: string;
codec_tag_string?: string;
codec_tag?: string;
sample_fmt?: string;
sample_rate?: string;
channels?: number;
channel_layout?: string;
bits_per_sample?: number;
r_frame_rate?: string;
avg_frame_rate?: string;
time_base?: string;
start_pts?: number;
start_time?: string;
duration_ts?: number;
duration?: string;
bit_rate?: string;
extradata_size?: number;
disposition?: {
default?: number;
dub?: number;
original?: number;
comment?: number;
lyrics?: number;
karaoke?: number;
forced?: number;
hearing_impaired?: number;
visual_impaired?: number;
clean_effects?: number;
attached_pic?: number;
timed_thumbnails?: number;
captions?: number;
descriptions?: number;
metadata?: number;
dependent?: number;
still_image?: number;
};
}
export interface Metadata {
hasAudio: boolean;
hasVideo: boolean;
hasMetadata: boolean;
hasKeyframes: boolean;
canSeekToEnd: boolean;
duration: number;
datasize: number;
filesize: number;
audiosize: number;
audiocodecid: number;
audiodatarate: number;
audiosamplerate: 3;
audiosamplesize: 1;
stereo: boolean;
videosize: number;
framerate: number;
videocodecid: 7;
videodatarate: number;
width: number;
height: number;
lasttimestamp: number;
lastkeyframelocation: number;
lastkeyframetimestamp: number;
keyframes: {
times: number[];
filepositions: number[];
};
}
export interface StreamProfile {
streams?: [VideoStreamProfile, AudioStreamProfile];
format?: {
filename?: string;
nb_streams?: number;
nb_programs?: number;
format_name?: string;
format_long_name?: string;
start_time?: string;
duration?: string;
probe_score?: number;
tags?: {
displayWidth?: string;
displayHeight?: string;
fps?: string;
profile?: string;
level?: string;
videocodecreal?: string;
cdn_ip?: string;
Server?: string;
Rawdata?: string;
encoder?: string;
string?: string;
mau?: string;
major_brand?: string;
minor_version?: string;
compatible_brands?: string;
};
};
}
export enum VideoFileStatus {
RECORDING = 'recording',
REMUXING = 'remuxing',

View File

@ -4,7 +4,7 @@
<span
class="status-indicator"
nz-tooltip
nzTooltipTitle="正在录制"
nzTooltipTitle="正在录制"
nzTooltipPlacement="top"
>
<i></i>
@ -12,39 +12,39 @@
<span
class="time-elapsed"
nz-tooltip
nzTooltipTitle="开始录制到现在过去的时间"
nzTooltipTitle="录制用时"
nzTooltipPlacement="top"
>
{{ status.elapsed | duration }}
{{ status.rec_elapsed | duration }}
</span>
<span
class="data-rate"
nz-tooltip
nzTooltipTitle="当前实时录制速度"
nzTooltipTitle="录制速度"
nzTooltipPlacement="top"
>
{{ status.data_rate | speed }}
{{ status.rec_rate | datarate: { spacer: "" } }}
</span>
<span
class="data-count"
nz-tooltip
nzTooltipTitle="已录制的数据"
nzTooltipTitle="录制总计"
nzTooltipPlacement="top"
>
{{ status.data_count | filesize: { spacer: "" } }}
{{ status.rec_total | filesize: { spacer: "" } }}
</span>
<span
class="danmu-count"
nz-tooltip
nzTooltipTitle="弹幕数量:{{ status.danmu_count | number: '1.0-0' }}"
nzTooltipTitle="弹幕总计:{{ status.danmu_total | number: '1.0-0' }}"
nzTooltipPlacement="top"
>
{{ status.danmu_count | number: "1.0-0" }}
{{ status.danmu_total | number: "1.0-0" }}
</span>
<span
class="quality"
nz-tooltip
nzTooltipTitle="当前录制画质"
nzTooltipTitle="录制画质"
nzTooltipPlacement="leftTop"
>
{{ status.real_quality_number | quality }}

View File

@ -23,6 +23,12 @@
[taskStatus]="taskData.task_status"
></app-task-recording-detail>
<app-task-network-detail
*ngIf="taskData"
[loading]="loading"
[taskStatus]="taskData.task_status"
></app-task-network-detail>
<app-task-postprocessing-detail
*ngIf="taskData?.task_status?.postprocessing_path"
[loading]="loading"

View File

@ -3,6 +3,7 @@ import {
OnInit,
ChangeDetectionStrategy,
ChangeDetectorRef,
OnDestroy,
} from '@angular/core';
import { ActivatedRoute, ParamMap, Router } from '@angular/router';
import { HttpErrorResponse } from '@angular/common/http';
@ -25,7 +26,7 @@ import {
styleUrls: ['./task-detail.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class TaskDetailComponent implements OnInit {
export class TaskDetailComponent implements OnInit, OnDestroy {
roomId!: number;
taskData!: TaskData;
videoFileDetails: VideoFileDetail[] = [];

View File

@ -0,0 +1,41 @@
<nz-card nzTitle="网络详情" [nzLoading]="loading">
<div class="statistics">
<nz-statistic
class="stream-host"
[nzTitle]="'流主机'"
[nzValueTemplate]="streamHost"
></nz-statistic>
<ng-template #streamHost>{{ taskStatus.stream_host }}</ng-template>
<nz-statistic
[nzTitle]="'流格式'"
[nzValueTemplate]="realStreamFormat"
></nz-statistic>
<ng-template #realStreamFormat>{{
taskStatus.real_stream_format
}}</ng-template>
<nz-statistic
[nzTitle]="'下载速度'"
[nzValueTemplate]="downloadRate"
></nz-statistic>
<ng-template #downloadRate>{{
taskStatus.dl_rate * 8 | datarate: { bitrate: true }
}}</ng-template>
<nz-statistic
[nzTitle]="'下载总计'"
[nzValueTemplate]="downloadTotal"
></nz-statistic>
<ng-template #downloadTotal>{{
taskStatus.dl_total | filesize: { spacer: " " }
}}</ng-template>
</div>
<div
class="dl-rate-chart"
echarts
[loading]="loading"
[options]="initialChartOptions"
[merge]="updatedChartOptions"
></div>
</nz-card>

View File

@ -0,0 +1,39 @@
$grid-width: 200px;
.statistics {
--grid-width: #{$grid-width};
display: grid;
grid-template-columns: repeat(auto-fill, var(--grid-width));
gap: 1em;
justify-content: center;
margin: 0 auto;
@media screen and (max-width: 1024px) {
--grid-width: 180px;
}
@media screen and (max-width: 720px) {
--grid-width: 160px;
}
@media screen and (max-width: 680px) {
--grid-width: 140px;
}
@media screen and (max-width: 480px) {
--grid-width: 120px;
}
}
.stream-host {
grid-column: 1 / 3;
grid-row: 1;
}
.dl-rate-chart {
width: 100%;
height: 300px;
margin: 0;
// margin-top: 2em;
}

View File

@ -0,0 +1,25 @@
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { TaskNetworkDetailComponent } from './task-network-detail.component';
describe('TaskNetworkDetailComponent', () => {
let component: TaskNetworkDetailComponent;
let fixture: ComponentFixture<TaskNetworkDetailComponent>;
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [ TaskNetworkDetailComponent ]
})
.compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(TaskNetworkDetailComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});

View File

@ -0,0 +1,135 @@
import {
Component,
ChangeDetectionStrategy,
Input,
ChangeDetectorRef,
OnChanges,
} from '@angular/core';
import { EChartsOption } from 'echarts';
import { RunningStatus, TaskStatus } from '../../shared/task.model';
import { toBitRateString } from '../../../shared/utils';
interface ChartDataItem {
name: string;
value: [string, number];
}
@Component({
selector: 'app-task-network-detail',
templateUrl: './task-network-detail.component.html',
styleUrls: ['./task-network-detail.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class TaskNetworkDetailComponent implements OnChanges {
@Input() loading: boolean = true;
@Input() taskStatus!: TaskStatus;
initialChartOptions: EChartsOption = {};
updatedChartOptions: EChartsOption = {};
private chartData: ChartDataItem[] = [];
constructor(private changeDetector: ChangeDetectorRef) {
this.initChartOptions();
}
ngOnChanges(): void {
if (this.taskStatus.running_status === RunningStatus.RECORDING) {
this.updateChartOptions();
}
}
private initChartOptions(): void {
const timestamp = Date.now();
for (let i = 60 - 1; i >= 0; i--) {
const date = new Date(timestamp - i * 1000);
this.chartData.push({
name: date.toLocaleString('zh-CN', { hour12: false }),
value: [date.toISOString(), 0],
});
}
this.initialChartOptions = {
title: {
// text: '下载速度',
},
tooltip: {
trigger: 'axis',
formatter: (params: any) => {
const param = params[0] as ChartDataItem;
return `
<div>
<div>
${new Date(param.name).toLocaleTimeString('zh-CN', {
hour12: false,
})}
</div>
<div>${toBitRateString(param.value[1])}</div>
</div>
`;
},
axisPointer: {
animation: false,
},
},
xAxis: {
type: 'time',
name: '时间',
min: 'dataMin',
max: 'dataMax',
splitLine: {
show: true,
},
},
yAxis: {
type: 'value',
name: '下载速度',
// boundaryGap: [0, '100%'],
splitLine: {
show: true,
},
axisLabel: {
formatter: function (value: number) {
return toBitRateString(value);
},
},
},
series: [
{
name: '下载速度',
type: 'line',
showSymbol: false,
smooth: true,
lineStyle: {
width: 1,
},
areaStyle: {
opacity: 0.2,
},
data: this.chartData,
},
],
};
}
private updateChartOptions(): void {
const date = new Date();
this.chartData.push({
name: date.toLocaleString('zh-CN', { hour12: false }),
value: [date.toISOString(), this.taskStatus.dl_rate * 8],
});
this.chartData.shift();
this.updatedChartOptions = {
series: [
{
data: this.chartData,
},
],
};
this.changeDetector.markForCheck();
}
}

View File

@ -2,23 +2,50 @@
<div class="statistics">
<nz-statistic
[nzTitle]="'录制用时'"
[nzValue]="taskStatus.elapsed | duration"
[nzValueTemplate]="recordingElapsed"
></nz-statistic>
<ng-template #recordingElapsed>{{
taskStatus.rec_elapsed | duration
}}</ng-template>
<nz-statistic
[nzTitle]="'录制速度'"
[nzValue]="taskStatus.data_rate | speed"
[nzValueTemplate]="recordingRate"
></nz-statistic>
<ng-template #recordingRate>{{
taskStatus.rec_rate | datarate
}}</ng-template>
<nz-statistic
[nzTitle]="'已录数据'"
[nzValue]="taskStatus.data_count | filesize: { spacer: '' }"
[nzTitle]="'录制总计'"
[nzValueTemplate]="recordedTotal"
></nz-statistic>
<ng-template #recordedTotal>{{
taskStatus.rec_total | filesize: { spacer: " " }
}}</ng-template>
<nz-statistic
[nzTitle]="'弹幕数量'"
[nzValue]="(taskStatus.danmu_count | number: '1.0-2')!"
[nzTitle]="'录制画质'"
[nzValueTemplate]="recordingQuality"
></nz-statistic>
<ng-template #recordingQuality>{{
(taskStatus.real_quality_number | quality)! +
" " +
"(" +
taskStatus.real_quality_number +
")"
}}</ng-template>
<nz-statistic
[nzTitle]="'所录画质'"
[nzValue]="(taskStatus.real_quality_number | quality)!"
[nzTitle]="'弹幕总计'"
[nzValue]="(taskStatus.danmu_total | number: '1.0-2')!"
></nz-statistic>
</div>
<div
class="rec-rate-chart"
echarts
[loading]="loading"
[options]="initialChartOptions"
[merge]="updatedChartOptions"
></div>
</nz-card>

View File

@ -25,3 +25,10 @@ $grid-width: 200px;
--grid-width: 120px;
}
}
.rec-rate-chart {
width: 100%;
height: 300px;
margin: 0;
// margin-top: 2em;
}

View File

@ -1,11 +1,20 @@
import {
Component,
OnInit,
ChangeDetectionStrategy,
Input,
ChangeDetectorRef,
OnChanges,
} from '@angular/core';
import { TaskStatus } from '../../shared/task.model';
import { EChartsOption } from 'echarts';
import { RunningStatus, TaskStatus } from '../../shared/task.model';
import { toByteRateString } from '../../../shared/utils';
interface ChartDataItem {
name: string;
value: [string, number];
}
@Component({
selector: 'app-task-recording-detail',
@ -13,11 +22,115 @@ import { TaskStatus } from '../../shared/task.model';
styleUrls: ['./task-recording-detail.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class TaskRecordingDetailComponent implements OnInit {
export class TaskRecordingDetailComponent implements OnChanges {
@Input() loading: boolean = true;
@Input() taskStatus!: TaskStatus;
constructor() {}
initialChartOptions: EChartsOption = {};
updatedChartOptions: EChartsOption = {};
ngOnInit(): void {}
private chartData: ChartDataItem[] = [];
constructor(private changeDetector: ChangeDetectorRef) {
this.initChartOptions();
}
ngOnChanges(): void {
if (this.taskStatus.running_status === RunningStatus.RECORDING) {
this.updateChartOptions();
}
}
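// Same approach as the bit-rate chart: seed 60 seconds of zeroed samples and build the static options, here formatted as byte rates (recording speed).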
private initChartOptions(): void {
const timestamp = Date.now();
for (let i = 60 - 1; i >= 0; i--) {
const date = new Date(timestamp - i * 1000);
this.chartData.push({
name: date.toLocaleString('zh-CN', { hour12: false }),
value: [date.toISOString(), 0],
});
}
this.initialChartOptions = {
title: {
// text: '录制速度',
},
tooltip: {
trigger: 'axis',
formatter: (params: any) => {
const param = params[0] as ChartDataItem;
return `
<div>
<div>
${new Date(param.name).toLocaleTimeString('zh-CN', {
hour12: false,
})}
</div>
<div>${toByteRateString(param.value[1])}</div>
</div>
`;
},
axisPointer: {
animation: false,
},
},
xAxis: {
type: 'time',
name: '时间',
min: 'dataMin',
max: 'dataMax',
splitLine: {
show: true,
},
},
yAxis: {
type: 'value',
name: '录制速度',
// boundaryGap: [0, '100%'],
splitLine: {
show: true,
},
axisLabel: {
formatter: (value: number) => {
return toByteRateString(value);
},
},
},
series: [
{
name: '录制速度',
type: 'line',
showSymbol: false,
smooth: true,
lineStyle: {
width: 1,
},
areaStyle: {
opacity: 0.2,
},
data: this.chartData,
},
],
};
}
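// Push the newest rec_rate sample (a byte rate) and drop the oldest, then merge the new data into the chart.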
private updateChartOptions(): void {
const date = new Date();
this.chartData.push({
name: date.toLocaleString('zh-CN', { hour12: false }),
value: [date.toISOString(), this.taskStatus.rec_rate],
});
this.chartData.shift();
this.updatedChartOptions = {
series: [
{
data: this.chartData,
},
],
};
this.changeDetector.markForCheck();
}
}

View File

@ -35,6 +35,11 @@
{{ data.room_info.title }}
</h2>
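<!-- Recording info panel, toggled from the task dropdown menu; its visibility is persisted per room. -->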
<app-info-panel
*ngIf="showInfoPanel"
[data]="data"
(close)="showInfoPanel = false"
></app-info-panel>
<app-status-display [status]="data.task_status"></app-status-display>
</div>
</a>
@ -199,6 +204,7 @@
<li nz-menu-item (click)="stopTask(true)">强制停止任务</li>
<li nz-menu-item (click)="disableRecorder(true)">强制关闭录制</li>
<li nz-menu-item (click)="updateTaskInfo()">刷新数据</li>
<li nz-menu-item (click)="showInfoPanel = true">显示录制信息</li>
</ul>
</ng-template>
</nz-dropdown-menu>

View File

@ -26,6 +26,7 @@ import {
GlobalTaskSettings,
TaskOptionsIn,
} from '../../settings/shared/setting.model';
import { TaskSettingsService } from '../shared/services/task-settings.service';
@Component({
selector: 'app-task-item',
@ -54,7 +55,8 @@ export class TaskItemComponent implements OnChanges, OnDestroy {
private message: NzMessageService,
private modal: NzModalService,
private settingService: SettingService,
private taskManager: TaskManagerService
private taskManager: TaskManagerService,
private appTaskSettings: TaskSettingsService
) {
breakpointObserver
.observe(breakpoints[0])
@ -73,6 +75,14 @@ export class TaskItemComponent implements OnChanges, OnDestroy {
return !this.data.task_status.monitor_enabled;
}
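// Persist the info panel's visibility per room via TaskSettingsService.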
get showInfoPanel() {
return Boolean(this.appTaskSettings.getSettings(this.roomId).showInfoPanel);
}
set showInfoPanel(value: boolean) {
this.appTaskSettings.updateSettings(this.roomId, { showInfoPanel: value });
}
ngOnChanges(changes: SimpleChanges): void {
console.debug('[ngOnChanges]', this.roomId, changes);
this.stopped =

View File

@ -1,13 +1,11 @@
@use '../../shared/styles/layout';
@use "../../shared/styles/layout";
$card-width: 400px;
$grid-gutter: 12px;
$max-columns: 3;
:host {
--card-width: #{$card-width};
--grid-gutter: #{$grid-gutter};
--max-columns: #{$max-columns};
@extend %inner-content;
padding: var(--grid-gutter);
@ -24,13 +22,7 @@ $max-columns: 3;
gap: var(--grid-gutter);
justify-content: center;
max-width: min(
100%,
calc(
var(--card-width) * var(--max-columns) + var(--grid-gutter) *
(var(--max-columns) - 1)
)
);
max-width: min(100%);
margin: 0 auto;
}

View File

@ -110,6 +110,46 @@
<div ngModelGroup="recorder" class="form-group recorder">
<h2>录制</h2>
<nz-form-item class="setting-item">
<nz-form-label
class="setting-label"
nzNoColon
[nzTooltipTitle]="streamFormatTip"
>直播流格式</nz-form-label
>
<ng-template #streamFormatTip>
<p>
选择要录制的直播流格式<br />
<br />
FLV 网络不稳定容易中断丢失数据 <br />
HLS (ts) 基本不受本地网络影响 <br />
HLS (fmp4) 只有少数直播间支持 <br />
<br />
P.S.<br />
非 FLV 格式需要 ffmpeg<br />
HLS (fmp4) 不支持会自动切换到 HLS (ts)<br />
</p>
</ng-template>
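<!-- The select is disabled while the per-room override is null, in which case the global streamFormat applies. -->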
<nz-form-control class="setting-control select">
<nz-select
name="streamFormat"
[(ngModel)]="model.recorder.streamFormat"
[disabled]="options.recorder.streamFormat === null"
[nzOptions]="streamFormatOptions"
>
</nz-select>
</nz-form-control>
<label
nz-checkbox
[nzChecked]="options.recorder.streamFormat !== null"
(nzCheckedChange)="
options.recorder.streamFormat = $event
? globalSettings.recorder.streamFormat
: null
"
>覆盖全局设置</label
>
</nz-form-item>
<nz-form-item class="setting-item">
<nz-form-label
class="setting-label"
@ -482,8 +522,9 @@
>
<ng-template #deleteSourceTip>
<p>
自动: 转换成功才删除源文件<br />
从不: 转换后总是保留源文件<br />
自动: 没出错就删除源文件<br />
谨慎: 没出错且没警告才删除源文件<br />
从不: 总是保留源文件<br />
</p>
</ng-template>
<nz-form-control class="setting-control select">

View File

@ -23,6 +23,7 @@ import {
PATH_TEMPLATE_PATTERN,
FILESIZE_LIMIT_OPTIONS,
DURATION_LIMIT_OPTIONS,
STREAM_FORMAT_OPTIONS,
QUALITY_OPTIONS,
TIMEOUT_OPTIONS,
DISCONNECTION_TIMEOUT_OPTIONS,
@ -63,6 +64,9 @@ export class TaskSettingsDialogComponent implements OnChanges {
readonly durationLimitOptions = cloneDeep(DURATION_LIMIT_OPTIONS) as Mutable<
typeof DURATION_LIMIT_OPTIONS
>;
readonly streamFormatOptions = cloneDeep(STREAM_FORMAT_OPTIONS) as Mutable<
typeof STREAM_FORMAT_OPTIONS
>;
readonly qualityOptions = cloneDeep(QUALITY_OPTIONS) as Mutable<
typeof QUALITY_OPTIONS
>;

View File

@ -30,6 +30,7 @@ import { NzProgressModule } from 'ng-zorro-antd/progress';
import { NzTableModule } from 'ng-zorro-antd/table';
import { NzStatisticModule } from 'ng-zorro-antd/statistic';
import { NzDescriptionsModule } from 'ng-zorro-antd/descriptions';
import { NgxEchartsModule } from 'ngx-echarts';
import { SharedModule } from '../shared/shared.module';
import { TasksRoutingModule } from './tasks-routing.module';
@ -47,6 +48,9 @@ import { TaskUserInfoDetailComponent } from './task-detail/task-user-info-detail
import { TaskRoomInfoDetailComponent } from './task-detail/task-room-info-detail/task-room-info-detail.component';
import { TaskPostprocessingDetailComponent } from './task-detail/task-postprocessing-detail/task-postprocessing-detail.component';
import { TaskRecordingDetailComponent } from './task-detail/task-recording-detail/task-recording-detail.component';
import { TaskNetworkDetailComponent } from './task-detail/task-network-detail/task-network-detail.component';
import { InfoPanelComponent } from './info-panel/info-panel.component';
import { WaveGraphComponent } from './info-panel/wave-graph/wave-graph.component';
@NgModule({
declarations: [
@ -64,6 +68,9 @@ import { TaskRecordingDetailComponent } from './task-detail/task-recording-detai
TaskRoomInfoDetailComponent,
TaskPostprocessingDetailComponent,
TaskRecordingDetailComponent,
TaskNetworkDetailComponent,
InfoPanelComponent,
WaveGraphComponent,
],
imports: [
CommonModule,
@ -99,6 +106,9 @@ import { TaskRecordingDetailComponent } from './task-detail/task-recording-detai
NzTableModule,
NzStatisticModule,
NzDescriptionsModule,
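// ngx-echarts loads the echarts bundle lazily via the dynamic import below.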
NgxEchartsModule.forRoot({
echarts: () => import('echarts'),
}),
TasksRoutingModule,
SharedModule,