Mirror of https://github.com/acgnhiki/blrec.git (synced 2024-12-26 00:10:09 +08:00)

Commit a460c1e9be (parent eaa60a0817)

    refactor: refactor stream recording ...

.flake8 (2 lines changed)
@@ -1,6 +1,6 @@
 [flake8]
 max-line-length = 88
-ignore = D203, W504, W503
+ignore = D203, W504, W503, E203
 exclude =
     __*,
     .*,
@@ -54,7 +54,7 @@ install_requires =
     m3u8 >= 1.0.0, < 2.0.0
     jsonpath == 0.82
     psutil >= 5.8.0, < 6.0.0
-    rx >= 3.2.0, < 4.0.0
+    reactivex >= 4.0.0, < 5.0.0
     bitarray >= 2.2.5, < 3.0.0
     brotli >= 1.0.9, < 2.0.0
     uvicorn[standard] >= 0.15.0, < 0.16.0
@@ -65,6 +65,7 @@ install_requires =
     mypy == 0.910  # https://github.com/samuelcolvin/pydantic/issues/3528
     isort >= 5.10.1
     black >= 22.3.0
+    autoflake >= 1.4

     setuptools >= 59.4.0
     wheel >= 0.37
@@ -7,8 +7,7 @@ import attr
 import psutil

 from . import __prog__, __version__
-from .flv.data_analyser import MetaData
-from .core.stream_analyzer import StreamProfile
+from .flv.operators import MetaData, StreamProfile
 from .disk_space import SpaceMonitor, SpaceReclaimer
 from .bili.helpers import ensure_room_id
 from .task import (
@@ -34,3 +34,7 @@ class NoStreamCodecAvailable(Exception):

 class NoStreamQualityAvailable(Exception):
     pass
+
+
+class NoAlternativeStreamAvailable(Exception):
+    pass
@@ -1,30 +1,27 @@
-import re
-import json
 import asyncio
+import json
+import re
 from typing import Dict, List, cast

 import aiohttp
 from jsonpath import jsonpath
-from tenacity import (
-    retry,
-    wait_exponential,
-    stop_after_delay,
-    retry_if_exception_type,
-)
+from tenacity import retry, retry_if_exception_type, stop_after_delay, wait_exponential

 from .api import AppApi, WebApi
-from .models import LiveStatus, RoomInfo, UserInfo
-from .typing import (
-    ApiPlatform, StreamFormat, QualityNumber, StreamCodec, ResponseData
-)
 from .exceptions import (
-    LiveRoomHidden, LiveRoomLocked, LiveRoomEncrypted, NoStreamAvailable,
-    NoStreamFormatAvailable, NoStreamCodecAvailable, NoStreamQualityAvailable,
+    LiveRoomEncrypted,
+    LiveRoomHidden,
+    LiveRoomLocked,
+    NoAlternativeStreamAvailable,
+    NoStreamAvailable,
+    NoStreamCodecAvailable,
+    NoStreamFormatAvailable,
+    NoStreamQualityAvailable,
 )
+from .models import LiveStatus, RoomInfo, UserInfo
+from .typing import ApiPlatform, QualityNumber, ResponseData, StreamCodec, StreamFormat


-__all__ = 'Live',
+__all__ = ('Live',)


 _INFO_PATTERN = re.compile(
@@ -34,9 +31,7 @@ _LIVE_STATUS_PATTERN = re.compile(rb'"live_status"\s*:\s*(\d)')


 class Live:
-    def __init__(
-        self, room_id: int, user_agent: str = '', cookie: str = ''
-    ) -> None:
+    def __init__(self, room_id: int, user_agent: str = '', cookie: str = '') -> None:
         self._room_id = room_id
         self._user_agent = user_agent
         self._cookie = cookie
@@ -142,9 +137,7 @@ class Live:
         self._room_info = await self.get_room_info()

     @retry(
-        retry=retry_if_exception_type((
-            asyncio.TimeoutError, aiohttp.ClientError,
-        )),
+        retry=retry_if_exception_type((asyncio.TimeoutError, aiohttp.ClientError)),
         wait=wait_exponential(max=10),
         stop=stop_after_delay(60),
     )
@@ -158,9 +151,7 @@ class Live:
         return RoomInfo.from_data(room_info_data)

     @retry(
-        retry=retry_if_exception_type((
-            asyncio.TimeoutError, aiohttp.ClientError,
-        )),
+        retry=retry_if_exception_type((asyncio.TimeoutError, aiohttp.ClientError)),
         wait=wait_exponential(max=10),
         stop=stop_after_delay(60),
     )
@@ -176,14 +167,15 @@ class Live:
         # the timestamp on the server at the moment in seconds
         return await self._webapi.get_timestamp()

-    async def get_live_stream_urls(
+    async def get_live_stream_url(
         self,
         qn: QualityNumber = 10000,
         *,
         api_platform: ApiPlatform = 'android',
         stream_format: StreamFormat = 'flv',
         stream_codec: StreamCodec = 'avc',
-    ) -> List[str]:
+        select_alternative: bool = False,
+    ) -> str:
         if api_platform == 'android':
             info = await self._appapi.get_room_play_info(self._room_id, qn)
         else:
@@ -193,23 +185,30 @@ class Live:

         streams = jsonpath(info, '$.playurl_info.playurl.stream[*]')
         if not streams:
-            raise NoStreamAvailable(qn, stream_format, stream_codec)
-        formats = jsonpath(streams, f'$[*].format[?(@.format_name == "{stream_format}")]')  # noqa
+            raise NoStreamAvailable(stream_format, stream_codec, qn)
+        formats = jsonpath(
+            streams, f'$[*].format[?(@.format_name == "{stream_format}")]'
+        )
         if not formats:
-            raise NoStreamFormatAvailable(qn, stream_format, stream_codec)
-        codecs = jsonpath(formats, f'$[*].codec[?(@.codec_name == "{stream_codec}")]')  # noqa
+            raise NoStreamFormatAvailable(stream_format, stream_codec, qn)
+        codecs = jsonpath(formats, f'$[*].codec[?(@.codec_name == "{stream_codec}")]')
         if not codecs:
-            raise NoStreamCodecAvailable(qn, stream_format, stream_codec)
+            raise NoStreamCodecAvailable(stream_format, stream_codec, qn)
         codec = codecs[0]

         accept_qn = cast(List[QualityNumber], codec['accept_qn'])
         if qn not in accept_qn or codec['current_qn'] != qn:
-            raise NoStreamQualityAvailable(qn, stream_format, stream_codec)
+            raise NoStreamQualityAvailable(stream_format, stream_codec, qn)

-        return [
-            i['host'] + codec['base_url'] + i['extra']
-            for i in codec['url_info']
-        ]
+        urls = [i['host'] + codec['base_url'] + i['extra'] for i in codec['url_info']]
+
+        if not select_alternative:
+            return urls[0]
+
+        try:
+            return urls[1]
+        except IndexError:
+            raise NoAlternativeStreamAvailable(stream_format, stream_codec, qn)

     def _check_room_play_info(self, data: ResponseData) -> None:
         if data.get('is_hidden'):
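Note: the renamed get_live_stream_url() now returns a single URL instead of the old list; select_alternative=True selects the second URL the API returned and raises NoAlternativeStreamAvailable when the server offered no backup. A minimal caller sketch (hypothetical room id and setup, not part of this commit):

import asyncio

from blrec.bili.live import Live


async def main() -> None:
    live = Live(room_id=23058)  # hypothetical room id
    await live.init()  # assumption: Live is initialized asynchronously before use
    primary = await live.get_live_stream_url(qn=10000, stream_format='flv')
    backup = await live.get_live_stream_url(
        qn=10000, stream_format='flv', select_alternative=True
    )
    print(primary, backup)


asyncio.run(main())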
@@ -13,7 +13,7 @@ from tenacity import (
 from .. import __version__, __prog__, __github__
 from .danmaku_receiver import DanmakuReceiver, DanmuMsg
 from .stream_recorder import StreamRecorder, StreamRecorderEventListener
-from .statistics import StatisticsCalculator
+from .statistics import Statistics
 from ..bili.live import Live
 from ..exception import exception_callback, submit_exception
 from ..event.event_emitter import EventListener, EventEmitter
@@ -73,19 +73,19 @@ class DanmakuDumper(
         self._lock: asyncio.Lock = asyncio.Lock()
         self._path: Optional[str] = None
         self._files: List[str] = []
-        self._calculator = StatisticsCalculator(interval=60)
+        self._statistics = Statistics(interval=60)

     @property
     def danmu_total(self) -> int:
-        return self._calculator.count
+        return self._statistics.count

     @property
     def danmu_rate(self) -> float:
-        return self._calculator.rate
+        return self._statistics.rate

     @property
     def elapsed(self) -> float:
-        return self._calculator.elapsed
+        return self._statistics.elapsed

     @property
     def dumping_path(self) -> Optional[str]:
@@ -145,7 +145,7 @@ class DanmakuDumper(
     async def _do_dump(self) -> None:
         assert self._path is not None
         logger.debug('Started dumping danmaku')
-        self._calculator.reset()
+        self._statistics.reset()

         try:
             async with DanmakuWriter(self._path) as writer:
@@ -169,14 +169,14 @@ class DanmakuDumper(
             logger.info(f"Danmaku file completed: '{self._path}'")
             await self._emit('danmaku_file_completed', self._path)
             logger.debug('Stopped dumping danmaku')
-            self._calculator.freeze()
+            self._statistics.freeze()

     async def _dumping_loop(self, writer: DanmakuWriter) -> None:
         while True:
             msg = await self._receiver.get_message()
             if isinstance(msg, DanmuMsg):
                 await writer.write_danmu(self._make_danmu(msg))
-                self._calculator.submit(1)
+                self._statistics.submit(1)
             elif isinstance(msg, GiftSendMsg):
                 if not self.record_gift_send:
                     continue
@@ -1,21 +1,14 @@
-import io
 import logging
 from typing import Optional
-from urllib.parse import urlparse

-import requests
-import urllib3
-from tenacity import TryAgain
-from tqdm import tqdm
+from reactivex.scheduler import NewThreadScheduler

 from ..bili.live import Live
 from ..bili.typing import QualityNumber
-from ..flv.exceptions import FlvDataError, FlvStreamCorruptedError
-from ..flv.stream_processor import StreamProcessor
-from .stream_analyzer import StreamProfile
-from .stream_recorder_impl import StreamProxy, StreamRecorderImpl
+from ..flv import operators as flv_ops
+from .stream_recorder_impl import StreamRecorderImpl

-__all__ = 'FLVStreamRecorderImpl',
+__all__ = ('FLVStreamRecorderImpl',)


 logger = logging.getLogger(__name__)
@@ -49,113 +42,29 @@ class FLVStreamRecorderImpl(StreamRecorderImpl):
         )

     def _run(self) -> None:
-        logger.debug('Stream recorder thread started')
-        try:
-            with tqdm(
-                desc='Recording',
-                unit='B',
-                unit_scale=True,
-                unit_divisor=1024,
-                postfix=self._make_pbar_postfix(),
-            ) as progress_bar:
-                self._progress_bar = progress_bar
-
-                self._stream_processor = StreamProcessor(
-                    self._file_manager,
-                    filesize_limit=self._filesize_limit,
-                    duration_limit=self._duration_limit,
-                    analyse_data=True,
-                    dedup_join=True,
-                    save_extra_metadata=True,
-                    backup_timestamp=True,
-                )
-
-                def update_size(size: int) -> None:
-                    progress_bar.update(size)
-                    self._rec_calculator.submit(size)
-
-                def update_stream_profile(profile: StreamProfile) -> None:
-                    self._stream_profile = profile
-
-                self._stream_processor.size_updates.subscribe(update_size)
-                self._stream_processor.stream_profile_updates.subscribe(
-                    update_stream_profile
-                )
-
-                with requests.Session() as self._session:
-                    self._main_loop()
-        except TryAgain:
-            pass
-        except Exception as e:
-            self._handle_exception(e)
-        finally:
-            self._stopped = True
-            if self._stream_processor is not None:
-                self._stream_processor.finalize()
-                self._stream_processor = None
-            self._progress_bar = None
-            self._dl_calculator.freeze()
-            self._rec_calculator.freeze()
-            self._emit_event('stream_recording_stopped')
-            logger.debug('Stream recorder thread stopped')
-
-    def _streaming_loop(self) -> None:
-        url = self._get_live_stream_url()
-
-        while not self._stopped:
-            try:
-                self._streaming(url)
-            except requests.exceptions.HTTPError as e:
-                # frequently occurred when the live just started or ended.
-                logger.warning(repr(e))
-                self._defer_retry(1, 'streaming_loop')
-                # the url may has been forbidden or expired
-                # when the status code is 404 or 403
-                if e.response.status_code in (403, 404):
-                    url = self._get_live_stream_url()
-            except requests.exceptions.Timeout as e:
-                logger.warning(repr(e))
-            except urllib3.exceptions.TimeoutError as e:
-                logger.warning(repr(e))
-            except urllib3.exceptions.ProtocolError as e:
-                # ProtocolError('Connection broken: IncompleteRead(
-                logger.warning(repr(e))
-            except requests.exceptions.ConnectionError as e:
-                logger.warning(repr(e))
-                self._wait_for_connection_error()
-            except (FlvDataError, FlvStreamCorruptedError) as e:
-                logger.warning(repr(e))
-                if not self._use_alternative_stream:
-                    self._use_alternative_stream = True
-                else:
-                    self._rotate_api_platform()
-                url = self._get_live_stream_url()
-
-    def _streaming(self, url: str) -> None:
-        logger.debug(f'Requesting live stream... {url}')
-        self._stream_url = url
-        self._stream_host = urlparse(url).hostname or ''
-
-        with self._session.get(
-            url,
-            stream=True,
-            headers=self._live.headers,
-            timeout=self.read_timeout,
-        ) as response:
-            logger.debug('Response received')
-            response.raise_for_status()
-
-            if self._stopped:
-                return
-
-            assert self._stream_processor is not None
-            self._stream_processor.set_metadata(self._make_metadata())
-
-            stream_proxy = StreamProxy(response.raw)
-            stream_proxy.size_updates.subscribe(
-                lambda n: self._dl_calculator.submit(n)
-            )
-
-            self._stream_processor.process_stream(
-                io.BufferedReader(stream_proxy, buffer_size=8192)
-            )
+        self._subscription = (
+            self._stream_param_holder.get_stream_params()  # type: ignore
+            .pipe(
+                self._stream_url_resolver,
+                self._stream_fetcher,
+                self._dl_statistics,
+                self._stream_parser,
+                self._request_exception_handler,
+                self._connection_error_handler,
+                flv_ops.process(),
+                self._cutter,
+                self._limiter,
+                self._join_point_extractor,
+                self._prober,
+                self._injector,
+                self._analyser,
+                self._dumper,
+                self._rec_statistics,
+                self._progress_bar,
+                self._exception_handler,
+            )
+            .subscribe(
+                on_completed=lambda: self._emit_event('stream_recording_completed'),
+                scheduler=NewThreadScheduler(self._thread_factory('StreamRecorder')),
+            )
+        )
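Note: the rewritten _run() replaces the hand-rolled download thread with a single reactivex subscription. A self-contained sketch of the scheduling behaviour it relies on (toy data, not from the commit): passing scheduler=NewThreadScheduler(...) to subscribe() runs the whole chain on a dedicated worker thread, and on_completed fires on that thread.

import threading

import reactivex
from reactivex.scheduler import NewThreadScheduler

done = threading.Event()
reactivex.of(1, 2, 3).subscribe(
    on_next=lambda i: print(i, 'on', threading.current_thread().name),
    on_completed=done.set,
    scheduler=NewThreadScheduler(),  # emissions happen on a new thread
)
done.wait()  # subscribe() itself returns immediately on the main thread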
@@ -1,51 +1,22 @@
-import io
 import logging
-import time
-from contextlib import suppress
-from datetime import datetime
-from queue import Empty, Queue
-from threading import Condition, Event, Lock, Thread
-from typing import Final, List, Optional, Set
-from urllib.parse import urlparse
+from typing import Optional

-import m3u8
-import requests
-import urllib3
-from m3u8.model import Segment
-from ordered_set import OrderedSet
-from tenacity import (
-    RetryError,
-    Retrying,
-    TryAgain,
-    retry,
-    retry_if_exception_type,
-    retry_if_not_exception_type,
-    retry_if_result,
-    stop_after_delay,
-    wait_exponential,
-)
-from tqdm import tqdm
+from reactivex import operators as ops
+from reactivex.scheduler import NewThreadScheduler

 from ..bili.live import Live
 from ..bili.typing import QualityNumber
-from ..flv.exceptions import FlvDataError, FlvStreamCorruptedError
-from ..flv.stream_processor import StreamProcessor
-from ..utils.mixins import SupportDebugMixin
-from .stream_analyzer import StreamProfile, ffprobe
-from .stream_recorder_impl import StreamProxy, StreamRecorderImpl
-from .stream_remuxer import StreamRemuxer
+from ..flv import operators as flv_ops
+from . import operators as core_ops
+from .stream_recorder_impl import StreamRecorderImpl

-__all__ = 'HLSStreamRecorderImpl',
+__all__ = ('HLSStreamRecorderImpl',)


 logger = logging.getLogger(__name__)


-class FailedToFetchSegments(Exception):
-    pass
-
-
-class HLSStreamRecorderImpl(StreamRecorderImpl, SupportDebugMixin):
+class HLSStreamRecorderImpl(StreamRecorderImpl):
     def __init__(
         self,
         live: Live,
@@ -71,417 +42,50 @@ class HLSStreamRecorderImpl(StreamRecorderImpl, SupportDebugMixin):
             filesize_limit=filesize_limit,
             duration_limit=duration_limit,
         )
-        self._init_for_debug(self._live.room_id)
-        self._init_section_data: Optional[bytes] = None
-        self._ready_to_fetch_segments = Condition()
-        self._failed_to_fetch_segments = Event()
-        self._stream_analysed_lock = Lock()
-        self._last_seg_uris: OrderedSet[str] = OrderedSet()
-        self._MAX_LAST_SEG_URIS: Final[int] = 30
+        self._playlist_fetcher = core_ops.PlaylistFetcher(self._live, self._session)
+        self._playlist_resolver = core_ops.PlaylistResolver()
+        self._segment_fetcher = core_ops.SegmentFetcher(self._live, self._session)
+        self._segment_remuxer = core_ops.SegmentRemuxer(live)

     def _run(self) -> None:
-        logger.debug('Stream recorder thread started')
-        try:
-            if self._debug:
-                path = '{}/playlist-{}-{}.m3u8'.format(
-                    self._debug_dir,
-                    self._live.room_id,
-                    datetime.now().strftime('%Y-%m-%d-%H%M%S-%f'),
-                )
-                self._playlist_debug_file = open(path, 'wt', encoding='utf-8')
-
-            self._session = requests.Session()
-            self._session.headers.update(self._live.headers)
-
-            self._stream_remuxer = StreamRemuxer(
-                self._live.room_id,
-                remove_filler_data=True,
-            )
-            self._segment_queue: Queue[Segment] = Queue(maxsize=1000)
-            self._segment_data_queue: Queue[bytes] = Queue(maxsize=100)
-            self._stream_host_available = Event()
-
-            self._segment_fetcher_thread = Thread(
-                target=self._run_segment_fetcher,
-                name=f'SegmentFetcher::{self._live.room_id}',
-                daemon=True,
-            )
-            self._segment_fetcher_thread.start()
-
-            self._segment_data_feeder_thread = Thread(
-                target=self._run_segment_data_feeder,
-                name=f'SegmentDataFeeder::{self._live.room_id}',
-                daemon=True,
-            )
-            self._segment_data_feeder_thread.start()
-
-            self._stream_processor_thread = Thread(
-                target=self._run_stream_processor,
-                name=f'StreamProcessor::{self._live.room_id}',
-                daemon=True,
-            )
-            self._stream_processor_thread.start()
-
-            try:
-                self._main_loop()
-            finally:
-                if self._stream_processor is not None:
-                    self._stream_processor.cancel()
-                    self._stream_processor_thread.join(timeout=10)
-                self._segment_fetcher_thread.join(timeout=10)
-                self._segment_data_feeder_thread.join(timeout=10)
-                self._stream_remuxer.stop()
-                self._stream_remuxer.raise_for_exception()
-                self._last_seg_uris.clear()
-                del self._segment_queue
-                del self._segment_data_queue
-        except TryAgain:
-            pass
-        except Exception as e:
-            self._handle_exception(e)
-        finally:
-            self._stopped = True
-            with suppress(Exception):
-                self._playlist_debug_file.close()
-            self._emit_event('stream_recording_stopped')
-            logger.debug('Stream recorder thread stopped')
-
-    def _streaming_loop(self) -> None:
-        url = self._get_live_stream_url()
-
-        while not self._stopped:
-            try:
-                self._playlist_fetcher(url)
-            except requests.exceptions.HTTPError as e:
-                # frequently occurred when the live just started or ended.
-                logger.warning(repr(e))
-                self._defer_retry(1, 'streaming_loop')
-                # the url may has been forbidden or expired
-                # when the status code is 404 or 403
-                if e.response.status_code in (403, 404):
-                    url = self._get_live_stream_url()
-            except requests.exceptions.ConnectionError as e:
-                logger.warning(repr(e))
-                self._wait_for_connection_error()
-            except FailedToFetchSegments:
-                url = self._get_live_stream_url()
-            except RetryError as e:
-                logger.warning(repr(e))
-
-    def _playlist_fetcher(self, url: str) -> None:
-        self._stream_url = url
-        self._stream_host = urlparse(url).hostname or ''
-        self._stream_host_available.set()
-        with self._stream_analysed_lock:
-            self._stream_analysed = False
-
-        while not self._stopped:
-            if self._failed_to_fetch_segments.is_set():
-                with self._segment_queue.mutex:
-                    self._segment_queue.queue.clear()
-                with self._ready_to_fetch_segments:
-                    self._ready_to_fetch_segments.notify_all()
-                self._failed_to_fetch_segments.clear()
-                raise FailedToFetchSegments()
-
-            content = self._fetch_playlist(url)
-            playlist = m3u8.loads(content, uri=url)
-
-            if self._debug:
-                self._playlist_debug_file.write(content + '\n')
-
-            if playlist.is_variant:
-                url = sorted(
-                    playlist.playlists,
-                    key=lambda p: p.stream_info.bandwidth
-                )[-1].absolute_uri
-                logger.debug(f'playlist changed to variant playlist: {url}')
-                self._stream_url = url
-                self._stream_host = urlparse(url).hostname or ''
-                with self._stream_analysed_lock:
-                    self._stream_analysed = False
-                continue
-
-            curr_seg_uris: Set[str] = set()
-            for seg in playlist.segments:
-                curr_seg_uris.add(seg.uri)
-                if seg.uri not in self._last_seg_uris:
-                    self._segment_queue.put(seg, timeout=60)
-                    self._last_seg_uris.add(seg.uri)
-                    if len(self._last_seg_uris) > self._MAX_LAST_SEG_URIS:
-                        self._last_seg_uris.pop(0)
-
-            if (
-                self._last_seg_uris and
-                not curr_seg_uris.intersection(self._last_seg_uris)
-            ):
-                logger.debug(
-                    'segments broken!\n'
-                    f'last segments uris: {self._last_seg_uris}\n'
-                    f'current segments uris: {curr_seg_uris}'
-                )
-                with self._stream_analysed_lock:
-                    self._stream_analysed = False
-
-            if playlist.is_endlist:
-                logger.debug('playlist ended')
-
-            time.sleep(1)
-
-    def _run_segment_fetcher(self) -> None:
-        logger.debug('Segment fetcher thread started')
-        try:
-            self._segment_fetcher()
-        except Exception as e:
-            logger.exception(e)
-            self._handle_exception(e)
-        finally:
-            self._dl_calculator.freeze()
-            logger.debug('Segment fetcher thread stopped')
-
-    def _segment_fetcher(self) -> None:
-        assert self._stream_remuxer is not None
-        init_section = None
-        self._init_section_data = None
-        num_of_continuously_failed = 0
-        self._failed_to_fetch_segments.clear()
-
-        while not self._stopped:
-            try:
-                seg = self._segment_queue.get(timeout=1)
-            except Empty:
-                continue
-            for attempt in Retrying(
-                reraise=True,
-                retry=(
-                    retry_if_result(lambda r: not self._stopped) |
-                    retry_if_not_exception_type((OSError, NotImplementedError))
-                ),
-            ):
-                if attempt.retry_state.attempt_number > 3:
-                    break
-                with attempt:
-                    try:
-                        if (
-                            getattr(seg, 'init_section', None) and
-                            (
-                                not init_section or
-                                seg.init_section.uri != init_section.uri
-                            )
-                        ):
-                            data = self._fetch_segment(
-                                seg.init_section.absolute_uri
-                            )
-                            init_section = seg.init_section
-                            self._init_section_data = data
-                            self._segment_data_queue.put(data, timeout=60)
-                        data = self._fetch_segment(seg.absolute_uri)
-                        self._segment_data_queue.put(data, timeout=60)
-                    except requests.exceptions.HTTPError as e:
-                        logger.warning(f'Failed to fetch segment: {repr(e)}')
-                        if e.response.status_code in (403, 404, 599):
-                            num_of_continuously_failed += 1
-                            if num_of_continuously_failed >= 3:
-                                self._failed_to_fetch_segments.set()
-                                with self._ready_to_fetch_segments:
-                                    self._ready_to_fetch_segments.wait()
-                                num_of_continuously_failed = 0
-                                self._failed_to_fetch_segments.clear()
-                                break
-                    except requests.exceptions.ConnectionError as e:
-                        logger.warning(repr(e))
-                        self._wait_for_connection_error()
-                    except RetryError as e:
-                        logger.warning(repr(e))
-                        break
-                    else:
-                        num_of_continuously_failed = 0
-                        break
-
-    def _run_segment_data_feeder(self) -> None:
-        logger.debug('Segment data feeder thread started')
-        try:
-            self._segment_data_feeder()
-        except Exception as e:
-            logger.exception(e)
-            self._handle_exception(e)
-        finally:
-            logger.debug('Segment data feeder thread stopped')
-
-    def _segment_data_feeder(self) -> None:
-        assert self._stream_remuxer is not None
-        MAX_SEGMENT_DATA_CACHE = 3
-        segment_data_cache: List[bytes] = []
-        bytes_io = io.BytesIO()
-        segment_count = 0
-
-        def on_next(profile: StreamProfile) -> None:
-            self._stream_profile = profile
-
-        def on_error(e: Exception) -> None:
-            logger.warning(f'Failed to analyse stream: {repr(e)}')
-
-        while not self._stopped:
-            try:
-                data = self._segment_data_queue.get(timeout=1)
-            except Empty:
-                continue
-            else:
-                with self._stream_analysed_lock:
-                    if not self._stream_analysed:
-                        if self._init_section_data and not bytes_io.getvalue():
-                            bytes_io.write(self._init_section_data)
-                        else:
-                            bytes_io.write(data)
-                            segment_count += 1
-
-                        if segment_count >= 3:
-                            ffprobe(bytes_io.getvalue()).subscribe(
-                                on_next, on_error
-                            )
-                            bytes_io = io.BytesIO()
-                            segment_count = 0
-                            self._stream_analysed = True
-
-                try:
-                    if self._stream_remuxer.stopped:
-                        self._stream_remuxer.start()
-                        while True:
-                            ready = self._stream_remuxer.wait(timeout=1)
-                            if self._stopped:
-                                return
-                            if ready:
-                                break
-                        if segment_data_cache:
-                            if self._init_section_data:
-                                self._stream_remuxer.input.write(
-                                    self._init_section_data
-                                )
-                            for cached_data in segment_data_cache:
-                                if cached_data == self._init_section_data:
-                                    continue
-                                self._stream_remuxer.input.write(cached_data)
-
-                    self._stream_remuxer.input.write(data)
-                except BrokenPipeError as e:
-                    if not self._stopped:
-                        logger.warning(repr(e))
-                    else:
-                        logger.debug(repr(e))
-                except ValueError as e:
-                    if not self._stopped:
-                        logger.warning(repr(e))
-                    else:
-                        logger.debug(repr(e))
-
-                segment_data_cache.append(data)
-                if len(segment_data_cache) > MAX_SEGMENT_DATA_CACHE:
-                    segment_data_cache.pop(0)
-
-    def _run_stream_processor(self) -> None:
-        logger.debug('Stream processor thread started')
-        assert self._stream_remuxer is not None
-
-        with tqdm(
-            desc='Recording',
-            unit='B',
-            unit_scale=True,
-            unit_divisor=1024,
-            postfix=self._make_pbar_postfix(),
-        ) as progress_bar:
-            self._progress_bar = progress_bar
-
-            def update_size(size: int) -> None:
-                progress_bar.update(size)
-                self._rec_calculator.submit(size)
-
-            self._stream_processor = StreamProcessor(
-                self._file_manager,
-                filesize_limit=self._filesize_limit,
-                duration_limit=self._duration_limit,
-                analyse_data=True,
-                dedup_join=True,
-                save_extra_metadata=True,
-                backup_timestamp=True,
-            )
-            self._stream_processor.size_updates.subscribe(update_size)
-
-            try:
-                while not self._stopped:
-                    while True:
-                        ready = self._stream_remuxer.wait(timeout=1)
-                        if self._stopped:
-                            return
-                        if ready:
-                            break
-
-                    self._stream_host_available.wait()
-                    self._stream_processor.set_metadata(self._make_metadata())
-
-                    try:
-                        self._stream_processor.process_stream(
-                            StreamProxy(
-                                self._stream_remuxer.output,
-                                read_timeout=10,
-                            )  # type: ignore
-                        )
-                    except BrokenPipeError as e:
-                        logger.debug(repr(e))
-                    except TimeoutError as e:
-                        logger.debug(repr(e))
-                        self._stream_remuxer.stop()
-                    except FlvDataError as e:
-                        logger.warning(repr(e))
-                        self._stream_remuxer.stop()
-                    except FlvStreamCorruptedError as e:
-                        logger.warning(repr(e))
-                        self._stream_remuxer.stop()
-                    except ValueError as e:
-                        logger.warning(repr(e))
-                        self._stream_remuxer.stop()
-            except Exception as e:
-                if not self._stopped:
-                    logger.exception(e)
-                    self._handle_exception(e)
-                else:
-                    logger.debug(repr(e))
-            finally:
-                self._stream_processor.finalize()
-                self._progress_bar = None
-                self._rec_calculator.freeze()
-                logger.debug('Stream processor thread stopped')
-
-    @retry(
-        retry=retry_if_exception_type((
-            requests.exceptions.Timeout,
-            urllib3.exceptions.TimeoutError,
-            urllib3.exceptions.ProtocolError,
-        )),
-        wait=wait_exponential(multiplier=0.1, max=1),
-        stop=stop_after_delay(10),
-    )
-    def _fetch_playlist(self, url: str) -> str:
-        response = self._session.get(url, timeout=3)
-        response.raise_for_status()
-        response.encoding = 'utf-8'
-        return response.text
-
-    @retry(
-        retry=retry_if_exception_type((
-            requests.exceptions.Timeout,
-            urllib3.exceptions.TimeoutError,
-            urllib3.exceptions.ProtocolError,
-        )),
-        wait=wait_exponential(multiplier=0.1, max=5),
-        stop=stop_after_delay(60),
-    )
-    def _fetch_segment(self, url: str) -> bytes:
-        with self._session.get(url, stream=True, timeout=10) as response:
-            response.raise_for_status()
-
-            bytes_io = io.BytesIO()
-            for chunk in response:
-                bytes_io.write(chunk)
-                self._dl_calculator.submit(len(chunk))
-
-            return bytes_io.getvalue()
+        self._subscription = (
+            self._stream_param_holder.get_stream_params()  # type: ignore
+            .pipe(
+                self._stream_url_resolver,
+                ops.subscribe_on(
+                    NewThreadScheduler(self._thread_factory('PlaylistFetcher'))
+                ),
+                self._playlist_fetcher,
+                self._request_exception_handler,
+                self._connection_error_handler,
+                self._playlist_resolver,
+                ops.observe_on(
+                    NewThreadScheduler(self._thread_factory('SegmentFetcher'))
+                ),
+                self._segment_fetcher,
+                self._dl_statistics,
+                self._prober,
+                ops.observe_on(
+                    NewThreadScheduler(self._thread_factory('SegmentRemuxer'))
+                ),
+                self._segment_remuxer,
+                ops.observe_on(
+                    NewThreadScheduler(self._thread_factory('StreamRecorder'))
+                ),
+                self._stream_parser,
+                flv_ops.process(),
+                self._cutter,
+                self._limiter,
+                self._join_point_extractor,
+                self._injector,
+                self._analyser,
+                self._dumper,
+                self._rec_statistics,
+                self._progress_bar,
+                self._exception_handler,
+            )
+            .subscribe(
+                on_completed=lambda: self._emit_event('stream_recording_completed')
+            )
+        )
src/blrec/core/metadata_provider.py (new file, 68 lines)
@@ -0,0 +1,68 @@
from __future__ import annotations

from collections import OrderedDict
from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING, Any, Dict

from .. import __github__, __prog__, __version__
from ..bili.helpers import get_quality_name
from ..bili.live import Live

if TYPE_CHECKING:
    from .stream_recorder_impl import StreamRecorderImpl

__all__ = ('MetadataProvider',)


class MetadataProvider:
    def __init__(self, live: Live, stream_recorder: StreamRecorderImpl) -> None:
        super().__init__()
        self._live = live
        self._stream_recorder = stream_recorder

    def __call__(self) -> Dict[str, Any]:
        return self._make_metadata()

    def _make_metadata(self) -> Dict[str, Any]:
        live_start_time = datetime.fromtimestamp(
            self._live.room_info.live_start_time, timezone(timedelta(hours=8))
        )

        assert self._stream_recorder.real_quality_number is not None
        stream_quality = '{} ({}{})'.format(
            get_quality_name(self._stream_recorder.real_quality_number),
            self._stream_recorder.real_quality_number,
            ', bluray' if '_bluray' in self._stream_recorder.stream_url else '',
        )

        return {
            'Title': self._live.room_info.title,
            'Artist': self._live.user_info.name,
            'Date': str(live_start_time),
            'Comment': f'''\
B站直播录像
主播:{self._live.user_info.name}
标题:{self._live.room_info.title}
分区:{self._live.room_info.parent_area_name} - {self._live.room_info.area_name}
房间号:{self._live.room_info.room_id}
开播时间:{live_start_time}
流主机: {self._stream_recorder.stream_host}
流格式:{self._stream_recorder.stream_format}
流画质:{stream_quality}
录制程序:{__prog__} v{__version__} {__github__}''',
            'description': OrderedDict(
                {
                    'UserId': str(self._live.user_info.uid),
                    'UserName': self._live.user_info.name,
                    'RoomId': str(self._live.room_info.room_id),
                    'RoomTitle': self._live.room_info.title,
                    'Area': self._live.room_info.area_name,
                    'ParentArea': self._live.room_info.parent_area_name,
                    'LiveStartTime': str(live_start_time),
                    'StreamHost': self._stream_recorder.stream_host,
                    'StreamFormat': self._stream_recorder.stream_format,
                    'StreamQuality': stream_quality,
                    'Recorder': f'{__prog__} v{__version__} {__github__}',
                }
            ),
        }
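Usage sketch (hypothetical wiring, not shown in this commit): MetadataProvider is a zero-argument callable, so a downstream consumer such as the metadata injector can re-invoke it whenever a new output file starts and pick up the current room state.

# assumes `live` and `stream_recorder` objects already exist
provider = MetadataProvider(live, stream_recorder)
metadata = provider()  # re-evaluated on every call
print(metadata['Title'], metadata['description']['StreamQuality'])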
src/blrec/core/operators/__init__.py (new file, 34 lines)
@@ -0,0 +1,34 @@
from .connection_error_handler import ConnectionErrorHandler
from .exception_handler import ExceptionHandler
from .hls_prober import HLSProber, StreamProfile
from .playlist_fetcher import PlaylistFetcher
from .playlist_resolver import PlaylistResolver
from .progress_bar import ProgressBar
from .request_exception_handler import RequestExceptionHandler
from .segment_fetcher import InitSectionData, SegmentData, SegmentFetcher
from .segment_remuxer import SegmentRemuxer
from .sized_statistics import SizedStatistics
from .stream_fetcher import StreamFetcher
from .stream_parser import StreamParser
from .stream_statistics import StreamStatistics
from .stream_url_resolver import StreamURLResolver

__all__ = (
    'ConnectionErrorHandler',
    'ExceptionHandler',
    'HLSProber',
    'InitSectionData',
    'PlaylistFetcher',
    'PlaylistResolver',
    'ProgressBar',
    'RequestExceptionHandler',
    'SegmentData',
    'SegmentFetcher',
    'SegmentRemuxer',
    'SizedStatistics',
    'StreamFetcher',
    'StreamParser',
    'StreamProfile',
    'StreamStatistics',
    'StreamURLResolver',
)
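All of these classes follow the same convention: an instance is callable with an Observable and returns an Observable, which is exactly the signature .pipe() accepts. A minimal, self-contained toy operator in that style (illustrative only, not part of the package):

from typing import Optional

import reactivex
from reactivex import Observable, abc


class Counter:
    """Toy pipeline operator: passes items through and counts them."""

    def __init__(self) -> None:
        self.count = 0

    def __call__(self, source: Observable[int]) -> Observable[int]:
        def subscribe(
            observer: abc.ObserverBase[int],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            def on_next(item: int) -> None:
                self.count += 1
                observer.on_next(item)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)


counter = Counter()
reactivex.of(1, 2, 3).pipe(counter).subscribe()
assert counter.count == 3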
src/blrec/core/operators/connection_error_handler.py (new file, 87 lines)
@@ -0,0 +1,87 @@
from __future__ import annotations

import logging
import time
from typing import Optional, TypeVar

import aiohttp
import requests
from reactivex import Observable, abc

from ...bili.live import Live
from ...utils import operators as utils_ops
from ...utils.mixins import AsyncCooperationMixin

__all__ = ('ConnectionErrorHandler',)


logger = logging.getLogger(__name__)

_T = TypeVar('_T')


class ConnectionErrorHandler(AsyncCooperationMixin):
    def __init__(
        self,
        live: Live,
        *,
        disconnection_timeout: Optional[int] = None,
        check_interval: int = 3,
    ) -> None:
        super().__init__()
        self._live = live
        self.disconnection_timeout = disconnection_timeout or 600  # seconds
        self.check_interval = check_interval

    def __call__(self, source: Observable[_T]) -> Observable[_T]:
        return self._handle(source).pipe(
            utils_ops.retry(should_retry=self._should_retry)
        )

    def _handle(self, source: Observable[_T]) -> Observable[_T]:
        def subscribe(
            observer: abc.ObserverBase[_T],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            def on_error(exc: Exception) -> None:
                try:
                    raise exc
                except (
                    aiohttp.ClientConnectionError,
                    requests.exceptions.ConnectionError,
                ) as e:
                    logger.warning(repr(e))
                    if self._wait_for_connection_error():
                        observer.on_error(exc)
                    else:
                        observer.on_completed()
                except Exception:
                    pass
                observer.on_error(exc)

            return source.subscribe(
                observer.on_next, on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    def _should_retry(self, exc: Exception) -> bool:
        if isinstance(
            exc, (aiohttp.ClientConnectionError, requests.exceptions.ConnectionError)
        ):
            return True
        else:
            return False

    def _wait_for_connection_error(self) -> bool:
        timeout = self.disconnection_timeout
        logger.info(f'Waiting {timeout} seconds for connection recovery... ')
        timebase = time.monotonic()
        while not self._run_coroutine(self._live.check_connectivity()):
            if timeout is not None and time.monotonic() - timebase > timeout:
                logger.error(f'Connection not recovered in {timeout} seconds')
                return False
            time.sleep(self.check_interval)
        else:
            logger.info('Connection recovered')
            return True
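Composition sketch (hypothetical `live` and `source` objects, not from the commit): piped into a stream, the handler logs connection errors and blocks in _wait_for_connection_error(); if connectivity comes back within disconnection_timeout, the error is propagated into utils_ops.retry and the upstream is resubscribed, otherwise the stream completes.

handler = ConnectionErrorHandler(live, disconnection_timeout=600, check_interval=3)
resilient = source.pipe(handler)  # `source` is any Observable of stream items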
src/blrec/core/operators/exception_handler.py (new file, 57 lines)
@@ -0,0 +1,57 @@
from __future__ import annotations

import errno
import logging
from typing import Optional, TypeVar

from reactivex import Observable, abc

from ...bili.exceptions import LiveRoomEncrypted, LiveRoomHidden, LiveRoomLocked
from ...utils import operators as utils_ops
from ...utils.mixins import AsyncCooperationMixin

__all__ = ('ExceptionHandler',)


logger = logging.getLogger(__name__)

_T = TypeVar('_T')


class ExceptionHandler(AsyncCooperationMixin):
    def __call__(self, source: Observable[_T]) -> Observable[_T]:
        return self._handle(source).pipe(utils_ops.retry(delay=1))

    def _handle(self, source: Observable[_T]) -> Observable[_T]:
        def subscribe(
            observer: abc.ObserverBase[_T],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            def on_error(exc: Exception) -> None:
                self._submit_exception(exc)
                try:
                    raise exc
                except OSError as e:
                    logger.critical(repr(e), exc_info=e)
                    if e.errno == errno.ENOSPC:
                        # OSError(28, 'No space left on device')
                        observer.on_completed()
                    else:
                        observer.on_error(exc)
                except LiveRoomHidden:
                    logger.error('The live room has been hidden!')
                    observer.on_completed()
                except LiveRoomLocked:
                    logger.error('The live room has been locked!')
                    observer.on_completed()
                except LiveRoomEncrypted:
                    logger.error('The live room has been encrypted!')
                    observer.on_completed()
                except Exception:
                    observer.on_error(exc)

            return source.subscribe(
                observer.on_next, on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)
src/blrec/core/operators/hls_prober.py (new file, 79 lines)
@@ -0,0 +1,79 @@
from __future__ import annotations

import io
import logging
from typing import List, Optional, Union

from reactivex import Observable, Subject, abc

from ...utils.ffprobe import StreamProfile, ffprobe
from .segment_fetcher import InitSectionData, SegmentData

__all__ = ('HLSProber', 'StreamProfile')


logger = logging.getLogger(__name__)


class HLSProber:
    def __init__(self) -> None:
        self._profiles: Subject[StreamProfile] = Subject()

    def _reset(self) -> None:
        self._gathering: bool = False
        self._gathered_items: List[Union[InitSectionData, SegmentData]] = []

    @property
    def profiles(self) -> Observable[StreamProfile]:
        return self._profiles

    def __call__(
        self, source: Observable[Union[InitSectionData, SegmentData]]
    ) -> Observable[Union[InitSectionData, SegmentData]]:
        return self._probe(source)

    def _probe(
        self, source: Observable[Union[InitSectionData, SegmentData]]
    ) -> Observable[Union[InitSectionData, SegmentData]]:
        def subscribe(
            observer: abc.ObserverBase[Union[InitSectionData, SegmentData]],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            self._reset()

            def on_next(item: Union[InitSectionData, SegmentData]) -> None:
                if isinstance(item, InitSectionData):
                    self._gathered_items.clear()
                    self._gathering = True

                if self._gathering:
                    self._gathered_items.append(item)
                    if len(self._gathered_items) >= 10:
                        try:
                            self._do_probe()
                        except Exception as e:
                            logger.warning(f'Failed to probe stream: {repr(e)}')
                        finally:
                            self._gathered_items.clear()
                            self._gathering = False

                observer.on_next(item)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    def _do_probe(self) -> None:
        bytes_io = io.BytesIO()
        for item in self._gathered_items:
            bytes_io.write(item.payload)

        def on_next(profile: StreamProfile) -> None:
            self._profiles.on_next(profile)

        def on_error(e: Exception) -> None:
            logger.warning(f'Failed to probe stream by ffprobe: {repr(e)}')

        ffprobe(bytes_io.getvalue()).subscribe(on_next, on_error)
src/blrec/core/operators/playlist_fetcher.py (new file, 105 lines)
@@ -0,0 +1,105 @@
from __future__ import annotations

import logging
import time
from datetime import datetime
from typing import Optional

import m3u8
import requests
import urllib3
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
from tenacity import retry, retry_if_exception_type, stop_after_delay, wait_exponential

from ...bili.live import Live
from ...utils.mixins import SupportDebugMixin

__all__ = ('PlaylistFetcher',)


logger = logging.getLogger(__name__)


class PlaylistFetcher(SupportDebugMixin):
    def __init__(self, live: Live, session: requests.Session) -> None:
        super().__init__()
        self._init_for_debug(live.room_id)
        self._live = live
        self._session = session

    def __call__(self, source: Observable[str]) -> Observable[m3u8.M3U8]:
        return self._fetch(source)

    def _fetch(self, source: Observable[str]) -> Observable[m3u8.M3U8]:
        def subscribe(
            observer: abc.ObserverBase[m3u8.M3U8],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            if self._debug:
                path = '{}/playlist-{}-{}.m3u8'.format(
                    self._debug_dir,
                    self._live.room_id,
                    datetime.now().strftime('%Y-%m-%d-%H%M%S-%f'),
                )
                playlist_debug_file = open(path, 'wt', encoding='utf-8')

            disposed = False
            subscription = SerialDisposable()

            def on_next(url: str) -> None:
                logger.info(f'Fetching playlist... {url}')
                while not disposed:
                    try:
                        content = self._fetch_playlist(url)
                        if self._debug:
                            playlist_debug_file.write(content + '\n')
                        playlist = m3u8.loads(content, uri=url)
                        if playlist.is_variant:
                            url = self._get_best_quality_url(playlist)
                            logger.debug('Playlist changed to variant playlist')
                            on_next(url)
                    except Exception as e:
                        logger.warning(f'Failed to fetch playlist: {repr(e)}')
                        observer.on_error(e)
                    else:
                        observer.on_next(playlist)
                        time.sleep(1)

            def dispose() -> None:
                nonlocal disposed
                disposed = True
                if self._debug:
                    playlist_debug_file.close()

            subscription.disposable = source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

            return CompositeDisposable(subscription, Disposable(dispose))

        return Observable(subscribe)

    def _get_best_quality_url(self, playlist: m3u8.M3U8) -> str:
        sorted_playlists = sorted(
            playlist.playlists, key=lambda p: p.stream_info.bandwidth
        )
        return sorted_playlists[-1].absolute_uri

    @retry(
        reraise=True,
        retry=retry_if_exception_type(
            (
                requests.exceptions.Timeout,
                urllib3.exceptions.TimeoutError,
                urllib3.exceptions.ProtocolError,
            )
        ),
        wait=wait_exponential(multiplier=0.1, max=1),
        stop=stop_after_delay(10),
    )
    def _fetch_playlist(self, url: str) -> str:
        response = self._session.get(url, headers=self._live.headers, timeout=3)
        response.raise_for_status()
        response.encoding = 'utf-8'
        return response.text
src/blrec/core/operators/playlist_resolver.py (new file, 68 lines)
@@ -0,0 +1,68 @@
from __future__ import annotations

import logging
from typing import Final, Optional, Set

import m3u8
import urllib3
from ordered_set import OrderedSet
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

__all__ = ('PlaylistResolver',)


logger = logging.getLogger(__name__)
logging.getLogger(urllib3.__name__).setLevel(logging.WARNING)


class PlaylistResolver:
    _MAX_LAST_SEG_URIS: Final[int] = 30

    def __call__(self, source: Observable[m3u8.M3U8]) -> Observable[m3u8.Segment]:
        return self._solve(source)

    def _solve(self, source: Observable[m3u8.M3U8]) -> Observable[m3u8.Segment]:
        def subscribe(
            observer: abc.ObserverBase[m3u8.Segment],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            disposed = False
            subscription = SerialDisposable()
            last_seg_uris: OrderedSet[str] = OrderedSet()

            def on_next(playlist: m3u8.M3U8) -> None:
                curr_seg_uris: Set[str] = set()

                for seg in playlist.segments:
                    if disposed:
                        return
                    curr_seg_uris.add(seg.uri)
                    if seg.uri not in last_seg_uris:
                        observer.on_next(seg)
                        last_seg_uris.add(seg.uri)
                        if len(last_seg_uris) > self._MAX_LAST_SEG_URIS:
                            last_seg_uris.pop(0)

                if last_seg_uris and not curr_seg_uris.intersection(last_seg_uris):
                    logger.debug(
                        'Segments broken!\n'
                        f'Last segments uris: {last_seg_uris}\n'
                        f'Current segments uris: {curr_seg_uris}'
                    )

                if playlist.is_endlist:
                    logger.debug('Playlist ended')

            def dispose() -> None:
                nonlocal disposed
                disposed = True
                last_seg_uris.clear()

            subscription.disposable = source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

            return CompositeDisposable(subscription, Disposable(dispose))

        return Observable(subscribe)
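The dedup window above relies on OrderedSet preserving insertion order so the oldest URI is evicted first. A small illustration of that bound (toy values; _MAX_LAST_SEG_URIS is 30 in the real class):

from ordered_set import OrderedSet

window: OrderedSet[str] = OrderedSet()
for uri in ('seg1.m4s', 'seg2.m4s', 'seg1.m4s', 'seg3.m4s'):
    if uri not in window:
        window.add(uri)
        if len(window) > 2:   # toy limit instead of 30
            window.pop(0)     # evict the oldest URI
print(window)  # OrderedSet(['seg2.m4s', 'seg3.m4s'])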
src/blrec/core/operators/progress_bar.py (new file, 75 lines)
@@ -0,0 +1,75 @@
from __future__ import annotations

import logging
from typing import Optional

from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
from tqdm import tqdm

from ...bili.live import Live
from ...flv.operators.typing import FLVStream, FLVStreamItem

__all__ = ('ProgressBar',)


logger = logging.getLogger(__name__)


class ProgressBar:
    def __init__(self, live: Live) -> None:
        self._live = live
        self._pbar: Optional[tqdm] = None

    def update_bar_info(self) -> None:
        if self._pbar is not None:
            self._pbar.set_postfix_str(self._make_pbar_postfix())

    def __call__(self, source: FLVStream) -> FLVStream:
        return self._progress(source)

    def _progress(self, source: FLVStream) -> FLVStream:
        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            subscription = SerialDisposable()

            self._pbar = tqdm(
                desc='Recording',
                unit='B',
                unit_scale=True,
                unit_divisor=1024,
                postfix=self._make_pbar_postfix(),
            )

            def on_next(item: FLVStreamItem) -> None:
                if self._pbar is not None:
                    self._pbar.update(len(item))
                observer.on_next(item)

            def on_completed() -> None:
                if self._pbar is not None:
                    self._pbar.close()
                    self._pbar = None
                observer.on_completed()

            def dispose() -> None:
                if self._pbar is not None:
                    self._pbar.close()
                    self._pbar = None

            subscription.disposable = source.subscribe(
                on_next, observer.on_error, on_completed, scheduler=scheduler
            )

            return CompositeDisposable(subscription, Disposable(dispose))

        return Observable(subscribe)

    def _make_pbar_postfix(self) -> str:
        return '{room_id} - {user_name}: {room_title}'.format(
            room_id=self._live.room_info.room_id,
            user_name=self._live.user_info.name,
            room_title=self._live.room_info.title,
        )
src/blrec/core/operators/request_exception_handler.py (new file, 69 lines)
@@ -0,0 +1,69 @@
from __future__ import annotations

import asyncio
import logging
from typing import Optional, TypeVar

import requests
import urllib3
from reactivex import Observable, abc

from ...utils import operators as utils_ops

__all__ = ('RequestExceptionHandler',)


logger = logging.getLogger(__name__)

_T = TypeVar('_T')


class RequestExceptionHandler:
    def __call__(self, source: Observable[_T]) -> Observable[_T]:
        return self._handle(source).pipe(
            utils_ops.retry(delay=1, should_retry=self._should_retry)
        )

    def _handle(self, source: Observable[_T]) -> Observable[_T]:
        def subscribe(
            observer: abc.ObserverBase[_T],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            def on_error(exc: Exception) -> None:
                try:
                    raise exc
                except asyncio.exceptions.TimeoutError:
                    logger.warning(repr(exc))
                except requests.exceptions.Timeout:
                    logger.warning(repr(exc))
                except requests.exceptions.HTTPError:
                    logger.warning(repr(exc))
                except urllib3.exceptions.TimeoutError:
                    logger.warning(repr(exc))
                except urllib3.exceptions.ProtocolError:
                    # ProtocolError('Connection broken: IncompleteRead(
                    logger.warning(repr(exc))
                except Exception:
                    pass
                observer.on_error(exc)

            return source.subscribe(
                observer.on_next, on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    def _should_retry(self, exc: Exception) -> bool:
        if isinstance(
            exc,
            (
                asyncio.exceptions.TimeoutError,
                requests.exceptions.Timeout,
                requests.exceptions.HTTPError,
                urllib3.exceptions.TimeoutError,
                urllib3.exceptions.ProtocolError,
            ),
        ):
            return True
        else:
            return False
src/blrec/core/operators/segment_fetcher.py (new file, 105 lines)
@@ -0,0 +1,105 @@
from __future__ import annotations

import logging
from typing import Optional, Union

import attr
import m3u8
import requests
import urllib3
from m3u8.model import InitializationSection
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
from tenacity import retry, retry_if_exception_type, stop_after_delay, wait_exponential

from ...bili.live import Live

__all__ = ('SegmentFetcher', 'InitSectionData', 'SegmentData')


logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, slots=True, frozen=True)
class InitSectionData:
    payload: bytes

    def __len__(self) -> int:
        return len(self.payload)


@attr.s(auto_attribs=True, slots=True, frozen=True)
class SegmentData:
    payload: bytes

    def __len__(self) -> int:
        return len(self.payload)


class SegmentFetcher:
    def __init__(self, live: Live, session: requests.Session) -> None:
        self._live = live
        self._session = session

    def __call__(
        self, source: Observable[m3u8.Segment]
    ) -> Observable[Union[InitSectionData, SegmentData]]:
        return self._fetch(source)

    def _fetch(
        self, source: Observable[m3u8.Segment]
    ) -> Observable[Union[InitSectionData, SegmentData]]:
        def subscribe(
            observer: abc.ObserverBase[Union[InitSectionData, SegmentData]],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            disposed = False
            subscription = SerialDisposable()
            init_section: Optional[InitializationSection] = None

            def on_next(seg: m3u8.Segment) -> None:
                nonlocal init_section
                url: str = ''
                try:
                    if getattr(seg, 'init_section', None) and (
                        not init_section or seg.init_section.uri != init_section.uri
                    ):
                        url = seg.init_section.absolute_uri
                        data = self._fetch_segment(url)
                        init_section = seg.init_section
                        observer.on_next(InitSectionData(payload=data))
                    url = seg.absolute_uri
                    data = self._fetch_segment(url)
                    observer.on_next(SegmentData(payload=data))
                except Exception as e:
                    logger.warning(f'Failed to fetch segment {url}: {repr(e)}')

            def dispose() -> None:
                nonlocal disposed
                disposed = True

            subscription.disposable = source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

            return CompositeDisposable(subscription, Disposable(dispose))

        return Observable(subscribe)

    @retry(
        reraise=True,
        retry=retry_if_exception_type(
            (
                requests.exceptions.Timeout,
                requests.exceptions.HTTPError,
                urllib3.exceptions.TimeoutError,
                urllib3.exceptions.ProtocolError,
            )
        ),
        wait=wait_exponential(multiplier=0.1, max=5),
        stop=stop_after_delay(60),
    )
    def _fetch_segment(self, url: str) -> bytes:
        with self._session.get(url, headers=self._live.headers, timeout=10) as response:
            response.raise_for_status()
            return response.content
123
src/blrec/core/operators/segment_remuxer.py
Normal file
123
src/blrec/core/operators/segment_remuxer.py
Normal file
@ -0,0 +1,123 @@
from __future__ import annotations

import io
import logging
from typing import Final, List, Optional, Union

import urllib3
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

from ...bili.live import Live
from ...utils.io import wait_for
from ..stream_remuxer import StreamRemuxer
from .segment_fetcher import InitSectionData, SegmentData

__all__ = ('SegmentRemuxer',)


logger = logging.getLogger(__name__)
logging.getLogger(urllib3.__name__).setLevel(logging.WARNING)


class SegmentRemuxer:
    _MAX_SEGMENT_DATA_CACHE: Final = 3

    def __init__(self, live: Live) -> None:
        self._live = live
        self._stream_remuxer = StreamRemuxer(live.room_id, remove_filler_data=True)

    def __call__(
        self, source: Observable[Union[InitSectionData, SegmentData]]
    ) -> Observable[io.RawIOBase]:
        return self._remux(source)

    def _remux(
        self, source: Observable[Union[InitSectionData, SegmentData]]
    ) -> Observable[io.RawIOBase]:
        def subscribe(
            observer: abc.ObserverBase[io.RawIOBase],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            disposed = False
            subscription = SerialDisposable()

            init_section_data: Optional[bytes] = None
            segment_data_cache: List[bytes] = []

            self._stream_remuxer.stop()

            def on_next(data: Union[InitSectionData, SegmentData]) -> None:
                nonlocal init_section_data

                if isinstance(data, InitSectionData):
                    init_section_data = data.payload

                try:
                    if self._stream_remuxer.stopped:
                        self._stream_remuxer.start()
                        while True:
                            ready = self._stream_remuxer.wait(timeout=1)
                            if disposed:
                                return
                            if ready:
                                break

                        observer.on_next(RemuxedStream(self._stream_remuxer))

                        if segment_data_cache:
                            if init_section_data:
                                self._stream_remuxer.input.write(init_section_data)
                            for cached_data in segment_data_cache:
                                if cached_data == init_section_data:
                                    continue
                                self._stream_remuxer.input.write(cached_data)

                    self._stream_remuxer.input.write(data.payload)
                except Exception as exc:
                    logger.warning(f'Failed to write data to stream remuxer: {exc}')
                    self._stream_remuxer.stop()

                segment_data_cache.append(data.payload)
                if len(segment_data_cache) > self._MAX_SEGMENT_DATA_CACHE:
                    segment_data_cache.pop(0)

            def dispose() -> None:
                nonlocal disposed
                disposed = True

            subscription.disposable = source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

            return CompositeDisposable(subscription, Disposable(dispose))

        return Observable(subscribe)


class RemuxedStream(io.RawIOBase):
    def __init__(
        self, stream_remuxer: StreamRemuxer, *, read_timeout: float = 10
    ) -> None:
        self._stream_remuxer = stream_remuxer
        self._read_timeout = read_timeout
        self._offset: int = 0

    def read(self, size: int = -1) -> bytes:
        try:
            data = wait_for(
                self._stream_remuxer.output.read,
                args=(size,),
                timeout=self._read_timeout,
            )
        except Exception as exc:
            logger.warning(f'Failed to read data from stream remuxer: {exc}')
            self._stream_remuxer.stop()
            raise EOFError
        else:
            assert data is not None
            self._offset += len(data)
            return data

    def tell(self) -> int:
        return self._offset
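Note the small segment cache: when the ffmpeg-backed `StreamRemuxer` dies and restarts mid-stream, the last few segments (and the init section) are replayed into the new process so the remuxed output resumes cleanly. A hypothetical composition of the two operators, under the same assumptions as the previous sketch:

```python
# Illustrative sketch, not part of this commit.
remuxed = segments.pipe(
    SegmentFetcher(live, session),  # m3u8.Segment -> segment payloads
    SegmentRemuxer(live),           # payloads -> io.RawIOBase of remuxed data
)
remuxed.subscribe(lambda stream: print(f'new remuxed stream: {stream!r}'))
```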
51
src/blrec/core/operators/sized_statistics.py
Normal file
51
src/blrec/core/operators/sized_statistics.py
Normal file
@ -0,0 +1,51 @@
from __future__ import annotations

from typing import Optional, Sized

from reactivex import Observable, abc

from ..statistics import Statistics

__all__ = ('SizedStatistics',)


class SizedStatistics:
    def __init__(self) -> None:
        self._statistics = Statistics()

    @property
    def count(self) -> int:
        return self._statistics.count

    @property
    def rate(self) -> float:
        return self._statistics.rate

    @property
    def elapsed(self) -> float:
        return self._statistics.elapsed

    def freeze(self) -> None:
        self._statistics.freeze()

    def reset(self) -> None:
        self._statistics.reset()

    def __call__(self, source: Observable[Sized]) -> Observable[Sized]:
        self.reset()
        return self._calc(source)

    def _calc(self, source: Observable[Sized]) -> Observable[Sized]:
        def subscribe(
            observer: abc.ObserverBase[Sized],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            def on_next(item: Sized) -> None:
                self._statistics.submit(len(item))
                observer.on_next(item)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)
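`SizedStatistics` is a transparent tap: items pass through unchanged while their `len()` feeds the shared `Statistics` counters. A standalone sketch (illustrative; `reactivex.of` fakes a source):

```python
# Illustrative sketch, not part of this commit.
from reactivex import of

stats = SizedStatistics()
of(b'aaaa', b'bb', b'cccccc').pipe(stats).subscribe()
print(stats.count)  # 12 -- total bytes observed
```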
61
src/blrec/core/operators/stream_fetcher.py
Normal file
61
src/blrec/core/operators/stream_fetcher.py
Normal file
@ -0,0 +1,61 @@
from __future__ import annotations

import io
import logging
from typing import Optional

import requests
from reactivex import Observable, abc

from ...bili.live import Live
from ...utils.mixins import AsyncCooperationMixin

__all__ = ('StreamFetcher',)


logger = logging.getLogger(__name__)


class StreamFetcher(AsyncCooperationMixin):
    def __init__(
        self,
        live: Live,
        session: requests.Session,
        *,
        read_timeout: Optional[int] = None,
    ) -> None:
        super().__init__()
        self._live = live
        self._session = session
        self.read_timeout = read_timeout or 3

    def __call__(self, source: Observable[str]) -> Observable[io.RawIOBase]:
        return self._fetch(source)

    def _fetch(self, source: Observable[str]) -> Observable[io.RawIOBase]:
        def subscribe(
            observer: abc.ObserverBase[io.RawIOBase],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            def on_next(url: str) -> None:
                try:
                    logger.info(f'Requesting live stream... {url}')
                    response = self._session.get(
                        url,
                        stream=True,
                        headers=self._live.headers,
                        timeout=self.read_timeout,
                    )
                    logger.info('Response received')
                    response.raise_for_status()
                except Exception as e:
                    logger.warning(f'Failed to request live stream: {repr(e)}')
                    observer.on_error(e)
                else:
                    observer.on_next(response.raw)  # urllib3.response.HTTPResponse

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)
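`StreamFetcher` turns each resolved URL into a live HTTP response body. Passing `response.raw` downstream (rather than `response.content`) keeps the download streaming, and `stream=True` stops `requests` from buffering the whole body. A usage sketch (illustrative; `urls` is an assumed `Observable[str]`):

```python
# Illustrative sketch, not part of this commit.
streams = urls.pipe(StreamFetcher(live, session, read_timeout=3))
streams.subscribe(lambda raw: print(f'first bytes: {raw.read(16)!r}'))
```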
54
src/blrec/core/operators/stream_parser.py
Normal file
54
src/blrec/core/operators/stream_parser.py
Normal file
@ -0,0 +1,54 @@
from __future__ import annotations

import io
import logging

from reactivex import Observable
from reactivex import operators as ops

from ...flv import operators as flv_ops
from ...flv.exceptions import FlvDataError
from ...flv.operators.typing import FLVStream
from ...utils import operators as utils_ops
from ..stream_param_holder import StreamParamHolder

__all__ = ('StreamParser',)


logger = logging.getLogger(__name__)


class StreamParser:
    def __init__(
        self, stream_param_holder: StreamParamHolder, *, ignore_eof: bool = False
    ) -> None:
        self._stream_param_holder = stream_param_holder
        self._ignore_eof = ignore_eof

    def __call__(self, source: Observable[io.RawIOBase]) -> FLVStream:
        return source.pipe(  # type: ignore
            flv_ops.parse(ignore_eof=self._ignore_eof, backup_timestamp=True),
            ops.do_action(on_error=self._before_retry),
            utils_ops.retry(should_retry=self._should_retry),
        )

    def _should_retry(self, exc: Exception) -> bool:
        if isinstance(exc, (EOFError, FlvDataError)):
            return True
        else:
            return False

    def _before_retry(self, exc: Exception) -> None:
        try:
            raise exc
        except EOFError:
            logger.debug(repr(exc))
        except FlvDataError:
            logger.warning(f'Failed to parse stream: {repr(exc)}')
            if not self._stream_param_holder.use_alternative_stream:
                self._stream_param_holder.use_alternative_stream = True
            else:
                self._stream_param_holder.use_alternative_stream = False
                self._stream_param_holder.rotate_api_platform()
        except Exception:
            pass
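`_before_retry` relies on a small Python idiom: re-raising the exception inside a `try` block lets ordinary `except` clauses replace a ladder of `isinstance` checks. A standalone illustration (not part of the commit):

```python
# Illustrative sketch of the re-raise dispatch idiom.
def describe(exc: Exception) -> str:
    try:
        raise exc
    except EOFError:
        return 'stream ended'
    except ValueError:
        return 'bad data'
    except Exception:
        return 'other'

print(describe(EOFError()))    # stream ended
print(describe(ValueError()))  # bad data
```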
81
src/blrec/core/operators/stream_statistics.py
Normal file
81
src/blrec/core/operators/stream_statistics.py
Normal file
@ -0,0 +1,81 @@
from __future__ import annotations

import io
from typing import Any, Optional

from reactivex import Observable, Subject, abc

from ..statistics import Statistics

__all__ = ('StreamStatistics',)


class StreamStatistics:
    def __init__(self) -> None:
        self._statistics = Statistics()

    @property
    def count(self) -> int:
        return self._statistics.count

    @property
    def rate(self) -> float:
        return self._statistics.rate

    @property
    def elapsed(self) -> float:
        return self._statistics.elapsed

    def freeze(self) -> None:
        self._statistics.freeze()

    def reset(self) -> None:
        self._statistics.reset()

    def __call__(self, source: Observable[io.RawIOBase]) -> Observable[io.RawIOBase]:
        self.reset()
        return self._calc(source)

    def _calc(self, source: Observable[io.RawIOBase]) -> Observable[io.RawIOBase]:
        def subscribe(
            observer: abc.ObserverBase[io.RawIOBase],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            def on_next(stream: io.RawIOBase) -> None:
                calculable_stream = CalculableStream(stream)
                calculable_stream.size_updates.subscribe(self._statistics.submit)
                observer.on_next(calculable_stream)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)


class CalculableStream(io.RawIOBase):
    def __init__(self, stream: io.RawIOBase) -> None:
        self._stream = stream
        self._offset: int = 0
        self._size_updates: Subject[int] = Subject()

    @property
    def size_updates(self) -> Observable[int]:
        return self._size_updates

    def read(self, size: int = -1) -> bytes:
        data = self._stream.read(size)
        assert data is not None
        self._offset += len(data)
        self._size_updates.on_next(len(data))
        return data

    def readinto(self, b: Any) -> int:
        n = self._stream.readinto(b)
        assert n is not None
        self._offset += n
        self._size_updates.on_next(n)
        return n

    def tell(self) -> int:
        return self._offset
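Unlike `SizedStatistics`, which counts whole emitted items, `StreamStatistics` wraps each raw stream so bytes are counted as they are read downstream. `CalculableStream` works with anything exposing `read`/`readinto`; a quick sketch (illustrative, duck-typing a `BytesIO` despite the `RawIOBase` annotation):

```python
# Illustrative sketch, not part of this commit.
import io

cs = CalculableStream(io.BytesIO(b'abcdef'))  # duck-typed stand-in
cs.size_updates.subscribe(lambda n: print(f'read {n} bytes'))
cs.read(4)        # prints: read 4 bytes
cs.read()         # prints: read 2 bytes
print(cs.tell())  # 6
```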
160
src/blrec/core/operators/stream_url_resolver.py
Normal file
160
src/blrec/core/operators/stream_url_resolver.py
Normal file
@ -0,0 +1,160 @@
from __future__ import annotations

import logging
from typing import Optional
from urllib.parse import urlparse

import requests
import urllib3
from reactivex import Observable, abc
from reactivex import operators as ops

from ...bili.exceptions import (
    LiveRoomEncrypted,
    LiveRoomHidden,
    LiveRoomLocked,
    NoAlternativeStreamAvailable,
    NoStreamAvailable,
    NoStreamCodecAvailable,
    NoStreamFormatAvailable,
    NoStreamQualityAvailable,
)
from ...bili.live import Live
from ...utils import operators as utils_ops
from ...utils.mixins import AsyncCooperationMixin
from ..stream_param_holder import StreamParamHolder, StreamParams

__all__ = ('StreamURLResolver',)


logger = logging.getLogger(__name__)
logging.getLogger(urllib3.__name__).setLevel(logging.WARNING)


class StreamURLResolver(AsyncCooperationMixin):
    def __init__(self, live: Live, stream_param_holder: StreamParamHolder) -> None:
        super().__init__()
        self._live = live
        self._stream_param_holder = stream_param_holder
        self._stream_url: str = ''
        self._stream_host: str = ''
        self._stream_params: Optional[StreamParams] = None

    @property
    def stream_url(self) -> str:
        return self._stream_url

    @property
    def stream_host(self) -> str:
        return self._stream_host

    def _reset(self) -> None:
        self._stream_url = ''
        self._stream_host = ''
        self._stream_params = None

    def __call__(self, source: Observable[StreamParams]) -> Observable[str]:
        self._reset()
        return self._solve(source).pipe(
            ops.do_action(on_error=self._before_retry),
            utils_ops.retry(delay=1, should_retry=self._should_retry),
        )

    def _solve(self, source: Observable[StreamParams]) -> Observable[str]:
        def subscribe(
            observer: abc.ObserverBase[str],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            def on_next(params: StreamParams) -> None:
                if self._can_reuse_url(params):
                    observer.on_next(self._stream_url)
                    return

                try:
                    logger.info(
                        f'Getting the live stream url... '
                        f'qn: {params.quality_number}, '
                        f'format: {params.stream_format}, '
                        f'api platform: {params.api_platform}, '
                        f'use alternative stream: {params.use_alternative_stream}'
                    )
                    url = self._run_coroutine(
                        self._live.get_live_stream_url(
                            params.quality_number,
                            api_platform=params.api_platform,
                            stream_format=params.stream_format,
                            select_alternative=params.use_alternative_stream,
                        )
                    )
                except Exception as e:
                    logger.warning(f'Failed to get live stream url: {repr(e)}')
                    observer.on_error(e)
                else:
                    logger.info(f"Got live stream url: '{url}'")
                    self._stream_url = url
                    self._stream_host = urlparse(url).hostname or ''
                    self._stream_params = params
                    observer.on_next(url)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    def _can_reuse_url(self, params: StreamParams) -> bool:
        if params == self._stream_params and self._stream_url:
            try:
                response = requests.get(
                    self._stream_url, stream=True, headers=self._live.headers
                )
                response.raise_for_status()
            except Exception:
                return False
            else:
                return True
        else:
            return False

    def _should_retry(self, exc: Exception) -> bool:
        if isinstance(
            exc,
            (
                NoStreamAvailable,
                NoStreamCodecAvailable,
                NoStreamFormatAvailable,
                NoStreamQualityAvailable,
                NoAlternativeStreamAvailable,
            ),
        ):
            return True
        else:
            return False

    def _before_retry(self, exc: Exception) -> None:
        try:
            raise exc
        except (NoStreamAvailable, NoStreamCodecAvailable, NoStreamFormatAvailable):
            pass
        except NoStreamQualityAvailable:
            qn = self._stream_param_holder.quality_number
            logger.info(
                f'The specified stream quality ({qn}) is not available, '
                'will use the original stream quality (10000) instead.'
            )
            self._stream_param_holder.fall_back_quality()
        except NoAlternativeStreamAvailable:
            logger.debug(
                'No alternative stream url available, '
                'will use the primary stream url instead.'
            )
            self._stream_param_holder.use_alternative_stream = False
            self._stream_param_holder.rotate_api_platform()
        except LiveRoomHidden:
            pass
        except LiveRoomLocked:
            pass
        except LiveRoomEncrypted:
            pass
        except Exception:
            pass
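The resolver's retry policy encodes a fallback ladder: an unavailable quality falls back to the original quality (10000), a missing alternative URL switches back to the primary URL and rotates the API platform, and transient "no stream" errors simply retry. Wiring it to the parameter holder might look like this (illustrative sketch; assumes an existing `live`):

```python
# Illustrative sketch, not part of this commit.
holder = StreamParamHolder(stream_format='flv', quality_number=10000)
resolver = StreamURLResolver(live, holder)

urls = holder.get_stream_params().pipe(resolver)  # Observable[str]
urls.subscribe(lambda url: print(f'stream url: {url}'))
```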
81
src/blrec/core/path_provider.py
Normal file
81
src/blrec/core/path_provider.py
Normal file
@ -0,0 +1,81 @@
import asyncio
import logging
import os
import re
import time
from datetime import datetime
from typing import Tuple

import aiohttp
from tenacity import retry, retry_if_exception_type, stop_after_delay, wait_exponential

from ..bili.live import Live
from ..path import escape_path
from ..utils.mixins import AsyncCooperationMixin

__all__ = ('PathProvider',)

logger = logging.getLogger(__name__)


class PathProvider(AsyncCooperationMixin):
    def __init__(self, live: Live, out_dir: str, path_template: str) -> None:
        super().__init__()
        self._live = live
        self.out_dir = out_dir
        self.path_template = path_template

    def __call__(self) -> Tuple[str, int]:
        timestamp = self._get_timestamp()
        path = self._make_path(timestamp)
        return path, timestamp

    def _get_timestamp(self) -> int:
        try:
            return self._get_server_timestamp()
        except Exception as e:
            logger.warning(f'Failed to get server timestamp: {repr(e)}')
            return self._get_local_timestamp()

    def _get_local_timestamp(self) -> int:
        return int(time.time())

    @retry(
        reraise=True,
        retry=retry_if_exception_type((asyncio.TimeoutError, aiohttp.ClientError)),
        wait=wait_exponential(multiplier=0.1, max=1),
        stop=stop_after_delay(3),
    )
    def _get_server_timestamp(self) -> int:
        return self._run_coroutine(self._live.get_server_timestamp())

    def _make_path(self, timestamp: int) -> str:
        date_time = datetime.fromtimestamp(timestamp)
        relpath = self.path_template.format(
            roomid=self._live.room_id,
            uname=escape_path(self._live.user_info.name),
            title=escape_path(self._live.room_info.title),
            area=escape_path(self._live.room_info.area_name),
            parent_area=escape_path(self._live.room_info.parent_area_name),
            year=date_time.year,
            month=str(date_time.month).rjust(2, '0'),
            day=str(date_time.day).rjust(2, '0'),
            hour=str(date_time.hour).rjust(2, '0'),
            minute=str(date_time.minute).rjust(2, '0'),
            second=str(date_time.second).rjust(2, '0'),
        )

        pathname = os.path.abspath(
            os.path.expanduser(os.path.join(self.out_dir, relpath) + '.flv')
        )
        os.makedirs(os.path.dirname(pathname), exist_ok=True)
        while os.path.exists(pathname):
            root, ext = os.path.splitext(pathname)
            m = re.search(r'_\((\d+)\)$', root)
            if m is None:
                root += '_(1)'
            else:
                root = re.sub(r'\(\d+\)$', f'({int(m.group(1)) + 1})', root)
            pathname = root + ext

        return pathname
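`_make_path` avoids clobbering an existing recording by appending, then incrementing, a `_(n)` suffix before the extension. The collision loop, extracted standalone (illustrative):

```python
# Illustrative sketch of the collision-avoidance loop.
import os
import re

def dedup(pathname: str) -> str:
    while os.path.exists(pathname):
        root, ext = os.path.splitext(pathname)
        m = re.search(r'_\((\d+)\)$', root)
        if m is None:
            root += '_(1)'
        else:
            root = re.sub(r'\(\d+\)$', f'({int(m.group(1)) + 1})', root)
        pathname = root + ext
    return pathname

# 'rec.flv' -> 'rec_(1).flv' -> 'rec_(2).flv' as names collide
```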
@ -13,7 +13,7 @@ from ..bili.live_monitor import LiveEventListener, LiveMonitor
from ..bili.models import RoomInfo
from ..bili.typing import QualityNumber, StreamFormat
from ..event.event_emitter import EventEmitter, EventListener
from ..flv.data_analyser import MetaData
from ..flv.operators import MetaData, StreamProfile
from ..logging.room_id import aio_task_with_room_id
from ..utils.mixins import AsyncStoppableMixin
from .cover_downloader import CoverDownloader, CoverSaveStrategy
@ -21,7 +21,6 @@ from .danmaku_dumper import DanmakuDumper, DanmakuDumperEventListener
from .danmaku_receiver import DanmakuReceiver
from .raw_danmaku_dumper import RawDanmakuDumper, RawDanmakuDumperEventListener
from .raw_danmaku_receiver import RawDanmakuReceiver
from .stream_analyzer import StreamProfile
from .stream_recorder import StreamRecorder, StreamRecorderEventListener

__all__ = 'RecorderEventListener', 'Recorder'
@ -402,8 +401,8 @@ class Recorder(
    async def on_raw_danmaku_file_completed(self, path: str) -> None:
        await self._emit('raw_danmaku_file_completed', self, path)

    async def on_stream_recording_stopped(self) -> None:
        logger.debug('Stream recording stopped')
    async def on_stream_recording_completed(self) -> None:
        logger.debug('Stream recording completed')
        await self._stop_recording()

    async def _do_start(self) -> None:
@ -1,60 +0,0 @@
import logging
from typing import Any, Callable, Optional, Type, cast

from tenacity import wait_exponential, RetryCallState
from tenacity import _utils


class wait_exponential_for_same_exceptions(wait_exponential):
    """Wait strategy that applies exponential backoff only for same
    continuing exceptions.
    """

    def __init__(
        self,
        multiplier: float = 1,
        max: float = _utils.MAX_WAIT,
        exp_base: int = 2,
        min: float = 0,
        continuing_criteria: float = 5.0,
    ) -> None:
        super().__init__(multiplier, max, exp_base, min)
        self._continuing_criteria = continuing_criteria
        self._prev_exc_type: Optional[Type[BaseException]] = None
        self._prev_exc_ts: Optional[float] = None
        self._last_wait_time: float = 0

    def __call__(self, retry_state: RetryCallState) -> float:
        if (
            retry_state.outcome is not None and
            (exc := retry_state.outcome.exception())
        ):
            curr_exc_type = type(exc)
            curr_exc_ts = cast(float, retry_state.outcome_timestamp)
            if (
                curr_exc_type is not self._prev_exc_type or
                not self._is_continuing(curr_exc_ts)
            ):
                retry_state.attempt_number = 1
            self._prev_exc_type = curr_exc_type
            self._prev_exc_ts = curr_exc_ts

        self._last_wait_time = wait_time = super().__call__(retry_state)
        return wait_time

    def _is_continuing(self, curr_exc_ts: float) -> bool:
        assert self._prev_exc_ts is not None
        return (
            curr_exc_ts - (self._prev_exc_ts + self._last_wait_time) <
            self._continuing_criteria
        )


def before_sleep_log(
    logger: logging.Logger, log_level: int, name: str = ''
) -> Callable[[RetryCallState], Any]:
    def log_it(retry_state: RetryCallState) -> None:
        seconds = cast(float, getattr(retry_state.next_action, 'sleep'))
        logger.log(log_level, 'Retry %s after %s seconds', name, seconds)

    return log_it
@ -1,10 +1,10 @@
import time


__all__ = 'StatisticsCalculator',
__all__ = 'Statistics',


class StatisticsCalculator:
class Statistics:
    def __init__(self, interval: float = 1.0) -> None:
        self._interval = interval
        self._frozen = True
120
src/blrec/core/stream_param_holder.py
Normal file
120
src/blrec/core/stream_param_holder.py
Normal file
@ -0,0 +1,120 @@
from __future__ import annotations

import logging
from typing import Any, Final, Optional

import attr
from reactivex import Observable, abc, create
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
from reactivex.scheduler.currentthreadscheduler import CurrentThreadScheduler

from ..bili.typing import ApiPlatform, QualityNumber, StreamFormat

__all__ = ('StreamParamHolder',)


logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, frozen=True, slots=True)
class StreamParams:
    stream_format: StreamFormat
    quality_number: QualityNumber
    api_platform: ApiPlatform
    use_alternative_stream: bool


class StreamParamHolder:
    def __init__(
        self,
        *,
        stream_format: StreamFormat = 'flv',
        quality_number: QualityNumber = 10000,
        api_platform: ApiPlatform = 'android',
        use_alternative_stream: bool = False,
    ) -> None:
        super().__init__()
        self._stream_format: Final = stream_format
        self._quality_number = quality_number
        self._real_quality_number: Optional[QualityNumber] = None
        self._api_platform: ApiPlatform = api_platform
        self._use_alternative_stream: bool = use_alternative_stream
        self._cancelled: bool = False

    def reset(self) -> None:
        self._real_quality_number = None
        self._api_platform = 'android'
        self._use_alternative_stream = False
        self._cancelled = False

    def cancel(self) -> None:
        self._cancelled = True

    @property
    def stream_format(self) -> StreamFormat:
        return self._stream_format

    @property
    def quality_number(self) -> QualityNumber:
        return self._quality_number

    @quality_number.setter
    def quality_number(self, value: QualityNumber) -> None:
        self._quality_number = value

    @property
    def real_quality_number(self) -> QualityNumber:
        return self._real_quality_number or self._quality_number

    @property
    def use_alternative_stream(self) -> bool:
        return self._use_alternative_stream

    @use_alternative_stream.setter
    def use_alternative_stream(self, value: bool) -> None:
        self._use_alternative_stream = value

    def fall_back_quality(self) -> None:
        self._real_quality_number = 10000

    def rotate_api_platform(self) -> None:
        if self._api_platform == 'android':
            self._api_platform = 'web'
        else:
            self._api_platform = 'android'

    def get_stream_params(self) -> Observable[StreamParams]:
        self.reset()

        def subscribe(
            observer: abc.ObserverBase[StreamParams],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            _scheduler = scheduler or CurrentThreadScheduler.singleton()

            disposed = False
            cancelable = SerialDisposable()

            def action(
                scheduler: abc.SchedulerBase, state: Optional[Any] = None
            ) -> None:
                if self._cancelled or disposed:
                    return

                params = StreamParams(
                    stream_format=self._stream_format,
                    quality_number=self._real_quality_number or self._quality_number,
                    api_platform=self._api_platform,
                    use_alternative_stream=self._use_alternative_stream,
                )
                observer.on_next(params)

            cancelable.disposable = _scheduler.schedule(action)

            def dispose() -> None:
                nonlocal disposed
                disposed = True

            return CompositeDisposable(cancelable, Disposable(dispose))

        return create(subscribe)
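`get_stream_params` resets the holder once when the observable is created, but each subscription reads the holder's state at subscribe time, so fallbacks applied between retries take effect on the next attempt. An illustrative sketch (behavior inferred from the code above, not stated by the commit):

```python
# Illustrative sketch, not part of this commit.
holder = StreamParamHolder(quality_number=20000)
params = holder.get_stream_params()                  # reset() runs here
params.subscribe(lambda p: print(p.quality_number))  # 20000
holder.fall_back_quality()
params.subscribe(lambda p: print(p.quality_number))  # 10000
```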
@ -6,11 +6,10 @@ from typing import Iterator, Optional
from ..bili.live import Live
from ..bili.typing import QualityNumber, StreamFormat
from ..event.event_emitter import EventEmitter
from ..flv.data_analyser import MetaData
from ..flv.operators import MetaData, StreamProfile
from ..utils.mixins import AsyncStoppableMixin
from .flv_stream_recorder_impl import FLVStreamRecorderImpl
from .hls_stream_recorder_impl import HLSStreamRecorderImpl
from .stream_analyzer import StreamProfile
from .stream_recorder_impl import StreamRecorderEventListener

__all__ = 'StreamRecorder', 'StreamRecorderEventListener'
@ -230,15 +229,15 @@ class StreamRecorder(
    async def on_video_file_completed(self, path: str) -> None:
        await self._emit('video_file_completed', path)

    async def on_stream_recording_stopped(self) -> None:
        await self._emit('stream_recording_stopped')
    async def on_stream_recording_completed(self) -> None:
        await self._emit('stream_recording_completed')

    async def _wait_fmp4_stream(self) -> bool:
        end_time = time.monotonic() + self.fmp4_stream_timeout
        available = False  # debounce
        while True:
            try:
                await self._impl._live.get_live_stream_urls(stream_format='fmp4')
                await self._impl._live.get_live_stream_url(stream_format='fmp4')
            except Exception:
                available = False
                if time.monotonic() > end_time:
@ -1,61 +1,27 @@
import asyncio
import errno
import io
import logging
import os
import re
import time
from abc import ABC, abstractmethod
from collections import OrderedDict
from datetime import datetime, timedelta, timezone
from threading import Thread
from typing import Any, BinaryIO, Dict, Final, Iterator, Optional, Tuple
from typing import Any, Iterator, List, Optional, Tuple, Union

import aiohttp
import requests
import urllib3
from rx.core import Observable
from rx.subject import Subject
from tenacity import (
    Retrying,
    TryAgain,
    retry,
    retry_if_exception_type,
    retry_if_not_exception_type,
    retry_if_result,
    stop_after_attempt,
    stop_after_delay,
    wait_chain,
    wait_exponential,
    wait_fixed,
    wait_none,
)
from tqdm import tqdm
from reactivex import abc
from reactivex.typing import StartableFactory, StartableTarget

from .. import __github__, __prog__, __version__
from ..bili.exceptions import (
    LiveRoomEncrypted,
    LiveRoomHidden,
    LiveRoomLocked,
    NoStreamAvailable,
    NoStreamFormatAvailable,
    NoStreamQualityAvailable,
)
from ..bili.helpers import get_quality_name
from ..bili.live import Live
from ..bili.typing import ApiPlatform, QualityNumber, StreamFormat
from ..bili.typing import QualityNumber, StreamFormat
from ..event.event_emitter import EventEmitter, EventListener
from ..flv.data_analyser import MetaData
from ..flv.stream_processor import BaseOutputFileManager, StreamProcessor
from ..flv import operators as flv_ops
from ..flv.metadata_dumper import MetadataDumper
from ..flv.operators import StreamProfile
from ..logging.room_id import aio_task_with_room_id
from ..path import escape_path
from ..utils.io import wait_for
from ..utils.mixins import AsyncCooperationMixin, AsyncStoppableMixin
from .retry import before_sleep_log, wait_exponential_for_same_exceptions
from .statistics import StatisticsCalculator
from .stream_analyzer import StreamProfile
from .stream_remuxer import StreamRemuxer
from . import operators as core_ops
from .metadata_provider import MetadataProvider
from .path_provider import PathProvider
from .stream_param_holder import StreamParamHolder

__all__ = 'StreamRecorderImpl',
__all__ = ('StreamRecorderImpl',)


logger = logging.getLogger(__name__)
@ -63,15 +29,13 @@ logging.getLogger(urllib3.__name__).setLevel(logging.WARNING)


class StreamRecorderEventListener(EventListener):
    async def on_video_file_created(
        self, path: str, record_start_time: int
    ) -> None:
    async def on_video_file_created(self, path: str, record_start_time: int) -> None:
        ...

    async def on_video_file_completed(self, path: str) -> None:
        ...

    async def on_stream_recording_stopped(self) -> None:
    async def on_stream_recording_completed(self) -> None:
        ...


@ -98,139 +62,183 @@ class StreamRecorderImpl(
        super().__init__()

        self._live = live
        self._progress_bar: Optional[tqdm] = None
        self._stream_remuxer: Optional[StreamRemuxer] = None
        self._stream_processor: Optional[StreamProcessor] = None
        self._dl_calculator = StatisticsCalculator()
        self._rec_calculator = StatisticsCalculator()
        self._file_manager = OutputFileManager(
            live, out_dir, path_template, buffer_size
        self._session = requests.Session()
        self._stream_param_holder = StreamParamHolder(
            stream_format=stream_format, quality_number=quality_number
        )
        self._stream_url_resolver = core_ops.StreamURLResolver(
            live, self._stream_param_holder
        )
        self._stream_fetcher = core_ops.StreamFetcher(
            live, self._session, read_timeout=read_timeout
        )
        self._stream_parser = core_ops.StreamParser(
            self._stream_param_holder, ignore_eof=stream_format != 'flv'
        )
        self._progress_bar = core_ops.ProgressBar(live)
        self._analyser = flv_ops.Analyser()
        self._metadata_provider = MetadataProvider(live, self)
        self._injector = flv_ops.Injector(self._metadata_provider)
        self._join_point_extractor = flv_ops.JoinPointExtractor()
        self._limiter = flv_ops.Limiter(filesize_limit, duration_limit)
        self._cutter = flv_ops.Cutter()
        self._path_provider = PathProvider(live, out_dir, path_template)
        self._dumper = flv_ops.Dumper(self._path_provider, buffer_size)
        self._rec_statistics = core_ops.SizedStatistics()

        self._stream_format: Final = stream_format
        self._quality_number = quality_number
        self._real_quality_number: Optional[QualityNumber] = None
        self._api_platform: ApiPlatform = 'android'
        self._use_alternative_stream: bool = False
        self.buffer_size = buffer_size or io.DEFAULT_BUFFER_SIZE  # bytes
        self.read_timeout = read_timeout or 3  # seconds
        self.disconnection_timeout = disconnection_timeout or 600  # seconds
        self._prober: Union[flv_ops.Prober, core_ops.HLSProber]
        self._dl_statistics: Union[core_ops.StreamStatistics, core_ops.SizedStatistics]
        if stream_format == 'flv':
            self._prober = flv_ops.Prober()
            self._dl_statistics = core_ops.StreamStatistics()
        else:
            self._prober = core_ops.HLSProber()
            self._dl_statistics = core_ops.SizedStatistics()

        self._filesize_limit = filesize_limit or 0
        self._duration_limit = duration_limit or 0
        self._request_exception_handler = core_ops.RequestExceptionHandler()
        self._connection_error_handler = core_ops.ConnectionErrorHandler(
            live, disconnection_timeout=disconnection_timeout
        )
        self._exception_handler = core_ops.ExceptionHandler()
        self._metadata_dumper = MetadataDumper(
            self._dumper, self._analyser, self._join_point_extractor
        )
        self._metadata_dumper.enable()

        self._stream_url: str = ''
        self._stream_host: str = ''
        self._subscription: abc.DisposableBase

        self._threads: List[Thread] = []
        self._files: List[str] = []
        self._stream_profile: StreamProfile = {}

        def on_file_created(args: Tuple[str, int]) -> None:
        def on_profile_updated(profile: StreamProfile) -> None:
            self._stream_profile = profile

        self._prober.profiles.subscribe(on_profile_updated)

        def on_file_opened(args: Tuple[str, int]) -> None:
            logger.info(f"Video file created: '{args[0]}'")
            self._files.append(args[0])
            self._emit_event('video_file_created', *args)

        def on_file_closed(path: str) -> None:
            logger.info(f"Video file completed: '{path}'")
            self._emit_event('video_file_completed', path)

        self._file_manager.file_creates.subscribe(on_file_created)
        self._file_manager.file_closes.subscribe(on_file_closed)
        self._dumper.file_opened.subscribe(on_file_opened)
        self._dumper.file_closed.subscribe(on_file_closed)

    @property
    def stream_url(self) -> str:
        return self._stream_url
        return self._stream_url_resolver.stream_url

    @property
    def stream_host(self) -> str:
        return self._stream_host
        return self._stream_url_resolver.stream_host

    @property
    def dl_total(self) -> int:
        return self._dl_calculator.count
        return self._dl_statistics.count

    @property
    def dl_rate(self) -> float:
        return self._dl_calculator.rate
        return self._dl_statistics.rate

    @property
    def rec_elapsed(self) -> float:
        return self._rec_calculator.elapsed
        return self._rec_statistics.elapsed

    @property
    def rec_total(self) -> int:
        return self._rec_calculator.count
        return self._rec_statistics.count

    @property
    def rec_rate(self) -> float:
        return self._rec_calculator.rate
        return self._rec_statistics.rate

    @property
    def out_dir(self) -> str:
        return self._file_manager.out_dir
        return self._path_provider.out_dir

    @out_dir.setter
    def out_dir(self, value: str) -> None:
        self._file_manager.out_dir = value
        self._path_provider.out_dir = value

    @property
    def path_template(self) -> str:
        return self._file_manager.path_template
        return self._path_provider.path_template

    @path_template.setter
    def path_template(self, value: str) -> None:
        self._file_manager.path_template = value
        self._path_provider.path_template = value

    @property
    def stream_format(self) -> StreamFormat:
        return self._stream_format
        return self._stream_param_holder.stream_format

    @property
    def quality_number(self) -> QualityNumber:
        return self._quality_number
        return self._stream_param_holder.quality_number

    @quality_number.setter
    def quality_number(self, value: QualityNumber) -> None:
        self._quality_number = value
        self._stream_param_holder.quality_number = value

    @property
    def real_quality_number(self) -> Optional[QualityNumber]:
        if self.stopped:
            return None
        return self._real_quality_number
        return self._stream_param_holder.real_quality_number

    @property
    def filesize_limit(self) -> int:
        if self._stream_processor is not None:
            return self._stream_processor.filesize_limit
        else:
            return self._filesize_limit
        return self._limiter.filesize_limit

    @filesize_limit.setter
    def filesize_limit(self, value: int) -> None:
        self._filesize_limit = value
        if self._stream_processor is not None:
            self._stream_processor.filesize_limit = value
        self._limiter.filesize_limit = value

    @property
    def duration_limit(self) -> int:
        if self._stream_processor is not None:
            return self._stream_processor.duration_limit
        else:
            return self._duration_limit
        return self._limiter.duration_limit

    @duration_limit.setter
    def duration_limit(self, value: int) -> None:
        self._duration_limit = value
        if self._stream_processor is not None:
            self._stream_processor.duration_limit = value
        self._limiter.duration_limit = value

    @property
    def read_timeout(self) -> int:
        return self._stream_fetcher.read_timeout

    @read_timeout.setter
    def read_timeout(self, value: int) -> None:
        self._stream_fetcher.read_timeout = value

    @property
    def disconnection_timeout(self) -> int:
        return self._connection_error_handler.disconnection_timeout

    @disconnection_timeout.setter
    def disconnection_timeout(self, value: int) -> None:
        self._connection_error_handler.disconnection_timeout = value

    @property
    def buffer_size(self) -> int:
        return self._dumper.buffer_size

    @buffer_size.setter
    def buffer_size(self, value: int) -> None:
        self._dumper.buffer_size = value

    @property
    def recording_path(self) -> Optional[str]:
        return self._file_manager.curr_path
        return self._dumper.path

    @property
    def metadata(self) -> Optional[MetaData]:
        if self._stream_processor is not None:
            return self._stream_processor.metadata
        else:
    def metadata(self) -> Optional[flv_ops.MetaData]:
        try:
            return self._analyser.make_metadata()
        except Exception:
            return None

    @property
@ -238,225 +246,58 @@ class StreamRecorderImpl(
        return self._stream_profile

    def has_file(self) -> bool:
        return self._file_manager.has_file()
        return bool(self._files)

    def get_files(self) -> Iterator[str]:
        yield from self._file_manager.get_files()
        yield from iter(self._files)

    def clear_files(self) -> None:
        self._file_manager.clear_files()
        self._files.clear()

    def can_cut_stream(self) -> bool:
        if self._stream_processor is None:
            return False
        return self._stream_processor.can_cut_stream()
        return self._cutter.can_cut_stream()

    def cut_stream(self) -> bool:
        if self._stream_processor is None:
            return False
        return self._stream_processor.cut_stream()
        return self._cutter.cut_stream()

    def update_progress_bar_info(self) -> None:
        if self._progress_bar is not None:
            self._progress_bar.set_postfix_str(self._make_pbar_postfix())
        self._progress_bar.update_bar_info()

    def _reset(self) -> None:
        self._dl_calculator.reset()
        self._rec_calculator.reset()
        self._stream_url = ''
        self._stream_host = ''
        self._files.clear()
        self._stream_profile = {}
        self._api_platform = 'android'
        self._real_quality_number = None
        self._use_alternative_stream = False
        self._fall_back_stream_format = False

    async def _do_start(self) -> None:
        logger.debug('Starting stream recorder...')
        self._reset()
        self._thread = Thread(
            target=self._run, name=f'StreamRecorder::{self._live.room_id}'
        )
        self._thread.start()
        self._run()
        logger.debug('Started stream recorder')

    async def _do_stop(self) -> None:
        logger.debug('Stopping stream recorder...')
        if self._stream_processor is not None:
            self._stream_processor.cancel()
        await self._loop.run_in_executor(None, self._thread.join)
        self._stream_param_holder.cancel()
        thread = self._thread_factory('StreamRecorderDisposer')(
            self._subscription.dispose
        )
        thread.start()
        for thread in self._threads:
            await self._loop.run_in_executor(None, thread.join, 30)
        self._threads.clear()
        logger.debug('Stopped stream recorder')

    @abstractmethod
    def _run(self) -> None:
        raise NotImplementedError()

    @abstractmethod
    def _streaming_loop(self) -> None:
        raise NotImplementedError()

    def _main_loop(self) -> None:
        for attempt in Retrying(
            reraise=True,
            retry=(
                retry_if_result(lambda r: not self._stopped) |
                retry_if_not_exception_type((NotImplementedError))
            ),
            wait=wait_exponential_for_same_exceptions(max=60),
            before_sleep=before_sleep_log(logger, logging.DEBUG, 'main_loop'),
        ):
            with attempt:
                try:
                    self._streaming_loop()
                except (NoStreamAvailable, NoStreamFormatAvailable) as e:
                    logger.warning(f'Failed to get live stream url: {repr(e)}')
                except OSError as e:
                    logger.critical(repr(e), exc_info=e)
                    if e.errno == errno.ENOSPC:
                        # OSError(28, 'No space left on device')
                        self._handle_exception(e)
                        self._stopped = True
                except LiveRoomHidden:
                    logger.error('The live room has been hidden!')
                    self._stopped = True
                except LiveRoomLocked:
                    logger.error('The live room has been locked!')
                    self._stopped = True
                except LiveRoomEncrypted:
                    logger.error('The live room has been encrypted!')
                    self._stopped = True
                except Exception as e:
                    logger.exception(e)
                    self._handle_exception(e)

    def _rotate_api_platform(self) -> None:
        if self._api_platform == 'android':
            self._api_platform = 'web'
        else:
            self._api_platform = 'android'

    @retry(
        reraise=True,
        retry=retry_if_exception_type((
            asyncio.TimeoutError, aiohttp.ClientError,
        )),
        wait=wait_chain(wait_none(), wait_fixed(1)),
        stop=stop_after_attempt(300),
    )
    def _get_live_stream_url(self) -> str:
        fmt = self._stream_format
        qn = self._real_quality_number or self.quality_number
        logger.info(
            f'Getting the live stream url... qn: {qn}, format: {fmt}, '
            f'api platform: {self._api_platform}, '
            f'use alternative stream: {self._use_alternative_stream}'
        )
        try:
            urls = self._run_coroutine(
                self._live.get_live_stream_urls(
                    qn,
                    api_platform=self._api_platform,
                    stream_format=fmt,
                )
    def _thread_factory(self, name: str) -> StartableFactory:
        def factory(target: StartableTarget) -> Thread:
            thread = Thread(
                target=target, daemon=True, name=f'{name}::{self._live.room_id}'
            )
        except NoStreamQualityAvailable:
            logger.info(
                f'The specified stream quality ({qn}) is not available, '
                'will using the original stream quality (10000) instead.'
            )
            self._real_quality_number = 10000
            raise TryAgain
        else:
            logger.info(
                f'Adopted the stream format ({fmt}) and quality ({qn})'
            )
            self._real_quality_number = qn
            self._threads.append(thread)
            return thread

        if not self._use_alternative_stream:
            url = urls[0]
        else:
            try:
                url = urls[1]
            except IndexError:
                self._use_alternative_stream = False
                self._rotate_api_platform()
                logger.info(
                    'No alternative stream url available, '
                    'will using the primary stream url '
                    f'from {self._api_platform} api instead.'
                )
                raise TryAgain
        logger.info(f"Got live stream url: '{url}'")

        return url

    def _defer_retry(self, seconds: float, name: str = '') -> None:
        if seconds <= 0:
            return
        logger.debug(f'Retry {name} after {seconds} seconds')
        time.sleep(seconds)

    def _wait_for_connection_error(self, check_interval: int = 3) -> None:
        timeout = self.disconnection_timeout
        logger.info(f'Waiting {timeout} seconds for connection recovery... ')
        timebase = time.monotonic()
        while not self._run_coroutine(self._live.check_connectivity()):
            if timeout is not None and time.monotonic() - timebase > timeout:
                logger.error(f'Connection not recovered in {timeout} seconds')
                self._stopped = True
                break
            time.sleep(check_interval)
        else:
            logger.info('Connection recovered')

    def _make_pbar_postfix(self) -> str:
        return '{room_id} - {user_name}: {room_title}'.format(
            room_id=self._live.room_info.room_id,
            user_name=self._live.user_info.name,
            room_title=self._live.room_info.title,
        )

    def _make_metadata(self) -> Dict[str, Any]:
        live_start_time = datetime.fromtimestamp(
            self._live.room_info.live_start_time, timezone(timedelta(hours=8))
        )

        assert self._real_quality_number is not None
        stream_quality = '{} ({}{})'.format(
            get_quality_name(self._real_quality_number),
            self._real_quality_number,
            ', bluray' if '_bluray' in self._stream_url else '',
        )

        return {
            'Title': self._live.room_info.title,
            'Artist': self._live.user_info.name,
            'Date': str(live_start_time),
            'Comment': f'''\
B站直播录像
主播:{self._live.user_info.name}
标题:{self._live.room_info.title}
分区:{self._live.room_info.parent_area_name} - {self._live.room_info.area_name}
房间号:{self._live.room_info.room_id}
开播时间:{live_start_time}
流主机: {self._stream_host}
流格式:{self._stream_format}
流画质:{stream_quality}
录制程序:{__prog__} v{__version__} {__github__}''',
            'description': OrderedDict({
                'UserId': str(self._live.user_info.uid),
                'UserName': self._live.user_info.name,
                'RoomId': str(self._live.room_info.room_id),
                'RoomTitle': self._live.room_info.title,
                'Area': self._live.room_info.area_name,
                'ParentArea': self._live.room_info.parent_area_name,
                'LiveStartTime': str(live_start_time),
                'StreamHost': self._stream_host,
                'StreamFormat': self._stream_format,
                'StreamQuality': stream_quality,
                'Recorder': f'{__prog__} v{__version__} {__github__}',
            })
        }
            return factory

    def _emit_event(self, name: str, *args: Any, **kwds: Any) -> None:
        self._run_coroutine(self._emit(name, *args, **kwds))
@ -464,155 +305,3 @@ B站直播录像
    @aio_task_with_room_id
    async def _emit(self, *args: Any, **kwds: Any) -> None:  # type: ignore
        await super()._emit(*args, **kwds)


class StreamProxy(io.RawIOBase):
    def __init__(
        self,
        stream: io.BufferedIOBase,
        *,
        read_timeout: Optional[float] = None,
    ) -> None:
        self._stream = stream
        self._read_timmeout = read_timeout
        self._offset = 0
        self._size_updates = Subject()

    @property
    def size_updates(self) -> Observable:
        return self._size_updates

    @property
    def closed(self) -> bool:
        # always return False to avoid that `ValueError: read of closed file`,
        # raised from `CHECK_CLOSED(self, "read of closed file")`,
        # result in losing data those remaining in the buffer.
        # ref: `https://github.com/python/cpython/blob/63298930fb531ba2bb4f23bc3b915dbf1e17e9e1/Modules/_io/bufferedio.c#L882`  # noqa
        return False

    def fileno(self) -> int:
        return self._stream.fileno()

    def readable(self) -> bool:
        return True

    def read(self, size: int = -1) -> bytes:
        if self._stream.closed:
            raise EOFError
        if self._read_timmeout:
            data = wait_for(
                self._stream.read, args=(size, ), timeout=self._read_timmeout
            )
        else:
            data = self._stream.read(size)
        self._offset += len(data)
        self._size_updates.on_next(len(data))
        return data

    def tell(self) -> int:
        return self._offset

    def readinto(self, b: Any) -> int:
        if self._stream.closed:
            raise EOFError
        if self._read_timmeout:
            n = wait_for(
                self._stream.readinto, args=(b, ), timeout=self._read_timmeout
            )
        else:
            n = self._stream.readinto(b)
        self._offset += n
        self._size_updates.on_next(n)
        return n

    def close(self) -> None:
        self._stream.close()


class OutputFileManager(BaseOutputFileManager, AsyncCooperationMixin):
    def __init__(
        self,
        live: Live,
        out_dir: str,
        path_template: str,
        buffer_size: Optional[int] = None,
    ) -> None:
        super().__init__(buffer_size)
        self._live = live

        self.out_dir = out_dir
        self.path_template = path_template

        self._file_creates = Subject()
        self._file_closes = Subject()

    @property
    def file_creates(self) -> Observable:
        return self._file_creates

    @property
    def file_closes(self) -> Observable:
        return self._file_closes

    def create_file(self) -> BinaryIO:
        self._start_time = self._get_timestamp()
        file = super().create_file()
        self._file_creates.on_next((self._curr_path, self._start_time))
        return file

    def close_file(self) -> None:
        path = self._curr_path
        super().close_file()
        self._file_closes.on_next(path)

    def _get_timestamp(self) -> int:
        try:
            return self._get_server_timestamp()
        except Exception as e:
            logger.warning(f'Failed to get server timestamp: {repr(e)}')
            return self._get_local_timestamp()

    def _get_local_timestamp(self) -> int:
        return int(time.time())

    @retry(
        reraise=True,
        retry=retry_if_exception_type((
            asyncio.TimeoutError, aiohttp.ClientError,
        )),
        wait=wait_exponential(multiplier=0.1, max=1),
        stop=stop_after_delay(3),
    )
    def _get_server_timestamp(self) -> int:
        return self._run_coroutine(self._live.get_server_timestamp())

    def _make_path(self) -> str:
        date_time = datetime.fromtimestamp(self._start_time)
        relpath = self.path_template.format(
            roomid=self._live.room_id,
            uname=escape_path(self._live.user_info.name),
            title=escape_path(self._live.room_info.title),
            area=escape_path(self._live.room_info.area_name),
            parent_area=escape_path(self._live.room_info.parent_area_name),
            year=date_time.year,
            month=str(date_time.month).rjust(2, '0'),
            day=str(date_time.day).rjust(2, '0'),
            hour=str(date_time.hour).rjust(2, '0'),
            minute=str(date_time.minute).rjust(2, '0'),
            second=str(date_time.second).rjust(2, '0'),
        )

        pathname = os.path.abspath(
            os.path.expanduser(os.path.join(self.out_dir, relpath) + '.flv')
        )
        os.makedirs(os.path.dirname(pathname), exist_ok=True)
        while os.path.exists(pathname):
            root, ext = os.path.splitext(pathname)
            m = re.search(r'_\((\d+)\)$', root)
            if m is None:
                root += '_(1)'
            else:
                root = re.sub(r'\(\d+\)$', f'({int(m.group(1)) + 1})', root)
            pathname = root + ext

        return pathname
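The net effect of this file's rewrite: the imperative `_main_loop`/`_streaming_loop`/`_get_live_stream_url` machinery, the `StreamProxy` byte counter, and the `OutputFileManager` are all replaced by the operators introduced above, composed into a single subscription. The concrete `pipe` chain lives in the format-specific subclasses (`FLVStreamRecorderImpl`, `HLSStreamRecorderImpl`), which this diff does not show; a hypothetical FLV-path composition, to make the data flow explicit (the ordering and exact operator set here are assumptions):

```python
# Illustrative sketch only; the real chain is in FLVStreamRecorderImpl.
self._subscription = (
    self._stream_param_holder.get_stream_params()
    .pipe(
        self._stream_url_resolver,  # StreamParams -> stream URL
        self._stream_fetcher,       # URL -> raw HTTP byte stream
        self._dl_statistics,        # measure download rate/total
        self._stream_parser,        # bytes -> FLV tags, with retry
        self._injector,             # inject metadata into the FLV
        self._limiter,              # split on filesize/duration limits
        self._cutter,               # support manual stream cutting
        self._dumper,               # write files named by PathProvider
        self._rec_statistics,       # measure recording rate/total
    )
    .subscribe()
)
```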
@ -98,23 +98,23 @@ class StreamRemuxer(StoppableMixin, SupportDebugMixin):
        self._exception = None
        try:
            self._run_subprocess()
        except BrokenPipeError as e:
            logger.debug(repr(e))
        except FFmpegError as e:
        except BrokenPipeError as exc:
            logger.debug(repr(exc))
        except FFmpegError as exc:
            if not self._stopped:
                logger.warning(repr(e))
                logger.warning(repr(exc))
            else:
                logger.debug(repr(e))
        except TimeoutError as e:
            logger.debug(repr(e))
        except Exception as e:
                logger.debug(repr(exc))
        except TimeoutError as exc:
            logger.debug(repr(exc))
        except Exception as exc:
            # OSError: [Errno 22] Invalid argument
            # https://stackoverflow.com/questions/23688492/oserror-errno-22-invalid-argument-in-subprocess
            if isinstance(e, OSError) and e.errno == errno.EINVAL:
            if isinstance(exc, OSError) and exc.errno == errno.EINVAL:
                pass
            else:
                self._exception = e
                logger.exception(e)
                self._exception = exc
                logger.exception(exc)
        finally:
            self._stopped = True
            logger.debug('Stopped stream remuxer')
1
src/blrec/data/webapp/869.0ab6b8a3f466df77.js
Normal file
File diff suppressed because one or more lines are too long
@ -10,6 +10,6 @@
<body>
  <app-root></app-root>
  <noscript>Please enable JavaScript to continue using this application.</noscript>
  <script src="runtime.3ef3795d150b06f7.js" type="module"></script><script src="polyfills.4b08448aee19bb22.js" type="module"></script><script src="main.b9234f0840c7101a.js" type="module"></script>
  <script src="runtime.8ba8344712d0946d.js" type="module"></script><script src="polyfills.4b08448aee19bb22.js" type="module"></script><script src="main.b9234f0840c7101a.js" type="module"></script>

</body></html>
@ -1,6 +1,6 @@
{
"configVersion": 1,
"timestamp": 1651824752572,
"timestamp": 1653113686866,
"index": "/index.html",
"assetGroups": [
{
@ -16,13 +16,13 @@
"/45.c90c3cea2bf1a66e.js",
"/474.7f6529972e383566.js",
"/66.31f5b9ae46ae9005.js",
"/869.42b1fd9a88732b97.js",
"/869.0ab6b8a3f466df77.js",
"/common.858f777e9296e6f2.js",
"/index.html",
"/main.b9234f0840c7101a.js",
"/manifest.webmanifest",
"/polyfills.4b08448aee19bb22.js",
"/runtime.3ef3795d150b06f7.js",
"/runtime.8ba8344712d0946d.js",
"/styles.1f581691b230dc4d.css"
],
"patterns": []
@ -1639,7 +1639,7 @@
"/45.c90c3cea2bf1a66e.js": "e5bfb8cf3803593e6b8ea14c90b3d3cb6a066764",
"/474.7f6529972e383566.js": "1c74b5c6379705a3110c99767f97feddc42a0d54",
"/66.31f5b9ae46ae9005.js": "cc22d2582d8e4c2a83e089d5a1ec32619e439ccd",
"/869.42b1fd9a88732b97.js": "ca5c951f04d02218b3fe7dc5c022dad22bf36eca",
"/869.0ab6b8a3f466df77.js": "fd3e32d78790ec916177aa38b49ca32bfe62d0d4",
"/assets/animal/panda.js": "fec2868bb3053dd2da45f96bbcb86d5116ed72b1",
"/assets/animal/panda.svg": "bebd302cdc601e0ead3a6d2710acf8753f3d83b1",
"/assets/fill/.gitkeep": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
@ -3234,11 +3234,11 @@
"/assets/twotone/warning.js": "fb2d7ea232f3a99bf8f080dbc94c65699232ac01",
"/assets/twotone/warning.svg": "8c7a2d3e765a2e7dd58ac674870c6655cecb0068",
"/common.858f777e9296e6f2.js": "b68ca68e1e214a2537d96935c23410126cc564dd",
"/index.html": "29cb770b0e30ea40770ce8c33c7254da20c3577b",
"/index.html": "da07776fe6f05347b1b8360f61b81c59734dcc54",
"/main.b9234f0840c7101a.js": "c8c7b588c070b957a2659f62d6a77de284aa2233",
"/manifest.webmanifest": "62c1cb8c5ad2af551a956b97013ab55ce77dd586",
"/polyfills.4b08448aee19bb22.js": "8e73f2d42cc13ca353cea5c886d930bd6da08d0d",
"/runtime.3ef3795d150b06f7.js": "c33604f420d675070ccaa5359763e8be3c434c0d",
"/runtime.8ba8344712d0946d.js": "264d0e7e1e88dd1a4383d73d401f5ccd51e40eb7",
"/styles.1f581691b230dc4d.css": "6f5befbbad57c2b2e80aae855139744b8010d150"
},
"navigationUrls": [

@ -1 +1 @@
(()=>{"use strict";var e,v={},m={};function r(e){var i=m[e];if(void 0!==i)return i.exports;var t=m[e]={exports:{}};return v[e].call(t.exports,t,t.exports,r),t.exports}r.m=v,e=[],r.O=(i,t,o,f)=>{if(!t){var a=1/0;for(n=0;n<e.length;n++){for(var[t,o,f]=e[n],c=!0,l=0;l<t.length;l++)(!1&f||a>=f)&&Object.keys(r.O).every(b=>r.O[b](t[l]))?t.splice(l--,1):(c=!1,f<a&&(a=f));if(c){e.splice(n--,1);var d=o();void 0!==d&&(i=d)}}return i}f=f||0;for(var n=e.length;n>0&&e[n-1][2]>f;n--)e[n]=e[n-1];e[n]=[t,o,f]},r.n=e=>{var i=e&&e.__esModule?()=>e.default:()=>e;return r.d(i,{a:i}),i},r.d=(e,i)=>{for(var t in i)r.o(i,t)&&!r.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:i[t]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce((i,t)=>(r.f[t](e,i),i),[])),r.u=e=>(592===e?"common":e)+"."+{45:"c90c3cea2bf1a66e",66:"31f5b9ae46ae9005",103:"5b5d2a6e5a8a7479",146:"92e3b29c4c754544",474:"7f6529972e383566",592:"858f777e9296e6f2",869:"42b1fd9a88732b97"}[e]+".js",r.miniCssF=e=>{},r.o=(e,i)=>Object.prototype.hasOwnProperty.call(e,i),(()=>{var e={},i="blrec:";r.l=(t,o,f,n)=>{if(e[t])e[t].push(o);else{var a,c;if(void 0!==f)for(var l=document.getElementsByTagName("script"),d=0;d<l.length;d++){var u=l[d];if(u.getAttribute("src")==t||u.getAttribute("data-webpack")==i+f){a=u;break}}a||(c=!0,(a=document.createElement("script")).type="module",a.charset="utf-8",a.timeout=120,r.nc&&a.setAttribute("nonce",r.nc),a.setAttribute("data-webpack",i+f),a.src=r.tu(t)),e[t]=[o];var s=(g,b)=>{a.onerror=a.onload=null,clearTimeout(p);var _=e[t];if(delete e[t],a.parentNode&&a.parentNode.removeChild(a),_&&_.forEach(h=>h(b)),g)return g(b)},p=setTimeout(s.bind(null,void 0,{type:"timeout",target:a}),12e4);a.onerror=s.bind(null,a.onerror),a.onload=s.bind(null,a.onload),c&&document.head.appendChild(a)}}})(),r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{var e;r.tu=i=>(void 0===e&&(e={createScriptURL:t=>t},"undefined"!=typeof trustedTypes&&trustedTypes.createPolicy&&(e=trustedTypes.createPolicy("angular#bundler",e))),e.createScriptURL(i))})(),r.p="",(()=>{var e={666:0};r.f.j=(o,f)=>{var n=r.o(e,o)?e[o]:void 0;if(0!==n)if(n)f.push(n[2]);else if(666!=o){var a=new Promise((u,s)=>n=e[o]=[u,s]);f.push(n[2]=a);var c=r.p+r.u(o),l=new Error;r.l(c,u=>{if(r.o(e,o)&&(0!==(n=e[o])&&(e[o]=void 0),n)){var s=u&&("load"===u.type?"missing":u.type),p=u&&u.target&&u.target.src;l.message="Loading chunk "+o+" failed.\n("+s+": "+p+")",l.name="ChunkLoadError",l.type=s,l.request=p,n[1](l)}},"chunk-"+o,o)}else e[o]=0},r.O.j=o=>0===e[o];var i=(o,f)=>{var l,d,[n,a,c]=f,u=0;if(n.some(p=>0!==e[p])){for(l in a)r.o(a,l)&&(r.m[l]=a[l]);if(c)var s=c(r)}for(o&&o(f);u<n.length;u++)r.o(e,d=n[u])&&e[d]&&e[d][0](),e[n[u]]=0;return r.O(s)},t=self.webpackChunkblrec=self.webpackChunkblrec||[];t.forEach(i.bind(null,0)),t.push=i.bind(null,t.push.bind(t))})()})();
(()=>{"use strict";var e,v={},m={};function r(e){var i=m[e];if(void 0!==i)return i.exports;var t=m[e]={exports:{}};return v[e].call(t.exports,t,t.exports,r),t.exports}r.m=v,e=[],r.O=(i,t,o,f)=>{if(!t){var a=1/0;for(n=0;n<e.length;n++){for(var[t,o,f]=e[n],c=!0,l=0;l<t.length;l++)(!1&f||a>=f)&&Object.keys(r.O).every(b=>r.O[b](t[l]))?t.splice(l--,1):(c=!1,f<a&&(a=f));if(c){e.splice(n--,1);var d=o();void 0!==d&&(i=d)}}return i}f=f||0;for(var n=e.length;n>0&&e[n-1][2]>f;n--)e[n]=e[n-1];e[n]=[t,o,f]},r.n=e=>{var i=e&&e.__esModule?()=>e.default:()=>e;return r.d(i,{a:i}),i},r.d=(e,i)=>{for(var t in i)r.o(i,t)&&!r.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:i[t]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce((i,t)=>(r.f[t](e,i),i),[])),r.u=e=>(592===e?"common":e)+"."+{45:"c90c3cea2bf1a66e",66:"31f5b9ae46ae9005",103:"5b5d2a6e5a8a7479",146:"92e3b29c4c754544",474:"7f6529972e383566",592:"858f777e9296e6f2",869:"0ab6b8a3f466df77"}[e]+".js",r.miniCssF=e=>{},r.o=(e,i)=>Object.prototype.hasOwnProperty.call(e,i),(()=>{var e={},i="blrec:";r.l=(t,o,f,n)=>{if(e[t])e[t].push(o);else{var a,c;if(void 0!==f)for(var l=document.getElementsByTagName("script"),d=0;d<l.length;d++){var u=l[d];if(u.getAttribute("src")==t||u.getAttribute("data-webpack")==i+f){a=u;break}}a||(c=!0,(a=document.createElement("script")).type="module",a.charset="utf-8",a.timeout=120,r.nc&&a.setAttribute("nonce",r.nc),a.setAttribute("data-webpack",i+f),a.src=r.tu(t)),e[t]=[o];var s=(g,b)=>{a.onerror=a.onload=null,clearTimeout(p);var _=e[t];if(delete e[t],a.parentNode&&a.parentNode.removeChild(a),_&&_.forEach(h=>h(b)),g)return g(b)},p=setTimeout(s.bind(null,void 0,{type:"timeout",target:a}),12e4);a.onerror=s.bind(null,a.onerror),a.onload=s.bind(null,a.onload),c&&document.head.appendChild(a)}}})(),r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{var e;r.tu=i=>(void 0===e&&(e={createScriptURL:t=>t},"undefined"!=typeof trustedTypes&&trustedTypes.createPolicy&&(e=trustedTypes.createPolicy("angular#bundler",e))),e.createScriptURL(i))})(),r.p="",(()=>{var e={666:0};r.f.j=(o,f)=>{var n=r.o(e,o)?e[o]:void 0;if(0!==n)if(n)f.push(n[2]);else if(666!=o){var a=new Promise((u,s)=>n=e[o]=[u,s]);f.push(n[2]=a);var c=r.p+r.u(o),l=new Error;r.l(c,u=>{if(r.o(e,o)&&(0!==(n=e[o])&&(e[o]=void 0),n)){var s=u&&("load"===u.type?"missing":u.type),p=u&&u.target&&u.target.src;l.message="Loading chunk "+o+" failed.\n("+s+": "+p+")",l.name="ChunkLoadError",l.type=s,l.request=p,n[1](l)}},"chunk-"+o,o)}else e[o]=0},r.O.j=o=>0===e[o];var i=(o,f)=>{var l,d,[n,a,c]=f,u=0;if(n.some(p=>0!==e[p])){for(l in a)r.o(a,l)&&(r.m[l]=a[l]);if(c)var s=c(r)}for(o&&o(f);u<n.length;u++)r.o(e,d=n[u])&&e[d]&&e[d][0](),e[n[u]]=0;return r.O(s)},t=self.webpackChunkblrec=self.webpackChunkblrec||[];t.forEach(i.bind(null,0)),t.push=i.bind(null,t.push.bind(t))})()})();
@ -37,8 +37,8 @@ class SpaceReclaimer(SpaceEventListener, SwitchableMixin):
        if value := os.environ.get('REC_TTL'):
            try:
                rec_ttl = int(value)
            except Exception as e:
                logger.warning(repr(e))
            except Exception as exc:
                logger.warning(repr(exc))
        self.rec_ttl = rec_ttl
        self.recycle_records = recycle_records
@ -1,21 +1,18 @@
from reactivex import Observable, Subject

from rx.subject import Subject
from rx.core import Observable

from .typing import Event
from ..utils.patterns import Singleton
from .typing import Event


__all__ = 'EventCenter',
__all__ = ('EventCenter',)


class EventCenter(Singleton):
    def __init__(self) -> None:
        super().__init__()
        self._source = Subject()
        self._source: Subject[Event] = Subject()

    @property
    def events(self) -> Observable:
    def events(self) -> Observable[Event]:
        return self._source

    def submit(self, event: Event) -> None:
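The move from `rx` 3 to `reactivex` 4 is more than an import rename: `Subject` and `Observable` are now generic, so the event stream is type-checkable end to end. A hedged sketch of the pattern outside the `Singleton` machinery (the `Center` class and the `Event` stand-in are ours):

from reactivex import Observable, Subject


class Event:
    ...  # stand-in for blrec's Event type


class Center:
    def __init__(self) -> None:
        # mypy now knows exactly what flows through this subject
        self._source: Subject[Event] = Subject()

    @property
    def events(self) -> Observable[Event]:
        return self._source

    def submit(self, event: Event) -> None:
        self._source.on_next(event)


center = Center()
center.events.subscribe(lambda e: print('got', e))
center.submit(Event())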
@ -1,20 +1,17 @@

from rx.subject import Subject
from rx.core import Observable
from reactivex import Observable, Subject

from ..utils.patterns import Singleton


__all__ = 'ExceptionCenter',
__all__ = ('ExceptionCenter',)


class ExceptionCenter(Singleton):
    def __init__(self) -> None:
        super().__init__()
        self._source = Subject()
        self._source: Subject[BaseException] = Subject()

    @property
    def exceptions(self) -> Observable:
    def exceptions(self) -> Observable[BaseException]:
        return self._source

    def submit(self, exc: BaseException) -> None:
@ -1,17 +1,16 @@
from __future__ import annotations

from typing import Any, Dict, Iterable, Iterator, Mapping, Optional, Union
from typing_extensions import TypeGuard

import attr
from typing_extensions import TypeGuard

from .io import FlvReader
from .io_protocols import RandomIO
from . import scriptdata
from .avc import extract_resolution
from .io import FlvReader
from .io_protocols import RandomIO
from .models import AudioTag, AVCPacketType, FlvTag, ScriptTag, TagType, VideoTag
from .utils import OffsetRepositor
from .models import (
    AVCPacketType, FlvTag, AudioTag, ScriptTag, TagType, VideoTag
)


def read_tags(
@ -54,14 +53,14 @@ def read_tags_in_duration(


def peek_tags(
    file: RandomIO, reader: FlvReader, count: int, *, no_body: bool = False,
    file: RandomIO, reader: FlvReader, count: int, *, no_body: bool = False
) -> Iterator[FlvTag]:
    with OffsetRepositor(file):
        yield from read_tags(reader, count, no_body=no_body)


def rpeek_tags(
    file: RandomIO, reader: FlvReader, count: int, *, no_body: bool = False,
    file: RandomIO, reader: FlvReader, count: int, *, no_body: bool = False
) -> Iterator[FlvTag]:
    with OffsetRepositor(file):
        yield from rread_tags(reader, count, no_body=no_body)
@ -74,9 +73,7 @@ def find_metadata_tag(tags: Iterable[FlvTag]) -> Optional[ScriptTag]:
    return None


def find_header_tag(
    tags: Iterable[FlvTag]
) -> Optional[Union[AudioTag, VideoTag]]:
def find_header_tag(tags: Iterable[FlvTag]) -> Optional[Union[AudioTag, VideoTag]]:
    for tag in tags:
        if is_sequence_header(tag):
            return tag
@ -221,9 +218,7 @@ def enrich_metadata(


def update_metadata(
    metadata_tag: ScriptTag,
    metadata: Mapping[str, Any],
    offset: Optional[int] = None,
    metadata_tag: ScriptTag, metadata: Mapping[str, Any], offset: Optional[int] = None
) -> ScriptTag:
    original_tag_size = metadata_tag.tag_size
    new_tag = enrich_metadata(metadata_tag, metadata, offset)
@ -236,7 +231,8 @@ def ensure_order(metadata: Dict[str, Any]) -> Dict[str, Any]:
    # typical properties such as 'keyframes' must come before custom
    # properties such as 'Comment'; otherwise, they won't take effect in
    # some players!
    from .data_analyser import MetaData
    from .operators import MetaData

    typical_props = attr.fields_dict(MetaData).keys()
    return {
        **{k: v for k, v in metadata.items() if k in typical_props},
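`ensure_order` leans on the fact that Python dicts preserve insertion order: rebuilding the dict with the well-known `MetaData` fields first pushes custom properties like 'Comment' to the back. A minimal illustration (the hand-picked `typical_props` set and the trailing comprehension are assumptions for this sketch, since the function body is truncated in the hunk):

typical_props = {'duration', 'keyframes'}  # stand-in for attr.fields_dict(MetaData).keys()

metadata = {'Comment': 'joined', 'keyframes': {'times': []}, 'duration': 60.0}
ordered = {
    **{k: v for k, v in metadata.items() if k in typical_props},
    **{k: v for k, v in metadata.items() if k not in typical_props},
}
print(list(ordered))  # ['keyframes', 'duration', 'Comment']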
@ -1,4 +1,3 @@

class FlvDataError(ValueError):
    ...

@ -17,23 +16,3 @@ class FlvStreamCorruptedError(Exception):

class FlvFileCorruptedError(Exception):
    ...


class AudioParametersChanged(Exception):
    ...


class VideoParametersChanged(Exception):
    ...


class FileSizeOverLimit(Exception):
    ...


class DurationOverLimit(Exception):
    ...


class CutStream(Exception):
    ...
@ -7,7 +7,7 @@ from .common import (
    is_audio_tag, is_metadata_tag, is_script_tag, is_video_tag, read_tags,
    parse_metadata, find_metadata_tag,
)
from .stream_processor import JoinPoint
from .operators import JoinPoint
from .utils import format_timestamp
from ..path import extra_metadata_path

@ -32,7 +32,7 @@ def make_comment_for_joinpoints(join_points: Iterable[JoinPoint]) -> str:
        '流中断拼接详情\n' +
        '\n'.join((
            '时间戳:{}, 无缝拼接:{}'.format(
                format_timestamp(p.timestamp),
                format_timestamp(int(p.timestamp)),
                '是' if p.seamless else '否',
            )
            for p in join_points
@ -1,84 +0,0 @@
from __future__ import annotations
import logging
from typing import Optional


from .models import FlvHeader, FlvTag, BACK_POINTER_SIZE, VideoTag
from .exceptions import FileSizeOverLimit, DurationOverLimit
from .common import is_video_nalu_keyframe


__all__ = 'LimitChecker',


logger = logging.getLogger(__name__)


class LimitChecker:
    def __init__(
        self,
        filesize_limit: int = 0,  # file size in bytes, no limit by default.
        duration_limit: int = 0,  # duration in seconds, no limit by default.
    ) -> None:
        self.filesize_limit = filesize_limit
        self.duration_limit = duration_limit
        self.reset()

    def is_filesize_over_limit(self) -> bool:
        return (
            self._filesize + self._max_size_between_keyframes >=
            self.filesize_limit
        )

    def is_duration_over_limit(self) -> bool:
        return (
            self._duration + self._max_duration_between_keyframes >=
            self.duration_limit
        )

    @property
    def last_keyframe_tag(self) -> Optional[VideoTag]:
        return self._last_keyframe_tag

    def reset(self) -> None:
        self._filesize = 0
        self._duration = 0.0
        self._max_size_between_keyframes = 0
        self._max_duration_between_keyframes = 0.0
        self._header_checked = False
        self._last_keyframe_tag: Optional[VideoTag] = None

    def check_header(self, header: FlvHeader) -> None:
        assert not self._header_checked
        self._header_checked = True
        self._filesize += header.size + BACK_POINTER_SIZE

    def check_tag(self, tag: FlvTag) -> None:
        self._filesize += tag.tag_size + BACK_POINTER_SIZE
        self._duration = tag.timestamp / 1000

        if not is_video_nalu_keyframe(tag):
            return

        if self._last_keyframe_tag is not None:
            self._max_size_between_keyframes = max(
                self._max_size_between_keyframes,
                tag.offset - self._last_keyframe_tag.offset,
            )
            self._max_duration_between_keyframes = max(
                self._max_duration_between_keyframes,
                (tag.timestamp - self._last_keyframe_tag.timestamp) / 1000,
            )
        self._last_keyframe_tag = tag

        if self.filesize_limit > 0 and self.is_filesize_over_limit():
            logger.debug('File size will be over the limit: {} + {}'.format(
                self._filesize, self._max_size_between_keyframes,
            ))
            raise FileSizeOverLimit()

        if self.duration_limit > 0 and self.is_duration_over_limit():
            logger.debug('Duration will be over the limit: {} + {}'.format(
                self._duration, self._max_duration_between_keyframes,
            ))
            raise DurationOverLimit()
78
src/blrec/flv/metadata_dumper.py
Normal file
@ -0,0 +1,78 @@
import json
import logging
from contextlib import suppress
from typing import List, Optional

import attr

from ..flv import operators as flv_ops
from ..path import extra_metadata_path
from ..utils.mixins import SwitchableMixin

__all__ = ('MetadataDumper',)

logger = logging.getLogger(__name__)


class MetadataDumper(SwitchableMixin):
    def __init__(
        self,
        dumper: flv_ops.Dumper,
        analyser: flv_ops.Analyser,
        joinpoint_extractor: flv_ops.JoinPointExtractor,
    ) -> None:
        super().__init__()

        self._dumper = dumper
        self._analyser = analyser
        self._joinpoint_extractor = joinpoint_extractor

        self._last_metadata: Optional[flv_ops.MetaData] = None
        self._last_join_points: Optional[List[flv_ops.JoinPoint]] = None

    def _do_enable(self) -> None:
        self._metadata_subscription = self._analyser.metadatas.subscribe(
            self._update_metadata
        )
        self._join_points_subscription = (
            self._joinpoint_extractor.join_points.subscribe(self._update_join_points)
        )
        self._file_closed_subscription = self._dumper.file_closed.subscribe(
            self._dump_metadata
        )
        logger.debug('Enabled metadata dumper')

    def _do_disable(self) -> None:
        with suppress(Exception):
            self._metadata_subscription.dispose()
        with suppress(Exception):
            self._join_points_subscription.dispose()
        with suppress(Exception):
            self._file_closed_subscription.dispose()
        logger.debug('Disabled metadata dumper')

    def _update_metadata(self, metadata: flv_ops.MetaData) -> None:
        self._last_metadata = metadata

    def _update_join_points(self, join_points: List[flv_ops.JoinPoint]) -> None:
        self._last_join_points = join_points

    def _dump_metadata(self, video_path: str) -> None:
        path = extra_metadata_path(video_path)
        logger.debug(f"Dumping metadata to file: '{path}'")

        assert self._last_metadata is not None
        assert self._last_join_points is not None

        data = attr.asdict(self._last_metadata, filter=lambda a, v: v is not None)
        data['joinpoints'] = list(
            map(lambda p: p.to_metadata_value(), self._last_join_points)
        )

        try:
            with open(path, 'wt', encoding='utf8') as file:
                json.dump(data, file)
        except Exception as e:
            logger.error(f'Failed to dump metadata: {e}')
        else:
            logger.debug(f"Successfully dumped metadata to file: '{path}'")
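`MetadataDumper` is purely observational: it caches the latest values pushed by the analyser and the join-point extractor, then writes both out whenever the dumper reports a closed file. A hedged wiring sketch, assuming `SwitchableMixin` exposes `enable()`/`disable()` (the path-provider argument mirrors the `Dumper` call in metadata_injection.py below):

from blrec.flv import operators as flv_ops
from blrec.flv.metadata_dumper import MetadataDumper

analyser = flv_ops.Analyser()
extractor = flv_ops.JoinPointExtractor()
dumper = flv_ops.Dumper(lambda: ('out.flv', 0))  # hypothetical path provider

metadata_dumper = MetadataDumper(dumper, analyser, extractor)
metadata_dumper.enable()   # subscribes to all three source observables

# ... the FLV stream flows through analyser, extractor and dumper ...

metadata_dumper.disable()  # disposes every subscription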
60
src/blrec/flv/metadata_injection.py
Normal file
@ -0,0 +1,60 @@
import logging
import os
from datetime import datetime
from typing import Any, Dict

import attr
from reactivex import Observable
from reactivex import operators as ops

from ..utils import operators as utils_ops
from . import operators as flv_ops
from .helpers import make_comment_for_joinpoints
from .operators import JoinPoint
from .operators.helpers import from_file

__all__ = 'InjectingProgress', 'inject_metadata'


logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, slots=True, frozen=True)
class InjectingProgress:
    count: int
    total: int


def inject_metadata(
    path: str, metadata: Dict[str, Any], *, show_progress: bool = False
) -> Observable[InjectingProgress]:
    filesize = os.path.getsize(path)
    append_comment_for_joinpoints(metadata)

    root, ext = os.path.splitext(path)
    temp_path = f'{root}_injecting{ext}'
    filename = os.path.basename(path)

    return from_file(path).pipe(
        flv_ops.Injector(lambda: metadata),
        flv_ops.Dumper(lambda: (temp_path, int(datetime.now().timestamp()))),
        flv_ops.ProgressBar(
            desc='Injecting',
            postfix=filename,
            total=filesize,
            disable=not show_progress,
        ),
        ops.map(lambda i: len(i)),
        ops.scan(lambda acc, x: acc + x, 0),
        ops.map(lambda s: InjectingProgress(s, filesize)),
        utils_ops.replace(temp_path, path),
    )


def append_comment_for_joinpoints(metadata: Dict[str, Any]) -> None:
    if join_points := metadata.get('joinpoints'):
        join_points = map(JoinPoint.from_metadata_value, join_points)
        if 'Comment' in metadata:
            metadata['Comment'] += '\n\n' + make_comment_for_joinpoints(join_points)
        else:
            metadata['Comment'] = make_comment_for_joinpoints(join_points)
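Unlike the scheduler-driven version it replaces (deleted below), the new `inject_metadata` returns a cold observable: nothing runs until subscription, and progress arrives as cumulative byte counts wrapped in `InjectingProgress`. A hedged usage sketch (the file path is ours):

from blrec.flv.metadata_injection import inject_metadata

progress = inject_metadata(
    'records/room_123.flv',  # hypothetical recording
    {'Comment': 'recorded by blrec'},
    show_progress=True,
)

# The pipeline only starts on subscribe; failures surface via on_error.
progress.subscribe(
    on_next=lambda p: print(f'{p.count}/{p.total} bytes'),
    on_completed=lambda: print('metadata injected'),
)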
@ -1,148 +0,0 @@
import os
import logging
from typing import Any, Dict, Optional

import attr
from rx import create, operators as op
from rx.subject import Subject
from rx.core import Observable
from rx.core.typing import Observer, Scheduler, Disposable
from rx.scheduler.currentthreadscheduler import CurrentThreadScheduler
from tqdm import tqdm

from .stream_processor import StreamProcessor, BaseOutputFileManager, JoinPoint
from .helpers import get_metadata, make_comment_for_joinpoints
from ..logging.room_id import with_room_id


__all__ = 'MetadataInjector', 'InjectProgress', 'inject_metadata'


logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, slots=True, frozen=True)
class InjectProgress:
    time: int
    duration: int


class MetadataInjector:
    def __init__(self, in_path: str, out_path: str) -> None:
        self._in_path = in_path
        self._file_manager = OutputFileManager(out_path)
        self._duration: int = 0
        self._progress_updates = Subject()

    @property
    def progress_updates(self) -> Observable:
        return self._progress_updates

    def inject(self, in_metadata: Dict[str, Any]) -> None:
        metadata = get_metadata(self._in_path)
        metadata.update(in_metadata)

        self._duration = int(float(metadata['duration']) * 1000)
        self._progress_updates.on_next(InjectProgress(0, self._duration))

        self._append_comment_for_joinpoints(metadata)

        processor = StreamProcessor(
            self._file_manager,
            metadata=metadata,
            disable_limit=True,
        )

        def update_progress(time: int) -> None:
            progress = InjectProgress(time, self._duration)
            self._progress_updates.on_next(progress)

        processor.time_updates.subscribe(update_progress)

        with open(self._in_path, 'rb') as in_file:
            processor.process_stream(in_file)
            processor.finalize()

        progress = InjectProgress(self._duration, self._duration)
        self._progress_updates.on_next(progress)

    @staticmethod
    def _append_comment_for_joinpoints(metadata: Dict[str, Any]) -> None:
        if (join_points := metadata.get('joinpoints')):
            join_points = map(JoinPoint.from_metadata_value, join_points)
            if 'Comment' in metadata:
                metadata['Comment'] += '\n\n' + \
                    make_comment_for_joinpoints(join_points)
            else:
                metadata['Comment'] = make_comment_for_joinpoints(join_points)


class OutputFileManager(BaseOutputFileManager):
    def __init__(self, out_path: str) -> None:
        super().__init__()
        self._out_path = out_path

    def _make_path(self) -> str:
        return self._out_path


def inject_metadata(
    path: str,
    metadata: Dict[str, Any],
    *,
    report_progress: bool = False,
    room_id: Optional[int] = None,
) -> Observable:
    def subscribe(
        observer: Observer[InjectProgress],
        scheduler: Optional[Scheduler] = None,
    ) -> Disposable:
        _scheduler = scheduler or CurrentThreadScheduler.singleton()

        def action(scheduler, state):  # type: ignore
            root, ext = os.path.splitext(path)
            out_path = f'{root}_inject_metadata{ext}'
            injector = MetadataInjector(path, out_path)
            file_name = os.path.basename(path)

            with tqdm(desc='Injecting', unit='ms', postfix=file_name) as pbar:
                def reset(progress: InjectProgress) -> None:
                    pbar.reset(progress.duration)

                def update(progress: InjectProgress) -> None:
                    pbar.update(progress.time - pbar.n)

                injector.progress_updates.pipe(op.first()).subscribe(reset)
                injector.progress_updates.pipe(op.skip(1)).subscribe(update)

                if report_progress:
                    injector.progress_updates.subscribe(
                        lambda p: observer.on_next(p)
                    )

                try:
                    logger.info(f"Injecting metadata for '{path}' ...")
                    injector.inject(metadata)
                except Exception as e:
                    logger.error(
                        f"Failed to inject metadata for '{path}': {repr(e)}"
                    )
                    observer.on_error(e)
                else:
                    logger.info(f"Successfully injected metadata for '{path}'")
                    try:
                        os.replace(out_path, path)
                    except Exception as e:
                        logger.error(
                            f"Failed to replace file {path} with '{out_path}'"
                        )
                        observer.on_error(e)
                    else:
                        observer.on_completed()

        if room_id is not None:
            return _scheduler.schedule(with_room_id(room_id)(action))
        else:
            return _scheduler.schedule(action)

    return create(subscribe)
@ -106,6 +106,9 @@ class FlvHeader:
        else:
            self.type_flag &= ~0b0000_0100

    def __len__(self) -> int:
        return self.size

    @property
    def size(self) -> int:
        return self.data_offset
@ -151,6 +154,9 @@ class FlvTag(ABC, FlvTagHeader):
    offset: int = attr.ib(validator=[non_negative_integer_validator])
    body: Optional[bytes] = attr.ib(default=None, repr=cksum)

    def __len__(self) -> int:
        return self.tag_size

    @property
    @abstractmethod
    def header_size(self) -> int:
38
src/blrec/flv/operators/__init__.py
Normal file
@ -0,0 +1,38 @@
from .analyse import Analyser, KeyFrames, MetaData
from .concat import JoinPoint, JoinPointData, JoinPointExtractor, concat
from .correct import correct
from .cut import Cutter
from .defragment import defragment
from .dump import Dumper
from .fix import fix
from .inject import Injector
from .limit import Limiter
from .parse import parse
from .probe import Prober, StreamProfile
from .process import process
from .progress import ProgressBar
from .split import split

__all__ = (
    'Analyser',
    'concat',
    'concat',
    'correct',
    'Cutter',
    'defragment',
    'Dumper',
    'fix',
    'Injector',
    'JoinPoint',
    'JoinPointData',
    'JoinPointExtractor',
    'KeyFrames',
    'Limiter',
    'MetaData',
    'parse',
    'Prober',
    'process',
    'ProgressBar',
    'split',
    'StreamProfile',
)
@ -1,237 +1,331 @@
"""
Analyse FLV file to make MetaData
ref: https://github.com/ioppermann/yamdi/blob/master/yamdi.c
"""
from __future__ import annotations
from typing import List, Optional

import attr

from .models import (
    AudioTag, FlvHeader, FlvTag, SoundType, VideoTag, ScriptTag,
    BACK_POINTER_SIZE
)
from .common import Resolution, is_audio_tag, is_script_tag, is_video_tag


__all__ = 'DataAnalyser', 'MetaData', 'KeyFrames'


@attr.s(auto_attribs=True, slots=True, frozen=True, kw_only=True)
class KeyFrames:
    times: List[float]
    filepositions: List[float]


@attr.s(auto_attribs=True, slots=True, frozen=True, kw_only=True)
class MetaData:
    hasAudio: bool
    hasVideo: bool
    hasMetadata: bool
    hasKeyframes: bool
    canSeekToEnd: bool
    duration: float
    datasize: float
    filesize: float

    audiosize: Optional[float] = None
    audiocodecid: Optional[float] = None
    audiodatarate: Optional[float] = None
    audiosamplerate: Optional[float] = None
    audiosamplesize: Optional[float] = None
    stereo: Optional[bool] = None

    videosize: float
    framerate: float
    videocodecid: float
    videodatarate: float
    width: float
    height: float

    lasttimestamp: float
    lastkeyframelocation: float
    lastkeyframetimestamp: float
    keyframes: KeyFrames


class DataAnalyser:
    def __init__(self) -> None:
        self.reset()

    def reset(self) -> None:
        self._num_of_tags = 0
        self._num_of_audio_tags = 0
        self._num_of_video_tags = 0
        self._size_of_tags = 0
        self._size_of_audio_tags = 0
        self._size_of_video_tags = 0
        self._size_of_data = 0
        self._size_of_audio_data = 0
        self._size_of_video_data = 0
        self._last_timestamp = 0
        self._last_timestamp_of_audio = 0
        self._last_timestamp_of_video = 0
        self._keyframe_timestamps: List[int] = []
        self._keyframe_filepositions: List[int] = []
        self._resolution: Optional[Resolution] = None

        self._has_audio = False
        self._has_video = False
        self._header_analysed = False
        self._audio_analysed = False
        self._video_analysed = False

    @property
    def last_timestamp(self) -> int:
        return self._last_timestamp

    def analyse_header(self, header: FlvHeader) -> None:
        assert not self._header_analysed
        self._header_analysed = True
        self._size_of_flv_header = header.size

    def analyse_tag(self, tag: FlvTag) -> None:
        if is_audio_tag(tag):
            self._analyse_audio_tag(tag)
        elif is_video_tag(tag):
            self._analyse_video_tag(tag)
        elif is_script_tag(tag):
            self._analyse_script_tag(tag)
        else:
            raise ValueError('Invalid tag type')

        self._num_of_tags += 1
        self._size_of_tags += tag.tag_size
        self._size_of_data += tag.data_size
        self._last_timestamp = tag.timestamp

    def get_real_resolution(self) -> Optional[Resolution]:
        return self._resolution

    def calc_frame_rate(self) -> float:
        try:
            return (
                self._num_of_video_tags / self._last_timestamp_of_video * 1000
            )
        except ZeroDivisionError:
            return 0.0

    def calc_audio_data_rate(self) -> float:
        try:
            return self._size_of_audio_data * 8 / self._last_timestamp_of_audio
        except ZeroDivisionError:
            return 0.0

    def calc_video_data_rate(self) -> float:
        try:
            return self._size_of_video_data * 8 / self._last_timestamp_of_video
        except ZeroDivisionError:
            return 0.0

    def calc_data_size(self) -> int:
        return (
            self._size_of_audio_tags +
            self._num_of_audio_tags * BACK_POINTER_SIZE +
            self._size_of_video_tags +
            self._num_of_video_tags * BACK_POINTER_SIZE
        )

    def calc_file_size(self) -> int:
        return (
            self._size_of_flv_header + BACK_POINTER_SIZE +
            self._size_of_tags + self._num_of_tags * BACK_POINTER_SIZE
        )

    def make_keyframes(self) -> KeyFrames:
        return KeyFrames(
            times=list(map(lambda t: t / 1000, self._keyframe_timestamps)),
            filepositions=list(map(float, self._keyframe_filepositions)),
        )

    def make_metadata(self) -> MetaData:
        assert self._header_analysed
        assert self._has_audio == self._audio_analysed
        assert self._has_video and self._video_analysed
        assert self._resolution is not None

        if not self._has_audio:
            audiosize = None
            audiocodecid = None
            audiodatarate = None
            audiosamplerate = None
            audiosamplesize = None
            stereo = None
        else:
            audiosize = float(self._size_of_audio_tags)
            audiocodecid = float(self._audio_codec_id)
            audiodatarate = self.calc_audio_data_rate()
            audiosamplerate = float(self._audio_sample_rate)
            audiosamplesize = float(self._audio_sample_size)
            stereo = self._stereo

        keyframes = self.make_keyframes()

        return MetaData(
            hasAudio=self._has_audio,
            hasVideo=self._has_video,
            hasMetadata=True,
            hasKeyframes=len(self._keyframe_timestamps) != 0,
            canSeekToEnd=(
                self._last_timestamp_of_video == self._keyframe_timestamps[-1]
            ),
            duration=self._last_timestamp / 1000,
            datasize=float(self.calc_data_size()),
            filesize=float(self.calc_file_size()),
            audiosize=audiosize,
            audiocodecid=audiocodecid,
            audiodatarate=audiodatarate,
            audiosamplerate=audiosamplerate,
            audiosamplesize=audiosamplesize,
            stereo=stereo,
            videosize=float(self._size_of_video_tags),
            framerate=self.calc_frame_rate(),
            videocodecid=float(self._video_codec_id),
            videodatarate=self.calc_video_data_rate(),
            width=float(self._resolution.width),
            height=float(self._resolution.height),
            lasttimestamp=self._last_timestamp / 1000,
            lastkeyframelocation=keyframes.filepositions[-1],
            lastkeyframetimestamp=keyframes.times[-1],
            keyframes=keyframes,
        )

    def _analyse_audio_tag(self, tag: AudioTag) -> None:
        if not self._audio_analysed:
            self._has_audio = True
            self._audio_analysed = True
            self._audio_codec_id = tag.sound_format.value
            self._audio_sample_rate = tag.sound_rate.value
            self._audio_sample_size = tag.sound_size.value
            self._stereo = tag.sound_type == SoundType.STEREO

        self._num_of_audio_tags += 1
        self._size_of_audio_tags += tag.tag_size
        self._size_of_audio_data += tag.data_size
        self._last_timestamp_of_audio = tag.timestamp

    def _analyse_video_tag(self, tag: VideoTag) -> None:
        if tag.is_keyframe():
            self._keyframe_timestamps.append(tag.timestamp)
            self._keyframe_filepositions.append(tag.offset)
            if tag.is_avc_header():
                self._resolution = Resolution.from_aac_sequence_header(tag)
            else:
                pass

        if not self._video_analysed:
            self._has_video = True
            self._video_analysed = True
            self._video_codec_id = tag.codec_id.value

        self._num_of_video_tags += 1
        self._size_of_video_tags += tag.tag_size
        self._size_of_video_data += tag.data_size
        self._last_timestamp_of_video = tag.timestamp

    def _analyse_script_tag(self, tag: ScriptTag) -> None:
        pass
"""
Analyse the FLV stream to make MetaData
ref: https://github.com/ioppermann/yamdi/blob/master/yamdi.c
"""
from __future__ import annotations

import logging
from typing import List, Optional, TypedDict

import attr
from reactivex import Observable, Subject, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

from ..common import Resolution, is_audio_tag, is_script_tag, is_video_tag
from ..models import (
    BACK_POINTER_SIZE,
    AudioTag,
    FlvHeader,
    FlvTag,
    ScriptTag,
    SoundType,
    VideoTag,
)
from .typing import FLVStream, FLVStreamItem

__all__ = 'Analyser', 'MetaData', 'KeyFrames'

logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, slots=True, frozen=True, kw_only=True)
class KeyFrames:
    times: List[float]
    filepositions: List[float]


class KeyFramesDict(TypedDict):
    times: List[float]
    filepositions: List[float]


@attr.s(auto_attribs=True, slots=True, frozen=True, kw_only=True)
class MetaData:
    hasAudio: bool
    hasVideo: bool
    hasMetadata: bool
    hasKeyframes: bool
    canSeekToEnd: bool
    duration: float
    datasize: float
    filesize: float

    audiosize: Optional[float] = None
    audiocodecid: Optional[float] = None
    audiodatarate: Optional[float] = None
    audiosamplerate: Optional[float] = None
    audiosamplesize: Optional[float] = None
    stereo: Optional[bool] = None

    videosize: float
    framerate: float
    videocodecid: float
    videodatarate: float
    width: float
    height: float

    lasttimestamp: float
    lastkeyframelocation: float
    lastkeyframetimestamp: float
    keyframes: KeyFrames


class MetaDataDict:
    hasAudio: bool
    hasVideo: bool
    hasMetadata: bool
    hasKeyframes: bool
    canSeekToEnd: bool
    duration: float
    datasize: float
    filesize: float

    audiosize: Optional[float] = None
    audiocodecid: Optional[float] = None
    audiodatarate: Optional[float] = None
    audiosamplerate: Optional[float] = None
    audiosamplesize: Optional[float] = None
    stereo: Optional[bool] = None

    videosize: float
    framerate: float
    videocodecid: float
    videodatarate: float
    width: float
    height: float

    lasttimestamp: float
    lastkeyframelocation: float
    lastkeyframetimestamp: float
    keyframes: KeyFramesDict


class Analyser:
    def __init__(self) -> None:
        self._metadatas: Subject[MetaData] = Subject()
        self._reset()

    def _reset(self) -> None:
        self._num_of_tags = 0
        self._num_of_audio_tags = 0
        self._num_of_video_tags = 0
        self._size_of_tags = 0
        self._size_of_audio_tags = 0
        self._size_of_video_tags = 0
        self._size_of_data = 0
        self._size_of_audio_data = 0
        self._size_of_video_data = 0
        self._last_timestamp = 0
        self._last_timestamp_of_audio = 0
        self._last_timestamp_of_video = 0
        self._keyframe_timestamps: List[int] = []
        self._keyframe_filepositions: List[int] = []
        self._resolution: Optional[Resolution] = None

        self._has_audio = False
        self._has_video = False
        self._audio_analysed = False
        self._video_analysed = False

    @property
    def metadatas(self) -> Observable[MetaData]:
        return self._metadatas

    def __call__(self, source: FLVStream) -> FLVStream:
        return self._analyse(source)

    def get_real_resolution(self) -> Optional[Resolution]:
        return self._resolution

    def calc_frame_rate(self) -> float:
        try:
            return self._num_of_video_tags / self._last_timestamp_of_video * 1000
        except ZeroDivisionError:
            return 0.0

    def calc_audio_data_rate(self) -> float:
        try:
            return self._size_of_audio_data * 8 / self._last_timestamp_of_audio
        except ZeroDivisionError:
            return 0.0

    def calc_video_data_rate(self) -> float:
        try:
            return self._size_of_video_data * 8 / self._last_timestamp_of_video
        except ZeroDivisionError:
            return 0.0

    def calc_data_size(self) -> int:
        return (
            self._size_of_audio_tags
            + self._num_of_audio_tags * BACK_POINTER_SIZE
            + self._size_of_video_tags
            + self._num_of_video_tags * BACK_POINTER_SIZE
        )

    def calc_file_size(self) -> int:
        return (
            self._size_of_flv_header
            + BACK_POINTER_SIZE
            + self._size_of_tags
            + self._num_of_tags * BACK_POINTER_SIZE
        )

    def make_keyframes(self) -> KeyFrames:
        return KeyFrames(
            times=list(map(lambda t: t / 1000, self._keyframe_timestamps)),
            filepositions=list(map(float, self._keyframe_filepositions)),
        )

    def make_metadata(self) -> MetaData:
        assert self._has_audio == self._audio_analysed
        assert self._has_video and self._video_analysed
        assert self._resolution is not None

        if not self._has_audio:
            audiosize = None
            audiocodecid = None
            audiodatarate = None
            audiosamplerate = None
            audiosamplesize = None
            stereo = None
        else:
            audiosize = float(self._size_of_audio_tags)
            audiocodecid = float(self._audio_codec_id)
            audiodatarate = self.calc_audio_data_rate()
            audiosamplerate = float(self._audio_sample_rate)
            audiosamplesize = float(self._audio_sample_size)
            stereo = self._stereo

        keyframes = self.make_keyframes()

        return MetaData(
            hasAudio=self._has_audio,
            hasVideo=self._has_video,
            hasMetadata=True,
            hasKeyframes=len(self._keyframe_timestamps) != 0,
            canSeekToEnd=(
                self._last_timestamp_of_video == self._keyframe_timestamps[-1]
            ),
            duration=self._last_timestamp / 1000,
            datasize=float(self.calc_data_size()),
            filesize=float(self.calc_file_size()),
            audiosize=audiosize,
            audiocodecid=audiocodecid,
            audiodatarate=audiodatarate,
            audiosamplerate=audiosamplerate,
            audiosamplesize=audiosamplesize,
            stereo=stereo,
            videosize=float(self._size_of_video_tags),
            framerate=self.calc_frame_rate(),
            videocodecid=float(self._video_codec_id),
            videodatarate=self.calc_video_data_rate(),
            width=float(self._resolution.width),
            height=float(self._resolution.height),
            lasttimestamp=self._last_timestamp / 1000,
            lastkeyframelocation=keyframes.filepositions[-1],
            lastkeyframetimestamp=keyframes.times[-1],
            keyframes=keyframes,
        )

    def _analyse(self, source: FLVStream) -> FLVStream:
        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            stream_index: int = -1
            subscription = SerialDisposable()

            def push_metadata() -> None:
                try:
                    metadata = self.make_metadata()
                except Exception as e:
                    logger.warning(f'Failed to make metadata: {repr(e)}')
                    pass
                else:
                    self._metadatas.on_next(metadata)

            def on_next(item: FLVStreamItem) -> None:
                nonlocal stream_index
                if isinstance(item, FlvHeader):
                    stream_index += 1
                    if stream_index > 0:
                        push_metadata()
                        self._reset()
                    self._analyse_flv_header(item)
                else:
                    self._analyse_tag(item)
                observer.on_next(item)

            def on_completed() -> None:
                push_metadata()
                observer.on_completed()

            def on_error(e: Exception) -> None:
                push_metadata()
                observer.on_error(e)

            def dispose() -> None:
                push_metadata()

            subscription.disposable = source.subscribe(
                on_next, on_error, on_completed, scheduler=scheduler
            )

            return CompositeDisposable(subscription, Disposable(dispose))

        return Observable(subscribe)

    def _analyse_flv_header(self, header: FlvHeader) -> None:
        self._size_of_flv_header = header.size

    def _analyse_tag(self, tag: FlvTag) -> None:
        if is_audio_tag(tag):
            self._analyse_audio_tag(tag)
        elif is_video_tag(tag):
            self._analyse_video_tag(tag)
        elif is_script_tag(tag):
            self._analyse_script_tag(tag)
        else:
            logger.warning(f'Invalid tag type: {tag}')

        self._num_of_tags += 1
        self._size_of_tags += tag.tag_size
        self._size_of_data += tag.data_size
        self._last_timestamp = tag.timestamp

    def _analyse_audio_tag(self, tag: AudioTag) -> None:
        if not self._audio_analysed:
            self._has_audio = True
            self._audio_analysed = True
            self._audio_codec_id = tag.sound_format.value
            self._audio_sample_rate = tag.sound_rate.value
            self._audio_sample_size = tag.sound_size.value
            self._stereo = tag.sound_type == SoundType.STEREO

        self._num_of_audio_tags += 1
        self._size_of_audio_tags += tag.tag_size
        self._size_of_audio_data += tag.data_size
        self._last_timestamp_of_audio = tag.timestamp

    def _analyse_video_tag(self, tag: VideoTag) -> None:
        if tag.is_keyframe():
            self._keyframe_timestamps.append(tag.timestamp)
            self._keyframe_filepositions.append(self.calc_file_size())
            if tag.is_avc_header():
                self._resolution = Resolution.from_aac_sequence_header(tag)
            else:
                pass

        if not self._video_analysed:
            self._has_video = True
            self._video_analysed = True
            self._video_codec_id = tag.codec_id.value

        self._num_of_video_tags += 1
        self._size_of_video_tags += tag.tag_size
        self._size_of_video_data += tag.data_size
        self._last_timestamp_of_video = tag.timestamp

    def _analyse_script_tag(self, tag: ScriptTag) -> None:
        pass
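Rewritten as a reactivex operator, `Analyser` is transparent to the pipeline: every header and tag passes straight through while statistics accumulate, and a `MetaData` is emitted out of band on completion, on error, on disposal, and whenever a new `FlvHeader` marks the next stream. A hedged usage sketch (`from_file` is the helper imported by metadata_injection.py above):

from blrec.flv import operators as flv_ops
from blrec.flv.operators.helpers import from_file

analyser = flv_ops.Analyser()

# Metadata arrives on a side channel, not in the main tag stream.
analyser.metadatas.subscribe(
    lambda m: print(f'{m.width:.0f}x{m.height:.0f}, {m.duration:.1f}s')
)

# The operator forwards every item unchanged.
from_file('sample.flv').pipe(analyser).subscribe()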
406
src/blrec/flv/operators/concat.py
Normal file
@ -0,0 +1,406 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from enum import IntEnum, auto
|
||||
from typing import Callable, List, Optional, TypedDict, cast
|
||||
|
||||
import attr
|
||||
from reactivex import Observable, Subject, abc
|
||||
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
|
||||
from typing_extensions import TypeGuard
|
||||
|
||||
from ...utils.hash import cksum
|
||||
from ..common import (
|
||||
create_script_tag,
|
||||
is_audio_sequence_header,
|
||||
is_metadata_tag,
|
||||
is_script_tag,
|
||||
is_sequence_header,
|
||||
is_video_sequence_header,
|
||||
parse_scriptdata,
|
||||
)
|
||||
from ..models import AudioTag, FlvHeader, FlvTag, ScriptTag, VideoTag
|
||||
from ..scriptdata import ScriptData
|
||||
from ..utils import format_timestamp
|
||||
from .typing import FLVStream, FLVStreamItem
|
||||
|
||||
__all__ = ('concat', 'JoinPointExtractor', 'JoinPoint', 'JoinPointData')
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@attr.s(auto_attribs=True, slots=True, frozen=True)
|
||||
class JoinPoint:
|
||||
seamless: bool
|
||||
timestamp: float # timestamp of previous tag in milliseconds
|
||||
crc32: str # crc32 of the previous tag
|
||||
|
||||
@classmethod
|
||||
def from_metadata_value(cls, value: JoinPointData) -> JoinPoint:
|
||||
return cls(
|
||||
timestamp=int(value['timestamp']),
|
||||
seamless=value['seamless'],
|
||||
crc32=value['crc32'],
|
||||
)
|
||||
|
||||
def to_metadata_value(self) -> JoinPointData:
|
||||
return dict(
|
||||
timestamp=float(self.timestamp), seamless=self.seamless, crc32=self.crc32
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return 'seamless: {}, timestamp: {}, crc32: {}'.format(
|
||||
'yes' if self.seamless else 'no',
|
||||
format_timestamp(int(self.timestamp)),
|
||||
self.crc32,
|
||||
)
|
||||
|
||||
|
||||
class JoinPointData(TypedDict):
|
||||
seamless: bool
|
||||
timestamp: float
|
||||
crc32: str
|
||||
|
||||
|
||||
class ACTION(IntEnum):
|
||||
NOOP = auto()
|
||||
CORRECT = auto()
|
||||
GATHER = auto()
|
||||
CANCEL = auto()
|
||||
CONCAT = auto()
|
||||
CONCAT_AND_GATHER = auto()
|
||||
|
||||
|
||||
def concat(
|
||||
num_of_last_tags: int = 3, max_duration: int = 20_000
|
||||
) -> Callable[[FLVStream], FLVStream]:
|
||||
"""Concat FLV streams.
|
||||
|
||||
num_of_last_tags: Number of tags for determining whether or not tags are duplicated
|
||||
max_duration: Max duration in milliseconds the duplicated tags might last
|
||||
"""
|
||||
|
||||
def _concat(source: FLVStream) -> FLVStream:
|
||||
def subscribe(
|
||||
observer: abc.ObserverBase[FLVStreamItem],
|
||||
scheduler: Optional[abc.SchedulerBase] = None,
|
||||
) -> abc.DisposableBase:
|
||||
delta: int = 0
|
||||
action: ACTION = ACTION.NOOP
|
||||
last_tags: List[FlvTag] = []
|
||||
gathered_tags: List[FlvTag] = []
|
||||
last_flv_header: Optional[FlvHeader] = None
|
||||
last_audio_sequence_header: Optional[AudioTag] = None
|
||||
last_video_sequence_header: Optional[VideoTag] = None
|
||||
|
||||
def update_last_tags(tag: FlvTag) -> None:
|
||||
nonlocal last_audio_sequence_header, last_video_sequence_header
|
||||
last_tags.append(tag)
|
||||
if len(last_tags) > num_of_last_tags:
|
||||
last_tags.pop(0)
|
||||
if is_audio_sequence_header(tag):
|
||||
last_audio_sequence_header = tag
|
||||
elif is_video_sequence_header(tag):
|
||||
last_video_sequence_header = tag
|
||||
|
||||
def gather_tags(tag: FlvTag) -> None:
|
||||
nonlocal gathered_tags
|
||||
nonlocal action
|
||||
nonlocal last_audio_sequence_header, last_video_sequence_header
|
||||
if is_audio_sequence_header(tag):
|
||||
if last_audio_sequence_header is None:
|
||||
logger.debug(
|
||||
'Cancel concat due to no last audio sequence header'
|
||||
)
|
||||
action = ACTION.CANCEL
|
||||
else:
|
||||
if not tag.is_the_same_as(last_audio_sequence_header):
|
||||
action = ACTION.CANCEL
|
||||
logger.debug(
|
||||
'Cancel concat due to audio sequence header changed'
|
||||
)
|
||||
last_audio_sequence_header = tag
|
||||
elif is_video_sequence_header(tag):
|
||||
if last_video_sequence_header is None:
|
||||
logger.debug(
|
||||
'Cancel concat due to no last video sequence header'
|
||||
)
|
||||
action = ACTION.CANCEL
|
||||
else:
|
||||
if not tag.is_the_same_as(last_video_sequence_header):
|
||||
action = ACTION.CANCEL
|
||||
logger.debug(
|
||||
'Cancel concat due to video sequence header changed'
|
||||
)
|
||||
last_video_sequence_header = tag
|
||||
gathered_tags.append(tag)
|
||||
|
||||
def has_gathering_completed() -> bool:
|
||||
# XXX: timestamp MUST start from 0 and continuous!
|
||||
# put the correct and fix operator on upstream of this operator
|
||||
# to ensure timestamp start from 0 and continuous!
|
||||
return gathered_tags[-1].timestamp >= max_duration
|
||||
|
||||
def find_last_duplicated_tag(tags: List[FlvTag]) -> int:
|
||||
logger.debug('Finding duplicated tags...')
|
||||
|
||||
last_out_tag = last_tags[-1]
|
||||
logger.debug(f'The last output tag is {last_out_tag}')
|
||||
|
||||
for idx, tag in enumerate(tags):
|
||||
if not tag.is_the_same_as(last_out_tag):
|
||||
continue
|
||||
|
||||
if not all(
|
||||
map(
|
||||
lambda t: t[0].is_the_same_as(t[1]),
|
||||
zip(
|
||||
tags[max(0, idx - (len(last_tags) - 1)) : idx],
|
||||
last_tags[:-1],
|
||||
),
|
||||
)
|
||||
):
|
||||
continue
|
||||
|
||||
logger.debug(f'The last duplicated tag found at {idx} is {tag}')
|
||||
return idx
|
||||
|
||||
logger.debug('No duplicated tag found')
|
||||
return -1
|
||||
|
||||
def update_delta_duplicated(last_duplicated_tag: FlvTag) -> None:
|
||||
nonlocal delta
|
||||
delta = last_tags[-1].timestamp - last_duplicated_tag.timestamp
|
||||
|
||||
def update_delta_no_duplicated(first_data_tag: FlvTag) -> None:
|
||||
nonlocal delta
|
||||
                delta = last_tags[-1].timestamp - first_data_tag.timestamp + 10

            def correct_ts(tag: FlvTag) -> FlvTag:
                if delta == 0:
                    return tag
                return tag.evolve(timestamp=tag.timestamp + delta)

            def make_join_point_tag(tag: FlvTag, seamless: bool) -> ScriptTag:
                assert tag.body is not None
                join_point = JoinPoint(
                    seamless=seamless,
                    timestamp=float(tag.timestamp),
                    crc32=cksum(tag.body),
                )
                script_data = ScriptData(
                    name='onJoinPoint', value=attr.asdict(join_point)
                )
                script_tag = create_script_tag(script_data)
                return script_tag

            def do_concat() -> None:
                logger.debug(
                    'Concatenating... gathered {} tags, total size: {}'.format(
                        len(gathered_tags), sum(t.tag_size for t in gathered_tags)
                    )
                )

                tags = list(
                    filter(
                        lambda tag: not is_metadata_tag(tag)
                        and not is_sequence_header(tag),
                        gathered_tags,
                    )
                )
                logger.debug(
                    '{} data tags, total size: {}'.format(
                        len(tags), sum(t.tag_size for t in tags)
                    )
                )

                if not tags:
                    return

                if (index := find_last_duplicated_tag(tags)) >= 0:
                    seamless = True
                    update_delta_duplicated(tags[index])
                    logger.debug(f'Updated delta: {delta}, seamless: {seamless}')
                    tags = tags[index + 1 :]
                else:
                    seamless = False
                    update_delta_no_duplicated(tags[0])
                    logger.debug(f'Updated delta: {delta}, seamless: {seamless}')

                join_point_tag = make_join_point_tag(last_tags[-1], seamless)
                observer.on_next(join_point_tag)

                for tag in tags:
                    tag = correct_ts(tag)
                    update_last_tags(tag)
                    observer.on_next(tag)
                gathered_tags.clear()

            def do_cancel() -> None:
                logger.debug(
                    'Cancelling... gathered {} tags, total size: {}'.format(
                        len(gathered_tags), sum(t.tag_size for t in gathered_tags)
                    )
                )
                assert last_flv_header is not None
                observer.on_next(last_flv_header)
                for tag in gathered_tags:
                    update_last_tags(tag)
                    observer.on_next(tag)
                gathered_tags.clear()

            def on_next(item: FLVStreamItem) -> None:
                nonlocal action
                nonlocal last_flv_header

                if isinstance(item, FlvHeader):
                    if last_flv_header is None:
                        logger.debug('No operation needed for the first stream')
                        last_flv_header = item
                        action = ACTION.NOOP
                        observer.on_next(item)
                    else:
                        logger.debug('Gathering tags for deduplication...')
                        last_flv_header = item
                        if action == ACTION.GATHER:
                            action = ACTION.CONCAT_AND_GATHER
                        else:
                            action = ACTION.GATHER
                    return

                tag = item

                while True:
                    if action == ACTION.NOOP:
                        update_last_tags(tag)
                        observer.on_next(tag)
                        return

                    if action == ACTION.CORRECT:
                        tag = correct_ts(tag)
                        update_last_tags(tag)
                        observer.on_next(tag)
                        return

                    if action in (ACTION.CONCAT, ACTION.CONCAT_AND_GATHER):
                        do_concat()
                        if action == ACTION.CONCAT_AND_GATHER:
                            action = ACTION.GATHER
                        else:
                            action = ACTION.CORRECT
                            return

                    if action == ACTION.GATHER:
                        gather_tags(tag)
                        if action == ACTION.CANCEL:
                            do_cancel()
                            action = ACTION.NOOP
                            return
                        if has_gathering_completed():
                            action = ACTION.CONCAT
                            continue

                    break

            def on_completed() -> None:
                if action == ACTION.GATHER:
                    do_concat()
                observer.on_completed()

            def on_error(e: Exception) -> None:
                if action == ACTION.GATHER:
                    do_concat()
                observer.on_error(e)

            return source.subscribe(
                on_next, on_error, on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    return _concat


class JoinPointExtractor:
    def __init__(self) -> None:
        self._join_points: Subject[List[JoinPoint]] = Subject()

    @property
    def join_points(self) -> Observable[List[JoinPoint]]:
        return self._join_points

    def __call__(self, source: FLVStream) -> FLVStream:
        return self._extract(source)

    def _extract(self, source: FLVStream) -> FLVStream:
        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            stream_index: int = -1
            subscription = SerialDisposable()
            join_points: List[JoinPoint] = []
            join_point_tag: Optional[ScriptTag] = None

            def push_join_points() -> None:
                self._join_points.on_next(join_points.copy())

            def on_next(item: FLVStreamItem) -> None:
                nonlocal stream_index
                nonlocal join_point_tag

                if isinstance(item, FlvHeader):
                    stream_index += 1
                    if stream_index > 0:
                        push_join_points()
                    join_points.clear()
                    join_point_tag = None
                    observer.on_next(item)
                    return

                if join_point_tag:
                    join_point = self._make_join_point(join_point_tag, item)
                    join_points.append(join_point)
                    logger.debug(f'{repr(join_point)}; {join_point}')
                    join_point_tag = None

                if self._is_join_point_tag(item):
                    join_point_tag = item
                    return

                observer.on_next(item)

            def on_completed() -> None:
                push_join_points()
                observer.on_completed()

            def on_error(e: Exception) -> None:
                push_join_points()
                observer.on_error(e)

            def dispose() -> None:
                push_join_points()

            subscription.disposable = source.subscribe(
                on_next, on_error, on_completed, scheduler=scheduler
            )

            return CompositeDisposable(subscription, Disposable(dispose))

        return Observable(subscribe)

    def _is_join_point_tag(self, tag: FlvTag) -> TypeGuard[ScriptTag]:
        if is_script_tag(tag):
            script_data = parse_scriptdata(tag)
            return script_data['name'] == 'onJoinPoint'
        return False

    def _make_join_point(self, join_point_tag: ScriptTag, tag: FlvTag) -> JoinPoint:
        assert tag.body is not None
        script_data = parse_scriptdata(join_point_tag)
        join_point_data = cast(JoinPointData, script_data['value'])
        return JoinPoint(
            seamless=join_point_data['seamless'],
            timestamp=tag.timestamp,
            crc32=cksum(tag.body),
        )
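
Not part of the commit: a minimal usage sketch of the concat operator and JoinPointExtractor above, assuming the modules are importable exactly as laid out in this diff and that the segment file names are placeholders.

# Illustrative sketch only; file paths are hypothetical.
import reactivex
from blrec.flv.operators.concat import JoinPointExtractor, concat
from blrec.flv.operators.helpers import from_file

extractor = JoinPointExtractor()
extractor.join_points.subscribe(lambda points: print('join points:', points))

# Feed two recorded segments through concat(); the second FlvHeader
# triggers tag gathering, deduplication and timestamp correction.
reactivex.concat(from_file('part1.flv'), from_file('part2.flv')).pipe(
    concat(), extractor
).subscribe()
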
64  src/blrec/flv/operators/correct.py  Normal file
@@ -0,0 +1,64 @@
import logging
from typing import Callable, Optional

from reactivex import Observable, abc

from ..common import is_script_tag, is_sequence_header
from ..models import FlvHeader, FlvTag
from .typing import FLVStream, FLVStreamItem

__all__ = ('correct',)

logger = logging.getLogger(__name__)


def correct() -> Callable[[FLVStream], FLVStream]:
    def _correct(source: FLVStream) -> FLVStream:
        """Correct the timestamp offset of the FLV tags."""

        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            delta: Optional[int] = None

            def correct_ts(tag: FlvTag, delta: int) -> FlvTag:
                if delta == 0:
                    return tag
                return tag.evolve(timestamp=tag.timestamp + delta)

            def on_next(item: FLVStreamItem) -> None:
                nonlocal delta

                if isinstance(item, FlvHeader):
                    delta = None
                    observer.on_next(item)
                    return

                tag = item

                if is_script_tag(tag):
                    tag = correct_ts(tag, -tag.timestamp)
                    observer.on_next(tag)
                    return

                if delta is None:
                    if is_sequence_header(tag):
                        tag = correct_ts(tag, -tag.timestamp)
                    else:
                        logger.debug(f'The first data tag: {tag}')
                        delta = -tag.timestamp
                        logger.debug(f'Timestamp delta: {delta}')
                        tag = correct_ts(tag, delta)
                else:
                    tag = correct_ts(tag, delta)

                observer.on_next(tag)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    return _correct
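
A quick sketch of correct() in isolation (illustrative, not part of this commit): it rebases all timestamps so that the first data tag of each stream starts at 0, and zeroes script tags.

from blrec.flv.operators.correct import correct
from blrec.flv.operators.helpers import from_file

from_file('input.flv').pipe(correct()).subscribe(
    lambda item: print(getattr(item, 'timestamp', 'header'))
)
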
113  src/blrec/flv/operators/cut.py  Normal file
@@ -0,0 +1,113 @@
from __future__ import annotations

import logging
from typing import Optional

from reactivex import Observable, abc

from ..common import (
    is_audio_sequence_header,
    is_metadata_tag,
    is_video_nalu_keyframe,
    is_video_sequence_header,
)
from ..models import AudioTag, FlvHeader, FlvTag, ScriptTag, VideoTag
from ..utils import format_timestamp
from .correct import correct
from .typing import FLVStream, FLVStreamItem

__all__ = ('Cutter',)


logger = logging.getLogger(__name__)


class Cutter:
    def __init__(self, min_duration: int = 5_000) -> None:
        self._min_duration = min_duration  # milliseconds
        self._reset()

    def _reset(self) -> None:
        self._cutting: bool = False
        self._triggered: bool = False
        self._last_timestamp: int = 0
        self._last_flv_header: Optional[FlvHeader] = None
        self._last_metadata_tag: Optional[ScriptTag] = None
        self._last_audio_sequence_header: Optional[AudioTag] = None
        self._last_video_sequence_header: Optional[VideoTag] = None

    def is_cutting(self) -> bool:
        return self._cutting

    def can_cut_stream(self) -> bool:
        if self._triggered or self._cutting:
            return False
        return self._last_timestamp >= self._min_duration

    def cut_stream(self) -> bool:
        if self.can_cut_stream():
            self._triggered = True
            return True
        return False

    def __call__(self, source: FLVStream) -> FLVStream:
        return self._cut(source).pipe(correct())

    def _cut(self, source: FLVStream) -> FLVStream:
        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            def on_next(item: FLVStreamItem) -> None:
                if isinstance(item, FlvHeader):
                    self._reset()
                    self._update_flv_header(item)
                else:
                    self._update_meta_tags(item)
                    self._check_cut_point(item)
                    if self._cutting:
                        self._insert_header_and_tags(observer)
                        self._cutting = False
                        self._triggered = False
                observer.on_next(item)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    def _update_flv_header(self, header: FlvHeader) -> None:
        self._last_flv_header = header

    def _update_meta_tags(self, tag: FlvTag) -> None:
        if is_metadata_tag(tag):
            self._last_metadata_tag = tag
        elif is_audio_sequence_header(tag):
            self._last_audio_sequence_header = tag
        elif is_video_sequence_header(tag):
            self._last_video_sequence_header = tag

    def _insert_header_and_tags(
        self, observer: abc.ObserverBase[FLVStreamItem]
    ) -> None:
        assert self._last_flv_header is not None
        assert self._last_audio_sequence_header is not None
        assert self._last_video_sequence_header is not None
        observer.on_next(self._last_flv_header)
        if self._last_metadata_tag is not None:
            observer.on_next(self._last_metadata_tag)
        observer.on_next(self._last_audio_sequence_header)
        observer.on_next(self._last_video_sequence_header)

    def _check_cut_point(self, tag: FlvTag) -> None:
        self._last_timestamp = tag.timestamp

        if not self._triggered:
            return

        if not is_video_nalu_keyframe(tag):
            return

        self._cutting = True
        logger.info(f'Cut stream at {format_timestamp(tag.timestamp)}')
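
Sketch of driving a Cutter from outside the pipeline (illustrative only; in practice the trigger would come from another thread, e.g. a web API handler, while the stream is flowing): cut_stream() arms the cutter, and the actual split happens at the next video keyframe.

from blrec.flv.operators.cut import Cutter
from blrec.flv.operators.helpers import from_file

cutter = Cutter(min_duration=5_000)
stream = from_file('input.flv').pipe(cutter)
stream.subscribe()
if cutter.can_cut_stream():
    cutter.cut_stream()  # a new FlvHeader is emitted at the next keyframe
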
56  src/blrec/flv/operators/defragment.py  Normal file
@@ -0,0 +1,56 @@
import logging
from typing import Callable, List, Optional

from reactivex import Observable, abc

from ..models import FlvHeader
from .typing import FLVStream, FLVStreamItem

__all__ = ('defragment',)

logger = logging.getLogger(__name__)


def defragment(min_tags: int = 10) -> Callable[[FLVStream], FLVStream]:
    def _defragment(source: FLVStream) -> FLVStream:
        """Discard fragmented FLV streams."""

        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            gathering: bool = False
            gathered_items: List[FLVStreamItem] = []

            def on_next(item: FLVStreamItem) -> None:
                nonlocal gathering

                if isinstance(item, FlvHeader):
                    if gathered_items:
                        logger.debug(
                            'Discarded {} items, total size: {}'.format(
                                len(gathered_items), sum(len(t) for t in gathered_items)
                            )
                        )
                        gathered_items.clear()
                    gathering = True
                    logger.debug('Gathering items...')

                if gathering:
                    gathered_items.append(item)
                    if len(gathered_items) > min_tags:
                        for item in gathered_items:
                            observer.on_next(item)
                        gathered_items.clear()
                        gathering = False
                        logger.debug('Not a fragmented stream, stopped the gathering')
                else:
                    observer.on_next(item)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    return _defragment
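
Illustrative: with the default min_tags=10, a stream that dies before yielding ten items after its header is swallowed entirely, which filters out the short broken bursts a flaky source can produce.

from blrec.flv.operators.defragment import defragment
from blrec.flv.operators.helpers import from_file

from_file('input.flv').pipe(defragment(min_tags=10)).subscribe()
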
123  src/blrec/flv/operators/dump.py  Normal file
@@ -0,0 +1,123 @@
import io
import logging
from typing import Callable, Optional, Tuple

from reactivex import Observable, Subject, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

from ..io import FlvWriter
from ..models import FlvHeader
from .typing import FLVStream, FLVStreamItem

__all__ = ('Dumper',)

logger = logging.getLogger(__name__)


class Dumper:
    def __init__(
        self,
        path_provider: Callable[..., Tuple[str, int]],
        buffer_size: Optional[int] = None,
    ) -> None:
        self.buffer_size = buffer_size or io.DEFAULT_BUFFER_SIZE  # bytes
        self._path_provider = path_provider
        self._file_opened: Subject[Tuple[str, int]] = Subject()
        self._file_closed: Subject[str] = Subject()
        self._size_updates: Subject[int] = Subject()
        self._timestamp_updates: Subject[int] = Subject()
        self._reset()

    def _reset(self) -> None:
        self._path: str = ''
        self._file: Optional[io.BufferedWriter] = None  # opened in 'wb' mode
        self._flv_writer: Optional[FlvWriter] = None

    @property
    def path(self) -> str:
        return self._path

    @property
    def file_opened(self) -> Observable[Tuple[str, int]]:
        return self._file_opened

    @property
    def file_closed(self) -> Observable[str]:
        return self._file_closed

    @property
    def size_updates(self) -> Observable[int]:
        return self._size_updates

    @property
    def timestamp_updates(self) -> Observable[int]:
        return self._timestamp_updates

    def __call__(self, source: FLVStream) -> FLVStream:
        return self._dump(source)

    def _open_file(self) -> None:
        self._path, timestamp = self._path_provider()
        self._file = open(self._path, 'wb', buffering=self.buffer_size)  # type: ignore
        logger.debug(f'Opened file: {self._path}')
        self._file_opened.on_next((self._path, timestamp))

    def _close_file(self) -> None:
        if self._file is not None and not self._file.closed:
            self._file.close()
            logger.debug(f'Closed file: {self._path}')
            self._file_closed.on_next(self._path)

    def _dump(self, source: FLVStream) -> FLVStream:
        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            subscription = SerialDisposable()

            self._close_file()
            self._reset()

            def on_next(item: FLVStreamItem) -> None:
                try:
                    if isinstance(item, FlvHeader):
                        self._close_file()
                        self._open_file()
                        assert self._file is not None
                        self._flv_writer = FlvWriter(self._file)
                        size = self._flv_writer.write_header(item)
                        self._size_updates.on_next(size)
                        self._timestamp_updates.on_next(0)
                    else:
                        if self._flv_writer is not None:
                            size = self._flv_writer.write_tag(item)
                            self._size_updates.on_next(size)
                            self._timestamp_updates.on_next(item.timestamp)

                    observer.on_next(item)
                except Exception as e:
                    self._close_file()
                    self._reset()
                    observer.on_error(e)

            def on_completed() -> None:
                self._close_file()
                self._reset()
                observer.on_completed()

            def on_error(e: Exception) -> None:
                self._close_file()
                self._reset()
                observer.on_error(e)

            def dispose() -> None:
                self._close_file()
                self._reset()

            subscription.disposable = source.subscribe(
                on_next, on_error, on_completed, scheduler=scheduler
            )

            return CompositeDisposable(subscription, Disposable(dispose))

        return Observable(subscribe)
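
Illustrative sketch of wiring a Dumper; the path_provider contract above is to return the output path together with an associated timestamp, so the naming scheme below is purely hypothetical.

import time
from blrec.flv.operators.dump import Dumper
from blrec.flv.operators.helpers import from_file

def path_provider():
    ts = int(time.time())
    return f'rec-{ts}.flv', ts  # hypothetical naming scheme

dumper = Dumper(path_provider)
dumper.file_opened.subscribe(lambda args: print('opened:', args))
dumper.size_updates.subscribe(lambda n: print('wrote', n, 'bytes'))
from_file('input.flv').pipe(dumper).subscribe()
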
152  src/blrec/flv/operators/fix.py  Normal file
@@ -0,0 +1,152 @@
import logging
import math
from typing import Callable, Optional

from reactivex import Observable, abc

from ..common import (
    is_audio_tag,
    is_metadata_tag,
    is_script_tag,
    is_video_tag,
    parse_metadata,
)
from ..models import AudioTag, FlvHeader, FlvTag, ScriptTag, VideoTag
from .typing import FLVStream, FLVStreamItem

__all__ = ('fix',)

logger = logging.getLogger(__name__)


def fix() -> Callable[[FLVStream], FLVStream]:
    def _fix(source: FLVStream) -> FLVStream:
        """Fix broken timestamps of the FLV tags."""

        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            delta: int = 0
            last_tag: Optional[FlvTag] = None
            last_audio_tag: Optional[AudioTag] = None
            last_video_tag: Optional[VideoTag] = None
            frame_rate = 30.0
            video_frame_interval = math.ceil(1000 / frame_rate)
            sound_sample_interval = math.ceil(1000 / 44)

            def reset() -> None:
                nonlocal delta, last_tag, last_audio_tag, last_video_tag
                nonlocal frame_rate, video_frame_interval, sound_sample_interval
                delta = 0
                last_tag = None
                last_audio_tag = None
                last_video_tag = None
                frame_rate = 30.0
                video_frame_interval = math.ceil(1000 / frame_rate)
                sound_sample_interval = math.ceil(1000 / 44)

            def update_parameters(tag: ScriptTag) -> None:
                nonlocal frame_rate, video_frame_interval
                metadata = parse_metadata(tag)
                fps = metadata.get('fps') or metadata.get('framerate')

                if not fps:
                    return

                frame_rate = fps
                video_frame_interval = math.ceil(1000 / frame_rate)

                logger.debug(
                    'frame rate: {}, video frame interval: {}'.format(
                        frame_rate, video_frame_interval
                    )
                )

            def update_last_tags(tag: FlvTag) -> None:
                nonlocal last_tag, last_audio_tag, last_video_tag
                last_tag = tag
                if is_audio_tag(tag):
                    last_audio_tag = tag
                elif is_video_tag(tag):
                    last_video_tag = tag

            def update_delta(tag: FlvTag) -> None:
                nonlocal delta
                assert last_tag is not None
                delta = last_tag.timestamp + delta - tag.timestamp + calc_interval(tag)

            def correct_ts(tag: FlvTag) -> FlvTag:
                if delta == 0:
                    return tag
                return tag.evolve(timestamp=tag.timestamp + delta)

            def calc_interval(tag: FlvTag) -> int:
                if is_audio_tag(tag):
                    return sound_sample_interval
                elif is_video_tag(tag):
                    return video_frame_interval
                else:
                    logger.warning(f'Unexpected tag type: {tag}')
                    return min(sound_sample_interval, video_frame_interval)

            def is_ts_rebounded(tag: FlvTag) -> bool:
                if is_audio_tag(tag):
                    if last_audio_tag is None:
                        return False
                    return tag.timestamp < last_audio_tag.timestamp
                elif is_video_tag(tag):
                    if last_video_tag is None:
                        return False
                    return tag.timestamp < last_video_tag.timestamp
                else:
                    return False

            def is_ts_incontinuous(tag: FlvTag) -> bool:
                if last_tag is None:
                    return False
                return tag.timestamp - last_tag.timestamp > max(
                    sound_sample_interval, video_frame_interval
                )

            def on_next(item: FLVStreamItem) -> None:
                if isinstance(item, FlvHeader):
                    reset()
                    observer.on_next(item)
                    return

                tag = item

                if is_script_tag(tag):
                    if is_metadata_tag(tag):
                        update_parameters(tag)
                    observer.on_next(tag)
                    return

                if is_ts_rebounded(tag):
                    update_delta(tag)
                    logger.warning(
                        f'Timestamp rebounded, updated delta: {delta}\n'
                        f'last audio tag: {last_audio_tag}\n'
                        f'last video tag: {last_video_tag}\n'
                        f'current tag: {tag}'
                    )
                elif is_ts_incontinuous(tag):
                    update_delta(tag)
                    logger.warning(
                        f'Timestamp discontinuity, updated delta: {delta}\n'
                        f'last tag: {last_tag}\n'
                        f'current tag: {tag}'
                    )

                update_last_tags(tag)
                tag = correct_ts(tag)
                observer.on_next(tag)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    return _fix
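
A worked example of the delta bookkeeping above (numbers assumed): suppose the last video tag was emitted at a corrected 1000 ms, delta is 0, and the next video tag rebounds to 200 ms in a 30 fps stream (video_frame_interval = 34).

# delta = last_tag.timestamp + delta - tag.timestamp + calc_interval(tag)
#       = 1000 + 0 - 200 + 34 = 834
# The rebounded tag is therefore emitted at 200 + 834 = 1034 ms,
# exactly one frame interval after the previous video tag.
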
38  src/blrec/flv/operators/helpers.py  Normal file
@@ -0,0 +1,38 @@
import io
import logging

from reactivex import of

from .parse import parse
from .typing import FLVStream

__all__ = ('from_file', 'from_stream')

logger = logging.getLogger(__name__)


def from_stream(
    stream: io.RawIOBase,
    *,
    complete_on_eof: bool = False,
    backup_timestamp: bool = False,
    restore_timestamp: bool = False,
) -> FLVStream:
    return of(stream).pipe(
        parse(
            complete_on_eof=complete_on_eof,
            backup_timestamp=backup_timestamp,
            restore_timestamp=restore_timestamp,
        )
    )


def from_file(
    path: str, *, backup_timestamp: bool = False, restore_timestamp: bool = False
) -> FLVStream:
    return from_stream(
        open(path, 'rb'),
        complete_on_eof=True,
        backup_timestamp=backup_timestamp,
        restore_timestamp=restore_timestamp,
    )
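
Illustrative: from_file() is the read-side entry point used in the sketches throughout this diff; it completes on EOF instead of erroring.

from blrec.flv.operators.helpers import from_file

from_file('input.flv').subscribe(
    lambda item: print(type(item).__name__),
    lambda e: print('error:', e),
    lambda: print('done'),
)
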
93  src/blrec/flv/operators/inject.py  Normal file
@@ -0,0 +1,93 @@
from __future__ import annotations

import logging
from typing import Any, Callable, Dict, Optional, cast

from reactivex import Observable, abc

from ..common import (
    create_metadata_tag,
    enrich_metadata,
    is_metadata_tag,
    parse_metadata,
    update_metadata,
)
from ..models import FlvHeader, ScriptTag
from .analyse import KeyFramesDict
from .typing import FLVStream, FLVStreamItem

__all__ = ('Injector',)


logger = logging.getLogger(__name__)


class Injector:
    def __init__(self, metadata_provider: Callable[..., Dict[str, Any]]) -> None:
        self._metadata_provider = metadata_provider

    def __call__(self, source: FLVStream) -> FLVStream:
        return self._inject(source)

    def _inject(self, source: FLVStream) -> FLVStream:
        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            index = 0

            def on_next(item: FLVStreamItem) -> None:
                nonlocal index
                if isinstance(item, FlvHeader):
                    index = 0
                    observer.on_next(item)
                    return

                tag = item
                index += 1

                if index == 1:
                    if is_metadata_tag(tag):
                        tag = self._inject_metadata(tag)
                        logger.debug('Injected metadata into the metadata tag')
                    else:
                        logger.debug('No metadata tag in the stream')
                        metadata_tag = self._make_metadata_tag()
                        logger.debug('Made a metadata tag for metadata injection')
                        observer.on_next(metadata_tag)
                        logger.debug('Inserted the artificial metadata tag')

                observer.on_next(tag)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    def _inject_metadata(self, tag: ScriptTag) -> ScriptTag:
        old_metadata = parse_metadata(tag)
        new_metadata = self._metadata_provider()
        final_metadata = {
            **{'duration': 0.0, 'filesize': 0.0},
            **old_metadata,
            **new_metadata,
        }
        new_tag = enrich_metadata(tag, final_metadata, offset=tag.offset)

        if 'keyframes' in final_metadata:
            keyframes = cast(KeyFramesDict, final_metadata['keyframes'])
            offset = new_tag.tag_size - tag.tag_size
            keyframes['filepositions'] = list(
                map(lambda p: p + offset, keyframes['filepositions'])
            )
            if 'lastkeyframelocation' in final_metadata:
                final_metadata['lastkeyframelocation'] = keyframes['filepositions'][-1]
            new_tag = update_metadata(new_tag, final_metadata)

        return new_tag

    def _make_metadata_tag(self) -> ScriptTag:
        metadata = self._metadata_provider()
        metadata = {'duration': 0.0, 'filesize': 0.0, **metadata}
        return create_metadata_tag(metadata)
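
Illustrative sketch of an Injector; the metadata_provider is invoked for each stream and its dict is merged over whatever the existing onMetaData tag carries (the 'Comment' key below is just an assumed example).

from blrec.flv.operators.inject import Injector
from blrec.flv.operators.helpers import from_file

injector = Injector(lambda: {'Comment': 'Recorded by blrec'})  # assumed key
from_file('input.flv').pipe(injector).subscribe()
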
153  src/blrec/flv/operators/limit.py  Normal file
@@ -0,0 +1,153 @@
from __future__ import annotations

import logging
from typing import Optional

from reactivex import Observable, abc

from ..common import (
    is_audio_sequence_header,
    is_metadata_tag,
    is_video_nalu_keyframe,
    is_video_sequence_header,
)
from ..models import BACK_POINTER_SIZE, AudioTag, FlvHeader, FlvTag, ScriptTag, VideoTag
from .correct import correct
from .typing import FLVStream, FLVStreamItem

__all__ = ('Limiter',)


logger = logging.getLogger(__name__)


class Limiter:
    def __init__(
        self,
        filesize_limit: int = 0,  # file size in bytes, no limit by default.
        duration_limit: int = 0,  # duration in seconds, no limit by default.
    ) -> None:
        self.filesize_limit = filesize_limit
        self.duration_limit = duration_limit
        self._reset()

    def _reset(self) -> None:
        self._filesize: int = 0
        self._duration: float = 0.0
        self._max_size_between_keyframes: int = 0
        self._max_duration_between_keyframes: float = 0.0
        self._first_keyframe_tag: Optional[VideoTag] = None
        self._last_keyframe_tag: Optional[VideoTag] = None
        self._last_flv_header: Optional[FlvHeader] = None
        self._last_metadata_tag: Optional[ScriptTag] = None
        self._last_audio_sequence_header: Optional[AudioTag] = None
        self._last_video_sequence_header: Optional[VideoTag] = None

    def __call__(self, source: FLVStream) -> FLVStream:
        return self._limit(source).pipe(correct())

    def _limit(self, source: FLVStream) -> FLVStream:
        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            def on_next(item: FLVStreamItem) -> None:
                if isinstance(item, FlvHeader):
                    self._reset()
                    self._update_flv_header(item)
                else:
                    self._update_meta_tags(item)
                    will_over_limits = self._check_limits(item)
                    if will_over_limits:
                        self._insert_header_and_tags(observer)
                observer.on_next(item)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    def _insert_header_and_tags(
        self, observer: abc.ObserverBase[FLVStreamItem]
    ) -> None:
        assert self._last_flv_header is not None
        assert self._last_audio_sequence_header is not None
        assert self._last_video_sequence_header is not None
        observer.on_next(self._last_flv_header)
        if self._last_metadata_tag is not None:
            observer.on_next(self._last_metadata_tag)
        observer.on_next(self._last_audio_sequence_header)
        observer.on_next(self._last_video_sequence_header)

        self._filesize = (
            self._last_flv_header.size
            + self._last_audio_sequence_header.tag_size
            + self._last_video_sequence_header.tag_size
        )
        if self._last_metadata_tag is not None:
            self._filesize += self._last_metadata_tag.tag_size
        self._duration = 0.0
        self._first_keyframe_tag = self._last_keyframe_tag

    def _will_filesize_over_limit(self) -> bool:
        return self._filesize + self._max_size_between_keyframes >= self.filesize_limit

    def _will_duration_over_limit(self) -> bool:
        return (
            self._duration + self._max_duration_between_keyframes >= self.duration_limit
        )

    def _update_flv_header(self, header: FlvHeader) -> None:
        self._filesize += header.size + BACK_POINTER_SIZE
        self._last_flv_header = header

    def _update_meta_tags(self, tag: FlvTag) -> None:
        if is_metadata_tag(tag):
            self._last_metadata_tag = tag
        elif is_audio_sequence_header(tag):
            self._last_audio_sequence_header = tag
        elif is_video_sequence_header(tag):
            self._last_video_sequence_header = tag

    def _check_limits(self, tag: FlvTag) -> bool:
        self._filesize += tag.tag_size + BACK_POINTER_SIZE

        if not is_video_nalu_keyframe(tag):
            return False

        if self._first_keyframe_tag is None:
            self._first_keyframe_tag = tag

        if self._last_keyframe_tag is not None:
            self._max_size_between_keyframes = max(
                self._max_size_between_keyframes,
                tag.offset - self._last_keyframe_tag.offset,
            )
            self._max_duration_between_keyframes = max(
                self._max_duration_between_keyframes,
                (tag.timestamp - self._last_keyframe_tag.timestamp) / 1000,
            )

        self._last_keyframe_tag = tag
        self._duration = (
            self._last_keyframe_tag.timestamp - self._first_keyframe_tag.timestamp
        ) / 1000

        if self.filesize_limit > 0 and self._will_filesize_over_limit():
            logger.debug(
                'File size will be over the limit: {} + {}'.format(
                    self._filesize, self._max_size_between_keyframes
                )
            )
            return True

        if self.duration_limit > 0 and self._will_duration_over_limit():
            logger.debug(
                'Duration will be over the limit: {} + {}'.format(
                    self._duration, self._max_duration_between_keyframes
                )
            )
            return True

        return False
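
Illustrative: the Limiter splits just before a limit would be exceeded, using the worst observed gap between keyframes as a safety margin, so each resulting file stays under the configured limit.

from blrec.flv.operators.limit import Limiter
from blrec.flv.operators.helpers import from_file

limiter = Limiter(filesize_limit=4 * 1024**3)  # roughly 4 GiB per file
from_file('input.flv').pipe(limiter).subscribe()
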
68  src/blrec/flv/operators/parse.py  Normal file
@@ -0,0 +1,68 @@
import io
import logging
from typing import Callable, Optional

from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

from ..io import FlvReader
from .typing import FLVStream, FLVStreamItem

__all__ = ('parse',)

logger = logging.getLogger(__name__)


def parse(
    *,
    ignore_eof: bool = False,
    complete_on_eof: bool = False,
    backup_timestamp: bool = False,
    restore_timestamp: bool = False,
) -> Callable[[Observable[io.RawIOBase]], FLVStream]:
    def _parse(source: Observable[io.RawIOBase]) -> FLVStream:
        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            disposed = False
            subscription = SerialDisposable()

            def on_next(stream: io.RawIOBase) -> None:
                try:
                    try:
                        reader = FlvReader(
                            stream,
                            backup_timestamp=backup_timestamp,
                            restore_timestamp=restore_timestamp,
                        )
                        observer.on_next(reader.read_header())
                        while not disposed:
                            tag = reader.read_tag()
                            observer.on_next(tag)
                    finally:
                        stream.close()
                except EOFError as e:
                    if complete_on_eof:
                        observer.on_completed()
                    else:
                        if not ignore_eof:
                            observer.on_error(e)
                except Exception as e:
                    observer.on_error(e)
                else:
                    observer.on_completed()

            subscription.disposable = source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

            def dispose() -> None:
                nonlocal disposed
                disposed = True

            return CompositeDisposable(subscription, Disposable(dispose))

        return Observable(subscribe)

    return _parse
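
Illustrative: parse() turns an observable of raw byte streams into an FLVStream of headers and tags; this is exactly what from_stream() in helpers.py wraps.

from reactivex import of
from blrec.flv.operators.parse import parse

of(open('input.flv', 'rb')).pipe(parse(complete_on_eof=True)).subscribe()
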
79  src/blrec/flv/operators/probe.py  Normal file
@@ -0,0 +1,79 @@
from __future__ import annotations

import io
import logging
from typing import List, Optional, cast

from reactivex import Observable, Subject, abc

from ...utils.ffprobe import StreamProfile, ffprobe
from ..io import FlvWriter
from ..models import FlvHeader, FlvTag
from .typing import FLVStream, FLVStreamItem

__all__ = ('Prober', 'StreamProfile')


logger = logging.getLogger(__name__)


class Prober:
    def __init__(self) -> None:
        self._profiles: Subject[StreamProfile] = Subject()

    def _reset(self) -> None:
        self._gathering: bool = False
        self._gathered_items: List[FLVStreamItem] = []

    @property
    def profiles(self) -> Observable[StreamProfile]:
        return self._profiles

    def __call__(self, source: FLVStream) -> FLVStream:
        return self._probe(source)

    def _probe(self, source: FLVStream) -> FLVStream:
        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            self._reset()

            def on_next(item: FLVStreamItem) -> None:
                if isinstance(item, FlvHeader):
                    self._gathered_items.clear()
                    self._gathering = True

                if self._gathering:
                    self._gathered_items.append(item)
                    if len(self._gathered_items) >= 10:
                        try:
                            self._do_probe()
                        except Exception as e:
                            logger.warning(f'Failed to probe stream: {repr(e)}')
                        finally:
                            self._gathered_items.clear()
                            self._gathering = False

                observer.on_next(item)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe)

    def _do_probe(self) -> None:
        bytes_io = io.BytesIO()
        writer = FlvWriter(bytes_io)
        writer.write_header(cast(FlvHeader, self._gathered_items[0]))
        for tag in self._gathered_items[1:]:
            writer.write_tag(cast(FlvTag, tag))

        def on_next(profile: StreamProfile) -> None:
            self._profiles.on_next(profile)

        def on_error(e: Exception) -> None:
            logger.warning(f'Failed to probe stream by ffprobe: {repr(e)}')

        ffprobe(bytes_io.getvalue()).subscribe(on_next, on_error)
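
Illustrative sketch (assumes an ffprobe binary is available to the ffprobe() helper): the Prober buffers the first ten items of each stream and probes that prefix out of band, leaving the pipeline untouched.

from blrec.flv.operators.probe import Prober
from blrec.flv.operators.helpers import from_file

prober = Prober()
prober.profiles.subscribe(lambda profile: print('stream profile:', profile))
from_file('input.flv').pipe(prober).subscribe()
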
19  src/blrec/flv/operators/process.py  Normal file
@@ -0,0 +1,19 @@
import logging
from typing import Callable

from .concat import concat
from .defragment import defragment
from .fix import fix
from .split import split
from .typing import FLVStream

__all__ = ('process',)

logger = logging.getLogger(__name__)


def process() -> Callable[[FLVStream], FLVStream]:
    def _process(source: FLVStream) -> FLVStream:
        return source.pipe(defragment(), split(), fix(), concat())

    return _process
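
Illustrative: process() is just the canonical composition of the repair operators, so the two pipelines below are equivalent.

from blrec.flv.operators.process import process
from blrec.flv.operators.helpers import from_file

from_file('input.flv').pipe(process()).subscribe()
# same as: .pipe(defragment(), split(), fix(), concat())
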
75  src/blrec/flv/operators/progress.py  Normal file
@@ -0,0 +1,75 @@
from __future__ import annotations

import logging
from typing import Optional

from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
from tqdm import tqdm

from .typing import FLVStream, FLVStreamItem

__all__ = ('ProgressBar',)


logger = logging.getLogger(__name__)


class ProgressBar:
    def __init__(
        self,
        desc: str,
        postfix: Optional[str] = None,
        total: Optional[int] = None,
        disable: Optional[bool] = False,
    ) -> None:
        self._desc = desc
        self._postfix = postfix
        self._total = total
        self._disable = disable
        self._pbar: Optional[tqdm] = None

    def __call__(self, source: FLVStream) -> FLVStream:
        return self._progress(source)

    def _progress(self, source: FLVStream) -> FLVStream:
        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            subscription = SerialDisposable()

            self._pbar = tqdm(
                disable=self._disable,
                desc=self._desc,
                total=self._total,
                unit='B',
                unit_scale=True,
                unit_divisor=1024,
                postfix=self._postfix,
            )

            def on_next(item: FLVStreamItem) -> None:
                if self._pbar is not None:
                    self._pbar.update(len(item))
                observer.on_next(item)

            def on_completed() -> None:
                if self._pbar is not None:
                    self._pbar.close()
                    self._pbar = None
                observer.on_completed()

            def dispose() -> None:
                if self._pbar is not None:
                    self._pbar.close()
                    self._pbar = None

            subscription.disposable = source.subscribe(
                on_next, observer.on_error, on_completed, scheduler=scheduler
            )

            return CompositeDisposable(subscription, Disposable(dispose))

        return Observable(subscribe)
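
Illustrative: the ProgressBar counts bytes (len(item)) rather than tags, so tqdm can render a byte rate.

from blrec.flv.operators.progress import ProgressBar
from blrec.flv.operators.helpers import from_file

pbar = ProgressBar(desc='Processing', postfix='input.flv')
from_file('input.flv').pipe(pbar).subscribe()
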
100  src/blrec/flv/operators/split.py  Normal file
@@ -0,0 +1,100 @@
import logging
from typing import Callable, Optional

from reactivex import Observable, abc

from ..common import is_audio_sequence_header, is_metadata_tag, is_video_sequence_header
from ..models import AudioTag, FlvHeader, ScriptTag, VideoTag
from .correct import correct
from .typing import FLVStream, FLVStreamItem

__all__ = ('split',)

logger = logging.getLogger(__name__)


def split() -> Callable[[FLVStream], FLVStream]:
    def _split(source: FLVStream) -> FLVStream:
        """Split the FLV stream when av parameters are changed."""

        def subscribe(
            observer: abc.ObserverBase[FLVStreamItem],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            changed: bool = False
            last_flv_header: Optional[FlvHeader] = None
            last_metadata_tag: Optional[ScriptTag] = None
            last_audio_sequence_header: Optional[AudioTag] = None
            last_video_sequence_header: Optional[VideoTag] = None

            def reset() -> None:
                nonlocal changed
                nonlocal last_flv_header
                nonlocal last_metadata_tag
                nonlocal last_audio_sequence_header, last_video_sequence_header
                changed = False
                last_flv_header = None
                last_metadata_tag = None
                last_audio_sequence_header = last_video_sequence_header = None

            def insert_header_and_tags() -> None:
                assert last_flv_header is not None
                assert last_audio_sequence_header is not None
                assert last_video_sequence_header is not None
                observer.on_next(last_flv_header)
                if last_metadata_tag is not None:
                    observer.on_next(last_metadata_tag)
                observer.on_next(last_audio_sequence_header)
                observer.on_next(last_video_sequence_header)

            def on_next(item: FLVStreamItem) -> None:
                nonlocal changed
                nonlocal last_flv_header
                nonlocal last_metadata_tag
                nonlocal last_audio_sequence_header, last_video_sequence_header

                if isinstance(item, FlvHeader):
                    reset()
                    last_flv_header = item
                    observer.on_next(item)
                    return

                tag = item

                if is_metadata_tag(tag):
                    logger.debug(f'Metadata tag: {tag}')
                    last_metadata_tag = tag
                elif is_audio_sequence_header(tag):
                    logger.debug(f'Audio sequence header: {tag}')
                    if last_audio_sequence_header is not None:
                        if not tag.is_the_same_as(last_audio_sequence_header):
                            logger.warning('Audio parameters changed')
                            changed = True
                            last_audio_sequence_header = tag
                            return
                    last_audio_sequence_header = tag
                elif is_video_sequence_header(tag):
                    logger.debug(f'Video sequence header: {tag}')
                    if last_video_sequence_header is not None:
                        if not tag.is_the_same_as(last_video_sequence_header):
                            logger.warning('Video parameters changed')
                            changed = True
                            last_video_sequence_header = tag
                            return
                    last_video_sequence_header = tag
                else:
                    if changed:
                        logger.debug('Splitting stream...')
                        changed = False
                        insert_header_and_tags()
                        logger.debug('Split stream')

                observer.on_next(tag)

            return source.subscribe(
                on_next, observer.on_error, observer.on_completed, scheduler=scheduler
            )

        return Observable(subscribe).pipe(correct())

    return _split
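
Illustrative: split() watches the audio and video sequence headers and re-emits a fresh FlvHeader plus meta tags whenever the encoding parameters change, so downstream consumers see separate streams.

from blrec.flv.operators.split import split
from blrec.flv.operators.helpers import from_file

from_file('input.flv').pipe(split()).subscribe(
    lambda item: print(type(item).__name__)
)
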
8  src/blrec/flv/operators/typing.py  Normal file
@@ -0,0 +1,8 @@
from typing import Union

from reactivex import Observable

from ..models import FlvHeader, FlvTag

FLVStreamItem = Union[FlvHeader, FlvTag]
FLVStream = Observable[FLVStreamItem]
@@ -1,58 +0,0 @@
from __future__ import annotations
import logging
from typing import Optional


from .models import AudioTag, FlvTag, ScriptTag, VideoTag
from .common import (
    is_audio_sequence_header, is_video_sequence_header, is_metadata_tag
)
from .exceptions import AudioParametersChanged, VideoParametersChanged


__all__ = 'ParametersChecker',


logger = logging.getLogger(__name__)


class ParametersChecker:
    def __init__(self) -> None:
        self.reset()

    @property
    def last_metadata_tag(self) -> Optional[ScriptTag]:
        return self._last_metadata_tag

    @property
    def last_audio_header_tag(self) -> Optional[AudioTag]:
        return self._last_audio_header_tag

    @property
    def last_video_header_tag(self) -> Optional[VideoTag]:
        return self._last_video_header_tag

    def reset(self) -> None:
        self._last_metadata_tag: Optional[ScriptTag] = None
        self._last_audio_header_tag: Optional[AudioTag] = None
        self._last_video_header_tag: Optional[VideoTag] = None

    def check_tag(self, tag: FlvTag) -> None:
        if is_audio_sequence_header(tag):
            if self._last_audio_header_tag is not None:
                if not tag.is_the_same_as(self._last_audio_header_tag):
                    logger.debug(f'Audio parameters changed: {tag}')
                    self._last_audio_header_tag = tag
                    raise AudioParametersChanged()
            self._last_audio_header_tag = tag
        elif is_video_sequence_header(tag):
            if self._last_video_header_tag is not None:
                if not tag.is_the_same_as(self._last_video_header_tag):
                    logger.debug(f'Video parameters changed: {tag}')
                    self._last_video_header_tag = tag
                    raise VideoParametersChanged()
            self._last_video_header_tag = tag
        elif is_metadata_tag(tag):
            self._last_metadata_tag = tag
        else:
            pass
@@ -1,60 +0,0 @@
from __future__ import annotations
import logging
from typing import Optional


from .models import FlvTag, VideoTag
from .exceptions import CutStream
from .common import is_video_nalu_keyframe
from .utils import format_timestamp


__all__ = 'StreamCutter',


logger = logging.getLogger(__name__)


class StreamCutter:
    def __init__(self, min_duration: int = 5_000) -> None:
        self._min_duration = min_duration  # milliseconds
        self._last_position: int = 0
        self.reset()

    @property
    def last_keyframe_tag(self) -> Optional[VideoTag]:
        return self._last_keyframe_tag

    def is_cutting(self) -> bool:
        return self._cutting

    def can_cut_stream(self) -> bool:
        return self._timestamp >= self._min_duration

    def cut_stream(self) -> bool:
        if self.can_cut_stream():
            self._triggered = True
            return True
        return False

    def reset(self) -> None:
        self._cutting = False
        self._triggered = False
        self._timestamp: int = 0
        self._last_keyframe_tag: Optional[VideoTag] = None

    def check_tag(self, tag: FlvTag) -> None:
        self._timestamp = tag.timestamp

        if not self._triggered:
            return

        if not is_video_nalu_keyframe(tag):
            return

        self._last_keyframe_tag = tag
        self._last_position += self._timestamp
        self._cutting = True

        logger.info(f'Cut stream at: {format_timestamp(self._last_position)}')
        raise CutStream()
@ -1,943 +0,0 @@
|
||||
from __future__ import annotations
|
||||
import io
|
||||
import math
|
||||
from abc import ABC, abstractmethod
|
||||
import json
|
||||
import logging
|
||||
from typing import (
|
||||
Any, BinaryIO, Dict, List, Final, Iterable, Iterator, Optional, Tuple,
|
||||
Protocol, TypedDict, Union, cast, TYPE_CHECKING
|
||||
)
|
||||
|
||||
import attr
|
||||
from rx.subject import Subject
|
||||
from rx.core import Observable
|
||||
|
||||
from .models import FlvHeader, FlvTag, ScriptTag, VideoTag, AudioTag
|
||||
from .data_analyser import DataAnalyser, MetaData
|
||||
from .stream_cutter import StreamCutter
|
||||
from .limit_checker import LimitChecker
|
||||
from .parameters_checker import ParametersChecker
|
||||
from .io import FlvReader, FlvWriter
|
||||
from .io_protocols import RandomIO
|
||||
from .utils import format_offest, format_timestamp
|
||||
from .exceptions import (
|
||||
FlvTagError,
|
||||
FlvStreamCorruptedError,
|
||||
AudioParametersChanged,
|
||||
VideoParametersChanged,
|
||||
FileSizeOverLimit,
|
||||
DurationOverLimit,
|
||||
CutStream,
|
||||
)
|
||||
from .common import (
|
||||
is_metadata_tag, parse_metadata, is_audio_tag, is_video_tag,
|
||||
is_video_sequence_header, is_audio_sequence_header,
|
||||
enrich_metadata, update_metadata, is_data_tag, read_tags_in_duration,
|
||||
)
|
||||
from ..path import extra_metadata_path
|
||||
if TYPE_CHECKING:
|
||||
from ..core.stream_analyzer import StreamProfile
|
||||
|
||||
|
||||
__all__ = 'StreamProcessor', 'BaseOutputFileManager', 'JoinPoint'
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StreamProcessor:
|
||||
# Number of tags for determining whether or not tags are duplicated
|
||||
_TAG_SEQUENCE_COUNT: Final[int] = 3
|
||||
# Max duration in milliseconds the duplicated tags might last
|
||||
_MAX_DURATION: Final[int] = 20000
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
file_manager: OutputFileManager,
|
||||
*,
|
||||
metadata: Optional[Dict[str, Any]] = None,
|
||||
filesize_limit: int = 0,
|
||||
duration_limit: int = 0,
|
||||
disable_limit: bool = False,
|
||||
analyse_data: bool = False,
|
||||
dedup_join: bool = False,
|
||||
save_extra_metadata: bool = False,
|
||||
backup_timestamp: bool = False,
|
||||
restore_timestamp: bool = False,
|
||||
) -> None:
|
||||
self._file_manager = file_manager
|
||||
self._parameters_checker = ParametersChecker()
|
||||
self._stream_cutter = StreamCutter()
|
||||
if not disable_limit:
|
||||
self._limit_checker = LimitChecker(filesize_limit, duration_limit)
|
||||
if analyse_data:
|
||||
self._data_analyser = DataAnalyser()
|
||||
|
||||
self._metadata = metadata.copy() if metadata else {}
|
||||
self._metadata_tag: ScriptTag
|
||||
|
||||
self._disable_limit = disable_limit
|
||||
self._analyse_data = analyse_data
|
||||
self._dedup_join = dedup_join
|
||||
self._save_x_metadata = save_extra_metadata
|
||||
self._backup_timestamp = backup_timestamp
|
||||
self._restore_timestamp = restore_timestamp
|
||||
|
||||
self._cancelled: bool = False
|
||||
self._finalized: bool = False
|
||||
self._stream_count: int = 0
|
||||
self._size_updates = Subject()
|
||||
self._time_updates = Subject()
|
||||
self._stream_profile_updates = Subject()
|
||||
|
||||
self._delta: int = 0
|
||||
self._has_audio: bool = False
|
||||
self._last_tags: List[FlvTag] = []
|
||||
self._join_points: List[JoinPoint] = []
|
||||
self._resetting_file: bool = False
|
||||
|
||||
@property
|
||||
def filesize_limit(self) -> int:
|
||||
if self._disable_limit:
|
||||
return 0
|
||||
return self._limit_checker.filesize_limit
|
||||
|
||||
@filesize_limit.setter
|
||||
def filesize_limit(self, value: int) -> None:
|
||||
if not self._disable_limit:
|
||||
self._limit_checker.filesize_limit = value
|
||||
|
||||
@property
|
||||
def duration_limit(self) -> int:
|
||||
if self._disable_limit:
|
||||
return 0
|
||||
return self._limit_checker.duration_limit
|
||||
|
||||
@duration_limit.setter
|
||||
def duration_limit(self, value: int) -> None:
|
||||
if not self._disable_limit:
|
||||
self._limit_checker.duration_limit = value
|
||||
|
||||
@property
|
||||
def join_points(self) -> Iterator[JoinPoint]:
|
||||
for point in self._join_points:
|
||||
yield point
|
||||
|
||||
@property
|
||||
def metadata(self) -> Optional[MetaData]:
|
||||
if not self._analyse_data:
|
||||
return None
|
||||
try:
|
||||
return self._data_analyser.make_metadata()
|
||||
except AssertionError:
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.debug(f'Failed to make metadata data, due to: {repr(e)}')
|
||||
return None
|
||||
|
||||
@property
|
||||
def size_updates(self) -> Observable:
|
||||
return self._size_updates
|
||||
|
||||
@property
|
||||
def time_updates(self) -> Observable:
|
||||
return self._time_updates
|
||||
|
||||
@property
|
||||
def stream_profile_updates(self) -> Observable:
|
||||
return self._stream_profile_updates
|
||||
|
||||
@property
|
||||
def cancelled(self) -> bool:
|
||||
return self._cancelled
|
||||
|
||||
@property
|
||||
def finalized(self) -> bool:
|
||||
return self._finalized
|
||||
|
||||
def cancel(self) -> None:
|
||||
self._cancelled = True
|
||||
|
||||
def set_metadata(self, metadata: Dict[str, Any]) -> None:
|
||||
self._metadata = metadata.copy()
|
||||
|
||||
def process_stream(self, stream: RandomIO) -> None:
|
||||
assert not self._cancelled and not self._finalized, \
|
||||
'should not be called after the processing cancelled or finalized'
|
||||
self._stream_count += 1
|
||||
self._process_stream(stream)
|
||||
|
||||
def can_cut_stream(self) -> bool:
|
||||
return self._stream_cutter.can_cut_stream()
|
||||
|
||||
def cut_stream(self) -> bool:
|
||||
return self._stream_cutter.cut_stream()
|
||||
|
||||
def finalize(self) -> None:
|
||||
assert not self._finalized, \
|
||||
'should not be called after the processing finalized'
|
||||
self._finalized = True
|
||||
|
||||
if not self._need_to_finalize():
|
||||
logger.debug('No need to finalize stream processing')
|
||||
return
|
||||
|
||||
self._complete_file()
|
||||
logger.debug('Finalized stream processing')
|
||||
|
||||
def _need_to_finalize(self) -> bool:
|
||||
return self._stream_count > 0 and len(self._last_tags) > 0
|
||||
|
||||
def _reset_params(self) -> None:
|
||||
self._delta = 0
|
||||
self._has_audio = False
|
||||
self._last_tags = []
|
||||
self._join_points = []
|
||||
self._resetting_file = False
|
||||
|
||||
self._stream_cutter.reset()
|
||||
if not self._disable_limit:
|
||||
self._limit_checker.reset()
|
||||
if self._analyse_data:
|
||||
self._data_analyser.reset()
|
||||
|
||||
def _new_file(self) -> None:
|
||||
self._reset_params()
|
||||
self._out_file = self._file_manager.create_file()
|
||||
self._out_writer = FlvWriter(self._out_file)
|
||||
logger.debug(f'New file: {self._file_manager.curr_path}')
|
||||
|
||||
def _reset_file(self) -> None:
|
||||
self._reset_params()
|
||||
self._out_file.truncate(0)
|
||||
logger.debug(f'Reset file: {self._file_manager.curr_path}')
|
||||
|
||||
def _complete_file(self) -> None:
|
||||
curr_path = self._file_manager.curr_path
|
||||
|
||||
if self._save_x_metadata:
|
||||
self._save_extra_metadata()
|
||||
|
||||
self._update_metadata_tag()
|
||||
self._file_manager.close_file()
|
||||
|
||||
logger.debug(f'Complete file: {curr_path}')
|
||||
|
||||
def _process_stream(self, stream: RandomIO) -> None:
|
||||
logger.debug(f'Processing the {self._stream_count}th stream...')
|
||||
|
||||
self._in_reader = FlvReaderWithTimestampFix(
|
||||
stream,
|
||||
backup_timestamp=self._backup_timestamp,
|
||||
restore_timestamp=self._restore_timestamp,
|
||||
)
|
||||
|
||||
flv_header = self._read_header()
|
||||
self._has_audio = flv_header.has_audio()
|
||||
|
||||
try:
|
||||
first_data_tag = self._read_first_data_tag()
|
||||
if not self._last_tags:
|
||||
self._process_initial_stream(flv_header, first_data_tag)
|
||||
else:
|
||||
self._process_subsequent_stream(first_data_tag)
|
||||
except (
|
||||
AudioParametersChanged, VideoParametersChanged,
|
||||
FileSizeOverLimit, DurationOverLimit, CutStream,
|
||||
):
|
||||
self._process_split_stream(flv_header)
|
||||
|
||||
logger.debug(f'Completed processing the {self._stream_count}th stream')
|
||||
|
||||
def _process_initial_stream(
|
||||
self, flv_header: FlvHeader, first_data_tag: FlvTag
|
||||
) -> None:
|
||||
if self._resetting_file:
|
||||
self._reset_file()
|
||||
else:
|
||||
self._new_file()
|
||||
|
||||
try:
|
||||
self._write_header(self._ensure_header_correct(flv_header))
|
||||
self._transfer_meta_tags()
|
||||
self._transfer_first_data_tag(first_data_tag)
|
||||
self._update_stream_profile(flv_header, first_data_tag)
|
||||
except Exception:
|
||||
self._last_tags = []
|
||||
self._resetting_file = True
|
||||
raise
|
||||
else:
|
||||
del flv_header, first_data_tag
|
||||
|
||||
self._transfer_tags_until_complete()
|
||||
|
||||
def _process_subsequent_stream(self, first_data_tag: FlvTag) -> None:
|
||||
tags: List[FlvTag] = []
|
||||
|
||||
if self._dedup_join:
|
||||
tags, exc = self._read_tags_for_deduplication()
|
||||
|
||||
if (index := self._find_last_duplicated_tag(tags)) >= 0:
|
||||
seamless = True
|
||||
self._delta = self._calc_delta_duplicated(tags[index])
|
||||
tags = tags[index + 1:]
|
||||
if not tags:
|
||||
tags = [self._read_first_data_tag()]
|
||||
|
||||
if not self._dedup_join or index == -1:
|
||||
seamless = False
|
||||
self._delta = self._calc_delta_no_duplicated(first_data_tag)
|
||||
tags.insert(0, first_data_tag)
|
||||
|
||||
offset = self._out_file.tell()
|
||||
timestamp = tags[0].timestamp + self._delta
|
||||
self._add_join_point(offset, timestamp, seamless)
|
||||
|
||||
self._transfer_tags(tags)
|
||||
del first_data_tag, tags
|
||||
|
||||
if self._dedup_join and exc:
|
||||
raise exc
|
||||
|
||||
self._transfer_tags_until_complete()
|
||||
|
||||
def _process_split_stream(self, flv_header: FlvHeader) -> None:
|
||||
self._complete_file()
|
||||
|
||||
first_data_tag: FlvTag
|
||||
|
||||
if self._stream_cutter.is_cutting():
|
||||
assert self._stream_cutter.last_keyframe_tag is not None
|
||||
last_keyframe_tag = self._stream_cutter.last_keyframe_tag
|
||||
original_ts = last_keyframe_tag.timestamp - self._delta
|
||||
first_data_tag = last_keyframe_tag.evolve(timestamp=original_ts)
|
||||
elif (
|
||||
not self._disable_limit and (
|
||||
self._limit_checker.is_filesize_over_limit() or
|
||||
self._limit_checker.is_duration_over_limit()
|
||||
)
|
||||
):
|
||||
assert self._limit_checker.last_keyframe_tag is not None
|
||||
last_keyframe_tag = self._limit_checker.last_keyframe_tag
|
||||
original_ts = last_keyframe_tag.timestamp - self._delta
|
||||
first_data_tag = last_keyframe_tag.evolve(timestamp=original_ts)
|
||||
else:
|
||||
first_data_tag = self._read_first_data_tag()
|
||||
|
||||
try:
|
||||
self._process_initial_stream(flv_header, first_data_tag)
|
||||
except (
|
||||
AudioParametersChanged, VideoParametersChanged,
|
||||
FileSizeOverLimit, DurationOverLimit, CutStream,
|
||||
):
|
||||
self._process_split_stream(flv_header)
|
||||
|
||||

    def _transfer_meta_tags(self) -> None:
        logger.debug('Transferring meta tags...')

        if self._parameters_checker.last_metadata_tag is None:
            raise FlvStreamCorruptedError('No metadata tag in the stream')
        if self._parameters_checker.last_video_header_tag is None:
            raise FlvStreamCorruptedError('No video header tag in the stream')
        if self._has_audio:
            if self._parameters_checker.last_audio_header_tag is None:
                raise FlvStreamCorruptedError(
                    'No audio header tag in the stream'
                )
        metadata_tag = self._parameters_checker.last_metadata_tag
        video_header_tag = self._parameters_checker.last_video_header_tag
        audio_header_tag = self._parameters_checker.last_audio_header_tag

        offset = self._out_file.tell()
        self._metadata_tag = self._enrich_metadata(metadata_tag, offset)

        offset_delta = self._metadata_tag.tag_size - metadata_tag.tag_size
        self._update_injected_metadata(offset_delta)

        self._write_tag(
            self._correct_ts(self._metadata_tag, -self._metadata_tag.timestamp)
        )
        self._write_tag(
            self._correct_ts(video_header_tag, -video_header_tag.timestamp)
        )
        if audio_header_tag is not None:
            self._write_tag(
                self._correct_ts(audio_header_tag, -audio_header_tag.timestamp)
            )

        logger.debug('Meta tags have been transferred')

    def _update_stream_profile(
        self, flv_header: FlvHeader, first_data_tag: FlvTag
    ) -> None:
        from ..core.stream_analyzer import ffprobe

        if self._parameters_checker.last_metadata_tag is None:
            return
        if self._parameters_checker.last_video_header_tag is None:
            return

        bytes_io = io.BytesIO()
        writer = FlvWriter(bytes_io)
        writer.write_header(flv_header)
        writer.write_tag(
            self._correct_ts(
                self._parameters_checker.last_metadata_tag,
                -self._parameters_checker.last_metadata_tag.timestamp,
            )
        )
        writer.write_tag(
            self._correct_ts(
                self._parameters_checker.last_video_header_tag,
                -self._parameters_checker.last_video_header_tag.timestamp,
            )
        )
        if self._parameters_checker.last_audio_header_tag is not None:
            writer.write_tag(
                self._correct_ts(
                    self._parameters_checker.last_audio_header_tag,
                    -self._parameters_checker.last_audio_header_tag.timestamp,
                )
            )
        writer.write_tag(self._correct_ts(first_data_tag, -first_data_tag.timestamp))

        def on_next(profile: StreamProfile) -> None:
            self._stream_profile_updates.on_next(profile)

        def on_error(e: Exception) -> None:
            logger.warning(f'Failed to analyse stream: {repr(e)}')

        ffprobe(bytes_io.getvalue()).subscribe(on_next, on_error)

    def _transfer_first_data_tag(self, tag: FlvTag) -> None:
        logger.debug(f'Transferring the first data tag: {tag}')
        self._delta = -tag.timestamp
        self._transfer_tags([tag])
        logger.debug('The first data tag has been transferred')

    def _read_tags_for_deduplication(
        self
    ) -> Tuple[List[FlvTag], Optional[Exception]]:
        logger.debug('Reading tags for tag deduplication...')
        tags: List[FlvTag] = []
        try:
            for tag in read_tags_in_duration(
                self._in_reader, self._MAX_DURATION
            ):
                tags.append(tag)
        except Exception as exc:
            logger.debug(f'Failed to read data, due to: {repr(exc)}')
            return tags, exc
        else:
            return tags, None
        finally:
            logger.debug('Read {} tags, total size: {}'.format(
                len(tags), sum(t.tag_size for t in tags)
            ))

    def _transfer_tags_until_complete(self) -> None:
        logger.debug('Transferring tags until complete...')
        self._transfer_tags(self._read_tags_from_in_stream())

    def _read_tags_from_in_stream(self) -> Iterator[FlvTag]:
        while not self._cancelled:
            try:
                tag = self._in_reader.read_tag()
                self._parameters_checker.check_tag(tag)
                yield tag
            except EOFError:
                logger.debug('The input stream is exhausted')
                break
            except AudioParametersChanged:
                if self._analyse_data:
                    logger.warning('Audio parameters changed at {}'.format(
                        format_timestamp(self._data_analyser.last_timestamp),
                    ))
                yield tag
            except VideoParametersChanged:
                if self._analyse_data:
                    logger.warning('Video parameters changed at {}'.format(
                        format_timestamp(self._data_analyser.last_timestamp),
                    ))
                raise
            except Exception as e:
                logger.debug(f'Failed to read data, due to: {repr(e)}')
                raise
        else:
            logger.debug('Cancelled reading tags from the input stream')

    def _transfer_tags(self, tags: Iterable[FlvTag]) -> None:
        logger.debug(f'Transferring tags... timestamp delta: {self._delta}')

        try:
            count: int = 0
            for tag in tags:
                self._ensure_ts_correct(tag)
                self._write_tag(self._correct_ts(tag, self._delta))
                count += 1
        finally:
            logger.debug(f'{count} tags have been transferred')

    def _add_join_point(
        self, offset: int, timestamp: int, seamless: bool
    ) -> None:
        join_point = JoinPoint(offset, timestamp, seamless)
        self._join_points.append(join_point)
        logger.debug(f'{repr(join_point)}; {join_point}')

    def _find_last_duplicated_tag(self, tags: List[FlvTag]) -> int:
        logger.debug('Finding duplicated tags...')

        last_out_tag = self._last_tags[0]
        logger.debug(f'The last output tag is {last_out_tag}')

        for idx, tag in enumerate(tags):
            if not tag.is_the_same_as(last_out_tag):
                continue

            if not all(
                map(
                    lambda t: t[0].is_the_same_as(t[1]),
                    zip(reversed(tags[:idx]), self._last_tags[1:])
                )
            ):
                continue

            logger.debug(f'The last duplicated tag found at {idx} is {tag}')
            return idx

        logger.debug('No duplicated tags found')
        return -1
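
The matching above anchors on the most recently written tag and then checks that the candidates just before it agree with the older entries of the output history. A standalone sketch of the same idea on plain integers (a minimal illustration, not code from this commit):

# Illustrative only: history[0] is the most recently written item.
def find_last_duplicated(candidates: list, history: list) -> int:
    for idx, item in enumerate(candidates):
        if item != history[0]:
            continue
        # The items right before idx must match the older history entries.
        if all(a == b for a, b in zip(reversed(candidates[:idx]), history[1:])):
            return idx
    return -1

assert find_last_duplicated([7, 8, 9, 10], [9, 8, 7]) == 2  # 10 is the first new item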

    def _read_header(self) -> FlvHeader:
        try:
            return self._in_reader.read_header()
        except Exception as exc:
            raise FlvStreamCorruptedError(repr(exc))

    def _read_first_data_tag(self) -> Union[AudioTag, VideoTag]:
        for tag in self._read_tags_from_in_stream():
            if is_data_tag(tag):
                return tag
        raise FlvStreamCorruptedError('No data tag found in the stream!')

    def _read_tags(self, count: int) -> Iterator[FlvTag]:
        assert count > 0, 'count must be greater than 0'
        for c, tag in enumerate(self._read_tags_from_in_stream(), start=1):
            yield tag
            if c >= count:
                break

    def _write_header(self, header: FlvHeader) -> None:
        try:
            size = self._out_writer.write_header(header)
            logger.debug('The flv header has been copied')
        except Exception as exc:
            logger.debug(f'Failed to write data, due to: {repr(exc)}')
            raise

        if not self._disable_limit:
            self._limit_checker.check_header(header)
        if self._analyse_data:
            self._data_analyser.analyse_header(header)
        self._size_updates.on_next(size)

    def _write_tag(self, tag: FlvTag) -> None:
        offset = self._out_file.tell()
        tag = tag.evolve(offset=offset)

        try:
            size = self._out_writer.write_tag(tag)
        except Exception as exc:
            logger.debug(f'Failed to write data, due to: {repr(exc)}')
            raise
        else:
            self._last_tags.insert(0, tag)
            if len(self._last_tags) > self._TAG_SEQUENCE_COUNT:
                self._last_tags.pop()

        self._stream_cutter.check_tag(tag)
        if not self._disable_limit:
            self._limit_checker.check_tag(tag)
        if self._analyse_data:
            self._data_analyser.analyse_tag(tag)

        self._size_updates.on_next(size)
        self._time_updates.on_next(tag.timestamp)

    def _ensure_header_correct(self, header: FlvHeader) -> FlvHeader:
        header.set_video_flag(
            self._parameters_checker.last_video_header_tag is not None
        )
        header.set_audio_flag(
            self._parameters_checker.last_audio_header_tag is not None
        )
        return header

    def _ensure_ts_correct(self, tag: FlvTag) -> None:
        if not tag.timestamp + self._delta < 0:
            return
        logger.warning(
            f'Incorrect timestamp: {tag.timestamp + self._delta}\n'
            f'last output tag: {self._last_tags[0]}\n'
            f'current tag: {tag}'
        )
        if tag.is_audio_tag() or tag.is_video_tag():
            self._delta = (
                self._last_tags[0].timestamp +
                self._in_reader.calc_interval(tag) - tag.timestamp
            )
            logger.debug(f'Updated delta: {self._delta}')
        elif tag.is_script_tag():
            self._delta = (
                self._last_tags[0].timestamp - tag.timestamp
            )
            logger.debug(f'Updated delta: {self._delta}')
        else:
            pass

    def _correct_ts(self, tag: FlvTag, delta: int) -> FlvTag:
        if delta == 0 and tag.timestamp >= 0:
            return tag
        return tag.evolve(timestamp=tag.timestamp + delta)

    def _calc_delta_duplicated(self, last_duplicated_tag: FlvTag) -> int:
        return self._last_tags[0].timestamp - last_duplicated_tag.timestamp

    def _calc_delta_no_duplicated(self, first_data_tag: FlvTag) -> int:
        return (
            self._last_tags[0].timestamp - first_data_tag.timestamp +
            self._in_reader.calc_interval(first_data_tag)
        )
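
In the no-duplicate case the delta therefore shifts the new stream so that its first data tag lands exactly one nominal interval after the last written tag. A quick numeric check (illustrative values only):

# Last written tag at 90_000 ms; the new stream restarts at 120 ms;
# the nominal video frame interval is 33 ms.
last_written_ts = 90_000
first_new_ts = 120
interval = 33
delta = last_written_ts - first_new_ts + interval  # 89_913
assert first_new_ts + delta == last_written_ts + interval  # lands at 90_033 ms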

    def _enrich_metadata(
        self, old_metadata_tag: ScriptTag, offset: int
    ) -> ScriptTag:
        # ensure necessary properties exist in the metadata and initialize them
        metadata = parse_metadata(old_metadata_tag)
        self._metadata.update({
            'duration': 0.0,
            'filesize': 0.0,
            'framerate': metadata.get('framerate', metadata.get('fps', 0.0)),
        })
        # merge the metadata into the metadata tag
        return enrich_metadata(old_metadata_tag, self._metadata, offset)

    def _update_injected_metadata(self, offset_delta: int) -> None:
        updated = False

        if (keyframes := self._metadata.get('keyframes')):
            keyframes['filepositions'] = list(
                map(lambda p: p + offset_delta, keyframes['filepositions'])
            )
            if 'lastkeyframelocation' in self._metadata:
                self._metadata['lastkeyframelocation'] = \
                    keyframes['filepositions'][-1]
            updated = True

        if (join_points := self._metadata.get('joinpoints')):
            join_points = cast(List[JoinPointData], join_points)
            self._metadata['joinpoints'] = list(
                {**p, 'offset': p['offset'] + offset_delta}
                for p in join_points
            )
            updated = True

        if updated:
            self._metadata_tag = \
                update_metadata(self._metadata_tag, self._metadata)

    def _update_metadata_tag(self) -> None:
        last_tag = self._last_tags[0]
        duration = last_tag.timestamp / 1000
        filesize = float(last_tag.next_tag_offset)
        updates = {
            'duration': duration,
            'filesize': filesize,
        }
        if self._analyse_data:
            updates.update({
                'framerate': self._data_analyser.calc_frame_rate(),
            })
        self._metadata_tag = update_metadata(self._metadata_tag, updates)
        self._out_file.seek(self._metadata_tag.offset)
        self._out_writer.write_tag(self._metadata_tag)
        logger.debug('The metadata tag has been updated')

    def _save_extra_metadata(self) -> None:
        if self._analyse_data:
            metadata = attr.asdict(
                self._data_analyser.make_metadata(),
                filter=lambda a, v: v is not None,
            )
        else:
            metadata = {}

        metadata['joinpoints'] = list(
            map(lambda p: p.to_metadata_value(), self._join_points)
        )

        assert self._file_manager.curr_path is not None
        path = extra_metadata_path(self._file_manager.curr_path)
        with open(path, 'wt', encoding='utf8') as file:
            json.dump(metadata, file)

        logger.debug('The extra metadata has been saved')


@attr.s(auto_attribs=True, slots=True, frozen=True)
class JoinPoint:
    offset: int
    timestamp: int  # milliseconds
    seamless: bool

    @classmethod
    def from_metadata_value(cls, value: JoinPointData) -> JoinPoint:
        return cls(
            offset=int(value['offset']),
            timestamp=int(value['timestamp']),
            seamless=value['seamless'],
        )

    def to_metadata_value(self) -> JoinPointData:
        return dict(
            offset=float(self.offset),
            timestamp=float(self.timestamp),
            seamless=self.seamless,
        )

    def __str__(self) -> str:
        return 'offset: {}, timestamp: {}, seamless: {}'.format(
            format_offest(self.offset),
            format_timestamp(self.timestamp),
            'yes' if self.seamless else 'no',
        )


class JoinPointData(TypedDict):
    offset: float
    timestamp: float
    seamless: bool
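
Since FLV script data stores numbers as doubles, the round trip through JoinPointData goes int -> float -> int. A quick sanity check (illustrative, not part of this diff):

point = JoinPoint(offset=1024, timestamp=60_000, seamless=True)
data = point.to_metadata_value()
# data == {'offset': 1024.0, 'timestamp': 60000.0, 'seamless': True}
assert JoinPoint.from_metadata_value(data) == point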


class OutputFileManager(Protocol):
    @property
    def curr_path(self) -> Optional[str]:
        ...

    @property
    def curr_file(self) -> Optional[BinaryIO]:
        ...

    def create_file(self) -> BinaryIO:
        ...

    def close_file(self) -> None:
        ...


class BaseOutputFileManager(ABC):
    def __init__(self, buffer_size: Optional[int] = None) -> None:
        super().__init__()
        self.buffer_size = buffer_size or io.DEFAULT_BUFFER_SIZE  # bytes
        self._paths: List[str] = []
        self._curr_path: Optional[str] = None
        self._curr_file: Optional[BinaryIO] = None

    @property
    def curr_path(self) -> Optional[str]:
        return self._curr_path

    @property
    def curr_file(self) -> Optional[BinaryIO]:
        return self._curr_file

    def has_file(self) -> bool:
        return len(self._paths) > 1

    def get_files(self) -> Iterator[str]:
        for file in self._paths:
            yield file

    def clear_files(self) -> None:
        self._paths.clear()

    def create_file(self) -> BinaryIO:
        assert self._curr_file is None
        path = self._make_path()
        file = open(path, mode='w+b', buffering=self.buffer_size)
        self._paths.append(path)
        self._curr_path = path
        self._curr_file = file
        return file

    def close_file(self) -> None:
        assert self._curr_file is not None
        self._curr_file.close()
        self._curr_path = None
        self._curr_file = None

    @abstractmethod
    def _make_path(self) -> str:
        ...
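
The only abstract hook is _make_path. A hypothetical subclass (not part of this commit) only has to decide where the next file goes, e.g. a timestamped name in a fixed directory:

import os
from datetime import datetime

class TimestampedFileManager(BaseOutputFileManager):
    """Illustrative subclass: allocates timestamped .flv paths."""

    def __init__(self, out_dir: str) -> None:
        super().__init__()
        self._out_dir = out_dir

    def _make_path(self) -> str:
        name = datetime.now().strftime('record-%Y%m%d-%H%M%S.flv')
        return os.path.join(self._out_dir, name)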


class RobustFlvReader(FlvReader):
    def read_tag(self, *, no_body: bool = False) -> FlvTag:
        count = 0
        while True:
            try:
                tag = super().read_tag(no_body=no_body)
            except FlvTagError as e:
                logger.warning(f'Invalid tag: {repr(e)}')
                self._parser.parse_previous_tag_size()
                count += 1
                if count > 3:
                    raise
            else:
                count = 0
                return tag


class FlvReaderWithTimestampFix(RobustFlvReader):
    def __init__(
        self,
        stream: RandomIO,
        backup_timestamp: bool = False,
        restore_timestamp: bool = False,
    ) -> None:
        super().__init__(
            stream,
            backup_timestamp=backup_timestamp,
            restore_timestamp=restore_timestamp,
        )
        self._last_tag: Optional[FlvTag] = None
        self._last_video_tag: Optional[VideoTag] = None
        self._last_audio_tag: Optional[AudioTag] = None
        self._delta = 0
        # 15 is probably the minimum frame rate
        self._frame_rate = 15.0
        self._video_frame_interval = math.ceil(1000 / self._frame_rate)
        # AAC SoundRate is always 44 kHz
        self._sound_sample_interval = math.ceil(1000 / 44)

    @property
    def frame_rate(self) -> float:
        return self._frame_rate

    @property
    def sound_sample_interval(self) -> int:
        return self._sound_sample_interval

    @property
    def video_frame_interval(self) -> int:
        return self._video_frame_interval

    def read_tag(self, *, no_body: bool = False) -> FlvTag:
        while True:
            tag = super().read_tag(no_body=no_body)

            if self._last_tag is None:
                if is_video_tag(tag) or is_audio_tag(tag):
                    self._update_last_tags(tag)
                elif is_metadata_tag(tag):
                    self._update_parameters(tag)
                return tag

            if not is_video_tag(tag) and not is_audio_tag(tag):
                return tag

            if self._is_ts_rebounded(tag):
                self._update_delta(tag)
                logger.warning(
                    f'Timestamp rebounded, updated delta: {self._delta}\n'
                    f'last video tag: {self._last_video_tag}\n'
                    f'last audio tag: {self._last_audio_tag}\n'
                    f'current tag: {tag}'
                )

            if self._is_ts_jumped(tag):
                self._update_delta(tag)
                logger.warning(
                    f'Timestamp jumped, updated delta: {self._delta}\n'
                    f'last tag: {self._last_tag}\n'
                    f'current tag: {tag}'
                )

            self._update_last_tags(tag)

            return self._correct_ts(tag)

    def rread_tag(self, *, no_body: bool = False) -> FlvTag:
        raise NotImplementedError()

    def read_body(self, tag: FlvTag) -> bytes:
        raise NotImplementedError()

    def calc_interval(self, tag: FlvTag) -> int:
        if is_audio_tag(tag):
            return self._sound_sample_interval
        elif is_video_tag(tag):
            return self._video_frame_interval
        else:
            logger.warning(f'Unexpected tag type: {tag}')
            return min(self._sound_sample_interval, self._video_frame_interval)

    def _is_ts_rebounded(self, tag: FlvTag) -> bool:
        if is_video_tag(tag):
            if self._last_video_tag is None:
                return False
            if is_video_sequence_header(self._last_video_tag):
                return tag.timestamp < self._last_video_tag.timestamp
            else:
                return tag.timestamp <= self._last_video_tag.timestamp
        elif is_audio_tag(tag):
            if self._last_audio_tag is None:
                return False
            if is_audio_sequence_header(self._last_audio_tag):
                return tag.timestamp < self._last_audio_tag.timestamp
            else:
                return tag.timestamp <= self._last_audio_tag.timestamp
        else:
            return False

    def _is_ts_jumped(self, tag: FlvTag) -> bool:
        assert self._last_tag is not None
        return (
            tag.timestamp - self._last_tag.timestamp >
            max(self._sound_sample_interval, self._video_frame_interval)
        )

    def _update_last_tags(self, tag: FlvTag) -> None:
        self._last_tag = tag
        if is_video_tag(tag):
            self._last_video_tag = tag
        elif is_audio_tag(tag):
            self._last_audio_tag = tag

    def _update_parameters(self, tag: ScriptTag) -> None:
        metadata = parse_metadata(tag)
        frame_rate = metadata.get('fps') or metadata.get('framerate')

        if not frame_rate:
            return

        self._frame_rate = frame_rate
        self._video_frame_interval = math.ceil(1000 / frame_rate)

        logger.debug('frame rate: {}, video frame interval: {}'.format(
            frame_rate, self._video_frame_interval
        ))

    def _update_delta(self, tag: FlvTag) -> None:
        assert self._last_tag is not None
        self._delta = (
            self._last_tag.timestamp + self._delta - tag.timestamp +
            self.calc_interval(tag)
        )

    def _correct_ts(self, tag: FlvTag) -> FlvTag:
        if self._delta == 0:
            return tag
        return tag.evolve(timestamp=tag.timestamp + self._delta)
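
From the caller's point of view the fix is transparent: tags come back with monotonically corrected timestamps. A hedged usage sketch (assuming any seekable binary stream satisfies RandomIO; the file name is illustrative):

with open('stream.flv', 'rb') as f:
    reader = FlvReaderWithTimestampFix(f)
    header = reader.read_header()
    while True:
        try:
            tag = reader.read_tag()  # timestamp already corrected by delta
        except EOFError:
            break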

@@ -1,13 +1,12 @@
import logging
import json
import logging
from typing import Iterable, cast

import aiofiles

from .helpers import get_metadata, get_extra_metadata
from ..flv.stream_processor import JoinPoint
from ..flv.helpers import make_comment_for_joinpoints

from ..flv.operators import JoinPoint
from .helpers import get_extra_metadata, get_metadata

logger = logging.getLogger(__name__)

@@ -27,7 +26,7 @@ async def _make_metadata_content(flv_path: str) -> str:
    comment = cast(str, metadata.get('Comment', ''))
    chapters = ''

    if (join_points := extra_metadata.get('joinpoints')):
    if join_points := extra_metadata.get('joinpoints'):
        join_points = list(map(JoinPoint.from_metadata_value, join_points))
        comment += '\n\n' + make_comment_for_joinpoints(join_points)
    duration = int(cast(float, metadata['duration']) * 1000)
@@ -49,9 +48,7 @@ Comment={comment}
"""


def _make_chapters(
    join_points: Iterable[JoinPoint], duration: int
) -> str:
def _make_chapters(join_points: Iterable[JoinPoint], duration: int) -> str:
    join_points = filter(lambda p: not p.seamless, join_points)
    timestamps = list(map(lambda p: p.timestamp, join_points))
    if not timestamps:

@@ -1,28 +1,27 @@
from __future__ import annotations

import asyncio
import logging
from pathlib import PurePath
from contextlib import suppress
from pathlib import PurePath
from typing import Any, Awaitable, Dict, Iterator, List, Optional, Union

from rx.core.typing import Scheduler
from rx.scheduler.threadpoolscheduler import ThreadPoolScheduler
from reactivex.scheduler import ThreadPoolScheduler

from .models import PostprocessorStatus, DeleteStrategy
from .typing import Progress
from .remuxer import remux_video, RemuxProgress, RemuxResult
from .helpers import discard_file, get_extra_metadata
from .ffmpeg_metadata import make_metadata_file
from ..event.event_emitter import EventListener, EventEmitter
from ..bili.live import Live
from ..core import Recorder, RecorderEventListener
from ..exception import submit_exception
from ..utils.mixins import AsyncStoppableMixin, AsyncCooperationMixin
from ..path import extra_metadata_path
from ..flv.metadata_injector import inject_metadata, InjectProgress
from ..event.event_emitter import EventEmitter, EventListener
from ..exception import exception_callback, submit_exception
from ..flv.helpers import is_valid_flv_file
from ..flv.metadata_injection import InjectingProgress, inject_metadata
from ..logging.room_id import aio_task_with_room_id

from ..path import extra_metadata_path
from ..utils.mixins import AsyncCooperationMixin, AsyncStoppableMixin, SupportDebugMixin
from .ffmpeg_metadata import make_metadata_file
from .helpers import discard_file, get_extra_metadata
from .models import DeleteStrategy, PostprocessorStatus
from .remux import RemuxingProgress, RemuxingResult, remux_video
from .typing import Progress

__all__ = (
    'Postprocessor',
@@ -47,6 +46,7 @@ class Postprocessor(
    RecorderEventListener,
    AsyncStoppableMixin,
    AsyncCooperationMixin,
    SupportDebugMixin,
):
    def __init__(
        self,
@@ -58,6 +58,7 @@ class Postprocessor(
        delete_source: DeleteStrategy = DeleteStrategy.AUTO,
    ) -> None:
        super().__init__()
        self._init_for_debug(live.room_id)

        self._live = live
        self._recorder = recorder
@@ -94,14 +95,10 @@ class Postprocessor(
        # clear completed files of previous recording
        self._completed_files.clear()

    async def on_video_file_completed(
        self, recorder: Recorder, path: str
    ) -> None:
    async def on_video_file_completed(self, recorder: Recorder, path: str) -> None:
        self._queue.put_nowait(path)

    async def on_danmaku_file_completed(
        self, recorder: Recorder, path: str
    ) -> None:
    async def on_danmaku_file_completed(self, recorder: Recorder, path: str) -> None:
        self._completed_files.append(path)

    async def _do_start(self) -> None:
@@ -110,6 +107,7 @@ class Postprocessor(
        self._queue: asyncio.Queue[str] = asyncio.Queue()
        self._scheduler = ThreadPoolScheduler()
        self._task = asyncio.create_task(self._worker())
        self._task.add_done_callback(exception_callback)

        logger.debug('Started postprocessor')

@@ -150,12 +148,11 @@ class Postprocessor(
            else:
                result_path = video_path

            await discard_file(extra_metadata_path(video_path), 'DEBUG')
            if not self._debug:
                await discard_file(extra_metadata_path(video_path), 'DEBUG')

            self._completed_files.append(result_path)
            await self._emit(
                'video_postprocessing_completed', self, result_path,
            )
            await self._emit('video_postprocessing_completed', self, result_path)
        except Exception as exc:
            submit_exception(exc)
        finally:
@@ -164,9 +161,13 @@ class Postprocessor(
    async def _inject_extra_metadata(self, path: str) -> str:
        try:
            metadata = await get_extra_metadata(path)
            await self._inject_metadata(path, metadata, self._scheduler)
            logger.info(f"Injecting metadata for '{path}' ...")
            await self._inject_metadata(path, metadata)
        except Exception as e:
            logger.error(f"Failed to inject metadata for '{path}': {repr(e)}")
            submit_exception(e)
        else:
            logger.info(f"Successfully injected metadata for '{path}'")
        return path

    async def _remux_flv_to_mp4(self, in_path: str) -> str:
@@ -174,12 +175,10 @@ class Postprocessor(
        logger.info(f"Remuxing '{in_path}' to '{out_path}' ...")

        metadata_path = await make_metadata_file(in_path)
        remux_result = await self._remux_video(
            in_path, out_path, metadata_path, self._scheduler
        )
        remux_result = await self._remux_video(in_path, out_path, metadata_path)

        if remux_result.is_successful():
            logger.info(f"Successfully remux '{in_path}' to '{out_path}'")
            logger.info(f"Successfully remuxed '{in_path}' to '{out_path}'")
            result_path = out_path
        elif remux_result.is_warned():
            logger.warning('Remuxing done, but ran into problems.')
@@ -192,65 +191,54 @@ class Postprocessor(

        logger.debug(f'ffmpeg output:\n{remux_result.output}')

        await discard_file(metadata_path, 'DEBUG')
        if not self._debug:
            await discard_file(metadata_path, 'DEBUG')
        if self._should_delete_source_files(remux_result):
            await discard_file(in_path)

        return result_path

    def _inject_metadata(
        self,
        path: str,
        metadata: Dict[str, Any],
        scheduler: Scheduler,
    ) -> Awaitable[None]:
    def _inject_metadata(self, path: str, metadata: Dict[str, Any]) -> Awaitable[None]:
        future: asyncio.Future[None] = asyncio.Future()
        self._postprocessing_path = path

        def on_next(value: InjectProgress) -> None:
        def on_next(value: InjectingProgress) -> None:
            self._postprocessing_progress = value

        inject_metadata(
            path,
            metadata,
            report_progress=True,
            room_id=self._live.room_id,
        ).subscribe(
            on_next,
            lambda e: future.set_exception(e),
            lambda: future.set_result(None),
            scheduler=scheduler,
        subscription = inject_metadata(path, metadata, show_progress=True).subscribe(
            on_next=on_next,
            on_error=lambda e: future.set_exception(e),
            on_completed=lambda: future.set_result(None),
            scheduler=self._scheduler,
        )
        future.add_done_callback(lambda f: subscription.dispose())

        return future

    def _remux_video(
        self,
        in_path: str,
        out_path: str,
        metadata_path: str,
        scheduler: Scheduler,
    ) -> Awaitable[RemuxResult]:
        future: asyncio.Future[RemuxResult] = asyncio.Future()
        self, in_path: str, out_path: str, metadata_path: str
    ) -> Awaitable[RemuxingResult]:
        future: asyncio.Future[RemuxingResult] = asyncio.Future()
        self._postprocessing_path = in_path

        def on_next(value: Union[RemuxProgress, RemuxResult]) -> None:
            if isinstance(value, RemuxProgress):
        def on_next(value: Union[RemuxingProgress, RemuxingResult]) -> None:
            if isinstance(value, RemuxingProgress):
                self._postprocessing_progress = value
            elif isinstance(value, RemuxResult):
            elif isinstance(value, RemuxingResult):
                future.set_result(value)

        remux_video(
        subscription = remux_video(
            in_path,
            out_path,
            metadata_path,
            report_progress=True,
            show_progress=True,
            remove_filler_data=True,
        ).subscribe(
            on_next,
            lambda e: future.set_exception(e),
            scheduler=scheduler,
            on_next=on_next,
            on_error=lambda e: future.set_exception(e),
            scheduler=self._scheduler,
        )
        future.add_done_callback(lambda f: subscription.dispose())

        return future

@@ -258,9 +246,7 @@ class Postprocessor(
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, is_valid_flv_file, video_path)

    def _should_delete_source_files(
        self, remux_result: RemuxResult
    ) -> bool:
    def _should_delete_source_files(self, remux_result: RemuxingResult) -> bool:
        if self.delete_source == DeleteStrategy.AUTO:
            if not remux_result.is_failed():
                return True

src/blrec/postprocess/remux.py (new file, 158 lines)
@@ -0,0 +1,158 @@
import os
import re
import shlex
from subprocess import PIPE, Popen
from typing import Any, Final, List, Optional, Union

import attr
from reactivex import Observable, abc, create
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
from reactivex.scheduler.currentthreadscheduler import CurrentThreadScheduler
from tqdm import tqdm

__all__ = 'RemuxingResult', 'remux_video'


@attr.s(auto_attribs=True, slots=True, frozen=True)
class RemuxingProgress:
    count: int
    total: int


@attr.s(auto_attribs=True, slots=True, frozen=True)
class RemuxingResult:
    return_code: int
    output: str

    def is_done(self) -> bool:
        return self.return_code == 0

    def is_successful(self) -> bool:
        return self.is_done() and not self.may_timestamps_incorrect()

    def is_warned(self) -> bool:
        return self.is_done() and self.may_timestamps_incorrect()

    def is_failed(self) -> bool:
        return not self.is_done()

    def may_timestamps_incorrect(self) -> bool:
        return 'Non-monotonous DTS in output stream' in self.output


def remux_video(
    in_path: str,
    out_path: str,
    metadata_path: Optional[str] = None,
    *,
    show_progress: bool = False,
    remove_filler_data: bool = False,
) -> Observable[Union[RemuxingProgress, RemuxingResult]]:
    SIZE_PATTERN: Final = re.compile(r'size=\s*(?P<number>\d+)(?P<unit>[a-zA-Z]?B)')
    filesize = os.path.getsize(in_path)
    filename = os.path.basename(in_path)

    def parse_size(line: str) -> int:
        match = SIZE_PATTERN.search(line)
        assert match is not None
        result = match.groupdict()

        unit = result['unit']
        number = int(result['number'])

        if unit == 'B':
            size = number
        elif unit == 'kB':
            size = 1024 * number
        elif unit == 'MB':
            size = 1024**2 * number
        elif unit == 'GB':
            size = 1024**3 * number
        else:
            raise ValueError(unit)

        return size

    def should_output_line(line: str) -> bool:
        line = line.strip()
        return not (line.startswith('frame=') or line.startswith('Press [q]'))

    def subscribe(
        observer: abc.ObserverBase[Union[RemuxingProgress, RemuxingResult]],
        scheduler: Optional[abc.SchedulerBase] = None,
    ) -> abc.DisposableBase:
        _scheduler = scheduler or CurrentThreadScheduler.singleton()

        disposed = False
        cancelable = SerialDisposable()

        def action(scheduler: abc.SchedulerBase, state: Optional[Any] = None) -> None:
            if disposed:
                return

            with tqdm(
                desc='Remuxing',
                total=filesize,
                unit='B',
                unit_scale=True,
                unit_divisor=1024,
                postfix=filename,
                disable=not show_progress,
            ) as pbar:
                cmd = f'ffmpeg -i "{in_path}"'
                if metadata_path is not None:
                    cmd += f' -i "{metadata_path}" -map_metadata 1'
                cmd += ' -codec copy'
                if remove_filler_data:
                    # https://forum.doom9.org/showthread.php?t=152051
                    # ISO_IEC_14496-10_2020(E)
                    # Table 7-1 – NAL unit type codes, syntax element categories, and NAL unit type classes  # noqa
                    # 7.4.2.7 Filler data RBSP semantics
                    cmd += ' -bsf:v filter_units=remove_types=12'
                cmd += f' "{out_path}" -y'

                args = shlex.split(cmd)
                out_lines: List[str] = []

                try:
                    with Popen(
                        args, stderr=PIPE, encoding='utf8', errors='backslashreplace'
                    ) as process:
                        assert process.stderr is not None
                        while not disposed:
                            line = process.stderr.readline()
                            if not line:
                                if process.poll() is not None:
                                    break
                                else:
                                    continue

                            if line.startswith('frame='):
                                size = parse_size(line)
                                pbar.update(size - pbar.n)
                                progress = RemuxingProgress(size, filesize)
                                observer.on_next(progress)

                            if should_output_line(line):
                                out_lines.append(line)

                    if not disposed and process.returncode == 0:
                        pbar.update(filesize)
                        progress = RemuxingProgress(filesize, filesize)
                        observer.on_next(progress)
                except Exception as e:
                    observer.on_error(e)
                else:
                    result = RemuxingResult(process.returncode, ''.join(out_lines))
                    observer.on_next(result)
                    observer.on_completed()

        cancelable.disposable = _scheduler.schedule(action)

        def dispose() -> None:
            nonlocal disposed
            disposed = True

        return CompositeDisposable(cancelable, Disposable(dispose))

    return create(subscribe)
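
A minimal consumer of the observable above might look like this (paths are illustrative; without an explicit scheduler the subscription runs on the current thread):

def on_next(value):
    if isinstance(value, RemuxingProgress):
        print(f'{value.count}/{value.total} bytes')
    else:  # RemuxingResult
        print('return code:', value.return_code)

remux_video('in.flv', 'out.mp4').subscribe(
    on_next=on_next,
    on_error=lambda e: print('remuxing failed:', e),
)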

src/blrec/postprocess/remuxer.py (deleted, 193 lines)
@@ -1,193 +0,0 @@
import os
import re
import shlex
from subprocess import Popen, PIPE
from typing import Final, List, Match, Optional, Union

import attr
from rx import create, operators as op
from rx.subject import Subject
from rx.core import Observable
from rx.core.typing import Observer, Scheduler, Disposable
from rx.scheduler.currentthreadscheduler import CurrentThreadScheduler
from tqdm import tqdm


__all__ = 'VideoRemuxer', 'RemuxResult', 'remux_video'


@attr.s(auto_attribs=True, slots=True, frozen=True)
class RemuxProgress:
    time: int
    duration: int


@attr.s(auto_attribs=True, slots=True, frozen=True)
class RemuxResult:
    return_code: int
    output: str

    def is_done(self) -> bool:
        return self.return_code == 0

    def is_successful(self) -> bool:
        return self.is_done() and not self.may_timestamps_incorrect()

    def is_warned(self) -> bool:
        return self.is_done() and self.may_timestamps_incorrect()

    def is_failed(self) -> bool:
        return not self.is_done()

    def may_timestamps_incorrect(self) -> bool:
        return 'Non-monotonous DTS in output stream' in self.output


class VideoRemuxer:
    _TIME_PATTERN: Final = re.compile(
        r'(?P<hour>\d+):(?P<minute>\d+):(?P<second>\d+).(?P<millisecond>\d+)'
    )

    def __init__(self) -> None:
        self._duration: int = 0
        self._progress_updates = Subject()

    @property
    def progress_updates(self) -> Observable:
        return self._progress_updates

    def remux(
        self,
        in_path: str,
        out_path: str,
        metadata_path: Optional[str] = None,
        *,
        remove_filler_data: bool = False,
    ) -> RemuxResult:
        cmd = f'ffmpeg -i "{in_path}"'
        if metadata_path is not None:
            cmd += f' -i "{metadata_path}" -map_metadata 1'
        cmd += ' -codec copy'
        if remove_filler_data:
            # https://forum.doom9.org/showthread.php?t=152051
            # ISO_IEC_14496-10_2020(E)
            # Table 7-1 – NAL unit type codes, syntax element categories, and NAL unit type classes  # noqa
            # 7.4.2.7 Filler data RBSP semantics
            cmd += ' -bsf:v filter_units=remove_types=12'
        cmd += f' "{out_path}" -y'

        args = shlex.split(cmd)
        out_lines: List[str] = []

        with Popen(
            args, stderr=PIPE, encoding='utf8', errors='backslashreplace'
        ) as process:
            assert process.stderr is not None
            while True:
                line = process.stderr.readline()
                if not line:
                    if process.poll() is not None:
                        break
                    else:
                        continue
                self._parse_line(line)
                if self._should_output_line(line):
                    out_lines.append(line)

        if process.returncode == 0:
            self._complete_progress_updates()

        return RemuxResult(process.returncode, ''.join(out_lines))

    def _parse_line(self, line: str) -> None:
        line = line.strip()
        if line.startswith('frame='):
            self._parse_time(line)
        elif line.startswith('Duration:'):
            self._parse_duration(line)
        else:
            pass

    def _parse_duration(self, line: str) -> None:
        match = self._TIME_PATTERN.search(line)
        if match is not None:
            self._duration = self._calc_time(match)
            self._progress_updates.on_next(RemuxProgress(0, self._duration))

    def _parse_time(self, line: str) -> None:
        match = self._TIME_PATTERN.search(line)
        if match is not None:
            progress = RemuxProgress(self._calc_time(match), self._duration)
            self._progress_updates.on_next(progress)

    def _complete_progress_updates(self) -> None:
        progress = RemuxProgress(self._duration, self._duration)
        self._progress_updates.on_next(progress)
        self._progress_updates.on_completed()

    def _calc_time(self, match: Match[str]) -> int:
        result = match.groupdict()
        return (
            int(result['hour']) * 60 * 60 * 1000 +
            int(result['minute']) * 60 * 1000 +
            int(result['second']) * 1000 +
            int(result['millisecond'])
        )

    def _should_output_line(self, line: str) -> bool:
        line = line.strip()
        return not (
            line.startswith('frame=') or
            line.startswith('Press [q]')
        )


def remux_video(
    in_path: str,
    out_path: str,
    metadata_path: Optional[str] = None,
    *,
    report_progress: bool = False,
    remove_filler_data: bool = False,
) -> Observable:
    def subscribe(
        observer: Observer[Union[RemuxProgress, RemuxResult]],
        scheduler: Optional[Scheduler] = None,
    ) -> Disposable:
        _scheduler = scheduler or CurrentThreadScheduler.singleton()

        def action(scheduler, state):  # type: ignore
            remuxer = VideoRemuxer()
            file_name = os.path.basename(in_path)

            with tqdm(desc='Remuxing', unit='ms', postfix=file_name) as pbar:
                def reset(progress: RemuxProgress) -> None:
                    pbar.reset(progress.duration)

                def update(progress: RemuxProgress) -> None:
                    pbar.update(progress.time - pbar.n)

                remuxer.progress_updates.pipe(op.first()).subscribe(reset)
                remuxer.progress_updates.pipe(op.skip(1)).subscribe(update)

                if report_progress:
                    remuxer.progress_updates.subscribe(
                        lambda p: observer.on_next(p)
                    )

                try:
                    result = remuxer.remux(
                        in_path,
                        out_path,
                        metadata_path,
                        remove_filler_data=remove_filler_data,
                    )
                except Exception as e:
                    observer.on_error(e)
                else:
                    observer.on_next(result)
                    observer.on_completed()

        return _scheduler.schedule(action)

    return create(subscribe)

@@ -1,9 +1,6 @@

from typing import Union

from ..flv.metadata_injection import InjectingProgress
from .remux import RemuxingProgress

from .remuxer import RemuxProgress
from ..flv.metadata_injector import InjectProgress


Progress = Union[RemuxProgress, InjectProgress]
Progress = Union[RemuxingProgress, InjectingProgress]

@@ -18,12 +18,11 @@ from ..bili.danmaku_client import DanmakuClient
from ..bili.live_monitor import LiveMonitor
from ..bili.typing import StreamFormat, QualityNumber
from ..core import Recorder
from ..core.stream_analyzer import StreamProfile
from ..flv.operators import MetaData, StreamProfile
from ..core.cover_downloader import CoverSaveStrategy
from ..postprocess import Postprocessor, PostprocessorStatus, DeleteStrategy
from ..postprocess.remuxer import RemuxProgress
from ..flv.metadata_injector import InjectProgress
from ..flv.data_analyser import MetaData
from ..postprocess.remux import RemuxingProgress
from ..flv.metadata_injection import InjectingProgress
from ..event.event_submitters import (
    LiveEventSubmitter, RecorderEventSubmitter, PostprocessorEventSubmitter
)
@@ -151,10 +150,15 @@ class RecordTask:
            status = VideoFileStatus.RECORDING
        elif path == self._postprocessor.postprocessing_path:
            progress = self._postprocessor.postprocessing_progress
            if isinstance(progress, RemuxProgress):
            if isinstance(progress, RemuxingProgress):
                status = VideoFileStatus.REMUXING
            elif isinstance(progress, InjectProgress):
            elif isinstance(progress, InjectingProgress):
                status = VideoFileStatus.INJECTING
            else:
                if self._postprocessor.remux_to_mp4:
                    status = VideoFileStatus.REMUXING
                else:
                    status = VideoFileStatus.INJECTING
        else:
            # disabling the recorder by force or stopping the task by force
            status = VideoFileStatus.BROKEN

@@ -13,8 +13,7 @@ from tenacity import (

from .task import RecordTask
from .models import TaskData, TaskParam, VideoFileDetail, DanmakuFileDetail
from ..flv.data_analyser import MetaData
from ..core.stream_analyzer import StreamProfile
from ..flv.operators import MetaData, StreamProfile
from ..exception import submit_exception, NotFoundError
from ..bili.exceptions import ApiRequestError
if TYPE_CHECKING:

@@ -1,31 +1,25 @@
from __future__ import annotations

import json
import logging
from subprocess import Popen, PIPE
from typing import Dict, Any, Optional
from subprocess import PIPE, Popen
from typing import Any, Dict, Optional

from rx import create
from rx.core import Observable
from rx.core.typing import Observer, Scheduler, Disposable
from rx.scheduler.newthreadscheduler import NewThreadScheduler


logger = logging.getLogger(__name__)


__all__ = 'ffprobe', 'StreamProfile'
from reactivex import Observable, abc
from reactivex.scheduler import NewThreadScheduler

__all__ = ('ffprobe', 'StreamProfile')

StreamProfile = Dict[str, Any]


def ffprobe(data: bytes) -> Observable:
def ffprobe(data: bytes) -> Observable[StreamProfile]:
    def subscribe(
        observer: Observer[StreamProfile],
        scheduler: Optional[Scheduler] = None,
    ) -> Disposable:
        observer: abc.ObserverBase[StreamProfile],
        scheduler: Optional[abc.SchedulerBase] = None,
    ) -> abc.DisposableBase:
        _scheduler = scheduler or NewThreadScheduler()

        def action(scheduler, state):  # type: ignore
        def action(scheduler: abc.SchedulerBase, state: Optional[Any] = None) -> None:
            args = [
                'ffprobe',
                '-show_streams',
@@ -35,9 +29,7 @@ def ffprobe(data: bytes) -> Observable:
                'pipe:0',
            ]

            with Popen(
                args, stdin=PIPE, stdout=PIPE, stderr=PIPE
            ) as process:
            with Popen(args, stdin=PIPE, stdout=PIPE, stderr=PIPE) as process:
                try:
                    stdout, stderr = process.communicate(data, timeout=10)
                except Exception as e:
@@ -51,4 +43,4 @@ def ffprobe(data: bytes) -> Observable:

    return _scheduler.schedule(action)

    return create(subscribe)
    return Observable(subscribe)
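
A hedged usage sketch: the observable emits one parsed profile dict and then completes, so a caller only needs on_next and on_error (the FLV bytes here are assumed to come from an earlier read):

def probe(flv_bytes: bytes) -> None:
    ffprobe(flv_bytes).subscribe(
        on_next=lambda profile: print(profile),
        on_error=lambda e: print('ffprobe failed:', e),
    )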

@@ -120,7 +120,7 @@ class AsyncCooperationMixin(ABC):
        super().__init__()
        self._loop = asyncio.get_running_loop()

    def _handle_exception(self, exc: BaseException) -> None:
    def _submit_exception(self, exc: BaseException) -> None:
        from ..exception import submit_exception

        async def wrapper() -> None:

src/blrec/utils/operators/__init__.py (new file, 4 lines)
@@ -0,0 +1,4 @@
from .replace import replace
from .retry import retry

__all__ = ('replace', 'retry')
src/blrec/utils/operators/replace.py
Normal file
29
src/blrec/utils/operators/replace.py
Normal file
@ -0,0 +1,29 @@
|
||||
import os
|
||||
from typing import Callable, Optional, TypeVar
|
||||
|
||||
from reactivex import Observable, abc
|
||||
|
||||
_T = TypeVar('_T')
|
||||
|
||||
|
||||
def replace(src_path: str, dst_path: str) -> Callable[[Observable[_T]], Observable[_T]]:
|
||||
def _replace(source: Observable[_T]) -> Observable[_T]:
|
||||
def subscribe(
|
||||
observer: abc.ObserverBase[_T],
|
||||
scheduler: Optional[abc.SchedulerBase] = None,
|
||||
) -> abc.DisposableBase:
|
||||
def on_completed() -> None:
|
||||
try:
|
||||
os.replace(src_path, dst_path)
|
||||
except Exception as e:
|
||||
observer.on_error(e)
|
||||
else:
|
||||
observer.on_completed()
|
||||
|
||||
return source.subscribe(
|
||||
observer.on_next, observer.on_error, on_completed, scheduler=scheduler
|
||||
)
|
||||
|
||||
return Observable(subscribe)
|
||||
|
||||
return _replace
|
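
The operator defers the atomic os.replace until the source completes, so a pipeline can write to a temporary file and only publish it on success. A hedged sketch (assuming 'video.flv.tmp' was written by an earlier stage; the emitted items just pass through):

import reactivex

reactivex.of(b'chunk1', b'chunk2').pipe(
    replace('video.flv.tmp', 'video.flv'),
).subscribe(on_completed=lambda: print('published video.flv'))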

src/blrec/utils/operators/retry.py (new file, 46 lines)
@@ -0,0 +1,46 @@
import time
from typing import Callable, Iterator, Optional, TypeVar

from reactivex import Observable, abc, catch_with_iterable
from reactivex import operators as ops

_T = TypeVar('_T')


def retry(
    count: Optional[int] = None,
    delay: Optional[float] = None,
    should_retry: Callable[[Exception], bool] = lambda _: True,
) -> Callable[[Observable[_T]], Observable[_T]]:
    def _retry(source: Observable[_T]) -> Observable[_T]:
        def subscribe(
            observer: abc.ObserverBase[_T],
            scheduler: Optional[abc.SchedulerBase] = None,
        ) -> abc.DisposableBase:
            exception: Optional[Exception] = None

            def counter() -> Iterator[int]:
                n = 0
                while True:
                    if exception is not None:
                        if not should_retry(exception):
                            break
                        if count is not None and n > count:
                            break
                        if delay:
                            time.sleep(delay)
                    yield n
                    n += 1

            def on_error(e: Exception) -> None:
                nonlocal exception
                exception = e

            _source = source.pipe(ops.do_action(on_error=on_error))
            return catch_with_iterable(_source for _ in counter()).subscribe(
                observer, scheduler=scheduler
            )

        return Observable(subscribe)

    return _retry
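
Unlike the built-in reactivex ops.retry, this variant adds an optional delay between attempts and a predicate to filter retryable errors. A hedged usage sketch:

import reactivex

flaky = reactivex.throw(ConnectionError('boom'))
flaky.pipe(
    retry(count=3, delay=0.1, should_retry=lambda e: isinstance(e, ConnectionError)),
).subscribe(on_error=lambda e: print('gave up after retries:', e))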

@@ -7,9 +7,9 @@ import { Progress } from 'src/app/tasks/shared/task.model';
})
export class ProgressPipe implements PipeTransform {
  transform(progress: Progress): number {
    if (!progress || progress.duration === 0) {
    if (!progress || progress.total === 0) {
      return 0;
    }
    return Math.round((progress.time / progress.duration) * 100);
    return Math.round((progress.count / progress.total) * 100);
  }
}

@@ -74,8 +74,8 @@ export enum PostprocessorStatus {
}

export interface Progress {
  time: number;
  duration: number;
  count: number;
  total: number;
}

export interface TaskStatus {