refactor: use loguru
parent c03a122453
commit 78c561c12e
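Note: this commit replaces the standard-library `logging` setup with loguru throughout. The recurring pattern: each per-room component binds the room id onto its own logger once in `__init__` (`logger.bind(room_id=...)`), detached tasks and worker threads re-enter that context via `logger.contextualize(...)` (directly or through the new `async_task_with_logger_context` decorator), and a sink format can then render `{extra[room_id]}` on every record. A minimal sketch of the two primitives; the sink format and defaults here are illustrative, not taken from the commit:

import sys
from loguru import logger

# One sink; {extra[room_id]} is filled in by bind()/contextualize().
logger.remove()
logger.add(sys.stderr, format='{time} | {level} | [{extra[room_id]}] {message}')
logger.configure(extra={'room_id': ''})  # default so the format never fails

room_logger = logger.bind(room_id=23058)    # child logger with a fixed extra
room_logger.info('Started danmaku client')  # -> ... [23058] Started ...

with logger.contextualize(room_id=23058):   # context-local and async-safe
    logger.debug('also tagged with the room id')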
@@ -46,7 +46,7 @@ install_requires =
    requests >= 2.24.0, < 3.0.0
    aiofiles >= 22.1.0, < 23.0.0
    tenacity >= 8.0.1, < 9.0.0
-   colorama >= 0.4.4, < 0.5.0
+   loguru >= 0.7.2, < 0.8.0
    humanize >= 3.13.1, < 4.0.0
    tqdm >= 4.62.3, < 5.0.0
    attrs >= 21.2.0, < 22.0.0
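Note: in the dependency list, loguru takes the slot previously pinned to colorama; loguru renders its own colored console output, so a direct colorama requirement is presumably no longer needed.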
@@ -1,11 +1,11 @@
 import asyncio
-import logging
 import os
 from contextlib import suppress
 from typing import Iterator, List, Optional

 import attr
 import psutil
+from loguru import logger

 from . import __prog__, __version__
 from .bili.helpers import ensure_room_id
@@ -33,8 +33,6 @@ from .task import (
 )
 from .webhook import WebHookEmitter

-logger = logging.getLogger(__name__)
-

 @attr.s(auto_attribs=True, slots=True, frozen=True)
 class AppInfo:
@@ -100,6 +98,7 @@ class Application:
         await self.exit()

     async def launch(self) -> None:
+        self._setup_logger()
         logger.info('Launching Application...')
         self._setup()
         logger.debug(f'Default umask {os.umask(0o000)}')
@@ -296,7 +295,6 @@ class Application:
         return await self._settings_manager.change_task_options(room_id, options)

     def _setup(self) -> None:
-        self._setup_logger()
         self._setup_exception_handler()
         self._setup_space_monitor()
         self._setup_space_event_submitter()
@@ -1,13 +1,12 @@
 import asyncio
 import hashlib
-import logging
-import os
 from abc import ABC
 from datetime import datetime
-from typing import Any, Dict, List, Mapping, Optional, Final
+from typing import Any, Dict, Final, List, Mapping, Optional
 from urllib.parse import urlencode

 import aiohttp
+from loguru import logger
 from tenacity import retry, stop_after_delay, wait_exponential

 from .exceptions import ApiRequestError
@@ -16,10 +15,6 @@ from .typing import JsonResponse, QualityNumber, ResponseData
 __all__ = 'AppApi', 'WebApi'


-logger = logging.getLogger(__name__)
-
-TRACE_API_REQ = bool(os.environ.get('BLREC_TRACE_API_REQ'))
-
 BASE_HEADERS: Final = {
     'Accept-Encoding': 'gzip, deflate, br',
     'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en;q=0.3,en-US;q=0.2',  # noqa
@@ -34,8 +29,14 @@ BASE_HEADERS: Final = {

 class BaseApi(ABC):
     def __init__(
-        self, session: aiohttp.ClientSession, headers: Optional[Dict[str, str]] = None
+        self,
+        session: aiohttp.ClientSession,
+        headers: Optional[Dict[str, str]] = None,
+        *,
+        room_id: Optional[int] = None,
     ):
+        self._logger = logger.bind(room_id=room_id or '')
+
         self.base_api_urls: List[str] = ['https://api.bilibili.com']
         self.base_live_api_urls: List[str] = ['https://api.live.bilibili.com']
         self.base_play_info_api_urls: List[str] = ['https://api.live.bilibili.com']
@@ -64,13 +65,13 @@ class BaseApi(ABC):
         should_check_response = kwds.pop('check_response', True)
         kwds = {'timeout': self.timeout, 'headers': self.headers, **kwds}
         async with self._session.get(*args, **kwds) as res:
-            if TRACE_API_REQ:
-                logger.debug(f'Request info: {res.request_info}')
+            self._logger.trace('Request: {}', res.request_info)
+            self._logger.trace('Response: {}', await res.text())
             try:
                 json_res = await res.json()
             except aiohttp.ContentTypeError:
                 text_res = await res.text()
-                logger.debug(f'Response text: {text_res[:200]}')
+                self._logger.debug(f'Response text: {text_res[:200]}')
                 raise
             if should_check_response:
                 self._check_response(json_res)
@@ -88,8 +89,7 @@ class BaseApi(ABC):
                 return await self._get_json_res(url, *args, **kwds)
             except Exception as exc:
                 exception = exc
-                if TRACE_API_REQ:
-                    logger.debug(f'Failed to get json from {url}', exc_info=exc)
+                self._logger.trace('Failed to get json from {}: {}', url, repr(exc))
         else:
             assert exception is not None
             raise exception
@@ -106,14 +106,14 @@ class BaseApi(ABC):
         json_responses = []
         for idx, item in enumerate(results):
             if isinstance(item, Exception):
-                if TRACE_API_REQ:
-                    logger.debug(f'Failed to get json from {urls[idx]}', exc_info=item)
+                self._logger.trace(
+                    'Failed to get json from {}: {}', urls[idx], repr(item)
+                )
                 exceptions.append(item)
             elif isinstance(item, dict):
                 json_responses.append(item)
             else:
-                if TRACE_API_REQ:
-                    logger.debug(repr(item))
+                self._logger.trace('{}', repr(item))
         if not json_responses:
             raise exceptions[0]
         return json_responses
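Note: in `BaseApi`, the `BLREC_TRACE_API_REQ` environment flag and its `if TRACE_API_REQ:` guards give way to loguru's built-in TRACE level: the calls always run (observe that `await res.text()` is now evaluated regardless), and the sink level decides whether the records are emitted. A sketch of that gating; the sink setup is assumed, not part of the diff:

import sys
from loguru import logger

logger.remove()
logger.add(sys.stderr, level='DEBUG')  # TRACE (5) < DEBUG (10): filtered out

logger.trace('Request: {}', 'GET /room_init')  # dropped by the sink level
logger.debug('Response text: ...')             # emitted

# Re-adding the sink with level='TRACE' restores the old traced output.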
@@ -1,6 +1,5 @@
 import asyncio
 import json
-import logging
 import struct
 from contextlib import suppress
 from enum import Enum, IntEnum
@@ -11,11 +10,12 @@ import brotli
 from aiohttp import ClientSession
 from tenacity import retry, retry_if_exception_type, wait_exponential

+from blrec.logging.context import async_task_with_logger_context
+
 from ..event.event_emitter import EventEmitter, EventListener
 from ..exception import exception_callback
-from ..logging.room_id import aio_task_with_room_id
 from ..utils.mixins import AsyncStoppableMixin
-from ..utils.string import extract_uid_from_cookie, extract_buvid_from_cookie
+from ..utils.string import extract_buvid_from_cookie, extract_uid_from_cookie
 from .api import AppApi, WebApi
 from .exceptions import DanmakuClientAuthError
 from .typing import ApiPlatform, Danmaku
@@ -23,7 +23,7 @@ from .typing import ApiPlatform, Danmaku
 __all__ = 'DanmakuClient', 'DanmakuListener', 'Danmaku', 'DanmakuCommand'


-logger = logging.getLogger(__name__)
+from loguru import logger


 class DanmakuListener(EventListener):
@@ -57,6 +57,9 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
         headers: Optional[Dict[str, str]] = None,
     ) -> None:
         super().__init__()
+        self._logger_context = {'room_id': room_id}
+        self._logger = logger.bind(**self._logger_context)
+
         self.session = session
         self.appapi = appapi
         self.webapi = webapi
@@ -86,24 +89,25 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
         await self._update_danmu_info()
         await self._connect()
         await self._create_message_loop()
-        logger.debug('Started danmaku client')
+        self._logger.debug('Started danmaku client')

     async def _do_stop(self) -> None:
         await self._terminate_message_loop()
         await self._disconnect()
-        logger.debug('Stopped danmaku client')
+        self._logger.debug('Stopped danmaku client')

+    @async_task_with_logger_context
     async def restart(self) -> None:
-        logger.debug('Restarting danmaku client...')
+        self._logger.debug('Restarting danmaku client...')
         await self.stop()
         await self.start()
-        logger.debug('Restarted danmaku client')
+        self._logger.debug('Restarted danmaku client')

     async def reconnect(self) -> None:
         if self.stopped:
             return

-        logger.debug('Reconnecting...')
+        self._logger.debug('Reconnecting...')
         await self._disconnect()
         await self._connect()
         await self._emit('client_reconnected')
@@ -115,7 +119,7 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
         ),
     )
     async def _connect(self) -> None:
-        logger.debug('Connecting to server...')
+        self._logger.debug('Connecting to server...')
         try:
             await self._connect_websocket()
             await self._send_auth()
@@ -129,7 +133,7 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
             await self._update_danmu_info()
             raise
         else:
-            logger.debug('Connected to server')
+            self._logger.debug('Connected to server')
             await self._emit('client_connected')

     async def _connect_websocket(self) -> None:
@@ -137,16 +141,16 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
             self._danmu_info['host_list'][self._host_index]['host'],
             self._danmu_info['host_list'][self._host_index]['wss_port'],
         )
-        logger.debug(f'Connecting WebSocket... {url}')
+        self._logger.debug(f'Connecting WebSocket... {url}')
         try:
             self._ws = await self.session.ws_connect(
                 url, timeout=5, headers=self.headers
             )
         except Exception as exc:
-            logger.debug(f'Failed to connect WebSocket: {repr(exc)}')
+            self._logger.debug(f'Failed to connect WebSocket: {repr(exc)}')
             raise
         else:
-            logger.debug('Connected WebSocket')
+            self._logger.debug('Connected WebSocket')

     async def _send_auth(self) -> None:
         auth_msg = json.dumps(
@@ -161,26 +165,28 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
             }
         )
         data = Frame.encode(WS.OP_USER_AUTHENTICATION, auth_msg)
-        logger.debug('Sending user authentication...')
+        self._logger.debug('Sending user authentication...')
         try:
             await self._ws.send_bytes(data)
         except Exception as exc:
-            logger.debug(f'Failed to sent user authentication: {repr(exc)}')
+            self._logger.debug(f'Failed to sent user authentication: {repr(exc)}')
             raise
         else:
-            logger.debug('Sent user authentication')
+            self._logger.debug('Sent user authentication')

     async def _recieve_auth_reply(self) -> aiohttp.WSMessage:
-        logger.debug('Receiving user authentication reply...')
+        self._logger.debug('Receiving user authentication reply...')
         try:
             msg = await self._ws.receive(timeout=5)
             if msg.type != aiohttp.WSMsgType.BINARY:
                 raise aiohttp.ClientError(msg)
         except Exception as exc:
-            logger.debug(f'Failed to receive user authentication reply: {repr(exc)}')
+            self._logger.debug(
+                f'Failed to receive user authentication reply: {repr(exc)}'
+            )
             raise
         else:
-            logger.debug('Recieved user authentication reply')
+            self._logger.debug('Recieved user authentication reply')
             return msg

     async def _handle_auth_reply(self, reply: aiohttp.WSMessage) -> None:
@@ -190,7 +196,7 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
         code = cast(int, json.loads(msg)['code'])

         if code == WS.AUTH_OK:
-            logger.debug('Auth OK')
+            self._logger.debug('Auth OK')
             self._create_heartbeat_task()
         elif code == WS.AUTH_TOKEN_ERROR:
             raise DanmakuClientAuthError(f'Token expired: {code}')
@@ -204,7 +210,7 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
             self._api_platform = 'android'

     async def _update_danmu_info(self) -> None:
-        logger.debug(f'Updating danmu info via {self._api_platform} api...')
+        self._logger.debug(f'Updating danmu info via {self._api_platform} api...')
         api: Union[WebApi, AppApi]
         if self._api_platform == 'web':
             api = self.webapi
@@ -213,15 +219,15 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
         try:
             self._danmu_info = await api.get_danmu_info(self._room_id)
         except Exception as exc:
-            logger.warning(f'Failed to update danmu info: {repr(exc)}')
+            self._logger.warning(f'Failed to update danmu info: {repr(exc)}')
             self._danmu_info = COMMON_DANMU_INFO
         else:
-            logger.debug('Danmu info updated')
+            self._logger.debug('Danmu info updated')

     async def _disconnect(self) -> None:
         await self._cancel_heartbeat_task()
         await self._close_websocket()
-        logger.debug('Disconnected from server')
+        self._logger.debug('Disconnected from server')
         await self._emit('client_disconnected')

     async def _close_websocket(self) -> None:
@@ -237,14 +243,14 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
         with suppress(asyncio.CancelledError):
             await self._heartbeat_task

-    @aio_task_with_room_id
+    @async_task_with_logger_context
     async def _send_heartbeat(self) -> None:
         data = Frame.encode(WS.OP_HEARTBEAT, '')
         while True:
             try:
                 await self._ws.send_bytes(data)
             except Exception as exc:
-                logger.warning(f'Failed to send heartbeat: {repr(exc)}')
+                self._logger.warning(f'Failed to send heartbeat: {repr(exc)}')
                 await self._emit('error_occurred', exc)
                 task = asyncio.create_task(self.restart())
                 task.add_done_callback(exception_callback)
@@ -254,15 +260,15 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
     async def _create_message_loop(self) -> None:
         self._message_loop_task = asyncio.create_task(self._message_loop())
         self._message_loop_task.add_done_callback(exception_callback)
-        logger.debug('Created message loop')
+        self._logger.debug('Created message loop')

     async def _terminate_message_loop(self) -> None:
         self._message_loop_task.cancel()
         with suppress(asyncio.CancelledError):
             await self._message_loop_task
-        logger.debug('Terminated message loop')
+        self._logger.debug('Terminated message loop')

-    @aio_task_with_room_id
+    @async_task_with_logger_context
     async def _message_loop(self) -> None:
         while True:
             for msg in await self._receive():
@@ -292,8 +298,7 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
             else:
                 await self._handle_receive_error(ValueError(wsmsg))

-    @staticmethod
-    async def _handle_data(data: bytes) -> Optional[List[Dict[str, Any]]]:
+    async def _handle_data(self, data: bytes) -> Optional[List[Dict[str, Any]]]:
         loop = asyncio.get_running_loop()

         try:
@@ -304,12 +309,14 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
             elif op == WS.OP_HEARTBEAT_REPLY:
                 pass
         except Exception as e:
-            logger.warning(f'Failed to handle data: {repr(e)}, data: {repr(data)}')
+            self._logger.warning(
+                f'Failed to handle data: {repr(e)}, data: {repr(data)}'
+            )

         return None

     async def _handle_receive_error(self, exc: Exception) -> None:
-        logger.warning(f'Failed to receive message: {repr(exc)}')
+        self._logger.warning(f'Failed to receive message: {repr(exc)}')
         await self._emit('error_occurred', exc)
         if isinstance(exc, asyncio.TimeoutError):
             return
@@ -322,7 +329,7 @@ class DanmakuClient(EventEmitter[DanmakuListener], AsyncStoppableMixin):
     async def _retry(self) -> None:
         if self._retry_count < self._MAX_RETRIES:
             if self._retry_delay > 0:
-                logger.debug(
+                self._logger.debug(
                     'Retry after {} second{}'.format(
                         self._retry_delay, 's' if self._retry_delay > 1 else ''
                     )
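Note: `aio_task_with_room_id` is superseded by `blrec.logging.context.async_task_with_logger_context`, applied to coroutines that run as detached tasks (heartbeat, message loop, restart) so their records keep the room id. That module is not shown in this diff; a hypothetical reimplementation, consistent with how the decorator is used and relying on the `self._logger_context` dict the classes now set:

import functools
from typing import Any, Awaitable, Callable, TypeVar

from loguru import logger

_T = TypeVar('_T')

def async_task_with_logger_context(
    func: Callable[..., Awaitable[_T]]
) -> Callable[..., Awaitable[_T]]:
    # Re-enter the instance's logger context inside the coroutine, so records
    # carry the room id even when the task is created from a caller whose
    # context does not have it set.
    @functools.wraps(func)
    async def wrapper(self: Any, *args: Any, **kwargs: Any) -> _T:
        with logger.contextualize(**self._logger_context):
            return await func(self, *args, **kwargs)
    return wrapper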
@@ -13,7 +13,7 @@ __all__ = 'room_init', 'ensure_room_id'

 async def room_init(room_id: int) -> ResponseData:
     async with aiohttp.ClientSession(raise_for_status=True) as session:
-        api = WebApi(session)
+        api = WebApi(session, room_id=room_id)
         return await api.room_init(room_id)


@@ -1,6 +1,5 @@
 import asyncio
 import json
-import logging
 import re
 import time
 from typing import Any, Dict, List
@@ -26,7 +25,7 @@ from .typing import ApiPlatform, QualityNumber, ResponseData, StreamCodec, Strea

 __all__ = ('Live',)

-logger = logging.getLogger(__name__)
+from loguru import logger

 _INFO_PATTERN = re.compile(
     rb'<script>\s*window\.__NEPTUNE_IS_MY_WAIFU__\s*=\s*(\{.*?\})\s*</script>'
@@ -36,6 +35,8 @@ _LIVE_STATUS_PATTERN = re.compile(rb'"live_status"\s*:\s*(\d)')

 class Live:
     def __init__(self, room_id: int, user_agent: str = '', cookie: str = '') -> None:
+        self._logger = logger.bind(room_id=room_id)
+
         self._room_id = room_id
         self._user_agent = user_agent
         self._cookie = cookie
@@ -47,8 +48,8 @@ class Live:
             raise_for_status=True,
             trust_env=True,
         )
-        self._appapi = AppApi(self._session, self.headers)
-        self._webapi = WebApi(self._session, self.headers)
+        self._appapi = AppApi(self._session, self.headers, room_id=room_id)
+        self._webapi = WebApi(self._session, self.headers, room_id=room_id)

         self._room_info: RoomInfo
         self._user_info: UserInfo
@@ -189,7 +190,7 @@ class Live:
         try:
             self._user_info = await self.get_user_info(self._room_info.uid)
         except Exception as e:
-            logger.error(f'Failed to update user info: {repr(e)}')
+            self._logger.error(f'Failed to update user info: {repr(e)}')
             if raise_exception:
                 raise
             return False
@@ -200,7 +201,7 @@ class Live:
         try:
             self._room_info = await self.get_room_info()
         except Exception as e:
-            logger.error(f'Failed to update room info: {repr(e)}')
+            self._logger.error(f'Failed to update room info: {repr(e)}')
             if raise_exception:
                 raise
             return False
@@ -240,7 +241,7 @@ class Live:
         try:
             ts = await self.get_server_timestamp()
         except Exception as e:
-            logger.warning(f'Failed to get timestamp from server: {repr(e)}')
+            self._logger.warning(f'Failed to get timestamp from server: {repr(e)}')
             ts = int(time.time())
         return ts

@@ -1,10 +1,11 @@
 import asyncio
-import logging
 import random
 from contextlib import suppress

+from loguru import logger
+
 from blrec.exception import exception_callback
-from blrec.logging.room_id import aio_task_with_room_id
+from blrec.logging.context import async_task_with_logger_context

 from ..event.event_emitter import EventEmitter, EventListener
 from ..utils.mixins import SwitchableMixin
@@ -17,9 +18,6 @@ from .typing import Danmaku
 __all__ = 'LiveMonitor', 'LiveEventListener'


-logger = logging.getLogger(__name__)
-
-
 class LiveEventListener(EventListener):
     async def on_live_status_changed(
         self, current_status: LiveStatus, previous_status: LiveStatus
@@ -45,6 +43,8 @@ class LiveEventListener(EventListener):
 class LiveMonitor(EventEmitter[LiveEventListener], DanmakuListener, SwitchableMixin):
     def __init__(self, danmaku_client: DanmakuClient, live: Live) -> None:
         super().__init__()
+        self._logger_context = {'room_id': live.room_id}
+        self._logger = logger.bind(**self._logger_context)
         self._danmaku_client = danmaku_client
         self._live = live

@@ -61,13 +61,13 @@ class LiveMonitor(EventEmitter[LiveEventListener], DanmakuListener, SwitchableMi
         self._init_status()
         self._danmaku_client.add_listener(self)
         self._start_polling()
-        logger.debug('Enabled live monitor')
+        self._logger.debug('Enabled live monitor')

     def _do_disable(self) -> None:
         self._danmaku_client.remove_listener(self)
         asyncio.create_task(self._stop_polling())
         asyncio.create_task(self._stop_checking())
-        logger.debug('Disabled live monitor')
+        self._logger.debug('Disabled live monitor')

     def _start_polling(self) -> None:
         self._polling_task = asyncio.create_task(self._poll_live_status())
@@ -99,23 +99,23 @@ class LiveMonitor(EventEmitter[LiveEventListener], DanmakuListener, SwitchableMi
         # events if necessary.
         # make sure the recorder works well continuously after interruptions
         # such as an operating system hibernation.
-        logger.warning('The Danmaku Client Reconnected')
+        self._logger.warning('The Danmaku Client Reconnected')

         await self._live.update_room_info()
         current_status = self._live.room_info.live_status

         if current_status == self._previous_status:
             if current_status == LiveStatus.LIVE:
-                logger.debug('Simulating stream reset event')
+                self._logger.debug('Simulating stream reset event')
                 await self._handle_status_change(current_status)
         else:
             if current_status == LiveStatus.LIVE:
-                logger.debug('Simulating live began event')
+                self._logger.debug('Simulating live began event')
                 await self._handle_status_change(current_status)
-                logger.debug('Simulating live stream available event')
+                self._logger.debug('Simulating live stream available event')
                 await self._handle_status_change(current_status)
             else:
-                logger.debug('Simulating live ended event')
+                self._logger.debug('Simulating live ended event')
                 await self._handle_status_change(current_status)

     async def on_danmaku_received(self, danmu: Danmaku) -> None:
@@ -135,7 +135,7 @@ class LiveMonitor(EventEmitter[LiveEventListener], DanmakuListener, SwitchableMi
         await self._emit('room_changed', self._live.room_info)

     async def _handle_status_change(self, current_status: LiveStatus) -> None:
-        logger.debug(
+        self._logger.debug(
             'Live status changed from {} to {}'.format(
                 self._previous_status.name, current_status.name
             )
@@ -163,62 +163,62 @@ class LiveMonitor(EventEmitter[LiveEventListener], DanmakuListener, SwitchableMi
         else:
             pass

-        logger.debug('Number of sequential LIVE status: {}'.format(self._status_count))
+        self._logger.debug(
+            'Number of sequential LIVE status: {}'.format(self._status_count)
+        )

         self._previous_status = current_status

-    @aio_task_with_room_id
     async def check_live_status(self) -> None:
-        logger.debug('Checking live status...')
+        self._logger.debug('Checking live status...')
         try:
             await self._check_live_status()
         except Exception as e:
-            logger.warning(f'Failed to check live status: {repr(e)}')
-        logger.debug('Done checking live status')
+            self._logger.warning(f'Failed to check live status: {repr(e)}')
+        self._logger.debug('Done checking live status')

-    @aio_task_with_room_id
     async def _check_live_status(self) -> None:
         await self._live.update_room_info()
         current_status = self._live.room_info.live_status
         if current_status != self._previous_status:
             await self._handle_status_change(current_status)

-    @aio_task_with_room_id
+    @async_task_with_logger_context
    async def _poll_live_status(self) -> None:
-        logger.debug('Started polling live status')
+        self._logger.debug('Started polling live status')

         while True:
             try:
                 await asyncio.sleep(600 + random.randrange(-60, 60))
                 await self._check_live_status()
             except asyncio.CancelledError:
-                logger.debug('Cancelled polling live status')
+                self._logger.debug('Cancelled polling live status')
                 break
             except Exception as e:
-                logger.warning(f'Failed to poll live status: {repr(e)}')
+                self._logger.warning(f'Failed to poll live status: {repr(e)}')

-        logger.debug('Stopped polling live status')
+        self._logger.debug('Stopped polling live status')

-    @aio_task_with_room_id
+    @async_task_with_logger_context
     async def _check_if_stream_available(self) -> None:
-        logger.debug('Started checking if stream available')
+        self._logger.debug('Started checking if stream available')

         while True:
             try:
                 streams = await self._live.get_live_streams()
                 if streams:
-                    logger.debug('live stream available')
+                    self._logger.debug('live stream available')
                     self._stream_available = True
                     flv_formats = extract_formats(streams, 'flv')
                     self._live._no_flv_stream = not flv_formats
                     await self._emit('live_stream_available', self._live)
                     break
             except asyncio.CancelledError:
-                logger.debug('Cancelled checking if stream available')
+                self._logger.debug('Cancelled checking if stream available')
                 break
             except Exception as e:
-                logger.warning(f'Failed to check if stream available: {repr(e)}')
+                self._logger.warning(f'Failed to check if stream available: {repr(e)}')

             await asyncio.sleep(1)

-        logger.debug('Stopped checking if stream available')
+        self._logger.debug('Stopped checking if stream available')
@@ -1,4 +1,3 @@
-import logging
 import os
 import sys
 from copy import deepcopy
@@ -6,13 +5,12 @@ from typing import Optional

 import typer
 import uvicorn
+from loguru import logger
 from uvicorn.config import LOGGING_CONFIG

 from .. import __prog__, __version__
 from ..logging import TqdmOutputStream

-logger = logging.getLogger(__name__)
-
 cli = typer.Typer()


@@ -1,16 +1,15 @@
-import logging
 from enum import Enum
 from threading import Lock
 from typing import Set

 import aiofiles
 import aiohttp
+from loguru import logger
 from tenacity import retry, stop_after_attempt, wait_fixed

 from blrec.bili.live import Live
 from blrec.event.event_emitter import EventEmitter, EventListener
 from blrec.exception import submit_exception
-from blrec.logging.room_id import aio_task_with_room_id
 from blrec.path import cover_path
 from blrec.utils.hash import sha1sum
 from blrec.utils.mixins import SwitchableMixin
@@ -20,9 +19,6 @@ from .stream_recorder import StreamRecorder, StreamRecorderEventListener
 __all__ = 'CoverDownloader', 'CoverDownloaderEventListener'


-logger = logging.getLogger(__name__)
-
-
 class CoverDownloaderEventListener(EventListener):
     async def on_cover_image_downloaded(self, path: str) -> None:
         ...
@@ -54,6 +50,8 @@ class CoverDownloader(
         cover_save_strategy: CoverSaveStrategy = CoverSaveStrategy.DEFAULT,
     ) -> None:
         super().__init__()
+        self._logger_context = {'room_id': live.room_id}
+        self._logger = logger.bind(**self._logger_context)
         self._live = live
         self._stream_recorder = stream_recorder
         self._lock: Lock = Lock()
@@ -64,11 +62,11 @@ class CoverDownloader(
     def _do_enable(self) -> None:
         self._sha1_set.clear()
         self._stream_recorder.add_listener(self)
-        logger.debug('Enabled cover downloader')
+        self._logger.debug('Enabled cover downloader')

     def _do_disable(self) -> None:
         self._stream_recorder.remove_listener(self)
-        logger.debug('Disabled cover downloader')
+        self._logger.debug('Disabled cover downloader')

     async def on_video_file_completed(self, video_path: str) -> None:
         with self._lock:
@@ -76,7 +74,6 @@ class CoverDownloader(
             return
         await self._save_cover(video_path)

-    @aio_task_with_room_id
     async def _save_cover(self, video_path: str) -> None:
         try:
             await self._live.update_room_info()
@@ -92,10 +89,10 @@ class CoverDownloader(
             await self._save_file(path, data)
             self._sha1_set.add(sha1)
         except Exception as e:
-            logger.error(f'Failed to save cover image: {repr(e)}')
+            self._logger.error(f'Failed to save cover image: {repr(e)}')
             submit_exception(e)
         else:
-            logger.info(f'Saved cover image: {path}')
+            self._logger.info(f'Saved cover image: {path}')
             await self._emit('cover_image_downloaded', path)

     @retry(reraise=True, wait=wait_fixed(1), stop=stop_after_attempt(3))
@@ -1,11 +1,11 @@
 import asyncio
 import html
-import logging
 from contextlib import suppress
 from decimal import Decimal
 from threading import Lock
 from typing import Iterator, List, Optional

+from loguru import logger
 from tenacity import AsyncRetrying, retry_if_not_exception_type, stop_after_attempt

 from blrec import __github__, __prog__, __version__
@@ -22,7 +22,7 @@ from blrec.danmaku.models import (
 )
 from blrec.event.event_emitter import EventEmitter, EventListener
 from blrec.exception import exception_callback, submit_exception
-from blrec.logging.room_id import aio_task_with_room_id
+from blrec.logging.context import async_task_with_logger_context
 from blrec.path import danmaku_path
 from blrec.utils.mixins import SwitchableMixin

@@ -33,9 +33,6 @@ from .stream_recorder import StreamRecorder, StreamRecorderEventListener
 __all__ = 'DanmakuDumper', 'DanmakuDumperEventListener'


-logger = logging.getLogger(__name__)
-
-
 class DanmakuDumperEventListener(EventListener):
     async def on_danmaku_file_created(self, path: str) -> None:
         ...
@@ -62,6 +59,8 @@ class DanmakuDumper(
         record_super_chat: bool = False,
     ) -> None:
         super().__init__()
+        self._logger_context = {'room_id': live.room_id}
+        self._logger = logger.bind(**self._logger_context)

         self._live = live
         self._stream_recorder = stream_recorder
@@ -97,13 +96,13 @@ class DanmakuDumper(
     def _do_enable(self) -> None:
         self._stream_recorder.add_listener(self)
         self._statistics.reset()
-        logger.debug('Enabled danmaku dumper')
+        self._logger.debug('Enabled danmaku dumper')

     def _do_disable(self) -> None:
         self._stream_recorder.remove_listener(self)
         asyncio.create_task(self._stop_dumping())
         self._statistics.freeze()
-        logger.debug('Disabled danmaku dumper')
+        self._logger.debug('Disabled danmaku dumper')

     def set_live_start_time(self, time: int) -> None:
         self._live_start_time = time
@@ -141,7 +140,7 @@ class DanmakuDumper(
         self._interrupted_timestamp = timestamp
         self._duration = duration
         self._stream_recording_interrupted = True
-        logger.debug(
+        self._logger.debug(
             'Stream recording interrupted, '
             f'timestamp: {timestamp}, duration: {duration}'
         )
@@ -153,13 +152,13 @@ class DanmakuDumper(
             - Decimal(str(self._interrupted_timestamp))
         )
         self._stream_recording_interrupted = False
-        logger.debug(
+        self._logger.debug(
             'Stream recording recovered, '
             f'timestamp: {timestamp}, delta: {self._delta}'
         )

     async def on_duration_lost(self, duration: float) -> None:
-        logger.debug(f'Total duration lost: {(duration)}')
+        self._logger.debug(f'Total duration lost: ≈ {(duration)} s')
         self._delta = -duration

     def _start_dumping(self) -> None:
@@ -179,14 +178,14 @@ class DanmakuDumper(
         with suppress(asyncio.CancelledError):
             await self._dump_task

-    @aio_task_with_room_id
+    @async_task_with_logger_context
     async def _do_dump(self) -> None:
         assert self._path is not None
-        logger.debug('Started dumping danmaku')
+        self._logger.debug('Started dumping danmaku')

         try:
             async with DanmakuWriter(self._path) as writer:
-                logger.info(f"Danmaku file created: '{self._path}'")
+                self._logger.info(f"Danmaku file created: '{self._path}'")
                 await self._emit('danmaku_file_created', self._path)
                 await writer.write_metadata(self._make_metadata())

@@ -201,9 +200,9 @@ class DanmakuDumper(
                 submit_exception(e)
                 raise
         finally:
-            logger.info(f"Danmaku file completed: '{self._path}'")
+            self._logger.info(f"Danmaku file completed: '{self._path}'")
             await self._emit('danmaku_file_completed', self._path)
-            logger.debug('Stopped dumping danmaku')
+            self._logger.debug('Stopped dumping danmaku')

     async def _dumping_loop(self, writer: DanmakuWriter) -> None:
         while True:
@@ -230,7 +229,7 @@ class DanmakuDumper(
                 continue
             await writer.write_super_chat_record(self._make_super_chat_record(msg))
         else:
-            logger.warning(f'Unsupported message type: {repr(msg)}')
+            self._logger.warning(f'Unsupported message type: {repr(msg)}')

     def _make_metadata(self) -> Metadata:
         return Metadata(
@@ -1,8 +1,10 @@
-import logging
 from asyncio import Queue, QueueFull
 from typing import Final

+from loguru import logger
+
 from blrec.bili.danmaku_client import DanmakuClient, DanmakuCommand, DanmakuListener
+from blrec.bili.live import Live
 from blrec.bili.typing import Danmaku
 from blrec.utils.mixins import StoppableMixin

@@ -12,25 +14,23 @@ from .typing import DanmakuMsg
 __all__ = ('DanmakuReceiver',)


-logger = logging.getLogger(__name__)
-
-
 class DanmakuReceiver(DanmakuListener, StoppableMixin):
     _MAX_QUEUE_SIZE: Final[int] = 2000

-    def __init__(self, danmaku_client: DanmakuClient) -> None:
+    def __init__(self, live: Live, danmaku_client: DanmakuClient) -> None:
         super().__init__()
+        self._logger = logger.bind(room_id=live.room_id)
         self._danmaku_client = danmaku_client
         self._queue: Queue[DanmakuMsg] = Queue(maxsize=self._MAX_QUEUE_SIZE)

     def _do_start(self) -> None:
         self._danmaku_client.add_listener(self)
-        logger.debug('Started danmaku receiver')
+        self._logger.debug('Started danmaku receiver')

     def _do_stop(self) -> None:
         self._danmaku_client.remove_listener(self)
         self._clear_queue()
-        logger.debug('Stopped danmaku receiver')
+        self._logger.debug('Stopped danmaku receiver')

     async def get_message(self) -> DanmakuMsg:
         return await self._queue.get()
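Note: `DanmakuReceiver` (and `RawDanmakuReceiver` further down) now take the `Live` instance as their first argument solely to seed the bound logger with `live.room_id`; the construction sites in the recorder are updated accordingly later in this commit.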
@@ -1,6 +1,6 @@
-import logging
 from typing import Optional

+from loguru import logger
 from reactivex.scheduler import NewThreadScheduler

 from blrec.bili.live import Live
@@ -16,9 +16,6 @@ from .stream_recorder_impl import StreamRecorderImpl
 __all__ = ('FLVStreamRecorderImpl',)


-logger = logging.getLogger(__name__)
-
-
 class FLVStreamRecorderImpl(StreamRecorderImpl, SupportDebugMixin):
     def __init__(
         self,
@@ -134,30 +131,33 @@ class FLVStreamRecorderImpl(StreamRecorderImpl, SupportDebugMixin):
         self._metadata_dumper.disable()

     def _run(self) -> None:
-        self._subscription = (
-            self._stream_param_holder.get_stream_params()  # type: ignore
-            .pipe(
-                self._stream_url_resolver,
-                self._stream_fetcher,
-                self._recording_monitor,
-                self._dl_statistics,
-                self._stream_parser,
-                self._connection_error_handler,
-                self._request_exception_handler,
-                flv_ops.process(sort_tags=True),
-                self._cutter,
-                self._limiter,
-                self._join_point_extractor,
-                self._prober,
-                self._injector,
-                self._analyser,
-                self._dumper,
-                self._rec_statistics,
-                self._progress_bar,
-                self._exception_handler,
-            )
-            .subscribe(
-                on_completed=self._on_completed,
-                scheduler=NewThreadScheduler(self._thread_factory('StreamRecorder')),
-            )
-        )
+        with logger.contextualize(room_id=self._live.room_id):
+            self._subscription = (
+                self._stream_param_holder.get_stream_params()  # type: ignore
+                .pipe(
+                    self._stream_url_resolver,
+                    self._stream_fetcher,
+                    self._recording_monitor,
+                    self._dl_statistics,
+                    self._stream_parser,
+                    self._connection_error_handler,
+                    self._request_exception_handler,
+                    flv_ops.process(sort_tags=True),
+                    self._cutter,
+                    self._limiter,
+                    self._join_point_extractor,
+                    self._prober,
+                    self._injector,
+                    self._analyser,
+                    self._dumper,
+                    self._rec_statistics,
+                    self._progress_bar,
+                    self._exception_handler,
+                )
+                .subscribe(
+                    on_completed=self._on_completed,
+                    scheduler=NewThreadScheduler(
+                        self._thread_factory('StreamRecorder')
+                    ),
+                )
+            )
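Note: `logger.contextualize` is backed by a context variable, and a brand-new OS thread does not inherit it — which is presumably why the HLS variant below additionally passes an explicit `logger_context` into `observe_on_new_thread`: each worker thread has to re-establish the room id itself. A small illustration of that behavior (names are illustrative):

import threading
from loguru import logger

def worker(ctx: dict) -> None:
    # A fresh thread starts without the parent's loguru context: re-enter it.
    with logger.contextualize(**ctx):
        logger.info('running on {}', threading.current_thread().name)

threading.Thread(
    target=worker, args=({'room_id': 23058},), name='SegmentFetcher::23058'
).start()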
@@ -1,6 +1,6 @@
-import logging
 from typing import Optional

+from loguru import logger
 from reactivex.scheduler import NewThreadScheduler

 from blrec.bili.live import Live
@@ -16,9 +16,6 @@ from .stream_recorder_impl import StreamRecorderImpl
 __all__ = ('HLSStreamRecorderImpl',)


-logger = logging.getLogger(__name__)
-
-
 class HLSStreamRecorderImpl(StreamRecorderImpl):
     def __init__(
         self,
@@ -123,32 +120,37 @@ class HLSStreamRecorderImpl(StreamRecorderImpl):
         self._ff_metadata_dumper.disable()

     def _run(self) -> None:
-        self._subscription = (
-            self._stream_param_holder.get_stream_params()  # type: ignore
-            .pipe(
-                self._stream_url_resolver,
-                self._playlist_fetcher,
-                self._recording_monitor,
-                self._connection_error_handler,
-                self._request_exception_handler,
-                self._playlist_resolver,
-                utils_ops.observe_on_new_thread(
-                    queue_size=60, thread_name=f'SegmentFetcher::{self._live.room_id}'
-                ),
-                self._segment_fetcher,
-                self._dl_statistics,
-                self._prober,
-                self._analyser,
-                self._cutter,
-                self._limiter,
-                self._segment_dumper,
-                self._rec_statistics,
-                self._progress_bar,
-                self._playlist_dumper,
-                self._exception_handler,
-            )
-            .subscribe(
-                on_completed=self._on_completed,
-                scheduler=NewThreadScheduler(self._thread_factory('HLSStreamRecorder')),
-            )
-        )
+        with logger.contextualize(room_id=self._live.room_id):
+            self._subscription = (
+                self._stream_param_holder.get_stream_params()  # type: ignore
+                .pipe(
+                    self._stream_url_resolver,
+                    self._playlist_fetcher,
+                    self._recording_monitor,
+                    self._connection_error_handler,
+                    self._request_exception_handler,
+                    self._playlist_resolver,
+                    utils_ops.observe_on_new_thread(
+                        queue_size=60,
+                        thread_name=f'SegmentFetcher::{self._live.room_id}',
+                        logger_context={'room_id': self._live.room_id},
+                    ),
+                    self._segment_fetcher,
+                    self._dl_statistics,
+                    self._prober,
+                    self._analyser,
+                    self._cutter,
+                    self._limiter,
+                    self._segment_dumper,
+                    self._rec_statistics,
+                    self._progress_bar,
+                    self._playlist_dumper,
+                    self._exception_handler,
+                )
+                .subscribe(
+                    on_completed=self._on_completed,
+                    scheduler=NewThreadScheduler(
+                        self._thread_factory('HLSStreamRecorder')
+                    ),
+                )
+            )
@@ -1,12 +1,9 @@
-import logging
 from typing import Literal

 import attr

 from blrec.bili.typing import Danmaku

-logger = logging.getLogger(__name__)
-

 @attr.s(auto_attribs=True, frozen=True, slots=True)
 class DanmuMsg:
@@ -1,11 +1,11 @@
 from __future__ import annotations

-import logging
 import time
 from typing import Optional, TypeVar

 import aiohttp
 import requests
+from loguru import logger
 from reactivex import Observable, abc

 from blrec.bili.live import Live
@@ -15,8 +15,6 @@ from blrec.utils.mixins import AsyncCooperationMixin
 __all__ = ('ConnectionErrorHandler',)


-logger = logging.getLogger(__name__)
-
 _T = TypeVar('_T')


@@ -1,20 +1,19 @@
 from __future__ import annotations

 import errno
-import logging
 from typing import Optional, TypeVar

+from loguru import logger
 from reactivex import Observable, abc

 from blrec.bili.exceptions import LiveRoomEncrypted, LiveRoomHidden, LiveRoomLocked
+from blrec.exception.helpers import format_exception
 from blrec.utils import operators as utils_ops
 from blrec.utils.mixins import AsyncCooperationMixin

 __all__ = ('ExceptionHandler',)


-logger = logging.getLogger(__name__)
-
 _T = TypeVar('_T')


@@ -28,12 +27,11 @@ class ExceptionHandler(AsyncCooperationMixin):
         scheduler: Optional[abc.SchedulerBase] = None,
     ) -> abc.DisposableBase:
         def on_error(exc: Exception) -> None:
-            logger.exception(repr(exc))
             self._submit_exception(exc)
             try:
                 raise exc
             except OSError as e:
-                logger.critical(repr(e), exc_info=e)
+                logger.critical('{}\n{}', repr(exc), format_exception(exc))
                 if e.errno == errno.ENOSPC:
                     # OSError(28, 'No space left on device')
                     observer.on_completed()
@@ -1,6 +1,5 @@
 from __future__ import annotations

-import logging
 import os
 from typing import Optional

@@ -13,11 +12,8 @@ from blrec.flv.operators.typing import FLVStream, FLVStreamItem

 __all__ = ('ProgressBar',)


 DISPLAY_PROGRESS = bool(os.environ.get('BLREC_PROGRESS'))

-logger = logging.getLogger(__name__)
-
-
 class ProgressBar:
     def __init__(self, live: Live) -> None:
@@ -1,6 +1,5 @@
 from __future__ import annotations

-import logging
 import time
 from typing import Callable, Final, Optional, Tuple, TypeVar

@@ -12,8 +11,6 @@ from blrec.utils.mixins import AsyncCooperationMixin
 __all__ = ('RecordingMonitor',)


-logger = logging.getLogger(__name__)
-
 _T = TypeVar('_T')


@@ -1,13 +1,13 @@
 from __future__ import annotations

 import asyncio
-import logging
 import time
 from typing import Optional, TypeVar

 import aiohttp
 import requests
 import urllib3
+from loguru import logger
 from reactivex import Observable, abc
 from reactivex import operators as ops

@@ -17,8 +17,6 @@ from blrec.utils import operators as utils_ops
 __all__ = ('RequestExceptionHandler',)


-logger = logging.getLogger(__name__)
-
 _T = TypeVar('_T')


@@ -1,10 +1,10 @@
 from __future__ import annotations

 import io
-import logging
 from typing import Optional

 import requests
+from loguru import logger
 from reactivex import Observable, abc

 from blrec.bili.live import Live
@@ -13,9 +13,6 @@ from blrec.utils.mixins import AsyncCooperationMixin
 __all__ = ('StreamFetcher',)


-logger = logging.getLogger(__name__)
-
-
 class StreamFetcher(AsyncCooperationMixin):
     def __init__(
         self,
@@ -1,8 +1,8 @@
 from __future__ import annotations

 import io
-import logging

+from loguru import logger
 from reactivex import Observable
 from reactivex import operators as ops

@@ -16,9 +16,6 @@ from ..stream_param_holder import StreamParamHolder
 __all__ = ('StreamParser',)


-logger = logging.getLogger(__name__)
-
-
 class StreamParser:
     def __init__(
         self,
@@ -1,10 +1,10 @@
 from __future__ import annotations

-import logging
 from typing import Final, Optional
 from urllib.parse import urlparse

 import requests
+from loguru import logger
 from reactivex import Observable, abc
 from reactivex import operators as ops

@@ -28,9 +28,6 @@ from ..stream_param_holder import StreamParamHolder, StreamParams
 __all__ = ('StreamURLResolver',)


-logger = logging.getLogger(__name__)
-
-
 class StreamURLResolver(AsyncCooperationMixin):
     _MAX_ATTEMPTS_FOR_NO_STREAM: Final[int] = 10

@@ -1,4 +1,3 @@
-import logging
 import os
 import re
 from datetime import datetime
@@ -10,8 +9,6 @@ from blrec.utils.mixins import AsyncCooperationMixin

 __all__ = ('PathProvider',)

-logger = logging.getLogger(__name__)
-

 class PathProvider(AsyncCooperationMixin):
     def __init__(self, live: Live, out_dir: str, path_template: str) -> None:
@@ -1,17 +1,17 @@
 import asyncio
 import json
-import logging
 from contextlib import suppress
 from threading import Lock

 import aiofiles
 from aiofiles.threadpool.text import AsyncTextIOWrapper
+from loguru import logger
 from tenacity import AsyncRetrying, retry_if_not_exception_type, stop_after_attempt

 from blrec.bili.live import Live
 from blrec.event.event_emitter import EventEmitter, EventListener
 from blrec.exception import exception_callback, submit_exception
-from blrec.logging.room_id import aio_task_with_room_id
+from blrec.logging.context import async_task_with_logger_context
 from blrec.path import raw_danmaku_path
 from blrec.utils.mixins import SwitchableMixin

@@ -21,9 +21,6 @@ from .stream_recorder import StreamRecorder, StreamRecorderEventListener
 __all__ = 'RawDanmakuDumper', 'RawDanmakuDumperEventListener'


-logger = logging.getLogger(__name__)
-
-
 class RawDanmakuDumperEventListener(EventListener):
     async def on_raw_danmaku_file_created(self, path: str) -> None:
         ...
@@ -44,19 +41,20 @@ class RawDanmakuDumper(
         danmaku_receiver: RawDanmakuReceiver,
     ) -> None:
         super().__init__()
-        self._live = live
+        self._logger_context = {'room_id': live.room_id}
+        self._logger = logger.bind(**self._logger_context)
         self._stream_recorder = stream_recorder
         self._receiver = danmaku_receiver
         self._lock: Lock = Lock()

     def _do_enable(self) -> None:
         self._stream_recorder.add_listener(self)
-        logger.debug('Enabled raw danmaku dumper')
+        self._logger.debug('Enabled raw danmaku dumper')

     def _do_disable(self) -> None:
         self._stream_recorder.remove_listener(self)
         asyncio.create_task(self._stop_dumping())
-        logger.debug('Disabled raw danmaku dumper')
+        self._logger.debug('Disabled raw danmaku dumper')

     async def on_video_file_created(
         self, video_path: str, record_start_time: int
@@ -87,12 +85,12 @@ class RawDanmakuDumper(
         with suppress(asyncio.CancelledError):
             await self._dump_task

-    @aio_task_with_room_id
+    @async_task_with_logger_context
     async def _do_dump(self) -> None:
-        logger.debug('Started dumping raw danmaku')
+        self._logger.debug('Started dumping raw danmaku')
         try:
             async with aiofiles.open(self._path, 'wt', encoding='utf8') as f:
-                logger.info(f"Raw danmaku file created: '{self._path}'")
+                self._logger.info(f"Raw danmaku file created: '{self._path}'")
                 await self._emit('raw_danmaku_file_created', self._path)

                 async for attempt in AsyncRetrying(
@@ -110,9 +108,9 @@ class RawDanmakuDumper(
                             json_string = json.dumps(danmu, ensure_ascii=False)
                             await f.write(json_string + '\n')
         finally:
-            logger.info(f"Raw danmaku file completed: '{self._path}'")
+            self._logger.info(f"Raw danmaku file completed: '{self._path}'")
             await self._emit('raw_danmaku_file_completed', self._path)
-            logger.debug('Stopped dumping raw danmaku')
+            self._logger.debug('Stopped dumping raw danmaku')

     async def _dumping_loop(self, file: AsyncTextIOWrapper) -> None:
         while True:
@@ -1,33 +1,33 @@
-import logging
 from asyncio import Queue, QueueFull
 from typing import Final

+from loguru import logger
+
 from blrec.bili.danmaku_client import DanmakuClient, DanmakuListener
+from blrec.bili.live import Live
 from blrec.bili.typing import Danmaku
 from blrec.utils.mixins import StoppableMixin

 __all__ = ('RawDanmakuReceiver',)


-logger = logging.getLogger(__name__)
-
-
 class RawDanmakuReceiver(DanmakuListener, StoppableMixin):
     _MAX_QUEUE_SIZE: Final[int] = 2000

-    def __init__(self, danmaku_client: DanmakuClient) -> None:
+    def __init__(self, live: Live, danmaku_client: DanmakuClient) -> None:
         super().__init__()
+        self._logger = logger.bind(room_id=live.room_id)
         self._danmaku_client = danmaku_client
         self._queue: Queue[Danmaku] = Queue(maxsize=self._MAX_QUEUE_SIZE)

     def _do_start(self) -> None:
         self._danmaku_client.add_listener(self)
-        logger.debug('Started raw danmaku receiver')
+        self._logger.debug('Started raw danmaku receiver')

     def _do_stop(self) -> None:
         self._danmaku_client.remove_listener(self)
         self._clear_queue()
-        logger.debug('Stopped raw danmaku receiver')
+        self._logger.debug('Stopped raw danmaku receiver')

     async def get_raw_danmaku(self) -> Danmaku:
         return await self._queue.get()
@@ -1,11 +1,11 @@
 from __future__ import annotations

 import asyncio
-import logging
 from datetime import datetime
 from typing import Iterator, Optional

 import humanize
+from loguru import logger

 from blrec.bili.danmaku_client import DanmakuClient
 from blrec.bili.live import Live
@@ -32,9 +32,6 @@ from .stream_recorder import StreamRecorder, StreamRecorderEventListener
 __all__ = 'RecorderEventListener', 'Recorder'


-logger = logging.getLogger(__name__)
-
-
 class RecorderEventListener(EventListener):
     async def on_recording_started(self, recorder: Recorder) -> None:
         ...
@@ -105,6 +102,8 @@ class Recorder(
         save_raw_danmaku: bool = False,
     ) -> None:
         super().__init__()
+        self._logger_context = {'room_id': live.room_id}
+        self._logger = logger.bind(**self._logger_context)

         self._live = live
         self._danmaku_client = danmaku_client
@@ -130,9 +129,9 @@ class Recorder(
             duration_limit=duration_limit,
         )

-        self._danmaku_receiver = DanmakuReceiver(danmaku_client)
+        self._danmaku_receiver = DanmakuReceiver(live, danmaku_client)
         self._danmaku_dumper = DanmakuDumper(
-            self._live,
+            live,
             self._stream_recorder,
             self._danmaku_receiver,
             danmu_uname=danmu_uname,
@@ -141,13 +140,13 @@ class Recorder(
             record_guard_buy=record_guard_buy,
             record_super_chat=record_super_chat,
         )
-        self._raw_danmaku_receiver = RawDanmakuReceiver(danmaku_client)
+        self._raw_danmaku_receiver = RawDanmakuReceiver(live, danmaku_client)
         self._raw_danmaku_dumper = RawDanmakuDumper(
-            self._live, self._stream_recorder, self._raw_danmaku_receiver
+            live, self._stream_recorder, self._raw_danmaku_receiver
         )

         self._cover_downloader = CoverDownloader(
-            self._live,
+            live,
             self._stream_recorder,
             save_cover=save_cover,
             cover_save_strategy=cover_save_strategy,
@@ -380,12 +379,12 @@ class Recorder(
         return self._stream_recorder.cut_stream()

     async def on_live_began(self, live: Live) -> None:
-        logger.info('The live has began')
+        self._logger.info('The live has began')
         self._print_live_info()
         await self._start_recording()

     async def on_live_ended(self, live: Live) -> None:
-        logger.info('The live has ended')
+        self._logger.info('The live has ended')
         await asyncio.sleep(3)
         self._stream_available = False
         self._stream_recorder.stream_available_time = None
@@ -393,13 +392,13 @@ class Recorder(
         self._print_waiting_message()

     async def on_live_stream_available(self, live: Live) -> None:
-        logger.debug('The live stream becomes available')
+        self._logger.debug('The live stream becomes available')
         self._stream_available = True
         self._stream_recorder.stream_available_time = await live.get_timestamp()
         await self._stream_recorder.start()

     async def on_live_stream_reset(self, live: Live) -> None:
-        logger.warning('The live stream has been reset')
+        self._logger.warning('The live stream has been reset')
         if not self._recording:
             await self._start_recording()

@@ -429,7 +428,7 @@ class Recorder(
         await self._emit('cover_image_downloaded', self, path)

     async def on_stream_recording_completed(self) -> None:
-        logger.debug('Stream recording completed')
+        self._logger.debug('Stream recording completed')
         await self._stop_recording()

     async def _do_start(self) -> None:
@@ -437,7 +436,7 @@ class Recorder(
         self._danmaku_dumper.add_listener(self)
         self._raw_danmaku_dumper.add_listener(self)
         self._cover_downloader.add_listener(self)
-        logger.debug('Started recorder')
+        self._logger.debug('Started recorder')

         self._print_live_info()
         if self._live.is_living():
@@ -452,7 +451,7 @@ class Recorder(
         self._danmaku_dumper.remove_listener(self)
         self._raw_danmaku_dumper.remove_listener(self)
         self._cover_downloader.remove_listener(self)
-        logger.debug('Stopped recorder')
+        self._logger.debug('Stopped recorder')

     async def _start_recording(self) -> None:
         if self._recording:
@@ -471,7 +470,7 @@ class Recorder(
         if self._stream_available:
             await self._stream_recorder.start()

-        logger.info('Started recording')
+        self._logger.info('Started recording')
         await self._emit('recording_started', self)

     async def _stop_recording(self) -> None:
@@ -489,10 +488,10 @@ class Recorder(
         self._stream_recorder.remove_listener(self)

         if self._stopped:
-            logger.info('Recording Cancelled')
+            self._logger.info('Recording Cancelled')
             await self._emit('recording_cancelled', self)
         else:
-            logger.info('Recording Finished')
+            self._logger.info('Recording Finished')
             await self._emit('recording_finished', self)

     async def _prepare(self) -> None:
@@ -502,7 +501,7 @@ class Recorder(
         self._stream_recorder.clear_files()

     def _print_waiting_message(self) -> None:
-        logger.info('Waiting... until the live starts')
+        self._logger.info('Waiting... until the live starts')

     def _print_live_info(self) -> None:
         room_info = self._live.room_info
@@ -537,7 +536,7 @@ description :
 {room_info.description}
 ===============================================================================
 """
-        logger.info(msg)
+        self._logger.info(msg)

     def _print_changed_room_info(self, room_info: RoomInfo) -> None:
         msg = f"""
@@ -549,4 +548,4 @@ parent area id : {room_info.parent_area_id}
 parent area name : {room_info.parent_area_name}
 ===============================================================================
 """
-        logger.info(msg)
+        self._logger.info(msg)
@@ -1,6 +1,5 @@
 from __future__ import annotations

-import logging
 from typing import Any, Final, Optional

 import attr
@@ -13,9 +12,6 @@ from blrec.bili.typing import ApiPlatform, QualityNumber, StreamFormat
 __all__ = ('StreamParamHolder',)


-logger = logging.getLogger(__name__)
-
-
 @attr.s(auto_attribs=True, frozen=True, slots=True)
 class StreamParams:
     stream_format: StreamFormat
@ -1,8 +1,9 @@
import asyncio
import logging
import time
from typing import Iterator, Optional

from loguru import logger

from blrec.bili.live import Live
from blrec.bili.live_monitor import LiveMonitor
from blrec.bili.typing import QualityNumber, StreamFormat
@ -20,9 +21,6 @@ from .typing import MetaData
__all__ = 'StreamRecorder', 'StreamRecorderEventListener'


logger = logging.getLogger(__name__)


class StreamRecorder(
    StreamRecorderEventListener,
    EventEmitter[StreamRecorderEventListener],
@ -46,6 +44,8 @@ class StreamRecorder(
        duration_limit: int = 0,
    ) -> None:
        super().__init__()
        self._logger_context = {'room_id': live.room_id}
        self._logger = logger.bind(**self._logger_context)

        self._live = live
        self._live_monitor = live_monitor
@ -58,7 +58,7 @@ class StreamRecorder(
        elif stream_format == 'fmp4':
            cls = HLSStreamRecorderImpl  # type: ignore
        else:
            logger.warning(
            self._logger.warning(
                f'The specified stream format ({stream_format}) is '
                'unsupported; the stream format (flv) will be used instead.'
            )
@ -242,7 +242,7 @@ class StreamRecorder(

        if self._live.has_no_flv_streams():
            if stream_format == 'flv':
                logger.warning(
                self._logger.warning(
                    'The specified stream format (flv) is not available, '
                    'falling back to stream format (fmp4).'
                )
@ -250,7 +250,7 @@ class StreamRecorder(
                self.hls_stream_available_time = self.stream_available_time
        else:
            if stream_format == 'fmp4':
                logger.info('Waiting for the fmp4 stream to become available...')
                self._logger.info('Waiting for the fmp4 stream to become available...')
                available = await self._wait_fmp4_stream()
                if available:
                    if self.stream_available_time is not None:
@ -258,7 +258,7 @@ class StreamRecorder(
                            await self._live.get_timestamp()
                        )
                else:
                    logger.warning(
                    self._logger.warning(
                        'The specified stream format (fmp4) is not available '
                        f'in {self.fmp4_stream_timeout} seconds, '
                        'falling back to stream format (flv).'
@ -312,7 +312,7 @@ class StreamRecorder(
        elif stream_format == 'fmp4':
            cls = HLSStreamRecorderImpl  # type: ignore
        else:
            logger.warning(
            self._logger.warning(
                f'The specified stream format ({stream_format}) is '
                'unsupported; the stream format (flv) will be used instead.'
            )
@ -342,4 +342,4 @@ class StreamRecorder(
        self._impl.stream_available_time = stream_available_time
        self._impl.hls_stream_available_time = hls_stream_available_time

        logger.debug(f'Changed stream recorder impl to {cls.__name__}')
        self._logger.debug(f'Changed stream recorder impl to {cls.__name__}')
@ -7,6 +7,7 @@ from typing import Any, Iterator, List, Optional, Tuple, Union

import requests
import urllib3
from loguru import logger
from reactivex import abc
from reactivex.typing import StartableFactory, StartableTarget

@ -29,7 +30,6 @@ from .stream_param_holder import StreamParamHolder
__all__ = ('StreamRecorderImpl',)


logger = logging.getLogger(__name__)
logging.getLogger(urllib3.__name__).setLevel(logging.WARNING)


@ -78,6 +78,8 @@ class StreamRecorderImpl(
        duration_limit: int = 0,
    ) -> None:
        super().__init__()
        self._logger_context = {'room_id': live.room_id}
        self._logger = logger.bind(**self._logger_context)

        self._live = live
        self._live_monitor = live_monitor
@ -282,14 +284,14 @@ class StreamRecorderImpl(
        self._completed = False

    async def _do_start(self) -> None:
        logger.debug('Starting stream recorder...')
        self._logger.debug('Starting stream recorder...')
        self._on_start()
        self._reset()
        self._run()
        logger.debug('Started stream recorder')
        self._logger.debug('Started stream recorder')

    async def _do_stop(self) -> None:
        logger.debug('Stopping stream recorder...')
        self._logger.debug('Stopping stream recorder...')
        self._stream_param_holder.cancel()
        thread = self._thread_factory('StreamRecorderDisposer')(self._dispose)
        thread.start()
@ -297,7 +299,7 @@ class StreamRecorderImpl(
        await self._loop.run_in_executor(None, thread.join, 30)
        self._threads.clear()
        self._on_stop()
        logger.debug('Stopped stream recorder')
        self._logger.debug('Stopped stream recorder')

    def _on_start(self) -> None:
        pass
@ -311,10 +313,15 @@ class StreamRecorderImpl(

    def _thread_factory(self, name: str) -> StartableFactory:
        def factory(target: StartableTarget) -> Thread:
            def run() -> None:
                with logger.contextualize(room_id=self._live.room_id):
                    target()

            thread = Thread(
                target=target, daemon=True, name=f'{name}::{self._live.room_id}'
                target=run, daemon=True, name=f'{name}::{self._live.room_id}'
            )
            self._threads.append(thread)

            return thread

        return factory
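Binding an instance logger does not cover code that runs on worker threads through the global logger, so the thread factory above wraps the target in logger.contextualize(), which sets context-local extra values for everything logged while the block is active. A rough standalone sketch of the same technique (the function names are illustrative):

import threading
from typing import Callable

from loguru import logger


def spawn(room_id: int, target: Callable[[], None]) -> threading.Thread:
    def run() -> None:
        # Every record emitted on this thread while the block is active
        # carries extra={'room_id': ...}, whichever module logs it.
        with logger.contextualize(room_id=room_id):
            target()

    return threading.Thread(target=run, daemon=True)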
@ -334,24 +341,24 @@ class StreamRecorderImpl(
        self._emit_event('stream_recording_completed')

    def _on_profile_updated(self, profile: StreamProfile) -> None:
        logger.debug(f'Stream profile: {profile}')
        self._logger.debug(f'Stream profile: {profile}')
        self._stream_profile = profile

    def _on_video_file_opened(self, args: Tuple[str, int]) -> None:
        logger.info(f"Video file created: '{args[0]}'")
        self._logger.info(f"Video file created: '{args[0]}'")
        self._files.append(args[0])
        self._record_start_time = args[1]
        self._emit_event('video_file_created', *args)

    def _on_video_file_closed(self, path: str) -> None:
        logger.info(f"Video file completed: '{path}'")
        self._logger.info(f"Video file completed: '{path}'")
        self._emit_event('video_file_completed', path)

    def _on_recording_interrupted(self, args: Tuple[float, float]) -> None:
        timestamp, duration = args[0], args[1]
        datetime_string = datetime.fromtimestamp(timestamp).isoformat()
        duration_string = format_timestamp(int(duration * 1000))
        logger.info(
        self._logger.warning(
            f'Recording interrupted, datetime: {datetime_string}, '
            f'duration: {duration_string}'
        )
@ -359,11 +366,11 @@ class StreamRecorderImpl(

    def _on_recording_recovered(self, timestamp: float) -> None:
        datetime_string = datetime.fromtimestamp(timestamp).isoformat()
        logger.info(f'Recording recovered, datetime: {(datetime_string)}')
        self._logger.warning(f'Recording recovered, datetime: {(datetime_string)}')
        self._emit_event('stream_recording_recovered', timestamp)

    def _on_duration_lost(self, duration: float) -> None:
        logger.info(f'Total duration lost: {(duration)}')
        self._logger.warning(f'Total duration lost: ≈ {(duration)} s')
        self._emit_event('duration_lost', duration)

    def _emit_event(self, name: str, *args: Any, **kwds: Any) -> None:
@ -1,18 +1,12 @@
import logging
from typing import Iterable


from .io import DanmakuReader, DanmakuWriter
from .common import copy_damus
from .io import DanmakuReader, DanmakuWriter
from .typing import TimebaseType


__all__ = 'TimebaseType', 'DanmakuCombinator'


logger = logging.getLogger(__name__)


class DanmakuCombinator:
    def __init__(
        self,
@ -1,13 +1,7 @@
import logging
from typing import Iterable


from .io import DanmakuReader, DanmakuWriter
from .common import copy_damus


logger = logging.getLogger(__name__)

from .io import DanmakuReader, DanmakuWriter

__all__ = 'DanmakuConcatenator'
@ -2,7 +2,6 @@ from __future__ import annotations

import asyncio
import html
import logging
import unicodedata
from datetime import datetime, timedelta, timezone
from typing import Any, AsyncIterator, Final, List
@ -24,9 +23,6 @@ from .typing import Element
__all__ = 'DanmakuReader', 'DanmakuWriter'


logger = logging.getLogger(__name__)


class DanmakuReader:  # TODO rewrite
    def __init__(self, path: str) -> None:
        self._path = path
1
src/blrec/data/webapp/548.e2df47ddad764d0b.js
Normal file
File diff suppressed because one or more lines are too long
@ -10,6 +10,6 @@
<body>
  <app-root></app-root>
  <noscript>Please enable JavaScript to continue using this application.</noscript>
  <script src="runtime.8688afa20dbe5cc7.js" type="module"></script><script src="polyfills.4e5433063877ea34.js" type="module"></script><script src="main.f21b7d831ad9cafb.js" type="module"></script>
  <script src="runtime.4d25db3be3119aaf.js" type="module"></script><script src="polyfills.4e5433063877ea34.js" type="module"></script><script src="main.f21b7d831ad9cafb.js" type="module"></script>

</body></html>
@ -1,6 +1,6 @@
{
  "configVersion": 1,
  "timestamp": 1697949101780,
  "timestamp": 1699163406878,
  "index": "/index.html",
  "assetGroups": [
    {
@ -15,14 +15,14 @@
        "/287.63ace7ac80c3d9f2.js",
        "/386.2404f3bc252e1df3.js",
        "/503.6553f508f4a9247d.js",
        "/548.f8a3199ca2412e0d.js",
        "/548.e2df47ddad764d0b.js",
        "/688.7032fddba7983cf6.js",
        "/common.1fc175bce139f4df.js",
        "/index.html",
        "/main.f21b7d831ad9cafb.js",
        "/manifest.webmanifest",
        "/polyfills.4e5433063877ea34.js",
        "/runtime.8688afa20dbe5cc7.js",
        "/runtime.4d25db3be3119aaf.js",
        "/styles.ae81e04dfa5b2860.css"
      ],
      "patterns": []
@ -1638,7 +1638,7 @@
    "/287.63ace7ac80c3d9f2.js": "7a52c7715de66142dae39668a3a0fb0f9ee4bb50",
    "/386.2404f3bc252e1df3.js": "f937945645579b9651be2666f70cec2c5de4e367",
    "/503.6553f508f4a9247d.js": "0878ea0e91bfd5458dd55875561e91060ecb0837",
    "/548.f8a3199ca2412e0d.js": "58ae6ac139c0b62ed266313e7a75a8266770387f",
    "/548.e2df47ddad764d0b.js": "0b60f5f001bd127b90d490617bba2091c4c39de3",
    "/688.7032fddba7983cf6.js": "eae55044529782a51b7e534365255bbfa5522b05",
    "/assets/animal/panda.js": "fec2868bb3053dd2da45f96bbcb86d5116ed72b1",
    "/assets/animal/panda.svg": "bebd302cdc601e0ead3a6d2710acf8753f3d83b1",
@ -3234,11 +3234,11 @@
    "/assets/twotone/warning.js": "fb2d7ea232f3a99bf8f080dbc94c65699232ac01",
    "/assets/twotone/warning.svg": "8c7a2d3e765a2e7dd58ac674870c6655cecb0068",
    "/common.1fc175bce139f4df.js": "af1775164711ec49e5c3a91ee45bd77509c17c54",
    "/index.html": "28dc5eb629ca29943d45677dac7fded24c0362c3",
    "/index.html": "2a844a95b7b6367d4be88cef11f92da722cdfb0b",
    "/main.f21b7d831ad9cafb.js": "fc51efa446c2ac21ee17e165217dd3faeacc5290",
    "/manifest.webmanifest": "62c1cb8c5ad2af551a956b97013ab55ce77dd586",
    "/polyfills.4e5433063877ea34.js": "68159ab99e0608976404a17132f60b5ceb6f12d2",
    "/runtime.8688afa20dbe5cc7.js": "602d7051e97524a7becae76c8e76e7db29370b2b",
    "/runtime.4d25db3be3119aaf.js": "a384a1a5336bd3394ebe6d0560ed6c28b7020af9",
    "/styles.ae81e04dfa5b2860.css": "5933b4f1c4d8fcc1891b68940ee78af4091472b7"
  },
  "navigationUrls": [
1
src/blrec/data/webapp/runtime.4d25db3be3119aaf.js
Normal file
@ -0,0 +1 @@
(()=>{"use strict";var e,v={},m={};function r(e){var n=m[e];if(void 0!==n)return n.exports;var t=m[e]={exports:{}};return v[e](t,t.exports,r),t.exports}r.m=v,e=[],r.O=(n,t,f,o)=>{if(!t){var a=1/0;for(i=0;i<e.length;i++){for(var[t,f,o]=e[i],c=!0,u=0;u<t.length;u++)(!1&o||a>=o)&&Object.keys(r.O).every(p=>r.O[p](t[u]))?t.splice(u--,1):(c=!1,o<a&&(a=o));if(c){e.splice(i--,1);var d=f();void 0!==d&&(n=d)}}return n}o=o||0;for(var i=e.length;i>0&&e[i-1][2]>o;i--)e[i]=e[i-1];e[i]=[t,f,o]},r.n=e=>{var n=e&&e.__esModule?()=>e.default:()=>e;return r.d(n,{a:n}),n},r.d=(e,n)=>{for(var t in n)r.o(n,t)&&!r.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:n[t]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce((n,t)=>(r.f[t](e,n),n),[])),r.u=e=>(592===e?"common":e)+"."+{103:"4a2aea63cc3bf42b",287:"63ace7ac80c3d9f2",386:"2404f3bc252e1df3",503:"6553f508f4a9247d",548:"e2df47ddad764d0b",592:"1fc175bce139f4df",688:"7032fddba7983cf6"}[e]+".js",r.miniCssF=e=>{},r.o=(e,n)=>Object.prototype.hasOwnProperty.call(e,n),(()=>{var e={},n="blrec:";r.l=(t,f,o,i)=>{if(e[t])e[t].push(f);else{var a,c;if(void 0!==o)for(var u=document.getElementsByTagName("script"),d=0;d<u.length;d++){var l=u[d];if(l.getAttribute("src")==t||l.getAttribute("data-webpack")==n+o){a=l;break}}a||(c=!0,(a=document.createElement("script")).type="module",a.charset="utf-8",a.timeout=120,r.nc&&a.setAttribute("nonce",r.nc),a.setAttribute("data-webpack",n+o),a.src=r.tu(t)),e[t]=[f];var s=(g,p)=>{a.onerror=a.onload=null,clearTimeout(b);var _=e[t];if(delete e[t],a.parentNode&&a.parentNode.removeChild(a),_&&_.forEach(h=>h(p)),g)return g(p)},b=setTimeout(s.bind(null,void 0,{type:"timeout",target:a}),12e4);a.onerror=s.bind(null,a.onerror),a.onload=s.bind(null,a.onload),c&&document.head.appendChild(a)}}})(),r.r=e=>{typeof Symbol<"u"&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{var e;r.tt=()=>(void 0===e&&(e={createScriptURL:n=>n},typeof trustedTypes<"u"&&trustedTypes.createPolicy&&(e=trustedTypes.createPolicy("angular#bundler",e))),e)})(),r.tu=e=>r.tt().createScriptURL(e),r.p="",(()=>{var e={666:0};r.f.j=(f,o)=>{var i=r.o(e,f)?e[f]:void 0;if(0!==i)if(i)o.push(i[2]);else if(666!=f){var a=new Promise((l,s)=>i=e[f]=[l,s]);o.push(i[2]=a);var c=r.p+r.u(f),u=new Error;r.l(c,l=>{if(r.o(e,f)&&(0!==(i=e[f])&&(e[f]=void 0),i)){var s=l&&("load"===l.type?"missing":l.type),b=l&&l.target&&l.target.src;u.message="Loading chunk "+f+" failed.\n("+s+": "+b+")",u.name="ChunkLoadError",u.type=s,u.request=b,i[1](u)}},"chunk-"+f,f)}else e[f]=0},r.O.j=f=>0===e[f];var n=(f,o)=>{var u,d,[i,a,c]=o,l=0;if(i.some(b=>0!==e[b])){for(u in a)r.o(a,u)&&(r.m[u]=a[u]);if(c)var s=c(r)}for(f&&f(o);l<i.length;l++)r.o(e,d=i[l])&&e[d]&&e[d][0](),e[d]=0;return r.O(s)},t=self.webpackChunkblrec=self.webpackChunkblrec||[];t.forEach(n.bind(null,0)),t.push=n.bind(null,t.push.bind(t))})()})();
@ -1 +0,0 @@
(()=>{"use strict";var e,v={},m={};function r(e){var n=m[e];if(void 0!==n)return n.exports;var t=m[e]={exports:{}};return v[e](t,t.exports,r),t.exports}r.m=v,e=[],r.O=(n,t,i,o)=>{if(!t){var a=1/0;for(f=0;f<e.length;f++){for(var[t,i,o]=e[f],c=!0,u=0;u<t.length;u++)(!1&o||a>=o)&&Object.keys(r.O).every(b=>r.O[b](t[u]))?t.splice(u--,1):(c=!1,o<a&&(a=o));if(c){e.splice(f--,1);var d=i();void 0!==d&&(n=d)}}return n}o=o||0;for(var f=e.length;f>0&&e[f-1][2]>o;f--)e[f]=e[f-1];e[f]=[t,i,o]},r.n=e=>{var n=e&&e.__esModule?()=>e.default:()=>e;return r.d(n,{a:n}),n},r.d=(e,n)=>{for(var t in n)r.o(n,t)&&!r.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:n[t]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce((n,t)=>(r.f[t](e,n),n),[])),r.u=e=>(592===e?"common":e)+"."+{103:"4a2aea63cc3bf42b",287:"63ace7ac80c3d9f2",386:"2404f3bc252e1df3",503:"6553f508f4a9247d",548:"f8a3199ca2412e0d",592:"1fc175bce139f4df",688:"7032fddba7983cf6"}[e]+".js",r.miniCssF=e=>{},r.o=(e,n)=>Object.prototype.hasOwnProperty.call(e,n),(()=>{var e={},n="blrec:";r.l=(t,i,o,f)=>{if(e[t])e[t].push(i);else{var a,c;if(void 0!==o)for(var u=document.getElementsByTagName("script"),d=0;d<u.length;d++){var l=u[d];if(l.getAttribute("src")==t||l.getAttribute("data-webpack")==n+o){a=l;break}}a||(c=!0,(a=document.createElement("script")).type="module",a.charset="utf-8",a.timeout=120,r.nc&&a.setAttribute("nonce",r.nc),a.setAttribute("data-webpack",n+o),a.src=r.tu(t)),e[t]=[i];var s=(g,b)=>{a.onerror=a.onload=null,clearTimeout(p);var _=e[t];if(delete e[t],a.parentNode&&a.parentNode.removeChild(a),_&&_.forEach(h=>h(b)),g)return g(b)},p=setTimeout(s.bind(null,void 0,{type:"timeout",target:a}),12e4);a.onerror=s.bind(null,a.onerror),a.onload=s.bind(null,a.onload),c&&document.head.appendChild(a)}}})(),r.r=e=>{typeof Symbol<"u"&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{var e;r.tt=()=>(void 0===e&&(e={createScriptURL:n=>n},typeof trustedTypes<"u"&&trustedTypes.createPolicy&&(e=trustedTypes.createPolicy("angular#bundler",e))),e)})(),r.tu=e=>r.tt().createScriptURL(e),r.p="",(()=>{var e={666:0};r.f.j=(i,o)=>{var f=r.o(e,i)?e[i]:void 0;if(0!==f)if(f)o.push(f[2]);else if(666!=i){var a=new Promise((l,s)=>f=e[i]=[l,s]);o.push(f[2]=a);var c=r.p+r.u(i),u=new Error;r.l(c,l=>{if(r.o(e,i)&&(0!==(f=e[i])&&(e[i]=void 0),f)){var s=l&&("load"===l.type?"missing":l.type),p=l&&l.target&&l.target.src;u.message="Loading chunk "+i+" failed.\n("+s+": "+p+")",u.name="ChunkLoadError",u.type=s,u.request=p,f[1](u)}},"chunk-"+i,i)}else e[i]=0},r.O.j=i=>0===e[i];var n=(i,o)=>{var u,d,[f,a,c]=o,l=0;if(f.some(p=>0!==e[p])){for(u in a)r.o(a,u)&&(r.m[u]=a[u]);if(c)var s=c(r)}for(i&&i(o);l<f.length;l++)r.o(e,d=f[l])&&e[d]&&e[d][0](),e[d]=0;return r.O(s)},t=self.webpackChunkblrec=self.webpackChunkblrec||[];t.forEach(n.bind(null,0)),t.push=n.bind(null,t.push.bind(t))})()})();
@ -1,20 +1,16 @@
import asyncio
import os
import shutil
import logging
import asyncio
from typing import Literal


logger = logging.getLogger(__name__)
from loguru import logger


def is_space_enough(path: str, size: int) -> bool:
    return shutil.disk_usage(path).free > size


async def delete_file(
    path: str, log_level: Literal['INFO', 'DEBUG'] = 'INFO'
) -> None:
async def delete_file(path: str, log_level: Literal['INFO', 'DEBUG'] = 'INFO') -> None:
    loop = asyncio.get_running_loop()

    try:
@ -22,4 +18,4 @@ async def delete_file(
    except Exception as e:
        logger.error(f'Failed to delete {path!r}, due to: {repr(e)}')
    else:
        logger.log(logging.getLevelName(log_level), f'Deleted {path!r}')
        logger.log(log_level, f'Deleted {path!r}')
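Note the simplification in the last hunk: the stdlib's Logger.log() wants a numeric level, so the old code converted the name with logging.getLevelName(), while loguru's logger.log() accepts the level name directly. A tiny sketch (the path is a made-up example):

from loguru import logger

# 'INFO' is resolved by loguru itself; no getLevelName round-trip needed.
logger.log('INFO', 'Deleted {!r}', '/tmp/example.flv')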
@ -1,11 +1,11 @@
import asyncio
import logging
import shutil
from contextlib import suppress

from loguru import logger

from ..event.event_emitter import EventEmitter, EventListener
from ..exception import exception_callback
from ..logging.room_id import aio_task_with_room_id
from ..utils.mixins import AsyncStoppableMixin, SwitchableMixin
from .helpers import is_space_enough
from .models import DiskUsage
@ -13,9 +13,6 @@ from .models import DiskUsage
__all__ = 'SpaceMonitor', 'SpaceEventListener'


logger = logging.getLogger(__name__)


class SpaceEventListener(EventListener):
    async def on_space_no_enough(
        self, path: str, threshold: int, disk_usage: DiskUsage
@ -76,7 +73,6 @@ class SpaceMonitor(
        with suppress(asyncio.CancelledError):
            await self._polling_task

    @aio_task_with_room_id
    async def _polling_loop(self) -> None:
        while True:
            if not is_space_enough(self.path, self.space_threshold):
@ -1,25 +1,21 @@
import os
import glob
from datetime import datetime
from pathlib import Path
import logging
import asyncio
import glob
import os
from datetime import datetime
from functools import partial
from pathlib import Path
from typing import Iterable, List

from tenacity import retry, wait_none, stop_after_attempt, retry_if_exception_type
from loguru import logger
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_none

from .helpers import delete_file, is_space_enough
from .space_monitor import SpaceMonitor, DiskUsage, SpaceEventListener
from ..utils.mixins import SwitchableMixin

from .helpers import delete_file, is_space_enough
from .space_monitor import DiskUsage, SpaceEventListener, SpaceMonitor

__all__ = ('SpaceReclaimer',)


logger = logging.getLogger(__name__)


class SpaceReclaimer(SpaceEventListener, SwitchableMixin):
    _SUFFIX_SET = frozenset(
        (
@ -1,12 +1,11 @@
from __future__ import annotations

from abc import ABC
from contextlib import suppress
from typing import Any, List, Generic, TypeVar

from typing import Any, Generic, List, TypeVar

from ..exception import ExceptionSubmitter


__all__ = 'EventListener', 'EventEmitter'
@ -1,14 +1,10 @@
import logging
from loguru import logger


from .exception_center import ExceptionCenter
from ..utils.mixins import SwitchableMixin
from .exception_center import ExceptionCenter
from .helpers import format_exception


logger = logging.getLogger(__name__)


__all__ = 'ExceptionHandler',
__all__ = ('ExceptionHandler',)


class ExceptionHandler(SwitchableMixin):
@ -25,5 +21,4 @@ class ExceptionHandler(SwitchableMixin):
        self._log_exception(exc)

    def _log_exception(self, exc: BaseException) -> None:
        exc_info = (type(exc), exc, exc.__traceback__)
        logger.critical(type(exc).__name__, exc_info=exc_info)
        logger.critical('{}\n{}', repr(exc), format_exception(exc))
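loguru has no exc_info parameter, so the traceback is rendered explicitly (here via the project's format_exception helper) and passed as an argument of the brace-style message. A self-contained equivalent using only the standard traceback module, as an assumed sketch rather than the project's helper:

import traceback

from loguru import logger

try:
    1 / 0
except ZeroDivisionError as exc:
    # Render the full traceback to a string, then let loguru interpolate.
    tb = ''.join(traceback.format_exception(type(exc), exc, exc.__traceback__))
    logger.critical('{}\n{}', repr(exc), tb)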
@ -1,4 +1,3 @@
import logging
from io import SEEK_CUR
from typing import Iterable, Iterator

@ -11,9 +10,6 @@ from .utils import AutoRollbacker, OffsetRepositor
__all__ = 'FlvReader', 'FlvWriter'


logger = logging.getLogger(__name__)


class FlvReader:
    def __init__(
        self,
@ -1,8 +1,8 @@
import json
import logging
import os

import attr
from loguru import logger
from reactivex import Observable
from reactivex import operators as ops

@ -13,9 +13,6 @@ from .operators.helpers import from_file
__all__ = 'AnalysingProgress', 'analyse_metadata'


logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, slots=True, frozen=True)
class AnalysingProgress:
    count: int
@ -1,9 +1,9 @@
import json
import logging
from contextlib import suppress
from typing import List, Optional

import attr
from loguru import logger

from ..flv import operators as flv_ops
from ..path import extra_metadata_path
@ -11,8 +11,6 @@ from ..utils.mixins import SwitchableMixin

__all__ = ('MetadataDumper',)

logger = logging.getLogger(__name__)


class MetadataDumper(SwitchableMixin):
    def __init__(
@ -1,4 +1,3 @@
import logging
import os
from datetime import datetime
from typing import Any, Dict
@ -16,9 +15,6 @@ from .operators.helpers import from_file
__all__ = 'InjectingProgress', 'inject_metadata'


logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, slots=True, frozen=True)
class InjectingProgress:
    count: int
@ -4,10 +4,10 @@ ref: https://github.com/ioppermann/yamdi/blob/master/yamdi.c
"""
from __future__ import annotations

import logging
from typing import List, Optional, TypedDict

import attr
from loguru import logger
from reactivex import Observable, Subject, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -25,8 +25,6 @@ from .typing import FLVStream, FLVStreamItem

__all__ = 'Analyser', 'MetaData', 'KeyFrames'

logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, slots=True, frozen=True, kw_only=True)
class KeyFrames:
@ -1,10 +1,10 @@
from __future__ import annotations

import logging
from enum import IntEnum, auto
from typing import Callable, List, Optional, TypedDict, cast

import attr
from loguru import logger
from reactivex import Observable, Subject, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
from typing_extensions import TypeGuard
@ -26,8 +26,6 @@ from .typing import FLVStream, FLVStreamItem

__all__ = ('concat', 'JoinPointExtractor', 'JoinPoint', 'JoinPointData')

logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, slots=True, frozen=True)
class JoinPoint:
@ -1,6 +1,6 @@
import logging
from typing import Callable, Optional

from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -10,8 +10,6 @@ from .typing import FLVStream, FLVStreamItem

__all__ = ('correct',)

logger = logging.getLogger(__name__)


def correct() -> Callable[[FLVStream], FLVStream]:
    def _correct(source: FLVStream) -> FLVStream:
@ -1,8 +1,8 @@
from __future__ import annotations

import logging
from typing import Optional

from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -20,9 +20,6 @@ from .typing import FLVStream, FLVStreamItem
__all__ = ('Cutter',)


logger = logging.getLogger(__name__)


class Cutter:
    def __init__(self, min_duration: int = 5_000) -> None:
        self._min_duration = min_duration  # milliseconds
@ -1,6 +1,6 @@
import logging
from typing import Callable, List, Optional

from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -9,8 +9,6 @@ from .typing import FLVStream, FLVStreamItem

__all__ = ('defragment',)

logger = logging.getLogger(__name__)


def defragment(min_tags: int = 10) -> Callable[[FLVStream], FLVStream]:
    def _defragment(source: FLVStream) -> FLVStream:
@ -1,7 +1,7 @@
import io
import logging
from typing import Callable, Optional, Tuple

from loguru import logger
from reactivex import Observable, Subject, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -11,8 +11,6 @@ from .typing import FLVStream, FLVStreamItem

__all__ = ('Dumper',)

logger = logging.getLogger(__name__)


class Dumper:
    def __init__(
@ -1,7 +1,7 @@
import logging
import math
from typing import Callable, Optional

from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -17,8 +17,6 @@ from .typing import FLVStream, FLVStreamItem

__all__ = ('fix',)

logger = logging.getLogger(__name__)


def fix() -> Callable[[FLVStream], FLVStream]:
    def _fix(source: FLVStream) -> FLVStream:
@ -1,5 +1,4 @@
import io
import logging

from reactivex import of

@ -8,8 +7,6 @@ from .typing import FLVStream

__all__ = ('from_file', 'from_stream')

logger = logging.getLogger(__name__)


def from_stream(
    stream: io.RawIOBase,
@ -1,8 +1,8 @@
from __future__ import annotations

import logging
from typing import Any, Callable, Dict, Optional, cast

from loguru import logger
from reactivex import Observable, abc

from ..common import (
@ -19,9 +19,6 @@ from .typing import FLVStream, FLVStreamItem
__all__ = ('Injector',)


logger = logging.getLogger(__name__)


class Injector:
    def __init__(
        self, metadata_provider: Callable[[Dict[str, Any]], Dict[str, Any]]
@ -1,8 +1,8 @@
from __future__ import annotations

import logging
from typing import Optional

from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -19,9 +19,6 @@ from .typing import FLVStream, FLVStreamItem
__all__ = ('Limiter',)


logger = logging.getLogger(__name__)


class Limiter:
    def __init__(
        self,
@ -1,10 +1,12 @@
import io
import logging
from typing import Callable, Optional

from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

from blrec.exception.helpers import format_exception

from ..common import create_avc_end_sequence_tag, is_avc_end_sequence
from ..io import FlvReader
from ..models import FlvTag
@ -12,8 +14,6 @@ from .typing import FLVStream, FLVStreamItem

__all__ = ('parse',)

logger = logging.getLogger(__name__)


def parse(
    *,
@ -60,7 +60,9 @@ def parse(
                observer.on_error(e)
            except ValueError as e:
                logger.debug(
                    f'Error occurred while parsing stream: {repr(e)}', exc_info=e
                    'Error occurred while parsing stream: {}\n{}',
                    repr(e),
                    format_exception(e),
                )
                if not ignore_value_error:
                    observer.on_error(e)
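The same formatting change recurs here: instead of an eager f-string plus exc_info, the message is a brace-style template that loguru interpolates only when the record is actually emitted (the arguments themselves are still evaluated at the call site; fully lazy arguments need opt(lazy=True), as in the sort operator further down). In brief, with an assumed example value:

from loguru import logger

detail = 'truncated tag header'

logger.debug(f'parse error: {detail}')   # f-string: built before the call
logger.debug('parse error: {}', detail)  # template: built only if emitted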
@ -1,9 +1,9 @@
from __future__ import annotations

import io
import logging
from typing import List, Optional, cast

from loguru import logger
from reactivex import Observable, Subject, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -16,9 +16,6 @@ from .typing import FLVStream, FLVStreamItem
__all__ = ('Prober', 'StreamProfile')


logger = logging.getLogger(__name__)


class Prober:
    def __init__(self) -> None:
        self._profiles: Subject[StreamProfile] = Subject()
@ -1,4 +1,3 @@
import logging
from typing import Callable

from reactivex import operators as ops
@ -14,8 +13,6 @@ from .typing import FLVStream

__all__ = ('process',)

logger = logging.getLogger(__name__)


def process(sort_tags: bool = False) -> Callable[[FLVStream], FLVStream]:
    def _process(source: FLVStream) -> FLVStream:
@ -1,6 +1,5 @@
from __future__ import annotations

import logging
from typing import Optional

from reactivex import Observable, abc
@ -12,9 +11,6 @@ from .typing import FLVStream, FLVStreamItem
__all__ = ('ProgressBar',)


logger = logging.getLogger(__name__)


class ProgressBar:
    def __init__(
        self,
@ -1,7 +1,6 @@
import logging
import os
from typing import Callable, List, Optional

from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -20,10 +19,6 @@ from .typing import FLVStream, FLVStreamItem

__all__ = ('sort',)

logger = logging.getLogger(__name__)

TRACE_OP_SORT = bool(os.environ.get('BLREC_TRACE_OP_SORT'))


def sort() -> Callable[[FLVStream], FLVStream]:
    "Sort tags in GOP by timestamp to ensure subsequent operators work as expected."
@ -46,14 +41,17 @@ def sort() -> Callable[[FLVStream], FLVStream]:
            if not gop_tags:
                return

            if TRACE_OP_SORT:
                logger.debug(
                    'Tags in GOP:\n'
                    f'Number of tags: {len(gop_tags)}\n'
                    f'Total size of tags: {sum(map(len, gop_tags))}\n'
                    f'The first tag is {gop_tags[0]}\n'
                    f'The last tag is {gop_tags[-1]}'
                )
            logger.opt(lazy=True).trace(
                'Tags in GOP:\n'
                'Number of tags: {}\n'
                'Total size of tags: {}\n'
                'The first tag is {}\n'
                'The last tag is {}',
                lambda: len(gop_tags),
                lambda: sum(map(len, gop_tags)),
                lambda: gop_tags[0],
                lambda: gop_tags[-1],
            )

            if len(gop_tags) < 10:
                avc_header_tag = find_avc_header_tag(gop_tags)
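Here the environment-gated debug block becomes loguru's lazy evaluation: with opt(lazy=True), the supplied lambdas run only if the TRACE level is enabled for some sink, so the per-GOP summary costs nearly nothing otherwise. A condensed sketch, with tags standing in for the real gop_tags list:

from loguru import logger

tags = [b'\x09' * 16, b'\x08' * 8]  # placeholder FLV tag payloads

# The lambdas are only called if a sink actually accepts TRACE records.
logger.opt(lazy=True).trace(
    'Number of tags: {}, total size of tags: {}',
    lambda: len(tags),
    lambda: sum(map(len, tags)),
)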
@ -1,6 +1,6 @@
import logging
from typing import Callable, Optional

from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -15,8 +15,6 @@ from .typing import FLVStream, FLVStreamItem

__all__ = ('split',)

logger = logging.getLogger(__name__)


def split() -> Callable[[FLVStream], FLVStream]:
    def _split(source: FLVStream) -> FLVStream:
@ -1,7 +1,8 @@
import logging
from io import BytesIO
from typing import Any, BinaryIO, Mapping, TypedDict

from loguru import logger

from .amf import AMFReader, AMFWriter

__all__ = (
@ -14,8 +15,6 @@ __all__ = (
    'ScriptDataDumper',
)

logger = logging.getLogger(__name__)


class ScriptData(TypedDict):
    name: str
@ -1,10 +1,11 @@
from __future__ import annotations

import json
import logging
from contextlib import suppress
from typing import Any, Callable, Dict, Tuple

from loguru import logger

from blrec.path.helpers import record_metadata_path
from blrec.utils.mixins import SwitchableMixin

@ -12,8 +13,6 @@ from . import operators as hls_ops

__all__ = ('MetadataDumper',)

logger = logging.getLogger(__name__)


class MetadataDumper(SwitchableMixin):
    def __init__(
@ -1,21 +1,18 @@
from __future__ import annotations

import logging
from typing import Optional, Union

import attr
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

from .prober import Prober, StreamProfile
from .segment_fetcher import InitSectionData, SegmentData
from .playlist_dumper import PlaylistDumper
from .prober import Prober, StreamProfile
from .segment_dumper import SegmentDumper
from .segment_fetcher import InitSectionData, SegmentData

__all__ = ('Analyser', 'MetaData')

logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, slots=True, frozen=True, kw_only=True)
class MetaData:
@ -1,20 +1,16 @@
from __future__ import annotations

import logging
from typing import Optional, Tuple, Union

from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

from .segment_fetcher import InitSectionData, SegmentData
from .playlist_dumper import PlaylistDumper
from .segment_fetcher import InitSectionData, SegmentData

__all__ = ('Cutter',)


logger = logging.getLogger(__name__)


class Cutter:
    def __init__(
        self, playlist_dumper: PlaylistDumper, min_duration: float = 5.0
@ -1,21 +1,18 @@
from __future__ import annotations

import logging
from typing import Optional, Union

from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

from .segment_fetcher import InitSectionData, SegmentData
from .playlist_dumper import PlaylistDumper
from .segment_dumper import SegmentDumper
from .segment_fetcher import InitSectionData, SegmentData

__all__ = ('Limiter',)


logger = logging.getLogger(__name__)


class Limiter:
    def __init__(
        self,
@ -1,24 +1,22 @@
from __future__ import annotations

import io
import logging
from copy import deepcopy
from decimal import Decimal
from pathlib import PurePath
from typing import Optional, Tuple, Union

import m3u8
from loguru import logger
from reactivex import Observable, Subject, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

from ..helpler import sequence_number_of
from .segment_fetcher import InitSectionData, SegmentData
from .segment_dumper import SegmentDumper
from .segment_fetcher import InitSectionData, SegmentData

__all__ = ('PlaylistDumper',)

logger = logging.getLogger(__name__)


class PlaylistDumper:
    def __init__(self, segment_dumper: SegmentDumper) -> None:
@ -1,6 +1,5 @@
from __future__ import annotations

import logging
import time
from datetime import datetime
from typing import Optional
@ -8,6 +7,7 @@ from typing import Optional
import m3u8
import requests
import urllib3
from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
from tenacity import retry, retry_if_exception_type, stop_after_delay, wait_exponential
@ -18,9 +18,6 @@ from blrec.utils.mixins import SupportDebugMixin
__all__ = ('PlaylistFetcher',)


logger = logging.getLogger(__name__)


class PlaylistFetcher(SupportDebugMixin):
    def __init__(self, live: Live, session: requests.Session) -> None:
        super().__init__()
@ -1,9 +1,9 @@
from __future__ import annotations

import logging
from typing import Optional

import m3u8
from loguru import logger
from reactivex import Observable, abc
from reactivex import operators as ops
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
@ -17,9 +17,6 @@ from ..helpler import sequence_number_of
__all__ = ('PlaylistResolver',)


logger = logging.getLogger(__name__)


class PlaylistResolver:
    def __init__(self, stream_url_resolver: core_ops.StreamURLResolver) -> None:
        self._stream_url_resolver = stream_url_resolver
@ -1,9 +1,9 @@
from __future__ import annotations

import io
import logging
from typing import List, Optional, Union

from loguru import logger
from reactivex import Observable, Subject, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -14,9 +14,6 @@ from .segment_fetcher import InitSectionData, SegmentData
__all__ = ('Prober', 'StreamProfile')


logger = logging.getLogger(__name__)


class Prober:
    def __init__(self) -> None:
        self._profiles: Subject[StreamProfile] = Subject()
@ -1,10 +1,10 @@
import io
import logging
from datetime import datetime, timedelta, timezone
from pathlib import PurePath
from typing import Callable, Optional, Tuple, Union

import attr
from loguru import logger
from reactivex import Observable, Subject, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -15,8 +15,6 @@ from .segment_fetcher import InitSectionData, SegmentData

__all__ = ('SegmentDumper',)

logger = logging.getLogger(__name__)


class SegmentDumper:
    def __init__(
@ -126,7 +124,7 @@ class SegmentDumper:
            prev_audio_profile['codec_name'] != curr_audio_profile['codec_name']
            or prev_audio_profile['channels'] != curr_audio_profile['channels']
            or prev_audio_profile['sample_rate'] != curr_audio_profile['sample_rate']
            or prev_audio_profile['bit_rate'] != curr_audio_profile['bit_rate']
            or prev_audio_profile.get('bit_rate') != curr_audio_profile.get('bit_rate')
        ):
            logger.warning('Audio parameters changed')
            return True
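The bit_rate comparison above now goes through dict.get(): a probed profile that lacks the key yields None instead of raising KeyError, and two missing values compare equal, so no spurious 'Audio parameters changed' warning is raised. In brief, with made-up profiles:

prev = {'codec_name': 'aac', 'channels': 2, 'sample_rate': '48000'}
curr = {'codec_name': 'aac', 'channels': 2, 'sample_rate': '48000'}

# prev['bit_rate'] would raise KeyError; .get() compares None == None.
changed = prev.get('bit_rate') != curr.get('bit_rate')  # False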
@ -1,6 +1,5 @@
from __future__ import annotations

import logging
import time
from typing import Optional, Union

@ -8,6 +7,7 @@ import attr
import m3u8
import requests
import urllib3
from loguru import logger
from reactivex import Observable, abc
from reactivex import operators as ops
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable
@ -24,15 +24,13 @@ from blrec.bili.live import Live
from blrec.core import operators as core_ops
from blrec.utils import operators as utils_ops
from blrec.utils.hash import cksum
from blrec.exception.helpers import format_exception

from ..exceptions import FetchSegmentError

__all__ = ('SegmentFetcher', 'InitSectionData', 'SegmentData')


logger = logging.getLogger(__name__)


@attr.s(auto_attribs=True, slots=True, frozen=True)
class InitSectionData:
    segment: m3u8.Segment
@ -136,7 +134,9 @@ class SegmentFetcher:
                else:
                    logger.warning(f'Segment data corrupted: {url}')
            except Exception as exc:
                logger.warning(f'Failed to fetch segment {url}', exc_info=exc)
                logger.warning(
                    'Failed to fetch segment: {}\n{}', url, format_exception(exc)
                )
                attempts += 1
                if attempts > 3:
                    attempts = 0
@ -1,8 +1,3 @@
from .configure_logging import configure_logger, TqdmOutputStream, ConsoleHandler
from .configure_logging import TqdmOutputStream, configure_logger


__all__ = (
    'configure_logger',
    'TqdmOutputStream',
    'ConsoleHandler',
)
__all__ = ('configure_logger', 'TqdmOutputStream')
@ -1,85 +1,46 @@
import asyncio
import atexit
import logging
import os
import threading
import sys
from datetime import datetime
from logging import Handler, LogRecord
from logging.handlers import TimedRotatingFileHandler
from typing import Any, List, Optional
from typing import Optional

from colorama import Back, Fore, Style, deinit, init
from loguru import logger
from tqdm import tqdm

from .typing import LOG_LEVEL

__all__ = 'configure_logger', 'ConsoleHandler', 'TqdmOutputStream'
__all__ = 'configure_logger', 'TqdmOutputStream'

LOGURU_CONSOLE_FORMAT = (
    '<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | '
    '<level>{level}</level> | '
    '<cyan>{module}</cyan>:<cyan>{line}</cyan> | '
    '<level>{extra[room_id]}</level> - '
    '<level>{message}</level>'
)

DISPLAY_PROGRESS = bool(os.environ.get('BLREC_PROGRESS'))
LOGURU_FILE_FORMAT = (
    '<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | '
    '<level>{level}</level> | '
    '<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> | '
    '<level>{extra[room_id]}</level> - '
    '<level>{message}</level>'
)


class TqdmOutputStream:
    def write(self, string: str = '') -> None:
        tqdm.write(string, end='')
        tqdm.write(string, file=sys.stderr, end='')

    def isatty(self) -> bool:
        return sys.stderr.isatty()


class ConsoleHandler(logging.StreamHandler):
    def __init__(self, stream=None) -> None:  # type: ignore
        super().__init__(stream)
_console_handler_id: Optional[int] = None
_file_handler_id: Optional[int] = None

    def format(self, record: LogRecord) -> str:
        msg = super().format(record)

        level = record.levelno
        if level == logging.DEBUG:
            style = Fore.GREEN
        elif level == logging.WARNING:
            style = Fore.YELLOW
        elif level == logging.ERROR:
            style = Fore.RED
        elif level == logging.CRITICAL:
            style = Fore.WHITE + Back.RED + Style.BRIGHT
        else:
            style = ''

        return style + msg + Style.RESET_ALL if style else msg


_old_factory = logging.getLogRecordFactory()


def obtain_room_id() -> str:
    try:
        task = asyncio.current_task()
        assert task is not None
    except Exception:
        name = threading.current_thread().getName()
    else:
        name = task.get_name()

    if '::' in name:
        if room_id := name.split('::')[-1]:
            return room_id

    return ''


def record_factory(*args: Any, **kwargs: Any) -> LogRecord:
    record = _old_factory(*args, **kwargs)

    if room_id := obtain_room_id():
        record.roomid = '[' + room_id + '] '  # type: ignore
    else:
        record.roomid = ''  # type: ignore

    return record


logging.setLogRecordFactory(record_factory)


_old_handlers: List[Handler] = []
_old_log_dir: Optional[str] = None
_old_console_log_level: Optional[LOG_LEVEL] = None
_old_backup_count: Optional[int] = None


def configure_logger(
@ -88,52 +49,55 @@ def configure_logger(
    console_log_level: LOG_LEVEL = 'INFO',
    backup_count: Optional[int] = None,
) -> None:
    # config root logger
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    global _console_handler_id, _file_handler_id
    global _old_log_dir, _old_console_log_level, _old_backup_count

    # config formatter
    formatter = logging.Formatter(
        '[%(asctime)s] [%(levelname)s] [%(module)s] %(roomid)s%(message)s'
    )
    logger.configure(extra={'room_id': ''})

    # logging to console
    if DISPLAY_PROGRESS:
        console_handler = ConsoleHandler(TqdmOutputStream())
    else:
        console_handler = ConsoleHandler()
    console_handler.setLevel(logging.getLevelName(console_log_level))
    console_handler.setFormatter(formatter)
    logger.addHandler(console_handler)
    if console_log_level != _old_console_log_level:
        if _console_handler_id is not None:
            logger.remove(_console_handler_id)
        else:
            logger.remove()  # remove the default stderr handler

    # logging to file
    log_file_path = make_log_file_path(log_dir)
    file_handler = TimedRotatingFileHandler(
        filename=log_file_path,
        when='MIDNIGHT',
        backupCount=backup_count or 0,
        encoding='utf-8',
    )
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
        if bool(os.environ.get('BLREC_PROGRESS')):
            _console_handler_id = logger.add(
                TqdmOutputStream(),
                level=console_log_level,
                format=LOGURU_CONSOLE_FORMAT,
            )
        else:
            _console_handler_id = logger.add(
                sys.stderr, level=console_log_level, format=LOGURU_CONSOLE_FORMAT
            )

    # remove old handlers after re-configured
    for handler in _old_handlers:
        logger.removeHandler(handler)
        _old_console_log_level = console_log_level

    # retain old handlers for the removing
    _old_handlers.append(console_handler)
    _old_handlers.append(file_handler)
    if log_dir != _old_log_dir or backup_count != _old_backup_count:
        log_file_path = make_log_file_path(log_dir)
    logger.info(f'log file: {log_file_path}')

        logger.info(f'log file: {log_file_path}')
        file_handler_id = logger.add(
            log_file_path,
            level='TRACE' if bool(os.environ.get('BLREC_TRACE')) else 'DEBUG',
            format=LOGURU_FILE_FORMAT,
            enqueue=True,
            rotation="00:00",
            retention=backup_count,
            backtrace=True,
            diagnose=True,
        )

        if _file_handler_id is not None:
            logger.remove(_file_handler_id)

        _file_handler_id = file_handler_id

        _old_log_dir = log_dir
        _old_backup_count = backup_count


def make_log_file_path(log_dir: str) -> str:
    data_time_string = datetime.now().strftime("%Y-%m-%d-%H%M%S-%f")
    data_time_string = datetime.now().strftime("%Y-%m-%d_%H-%M-%S_%f")
    filename = f'blrec_{data_time_string}.log'
    return os.path.abspath(os.path.join(log_dir, filename))


init()
atexit.register(deinit)
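The handler bookkeeping above collapses into loguru's add()/remove() pair: add() returns an integer handler id, and re-configuration swaps a sink by removing that id instead of juggling a list of stdlib handlers. A reduced sketch of the file sink configured here (the path and retention are example values):

import os

from loguru import logger

file_handler_id = logger.add(
    os.path.join('logs', 'blrec_example.log'),
    level='DEBUG',     # 'TRACE' when BLREC_TRACE is set
    rotation='00:00',  # new file at midnight, like TimedRotatingFileHandler
    retention=30,      # keep at most 30 rotated files
    enqueue=True,      # write through a background queue, thread-safe
)

# On re-configuration, the previous sink is dropped by id:
logger.remove(file_handler_id)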
19
src/blrec/logging/context.py
Normal file
@ -0,0 +1,19 @@
from functools import wraps
from typing import Any, Awaitable, Callable, TypeVar

from loguru import logger

_T = TypeVar('_T')

__all__ = ('async_task_with_logger_context',)


def async_task_with_logger_context(
    func: Callable[..., Awaitable[_T]]
) -> Callable[..., Awaitable[_T]]:
    @wraps(func)
    async def wrapper(obj: Any, *arg: Any, **kwargs: Any) -> _T:
        with logger.contextualize(**obj._logger_context):
            return await func(obj, *arg, **kwargs)

    return wrapper
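This new decorator restores the logger context inside freshly created asyncio tasks, replacing the old trick of smuggling the room id through task and thread names (the room_id module deleted below). Usage looks roughly like this on any object defining _logger_context — a sketch, not code from the project:

from loguru import logger

from blrec.logging.context import async_task_with_logger_context


class Worker:
    def __init__(self, room_id: int) -> None:
        self._logger_context = {'room_id': room_id}

    @async_task_with_logger_context
    async def run(self) -> None:
        # Runs with logger.contextualize(room_id=...) active, so records
        # emitted here and in awaited helpers carry the room id.
        logger.info('working')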
@ -1,46 +0,0 @@
import threading
import asyncio
from functools import wraps
from typing import Any, Awaitable, Callable, TypeVar


_T = TypeVar('_T')


def with_room_id(room_id: int):  # type: ignore
    def decorate(func: Callable[..., _T]) -> Callable[..., _T]:
        @wraps(func)
        def wrapper(*arg: Any, **kwargs: Any) -> _T:
            curr_thread = threading.current_thread()
            old_name = curr_thread.getName()
            curr_thread.setName(f'{func.__qualname__}::{room_id}')
            try:
                return func(*arg, **kwargs)
            finally:
                curr_thread.setName(old_name)
        return wrapper
    return decorate


def aio_task_with_room_id(
    func: Callable[..., Awaitable[_T]]
) -> Callable[..., Awaitable[_T]]:
    @wraps(func)
    async def wrapper(obj: Any, *arg: Any, **kwargs: Any) -> _T:
        if hasattr(obj, '_room_id'):
            room_id = obj._room_id
        elif hasattr(obj, '_live'):
            room_id = obj._live.room_id
        else:
            room_id = ''

        curr_task = asyncio.current_task()
        assert curr_task is not None
        old_name = curr_task.get_name()
        curr_task.set_name(f'{func.__qualname__}::{room_id}')
        try:
            return await func(obj, *arg, **kwargs)
        finally:
            curr_task.set_name(old_name)

    return wrapper
@ -1,20 +1,5 @@
from typing import Literal

# CRITICAL = 50
# FATAL = CRITICAL
# ERROR = 40
# WARNING = 30
# WARN = WARNING
# INFO = 20
# DEBUG = 10
# NOTSET = 0

LOG_LEVEL = Literal[
    'CRITICAL',
    'FATAL',
    'ERROR',
    'WARNING',
    'INFO',
    'DEBUG',
    'NOTSET',  # equivalent to verbose
    'TRACE', 'DEBUG', 'INFO', 'SUCCESS', 'WARNING', 'ERROR', 'CRITICAL',
]
@ -1,5 +1,4 @@
import asyncio
import logging
import os
from abc import ABC, abstractmethod
from datetime import datetime
@ -30,13 +29,13 @@ from ..exception import ExceptionCenter, format_exception
from ..setting.typing import MessageType
from ..utils.mixins import SwitchableMixin
from .providers import (
    Bark,
    EmailService,
    MessagingProvider,
    Pushdeer,
    Pushplus,
    Serverchan,
    Telegram,
    Bark,
)

__all__ = (
@ -51,7 +50,7 @@ __all__ = (
)


logger = logging.getLogger(__name__)
from loguru import logger


class Notifier(SwitchableMixin, ABC):
@ -1,5 +1,4 @@
import asyncio
import logging
import smtplib
import ssl
from abc import ABC, abstractmethod
@ -11,13 +10,13 @@ from urllib.parse import urljoin
import aiohttp

from ..setting.typing import (
    BarkMessageType,
    EmailMessageType,
    MessageType,
    PushdeerMessageType,
    PushplusMessageType,
    ServerchanMessageType,
    TelegramMessageType,
    BarkMessageType,
)
from ..utils.patterns import Singleton

@ -32,9 +31,6 @@ __all__ = (
)


logger = logging.getLogger(__name__)


class MessagingProvider(Singleton, ABC):
    def __init__(self) -> None:
        super().__init__()
@ -2,13 +2,13 @@ from __future__ import annotations

import asyncio
import json
import logging
import os
from decimal import Decimal
from typing import Iterable, List, Tuple, cast

import aiofiles
import m3u8
from loguru import logger

from blrec.flv.helpers import make_comment_for_joinpoints
from blrec.flv.operators import JoinPoint
@ -17,8 +17,6 @@ from blrec.path.helpers import ffmpeg_metadata_path, playlist_path

from .helpers import get_extra_metadata, get_metadata, get_record_metadata

logger = logging.getLogger(__name__)


async def make_metadata_file(video_path: str) -> str:
    path = ffmpeg_metadata_path(video_path)
@ -1,11 +1,11 @@
import asyncio
import json
import logging
import os
import shutil
from typing import Any, Dict, Iterable, Literal

import aiofiles
from loguru import logger

from blrec.path.helpers import (
    cover_path,
@ -18,8 +18,6 @@ from blrec.path.helpers import (
from ..flv.helpers import get_extra_metadata as _get_extra_metadata
from ..flv.helpers import get_metadata as _get_metadata

logger = logging.getLogger(__name__)


async def discard_files(
    paths: Iterable[str], log_level: Literal['INFO', 'DEBUG'] = 'INFO'
@ -41,7 +39,7 @@ async def discard_dir(path: str, log_level: Literal['INFO', 'DEBUG'] = 'INFO') -
    except Exception as e:
        logger.error(f'Failed to delete {path!r}, due to: {repr(e)}')
    else:
        logger.log(logging.getLevelName(log_level), f'Deleted {path!r}')
        logger.log(log_level, f'Deleted {path!r}')


def files_related(video_path: str) -> Iterable[str]:
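Note the simplification in `discard_dir`: stdlib's `Logger.log()` wants a numeric level, hence the `logging.getLevelName(log_level)` round-trip, whereas loguru's `logger.log()` accepts the level name directly. For example (the path is illustrative):

```python
from loguru import logger

log_level = 'DEBUG'
logger.log(log_level, 'Deleted {!r}', '/tmp/blive_123_456.flv')
```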
@ -1,14 +1,16 @@
from __future__ import annotations

import asyncio
import logging
import os
from contextlib import suppress
from pathlib import PurePath
from typing import Any, Awaitable, Dict, Final, Iterator, List, Optional, Tuple, Union

from loguru import logger
from reactivex.scheduler import ThreadPoolScheduler

from blrec.logging.context import async_task_with_logger_context

from ..bili.live import Live
from ..core import Recorder, RecorderEventListener
from ..event.event_emitter import EventEmitter, EventListener
@ -16,7 +18,6 @@ from ..exception import exception_callback, submit_exception
from ..flv.helpers import is_valid_flv_file
from ..flv.metadata_analysis import analyse_metadata
from ..flv.metadata_injection import InjectingProgress, inject_metadata
from ..logging.room_id import aio_task_with_room_id
from ..path import (
    extra_metadata_path,
    ffmpeg_metadata_path,
@ -40,8 +41,6 @@ __all__ = (

DISPLAY_PROGRESS = bool(os.environ.get('BLREC_PROGRESS'))

logger = logging.getLogger(__name__)


class PostprocessorEventListener(EventListener):
    async def on_video_postprocessing_completed(
@ -75,6 +74,8 @@ class Postprocessor(
    ) -> None:
        super().__init__()
        self._init_for_debug(live.room_id)
        self._logger_context = {'room_id': live.room_id}
        self._logger = logger.bind(**self._logger_context)

        self._live = live
        self._recorder = recorder
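`bind()` returns a logger with the given fields attached to every record it emits, which is how the room id now travels with the `Postprocessor` instead of through task names. Sketch (the room id value is illustrative):

```python
from loguru import logger

room_logger = logger.bind(room_id=23058)
room_logger.info('Started postprocessor')  # record carries extra={'room_id': 23058}
```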
@ -125,7 +126,7 @@ class Postprocessor(
        self._task = asyncio.create_task(self._worker())
        self._task.add_done_callback(exception_callback)

        logger.debug('Started postprocessor')
        self._logger.debug('Started postprocessor')

    async def _do_stop(self) -> None:
        self._recorder.remove_listener(self)
@ -139,9 +140,9 @@ class Postprocessor(
        del self._scheduler
        del self._task

        logger.debug('Stopped postprocessor')
        self._logger.debug('Stopped postprocessor')

    @aio_task_with_room_id
    @async_task_with_logger_context
    async def _worker(self) -> None:
        while True:
            await self._postprocess()
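`async_task_with_logger_context` takes over from `aio_task_with_room_id` here. Its implementation isn't shown in this diff; a plausible shape, assuming it re-applies the instance's `_logger_context` inside the spawned task, would be:

```python
import functools
from typing import Any, Awaitable, Callable

from loguru import logger


def async_task_with_logger_context(
    func: Callable[..., Awaitable[Any]]
) -> Callable[..., Awaitable[Any]]:
    # Assumed sketch, not the project's actual code: wrap the coroutine so
    # records logged inside it carry the owner's context (e.g. the room id
    # bound in __init__).
    @functools.wraps(func)
    async def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
        with logger.contextualize(**self._logger_context):
            return await func(self, *args, **kwargs)

    return wrapper
```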
@ -155,7 +156,7 @@ class Postprocessor(
            self._completed_files.append(video_path)

        async with self._worker_semaphore:
            logger.debug(f'Postprocessing... {video_path}')
            self._logger.debug(f'Postprocessing... {video_path}')
            await self._wait_for_metadata_file(video_path)

            try:
@ -180,7 +181,7 @@ class Postprocessor(

    async def _process_flv(self, video_path: str) -> str:
        if not await self._is_vaild_flv_file(video_path):
            logger.warning(f'The flv file may be invalid: {video_path}')
            self._logger.warning(f'The flv file may be invalid: {video_path}')
            if os.path.getsize(video_path) < 1024**2:
                return video_path

@ -217,28 +218,28 @@ class Postprocessor(
        return result_path

    async def _inject_extra_metadata(self, path: str) -> str:
        logger.info(f"Injecting metadata for '{path}' ...")
        self._logger.info(f"Injecting metadata for '{path}' ...")
        try:
            try:
                metadata = await get_extra_metadata(path)
            except Exception as e:
                logger.warning(f'Failed to get extra metadata: {repr(e)}')
                logger.info(f"Analysing metadata for '{path}' ...")
                self._logger.warning(f'Failed to get extra metadata: {repr(e)}')
                self._logger.info(f"Analysing metadata for '{path}' ...")
                await self._analyse_metadata(path)
                metadata = await get_extra_metadata(path)
            else:
                if 'keyframes' not in metadata:
                    logger.warning('The keyframes metadata lost')
                    logger.info(f"Analysing metadata for '{path}' ...")
                    self._logger.warning('The keyframes metadata lost')
                    self._logger.info(f"Analysing metadata for '{path}' ...")
                    await self._analyse_metadata(path)
                    new_metadata = await get_extra_metadata(path)
                    metadata.update(new_metadata)
            await self._inject_metadata(path, metadata)
        except Exception as e:
            logger.error(f"Failed to inject metadata for '{path}': {repr(e)}")
            self._logger.error(f"Failed to inject metadata for '{path}': {repr(e)}")
            submit_exception(e)
        else:
            logger.info(f"Successfully injected metadata for '{path}'")
            self._logger.info(f"Successfully injected metadata for '{path}'")
        return path

    async def _remux_video_to_mp4(self, in_path: str) -> Tuple[str, RemuxingResult]:
@ -255,22 +256,22 @@ class Postprocessor(
        else:
            raise NotImplementedError(in_path)

        logger.info(f"Remuxing '{in_path}' to '{out_path}' ...")
        self._logger.info(f"Remuxing '{in_path}' to '{out_path}' ...")
        remux_result = await self._remux_video(in_path, out_path, metadata_path)

        if remux_result.is_failed():
            logger.error(f"Failed to remux '{in_path}' to '{out_path}'")
            self._logger.error(f"Failed to remux '{in_path}' to '{out_path}'")
            result_path = _in_path if ext == 'm4s' else in_path
        elif remux_result.is_warned():
            logger.warning('Remuxing done, but ran into problems.')
            self._logger.warning('Remuxing done, but ran into problems.')
            result_path = out_path
        elif remux_result.is_successful():
            logger.info(f"Successfully remuxed '{in_path}' to '{out_path}'")
            self._logger.info(f"Successfully remuxed '{in_path}' to '{out_path}'")
            result_path = out_path
        else:
            pass

        logger.debug(f'ffmpeg output:\n{remux_result.output}')
        self._logger.debug(f'ffmpeg output:\n{remux_result.output}')

        if not self._debug and ext == '.flv':
            await discard_file(metadata_path, 'DEBUG')
@ -370,7 +371,7 @@ class Postprocessor(
            if await loop.run_in_executor(None, os.path.isfile, path):
                break
            else:
                logger.debug(f'Not found metadata file: {path}')
                self._logger.debug(f'Not found metadata file: {path}')
                await asyncio.sleep(1)
        else:
            logger.warning(f'No such metadata file: {path}')
            self._logger.warning(f'No such metadata file: {path}')
@ -1,6 +1,5 @@
from __future__ import annotations

import logging
import os
import re
from typing import ClassVar, Collection, Final, List, Optional, TypeVar
@ -28,9 +27,6 @@ from .typing import (
    TelegramMessageType,
)

logger = logging.getLogger(__name__)


__all__ = (
    'DEFAULT_SETTINGS_FILE',
    'EnvSettings',
@ -1,4 +1,3 @@
import logging
import os
from contextlib import suppress
from pathlib import PurePath
@ -19,7 +18,6 @@ from blrec.event.event_submitters import (
)
from blrec.flv.metadata_injection import InjectingProgress
from blrec.flv.operators import StreamProfile
from blrec.logging.room_id import aio_task_with_room_id
from blrec.postprocess import DeleteStrategy, Postprocessor, PostprocessorStatus
from blrec.postprocess.remux import RemuxingProgress
from blrec.setting.typing import RecordingMode
@ -36,9 +34,6 @@ from .models import (
__all__ = ('RecordTask',)


logger = logging.getLogger(__name__)


class RecordTask:
    def __init__(
        self,
@ -444,19 +439,16 @@ class RecordTask:
    def cut_stream(self) -> bool:
        return self._recorder.cut_stream()

    @aio_task_with_room_id
    async def setup(self) -> None:
        await self._live.init()
        await self._setup()
        self._ready = True

    @aio_task_with_room_id
    async def destroy(self) -> None:
        await self._destroy()
        await self._live.deinit()
        self._ready = False

    @aio_task_with_room_id
    async def enable_monitor(self) -> None:
        if self._monitor_enabled:
            return
@ -465,7 +457,6 @@ class RecordTask:
        await self._danmaku_client.start()
        self._live_monitor.enable()

    @aio_task_with_room_id
    async def disable_monitor(self) -> None:
        if not self._monitor_enabled:
            return
@ -474,7 +465,6 @@ class RecordTask:
        self._live_monitor.disable()
        await self._danmaku_client.stop()

    @aio_task_with_room_id
    async def enable_recorder(self) -> None:
        if self._recorder_enabled:
            return
@ -483,7 +473,6 @@ class RecordTask:
        await self._postprocessor.start()
        await self._recorder.start()

    @aio_task_with_room_id
    async def disable_recorder(self, force: bool = False) -> None:
        if not self._recorder_enabled:
            return
@ -496,11 +485,9 @@ class RecordTask:
        await self._recorder.stop()
        await self._postprocessor.stop()

    @aio_task_with_room_id
    async def update_info(self, raise_exception: bool = False) -> bool:
        return await self._live.update_info(raise_exception=raise_exception)

    @aio_task_with_room_id
    async def restart_danmaku_client(self) -> None:
        await self._danmaku_client.restart()
@ -1,7 +1,6 @@
from __future__ import annotations

import asyncio
import logging
from typing import TYPE_CHECKING, Dict, Iterator, Optional

import aiohttp
@ -19,6 +18,8 @@ from .task import RecordTask
if TYPE_CHECKING:
    from ..setting import SettingsManager

from loguru import logger

from ..setting import (
    BiliApiSettings,
    DanmakuSettings,
@ -32,9 +33,6 @@ from ..setting import (
__all__ = ('RecordTaskManager',)


logger = logging.getLogger(__name__)


class RecordTaskManager:
    def __init__(self, settings_manager: SettingsManager) -> None:
        self._settings_manager = settings_manager
@ -5,8 +5,6 @@ from abc import ABC, abstractmethod
from concurrent.futures import Future
from typing import Awaitable, TypeVar, final

from blrec.logging.room_id import aio_task_with_room_id


class SwitchableMixin(ABC):
    def __init__(self) -> None:
@ -140,10 +138,6 @@ class AsyncCooperationMixin(ABC):
        future = self._run_coroutine(coro)
        return future.result()

    @aio_task_with_room_id
    async def _with_room_id(self, coro: Awaitable[_T]) -> _T:
        return await coro


class SupportDebugMixin(ABC):
    def __init__(self) -> None:
@ -1,7 +1,8 @@
from queue import Queue
from threading import Thread, current_thread
from typing import Any, Callable, Optional, TypeVar
from typing import Any, Callable, Dict, Optional, TypeVar

from loguru import logger
from reactivex import Observable, abc
from reactivex.disposable import CompositeDisposable, Disposable, SerialDisposable

@ -9,7 +10,9 @@ _T = TypeVar('_T')


def observe_on_new_thread(
    queue_size: Optional[int] = None, thread_name: Optional[str] = None
    queue_size: Optional[int] = None,
    thread_name: Optional[str] = None,
    logger_context: Optional[Dict[str, Any]] = None,
) -> Callable[[Observable[_T]], Observable[_T]]:
    def observe_on(source: Observable[_T]) -> Observable[_T]:
        def subscribe(
@ -21,8 +24,9 @@ def observe_on_new_thread(
            queue: Queue[Callable[..., Any]] = Queue(maxsize=queue_size or 0)

            def run() -> None:
                while not disposed:
                    queue.get()()
                with logger.contextualize(**(logger_context or {})):
                    while not disposed:
                        queue.get()()

            thread = Thread(target=run, name=thread_name, daemon=True)
            thread.start()
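`logger.contextualize()` is the context-manager counterpart of `bind()`: it patches the ambient logging context (backed by `contextvars`) for everything executed inside the block. Wrapping the worker loop is what carries `logger_context` into records emitted from the new thread, since a fresh thread does not inherit the caller's context variables. Usage sketch (values are illustrative):

```python
from loguru import logger

with logger.contextualize(room_id=23058):
    logger.info('every record in this block carries room_id')
```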
@ -1,4 +1,3 @@
import logging
import os
from typing import Optional, Tuple

@ -22,9 +21,6 @@ from . import security
from .routers import application, settings, tasks, update, validation, websockets
from .schemas import ResponseMessage

logger = logging.getLogger(__name__)


_env_settings = EnvSettings()
_path = os.path.abspath(os.path.expanduser(_env_settings.settings_file))
if not file_exists(_path):
@ -1,32 +1,21 @@
import asyncio
import logging

from fastapi import APIRouter
from loguru import logger

from ... import __prog__
from ...application import Application
from ...update.helpers import get_latest_version_string


logger = logging.getLogger(__name__)


app: Application = None  # type: ignore # bypass flake8 F821

router = APIRouter(
    prefix='/api/v1/update',
    tags=['update'],
)
router = APIRouter(prefix='/api/v1/update', tags=['update'])


@router.get(
    '/version/latest',
)
@router.get('/version/latest')
async def get_latest_version() -> str:
    try:
        return await get_latest_version_string(__prog__) or ''
    except asyncio.TimeoutError:
        logger.warning(
            'Timeout error occurred while getting the latest version string'
        )
        logger.warning('Timeout error occurred while getting the latest version string')
        return ''
@ -1,28 +1,21 @@
import logging
import asyncio
import json
import logging

from fastapi import (
    APIRouter,
    WebSocket,
    WebSocketDisconnect,
)
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from loguru import logger
from websockets.exceptions import ConnectionClosed

from ...application import Application
from ...event import EventCenter
from ...event.typing import Event
from ...exception import ExceptionCenter, format_exception
from ...application import Application


logger = logging.getLogger(__name__)
logging.getLogger('websockets').setLevel(logging.WARNING)

app: Application = None  # type: ignore # bypass flake8 F821

router = APIRouter(
    tags=['websockets'],
)
router = APIRouter(tags=['websockets'])


@router.websocket('/ws/v1/events')
@ -75,7 +68,6 @@ async def receive_exception(websocket: WebSocket) -> None:
    def on_exception(exc: BaseException) -> None:
        asyncio.create_task(send_exception(exc))

    subscription = ExceptionCenter.get_instance().exceptions \
        .subscribe(on_exception)
    subscription = ExceptionCenter.get_instance().exceptions.subscribe(on_exception)

    await future
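Dropping `logging.getLogger('websockets').setLevel(...)` is consistent with the switch, but third-party libraries like `websockets` still log through the stdlib; if their records should reach loguru's sinks, the pattern documented by loguru is an intercept handler along these lines (a sketch, not part of this commit):

```python
import logging

from loguru import logger


class InterceptHandler(logging.Handler):
    def emit(self, record: logging.LogRecord) -> None:
        # Map the stdlib level name onto loguru's, falling back to the number.
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno
        # depth=6 is approximate; it points the reported caller past the
        # stdlib logging machinery.
        logger.opt(depth=6, exception=record.exc_info).log(level, record.getMessage())


logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)
```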
@ -1,13 +1,9 @@
import logging
import secrets
from typing import Dict, Optional, Set

from fastapi import Header, Request, status
from fastapi.exceptions import HTTPException

logger = logging.getLogger(__name__)


api_key = ''

MAX_WHITELIST = 100
@ -1,35 +1,31 @@
from __future__ import annotations
import logging

from typing import Set, Tuple, Type

import attr

from ..setting import WebHookSettings
from ..event import (
    CoverImageDownloadedEvent,
    DanmakuFileCompletedEvent,
    DanmakuFileCreatedEvent,
    LiveBeganEvent,
    LiveEndedEvent,
    RoomChangeEvent,
    RecordingStartedEvent,
    RecordingFinishedEvent,
    RecordingCancelledEvent,
    VideoFileCreatedEvent,
    VideoFileCompletedEvent,
    DanmakuFileCreatedEvent,
    DanmakuFileCompletedEvent,
    RawDanmakuFileCreatedEvent,
    RawDanmakuFileCompletedEvent,
    CoverImageDownloadedEvent,
    SpaceNoEnoughEvent,
    VideoPostprocessingCompletedEvent,
    PostprocessingCompletedEvent,
    RawDanmakuFileCompletedEvent,
    RawDanmakuFileCreatedEvent,
    RecordingCancelledEvent,
    RecordingFinishedEvent,
    RecordingStartedEvent,
    RoomChangeEvent,
    SpaceNoEnoughEvent,
    VideoFileCompletedEvent,
    VideoFileCreatedEvent,
    VideoPostprocessingCompletedEvent,
)
from ..event.typing import Event
from ..setting import WebHookSettings


__all__ = 'WebHook',


logger = logging.getLogger(__name__)
__all__ = ('WebHook',)


@attr.s(auto_attribs=True, slots=True, frozen=True)
@ -1,37 +1,25 @@
import logging
import asyncio
from typing import Any, Dict, List

import aiohttp
from tenacity import (
    AsyncRetrying,
    wait_exponential,
    stop_after_delay,
)
from loguru import logger
from tenacity import AsyncRetrying, stop_after_delay, wait_exponential

from .models import WebHook
from ..utils.mixins import SwitchableMixin
from ..exception import ExceptionCenter
from ..event import EventCenter, Error, ErrorData
from ..event.typing import Event
from .. import __prog__, __version__
from ..event import Error, ErrorData, EventCenter
from ..event.typing import Event
from ..exception import ExceptionCenter
from ..utils.mixins import SwitchableMixin
from .models import WebHook


__all__ = 'WebHookEmitter',


logger = logging.getLogger(__name__)
__all__ = ('WebHookEmitter',)


class WebHookEmitter(SwitchableMixin):
    def __init__(
        self, webhooks: List[WebHook] = []
    ) -> None:
    def __init__(self, webhooks: List[WebHook] = []) -> None:
        super().__init__()
        self.webhooks = webhooks
        self.headers = {
            'User-Agent': f'{__prog__}/{__version__}'
        }
        self.headers = {'User-Agent': f'{__prog__}/{__version__}'}

    def _do_enable(self) -> None:
        events = EventCenter.get_instance().events
@ -63,25 +51,19 @@ class WebHookEmitter(SwitchableMixin):
    def _send_request(self, url: str, payload: Dict[str, Any]) -> None:
        asyncio.create_task(self._send_request_async(url, payload))

    async def _send_request_async(
        self, url: str, payload: Dict[str, Any]
    ) -> None:
    async def _send_request_async(self, url: str, payload: Dict[str, Any]) -> None:
        try:
            async for attempt in AsyncRetrying(
                stop=stop_after_delay(180),
                wait=wait_exponential(max=15),
                stop=stop_after_delay(180), wait=wait_exponential(max=15)
            ):
                with attempt:
                    await self._post(url, payload)
        except Exception as e:
            logger.warning('Failed to send a request to {}: {}'.format(
                url, repr(e)
            ))
            logger.warning('Failed to send a request to {}: {}'.format(url, repr(e)))

    async def _post(self, url: str, payload: Dict[str, Any]) -> None:
        async with aiohttp.ClientSession(
            headers=self.headers,
            raise_for_status=True,
            headers=self.headers, raise_for_status=True
        ) as session:
            async with session.post(url, json=payload):
                pass
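For reference, the reformatted retry policy above keeps its semantics: tenacity's `AsyncRetrying` retries the body until 180 seconds have elapsed in total, with exponentially growing waits capped at 15 seconds. Equivalent standalone usage (`post_once` is a hypothetical async callable that raises on failure):

```python
from tenacity import AsyncRetrying, stop_after_delay, wait_exponential


async def deliver(post_once) -> None:
    async for attempt in AsyncRetrying(
        stop=stop_after_delay(180), wait=wait_exponential(max=15)
    ):
        with attempt:
            await post_once()
```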
@ -34,7 +34,6 @@ export class LoggingSettingsComponent implements OnInit, OnChanges {
  readonly syncFailedWarningTip = SYNC_FAILED_WARNING_TIP;

  readonly logLevelOptions = [
    { label: 'VERBOSE', value: 'NOTSET' },
    { label: 'DEBUG', value: 'DEBUG' },
    { label: 'INFO', value: 'INFO' },
    { label: 'WARNING', value: 'WARNING' },
@ -103,12 +103,13 @@ export interface OutputSettings {
export type OutputOptions = Nullable<Omit<OutputSettings, 'outDir'>>;

export type LogLevel =
  | 'CRITICAL'
  | 'ERROR'
  | 'WARNING'
  | 'INFO'
  | 'TRACE'
  | 'DEBUG'
  | 'NOTSET';
  | 'INFO'
  | 'SUCCESS'
  | 'WARNING'
  | 'ERROR'
  | 'CRITICAL';

export interface LoggingSettings {
  logDir: string;