cp-library

This documentation is automatically generated by online-judge-tools/verification-helper


✔️ cp_library/test/unittest_helper.py

Depends on

cp_library/io/read_fn.py
cp_library/io/write_fn.py

Verified with

Code

"""
Helper for making unittest files compatible with verification-helper.

This module provides a helper function to run a dummy Library Checker test
so that unittest files can be verified by oj-verify.
"""

def run_verification_helper_unittest():
    """
    Run a dummy Library Checker test for verification-helper compatibility.
    
    This function should be called in the __main__ block of unittest files
    that need to be compatible with verification-helper.
    
    The function:
    1. Reads A and B from input
    2. Writes A+B to output  
    3. If the result is the expected value (1198300249), runs pytest
    4. Exits with the pytest result code
    """
    import sys
    from cp_library.io.read_fn import read
    from cp_library.io.write_fn import write
    
    A, B = read()
    write(C := A + B)
    # Answer the dummy A+B case; only the case whose expected answer is
    # 1198300249 goes on to run the real test suite.
    if C != 1198300249:
        sys.exit(0)
    
    import pytest
    import io
    from contextlib import redirect_stdout, redirect_stderr

    # Capture all output during test execution
    output = io.StringIO()
    with redirect_stdout(output), redirect_stderr(output):
        # Get the calling module's file path
        frame = sys._getframe(1)
        test_file = frame.f_globals.get('__file__')
        if test_file is None:
            test_file = sys.argv[0]
        result = pytest.main([test_file])
    
    # Show the captured pytest output only when the tests failed,
    # then propagate pytest's exit status.
    if result != 0:
        print(output.getvalue())
    sys.exit(result)
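
As the docstring above describes, a test file opts in by calling run_verification_helper_unittest() in its __main__ block. A minimal sketch of such a file follows; the file name, problem URL, and test case are illustrative assumptions, while the import path and the helper call come from this library.

# cp_library/test/example_unittest.test.py -- hypothetical test module
# verification-helper: PROBLEM https://judge.yosupo.jp/problem/aplusb
import unittest

from cp_library.test.unittest_helper import run_verification_helper_unittest


class TestExample(unittest.TestCase):
    def test_addition(self):
        self.assertEqual(1 + 1, 2)


if __name__ == '__main__':
    # Answers the dummy A+B case; on the trigger case it runs pytest
    # against this file and exits with pytest's status code.
    run_verification_helper_unittest()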
"""
Helper for making unittest files compatible with verification-helper.

This module provides a helper function to run a dummy Library Checker test
so that unittest files can be verified by oj-verify.
"""

def run_verification_helper_unittest():
    """
    Run a dummy Library Checker test for verification-helper compatibility.
    
    This function should be called in the __main__ block of unittest files
    that need to be compatible with verification-helper.
    
    The function:
    1. Reads A and B from input
    2. Writes A+B to output  
    3. If the result is the expected value (1198300249), runs pytest
    4. Exits with the pytest result code
    """
    import sys
    '''
    ╺━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╸
                 https://kobejean.github.io/cp-library               
    '''
    
    from typing import Type, Union, overload
    '''
    ╺━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╸
                 https://kobejean.github.io/cp-library               
    '''
    import typing
    from collections import deque
    from numbers import Number
    from types import GenericAlias 
    from typing import Callable, Collection, Iterator, Union
    '''
    ╺━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╸
                 https://kobejean.github.io/cp-library               
    '''
    import os
    import sys
    from io import BytesIO, IOBase
    
    
    class FastIO(IOBase):
        BUFSIZE = 8192
        newlines = 0
    
        def __init__(self, file):
            self._fd = file.fileno()
            self.buffer = BytesIO()
            self.writable = "x" in file.mode or "r" not in file.mode
            self.write = self.buffer.write if self.writable else None
    
        def read(self):
            BUFSIZE = self.BUFSIZE
            while True:
                b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
                if not b: break
                ptr = self.buffer.tell()
                self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
            self.newlines = 0
            return self.buffer.read()
    
        def readline(self):
            BUFSIZE = self.BUFSIZE
            while self.newlines == 0:
                b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
                self.newlines = b.count(b"\n") + (not b)
                ptr = self.buffer.tell()
                self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
            self.newlines -= 1
            return self.buffer.readline()
    
        def flush(self):
            if self.writable:
                os.write(self._fd, self.buffer.getvalue())
                self.buffer.truncate(0), self.buffer.seek(0)
    
    
    class IOWrapper(IOBase):
        stdin: 'IOWrapper' = None
        stdout: 'IOWrapper' = None
        
        def __init__(self, file):
            self.buffer = FastIO(file)
            self.flush = self.buffer.flush
            self.writable = self.buffer.writable
    
        def write(self, s):
            return self.buffer.write(s.encode("ascii"))
        
        def read(self):
            return self.buffer.read().decode("ascii")
        
        def readline(self):
            return self.buffer.readline().decode("ascii")
    try:
        sys.stdin = IOWrapper.stdin = IOWrapper(sys.stdin)
        sys.stdout = IOWrapper.stdout = IOWrapper(sys.stdout)
    except:
        pass
    from typing import TypeVar
    _S = TypeVar('S')
    _T = TypeVar('T')
    _U = TypeVar('U')
    
    class TokenStream(Iterator):
        stream = IOWrapper.stdin
    
        def __init__(self):
            self.queue = deque()
    
        def __next__(self):
            if not self.queue: self.queue.extend(self._line())
            return self.queue.popleft()
        
        def wait(self):
            if not self.queue: self.queue.extend(self._line())
            while self.queue: yield
     
        def _line(self):
            return TokenStream.stream.readline().split()
    
        def line(self):
            if self.queue:
                A = list(self.queue)
                self.queue.clear()
                return A
            return self._line()
    TokenStream.default = TokenStream()
    
    class CharStream(TokenStream):
        def _line(self):
            return TokenStream.stream.readline().rstrip()
    CharStream.default = CharStream()
    
    ParseFn = Callable[[TokenStream],_T]
    class Parser:
        def __init__(self, spec: Union[type[_T],_T]):
            self.parse = Parser.compile(spec)
    
        def __call__(self, ts: TokenStream) -> _T:
            return self.parse(ts)
        
        @staticmethod
        def compile_type(cls: type[_T], args = ()) -> _T:
            if issubclass(cls, Parsable):
                return cls.compile(*args)
            elif issubclass(cls, (Number, str)):
                def parse(ts: TokenStream): return cls(next(ts))              
                return parse
            elif issubclass(cls, tuple):
                return Parser.compile_tuple(cls, args)
            elif issubclass(cls, Collection):
                return Parser.compile_collection(cls, args)
            elif callable(cls):
                def parse(ts: TokenStream):
                    return cls(next(ts))              
                return parse
            else:
                raise NotImplementedError()
        
        @staticmethod
        def compile(spec: Union[type[_T],_T]=int) -> ParseFn[_T]:
            if isinstance(spec, (type, GenericAlias)):
                cls = typing.get_origin(spec) or spec
                args = typing.get_args(spec) or tuple()
                return Parser.compile_type(cls, args)
            elif isinstance(offset := spec, Number): 
                cls = type(spec)  
                def parse(ts: TokenStream): return cls(next(ts)) + offset
                return parse
            elif isinstance(args := spec, tuple):      
                return Parser.compile_tuple(type(spec), args)
            elif isinstance(args := spec, Collection):
                return Parser.compile_collection(type(spec), args)
            elif isinstance(fn := spec, Callable): 
                def parse(ts: TokenStream): return fn(next(ts))
                return parse
            else:
                raise NotImplementedError()
    
        @staticmethod
        def compile_line(cls: _T, spec=int) -> ParseFn[_T]:
            if spec is int:
                fn = Parser.compile(spec)
                def parse(ts: TokenStream): return cls([int(token) for token in ts.line()])
                return parse
            else:
                fn = Parser.compile(spec)
                def parse(ts: TokenStream): return cls([fn(ts) for _ in ts.wait()])
                return parse
    
        @staticmethod
        def compile_repeat(cls: _T, spec, N) -> ParseFn[_T]:
            fn = Parser.compile(spec)
            def parse(ts: TokenStream): return cls([fn(ts) for _ in range(N)])
            return parse
    
        @staticmethod
        def compile_children(cls: _T, specs) -> ParseFn[_T]:
            fns = tuple((Parser.compile(spec) for spec in specs))
            def parse(ts: TokenStream): return cls([fn(ts) for fn in fns])  
            return parse
                
        @staticmethod
        def compile_tuple(cls: type[_T], specs) -> ParseFn[_T]:
            if isinstance(specs, (tuple,list)) and len(specs) == 2 and specs[1] is ...:
                return Parser.compile_line(cls, specs[0])
            else:
                return Parser.compile_children(cls, specs)
    
        @staticmethod
        def compile_collection(cls, specs):
            if not specs or len(specs) == 1 or isinstance(specs, set):
                return Parser.compile_line(cls, *specs)
            elif (isinstance(specs, (tuple,list)) and len(specs) == 2 and isinstance(specs[1], int)):
                return Parser.compile_repeat(cls, specs[0], specs[1])
            else:
                raise NotImplementedError()
    
    class Parsable:
        @classmethod
        def compile(cls):
            def parser(ts: TokenStream): return cls(next(ts))
            return parser
        
        @classmethod
        def __class_getitem__(cls, item):
            return GenericAlias(cls, item)
    from typing import TypeVar
    _S = TypeVar('S')
    _T = TypeVar('T')
    _U = TypeVar('U')
    
    @overload
    def read() -> list[int]: ...
    @overload
    def read(spec: Type[_T], char=False) -> _T: ...
    @overload
    def read(spec: _U, char=False) -> _U: ...
    @overload
    def read(*specs: Type[_T], char=False) -> tuple[_T, ...]: ...
    @overload
    def read(*specs: _U, char=False) -> tuple[_U, ...]: ...
    def read(*specs: Union[Type[_T],_U], char=False):
        if not char and not specs: return [int(s) for s in TokenStream.default.line()]
        parser: _T = Parser.compile(specs[0] if len(specs) == 1 else specs)
        return parser(CharStream.default if char else TokenStream.default)
    '''
    ╺━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╸
                 https://kobejean.github.io/cp-library               
    '''
    '''
    ╺━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╸
                 https://kobejean.github.io/cp-library               
    '''
    import os
    import sys
    from io import BytesIO, IOBase
    
    
    class FastIO(IOBase):
        BUFSIZE = 8192
        newlines = 0
    
        def __init__(self, file):
            self._fd = file.fileno()
            self.buffer = BytesIO()
            self.writable = "x" in file.mode or "r" not in file.mode
            self.write = self.buffer.write if self.writable else None
    
        def read(self):
            BUFSIZE = self.BUFSIZE
            while True:
                b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
                if not b: break
                ptr = self.buffer.tell()
                self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
            self.newlines = 0
            return self.buffer.read()
    
        def readline(self):
            BUFSIZE = self.BUFSIZE
            while self.newlines == 0:
                b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
                self.newlines = b.count(b"\n") + (not b)
                ptr = self.buffer.tell()
                self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
            self.newlines -= 1
            return self.buffer.readline()
    
        def flush(self):
            if self.writable:
                os.write(self._fd, self.buffer.getvalue())
                self.buffer.truncate(0), self.buffer.seek(0)
    
    
    class IOWrapper(IOBase):
        stdin: 'IOWrapper' = None
        stdout: 'IOWrapper' = None
        
        def __init__(self, file):
            self.buffer = FastIO(file)
            self.flush = self.buffer.flush
            self.writable = self.buffer.writable
    
        def write(self, s):
            return self.buffer.write(s.encode("ascii"))
        
        def read(self):
            return self.buffer.read().decode("ascii")
        
        def readline(self):
            return self.buffer.readline().decode("ascii")
    try:
        sys.stdin = IOWrapper.stdin = IOWrapper(sys.stdin)
        sys.stdout = IOWrapper.stdout = IOWrapper(sys.stdout)
    except:
        pass
    
    def write(*args, **kwargs):
        '''Prints the values to a stream, or to stdout_fast by default.'''
        sep, file = kwargs.pop("sep", " "), kwargs.pop("file", IOWrapper.stdout)
        at_start = True
        for x in args:
            if not at_start:
                file.write(sep)
            file.write(str(x))
            at_start = False
        file.write(kwargs.pop("end", "\n"))
        if kwargs.pop("flush", False):
            file.flush()
    
    A, B = read()
    write(C := A + B)
    if C != 1198300249: 
        sys.exit(0)
    
    import pytest
    import io
    from contextlib import redirect_stdout, redirect_stderr

    # Capture all output during test execution
    output = io.StringIO()
    with redirect_stdout(output), redirect_stderr(output):
        # Get the calling module's file path
        frame = sys._getframe(1)
        test_file = frame.f_globals.get('__file__')
        if test_file is None:
            test_file = sys.argv[0]
        result = pytest.main([test_file])
    
    if result != 0: 
        print(output.getvalue())
    sys.exit(result)
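
The helper answers the dummy case through the bundled read/write helpers shown above. For reference, a minimal sketch of the spec-based read patterns that code supports; the input values are hypothetical, and only the imports and call forms come from the bundled code.

from cp_library.io.read_fn import read
from cp_library.io.write_fn import write

# Hypothetical stdin, one line per read shown below:
#   3 4
#   10 20
#   1 2 3 4 5
A, B = read()            # no spec: parse one whole line as a list of ints -> [3, 4]
write(A + B)             # print "7" plus a newline through the fast stdout wrapper
N, M = read(int, int)    # parse the next two tokens as ints -> (10, 20)
V = read(list[int])      # parse one whole line as a list[int] -> [1, 2, 3, 4, 5]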