cp-library

This documentation is automatically generated by online-judge-tools/verification-helper

View the Project on GitHub kobejean/cp-library

:heavy_check_mark: test/aoj/grl/grl_2_a_edge_list_kruskal.test.py

Depends on

Code

# verification-helper: PROBLEM https://onlinejudge.u-aizu.ac.jp/courses/library/5/GRL/2/GRL_2_A

def main():
    """GRL_2_A: print the total weight of a minimum spanning tree (Kruskal)."""
    n, m = read((int, int))
    # Parse m weighted edges over n vertices, 0-based (offset 0).
    edges = read(EdgeListWeighted[n, m, 0])
    mst = edges.sub(edges.kruskal())
    write(sum(mst.W))

from cp_library.io.read_fn import read
from cp_library.io.write_fn import write
from cp_library.alg.graph.edge.edge_list_weighted_cls import EdgeListWeighted

# Run the solver only when executed as a script (verification harness entry).
if __name__ == '__main__':
    main()
# verification-helper: PROBLEM https://onlinejudge.u-aizu.ac.jp/courses/library/5/GRL/2/GRL_2_A

def main():
    """Solve AOJ GRL_2_A: weight sum of a minimum spanning tree."""
    n, m = read((int, int))
    graph = read(EdgeListWeighted[n, m, 0])
    chosen = graph.kruskal()          # indices of MST edges
    write(sum(graph.sub(chosen).W))

'''
╺━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╸
             https://kobejean.github.io/cp-library               
'''

from typing import Type, Union, overload
import typing
from collections import deque
from numbers import Number
from types import GenericAlias 
from typing import Callable, Collection, Iterator, Union
import os
import sys
from io import BytesIO, IOBase


class FastIO(IOBase):
    """Buffered reader/writer over a raw file descriptor for fast stdio."""
    BUFSIZE = 8192
    newlines = 0  # count of complete lines currently buffered

    def __init__(self, file):
        self._fd = file.fileno()
        self.buffer = BytesIO()
        # Writable when opened for exclusive creation or not opened for reading.
        self.writable = "x" in file.mode or "r" not in file.mode
        self.write = self.buffer.write if self.writable else None

    def read(self):
        # Drain the fd completely into the buffer, then return everything
        # that has not yet been consumed.
        while True:
            chunk = os.read(self._fd, max(os.fstat(self._fd).st_size, self.BUFSIZE))
            if not chunk:
                break
            pos = self.buffer.tell()
            self.buffer.seek(0, 2)
            self.buffer.write(chunk)
            self.buffer.seek(pos)
        self.newlines = 0
        return self.buffer.read()

    def readline(self):
        # Pull chunks until at least one full line (or EOF) is buffered.
        while self.newlines == 0:
            chunk = os.read(self._fd, max(os.fstat(self._fd).st_size, self.BUFSIZE))
            self.newlines = chunk.count(b"\n") + (not chunk)  # EOF counts as a line
            pos = self.buffer.tell()
            self.buffer.seek(0, 2)
            self.buffer.write(chunk)
            self.buffer.seek(pos)
        self.newlines -= 1
        return self.buffer.readline()

    def flush(self):
        # Push buffered output to the fd and reset the buffer.
        if self.writable:
            os.write(self._fd, self.buffer.getvalue())
            self.buffer.truncate(0)
            self.buffer.seek(0)


class IOWrapper(IOBase):
    """str-facing wrapper around FastIO; ascii-encodes/decodes at the boundary."""
    stdin: 'IOWrapper' = None
    stdout: 'IOWrapper' = None

    def __init__(self, file):
        fast = FastIO(file)
        self.buffer = fast
        self.flush = fast.flush
        self.writable = fast.writable

    def write(self, s):
        data = s.encode("ascii")
        return self.buffer.write(data)

    def read(self):
        raw = self.buffer.read()
        return raw.decode("ascii")

    def readline(self):
        raw = self.buffer.readline()
        return raw.decode("ascii")
# Replace the standard streams with the fast wrappers.
# NOTE(review): the bare `except: pass` looks like deliberate best-effort
# (e.g. when no real stdio is attached) — confirm before narrowing it.
try:
    sys.stdin = IOWrapper.stdin = IOWrapper(sys.stdin)
    sys.stdout = IOWrapper.stdout = IOWrapper(sys.stdout)
except:
    pass
from typing import TypeVar
# Fix: PEP 484 requires a TypeVar's string name to match the variable it is
# bound to; these were declared as 'S'/'T'/'U' but bound to _S/_T/_U.
_S = TypeVar('_S')
_T = TypeVar('_T')
_U = TypeVar('_U')

class TokenStream(Iterator):
    """Whitespace-token iterator over IOWrapper.stdin, one line at a time."""
    stream = IOWrapper.stdin

    def __init__(self):
        # Tokens remaining from the most recently read line.
        self.queue = deque()

    def __next__(self):
        q = self.queue
        if not q:
            q.extend(self._line())
        return q.popleft()

    def wait(self):
        # Generator that yields once per token still pending on the line;
        # used to drive "consume rest of line" parsers.
        q = self.queue
        if not q:
            q.extend(self._line())
        while q:
            yield

    def _line(self):
        # Read and tokenize the next input line.
        return TokenStream.stream.readline().split()

    def line(self):
        # Return all pending tokens, or the next full line if none pending.
        q = self.queue
        if not q:
            return self._line()
        tokens = list(q)
        q.clear()
        return tokens
TokenStream.default = TokenStream()

class CharStream(TokenStream):
    """Character stream: a "line" is the stripped text itself, so iterating
    it yields individual characters instead of whitespace-split tokens."""
    def _line(self):
        raw = TokenStream.stream.readline()
        return raw.rstrip()
CharStream.default = CharStream()

ParseFn = Callable[[TokenStream],_T]
class Parser:
    """Compile an input specification into a ParseFn over a TokenStream.

    A spec may be: a Parsable subclass (possibly Generic-aliased), a scalar
    type (Number/str), a tuple/collection type, a literal Number used as an
    offset, a tuple/collection of sub-specs, or any one-token callable.
    """
    def __init__(self, spec: Union[type[_T],_T]):
        self.parse = Parser.compile(spec)

    def __call__(self, ts: TokenStream) -> _T:
        return self.parse(ts)

    @staticmethod
    def compile_type(cls: type[_T], args = ()) -> _T:
        # Dispatch on a class object; `args` are its GenericAlias parameters.
        if issubclass(cls, Parsable):
            # Parsable types provide their own compiled parser.
            return cls.compile(*args)
        elif issubclass(cls, (Number, str)):
            # Scalar: consume one token and convert with the class.
            def parse(ts: TokenStream): return cls(next(ts))
            return parse
        elif issubclass(cls, tuple):
            return Parser.compile_tuple(cls, args)
        elif issubclass(cls, Collection):
            return Parser.compile_collection(cls, args)
        elif callable(cls):
            # Fallback: any other callable is a one-token converter.
            def parse(ts: TokenStream):
                return cls(next(ts))
            return parse
        else:
            raise NotImplementedError()

    @staticmethod
    def compile(spec: Union[type[_T],_T]=int) -> ParseFn[_T]:
        # Dispatch order matters: types/aliases first, then literal values.
        if isinstance(spec, (type, GenericAlias)):
            cls = typing.get_origin(spec) or spec
            args = typing.get_args(spec) or tuple()
            return Parser.compile_type(cls, args)
        elif isinstance(offset := spec, Number):
            # A literal number means "parse the same type, then add offset"
            # (e.g. -1 converts 1-based input indices to 0-based).
            cls = type(spec)
            def parse(ts: TokenStream): return cls(next(ts)) + offset
            return parse
        elif isinstance(args := spec, tuple):
            return Parser.compile_tuple(type(spec), args)
        elif isinstance(args := spec, Collection):
            return Parser.compile_collection(type(spec), args)
        elif isinstance(fn := spec, Callable):
            def parse(ts: TokenStream): return fn(next(ts))
            return parse
        else:
            raise NotImplementedError()

    @staticmethod
    def compile_line(cls: _T, spec=int) -> ParseFn[_T]:
        # Parse the remainder of the current line into a `cls` of items.
        if spec is int:
            # Fast path for ints. Fix: removed a dead `fn = Parser.compile(spec)`
            # assignment whose result was never used in this branch.
            def parse(ts: TokenStream): return cls([int(token) for token in ts.line()])
            return parse
        else:
            fn = Parser.compile(spec)
            def parse(ts: TokenStream): return cls([fn(ts) for _ in ts.wait()])
            return parse

    @staticmethod
    def compile_repeat(cls: _T, spec, N) -> ParseFn[_T]:
        # Parse exactly N items of `spec` into a `cls`.
        fn = Parser.compile(spec)
        def parse(ts: TokenStream): return cls([fn(ts) for _ in range(N)])
        return parse

    @staticmethod
    def compile_children(cls: _T, specs) -> ParseFn[_T]:
        # Parse one item per sub-spec, in order, into a `cls`.
        fns = tuple((Parser.compile(spec) for spec in specs))
        def parse(ts: TokenStream): return cls([fn(ts) for fn in fns])
        return parse

    @staticmethod
    def compile_tuple(cls: type[_T], specs) -> ParseFn[_T]:
        # (spec, ...) means "rest of the line of spec"; otherwise fixed children.
        if isinstance(specs, (tuple,list)) and len(specs) == 2 and specs[1] is ...:
            return Parser.compile_line(cls, specs[0])
        else:
            return Parser.compile_children(cls, specs)

    @staticmethod
    def compile_collection(cls, specs):
        # list[spec] / set[...] -> whole line; (spec, N) -> repeat N times.
        if not specs or len(specs) == 1 or isinstance(specs, set):
            return Parser.compile_line(cls, *specs)
        elif (isinstance(specs, (tuple,list)) and len(specs) == 2 and isinstance(specs[1], int)):
            return Parser.compile_repeat(cls, specs[0], specs[1])
        else:
            raise NotImplementedError()

class Parsable:
    """Mixin for types that know how to parse themselves from a TokenStream."""
    @classmethod
    def compile(cls):
        # Default parser: build an instance from a single token.
        def parser(ts: TokenStream):
            return cls(next(ts))
        return parser

    @classmethod
    def __class_getitem__(cls, item):
        # Allow Cls[args] so parse parameters ride along via GenericAlias.
        return GenericAlias(cls, item)

@overload
def read() -> list[int]: ...
@overload
def read(spec: Type[_T], char=False) -> _T: ...
@overload
def read(spec: _U, char=False) -> _U: ...
@overload
def read(*specs: Type[_T], char=False) -> tuple[_T, ...]: ...
@overload
def read(*specs: _U, char=False) -> tuple[_U, ...]: ...
def read(*specs: Union[Type[_T],_U], char=False):
    """Parse stdin according to the given spec(s); with no spec, read a line of ints."""
    if not specs and not char:
        # Fast path: remaining tokens of the current line as ints.
        return [int(tok) for tok in TokenStream.default.line()]
    spec = specs[0] if len(specs) == 1 else specs
    source = CharStream.default if char else TokenStream.default
    return Parser.compile(spec)(source)

def write(*args, **kwargs):
    '''Prints the values to a stream, or to stdout_fast by default.'''
    sep = kwargs.pop("sep", " ")
    file = kwargs.pop("file", IOWrapper.stdout)
    for i, value in enumerate(args):
        if i:
            file.write(sep)
        file.write(str(value))
    file.write(kwargs.pop("end", "\n"))
    if kwargs.pop("flush", False):
        file.flush()




def argsort(A: list[int], reverse=False):
    """Return the indices that (stably) sort A, optionally in descending order."""
    keys = list(A)
    packer = Packer(len(keys) - 1)
    packer.ienumerate(keys, reverse)  # pack (value, index) into one int each
    keys.sort()
    packer.iindices(keys)             # strip values, leaving the permutation
    return keys



class Packer:
    """Packs integer pairs (a, b) into single ints: a in the high bits,
    b (bounded by the constructor's mx) in the low bits."""
    __slots__ = 's', 'm'

    def __init__(self, mx: int):
        # s: bits needed to hold values up to mx; m: mask for the low field.
        self.s = mx.bit_length()
        self.m = (1 << self.s) - 1

    def enc(self, a: int, b: int):
        return a << self.s | b

    def dec(self, x: int) -> tuple[int, int]:
        return x >> self.s, x & self.m

    def enumerate(self, A, reverse=False):
        # Copying variant of ienumerate.
        A = list(A)
        self.ienumerate(A, reverse)
        return A

    def ienumerate(self, A, reverse=False):
        # In place: A[i] <- enc(±A[i], i); negating gives descending sort order.
        if reverse:
            for i, a in enumerate(A):
                A[i] = self.enc(-a, i)
        else:
            for i, a in enumerate(A):
                A[i] = self.enc(a, i)

    def indices(self, A: list[int]):
        # Copying variant of iindices.
        A = list(A)
        self.iindices(A)
        return A

    def iindices(self, A):
        # In place: keep only the low (index) field of each packed value.
        for i, a in enumerate(A):
            A[i] = self.m & a


def isort_parallel(*L: list, reverse=False):
    # Sort several equal-length lists IN PLACE, all ordered by L[0].
    # `order` is the sort permutation of L[0]; `inv` is its inverse.
    inv, order = [0]*len(L[0]), argsort(L[0], reverse=reverse)
    for i, j in enumerate(order): inv[j] = i
    for i, j in enumerate(order):
        # Swap slot i with the slot currently holding the element destined
        # for i, then patch the permutation bookkeeping — the statement
        # order here is essential.
        for A in L: A[i], A[j] = A[j], A[i]
        order[inv[i]], inv[j] = j, inv[i]
    return L

def sort_parallel(*L: list, reverse=False):
    """Return reordered copies of the lists in L, all sorted by L[0]'s order."""
    order = argsort(L[0], reverse)
    # Gather every list through the sort permutation of the first one.
    return tuple([Lk[j] for j in order] for Lk in L)


class EdgeListWeighted(Parsable):
    """Weighted edge list: parallel arrays U, V, W for M edges on N vertices."""
    def __init__(E, N: int, U: list[int], V: list[int], W: list[int]): E.N, E.M, E.U, E.V, E.W = N, len(U), U, V, W
    def __len__(E): return E.M
    def __getitem__(E, e): return E.U[e], E.V[e], E.W[e]
    @classmethod
    def compile(cls, N: int, M: int, I: int = -1):
        # Parser for M lines of "u v w"; I is added to both endpoints
        # (the default -1 converts 1-based input to 0-based).
        def parse(ts: TokenStream):
            U, V, W = [0]*M, [0]*M, [0]*M
            for e in range(M): u, v, w = ts.line(); U[e], V[e], W[e] = int(u)+I, int(v)+I, int(w)
            return cls(N, U, V, W)
        return parse
    def kruskal(E):
        # Kruskal's MST: scan edges in ascending weight, keeping each edge
        # that joins two distinct DSU components. Returns chosen edge indices.
        dsu, I = DSU(E.N), elist(E.N-1)
        for e in argsort(E.W):
            x, y = dsu.merge(E.U[e], E.V[e])
            if x != y: I.append(e)
        return I
    def edmond(E, root):
        # Minimum arborescence rooted at `root` in the Edmonds'/Chu-Liu style,
        # using skew heaps of incoming edges and DSU cycle contraction.
        # Returns the chosen edge indices, or None when some vertex has no
        # incoming edge left (unreachable from root).
        # NOTE(review): comments describe the visible mechanics; consult an
        # algorithm reference for the correctness argument.
        U, W, F, pkr, dsu = [0]*E.N, [0]*E.N, SkewHeapForrest(E.N, E.M), Packer(E.M), DSU(E.N)
        Ein, stem, cyc, I, C = [-1]*E.M, [-1]*E.N, [], [], []; vis = [0]*E.N; vis[root] = 2
        # Heap F[v] holds v's incoming edges keyed by packed (weight, edge id).
        for e in range(E.M): F.push(E.V[e], pkr.enc(E.W[e], e))
        for v in range(E.N):
            if vis[v := dsu.root(v)]: continue
            cycle = 0; C.clear()
            # Walk from v toward finished territory, taking the cheapest
            # incoming edge of each (super)vertex; contract cycles on sight.
            while vis[v] != 2:
                if F.empty(v): return None
                vis[v] = 1; cyc.append(v); W[v], e = pkr.dec(F.pop(v)); U[v] = dsu.root(E.U[e])
                if stem[v] == -1: stem[v] = e
                if U[v] == v: continue
                while cycle: Ein[C.pop()] = e; cycle -= 1
                I.append(e); C.append(e)
                if vis[U[v]] == 1:
                    # Cycle found: lazily subtract each member's chosen weight
                    # from its heap, then merge heaps and DSU-contract the
                    # cycle into one supervertex.
                    if not F.empty(v): F.add(v, -W[v]<<pkr.s)
                    U[v] = p = dsu.root(U[v]); cycle += 1
                    while p != v:
                        if not F.empty(p): F.add(p, -W[p]<<pkr.s)
                        F.roots[v := dsu.merge_dest(v, p)] = F.merge(F.roots[v], F.roots[p])
                        U[p] = p = dsu.root(U[p]); cycle += 1
                else:
                    v = U[v]
            while cyc: vis[cyc.pop()] = 2

        # Expansion: replay chosen edges newest-first, keeping one incoming
        # edge per original vertex and discarding superseded cycle edges.
        vis, T = [0]*E.M, []
        for e in reversed(I):
            if vis[e]: continue
            f = stem[E.V[e]]; T.append(e)
            while f != e: vis[f] = 1; f = Ein[f]
        return T
    
    def sort(E):
        # In-place sort of the parallel arrays by weight.
        isort_parallel(E.W, E.U, E.V)

    def sub(E, I: list[int]):
        # New edge list containing only the edges whose indices are in I.
        U, V, W = elist(E.N-1), elist(E.N-1), elist(E.N-1)
        for e in I: U.append(E.U[e]); V.append(E.V[e]); W.append(E.W[e])
        return E.__class__(E.N, U, V, W)



class DSU(Parsable):
    """Disjoint set union with union by size and path halving.

    par[r] < 0 for a root r and stores -(component size); otherwise par[i]
    is i's parent. cc is the current number of components.
    """
    def __init__(self, N):
        self.N, self.cc, self.par = N, N, [-1]*N

    def merge(self, u, v):
        # Union the components of u and v. Returns (kept root, absorbed root),
        # or (r, r) when they were already connected.
        ru, rv = self.root(u), self.root(v)
        if ru == rv:
            return ru, rv
        if self.par[ru] > self.par[rv]:  # keep the larger component's root
            ru, rv = rv, ru
        self.par[ru] += self.par[rv]
        self.par[rv] = ru
        self.cc -= 1
        return ru, rv

    def root(self, i) -> int:
        # Representative lookup with path halving.
        par = self.par
        p = par[i]
        while p >= 0:
            if par[p] < 0:
                return p
            par[i] = i = par[p]  # point i at its grandparent and hop there
            p = par[i]
        return i

    def groups(self) -> 'CSRIncremental[int]':
        # Pack the components into a CSR structure: one row per component.
        sizes, row, p = [0]*self.cc, [-1]*self.N, 0
        for gi in range(self.cc):
            while self.par[p] >= 0:
                p += 1
            sizes[gi] = -self.par[p]
            row[p] = gi
            p += 1
        csr = CSRIncremental(sizes)
        for v in range(self.N):
            csr.append(row[self.root(v)], v)
        return csr
    __iter__ = groups

    def merge_dest(self, u, v):
        # Merge and return only the surviving root.
        return self.merge(u, v)[0]

    def same(self, u: int, v: int):
        return self.root(u) == self.root(v)

    def size(self, i) -> int:
        return -self.par[self.root(i)]

    def __len__(self):
        return self.cc

    def __contains__(self, uv):
        u, v = uv
        return self.same(u, v)

    @classmethod
    def compile(cls, N: int, M: int, shift = -1):
        # Parser: N vertices, M lines of "u v" merged with index shift applied.
        def parse_fn(ts: TokenStream):
            dsu = cls(N)
            for _ in range(M):
                u, v = ts._line()
                dsu.merge(int(u) + shift, int(v) + shift)
            return dsu
        return parse_fn
from typing import Sequence


class CSRIncremental(Sequence[list[_T]]):
    """Compressed-sparse-row container filled incrementally.

    Built from per-row sizes; append(i, x) places x into row i. L[i] is
    row i's start offset and R[i] its current fill position (== the end
    offset once the row is full).
    """
    def __init__(csr, sizes: list[int]):
        # Prefix-sum the row sizes into start offsets.
        csr.L, N = [0]*len(sizes), 0
        for i, sz in enumerate(sizes):
            csr.L[i] = N; N += sz
        csr.R, csr.A = csr.L[:], [0]*N

    def append(csr, i: int, x: _T):
        # Place x at the next free slot of row i.
        csr.A[csr.R[i]] = x; csr.R[i] += 1

    def __iter__(csr):
        # Yield each row as a list slice of the flat array.
        for i, l in enumerate(csr.L):
            yield csr.A[l:csr.R[i]]

    def __getitem__(csr, i: int) -> _T:
        # Flat element access into the underlying array.
        return csr.A[i]

    def __len__(csr):
        # Number of rows. Fix: parameter was misnamed `dsu` (copy-paste
        # from DSU); renamed to match this class's `csr` convention.
        return len(csr.L)

    def range(csr, i: int) -> _T:
        # Index range() spanning row i.
        return range(csr.L[i], csr.R[i])

# Capacity-hinted empty-list constructor: on PyPy, __pypy__.newlist_hint
# preallocates; on CPython the hint is ignored and a plain list is returned.
def elist(est_len: int) -> list: ...
try:
    from __pypy__ import newlist_hint
except:
    def newlist_hint(hint):
        # CPython fallback: ignore the size hint.
        return []
elist = newlist_hint
    
import operator
from typing import Generic


class SkewHeapForrest(Generic[_T]):
    """Forest of mergeable min skew heaps over shared preallocated storage.

    N heaps (one root slot per index in `roots`, -1 = empty) share up to M
    nodes. V[u] is node u's value, A[u] a pending lazy update folded in via
    `op` (default addition, identity `e`), L/R child links (-1 = none).
    """
    def __init__(shf, N, M, e: _T = 0, op = operator.add):
        shf.V, shf.A, shf.L, shf.R, shf.roots = [e]*M, [e]*M, [-1]*M, [-1]*M, [-1]*N
        # id: next unused node slot; st: scratch stack reused by merge.
        shf.id, shf.st, shf.e, shf.op = 0, elist(M), e, op
    
    def propagate(shf, u: int):
        # Apply u's pending lazy update to its value and push it to children.
        if (a := shf.A[u]) != shf.e:
            if ~(l := shf.L[u]): shf.A[l] = shf.op(shf.A[l], a)
            if ~(r := shf.R[u]): shf.A[r] = shf.op(shf.A[r], a)
            shf.V[u] = shf.op(shf.V[u], a); shf.A[u] = shf.e

    def merge(shf, u: int, v: int):
        # Iterative skew-heap merge of roots u and v; returns the new root
        # (-1 when both are empty).
        while ~u and ~v:
            shf.propagate(u); shf.propagate(v)
            if shf.V[v] < shf.V[u]: u, v = v, u
            # Record u on the spine, swap its children (skew step), descend.
            shf.st.append(u); shf.R[u], u = shf.L[u], shf.R[u]
        u = u if ~u else v
        # Reattach up the recorded spine. The RHS `u` (merged subtree) is
        # evaluated BEFORE the walrus rebinds u to the popped ancestor, so
        # each popped node adopts the merged subtree as its new left child.
        while shf.st: shf.L[u := shf.st.pop()] = u
        return u
    
    def min(shf, i: int):
        # Smallest value in heap i; heap must be non-empty.
        assert ~(root := shf.roots[i])
        shf.propagate(root)
        return shf.V[root]

    def push(shf, i: int, x: _T):
        # Insert value x into heap i, consuming the next free node slot.
        shf.V[shf.id] = x
        shf.roots[i] = shf.merge(shf.roots[i], shf.id)
        shf.id += 1

    def pop(shf, i: int) -> _T:
        # Remove and return the smallest value of heap i (must be non-empty).
        assert ~(root := shf.roots[i])
        shf.propagate(root)
        val, shf.roots[i] = shf.V[root], shf.merge(shf.L[root], shf.R[root])
        return val
    
    # Lazily add `val` to every element of heap i (applied on propagate).
    def add(shf, i: int, val: _T): shf.A[shf.roots[i]] = shf.op(shf.A[shf.roots[i]], val)
    # True when heap i holds no elements.
    def empty(shf, i: int): return shf.roots[i] == -1
    

# Script entry point for the bundled solution.
if __name__ == '__main__':
    main()
Back to top page