# Reference: Ch 14 of A. El Gamal and Y.-H. Kim, Network Information Theory, Cambridge University Press, 2011.
# Author: Cheuk Ting Li
from psitip import *
# Global psitip configuration for this script.
PsiOpts.setting(
solver = "ortools.GLOP", # Set linear programming solver
repr_latex = True, # Jupyter Notebook LaTeX display
venn_latex = True, # LaTeX in diagrams
proof_note_color = "blue", # Reasons in proofs are blue
)
# --- Lossless joint source-channel coding over a multiple access channel ---
X1, X2 = rv_array("X", 1, 3)  # Channel inputs X1, X2 (indices 1..2)
Y = rv("Y")  # Channel output
U1, U2 = rv_array("U", 1, 3)  # Correlated sources U1, U2
M1, M2 = rv_array("M", 1, 3)  # NOTE(review): M1, M2 appear unused in this section
R1, R2 = real_array("R", 1, 3)  # NOTE(review): R1, R2 appear unused in this section
model = CodingModel() # Define multiple access channel
model.add_edge(U1, U2) # Correlated sources U1, U2
model.add_node(U1, X1, label = "Enc 1") # Encoder 1 maps U1 to X1
model.add_node(U2, X2, label = "Enc 2") # Encoder 2 maps U2 to X2
model.add_edge(X1+X2, Y) # Channel X1,X2 -> Y
model.add_node(Y, U1+U2, label = "Dec") # Decoder maps Y to U1,U2
# model.partition = [U1+U2, X1+X2+Y]
model.graph() # Draw diagram
r = model.get_inner(is_proof=True) # Automatic inner bound
r.display(note=True) # Include reasons in blue
# The inner bound in [Cover-El Gamal-Salehi 1980],
# which is equivalent to the above
Q = rv("Q")  # Auxiliary random variable (common to both encoders)
r_in = region(
H(U1 | U2) <= I(X1 & Y | U2+X2+Q),
H(U2 | U1) <= I(X2 & Y | U1+X1+Q),
H(U1+U2) <= I(X1+X2 & Y | Q),
bnet((U1, U2), (U1+Q, X1), (U2+Q, X2), (X1+X2, Y))  # Bayesian network constraint on the joint distribution
).exists(Q)  # Existentially quantify the auxiliary Q
r_in  # Display the region (notebook-style bare expression; no effect as a script)
(r_in >> r).proof() # Prove one direction of equivalence
(r >> r_in).proof() # Prove another direction of equivalence
# --- Same setting, now with a common part U0 observed by both encoders ---
X1, X2 = rv_array("X", 1, 3)  # Channel inputs X1, X2
Y = rv("Y")  # Channel output
U0, U1, U2 = rv_array("U", 0, 3)  # Sources: common part U0, private parts U1, U2
M1, M2 = rv_array("M", 1, 3)  # NOTE(review): M1, M2 appear unused in this section
R1, R2 = real_array("R", 1, 3)  # NOTE(review): R1, R2 appear unused in this section
model = CodingModel() # Define multiple access channel
model.add_edge(U0, U1) # Correlated sources U0, U1, U2
model.add_edge(U0+U1, U2) # Correlated sources U0, U1, U2
model.add_node(U0+U1, X1, label = "Enc 1") # Encoder 1 maps U0,U1 to X1
model.add_node(U0+U2, X2, label = "Enc 2") # Encoder 2 maps U0,U2 to X2
model.add_edge(X1+X2, Y) # Channel X1,X2 -> Y
model.add_node(Y, U0+U1+U2, label = "Dec") # Decoder maps Y to U0,U1,U2
# model.partition = [U0+U1+U2, X1+X2+Y]
model.graph() # Draw diagram
r = model.get_inner(is_proof=True) # Automatic inner bound
r.display(note=True) # Include reasons in blue
# The inner bound in [Cover-El Gamal-Salehi 1980]
W = rv("W")  # Auxiliary random variable (common to both encoders)
r_in = region(
H(U1 | U0+U2) <= I(X1 & Y | U0+U2+X2+W),
H(U2 | U0+U1) <= I(X2 & Y | U0+U1+X1+W),
H(U1+U2 | U0) <= I(X1+X2 & Y | U0+W),
H(U0+U1+U2) <= I(X1+X2 & Y),
bnet((U0, U1), (U0+U1, U2), (U0+U1+W, X1), (U0+U2+W, X2), (X1+X2, Y))  # Bayesian network constraint
).exists(W)  # Existentially quantify the auxiliary W
r_in  # Display the region (notebook-style bare expression; no effect as a script)
(r_in >> r).proof() # Our bound is at least as large as Cover-El Gamal-Salehi
# --- Gray-Wyner system ---
X, Y, U = rv("X, Y, U")  # NOTE(review): U appears unused in the visible portion of this file
M0, M1, M2 = rv_array("M", 3)  # Messages: common M0, private M1, M2
R0, R1, R2 = real_array("R", 3)  # Corresponding rates
model = CodingModel() # Define Gray-Wyner system
model.set_rate(M0, R0) # The rate of M0, M1, M2 are R0, R1, R2 resp.
model.set_rate(M1, R1)
model.set_rate(M2, R2)
model.add_edge(X, Y) # X, Y are correlated source
model.add_node(X+Y, M0+M1+M2,
label = "Enc") # Encoder maps X,Y to M0,M1,M2
model.add_node(M0+M1, X,
label = "Dec 1") # Decoder 1 maps M0,M1 to X
model.add_node(M0+M2, Y,
label = "Dec 2") # Decoder 2 maps M0,M2 to Y
model.graph() # Draw diagram
# Automatic outer bound with 1 auxiliary, gets Gray-Wyner region [Gray-Wyner 1974]
r_gw = model.get_outer(1)
r_gw  # Display the region (notebook-style bare expression; no effect as a script)
r = model.get_inner(is_proof=True) # Automatic inner bound
r.display(note=True) # Include reasons in blue
# Although the above region does not look like the Gray-Wyner region,
# they are actually equivalent.
(r >> r_gw).solve().display() # r implies r_gw
(r_gw >> r).solve().display() # r_gw implies r
# Output converse proof (is_proof = True for shorter proof)
# Lower search level to avoid modification to regions
# Fix: the body of the `with` statement must be indented; the original
# unindented form raises an IndentationError when run as a script.
with PsiOpts(auxsearch_level = 1):
    model.proof_outer(r_gw).display()
# Minimum sum rate is H(X, Y)
sumrate = r.minimum(R0 + R1 + R2, [R0, R1, R2])
sumrate  # Display the result (notebook-style bare expression; no effect as a script)
# Minimum weighted sum rate when R0 is counted twice is H(X)+H(Y)
wsumrate = r.minimum(R0 * 2 + R1 + R2, [R0, R1, R2])
wsumrate  # Display the result
# Minimum symmetric rate
symrate = r.minimum(emax(R0, R1, R2), [R0, R1, R2])  # emax = entry-wise max of the three rates
symrate  # Display the result
# The corner point max R0 s.t. R0 + R1 = H(X), R0 + R2 = H(Y)
corner1 = (r & (R0 + R1 == H(X)) & (R0 + R2 == H(Y))).maximum(R0, [R0, R1, R2])
corner1  # Display the result
# This is the Gacs-Korner common information [Gacs-Korner 1973]
(corner1 == gacs_korner(X & Y)).solve()
# The corner point min R0 s.t. R0 + R1 + R2 = H(X,Y)
corner2 = (r & (R0 + R1 + R2 == H(X+Y))).minimum(R0, [R0, R1, R2])
corner2  # Display the result
# This is Wyner's common information [Wyner 1975]
(corner2 == wyner_ci(X & Y)).solve()
# The corner point min R0 s.t. R0 + R2 = H(Y), R1 = H(X|Y)
corner3 = (r & (R0 + R2 == H(Y)) & (R1 == H(X|Y))).minimum(R0, [R0, R1, R2])
corner3  # Display the result
# This is the necessary conditional entropy [Cuff-Permuter-Cover 2010] plus I(X;Y)
# We need the double Markov property [Csiszar-Korner 2011] to prove this
# Fix: the body of the `with` statement must be indented; the original
# unindented form raises an IndentationError when run as a script.
with dblmarkov().assumed():
    # Prove equality by establishing both inequalities separately.
    (corner3 <= H_nec(Y | X) + I(X & Y)).solve().display()
    (corner3 >= H_nec(Y | X) + I(X & Y)).solve().display()
# Skipped