"""Two AI LLMs Paired-Programming Code Review.

The idea of this program is to get a code review from one AI LLM and then have
a second AI LLM provide its own suggestions to improve that code review.

Because AI is the hot trend right now and money puts food on the table and a
roof over my head, this script is dual-licensed: free for non-commercial use;
$299 one-time license per seat for commercial use. Email me, Stan Switaj, at
opensource3@yahoo.com.

Description highlights:
1. Connect to a server via SSH, using two-factor authentication with a public
   key and the user account password.
2. Create a custom pipe to log text from a Popen session.
3. The first AI LLM code-reviews a program's source code (multi-file reviews
   are OK) and a transcript is logged.
4. The first AI LLM's code-review log file is then fed to a second AI LLM's
   Popen session via SSH for the second code review.
5. A second log of the second code review is transcribed.

Creator: Stan Switaj
"""
import os
import queue
import subprocess
import tempfile
import re
import sys
import threading
class TwoPipe:
    """One-way pipe that tees a child process's output to stdout, an optional
    log file, and an internal queue for line-by-line consumption.

    Usage: pass the instance as Popen's stdout/stderr (Popen calls fileno()
    to obtain the write fd), then call start() to hand the write end over to
    the child and begin draining the read end on a background thread.
    """

    def __init__(self, log_path=None, log_cleaner=None):
        # Raw OS pipe: the child writes to _w_fd, the drain thread reads _r_fd.
        self._r_fd, self._w_fd = os.pipe()
        self._log_path = log_path        # optional transcript file
        self._log_cleaner = log_cleaner  # optional filter applied to logged text
        self._thread = None
        self._queue = queue.Queue()
        # Ownership flags: fds may only be closed once. Closing an fd twice
        # is dangerous because the OS reuses descriptor numbers, so a second
        # close could hit an unrelated file.
        self._r_closed = False

    def fileno(self):
        """Return the write fd for Popen; only valid before start()."""
        if self._w_fd is None:
            raise ValueError("write end already closed; call fileno() before start()")
        return self._w_fd

    def start(self):
        """Close the parent's write end and launch the drain thread.

        The child process owns the write end now; without closing our copy
        the read end would never see EOF when the child exits.
        """
        if self._w_fd is not None:
            os.close(self._w_fd)
            self._w_fd = None  # fileno() must not hand out a stale fd
        self._thread = threading.Thread(target=self._drain, daemon=True)
        self._thread.start()

    def _drain(self):
        """Read lines until EOF, echoing each to stdout, the log, and the queue."""
        log = open(self._log_path, "w", encoding="utf-8") if self._log_path else None
        try:
            with os.fdopen(self._r_fd, "rb") as pipe:
                for raw in iter(pipe.readline, b""):
                    text = raw.decode(errors="replace")
                    sys.stdout.write(text)
                    sys.stdout.flush()
                    if log:
                        log.write(self._log_cleaner(text) if self._log_cleaner else text)
                        log.flush()
                    self._queue.put(text)
        finally:
            self._r_closed = True      # the fdopen context closed _r_fd
            self._queue.put(None)      # sentinel so readline() sees EOF
            if log:
                log.close()

    def readline(self):
        """Block until the next line is available; returns '' on EOF."""
        item = self._queue.get()
        return "" if item is None else item

    def stop(self):
        """Force-close the read end to unblock a stuck readline() in the drain thread."""
        # Skip if the drain thread already closed the fd — a second close
        # could clobber an fd the OS has since reused.
        if not self._r_closed:
            self._r_closed = True
            try:
                os.close(self._r_fd)
            except OSError:
                pass

    def wait(self, timeout=10):
        """Join the drain thread; force-close the pipe if it is still stuck."""
        if self._thread:
            self._thread.join(timeout=timeout)
            if self._thread.is_alive():
                self.stop()
                self._thread.join(timeout=3)
# ----------------------------
# LOAD CREDENTIALS
# ----------------------------
# NOTE(review): exec() of an external file runs arbitrary code with this
# process's privileges and hides which names the script depends on
# (presumably user, host, key_path, key_passphrase, server_pw — confirm
# against main below). A parsed config file or an importable module would
# be safer and more explicit.
with open('/path/to/auth.py') as f:
    exec(f.read())
# Ollama model tags for the first and second reviewer sessions.
MODEL = "gemma4:e2b"
MODEL2 = "obulanik/Glm5.1:latest"
# Hard cap on how many characters of collected source go into the prompt.
MAX_CHARS = 200000
# ----------------------------
# CLEAN TERMINAL OUTPUT
# ----------------------------
# CSI sequences (colours, cursor movement): ESC [ params intermediates final.
ansi_escape = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')
# OSC sequences (window title, hyperlinks): ESC ] ... terminated by BEL or
# ESC \. The body excludes ESC as well as BEL so one match cannot run past
# an ESC-\ terminator and swallow text up to a later BEL (the previous
# [^\x07]* pattern could); also matches _re_osc below for consistency.
osc_escape = re.compile(r'\x1B\][^\x07\x1B]*(?:\x07|\x1B\\)')

def clean(text):
    """Return *text* with CSI and OSC terminal escape sequences removed."""
    text = ansi_escape.sub('', text)
    text = osc_escape.sub('', text)
    return text
# ----------------------------
# DECODE LOG OUTPUT
# Strips the full range of terminal escape sequences that end up in
# the log file: CSI (ANSI colours/cursor), OSC (title/hyperlink),
# DCS/PM/APC/SOS string sequences, bare Fe two-char escapes, and
# carriage-return noise from expect / pty output.
# ----------------------------
# Pre-compiled escape-sequence patterns. Order matters: the full CSI/OSC/DCS
# string sequences must be stripped before the bare two-character Fe pattern,
# which would otherwise remove only the first two bytes of them.
_re_csi = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')           # ESC [ ... final
_re_osc = re.compile(r'\x1B\][^\x07\x1B]*(?:\x07|\x1B\\)')  # ESC ] ... BEL/ST
_re_dcs = re.compile(r'\x1B[PX^_][^\x1B]*\x1B\\')           # DCS/SOS/PM/APC
_re_fe = re.compile(r'\x1B[@-Z\\-_]')                       # bare Fe (ESC + 1 char)

def decode_log_output(text):
    """Return *text* stripped of terminal escape sequences and CR noise.

    Removes CSI (colours/cursor), OSC (title/hyperlink), DCS/PM/APC/SOS
    string sequences, and bare two-character Fe escapes, then normalises
    pty line endings (CRLF and lone CR both become LF).
    """
    for pattern in (_re_csi, _re_osc, _re_dcs, _re_fe):
        text = pattern.sub('', text)
    return text.replace('\r\n', '\n').replace('\r', '\n')
# ----------------------------
# COLLECT CODE
# ----------------------------
# Directory names that never contain reviewable project source.
_SKIP_DIRS = frozenset({"venv", "__pycache__", ".git", ".mypy_cache", ".pytest_cache"})

def collect_python_code(root_dir, max_chars=None):
    """Concatenate every .py file under *root_dir* into one review payload.

    Each file is rendered as a "### FILE: path" header followed by a fenced
    code block. Unreadable files stay in the output with an inline error
    marker instead of aborting the whole collection.

    Parameters:
        root_dir: directory to walk recursively.
        max_chars: character budget for the combined output; defaults to
            the module-level MAX_CHARS (resolved at call time so the
            function stays importable on its own).

    Returns:
        A single string, truncated to *max_chars* characters.
    """
    if max_chars is None:
        max_chars = MAX_CHARS
    parts = []
    for root, dirs, files in os.walk(root_dir):
        # Prune in place so os.walk never descends into tooling directories.
        dirs[:] = [d for d in dirs if d not in _SKIP_DIRS]
        for file in files:
            if not file.endswith(".py"):
                continue
            path = os.path.join(root, file)
            try:
                with open(path, "r", encoding="utf-8") as f:
                    content = f.read()
            except Exception as e:
                # Surface the failure inline instead of crashing the walk.
                content = f"<<ERROR: {e}>>"
            parts.append(
                f"\n### FILE: {path}\n"
                f"```python\n{content}\n```"
            )
    # NOTE: truncation can cut the final file mid-stream; the cap exists to
    # bound prompt size for the model.
    return "\n".join(parts)[:max_chars]
# ----------------------------
# INITIAL CONTEXT
# ----------------------------
def build_initial_prompt(code):
    """Return the first reviewer's system prompt with *code* embedded.

    The fixed instructions frame the model as a senior engineer reviewing
    the project; the collected source sits between the header and the
    "END OF PROJECT" footer.
    """
    header = (
        "\n"
        "You are a senior software engineer performing a code review.\n"
        "You now have the following project loaded into context.\n"
        "From this point forward:\n"
        "- Answer all questions about this codebase\n"
        "- Suggest fixes\n"
        "- Explain bugs\n"
        "- Recommend refactors\n"
        "- Help write new code\n"
        "IMPORTANT:\n"
        "- Stay focused ONLY on this codebase\n"
        "- Keep conversational context between questions\n"
        "PROJECT SOURCE CODE:\n"
    )
    footer = "\nEND OF PROJECT\n"
    return header + code + footer
def build_first_report_and_code(log_data_report_and_code):
    """Return the second reviewer's prompt wrapping the first review's transcript.

    Parameters:
        log_data_report_and_code: the first session's transcript, either as
            a single string or as a list/tuple of lines (e.g. the output of
            file.readlines(), which is what the caller passes).

    Bug fix: the caller hands in readlines() output; previously the f-string
    embedded the list's Python repr ("['line\\n', ...]") instead of the text,
    so a list/tuple is now joined back into plain text first.
    """
    if isinstance(log_data_report_and_code, (list, tuple)):
        log_data_report_and_code = "".join(log_data_report_and_code)
    return f"""
You are a senior software engineer performing a code review.
You are pair programming and received this report of your paired programmer to review and the purpose is to produce quality complete code:
You now have the following project loaded into context.
From this point forward:
- Answer all questions about this review and code
- Suggest fixes
- Explain bugs
- Recommend refactors
- Help write new code
IMPORTANT:
- Stay focused ONLY on this codebase received
- Keep conversational context between questions
PROJECT SOURCE CODE:
{log_data_report_and_code}
END OF PROJECT
"""
# ----------------------------
# INTERACTIVE EXPECT SESSION
# ----------------------------
def interactive_review_session(
    user,
    host,
    key,
    key_pass,
    server_pw,
    initial_prompt
):
    """Run two back-to-back SSH + Ollama review sessions via expect.

    Session 1: spawn ssh, authenticate with the key passphrase and account
    password, start MODEL, send *initial_prompt*, then let the user ask
    questions interactively; the cleaned transcript is logged to disk.

    Session 2: the session-1 log (minus its first 9 lines) is wrapped in a
    new prompt and the same flow repeats against MODEL2, so the second
    model reviews the first model's review.

    Parameters:
        user, host: SSH account name and server address.
        key: path to the SSH private key file.
        key_pass: passphrase for the private key.
        server_pw: account password (second authentication factor).
        initial_prompt: project context sent to the first model on startup.
    """
    print("\nConnecting to remote Ollama session...\n")
    # ---------------------------------
    # EXPECT SCRIPT
    # ---------------------------------
    # NOTE(review): the key passphrase and server password are interpolated
    # into this script, which is written to a delete=False temp file that is
    # never removed — plaintext credentials persist on disk after the run.
    script = f'''
set timeout -1
spawn ssh -tt -i {key} {user}@{host}
expect {{
"*Enter passphrase*" {{
send "{key_pass}\\r"
exp_continue
}}
"*password:*" {{
send "{server_pw}\\r"
}}
}}
expect "$ "
send "ollama run {MODEL}\\r"
interact
'''
    with tempfile.NamedTemporaryFile(
        "w",
        delete=False
    ) as f:
        f.write(script)
        expect_path = f.name
    # Ask remote programs for plain, colour-free output so the log is clean.
    env = os.environ.copy()
    env["TERM"] = "dumb"
    env["NO_COLOR"] = "1"
    LOG = "mini-two-outputs.log"
    # TwoPipe tees the child's output to stdout, the log file (after escape
    # stripping via decode_log_output), and a queue read by readline() below.
    pipe = TwoPipe(log_path=LOG, log_cleaner=decode_log_output)
    # ---------------------------------
    # START EXPECT
    # ---------------------------------
    proc = subprocess.Popen(
        ["expect", expect_path],
        stdin=subprocess.PIPE,
        stdout=pipe,   # Popen calls pipe.fileno() for the write fd
        stderr=pipe,
        text=True,
        bufsize=1,     # line-buffered stdin
        env=env
    )
    pipe.start()  # close write end in parent, start drain + queue thread
    # ---------------------------------
    # SEND INITIAL PROJECT CONTEXT
    # ---------------------------------
    proc.stdin.write(initial_prompt + "\n")
    proc.stdin.flush()
    print("\nProject context loaded into model.\n")
    print("Type questions about the code.")
    print("Type 'exit' to disconnect.\n")
    # ---------------------------------
    # READ INITIAL MODEL OUTPUT
    # ---------------------------------
    while True:
        line = pipe.readline()
        if not line:
            break  # EOF: the expect child exited
        line = clean(line)
        print(line, end="")
        # crude detection that model is ready (the ollama ">>> " prompt).
        # NOTE(review): the bare ">" test also matches ordinary output lines,
        # so this can break out before the model is actually ready — verify.
        if ">>> " in line or ">" in line:
            break
    # ---------------------------------
    # INTERACTIVE LOOP
    # ---------------------------------
    while True:
        try:
            question = input("QUESTION> ")
            # Nudge the model to keep the dialogue going.
            question += ' and ask me another question '
        except KeyboardInterrupt:
            question = "exit"  # Ctrl-C behaves like typing "exit"
        if question.strip().lower().startswith("exit"):
            # "/bye" ends the ollama REPL, "exit" closes the remote shell.
            proc.stdin.write("/bye\n")
            proc.stdin.flush()
            proc.stdin.write("exit\n")
            proc.stdin.flush()
            break
        # send user question
        proc.stdin.write(question + "\n")
        proc.stdin.flush()
        print("\nMODEL:\n")
        # stream model output
        while True:
            line = pipe.readline()
            if not line:
                break
            line = clean(line)
            print(line, end="")
            # crude prompt detection
            if ">>> " in line or line.strip().endswith(">"):
                break
    # Escalating shutdown: wait, then SIGTERM, then SIGKILL.
    try:
        proc.wait(timeout=10)
    except subprocess.TimeoutExpired:
        proc.terminate()  # SIGTERM — polite shutdown
        try:
            proc.wait(timeout=5)  # give it a moment to exit cleanly
        except subprocess.TimeoutExpired:
            proc.kill()  # SIGKILL — force
            proc.wait()
    pipe.wait()  # wait for thread to finish draining
    # Drop the first 9 log lines (presumably ssh/ollama startup banner).
    # NOTE(review): shells out via os.system; subprocess.run([...]) would
    # avoid the shell, and the hard-coded 1,9d is fragile — confirm the
    # banner is always exactly 9 lines.
    os.system(f""" sed -i '1,9d' {LOG} """)
    log_data = ''
    expect_path2 = ''
    with open(LOG, 'r') as f:
        log_data = f.readlines()
    # NOTE(review): log_data is a list of lines; if the prompt builder
    # interpolates it directly into an f-string the second prompt contains
    # the list's Python repr rather than plain text — verify.
    initial_prompt2 = build_first_report_and_code(log_data)
    # ---------------------------------
    # EXPECT SCRIPT (second session, runs MODEL2)
    # ---------------------------------
    script2 = f'''
set timeout -1
spawn ssh -tt -i {key} {user}@{host}
expect {{
"*Enter passphrase*" {{
send "{key_pass}\\r"
exp_continue
}}
"*password:*" {{
send "{server_pw}\\r"
}}
}}
expect "$ "
send "ollama run {MODEL2}\\r"
interact
'''
    with tempfile.NamedTemporaryFile(
        "w",
        delete=False
    ) as f:
        f.write(script2)  # was incorrectly writing script (first session)
        expect_path2 = f.name
    env = os.environ.copy()
    env["TERM"] = "dumb"
    env["NO_COLOR"] = "1"
    LOG2 = "hawk2.log"
    pipe2 = TwoPipe(log_path=LOG2, log_cleaner=decode_log_output)
    # ---------------------------------
    # START EXPECT
    # ---------------------------------
    proc2 = subprocess.Popen(
        ["expect", expect_path2],
        stdin=subprocess.PIPE,
        stdout=pipe2,
        stderr=pipe2,
        text=True,
        bufsize=1,
        env=env
    )
    pipe2.start()
    # ---------------------------------
    # SEND INITIAL PROJECT CONTEXT
    # ---------------------------------
    proc2.stdin.write(initial_prompt2 + "\n")
    proc2.stdin.flush()
    print("\nProject context loaded into model.\n")
    print("Type questions about the code.")
    print("Type 'exit' to disconnect.\n")
    # ---------------------------------
    # READ INITIAL MODEL OUTPUT
    # ---------------------------------
    while True:
        line = pipe2.readline()
        if not line:
            break
        line = clean(line)
        print(line, end="")
        # crude detection that model is ready
        if ">>> " in line or ">" in line:
            break
    # ---------------------------------
    # INTERACTIVE LOOP (no auto-appended nudge in the second session)
    # ---------------------------------
    while True:
        try:
            question = input("QUESTION2> ")
        except KeyboardInterrupt:
            question = "exit"
        if question.strip().lower().startswith("exit"):
            proc2.stdin.write("/bye\n")
            proc2.stdin.flush()
            proc2.stdin.write("exit\n")
            proc2.stdin.flush()
            break
        # send user question
        proc2.stdin.write(question + "\n")
        proc2.stdin.flush()
        print("\nMODEL:\n")
        # stream model output
        while True:
            line = pipe2.readline()
            if not line:
                break
            line = clean(line)
            print(line, end="")
            # crude prompt detection
            if ">>> " in line or line.strip().endswith(">"):
                break
    # Same escalating shutdown as session 1.
    try:
        proc2.wait(timeout=10)
    except subprocess.TimeoutExpired:
        proc2.terminate()
        try:
            proc2.wait(timeout=5)
        except subprocess.TimeoutExpired:
            proc2.kill()
            proc2.wait()
    pipe2.wait()
    print("\nDisconnected.\n")
# ----------------------------
# MAIN
# ----------------------------
if __name__ == "__main__":
    # Root of the project whose .py files will be collected and reviewed.
    project_root = "/path/to/project1"
    print("\nCollecting source code...\n")
    code = collect_python_code(project_root)
    initial_prompt = build_initial_prompt(code)
    # user/host/key_path/key_passphrase/server_pw come from the exec'd
    # auth.py credentials file loaded at import time.
    # (Fix: stray scraped text "Thank you." was fused onto the closing
    # paren, which made the file a syntax error.)
    interactive_review_session(
        user,
        host,
        key_path,
        key_passphrase,
        server_pw,
        initial_prompt
    )