enowars5-service-stldoctor

STL-Analyzing A/D Service for ENOWARS5 in 2021
git clone https://git.sinitax.com/sinitax/enowars5-service-stldoctor

commit 166b238ace53d6ba5ccdc4a160778379a10b9967
parent 7c92f5fbbfc813916bc22fe2dfd6ad709c8bb7d7
Author: Louis Burda <quent.burda@gmail.com>
Date:   Tue,  6 Jul 2021 15:23:26 +0200

use session logger in service functions
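
The service helpers (do_auth, do_list, do_upload, do_search) and the check wrappers
no longer take a separate LoggerAdapter argument resolved from the DependencyInjector;
they log through the logger that the Session object already carries. A rough sketch of
the pattern follows (illustrative only: the stream wiring and constructor shown here are
assumptions, while the real Session class in checker.py also wraps the connection I/O
used by readline/readuntil/read/drain/exit and is obtained via di.get(Session)):

    import asyncio
    from logging import LoggerAdapter
    from typing import Optional

    class Session:
        """Connection wrapper that owns the per-connection logger."""

        def __init__(
            self,
            reader: asyncio.StreamReader,
            writer: asyncio.StreamWriter,
            logger: LoggerAdapter,
        ) -> None:
            self.reader = reader
            self.writer = writer
            # Helpers log via session.logger instead of a separate parameter.
            self.logger = logger

        def write(self, data: bytes) -> None:
            self.writer.write(data)

        async def drain(self) -> None:
            await self.writer.drain()

    async def do_list(session: Session, check: bool = True) -> Optional[bytes]:
        # Signature shrinks from (session, logger, check) to (session, check);
        # every log call goes through the session's own logger.
        session.write(b"list\n")
        await session.drain()
        ...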

Diffstat:
M checker/src/checker.py | 177 +++++++++++++++++++++++++++++++++++++++++++++----------------------------------------
1 file changed, 74 insertions(+), 103 deletions(-)

diff --git a/checker/src/checker.py b/checker/src/checker.py
@@ -375,9 +375,9 @@ async def getdb(db: ChainDB, key: str) -> tuple[Any, ...]:


 async def do_auth(
-    session: Session, logger: LoggerAdapter, authstr: bytes, check: bool = True
+    session: Session, authstr: bytes, check: bool = True
 ) -> Optional[bool]:
-    logger.debug(f"Logging in with {authstr!r}")
+    session.logger.debug(f"Logging in with {authstr!r}")
     session.write(b"auth\n")
     session.write(authstr + b"\n")
     await session.drain()
@@ -386,22 +386,20 @@ async def do_auth(
     resp = await session.readline(ctx="Reading auth response (1)")
     if b"ERR:" in resp:
         if check:
-            logger.critical(f"Failed to login with {authstr!r}:\n{resp!r}")
+            session.logger.critical(f"Failed to login with {authstr!r}:\n{resp!r}")
             raise MumbleException("Authentication not working properly")
         return None

     # Also check success message
     resp += await session.readuntil(prompt, ctx="Reading auth response (2)")
     if b"Success!" not in resp:
-        logger.critical(f"Login with pass {authstr!r} failed")
+        session.logger.critical(f"Login with pass {authstr!r} failed")
         raise MumbleException("Authentication not working properly")

     return b"Welcome back" in resp


-async def do_list(
-    session: Session, logger: LoggerAdapter, check: bool = True
-) -> Optional[bytes]:
+async def do_list(session: Session, check: bool = True) -> Optional[bytes]:
     session.write(b"list\n")
     await session.drain()
     resp = await session.readuntil(prompt, ctx="reading list response")
@@ -409,7 +407,7 @@ async def do_list(
     # Check for errors
     if b"ERR:" in resp and b">> " not in resp:
         if check:
-            logger.critical(f"Failed to list private files:\n{resp!r}")
+            session.logger.critical(f"Failed to list private files:\n{resp!r}")
             raise MumbleException("File listing not working properly")
         return None

@@ -418,13 +416,12 @@

 async def do_upload(
     session: Session,
-    logger: LoggerAdapter,
     modelname: bytes,
     stlfile: bytes,
     check: bool = True,
 ) -> Optional[bytes]:
     # Upload file
-    logger.debug(f"Uploading model with name {modelname!r}")
+    session.logger.debug(f"Uploading model with name {modelname!r}")
     session.write(b"upload\n")
     session.write(modelname + b"\n")
     session.write(f"{len(stlfile)}\n".encode())
@@ -437,7 +434,7 @@ async def do_upload(
     resp += await session.readline(ctx="Reading upload response (2)")
     if b"ERR:" in resp:
         if check:
-            logger.critical(f"Failed to upload model {modelname!r}:\n{resp!r}")
+            session.logger.critical(f"Failed to upload model {modelname!r}:\n{resp!r}")
             raise MumbleException("File upload not working properly")
         await session.readuntil(prompt, ctx="Waiting for prompt")
         return None
@@ -448,7 +445,9 @@
         if modelid == b"":
             raise Exception
     except:
-        logger.critical(f"Invalid response during upload of {modelname!r}:\n{resp!r}")
+        session.logger.critical(
+            f"Invalid response during upload of {modelname!r}:\n{resp!r}"
+        )
         raise MumbleException("File upload not working properly")

     await session.readuntil(prompt, ctx="Waiting for prompt")
@@ -457,7 +456,6 @@

 async def do_search(
     session: Session,
-    logger: LoggerAdapter,
     modelname: bytes,
     download: bool = False,
     check: bool = True,
@@ -465,7 +463,7 @@
     modelname = ensure_bytes(modelname)

     # Initiate download
-    logger.debug(f"Retrieving model with name {modelname!r}")
+    session.logger.debug(f"Retrieving model with name {modelname!r}")
     session.write(b"search " + modelname + b"\n")
     session.write(b"0\n")  # first result
     session.write(b"y\n" if download else b"n\n")
@@ -476,7 +474,9 @@ async def do_search(
     line = await session.readline()
     if b"ERR:" in line:
         if check:
-            logger.critical(f"Failed to retrieve model {modelname!r}:\n{line!r}")
+            session.logger.critical(
+                f"Failed to retrieve model {modelname!r}:\n{line!r}"
+            )
             raise MumbleException("File search not working properly")
         if b"Couldn't find a matching scan result" in line:
             # collect all the invalid commands sent after (hacky)
@@ -503,7 +503,7 @@ async def do_search(
             f"Received invalid download size, response:\n{resp!r}"
         )

-        logger.debug(f"Download size: {size}")
+        session.logger.debug(f"Download size: {size}")
         stlfile = await session.read(size, ctx="Reading stl contents")
         await session.readuntil(prompt)

@@ -513,58 +513,54 @@ async def do_search(
 # CHECK WRAPPERS #


-async def check_line(session: Session, logger: LoggerAdapter, context: str) -> bytes:
+async def check_line(session: Session, context: str) -> bytes:
     line = await session.readline()
     if b"ERR:" in line:
-        logger.critical(f"{context}: Unexpected error message\n")
+        session.logger.critical(f"{context}: Unexpected error message\n")
         raise MumbleException("Service returned error during valid interaction")
     return line


-async def check_listed(
-    session: Session, logger: LoggerAdapter, includes: list[bytes]
-) -> bytes:
-    resp = await do_list(session, logger, check=True)
+async def check_listed(session: Session, includes: list[bytes]) -> bytes:
+    resp = await do_list(session, check=True)
     assert resp is not None
     if not includes_all(resp, includes):
-        logger.critical(f"Failed to find {includes} in listing:\n{resp!r}")
+        session.logger.critical(f"Failed to find {includes} in listing:\n{resp!r}")
         raise MumbleException("File listing not working properly")
     return resp


 async def check_not_listed(
     session: Session,
-    logger: LoggerAdapter,
     excludes: list[bytes],
     fail: bool = False,
 ) -> Optional[bytes]:
-    resp = await do_list(session, logger, check=False)
+    resp = await do_list(session, check=False)
     if resp is not None:
         if fail:
-            logger.critical(f"Expected list to fail, but returned:\n{resp!r}")
+            session.logger.critical(f"Expected list to fail, but returned:\n{resp!r}")
             raise MumbleException("File listing not working properly")
         if includes_any(resp, excludes):
-            logger.critical(
+            session.logger.critical(
                 f"Unexpectedly found one of {excludes} in listing:\n{resp!r}"
             )
             raise MumbleException("File listing not working properly")
     elif not fail:
-        logger.critical(f"list failed unexpectedly:\n{resp!r}")
+        session.logger.critical(f"list failed unexpectedly:\n{resp!r}")
         raise MumbleException("File listing not working properly")
     return resp


 async def check_in_search(
     session: Session,
-    logger: LoggerAdapter,
     modelname: bytes,
     includes: list[bytes],
     download: bool = False,
 ) -> tuple[bytes, bytes]:
-    resp = await do_search(session, logger, modelname, download, check=True)
+    resp = await do_search(session, modelname, download, check=True)
     assert resp is not None
     if not includes_all(resp[0] + resp[1], includes):
-        logger.critical(
+        session.logger.critical(
             f"Retrieved info for {modelname!r} is missing {includes}: {resp[0]+resp[1]!r}"
         )
         raise MumbleException("File search not working properly")
@@ -573,27 +569,26 @@ async def check_in_search(

 async def check_not_in_search(
     session: Session,
-    logger: LoggerAdapter,
     modelname: bytes,
     excludes: list[bytes],
     download: bool = False,
     fail: bool = False,
 ) -> Optional[tuple[bytes, bytes]]:
-    resp = await do_search(session, logger, modelname, download, check=False)
+    resp = await do_search(session, modelname, download, check=False)
     if resp is not None:
         combined = resp[0] + resp[1]
         if fail:
-            logger.critical(
+            session.logger.critical(
                 "Search for {modelname!r} succeeded unexpectedly:\n{combined!r}"
             )
             raise MumbleException("File search not working properly")
         if includes_any(combined, excludes):
-            logger.critical(
+            session.logger.critical(
                 f"Unexpectedly {modelname!r} info contains one of {excludes}: {combined!r}"
             )
             raise MumbleException("File search not working properly")
     elif not fail:
-        logger.critical(f"Search for {modelname!r} failed unexpectedly")
+        session.logger.critical(f"Search for {modelname!r} failed unexpectedly")
         raise MumbleException("File search not working properly")
     return resp

@@ -687,21 +682,18 @@ async def test_good_upload(
     modelname, authstr = fakeids(2, havoc=True)
     stlfile = genfile(solidname, filetype)
     ref_info = parse_stlinfo(stlfile)
-    logger = await di.get(LoggerAdapter)

     # Create new session, register and upload file
     session = await di.get(Session)
     if register:
-        await do_auth(session, logger, authstr, check=True)
-    modelid = await do_upload(session, logger, modelname, stlfile, check=True)
+        await do_auth(session, authstr, check=True)
+    modelid = await do_upload(session, modelname, stlfile, check=True)
     assert modelid is not None
     check_hash(modelid)
     expected = [modelname, solidname, stlfile, modelid]
-    info, stlfile = await check_in_search(
-        session, logger, modelname, expected, download=True
-    )
+    info, stlfile = await check_in_search(session, modelname, expected, download=True)
     check_stlinfo(
-        logger,
+        session.logger,
         info,
         ref_info,
         ref_modelname=modelname,
@@ -709,33 +701,33 @@
         ref_solidname=solidname,
     )
     if register:
-        await check_listed(session, logger, [modelname, modelid + b"-"])
+        await check_listed(session, [modelname, modelid + b"-"])

     # Try getting file from a new session
     session = await di.get(Session)
     if register:
         await check_not_in_search(
-            session, logger, modelname, expected, download=True, fail=True
+            session, modelname, expected, download=True, fail=True
         )
-        await do_auth(session, logger, authstr, check=True)
+        await do_auth(session, authstr, check=True)
         info, stlfile = await check_in_search(
-            session, logger, modelname, expected, download=True
+            session, modelname, expected, download=True
         )
         check_stlinfo(
-            logger,
+            session.logger,
             info,
             ref_info,
             ref_modelname=modelname,
             ref_modelid=modelid,
             ref_solidname=solidname,
         )
-        await check_listed(session, logger, [modelname, modelid + b"-"])
+        await check_listed(session, [modelname, modelid + b"-"])
     else:
         info, stlfile = await check_in_search(
-            session, logger, modelname, expected, download=True
+            session, modelname, expected, download=True
         )
         check_stlinfo(
-            logger,
+            session.logger,
             info,
             ref_info,
             ref_modelname=modelname,
@@ -747,26 +739,24 @@
 async def test_bad_upload(di: DependencyInjector, filetype: str, variant: int) -> None:
     modelname, solidname = fakeids(2)
     stlfile = genfile(solidname, filetype, malformed=variant)
-    logger = await di.get(LoggerAdapter)

     # Ensure a malformed file causes an error
     session = await di.get(Session)
-    if await do_upload(session, logger, modelname, stlfile, check=False):
-        logger.critical(f"Able to upload malformed file:\n{stlfile!r}")
+    if await do_upload(session, modelname, stlfile, check=False):
+        session.logger.critical(f"Able to upload malformed file:\n{stlfile!r}")
         raise MumbleException("Upload validation not working properly")


 async def test_search(di: DependencyInjector, registered: bool = False) -> None:
     solidname, modelname, authstr = fakeids(3)
-    logger = await di.get(LoggerAdapter)

     # Ensure searching for a file that doesnt exist causes an error
     session = await di.get(Session)
     if registered:
-        await do_auth(session, logger, authstr, check=True)
-    if resp := await do_search(session, logger, modelname, download=False, check=False):
+        await do_auth(session, authstr, check=True)
+    if resp := await do_search(session, modelname, download=False, check=False):
         assert resp is not None
-        logger.critical(
+        session.logger.critical(
             f"Search for file that shouldn't exist succeeded:\n{resp[0]+resp[1]!r}"
         )
         raise MumbleException("File search not working properly")
@@ -780,13 +770,12 @@ async def putflag_guest(
     task: PutflagCheckerTaskMessage, di: DependencyInjector
 ) -> None:
     modelname = fakeid()
-    logger = await di.get(LoggerAdapter)
     db = await di.get(ChainDB)

     # Generate a file with flag in solidname and upload it (unregistered, ascii)
     session = await di.get(Session)
     stlfile = genfile(task.flag.encode(), "ascii")
-    modelid = await do_upload(session, logger, modelname, stlfile, check=True)
+    modelid = await do_upload(session, modelname, stlfile, check=True)
     assert modelid is not None
     await db.set("info", (modelname, modelid))

@@ -797,14 +786,13 @@ async def putflag_private(
     task: PutflagCheckerTaskMessage, di: DependencyInjector
 ) -> None:
     modelname, authstr = fakeids(2)
-    logger = await di.get(LoggerAdapter)
     stlfile = genfile(task.flag.encode(), "bin")
     db = await di.get(ChainDB)

     # Generate a file with flag in solidname and upload it (registered, bin)
     session = await di.get(Session)
-    await do_auth(session, logger, authstr, check=True)
-    modelid = await do_upload(session, logger, modelname, stlfile, check=True)
+    await do_auth(session, authstr, check=True)
+    modelid = await do_upload(session, modelname, stlfile, check=True)
     assert modelid is not None
     await db.set("info", (modelname, modelid, authstr))

@@ -816,11 +804,10 @@ async def getflag_guest(
 ) -> None:
     db = await di.get(ChainDB)
     modelname, modelid = await getdb(db, "info")
-    logger = await di.get(LoggerAdapter)

     # Retrieve flag file info via search and ensure flag's included
     session = await di.get(Session)
-    resp = await do_search(session, logger, modelname, download=True, check=True)
+    resp = await do_search(session, modelname, download=True, check=True)
     assert resp is not None
     assert_in(task.flag.encode(), resp[0], "Flag is missing from stl info")
     assert_in(task.flag.encode(), resp[1], "Flag is missing from stl file")
@@ -832,16 +819,15 @@ async def getflag_private(
 ) -> None:
     db = await di.get(ChainDB)
     modelname, modelid, authstr = await getdb(db, "info")
-    logger = await di.get(LoggerAdapter)

     # Retrieve private flag file info via search / list and ensure flag's included
     session = await di.get(Session)
-    await do_auth(session, logger, authstr, check=True)
-    search_resp = await do_search(session, logger, modelname, download=True, check=True)
+    await do_auth(session, authstr, check=True)
+    search_resp = await do_search(session, modelname, download=True, check=True)
     assert search_resp is not None
     assert_in(task.flag.encode(), search_resp[0], "Flag is missing from stl info")
     assert_in(task.flag.encode(), search_resp[1], "Flag is missing from stl file")

-    list_resp = await do_list(session, logger, check=True)
+    list_resp = await do_list(session, check=True)
     assert list_resp is not None
     assert_in(task.flag.encode(), list_resp, "Flag is missing from list")
@@ -851,13 +837,12 @@ async def putnoise_guest(
     task: PutnoiseCheckerTaskMessage, di: DependencyInjector
 ) -> None:
     modelname, solidname = fakeids(2)
-    logger = await di.get(LoggerAdapter)
     db = await di.get(ChainDB)

     # Generate a random file and upload it (unregistered, bin / ascii)
     session = await di.get(Session)
     stlfile = genfile(solidname, "ascii" if task.variant_id == 0 else "bin")
-    modelid = await do_upload(session, logger, modelname, stlfile, check=True)
+    modelid = await do_upload(session, modelname, stlfile, check=True)

     await db.set("info", (modelid, modelname, solidname, stlfile))

@@ -867,14 +852,13 @@ async def putnoise_priv(
     task: PutnoiseCheckerTaskMessage, di: DependencyInjector
 ) -> None:
     modelname, solidname, authstr = fakeids(3)
-    logger = await di.get(LoggerAdapter)
     db = await di.get(ChainDB)

     # Generate a random file and upload it (registered, bin / ascii)
     session = await di.get(Session)
     stlfile = genfile(solidname, "ascii" if task.variant_id == 0 else "bin")
-    await do_auth(session, logger, authstr, check=True)
-    modelid = await do_upload(session, logger, modelname, stlfile, check=True)
+    await do_auth(session, authstr, check=True)
+    modelid = await do_upload(session, modelname, stlfile, check=True)

     await db.set("info", (modelid, modelname, solidname, stlfile, authstr))

@@ -885,13 +869,11 @@ async def getnoise_guest(
 ) -> None:
     db = await di.get(ChainDB)
     modelid, modelname, solidname, stlfile = await getdb(db, "info")
-    logger = await di.get(LoggerAdapter)

     # Retrieve noise file by name via search
     session = await di.get(Session)
     await check_in_search(
         session,
-        logger,
         modelname,
         [modelname, solidname, stlfile, modelid],
         download=True,
     )
@@ -904,19 +886,17 @@ async def getnoise_priv(
 ) -> None:
     db = await di.get(ChainDB)
     modelid, modelname, solidname, stlfile, authstr = await getdb(db, "info")
-    logger = await di.get(LoggerAdapter)

     # Retrieve noise file by name via search and search (registered)
     session = await di.get(Session)
-    await do_auth(session, logger, authstr, check=True)
+    await do_auth(session, authstr, check=True)
     await check_in_search(
         session,
-        logger,
         modelname,
         [modelname, solidname, stlfile, modelid],
         download=True,
     )
-    await check_listed(session, logger, [modelname, solidname, modelid])
+    await check_listed(session, [modelname, solidname, modelid])


 @checker.havoc(0)
@@ -991,13 +971,11 @@ async def havoc_test_search_priv(di: DependencyInjector) -> None:

 @checker.havoc(14)
 async def havoc_test_list_guest(di: DependencyInjector) -> None:
-    logger = await di.get(LoggerAdapter)
-
     # Ensure that list does not work for unregistered users
     session = await di.get(Session)
-    resp = await do_list(session, logger, check=False)
+    resp = await do_list(session, check=False)
     if resp is not None:
-        logger.critical("Unregistered user can run list without ERR!")
+        session.logger.critical("Unregistered user can run list without ERR!")
         raise MumbleException("User authentication not working properly")


@@ -1009,32 +987,26 @@ async def havoc_fluff_upload(di: DependencyInjector) -> None:
     modelname = os.path.basename(model).split("-")[0].encode()
     modelname += bytes([rand.choice(b"0123456789") for i in range(5)])
     stlfile = open(model, "rb").read()
-    logger = await di.get(LoggerAdapter)

     # Simple Upload
     session = await di.get(Session)
-    modelid = await do_upload(session, logger, modelname, stlfile, check=True)
+    modelid = await do_upload(session, modelname, stlfile, check=True)
     assert modelid is not None
     await check_in_search(
-        session, logger, modelname, [modelname, modelid, stlfile], download=True
+        session, modelname, [modelname, modelid, stlfile], download=True
     )


 @checker.exploit(0)
 async def exploit_prefix_truncation(di: DependencyInjector) -> bytes:
     modelname = fakeid()
-    logger = await di.get(LoggerAdapter)
     searcher = await di.get(FlagSearcher)

     # Upload evil file for parse via search for hash truncation
     session = await di.get(Session)
-    logger.debug("Uploading evil file for hash truncation")
-    await do_upload(
-        session, logger, modelname, stlfile=search_truncation_payload, check=True
-    )
-    search_resp = await do_search(
-        session, logger, modelname, download=False, check=True
-    )
+    session.logger.debug("Uploading evil file for hash truncation")
+    await do_upload(session, modelname, stlfile=search_truncation_payload, check=True)
+    search_resp = await do_search(session, modelname, download=False, check=True)
     assert search_resp is not None
     info, contents = search_resp
     session.write(b"search last\n")
@@ -1047,12 +1019,12 @@ async def exploit_prefix_truncation(di: DependencyInjector) -> bytes:
         raise MumbleException("Failed to list files through search")

     # Use it to enumerate other files and grab contents
-    logger.debug(
+    session.logger.debug(
         "Targets:\n" + "\n".join([" - " + l.decode("latin1") for l in filelist])
     )
     flag = None
     for i, fhash in enumerate(filelist):
-        logger.debug(f"Retrieving file {fhash} at index {i}")
+        session.logger.debug(f"Retrieving file {fhash} at index {i}")
         session.write(f"{i}\nn\n".encode())
         await session.drain()
         filelist_resp = await session.readuntil(
@@ -1075,7 +1047,6 @@ async def exploit_prefix_truncation(di: DependencyInjector) -> bytes:

 @checker.exploit(1)
 async def exploit_hash_overflow(di: DependencyInjector) -> None:
-    logger = await di.get(LoggerAdapter)
     searcher = await di.get(FlagSearcher)

     # Overflow loggedin variable
@@ -1090,22 +1061,22 @@ async def exploit_hash_overflow(di: DependencyInjector) -> None:
         raise MumbleException("Exploit did not set 'loggedin' variable via overflow")

     # Get private user hashes via 'list'
-    resp = await do_list(session, logger, check=False)
+    resp = await do_list(session, check=False)
     if not resp:
         raise MumbleException("")
-    logger.debug("list response: " + str(resp))
+    session.logger.debug("list response: " + str(resp))
     users = [l.split(b" .")[1] for l in resp.split(b"\n") if b">> ." in l]
     await session.exit()

     # Login as each private user
     for userhash in users:
         # Find preimage of user hash
-        logger.debug(f"Logging in as user with id {userhash!r}")
+        session.logger.debug(f"Logging in as user with id {userhash!r}")
         authstr = reverse_hash(userhash.decode())

         # Authenticate and check if the user is new
         session = await di.get(Session)
-        if not await do_auth(session, logger, authstr, check=True):
+        if not await do_auth(session, authstr, check=True):
             await session.exit()
             # We dont raise an exception, because it could be that user dir was cleaned
             # up just before we logged in, not necessarily because of an invalid prehash.
@@ -1114,7 +1085,7 @@ async def exploit_hash_overflow(di: DependencyInjector) -> None:
             continue

         # list all private files of user
-        resp = await do_list(session, logger, check=True)
+        resp = await do_list(session, check=True)
         await session.exit()

         # Search for flag in solid names