diff --git a/hate_crack/api.py b/hate_crack/api.py
index fb9909f..0fec8d8 100644
--- a/hate_crack/api.py
+++ b/hate_crack/api.py
@@ -918,7 +918,9 @@ class HashviewAPI:
 
         # Append found hashes to the left file
         with open(output_abs, "a", encoding="utf-8") as lf:
-            with open(found_hashes_file, "r", encoding="utf-8", errors="ignore") as fhf:
+            with open(
+                found_hashes_file, "r", encoding="utf-8", errors="ignore"
+            ) as fhf:
                 for line in fhf:
                     line = line.strip()
                     if line:
@@ -930,16 +932,22 @@ class HashviewAPI:
         if potfile_path:
             appended = 0
             with open(potfile_path, "a", encoding="utf-8") as pf:
-                with open(found_file, "r", encoding="utf-8", errors="ignore") as ff:
+                with open(
+                    found_file, "r", encoding="utf-8", errors="ignore"
+                ) as ff:
                     for line in ff:
                         line = line.strip()
                         if line and ":" in line:
                             pf.write(line + "\n")
                             appended += 1
             combined_count = appended
-            print(f"✓ Appended {appended} found hashes to potfile: {potfile_path}")
+            print(
+                f"✓ Appended {appended} found hashes to potfile: {potfile_path}"
+            )
         else:
-            print("Warning: No potfile path configured, skipping potfile update")
+            print(
+                "Warning: No potfile path configured, skipping potfile update"
+            )
 
         # Clean up the two found_ files
         for f_path in (found_file, found_hashes_file, found_clears_file):
@@ -1023,6 +1031,11 @@ class HashviewAPI:
                 output_file = os.path.basename(match.group(1))
             else:
                 output_file = f"wordlist_{wordlist_id}.gz"
+        if not os.path.isabs(output_file):
+            dest_dir = get_hcat_wordlists_dir()
+            output_file = os.path.join(dest_dir, output_file)
+            os.makedirs(os.path.dirname(output_file), exist_ok=True)
+
         total = int(resp.headers.get("content-length", 0))
         downloaded = 0
         chunk_size = 8192
diff --git a/tests/test_hashview.py b/tests/test_hashview.py
index 8a1df3c..973d733 100644
--- a/tests/test_hashview.py
+++ b/tests/test_hashview.py
@@ -315,6 +315,52 @@ class TestHashviewAPI:
         assert "Cookie" in auth_headers or "uuid" in str(auth_headers)
         assert HASHVIEW_API_KEY in str(auth_headers)
 
+    def test_download_wordlist_saves_to_wordlists_dir(self, api, tmp_path):
+        """When output_file is relative, it should resolve to get_hcat_wordlists_dir()."""
+        wordlists_dir = tmp_path / "wordlists"
+        wordlists_dir.mkdir()
+
+        mock_response = Mock()
+        mock_response.content = b"gzipdata"
+        mock_response.raise_for_status = Mock()
+        mock_response.headers = {
+            "content-length": "8",
+            "content-disposition": 'attachment; filename="mylist.txt.gz"',
+        }
+        mock_response.iter_content = lambda chunk_size=8192: iter(
+            [mock_response.content]
+        )
+        api.session.get.return_value = mock_response
+
+        with patch(
+            "hate_crack.api.get_hcat_wordlists_dir", return_value=str(wordlists_dir)
+        ):
+            result = api.download_wordlist(99)
+
+        expected_path = str(wordlists_dir / "mylist.txt.gz")
+        assert result["output_file"] == expected_path
+        assert os.path.exists(expected_path)
+        with open(expected_path, "rb") as f:
+            assert f.read() == b"gzipdata"
+
+    def test_download_wordlist_absolute_path_unchanged(self, api, tmp_path):
+        """When output_file is absolute, it should not be redirected."""
+        abs_output = str(tmp_path / "direct_output.gz")
+
+        mock_response = Mock()
+        mock_response.content = b"data"
+        mock_response.raise_for_status = Mock()
+        mock_response.headers = {"content-length": "4"}
+        mock_response.iter_content = lambda chunk_size=8192: iter(
+            [mock_response.content]
+        )
+        api.session.get.return_value = mock_response
+
+        result = api.download_wordlist(99, output_file=abs_output)
+
+        assert result["output_file"] == abs_output
+        assert os.path.exists(abs_output)
+
     def test_list_wordlists_live(self):
         """Live test for Hashview wordlist listing with auth headers."""
         # Only run this test if explicitly enabled
@@ -600,7 +646,9 @@ class TestHashviewAPI:
 
         # Verify found files are cleaned up after merge
         found_file = tmp_path / "found_1_2.txt"
-        assert not os.path.exists(found_file), "Found file should be deleted after merge"
+        assert not os.path.exists(found_file), (
+            "Found file should be deleted after merge"
+        )
 
         found_hashes_file = tmp_path / "found_hashes_1_2.txt"
         found_clears_file = tmp_path / "found_clears_1_2.txt"