Hello, what do you think of this idea? I hope it is useful, and I'm looking forward to your comments.
Good idea. It will only take about 100,000 years to find the puzzle 71 private key.
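For scale, here is a rough back-of-envelope sketch of where a figure like that comes from. The puzzle 71 key lies somewhere in [2^70, 2^71), and the keys-per-second rate below is a made-up assumption, not a benchmark of this script (a pure-Python mnemonic loop is far slower):

# Back-of-envelope estimate only; the rate is an assumption, not a measurement.
RANGE_SIZE = 2 ** 70            # puzzle 71 keyspace: [2^70, 2^71)
ASSUMED_RATE = 100_000_000      # hypothetical 100 million keys/sec

expected_keys = RANGE_SIZE / 2  # on average, half the range is scanned before a hit
seconds = expected_keys / ASSUMED_RATE
years = seconds / (365.25 * 24 * 3600)
print(f"~{years:,.0f} years at {ASSUMED_RATE:,} keys/sec")  # roughly 187,000 years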

Here is the AI-generated script:
import mnemonic
import secp256k1 as ice
import multiprocessing
import time
import random

# Configuration
TARGET_HASH160 = "f6f5431d25bbf7b12e8add9af5e3475c44a0a5b8"  # target hash160
FIXED_WORDS = ["abandon"] * 16 + ["about"]  # Fixed part of the 24-word mnemonic
LANGUAGE = 'english'
BATCH_SIZE = 10000  # Print progress every X valid attempts

# Initialize components
mnemo = mnemonic.Mnemonic(LANGUAGE)
wordlist = mnemo.wordlist
target_binary = bytes.fromhex(TARGET_HASH160)

# Shared counters ('q' = 64-bit, so long runs cannot overflow them).
# Workers inherit these via fork; on Windows/macOS (spawn) pass them as arguments instead.
attempts = multiprocessing.Value('q', 0)        # Total tries
valid_attempts = multiprocessing.Value('q', 0)  # Valid mnemonics only
start_time = time.time()
lock = multiprocessing.Lock()

def worker(result_queue):
    while True:
        try:
            # Generate 7 random words
            random_words = random.choices(wordlist, k=7)

            # Build the full 24-word mnemonic phrase
            candidate_words = FIXED_WORDS + random_words
            mnemonic_phrase = ' '.join(candidate_words)

            # Skip if the mnemonic checksum is not valid
            if not mnemo.check(mnemonic_phrase):
                with lock:
                    attempts.value += 1  # Count total attempts
                continue

            # Valid mnemonic found
            with lock:
                attempts.value += 1
                valid_attempts.value += 1
                # Print progress based on valid attempts
                if valid_attempts.value % BATCH_SIZE == 0:
                    elapsed = time.time() - start_time
                    rate = valid_attempts.value / max(1, elapsed)
                    print(f"[Valid] {valid_attempts.value:,} | {rate:,.0f} valid/sec | Current: {' '.join(random_words)}")

            # Convert the mnemonic back to its 256-bit entropy and treat it as the private key
            entropy = mnemo.to_entropy(mnemonic_phrase.split())
            private_key_int = int.from_bytes(entropy, 'big')

            # Generate hash160 of the compressed public key
            h160 = ice.privatekey_to_h160(0, True, private_key_int)

            # Check for a match
            if h160 == target_binary:
                result_queue.put((
                    True,
                    mnemonic_phrase,
                    private_key_int.to_bytes(32, 'big').hex(),
                    h160.hex()
                ))
                return
        except Exception:
            continue  # Skip on error
if __name__ == '__main__':
    print(f"Starting random mnemonic search with {multiprocessing.cpu_count()} workers...")
    result_queue = multiprocessing.Queue()
    processes = []

    # Start workers
    for _ in range(multiprocessing.cpu_count()):
        p = multiprocessing.Process(target=worker, args=(result_queue,))
        processes.append(p)
        p.start()

    # Wait for a result
    while True:
        success, mnemonic_phrase, privkey_hex, found_hash160 = result_queue.get()
        if success:
            print("\nSUCCESS! Found matching mnemonic:")
            print(f"Full Mnemonic: {mnemonic_phrase}")
            print(f"Private Key: {privkey_hex}")
            print(f"Hash160: {found_hash160}")
            # Terminate all workers
            for p in processes:
                p.terminate()
            break

    # Cleanup and final statistics
    for p in processes:
        p.join()
    elapsed = time.time() - start_time
    print(f"\nTotal attempts: {attempts.value:,}")
    print(f"Valid mnemonics tested: {valid_attempts.value:,}")
    print(f"Time elapsed: {elapsed:.2f} seconds")
    print(f"Speed: {valid_attempts.value / max(1, elapsed):,.0f} valid mnemonics/sec")
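A note on the "valid/sec" number: for a 24-word phrase the BIP39 checksum is 8 bits, so only about 1 in 256 random word choices passes mnemo.check() and gets counted. Here is a minimal standalone sketch to confirm that ratio empirically, assuming the same mnemonic package is installed:

# Rough empirical check of the BIP39 checksum pass rate for 24-word phrases.
import random
import mnemonic

mnemo = mnemonic.Mnemonic('english')
fixed = ["abandon"] * 16 + ["about"]

trials = 100_000
valid = sum(
    mnemo.check(' '.join(fixed + random.choices(mnemo.wordlist, k=7)))
    for _ in range(trials)
)
print(f"{valid}/{trials} valid (~1 in {trials // max(valid, 1)})")  # expect roughly 1 in 256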
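And if the script ever does report a hit, the result can be double-checked without the ice secp256k1 module. This is only a sketch, assuming the pure-Python ecdsa package and an OpenSSL build that exposes ripemd160 in hashlib; privkey_hex stands for the hex key the script prints:

# Independent hash160 check: RIPEMD160(SHA256(compressed public key)).
import hashlib
from ecdsa import SigningKey, SECP256k1

def hash160_of_privkey(priv_int):
    sk = SigningKey.from_secret_exponent(priv_int, curve=SECP256k1)
    pubkey = sk.get_verifying_key().to_string("compressed")  # 33-byte compressed public key
    sha = hashlib.sha256(pubkey).digest()
    return hashlib.new('ripemd160', sha).hexdigest()

# e.g. hash160_of_privkey(int(privkey_hex, 16)) should equal
# "f6f5431d25bbf7b12e8add9af5e3475c44a0a5b8"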