Hey everyone, if you have a high-core-count CPU (e.g., 16 cores or more), could you kindly run this script for me and send back the generated found_seeds.txt file? Thanks!
import random
import multiprocessing
# Shared counters and total for progress (set once in each worker process)
counter = None
found_counter = None
total = None

def init_globals(shared_counter, shared_found_counter, total_count):
    global counter, found_counter, total
    counter = shared_counter
    found_counter = shared_found_counter
    total = total_count
# The ten 32-bit target values we are trying to reproduce
TARGET_VALUES = {
    0xb862a62e, 0x9de820a7, 0xe9ae4933,
    0xe02b35a3, 0xade6d7ce, 0xefae164c,
    0x9d18b63a, 0xfc07a182, 0xf7051f27,
    0xbebb3940,
}

def check_seed(a):
    # Seed the PRNG, draw one 32-bit value, and log the seed on a match.
    random.seed(a)
    b = random.randrange(2**31, 2**32)
    if b in TARGET_VALUES:
        with found_counter.get_lock():
            found_counter.value += 1
        with open("found_seeds.txt", "a") as f:
            f.write(f"seed: {a} - FOUND! {hex(b)}\n")
    with counter.get_lock():
        counter.value += 1
        # Redraw the progress bar only every 100,000 seeds; printing on every
        # iteration would dominate the runtime.
        if counter.value % 100_000 == 0:
            percent = counter.value / total
            bar_length = 30
            filled = int(bar_length * percent)
            bar = '█' * filled + '░' * (bar_length - filled)
            print(f"\rProgress: [{bar}] {percent * 100:.2f}% | Found_Seed: {found_counter.value}",
                  end='', flush=True)
def chunked_range(start, end, chunk_size):
    # Split [start, end) into consecutive sub-ranges of at most chunk_size seeds.
    for i in range(start, end, chunk_size):
        yield range(i, min(i + chunk_size, end))
if __name__ == '__main__':
    start = 1_706_200_000
    end = 10_000_000_000
    chunk_size = 1_000_000
    cpu_count = multiprocessing.cpu_count()
    total_seeds = end - start
    # 'q' = 64-bit counters; the seed count (~8.3 billion) would overflow a 32-bit 'i'.
    shared_counter = multiprocessing.Value('q', 0)
    shared_found_counter = multiprocessing.Value('q', 0)
    with multiprocessing.Pool(cpu_count, initializer=init_globals,
                              initargs=(shared_counter, shared_found_counter, total_seeds)) as pool:
        for subrange in chunked_range(start, end, chunk_size):
            pool.map(check_seed, subrange)
    input("\nDone. Press Enter to exit.")