Sorry for jumping in and having to ask what I need to change.
I'm using binary_bsgs_v1.py, and to start at another point instead of 1 I use this:
"lm_upub = ice.scalar_multiplication(Add + (lm * i))"
I will go for a 120-bit DB; how do I set it up beyond 125 bits? Any help on this? I mean, where are your specific scripts?
You all have your own scripts; please share them, it would be appreciated.
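As far as I understand the quoted line, Add is the scalar offset where the range starts and lm*i moves to the i-th giant step, so it just computes the public key of Add + lm*i. A minimal sketch of that idea, assuming the iceland2k14 secp256k1 bindings (the values here are only placeholders, not the author's settings):

import secp256k1 as ice

Add = 18446744073709551615      # placeholder: scalar where the range starts (2^64 - 1)
lm = 4611686018                 # placeholder: giant-step size (num // Low_m in the script below)

for i in range(3):              # first few giant-step base points, for illustration only
    lm_upub = ice.scalar_multiplication(Add + (lm * i))   # 65-byte uncompressed pubkey
    print(i, lm_upub.hex()[:16] + "...")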
Example for 65 bit:
It won't work without chunking; it will complain that you don't have enough memory even if you have 128 GB of RAM.
So this will create baby_steps_binary.bin in 46117 stages.
The size of the file is over 5 GB.
So calculate the size for 130 bit accordingly.
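If you want to estimate the stage count and file size for another range (say 130 bit) before running anything, the arithmetic is simple. This sketch just restates the parameters of the script below, assuming one parity bit is written per baby-step point:

num = 92233720368               # same num as in the script below
Low_m = 20
chunk_size = 100000

lm = num // Low_m                               # baby-step points that get written
stages = (lm + chunk_size - 1) // chunk_size    # ceil(lm / chunk_size) -> 46117
size_bytes = lm // 8                            # one parity bit per point
print("points:", lm, "stages:", stages, "size (bytes):", size_bytes)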

import secp256k1 as ice
from bitstring import BitArray

print("creating Baby Step")

# create baby step
num = 92233720368  # Keys number in Binary Babystep. Same m as in the search script
Low_m = 20
lm = num // Low_m
Add = 18446744073709551615          # scalar offset: baby steps start at Add instead of 1
Add_pub = ice.scalar_multiplication(Add)

# Process one chunk of baby steps and append its parity bits to the binary file
def process_and_write_chunk(start, end):
    # Base point of this chunk is shifted by `start`, so chunk k continues
    # exactly where chunk k-1 stopped instead of restarting from Add_pub.
    chunk_pub = ice.scalar_multiplication(Add + start)
    res = ice.point_sequential_increment(end - start, chunk_pub)  # (end - start) * 65 bytes
    binary = ''
    for t in range(end - start):
        h = res[t * 65 : (t + 1) * 65].hex()   # uncompressed point: 04 || X || Y
        hc = int(h[2:], 16)                    # X||Y as an integer; its parity is the parity of Y
        if str(hc).endswith(('0', '2', '4', '6', '8')):
            binary += "0"
        if str(hc).endswith(('1', '3', '5', '7', '9')):
            binary += "1"
    my_str = BitArray(bin=binary)
    with open('baby_steps_binary.bin', 'ab') as binary_file:
        my_str.tofile(binary_file)

# Process the range in chunks so the whole table never has to sit in memory
chunk_size = 100000
total_stages = (lm + chunk_size - 1) // chunk_size
for i in range(0, lm, chunk_size):
    print("stage: " + str(i // chunk_size + 1) + "/" + str(total_stages))
    end = min(i + chunk_size, lm)
    process_and_write_chunk(i, end)
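If you want to sanity-check the file after a run, you can read the first few bits back with BitArray and recompute the same parities directly from the points. This is only my own quick check, not part of the script above; the variable names and the bit count are arbitrary:

import secp256k1 as ice
from bitstring import BitArray

Add = 18446744073709551615
check = 16                                        # how many leading bits to verify

bits_on_disk = BitArray(filename='baby_steps_binary.bin')[:check]

pts = ice.point_sequential_increment(check, ice.scalar_multiplication(Add))
recomputed = BitArray(bin=''.join(
    '1' if pts[(t + 1) * 65 - 1] & 1 else '0'     # last byte of Y decides the parity
    for t in range(check)))

print("ok" if bits_on_disk == recomputed else "mismatch")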