For the past two days I have been running the script below, which automatically converts "old-style" Inkbunny sources (using submissionview.php) to "current-style" sources (using /s/).
#!/usr/bin/env python3
import re
import time
import requests
# e621 API credentials (sent as HTTP basic auth below) -- replace these with your own.
# NOTE(review): a live API key is published in this post; it should be revoked/rotated.
username = "hexerade"
api_key = "1nHrmzmsvJf26EhU1F7CjnjC"
# Shared HTTP session reused for every request: basic auth plus a
# descriptive User-Agent identifying the script and its operator.
s = requests.Session()
s.auth = (username, api_key)
s.headers.update({"User-Agent": f"Inkbunny URL Normalisation/1.0 (by {username} on e621)"})
# Old-style Inkbunny submission URL: captures the submission id (group 1)
# and an optional page number (group 2); a trailing "#pictop" fragment is
# tolerated. Dots are escaped so that only literal "inkbunny.net" /
# "submissionview.php" match (the unescaped "." would accept any character).
ib_re = re.compile(
    r"https://inkbunny\.net/submissionview\.php\?id=(\d+)(?:&page=(\d+))?(?:#pictop)?"
)
def replace_inkbunny(num, sources):
    """Normalise old-style Inkbunny source URLs on e621 post *num*.

    Builds a source diff (remove old URL, add /s/-style URL with an
    optional "-pN" page suffix for pages > 1) and PATCHes it to the
    e621 API, then sleeps out the remainder of a 24-second window to
    stay under the 150-posts-per-hour rate limit.  Posts with no
    matching sources are skipped without sleeping.
    """
    diff = []
    for source in sources:
        m = ib_re.fullmatch(source)
        if m is None:
            continue
        diff.append("-" + source)
        # Page 1 (or no page parameter) needs no suffix in the new URL form.
        psuf = f"-p{m[2]}" if m[2] and int(m[2]) > 1 else ""
        diff.append(f"https://inkbunny.net/s/{m[1]}{psuf}")
    if not diff:
        return
    replace_params = {
        "post[source_diff]": "\n".join(diff),
        "post[edit_reason]": "Inkbunny URL normalisation",
    }
    t0 = time.time_ns()
    r = s.patch(f"https://e621.net/posts/{num}.json", params=replace_params)
    d = time.time_ns() - t0
    if r.ok:
        print(f"Success on https://e621.net/posts/{num} ({round(d / 10**6)}ms)")
    else:
        print(f"!!! FAILED on https://e621.net/posts/{num} !!!")
    # 150 posts/hour == one post per 24 s; subtract the time the request took.
    time.sleep(max(0, 24 - d / 10**9))
# Pull random batches of posts that still carry an old-style Inkbunny source.
search_params = {"tags": "source:https://inkbunny.net/submissionview.php* order:random"}
while True:
    response = s.get("https://e621.net/posts.json", params=search_params)
    if not response.ok:
        raise ValueError(f"{response.status_code} while retrieving posts")
    print("------------------------------------------------------------")
    # Brief pause between the search request and the batch of edits.
    time.sleep(0.5)
    for post in response.json()["posts"]:
        replace_inkbunny(post["id"], post["sources"])
I would really appreciate it if other people ran this script on their own machines, so that old-style Inkbunny URLs get normalised faster. As of this post there are 55725 posts to process, but I can only process 150 posts per hour, so I would need around 15.5 full days to normalise all the URLs if I were alone in this endeavour.