# SuperWiki-1.5 / Scripts / JSONLMerge.py
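"""Fix SuperWiki JSONL shards: run ftfy plus a unicode_escape round-trip on
each record's `htmltext_filtered` field, store the result as `text`, and
rewrite the record. Exposes three Typer commands: `json-lfix` (one file),
`main` (one folder, in parallel), and `fd-processor` (a folder of folders).
"""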
import multiprocessing
import pathlib
import warnings

import ftfy
import orjson
import tqdm
import typer

app = typer.Typer()

@app.command()
def json_lfix(in_file: pathlib.Path, output_file: pathlib.Path):
    with open(in_file, "rb") as fin, open(output_file, "wb") as fout:
        for line in tqdm.tqdm(fin, desc=f"{in_file.name}"):
            try:
                z = orjson.loads(line.rstrip())
            except orjson.JSONDecodeError:
                # Log the unparseable line and skip it.
                print(line)
                continue
            if z.get("htmltext_filtered") and not z.get("text"):
                try:
                    fxed, explained = ftfy.fix_and_explain(z["htmltext_filtered"])
                    with warnings.catch_warnings(record=True) as w:
                        # Record every warning, even ones already emitted once
                        # elsewhere (the default filter would suppress repeats).
                        warnings.simplefilter("always")
                        # Round-trip through unicode_escape to undo literal
                        # escape sequences left behind in the scraped text.
                        z["text"] = fxed.encode("utf-8", errors="replace").decode(
                            "unicode_escape", errors="ignore"
                        )
                        if len(w) > 0 and issubclass(
                            w[-1].category, DeprecationWarning
                        ):
                            # Honestly, I have no idea why this warns.
                            # - Shinon (This happens for Persian Wikipedia, for example.)
                            pass
                            # print(explained, "Escape Fail?")
                    del z["htmltext_filtered"]
                except Exception as e:
                    # Eventually I would like to clean this properly; for now,
                    # a bandaid: skip records that fail to convert.
                    print("Closing eyes", e)
                    continue
            fout.write(orjson.dumps(z))
            fout.write(b"\n")

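# A usage sketch for the single-file command; the file names are hypothetical.
# Typer turns underscores into dashes, so json_lfix is invoked as `json-lfix`:
#
#   python JSONLMerge.py json-lfix enwiki.jsonl enwiki.fixed.jsonl
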
@app.command()
def main(in_folder: pathlib.Path, output_folder: pathlib.Path, processes: int = 64):
    print(in_folder, output_folder, processes)
    output_folder.mkdir(parents=True, exist_ok=True)
    with multiprocessing.Pool(processes) as pool:
        tasks = []
        for file in in_folder.glob("*.jsonl"):
            tasks.append(
                pool.apply_async(json_lfix, args=(file, output_folder / file.name))
            )
        for task in tasks:
            # .get() rather than .wait(): re-raises any exception from the
            # worker instead of silently swallowing it.
            task.get()
        pool.close()
        pool.join()

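# A usage sketch for the folder-level command; folder names are hypothetical.
# Every *.jsonl in in_folder is fixed in parallel and written, same-named,
# into output_folder:
#
#   python JSONLMerge.py main dumps/ dumps_fixed/ --processes 32
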
@app.command()
def fd_processor(in_folder: pathlib.Path, out_folder: pathlib.Path, processes: int = 64):
    for sub_folder in in_folder.iterdir():
        # Skip stray files; only sub-folders hold JSONL shards.
        if not sub_folder.is_dir():
            continue
        (out_folder / sub_folder.stem).mkdir(parents=True, exist_ok=True)
        main(sub_folder, out_folder / sub_folder.stem, processes=processes)

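# A usage sketch, assuming (as the code implies) one level of sub-folders,
# e.g. one per wiki; the layout is mirrored into out_folder:
#
#   python JSONLMerge.py fd-processor dumps_by_wiki/ fixed_by_wiki/
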
if __name__ == "__main__":
    app()