import json
import os
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Iterator
def remove_duplicate_outputs(input_file_name: str, output_file_name: str, num_threads: int = 10) -> None:
    """
    Remove entries with duplicate "output" fields from a JSON-lines file.

    Reads ``input_file_name`` (one JSON object per line) from this script's
    directory, drops lines that are not valid JSON or whose "output" field is
    missing/empty, keeps only the first occurrence of each distinct "output"
    value, and writes the surviving lines (in input order) to
    ``output_file_name`` in the same directory.

    :param input_file_name: Name of the input JSON-lines file (relative to this script).
    :param output_file_name: Name of the output JSON-lines file (relative to this script).
    :param num_threads: Number of threads to use for the parallel filtering pass (defaults to 10).
    """

    def _filter_chunk(chunk: list[str]) -> list[str]:
        # Keep only lines that parse as JSON and carry a non-empty "output".
        kept = []
        for line in chunk:
            try:
                if json.loads(line).get('output', ''):
                    kept.append(line)
            except json.JSONDecodeError:
                continue
        return kept

    script_dir = os.path.dirname(os.path.abspath(__file__))
    input_file_path = os.path.join(script_dir, input_file_name)
    output_file_path = os.path.join(script_dir, output_file_name)

    with open(input_file_path, 'r', encoding='utf-8') as input_fp:
        lines = input_fp.readlines()

    # Split the work into one contiguous chunk per worker.  (The previous
    # version submitted the FULL line list to every worker, so each thread
    # redundantly re-filtered all lines: num_threads times the work, and
    # num_threads duplicate copies collected in nondeterministic order.)
    filtered_lines: list[str] = []
    if lines:
        num_workers = min(max(1, num_threads), len(lines))
        chunk_size = -(-len(lines) // num_workers)  # ceiling division
        chunks = [lines[i:i + chunk_size] for i in range(0, len(lines), chunk_size)]
        with ThreadPoolExecutor(max_workers=num_workers) as executor:
            # Iterate futures in submission order (not as_completed) so the
            # output preserves the input file's line order deterministically.
            futures = [executor.submit(_filter_chunk, chunk) for chunk in chunks]
            for future in futures:
                filtered_lines.extend(future.result())

    # Second pass: keep only the first line seen for each distinct output value.
    seen_outputs = set()
    with open(output_file_path, 'w', encoding='utf-8') as output_fp:
        for line in filtered_lines:
            try:
                output_value = json.loads(line).get('output', '')
            except json.JSONDecodeError:
                continue
            if output_value not in seen_outputs:
                seen_outputs.add(output_value)
                output_fp.write(line)
def load_and_filter_lines(lines: list[str]) -> list[str]:
    """
    Filter raw JSON-lines, keeping only parseable entries with a non-empty "output".

    :param lines: Raw lines read from the input dataset.
    :return: The subset of lines that are valid JSON whose "output" field is
        present and truthy, in their original order.
    """
    kept: list[str] = []
    for raw in lines:
        try:
            record = json.loads(raw)
        except json.JSONDecodeError:
            # Skip malformed lines rather than aborting the whole pass.
            continue
        if record.get('output', ''):
            kept.append(raw)
    return kept
if __name__ == "__main__":
    # Guard the driver so importing this module does not trigger file I/O.
    # Files are resolved relative to this script's directory (see
    # remove_duplicate_outputs).
    input_file_name = 'Input_File.json'
    output_file_name = 'Output_File.json'

    remove_duplicate_outputs(input_file_name, output_file_name, num_threads=10)