Mirror of https://github.com/OMGeeky/json-summary.git (synced 2025-12-26 17:02:28 +01:00)
Enhance JSON file reading and processing
- Update read_json_file to return a list of objects, handling both single and multiple JSON objects.
- Modify merge_objects to include total_files in the statistics.
- Adjust print_summary to display the number of files processed alongside total objects.
- Refactor the JSON file reading loop to accumulate objects from multiple files.
 main.py | 24 +++++++++++++-----------
@@ -20,17 +20,19 @@ def find_json_files(path: str) -> List[str]:
     return json_files
 
 
-def read_json_file(file_path: str) -> Any:
-    """Read and parse a JSON file."""
+def read_json_file(file_path: str) -> List[Any]:
+    """Read and parse a JSON file, returning a list of objects."""
     try:
         with open(file_path, 'r') as f:
-            return json.load(f)
+            content = json.load(f)
+        # If root is an array, return its elements; otherwise, return content as a single-element list
+        return content if isinstance(content, list) else [content]
     except json.JSONDecodeError as e:
         print(f"Error parsing {file_path}: {e}")
-        return None
+        return []
     except Exception as e:
         print(f"Error reading {file_path}: {e}")
-        return None
+        return []
 
 
 def init_stats_dict() -> Dict:
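
Note on the new contract: read_json_file now always returns a list, so array-rooted and object-rooted files look the same to callers. A minimal illustration of the normalization expression (the sample payloads are made up for this sketch):

import json

# Both payload shapes normalize to a list of objects
for raw in ('[{"a": 1}, {"a": 2}]', '{"a": 3}'):
    content = json.loads(raw)
    objects = content if isinstance(content, list) else [content]
    print(objects)

# Output:
# [{'a': 1}, {'a': 2}]
# [{'a': 3}]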
@@ -115,10 +117,11 @@ def analyze_value(value: Any, stats: Dict, depth: int = 0, max_depth: int = 5) -
                 stats['examples'].add(str(item))
 
 
-def merge_objects(objects: List[Any]) -> Dict:
+def merge_objects(objects: List[Any], files) -> Dict:
     """Merge multiple JSON objects and analyze their structure."""
     stats = init_stats_dict()
     stats['total_objects'] = len(objects)
+    stats['total_files'] = len(files)
 
     for obj in objects:
         if obj is not None:
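
The new files parameter is consulted only for its length. A reduced sketch of the bookkeeping (init_stats_dict is stubbed down to the two counters this hunk touches; the real function also initializes types, fields, and examples):

from typing import Any, Dict, List

def init_stats_dict() -> Dict:
    # Stub for this sketch; the real init_stats_dict tracks more keys
    return {'total_objects': 0, 'total_files': 0}

def merge_objects(objects: List[Any], files) -> Dict:
    stats = init_stats_dict()
    stats['total_objects'] = len(objects)
    stats['total_files'] = len(files)  # new in this commit
    return stats

print(merge_objects([{"a": 1}, {"a": 2}, {"a": 3}], ["one.json", "two.json"]))
# {'total_objects': 3, 'total_files': 2}

Since only the count is used, passing len(json_files) directly (or annotating the parameter as files: List[str]) would make the dependency more explicit.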
@@ -189,7 +192,7 @@ def print_field_stats(stats: Dict, prefix: str = "") -> None:
 def print_summary(stats: Dict) -> None:
     """Print a formatted summary of the JSON structure."""
     print("\n=== JSON Structure Summary ===")
-    print(f"\nTotal objects processed: {stats['total_objects']}")
+    print(f"\nTotal objects processed: {stats['total_objects']} (in {stats['total_files']} files)")
     print(f"Root level types found: {', '.join(stats['types'])}")
 
     print("\nField Analysis:")
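
With total_files in the stats dict, the summary line now reports both counters; for example (values illustrative):

stats = {'total_objects': 3, 'total_files': 2}
print(f"\nTotal objects processed: {stats['total_objects']} (in {stats['total_files']} files)")
# Total objects processed: 3 (in 2 files)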
@@ -217,16 +220,15 @@ def main():
     # Read and process all JSON files
     objects = []
     for file_path in json_files:
-        obj = read_json_file(file_path)
-        if obj is not None:
-            objects.append(obj)
+        file_objects = read_json_file(file_path)
+        objects.extend(file_objects)
 
     if not objects:
         print("No valid JSON objects found in the specified files.")
         return
 
     # Analyze and print summary
-    stats = merge_objects(objects)
+    stats = merge_objects(objects, json_files)
     print_summary(stats)
 
 
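
Because read_json_file now signals failure with an empty list instead of None, the caller can drop its None guard: extending with [] is a no-op, so unreadable files simply contribute nothing. A short illustration (sample data made up):

objects = []
objects.extend([])                     # failed file: nothing added, no None check needed
objects.extend([{"a": 1}, {"a": 2}])   # array-rooted file contributes each element
print(len(objects))                    # 2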