-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbatch_processor.py
More file actions
219 lines (175 loc) · 8.24 KB
/
batch_processor.py
File metadata and controls
219 lines (175 loc) · 8.24 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
"""
批量处理工具
用于批量处理多个文件的数据质量检测
"""
import json
import logging
import os
import sys
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from typing import List, Dict, Any

from data_quality_checker import DataQualityChecker
class BatchProcessor:
    """Batch runner for data-quality checks across many files.

    Wraps a DataQualityChecker and fans file processing out over a thread
    pool, writing one JSON report per input file plus an aggregate batch
    report into the output directory.
    """

    def __init__(self, config_path: str = "config.yaml", max_workers: int = 4):
        """
        Args:
            config_path: Path to the checker's configuration file.
            max_workers: Thread-pool size used by process_files().
        """
        self.checker = DataQualityChecker(config_path)
        self.max_workers = max_workers
        self.logger = logging.getLogger(__name__)

    def process_files(self, file_paths: List[str], output_dir: str = "output") -> Dict[str, Any]:
        """Process every file in *file_paths* concurrently.

        Args:
            file_paths: Paths of the text files to check.
            output_dir: Directory for per-file reports and the batch report;
                created (including parents) if missing.

        Returns:
            Dict with "total_files", "processed_files", "failed_files",
            a per-rule "summary", and per-file "details". Files that raised
            get an ``{"error": ...}`` entry in "details" instead of results.
        """
        output_path = Path(output_dir)
        # parents=True: a nested output path (e.g. "runs/today") must not fail.
        output_path.mkdir(parents=True, exist_ok=True)

        results = {
            "total_files": len(file_paths),
            "processed_files": 0,
            "failed_files": 0,
            "summary": {},
            "details": {}
        }

        with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            # Submit one task per file.
            future_to_file = {
                executor.submit(self._process_single_file, file_path, output_path): file_path
                for file_path in file_paths
            }
            # Collect results as they complete. All mutation of `results`
            # happens here on the calling thread, so no locking is needed.
            for future in as_completed(future_to_file):
                file_path = future_to_file[future]
                try:
                    file_result = future.result()
                    results["details"][file_path] = file_result
                    results["processed_files"] += 1
                    self._update_summary(results["summary"], file_result)
                except Exception as e:
                    self.logger.error(f"处理文件 {file_path} 失败: {e}")
                    results["failed_files"] += 1
                    results["details"][file_path] = {"error": str(e)}

        # Persist the aggregate batch report.
        report_path = output_path / "batch_report.json"
        with open(report_path, 'w', encoding='utf-8') as f:
            json.dump(results, f, ensure_ascii=False, indent=2)
        self.logger.info(f"批量处理完成,报告保存到: {report_path}")
        return results

    def _process_single_file(self, file_path: str, output_dir: Path) -> Dict[str, Any]:
        """Check one file, write its JSON report, and save cleaned text if any.

        Returns:
            Per-file result dict ("file_path", "file_size",
            "detection_results", and "cleaned_file" when cleaning occurred).

        Raises:
            Exception: wrapping any error from reading or checking the file,
                with the original exception chained as the cause.
        """
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                text = f.read()

            detection_results = self.checker.check_text_quality(text)

            file_name = Path(file_path).stem
            report_path = output_dir / f"{file_name}_report.json"
            file_result = {
                "file_path": file_path,
                "file_size": len(text),  # character count, not bytes on disk
                "detection_results": {}
            }

            # Flatten each rule's result into a JSON-serializable record.
            for rule_name, result in detection_results.items():
                file_result["detection_results"][rule_name] = {
                    "passed": result.passed,
                    "issues_count": len(result.issues),
                    "issues": result.issues,
                    "suggestions": result.suggestions,
                    "has_cleaned_text": result.cleaned_text is not None
                }

            # Save the per-file report.
            with open(report_path, 'w', encoding='utf-8') as f:
                json.dump(file_result, f, ensure_ascii=False, indent=2)

            # NOTE(review): when several rules produce cleaned_text, only the
            # last one (in dict order) is kept — confirm whether rules are
            # expected to clean cumulatively.
            cleaned_text = text
            has_changes = False
            for result in detection_results.values():
                if result.cleaned_text:
                    cleaned_text = result.cleaned_text
                    has_changes = True

            if has_changes:
                cleaned_path = output_dir / f"{file_name}_cleaned{Path(file_path).suffix}"
                with open(cleaned_path, 'w', encoding='utf-8') as f:
                    f.write(cleaned_text)
                file_result["cleaned_file"] = str(cleaned_path)

            return file_result
        except Exception as e:
            # Chain the cause so the original traceback stays visible upstream.
            raise Exception(f"处理文件 {file_path} 时发生错误: {e}") from e

    def _update_summary(self, summary: Dict[str, Any], file_result: Dict[str, Any]):
        """Fold one file's per-rule results into the running batch summary (in place)."""
        for rule_name, rule_result in file_result["detection_results"].items():
            if rule_name not in summary:
                summary[rule_name] = {
                    "total_files": 0,
                    "passed_files": 0,
                    "failed_files": 0,
                    "total_issues": 0
                }
            summary[rule_name]["total_files"] += 1
            if rule_result["passed"]:
                summary[rule_name]["passed_files"] += 1
            else:
                summary[rule_name]["failed_files"] += 1
            summary[rule_name]["total_issues"] += rule_result["issues_count"]

    def process_directory(self, directory: str, pattern: str = "*.txt",
                          output_dir: str = "output", recursive: bool = True) -> Dict[str, Any]:
        """Run process_files over every file in *directory* matching *pattern*.

        Args:
            directory: Root directory to scan.
            pattern: Glob pattern for files to include.
            output_dir: Passed through to process_files.
            recursive: Scan subdirectories (rglob) when True.

        Returns:
            The batch-results dict from process_files.
        """
        dir_path = Path(directory)
        if recursive:
            file_paths = list(dir_path.rglob(pattern))
        else:
            file_paths = list(dir_path.glob(pattern))
        file_paths = [str(p) for p in file_paths if p.is_file()]
        self.logger.info(f"找到 {len(file_paths)} 个文件进行处理")
        return self.process_files(file_paths, output_dir)
def main():
    """CLI entry point for batch data-quality checking.

    Parses arguments, applies command-line rule overrides, then processes
    either a single file or a directory tree.

    Returns:
        0 on success, 1 when the input path does not exist or any
        unhandled error occurs.
    """
    import argparse

    parser = argparse.ArgumentParser(description='批量数据质量检测工具')
    parser.add_argument('input', help='输入目录或文件列表')
    parser.add_argument('-c', '--config', default='config.yaml', help='配置文件路径')
    parser.add_argument('-o', '--output', default='output', help='输出目录')
    parser.add_argument('-p', '--pattern', default='*.txt', help='文件匹配模式')
    parser.add_argument('-w', '--workers', type=int, default=4, help='并发处理数')
    parser.add_argument('-r', '--recursive', action='store_true', help='递归处理子目录')

    # Pull in the shared rule-configuration CLI flags.
    from check_quality import add_rule_arguments
    add_rule_arguments(parser)

    args = parser.parse_args()

    # Configure logging for the whole run.
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s'
    )

    try:
        processor = BatchProcessor(args.config, args.workers)

        # Apply any rule overrides given on the command line.
        from check_quality import apply_command_line_overrides, merge_config
        config_overrides = apply_command_line_overrides(args)
        if config_overrides:
            processor.checker.config = merge_config(processor.checker.config, config_overrides)
            print(f"应用了 {len(config_overrides)} 个配置覆盖")

        input_path = Path(args.input)
        if input_path.is_dir():
            results = processor.process_directory(
                args.input, args.pattern, args.output, args.recursive
            )
        elif input_path.is_file():
            # Single-file mode.
            results = processor.process_files([args.input], args.output)
        else:
            print(f"错误: 输入路径 {args.input} 不存在")
            return 1

        # Print a short summary to stdout.
        print("\n=== 批量处理结果 ===")
        print(f"总文件数: {results['total_files']}")
        print(f"成功处理: {results['processed_files']}")
        print(f"处理失败: {results['failed_files']}")
        if results['summary']:
            print("\n=== 规则统计 ===")
            for rule_name, stats in results['summary'].items():
                print(f"{rule_name}: {stats['passed_files']}/{stats['total_files']} 通过")
        return 0
    except Exception as e:
        # CLI boundary: report the error and exit non-zero, no traceback.
        print(f"错误: {e}")
        return 1
if __name__ == "__main__":
    # sys.exit is the supported way to set the process exit code; the
    # builtin exit() is a site.py convenience meant for interactive use
    # and may be absent when Python runs without site initialization.
    sys.exit(main())