// NOTE(review): The lines below are GitHub web-UI residue (file-listing header and
// Unicode warnings) accidentally captured together with the source — they are not
// C# and break compilation. Preserved here as comments; safe to delete outright.
// Files
// gitcommiteditor/Services/HealthReportGenerator.cs
//
// 762 lines
// 33 KiB
// C#
// Raw Blame History
//
// This file contains invisible Unicode characters
// This file contains invisible Unicode characters that are indistinguishable to humans but may be processed differently by a computer. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
// This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
using System.Text;
using System.Text.Json;
using MarketAlly.GitCommitEditor.Models.HistoryHealth;
using MarketAlly.GitCommitEditor.Resources;
namespace MarketAlly.GitCommitEditor.Services;
/// <summary>
/// Generates health reports and calculates scores.
/// </summary>
public sealed class HealthReportGenerator : IHealthReportGenerator
{
    // Cached serializer options: allocating a new JsonSerializerOptions per call
    // defeats System.Text.Json's metadata caching (CA1869).
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly HealthScoringWeights _weights;

    /// <summary>
    /// Creates a generator using the supplied scoring weights, or
    /// <c>HealthScoringWeights.Default</c> when none are given.
    /// </summary>
    public HealthReportGenerator(HealthScoringWeights? weights = null)
    {
        _weights = weights ?? HealthScoringWeights.Default;
    }

    /// <summary>
    /// Builds the full health report — score, detected issues, recommendations and
    /// cleanup suggestions — from a completed history analysis.
    /// </summary>
    public HistoryHealthReport GenerateReport(HistoryHealthAnalysis analysis)
    {
        var score = CalculateScore(analysis);
        var issues = DetectIssues(analysis);
        var recommendations = GenerateRecommendations(issues, analysis);
        var cleanup = GenerateCleanupSuggestions(issues, analysis);
        return new HistoryHealthReport
        {
            RepoId = analysis.RepoPath,
            RepoName = analysis.RepoName,
            RepoPath = analysis.RepoPath,
            CurrentBranch = analysis.CurrentBranch,
            CommitsAnalyzed = analysis.CommitsAnalyzed,
            Score = score,
            DuplicateMetrics = analysis.Duplicates,
            MergeMetrics = analysis.MergeMetrics,
            BranchMetrics = analysis.BranchMetrics,
            MessageDistribution = analysis.MessageDistribution,
            AuthorshipMetrics = analysis.AuthorshipMetrics,
            Issues = issues,
            Recommendations = recommendations,
            CleanupSuggestions = cleanup
        };
    }

    /// <summary>
    /// Computes the weighted overall score (clamped to 0–100) plus the five
    /// per-component scores it is derived from.
    /// </summary>
    private HealthScore CalculateScore(HistoryHealthAnalysis analysis)
    {
        var duplicateScore = CalculateDuplicateScore(analysis.Duplicates);
        var mergeScore = CalculateMergeScore(analysis.MergeMetrics);
        var branchScore = CalculateBranchScore(analysis.BranchMetrics);
        var messageScore = CalculateMessageScore(analysis.MessageDistribution);
        var authorshipScore = CalculateAuthorshipScore(analysis.AuthorshipMetrics);
        var overallScore = (int)Math.Round(
            duplicateScore * _weights.DuplicateWeight +
            mergeScore * _weights.MergeWeight +
            branchScore * _weights.BranchWeight +
            messageScore * _weights.MessageWeight +
            authorshipScore * _weights.AuthorshipWeight
        );
        overallScore = Math.Clamp(overallScore, 0, 100);
        return new HealthScore
        {
            OverallScore = overallScore,
            Grade = HealthGradeExtensions.FromScore(overallScore),
            ComponentScores = new ComponentScores
            {
                DuplicateScore = duplicateScore,
                MergeScore = mergeScore,
                BranchScore = branchScore,
                MessageScore = messageScore,
                AuthorshipScore = authorshipScore
            }
        };
    }

    /// <summary>
    /// Scores duplicate-commit hygiene: 100 when no duplicate groups exist,
    /// otherwise the duplicate ratio plus a flat 5-point penalty per exact
    /// duplicate, floored at 0.
    /// </summary>
    private int CalculateDuplicateScore(DuplicateCommitMetrics metrics)
    {
        if (metrics.TotalDuplicateGroups == 0)
            return 100;
        var duplicateRatio = metrics.DuplicateRatio;
        var score = 100 - (int)(duplicateRatio * 10);
        var exactPenalty = metrics.ExactDuplicates * 5;
        return Math.Max(0, score - exactPenalty);
    }

    /// <summary>
    /// Scores merge hygiene from the merge ratio (stepped bands) with an extra
    /// penalty of 3 points per merge-fix commit, capped at 20.
    /// </summary>
    private int CalculateMergeScore(MergeCommitMetrics metrics)
    {
        var mergeRatio = metrics.MergeRatio;
        var baseScore = mergeRatio switch
        {
            <= 10 => 100,
            <= 25 => 90,
            <= 35 => 75,
            <= 50 => 50,
            _ => 25
        };
        // Additional penalty for merge fix commits
        var fixPenalty = Math.Min(20, metrics.MergeFixCommits * 3);
        return Math.Max(0, baseScore - fixPenalty);
    }

    /// <summary>
    /// Scores branch structure from the detected topology, penalised by stale
    /// branches (max −20) and cross-merges (max −25).
    /// </summary>
    private int CalculateBranchScore(BranchComplexityMetrics metrics)
    {
        var baseScore = metrics.Topology switch
        {
            BranchTopologyType.Linear => 100,
            BranchTopologyType.Balanced => 90,
            BranchTopologyType.GitFlow => 75,
            BranchTopologyType.Tangled => 50,
            BranchTopologyType.Spaghetti => 25,
            _ => 50
        };
        var stalePenalty = Math.Min(20, metrics.StaleBranches * 2);
        var crossMergePenalty = Math.Min(25, metrics.CrossMerges * 2);
        return Math.Max(0, baseScore - stalePenalty - crossMergePenalty);
    }

    /// <summary>
    /// Scores message quality: the average message score minus up to 30 points
    /// proportional to the share of poor-quality commits. Empty history scores 100.
    /// </summary>
    private int CalculateMessageScore(MessageQualityDistribution distribution)
    {
        if (distribution.TotalCommits == 0)
            return 100;
        var avgScore = (int)distribution.AverageScore;
        var poorRatio = (double)distribution.Poor / distribution.TotalCommits;
        var poorPenalty = (int)(poorRatio * 30);
        return Math.Max(0, avgScore - poorPenalty);
    }

    /// <summary>
    /// Scores authorship metadata: 100 minus the combined percentage of commits
    /// with missing or invalid author emails. Empty history scores 100.
    /// </summary>
    private int CalculateAuthorshipScore(AuthorshipMetrics metrics)
    {
        if (metrics.TotalCommits == 0)
            return 100;
        var missingEmailRatio = (double)metrics.MissingEmailCount / metrics.TotalCommits;
        var invalidEmailRatio = (double)metrics.InvalidEmailCount / metrics.TotalCommits;
        var score = 100 - (int)((missingEmailRatio + invalidEmailRatio) * 100);
        return Math.Max(0, score);
    }

    /// <summary>
    /// Translates the analysis metrics into a severity-ordered list of discrete
    /// issues (duplicates, merges, branches, message quality).
    /// </summary>
    private List<HealthIssue> DetectIssues(HistoryHealthAnalysis analysis)
    {
        var issues = new List<HealthIssue>();

        // Duplicate issues - separate ExactTree (true duplicates) from ExactMessage (just same message)
        var exactTreeGroups = analysis.Duplicates.DuplicateGroups
            .Where(g => g.Type == DuplicateType.ExactTree)
            .ToList();
        var exactMessageGroups = analysis.Duplicates.DuplicateGroups
            .Where(g => g.Type == DuplicateType.ExactMessage)
            .ToList();

        // ExactTree duplicates - these are TRUE duplicates that can be safely squashed
        if (exactTreeGroups.Count > 0)
        {
            // Each group keeps one "original"; every extra instance is redundant.
            var exactTreeInstances = exactTreeGroups.Sum(g => g.InstanceCount - 1);
            var severity = exactTreeInstances > 5
                ? HealthIssueSeverity.Error
                : exactTreeGroups.Count > 2
                    ? HealthIssueSeverity.Warning
                    : HealthIssueSeverity.Info;
            issues.Add(new HealthIssue
            {
                Code = "DUPLICATE_COMMITS",
                Category = "Duplicates",
                Severity = severity,
                Title = Str.Report_DuplicateContent,
                Description = Str.Report_DuplicateContentDesc(exactTreeGroups.Count, exactTreeInstances),
                ImpactScore = exactTreeInstances * 5 + exactTreeGroups.Count * 2,
                AffectedCommits = exactTreeGroups
                    .SelectMany(g => g.CommitHashes)
                    .Take(20)
                    .ToList()
            });
        }

        // ExactMessage duplicates - these have same message but DIFFERENT code, DO NOT squash
        if (exactMessageGroups.Count > 0)
        {
            var messageInstances = exactMessageGroups.Sum(g => g.InstanceCount);
            issues.Add(new HealthIssue
            {
                Code = "SIMILAR_MESSAGES",
                Category = "Messages",
                Severity = HealthIssueSeverity.Info,
                Title = Str.Report_DuplicateMessages,
                Description = Str.Report_DuplicateMessagesDesc(exactMessageGroups.Count, messageInstances),
                ImpactScore = exactMessageGroups.Count, // Low impact - just informational
                AffectedCommits = exactMessageGroups
                    .SelectMany(g => g.CommitHashes)
                    .Take(20)
                    .ToList()
            });
        }

        // Merge issues: ratio above 35 is a warning, above 50 an error.
        var mergeRatio = analysis.MergeMetrics.MergeRatio;
        if (mergeRatio > 35)
        {
            var severity = mergeRatio > 50
                ? HealthIssueSeverity.Error
                : HealthIssueSeverity.Warning;
            issues.Add(new HealthIssue
            {
                Code = mergeRatio > 50 ? "EXCESSIVE_MERGES" : "HIGH_MERGE_RATIO",
                Category = "Merges",
                Severity = severity,
                Title = mergeRatio > 50 ? Str.Report_ExcessiveMerges : Str.Report_HighMergeRatio,
                Description = Str.Report_MergeRatioDesc(mergeRatio, analysis.MergeMetrics.TotalMerges, analysis.MergeMetrics.TotalCommits),
                ImpactScore = (mergeRatio - 25) / 2
            });
        }

        // Merge fix commits
        if (analysis.MergeMetrics.MergeFixCommits > 0)
        {
            issues.Add(new HealthIssue
            {
                Code = "MERGE_FIX_COMMITS",
                Category = "Merges",
                Severity = HealthIssueSeverity.Warning,
                Title = Str.Report_MergeFixCommits,
                Description = Str.Report_MergeFixDesc(analysis.MergeMetrics.MergeFixCommits),
                ImpactScore = analysis.MergeMetrics.MergeFixCommits * 3,
                AffectedCommits = analysis.MergeMetrics.MergeFixCommitHashes.Take(10).ToList()
            });
        }

        // Branch complexity (Tangled or worse)
        if (analysis.BranchMetrics.Topology >= BranchTopologyType.Tangled)
        {
            issues.Add(new HealthIssue
            {
                Code = "TANGLED_BRANCHES",
                Category = "Branches",
                Severity = analysis.BranchMetrics.Topology == BranchTopologyType.Spaghetti
                    ? HealthIssueSeverity.Error
                    : HealthIssueSeverity.Warning,
                Title = Str.Report_CrossMerges,
                Description = Str.Report_CrossMergesDesc(analysis.BranchMetrics.CrossMerges),
                ImpactScore = analysis.BranchMetrics.CrossMerges * 2
            });
        }

        // Stale branches (informational only)
        if (analysis.BranchMetrics.StaleBranches > 3)
        {
            issues.Add(new HealthIssue
            {
                Code = "STALE_BRANCHES",
                Category = "Branches",
                Severity = HealthIssueSeverity.Info,
                Title = Str.Report_StaleBranches,
                Description = Str.Report_StaleBranchesDesc(analysis.BranchMetrics.StaleBranches),
                ImpactScore = analysis.BranchMetrics.StaleBranches
            });
        }

        // Message quality: low overall average takes precedence over the
        // "many poor messages" check (mutually exclusive branches).
        if (analysis.MessageDistribution.AverageScore < 50)
        {
            issues.Add(new HealthIssue
            {
                Code = "LOW_MESSAGE_QUALITY",
                Category = "Messages",
                Severity = HealthIssueSeverity.Error,
                Title = "Low average message quality",
                Description = $"Average commit message score is {analysis.MessageDistribution.AverageScore:F0}/100. " +
                              $"{analysis.MessageDistribution.Poor} commits have poor quality messages.",
                ImpactScore = (int)(50 - analysis.MessageDistribution.AverageScore),
                AffectedCommits = analysis.MessageDistribution.PoorCommitHashes.Take(20).ToList()
            });
        }
        else if (analysis.MessageDistribution.Poor > analysis.MessageDistribution.TotalCommits * 0.3)
        {
            issues.Add(new HealthIssue
            {
                Code = "MANY_POOR_MESSAGES",
                Category = "Messages",
                Severity = HealthIssueSeverity.Warning,
                Title = "Many poor quality messages",
                Description = $"{analysis.MessageDistribution.Poor} commits ({analysis.MessageDistribution.Poor * 100 / Math.Max(1, analysis.MessageDistribution.TotalCommits)}%) " +
                              "have poor quality messages (score < 50).",
                ImpactScore = analysis.MessageDistribution.Poor / 2,
                AffectedCommits = analysis.MessageDistribution.PoorCommitHashes.Take(20).ToList()
            });
        }

        return issues.OrderByDescending(i => i.Severity).ThenByDescending(i => i.ImpactScore).ToList();
    }

    /// <summary>
    /// Maps every Warning-or-worse issue to an actionable recommendation,
    /// ordered by priority. Issue codes without a mapping are skipped.
    /// </summary>
    private List<HealthRecommendation> GenerateRecommendations(
        List<HealthIssue> issues,
        HistoryHealthAnalysis analysis)
    {
        var recommendations = new List<HealthRecommendation>();
        foreach (var issue in issues.Where(i => i.Severity >= HealthIssueSeverity.Warning))
        {
            // ExpectedScoreImprovement should match the issue's ImpactScore
            var rec = issue.Code switch
            {
                "DUPLICATE_COMMITS" => new HealthRecommendation
                {
                    Category = "Duplicates",
                    Title = "Squash duplicate commits",
                    Description = "Remove duplicate commits to clean up history",
                    Action = "Use interactive rebase to squash or drop duplicate commits",
                    Rationale = "Duplicates make history harder to understand and can cause merge conflicts",
                    PriorityScore = 80,
                    Effort = EstimatedEffort.Medium,
                    ExpectedScoreImprovement = issue.ImpactScore
                },
                "EXCESSIVE_MERGES" or "HIGH_MERGE_RATIO" => new HealthRecommendation
                {
                    Category = "Workflow",
                    Title = "Switch to rebase workflow",
                    Description = "Use rebase instead of merge for feature branches",
                    Action = "Configure git to use rebase by default: git config pull.rebase true",
                    Rationale = "Linear history is easier to understand and bisect",
                    PriorityScore = 70,
                    Effort = EstimatedEffort.Low,
                    ExpectedScoreImprovement = issue.ImpactScore
                },
                "MERGE_FIX_COMMITS" => new HealthRecommendation
                {
                    Category = "Merges",
                    Title = "Consolidate merge fix commits",
                    Description = "Squash fix commits into their parent merge",
                    Action = "Use interactive rebase to combine fix commits with merges",
                    Rationale = "Fix commits indicate problematic merges that should be cleaned up",
                    PriorityScore = 60,
                    Effort = EstimatedEffort.Medium,
                    ExpectedScoreImprovement = issue.ImpactScore
                },
                "TANGLED_BRANCHES" => new HealthRecommendation
                {
                    Category = "Branches",
                    Title = "Linearize branch structure",
                    Description = "Rebase feature branches onto main instead of cross-merging",
                    Action = "For future work: always branch from and merge to main only",
                    Rationale = "Cross-merges create complex dependencies and merge conflicts",
                    PriorityScore = 65,
                    Effort = EstimatedEffort.High,
                    ExpectedScoreImprovement = issue.ImpactScore
                },
                "STALE_BRANCHES" => new HealthRecommendation
                {
                    Category = "Branches",
                    Title = "Archive stale branches",
                    Description = "Delete or tag old branches that are no longer needed",
                    Action = "git branch -d <branch> for merged branches, or create archive tags",
                    Rationale = "Stale branches clutter the repository and can cause confusion",
                    PriorityScore = 30,
                    Effort = EstimatedEffort.Minimal,
                    ExpectedScoreImprovement = issue.ImpactScore
                },
                "LOW_MESSAGE_QUALITY" or "MANY_POOR_MESSAGES" => new HealthRecommendation
                {
                    Category = "Messages",
                    Title = "Rewrite poor commit messages",
                    Description = "Use AI to generate better commit messages",
                    Action = "Use GitCleaner's AI suggestion feature to reword commits",
                    Rationale = "Good commit messages are essential for maintainability",
                    PriorityScore = 75,
                    Effort = EstimatedEffort.Low,
                    ExpectedScoreImprovement = issue.ImpactScore
                },
                _ => null
            };
            if (rec != null)
                recommendations.Add(rec);
        }
        return recommendations.OrderByDescending(r => r.PriorityScore).ToList();
    }

    /// <summary>
    /// Derives concrete cleanup operations, bucketed by how much automation is
    /// safe: fully automated, semi-automated (review required), and manual.
    /// </summary>
    private CleanupSuggestions GenerateCleanupSuggestions(
        List<HealthIssue> issues,
        HistoryHealthAnalysis analysis)
    {
        var automated = new List<CleanupOperation>();
        var semiAutomated = new List<CleanupOperation>();
        var manual = new List<CleanupOperation>();

        // Message rewriting - fully automated with existing feature
        if (analysis.MessageDistribution.Poor > 0)
        {
            // ImpactScore for MANY_POOR_MESSAGES = Poor / 2
            // ImpactScore for LOW_MESSAGE_QUALITY = (50 - AverageScore)
            var messageImpact = Math.Max(
                analysis.MessageDistribution.Poor / 2,
                (int)Math.Max(0, 50 - analysis.MessageDistribution.AverageScore));
            automated.Add(new CleanupOperation
            {
                Id = "rewrite-messages",
                Title = "Rewrite poor commit messages",
                Description = $"Use AI to improve {analysis.MessageDistribution.Poor} commit messages with score < 50",
                Type = CleanupType.RewordMessages,
                AutomationLevel = CleanupAutomationLevel.FullyAutomated,
                Effort = EstimatedEffort.Low,
                Risk = RiskLevel.Low,
                ExpectedScoreImprovement = messageImpact,
                AffectedCommits = analysis.MessageDistribution.PoorCommitHashes.ToList()
            });
        }

        // Duplicate squashing - semi-automated
        // IMPORTANT: Only squash ExactTree duplicates (identical file content)
        // ExactMessage duplicates have the same message but DIFFERENT code changes - squashing would lose code!
        var exactTreeGroups = analysis.Duplicates.DuplicateGroups
            .Where(g => g.Type == DuplicateType.ExactTree)
            .ToList();
        if (exactTreeGroups.Count > 0)
        {
            var exactTreeInstances = exactTreeGroups.Sum(g => g.InstanceCount - 1);
            // Impact score based only on exact tree duplicates (safe to squash)
            var duplicateImpact = exactTreeInstances * 5 + exactTreeGroups.Count * 2;
            semiAutomated.Add(new CleanupOperation
            {
                Id = "squash-duplicates",
                Title = "Squash duplicate commits",
                Description = $"Consolidate {exactTreeGroups.Count} duplicate commit groups " +
                              $"with identical content ({exactTreeInstances} redundant commits)",
                Type = CleanupType.SquashDuplicates,
                AutomationLevel = CleanupAutomationLevel.SemiAutomated,
                Effort = EstimatedEffort.Medium,
                Risk = RiskLevel.Medium,
                ExpectedScoreImprovement = duplicateImpact,
                AffectedCommits = exactTreeGroups
                    .SelectMany(g => g.CommitHashes)
                    .ToList(),
                GitCommand = "git rebase -i HEAD~N # Mark duplicates as 'drop' or 'fixup'"
            });
        }

        // Stale branch cleanup - semi-automated
        if (analysis.BranchMetrics.StaleBranches > 0)
        {
            // ImpactScore for STALE_BRANCHES = StaleBranches
            semiAutomated.Add(new CleanupOperation
            {
                Id = "archive-stale-branches",
                Title = "Archive stale branches",
                Description = $"Delete or tag {analysis.BranchMetrics.StaleBranches} stale branches",
                Type = CleanupType.ArchiveBranches,
                AutomationLevel = CleanupAutomationLevel.SemiAutomated,
                Effort = EstimatedEffort.Minimal,
                Risk = RiskLevel.None,
                ExpectedScoreImprovement = analysis.BranchMetrics.StaleBranches,
                GitCommand = "git branch -d <branch> # For merged branches\n" +
                             "git tag archive/<branch> <branch> && git branch -D <branch> # For archiving"
            });
        }

        // Merge consolidation - semi-automated (we can execute this)
        if (analysis.MergeMetrics.MergeFixCommits > 0)
        {
            // ImpactScore for MERGE_FIX_COMMITS = MergeFixCommits * 3
            semiAutomated.Add(new CleanupOperation
            {
                Id = "consolidate-merges",
                Title = "Consolidate merge fix commits",
                Description = $"Squash {analysis.MergeMetrics.MergeFixCommits} merge fix commits into their parent merges",
                Type = CleanupType.ConsolidateMerges,
                AutomationLevel = CleanupAutomationLevel.SemiAutomated,
                Effort = EstimatedEffort.Medium,
                Risk = RiskLevel.High,
                ExpectedScoreImprovement = analysis.MergeMetrics.MergeFixCommits * 3,
                AffectedCommits = analysis.MergeMetrics.MergeFixCommitHashes.ToList(),
                GitCommand = "git rebase -i <merge-commit>^ # Squash fix commits into merge"
            });
        }

        // History linearization - semi-automated (we can execute this)
        if (analysis.BranchMetrics.Topology >= BranchTopologyType.Tangled)
        {
            semiAutomated.Add(new CleanupOperation
            {
                Id = "linearize-history",
                Title = "Linearize branch structure",
                Description = "Remove merge commits and create a cleaner, linear history",
                Type = CleanupType.RebaseLinearize,
                AutomationLevel = CleanupAutomationLevel.SemiAutomated,
                Effort = EstimatedEffort.Medium,
                Risk = RiskLevel.High,
                ExpectedScoreImprovement = 15,
                GitCommand = "git rebase main # Alternative manual approach"
            });
        }

        return new CleanupSuggestions
        {
            AutomatedOperations = automated,
            SemiAutomatedOperations = semiAutomated,
            ManualOperations = manual
        };
    }

    /// <summary>
    /// Renders the report in the requested format. All rendering is synchronous,
    /// so the result is wrapped with <c>Task.FromResult</c> instead of declaring
    /// the method <c>async</c> with no <c>await</c> (fixes compiler warning CS1998
    /// and avoids an unnecessary async state machine).
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">Unknown <paramref name="format"/>.</exception>
    public Task<string> ExportReportAsync(
        HistoryHealthReport report,
        ReportFormat format,
        CancellationToken ct = default)
    {
        var content = format switch
        {
            ReportFormat.Json => ExportToJson(report),
            ReportFormat.Markdown => ExportToMarkdown(report),
            ReportFormat.Html => ExportToHtml(report),
            ReportFormat.Console => ExportToConsole(report),
            _ => throw new ArgumentOutOfRangeException(nameof(format), format, null)
        };
        return Task.FromResult(content);
    }

    /// <summary>Renders the report and writes it to <paramref name="outputPath"/>.</summary>
    public async Task ExportReportToFileAsync(
        HistoryHealthReport report,
        ReportFormat format,
        string outputPath,
        CancellationToken ct = default)
    {
        var content = await ExportReportAsync(report, format, ct);
        await File.WriteAllTextAsync(outputPath, content, ct);
    }

    /// <summary>Serializes the report as indented camelCase JSON.</summary>
    private string ExportToJson(HistoryHealthReport report)
    {
        // Reuses the cached options; see JsonOptions (CA1869).
        return JsonSerializer.Serialize(report, JsonOptions);
    }

    /// <summary>Renders the report as a human-readable Markdown document.</summary>
    private string ExportToMarkdown(HistoryHealthReport report)
    {
        var sb = new StringBuilder();
        sb.AppendLine("# Git History Health Report");
        sb.AppendLine();
        sb.AppendLine($"**Repository:** {report.RepoName}");
        sb.AppendLine($"**Branch:** {report.CurrentBranch}");
        sb.AppendLine($"**Generated:** {report.GeneratedAt:yyyy-MM-dd HH:mm:ss} UTC");
        sb.AppendLine($"**Commits Analyzed:** {report.CommitsAnalyzed}");
        sb.AppendLine();
        sb.AppendLine("---");
        sb.AppendLine();

        // Overall score
        var gradeIcon = report.Score.Grade.GetIcon();
        sb.AppendLine($"## Overall Health Score: {report.Score.OverallScore}/100 ({report.Score.Grade}) {gradeIcon}");
        sb.AppendLine();
        sb.AppendLine(report.Score.Grade.GetDescription());
        sb.AppendLine();

        // Component scores
        sb.AppendLine("### Component Scores");
        sb.AppendLine();
        sb.AppendLine("| Component | Score | Status |");
        sb.AppendLine("|-----------|-------|--------|");
        sb.AppendLine($"| Messages | {report.Score.ComponentScores.MessageScore}/100 | {GetStatusIcon(report.Score.ComponentScores.MessageScore)} |");
        sb.AppendLine($"| Merges | {report.Score.ComponentScores.MergeScore}/100 | {GetStatusIcon(report.Score.ComponentScores.MergeScore)} |");
        sb.AppendLine($"| Duplicates | {report.Score.ComponentScores.DuplicateScore}/100 | {GetStatusIcon(report.Score.ComponentScores.DuplicateScore)} |");
        sb.AppendLine($"| Branches | {report.Score.ComponentScores.BranchScore}/100 | {GetStatusIcon(report.Score.ComponentScores.BranchScore)} |");
        sb.AppendLine($"| Authorship | {report.Score.ComponentScores.AuthorshipScore}/100 | {GetStatusIcon(report.Score.ComponentScores.AuthorshipScore)} |");
        sb.AppendLine();

        // Issues
        if (report.Issues.Count > 0)
        {
            sb.AppendLine("---");
            sb.AppendLine();
            sb.AppendLine($"## Issues Found ({report.Issues.Count})");
            sb.AppendLine();
            foreach (var issue in report.Issues)
            {
                var severityIcon = issue.Severity switch
                {
                    HealthIssueSeverity.Critical => "🚨",
                    HealthIssueSeverity.Error => "❌",
                    HealthIssueSeverity.Warning => "⚠️",
                    _ => ""
                };
                sb.AppendLine($"### {severityIcon} {issue.Title}");
                sb.AppendLine();
                sb.AppendLine($"**Code:** `{issue.Code}` | **Category:** {issue.Category} | **Impact:** -{issue.ImpactScore} points");
                sb.AppendLine();
                sb.AppendLine(issue.Description);
                sb.AppendLine();
                if (issue.AffectedCommits.Count > 0)
                {
                    // Show at most five hashes inline, then a "more" suffix.
                    sb.AppendLine($"**Affected commits:** `{string.Join("`, `", issue.AffectedCommits.Take(5))}`" +
                                  (issue.AffectedCommits.Count > 5 ? $" and {issue.AffectedCommits.Count - 5} more..." : ""));
                    sb.AppendLine();
                }
            }
        }

        // Recommendations
        if (report.Recommendations.Count > 0)
        {
            sb.AppendLine("---");
            sb.AppendLine();
            sb.AppendLine("## Recommendations");
            sb.AppendLine();
            foreach (var rec in report.Recommendations)
            {
                sb.AppendLine($"### {rec.Title}");
                sb.AppendLine();
                sb.AppendLine($"**Priority:** {rec.PriorityScore}/100 | **Effort:** {rec.Effort} | **Expected Improvement:** +{rec.ExpectedScoreImprovement} points");
                sb.AppendLine();
                sb.AppendLine(rec.Description);
                sb.AppendLine();
                sb.AppendLine($"**Action:** {rec.Action}");
                sb.AppendLine();
            }
        }

        // Cleanup suggestions
        if (report.CleanupSuggestions != null && report.CleanupSuggestions.TotalOperations > 0)
        {
            sb.AppendLine("---");
            sb.AppendLine();
            sb.AppendLine("## Cleanup Operations");
            sb.AppendLine();
            sb.AppendLine($"**Total expected improvement:** +{report.CleanupSuggestions.TotalExpectedImprovement} points");
            sb.AppendLine();
            if (report.CleanupSuggestions.AutomatedOperations.Count > 0)
            {
                sb.AppendLine("### Automated (Safe)");
                foreach (var op in report.CleanupSuggestions.AutomatedOperations)
                {
                    sb.AppendLine($"- **{op.Title}**: {op.Description} (+{op.ExpectedScoreImprovement} points)");
                }
                sb.AppendLine();
            }
            if (report.CleanupSuggestions.SemiAutomatedOperations.Count > 0)
            {
                sb.AppendLine("### Semi-Automated (Review Required)");
                foreach (var op in report.CleanupSuggestions.SemiAutomatedOperations)
                {
                    sb.AppendLine($"- **{op.Title}**: {op.Description} (+{op.ExpectedScoreImprovement} points)");
                    if (!string.IsNullOrEmpty(op.GitCommand))
                    {
                        sb.AppendLine($"  ```bash");
                        sb.AppendLine($"  {op.GitCommand}");
                        sb.AppendLine($"  ```");
                    }
                }
                sb.AppendLine();
            }
            if (report.CleanupSuggestions.ManualOperations.Count > 0)
            {
                sb.AppendLine("### Manual (High Risk)");
                foreach (var op in report.CleanupSuggestions.ManualOperations)
                {
                    sb.AppendLine($"- **{op.Title}**: {op.Description} (+{op.ExpectedScoreImprovement} points)");
                    if (!string.IsNullOrEmpty(op.GitCommand))
                    {
                        sb.AppendLine($"  ```bash");
                        sb.AppendLine($"  {op.GitCommand}");
                        sb.AppendLine($"  ```");
                    }
                }
                sb.AppendLine();
            }
        }
        return sb.ToString();
    }

    /// <summary>
    /// Renders the report as a minimal HTML page wrapping the HTML-encoded
    /// Markdown output in a &lt;pre&gt; element.
    /// </summary>
    private string ExportToHtml(HistoryHealthReport report)
    {
        // Simple HTML wrapper around markdown content
        var markdown = ExportToMarkdown(report);
        return $@"<!DOCTYPE html>
<html>
<head>
<title>Git Health Report - {report.RepoName}</title>
<style>
body {{ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; max-width: 900px; margin: 0 auto; padding: 20px; }}
h1 {{ color: #333; }}
h2 {{ color: #444; border-bottom: 1px solid #ddd; padding-bottom: 8px; }}
h3 {{ color: #555; }}
table {{ border-collapse: collapse; width: 100%; }}
th, td {{ border: 1px solid #ddd; padding: 8px; text-align: left; }}
th {{ background-color: #f5f5f5; }}
code {{ background-color: #f5f5f5; padding: 2px 6px; border-radius: 3px; }}
pre {{ background-color: #f5f5f5; padding: 12px; border-radius: 6px; overflow-x: auto; }}
</style>
</head>
<body>
<pre>{System.Web.HttpUtility.HtmlEncode(markdown)}</pre>
</body>
</html>";
    }

    /// <summary>
    /// Renders a fixed-width box-drawing summary for terminal output.
    /// Rows are padded/truncated to a constant inner width so the box edges
    /// always line up (the previous version left variable-length rows — e.g.
    /// the Issues summary — unpadded, breaking the right border).
    /// </summary>
    private string ExportToConsole(HistoryHealthReport report)
    {
        const int innerWidth = 60; // characters between "║ " and " ║"
        var sb = new StringBuilder();

        // One padded content row; over-long text is truncated to keep alignment.
        void Row(string text)
        {
            if (text.Length > innerWidth)
                text = text[..innerWidth];
            sb.AppendLine($"║ {text,-60} ║");
        }

        void Border(char left, char right) =>
            sb.Append(left).Append('═', innerWidth + 2).Append(right).AppendLine();

        var title = "GIT HISTORY HEALTH REPORT";

        Border('╔', '╗');
        Row(new string(' ', (innerWidth - title.Length) / 2) + title);
        Border('╠', '╣');
        Row($"Repository: {report.RepoName}");
        Row($"Branch:     {report.CurrentBranch}");
        Row($"Commits:    {report.CommitsAnalyzed}");
        Border('╠', '╣');
        Row($"OVERALL SCORE: {report.Score.OverallScore,3}/100   Grade: {report.Score.Grade}");
        Border('╠', '╣');
        Row("Components:");
        Row($"  Messages:   {report.Score.ComponentScores.MessageScore,3}/100 {GetBar(report.Score.ComponentScores.MessageScore)}");
        Row($"  Merges:     {report.Score.ComponentScores.MergeScore,3}/100 {GetBar(report.Score.ComponentScores.MergeScore)}");
        Row($"  Duplicates: {report.Score.ComponentScores.DuplicateScore,3}/100 {GetBar(report.Score.ComponentScores.DuplicateScore)}");
        Row($"  Branches:   {report.Score.ComponentScores.BranchScore,3}/100 {GetBar(report.Score.ComponentScores.BranchScore)}");
        Row($"  Authorship: {report.Score.ComponentScores.AuthorshipScore,3}/100 {GetBar(report.Score.ComponentScores.AuthorshipScore)}");
        Border('╠', '╣');
        Row($"Issues: {report.CriticalIssueCount} critical, {report.ErrorCount} errors, {report.WarningCount} warnings");
        Border('╚', '╝');
        return sb.ToString();
    }

    /// <summary>Maps a 0–100 component score to an icon + label for the Markdown table.</summary>
    private static string GetStatusIcon(int score) => score switch
    {
        >= 90 => "✅ Excellent",
        >= 70 => "👍 Good",
        >= 50 => "⚠️ Fair",
        >= 30 => "❌ Poor",
        _ => "🚨 Critical"
    };

    /// <summary>
    /// Builds a 20-slot progress bar for a 0–100 score. The score is clamped
    /// first so out-of-range values cannot produce a negative repeat count
    /// (ArgumentOutOfRangeException) or an over-wide bar.
    /// </summary>
    private static string GetBar(int score)
    {
        var filled = Math.Clamp(score, 0, 100) / 5;
        var empty = 20 - filled;
        return $"[{new string('█', filled)}{new string('░', empty)}]";
    }
}