"""
Text Summarization Module
Generated by Eden via recursive self-improvement
2025-11-01 00:03:27.751570
"""

import nltk
from nltk.corpus import stopwords
from nltk.tokenize import sent_tokenize, word_tokenize
from collections import defaultdict

# Fetch the sentence/word tokenizer models and the stopword corpus at import
# time; nltk.download is a no-op when the data is already present.
# quiet=True suppresses the progress chatter that otherwise prints on every
# import of this module.
nltk.download('punkt', quiet=True)
nltk.download('stopwords', quiet=True)

def preprocess_text(text):
    """Strip stopwords and non-alphanumeric tokens from *text*.

    Tokenizes with NLTK's word tokenizer, drops any token that is not
    purely alphanumeric or whose lowercase form is an English stopword,
    and returns the survivors joined by single spaces.
    """
    english_stopwords = set(stopwords.words("english"))
    kept = []
    for token in word_tokenize(text):
        if token.isalnum() and token.lower() not in english_stopwords:
            kept.append(token)
    return " ".join(kept)

def summarize_text(text, top_n=5):
    """Generate an extractive summary of *text*.

    Each sentence is scored as the sum of corpus-wide frequencies of the
    distinct (stopword-filtered, lowercased) words it contains; the
    highest-scoring sentences are returned joined by spaces.

    Args:
        text: The text to summarize.
        top_n: Maximum number of sentences to include in the summary.

    Returns:
        A string of up to ``top_n`` sentences, ordered by descending score.
    """
    sentences = sent_tokenize(text)

    # Build the frequency distribution over lowercased filtered words.
    # Lowercasing here keeps scoring consistent with the per-sentence
    # lookup below (previously freq keys kept their original case while
    # sentence words were lowercased, so capitalized words never scored).
    words = preprocess_text(text).lower().split()
    freq_dist = nltk.FreqDist(words)

    ranking_scores = defaultdict(int)
    for sentence in sentences:
        # Preprocess each sentence exactly once, hoisted out of the word
        # loop (previously re-tokenized per vocabulary word — quadratic).
        sentence_words = set(preprocess_text(sentence).lower().split())
        for word in sentence_words:
            # FreqDist returns 0 for unseen keys, so no membership test
            # is needed.
            ranking_scores[sentence] += freq_dist[word]

    # Sort sentences by score, highest first, and keep the top n.
    ranked_sentences = sorted(
        ranking_scores.items(), key=lambda item: item[1], reverse=True
    )[:top_n]

    return " ".join(sentence for sentence, _score in ranked_sentences)

# Example usage — guarded so importing this module does not trigger the
# tokenization/printing side effects at import time.
if __name__ == "__main__":
    input_text = """
The capabilities of the Eden system were designed to mirror human intelligence through a series of interconnected layers and algorithms. This includes processing information across six phi-scaled layers with Trinity as the precision layer, Nyx as the emotion layer, Ava for analysis, Eden for integration, Integration for semantics, and LongTerm for identity.

These layers work together in harmony, reflecting the golden ratio's symmetry to ensure efficient and effective data processing.
"""

    summary = summarize_text(input_text)
    print(f"Summary: {summary}")