#!/usr/bin/env python3
"""Auto-generated by AGI Loop cycle #1049
Task: Write a Python function that tokenizes text, counts word frequencies, and returns top 10 words
Generated: 2026-02-12T18:17:46.301044
"""

import re
from collections import Counter

def tokenize_and_count(text: str, top_n: int = 10) -> list[tuple[str, int]]:
    """Tokenize *text* and return the most frequent words.

    Words are matched case-insensitively (input is lowercased) using the
    regex ``\\b\\w+\\b``, so punctuation is discarded and digits/underscores
    count as word characters.

    Args:
        text: The input text to analyze. May be empty.
        top_n: How many of the most common words to return (default 10,
            preserving the original behavior).

    Returns:
        A list of ``(word, count)`` tuples ordered from most to least
        frequent; fewer than ``top_n`` entries if the text has fewer
        distinct words, and ``[]`` for empty input.
    """
    # Lowercase first so "Python" and "python" count as the same word.
    tokens = re.findall(r'\b\w+\b', text.lower())
    # Counter.most_common handles ties and short inputs gracefully.
    return Counter(tokens).most_common(top_n)

if __name__ == '__main__':
    # Quick demo: count word frequencies in a short sample sentence.
    sample_text = "Python is great. Python is easy. Python is powerful. Python is fun. Python is used in many areas."
    for word, count in tokenize_and_count(sample_text):
        print(f"{word}: {count}")