Advanced Topics: Decorators, Generators, Context Managers

These advanced Python features make your code more elegant, efficient, and Pythonic: decorators modify function behavior, generators produce sequences lazily without storing them all in memory, and context managers handle resource setup and cleanup automatically.

# ADVANCED PYTHON TOPICS
import time
import contextlib
from functools import wraps

# Script banner
for banner_line in ("ADVANCED PYTHON TOPICS", "=" * 60):
    print(banner_line)

# ========== DECORATORS ==========
# Section header and a short explanation of what decorators are for
for intro_line in (
    "\n1. DECORATORS",
    "-" * 30,
    "Decorators wrap functions to add functionality.",
    "Common uses: Logging, timing, authentication, caching",
):
    print(intro_line)

# Example 1: Basic decorator
def simple_decorator(func):
    """Decorator that prints before and after the wrapped call.

    Fixed relative to the original version: the wrapper now forwards
    arbitrary positional/keyword arguments and propagates the wrapped
    function's return value (the original accepted no arguments and
    discarded the result), and uses functools.wraps so the decorated
    function keeps its name and docstring.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        print("Before function call")
        result = func(*args, **kwargs)
        print("After function call")
        return result
    return wrapper

# Applying the decorator with @-syntax replaces say_hello with the wrapper
@simple_decorator
def say_hello():
    """Print a greeting (wrapped by simple_decorator)."""
    print("Hello!")

print("\nExample 1 - Basic decorator:")
say_hello()  # actually invokes simple_decorator's wrapper

# Example 2: Decorator with arguments
def repeat(n_times):
    """Decorator factory: call the wrapped function n_times times.

    Returns the result of the last call, or None when n_times is 0.
    (The original raised UnboundLocalError for n_times == 0 because
    `result` was only assigned inside the loop.)
    """
    def decorator(func):
        @wraps(func)  # Preserves function metadata
        def wrapper(*args, **kwargs):
            result = None  # defined even when the loop body never runs
            for _ in range(n_times):
                result = func(*args, **kwargs)
            return result
        return wrapper
    return decorator

# The decorator factory is called first; its return value decorates greet
@repeat(n_times=3)
def greet(name):
    """Print a greeting; the repeat decorator calls it three times."""
    print(f"Hello, {name}!")

print("\nExample 2 - Decorator with arguments:")
greet("Alice")  # prints the greeting 3 times

# Example 3: Timing decorator
def timer(func):
    """Decorator that measures and prints function execution time.

    Uses time.perf_counter(), a monotonic high-resolution clock, which is
    the recommended way to measure elapsed intervals (time.time() can jump
    backwards if the system clock is adjusted).
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        start_time = time.perf_counter()
        result = func(*args, **kwargs)
        end_time = time.perf_counter()
        print(f"{func.__name__} took {end_time - start_time:.4f} seconds")
        return result
    return wrapper

# Demo: the timer decorator reports roughly the sleep duration
@timer
def slow_function():
    """A function that takes some time"""
    time.sleep(0.5)
    return "Done"

print("\nExample 3 - Timing decorator:")
result = slow_function()  # timer prints the elapsed time (~0.5 s)
print(f"Result: {result}")

# Example 4: Caching decorator (memoization)
def cache(func):
    """Decorator that memoizes results keyed by the positional-argument tuple.

    Prints a hit/miss message on every call so the demo can show the cache
    working. Arguments must be hashable; keyword arguments are not supported.
    """
    memo = {}  # shared across all calls via the closure

    @wraps(func)
    def wrapper(*args):
        # EAFP: attempt the lookup first; a miss raises KeyError
        try:
            value = memo[args]
        except KeyError:
            print(f"Cache miss for {args}")
            value = memo[args] = func(*args)
            return value
        print(f"Cache hit for {args}")
        return value
    return wrapper

# Demo: the second call with 5 is served from the cache (no sleep)
@cache
def expensive_computation(n):
    """Simulate expensive computation"""
    time.sleep(0.2)
    return n * n

print("\nExample 4 - Caching decorator:")
print(f"First call: {expensive_computation(5)}")  # miss: computes and stores
print(f"Second call (cached): {expensive_computation(5)}")  # hit: returns stored value
print(f"New call: {expensive_computation(10)}")  # miss: new argument

# Example 5: Authorization decorator
def requires_auth(role="user"):
    """Decorator factory that gates a function behind a role check.

    The wrapper consumes the caller's role as an extra first positional
    argument; "admin" always passes, otherwise the role must match exactly.
    On failure it returns a denial string instead of calling the function.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(user_role, *args, **kwargs):
            # Guard clause: reject anyone who is neither the required role nor admin
            if user_role not in (role, "admin"):
                return f"Access denied. Required role: {role}"
            return func(*args, **kwargs)
        return wrapper
    return decorator

# Demo: the first wrapper argument is the caller's role, not a user id
@requires_auth(role="admin")
def delete_user(user_id):
    """Delete a user; only callable by the 'admin' role."""
    return f"User {user_id} deleted"

print("\nExample 5 - Authorization decorator:")
print(f"Admin try: {delete_user('admin', 'user123')}")  # allowed
print(f"User try: {delete_user('user', 'user123')}")  # denied

# ========== GENERATORS ==========
# Section header and a short explanation of what generators are for
for intro_line in (
    "\n\n2. GENERATORS",
    "-" * 30,
    "Generators produce values one at a time, saving memory.",
    "Use 'yield' instead of 'return'",
):
    print(intro_line)
# Example 6: Basic generator
def count_up_to(n):
    """Yield the integers 1 through n, one value per iteration."""
    current = 1
    while current <= n:
        yield current
        current += 1

print("\nExample 6 - Basic generator:")
print("Counting to 5:")
# The for loop drives the generator, pulling one value per iteration
for number in count_up_to(5):
    print(f"  {number}")

# Example 7: Infinite generator
def fibonacci_generator():
    """Yield the Fibonacci sequence (0, 1, 1, 2, ...) indefinitely."""
    prev, curr = 0, 1
    while True:
        yield prev
        # Slide the window forward one step
        prev, curr = curr, prev + curr

print("\nExample 7 - Fibonacci generator (first 10):")
fib = fibonacci_generator()
# next() advances the (infinite) generator one step at a time
for i in range(10):
    print(f"  Fibonacci {i}: {next(fib)}")

# Example 8: Generator expression
print("\nExample 8 - Generator expression:")
# Similar to list comprehension but lazy
squares = (x*x for x in range(10))
print("First 5 squares:")
# Only the first five squares are ever computed; the rest stay pending
for i in range(5):
    print(f"  {next(squares)}")

# Example 9: Pipeline with generators
def read_lines(filename):
    """Yield the lines of a (simulated) file one at a time.

    NOTE: `filename` is ignored — the data is hard-coded so the demo
    needs no real file on disk.
    """
    sample = ["Line 1: Hello", "Line 2: World", "Line 3: Python", "Line 4: Generator"]
    yield from sample

def filter_lines(lines, keyword):
    """Yield only the lines that contain `keyword` as a substring."""
    yield from (candidate for candidate in lines if keyword in candidate)

def process_lines(lines):
    """Yield each input line converted to upper case."""
    yield from (entry.upper() for entry in lines)

print("\nExample 9 - Generator pipeline:")
# Create pipeline
# Nothing runs yet — each stage is lazy until the final loop consumes it
lines = read_lines("sample.txt")
filtered = filter_lines(lines, "Python")
processed = process_lines(filtered)

print("Pipeline output:")
for line in processed:
    print(f"  {line}")

# Example 10: Generator for large datasets
def process_large_dataset(n):
    """Yield n simulated data records without ever materializing them all."""
    for index in range(n):
        yield f"Item {index}: Data point {index*10}"

print("\nExample 10 - Large dataset processing:")
print("Processing 1 million items (in memory efficient way):")
# Only processes one item at a time
for i, item in enumerate(process_large_dataset(5)):  # Using 5 for demo
    print(f"  {item}")
    if i >= 2:  # Stop early for demo
        print("  ... (stopping early for demo)")
        # Breaking out simply abandons the generator; remaining items are never built
        break

# ========== CONTEXT MANAGERS ==========
print("\n\n3. CONTEXT MANAGERS")
print("-" * 30)

print("Context managers handle setup and teardown automatically.")
print("Use 'with' statement for automatic cleanup.")

# Example 11: Basic context manager with class
class TimerContext:
    """Context manager that reports time spent inside a `with` block.

    Uses time.perf_counter() — a monotonic, high-resolution clock — instead
    of time.time(), which can jump if the system clock is adjusted.
    Attributes `start_time` and `end_time` remain available after exit.
    """
    def __enter__(self):
        self.start_time = time.perf_counter()
        print("Starting timer...")
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.end_time = time.perf_counter()
        print(f"Time elapsed: {self.end_time - self.start_time:.4f} seconds")
        # Return False to propagate exceptions, True to suppress
        return False

print("\nExample 11 - Context manager class:")
# __enter__ runs before the indented body, __exit__ runs after it
with TimerContext() as timer:
    time.sleep(0.3)
    print("  Doing some work inside context...")

# Example 12: Context manager for file handling
class SafeFileWriter:
    """Context manager that guarantees a file handle is closed on exit.

    `__enter__` opens the file and hands the raw handle to the caller;
    `__exit__` closes it and reports any exception without suppressing it.
    """

    def __init__(self, filename, mode="w"):
        self.filename = filename
        self.mode = mode
        self.file = None  # opened lazily in __enter__

    def __enter__(self):
        # Open here (not in __init__) so constructing the manager cannot leak a handle
        self.file = open(self.filename, self.mode)
        return self.file

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.file is not None:
            self.file.close()
        if exc_type is not None:
            print(f"Error occurred: {exc_val}")
        print(f"File '{self.filename}' closed safely")
        return False  # never swallow exceptions

print("\nExample 12 - File context manager:")
# __exit__ closes the file whether or not the body raises
try:
    with SafeFileWriter("test.txt") as f:
        f.write("Hello, Context Manager!\n")
        f.write("This is automatically closed.")
        # Simulate error
        # raise ValueError("Test error")
    print("  File written successfully")
except Exception as e:
    print(f"  Error: {e}")

# Example 13: Using contextlib for simpler context managers
@contextlib.contextmanager
def temporary_change(obj, attr, new_value):
    """Temporarily set `obj.attr` to `new_value`, restoring it on exit."""
    previous = getattr(obj, attr)
    setattr(obj, attr, new_value)
    try:
        yield
    finally:
        # Restore the saved value even if the with-body raised
        setattr(obj, attr, previous)

class Settings:
    """Tiny settings holder used to demonstrate temporary_change."""

    def __init__(self):
        # Defaults mirror a typical UI configuration
        self.mode = "light"
        self.language = "en"

print("\nExample 13 - Context manager with contextlib:")
settings = Settings()
print(f"Original mode: {settings.mode}")

# The attribute change is scoped to the with-block and reverted afterwards
with temporary_change(settings, "mode", "dark"):
    print(f"  Inside context: {settings.mode}")

print(f"After context: {settings.mode}")

# Example 14: Database connection context manager
class DatabaseConnection:
    """Simulated database connection — no real I/O, just state and prints."""

    def __init__(self, db_name):
        self.db_name = db_name
        self.connected = False

    def connect(self):
        """Pretend to open the connection; returns self to allow chaining."""
        print(f"Connecting to {self.db_name}...")
        self.connected = True
        return self

    def disconnect(self):
        """Pretend to close the connection; a no-op when already closed."""
        if not self.connected:
            return
        print(f"Disconnecting from {self.db_name}...")
        self.connected = False

    def execute_query(self, query):
        """Pretend to run a query; raises ConnectionError when not connected."""
        if not self.connected:
            raise ConnectionError("Not connected to database")
        print(f"Executing: {query}")
        return f"Results for: {query}"

class DBContextManager:
    """Context manager pairing DatabaseConnection connect/disconnect."""

    def __init__(self, db_name):
        self.db_name = db_name
        self.conn = None  # created on entry

    def __enter__(self):
        # Connect on entry and hand the live connection to the with-body
        self.conn = DatabaseConnection(self.db_name).connect()
        return self.conn

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.conn is not None:
            self.conn.disconnect()
        if exc_type is not None:
            print(f"Database error: {exc_val}")
        return False  # Don't suppress exceptions

print("\nExample 14 - Database context manager:")
# The connection is opened by __enter__ and closed by __exit__
with DBContextManager("mydatabase.db") as db:
    result = db.execute_query("SELECT * FROM users")
    print(f"  {result}")

# Example 15: Combining all concepts
print("\n\n4. COMBINING ALL CONCEPTS")
print("-" * 30)

@timer
def process_with_generators(n):
    """Sum the squares of 0..n-1, streaming values through a generator."""
    numbers = (value for value in range(n))

    @cache
    def square(value):
        return value * value

    # sum() pulls one item at a time; every argument is distinct, so each
    # call is a cache miss — same observable behavior as an append loop
    return sum(square(value) for value in numbers)

class ProcessContext:
    """Demo context manager simulating setup/teardown around processing."""

    def __enter__(self):
        print("Starting processing context...")
        self.start_memory = "Simulated memory check"
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        print("Cleaning up processing context...")
        self.end_memory = "Simulated memory check"
        print("Processing complete")
        return False  # do not suppress exceptions

print("\nCombined example:")
# Decorator (timer), generators, caching and a context manager together
with ProcessContext():
    result = process_with_generators(1000)
    print(f"Result: {result}")

# Example 16: Real-world use case - API rate limiting
# NOTE(review): this import is redundant — `time` is already imported at the top of the file
import time

class RateLimiter:
    """Context manager that enforces a minimum interval between calls.

    Entering the manager sleeps just long enough to keep the call rate at
    or below `calls_per_second`; exiting records the timestamp of the call.

    Fixed relative to the original: intervals are measured with
    time.monotonic() (immune to wall-clock adjustments, unlike time.time()),
    and the "no call yet" state is a None sentinel instead of 0, which is
    only safe with an epoch-based clock.
    """
    def __init__(self, calls_per_second=1):
        self.calls_per_second = calls_per_second
        self.min_interval = 1.0 / calls_per_second
        self.last_call = None  # monotonic timestamp of the previous call, if any

    def __enter__(self):
        if self.last_call is not None:
            elapsed = time.monotonic() - self.last_call

            if elapsed < self.min_interval:
                sleep_time = self.min_interval - elapsed
                print(f"Rate limiting: sleeping for {sleep_time:.2f} seconds")
                time.sleep(sleep_time)

        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Stamp the call on exit so the next __enter__ measures from here
        self.last_call = time.monotonic()
        return False

def make_api_call(endpoint):
    """Simulated API call: logs the endpoint and returns a canned response."""
    reply = f"Response from {endpoint}"
    print(f"Calling API: {endpoint}")
    return reply

print("\nExample 16 - API rate limiter:")
limiter = RateLimiter(calls_per_second=2)  # Max 2 calls per second

# Re-using the same limiter object makes successive calls wait out the interval
for i in range(3):
    with limiter:
        response = make_api_call(f"/api/data/{i}")
        print(f"  {response}")

# Example 17: Generator-based pagination
print("\n\n5. PRACTICAL EXAMPLE: PAGINATION WITH GENERATORS")
print("-" * 30)

def paginate_data(total_items, page_size):
    """Yield page descriptors for `total_items` split into pages of `page_size`.

    Each yielded dict has keys "page" (1-based page number), "start",
    "end" (exclusive), and "data" (the simulated items for that page).
    Yields nothing when total_items is 0.

    Raises:
        ValueError: if page_size is not positive (the original surfaced a
            confusing ZeroDivisionError for page_size == 0).
    """
    if page_size <= 0:
        raise ValueError("page_size must be positive")

    # Ceiling division without floats
    total_pages = (total_items + page_size - 1) // page_size

    for page in range(total_pages):
        start = page * page_size
        end = min(start + page_size, total_items)  # last page may be short
        yield {
            "page": page + 1,
            "start": start,
            "end": end,
            "data": list(range(start, end)),  # Simulated data
        }

print("Pagination example (20 items, 5 per page):")
# The generator yields one page dict at a time
for page_info in paginate_data(20, 5):
    print(f"Page {page_info['page']}: Items {page_info['start']}-{page_info['end']-1}")
    print(f"  Data: {page_info['data']}")