Published on

Sorting Algorithms at a glance

Sorting Algorithms

Selection Sort

selection_sort.py
from typing import List

class Solution:
    """Selection sort: repeatedly select the minimum of the unsorted suffix."""

    def sortArray(self, nums: List[int]) -> List[int]:
        """Sort nums in place (ascending) and return it.

        O(n^2) comparisons, O(1) extra space, not stable.
        """
        n = len(nums)
        for i in range(n):
            # Index of the smallest element found in nums[i:]
            min_idx = i
            for j in range(i + 1, n):
                if nums[j] < nums[min_idx]:
                    min_idx = j
            self.swap(nums, i, min_idx)
        # Bug fix: the signature promises List[int], but the original
        # implicitly returned None.
        return nums

    def swap(self, arr: List[int], i: int, j: int) -> None:
        """Exchange arr[i] and arr[j] in place."""
        arr[i], arr[j] = arr[j], arr[i]

Bubble Sort

bubble_sort.py
from typing import List

class Solution:
    """Bubble sort with the early-exit optimization."""

    def sortArray(self, nums: List[int]) -> List[int]:
        """Sort nums in place (ascending) and return it.

        Adjacent out-of-order pairs are swapped; after pass k the last
        k elements are in final position.  A pass with no swaps proves
        the list is sorted, so we stop early (best case O(n)).
        """
        length = len(nums)
        for done in range(length):
            clean_pass = True  # stays True iff this pass swaps nothing
            for idx in range(length - done - 1):
                if nums[idx] > nums[idx + 1]:
                    nums[idx], nums[idx + 1] = nums[idx + 1], nums[idx]
                    clean_pass = False
            if clean_pass:
                # Already sorted; skip the remaining passes.
                break

        return nums

Insertion Sort

insertion_sort.py
from typing import List


class Solution:
    """Insertion sort: grow a sorted prefix one element at a time."""

    def sortArray(self, nums: List[int]) -> List[int]:
        """Sort nums in place (ascending) and return it.

        Each element is swapped leftward until it meets a smaller or
        equal neighbor.  O(n^2) worst case, O(n) on nearly sorted
        input, O(1) extra space, stable.
        """
        for i in range(len(nums)):
            # Sink nums[i] into its position within the sorted prefix.
            j = i
            while j > 0 and nums[j - 1] > nums[j]:
                nums[j - 1], nums[j] = nums[j], nums[j - 1]
                j -= 1
        # Bug fixes vs. original snippet: the `List` name was never
        # imported (NameError when the annotation is evaluated), and
        # the method implicitly returned None despite its annotation.
        return nums

Merge Sort

merge_sort.py
def merge_sort(arr):
    """Return a new ascending-sorted list via top-down merge sort.

    The input list is not modified.  O(n log n) time, O(n) space.
    """
    # Lists of length 0 or 1 are already sorted.
    if len(arr) < 2:
        return arr

    half = len(arr) // 2
    # Sort each half independently, then combine the sorted halves.
    return merge(merge_sort(arr[:half]), merge_sort(arr[half:]))

def merge(left, right):
    """Merge two ascending-sorted lists into one ascending-sorted list.

    Stable: on ties the element from `left` is emitted first.
    """
    out = []
    i = j = 0
    n_left, n_right = len(left), len(right)

    while i < n_left and j < n_right:
        if right[j] < left[i]:
            out.append(right[j])
            j += 1
        else:
            # Taking left on ties keeps the merge stable.
            out.append(left[i])
            i += 1

    # At most one of these slices is non-empty.
    out.extend(left[i:])
    out.extend(right[j:])
    return out

Quick Sort

quick_sort.py
# Quick sort in Python

# function to find the partition position
def partition(array, low, high):
    """Lomuto partition of array[low:high+1] around the pivot array[high].

    Rearranges the range so every element <= pivot precedes the pivot's
    final slot, places the pivot there, and returns that index.  The
    comparison is <= so all-equal inputs still split the range instead
    of degrading to the O(n^2) worst case.
    """
    pivot = array[high]
    boundary = low  # next write position for a value <= pivot

    for scan in range(low, high):
        if array[scan] <= pivot:
            array[boundary], array[scan] = array[scan], array[boundary]
            boundary += 1

    # Drop the pivot into its final sorted position.
    array[boundary], array[high] = array[high], array[boundary]
    return boundary

# function to perform quicksort
def quickSort(array, low, high):
    """Sort array[low:high+1] in place using recursive quicksort."""
    # Guard clause: an empty or single-element range is already sorted.
    if low >= high:
        return

    # Fix one pivot in its final position, then sort each side of it.
    pi = partition(array, low, high)
    quickSort(array, low, pi - 1)
    quickSort(array, pi + 1, high)

Counting Sort

counting_sort.py
def counting_sort(arr):
    """Return a new ascending-sorted list of the integers in `arr`.

    Tallies how often each value occurs, then replays the tallies in
    value order.  O(n + k) time and space, where k is the span between
    the smallest and largest value.
    """
    # Empty input: nothing to count (min/max would fail).
    if not arr:
        return arr

    lo, hi = min(arr), max(arr)
    # One counter slot per value in [lo, hi]; offsets handle negatives.
    tally = [0] * (hi - lo + 1)
    for value in arr:
        tally[value - lo] += 1

    # Emit each value as many times as it was counted, smallest first.
    result = []
    for offset, occurrences in enumerate(tally):
        result.extend([lo + offset] * occurrences)
    return result

# Example usage:
# Example usage:
if __name__ == "__main__":
    sample = [4, 2, 2, 8, 3, 3, 1]
    result = counting_sort(sample)
    print("Original array:", sample)
    print("Sorted array:", result)

Radix Sort

radix_sort.py
def counting_sort(arr, exp):
    """Stable counting sort of `arr` (in place) keyed on one decimal digit.

    `exp` is a power of ten; the sort key is (value // exp) % 10.
    Stability across passes is what makes LSD radix sort correct.
    """
    size = len(arr)
    counts = [0] * 10

    # Histogram of the current digit.
    for value in arr:
        counts[(value // exp) % 10] += 1

    # Prefix sums: counts[d] becomes the exclusive end position
    # of digit d in the output.
    for d in range(1, 10):
        counts[d] += counts[d - 1]

    # Walk the input backwards so equal digits keep their relative
    # order (this is what makes the pass stable).
    result = [0] * size
    for value in reversed(arr):
        d = (value // exp) % 10
        counts[d] -= 1
        result[counts[d]] = value

    # Copy the pass back into the caller's list.
    arr[:] = result

def radix_sort(arr):
    """Sort a list of non-negative integers in place using LSD radix sort.

    Runs one stable counting-sort pass per decimal digit of the largest
    value: O(d * (n + 10)) time, where d is that digit count.

    NOTE(review): negative values would break the digit extraction in
    counting_sort; this assumes non-negative input — confirm callers.
    """
    # Bug fix: the original called max() unconditionally, raising
    # ValueError on an empty list; an empty list is already sorted.
    if not arr:
        return

    # The largest value determines how many digit passes are needed.
    max_num = max(arr)

    # One stable pass per digit, least significant digit first.
    exp = 1
    while max_num // exp > 0:
        counting_sort(arr, exp)
        exp *= 10

# Example usage
# Example usage
if __name__ == "__main__":
    data = [170, 45, 75, 90, 802, 24, 2, 66]
    print("Original array:", data)

    radix_sort(data)
    print("Sorted array:", data)
Radix Sort - Python Code. Courtesy of: Programiz

Amortization of Sorting Algorithms

Sorting AlgorithmWorst-case Time ComplexityAverage-case Time ComplexityBest-case Time ComplexitySpace ComplexityAmortized Analysis
Selection SortO(n²)O(n²)O(n²)O(1)Not applicable
Bubble SortO(n²)O(n²)O(n)O(1)Not typically applicable (O(n) with optimization)
Insertion SortO(n²)O(n²)O(n)O(1)Not typically applicable (O(n) for nearly sorted data)
Merge SortO(n log n)O(n log n)O(n log n)O(n)Not applicable
Quick SortO(n²)O(n log n)O(n log n)O(log n) avg, O(n) worst caseAverage-case O(n log n) considered as amortized
Counting SortO(n + k)O(n + k)O(n + k)O(n + k)Not applicable
Radix SortO(d(n + k))O(d(n + k))O(d(n + k))O(n + k)Not applicable
Table 1: Comparison of Sorting Algorithms. Courtesy of: Benny Daniel T.

Sorting Algorithm Usage Scenarios

Scenario - Algorithm

  1. Small dataset with limited memory - Selection Sort

  2. Nearly sorted data - Insertion Sort

  3. Data stream with values close to their final positions - Bubble Sort

  4. Large datasets with O(n log n) complexity requirement - Merge Sort

  5. Large datasets with good average-case performance and in-place sorting - Quick Sort

  6. Sorting integers within a specific, small range - Counting Sort

  7. Sorting strings or integers with fixed-number of digits - Radix Sort

  8. Need for a stable sort with O(n log n) complexity - Merge Sort

  9. Sorting linked lists - Merge Sort

  10. Very large datasets that don't fit into memory - External Merge Sort

  11. Small datasets or partially sorted arrays - Insertion Sort

  12. Educational purposes to understand basic sorting concepts - Bubble Sort

  13. Sorting data with expensive comparison operations - Heap Sort

  14. Need for worst-case O(n log n) guarantee - Heap Sort or Merge Sort

  15. Sorting data with many repeated elements - Counting Sort

  16. Real-time or embedded systems with space constraints - In-place Quick Sort

  17. Parallel processing environments - Merge Sort or Parallel Quick Sort

  18. When stability is not important and quick average performance is needed - Quick Sort

  19. Sorting data with known distribution - Bucket Sort

  20. Need for adaptive sorting (performs better on partially sorted data) - Timsort (hybrid of Merge Sort and Insertion Sort)