#!/usr/bin/env python
# coding: utf-8

# # Week 2, Day 4 Afternoon Exercises: Time Complexity

# In[ ]:

# RUN THIS BLOCK OF CODE BEFORE RUNNING ANY CELLS

def check_test(actual, expected):
    """Print whether *actual* equals *expected*.

    Used by the exercise cells below for instant feedback: prints a
    congratulations message on a match, otherwise prints both the
    expected and the actual value so the student can see the gap.
    """
    if expected != actual:
        print(f"Function should return the value {expected}, it is returning the value {actual}")
    else:
        # No placeholders here, so a plain string literal is enough (no f-prefix).
        print("Congratulations, the test case passed!")


# ## Unrelated Warmup: Default Parameters
#
# Let's do one practice problem on default parameters, which we covered in lecture earlier in the week.

# In[ ]:

def print_menu(name, meat="beef", dish="stew"):
    """Greet *name* and announce today's special (defaults: beef stew)."""
    print("Hi " + name + ", today's special is: " + meat + " " + dish)


# What will each of the following print? Write it in the comment above the function call.

# (1):
print_menu("Abeba")

# (2):
print_menu("Desta", "lamb", "tibbs")

# (3):
print_menu("Amara", "chicken")

# (4):
print_menu("Aku", dish="fish")

# (5):
print_menu("Girma", meat="lamb")

# (6):
print_menu("Hakim", meat="chicken", dish="pasta")


# # Recap of Time Complexity
#
# Recall that **time complexity** refers to the computational complexity that describes the amount of computer time taken by an algorithm to run, **as a function of the size of the input to the program**. The time complexity is usually expressed in big O notation, which describes the worst-case scenario, and can be used to describe the execution time required or the space used (e.g., in memory or on disk) by an algorithm.
#
#
# ## Understanding Time Complexity and Big-O Notation
#
# Let's take a few common examples to illustrate time complexity:
#
# ### Constant Time: $O(1)$
# An algorithm is said to have a constant time when **it is not dependent on the input data size**. Whether your input data size is 1 or 1000 or 1 million, the time taken to compute the result is constant.
#
# ```
# def get_first_item(items):
#     return items[0]
# ```
#
# In the above Python function, no matter how large the list items is, the function would take the same amount of time to get the first item in the list.
This is thus $O(1)$ time complexity. # # # ### Linear Time: $O(n)$ # An algorithm is said to have a linear time complexity when the running time **increases linearly** with the size of the input data. # # ``` # def linear_search(items, target): # for i in range(len(items)): # if items[i] == target: # return i # return -1 # ``` # # In the function above, in the worst-case scenario (if the target is at the end of the list or not in the list at all), the function will have to look at each item in the list once. Therefore, the time complexity is $O(n)$, where n is the number of items in the list. # # # ### Quadratic Time: $O(n^2)$ # An algorithm is said to have a quadratic time complexity when the execution time is proportional to the square of the input size. A common example of this is the nested for loop. # # ``` # def bubble_sort(items): # n = len(items) # for i in range(n): # for j in range(0, n - i - 1): # if items[j] > items[j + 1]: # items[j], items[j + 1] = items[j + 1], items[j] # # ``` # # In the bubble sort function above, for each item in the list, we're potentially looking at every other item in the list. As such, this gives us a time complexity of $O(n^2)$. # # ### Why Is It Important? # Understanding time complexity is crucial for writing efficient code, especially when dealing with large volumes of data. Poorly optimized code can take significantly longer to run, which can be problematic in real-world applications where performance is important. # # # # Important: Show your answer to a TA when you are done!!!!! # # Question 1 # # Consider the following code: # # ``` # def addNums(n): # answer = 0 # for i in range(n): # answer += i # # return answer # ``` # # In[ ]: # For how many iterations is the loop going to run? # Put your answer below. # In[ ]: # In[ ]: # What is the time complexity of the code in terms of big O notation? # Put your answer below. # In[ ]: # Now let's consider another function addNumList(lst). 
It takes in a list of numbers, and calls the addNums function for each item in the list. # # # # ``` # def addNumList(lst): # for i in lst: # addNums(i) # ``` # # # # In[ ]: # How many times does the addNumList function call addNums()? # [Your answer here] # In[ ]: # Now, how many addition operations are going to be performed in total? # [Your answer here] # In[ ]: # What is the time complexity of the code in terms of big O notation? # [Your answer here] # In[ ]: # # Question 2: Image Filters Revisited # # ### 2.1. Filling in colors # # Remember the filters from Day 7? # # ```python # canvas = SimpleImage.blank(600, 255) # for column in range(canvas.width): # for row in range(canvas.height): # # If we want to make sure this works for any size image: # red = int((column / canvas.width) * 255) # green = 0 # blue = 0 # canvas.set_rgb(column, row, red, green, blue) # canvas.show() # ``` # # Here is that filter that paints a red gradient. We saw this during the lecture. If the width and height of the image is given by `n`, what is its time complexity? # # Your answer here (pick one): # 1. $O(1)$ # 2. $O(\log n)$ # 3. $O(n)$ # 4. $O(n^2)$ # 5. $O(n^3)$ # ### 2.2. Horizontal Line # # What about this program that draws a horizontal line across the canvas? # # ```python # for col in range(10): # canvas.set_rgb(col, 0, 255, 0, 0) # canvas.show(resize_width=200) # ``` # # Your answer here (pick one): # 1. $O(1)$ # 2. $O(\log n)$ # 3. $O(n)$ # 4. $O(n^2)$ # 5. $O(n^3)$ # In[ ]: # # Question 3 # # Give the worst-case big-O time complexity of each of the following python functions that take a list L as input. # # Your answers MUST be selected from $O(1)$, $O(\log n)$, $O(n)$, $O(n \log n)$, $O(n^2)$, and $O(n^3)$. You should assume that functions `constantfun` and `linearfun`, when run on lists of `n` elements, have worst-case time complexity $O(1)$ and $O(n)$, respectively. # # You may assume the `len` function in Python runs in constant time. 
Make sure to justify each of your answers. # # # ```python # def fun1(L): # n = len(L) # constantfun(L) # for i in range(0, n//10): # linearfun(L) # # def fun2(L): # n = len(L) # for i in range(0,20000): # for j in range(0, 20000): # constantfun(L) # constantfun(L) # # def fun3(L): # n = len(L) # for i in range(0,n-3): # constantfun(L) # for j in range(0,n-3): # constantfun(L) # constantfun(L) # # def fun4(L): # n = len(L) # while (n>0): # constantfun(L) # n = n//3 # ``` # # Your answer should be like this: # - `fun1` has big-O time complexity O(?) because... # - `fun2` has big-O time complexity O(?) because... # - `fun3` has big-O time complexity O(?) because... # - `fun4` has big-O time complexity O(?) because... # # # Question 4 # # You are interested in solving a particular problem on a list of `n` numbers. A friend says she has found three possible ways to solve the problem. # # The first solution divides the list into 2 equal sized parts in O(n) time, recursively solves each half, and then combines the solutions with an O(n^2) combine step. # # The second solution does more work up front and takes O(n^2) time to process the list and divide it into two lists of n/2 elements each. Then, four recursive calls are made on these lists of size n/2. The combine step is O(n). # # The third solution uses the following code. You should assume linearfun runs in O(n) time. # # ```python # def third_alg(x): # n = len(x) # if (n <= 1): # return 0 # left = linearfun(x[:n // 2]) # right = linearfun(x[n // 2:]) # #left and right are each lists of size n/2 # if (left[0] <= right[0]): # soln = third_alg(left) # else: # soln = third_alg(right) # return soln # ``` # # For each of the above three proposals analyze their time complexity. Which of the three algorithms would you choose to solve the problem? Explain your answer. 
# In[ ]:

# # Question 5
#
# Consider the following code:
#
# ```python
# def fun(n):
#     if n <= 1:
#         return 1
#     else:
#         return fun(n-1) + fun(n-1)
# ```
#
# What is the time complexity of `fun`? Explain your answer.

# In[ ]:

# # Question 6
#
# Consider the following code:
#
# ```python
# def fun(n):
#     i = 1
#     while i < n:
#         i = i * 2
# ```
#
# What is the time complexity of `fun`? Explain your answer.

# In[ ]:

# # Question 7
#
# Consider the following code:
#
# ```python
# def fun(arr):
#     n = len(arr)
#     result = [0] * n
#     for i in range(n):
#         result[i] = max(arr[:i+1])
#     return result
# ```
#
# What is the time complexity of `fun`? Explain your answer.

# # Question 8
#
# Implement a function called `linear_search` that takes in two parameters: a list of integers and a target integer. The function should return the index of the target integer if it is found in the list, or `-1` if it is not present. Use the linear search algorithm to solve this problem.
#
# For example, given the list `[4, 2, 7, 1, 9, 5]` and the target integer `7` , the function should return `2` since `7` is located at index `2` in the list.

# In[ ]:

def linear_search(array, target):
    """Return the index of *target* in *array*, or -1 if it is absent.

    Scans the list front to back (classic linear search); the worst
    case — target last or missing — examines every element, O(n).
    """
    for index, value in enumerate(array):
        if value == target:
            return index
    return -1


# TEST CASES
check_test(linear_search([1, 2, 3, 4, 5], 3), 2)
check_test(linear_search([1, 2, 3, 4, 5], 6), -1)
check_test(linear_search([1, 2, 3, 4, 5], 1), 0)
check_test(linear_search([1, 2, 3, 4, 5], 5), 4)
check_test(linear_search([1, 2, 3, 4, 5], 4), 3)
check_test(linear_search([4, 2, 7, 1, 9, 5], 7), 2)
check_test(linear_search([1, 2, 3, 4, 5], 5), 4)
check_test(linear_search([4, 2, 7, 1, 9, 5], 3), -1)