def power(x, n):
    '''Return x^n, for a float x and non-negative integer n'''
    if n == 0:
        return 1
    
    return x * power(x, n - 1)
#Complexity: O(n), because power() is called n + 1 times (once for each
#exponent from n down to 0), and each call takes the same amount of time.
#T(n): the amount of time it takes power(x, n) to complete (in seconds)
#T(n) is proportional to n <=> for large n, T(n) is approximately c*n for some
#constant c <=> T(n) is O(f(n)) where f(n) = n <=> T(n) is O(n)
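
#A quick sanity check of the O(n) claim (my sketch, not part of the original
#code): count how many calls the recursion makes for a given exponent and
#confirm that the count is n + 1, i.e. that it grows linearly with n.
def count_power_calls(x, n):
    '''Hypothetical helper: return (x**n, number of calls made), using the
    same recursion as power() above.'''
    calls = 0
    def helper(x, n):
        nonlocal calls
        calls += 1
        if n == 0:
            return 1
        return x * helper(x, n - 1)
    return helper(x, n), calls

#For example, count_power_calls(2.0, 5) returns (32.0, 6): 6 calls = 5 + 1.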

################################################################################
def interleave(L1, L2):
    '''Return a new list with the elements of L1 and L2 interleaved:
    [L1[0], L2[0], L1[1], L2[1], ...]. Assumes len(L1) == len(L2).
    
    '''
    if len(L1) == 0:
        return []
    return [L1[0], L2[0]] + interleave(L1[1:], L2[1:])
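
#A small usage sketch (my addition; assumes L1 and L2 have the same length,
#which the implementation above relies on):
def demo_interleave():
    '''Hypothetical demo helper, not part of the original code.'''
    assert interleave([1, 2, 3], ['a', 'b', 'c']) == [1, 'a', 2, 'b', 3, 'c']
    assert interleave([], []) == []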
    


def reverse_list(lis, ind_to_swap = 0):
    '''Reverse lis in place. If ind_to_swap != 0, only the
    lis[ind_to_swap:-ind_to_swap] part of lis is reversed; if ind_to_swap == 0
    (the default), all of lis is reversed.
    
    '''
    ind_to_swap_r = len(lis) - ind_to_swap - 1
    if ind_to_swap >= ind_to_swap_r:
        return
    lis[ind_to_swap], lis[ind_to_swap_r] = lis[ind_to_swap_r], lis[ind_to_swap]
    reverse_list(lis, ind_to_swap + 1)
    
#[0,1,2,3,4,5]
#[5,1,2,3,4,0]
#[5,4,2,3,1,0]
#[5,4,3,2,1,0]

#Complexity: O(n)
#reverse_list() is called about n/2 times, and each call takes the same
#(constant) amount of time. O(n/2) is the same as O(n); we write O(n) because
#it is the simpler expression.
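
#A small usage sketch (my addition): reverse_list works in place and returns
#None, so the list has to be inspected after the call.
def demo_reverse_list():
    '''Hypothetical demo helper, not part of the original code.'''
    lst = [0, 1, 2, 3, 4, 5]
    reverse_list(lst)
    assert lst == [5, 4, 3, 2, 1, 0]
    lst2 = [0, 1, 2, 3, 4]    #odd length: the middle element stays put
    reverse_list(lst2)
    assert lst2 == [4, 3, 2, 1, 0]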


###############################################################################
def zigzag2(lis):
    '''Print lis[n//2] lis[n//2-1] lis[n//2+1] lis[n//2-2] lis[n//2+2] ...
    lis[0] lis[n-1], where n = len(lis) (pattern shown for odd n)
    
    '''
    
    #Idea: just switch the order of the last two lines of zigzag1
    #See lecture, where we used the same trick to change a function that
    #printed a list to a function that printed a list in reverse
    
    if len(lis) == 0:
        print("", end = " ")
    elif len(lis) == 1:
        print(lis[0], end = " ")
    else:
        zigzag2(lis[1:-1])
        print(lis[0], lis[-1], end = " ")
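
#A small usage sketch (my addition) showing the printed order:
def demo_zigzag2():
    '''Hypothetical demo helper, not part of the original code.'''
    zigzag2([1, 2, 3, 4, 5])    #prints: 3 2 4 1 5
    print()
    zigzag2([1, 2, 3, 4])       #prints:  2 3 1 4   (the extra leading space
    print()                     #comes from the empty-list base case)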
###############################################################################
def is_balanced(s):
    '''Return True iff the parentheses in s are balanced.
    
    '''
    #Idea: find the innermost (...) pair, and remove it, then see if the 
    #resultant expression is balanced.
    
    paren_end = s.find(')')                #The first ')' in the string
    if paren_end == -1:
        return '(' not in s
    paren_start = s[:paren_end].rfind('(') #The closest '(' to the left of the
                                           #first ')' in the string
    if paren_start == -1:
        return False
    
    if paren_end < paren_start:
        return False
    
    return is_balanced(s[:paren_start] + s[paren_end + 1:])

#Complexity:
#s.find() may have to go through the entire string, and the slicing and
#concatenation also take time proportional to len(s), so a single call to
#is_balanced takes time c*n for some constant c, where n = len(s).
#Each time is_balanced calls itself, s gets shorter by at least 2 characters.
#So the total runtime, in the worst case, is
#c*n + c*(n-2) + c*(n-4) + ... + 0 = c*(n + (n-2) + (n-4) + ... + 0)
#                                  ~ c*(n/2)*(n/2), which is O(n^2)
#
#(Note that this is an upper bound on the worst-case complexity. If s doesn't
#contain any parentheses, is_balanced is called only once and returns
#immediately.)
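
#A small usage sketch (my addition) tracing how the innermost pair is removed:
#is_balanced("a(b(c)d)e") -> is_balanced("a(bd)e") -> is_balanced("ae") -> True
#is_balanced("(a))(")     -> is_balanced(")(")     -> False (the first ')'
#                                                    has no '(' to its left)
def demo_is_balanced():
    '''Hypothetical demo helper, not part of the original code.'''
    assert is_balanced("a(b(c)d)e")
    assert not is_balanced("(a))(")
    assert is_balanced("no parens at all")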

#############################################################################


#count: # of opening parens encountered - # of closing parens encountered
def is_balanced_linear(s, count = 0):
    if count < 0:
        return False
    if len(s) == 0:
        return count == 0
    
    if s[0] == '(':
        return is_balanced_linear(s[1:], count + 1)
    elif s[0] == ')':
        return is_balanced_linear(s[1:], count - 1)
    else:
        return is_balanced_linear(s[1:], count)
    #Alternative, more compact way to write the three cases above:
    #count_resp = {'(': 1, ')': -1}
    #return is_balanced_linear(s[1:], count + count_resp.get(s[0], 0))
#Runtime complexity: O(n), where n = len(s). Each time is_balanced_linear is
#called, s gets shorter by one character, so there are n + 1 calls in total.
#Assuming the slicing operation s[1:] takes a constant amount of time (a
#reasonable assumption for small strings), each call takes the same amount of
#time. Therefore, the complexity is O(n).
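
#A minimal sketch of an index-based variant (my addition, not in the original
#code): passing an index instead of slicing avoids copying the string on every
#call, so the O(n) bound holds without the constant-time-slicing assumption.
def is_balanced_linear_idx(s, i = 0, count = 0):
    '''Hypothetical variant of is_balanced_linear that walks an index i
    through s instead of slicing s on every call.'''
    if count < 0:
        return False
    if i == len(s):
        return count == 0
    if s[i] == '(':
        return is_balanced_linear_idx(s, i + 1, count + 1)
    elif s[i] == ')':
        return is_balanced_linear_idx(s, i + 1, count - 1)
    else:
        return is_balanced_linear_idx(s, i + 1, count)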
