In [4]:

# binary tree
class Node:
    """One node of a binary tree: a payload value plus left/right links."""

    def __init__(self, val):
        self.l = None   # left child (Node or None)
        self.r = None   # right child (Node or None)
        self.v = val    # payload value

class Tree:
    """Binary search tree of comparable values built from Node objects.

    Values less than a node go into its left subtree, all others to the
    right (so duplicates end up on the right).
    """

    def __init__(self):
        self.root = None  # empty tree

    def getRoot(self):
        """Return the root Node, or None for an empty tree."""
        return self.root

    def add(self, val):
        """Insert val into the tree.

        NOTE(review): the original export lost this method's header and the
        recursive-call lines; reconstructed from the dangling if/else that
        remained and the standard BST insertion pattern.
        """
        if self.root is None:
            self.root = Node(val)
        else:
            self._add(val, self.root)

    def _add(self, val, node):
        # Recursive helper: descend until a free child slot is found.
        if val < node.v:
            if node.l is not None:
                self._add(val, node.l)
            else:
                node.l = Node(val)
        else:
            if node.r is not None:
                self._add(val, node.r)
            else:
                node.r = Node(val)

    def find(self, val):
        """Return the Node holding val, or None if absent or tree is empty."""
        if self.root is not None:
            return self._find(val, self.root)
        return None

    def _find(self, val, node):
        if val == node.v:
            return node
        elif val < node.v and node.l is not None:
            # bug fix: original dropped the `return`, so any match below
            # the root was found and then discarded (caller got None)
            return self._find(val, node.l)
        elif val > node.v and node.r is not None:
            return self._find(val, node.r)  # bug fix: missing `return` here too
        return None  # reached a dead end: val is not in the tree

    def deleteTree(self):
        # garbage collector reclaims the nodes once unreferenced
        self.root = None

    def printTree(self):
        """Print all values, one per line, in sorted (in-order) order."""
        if self.root is not None:
            self._printTree(self.root)

    def _printTree(self, node):
        # in-order traversal: left subtree, node, right subtree
        if node is not None:
            self._printTree(node.l)
            print(str(node.v) + ' ')  # bug fix: Py2 `print` statement -> function
            self._printTree(node.r)




In [6]:

# NOTE(review): the recorded output (0 2 3 4 8 / 3 / None) implies the tree
# was populated with tree.add(...) calls in a cell missing from this export.
tree = Tree()
tree.printTree()
found = tree.find(3)
# bug fix: `print (tree.find(3)).v` was a Py2 print statement that also
# dereferenced .v on a possibly-None result (AttributeError on a miss)
print(found.v if found is not None else None)
print(tree.find(10))
tree.deleteTree()
tree.printTree()




0
2
3
4
8
3
None



From Rahul's answer, I got to this tutorial: 6.13. Search Tree Implementation



In [8]:

# A tree as nested lists: [value, left_subtree, right_subtree],
# where an empty list marks a missing child.
myTree = [
    'a',                     # root
    ['b',                    # left subtree
        ['d', [], []],
        ['e', [], []]],
    ['c',                    # right subtree
        ['f', [], []],
        []],
]




In [9]:

myTree




Out[9]:

['a', ['b', ['d', [], []], ['e', [], []]], ['c', ['f', [], []], []]]



Nice features of list of lists approach:

• structure of a list representing a subtree adheres to the structure defined for a tree; the structure itself is recursive!
• it generalizes to a tree that has many subtrees: in the case where the tree is more than a binary tree, another subtree is just another list.


In [10]:

print(myTree)
print('left subtree = ', myTree[1])
print('root = ', myTree[0])
print('right subtree = ', myTree[2])




['a', ['b', ['d', [], []], ['e', [], []]], ['c', ['f', [], []], []]]
('left subtree = ', ['b', ['d', [], []], ['e', [], []]])
('root = ', 'a')
('right subtree = ', ['c', ['f', [], []], []])




In [11]:

def BinaryTree(r):
    """Construct a binary tree in list-of-lists form.

    @param r  value stored at the root
    @return   list [r, [], []] — root value plus two empty child slots
    """
    return [r, [], []]




In [13]:

def insertLeft(root, newBranch):
    """Insert newBranch as the left child of a list-of-lists tree.

    Any existing left subtree is demoted to become the left child of the
    new branch.  Returns the (mutated) root for convenience.
    """
    old_left = root.pop(1)  # detach whatever occupied the left slot
    if len(old_left) > 1:   # non-empty: it was a real [val, l, r] subtree
        new_child = [newBranch, old_left, []]
    else:
        new_child = [newBranch, [], []]
    root.insert(1, new_child)
    return root




In [12]:

# Miscellaneous: an empty list has length 0, which is how the list-of-lists
# representation detects an empty child slot.
child = []
print(len(child))




0




In [14]:

def insertRight(root, newBranch):
    """Insert newBranch as the right child of a list-of-lists tree.

    Any existing right subtree is demoted to become the right child of
    the new branch.  Returns the (mutated) root.
    """
    old_right = root.pop(2)  # detach whatever occupied the right slot
    new_child = (
        [newBranch, [], old_right] if len(old_right) > 1 else [newBranch, [], []]
    )
    root.insert(2, new_child)
    return root



To round out this set of tree-making functions, write a couple of access functions for getting and setting the root value, as well as getting the left or right subtrees.



In [16]:

def getRootVal(root):
    """Value stored at the root of a list-of-lists tree."""
    return root[0]


def setRootVal(root, newVal):
    """Overwrite the value stored at the root (mutates root in place)."""
    root[0] = newVal


def getLeftChild(root):
    """Left subtree (index 1) of a list-of-lists tree."""
    return root[1]


def getRightChild(root):
    """Right subtree (index 2) of a list-of-lists tree."""
    return root[2]




In [18]:

r = BinaryTree(3)
print(r)
insertLeft(r,4)
print(r)
insertLeft(r,5)
print(r)
insertRight(r,6)
print(r)
insertRight(r,7)
print(r)




[3, [], []]
[3, [4, [], []], []]
[3, [5, [4, [], []], []], []]
[3, [5, [4, [], []], []], [6, [], []]]
[3, [5, [4, [], []], []], [7, [], [6, [], []]]]




In [19]:

l = getLeftChild(r)
print(l)




[5, [4, [], []], []]




In [20]:

setRootVal(l,9)
print(r)




[3, [9, [4, [], []], []], [7, [], [6, [], []]]]




In [21]:

insertLeft(l,11)
print(r)




[3, [9, [11, [4, [], []], []], []], [7, [], [6, [], []]]]




In [22]:

print(getRightChild(getRightChild(r)))




[6, [], []]



Self Check

Q-26



In [23]:

x = BinaryTree('a')
insertLeft(x,'b')
insertRight(x, 'c')
insertRight(getRightChild(x),'d')
insertLeft(getRightChild(getRightChild(x)),'e')




Out[23]:

['d', ['e', [], []], []]




In [24]:

print(x)




['a', ['b', [], []], ['c', [], ['d', ['e', [], []], []]]]




In [25]:

x = BinaryTree('a')
insertLeft(x, 'd')
insertLeft(x, 'b')
insertRight(x,'f')
insertRight(x,'c')
insertLeft( getRightChild(x), 'e')




Out[25]:

['c', ['e', [], []], ['f', [], []]]




In [26]:

print(x)




['a', ['b', ['d', [], []], []], ['c', ['e', [], []], ['f', [], []]]]



## Nodes and References, Sec. 6.5 of Problem Solving with Algorithms and Data Structures



In [27]:

class BinaryTree:
    """Binary tree via nodes-and-references: each tree is a node whose
    leftChild/rightChild attributes are themselves BinaryTree instances
    (or None)."""

    def __init__(self, rootObj):
        self.key = rootObj
        self.leftChild = None
        self.rightChild = None

    def insertLeft(self, newNode):
        """Insert newNode as the left child; an existing left subtree is
        pushed down one level as the new node's left child."""
        if self.leftChild is None:
            self.leftChild = BinaryTree(newNode)
        else:
            subtree = BinaryTree(newNode)
            subtree.leftChild = self.leftChild
            self.leftChild = subtree

    def insertRight(self, newNode):
        """Mirror image of insertLeft for the right side."""
        if self.rightChild is None:
            self.rightChild = BinaryTree(newNode)
        else:
            subtree = BinaryTree(newNode)
            subtree.rightChild = self.rightChild
            self.rightChild = subtree

    def getRightChild(self):
        return self.rightChild

    def getLeftChild(self):
        return self.leftChild

    def setRootVal(self, obj):
        self.key = obj

    def getRootVal(self):
        return self.key




In [30]:

r = BinaryTree('a')
print(r.getRootVal())
print(r.getLeftChild())
r.insertLeft('b')
print(r.getLeftChild())
print(r.getLeftChild().getRootVal())
r.insertRight('c')




a
None
<__main__.BinaryTree instance at 0x7f0f306a57e8>
b



## Parse Tree; 6.6 Parse Tree

1st. tokenize expression string (i.e. breakup each term in an expression string as a token so to create a list of tokens). The different kinds of tokens will say whether to create a new tree, finished an expression, leaf node, or if it'll have left and right child.

Initially, start out with parse tree that consists of empty root node.



In [33]:

from __future__ import print_function

class BinaryTree:
""" @name  BinaryTree
@brief  Recursive implementation of Binary Tree, using links and nodes approach.

@note  Modified to allow for trees to be constructed from other trees rather than
always creating a new tree in the insertLeft or insertRight """
def __init__(self,rootObj):
self.key = rootObj
self.leftChild = None
self.rightChild = None

def insertLeft(self,newNode):
if isinstance(newNode, BinaryTree):
t = newNode
else:
t= BinaryTree(newNode)

if self.leftChild is not None:
t.left = self.leftChild

self.leftChild = t

def insertRight(self, newNode):
if isinstance(newNode, BinaryTree):
t = newNode
else:
t = BinaryTree(newNode)

if self.rightChild is not None:
t.right = self.rightChild
self.rightChild = t

def isLeaf(self):
return ((not self.leftChild) and (not self.rightChild))

def getRightChild(self):
return self.rightChild

def getLeftChild(self):
return self.leftChild

def setRootVal(self,obj):
self.key = obj

def getRootVal(self,):
return self.key

def inorder(self):
if self.leftChild:
self.leftChild.inorder()
print(self.key)
if self.rightChild:
self.rightChild.inorder()

def postorder(self):
if self.leftChild:
self.leftChild.postorder()
if self.rightChild:
self.rightChild.postorder()
print(self.key)

def preorder(self):
print(self.key)
if self.leftChild:
self.leftChild.preorder()
if self.rightChild:
self.rightChild.preorder()

def printexp(self):
if self.leftChild:
print('(', end=' ')
self.leftChild.printexp()
print(self.key, end=' ')
if self.rightChild:
self.rightChild.printexp()
print(')', end=' ')

def postordereval(self):
opers = {'+':operator.add, '-':operator.sub, '*':operator.mul, '/':operator.truediv}
res1 = None
res2 = None
if self.leftChild:
res1 = self.leftChild.postordereval()  #// \label{peleft}
if self.rightChild:
res2 = self.rightChild.postoreval() # // \label{peright}
if res1 and res2:
return opers[self.key](res1,res2) #// \label{peeval}
else:
return self.key




In [35]:

# stack.py,
# cf. https://github.com/bnmnetp/pythonds/blob/master/basic/stack.py

class Stack:
    """LIFO stack backed by a Python list; top of stack is the list's end."""

    def __init__(self):
        self.items = []

    def isEmpty(self):
        return not self.items

    def push(self, item):
        self.items.append(item)

    def pop(self):
        # raises IndexError on an empty stack, same as list.pop()
        return self.items.pop()

    def peek(self):
        return self.items[-1]

    def size(self):
        return len(self.items)




In [36]:

def buildParseTree(fpexp):
    """ @name  buildParseTree
    @param fpexp - string holding a fully parenthesized, space-separated
                   arithmetic expression (assumed well formed)
    @return  BinaryTree parse tree for the expression
    """
    tokens = fpexp.split()
    parentStack = Stack()
    expressionTree = BinaryTree('')
    parentStack.push(expressionTree)
    currentTree = expressionTree

    for token in tokens:
        if token == '(':
            # descend: a new empty node becomes the left child and the focus
            currentTree.insertLeft('')
            parentStack.push(currentTree)
            currentTree = currentTree.getLeftChild()
        elif token == ')':
            # ascend: this subexpression is complete
            currentTree = parentStack.pop()
        elif token in ('+', '-', '*', '/'):
            # operator goes at the current node; focus moves to a fresh right child
            currentTree.setRootVal(token)
            currentTree.insertRight('')
            parentStack.push(currentTree)
            currentTree = currentTree.getRightChild()
        else:
            # operand: store the number, then climb back up to the parent
            currentTree.setRootVal(int(token))  # raises ValueError on a bad token
            currentTree = parentStack.pop()
    return expressionTree




In [38]:

pt = buildParseTree("( ( 10 + 5 ) * 3 )")




In [39]:

pt.postorder()




10
5
+
3
*



As we have done with past recursive algorithms, we'll begin the design for the recursive evaluation function by identifying the base case.

A natural base case for recursive algorithms that operate on trees is to check for a leaf node.



In [43]:

# https://docs.python.org/2/library/operator.html
# Standard operators as functions
import operator

# operator.add(a, b) returns a + b

# operator.div(a, b) returned a / b under classic (Python 2) division;
# it was removed in Python 3, so look it up defensively instead of
# touching the attribute directly (which raises AttributeError on Py3)
getattr(operator, 'div', operator.truediv)

# operator.truediv(a, b) returns a / b under "true" division
# (the only behavior in Python 3)
operator.truediv




Out[43]:

<function operator.truediv>




In [44]:

def evaluate(parseTree):
    """Recursively evaluate a parse tree: leaves are operands, interior
    nodes hold one of + - * /."""
    opers = {'+': operator.add, '-': operator.sub,
             '*': operator.mul, '/': operator.truediv}

    left = parseTree.getLeftChild()
    right = parseTree.getRightChild()

    # base case: a leaf node holds an operand, return it as-is
    if not (left and right):
        return parseTree.getRootVal()
    # recursive step: apply this node's operator to both evaluated subtrees,
    # moving down the tree toward the leaves
    return opers[parseTree.getRootVal()](evaluate(left), evaluate(right))




In [45]:

evaluate(pt)




Out[45]:

45



## Tree Traversals, 6.7 Tree Traversals



In [46]:

def preorder(tree):
    """External (non-method) preorder traversal: print root, left, right."""
    if not tree:
        return
    print(tree.getRootVal())
    preorder(tree.getLeftChild())
    preorder(tree.getRightChild())




In [47]:

preorder(r)




a
b
c




In [48]:

def postorder(tree):
    """External postorder traversal: print left, right, then root."""
    if tree is None:
        return
    postorder(tree.getLeftChild())
    postorder(tree.getRightChild())
    print(tree.getRootVal())




In [49]:

def postordereval(tree):
    """Evaluate an expression parse tree bottom-up.

    @param tree  parse tree (or None); leaves hold operands, interior nodes
                 hold one of + - * /
    @return      numeric value of the subtree, or None for an empty tree
    """
    opers = {'+': operator.add, '-': operator.sub,
             '*': operator.mul, '/': operator.truediv}
    res1 = None
    res2 = None
    if tree:
        res1 = postordereval(tree.getLeftChild())
        res2 = postordereval(tree.getRightChild())
        # bug fix: compare with None so a legitimate 0 operand is still a
        # value (original `if res1 and res2` treated 0 as "missing child")
        if res1 is not None and res2 is not None:
            return opers[tree.getRootVal()](res1, res2)
        return tree.getRootVal()  # leaf: the operand itself
    # bug fix: the flattened original fell through to tree.getRootVal()
    # for a None tree, raising AttributeError
    return None




In [50]:

def inorder(tree):
    """External inorder traversal: print left, root, then right."""
    if tree is None:
        return
    inorder(tree.getLeftChild())
    print(tree.getRootVal())
    inorder(tree.getRightChild())




In [51]:

def printexp(tree):
    """Return a fully parenthesized string for an expression parse tree.

    Each non-empty subtree is wrapped in parentheses (so leaves with a
    parent print as '(v)'); an empty tree yields ''.
    """
    sVal = ""  # bug fix: original initialized `eVal` but built/returned sVal
    if tree:
        sVal = '(' + printexp(tree.getLeftChild())
        # bug fix: original wrote `Tree.getRootVal()` — the class, not the
        # `tree` parameter — raising at runtime
        sVal = sVal + str(tree.getRootVal())
        sVal = sVal + printexp(tree.getRightChild()) + ')'
    return sVal




In [ ]:

# delMin() returns item with minimum key value, removing item from the heap



### Binary Heap binheap.py



In [64]:

# this heap takes key value pairs, we'll assume that keys are integers

class BinHeap:
    """Min-heap of integer keys stored in a list with 1-based indexing:
    the children of node i live at indices 2*i and 2*i + 1."""

    def __init__(self):
        self.heapList = [0]  # index 0 is a dummy so arithmetic is 1-based
        self.currentSize = 0

    def buildHeap(self, alist):
        """ @fn  buildHeap(self,alist)
        @brief  builds a new heap from a list of keys, in O(n)
        """
        i = len(alist) // 2  # last node that has any children
        self.currentSize = len(alist)
        self.heapList = [0] + alist[:]
        # fix: removed leftover debug print() calls that flooded output
        while i > 0:
            self.percDown(i)
            i = i - 1

    def percDown(self, i):
        """Sift node i down until the heap-order property holds below it."""
        while (i * 2) <= self.currentSize:  # i * 2 <= size means i has a child
            mc = self.minChild(i)
            if self.heapList[i] > self.heapList[mc]:
                # parent larger than its smallest child violates heap order: swap
                tmp = self.heapList[i]
                self.heapList[i] = self.heapList[mc]
                self.heapList[mc] = tmp
            i = mc  # continue the check at the child position

    def minChild(self, i):
        """ @fn  minChild
        @brief  index of the smaller child of node i (right child may not exist)
        """
        if i * 2 + 1 > self.currentSize:
            return i * 2  # only a left child exists
        else:
            if self.heapList[i * 2] < self.heapList[i * 2 + 1]:
                return i * 2
            else:
                return i * 2 + 1

    def percUp(self, i):
        """Sift node i up while it is smaller than its parent (at i // 2)."""
        while i // 2 > 0:
            if self.heapList[i] < self.heapList[i // 2]:  # violates heap order
                tmp = self.heapList[i // 2]
                self.heapList[i // 2] = self.heapList[i]
                self.heapList[i] = tmp
            i = i // 2  # keep checking up toward the root

    def insert(self, k):
        """ @fn  insert
        @brief  adds new item to the heap, restoring heap order in O(log n)
        """
        self.heapList.append(k)
        self.currentSize = self.currentSize + 1
        self.percUp(self.currentSize)  # fix any heap-order violations upward

    def delMin(self):
        """ @fn  delMin
        @brief  removes and returns the item with minimum key (the root)

        @note  Since heap order property requires root of tree be smallest item in tree,
        finding the minimum item is easy.  Hard part is restoring full compliance with
        heap structure and heap order property.
        """
        retval = self.heapList[1]
        self.heapList[1] = self.heapList[self.currentSize]  # move last item to root
        self.currentSize = self.currentSize - 1
        self.heapList.pop()
        self.percDown(1)  # sift the relocated root down to its proper place
        return retval

    def isEmpty(self):
        """ @fn  isEmpty
        @brief  returns true if the heap is empty, false otherwise
        """
        # bug fix: original read bare `currentSize`, a NameError at call time
        return self.currentSize == 0




In [65]:

print( 5 // 2 )
bh = BinHeap()
print(bh.heapList)
bh.insert(5)
print(bh.heapList)
bh.insert(7)
print(bh.heapList)
bh.insert(3)
print(bh.heapList)
bh.insert(11)
print(bh.heapList)




2
[0]
[0, 5]
[0, 5, 7]
[0, 3, 7, 5]
[0, 3, 7, 5, 11]




In [66]:

print(bh.delMin())
print(bh.delMin())
print(bh.delMin())
print(bh.delMin())




3
5
7
11



# Binary Heap Implementation; 6.10. Binary Heap Implementation

## The Structure Property 6.10.1 The Structure Property

In order to make our heap work efficiently, take advantage of the logarithmic nature of the binary tree, to represent our heap. In order to guarantee logarithmic performance, keep tree balanced.

A balanced binary tree has roughly the same number of nodes in the left and right subtrees of the root.

In our heap implementation, we keep the tree balanced by creating a complete binary tree. A complete binary tree is a tree in which each level has all of its nodes. Exception to this is the bottom level of the tree, which we fill in from left to right.

Another interesting property of a complete tree is that we can represent it using a single list.

heap order property -

• every node is less than or equal to its children
• or every node is greater than or equal to its children

and so the root node is always the smallest node, or the largest, depending on the heap order.

With heap order property, each path is sorted, but the subtrees are not sorted (necessarily) relative to each other.

### Complexity

Assertion: build heap in $O(n)$.

Key to understanding the proof is you can build the heap in $O(n)$ because remember, that the $\log{n}$ factor is derived from height of the tree. For most of the work in buildHeap, tree is shorter than $\log{n}$.

Using fact that you can build a heap from a list in $O(n)$ time, you'll construct sorting algorithm that uses heap and sorts a list in $O(n\log{n})$ as exercise at the end.

Priority Queue wants

1. Get top priority element (get minimum or maximum)
2. insert an element
3. remove top priority element
4. decrease key

binary heap's time complexities:

1. O(1)
2. O($\log{n}$)
3. O($\log{n}$)
4. O($\log{n}$)

build heap in $O(n)$.

It's not $O(n\log{n})$ even though heapify costs $O(\log{n})$

observe that heapify's run time depends on the height of the tree $h$ (which is equal to $\log{n}$), and the heights of most subtrees are small.

$T(n) = \sum_{h=0}^{\log{n}} \frac{n}{2^{h+1} }$

# Binary Search Trees, 6.11 Binary Search Trees, 6.13. Search Tree Implementation

From wikipedia, the binary search tree property states that the key in each node must be greater than or equal to any key stored in the left sub-tree, and less than or equal to any key stored in the right sub-tree.

From 6.13. Search Tree Implementation, binary search tree property (bst property), that keys that are less than the parent are found in the left subtree, and keys that are greater than the parent are found in the right subtree.

Because we must be able to create and work with a binary search tree that's empty, our implementation will use 2 classes.

The BinarySearchTree has reference to TreeNode that is the root of the binary search tree. "In most cases, the external methods defined in outer class simply check to see if tree is empty". If there are nodes in the tree, the request is just passed on to a private method defined in the BinarySearchTree class that takes the root as a parameter.

In the case where the tree is empty or we want to delete the key at the root of the tree, we must take special action.



In [69]:

import os, sys
print( os.getcwd() )
print(os.listdir( os.getcwd() ))




/home/topolo/PropD/CompPhys/crack
['crack.ipynb', '.ipynb_checkpoints', 'TreesGraphs.ipynb', 'recursion.py', 'trees_arbres']




In [72]:

sys.path.insert(0, os.getcwd() + '/trees_arbres/')
from bst import BinarySearchTree, TreeNode




In [78]:

mytree = BinarySearchTree()
print( mytree.length() )
print( mytree.root )
print( mytree.size)

mytree[3] = "red"
print( mytree.length() )
print( mytree.root )
print( mytree.root.key )
print( mytree.root.leftChild )
print( mytree.root.rightChild )
print( mytree.root.parent )
print( mytree.root.balanceFactor )
print( mytree.size)




0
None
0
1
<bst.TreeNode instance at 0x7f0f35b8cef0>
3
red
None
None
None
0
1




In [79]:

mytree[4] = "blue"
print( mytree.length() )
print( mytree.root )
print( mytree.root.key )
print( mytree.root.leftChild )
print( mytree.root.rightChild )
print( mytree.root.parent )
print( mytree.root.balanceFactor )
print( mytree.size)




2
<bst.TreeNode instance at 0x7f0f35b8cef0>
3
red
None
<bst.TreeNode instance at 0x7f0f3052c950>
None
0
2




In [80]:

mytree[6] = "yellow"
print( mytree.length() )
print( mytree.root )
print( mytree.root.key )
print( mytree.root.leftChild )
print( mytree.root.rightChild )
print( mytree.root.parent )
print( mytree.root.balanceFactor )
print( mytree.size)




3
<bst.TreeNode instance at 0x7f0f35b8cef0>
3
red
None
<bst.TreeNode instance at 0x7f0f3052c950>
None
0
3




In [81]:

mytree[2] = "at"
print( mytree.length() )
print( mytree.root )
print( mytree.root.key )
print( mytree.root.leftChild )
print( mytree.root.rightChild )
print( mytree.root.parent )
print( mytree.root.balanceFactor )
print( mytree.size)




4
<bst.TreeNode instance at 0x7f0f35b8cef0>
3
red
<bst.TreeNode instance at 0x7f0f305188c0>
<bst.TreeNode instance at 0x7f0f3052c950>
None
0
4




In [82]:

print(mytree[6])




yellow




In [83]:

print(mytree[2])




at



$n =$ number of nodes in the tree;
$h =$ height of the tree, with $h = \log_2{n}$ for a balanced tree,
since $2^h = n$.

# AVL Tree, Adelson-Velskii, Landis, cf. 6.15. Balanced Binary Search Trees

automatically makes sure tree remains balanced.

For each node, $$\text{balanceFactor} = \text{height}(\text{leftSubTree}) - \text{height}(\text{rightSubTree}) = h(\text{leftSubTree}) - h(\text{rightSubTree})$$

For purposes of implementing an AVL tree, and gaining the benefit of having a balanced tree, we'll define a tree to be in balance if the balance factor is $-1,0,1$.



In [85]:

from bst import BinarySearchTree, TreeNode




In [86]:

class AVLTree(BinarySearchTree):
    """
    AVL (Adelson-Velskii & Landis) self-balancing binary search tree.

    @brief Implement a binary search tree with the following interface
    @details functions:
    __contains__(y) <==> y in x
    __getitem__(y) <==> x[y]
    __init__()
    __len__() <==> len(x)
    __setitem__(k,v) <==> x[k] = v
    clear()
    get(k)
    has_key(k)
    items()
    keys()
    values()
    put(k,v)
    """

    def _put(self, key, val, currentNode):
        # Same descent as BinarySearchTree._put, except every newly created
        # TreeNode triggers updateBalance so the tree stays AVL-balanced.
        if key < currentNode.key:
            if currentNode.hasLeftChild():
                self._put(key, val, currentNode.leftChild)
            else:  # current node has no left child: attach here
                currentNode.leftChild = TreeNode(key, val, parent=currentNode)
                self.updateBalance(currentNode.leftChild)
        else:
            if currentNode.hasRightChild():
                self._put(key, val, currentNode.rightChild)
            else:
                currentNode.rightChild = TreeNode(key, val, parent=currentNode)
                self.updateBalance(currentNode.rightChild)

    def updateBalance(self, node):
        """Propagate balance-factor updates from node toward the root,
        rebalancing as soon as a factor leaves {-1, 0, 1}."""
        if node.balanceFactor > 1 or node.balanceFactor < -1:
            self.rebalance(node)
            return
        if node.parent is not None:
            if node.isLeftChild():
                node.parent.balanceFactor += 1
            elif node.isRightChild():
                node.parent.balanceFactor -= 1

            if node.parent.balanceFactor != 0:
                # subtree height changed: keep working up the tree toward the root
                self.updateBalance(node.parent)

    def rebalance(self, node):
        """Apply the single or double rotation dictated by the balance factors."""
        if node.balanceFactor < 0:  # right-heavy
            if node.rightChild.balanceFactor > 0:
                # right child is left-heavy: RL case -> right then left rotation
                self.rotateRight(node.rightChild)
                self.rotateLeft(node)
            else:
                # single left rotation
                self.rotateLeft(node)
        elif node.balanceFactor > 0:  # left-heavy
            if node.leftChild.balanceFactor < 0:
                # left child is right-heavy: LR case -> left then right rotation
                self.rotateLeft(node.leftChild)
                self.rotateRight(node)
            else:
                # single right rotation
                self.rotateRight(node)

    def rotateLeft(self, rotRoot):
        """ @fn rotateLeft
        @brief left rotation around subtree rooted at rotRoot
        """
        newRoot = rotRoot.rightChild  # promote right child to be root of the subtree
        rotRoot.rightChild = newRoot.leftChild  # old root adopts new root's left subtree
        # adjust the parent pointers of the two nodes
        if newRoot.leftChild is not None:
            newRoot.leftChild.parent = rotRoot
        newRoot.parent = rotRoot.parent
        if rotRoot.isRoot():
            self.root = newRoot
        else:
            if rotRoot.isLeftChild():
                rotRoot.parent.leftChild = newRoot
            else:
                rotRoot.parent.rightChild = newRoot
        newRoot.leftChild = rotRoot  # demote old root to left child of new root
        rotRoot.parent = newRoot
        rotRoot.balanceFactor = rotRoot.balanceFactor + 1 - min(newRoot.balanceFactor, 0)
        # bug fix: original had `- max(...)`; the pythonds listing (and the
        # height-bookkeeping derivation) uses `+ max(...)` here
        newRoot.balanceFactor = newRoot.balanceFactor + 1 + max(rotRoot.balanceFactor, 0)

    def rotateRight(self, rotRoot):
        """Mirror image of rotateLeft: right rotation around rotRoot."""
        newRoot = rotRoot.leftChild  # promote left child to be root of the subtree
        rotRoot.leftChild = newRoot.rightChild  # old root adopts new root's right subtree
        # adjust the parent pointers of the two nodes
        if newRoot.rightChild is not None:
            newRoot.rightChild.parent = rotRoot
        newRoot.parent = rotRoot.parent
        if rotRoot.isRoot():
            self.root = newRoot
        else:
            if rotRoot.isRightChild():
                rotRoot.parent.rightChild = newRoot
            else:
                rotRoot.parent.leftChild = newRoot
        newRoot.rightChild = rotRoot  # demote old root to right child of new root
        rotRoot.parent = newRoot
        rotRoot.balanceFactor = rotRoot.balanceFactor - 1 - max(newRoot.balanceFactor, 0)
        newRoot.balanceFactor = newRoot.balanceFactor - 1 + min(rotRoot.balanceFactor, 0)




In [1]:

import os, sys
print( os.getcwd() )
print(os.listdir( os.getcwd() ))




/home/topolo/PropD/CompPhys/crack
['crack.ipynb', '.ipynb_checkpoints', 'TreesGraphs.ipynb', 'recursion.py', 'BigO.ipynb', 'trees_arbres', 'DataStruct.ipynb']



# Binary Tree



In [2]:

sys.path.insert(0, os.getcwd() + '/trees_arbres/')
from binaryTree import binaryTree




In [3]:

root = binaryTree(1)
root.insertl(2)
root.insertr(3)
root.insertl(4)
root.l.insertr(5)




In [4]:

root.inorder()




2
4
5
1
3




In [5]:

root.preorder()




1
2
4
5
3




In [6]:

root.postorder()




2
4
5
3
1




In [7]:

root=binaryTree(1)
root.insertl(2)
root.insertr(3)
root.l.insertl(4)
root.l.insertr(5)
root.inorder()
root.preorder()
root.postorder()




4
2
5
1
3
1
4
2
5
3
4
2
5
3
1




In [8]:

root=binaryTree(1)
root.l=binaryTree(2)
root.r=binaryTree(3)
root.l.l=binaryTree(4)
root.l.r =binaryTree(5)
root.inorder()
root.preorder()
root.postorder()




4
2
5
1
3
1
4
2
5
3
4
2
5
3
1



### Time Complexity of Binary Tree Traversal, $O(n)$

$O(n)$

Complexity function $T(n)$ - for all problem where tree traversal is involved, can be defined as:
$T(n) = T(k) + T(n-k-1) + c$
where $k$ is the number of nodes on one side of the root and $n-k-1$ is the number on the other side

Auxiliary Space: If we don't consider size of stack for function calls, then $O(1)$, otherwise $O(n)$.



In [ ]: