Partition deepmind directory into two parts
Since I did not pass my on-site interview with DM, but I have been invited to attempt again, I decided to partition this directory into two parts:

1. part_one: hosts the exercises that I completed before my first attempt at earning the job.
2. part_two: hosts the exercises that I will complete before my second attempt at earning the job.
parent b4dd290745
commit 5df3bb4e40

12 changed files with 57 additions and 0 deletions
deepmind/part_one/dijkstra.py (new file)
# Doing a practice implementation of Dijkstra's algorithm: a priority-first
# search. A node's `value` is treated as the cost of entering that node.
from heapq import heappush, heappop
from itertools import count


class Node(object):
    def __init__(self, value, children):
        self.value = value
        self.children = children


def shortest_path(a, b):
    """Return the shortest path from `a` to `b`."""
    q = []
    seen = set()
    order = count()  # tie-breaker keeps heap entries comparable on equal cost
    heappush(q, (a.value, next(order), a, [a]))  # heap is the first argument

    while q:
        d, _, node, path = heappop(q)
        if node == b:
            return path
        if node in seen:  # a cheaper route to this node was already settled
            continue
        seen.add(node)
        for child in node.children:
            if child not in seen:
                heappush(q, (d + child.value, next(order), child, path + [child]))
    raise Exception("Path between nodes A and B does not exist.")
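For a quick sanity check, here is a minimal usage sketch. The graph and the
names below (start, mid, detour, goal) are hypothetical, not part of the
commit; each node's value is treated as the cost of entering that node.

# Hypothetical example graph: two routes from start to goal.
goal = Node(1, [])
mid = Node(2, [goal])
detour = Node(5, [goal])
start = Node(0, [mid, detour])

path = shortest_path(start, goal)
print([n.value for n in path])  # [0, 2, 1]: the route through `mid` is cheaper

Carrying the partial path inside each heap entry keeps the function simple at
the cost of copying a list per push; a predecessor map would avoid the copies.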