#Andrew Samuels
#linear equation solver. Does Gauss-Jordan elimination, then back substitution.
from scipy import *
from scipy import linalg
from pdb import *
def dot(a, b):
    """Return the dot product of two equal-length sequences.

    Works for plain lists of numbers and, as used below, for a 1-D
    slice of coefficients zipped against a sequence of matrix rows
    (scalar * row products are summed element-wise by numpy).
    Empty inputs yield 0, matching the original reduce(..., 0) fold.
    """
    # sum() over a generator replaces the Python-2-only builtin reduce().
    return sum(x * y for x, y in zip(a, b))
# Augmented matrix [A | b] (float64) for the 3x3 linear system:
#   x +  y +  z =  0
#   x - 2y + 2z =  4
#   x + 2y -  z = -1
matrix = array([[1.0,  1.0,  1.0,  0.0],
                [1.0, -2.0,  2.0,  4.0],
                [1.0,  2.0, -1.0, -1.0]])
pivot = 0   # index of the current pivot row/column
ncols = 4   # columns in the augmented matrix (3 coefficients + RHS)
nrows = 3   # number of equations
# --- Forward elimination: reduce `matrix` in place to row echelon form ---
# For each pivot column: scale the pivot row so the pivot is 1, then
# subtract a multiple of it from every row below to zero that column.
# NOTE(review): assumes every pivot is nonzero — no partial pivoting.
for col in range(ncols - 1):
    # Normalize the pivot row so the pivot element becomes 1.
    matrix[pivot, :] = matrix[pivot, :] / matrix[pivot, pivot]

    # Eliminate the pivot column from every row below the pivot.
    # Bug fix: the pivot row must be scaled by the entry being
    # eliminated (matrix[row, pivot]); the original subtracted the bare
    # pivot row, which only works when that entry happens to equal 1
    # (coincidentally true for this particular matrix).
    for row in range(pivot + 1, nrows):
        matrix[row, :] = matrix[row, :] - matrix[row, pivot] * matrix[pivot, :]
    pivot = pivot + 1

print(" ")
print("Row Echelon Form: ")
print(matrix)
print("\n")
#solve the matrix and print solution
# Row-wise back substitution, in place, from the bottom row upward.
# At each step the rows below `pivot` have already been reduced to the
# form [0 ... 0 1 x_i], so dot(trailing coefficients, reduced rows)
# clears the trailing entries of the pivot row and leaves the solution
# value in the augmented (last) column. Correctness depends on this
# in-place, bottom-up update order.
# NOTE(review): divides by matrix[pivot,pivot] with no zero check —
# assumes elimination above produced nonzero pivots.
for pivot in range(nrows-1,-1,-1):
    matrix[pivot] = (matrix[pivot] - dot(matrix[pivot,pivot+1:nrows],matrix[pivot+1:nrows]))/matrix[pivot,pivot]
print "Solved by Back Subst.: "
print matrix 
print "\n"  
# Cross-check the hand-rolled solver against scipy's dense solver.
coefficients = [[1, 1, 1],
                [1, -2, 2],
                [1, 2, -1]]
rhs = [[0], [4], [-1]]
a = array(coefficients)
b = array(rhs)
sol = linalg.solve(a, b)
print("Solved by Scipy (check): ")
print(sol)
print("\n")
I am a software engineer with a computational physics focus. The purpose of this blog is to showcase my ideas and work. I can create value and reduce costs for your company.
20111228
Fun with Python: Linear Algebra Solver
This program is similar to my Gaussian Elimination script, only this time the matrix is solved using Back Substitution, then checked for accuracy using the SciPy module.
