JS matrix multiplication problem
I am having problems with matrix multiplication code in JavaScript. If I run the following function with the following two matrices:
// m1: a 7x3 matrix (7 rows, 3 columns) — the rows are the 3-bit patterns 100..111.
var m1 = [ [ 1, 0, 0 ],
[ 0, 1, 0 ],
[ 1, 1, 0 ],
[ 0, 0, 1 ],
[ 1, 0, 1 ],
[ 0, 1, 1 ],
[ 1, 1, 1 ] ];
// m2: a 1x7 matrix (one row, 7 columns). NOTE: the entries are strings,
// which the `*` operator will coerce to numbers during multiplication.
var m2 = [ [ '0', '1', '1', '0', '0', '1', '1' ] ];
// Attempted m1 × m2 multiplication. BUG: this function mixes up which
// dimension belongs to which matrix — see the inline notes below.
var matrixMult = function (m1, m2) {
console.log(m1);
console.log(m2);
console.log("m1 length: %d, m2[0].length: %d", m1.length, m2[0].length);
// BUG: for m1 × m2 the compatibility rule is m1[0].length (columns of m1)
// === m2.length (rows of m2). This instead compares rows of m1 with columns
// of m2, so the wrong pairs of matrices pass/fail the check.
if (m1.length != m2[0].length) {
console.error("Incompatible matrix dimensions for multiplication.");
return false;
}
var result = [];
// BUG: the product m1 × m2 has m1.length rows and m2[0].length columns,
// but these loops build an m1[0].length x m2.length result instead.
for (var i = 0; i < m1[0].length; i++) {
result[i] = [];
for (var j = 0; j < m2.length; j++) {
var sum = 0;
// BUG: k runs up to m1.length - 1, but m2 only has m2.length rows.
// With the sample input, m2[1] is undefined, so m2[k][j] is the read
// that throws the TypeError quoted in the error trace below.
for (var k = 0; k < m1.length; k++) {
sum += m1[i][k] * m2[k][j];
}
result[i][j] = sum;
}
}
return result;
}
I am getting this error:
/path/to/file.js:58
sum += m1[i][k] * m2[k][j];
^
TypeError: Cannot read property '0' of undefined
at matrixMult (...)
What is going wrong? Could the problem be that m2.length
is only 1?
source to share
There is only m2[0], but your inner loop variable k runs from 0 up to m1.length - 1, which goes above 0. So when it tries to access m2[1], that element is undefined, and reading m2[1][j] throws the error.
Also, following the definition of matrix multiplication
The multiplication of two matrices is defined only if the number of columns of the left matrix is the same as the number of rows of the right matrix.
(Source: Wikipedia )
you cannot multiply your sample matrices in the order m1 × m2, because m1 has 3 columns but m2 has only one row.
EDIT
Now that I understand your question correctly, I wrote a small function that might help you:
// Multiplies m2 (p x n) by m1 (n x q) and returns the p x q product m2 × m1.
// NOTE the argument order: the SECOND argument is the LEFT-hand matrix.
// Entries are coerced to numbers by `*`, so numeric strings (e.g. '1') work.
//
// @param {Array<Array<number|string>>} m1 - right-hand matrix (n rows, q columns)
// @param {Array<Array<number|string>>} m2 - left-hand matrix (p rows, n columns)
// @returns {Array<Array<number>>} the product m2 × m1
// @throws {Error} when the inner dimensions do not match (columns of m2
//         vs. rows of m1) — instead of silently producing NaN garbage.
function multiplyMatrix(m1, m2) {
  // Validate dimension compatibility up front (see Wikipedia: the number of
  // columns of the left matrix must equal the number of rows of the right).
  if (m2.length > 0 && m2[0].length !== m1.length) {
    throw new Error("Incompatible matrix dimensions: m2 has " + m2[0].length +
      " columns but m1 has " + m1.length + " rows.");
  }
  var result = [];
  for (var j = 0; j < m2.length; j++) {        // each row of m2 -> row of result
    result[j] = [];
    for (var k = 0; k < m1[0].length; k++) {   // each column of m1 -> column of result
      var sum = 0;
      // Dot product of row j of m2 with column k of m1.
      for (var i = 0; i < m1.length; i++) {
        sum += m1[i][k] * m2[j][i];
      }
      result[j].push(sum);
    }
  }
  return result;
}
// Computes m2 × m1 (a 1x7 matrix times a 7x3 matrix yields a 1x3 matrix).
multiplyMatrix(m1, m2);
// => [ [2, 4, 2] ]
source to share