I'm writing a function that multiplies two matrices. The matrices will always have the same number of rows and columns (2x2, 5x5, 23x23, ...).
When I print the result, it doesn't work. Why?
For example, if I create two 2x2 matrices:
matrixA:
[1][2]
[3][4]
matrixB:
[5][6]
[7][8]
The result should be:
[19][22]
[43][50]
(http://ncalculators.com/matrix/2x2-matrix-multiplication-calculator.htm)
But I get:
[19][undefined]
[22][undefined]
function multiplyMatrix(matrixA, matrixB)
{
    var result = new Array(); //declare an array
    //var numColsRows = $("#matrixRC").val();
    numColsRows = 2;
    //iterating through first matrix rows
    for (var i = 0; i < numColsRows; i++)
    {
        //iterating through second matrix columns
        for (var j = 0; j < numColsRows; j++)
        {
            var matrixRow = new Array(); //declare an array
            var rrr = new Array();
            var resu = new Array();
            //calculating sum of pairwise products
            for (var k = 0; k < numColsRows; k++)
            {
                rrr.push(parseInt(matrixA[i][k]) * parseInt(matrixB[k][j]));
            } //for 3
            resu.push(parseInt(rrr[i]) + parseInt(rrr[i+1]));
            result.push(resu);
            //result.push(matrixRow);
        } //for 2
    } //for 1
    return result;
} // function multiplyMatrix
You're getting confused by your various temporary arrays. The undefined values are caused by an out-of-bounds access on the line just below your innermost loop: rrr only ever holds numColsRows products (indices 0 to numColsRows - 1), so rrr[i+1] reads past the end of the array once i reaches the last row.
I recommend that you stick to making a single array for the result of the multiplication. As you're probably aware, the hitch is that JavaScript doesn't allow you to initialize a multi-dimensional array. To make a two-dimensional array, you have to initialize a one-dimensional array, then iterate over its elements and initialize each one to a one-dimensional array.
function multiply(a, b) {
  var aNumRows = a.length, aNumCols = a[0].length,
      bNumRows = b.length, bNumCols = b[0].length,
      m = new Array(aNumRows);  // initialize array of rows
  for (var r = 0; r < aNumRows; ++r) {
    m[r] = new Array(bNumCols); // initialize the current row
    for (var c = 0; c < bNumCols; ++c) {
      m[r][c] = 0;              // initialize the current cell
      for (var i = 0; i < aNumCols; ++i) {
        m[r][c] += a[r][i] * b[i][c];
      }
    }
  }
  return m;
}
function display(m) {
  for (var r = 0; r < m.length; ++r) {
    document.write(' ' + m[r].join(' ') + '<br />');
  }
}
var a = [[8, 3], [2, 4], [3, 6]],
b = [[1, 2, 3], [4, 6, 8]];
document.write('matrix a:<br />');
display(a);
document.write('matrix b:<br />');
display(b);
document.write('a * b =<br />');
display(multiply(a, b));
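For reference, the demo above writes the following to the page (the numbers are worked out by hand from the sample matrices, not taken from the original answer):
matrix a:
 8 3
 2 4
 3 6
matrix b:
 1 2 3
 4 6 8
a * b =
 20 34 48
 18 28 38
 27 42 57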
You can use the multiplyMatrices() function from http://tech.pro/tutorial/1527/matrix-multiplication-in-functional-javascript; it works like a charm. Example (you can print a matrix with style in the Chrome and Firefox console with console.table()):
function multiplyMatrices(m1, m2) {
    var result = [];
    for (var i = 0; i < m1.length; i++) {
        result[i] = [];
        for (var j = 0; j < m2[0].length; j++) {
            var sum = 0;
            for (var k = 0; k < m1[0].length; k++) {
                sum += m1[i][k] * m2[k][j];
            }
            result[i][j] = sum;
        }
    }
    return result;
}
var m1 = [[1,2],[3,4]]
var m2 = [[5,6],[7,8]]
var mResult = multiplyMatrices(m1, m2)
/*In Google Chrome and Firefox you can do:*/
console.table(mResult) /* it shows the matrix in a table */
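If console.table() isn't available in your environment, a plain console.log() per row is a workable fallback (a minimal sketch, not part of the linked tutorial):
mResult.forEach(function (row) {
    console.log(row.join(' ')); // prints "19 22", then "43 50"
});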

I know it's an old question, but I recommend switching to my answer. My solution performs well because it uses the built-in map and reduce array functions.
//The chosen one
function matrixDot(A, B) {
    var result = new Array(A.length).fill(0).map(row => new Array(B[0].length).fill(0));
    return result.map((row, i) => {
        return row.map((val, j) => {
            return A[i].reduce((sum, elm, k) => sum + (elm * B[k][j]), 0)
        })
    })
}
var print = m => m.forEach(r => document.write(` ${r.join(' ')}<br/>`))
var a = [[8, 3], [2, 4], [3, 6]]
var b = [[1, 2, 3], [4, 6, 8]]
document.write('matrix a:<br />');
print(a);
document.write('matrix b:<br />');
print(b);
document.write('a * b =<br />');
print(matrixDot(a,b));
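As a quick sanity check against the matrices from the question (expected product worked out by hand):
matrixDot([[1, 2], [3, 4]], [[5, 6], [7, 8]]); // => [[19, 22], [43, 50]]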
If you wanted to go the bonkers route, you could also do something with the vertex-transformation facilities of WebGL, now available in some modern browsers.
I'm not really sure whether this works the same way as vector transformation in OpenCL (in fact they're type-equivalent / interoperable), but the general idea is (a sketch of the first step follows below):
add your values to a buffer
"pretend" it's an array of vertices
transform them en masse using the GPU
retrieve the revised values from the vector
(See the demo here: http://www.html5rocks.com/en/tutorials/webgl/webgl_transforms/)
It's just an alternative to the usual loop-in-loop approach, and to be honest a bit of a fiddle, given that OpenCL was designed for this kind of thing. Within the OpenCL 1.2 spec, vertex buffers from OpenGL can be loaded and transformed using OpenCL (see https://software.intel.com/en-us/articles/opencl-and-opengl-interoperability-tutorial).
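A rough sketch of just the first step, uploading matrix values into a WebGL buffer; it assumes a <canvas id="c"> element exists, and the shader setup and readback needed for an actual GPU multiply are left out:
var gl = document.getElementById('c').getContext('webgl');
// matrix a from the earlier examples, flattened row by row
var matrixData = new Float32Array([8, 3, 2, 4, 3, 6]);
var buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
// the GPU now holds the values as if they were vertex data
gl.bufferData(gl.ARRAY_BUFFER, matrixData, gl.STATIC_DRAW);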
You can solve this problem with dynamic programming, using memoization. Memoization is an optimization technique in which you cache previously computed results and return the cached result when the same computation is needed again.
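As a standalone illustration of the idea (a minimal sketch, separate from the full solution below), a memoized multiply could look like this:
function memoize(fn) {
    var cache = {};
    return function (a, b) {
        var key = a + '*' + b;
        if (!(key in cache)) {
            cache[key] = fn(a, b); // computed once
        }
        return cache[key];         // repeat calls are served from the cache
    };
}
var memoMultiply = memoize(function (a, b) { return a * b; });
memoMultiply(2, 3); // computes 6
memoMultiply(2, 3); // returns the cached 6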
let mat1 = [[1, 2, 3], [2, 1, 2]];
let mat2 = [[1, 2], [1, 2], [1, 2]];
function matrixMulti(x, y) {
    let saveComputation = {};
    let finalMat = [],
        length = x.length,
        length1 = y[0].length,
        length2 = y.length;
    for (let i = 0; i < length; i++) {
        finalMat.push([]);
        for (let j = 0; j < length1; j++) {
            finalMat[i][j] = 0;
            for (let k = 0; k < length2; k++) {
                // check whether we have already computed this product
                if (saveComputation[y[k][j] + '*' + x[i][k]] || saveComputation[x[i][k] + '*' + y[k][j]]) {
                    finalMat[i][j] = finalMat[i][j] + saveComputation[y[k][j] + '*' + x[i][k]];
                } else {
                    // not computed yet: save it under both key orders (see the saved format below)
                    saveComputation[x[i][k] + '*' + y[k][j]] = x[i][k] * y[k][j];
                    saveComputation[y[k][j] + '*' + x[i][k]] = x[i][k] * y[k][j];
                    finalMat[i][j] = finalMat[i][j] + saveComputation[y[k][j] + '*' + x[i][k]];
                }
            }
        }
    }
    console.log(finalMat);
}
matrixMulti(mat1, mat2);
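With these inputs the function logs the product (worked out by hand):
[ [ 6, 12 ], [ 5, 10 ] ]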
For the above input, saveComputation ends up as:
{ '1*1': 1,
'2*1': 2,
'1*2': 2,
'3*1': 3,
'1*3': 3,
'2*2': 4,
'3*2': 6,
'2*3': 6 }
Install Express, save the code below as server.js, and run it:
npm install express
node server.js
var express = require('express');
var app = express();

var A = new Array(3);
var B = new Array(3);
var preA = [ 1, 2, 3, 4, 5, 6, 7, 8, 9 ];
var preB = [ 1, 1, 1, 2, 2, 2, 3, 3, 3 ];

//######################### preparing blank 3*3 matrices A and B ###############
for (i = 0; i < 3; i++) {
    A[i] = new Array(3);
    B[i] = new Array(3);
}

//##################### assigning values to matrix cells from the predefined arrays preA and preB #####
var k = 0;
for (i = 0; i < 3; i++) {
    for (j = 0; j < 3; j++) {
        A[i][j] = preA[k];
        B[i][j] = preB[k];
        k++;
    }
}

console.log('################################');
console.log('First matrix:');
console.log(A[0]);
console.log(A[1]);
console.log(A[2]);
console.log('');
console.log('################################');
console.log('Second matrix:');
console.log(B[0]);
console.log(B[1]);
console.log(B[2]);

//###################### multiplication logic as discussed ################
var result = [];
for (var i = 0; i < 3; i++) {
    result[i] = new Array(3);
    for (var j = 0; j < 3; j++) {
        var sum = 0;
        for (var k = 0; k < 3; k++) {
            sum += A[i][k] * B[k][j];
        }
        result[i][j] = sum;
    }
}

console.log('');
console.log('################################');
console.log('################################');
console.log('After Multiplication');
console.log(result[0]);
console.log(result[1]);
console.log(result[2]);

app.listen(9999);
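For reference, with these preA/preB values the multiplication section prints the following rows (worked out by hand):
[ 14, 14, 14 ]
[ 32, 32, 32 ]
[ 50, 50, 50 ]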
Source: https://stackoverflow.com/questions/27205018/multiply-2-matrices-in-javascript