Searched defs:row (Results 251 - 275 of 652) sorted by relevance

<< 11 12 13 14 15 16 17 18 19 20 >>

/external/eigen/Eigen/src/Core/
SelfAdjointView.h
83 inline Scalar coeff(Index row, Index col) const argument
85 Base::check_coordinates_internal(row, col);
86 return m_matrix.coeff(row, col);
92 inline Scalar& coeffRef(Index row, Index col) argument
94 Base::check_coordinates_internal(row, col);
95 return m_matrix.const_cast_derived().coeffRef(row, col);
209 row = (UnrollCount-1) % Derived1::RowsAtCompileTime enumerator in enum:Eigen::internal::triangular_assignment_selector::__anon20696
216 if(row == col)
217 dst.coeffRef(row, col) = numext::real(src.coeff(row, co
234 row = (UnrollCount-1) % Derived1::RowsAtCompileTime enumerator in enum:Eigen::internal::triangular_assignment_selector::__anon20697
[all...]
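
The triangular_assignment_selector matches above recover (row, col) from a single compile-time unroll counter, stored as enumerators. A minimal standalone sketch of that arithmetic (not Eigen's actual template machinery, just the same enum trick) for a fixed-size, column-major matrix:

#include <cstdio>

// Sketch only: how a linear unroll counter maps to a (row, col) pair,
// mirroring the "row = (UnrollCount-1) % RowsAtCompileTime" enumerator
// seen in the SelfAdjointView.h matches. Names here are illustrative.
template <int UnrollCount, int Rows>
struct unroll_index {
    enum {
        row = (UnrollCount - 1) % Rows,   // column-major walk of a fixed-size matrix
        col = (UnrollCount - 1) / Rows
    };
};

int main() {
    // For a 3x3 matrix, counter 5 lands on row 1, column 1 (a diagonal entry),
    // which is where the self-adjoint copy keeps only the real part.
    printf("row=%d col=%d\n", int(unroll_index<5, 3>::row), int(unroll_index<5, 3>::col));
    return 0;
}
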
/external/eigen/Eigen/src/Core/products/
CoeffBasedProduct.h
163 EIGEN_STRONG_INLINE const Scalar coeff(Index row, Index col) const argument
166 ScalarCoeffImpl::run(row, col, m_lhs, m_rhs, res);
176 const Index row = RowsAtCompileTime == 1 ? 0 : index; local
178 ScalarCoeffImpl::run(row, col, m_lhs, m_rhs, res);
183 EIGEN_STRONG_INLINE const PacketScalar packet(Index row, Index col) const argument
189 ::run(row, col, m_lhs, m_rhs, res);
242 static EIGEN_STRONG_INLINE void run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, RetScalar &res) argument
244 product_coeff_impl<DefaultTraversal, UnrollingIndex-1, Lhs, Rhs, RetScalar>::run(row, col, lhs, rhs, res);
245 res += lhs.coeff(row, UnrollingIndex) * rhs.coeff(UnrollingIndex, col);
253 static EIGEN_STRONG_INLINE void run(Index row, Inde argument
263 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, RetScalar& res) argument
281 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, typename Lhs::PacketScalar &pres) argument
292 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, typename Lhs::PacketScalar &pres) argument
304 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, RetScalar &res) argument
317 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, typename Lhs::Scalar &res) argument
339 run(Index row, Index , const Lhs& lhs, const Rhs& rhs, typename Lhs::Scalar &res) argument
359 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, typename Lhs::Scalar &res) argument
373 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, Packet &res) argument
384 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, Packet &res) argument
395 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, Packet &res) argument
405 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, Packet &res) argument
415 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, Packet& res) argument
428 run(Index row, Index col, const Lhs& lhs, const Rhs& rhs, Packet& res) argument
[all...]
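
The product_coeff_impl matches above unroll one entry of a matrix product recursively: each level adds lhs(row, K) * rhs(K, col) and delegates to K-1. A hedged sketch of that recursion pattern, with plain 2D arrays standing in for the Lhs/Rhs expression types:

#include <cstdio>

// Illustrative only: compile-time-unrolled dot product for a single (row, col)
// entry, the same shape as product_coeff_impl's run() recursion.
template <int K>
struct coeff_sum {
    template <int N>
    static double run(int row, int col, const double (&lhs)[N][N], const double (&rhs)[N][N]) {
        return coeff_sum<K - 1>::run(row, col, lhs, rhs) + lhs[row][K] * rhs[K][col];
    }
};

template <>
struct coeff_sum<0> {
    template <int N>
    static double run(int row, int col, const double (&lhs)[N][N], const double (&rhs)[N][N]) {
        return lhs[row][0] * rhs[0][col];   // innermost term ends the recursion
    }
};

int main() {
    double a[2][2] = {{1, 2}, {3, 4}};
    double b[2][2] = {{5, 6}, {7, 8}};
    // (A*B)(0,1) = 1*6 + 2*8 = 22
    printf("%g\n", coeff_sum<1>::run(0, 1, a, b));
    return 0;
}
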
/external/eigen/Eigen/src/Geometry/
Homogeneous.h
78 inline Scalar coeff(Index row, Index col) const argument
80 if( (int(Direction)==Vertical && row==m_matrix.rows())
83 return m_matrix.coeff(row, col);
135 * \returns a matrix expression of homogeneous column (or row) vectors
295 dst += m_rhs.row(m_rhs.rows()-1).colwise()
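
Homogeneous::coeff reads through to the wrapped expression except on the appended row (or column), which reads as the constant 1. A toy illustration of the Vertical case, assuming a plain 3-vector rather than the real expression template:

#include <cassert>

// Not the Eigen class; just the access rule visible in the Homogeneous.h match.
struct HomogeneousVec3 {
    double v[3];
    double coeff(int row) const {
        return (row == 3) ? 1.0 : v[row];  // the virtual appended coordinate
    }
};

int main() {
    HomogeneousVec3 p{{2.0, 5.0, -1.0}};
    assert(p.coeff(1) == 5.0);
    assert(p.coeff(3) == 1.0);  // one row past the end reads as 1
    return 0;
}
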
/external/eigen/Eigen/src/SparseCore/
MappedSparseMatrix.h
68 inline Scalar coeff(Index row, Index col) const argument
70 const Index outer = IsRowMajor ? row : col;
71 const Index inner = IsRowMajor ? col : row;
87 inline Scalar& coeffRef(Index row, Index col) argument
89 const Index outer = IsRowMajor ? row : col;
90 const Index inner = IsRowMajor ? col : row;
135 inline Index row() const { return IsRowMajor ? m_outer : index(); } function in class:Eigen::MappedSparseMatrix::InnerIterator
166 inline Index row() const { return IsRowMajor ? m_outer : index(); } function in class:Eigen::MappedSparseMatrix::ReverseInnerIterator
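
The coeff/coeffRef matches above remap (row, col) to (outer, inner) according to storage order and then look the inner index up inside that outer's stored range. A sketch of what this amounts to for a row-major (CSR-style) view; the member names here are illustrative, not the Eigen internals:

#include <algorithm>
#include <cstdio>
#include <vector>

// Assumed CSR layout for illustration: outerIndex holds row starts,
// innerIndex holds sorted column indices, values holds the entries.
struct CsrView {
    int rows, cols;
    std::vector<int> outerIndex;   // size rows+1
    std::vector<int> innerIndex;
    std::vector<double> values;

    double coeff(int row, int col) const {
        const int outer = row;             // IsRowMajor ? row : col
        const int inner = col;             // IsRowMajor ? col : row
        const int start = outerIndex[outer], end = outerIndex[outer + 1];
        const int* b = innerIndex.data();
        const int* it = std::lower_bound(b + start, b + end, inner);
        return (it != b + end && *it == inner) ? values[it - b] : 0.0;
    }
};

int main() {
    // 2x3 matrix [[0, 7, 0], [3, 0, 4]] in CSR form.
    CsrView m{2, 3, {0, 1, 3}, {1, 0, 2}, {7.0, 3.0, 4.0}};
    printf("%g %g\n", m.coeff(0, 1), m.coeff(1, 1));  // prints 7 0
    return 0;
}
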
SparseBlock.h
35 inline Index row() const { return IsRowMajor ? m_outer : this->index(); } function in class:Eigen::BlockImpl::InnerIterator
47 inline Index row() const { return IsRowMajor ? m_outer : this->index(); } function in class:Eigen::BlockImpl::ReverseInnerIterator
98 inline Index row() const { return IsRowMajor ? m_outer : this->index(); } function in class:Eigen::BlockImpl::InnerIterator
109 inline Index row() const { return IsRowMajor ? m_outer : this->index(); } function in class:Eigen::BlockImpl::ReverseInnerIterator
250 /** \returns the \a outer -th column (resp. row) of the matrix \c *this if \c *this
251 * is col-major (resp. row-major).
257 /** \returns the \a outer -th column (resp. row) of the matrix \c *this if \c *this
258 * is col-major (resp. row-major). Read-only.
264 /** \returns the \a outer -th column (resp. row) of the matrix \c *this if \c *this
265 * is col-major (resp. row
320 coeffRef(int row, int col) argument
326 coeff(int row, int col) const argument
365 inline Index row() const { return Base::row() - m_block.m_startRow.value(); } function in class:Eigen::BlockImpl::InnerIterator
388 inline Index row() const { return Base::row() - m_block.m_startRow.value(); } function in class:Eigen::BlockImpl::ReverseInnerIterator
[all...]
SparseCwiseBinaryOp.h
145 EIGEN_STRONG_INLINE Index row() const { return Lhs::IsRowMajor ? m_lhsIter.row() : index(); } function in class:Eigen::internal::sparse_cwise_binary_op_inner_iterator_selector
201 EIGEN_STRONG_INLINE Index row() const { return m_lhsIter.row(); } function in class:Eigen::internal::sparse_cwise_binary_op_inner_iterator_selector
241 EIGEN_STRONG_INLINE Index row() const { return m_lhsIter.row(); } function in class:Eigen::internal::sparse_cwise_binary_op_inner_iterator_selector
281 EIGEN_STRONG_INLINE Index row() const { return m_rhsIter.row(); } function in class:Eigen::internal::sparse_cwise_binary_op_inner_iterator_selector
SparseDenseProduct.h
127 inline Index row() const { return Transpose ? m_outer : Base::index(); } function in class:Eigen::SparseDenseOuterProduct::InnerIterator
223 typename Res::RowXpr res_j(res.row(j));
225 res_j += (alpha*it.value()) * rhs.row(it.index());
242 typename Rhs::ConstRowXpr rhs_j(rhs.row(j));
244 res.row(it.index()) += (alpha*it.value()) * rhs_j;
SparseDiagonalProduct.h
18 // 1 - diag * row-major sparse
179 inline Index row() const { return m_outer; } function in class:Eigen::internal::sparse_diagonal_product_inner_iterator_selector
/external/eigen/Eigen/src/plugins/
BlockMethods.h
17 /** \internal expression type of a row */
42 * \param startRow the first row in the block
469 * \param startRow the index of the first row in the block
491 * \param startRow the index of the first row in the block
668 * \param startRow the first row in the block
696 * \param startRow the first row in the block
731 * \sa row(), class Block */
743 /** \returns an expression of the \a i-th row of *this. Note that the numbering starts at 0.
749 inline RowXpr row(Index i) function
754 /** This is the const version of row()
755 inline ConstRowXpr row(Index i) const function
[all...]
/external/eigen/bench/
sparse_setter.cpp
309 //compute number of non-zero entries per row of A coo_tocsr
316 //cumsum the nnz per row to get Bp[]
326 int row = Aij[n].x(); local
327 int dest = Bp[row];
332 Bp[row]++;
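
The comments in sparse_setter.cpp describe the classic COO-to-CSR conversion: count the non-zeros per row, prefix-sum those counts into Bp[], then scatter each triplet into its row's slot while bumping Bp[row]. A self-contained sketch of those steps; the array names (Ai/Aj/Ax, Bp/Bj/Bx) follow the usual COO/CSR convention rather than the benchmark's exact variables:

#include <cstdio>
#include <vector>

void coo_to_csr(int n_rows, const std::vector<int>& Ai, const std::vector<int>& Aj,
                const std::vector<double>& Ax,
                std::vector<int>& Bp, std::vector<int>& Bj, std::vector<double>& Bx) {
    const int nnz = static_cast<int>(Ai.size());
    Bp.assign(n_rows + 1, 0);
    Bj.resize(nnz);
    Bx.resize(nnz);

    for (int n = 0; n < nnz; ++n) Bp[Ai[n]]++;           // nnz per row
    for (int i = 0, cum = 0; i < n_rows; ++i) {           // cumsum -> row starts
        const int tmp = Bp[i];
        Bp[i] = cum;
        cum += tmp;
    }
    Bp[n_rows] = nnz;
    for (int n = 0; n < nnz; ++n) {                       // scatter triplets
        const int row = Ai[n];
        const int dest = Bp[row];
        Bj[dest] = Aj[n];
        Bx[dest] = Ax[n];
        Bp[row]++;                                        // Bp is temporarily shifted
    }
    for (int i = n_rows; i > 0; --i) Bp[i] = Bp[i - 1];   // undo the shift
    Bp[0] = 0;
}

int main() {
    std::vector<int> Ai{1, 0, 1}, Aj{2, 1, 0}, Bp, Bj;
    std::vector<double> Ax{4, 7, 3}, Bx;
    coo_to_csr(2, Ai, Aj, Ax, Bp, Bj, Bx);
    for (int p : Bp) printf("%d ", p);   // prints 0 1 3
    printf("\n");
    return 0;
}
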
/external/eigen/unsupported/Eigen/src/KroneckerProduct/
KroneckerTensorProduct.h
49 Scalar coeff(Index row, Index col) const argument
51 return m_A.coeff(row / m_B.rows(), col / m_B.cols()) *
52 m_B.coeff(row % m_B.rows(), col % m_B.cols());
141 const Index i = itA.row() * Br + itB.row(),
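
The KroneckerTensorProduct.h match shows the whole indexing rule for C = kron(A, B): the block is picked by integer division, the entry inside the block by the remainder. A small numeric check of that rule with fixed 2x2 arrays (purely for illustration):

#include <cassert>

int main() {
    const int Br = 2, Bc = 2;            // dimensions of B
    int A[2][2] = {{1, 2}, {3, 4}};
    int B[2][2] = {{0, 5}, {6, 7}};

    // Entry (3, 1) of the 4x4 Kronecker product:
    // block index (3/2, 1/2) = (1, 0) -> A(1,0) = 3
    // inner index (3%2, 1%2) = (1, 1) -> B(1,1) = 7
    const int row = 3, col = 1;
    const int c = A[row / Br][col / Bc] * B[row % Br][col % Bc];
    assert(c == 21);
    return 0;
}
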
/external/eigen/unsupported/Eigen/src/Skyline/
SkylineMatrix.h
146 inline Scalar coeff(Index row, Index col) const { argument
147 const Index outer = IsRowMajor ? row : col;
148 const Index inner = IsRowMajor ? col : row;
195 inline Scalar& coeffRef(Index row, Index col) { argument
196 const Index outer = IsRowMajor ? row : col;
197 const Index inner = IsRowMajor ? col : row;
206 if (col > row) //upper matrix
212 if (col < row) //lower matrix
240 inline Scalar coeffLower(Index row, Index col) const {
241 const Index outer = IsRowMajor ? row
199 eigen_assert(outer < outerSize()); eigen_assert(inner < innerSize()); if (outer == inner) return this->m_data.diag(outer); if (IsRowMajor) { if (col > row) argument
235 eigen_assert(idx < outerSize()); eigen_assert(idx < innerSize()); return this->m_data.diag(idx); } inline Scalar coeffLower(Index row, Index col) const { const Index outer = IsRowMajor ? row : col; const Index inner = IsRowMajor ? col : row; argument
[all...]
/external/eigen/unsupported/Eigen/src/SparseExtra/
DynamicSparseMatrix.h
89 /** \returns the coefficient value at given position \a row, \a col
92 inline Scalar coeff(Index row, Index col) const argument
94 const Index outer = IsRowMajor ? row : col;
95 const Index inner = IsRowMajor ? col : row;
99 /** \returns a reference to the coefficient value at given position \a row, \a col
103 inline Scalar& coeffRef(Index row, Index col) argument
105 const Index outer = IsRowMajor ? row : col;
106 const Index inner = IsRowMajor ? col : row;
145 /** \returns a reference to the non zero coefficient at position \a row, \a col assuming that:
150 inline Scalar& insertBack(Index row, Inde argument
[all...]
MarketIO.h
85 inline void PutMatrixElt(Scalar value, int row, int col, std::ofstream& out) argument
87 out << row << " "<< col << " " << value << "\n";
90 inline void PutMatrixElt(std::complex<Scalar> value, int row, int col, std::ofstream& out) argument
92 out << row << " " << col << " " << value.real() << " " << value.imag() << "\n";
242 internal::PutMatrixElt(it.value(), it.row()+1, it.col()+1, out);
243 // out << it.row()+1 << " " << it.col()+1 << " " << it.value() << "\n";
RandomSetter.h
292 /** \returns a reference to the coefficient at given coordinates \a row, \a col */
293 Scalar& operator() (Index row, Index col) argument
295 const Index outer = SetterRowMajor ? row : col;
296 const Index inner = SetterRowMajor ? col : row;
/external/guava/guava/src/com/google/common/collect/
RegularImmutableTable.java
97 * expected and that isn't documented in the Javadoc. If a row Comparator
98 * is provided, cellSet() iterates across the columns in the first row,
99 * the columns in the second row, etc. If a column Comparator is provided
100 * but a row Comparator isn't, cellSet() iterates across the rows in the
234 @Override public ImmutableMap<C, V> row(R rowKey) { method in class:RegularImmutableTable.SparseImmutableTable
251 Map<C, V> row = rowMap.get(rowKey);
252 return (row != null) && row.containsKey(columnKey);
265 Map<C, V> row = rowMap.get(rowKey);
266 return (row
384 @Override public ImmutableMap<C, V> row(R rowKey) { method in class:RegularImmutableTable.DenseImmutableTable
[all...]
TreeBasedTable.java
40 * Implementation of {@code Table} whose row keys and column keys are ordered
42 * {@code TreeBasedTable}, you may provide comparators for the row keys and
51 * all optional operations are supported. Null row keys, columns keys, and
54 * <p>Lookups by row key are often faster than lookups by column key, because
56 * column(columnKey).get(rowKey)} still runs quickly, since the row key is
58 * iteration across all row keys occurs.
61 * row, both {@code row(rowKey)} and {@code rowMap().get(rowKey)} are {@link
93 * of both row and column keys.
110 * @param rowComparator the comparator that orders the row key
172 public SortedMap<C, V> row(R rowKey) { method in class:TreeBasedTable
[all...]
/external/icu/icu4c/source/common/
propsvec.c
33 int32_t prevRow; /* search optimization: remember last row seen */
44 uint32_t *v, *row; local
70 /* set the all-Unicode row and the special-value rows */
71 row=pv->v;
72 uprv_memset(row, 0, pv->rows*columns*4);
73 row[0]=0;
74 row[1]=0x110000;
75 row+=columns;
77 row[0]=cp;
78 row[
94 uint32_t *row; local
279 uint32_t *row; local
293 uint32_t *row; local
335 uint32_t *row; local
496 upvec_compactToUTrie2Handler(void *context, UChar32 start, UChar32 end, int32_t rowIndex, uint32_t *row, int32_t columns, UErrorCode *pErrorCode) argument
[all...]
/external/jpeg/
jcdctmgr.c
93 * coefficients scaled by scalefactor[row]*scalefactor[col], where
131 * coefficients scaled by scalefactor[row]*scalefactor[col], where
139 int row, col; local
152 for (row = 0; row < DCTSIZE; row++) {
156 aanscalefactor[row] * aanscalefactor[col] * 8.0)));
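
The jcdctmgr.c comments say the float-path divisors are the quantization values scaled by scalefactor[row] * scalefactor[col] * 8. Assuming the usual AAN convention (scalefactor[k] is 1 for k = 0 and cos(k*pi/16) * sqrt(2) otherwise, which matches libjpeg's tabulated constants), the table can be regenerated as below; treat the exact storage details (libjpeg's float path keeps reciprocals) as an assumption, not a claim about this file:

#include <cmath>
#include <cstdio>

int main() {
    const int DCTSIZE = 8;
    const double pi = std::acos(-1.0);
    double aanscalefactor[8];
    for (int k = 0; k < DCTSIZE; ++k)
        aanscalefactor[k] = (k == 0) ? 1.0 : std::cos(k * pi / 16.0) * std::sqrt(2.0);

    // Divisors for a flat quantization table of all 16s, as an illustration only.
    const double qval = 16.0;
    for (int row = 0; row < DCTSIZE; ++row)
        for (int col = 0; col < DCTSIZE; ++col)
            printf("%8.2f%c", qval * aanscalefactor[row] * aanscalefactor[col] * 8.0,
                   col == DCTSIZE - 1 ? '\n' : ' ');
    return 0;
}
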
jcsample.c
10 * Downsampling input data is counted in "row groups". A row group
13 * A single row group is processed in each call to the downsampler module.
23 * one row group's worth of pixels above and below the passed-in data;
25 * the first or last real pixel row.
93 int row; local
97 for (row = 0; row < num_rows; row++) {
98 ptr = image_data[row]
[all...]
rdbmp.c
59 jvirt_sarray_ptr whole_image; /* Needed to reverse row order */
60 JDIMENSION source_row; /* Current source row number */
112 * Read one row of pixels.
114 * unprocessed. We must read it out in top-to-bottom row order, and if
129 /* Fetch next row from virtual array */
158 /* Fetch next row from virtual array */
194 JDIMENSION row, col; local
197 /* Read the data into a virtual array in input-file row order. */
198 for (row = 0; row < cinf
[all...]
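
rdbmp.c buffers the whole image because BMP pixel data is stored bottom-to-top: rows are read into a virtual array in file order and handed back top-to-bottom by indexing from the end. A toy stand-in for that reversal (a vector of rows replaces libjpeg's virtual array machinery):

#include <cstdio>
#include <vector>

int main() {
    const int height = 4, width = 3;
    std::vector<std::vector<unsigned char>> whole_image(
        height, std::vector<unsigned char>(width));

    // Pretend read pass: file row r is filled with the value r.
    for (int source_row = 0; source_row < height; ++source_row)
        whole_image[source_row].assign(width, static_cast<unsigned char>(source_row));

    // Output pass: the top image row comes from the last file row.
    for (int out_row = 0; out_row < height; ++out_row) {
        const std::vector<unsigned char>& src = whole_image[height - 1 - out_row];
        printf("output row %d <- source row %d\n", out_row, src[0]);
    }
    return 0;
}
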
rdtarga.c
55 jvirt_sarray_ptr whole_image; /* Needed if funny input row order */
56 JDIMENSION current_row; /* Current logical row number to read */
70 /* This saves the correct pixel-row-expansion method for preload_image */
167 * Read one row of pixels.
268 * row order. The entire image has already been read into whole_image
269 * with proper conversion of pixel format, but it's in a funny row order.
278 /* Compute row of source that maps to current_row of normal order */
283 /* Fetch that row from virtual array */
303 JDIMENSION row; local
306 /* Read the data into a virtual array in input-file row orde
[all...]
wrbmp.c
49 jvirt_sarray_ptr whole_image; /* needed to reverse row order */
50 JDIMENSION data_width; /* JSAMPLEs per row */
51 JDIMENSION row_width; /* physical width of one row in the BMP file */
52 int pad_bytes; /* number of padding bytes needed per row */
53 JDIMENSION cur_output_row; /* next row# to write to virtual array */
81 /* Access next row in virtual array */
116 /* Access next row in virtual array */
350 JDIMENSION row; local
361 for (row = cinfo->output_height; row >
[all...]
/external/libhevc/common/x86/
ihevc_chroma_intra_pred_filters_sse42_intr.c
130 WORD32 row, col; local
160 // pu1_ref[2*(two_nt - 1 - row)]
182 for(row = 0; row < nt; row++)
187 const_temp2_4x32b = _mm_set_epi16(pu1_ref[2 * (two_nt - 1 - row) + 1], pu1_ref[2 * (two_nt - 1 - row)], pu1_ref[2 * (two_nt - 1 - row) + 1],
188 pu1_ref[2 * (two_nt - 1 - row)], pu1_ref[2 * (two_nt - 1 - row)
286 WORD32 row; local
[all...]
/external/libhevc/decoder/
ihevcd_deblk.c
119 WORD32 col, row; local
184 for(row = 0; row < (ctb_size >> 3) + 1; row++)
185 au2_ctb_no_loop_filter_flag[row] = ps_deblk->au2_ctb_no_loop_filter_flag[row] >> (ctb_size >> 3);
247 /* BS for the column - Last row is excluded and the top row is included*/
258 for(row = 0; row < ctb_siz
[all...]

Completed in 1157 milliseconds

<< 11 12 13 14 15 16 17 18 19 20 >>