/*
 *
 *
 * Copyright (c) 1994
 * Hewlett-Packard Company
 *
 * Copyright (c) 1996,1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Copyright (c) 1997
 * Moscow Center for SPARC Technology
 *
 * Copyright (c) 1999
 * Boris Fomitchev
 *
 * This material is provided "as is", with absolutely no warranty expressed
 * or implied. Any use is at your own risk.
 *
 * Permission to use or copy this software for any purpose is hereby granted
 * without fee, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */
#ifndef _STLP_VALARRAY_C
#define _STLP_VALARRAY_C

#ifndef _STLP_VALARRAY_H
# include <stl/_valarray.h>
#endif

_STLP_BEGIN_NAMESPACE

template <class _Tp>
_Valarray_bool valarray<_Tp>::operator!() const {
  _Valarray_bool __tmp(this->size(), _Valarray_bool::_NoInit());
  for (size_t __i = 0; __i < this->size(); ++__i)
    __tmp[__i] = !(*this)[__i];
  return __tmp;
}

// Behavior is undefined if __x and *this have different sizes
template <class _Tp>
valarray<_Tp>& valarray<_Tp>::operator=(const slice_array<_Tp>& __x) {
  _STLP_ASSERT(__x._M_slice.size() == this->size())
  size_t __index = __x._M_slice.start();
  for (size_t __i = 0;
       __i < __x._M_slice.size();
       ++__i, __index += __x._M_slice.stride())
    (*this)[__i] = __x._M_array[__index];
  return *this;
}

template <class _Tp>
valarray<_Tp> valarray<_Tp>::operator[](slice __slice) const {
  valarray<_Tp> __tmp(__slice.size(), _NoInit());
  size_t __index = __slice.start();
  for (size_t __i = 0;
       __i < __slice.size();
       ++__i, __index += __slice.stride())
    __tmp[__i] = (*this)[__index];
  return __tmp;
}
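
// Usage sketch (illustrative only, not part of the original source): a
// slice(__start, __size, __stride) picks __size elements starting at
// __start, spaced __stride apart.  Assuming v holds {0, 1, ..., 9}:
//   std::valarray<int> s = v[std::slice(1, 3, 2)];   // s == {1, 3, 5}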

template <class _Size>
bool _Gslice_Iter_tmpl<_Size>::_M_incr() {
  size_t __dim = _M_indices.size() - 1;
  ++_M_step;
  for (;;) {
    _M_1d_idx += _M_gslice._M_strides[__dim];
    if (++_M_indices[__dim] != _M_gslice._M_lengths[__dim])
      return true;
    else if (__dim != 0) {
      _M_1d_idx -= _M_gslice._M_strides[__dim] * _M_gslice._M_lengths[__dim];
      _M_indices[__dim] = 0;
      --__dim;
    }
    else
      return false;
  }
}

// Behavior is undefined if __x and *this have different sizes, or if
// __x was constructed from a degenerate gslice.
template <class _Tp>
valarray<_Tp>& valarray<_Tp>::operator=(const gslice_array<_Tp>& __x) {
  if (this->size() != 0) {
    _Gslice_Iter __i(__x._M_gslice);
    do
      (*this)[__i._M_step] = __x._M_array[__i._M_1d_idx];
    while(__i._M_incr());
  }
  return *this;
}

template <class _Tp>
valarray<_Tp> valarray<_Tp>::operator[](const gslice& __slice) const {
  valarray<_Tp> __tmp(__slice._M_size(), _NoInit());
  if (__tmp.size() != 0) {
    _Gslice_Iter __i(__slice);
    do __tmp[__i._M_step] = (*this)[__i._M_1d_idx]; while(__i._M_incr());
  }
  return __tmp;
}
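
// Usage sketch (illustrative only, not part of the original source): a
// gslice(__start, __lengths, __strides) generalizes slice to several
// dimensions; the generated indices are __start + sum of index*stride
// over each dimension.  Assuming v holds {0, 1, ..., 11}:
//   size_t __len[] = {2, 3};
//   size_t __str[] = {4, 1};
//   std::valarray<size_t> __l(__len, 2), __s(__str, 2);
//   std::valarray<int> g = v[std::gslice(0, __l, __s)];
//   // generated indices 0,1,2,4,5,6  ->  g == {0, 1, 2, 4, 5, 6}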

template <class _Tp>
valarray<_Tp> valarray<_Tp>::operator[](const _Valarray_bool& __mask) const {
  size_t _p_size = 0;
  {
    for (size_t __i = 0; __i < __mask.size(); ++__i)
      if (__mask[__i]) ++_p_size;
  }

  valarray<_Tp> __tmp(_p_size, _NoInit());
  size_t __idx = 0;
  {
    for (size_t __i = 0; __i < __mask.size(); ++__i)
      if (__mask[__i]) __tmp[__idx++] = (*this)[__i];
  }

  return __tmp;
}
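
// Usage sketch (illustrative only, not part of the original source):
// indexing with a bool mask keeps the elements whose mask entry is true.
//   int __vals[] = {10, 20, 30, 40, 50};
//   std::valarray<int> v(__vals, 5);
//   std::valarray<int> m = v[v > 25];   // m == {30, 40, 50}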

template <class _Tp>
valarray<_Tp>& valarray<_Tp>::operator=(const indirect_array<_Tp>& __x) {
  _STLP_ASSERT(__x._M_addr.size() == this->size())
  for (size_t __i = 0; __i < __x._M_addr.size(); ++__i)
    (*this)[__i] = __x._M_array[__x._M_addr[__i]];
  return *this;
}

template <class _Tp>
valarray<_Tp>
valarray<_Tp>::operator[](const _Valarray_size_t& __addr) const {
  valarray<_Tp> __tmp(__addr.size(), _NoInit());
  for (size_t __i = 0; __i < __addr.size(); ++__i)
    __tmp[__i] = (*this)[__addr[__i]];
  return __tmp;
}
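
// Usage sketch (illustrative only, not part of the original source):
// indexing with a valarray<size_t> gathers the elements at those indices,
// in the order given.  Assuming v holds {10, 20, 30, 40, 50}:
//   size_t __which[] = {3, 0, 3};
//   std::valarray<size_t> __idx(__which, 3);
//   std::valarray<int> g = v[__idx];   // g == {40, 10, 40}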

//----------------------------------------------------------------------
// Other valarray noninline member functions

// Shift and cshift

template <class _Tp>
valarray<_Tp> valarray<_Tp>::shift(int __n) const {
  valarray<_Tp> __tmp(this->size());

  if (__n >= 0) {
    if (__n < this->size())
      copy(this->_M_first + __n, this->_M_first + this->size(),
           __tmp._M_first);
  }
  else {
    if (-__n < this->size())
      copy(this->_M_first, this->_M_first + this->size() + __n,
           __tmp._M_first - __n);
  }
  return __tmp;
}
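
// Usage sketch (illustrative only, not part of the original source):
// shift moves elements toward index 0 for positive __n (toward the end
// for negative __n) and leaves the vacated positions value-initialized.
// Assuming v holds {1, 2, 3, 4, 5}:
//   v.shift(2)    // {3, 4, 5, 0, 0}
//   v.shift(-2)   // {0, 0, 1, 2, 3}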

template <class _Tp>
valarray<_Tp> valarray<_Tp>::cshift(int __m) const {
  valarray<_Tp> __tmp(this->size());

  // Reduce __m to an equivalent number in the range [0, size()).  We
  // have to be careful with negative numbers, since the sign of a % b
  // is implementation-defined when a < 0 (only C++11 pinned it down).
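  // Worked example (illustrative, not in the original source): with
  // size() == 5 and __m == -2, the "%" below leaves __n == -2 on a
  // truncating implementation; the __n < 0 fix-up then yields 3, the
  // equivalent non-negative rotation.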
  long __n = __m;
  if (this->size() < (numeric_limits<long>::max)())
    __n %= long(this->size());
  if (__n < 0)
    __n += this->size();

  copy(this->_M_first,       this->_M_first + __n,
       __tmp._M_first + (this->size() - __n));
  copy(this->_M_first + __n, this->_M_first + this->size(),
       __tmp._M_first);

  return __tmp;
}
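
// Usage sketch (illustrative only, not part of the original source):
// cshift rotates circularly, so no elements are lost.  Assuming v holds
// {1, 2, 3, 4, 5}:
//   v.cshift(2)    // {3, 4, 5, 1, 2}
//   v.cshift(-2)   // {4, 5, 1, 2, 3}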

_STLP_END_NAMESPACE

#endif /*  _STLP_VALARRAY_C */

// Local Variables:
// mode:C++
// End: