diff --git a/doc/excuter/op-mem-cuda/list.md b/doc/excuter/op-mem-cuda/list.md
index 36d05762..e9f62d96 100644
--- a/doc/excuter/op-mem-cuda/list.md
+++ b/doc/excuter/op-mem-cuda/list.md
@@ -6,13 +6,12 @@
 |-----------|--------|------------|--------------|----------------|
 | reducemax | miaobyte | reducemax(tensor A, vector dims, var keepdims)->(tensor B) | B = reducemax(A, axis=[1 2], keepdims=false) | reducemax(tensor A, vector dims, var keepdims)->(tensor B) |
 | broadcastTo | miaobyte | broadcastTo(tensor A, vector new_shape)->(tensor B) | T2 = T1.broadcastTo(new_shape=[4,3,2]) | broadcastTo(tensor A, vector new_shape)->(tensor B) |
-| concat | miaobyte | concat(listtensor tensors, var axis)->(tensor result) | Tresult = concat([T1, T2...], axis=3) | concat(listtensor tensors, var axis)->(tensor result) |
+| concat | miaobyte | concat(listtensor tensors, var dim)->(tensor result) | Tresult = concat([T1, T2...], axis=3) | concat(listtensor tensors, var dim)->(tensor result) |
 | transpose | miaobyte | transpose(tensor A, vector dim_order)->(tensor C) | T2 = T1.transpose(dimorder=[1,0]) | transpose(tensor A, vector dim_order)->(tensor C) |
 | reshape | miaobyte | reshape(tensor A, vector shape)->(tensor B) | T1.reshape(shape)->T2 | reshape(tensor A, vector shape)->(tensor B) |
 | matmul | cublas | matmul(tensor A, tensor B)->(tensor C) | T3=T1 @ T2 | matmul(tensor A, tensor B)->(tensor C) |
 | comparescalar | miaobyte | comparescalar(tensor A, var scalar)->(tensor mask) | mask=compare(T1, scalar) | comparescalar(tensor A, var scalar)->(tensor mask) |
-| add | cublas | add(tensor a, tensor b)->(tensor c) | T3=T1+T2 | add(tensor a, tensor b)->(tensor c) |
-| add | miaobyte | add(tensor a, tensor b)->(tensor c) | T3=T1+T2 | add(tensor a, tensor b)->(tensor c) |
+| compare | miaobyte | compare(tensor A, tensor B)->(tensor mask) | mask=compare(T1, T2) | compare(tensor A, tensor B)->(tensor mask) |
 | uniform | miaobyte | uniform(tensor t, var low, var high, var seed)->() | uniform(T1,low,high,seed) | uniform(tensor t, var low, var high, var seed)->() |
 | addscalar | miaobyte | addscalar(tensor A, var b)->(tensor C) | T3=T1+scalar | addscalar(tensor A, var b)->(tensor C) |
 | log | miaobyte | log(tensor A)->(tensor C) | T3=log(T1) | log(tensor A)->(tensor C) |
@@ -20,29 +19,32 @@
 | divscalar | miaobyte | divscalar(tensor A, var scalar)->(tensor C) | T3=scalar/T1 | divscalar(tensor A, var scalar)->(tensor C) |
 | sin | miaobyte | sin(tensor A)->(tensor C) | T3=sin(T1) | sin(tensor A)->(tensor C) |
 | tan | miaobyte | tan(tensor A)->(tensor C) | T3=tan(T1) | tan(tensor A)->(tensor C) |
+| add | cublas | add(tensor a, tensor b)->(tensor c) | T3=T1+T2 | add(tensor a, tensor b)->(tensor c) |
+| add | miaobyte | add(tensor a, tensor b)->(tensor c) | T3=T1+T2 | add(tensor a, tensor b)->(tensor c) |
+| copytensor | none | copytensor(tensor src, tensor dst)->() | T2.data = T1.data | copytensor(tensor src, tensor dst)->() |
+| prod | miaobyte | prod(tensor A, vector dims, var keepdims)->(tensor B) | B = prod(A, axis=[1 2], keepdims=false) | prod(tensor A, vector dims, var keepdims)->(tensor B) |
+| min | miaobyte | min(tensor A, tensor B)->(tensor C) | T3=min(T1, T2) | min(tensor A, tensor B)->(tensor C) |
 | print | miaobyte | print(tensor )->() | print(T1) | print(tensor )->() |
 | print | miaobyte | print(tensor , var )->() | print(T1) | print(tensor , var )->() |
 | newtensor | none | newtensor(vector shape)->(tensor tensor1) | T1 = zeros(shape) | newtensor(vector shape)->(tensor tensor1) |
 | newtensor | none | newtensor(var shape)->(tensor tensor1) | T1 = zeros(shape) | newtensor(var shape)->(tensor tensor1) |
+| minscalar | miaobyte | minscalar(tensor A, var scalar)->(tensor C) | T3=min(T1, scalar) | minscalar(tensor A, var scalar)->(tensor C) |
+| rdivscalar | miaobyte | rdivscalar(var scalar, tensor A)->(tensor C) | T3=scalar/T1 | rdivscalar(var scalar, tensor A)->(tensor C) |
+| constant | miaobyte | constant(tensor t, var value)->() | constant(T1) | constant(tensor t, var value)->() |
+| powscalar | miaobyte | powscalar(tensor A, var scalar)->(tensor C) | T3=pow(T1, scalar) | powscalar(tensor A, var scalar)->(tensor C) |
 | vecset | none | vecset(vector value)->(vector name) | shape = [3 4 5] | vecset(vector value)->(vector name) |
 | reducemin | miaobyte | reducemin(tensor A, vector dims, var keepdims)->(tensor B) | B = reducemin(A, axis=[1 2], keepdims=false) | reducemin(tensor A, vector dims, var keepdims)->(tensor B) |
 | subscalar | miaobyte | subscalar(tensor A, var b)->(tensor C) | T3=T1-scalar | subscalar(tensor A, var b)->(tensor C) |
 | sqrt | miaobyte | sqrt(tensor A)->(tensor C) | T3=sqrt(T1) | sqrt(tensor A)->(tensor C) |
+| sub | miaobyte | sub(tensor A, tensor B)->(tensor C) | T3=T1-T2 | sub(tensor A, tensor B)->(tensor C) |
 | sum | miaobyte | sum(tensor A, vector dims, var keepdims)->(tensor B) | B = sum(A, axis=[1 2], keepdims=false) | sum(tensor A, vector dims, var keepdims)->(tensor B) |
 | argset | none | argset(var value)->(var name) | var argname = argvalue | argset(var value)->(var name) |
-| sub | miaobyte | sub(tensor A, tensor B)->(tensor C) | T3=T1-T2 | sub(tensor A, tensor B)->(tensor C) |
 | mulscalar | miaobyte | mulscalar(tensor A, var b)->(tensor C) | T3=T1*scalar | mulscalar(tensor A, var b)->(tensor C) |
 | div | miaobyte | div(tensor A, tensor B)->(tensor C) | T3=T1/T2 | div(tensor A, tensor B)->(tensor C) |
-| constant | miaobyte | constant(tensor t, var value)->() | constant(T1) | constant(tensor t, var value)->() |
-| powscalar | miaobyte | powscalar(tensor A, var scalar)->(tensor C) | T3=pow(T1, scalar) | powscalar(tensor A, var scalar)->(tensor C) |
 | max | miaobyte | max(tensor A, tensor B)->(tensor C) | T3=max(T1, T2) | max(tensor A, tensor B)->(tensor C) |
 | pow | miaobyte | pow(tensor A, tensor B)->(tensor C) | T3=pow(T1, T2) | pow(tensor A, tensor B)->(tensor C) |
 | maxscalar | miaobyte | maxscalar(tensor A, var scalar)->(tensor C) | T3=max(T1, scalar) | maxscalar(tensor A, var scalar)->(tensor C) |
 | mul | miaobyte | mul(tensor A, tensor B)->(tensor C) | T3=T1*T2 | mul(tensor A, tensor B)->(tensor C) |
 | exp | miaobyte | exp(tensor A)->(tensor C) | T3=exp(T1) | exp(tensor A)->(tensor C) |
-| rdivscalar | miaobyte | rdivscalar(var scalar, tensor A)->(tensor C) | T3=scalar/T1 | rdivscalar(var scalar, tensor A)->(tensor C) |
-| minscalar | miaobyte | minscalar(tensor A, var scalar)->(tensor C) | T3=min(T1, scalar) | minscalar(tensor A, var scalar)->(tensor C) |
+| deltensor | none | deltensor(tensor t)->() | del T1 | deltensor(tensor t)->() |
 | cos | miaobyte | cos(tensor A)->(tensor C) | T3=cos(T1) | cos(tensor A)->(tensor C) |
-| prod | miaobyte | prod(tensor A, vector dims, var keepdims)->(tensor B) | B = prod(A, axis=[1 2], keepdims=false) | prod(tensor A, vector dims, var keepdims)->(tensor B) |
-| min | miaobyte | min(tensor A, tensor B)->(tensor C) | T3=min(T1, T2) | min(tensor A, tensor B)->(tensor C) |
-| compare | miaobyte | compare(tensor A, tensor B)->(tensor mask) | mask=compare(T1, T2) | compare(tensor A, tensor B)->(tensor mask) |
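The copytensor and deltensor rows above map onto the tensor-lifecycle API this patch introduces further down (excuter/cpp-common/src/deepx/tensorfunc/tensorlife.hpp plus the per-backend tensorlife_miaobyte.hpp implementations). A minimal sketch of how those calls compose on the CPU backend, assuming the headers from this patch; the main() wiring is illustrative, not part of the patch:

#include "deepx/tensor.hpp"
#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" // declares New<T> and copy<T> in this patch

using namespace deepx;
using namespace deepx::tensorfunc;

int main()
{
    // newtensor: allocate a tensor from a shape
    Tensor<float> src = New<float>({2, 3});
    Tensor<float> dst = New<float>({2, 3});
    // copytensor: element copy between same-precision tensors via the tensor's copyer
    copy(src, dst);
    // deltensor: in the executor this is an explicit op; here the Tensor's deleter frees the buffer
    return 0;
}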
diff --git a/doc/excuter/op-mem-ompsimd/list.md b/doc/excuter/op-mem-ompsimd/list.md
index 2466718d..9593ef2f 100644
--- a/doc/excuter/op-mem-ompsimd/list.md
+++ b/doc/excuter/op-mem-ompsimd/list.md
@@ -8,30 +8,36 @@
 | broadcastTo | miaobyte | broadcastTo(tensor A, vector new_shape)->(tensor B) | T2 = T1.broadcastTo(new_shape=[4,3,2]) | broadcastTo(tensor A, vector new_shape)->(tensor B) |
 | concat | miaobyte | concat(listtensor tensors, var dim)->(tensor result) | Tresult = concat([T1, T2...], axis=3) | concat(listtensor tensors, var dim)->(tensor result) |
 | transpose | miaobyte | transpose(tensor A, vector dim_order)->(tensor C) | T1.transpose(dimorder=[1,0])->T2 | transpose(tensor A, vector dim_order)->(tensor C) |
-| add | cblas | add(tensor a, tensor b)->(tensor c) | T3=T1+T2 | add(tensor a, tensor b)->(tensor c) |
-| add | miaobyte | add(tensor a, tensor b)->(tensor c) | T3=T1+T2 | add(tensor a, tensor b)->(tensor c) |
+| reshape | miaobyte | reshape(tensor A, vector shape)->(tensor B) | T1.reshape(shape)->T2 | reshape(tensor A, vector shape)->(tensor B) |
+| matmul | cblas | matmul(tensor A, tensor B)->(tensor C) | T3=T1 @ T2 | matmul(tensor A, tensor B)->(tensor C) |
+| matmul | miaobyte | matmul(tensor A, tensor B)->(tensor C) | T3=T1 @ T2 | matmul(tensor A, tensor B)->(tensor C) |
 | comparescalar | miaobyte | comparescalar(tensor A, var scalar)->(tensor mask) | mask=compare(T1,scalar) | comparescalar(tensor A, var scalar)->(tensor mask) |
 | uniform | miaobyte | uniform(tensor t, var low, var high, var seed)->() | uniform(T1,low,high,seed) | uniform(tensor t, var low, var high, var seed)->() |
 | addscalar | miaobyte | addscalar(tensor a, var scalar)->(tensor c) | T3=T1+scalar | addscalar(tensor a, var scalar)->(tensor c) |
 | log | miaobyte | log(tensor A)->(tensor C) | T3=log(T1) | log(tensor A)->(tensor C) |
-| reshape | miaobyte | reshape(tensor A, vector shape)->(tensor B) | T1.reshape(shape)->T2 | reshape(tensor A, vector shape)->(tensor B) |
 | arange | miaobyte | arange(tensor t, var start, var step)->() | arange(T1,start,step) | arange(tensor t, var start, var step)->() |
 | divscalar | miaobyte | divscalar(tensor A, var scalar)->(tensor C) | T3=T1/scalar | divscalar(tensor A, var scalar)->(tensor C) |
+| add | cblas | add(tensor a, tensor b)->(tensor c) | T3=T1+T2 | add(tensor a, tensor b)->(tensor c) |
+| add | miaobyte | add(tensor a, tensor b)->(tensor c) | T3=T1+T2 | add(tensor a, tensor b)->(tensor c) |
+| copytensor | none | copytensor(tensor src, tensor dst)->() | T2.data = T1.data | copytensor(tensor src, tensor dst)->() |
+| prod | miaobyte | prod(tensor A, vector axis, var keepdims)->(tensor B) | B = prod(A, axis=[1 2], keepdims=false) | prod(tensor A, vector axis, var keepdims)->(tensor B) |
+| min | miaobyte | min(tensor A, tensor B)->(tensor C) | T3=min(T1,T2) | min(tensor A, tensor B)->(tensor C) |
 | print | miaobyte | print(tensor )->() | print(T1) | print(tensor )->() |
 | print | miaobyte | print(tensor , var )->() | print(T1) | print(tensor , var )->() |
 | newtensor | none | newtensor(vector shape)->(tensor tensor1) | T1 =Tensor(shape=[...]) | newtensor(vector shape)->(tensor tensor1) |
-| newtensor | none | newtensor(var shape)->(tensor tensor1) | T1 =Tensor(shape=[...]) | newtensor(var shape)->(tensor tensor1) |
+| newtensor | none | newtensor(var shape)->(tensor t) | T1 =Tensor(shape=[...]) | newtensor(var shape)->(tensor t) |
+| deltensor | none | deltensor(tensor t)->() | del T1 | deltensor(tensor t)->() |
+| constant | miaobyte | constant(tensor t, var value)->() | constant(T1,value) | constant(tensor t, var value)->() |
+| powscalar | miaobyte | powscalar(tensor A, var scalar)->(tensor C) | T3=T1^scalar | powscalar(tensor A, var scalar)->(tensor C) |
 | vecset | none | vecset(vector value)->(vector name) | shape = [3 4 5] | vecset(vector value)->(vector name) |
 | reducemin | miaobyte | reducemin(tensor A, vector axis, var keepdims)->(tensor B) | B = reducemin(A, axis=[1 2], keepdims=false) | reducemin(tensor A, vector axis, var keepdims)->(tensor B) |
 | subscalar | miaobyte | subscalar(tensor a, var scalar)->(tensor c) | T3=T1-scalar | subscalar(tensor a, var scalar)->(tensor c) |
 | sqrt | miaobyte | sqrt(tensor A)->(tensor C) | T3=sqrt(T1) | sqrt(tensor A)->(tensor C) |
+| sub | miaobyte | sub(tensor a, tensor b)->(tensor c) | T3=T1-T2 | sub(tensor a, tensor b)->(tensor c) |
 | sum | miaobyte | sum(tensor A, vector axis, var keepdims)->(tensor B) | B = sum(A, axis=[1 2], keepdims=false) | sum(tensor A, vector axis, var keepdims)->(tensor B) |
 | argset | none | argset(var value)->(var name) | var argname = argvalue | argset(var value)->(var name) |
-| sub | miaobyte | sub(tensor a, tensor b)->(tensor c) | T3=T1-T2 | sub(tensor a, tensor b)->(tensor c) |
 | mulscalar | miaobyte | mulscalar(tensor A, var b)->(tensor C) | T3=T1*scalar | mulscalar(tensor A, var b)->(tensor C) |
 | div | miaobyte | div(tensor A, tensor B)->(tensor C) | T3=T1/T2 | div(tensor A, tensor B)->(tensor C) |
-| constant | miaobyte | constant(tensor t, var value)->() | constant(T1,value) | constant(tensor t, var value)->() |
-| powscalar | miaobyte | powscalar(tensor A, var scalar)->(tensor C) | T3=T1^scalar | powscalar(tensor A, var scalar)->(tensor C) |
 | max | miaobyte | max(tensor A, tensor B)->(tensor C) | T3=max(T1,T2) | max(tensor A, tensor B)->(tensor C) |
 | pow | miaobyte | pow(tensor A, tensor B)->(tensor C) | T3=T1^T2 | pow(tensor A, tensor B)->(tensor C) |
 | maxscalar | miaobyte | maxscalar(tensor A, var scalar)->(tensor C) | T3=max(T1,scalar) | maxscalar(tensor A, var scalar)->(tensor C) |
@@ -39,8 +45,4 @@
 | exp | miaobyte | exp(tensor A)->(tensor C) | T3=exp(T1) | exp(tensor A)->(tensor C) |
 | rdivscalar | miaobyte | rdivscalar(var scalar, tensor A)->(tensor C) | T3=scalar/T1 | rdivscalar(var scalar, tensor A)->(tensor C) |
 | minscalar | miaobyte | minscalar(tensor A, var scalar)->(tensor C) | T3=min(T1,scalar) | minscalar(tensor A, var scalar)->(tensor C) |
-| prod | miaobyte | prod(tensor A, vector axis, var keepdims)->(tensor B) | B = prod(A, axis=[1 2], keepdims=false) | prod(tensor A, vector axis, var keepdims)->(tensor B) |
-| min | miaobyte | min(tensor A, tensor B)->(tensor C) | T3=min(T1,T2) | min(tensor A, tensor B)->(tensor C) |
 | compare | miaobyte | compare(tensor A, tensor B)->(tensor mask) | mask=compare(T1,T2) | compare(tensor A, tensor B)->(tensor mask) |
-| matmul | cblas | matmul(tensor A, tensor B)->(tensor C) | T3=T1 @ T2 | matmul(tensor A, tensor B)->(tensor C) |
-| matmul | miaobyte | matmul(tensor A, tensor B)->(tensor C) | T3=T1 @ T2 | matmul(tensor A, tensor B)->(tensor C) |
diff --git a/excuter/cpp-common/src/deepx/dtype.hpp b/excuter/cpp-common/src/deepx/dtype.hpp
index ae69e4ab..810566c8 100644
--- a/excuter/cpp-common/src/deepx/dtype.hpp
+++ b/excuter/cpp-common/src/deepx/dtype.hpp
@@ -10,7 +10,7 @@
 namespace deepx
 {
-    using namespace std;
+    using namespace std;

     template <typename T>
     T to(const std::string &textvalue)
@@ -226,15 +226,19 @@ namespace deepx
             return value != other.value;
         }

-        // 判断当前类型是否在other类型的精度范围内
-        bool in(const TypeDef
&other) const + // 判断other是否在当前类型的精度范围内 + bool match(const TypeDef &other) const { // 类型必须相同 - if (parts.category != other.parts.category) + uint8_t this_cat = static_cast(parts.category); + uint8_t other_cat = static_cast(other.parts.category); + if ((this_cat & other_cat) != this_cat) { return false; } - // other的精度必须包含当前精度(通过位与运算判断) + + // 使用位操作检查precision + // 检查this的precision位是否都在other的precision中 uint16_t this_prec = static_cast(parts.precision); uint16_t other_prec = static_cast(other.parts.precision); return (this_prec & other_prec) == this_prec; diff --git a/excuter/cpp-common/src/deepx/tensorfunc/elementwise.hpp b/excuter/cpp-common/src/deepx/tensorfunc/elementwise.hpp index 4f50c6a2..8c9ebd94 100644 --- a/excuter/cpp-common/src/deepx/tensorfunc/elementwise.hpp +++ b/excuter/cpp-common/src/deepx/tensorfunc/elementwise.hpp @@ -323,6 +323,21 @@ namespace deepx::tensorfunc { comparescalarDispatcher::comparescalar(A, scalar, mask); } + + // 判断两个张量是否相等,TODO + template + struct equalDispatcher + { + static bool equal(const Tensor &A, const Tensor &B, float epsilon=1e-6) = delete; + }; + + template + bool equal(const Tensor &A, const Tensor &B,float epsilon=1e-6) + { + return equalDispatcher::equal(A, B, epsilon); + } + + } // namespace deepx::tensorfunc #endif // DEEPX_TENSORFUNC_ELEMENTWISE_HPP diff --git a/excuter/cpp-common/src/deepx/tensorfunc/tensorlife.hpp b/excuter/cpp-common/src/deepx/tensorfunc/tensorlife.hpp new file mode 100644 index 00000000..0dc884ac --- /dev/null +++ b/excuter/cpp-common/src/deepx/tensorfunc/tensorlife.hpp @@ -0,0 +1,23 @@ +#ifndef DEEPX_TENSORFUNC_TENSORLIFE_HPP +#define DEEPX_TENSORFUNC_TENSORLIFE_HPP + +#include "deepx/tensor.hpp" + +namespace deepx::tensorfunc +{ + //New + template < typename T> + Tensor New(const std::vector &shape); + + template + Tensor New(const std::initializer_list &shape){ + std::vector shape_vec(shape); + return New(shape_vec); + } + + //copy + template + void copy(const Tensor &src,Tensor &dst); + +} +#endif \ No newline at end of file diff --git a/excuter/cpp-common/src/deepx/tf/tffactory.hpp b/excuter/cpp-common/src/deepx/tf/tffactory.hpp index 66f121ca..3262ffaf 100644 --- a/excuter/cpp-common/src/deepx/tf/tffactory.hpp +++ b/excuter/cpp-common/src/deepx/tf/tffactory.hpp @@ -18,23 +18,14 @@ namespace deepx::tf } private: - static bool is_compatible_types(const vector &a, const vector &b) + static bool is_compatible_types(const vector &def, const vector &other) { - if (a.size() != b.size()) + if (def.size() != other.size()) return false; - for (size_t i = 0; i < a.size(); i++) + for (size_t i = 0; i < def.size(); i++) { - if ((static_cast(a[i].parts.category) & - static_cast(b[i].parts.category)) == 0) - { + if (!def[i].match(other[i])) return false; - } - if (a[i].parts.precision != Precision::Any && - b[i].parts.precision != Precision::Any && - a[i].parts.precision != b[i].parts.precision) - { - return false; - } } return true; } diff --git a/excuter/op-mem-cuda/src/client/tfs.cpp b/excuter/op-mem-cuda/src/client/tfs.cpp index 741cff51..f8e79c7b 100644 --- a/excuter/op-mem-cuda/src/client/tfs.cpp +++ b/excuter/op-mem-cuda/src/client/tfs.cpp @@ -1,6 +1,6 @@ #include "deepx/tf/arg.hpp" #include "deepx/tf/tf.hpp" -#include "deepx/tf/new.hpp" +#include "deepx/tf/tensorlife.hpp" #include "deepx/tf/io.hpp" #include "deepx/tf/init.hpp" #include "deepx/tf/elementwise_basic.hpp" @@ -52,7 +52,19 @@ namespace deepx::tf { Param("tensor1", DataCategory::Tensor, Precision::Any), }))); - // opfactory.add_op(DelTensor()); + //copytensor + 
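Aside on the matching logic above: TypeDef::match plus the slimmed-down is_compatible_types reduce signature matching to one bitwise subset test per parameter, applied to category and precision alike. A standalone sketch of that test with hypothetical stand-in masks (the real DataCategory/Precision enums live in dtype.hpp):

#include <cstdint>
#include <cassert>

int main()
{
    const uint16_t Float32 = 1 << 0;
    const uint16_t Float64 = 1 << 1;
    const uint16_t Any = Float32 | Float64; // an "Any"-style mask sets every bit

    // m.match(n): every bit of m must also be set in n, i.e. (m & n) == m
    auto match = [](uint16_t m, uint16_t n) { return (m & n) == m; };

    assert(match(Float32, Any));  // a concrete mask is contained in Any
    assert(!match(Any, Float32)); // Any is not contained in a concrete mask
    return 0;
}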
tffactory.add_tf(std::make_shared(vector( + { + Param("src", DataCategory::Tensor, Precision::Any), + Param("dst", DataCategory::Tensor, Precision::Any), + }), + vector())); + //deltensor + tffactory.add_tf(std::make_shared(vector( + { + Param("t", DataCategory::Tensor, Precision::Any), + }), + vector())); } // init diff --git a/excuter/op-mem-cuda/src/deepx/tensorfunc/new.hpp b/excuter/op-mem-cuda/src/deepx/tensorfunc/tensorlife_miaobyte.hpp similarity index 50% rename from excuter/op-mem-cuda/src/deepx/tensorfunc/new.hpp rename to excuter/op-mem-cuda/src/deepx/tensorfunc/tensorlife_miaobyte.hpp index fe184dd7..acf28e9f 100644 --- a/excuter/op-mem-cuda/src/deepx/tensorfunc/new.hpp +++ b/excuter/op-mem-cuda/src/deepx/tensorfunc/tensorlife_miaobyte.hpp @@ -1,10 +1,11 @@ -#ifndef DEEPX_OP_CUDA_NEW_HPP -#define DEEPX_OP_CUDA_NEW_HPP +#ifndef DEEPX_TENSORFUNC_TENSORLIFE_MIAOBYTE_HPP +#define DEEPX_TENSORFUNC_TENSORLIFE_MIAOBYTE_HPP #include #include #include "deepx/tensor.hpp" #include "deepx/dtype_cuda.hpp" +#include "deepx/tensorfunc/tensorlife.hpp" // 具体的张量类 namespace deepx::tensorfunc { @@ -32,7 +33,7 @@ namespace deepx::tensorfunc } template - Tensor New(const std::vector &shapedata, T *data = nullptr) + Tensor New(const std::vector &shapedata) { Shape shape(shapedata); shape.dtype=precision(); @@ -42,39 +43,15 @@ namespace deepx::tensorfunc tensor.copyer = dataCopy; tensor.newer = dataNew; - if (data != nullptr) { - tensor.data = data; - } else { - tensor.data = dataNew(shape.size); - } - return tensor; - } - - template - Tensor New(const std::initializer_list &shapedata, T *data = nullptr) - { - Shape shape(shapedata); - shape.dtype=precision(); - Tensor tensor(shape); - tensor.device = CUDA; // 使用 CUDA 设备 - tensor.deleter = dataFree; - tensor.copyer = dataCopy; - tensor.newer = dataNew; - - if (data != nullptr) { - tensor.data = data; - } else { - tensor.data = dataNew(shape.size); - } + tensor.data = dataNew(shape.size); return tensor; } - + template - Tensor clone(const Tensor &tensor) + void copy(const Tensor &src,Tensor &dst) { - Tensor result = New(tensor.shape.shape); - tensor.copyer(tensor.data, result.data, tensor.shape.size); - return result; + dst.shape=src.shape; + dst.copyer(src.data, dst.data, src.shape.size); } } -#endif // DEEPX_OP_CUDA_NEW_HPP +#endif // DEEPX_TENSORFUNC_TENSORLIFE_MIAOBYTE_HPP diff --git a/excuter/op-mem-cuda/src/deepx/tf/changeshape.hpp b/excuter/op-mem-cuda/src/deepx/tf/changeshape.hpp index 8b6bba4d..a45f5ba8 100644 --- a/excuter/op-mem-cuda/src/deepx/tf/changeshape.hpp +++ b/excuter/op-mem-cuda/src/deepx/tf/changeshape.hpp @@ -160,7 +160,7 @@ namespace deepx::tf } int run(shared_ptr mem, string &error) override { - if (!checktensors({ this->returns[0].textvalue}, mem, error)!=0) + if (!checktensors({ this->returns[0].textvalue}, mem, error)!=0) { return 1; } diff --git a/excuter/op-mem-cuda/src/deepx/tf/matmul.hpp b/excuter/op-mem-cuda/src/deepx/tf/matmul.hpp index 76c444fd..9a4c5bc6 100644 --- a/excuter/op-mem-cuda/src/deepx/tf/matmul.hpp +++ b/excuter/op-mem-cuda/src/deepx/tf/matmul.hpp @@ -22,16 +22,6 @@ namespace deepx::tf this->args = args; this->returns = returns; } - - MatMul(string text) - { - this->parse(text); - this->author = Author::name(); - if (this->name != "matmul") - { - throw std::runtime_error("Invalid name: " + this->name); - } - } string math_formula() const override { return "T3=T1 @ T2"; diff --git a/excuter/op-mem-cuda/src/deepx/tf/new.hpp b/excuter/op-mem-cuda/src/deepx/tf/tensorlife.hpp similarity index 62% rename from 
excuter/op-mem-cuda/src/deepx/tf/new.hpp rename to excuter/op-mem-cuda/src/deepx/tf/tensorlife.hpp index 26c9bd61..8e303037 100644 --- a/excuter/op-mem-cuda/src/deepx/tf/new.hpp +++ b/excuter/op-mem-cuda/src/deepx/tf/tensorlife.hpp @@ -1,12 +1,12 @@ -#ifndef DEEPX_TF_NEW_HPP -#define DEEPX_TF_NEW_HPP +#ifndef DEEPX_TF_TENSORLIFE_HPP +#define DEEPX_TF_TENSORLIFE_HPP #include #include #include "deepx/tf/tf.hpp" #include "deepx/dtype.hpp" #include "deepx/mem/mem.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "stdutil/num.hpp" namespace deepx::tf @@ -20,15 +20,6 @@ namespace deepx::tf this->args = args; this->returns = returns; } - - NewTensor(string text) - { - this->parse(text); - if (this->name != "newtensor") - { - throw std::runtime_error("Invalid name: " + this->name); - } - } int run(shared_ptr mem, string &error) override { string name = this->returns[0].textvalue; @@ -38,8 +29,8 @@ namespace deepx::tf error = "newtensor: return type must include tensor category"; return 1; } - vector shape=this->getvector(0); - + vector shape = this->getvector(0); + switch (type.precision()) { case Precision::Float32: @@ -58,7 +49,7 @@ namespace deepx::tf { Tensor<__half> t = tensorfunc::New<__half>(shape); mem->addtensor(name, t); - break; + break; } case Precision::BFloat16: { @@ -144,25 +135,78 @@ namespace deepx::tf class CopyTensor : public TF { public: - CopyTensor() + CopyTensor(vector args, vector returns) { this->name = "copytensor"; - + this->args = args; + this->returns = returns; } - CopyTensor(string text) + int run(shared_ptr mem, string &error) override { - this->parse(text); - if (this->name != "copytensor") + if (!checktensors({this->args[0].textvalue, this->args[1].textvalue}, mem, error) != 0) + { + return 1; + } + Precision input_type = mem->gettensor(this->args[0].textvalue).get()->shape.dtype; + Precision type = mem->gettensor(this->args[1].textvalue).get()->shape.dtype; + if (input_type != type) { - throw std::runtime_error("Invalid name: " + this->name); + error = "copytensor: input type and return type must be the same"; + return 1; } - } - int run(shared_ptr mem, string &error) override - { - // TODO - // auto src=mem.gettensor(this->args[0].name); - // auto dst=mem.gettensor(this->returns[0].name); - // tensorfunc::copytensor(*src,*dst); + switch (input_type) + { + case Precision::Float64: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Float32: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Float16: + { + tensorfunc::copy(*mem->gettensor<__half>(this->args[0].textvalue), *mem->gettensor<__half>(this->args[1].textvalue)); + break; + } + case Precision::BFloat16: + { + tensorfunc::copy(*mem->gettensor<__nv_bfloat16>(this->args[0].textvalue), *mem->gettensor<__nv_bfloat16>(this->args[1].textvalue)); + break; + } + case Precision::Int64: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Int32: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Int16: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Int8: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), 
*mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Bool: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + default: + { + error = "copytensor: unsupported precision"; + return 1; + } + }; return 0; } @@ -176,49 +220,14 @@ namespace deepx::tf } }; - class CloneTensor : public TF - { - public: - CloneTensor() - { - this->name = "clonetensor"; - - } - int run(shared_ptr mem, string &error) override - { - // TODO - // auto src=mem.gettensor(this->args[0]); - // string dst=this->returns[0]; - // mem.addtensor(dst,tensorfunc::clone(*src)); - return 0; - } - - - string math_formula() const override - { - return "T2 = T1.clone()"; - } - shared_ptr clone() const override - { - return make_shared(*this); - } - }; - class DelTensor : public TF { public: - DelTensor() + DelTensor(vector args, vector returns) { this->name = "deltensor"; - - } - DelTensor(string text) - { - this->parse(text); - if (this->name != "deltensor") - { - throw std::runtime_error("Invalid name: " + this->name); - } + this->args = args; + this->returns = returns; } int run(shared_ptr mem, string &error) override { @@ -237,4 +246,4 @@ namespace deepx::tf } }; } -#endif +#endif // DEEPX_TF_TENSORLIFE_HPP diff --git a/excuter/op-mem-cuda/test/tensorfunc/0_new.cpp b/excuter/op-mem-cuda/test/tensorfunc/0_new.cpp index 5896850c..de24e903 100644 --- a/excuter/op-mem-cuda/test/tensorfunc/0_new.cpp +++ b/excuter/op-mem-cuda/test/tensorfunc/0_new.cpp @@ -1,6 +1,6 @@ #include "deepx/tensorfunc/init.hpp" #include "deepx/tensor.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" #include "deepx/tensorfunc/init_miaobyte.hpp" #include "deepx/tensorfunc/authors.hpp" diff --git a/excuter/op-mem-cuda/test/tensorfunc/1_cublas_add.cpp b/excuter/op-mem-cuda/test/tensorfunc/1_cublas_add.cpp index 5904b91c..fe625241 100644 --- a/excuter/op-mem-cuda/test/tensorfunc/1_cublas_add.cpp +++ b/excuter/op-mem-cuda/test/tensorfunc/1_cublas_add.cpp @@ -1,6 +1,6 @@ #include "deepx/tensorfunc/init_miaobyte.hpp" #include "deepx/tensor.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" #include "deepx/tensorfunc/elementwise.hpp" #include "deepx/tensorfunc/elementwise_cublas_basic.hpp" diff --git a/excuter/op-mem-cuda/test/tensorfunc/1_cublas_matmul.cpp b/excuter/op-mem-cuda/test/tensorfunc/1_cublas_matmul.cpp index 3a2ac4e0..8d9187ec 100644 --- a/excuter/op-mem-cuda/test/tensorfunc/1_cublas_matmul.cpp +++ b/excuter/op-mem-cuda/test/tensorfunc/1_cublas_matmul.cpp @@ -1,6 +1,6 @@ #include "deepx/tensorfunc/init_miaobyte.hpp" #include "deepx/tensor.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" #include "deepx/tensorfunc/matmul.hpp" #include "deepx/tensorfunc/matmul_cublas.hpp" diff --git a/excuter/op-mem-cuda/test/tensorfunc/2_changeshape.cpp b/excuter/op-mem-cuda/test/tensorfunc/2_changeshape.cpp index f18671c5..e27ab760 100644 --- a/excuter/op-mem-cuda/test/tensorfunc/2_changeshape.cpp +++ b/excuter/op-mem-cuda/test/tensorfunc/2_changeshape.cpp @@ -1,6 +1,6 @@ #include "deepx/tensorfunc/init_miaobyte.hpp" #include "deepx/tensor.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" #include 
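The CopyTensor::run bodies above all follow the same shape: read the runtime Precision tag, then branch to the matching template instantiation of tensorfunc::copy. Below is a condensed standalone sketch of that dispatch pattern, with a simplified stand-in enum and raw pointers instead of mem->gettensor. One nit worth flagging in the hunk above: `if (!checktensors({...}, mem, error) != 0)` parses as `(!checktensors(...)) != 0`, so plain `checktensors(...) != 0` is presumably the intended guard.

#include <cstdint>

enum class Precision : uint8_t { Float32, Int32 }; // stand-in for deepx::Precision

template <typename T>
void copyImpl(const T *src, T *dst, int n)
{
    for (int i = 0; i < n; ++i) dst[i] = src[i]; // per-element copy, as tensorfunc::copy does
}

// One runtime tag selects the concrete instantiation; unknown tags become errors.
int dispatchCopy(Precision p, const void *src, void *dst, int n)
{
    switch (p)
    {
    case Precision::Float32:
        copyImpl(static_cast<const float *>(src), static_cast<float *>(dst), n);
        return 0;
    case Precision::Int32:
        copyImpl(static_cast<const int32_t *>(src), static_cast<int32_t *>(dst), n);
        return 0;
    default:
        return 1; // mirrors the "unsupported precision" error path above
    }
}

int main()
{
    float a[3] = {1, 2, 3}, b[3] = {};
    return dispatchCopy(Precision::Float32, a, b, 3);
}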
"deepx/tensorfunc/changeshape_miaobyte.hpp" using namespace deepx::tensorfunc; diff --git a/excuter/op-mem-ompsimd/src/client/tfs.cpp b/excuter/op-mem-ompsimd/src/client/tfs.cpp index f992497b..e769edc6 100644 --- a/excuter/op-mem-ompsimd/src/client/tfs.cpp +++ b/excuter/op-mem-ompsimd/src/client/tfs.cpp @@ -2,7 +2,7 @@ #include "deepx/dtype.hpp" #include "deepx/tf/arg.hpp" -#include "deepx/tf/new.hpp" +#include "deepx/tf/tensorlife.hpp" #include "deepx/tf/init.hpp" #include "deepx/tf/io.hpp" #include "deepx/tf/changeshape.hpp" @@ -53,8 +53,21 @@ namespace deepx::tf }), vector( { - Param("tensor1", DataCategory::Tensor, Precision::Any), + Param("t", DataCategory::Tensor, Precision::Any), }))); + //copytensor + tffactory.add_tf(std::make_shared(vector( + { + Param("src", DataCategory::Tensor, Precision::Any), + Param("dst", DataCategory::Tensor, Precision::Any), + }), + vector())); + //deltensor + tffactory.add_tf(std::make_shared(vector( + { + Param("t", DataCategory::Tensor, Precision::Any), + }), + vector())); } // init diff --git a/excuter/op-mem-ompsimd/src/deepx/tensorfunc/changeshape_miaobyte.hpp b/excuter/op-mem-ompsimd/src/deepx/tensorfunc/changeshape_miaobyte.hpp index 46f5a0b8..1644c045 100644 --- a/excuter/op-mem-ompsimd/src/deepx/tensorfunc/changeshape_miaobyte.hpp +++ b/excuter/op-mem-ompsimd/src/deepx/tensorfunc/changeshape_miaobyte.hpp @@ -6,7 +6,6 @@ #include "deepx/tensor.hpp" #include "deepx/shape_changeshape.hpp" -#include "deepx/tensorfunc/new.hpp" #include "deepx/tensorfunc/changeshape.hpp" #include "deepx/tensorfunc/authors.hpp" namespace deepx::tensorfunc diff --git a/excuter/op-mem-ompsimd/src/deepx/tensorfunc/io_miaobyte.hpp b/excuter/op-mem-ompsimd/src/deepx/tensorfunc/io_miaobyte.hpp index e6feebcc..f65166ca 100644 --- a/excuter/op-mem-ompsimd/src/deepx/tensorfunc/io_miaobyte.hpp +++ b/excuter/op-mem-ompsimd/src/deepx/tensorfunc/io_miaobyte.hpp @@ -8,8 +8,7 @@ #include "stdutil/print.hpp" #include "deepx/tensorfunc/authors.hpp" #include "deepx/tensorfunc/io.hpp" -#include "deepx/tensorfunc/new.hpp" - +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" namespace deepx::tensorfunc { // 通用模板特化 diff --git a/excuter/op-mem-ompsimd/src/deepx/tensorfunc/new.hpp b/excuter/op-mem-ompsimd/src/deepx/tensorfunc/new.hpp deleted file mode 100644 index 03e3f2c1..00000000 --- a/excuter/op-mem-ompsimd/src/deepx/tensorfunc/new.hpp +++ /dev/null @@ -1,86 +0,0 @@ -#ifndef DEEPX_TENSORFUNC_NEW_HPP -#define DEEPX_TENSORFUNC_NEW_HPP - -#include "deepx/tensor.hpp" -#include "deepx/dtype.hpp" -#include "deepx/dtype_ompsimd.hpp" -#include "deepx/tensorfunc/new_mempool.hpp" - -// 具体的张量类 -namespace deepx::tensorfunc -{ - template - static T* dataNew(int size) - { - return static_cast(MemoryPool::Malloc(size * sizeof(T))); - } - - template - static void dataFree(T *data) - { - MemoryPool::Free(data); - } - - template - static void dataCopy(T *data,T *data2,int size) - { - std::copy(data,data+size,data2); - } - - template - Tensor New(const std::vector &shapedata,T *data=nullptr) - { - Shape shape(shapedata); - shape.dtype=precision(); - // 分配内存 - - // 创建tensor并返回 - Tensor tensor(shape); - tensor.device = CPU; - tensor.deleter = dataFree; - tensor.copyer = dataCopy; - tensor.newer = dataNew; - if (data!=nullptr){ - tensor.data = data; - }else{ - tensor.data = dataNew(shape.size); - } - return tensor; - } - - template - Tensor New(const std::initializer_list &shapedata,T *data=nullptr) - { - Shape shape(shapedata); - shape.dtype=precision(); - // 分配内存 - // 创建tensor并返回 - Tensor tensor(shape); 
- tensor.device = CPU; - tensor.deleter = dataFree; - tensor.copyer = dataCopy; - tensor.newer = dataNew; - if (data!=nullptr){ - tensor.data = data; - }else{ - tensor.data = dataNew(shape.size); - } - return tensor; - } - - template - void copytensor(const Tensor &src,Tensor &dst) - { - dst.shape=src.shape; - dst.copyer(src.data,dst.data,src.shape.size); - } - - template - Tensor clone(const Tensor &tensor) - { - Tensor result = New(tensor.shape.shape); - tensor.copyer(tensor.data,result.data,tensor.shape.size); - return result; - } -} -#endif // DEEPX_TENSORFUNC_NEW_HPP \ No newline at end of file diff --git a/excuter/op-mem-ompsimd/src/deepx/tensorfunc/tensorlife_miaobyte.hpp b/excuter/op-mem-ompsimd/src/deepx/tensorfunc/tensorlife_miaobyte.hpp new file mode 100644 index 00000000..1f9c9cda --- /dev/null +++ b/excuter/op-mem-ompsimd/src/deepx/tensorfunc/tensorlife_miaobyte.hpp @@ -0,0 +1,56 @@ +#ifndef DEEPX_TENSORFUNC_TENSORLIFE_MIAOBYTE_HPP +#define DEEPX_TENSORFUNC_TENSORLIFE_MIAOBYTE_HPP + +#include "deepx/tensorfunc/tensorlife.hpp" +#include "deepx/tensorfunc/authors.hpp" +#include "deepx/tensor.hpp" +#include "deepx/dtype.hpp" +#include "deepx/dtype_ompsimd.hpp" +#include "deepx/tensorfunc/new_mempool.hpp" + +namespace deepx::tensorfunc +{ + + template + static T *dataNew(int size) + { + return static_cast(MemoryPool::Malloc(size * sizeof(T))); + } + + template + static void dataFree(T *data) + { + MemoryPool::Free(data); + } + + template + static void dataCopy(T *data, T *data2, int size) + { + std::copy(data, data + size, data2); + } + + // New + template + Tensor New(const std::vector &shapedata) + { + Shape shape(shapedata); + shape.dtype = precision(); + + Tensor tensor(shape); + tensor.device = CPU; + tensor.deleter = dataFree; + tensor.copyer = dataCopy; + tensor.newer = dataNew; + tensor.data = dataNew(shape.size); + return tensor; + }; + + template + void copy(const Tensor &src, Tensor &dst) + { + dst.shape = src.shape; + dst.copyer(src.data, dst.data, src.shape.size); + } + +} +#endif // DEEPX_TENSORFUNC_TENSORLIFE_MIAOBYTE_HPP \ No newline at end of file diff --git a/excuter/op-mem-ompsimd/src/deepx/tf/elementwise.hpp b/excuter/op-mem-ompsimd/src/deepx/tf/elementwise.hpp index 4296557f..8f1e3d0c 100644 --- a/excuter/op-mem-ompsimd/src/deepx/tf/elementwise.hpp +++ b/excuter/op-mem-ompsimd/src/deepx/tf/elementwise.hpp @@ -978,11 +978,10 @@ namespace deepx::tf int run(shared_ptr mem, string &error) override { Precision a_type = mem->gettensor(this->args[0].textvalue).get()->shape.dtype; - Precision b_type = mem->gettensor(this->args[1].textvalue).get()->shape.dtype; Precision c_type = mem->gettensor(this->returns[0].textvalue).get()->shape.dtype; - if (a_type != b_type || a_type != c_type) + if ( a_type != c_type) { - error = "Type mismatch: " + precision_str(a_type) + " != " + precision_str(b_type) + " != " + precision_str(c_type); + error = "Type mismatch: " + precision_str(a_type) + " != " + precision_str(c_type); return 1; } switch (a_type) diff --git a/excuter/op-mem-ompsimd/src/deepx/tf/new.hpp b/excuter/op-mem-ompsimd/src/deepx/tf/tensorlife.hpp similarity index 65% rename from excuter/op-mem-ompsimd/src/deepx/tf/new.hpp rename to excuter/op-mem-ompsimd/src/deepx/tf/tensorlife.hpp index 3c40fedf..92a45098 100644 --- a/excuter/op-mem-ompsimd/src/deepx/tf/new.hpp +++ b/excuter/op-mem-ompsimd/src/deepx/tf/tensorlife.hpp @@ -1,10 +1,10 @@ -#ifndef DEEPX_TF_NEW_HPP -#define DEEPX_TF_NEW_HPP +#ifndef DEEPX_TF_TENSORLIFE_HPP +#define DEEPX_TF_TENSORLIFE_HPP #include 
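Both backends' New implementations above share one pattern: allocate through a backend hook, then store the deleter/copyer/newer function pointers on the tensor so later ops can manage the buffer without knowing the backend. A reduced sketch of that wiring with a hypothetical MiniTensor stand-in (the real Tensor, MemoryPool and CUDA hooks come from the patch):

#include <algorithm>
#include <cstdlib>

// Hypothetical stand-in for deepx::Tensor<T>: a buffer plus the three hooks New() installs.
template <typename T>
struct MiniTensor
{
    T *data = nullptr;
    int size = 0;
    void (*deleter)(T *) = nullptr;
    void (*copyer)(T *, T *, int) = nullptr;
    T *(*newer)(int) = nullptr;
};

template <typename T>
T *dataNew(int size) { return static_cast<T *>(std::malloc(size * sizeof(T))); } // MemoryPool::Malloc in the patch

template <typename T>
void dataFree(T *p) { std::free(p); } // MemoryPool::Free in the patch

template <typename T>
void dataCopy(T *src, T *dst, int n) { std::copy(src, src + n, dst); }

template <typename T>
MiniTensor<T> New(int size)
{
    MiniTensor<T> t;
    t.size = size;
    t.deleter = dataFree<T>; // stored so deltensor/destruction can free later
    t.copyer = dataCopy<T>;  // stored so copytensor works without knowing the backend
    t.newer = dataNew<T>;
    t.data = dataNew<T>(size);
    return t;
}

int main()
{
    MiniTensor<float> a = New<float>(6);
    MiniTensor<float> b = New<float>(6);
    a.copyer(a.data, b.data, a.size); // what tensorfunc::copy(src, dst) does
    a.deleter(a.data);                // what the deltensor op triggers
    b.deleter(b.data);
    return 0;
}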
"deepx/tf/tf.hpp" #include "deepx/dtype.hpp" #include "deepx/mem/mem.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "stdutil/num.hpp" namespace deepx::tf @@ -134,24 +134,69 @@ namespace deepx::tf class CopyTensor : public TF { public: - CopyTensor() + CopyTensor(vector args, vector returns) { this->name = "copytensor"; + this->args = args; + this->returns = returns; } - CopyTensor(string text) + + int run(shared_ptr mem, string &error) override { - this->parse(text); - if (this->name != "copytensor") + if (!checktensors({this->args[0].textvalue, this->args[1].textvalue}, mem, error) != 0) { - throw std::runtime_error("Invalid name: " + this->name); + return 1; } - } - int run(shared_ptr mem, string &error) override - { - // TODO - // auto src=mem.gettensor(this->args[0].name); - // auto dst=mem.gettensor(this->returns[0].name); - // tensorfunc::copytensor(*src,*dst); + Precision input_type = mem->gettensor(this->args[0].textvalue).get()->shape.dtype; + Precision type = mem->gettensor(this->args[1].textvalue).get()->shape.dtype; + if (input_type != type) + { + error = "copytensor: input type and return type must be the same"; + return 1; + } + switch (input_type) + { + case Precision::Float64: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Float32: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Int64: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Int32: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Int16: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Int8: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + case Precision::Bool: + { + tensorfunc::copy(*mem->gettensor(this->args[0].textvalue), *mem->gettensor(this->args[1].textvalue)); + break; + } + default: + { + error = "copytensor: unsupported precision"; + return 1; + } + }; return 0; } @@ -165,46 +210,14 @@ namespace deepx::tf } }; - class CloneTensor : public TF - { - public: - CloneTensor() - { - this->name = "clonetensor"; - } - int run(shared_ptr mem, string &error) override - { - // TODO - // auto src=mem.gettensor(this->args[0]); - // string dst=this->returns[0]; - // mem.addtensor(dst,tensorfunc::clone(*src)); - return 0; - } - - string math_formula() const override - { - return "T2 = T1.clone()"; - } - shared_ptr clone() const override - { - return make_shared(*this); - } - }; - class DelTensor : public TF { public: - DelTensor() + DelTensor(vector args, vector returns) { this->name = "deltensor"; - } - DelTensor(string text) - { - this->parse(text); - if (this->name != "deltensor") - { - throw std::runtime_error("Invalid name: " + this->name); - } + this->args = args; + this->returns = returns; } int run(shared_ptr mem, string &error) override { diff --git a/excuter/op-mem-ompsimd/test/op/1_mem.cpp b/excuter/op-mem-ompsimd/test/op/1_mem.cpp index 6433d086..a3a89f9a 100644 --- a/excuter/op-mem-ompsimd/test/op/1_mem.cpp +++ b/excuter/op-mem-ompsimd/test/op/1_mem.cpp @@ -1,7 +1,7 @@ #include #include "deepx/mem/mem_ompsimd.hpp" #include 
"deepx/tensor.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/init_miaobyte.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" #include "deepx/tensorfunc/authors.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_equal.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_equal.cpp index 7818658a..0dd7b65e 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_equal.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_equal.cpp @@ -3,7 +3,7 @@ #include "deepx/tensorfunc/equal.hpp" #include "deepx/tensor.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" using namespace deepx; using namespace deepx::tensorfunc; diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_new.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_new.cpp index 05a0df43..5b62463d 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_new.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_new.cpp @@ -3,7 +3,7 @@ #include "deepx/tensor.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/init_miaobyte.hpp" #include "deepx/tensorfunc/authors.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_range.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_range.cpp index b1d5dcb8..a7f3eeec 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_range.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/2_tensor_range.cpp @@ -2,7 +2,7 @@ #include #include "deepx/tensor.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/init_miaobyte.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" #include "deepx/tensorfunc/authors.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/3_tensor_print.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/3_tensor_print.cpp index 6657ecce..d965ec77 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/3_tensor_print.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/3_tensor_print.cpp @@ -2,12 +2,14 @@ #include "deepx/tensor.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/authors.hpp" + +using namespace deepx::tensorfunc; int main(){ - deepx::Tensor t=deepx::tensorfunc::New({2, 3,4}); + deepx::Tensor t=New({2, 3,4}); std::iota(t.data, t.data+t.shape.size, 0); - deepx::tensorfunc::print(t); - deepx::tensorfunc::save(t,"3_tensor_print"); + print(t); + save(t,"3_tensor_print"); return 0; } \ No newline at end of file diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_add.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_add.cpp index 35a57404..dec63830 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_add.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_add.cpp @@ -5,7 +5,7 @@ #include "deepx/tensorfunc/elementwise.hpp" #include "deepx/tensorfunc/elementwise_miaobyte.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/init.hpp" #include "tensorutil.hpp" #include "deepx/tensorfunc/authors.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_matmul.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_matmul.cpp index 0b1b791e..60027015 100644 --- 
a/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_matmul.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_matmul.cpp @@ -5,7 +5,7 @@ #include "deepx/tensorfunc/io_miaobyte.hpp" #include "deepx/tensor.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/matmul.hpp" #include "deepx/tensorfunc/matmul_miaobyte.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_max.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_max.cpp index a2438adf..7cd8a086 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_max.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_max.cpp @@ -4,7 +4,7 @@ #include "deepx/tensor.hpp" #include "deepx/tensorfunc/init_miaobyte.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/authors.hpp" #include "tensorutil.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_mul.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_mul.cpp index c93e42d9..2e4dd40f 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_mul.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_mul.cpp @@ -5,7 +5,7 @@ #include "deepx/tensorfunc/elementwise.hpp" #include "deepx/tensorfunc/elementwise_miaobyte.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/init_miaobyte.hpp" #include "tensorutil.hpp" #include "deepx/tensorfunc/authors.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_sub.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_sub.cpp index 0cbd2b86..4f28b20b 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_sub.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/4_tensor_sub.cpp @@ -4,7 +4,7 @@ #include "deepx/tensor.hpp" #include "deepx/tensorfunc/elementwise.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/init.hpp" #include "deepx/tensorfunc/authors.hpp" #include "deepx/tensorfunc/elementwise_miaobyte.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/5_tensor_sum.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/5_tensor_sum.cpp index 79530b6b..e902e717 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/5_tensor_sum.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/5_tensor_sum.cpp @@ -9,9 +9,8 @@ #include "stdutil/vector.hpp" #include "deepx/vector_combination.hpp" #include "deepx/shape_reduce.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/init_miaobyte.hpp" -#include "deepx/tensorfunc/new.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" #include "deepx/tensorfunc/authors.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/6_tensor_broadcast.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/6_tensor_broadcast.cpp index 6658be10..d7209708 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/6_tensor_broadcast.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/6_tensor_broadcast.cpp @@ -3,7 +3,7 @@ #include "deepx/tensor.hpp" #include "deepx/tensorfunc/changeshape.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/init_miaobyte.hpp" #include "deepx/tensorfunc/elementwise.hpp" #include 
"deepx/tensorfunc/elementwise_cblas.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/7_tensor_transpose.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/7_tensor_transpose.cpp index a97fbcd7..db6f6cb4 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/7_tensor_transpose.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/7_tensor_transpose.cpp @@ -4,7 +4,7 @@ #include "deepx/tensor.hpp" #include "deepx/tensorfunc/changeshape_miaobyte.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/authors.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" #include "stdutil/vector.hpp" diff --git a/excuter/op-mem-ompsimd/test/tensorfunc/8_tensor_concat.cpp b/excuter/op-mem-ompsimd/test/tensorfunc/8_tensor_concat.cpp index 0922d9bc..74fd575b 100644 --- a/excuter/op-mem-ompsimd/test/tensorfunc/8_tensor_concat.cpp +++ b/excuter/op-mem-ompsimd/test/tensorfunc/8_tensor_concat.cpp @@ -7,7 +7,7 @@ #include "deepx/tensor.hpp" #include "deepx/shape.hpp" #include "deepx/shape_changeshape.hpp" -#include "deepx/tensorfunc/new.hpp" +#include "deepx/tensorfunc/tensorlife_miaobyte.hpp" #include "deepx/tensorfunc/init_miaobyte.hpp" #include "deepx/tensorfunc/io_miaobyte.hpp" #include "stdutil/vector.hpp" diff --git a/front/py/deepx/autograd/__init__.py b/front/py/deepx/autograd/__init__.py deleted file mode 100644 index 2e337bd7..00000000 --- a/front/py/deepx/autograd/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -from .graph import Graph -from .graph_viz import to_dot -from .node import Node -from .nodetype import NodeType -from ._datanode import DataNode -from ._opnode import OpNode -from .function import Function,Context -__all__ = [ - 'Graph', - 'Node', - 'NodeType', - 'DataNode', - 'OpNode', - 'Function', - 'Context', - ] \ No newline at end of file diff --git a/front/py/deepx/nn/deepxir.py b/front/py/deepx/nn/deepxir.py index 830a7452..33b7e037 100644 --- a/front/py/deepx/nn/deepxir.py +++ b/front/py/deepx/nn/deepxir.py @@ -2,7 +2,10 @@ import time from datetime import datetime # 添加datetime模块 from deepx.tensor import Tensor + + class Param: + def __init__(self,textvalue:str, category:str=None,precision:str=None): self._textvalue=textvalue self._category=category @@ -21,14 +24,10 @@ def __str__(self): def tensorName(cls,name:str,dtype:str): return Param(name,category="tensor",precision=dtype) + @classmethod def tensor(cls,t:Tensor): - name=None - if t.name is not None: - name=t.name - else: - name=id(t) - return Param(name, category="tensor", precision=t.dtype) + return Param(t.name, category="tensor", precision=t.dtype) @classmethod diff --git a/front/py/deepx/nn/functional/__init__.py b/front/py/deepx/nn/functional/__init__.py index 1aee9d1e..9cacf7d4 100644 --- a/front/py/deepx/nn/functional/__init__.py +++ b/front/py/deepx/nn/functional/__init__.py @@ -1,5 +1,5 @@ -from .leaffunc_new import newtensor,deltensor +from .leaffunc_life import newtensor,copytensor,deltensor from .leaffunc_io import printtensor from .leaffunc_init import * from .leaffunc_changeshape import * @@ -7,20 +7,28 @@ from .leaffunc_matmul import matmul from .leaffunc_reduce import reducemax,reducemin,sum,prod +from .authormap import defaultauthor + from .reduce import mean -from .activite import relu,sigmoid,swish +from .activite import * + +from .elementwise import * +from .normalization import * __all__ = [ #leaffunc "newtensor", "printtensor", 
"constant","constant_","full","zeros","ones","uniform","uniform_","arange","arange_","kaiming_uniform","kaiming_uniform_","calculate_fan_in_and_fan_out", - "add","sub","mul","div","sqrt","pow","exp","log","rsqrt", + "add","sub","mul","div","sqrt","pow","exp","log", "leaffunc_matmul", "reducemax","reducemin","sum","prod", "reshape","permute","transpose","concat","broadcastTo", - "relu","sigmoid","swish", - #func + #functional + "relu","sigmoid","swish", "mean", + "rsqrt", + "softmax", + ] \ No newline at end of file diff --git a/front/py/deepx/nn/functional/activite.py b/front/py/deepx/nn/functional/activite.py index 7a90b2e3..97be29f3 100644 --- a/front/py/deepx/nn/functional/activite.py +++ b/front/py/deepx/nn/functional/activite.py @@ -1,58 +1,34 @@ -from typing import Optional,Union -from deepx import Tensor +from typing import Union +from deepx.tensor import Tensor +from deepx.nn.functional import newtensor -def relu( - t: Tensor, - inplace:bool=False, - out:Union[Tensor,str]='')->Tensor: +# 数学公式:relu(x) = max(0, x) +def relu(t: Tensor)->Tensor: from .leaffunc_elementwise import max as max_func - return max_func(t,0,out) + outtensor=t + if t.name!=None: + outtensor=newtensor(t.shape, dtype=t.dtype) + else:#inplace操作 + pass + return max_func(t,0,outtensor) # 数学公式:σ(x) = 1 / (1 + exp(-x)) -def sigmoid( - t: Tensor, - inplace:bool=False, - out:Union[Tensor,str]='')->Tensor: - """Sigmoid激活函数 - - .. math:: - \sigma(x) = \frac{1}{1 + e^{-x}} - - Args: - t: 输入张量 - inplace: 是否原地操作 - out: 输出张量或名称 - - Returns: - 输出张量 - """ - outtensor=None - if inplace: - outtensor=t - else: - if isinstance(out,str): - outtensor=Tensor(shape=t.shape, dtype=t.dtype, device=t.device) - outtensor.addtograph(out) - else: - outtensor=out - from .leaffunc_elementwise import exp - outtensor = 1 / ((t*-1).exp()+1) +def sigmoid(t: Tensor)->Tensor: + outtensor=t + if t.name is not None: + outtensor=newtensor(t.shape, dtype=t.dtype) + t.mul(-1,out=outtensor) + outtensor.exp_() + outtensor.add_(1) + outtensor.rdiv_(1) return outtensor -def swish( - x: Tensor, - beta: float = 1.0, - out: Union[Tensor,str] = '') -> Tensor: - """Swish激活函数 - .. 
math:: - \text{swish}(x) = x \cdot \sigma(\beta x) - 其中 :math:`\sigma(x)` 是sigmoid函数。 - Args: - x: 输入张量 - beta: 缩放因子,控制sigmoid的陡峭程度 - out: 输出张量或名称 - - Returns: - 输出张量 - """ - return x*sigmoid(x*beta,out=out) +# 数学公式:swish(x) = x * σ(βx) +def swish(x: Tensor,beta: float = 1.0) -> Tensor: + outtensor=x + if x.name is not None: + outtensor=newtensor(x.shape, dtype=x.dtype) + x.mul(beta,out=outtensor) + outtensor=sigmoid(outtensor) + outtensor.mul_(x) + return outtensor diff --git a/front/py/deepx/nn/functional/authormap.py b/front/py/deepx/nn/functional/authormap.py new file mode 100644 index 00000000..5b42d56f --- /dev/null +++ b/front/py/deepx/nn/functional/authormap.py @@ -0,0 +1,36 @@ +defaultauthor=dict({ + #elementwise + 'add':'miaobyte', + 'addscalar':'miaobyte', + 'sub':'miaobyte', + 'subscalar':'miaobyte', + 'mul':'miaobyte', + 'mulscalar':'miaobyte', + 'div':'miaobyte', + 'divscalar':'miaobyte', + 'rdiv':'miaobyte', + 'rdivscalar':'miaobyte', + + 'compare':'miaobyte', + 'min':'miaobyte', + 'minscalar':'miaobyte', + 'max':'miaobyte', + 'maxscalar':'miaobyte', + 'exp':'miaobyte', + 'log':'miaobyte', + 'pow':'miaobyte', + 'powscalar':'miaobyte', + 'sqrt':'miaobyte', + #changeshape + 'reshape':'miaobyte', + 'transpose':'miaobyte', + 'broadcastTo':'miaobyte', + 'concat':'miaobyte', + #matmul + 'matmul':'miaobyte', + #reduce + 'sum':'miaobyte', + 'prod':'miaobyte', + 'reducemax':'miaobyte', + 'reducemin':'miaobyte' + }) \ No newline at end of file diff --git a/front/py/deepx/nn/functional/elementwise.py b/front/py/deepx/nn/functional/elementwise.py new file mode 100644 index 00000000..7adcb28f --- /dev/null +++ b/front/py/deepx/nn/functional/elementwise.py @@ -0,0 +1,12 @@ +from deepx.tensor import Tensor +from deepx.nn.functional import newtensor + +def rsqrt(input:Tensor)->Tensor: + from .leaffunc_elementwise import sqrt,div + outtensor=input + if input.name is not None: + outtensor=newtensor(input.shape, dtype=input.dtype) + sqrt(input,out= outtensor) + return div(1,outtensor,outtensor) + + diff --git a/front/py/deepx/nn/functional/leaffunc.py b/front/py/deepx/nn/functional/leaffunc.py index 84a20916..62fbb767 100644 --- a/front/py/deepx/nn/functional/leaffunc.py +++ b/front/py/deepx/nn/functional/leaffunc.py @@ -1,10 +1,75 @@ -from typing import Union,Tuple +from typing import Union +import importlib + + from deepx.tensor import Tensor,Shape -from deepx.nn.deepxir import DeepxIR -from deepx.scheduler import send -from deepx.autograd import OpNode,Function,Context +from .leaffunc_life import newtensor +from .authormap import defaultauthor # inplace操作的函数,其名为_后缀, 返回值为空 # 非inplace操作的函数,其名为_后缀, 返回值为Tensor -def buildgraph(a:Tensor,dim:tuple[int],keepdim:bool=False,out:Tensor, author:str='miaobyte')->Tensor: \ No newline at end of file + +def create_A_B_tf_C(op_name): + """创建元素级操作函数""" + def op_func( + a: Tensor, + b: Union[Tensor, float, int] = None, + out: Union[Tensor, str] = None) -> Tensor: + outtensor = out + if isinstance(out, str): + outtensor = newtensor(a.shape, dtype=a.dtype, name=out) + + rtf_module = importlib.import_module('deepx.nn.functional.rtf_elementwise') + if isinstance(b, Tensor): + an=a + bn=b + if a.shape != b.shape: + newshape = Shape.broadcast_shape(a.shape, b.shape) + an = a.broadcastTo(newshape) + bn = b.broadcastTo(newshape) + rtf_func = getattr(rtf_module, f'rtf_{op_name}') + rtf_func(an, bn, outtensor, defaultauthor[op_name]) + else: + rtf_func = getattr(rtf_module, f'rtf_{op_name}scalar') + rtf_func(a, b, outtensor, defaultauthor[f'{op_name}scalar']) + return 
outtensor + op_func.__name__ = op_name + op_func.__qualname__ = op_name + return op_func + +def create_A_tf_C(op_name): + def op_func( + a:Tensor, + out:Union[Tensor,str]=None)->Tensor: + outtensor=out + if isinstance(out,str): + outtensor=newtensor(a.shape,dtype=a.dtype,name=out) + rtf_module = importlib.import_module('deepx.nn.functional.rtf_elementwise') + rtf_func = getattr(rtf_module, f'rtf_{op_name}') + rtf_func(a,outtensor,defaultauthor[op_name]) + return outtensor + op_func.__name__ = op_name + op_func.__qualname__ = op_name + return op_func + + +def create_A_dim_keepdim_tf_C(op_name): + def op_func( + a:Tensor, + dim:list[int], + keepdim:bool=False, + out:Union[Tensor,str]='', + author:str='miaobyte', + requires_grad:bool=False)->Tensor: + if dim is None: + dim=tuple(range(a.ndim)) + result=out + if isinstance(out,str): + resultshape=Shape.reduceshape(a.shape,dim,keepdim) + result=newtensor(resultshape, dtype=a.dtype,name=out) + rtf_module = importlib.import_module('deepx.nn.functional.rtf_reduce') + rtf_func = getattr(rtf_module, f'rtf_{op_name}') + rtf_func(a, dim, keepdim, result, author) + return result + return op_func \ No newline at end of file diff --git a/front/py/deepx/nn/functional/leaffunc_changeshape.py b/front/py/deepx/nn/functional/leaffunc_changeshape.py index 6f830bc2..25599498 100644 --- a/front/py/deepx/nn/functional/leaffunc_changeshape.py +++ b/front/py/deepx/nn/functional/leaffunc_changeshape.py @@ -1,183 +1,67 @@ -from typing import Union,Tuple +from typing import Union from deepx.tensor import Tensor,Shape -from deepx.nn.deepxir import DeepxIR -from deepx.scheduler import send -from deepx.autograd import Function,Context -from .leaffunc_new import newtensor +from .leaffunc_life import newtensor +from .authormap import defaultauthor -class Reshape(Function): - @staticmethod - def forward(ctx:Context,t:Tensor,shape:list[int],out:Union[Tensor,str],authormap:dict): - if ctx.requires_grad: - ctx.save_data('oldshape',t.shape) - ctx.save_tensors('t',t) - ctx.set_authormap(authormap) - - total_size=1 - for i in shape: - total_size*=i - if total_size!=t.numel(): - raise ValueError(f"reshape失败:{t.shape} 无法reshape为 {shape} ") +def reshape(t:Tensor,shape:list[int],out:Union[Tensor,str]='')->Tensor: + outtensor=out + if isinstance(out,str): + outshape=shape + outtensor=newtensor(outshape,dtype=t.dtype,name=out) + else: outtensor=out - if isinstance(out,str): - outshape=shape - outtensor=newtensor(outshape,dtype=t.dtype,name=out) - else: - outtensor=out - outtensor._shape=Shape(shape) - from .rtf_changeshape import rtf_reshape - rtf_reshape(t,shape,outtensor,ctx.authormap['reshape']) - return outtensor - - @staticmethod - def backward(ctx:Context,t_grad:Tensor,out_grad:Tensor): - oldshape=ctx.get_data('oldshape') - t=ctx.get_tensor('t') - from .rtf_changeshape import rtf_reshape - rtf_reshape(out_grad,oldshape,t_grad,ctx.authormap['reshape']) - return t_grad - -def reshape(t:Tensor,shape:list[int],out:Union[Tensor,str]='',requires_grad:bool=False,author='miaobyte')->Tensor: - return Reshape.apply(t,shape,out,{'reshape':author},requires_grad=requires_grad) - + outtensor._shape=Shape(shape) + from .rtf_changeshape import rtf_reshape + rtf_reshape(t,shape,outtensor,defaultauthor['reshape']) + return outtensor -class Permute(Function): - @staticmethod - def forward(ctx:Context, - t:Tensor, - dimorder:list[int], - out:Union[Tensor,str]='', - authormap:dict={'transpose':'miaobyte'})->Tensor: - if ctx.requires_grad: - ctx.save_data('dimorder',dimorder) - 
-        outtensor=out
-        if isinstance(out,str):
-            outshape = [t.shape[dim] for dim in dimorder]
-            outtensor=newtensor(outshape,dtype=t.dtype,name=out)
-
-        from .rtf_changeshape import rtf_transpose
-        rtf_transpose(t,dimorder,outtensor,ctx.authormap['transpose'])
-        return outtensor
-
-
-    @staticmethod
-    def backward(ctx:Context,in_grad,out_grad):
-        dimorder=ctx.get_data('dimorder')
-        inverse_dimorder = [0] * len(dimorder)
-        for i, j in enumerate(dimorder):
-            inverse_dimorder[j] = i
-        from .rtf_changeshape import rtf_transpose
-        rtf_transpose(out_grad,inverse_dimorder,in_grad,ctx.authormap['transpose'])
-        return in_grad
 
 def permute(t:Tensor,
             dimorder:list[int],
-            out:Union[Tensor,str]='',
-            requires_grad:bool=False,
-            author='miaobyte')->Tensor:
-    if t.dim!=len(dimorder):
-        raise ValueError(f"shape参数不合法,当前输入维度数:{len(dimorder)},张量维度数:{t.dim}")
+            out:Union[Tensor,str]='')->Tensor:
+    if t.ndim!=len(dimorder):
+        raise ValueError(f"invalid dimorder: got {len(dimorder)} dims, tensor has {t.ndim}")
     dimorder = [d % t.ndim for d in dimorder]
-    return Permute.apply(t,dimorder,out,{'transpose':author},requires_grad=requires_grad)
-
-def transpose(t:Tensor,out:Union[Tensor,str]='',requires_grad:bool=False,author='miaobyte')->Tensor:
+    outtensor=out
+    if isinstance(out,str):
+        outshape = [t.shape[dim] for dim in dimorder]
+        outtensor=newtensor(outshape,dtype=t.dtype,name=out)
+
+    from .rtf_changeshape import rtf_transpose
+    rtf_transpose(t,dimorder,outtensor,defaultauthor['transpose'])
+    return outtensor
+
+def transpose(t:Tensor,out:Union[Tensor,str]='')->Tensor:
     dimorder = list(range(t.ndim))
     dimorder[-1],dimorder[-2]=dimorder[-2],dimorder[-1]
-    return Permute.apply(t,dimorder,out,{'transpose':author},requires_grad=requires_grad)
-
+    return permute(t,dimorder,out)
 
-class Concat(Function):
-    @staticmethod
-    def forward(ctx:Context,
-                tensors:list[Tensor],
-                dim:int,
-                out:Union[Tensor,str]='',
-                authormap:dict={'concat':'miaobyte'})->Tensor:
-        if ctx.requires_grad:
-            ctx.save_data('dim',dim)
-            ctx.set_authormap(authormap)
-        outtensor=out
-        if isinstance(out,str):
-            outshape=list(tensors[0].shape)
-            outshape[dim]=sum(t.shape[dim] for t in tensors)
-            outtensor=newtensor(outshape,dtype=tensors[0].dtype,name=out)
-        from .rtf_changeshape import rtf_concat
-        rtf_concat(tensors,dim,outtensor,ctx.authormap['concat'])
-        return outtensor
-
-    @staticmethod
-    def backward(ctx:Context,out_grad):
-        dim=ctx.get_data('dim')
-        #todo: 反向传播
-def concat(t:Tensor,dim:int,out:Union[Tensor,str]='',requires_grad:bool=False,author='miaobyte')->Tensor:
-    return Concat.apply(t,dim,out,{"concat":author},requires_grad=requires_grad)
-
-def broadcast_shape(shape_a: tuple[int], shape_b: tuple[int]) -> tuple[int]:
-    """计算两个形状的广播后形状"""
-    # 获取形状的长度
-    len_a, len_b = len(shape_a), len(shape_b)
-
-    # 创建结果形状
-    result_shape = []
-
-    # 从右往左对齐并计算每个维度
-    for i in range(1, min(len_a, len_b) + 1):
-        dim_a = shape_a[-i]
-        dim_b = shape_b[-i]
-
-        if dim_a == 1 or dim_b == 1:
-            # 广播规则:如果一个维度为1,取另一个维度的值
-            result_shape.insert(0, max(dim_a, dim_b))
-        elif dim_a == dim_b:
-            # 维度相同,保持不变
-            result_shape.insert(0, dim_a)
-        else:
-            # 维度不同且都不为1,无法广播
-            raise ValueError(f"无法广播的形状:{shape_a} 和 {shape_b}")
-
-    # 添加较长形状中多出的前导维度
-    if len_a > len_b:
-        result_shape = list(shape_a[:len_a - len_b]) + result_shape
-    elif len_b > len_a:
-        result_shape = list(shape_b[:len_b - len_a]) + result_shape
-
-    return tuple(result_shape)
-
-class BroadcastTo(Function):
-    @staticmethod
-    def forward(ctx:Context,
-                t:Tensor,
-                new_shape:tuple[int],
-                out:Union[Tensor,str]='',
-                authormap:dict={'broadcastTo':'miaobyte'})->Tensor:
-        bshape=broadcast_shape(t.shape,new_shape)
-        if bshape!=new_shape:
-            raise ValueError(f"广播失败:{t.shape} 无法广播为 {new_shape} ")
-
-        if ctx.requires_grad:
-            ctx.save_data('new_shape',new_shape)
-            ctx.set_authormap(authormap)
-        outtensor=out
-        if isinstance(out,str):
-            outshape=new_shape
-            outtensor=newtensor(outshape,dtype=t.dtype,name=out)
-        from .rtf_changeshape import rtf_broadcastTo
-        rtf_broadcastTo(t,new_shape,outtensor,ctx.authormap['broadcastTo'])
-        return outtensor
-
-    #todo: 反向传播
-    @staticmethod
-    def backward(ctx:Context,out_grad):
-        new_shape=ctx.get_data('new_shape')
-        #todo: 反向传播
+def concat(tensors:Union[list[Tensor],tuple[Tensor]],dim:int,out:Union[Tensor,str]='')->Tensor:
+    outtensor=out
+    if isinstance(out,str):
+        outshape=list(tensors[0].shape)
+        outshape[dim]=sum(t.shape[dim] for t in tensors)
+        outtensor=newtensor(outshape,dtype=tensors[0].dtype,name=out)
+    from .rtf_changeshape import rtf_concat
+    rtf_concat(tensors,dim,outtensor,defaultauthor['concat'])
+    return outtensor
 
-def broadcastTo(t:Tensor,new_shape:tuple[int],out:Union[Tensor,str]='',requires_grad:bool=False,author='miaobyte')->Tensor:
-    return BroadcastTo.apply(t,new_shape,out,{'broadcastTo':author},requires_grad=requires_grad)
-
+def broadcastTo(t:Tensor,new_shape:tuple[int],out:Union[Tensor,str]='')->Tensor:
+    if t.shape==new_shape:
+        return t
+    bshape=Shape.broadcast_shape(t.shape,new_shape)
+    if bshape!=new_shape:
+        raise ValueError(f"broadcast failed: {t.shape} cannot be broadcast to {new_shape}")
+    outtensor=out
+    if isinstance(out,str):
+        outtensor=newtensor(new_shape,dtype=t.dtype,name=out)
+    from .rtf_changeshape import rtf_broadcastTo
+    rtf_broadcastTo(t,new_shape,outtensor,defaultauthor['broadcastTo'])
+    return outtensor
 broadcast_to = broadcastTo
 
 # def unsqueeze(t:Tensor,dim:int)->Tensor:
diff --git a/front/py/deepx/nn/functional/leaffunc_elementwise.py b/front/py/deepx/nn/functional/leaffunc_elementwise.py
index 75b0f546..3cfe5157 100644
--- a/front/py/deepx/nn/functional/leaffunc_elementwise.py
+++ b/front/py/deepx/nn/functional/leaffunc_elementwise.py
@@ -1,186 +1,16 @@
 from typing import Optional, Union
-from deepx import Tensor
-from deepx.autograd import Function,Context
+from deepx import Tensor,Shape
 
-from .leaffunc_new import newtensor
+from .leaffunc import create_A_B_tf_C,create_A_tf_C
+from .leaffunc_life import newtensor
+from .authormap import defaultauthor
 
-
-class Add(Function):
-    @staticmethod
-    def forward(ctx:Context, a:Tensor, b:Tensor,out:Union[Tensor,str],authormap:dict={'add':'miaobyte'})->Tensor:
-        ctx.set_authormap(authormap)
-        outtensor=out
-        if isinstance(out,str):
-            outtensor=newtensor(a.shape,dtype=a.dtype,name=out)
-        from .rtf_elementwise import rtf_add
-        rtf_add(a,b,outtensor,ctx.authormap['add'])
-        return outtensor
-    @staticmethod
-    def backward(ctx:Context,out_grad):
-        return out_grad, out_grad
-
-class AddScalar(Function):
-    @staticmethod
-    def forward(ctx:Context, a:Tensor, b:float,out:Union[Tensor,str],authormap:dict={'addscalar':'miaobyte'})->Tensor:
-        ctx.set_authormap(authormap)
-        outtensor=out
-        if isinstance(out,str):
-            outtensor=newtensor(a.shape,dtype=a.dtype,name=out)
-        from .rtf_elementwise import rtf_addscalar
-        rtf_addscalar(a,b,outtensor,ctx.authormap['addscalar'])
-        return outtensor
-
-    @staticmethod
-    def backward(ctx:Context, grad_output):
-        return grad_output, None
-def add(
-    a:Tensor,
-    b: Optional[Union[Tensor, float, int]] = None,
-    out:Union[Tensor,str]=None,
-    requires_grad:bool=False,
-    author='miaobyte')->Tensor:
-    if isinstance(b,Tensor):
-        return
Add.apply(a,b,out,{'add':author},requires_grad=requires_grad) - else: - return AddScalar.apply(a,b,out,{'addscalar':author},requires_grad=requires_grad) - - -#sub - -class Sub(Function): - @staticmethod - def forward(ctx:Context, a:Tensor, b:Tensor,out:Union[Tensor,str],authormap:dict={'sub':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_sub - rtf_sub(a,b,outtensor,ctx.authormap['sub']) - return outtensor - - @staticmethod - def backward(ctx:Context, grad_output): - return grad_output, -grad_output - -class SubScalar(Function): - @staticmethod - def forward(ctx:Context, a:Tensor, b:float,out:Union[Tensor,str],authormap:dict={'subscalar':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_subscalar - rtf_subscalar(a,b,outtensor,ctx.authormap['subscalar']) - return outtensor - - @staticmethod - def backward(ctx:Context, grad_output): - return grad_output, None -def sub( - a:Tensor, - b: Optional[Union[Tensor, float, int]] = None, - out:Union[Tensor,str]=None, - requires_grad:bool=False, - author='miaobyte')->Tensor: - if isinstance(b,Tensor): - return Sub.apply(a,b,out,{'sub':author},requires_grad=requires_grad) - else: - return SubScalar.apply(a,b,out,{'subscalar':author},requires_grad=requires_grad) - -#mul - -class Mul(Function): - @staticmethod - def forward(ctx:Context, a:Tensor, b:Tensor,out:Union[Tensor,str],authormap:dict={'mul':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_mul - rtf_mul(a,b,outtensor,ctx.authormap['mul']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - a,b=ctx.get_tensor - return out_grad * b, out_grad * a - -class MulScalar(Function): - @staticmethod - def forward(ctx:Context, a:Tensor, b:float,out:Union[Tensor,str],authormap:dict={'mulscalar':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_mulscalar - rtf_mulscalar(a,b,outtensor,ctx.authormap['mulscalar']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - b=ctx.get_data('b') - return out_grad * b, None -def mul( - a:Tensor, - b: Optional[Union[Tensor, float, int]] = None, - out:Union[Tensor,str]=None, - requires_grad:bool=False, - author='miaobyte')->Tensor: - if isinstance(b,Tensor): - return Mul.apply(a,b,out,{'mul':author},requires_grad=requires_grad) - else: - return MulScalar.apply(a,b,out,{'mulscalar':author},requires_grad=requires_grad) - +# 创建具体操作函数 +add = create_A_B_tf_C('add') +sub = create_A_B_tf_C('sub') +mul = create_A_B_tf_C('mul') #div - -class Div(Function): - @staticmethod - def forward(ctx:Context, a:Tensor, b:Tensor,out:Union[Tensor,str],authormap:dict={'div':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_div - rtf_div(a,b,outtensor,ctx.authormap['div']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - a,b=ctx.get_tensor - return out_grad / b, -out_grad * a / b / b - -class DivScalar(Function): - @staticmethod - def forward(ctx:Context, 
a:Tensor, b:float,out:Union[Tensor,str],authormap:dict={'divscalar':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_divscalar - rtf_divscalar(a,b,outtensor,ctx.authormap['divscalar']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - b=ctx.get_data('b') - return out_grad / b, None - -class RDivScalar(Function): - @staticmethod - def forward(ctx:Context, a:float,b:Tensor,out:Union[Tensor,str],authormap:dict={'rdivscalar':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(b.shape,dtype=b.dtype,name=out) - from .rtf_elementwise import rtf_rdivscalar - rtf_rdivscalar(a,b,outtensor,ctx.authormap['rdivscalar']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - b=ctx.get_data('b') - return out_grad * b, None def div( a: Optional[Union[Tensor, float, int]] = None, b: Optional[Union[Tensor, float, int]] = None, @@ -188,245 +18,44 @@ def div( requires_grad:bool=False, author='miaobyte')->Tensor: if isinstance(b,Tensor) and isinstance(a,Tensor): - return Div.apply(a,b,out,{'div':author},requires_grad=requires_grad) - else: - if isinstance(a,Tensor): - #C=A/b - return DivScalar.apply(a,b,out,{'divscalar':author},requires_grad=requires_grad) - else: - #C=a/B - return RDivScalar.apply(a,b,out,{'rdivscalar':author},requires_grad=requires_grad) - - - -class Max(Function): - @staticmethod - def forward(ctx:Context,a:Tensor, b:Tensor,out:Union[Tensor,str],authormap:dict={'max':'miaobyte'})->Tensor : - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_max - rtf_max(a,b,outtensor,ctx.authormap['max']) - return outtensor - - @staticmethod - def backward(ctx:Context,out_grad): - mask_a=ctx.get_tensor - mask_b=1-mask_a - return out_grad*mask_a, out_grad*mask_b - -class MaxScalar(Function): - @staticmethod - def forward(ctx:Context,a:Tensor, b:float,out:Union[Tensor,str],authormap:dict={'maxscalar':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) + #C=A/B outtensor=out if isinstance(out,str): outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_maxscalar - rtf_maxscalar(a,b,outtensor,ctx.authormap['maxscalar']) - return outtensor - - @staticmethod - def backward(ctx:Context,out_grad): - b=ctx.get_data('b') - return out_grad, out_grad - - -def max( - a:Tensor, - b:Union[int,float,Tensor,]=0, - out:Union[Tensor,str]=None, - requires_grad:bool=False, - author='miaobyte')->Tensor: - if isinstance(b,int) or isinstance(b,float): - return MaxScalar.apply(a,b,out,{'maxscalar':author},requires_grad=requires_grad) - else: - return Max.apply(a,b,out,{'max':author},requires_grad=requires_grad) - - -class Min(Function): - @staticmethod - def forward(ctx:Context,a:Tensor, b:Tensor,out:Union[Tensor,str],authormap:dict={'min':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_min - rtf_min(a,b,outtensor,ctx.authormap['min']) - return outtensor - - @staticmethod - def backward(ctx:Context,out_grad): - a,b=ctx.get_tensor - return out_grad, out_grad - -class MinScalar(Function): - @staticmethod - def forward(ctx:Context,a:Tensor, 
b:float,out:Union[Tensor,str],authormap:dict={'minscalar':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_minscalar - rtf_minscalar(a,b,outtensor,ctx.authormap['minscalar']) + an=a + bn=b + if a.shape!=b.shape: + newshape=Shape.broadcast_shape(a.shape,b.shape) + an=a.broadcastTo(newshape) + bn=b.broadcastTo(newshape) + from .rtf_elementwise import rtf_div + rtf_div(an,bn,outtensor,defaultauthor['div']) return outtensor - - @staticmethod - def backward(ctx:Context,out_grad): - b=ctx.get_data('b') - return out_grad, out_grad - -def min( - a:Tensor, - b:Union[int,float,Tensor,]=0, - out:Union[Tensor,str]=None, - requires_grad:bool=False, - author='miaobyte')->Tensor: - if isinstance(b,int) or isinstance(b,float): - return MinScalar.apply(a,b,out,{'minscalar':author},requires_grad=requires_grad) else: - return Min.apply(a,b,out,{'min':author},requires_grad=requires_grad) - + if isinstance(a,Tensor): + #C=A/b + outtensor=out + if isinstance(out,str): + outtensor=newtensor(a.shape,dtype=a.dtype,name=out) + from .rtf_elementwise import rtf_divscalar + rtf_divscalar(a,b,outtensor,defaultauthor['divscalar']) + return outtensor + elif isinstance(a,float) or isinstance(a,int): + #C=a/B + outtensor=out + if isinstance(out,str): + outtensor=newtensor(b.shape,dtype=b.dtype,name=out) + from .rtf_elementwise import rtf_rdivscalar + rtf_rdivscalar(a,b,outtensor,defaultauthor['rdivscalar']) + return outtensor + +max=create_A_B_tf_C('max') +min=create_A_B_tf_C('min') + +#pow +pow=create_A_B_tf_C('pow') #sqrt - -class Sqrt(Function): - @staticmethod - def forward(ctx:Context, a:Tensor,out:Union[Tensor,str],authormap:dict={'sqrt':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_sqrt - rtf_sqrt(a,outtensor,ctx.authormap['sqrt']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - a=ctx.get_tensor - return out_grad / (2 * sqrt(a)), None - -def sqrt( - input:Tensor, - out:Union[Tensor,str]=None, - requires_grad:bool=False, - author='miaobyte')->Tensor: - return Sqrt.apply(input,out,{'sqrt':author},requires_grad=requires_grad) - - -class Pow(Function): - @staticmethod - def forward(ctx:Context, a:Tensor, b:Tensor,out:Union[Tensor,str],authormap:dict={'pow':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_pow - rtf_pow(a,b,outtensor,ctx.authormap['pow']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - a,b=ctx.get_tensor - return out_grad * b * pow(a,b-1), out_grad * pow(a,b) * log(a) - -class PowScalar(Function): - @staticmethod - def forward(ctx:Context, a:Tensor, b:float,out:Union[Tensor,str],authormap:dict={'powscalar':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_powscalar - rtf_powscalar(a,b,outtensor,ctx.authormap['powscalar']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - b=ctx.get_data('b') - return out_grad * b * pow(a,b-1), out_grad * pow(a,b) * log(a) - -def pow( - a:Tensor, - b:Union[int,float,Tensor,]=0, - out:Union[Tensor,str]=None, - requires_grad:bool=False, - 
author='miaobyte')->Tensor: - if isinstance(b,int) or isinstance(b,float): - return PowScalar.apply(a,b,out,{'powscalar':author},requires_grad=requires_grad) - else: - return Pow.apply(a,b,out,{'pow':author},requires_grad=requires_grad) - -#exp - -class Exp(Function): - @staticmethod - def forward(ctx:Context, a:Tensor,out:Union[Tensor,str],authormap:dict={'exp':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_exp - rtf_exp(a,outtensor,ctx.authormap['exp']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - a=ctx.get_tensor - return out_grad * exp(a), None - -def exp( - a:Tensor, - out:Union[Tensor,str]=None, - requires_grad:bool=False, - author='miaobyte')->Tensor: - return Exp.apply(a,out,{'exp':author},requires_grad=requires_grad) -#log - -class Log(Function): - @staticmethod - def forward(ctx:Context, a:Tensor,out:Union[Tensor,str],authormap:dict={'log':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_log - rtf_log(a,outtensor,ctx.authormap['log']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - a=ctx.get_tensor - return out_grad / a, None - -def log( - a:Tensor, - out:Union[Tensor,str]=None, - requires_grad:bool=False, - author='miaobyte')->Tensor: - return Log.apply(a,out,{'log':author},requires_grad=requires_grad) - -class Rsqrt(Function): - @staticmethod - def forward(ctx:Context, a:Tensor,out:Union[Tensor,str],authormap:dict={'rsqrt':'miaobyte'})->Tensor: - ctx.set_authormap(authormap) - outtensor=out - if isinstance(out,str): - outtensor=newtensor(a.shape,dtype=a.dtype,name=out) - from .rtf_elementwise import rtf_rsqrt - rtf_rsqrt(a,outtensor,ctx.authormap['rsqrt']) - return outtensor - - @staticmethod - def backward(ctx:Context, out_grad): - a=ctx.get_tensor - return -out_grad / (2 * a * sqrt(a)), None - -def rsqrt( - input:Tensor, - out:Union[Tensor,str]=None, - requires_grad:bool=False, - author='miaobyte')->Tensor: - return Rsqrt.apply(input,out,{'rsqrt':author},requires_grad=requires_grad) - \ No newline at end of file +sqrt=create_A_tf_C('sqrt') +exp=create_A_tf_C('exp') +log=create_A_tf_C('log') \ No newline at end of file diff --git a/front/py/deepx/nn/functional/leaffunc_init.py b/front/py/deepx/nn/functional/leaffunc_init.py index eb3d7792..e0b0da90 100644 --- a/front/py/deepx/nn/functional/leaffunc_init.py +++ b/front/py/deepx/nn/functional/leaffunc_init.py @@ -1,6 +1,6 @@ from typing import Union import math -from .leaffunc_new import newtensor,parse_shape +from .leaffunc_life import newtensor,parse_shape from .rtf_init import * from deepx import Tensor diff --git a/front/py/deepx/nn/functional/leaffunc_new.py b/front/py/deepx/nn/functional/leaffunc_life.py similarity index 81% rename from front/py/deepx/nn/functional/leaffunc_new.py rename to front/py/deepx/nn/functional/leaffunc_life.py index af65fcd8..cf4d0905 100644 --- a/front/py/deepx/nn/functional/leaffunc_new.py +++ b/front/py/deepx/nn/functional/leaffunc_life.py @@ -9,14 +9,14 @@ def parse_shape(shape:Union[tuple,list])->tuple[int, ...]: def newtensor(*shape,dtype:str='float32',name:str=None): s=parse_shape(shape) t=Tensor(shape=s,dtype=dtype,name=name) - from .rtf_new import rtf_newtensor + from .rtf_life import rtf_newtensor rtf_newtensor(t) return t def copytensor(t:Tensor,out:Tensor): 
-    from .rtf_new import rtf_copytensor
+    from .rtf_life import rtf_copytensor
     rtf_copytensor(t,out)
 
 def deltensor(t:Tensor):
-    from .rtf_new import rtf_deltensor
+    from .rtf_life import rtf_deltensor
     rtf_deltensor(t)
 
diff --git a/front/py/deepx/nn/functional/leaffunc_matmul.py b/front/py/deepx/nn/functional/leaffunc_matmul.py
index 73d5303c..11b793a4 100644
--- a/front/py/deepx/nn/functional/leaffunc_matmul.py
+++ b/front/py/deepx/nn/functional/leaffunc_matmul.py
@@ -1,40 +1,15 @@
-from typing import Optional,Union
+from typing import Union
-from deepx import Tensor
+from deepx import Tensor,Shape
-from deepx.autograd import Function,Context
-from .leaffunc_new import newtensor
-
-class Matmul(Function):
-    @staticmethod
-    def forward(ctx:Context,
-            a:Tensor,
-            b: Tensor,
-            out:Union[Tensor,str]='',
-            authormap:dict={'matmul':'cublas'}):
-        ctx.save_tensors(a,b)
-        ctx.set_authormap(authormap)
-
-        outtensor=None
-        if isinstance(out,str):
-            matmulshape=a.Shape.matmul(b.shape)
-            outtensor=newtensor(matmulshape, dtype=a.dtype,name=out)
-        else:
-            outtensor=out
+from .leaffunc_life import newtensor
+from .authormap import defaultauthor
 
-        from .rtf_matmul import rtf_matmul
-        rtf_matmul(a,b,outtensor,ctx.authormap['matmul'])
-        return outtensor
-
-    @staticmethod
-    def backward(ctx:Context,out_grad:Tensor,a_grad:Union[Tensor,str],b_grad:Union[Tensor,str]):
-        a,b=ctx.get_tensors()
-        if isinstance(a_grad,str):
-            a_grad=newtensor(shape=a.shape,dtype=a.dtype,name=a_grad)
-        if isinstance(b_grad,str):
-            b_grad=newtensor(shape=b.shape,dtype=b.dtype,name=b_grad)
-        from .rtf_matmul import rtf_matmul_backward
-        rtf_matmul_backward(out_grad,a,b,a_grad,b_grad,ctx.authormap['matmul'])
-        return a_grad,b_grad
-
-def matmul(a:Tensor,b:Tensor,out:Union[Tensor,str]='',author:str='cublas')->Tensor:
-    return Matmul.apply(a,b,out,author)
+def matmul(a:Tensor,b:Tensor,out:Union[Tensor,str]='')->Tensor:
+    outtensor=out
+    if isinstance(out,str):
+        # the output shape is (..., a_rows, b_cols), not a.shape
+        matmulshape=Shape.matmul(a.shape,b.shape)
+        outtensor=newtensor(matmulshape,dtype=a.dtype,name=out)
+    from .rtf_matmul import rtf_matmul
+    rtf_matmul(a,b,outtensor,defaultauthor['matmul'])
+    return outtensor
diff --git a/front/py/deepx/nn/functional/leaffunc_reduce.py b/front/py/deepx/nn/functional/leaffunc_reduce.py
index 0c51ea0a..b09b5b2a 100644
--- a/front/py/deepx/nn/functional/leaffunc_reduce.py
+++ b/front/py/deepx/nn/functional/leaffunc_reduce.py
@@ -1,179 +1,11 @@
-from typing import Optional, Union
-
-from deepx.tensor import Tensor
-from deepx.autograd import Function,Context
-from .leaffunc_new import newtensor
-def reduceshape(inshape: Union[list[int], tuple[int]],
-                dim: Union[list[int], tuple[int]],
-                keepdim: bool) -> tuple[int]:
-    """计算维度缩减后的形状
-
-    Args:
-        inshape: 输入形状,如(2,3,4)
-        dim: 要缩减的维度列表,支持负数索引,如[-1]
-        keepdim: 是否保留缩减后的维度为1
-
-    Returns:
-        缩减后的形状元组
-
-    Example:
-        >>> reduceshape((2,3,4), [1], True)
-        (2, 1, 4)
-        >>> reduceshape((2,3,4), [1], False)
-        (2, 4)
-    """
-    ndim = len(inshape)
-    # 处理负数维度
-    normalized_dim = [d % ndim for d in dim]
-    # 去重并排序
-    unique_dim = sorted(set(normalized_dim))
-
-    if keepdim:
-        return tuple(1 if i in unique_dim else s
-                    for i, s in enumerate(inshape))
-    else:
-        return tuple(s for i, s in enumerate(inshape)
-                    if i not in unique_dim)
-
-#sum
+from .leaffunc import create_A_dim_keepdim_tf_C
 
-class Sum(Function):
-    @staticmethod
-    def forward(ctx:Context,a:Tensor,dims:tuple[int]=None,keepdim:bool=False,out:Union[Tensor,str]='',authormap:dict={'sum':'miaobyte'})->Tensor:
-        if ctx.requires_grad:
-            ctx.save_tensors('a',a)
-            ctx.save_data('dims',dims)
-            ctx.save_data('keepdim',keepdim)
-            ctx.set_authormap(authormap)
-        if dims is None:
-
dims=tuple(range(a.ndim)) - - result=out - if isinstance(out,str): - resultshape=reduceshape(a.shape,dims,keepdim) - result=newtensor(resultshape, dtype=a.dtype,name=out) - from .rtf_reduce import rtf_sum - rtf_sum(a,dims,keepdim,result,ctx.authormap['sum']) - return result - - @staticmethod - def backward(ctx:Context,out_grad): - pass - - - -def sum( - a:Tensor, - dims:list[int], - keepdim:bool=False, - out:Union[Tensor,str]='', - author:str='miaobyte', - requires_grad:bool=False)->Tensor: - return Sum.apply(a,dims,keepdim,out,{'sum':author},requires_grad=requires_grad) - -#prod - -class Prod(Function): - @staticmethod - def forward(ctx:Context,a:Tensor,dims:tuple[int]=None,keepdim:bool=False,out:Union[Tensor,str]='',authormap:dict={'prod':'miaobyte'})->Tensor: - if ctx.requires_grad: - ctx.save_tensors('a',a) - ctx.save_data('dims',dims) - ctx.save_data('keepdim',keepdim) - ctx.set_authormap(authormap) - if dims is None: - dims=tuple(range(a.ndim)) - - result=out - if isinstance(out,str): - resultshape=reduceshape(a.shape,dims,keepdim) - result=newtensor(resultshape, dtype=a.dtype,name=out) - from .rtf_reduce import rtf_prod - rtf_prod(a,dims,keepdim,result,ctx.authormap['prod']) - return result - - @staticmethod - def backward(ctx:Context,out_grad): - pass -def prod( - a:Tensor, - dims:list[int], - keepdim:bool=False, - out:Union[Tensor,str]='', - author:str='miaobyte', - requires_grad:bool=False)->Tensor: - return Prod.apply(a,dims,keepdim,out,{'prod':author},requires_grad=requires_grad) - -#max - -class ReduceMax(Function): - @staticmethod - def forward(ctx:Context,a:Tensor,dims:tuple[int]=None,keepdim:bool=False,out:Union[Tensor,str]='',authormap:dict={'reducemax':'miaobyte'})->Tensor: - if ctx.requires_grad: - ctx.save_tensors('a',a) - ctx.save_data('dims',dims) - ctx.save_data('keepdim',keepdim) - ctx.set_authormap(authormap) - if dims is None: - dims=tuple(range(a.ndim)) - - result=out - if isinstance(out,str): - resultshape=reduceshape(a.shape,dims,keepdim) - result=newtensor(resultshape, dtype=a.dtype,name=out) - from .rtf_reduce import rtf_reducemax - rtf_reducemax(a,dims,keepdim,result,ctx.authormap['reducemax']) - return result - - @staticmethod - def backward(ctx:Context,out_grad): - pass - - - -def reducemax( - a:Tensor, - dims:list[int], - keepdim:bool=False, - out:Union[Tensor,str]='', - author:str='miaobyte', - requires_grad:bool=False)->Tensor: - return ReduceMax.apply(a,dims,keepdim,out,{'reducemax':author},requires_grad=requires_grad) -#min -class ReduceMin(Function): - @staticmethod - def forward(ctx:Context,a:Tensor,dims:tuple[int]=None,keepdim:bool=False,out:Union[Tensor,str]='',authormap:dict={'reducemin':'miaobyte'})->Tensor: - if ctx.requires_grad: - ctx.save_tensors('a',a) - ctx.save_data('dims',dims) - ctx.save_data('keepdim',keepdim) - ctx.set_authormap(authormap) - if dims is None: - dims=tuple(range(a.ndim)) - result=out - if isinstance(out,str): - resultshape=reduceshape(a.shape,dims,keepdim) - result=newtensor(resultshape, dtype=a.dtype,name=out) - from .rtf_reduce import rtf_reducemin - rtf_reducemin(a,dims,keepdim,result,ctx.authormap['reducemin']) - return result - - @staticmethod - def backward(ctx:Context,out_grad): - pass - - +sum=create_A_dim_keepdim_tf_C('sum') +prod=create_A_dim_keepdim_tf_C('prod') +reducemin=create_A_dim_keepdim_tf_C('reducemin') +reducemax=create_A_dim_keepdim_tf_C('reducemax') + -def reducemin( - a:Tensor, - dims:list[int], - keepdim:bool=False, - out:Union[Tensor,str]='', - author:str='miaobyte', - 
requires_grad:bool=False)->Tensor:
-    return ReduceMin.apply(a,dims,keepdim,out,{'reducemin':author},requires_grad=requires_grad)
-
-
\ No newline at end of file
diff --git a/front/py/deepx/nn/functional/normalization.py b/front/py/deepx/nn/functional/normalization.py
index 7705f05b..87eacbbe 100644
--- a/front/py/deepx/nn/functional/normalization.py
+++ b/front/py/deepx/nn/functional/normalization.py
@@ -1,43 +1,21 @@
 from typing import Union
 from deepx import Tensor
-
-def softmax(
-    t: Tensor,
-    dim: int = -1,
-    out: Union[Tensor, str] = '') -> Tensor:
-    """Softmax激活函数
-
-    数学公式分三个层级理解:
-    1. 标准公式:
-    .. math::
-        \text{softmax}(x_i) = \frac{e^{x_i}}{\sum_j e^{x_j}}
-
-    2. 数值稳定版本(实现采用):
-    .. math::
-        \text{softmax}(x_i) = \frac{e^{x_i - \max(x)}}{\sum_j e^{x_j - \max(x)}}
-
-    3. 对数空间计算(理论等价):
-    .. math::
-        \text{softmax}(x_i) = e^{\log(\text{softmax}(x_i))} = e^{x_i - \log\sum_j e^{x_j}}
-
-    Args:
-        t: 输入张量
-        dim: 计算维度,默认为最后一个维度
-        inplace: 是否原地操作(注意:可能影响梯度计算)
-        out: 输出张量或名称
-
-    Returns:
-        输出张量
-    """
+# formula: softmax(x_i) = e^{x_i} / sum_j e^{x_j}
+def softmax(t: Tensor,dim:int=-1)->Tensor:
 
     # numerical stability: subtract the max so exp cannot overflow
-    max_val = t.max(dim=dim, keepdim=True)  # 保持维度用于广播
-
-    # 实现公式:exp(t - max) / sum(exp(t - max))
-    exp_t = (t - max_val).exp()
-    sum_exp = exp_t.sum(dim=dim, keepdim=True)
-
+    if dim is not None:
+        reducemax_t = t.reducemax(dim=[dim], keepdim=True) # keepdim so it broadcasts back
+    else:
+        reducemax_t = t.reducemax(keepdim=True)
+    t_subed=t.clone()
+    t_subed.sub_(reducemax_t)
+
+    # compute exp(t_subed) / sum(exp(t_subed))
+    exp_t = t_subed.exp()
+    expt_sum=exp_t.sum(dim=[dim], keepdim=True)
     # reuse t_subed as the output buffer (same pattern as sigmoid)
-    out_tensor = exp_t / sum_exp
-
-    return out_tensor
\ No newline at end of file
+    exp_t.div(expt_sum,out=t_subed)
+    return t_subed
\ No newline at end of file
diff --git a/front/py/deepx/nn/functional/reduce.py b/front/py/deepx/nn/functional/reduce.py
index 6d8a20a2..c5998e6a 100644
--- a/front/py/deepx/nn/functional/reduce.py
+++ b/front/py/deepx/nn/functional/reduce.py
@@ -1,25 +1,24 @@
-from deepx.tensor import Tensor
+from deepx.tensor import Tensor,Shape
 from typing import Optional,Union
 from .leaffunc_reduce import sum
-
+from .leaffunc_life import newtensor
 #mean
-def mean(
-    a:Tensor,
-    dims:Optional[Union[list[int],tuple[int]]]=None,
-    keepdim:bool=False,
-    out:Union[str]='')->Tensor:
+def mean(a:Tensor,dim:tuple[int]=None,keepdim:bool=False)->Tensor:
     # if dim is None, average over all dimensions
-    if dims is None:
-        dims = list(range(a.ndim))
-    elif isinstance(dims, int):
-        dims = [dims]
+    if dim is None:
+        dim = list(range(a.ndim))
+    elif isinstance(dim, int):
+        dim = [dim]
     else:
-        dims = list(dims)
+        dim = list(dim)
     total = 1
-    for i in dims:
+    for idx, i in enumerate(dim):
         if i < 0:
-            dims[i] = i + a.dim()
+            # normalize by position, not by value, and use ndim (dim[i] was a bug)
+            dim[idx] = i + a.ndim
         total *= a.shape[i]
-    result = sum(a, dims, keepdim, out)/total
-    return result
+    outshape=Shape.reduceshape(a.shape,dim,keepdim)
+    out=newtensor(outshape,dtype=a.dtype)
+    sum(a, dim, keepdim, out)
+    out.div_(total)
+    return out
diff --git a/front/py/deepx/nn/functional/rtf_new.py b/front/py/deepx/nn/functional/rtf_life.py
similarity index 89%
rename from front/py/deepx/nn/functional/rtf_new.py
rename to front/py/deepx/nn/functional/rtf_life.py
index a0f8ab75..b233df70 100644
--- a/front/py/deepx/nn/functional/rtf_new.py
+++ b/front/py/deepx/nn/functional/rtf_life.py
@@ -10,8 +10,8 @@ def rtf_newtensor(t:Tensor):
 
 
 def rtf_copytensor(t:Tensor,out:Tensor):
-    args=[Param.tensor(t)]
-    returns=[Param.tensor(out)]
+    args=[Param.tensor(t),Param.tensor(out)]
+    returns=[]
     ir=DeepxIR("copytensor", args, returns,'')
     send(ir)
 
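A quick sanity sketch of the new mean path (reviewer assumption — these calls and values are not in the patch; `full` is the existing leaffunc_init helper):

    t = full(2, 3, value=6.0, dtype="float32")   # shape (2,3), every entry 6.0
    m = mean(t, dim=[-1])                        # sums the last dim, divides by 3 -> shape (2,), all 6.0
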
diff --git a/front/py/deepx/scheduler/client/udpconn.py b/front/py/deepx/scheduler/client/udpconn.py
index a25b0963..6a12c26a 100644
--- a/front/py/deepx/scheduler/client/udpconn.py
+++ b/front/py/deepx/scheduler/client/udpconn.py
@@ -3,7 +3,7 @@ import select
 
 class UDPConn:
-    def __init__(self, endpoint: str = "localhost:8080"):
+    def __init__(self, endpoint: str = "localhost:9090"):
         # parse the endpoint string
         self._host, port_str = endpoint.split(':')
         self._port = int(port_str)
diff --git a/front/py/deepx/tensor/changeshape.py b/front/py/deepx/tensor/changeshape.py
index fafd6ea5..a9776f3e 100644
--- a/front/py/deepx/tensor/changeshape.py
+++ b/front/py/deepx/tensor/changeshape.py
@@ -14,15 +14,15 @@ def reshape_(self,*shape)->Tensor:
     return result
 
 @tensor_method
-def permute(self,*axes,out:Union[Tensor,str]=''):
+def permute(self,*dimorder,out:Union[Tensor,str]=''):
     from deepx.nn.functional import permute as permute_func
-    result=permute_func(self,axes,out)
+    result=permute_func(self,dimorder,out)
     return result
 
 @tensor_method
-def permute_(self,*axes):
+def permute_(self,*dimorder):
     from deepx.nn.functional import permute as permute_func
-    permute_func(self,axes,self)
+    permute_func(self,dimorder,self)
     return self
 
 @tensor_method
diff --git a/front/py/deepx/tensor/elementwise.py b/front/py/deepx/tensor/elementwise.py
index ce56d7c8..b6cb2ef9 100644
--- a/front/py/deepx/tensor/elementwise.py
+++ b/front/py/deepx/tensor/elementwise.py
@@ -10,7 +10,7 @@ def add(self,
     return add_func(self,other,out)
 
 @tensor_method
-def add_(self, other):
+def add_(self, other:Union[Tensor,float,int]):
     from deepx.nn.functional import add as add_func
     add_func(self,other,self)
 
@@ -22,7 +22,7 @@ def sub(self, other:Union[Tensor,float,int],
     return sub_func(self,other,out)
 
 @tensor_method
-def sub_(self, other):
+def sub_(self, other:Union[Tensor,float,int]):
     from deepx.nn.functional import sub as sub_func
     sub_func(self,other,self)
 
@@ -33,7 +33,7 @@ def mul(self, other:Union[Tensor,float,int],
     return mul_func(self,other,out)
 
 @tensor_method
-def mul_(self, other):
+def mul_(self, other:Union[Tensor,float,int]):
     from deepx.nn.functional import mul as mul_func
     mul_func(self,other,self)
 
@@ -46,20 +46,20 @@ def div(self, other:Union[Tensor,float,int],
 
 
 @tensor_method
-def div_(self, other):
+def div_(self, other:Union[Tensor,float,int]):
     from deepx.nn.functional import div as div_func
     div_func(self,other,self)
 
 
 @tensor_method
-def rdiv(self, other:Union[Tensor,float,int],
+def rdiv(self,other:Union[float,int],
         out:Union[Tensor,str]='')->Tensor:
     from deepx.nn.functional import div as div_func
     return div_func(other,self,out)
 
 
 @tensor_method
-def rdiv_(self, other):
+def rdiv_(self, other:Union[float,int]):
     from deepx.nn.functional import div as div_func
     div_func(other,self,self)
     return self
@@ -72,7 +72,7 @@ def min(self, other:Union[Tensor,float,int],
 
 
 @tensor_method
-def min_(self, other):
+def min_(self, other:Union[Tensor,float,int]):
     from deepx.nn.functional import min as min_func
     min_func(self,other,self)
     return self
@@ -85,7 +85,7 @@ def max(self, other:Union[Tensor,float,int],
     return out
 
 @tensor_method
-def max_(self, other):
+def max_(self, other:Union[Tensor,float,int]):
     from deepx.nn.functional import max as max_func
     max_func(self,other,self)
 
@@ -98,9 +98,9 @@ def clamp(self, min:Union[float,int], max:Union[float,int],
 
 
 @tensor_method
-def clamp_(self, min, max):
-    from deepx.nn.functional import clamp as clamp_func
-
clamp_func(self,min,max,self) +def clamp_(self, min:Union[float,int], max:Union[float,int]): + #todo + pass @tensor_method diff --git a/front/py/deepx/tensor/init.py b/front/py/deepx/tensor/init.py index 41e7cdd8..f50073ca 100644 --- a/front/py/deepx/tensor/init.py +++ b/front/py/deepx/tensor/init.py @@ -1,7 +1,8 @@ +from typing import Union from deepx.tensor import tensor_method @tensor_method -def full_(self,value): +def full_(self,value:Union[float,int]): from deepx.nn.functional import constant as constant_func constant_func(self,value=value) diff --git a/front/py/deepx/tensor/matmul.py b/front/py/deepx/tensor/matmul.py index 9388873f..1a0cee75 100644 --- a/front/py/deepx/tensor/matmul.py +++ b/front/py/deepx/tensor/matmul.py @@ -1,8 +1,8 @@ -from typing import Optional,Union +from typing import Union from .tensor import Tensor,tensor_method @tensor_method -def matmul(self,other,out:Union[Tensor,str]='',author='miaobyte'): +def matmul(self:Tensor,other:Tensor,out:Union[Tensor,str]=''): from deepx.nn.functional import matmul as matmul_func - return matmul_func(self,other,out,author) \ No newline at end of file + return matmul_func(self,other,out) \ No newline at end of file diff --git a/front/py/deepx/tensor/reduce.py b/front/py/deepx/tensor/reduce.py index feb6e42c..b6d5bc3f 100644 --- a/front/py/deepx/tensor/reduce.py +++ b/front/py/deepx/tensor/reduce.py @@ -1,33 +1,31 @@ -from typing import Optional,Union +from typing import Union from deepx.tensor import Tensor,tensor_method -from deepx.autograd.graph import OpNode - @tensor_method -def reduce_max(self, dim,keepdim=False,out:Union[Tensor,str]=''): - from deepx.nn.functional import reduce_max as reduce_max_func +def reducemax(self, dim:list[int],keepdim:bool=False,out:Union[Tensor,str]=''): + from deepx.nn.functional import reducemax as reduce_max_func return reduce_max_func(self,dim,keepdim,out) @tensor_method -def reduce_min(self, dim,keepdim=False,out:Union[Tensor,str]=''): - from deepx.nn.functional import reduce_min as reduce_min_func +def reducemin(self, dim:list[int],keepdim:bool=False,out:Union[Tensor,str]=''): + from deepx.nn.functional import reducemin as reduce_min_func return reduce_min_func(self,dim,keepdim,out) @tensor_method -def sum(self, dim,keepdim=False,out:Union[Tensor,str]=''): +def sum(self, dim:list[int],keepdim:bool=False,out:Union[Tensor,str]=''): from deepx.nn.functional import sum as sum_func return sum_func(self,dim,keepdim,out) @tensor_method -def prod(self, dim,keepdim=False,out:Union[Tensor,str]=''): +def prod(self, dim:list[int],keepdim:bool=False,out:Union[Tensor,str]=''): from deepx.nn.functional import prod as prod_func return prod_func(self,dim,keepdim,out) @tensor_method -def mean(self, dim,keepdim=False,out:Union[Tensor,str]=''): +def mean(self, dim:list[int],keepdim:bool=False,out:Union[Tensor,str]=''): from deepx.nn.functional import mean as mean_func return mean_func(self,dim,keepdim,out) \ No newline at end of file diff --git a/front/py/deepx/tensor/shape.py b/front/py/deepx/tensor/shape.py index 020ccd66..436f6bc6 100644 --- a/front/py/deepx/tensor/shape.py +++ b/front/py/deepx/tensor/shape.py @@ -98,19 +98,75 @@ def __hash__(self): """使Shape可哈希,便于在字典和集合中使用""" return hash(self.shape) - def transpose(self,dimorder:list[int]=None): + @classmethod + def total_size(cls,other:tuple[int])->int: + total_size=1 + for i in other: + total_size*=i + return total_size + + + @classmethod + def transpose(cls,shape:tuple[int],dimorder:list[int]=None): if dimorder is None: - 
dimorder=list(range(self.ndimension))
-        return Shape(tuple(self.shape[i] for i in dimorder))
+            dimorder=list(range(len(shape)))
+        return Shape(tuple(shape[i] for i in dimorder))
 
-    def matmul(self,other:'Shape')->'Shape':
-        if len(self)<2 or len(other)<2:
+    @classmethod
+    def matmul(cls,shape:tuple[int],other:tuple[int])->tuple[int]:
+        if len(shape)<2 or len(other)<2:
             raise ValueError("matmul: both shapes must have ndim>=2")
-        if len(self)!=len(other):
+        if len(shape)!=len(other):
             raise ValueError("matmul: shapes must have the same ndim")
-        if self[-1]!=other[-2]:
+        if shape[-1]!=other[-2]:
             raise ValueError("matmul: shape[-1] != other[-2]")
-        resultshape=list(self._shape)
+        resultshape=list(shape)
         resultshape[-1]=other[-1]
         return tuple(resultshape)
-
\ No newline at end of file
+
+    @classmethod
+    def broadcast_shape(cls,shape_a: tuple[int], shape_b: tuple[int]) -> tuple[int]:
+        """Compute the broadcast result shape of two shapes."""
+        len_a, len_b = len(shape_a), len(shape_b)
+
+        result_shape = []
+
+        # align from the right and resolve each dimension
+        for i in range(1, min(len_a, len_b) + 1):
+            dim_a = shape_a[-i]
+            dim_b = shape_b[-i]
+
+            if dim_a == 1 or dim_b == 1:
+                # broadcasting rule: a dim of 1 stretches to the other dim
+                result_shape.insert(0, max(dim_a, dim_b))
+            elif dim_a == dim_b:
+                # equal dims stay unchanged
+                result_shape.insert(0, dim_a)
+            else:
+                # unequal dims and neither is 1: not broadcastable
+                raise ValueError(f"shapes not broadcastable: {shape_a} and {shape_b}")
+
+        # prepend the extra leading dims of the longer shape
+        if len_a > len_b:
+            result_shape = list(shape_a[:len_a - len_b]) + result_shape
+        elif len_b > len_a:
+            result_shape = list(shape_b[:len_b - len_a]) + result_shape
+
+        return tuple(result_shape)
+
+    @classmethod
+    def reduceshape(cls,shape:tuple[int],dim:list[int],keepdim:bool)->tuple[int]:
+        ndim = len(shape)
+        # normalize negative dims
+        normalized_dim = [d % ndim for d in dim]
+        # dedupe and sort
+        unique_dim = sorted(set(normalized_dim))
+
+        if keepdim:
+            return tuple(1 if i in unique_dim else s
+                        for i, s in enumerate(shape))
+        else:
+            return tuple(s for i, s in enumerate(shape)
+                        if i not in unique_dim)
\ No newline at end of file
diff --git a/front/py/deepx/tensor/tensor.py b/front/py/deepx/tensor/tensor.py
index c121e4c2..ee60948d 100644
--- a/front/py/deepx/tensor/tensor.py
+++ b/front/py/deepx/tensor/tensor.py
@@ -1,17 +1,19 @@
-import uuid
 from typing import Optional,Union
 from .shape import Shape
 
+tensorid=1
 class Tensor:
-    #生命周期
+    #life
     def __init__(self,shape:Union[tuple[int],list[int],Shape],dtype:str='float32',name:str=None):
         # name
         self._name = name
-        if name =='':
-            self._name =None
+        if name is None or name =='':
+            global tensorid
+            # auto-assign a string name (str(), so IR params stay uniform)
+            self._name =str(tensorid)
+            tensorid+=1
 
         # dtype
         self._dtype = dtype
@@ -28,15 +30,15 @@ def __init__(self,shape:Union[tuple[int],list[int],Shape],dtype:str='float32',na
         self._graph = None
         self._node = None
 
+    def copy_to(self,t:'Tensor'):
+        from deepx.nn.functional import copytensor
+        copytensor(self,t)
 
-    # todo,待实现eager模式下的tensor释放
-    def __del__(self):
-        try:
-            if self.graph.eager:
-                from deepx.nn.functional import deltensor
-                deltensor(self)
-        except:
-            pass
+    def clone(self,name:str=None):
+        from deepx.nn.functional import copytensor,newtensor
+        t=newtensor(self.shape,dtype=self.dtype,name=name)
+        copytensor(self,t)
+        return t
 
     # name
     @property
@@ -130,11 +132,11 @@ def autoformat(self):
             self._format = '%s'
     def set_format(self,format:str):
         self._format = format
-    def __repr__(self) -> str:
+    def print(self):
         from deepx.nn.functional import printtensor
-        s=printtensor(self,format=self._format)
-        return s
-
+        printtensor(self,format=self._format)
+    def __repr__(self) -> str:
+        return 'Tensor(shape={},dtype={},name={})'.format(self.shape,self.dtype,self.name)
 
 def tensor_method(f):
     setattr(Tensor, f.__name__, f)
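A sanity sketch of the refactored Shape helpers (values derived by hand from the code above, not from test output):

    # Shape.broadcast_shape((3,1,5),(4,5))  -> (3,4,5)
    # Shape.reduceshape((2,3,4),[1],True)   -> (2,1,4)
    # Shape.reduceshape((2,3,4),[-1],False) -> (2,3)
    # Tensor(shape=(2,3)) with no name now auto-names itself "1", "2", ...
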
diff --git a/front/py/examples/1_tensor/1_clone.py b/front/py/examples/1_tensor/1_clone.py
new file mode 100644
index 00000000..78654fb5
--- /dev/null
+++ b/front/py/examples/1_tensor/1_clone.py
@@ -0,0 +1,9 @@
+from deepx.tensor import Tensor
+
+def clonetest():
+    t1=Tensor(shape=(1,2,3),dtype='float32',name='t1')
+    t2=t1.clone(name='t2')
+    print(t2)
+
+if __name__ == "__main__":
+    clonetest()
\ No newline at end of file
diff --git a/front/py/examples/1_tensor/1_copy.py b/front/py/examples/1_tensor/1_copy.py
new file mode 100644
index 00000000..2afeddcb
--- /dev/null
+++ b/front/py/examples/1_tensor/1_copy.py
@@ -0,0 +1,12 @@
+from deepx.tensor import Tensor
+
+def copytest():
+    from deepx.nn.functional import newtensor
+    t1= newtensor(1, 2, 3,name='t1')
+    t2= newtensor(1, 2, 3,name='t2')
+    t1.copy_to(t2)
+    print(t2)
+
+
+if __name__ == "__main__":
+    copytest()
\ No newline at end of file
diff --git a/front/py/examples/2_ir/1_init_zeroones.dot b/front/py/examples/2_ir/1_init_zeroones.dot
deleted file mode 100644
index c9bab983..00000000
--- a/front/py/examples/2_ir/1_init_zeroones.dot
+++ /dev/null
@@ -1,56 +0,0 @@
-// Computational Graph
-digraph {
-	rankdir=TB
-	node [shape=record]
-	137557518290656 [label="t1
-(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled]
-	137555544580416 [label=constant color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled]
-	137555544579360 [label="var_1
-0" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled]
-	137555544579552 [label="t2
-(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled]
-	137555542939472 [label=constant color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled]
-	137555542939616 [label="var_2
-1" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled]
-	137555542939520 [label=add color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled]
-	137555542939712 [label="t3
-(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled]
-	137555542939232 [label="t4
-(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled]
-	137555542940144 [label=constant color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled]
-	137555542940000 [label="var_3
-0.5" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled]
-	137555542940096 [label=add color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled]
-	137555542939760 [label="t5
-(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled]
-	137555542940816 [label="tensor_6
-(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled]
-	137555542940288 [label=constant color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled]
-	137555542940336 [label="var_4
-0" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled]
-	137555542940864 [label=uniform color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l
shape=box style=filled] - 137555542940624 [label="var_5 --0.5477225575051661" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled] - 137555542940960 [label="var_6 -0.5477225575051661" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled] - 137555542940720 [label="var_7 -0" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled] - 137555544580416 -> 137557518290656 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555544579360 -> 137555544580416 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542939472 -> 137555544579552 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542939616 -> 137555542939472 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137557518290656 -> 137555542939520 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555544579552 -> 137555542939520 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542939520 -> 137555542939712 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542940144 -> 137555542939232 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542940000 -> 137555542940144 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542939232 -> 137555542940096 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542939712 -> 137555542940096 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542940096 -> 137555542939760 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542940288 -> 137555542940816 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542940864 -> 137555542940816 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542940336 -> 137555542940288 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542940624 -> 137555542940864 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542940960 -> 137555542940864 [arrowsize=0.8 color=gray40 penwidth=1.2] - 137555542940720 -> 137555542940864 [arrowsize=0.8 color=gray40 penwidth=1.2] -} diff --git a/front/py/examples/2_ir/1_init_zeroones.dot.svg b/front/py/examples/2_ir/1_init_zeroones.dot.svg deleted file mode 100644 index 2b4da486..00000000 --- a/front/py/examples/2_ir/1_init_zeroones.dot.svg +++ /dev/null @@ -1,254 +0,0 @@ - - - - - - -%3 - - - -137557518290656 - -t1 -(3, 4, 5) - - - -137555542939520 - -add - - - -137557518290656->137555542939520 - - - - - -137555544580416 - -constant - - - -137555544580416->137557518290656 - - - - - -137555544579360 - -var_1 -0 - - - -137555544579360->137555544580416 - - - - - -137555544579552 - -t2 -(3, 4, 5) - - - -137555544579552->137555542939520 - - - - - -137555542939472 - -constant - - - -137555542939472->137555544579552 - - - - - -137555542939616 - -var_2 -1 - - - -137555542939616->137555542939472 - - - - - -137555542939712 - -t3 -(3, 4, 5) - - - -137555542939520->137555542939712 - - - - - -137555542940096 - -add - - - -137555542939712->137555542940096 - - - - - -137555542939232 - -t4 -(3, 4, 5) - - - -137555542939232->137555542940096 - - - - - -137555542940144 - -constant - - - -137555542940144->137555542939232 - - - - - -137555542940000 - -var_3 -0.5 - - - -137555542940000->137555542940144 - - - - - -137555542939760 - -t5 -(3, 4, 5) - - - -137555542940096->137555542939760 - - - - - -137555542940816 - -tensor_6 -(3, 4, 5) - - - -137555542940288 - -constant - - - -137555542940288->137555542940816 - - - - - -137555542940336 - -var_4 -0 - - - -137555542940336->137555542940288 - - - - - -137555542940864 - -uniform - - - -137555542940864->137555542940816 - - - - - -137555542940624 - -var_5 --0.5477225575051661 - - - -137555542940624->137555542940864 - - - - - -137555542940960 - -var_6 -0.5477225575051661 - 
- - -137555542940960->137555542940864 - - - - - -137555542940720 - -var_7 -0 - - - -137555542940720->137555542940864 - - - - - diff --git a/front/py/examples/2_ir/2_elementwise_add.dot b/front/py/examples/2_ir/2_elementwise_add.dot deleted file mode 100644 index 63aa4be2..00000000 --- a/front/py/examples/2_ir/2_elementwise_add.dot +++ /dev/null @@ -1,31 +0,0 @@ -// Computational Graph -digraph { - rankdir=TB - node [shape=record] - 134156391383104 [label="t1 -(2, 3, 4)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 134156391386944 [label=constant color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 134156679069024 [label="var_1 -10" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled] - 134156679055728 [label="t2 -(2, 3, 4)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 134156322789056 [label=constant color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 134156322789248 [label="var_2 -10" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled] - 134156322789200 [label=add color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 134156322789296 [label="t3 -(2, 3, 4)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 134156322789872 [label=addscalar color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 134156322789440 [label="var_3 -0.5" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled] - 134156391386944 -> 134156391383104 [arrowsize=0.8 color=gray40 penwidth=1.2] - 134156679069024 -> 134156391386944 [arrowsize=0.8 color=gray40 penwidth=1.2] - 134156322789056 -> 134156679055728 [arrowsize=0.8 color=gray40 penwidth=1.2] - 134156322789248 -> 134156322789056 [arrowsize=0.8 color=gray40 penwidth=1.2] - 134156391383104 -> 134156322789200 [arrowsize=0.8 color=gray40 penwidth=1.2] - 134156679055728 -> 134156322789200 [arrowsize=0.8 color=gray40 penwidth=1.2] - 134156322789200 -> 134156322789296 [arrowsize=0.8 color=gray40 penwidth=1.2] - 134156322789872 -> 134156322789296 [arrowsize=0.8 color=gray40 penwidth=1.2] - 134156322789296 -> 134156322789872 [arrowsize=0.8 color=gray40 penwidth=1.2] - 134156322789440 -> 134156322789872 [arrowsize=0.8 color=gray40 penwidth=1.2] -} diff --git a/front/py/examples/2_ir/2_elementwise_add.dot.svg b/front/py/examples/2_ir/2_elementwise_add.dot.svg deleted file mode 100644 index 8b146b34..00000000 --- a/front/py/examples/2_ir/2_elementwise_add.dot.svg +++ /dev/null @@ -1,139 +0,0 @@ - - - - - - -%3 - - - -134156391383104 - -t1 -(2, 3, 4) - - - -134156322789200 - -add - - - -134156391383104->134156322789200 - - - - - -134156391386944 - -constant - - - -134156391386944->134156391383104 - - - - - -134156679069024 - -var_1 -10 - - - -134156679069024->134156391386944 - - - - - -134156679055728 - -t2 -(2, 3, 4) - - - -134156679055728->134156322789200 - - - - - -134156322789056 - -constant - - - -134156322789056->134156679055728 - - - - - -134156322789248 - -var_2 -10 - - - -134156322789248->134156322789056 - - - - - -134156322789296 - -t3 -(2, 3, 4) - - - -134156322789200->134156322789296 - - - - - -134156322789872 - -addscalar - - - -134156322789296->134156322789872 - - - - - -134156322789872->134156322789296 - - - - - -134156322789440 - 
-var_3 -0.5 - - - -134156322789440->134156322789872 - - - - - diff --git a/front/py/examples/2_ir/2_elementwise_add.py b/front/py/examples/2_ir/2_elementwise_add.py index 5e4c896b..ceba8d73 100644 --- a/front/py/examples/2_ir/2_elementwise_add.py +++ b/front/py/examples/2_ir/2_elementwise_add.py @@ -2,7 +2,7 @@ import torch torch_t1 = torch.full((2,3,4, ), 10, dtype=torch.float32) -torch_t2 = torch.full((2,3,4, ), 5, dtype=torch.float32) +torch_t2 = torch_t1.clone() torch_t3 = torch_t1 + torch_t2 torch_t3.add_(0.5) print() @@ -15,10 +15,7 @@ print() t1 = full(2,3,4, value=10,dtype="float32") -print(t1) -t2 = full(2,3,4, value=5,dtype="float32") -print(t2) +t2 = t1.clone() t3 = t1+t2 -print(t3) t3.add_(0.5) -print(t3) +print(t3) \ No newline at end of file diff --git a/front/py/examples/2_ir/2_elementwise_operator.dot b/front/py/examples/2_ir/2_elementwise_operator.dot deleted file mode 100644 index 46fb2a94..00000000 --- a/front/py/examples/2_ir/2_elementwise_operator.dot +++ /dev/null @@ -1,64 +0,0 @@ -// Computational Graph -digraph { - rankdir=TB - node [shape=record] - 130352998993200 [label="t1 -(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350739524192 [label=constant color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 130350741117520 [label="var_1 -0" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350741118576 [label="t2 -(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350739524336 [label=constant color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 130350739524480 [label="var_2 -1" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350739524384 [label=add color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 130350739524672 [label="t3 -(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350739524528 [label="t4 -(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350739524864 [label=constant color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 130350739525152 [label="var_3 -0.5" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350739525104 [label=add color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 130350739525296 [label="t5 -(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350739525440 [label="t6 -(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350739525632 [label=div color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 130350739525776 [label=rdivscalar color=darkslategray fillcolor=lightgray fontname="Courier Bold" labeljust=l shape=box style=filled] - 130350739525968 [label="var_4 -0.05" color=orange fillcolor=moccasin fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350739526016 [label="t7 -(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 130350739526304 [label=mulscalar color=darkslategray fillcolor=lightgray fontname="Courier Bold" 
labeljust=l shape=box style=filled]
[remaining deleted lines omitted: node definitions for var_5 (2.5), mul and t8 (3, 4, 5), the edge list, and the closing brace]
diff --git a/front/py/examples/2_ir/2_elementwise_operator.dot.svg b/front/py/examples/2_ir/2_elementwise_operator.dot.svg
deleted file mode 100644
index bc9389b5..00000000
--- a/front/py/examples/2_ir/2_elementwise_operator.dot.svg
+++ /dev/null
@@ -1,302 +0,0 @@
[302 deleted lines omitted: generated SVG rendering of 2_elementwise_operator.dot]
diff --git a/front/py/examples/2_ir/2_elementwise_sqrtlog.dot b/front/py/examples/2_ir/2_elementwise_sqrtlog.dot
deleted file mode 100644
index fa272de2..00000000
--- a/front/py/examples/2_ir/2_elementwise_sqrtlog.dot
+++ /dev/null
@@ -1,35 +0,0 @@
[35 deleted lines omitted: generated Graphviz graph — t1, t2 (60,); constant(2) -> t2; sqrt(t1) -> t3; log(t2) -> t4; exp(t4) -> t5; pow(t5, t3) -> t6, all shape (60,)]
diff --git a/front/py/examples/2_ir/2_elementwise_sqrtlog.dot.svg b/front/py/examples/2_ir/2_elementwise_sqrtlog.dot.svg
deleted file mode 100644
index 3ca8df83..00000000
--- a/front/py/examples/2_ir/2_elementwise_sqrtlog.dot.svg
+++ /dev/null
@@ -1,158 +0,0 @@
[158 deleted lines omitted: generated SVG rendering of 2_elementwise_sqrtlog.dot]
diff --git a/front/py/examples/2_ir/3_matmul.dot b/front/py/examples/2_ir/3_matmul.dot
deleted file mode 100644
index 232ef59c..00000000
--- a/front/py/examples/2_ir/3_matmul.dot
+++ /dev/null
@@ -1,25 +0,0 @@
[25 deleted lines omitted: generated Graphviz graph — t1 (3, 4) and t2 (4, 5) filled by constant(1); matmul(t1, t2) -> tensor_3 (3, 5)]
diff --git a/front/py/examples/2_ir/3_matmul.dot.svg b/front/py/examples/2_ir/3_matmul.dot.svg
deleted file mode 100644
index d14a0efd..00000000
--- a/front/py/examples/2_ir/3_matmul.dot.svg
+++ /dev/null
@@ -1,108 +0,0 @@
[108 deleted lines omitted: generated SVG rendering of 3_matmul.dot]
diff --git a/front/py/examples/2_ir/4_changeshape_broadcast.dot b/front/py/examples/2_ir/4_changeshape_broadcast.dot
deleted file mode 100644
index 63f24c90..00000000
--- a/front/py/examples/2_ir/4_changeshape_broadcast.dot
+++ /dev/null
@@ -1,35 +0,0 @@
[35 deleted lines omitted: generated Graphviz graph — a (4, 2, 3); b (2, 1) -> broadcastTo -> b.broadcasted (4, 2, 3); add(a, b.broadcasted) -> tensor_4 (4, 2, 3)]
diff --git a/front/py/examples/2_ir/4_changeshape_broadcast.dot.svg b/front/py/examples/2_ir/4_changeshape_broadcast.dot.svg
deleted file mode 100644
index 5ee163ac..00000000
--- a/front/py/examples/2_ir/4_changeshape_broadcast.dot.svg
+++ /dev/null
@@ -1,158 +0,0 @@
[158 deleted lines omitted: generated SVG rendering of 4_changeshape_broadcast.dot]
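For reference, the pattern the deleted broadcast graph recorded: b (2, 1) is right-aligned against (4, 2, 3), viewed as (1, 2, 1), and then expanded before the add. A minimal sketch of the same steps; it assumes the deepx ones() constructor and the Tensor.broadcastTo(new_shape=...) method used in these examples:

from deepx import ones
a = ones(4, 2, 3, name="a")
b = ones(2, 1, name="b")
# (2, 1) -> (4, 2, 3): size-1 dims repeat, missing leading dims are added
c = a + b.broadcastTo(new_shape=[4, 2, 3])
print(c)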
diff --git a/front/py/examples/2_ir/4_changeshape_broadcast_add.dot b/front/py/examples/2_ir/4_changeshape_broadcast_add.dot
deleted file mode 100644
index 2354eb65..00000000
--- a/front/py/examples/2_ir/4_changeshape_broadcast_add.dot
+++ /dev/null
@@ -1,41 +0,0 @@
[41 deleted lines omitted: generated Graphviz graph — a (4, 2, 3); b (2, 1) -> reshape -> tensor_3 (1, 2, 1) -> expand -> tensor_4 (4, 2, 3); add(a, tensor_4) -> tensor_5 (4, 2, 3)]
diff --git a/front/py/examples/2_ir/4_changeshape_broadcast_add.dot.svg b/front/py/examples/2_ir/4_changeshape_broadcast_add.dot.svg
deleted file mode 100644
index 1e94ce7d..00000000
--- a/front/py/examples/2_ir/4_changeshape_broadcast_add.dot.svg
+++ /dev/null
@@ -1,184 +0,0 @@
[184 deleted lines omitted: generated SVG rendering of 4_changeshape_broadcast_add.dot]
diff --git a/front/py/examples/2_ir/4_changeshape_concat.dot b/front/py/examples/2_ir/4_changeshape_concat.dot
deleted file mode 100644
index a78082a7..00000000
--- a/front/py/examples/2_ir/4_changeshape_concat.dot
+++ /dev/null
@@ -1,36 +0,0 @@
[36 deleted lines omitted: generated Graphviz graph — t1, t2, t3 (3, 4, 5) filled by constant(1); concat with var_4 = 1 -> t (3, 12, 5)]
diff --git a/front/py/examples/2_ir/4_changeshape_concat.dot.svg b/front/py/examples/2_ir/4_changeshape_concat.dot.svg
deleted file mode 100644
index 5388daf1..00000000
--- a/front/py/examples/2_ir/4_changeshape_concat.dot.svg
+++ /dev/null
@@ -1,159 +0,0 @@
[159 deleted lines omitted: generated SVG rendering of 4_changeshape_concat.dot]
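The deleted concat graph encodes the usual shape rule: sizes add along the concat dim, all other dims must match. A sketch of that rule; both the import path and the dim keyword here are assumptions, not confirmed by this diff:

from deepx import ones
from deepx.nn.functional import concat  # assumed location
t1 = ones(3, 4, 5, name="t1")
t2 = ones(3, 4, 5, name="t2")
t3 = ones(3, 4, 5, name="t3")
t = concat([t1, t2, t3], dim=1)  # (3, 4+4+4, 5) == (3, 12, 5)
print(t)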
diff --git a/front/py/examples/2_ir/4_changeshape_reshape.dot b/front/py/examples/2_ir/4_changeshape_reshape.dot
deleted file mode 100644
index 9102e255..00000000
--- a/front/py/examples/2_ir/4_changeshape_reshape.dot
+++ /dev/null
@@ -1,33 +0,0 @@
[33 deleted lines omitted: generated Graphviz graph — t1 (3, 4) -> reshape -> tensor_2 (3, 2, 2); tensor_3 (4, 5) -> reshape -> (20,)]
diff --git a/front/py/examples/2_ir/4_changeshape_reshape.dot.svg b/front/py/examples/2_ir/4_changeshape_reshape.dot.svg
deleted file mode 100644
index e5caae8f..00000000
--- a/front/py/examples/2_ir/4_changeshape_reshape.dot.svg
+++ /dev/null
@@ -1,146 +0,0 @@
[146 deleted lines omitted: generated SVG rendering of 4_changeshape_reshape.dot]
diff --git a/front/py/examples/2_ir/4_changeshape_transpose.dot b/front/py/examples/2_ir/4_changeshape_transpose.dot
deleted file mode 100644
index 676d92a1..00000000
--- a/front/py/examples/2_ir/4_changeshape_transpose.dot
+++ /dev/null
@@ -1,35 +0,0 @@
[35 deleted lines omitted: generated Graphviz graph — t1 (3, 4) -> transpose([1, 0]) -> t2 (4, 3); t3 (2, 3, 4) -> transpose([0, 2, 1]) -> t4 (2, 4, 3)]
diff --git a/front/py/examples/2_ir/4_changeshape_transpose.dot.svg b/front/py/examples/2_ir/4_changeshape_transpose.dot.svg
deleted file mode 100644
index 4e152a70..00000000
--- a/front/py/examples/2_ir/4_changeshape_transpose.dot.svg
+++ /dev/null
@@ -1,153 +0,0 @@
[153 deleted lines omitted: generated SVG rendering of 4_changeshape_transpose.dot]
diff --git a/front/py/examples/2_ir/5_reduce_sum_keepdim.dot b/front/py/examples/2_ir/5_reduce_sum_keepdim.dot
deleted file mode 100644
index fdc348fe..00000000
--- a/front/py/examples/2_ir/5_reduce_sum_keepdim.dot
+++ /dev/null
@@ -1,38 +0,0 @@
[38 deleted lines omitted: generated Graphviz graph — t (3, 4, 5) -> sum([0, 2]) -> s (1, 4, 1); t -> prod([1]) -> p (3, 1, 5); t1 (4, 5, 6) -> sum([0, 1]) -> t2 (1, 1, 6)]
diff --git a/front/py/examples/2_ir/5_reduce_sum_keepdim.dot.svg b/front/py/examples/2_ir/5_reduce_sum_keepdim.dot.svg
deleted file mode 100644
index 3786069a..00000000
--- a/front/py/examples/2_ir/5_reduce_sum_keepdim.dot.svg
+++ /dev/null
@@ -1,166 +0,0 @@
[166 deleted lines omitted: generated SVG rendering of 5_reduce_sum_keepdim.dot]
diff --git a/front/py/examples/2_ir/5_reduce_sum_keepdim.py b/front/py/examples/2_ir/5_reduce_sum_keepdim.py
index 28222941..3a582b47 100644
--- a/front/py/examples/2_ir/5_reduce_sum_keepdim.py
+++ b/front/py/examples/2_ir/5_reduce_sum_keepdim.py
@@ -22,16 +22,16 @@
 t=arange(3,4,5,name='t')
 t.set_format("%.0f")
 print(t)
-s=sum(t,dims=[0,2],out="s",keepdim=True)
+s=sum(t,dim=[0,2],out="s",keepdim=True)
 s.set_format("%.0f")
 print(s)
-p=prod(t,dims=[1],out="p",keepdim=True)
+p=prod(t,dim=[1],out="p",keepdim=True)
 p.set_format("%.0f")
 print(p)
 
 t1=ones(4,5,6,name="t1")
 t1.set_format("%.0f")
 print(t1)
-t2=sum(t1,dims=[0,1],out='t2',keepdim=True)
+t2=sum(t1,dim=[0,1],out='t2',keepdim=True)
 t2.set_format("%.0f")
 print(t2)
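The change above only renames the reduce keyword (dims -> dim); the semantics are unchanged: with keepdim=True the reduced axes are kept as size 1, which is what keeps the result broadcastable against the input. A PyTorch cross-check of the shapes used in this example:

import torch
t = torch.arange(60, dtype=torch.float).reshape(3, 4, 5)
print(t.sum(dim=(0, 2), keepdim=True).shape)  # torch.Size([1, 4, 1])
print(t.sum(dim=(0, 2)).shape)                # torch.Size([4])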
diff --git a/front/py/examples/2_ir/5_reduce_sumprod.dot b/front/py/examples/2_ir/5_reduce_sumprod.dot
deleted file mode 100644
index ecaf7883..00000000
--- a/front/py/examples/2_ir/5_reduce_sumprod.dot
+++ /dev/null
@@ -1,38 +0,0 @@
[38 deleted lines omitted: generated Graphviz graph — t (3, 4, 5) -> sum([0, 2]) -> s (4,); t -> prod([1]) -> p (3, 5); t1 (4, 5, 6) -> sum([0, 1]) -> t2 (6,)]
diff --git a/front/py/examples/2_ir/5_reduce_sumprod.dot.svg b/front/py/examples/2_ir/5_reduce_sumprod.dot.svg
deleted file mode 100644
index e98414d2..00000000
--- a/front/py/examples/2_ir/5_reduce_sumprod.dot.svg
+++ /dev/null
@@ -1,166 +0,0 @@
[166 deleted lines omitted: generated SVG rendering of 5_reduce_sumprod.dot]
diff --git a/front/py/examples/2_ir/5_reduce_sumprod.py b/front/py/examples/2_ir/5_reduce_sumprod.py
index ca969655..cc4360f5 100644
--- a/front/py/examples/2_ir/5_reduce_sumprod.py
+++ b/front/py/examples/2_ir/5_reduce_sumprod.py
@@ -23,16 +23,16 @@
 t.arange_(0,1)
 t.set_format("%.0f")
 print(t)
-s=sum(t,dims=[0,2],out="s")
+s=sum(t,dim=[0,2],out="s")
 s.set_format("%.0f")
 print(s)
-p=prod(t,dims=[1],out="p")
+p=prod(t,dim=[1],out="p")
 p.set_format("%.0f")
 print(p)
 
 t1=ones(4,5,6,name="t1")
 t1.set_format("%.0f")
 print(t1)
-t2=sum(t1,dims=[0,1],out='t2')
+t2=sum(t1,dim=[0,1],out='t2')
 t2.set_format("%.0f")
 print(t2)
diff --git a/front/py/examples/3_functional/1_mean.py b/front/py/examples/3_functional/1_mean.py
index f0c9229e..12f4c0f5 100644
--- a/front/py/examples/3_functional/1_mean.py
+++ b/front/py/examples/3_functional/1_mean.py
@@ -2,7 +2,7 @@
 import torch
 
-torch_t3 = torch.arange(0, 120).reshape(4, 5, 6)
+torch_t3 = torch.arange(0, 120,dtype=torch.float).reshape(4, 5, 6)
 print(torch_t3)
 torch_t3_mean = torch.mean(torch_t3, dim=[0, 1])
 print(torch_t3_mean)
@@ -13,13 +13,8 @@
 from deepx.nn.functional import mean
 
-t3=arange(0,120,1,name="t3").reshape_(4,5,6)
+t3=arange(4,5,6,name="t3")
 print(t3)
-t3_mean=mean(t3,dim=[0,1],out='t3_mean')
+t3_mean=mean(t3,dim=(0,1))
 print(t3_mean)
-
-import os
-script_name = os.path.splitext(os.path.basename( os.path.abspath(__file__)))[0]
-str=t3.graph.to_dot()
-str.render(script_name+".dot", format='svg')
\ No newline at end of file
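The 1_mean.py comparison above relies on mean being sum divided by the number of reduced elements. A quick PyTorch check of that identity for the shapes used here:

import torch
t3 = torch.arange(0, 120, dtype=torch.float).reshape(4, 5, 6)
m = torch.mean(t3, dim=[0, 1])                           # shape (6,)
assert torch.allclose(m, t3.sum(dim=[0, 1]) / (4 * 5))   # mean = sum / count of reduced elements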
diff --git a/front/py/examples/3_functional/1_relu.dot b/front/py/examples/3_functional/1_relu.dot
deleted file mode 100644
index ab6b00a0..00000000
--- a/front/py/examples/3_functional/1_relu.dot
+++ /dev/null
@@ -1,23 +0,0 @@
[23 deleted lines omitted: generated Graphviz graph — t (10, 10) filled by uniform(-1, 1); max_scalar(t, 0) -> relu_t (10, 10)]
diff --git a/front/py/examples/3_functional/1_relu.dot.svg b/front/py/examples/3_functional/1_relu.dot.svg
deleted file mode 100644
index 741276a6..00000000
--- a/front/py/examples/3_functional/1_relu.dot.svg
+++ /dev/null
@@ -1,96 +0,0 @@
[96 deleted lines omitted: generated SVG rendering of 1_relu.dot]
diff --git a/front/py/examples/3_functional/1_relu.py b/front/py/examples/3_functional/1_relu.py
index 33294521..22b1e8cc 100644
--- a/front/py/examples/3_functional/1_relu.py
+++ b/front/py/examples/3_functional/1_relu.py
@@ -10,17 +10,18 @@
 ############-------DEEPX-------################
 from deepx import Tensor,ones
-from deepx.nn.functional import relu
+from deepx.nn.functional import relu,uniform
 
-t=Tensor(shape=(10,10))
-t.addtograph("t").uniform_(low=-1,high=1)
-print((t))
-relu_t=relu(t,out='relu_t')
+t=uniform(10,10,low=-1,high=1,name='t')
+
+print(t)
+relu_t=relu(t)
 print(relu_t)
+# When tensor.name is a str, the tensor is an intermediate variable and the op executes in place
+t2=uniform(10,10,low=-1,high=1)
+print(t2)
+relu_t2=relu(t2)
+print(relu_t2)
 
-
-import os
-script_name = os.path.splitext(os.path.basename( os.path.abspath(__file__)))[0] # script name without extension
-str=relu_t.graph.to_dot()
-str.render(script_name+".dot", format='svg')
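The deleted 1_relu graphs record relu as a single max_scalar node with constant 0, i.e. relu(x) = max(x, 0). The same identity, checked in PyTorch on the shape and init used above:

import torch
x = torch.empty(10, 10).uniform_(-1, 1)
assert torch.equal(torch.relu(x), torch.clamp(x, min=0))  # relu(x) == max(x, 0)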
diff --git a/front/py/examples/3_functional/1_rsqrt.dot b/front/py/examples/3_functional/1_rsqrt.dot
deleted file mode 100644
index 104876d8..00000000
--- a/front/py/examples/3_functional/1_rsqrt.dot
+++ /dev/null
@@ -1,26 +0,0 @@
[26 deleted lines omitted: generated Graphviz graph — t (2, 3, 4) -> sqrt -> rsqrt_t; rdiv_scalar(1, rsqrt_t) -> tensor_3 (2, 3, 4)]
diff --git a/front/py/examples/3_functional/1_rsqrt.dot.svg b/front/py/examples/3_functional/1_rsqrt.dot.svg
deleted file mode 100644
index 9b3e5b4b..00000000
--- a/front/py/examples/3_functional/1_rsqrt.dot.svg
+++ /dev/null
@@ -1,114 +0,0 @@
[114 deleted lines omitted: generated SVG rendering of 1_rsqrt.dot]
diff --git a/front/py/examples/3_functional/1_rsqrt.py b/front/py/examples/3_functional/1_rsqrt.py
index 9f937637..c0706691 100644
--- a/front/py/examples/3_functional/1_rsqrt.py
+++ b/front/py/examples/3_functional/1_rsqrt.py
@@ -9,15 +9,10 @@
 
 ############-------DEEPX-------################
-from deepx import Tensor,ones,arange
+from deepx import arange
 from deepx.nn.functional import rsqrt
 
-t=arange(0,24,1,name='t').reshape_(2,3,4)
+t=arange(2,3,4,name='t')
 print((t))
-rsqrt_t=rsqrt(t,out='rsqrt_t')
+rsqrt_t=rsqrt(t)
 print(rsqrt_t)
-
-import os
-script_name = os.path.splitext(os.path.basename( os.path.abspath(__file__)))[0]
-str=rsqrt_t.graph.to_dot()
-str.render(script_name+".dot", format='svg')
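The deleted 1_rsqrt graphs lower rsqrt to sqrt followed by rdiv_scalar(1, ...), i.e. rsqrt(x) = 1/sqrt(x). A PyTorch check of the identity (starting the range at 1 to avoid division by zero):

import torch
t = torch.arange(1, 25, dtype=torch.float).reshape(2, 3, 4)
assert torch.allclose(torch.rsqrt(t), 1.0 / torch.sqrt(t))  # rsqrt(x) == 1/sqrt(x)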
diff --git a/front/py/examples/3_functional/1_sigmoid.dot b/front/py/examples/3_functional/1_sigmoid.dot
deleted file mode 100644
index 77d98e9e..00000000
--- a/front/py/examples/3_functional/1_sigmoid.dot
+++ /dev/null
@@ -1,56 +0,0 @@
[56 deleted lines omitted: generated Graphviz graph — x (3, 4, 5) with in-place div_scalar(10.0) and add_scalar(-3.0); mul_scalar(-1) -> exp -> add_scalar(1) -> rdiv_scalar(1) -> tensor_6 (3, 4, 5)]
diff --git a/front/py/examples/3_functional/1_sigmoid.dot.svg b/front/py/examples/3_functional/1_sigmoid.dot.svg
deleted file mode 100644
index 41b2932f..00000000
--- a/front/py/examples/3_functional/1_sigmoid.dot.svg
+++ /dev/null
@@ -1,259 +0,0 @@
[259 deleted lines omitted: generated SVG rendering of 1_sigmoid.dot]
diff --git a/front/py/examples/3_functional/1_sigmoid.py b/front/py/examples/3_functional/1_sigmoid.py
index d46ea1b0..1eace7bf 100644
--- a/front/py/examples/3_functional/1_sigmoid.py
+++ b/front/py/examples/3_functional/1_sigmoid.py
@@ -12,21 +12,16 @@
 
 ############-------DEEPX-------################
 from deepx import Tensor,ones,zeros,arange
-from deepx import sigmoid,swish,swiglu
+from deepx import sigmoid
 
 # Use the same initialization
-x = arange(0,60,1,name="x").reshape_(3,4,5)
+x = arange(3,4,5,name="x")
 x.div_(10.0)
 x.sub_(3.0)
 
 print("\nDEEPX tensor:")
 print(x)
 
-out=sigmoid(x,out="out")
+out=sigmoid(x)
 print("\nDEEPX sigmoid result:")
 print(out)
-
-import os
-script_name = os.path.splitext(os.path.basename( os.path.abspath(__file__)))[0] # script name without extension
-str=out.graph.to_dot()
-str.render(script_name+".dot", format='svg')
\ No newline at end of file
diff --git a/front/py/examples/3_functional/1_softmax.py b/front/py/examples/3_functional/1_softmax.py
new file mode 100644
index 00000000..f3b78b35
--- /dev/null
+++ b/front/py/examples/3_functional/1_softmax.py
@@ -0,0 +1,27 @@
+############-------PyTorch-------################
+import torch
+
+# Create sequential data with arange
+x_torch = torch.arange(60, dtype=torch.float32).reshape(3, 4, 5) / 10.0 - 3.0
+print("PyTorch tensor:")
+print(x_torch)
+
+out_torch = torch.softmax(x_torch,-1)
+print("\nPyTorch softmax result:")
+print(out_torch)
+
+############-------DEEPX-------################
+from deepx import Tensor,ones,zeros,arange
+from deepx import softmax
+
+# Use the same initialization
+x = arange(3,4,5,name="x")
+x.div_(10.0)
+x.sub_(3.0)
+
+print("\nDEEPX tensor:")
+x.print()
+
+out=softmax(x,-1)
+print("\nDEEPX softmax result:")
+out.print()
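The new softmax example normalizes along the last dim, so every row of the result sums to 1 and matches the exp/sum identity. A PyTorch sanity check with the same input:

import torch
x = torch.arange(60, dtype=torch.float32).reshape(3, 4, 5) / 10.0 - 3.0
out = torch.softmax(x, -1)
print(out.sum(-1))  # all ones
assert torch.allclose(out, torch.exp(x) / torch.exp(x).sum(-1, keepdim=True))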
style=filled] - 126069246244752 [label="tensor_8 -(3, 4, 5)" color=skyblue fillcolor=aliceblue fontname="Sans-Serif" labeljust=l shape=box style=filled] - 126071540500176 -> 126069248056752 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246242448 -> 126069248056752 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246242544 -> 126069248056752 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069248056752 -> 126071540500176 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126071540499840 -> 126071540500176 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069248056752 -> 126069246242448 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126071540511648 -> 126069246242448 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069248056752 -> 126069246242544 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246242400 -> 126069246242544 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069248056752 -> 126069246242352 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246242592 -> 126069246242352 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246242352 -> 126069246242688 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246242688 -> 126069246243168 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246243120 -> 126069246243168 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246243168 -> 126069246243312 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246243312 -> 126069246243552 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246243552 -> 126069246243648 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246243648 -> 126069246243888 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246243840 -> 126069246243888 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246243888 -> 126069246244032 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246244224 -> 126069246244272 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246244032 -> 126069246244272 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246244272 -> 126069246244416 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069248056752 -> 126069246244656 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246244416 -> 126069246244656 [arrowsize=0.8 color=gray40 penwidth=1.2] - 126069246244656 -> 126069246244752 [arrowsize=0.8 color=gray40 penwidth=1.2] -} diff --git a/front/py/examples/3_functional/1_swish.dot.svg b/front/py/examples/3_functional/1_swish.dot.svg deleted file mode 100644 index 0bcb715a..00000000 --- a/front/py/examples/3_functional/1_swish.dot.svg +++ /dev/null @@ -1,328 +0,0 @@ - - - - - - -%3 - - - -126069248056752 - -x -(3, 4, 5) - - - -126071540500176 - -reshape - - - -126069248056752->126071540500176 - - - - - -126069246242448 - -div_scalar - - - -126069248056752->126069246242448 - - - - - -126069246242544 - -add_scalar - - - -126069248056752->126069246242544 - - - - - -126069246242352 - -mul_scalar - - - -126069248056752->126069246242352 - - - - - -126069246244656 - -mul - - - -126069248056752->126069246244656 - - - - - -126071540500176->126069248056752 - - - - - -126071540499840 - -vector_1 -(3, 4, 5) - - - -126071540499840->126071540500176 - - - - - -126069246242448->126069248056752 - - - - - -126071540511648 - -var_1 -10.0 - - - -126071540511648->126069246242448 - - - - - -126069246242544->126069248056752 - - - - - -126069246242400 - -var_2 --3.0 - - - -126069246242400->126069246242544 - - - - - -126069246242688 - -tensor_2 -(3, 4, 5) - - - -126069246242352->126069246242688 - - - - - -126069246242592 - -var_3 -1.0 - - - -126069246242592->126069246242352 - - - - - -126069246243168 - -mul_scalar - - - -126069246242688->126069246243168 - - - - - 
diff --git a/front/py/examples/3_functional/1_swish.py b/front/py/examples/3_functional/1_swish.py
index f894609e..d2ce1082 100644
--- a/front/py/examples/3_functional/1_swish.py
+++ b/front/py/examples/3_functional/1_swish.py
@@ -15,18 +15,13 @@ from deepx import arange,swish
 
 # Use the same initialization scheme
-x = arange(0,60,1,name="x").reshape_(3,4,5)
+x = arange(3,4,5,name="x")
 x.div_(10.0)
 x.sub_(3.0)
 
 print("\nDEEPX tensor:")
 print(x)
 
-out=swish(x,out="out")
+out=swish(x)
 print("\nDEEPX swish result:")
 print(out)
 
-
-import os
-script_name = os.path.splitext(os.path.basename( os.path.abspath(__file__)))[0]  # get the script name without extension
-str=out.graph.to_dot()
-str.render(script_name+".dot", format='svg')
\ No newline at end of file
diff --git a/scheduler/autograd/__init__.py b/scheduler/autograd/__init__.py
new file mode 100644
index 00000000..85011f2b
--- /dev/null
+++ b/scheduler/autograd/__init__.py
@@ -0,0 +1,11 @@
+
+from .function import Function,Context
+__all__ = [
+    'Graph',
+    'Node',
+    'NodeType',
+    'DataNode',
+    'OpNode',
+    'Function',
+    'Context',
+    ]
\ No newline at end of file
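Note that the new `scheduler/autograd/__init__.py` lists `Graph`, `Node`, `NodeType`, `DataNode`, and `OpNode` in `__all__` without importing them, so `from scheduler.autograd import *` would fail. Given the module moves below, the missing imports would presumably look like this (a sketch only; it assumes each moved module defines the class of the same name and that `graph/` is a package):

```python
# Hypothetical completion of scheduler/autograd/__init__.py
from .function import Function, Context
from .graph.graph import Graph
from .graph.node import Node
from .graph.nodetype import NodeType
from .graph._datanode import DataNode
from .graph._opnode import OpNode

__all__ = ['Graph', 'Node', 'NodeType', 'DataNode', 'OpNode', 'Function', 'Context']
```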
diff --git a/front/py/deepx/autograd/function.py b/scheduler/autograd/function.py
similarity index 100%
rename from front/py/deepx/autograd/function.py
rename to scheduler/autograd/function.py
diff --git a/front/py/deepx/autograd/_controlflownode.py b/scheduler/autograd/graph/_controlflownode.py
similarity index 100%
rename from front/py/deepx/autograd/_controlflownode.py
rename to scheduler/autograd/graph/_controlflownode.py
diff --git a/front/py/deepx/autograd/_datanode.py b/scheduler/autograd/graph/_datanode.py
similarity index 100%
rename from front/py/deepx/autograd/_datanode.py
rename to scheduler/autograd/graph/_datanode.py
diff --git a/front/py/deepx/autograd/_opnode.py b/scheduler/autograd/graph/_opnode.py
similarity index 100%
rename from front/py/deepx/autograd/_opnode.py
rename to scheduler/autograd/graph/_opnode.py
diff --git a/front/py/deepx/autograd/graph.py b/scheduler/autograd/graph/graph.py
similarity index 100%
rename from front/py/deepx/autograd/graph.py
rename to scheduler/autograd/graph/graph.py
diff --git a/front/py/deepx/autograd/graph_viz.py b/scheduler/autograd/graph/graph_viz.py
similarity index 100%
rename from front/py/deepx/autograd/graph_viz.py
rename to scheduler/autograd/graph/graph_viz.py
diff --git a/front/py/deepx/autograd/node.py b/scheduler/autograd/graph/node.py
similarity index 100%
rename from front/py/deepx/autograd/node.py
rename to scheduler/autograd/graph/node.py
diff --git a/front/py/deepx/autograd/nodetype.py b/scheduler/autograd/graph/nodetype.py
similarity index 100%
rename from front/py/deepx/autograd/nodetype.py
rename to scheduler/autograd/graph/nodetype.py
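After these renames the autograd package lives under `scheduler/` rather than `front/py/deepx/`, so downstream imports change accordingly (hypothetical usage, assuming `scheduler` ends up on the import path):

```python
# Previously these lived under deepx.autograd; after the move:
from scheduler.autograd import Function, Context
```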