From b868731ad8974a0fb29975f4162335bb34f8c1a3 Mon Sep 17 00:00:00 2001
From: Michael Reed <18372368+chakravala@users.noreply.github.com>
Date: Tue, 12 Jan 2021 11:25:51 -0500
Subject: [PATCH] collection of various fixes and improvements

---
 Project.toml        |   2 +-
 docs/make.jl        |   2 +-
 src/Grassmann.jl    |  18 +++++++-
 src/algebra.jl      | 100 +++++++++++++++++++++++++++++---------------
 src/multivectors.jl |  36 ++++++++++++----
 src/products.jl     |  26 +++++++-----
 6 files changed, 127 insertions(+), 57 deletions(-)

diff --git a/Project.toml b/Project.toml
index d4f12e7..2421e23 100644
--- a/Project.toml
+++ b/Project.toml
@@ -17,7 +17,7 @@ Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 julia = "1"
 Leibniz = "0.1"
 DirectSum = "0.7"
-AbstractTensors = "0.6"
+AbstractTensors = "0.6.3"
 ComputedFieldTypes = "0.1"
 Requires = "1"
 
diff --git a/docs/make.jl b/docs/make.jl
index edf2195..c49428c 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -1,4 +1,4 @@
-# This file is part of Grassmann.jl. It is licensed under the GPL license
+# This file is part of Grassmann.jl. It is licensed under the AGPL license
 # Grassmann Copyright (C) 2019 Michael Reed
 
 using Documenter, AbstractTensors, DirectSum, Leibniz, Grassmann, StaticArrays
diff --git a/src/Grassmann.jl b/src/Grassmann.jl
index 957f3a5..50d2441 100644
--- a/src/Grassmann.jl
+++ b/src/Grassmann.jl
@@ -11,7 +11,8 @@ export ⊕, ℝ, @V_str, @S_str, @D_str, Manifold, SubManifold, Signature, Diago
 export @basis, @basis_str, @dualbasis, @dualbasis_str, @mixedbasis, @mixedbasis_str, Λ
 export ℝ0, ℝ1, ℝ2, ℝ3, ℝ4, ℝ5, ℝ6, ℝ7, ℝ8, ℝ9, mdims, tangent
 
-import Base: @pure, print, show, getindex, setindex!, promote_rule, ==, convert, adjoint
+import Base: @pure, ==, isapprox
+import Base: print, show, getindex, setindex!, promote_rule, convert, adjoint
 import DirectSum: V0, ⊕, generate, basis, getalgebra, getbasis, dual
 import Leibniz: hasinf, hasorigin, dyadmode, value, pre, vsn, metric, mdims
 import Leibniz: Bits, bit2int, indexbits, indices, diffvars, diffmask
@@ -443,8 +444,11 @@ function __init__()
         Leibniz.extend_field(Reduce.RExpr)
         parsym = (parsym...,Reduce.RExpr)
         for T ∈ (:RExpr,:Symbol,:Expr)
+            @eval *(a::Reduce.$T,b::Chain{V,G,Any}) where {V,G} = (a*one(V))*b
+            @eval *(a::Chain{V,G,Any},b::Reduce.$T) where {V,G} = a*(b*one(V))
             generate_inverses(:(Reduce.Algebra),T)
             generate_derivation(:(Reduce.Algebra),T,:df,:RExpr)
+            #generate_algebra(:(Reduce.Algebra),T,:df,:RExpr)
         end
     end
     @require SymPy="24249f21-da20-56a4-8eb1-6a02cf4ae2e6" begin
@@ -510,9 +514,19 @@ function __init__()
         end
     end
     @require StaticArrays="90137ffa-7385-5640-81b9-e52037218182" begin
-        StaticArrays.SMatrix(m::Chain{V,1,<:Chain{W,1}} where {V,W}) = hcat(value.(value(m))...)
+        StaticArrays.SMatrix(m::Chain{V,1,<:Chain{W,1}}) where {V,W} = StaticArrays.SMatrix{mdims(W),mdims(V)}(vcat(value.(value(m))...))
+        DyadicChain(m::StaticArrays.SMatrix{N,N}) where N = Chain{SubManifold(N),1}(m)
         Chain{V,1}(m::StaticArrays.SMatrix{N,N}) where {V,N} = Chain{V,1}(Chain{V,1}.(getindex.(Ref(m),:,StaticArrays.SVector{N}(1:N))))
         Chain{V,1,Chain{W,1}}(m::StaticArrays.SMatrix{M,N}) where {V,W,M,N} = Chain{V,1}(Chain{W,1}.(getindex.(Ref(m),:,StaticArrays.SVector{N}(1:N))))
+        Base.exp(A::Chain{V,1,<:Chain{V,1}}) where V = Chain{V,1}(exp(StaticArrays.SMatrix(A)))
+        Base.log(A::Chain{V,1,<:Chain{V,1}}) where V = Chain{V,1}(log(StaticArrays.SMatrix(A)))
+        LinearAlgebra.eigvals(A::Chain{V,1,<:Chain{V,1}}) where V = Chain(Values{mdims(V)}(eigvals(StaticArrays.SMatrix(A))))
+        LinearAlgebra.eigvecs(A::Chain{V,1,<:Chain{V,1}}) where V = Chain(Chain.(Values{mdims(A)}.(getindex.(Ref(eigvecs(StaticArrays.SMatrix(A))),:,list(1,mdims(A))))))
+        function LinearAlgebra.eigen(A::Chain{V,1,<:Chain{V,1}}) where V
+            E,N = eigen(StaticArrays.SMatrix(A)),mdims(V)
+            e = Chain(Chain.(Values{N}.(getindex.(Ref(E.vectors),:,list(1,N)))))
+            Proj(e,Chain(Values{N}(E.values)))
+        end
     end
     @require GeometryBasics = "5c1252a2-5f33-56bf-86c9-59e7332b4326" begin
         GeometryBasics.Point(t::Values) = GeometryBasics.Point(Tuple(t.v))
diff --git a/src/algebra.jl b/src/algebra.jl
index 792bcbf..43329c4 100644
--- a/src/algebra.jl
+++ b/src/algebra.jl
@@ -18,7 +18,7 @@ Geometric algebraic product: ω⊖η = (-1)ᵖdet(ω∩η)⊗(Λ(ω⊖η)∪L(ω
 @pure *(a::SubManifold{V},b::SubManifold{V}) where V = mul(a,b)
 *(a::X,b::Y,c::Z...) where {X<:TensorAlgebra,Y<:TensorAlgebra,Z<:TensorAlgebra} = *(a*b,c...)
 
-function mul(a::SubManifold{V},b::SubManifold{V},der=derive_mul(V,bits(a),bits(b),1,true)) where V
+@pure function mul(a::SubManifold{V},b::SubManifold{V},der=derive_mul(V,bits(a),bits(b),1,true)) where V
     ba,bb = bits(a),bits(b)
     (diffcheck(V,ba,bb) || iszero(der)) && (return g_zero(V))
     A,B,Q,Z = symmetricmask(V,bits(a),bits(b))
@@ -30,7 +30,7 @@
 end
 
 function *(a::Simplex{V},b::SubManifold{V}) where V
-    v = derive_mul(V,bits(basis(a)),bits(b),a.v,true)
+    v = derive_mul(V,UInt(basis(a)),UInt(b),a.v,true)
     bas = mul(basis(a),b,v)
     order(a.v)+order(bas)>diffmode(V) ? zero(V) : Simplex{V}(v,bas)
 end
@@ -180,7 +180,7 @@ function contraction(a::X,b::Y) where {X<:TensorTerm{V},Y<:TensorTerm{V}} where
     ba,bb = bits(basis(a)),bits(basis(b))
     g,C,t,Z = interior(V,ba,bb)
     !t && (return g_zero(V))
-    v = derive_mul(V,ba,bb,value(a),value(b),AbstractTensors.∏)
+    v = derive_mul(V,ba,bb,value(a),value(b),AbstractTensors.dot)
     if istangent(V) && !iszero(Z)
         _,_,Q,_ = symmetricmask(V,bits(basis(a)),bits(basis(b)))
         v = !(typeof(v)<:TensorTerm) ? Simplex{V}(v,getbasis(V,Z)) : Simplex{V}(v,getbasis(loworder(V),Z))
@@ -205,32 +205,42 @@ outer(a::Leibniz.Derivation,b::Chain{V,1}) where V= outer(V(a),b)
 outer(a::Chain{W},b::Leibniz.Derivation{T,1}) where {W,T} = outer(a,W(b))
 outer(a::Chain{W},b::Chain{V,1}) where {W,V} = Chain{V,1}(a.*value(b))
 
-contraction(a::Proj,b::TensorGraded) = a.v⊗(a.v⋅b)
+contraction(a::Proj,b::TensorGraded) = a.v⊗(a.λ*(a.v⋅b))
 contraction(a::Dyadic,b::TensorGraded) = a.x⊗(a.y⋅b)
 contraction(a::TensorGraded,b::Dyadic) = (a⋅b.x)⊗b.y
-contraction(a::TensorGraded,b::Proj) = (a⋅b.v)⊗b.v
+contraction(a::TensorGraded,b::Proj) = ((a⋅b.v)*b.λ)⊗b.v
 contraction(a::Dyadic,b::Dyadic) = (a.x*(a.y⋅b.x))⊗b.y
-contraction(a::Dyadic,b::Proj) = (a.x*(a.y⋅b.v))⊗b.v
-contraction(a::Proj,b::Dyadic) = (a.v*(a.v⋅b.x))⊗b.y
-contraction(a::Proj,b::Proj) = (a.v*(a.v⋅b.v))⊗b.v
+contraction(a::Dyadic,b::Proj) = (a.x*((a.y⋅b.v)*b.λ))⊗b.v
+contraction(a::Proj,b::Dyadic) = (a.v*(a.λ*(a.v⋅b.x)))⊗b.y
+contraction(a::Proj,b::Proj) = (a.v*((a.λ*b.λ)*(a.v⋅b.v)))⊗b.v
 contraction(a::Dyadic{V},b::TensorGraded{V,0}) where V = Dyadic{V}(a.x*b,a.y)
-contraction(a::Proj{V},b::TensorGraded{V,0}) where V = valuetype(b)<:Complex ? Proj{V}(a.v*sqrt(b)) : Dyadic{V}(a.v*b,a.v)
+contraction(a::Proj{V},b::TensorTerm{V,0}) where V = Proj{V}(a.v,a.λ*value(b))
+contraction(a::Proj{V},b::Chain{V,0}) where V = Proj{V}(a.v,a.λ*b[1])
 contraction(a::Proj{V,<:Chain{V,1,<:TensorNested}},b::TensorGraded{V,0}) where V = Proj(Chain{V,1}(contraction.(value(a.v),b)))
-contraction(a::Chain{W,1,<:Proj{V}},b::Chain{V,1}) where {W,V} = Chain{W,1}(value(a).⋅b)
+#contraction(a::Chain{W,1,<:Proj{V}},b::Chain{V,1}) where {W,V} = Chain{W,1}(value(a).⋅b)
 contraction(a::Chain{W,1,<:Dyadic{V}},b::Chain{V,1}) where {W,V} = Chain{W,1}(value(a).⋅Ref(b))
 contraction(a::Proj{W,<:Chain{W,1,<:TensorNested{V}}},b::Chain{V,1}) where {W,V} = a.v:b
 contraction(a::Chain{W,G},b::Chain{V,1,<:Chain}) where {W,G,V} = Chain{V,1}(column(Ref(a).⋅value(b)))
 contraction(a::Chain{W,G,<:Chain},b::Chain{V,1,<:Chain}) where {W,G,V} = Chain{V,1}(Ref(a).⋅value(b))
 Base.:(:)(a::Chain{V,1,<:Chain},b::Chain{V,1,<:Chain}) where V = sum(value(a).⋅value(b))
 Base.:(:)(a::Chain{W,1,<:Dyadic{V}},b::Chain{V,1}) where {W,V} = sum(value(a).⋅Ref(b))
-Base.:(:)(a::Chain{W,1,<:Proj{V}},b::Chain{V,1}) where {W,V} = sum(broadcast(⋅,value(a),Ref(b)))
+#Base.:(:)(a::Chain{W,1,<:Proj{V}},b::Chain{V,1}) where {W,V} = sum(broadcast(⋅,value(a),Ref(b)))
+
+contraction(a::Dyadic{V,<:Chain{V,1,<:Chain},<:Chain{V,1,<:Chain}} where V,b::TensorGraded) = sum(value(a.x).⊗(value(a.y).⋅b))
+contraction(a::Dyadic{V,<:Chain{V,1,<:Chain}} where V,b::TensorGraded) = sum(value(a.x).⊗(a.y.⋅b))
+contraction(a::Dyadic{V,T,<:Chain{V,1,<:Chain}} where {V,T},b::TensorGraded) = sum(a.x.⊗(value(a.y).⋅b))
+contraction(a::Proj{V,<:Chain{W,1,<:Chain} where W} where V,b::TensorGraded) = sum(value(a.v).⊗(value(a.λ).*value(a.v).⋅b))
+contraction(a::Proj{V,<:Chain{W,1,<:Chain{V,1}} where W},b::TensorGraded{V,1}) where V = sum(value(a.v).⊗(value(a.λ).*column(value(a.v).⋅b)))
 
-+(a::Proj{V}...) where V = Proj(Chain(a...))
++(a::Proj{V}...) where V = Proj{V}(Chain(Values(eigvec.(a)...)),Chain(Values(eigval.(a)...)))
 +(a::Dyadic{V}...) where V = Proj(Chain(a...))
 +(a::TensorNested{V}...) where V = Proj(Chain(Dyadic.(a)...))
 +(a::Proj{W,<:Chain{W,1,<:TensorNested{V}}} where W,b::TensorNested{V}) where V = +(value(a.v)...,b)
 +(a::TensorNested{V},b::Proj{W,<:Chain{W,1,<:TensorNested{V}}} where W) where V = +(a,value(b.v)...)
 +(a::Proj{M,<:Chain{M,1,<:TensorNested{V}}} where M,b::Proj{W,<:Chain{W,1,<:TensorNested{V}}} where W) where V = +(value(a.v)...,value(b.v)...)
++(a::Proj{M,<:Chain{M,1,<:Chain{V}}} where M,b::Proj{W,<:Chain{W,1,<:Chain{V}}} where W) where V = Chain(Values(value(a.v)...,value(b.v)...))
+#+(a::Proj{W,<:Chain{W,1,<:TensorNested{V}}} where W,b::TensorNested{V}) where V = +(b,Proj.(value(a.v),value(a.λ))...)
+#+(a::TensorNested{V},b::Proj{W,<:Chain{W,1,<:TensorNested{V}}} where W) where V = +(a,value(b.v)...)
 
 -(a::TensorNested) where V = -1a
 -(a::TensorNested,b::TensorNested) where V = a+(-b)
@@ -240,7 +250,13 @@ Base.:(:)(a::Chain{W,1,<:Proj{V}},b::Chain{V,1}) where {W,V} = sum(broadcast(⋅
 @inline *(a::TensorNested{V},b::TensorGraded{V,0}) where V = a⋅b
 @inline *(a::TensorGraded{V,0},b::Proj{V,<:Chain{V,1,<:TensorNested}}) where V = Proj{V}(a*b.v)
 @inline *(a::Proj{V,<:Chain{V,1,<:TensorNested}},b::TensorGraded{V,0}) where V = Proj{V}(a.v*b)
-Base.:∘(a::A,b::B) where {A<:TensorAlgebra,B<:TensorAlgebra} = a⋅b
+
+@inline *(a::DyadicChain,b::DyadicChain) where V = a⋅b
+@inline *(a::DyadicChain,b::Chain) where V = a⋅b
+@inline *(a::DyadicChain,b::TensorTerm) where V = a⋅b
+@inline *(a::TensorGraded,b::DyadicChain) where V = a⋅b
+@inline *(a::DyadicChain,b::TensorNested) where V = a⋅b
+@inline *(a::TensorNested,b::DyadicChain) where V = a⋅b
 
 # dyadic identity element
 
@@ -320,9 +336,9 @@ export ⊘
 for X ∈ TAG, Y ∈ TAG
     @eval ⊘(x::X,y::Y) where {X<:$X{V},Y<:$Y{V}} where V = diffvars(V)≠0 ? conj(y)*x*y : y\x*involute(y)
 end
-for Z ∈ TAG
+#=for Z ∈ TAG
     @eval ⊘(x::Chain{V,G},y::T) where {V,G,T<:$Z} = diffvars(V)≠0 ? conj(y)*x*y : ((~y)*x*involute(y))(Val(G))/abs2(y)
-end
+end=#
 
 @doc """
@@ -353,9 +369,16 @@ export ⟂, ∥
 
 function Base.:^(v::T,i::S) where {T<:TensorTerm,S<:Integer}
     i == 0 && (return getbasis(Manifold(v),0))
-    out = basis(v)
-    for k ∈ 1:(i-1)%4
-        out *= basis(v)
+    i == 1 && (return v)
+    j,bas = (i-1)%4,basis(v)
+    out = if j == 0
+        bas
+    elseif j == 1
+        bas*bas
+    elseif j == 2
+        bas*bas*bas
+    elseif j == 3
+        bas*bas*bas*bas
     end
     return typeof(v)<:SubManifold ? out : out*AbstractTensors.:^(value(v),i)
 end
@@ -363,6 +386,11 @@ end
 function Base.:^(v::T,i::S) where {T<:TensorAlgebra,S<:Integer}
     V = Manifold(v)
     isone(i) && (return v)
+    if T<:Chain && diffvars(v)==0
+        sq,d = contraction2(~v,v),i÷2
+        val = isone(d) ? sq : sq^d
+        return iszero(i%2) ? val : val*v
+    end
     out = one(V)
     if i < 8 # optimal choice ?
         for k ∈ 1:i
@@ -460,8 +488,9 @@ subvec(a,b,s) = isfixed(a,b) ? (s ? (:($Sym.:-),:($Sym.:∑),:svec) : (:($Sym.:
 subvec(b) = isfixed(valuetype(b)) ? (:($Sym.:-),:svec,:($Sym.:∏)) : (:-,:mvec,:*)
 conjvec(b) = isfixed(valuetype(b)) ? (:($Sym.conj),:svec) : (:conj,:mvec)
 
+mulvec(a,b,c) = c≠:contraction ? mulvec(a,b) : isfixed(a,b) ? (:($Sym.dot),:svec) : (:dot,:mvec)
 mulvec(a,b) = isfixed(a,b) ? (:($Sym.:∏),:svec) : (:*,:mvec)
-isfixed(a,b) = isfixed(valuetype(a))&&isfixed(valuetype(b))
+isfixed(a,b) = isfixed(valuetype(a))||isfixed(valuetype(b))
 isfixed(::Type{Rational{BigInt}}) = true
 isfixed(::Type{BigFloat}) = true
 isfixed(::Type{BigInt}) = true
@@ -514,7 +543,8 @@ adder(a,b,op=:+) = adder(typeof(a),typeof(b),op)
 @noinline function adder(a::Type{<:TensorTerm{V,G}},b::Type{<:Chain{V,G,T}},op,swap=false) where {V,G,T}
     left,right,VEC = addvec(a,b,swap,op)
     if binomial(mdims(V),G)<(1<,:(G+L),:exter),(:∨,:<,:(G+L-mdims(V)),:meet
        $(insert_expr((:N,:t,:μ),VEC)...)
        ia = indexbasis(mdims(w),G)
        ib = indexbasis(mdims(W),L)
-       out = zeros(μ $VEC(N,t) : $VEC(N,$$GL,t))
-       CA,CB = isdual(L),isdual(R)
-       for i ∈ 1:binomial(mdims(w),L)
+       out = zeros(μ ? $VEC(N,t) : $VEC(N,$$GL,t))
+       CA,CB = isdual(w),isdual(W)
+       for i ∈ 1:binomial(mdims(w),G)
            @inbounds v,iai = a[i],ia[i]
            x = CA ? dual(V,iai) : iai
-           v≠0 && for j ∈ 1:binomial(mdims(W),G)
+           v≠0 && for j ∈ 1:binomial(mdims(W),L)
                X = @inbounds CB ? dual(V,ib[j]) : ib[j]
                if μ
                    if @inbounds $$grassaddmulti!(V,out,x,X,derive_mul(V,x,X,v,b[j],$MUL))
@@ -885,9 +917,11 @@ for (op,product!) ∈ ((:∧,:exteraddmulti!),(:*,:geomaddmulti!),
     prop = op≠:* ? Symbol(:product_,op) : :product
     @eval $prop(a,b,swap=false) = $prop(typeof(a),typeof(b),swap)
     @eval @noinline function $prop(a::Type{S},b::Type{<:MultiVector{V,T}},swap=false) where S<:TensorGraded{V,G} where {V,G,T}
-        MUL,VEC = mulvec(a,b)
+        MUL,VEC = mulvec(a,b,$(QuoteNode(op)))
         if mdims(V)T,mdims(V))))
 ⊗(a::Type{<:Chain{V}},b::Type{<:Chain{W}}) where {V,W} = Chain{V,1,Chain{W,1,Float64,mdims(W)},mdims(V)}
@@ -336,26 +348,32 @@ getindex(m::MultiVector{V},i::SubManifold{V}) where V = m[basisindex(mdims(V),UI
 
 export Projector, Dyadic, Proj
 
-struct Projector{V,T} <: TensorNested{V}
+struct Projector{V,T,Λ} <: TensorNested{V}
     v::T
-    Projector{V,T}(v::T) where {T<:Manifold{V}} where V = new{V,T}(v)
-    Projector{V}(v::T) where {T<:Manifold{V}} where V = new{V,T}(v)
+    λ::Λ
+    Projector{V,T,Λ}(v::T,λ::Λ=1) where {T<:Manifold{V},Λ} where V = new{V,T,Λ}(v,λ)
+    Projector{V,T}(v::T,λ::Λ=1) where {T<:Manifold{V},Λ} where V = new{V,T,Λ}(v,λ)
+    Projector{V}(v::T,λ::Λ=1) where {T<:Manifold{V},Λ} where V = new{V,T,Λ}(v,λ)
 end
 
 const Proj = Projector
 
-Proj(v::T) where T<:TensorGraded{V} where V = Proj{V}(v/abs(v))
-Proj(v::Chain{V,1,<:TensorNested}) where V = Proj{V}(v)
+Proj(v::T,λ=1) where T<:TensorGraded{V} where V = Proj{V}(v/abs(v),λ)
+Proj(v::Chain{W,1,<:Chain{V}},λ=1) where {V,W} = Proj{V}(Chain(value(v)./abs.(value(v))),λ)
+#Proj(v::Chain{V,1,<:TensorNested},λ=1) where V = Proj{V}(v,λ)
 
 (P::Projector)(x) = contraction(P,x)
 
 getindex(P::Proj,i::Int,j::Int) = P.v[i]*P.v[j]
-getindex(P::Proj{V,<:Chain{V,1,<:TensorNested}} where V,i::Int,j::Int) = sum(getindex.(value(P.v),i,j))
+getindex(P::Proj{V,<:Chain{W,1,<:Chain}} where {V,W},i::Int,j::Int) = sum(column(P.v,i).*column(P.v,j))
+#getindex(P::Proj{V,<:Chain{V,1,<:TensorNested}} where V,i::Int,j::Int) = sum(getindex.(value(P.v),i,j))
 
-show(io::IO,P::Projector) = print(io,"Proj(",P.v,")")
+show(io::IO,P::Proj{V,T,Λ}) where {V,T,Λ<:Real} = print(io,isone(P.λ) ? "" : P.λ,"Proj(",P.v,")")
+show(io::IO,P::Proj{V,T,Λ}) where {V,T,Λ} = print(io,"(",P.λ,")Proj(",P.v,")")
 
-DyadicChain{V,T}(P::Proj{V,T}) where {V,T} = outer(P.v,P.v)
-DyadicChain{V,T}(P::Proj{V,T}) where {V,T<:Chain{V,1,<:TensorNested}} = sum(DyadicChain.(value(P.v)))
+DyadicChain{V,T}(P::Proj{V,T}) where {V,T} = outer(P.v*P.λ,P.v)
+DyadicChain{V,T}(P::Proj{V,T}) where {V,T<:Chain{V,1,<:Chain}} = sum(outer.(value(P.v).*value(P.λ),P.v))
+#DyadicChain{V,T}(P::Proj{V,T}) where {V,T<:Chain{V,1,<:TensorNested}} = sum(DyadicChain.(value(P.v)))
 DyadicChain{V}(P::Proj{V,T}) where {V,T} = DyadicChain{V,T}(P)
 DyadicChain(P::Proj{V,T}) where {V,T} = DyadicChain{V,T}(P)
 
diff --git a/src/products.jl b/src/products.jl
index bdae4a9..f259462 100644
--- a/src/products.jl
+++ b/src/products.jl
@@ -2,8 +2,9 @@
 # This file is part of Grassmann.jl. It is licensed under the AGPL license
 # Grassmann Copyright (C) 2019 Michael Reed
 
-@pure tvec(N,G,t::Symbol=:Any) = :(Values{$(binomial(N,G)),$t})
-@pure tvec(N,t::Symbol=:Any) = :(Values{$(1<