Commit b868731

collection of various fixes and improvements

chakravala committed Jan 12, 2021
1 parent 473b348 commit b868731
Showing 6 changed files with 127 additions and 57 deletions.
2 changes: 1 addition & 1 deletion Project.toml
@@ -17,7 +17,7 @@ Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
julia = "1"
Leibniz = "0.1"
DirectSum = "0.7"
AbstractTensors = "0.6"
AbstractTensors = "0.6.3"
ComputedFieldTypes = "0.1"
Requires = "1"

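The compat tightening pins AbstractTensors to at least 0.6.3 while still excluding the 0.7 series. A minimal sketch of what the caret bound accepts, assuming Pkg's internal `semver_spec` helper behaves as documented:

```julia
using Pkg
spec = Pkg.Types.semver_spec("0.6.3")  # caret semantics: [0.6.3, 0.7.0)
v"0.6.3" in spec   # true
v"0.6.2" in spec   # false: older AbstractTensors releases are now excluded
v"0.7.0" in spec   # false: the next breaking series is still blocked
```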
2 changes: 1 addition & 1 deletion docs/make.jl
@@ -1,4 +1,4 @@
# This file is part of Grassmann.jl. It is licensed under the GPL license
# This file is part of Grassmann.jl. It is licensed under the AGPL license
# Grassmann Copyright (C) 2019 Michael Reed

using Documenter, AbstractTensors, DirectSum, Leibniz, Grassmann, StaticArrays
18 changes: 16 additions & 2 deletions src/Grassmann.jl
@@ -11,7 +11,8 @@ export ⊕, ℝ, @V_str, @S_str, @D_str, Manifold, SubManifold, Signature, Diago
export @basis, @basis_str, @dualbasis, @dualbasis_str, @mixedbasis, @mixedbasis_str, Λ
export ℝ0, ℝ1, ℝ2, ℝ3, ℝ4, ℝ5, ℝ6, ℝ7, ℝ8, ℝ9, mdims, tangent

import Base: @pure, print, show, getindex, setindex!, promote_rule, ==, convert, adjoint
import Base: @pure, ==, isapprox
import Base: print, show, getindex, setindex!, promote_rule, convert, adjoint
import DirectSum: V0, ⊕, generate, basis, getalgebra, getbasis, dual
import Leibniz: hasinf, hasorigin, dyadmode, value, pre, vsn, metric, mdims
import Leibniz: Bits, bit2int, indexbits, indices, diffvars, diffmask
@@ -443,8 +444,11 @@ function __init__()
Leibniz.extend_field(Reduce.RExpr)
parsym = (parsym...,Reduce.RExpr)
for T ∈ (:RExpr,:Symbol,:Expr)
@eval *(a::Reduce.$T,b::Chain{V,G,Any}) where {V,G} = (a*one(V))*b
@eval *(a::Chain{V,G,Any},b::Reduce.$T) where {V,G} = a*(b*one(V))
generate_inverses(:(Reduce.Algebra),T)
generate_derivation(:(Reduce.Algebra),T,:df,:RExpr)
#generate_algebra(:(Reduce.Algebra),T,:df,:RExpr)
end
end
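The two added `@eval *` methods let a bare Reduce scalar multiply a Chain with untyped coefficients by first promoting it through `one(V)`. A hedged sketch of the intent (untested here; assumes Reduce.jl is installed so the `@require` hook fires, and that Symbol coefficients build a `Chain{V,G,Any}` as in Grassmann's symbolic mode):

```julia
using Reduce, Grassmann  # loading Reduce activates the @require block above
basis"++"                # defines V and the blades v1, v2, v12
c = :x*v1 + :y*v2        # Chain with symbolic (Any) coefficients
Reduce.RExpr("a")*c      # new method: promotes the scalar via (a*one(V))*c
c*Reduce.RExpr("a")      # mirrored method: c*(b*one(V))
```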
@require SymPy="24249f21-da20-56a4-8eb1-6a02cf4ae2e6" begin
@@ -510,9 +514,19 @@ function __init__()
end
end
@require StaticArrays="90137ffa-7385-5640-81b9-e52037218182" begin
StaticArrays.SMatrix(m::Chain{V,1,<:Chain{W,1}} where {V,W}) = hcat(value.(value(m))...)
StaticArrays.SMatrix(m::Chain{V,1,<:Chain{W,1}}) where {V,W} = StaticArrays.SMatrix{mdims(W),mdims(V)}(vcat(value.(value(m))...))
DyadicChain(m::StaticArrays.SMatrix{N,N}) where N = Chain{SubManifold(N),1}(m)
Chain{V,1}(m::StaticArrays.SMatrix{N,N}) where {V,N} = Chain{V,1}(Chain{V,1}.(getindex.(Ref(m),:,StaticArrays.SVector{N}(1:N))))
Chain{V,1,Chain{W,1}}(m::StaticArrays.SMatrix{M,N}) where {V,W,M,N} = Chain{V,1}(Chain{W,1}.(getindex.(Ref(m),:,StaticArrays.SVector{N}(1:N))))
Base.exp(A::Chain{V,1,<:Chain{V,1}}) where V = Chain{V,1}(exp(StaticArrays.SMatrix(A)))
Base.log(A::Chain{V,1,<:Chain{V,1}}) where V = Chain{V,1}(log(StaticArrays.SMatrix(A)))
LinearAlgebra.eigvals(A::Chain{V,1,<:Chain{V,1}}) where V = Chain(Values{mdims(V)}(eigvals(StaticArrays.SMatrix(A))))
LinearAlgebra.eigvecs(A::Chain{V,1,<:Chain{V,1}}) where V = Chain(Chain.(Values{mdims(A)}.(getindex.(Ref(eigvecs(StaticArrays.SMatrix(A))),:,list(1,mdims(A))))))
function LinearAlgebra.eigen(A::Chain{V,1,<:Chain{V,1}}) where V
E,N = eigen(StaticArrays.SMatrix(A)),mdims(V)
e = Chain(Chain.(Values{N}.(getindex.(Ref(E.vectors),:,list(1,N)))))
Proj(e,Chain(Values{N}(E.values)))
end
end
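Together these definitions make square static matrices interchangeable with dyadic Chains, routing matrix functions and eigendecompositions through StaticArrays. A sketch of the intended usage, assuming the hook above is active:

```julia
using StaticArrays, LinearAlgebra, Grassmann
basis"++"
A = Chain{V,1}(SMatrix{2,2}(2.0,1.0,1.0,3.0))  # SMatrix → Chain of Chains
M = StaticArrays.SMatrix(A)                    # round-trip back to an SMatrix
exp(A)                                         # matrix exponential via the SMatrix bridge
E = eigen(A)                                   # a Proj of eigenvectors weighted by eigenvalues
```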
@require GeometryBasics = "5c1252a2-5f33-56bf-86c9-59e7332b4326" begin
GeometryBasics.Point(t::Values) = GeometryBasics.Point(Tuple(t.v))
100 changes: 67 additions & 33 deletions src/algebra.jl
@@ -18,7 +18,7 @@ Geometric algebraic product: ω⊖η = (-1)ᵖdet(ω∩η)⊗(Λ(ω⊖η)∪L(ω
@pure *(a::SubManifold{V},b::SubManifold{V}) where V = mul(a,b)
*(a::X,b::Y,c::Z...) where {X<:TensorAlgebra,Y<:TensorAlgebra,Z<:TensorAlgebra} = *(a*b,c...)

function mul(a::SubManifold{V},b::SubManifold{V},der=derive_mul(V,bits(a),bits(b),1,true)) where V
@pure function mul(a::SubManifold{V},b::SubManifold{V},der=derive_mul(V,bits(a),bits(b),1,true)) where V
ba,bb = bits(a),bits(b)
(diffcheck(V,ba,bb) || iszero(der)) && (return g_zero(V))
A,B,Q,Z = symmetricmask(V,bits(a),bits(b))
@@ -30,7 +30,7 @@ function mul(a::SubManifold{V},b::SubManifold{V},der=derive_mul(V,bits(a),bits(b
end

function *(a::Simplex{V},b::SubManifold{V}) where V
v = derive_mul(V,bits(basis(a)),bits(b),a.v,true)
v = derive_mul(V,UInt(basis(a)),UInt(b),a.v,true)
bas = mul(basis(a),b,v)
order(a.v)+order(bas)>diffmode(V) ? zero(V) : Simplex{V}(v,bas)
end
@@ -180,7 +180,7 @@ function contraction(a::X,b::Y) where {X<:TensorTerm{V},Y<:TensorTerm{V}} where
ba,bb = bits(basis(a)),bits(basis(b))
g,C,t,Z = interior(V,ba,bb)
!t && (return g_zero(V))
v = derive_mul(V,ba,bb,value(a),value(b),AbstractTensors.∏)
v = derive_mul(V,ba,bb,value(a),value(b),AbstractTensors.dot)
if istangent(V) && !iszero(Z)
_,_,Q,_ = symmetricmask(V,bits(basis(a)),bits(basis(b)))
v = !(typeof(v)<:TensorTerm) ? Simplex{V}(v,getbasis(V,Z)) : Simplex{V}(v,getbasis(loworder(V),Z))
Expand All @@ -205,32 +205,42 @@ outer(a::Leibniz.Derivation,b::Chain{V,1}) where V= outer(V(a),b)
outer(a::Chain{W},b::Leibniz.Derivation{T,1}) where {W,T} = outer(a,W(b))
outer(a::Chain{W},b::Chain{V,1}) where {W,V} = Chain{V,1}(a.*value(b))

contraction(a::Proj,b::TensorGraded) = a.v⊗(a.v⋅b)
contraction(a::Proj,b::TensorGraded) = a.v⊗(a.λ*(a.v⋅b))
contraction(a::Dyadic,b::TensorGraded) = a.x⊗(a.y⋅b)
contraction(a::TensorGraded,b::Dyadic) = (a⋅b.x)⊗b.y
contraction(a::TensorGraded,b::Proj) = (a⋅b.v)⊗b.v
contraction(a::TensorGraded,b::Proj) = ((a⋅b.v)*b.λ)⊗b.v
contraction(a::Dyadic,b::Dyadic) = (a.x*(a.y⋅b.x))⊗b.y
contraction(a::Dyadic,b::Proj) = (a.x*(a.y⋅b.v))⊗b.v
contraction(a::Proj,b::Dyadic) = (a.v*(a.v⋅b.x))⊗b.y
contraction(a::Proj,b::Proj) = (a.v*(a.v⋅b.v))⊗b.v
contraction(a::Dyadic,b::Proj) = (a.x*((a.y⋅b.v)*b.λ))⊗b.v
contraction(a::Proj,b::Dyadic) = (a.v*(a.λ*(a.v⋅b.x)))⊗b.y
contraction(a::Proj,b::Proj) = (a.v*((a.λ*b.λ)*(a.v⋅b.v)))⊗b.v
contraction(a::Dyadic{V},b::TensorGraded{V,0}) where V = Dyadic{V}(a.x*b,a.y)
contraction(a::Proj{V},b::TensorGraded{V,0}) where V = valuetype(b)<:Complex ? Proj{V}(a.v*sqrt(b)) : Dyadic{V}(a.v*b,a.v)
contraction(a::Proj{V},b::TensorTerm{V,0}) where V = Proj{V}(a.v,a.λ*value(b))
contraction(a::Proj{V},b::Chain{V,0}) where V = Proj{V}(a.v,a.λ*b[1])
contraction(a::Proj{V,<:Chain{V,1,<:TensorNested}},b::TensorGraded{V,0}) where V = Proj(Chain{V,1}(contraction.(value(a.v),b)))
contraction(a::Chain{W,1,<:Proj{V}},b::Chain{V,1}) where {W,V} = Chain{W,1}(value(a).⋅b)
#contraction(a::Chain{W,1,<:Proj{V}},b::Chain{V,1}) where {W,V} = Chain{W,1}(value(a).⋅b)
contraction(a::Chain{W,1,<:Dyadic{V}},b::Chain{V,1}) where {W,V} = Chain{W,1}(value(a).⋅Ref(b))
contraction(a::Proj{W,<:Chain{W,1,<:TensorNested{V}}},b::Chain{V,1}) where {W,V} = a.v:b
contraction(a::Chain{W,G},b::Chain{V,1,<:Chain}) where {W,G,V} = Chain{V,1}(column(Ref(a).⋅value(b)))
contraction(a::Chain{W,G,<:Chain},b::Chain{V,1,<:Chain}) where {W,G,V} = Chain{V,1}(Ref(a).⋅value(b))
Base.:(:)(a::Chain{V,1,<:Chain},b::Chain{V,1,<:Chain}) where V = sum(value(a).⋅value(b))
Base.:(:)(a::Chain{W,1,<:Dyadic{V}},b::Chain{V,1}) where {W,V} = sum(value(a).⋅Ref(b))
Base.:(:)(a::Chain{W,1,<:Proj{V}},b::Chain{V,1}) where {W,V} = sum(broadcast(⋅,value(a),Ref(b)))
#Base.:(:)(a::Chain{W,1,<:Proj{V}},b::Chain{V,1}) where {W,V} = sum(broadcast(⋅,value(a),Ref(b)))

contraction(a::Dyadic{V,<:Chain{V,1,<:Chain},<:Chain{V,1,<:Chain}} where V,b::TensorGraded) = sum(value(a.x).⊗(value(a.y).⋅b))
contraction(a::Dyadic{V,<:Chain{V,1,<:Chain}} where V,b::TensorGraded) = sum(value(a.x).⊗(a.y.⋅b))
contraction(a::Dyadic{V,T,<:Chain{V,1,<:Chain}} where {V,T},b::TensorGraded) = sum(a.x.⊗(value(a.y).⋅b))
contraction(a::Proj{V,<:Chain{W,1,<:Chain} where W} where V,b::TensorGraded) = sum(value(a.v).⊗(value(a.λ).*value(a.v).⋅b))
contraction(a::Proj{V,<:Chain{W,1,<:Chain{V,1}} where W},b::TensorGraded{V,1}) where V = sum(value(a.v).⊗(value(a.λ).*column(value(a.v).⋅b)))

+(a::Proj{V}...) where V = Proj(Chain(a...))
+(a::Proj{V}...) where V = Proj{V}(Chain(Values(eigvec.(a)...)),Chain(Values(eigval.(a)...)))
+(a::Dyadic{V}...) where V = Proj(Chain(a...))
+(a::TensorNested{V}...) where V = Proj(Chain(Dyadic.(a)...))
+(a::Proj{W,<:Chain{W,1,<:TensorNested{V}}} where W,b::TensorNested{V}) where V = +(value(a.v)...,b)
+(a::TensorNested{V},b::Proj{W,<:Chain{W,1,<:TensorNested{V}}} where W) where V = +(a,value(b.v)...)
+(a::Proj{M,<:Chain{M,1,<:TensorNested{V}}} where M,b::Proj{W,<:Chain{W,1,<:TensorNested{V}}} where W) where V = +(value(a.v)...,value(b.v)...)
+(a::Proj{M,<:Chain{M,1,<:Chain{V}}} where M,b::Proj{W,<:Chain{W,1,<:Chain{V}}} where W) where V = Chain(Values(value(a.v)...,value(b.v)...))
#+(a::Proj{W,<:Chain{W,1,<:TensorNested{V}}} where W,b::TensorNested{V}) where V = +(b,Proj.(value(a.v),value(a.λ))...)
#+(a::TensorNested{V},b::Proj{W,<:Chain{W,1,<:TensorNested{V}}} where W) where V = +(a,value(b.v)...)
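The λ field threaded through these contractions weights each projector by its eigenvalue, so a sum of Proj terms acts like a spectral decomposition Σᵢ λᵢ eᵢ⊗eᵢ. An illustrative sketch (assumes the StaticArrays hook so that eigen returns such a Proj, and that ⋅ dispatches to contraction):

```julia
using StaticArrays, LinearAlgebra, Grassmann
basis"++"
A = Chain{V,1}(SMatrix{2,2}(2.0,1.0,1.0,3.0))  # symmetric dyadic
P = eigen(A)   # Proj carrying an eigenvector Chain and eigenvalue weights λ
P⋅v1           # applies Σᵢ λᵢ (eᵢ⋅v1) eᵢ via the λ-weighted contraction above
```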

-(a::TensorNested) where V = -1a
-(a::TensorNested,b::TensorNested) where V = a+(-b)
@@ -240,7 +250,13 @@ Base.:(:)(a::Chain{W,1,<:Proj{V}},b::Chain{V,1}) where {W,V} = sum(broadcast(⋅
@inline *(a::TensorNested{V},b::TensorGraded{V,0}) where V = a⋅b
@inline *(a::TensorGraded{V,0},b::Proj{V,<:Chain{V,1,<:TensorNested}}) where V = Proj{V}(a*b.v)
@inline *(a::Proj{V,<:Chain{V,1,<:TensorNested}},b::TensorGraded{V,0}) where V = Proj{V}(a.v*b)
Base.:(a::A,b::B) where {A<:TensorAlgebra,B<:TensorAlgebra} = ab

@inline *(a::DyadicChain,b::DyadicChain) where V = a⋅b
@inline *(a::DyadicChain,b::Chain) where V = a⋅b
@inline *(a::DyadicChain,b::TensorTerm) where V = a⋅b
@inline *(a::TensorGraded,b::DyadicChain) where V = a⋅b
@inline *(a::DyadicChain,b::TensorNested) where V = a⋅b
@inline *(a::TensorNested,b::DyadicChain) where V = a⋅b

# dyadic identity element

@@ -320,9 +336,9 @@ export ⊘
for X ∈ TAG, Y ∈ TAG
@eval ⊘(x::X,y::Y) where {X<:$X{V},Y<:$Y{V}} where V = diffvars(V)≠0 ? conj(y)*x*y : y\x*involute(y)
end
for Z ∈ TAG
#=for Z ∈ TAG
@eval ⊘(x::Chain{V,G},y::T) where {V,G,T<:$Z} = diffvars(V)≠0 ? conj(y)*x*y : ((~y)*x*involute(y))(Val(G))/abs2(y)
end
end=#
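For context, ⊘ as defined above is a sandwich-style operation: with no differential variables it evaluates y\x*involute(y). A hedged usage sketch (the rotor sign convention here is an assumption):

```julia
using Grassmann
basis"+++"
R = exp(-π/4*v12)  # rotor in the v1∧v2 plane
v1 ⊘ R             # conjugates v1 by R, i.e. R\v1*involute(R) in this case
```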


@doc """
@@ -353,16 +369,28 @@ export ⟂, ∥

function Base.:^(v::T,i::S) where {T<:TensorTerm,S<:Integer}
i == 0 && (return getbasis(Manifold(v),0))
out = basis(v)
for k ∈ 1:(i-1)%4
out *= basis(v)
i == 1 && (return v)
j,bas = (i-1)%4,basis(v)
out = if j == 0
bas
elseif j == 1
bas*bas
elseif j == 2
bas*bas*bas
elseif j == 3
bas*bas*bas*bas
end
return typeof(v)<:SubManifold ? out : out*AbstractTensors.:^(value(v),i)
end
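Since a basis blade squares to a scalar, its powers cycle with period at most four; the unrolled branches above reduce the exponent mod 4 instead of looping. A small sanity sketch:

```julia
using Grassmann
basis"++"
v12^2  # j == 1 branch: v12*v12, a scalar (-1 in this metric)
v12^5  # j == 0 branch: exponent reduced mod 4, so this is just v12
```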

function Base.:^(v::T,i::S) where {T<:TensorAlgebra,S<:Integer}
V = Manifold(v)
isone(i) && (return v)
if T<:Chain && diffvars(v)==0
sq,d = contraction2(~v,v),i÷2
val = isone(d) ? sq : sq^d
return iszero(i%2) ? val : val*v
end
out = one(V)
if i < 8 # optimal choice ?
for k ∈ 1:i
@@ -460,8 +488,9 @@ subvec(a,b,s) = isfixed(a,b) ? (s ? (:($Sym.:-),:($Sym.:∑),:svec) : (:($Sym.:
subvec(b) = isfixed(valuetype(b)) ? (:($Sym.:-),:svec,:($Sym.:∏)) : (:-,:mvec,:*)
conjvec(b) = isfixed(valuetype(b)) ? (:($Sym.conj),:svec) : (:conj,:mvec)

mulvec(a,b,c) = c≠:contraction ? mulvec(a,b) : isfixed(a,b) ? (:($Sym.dot),:svec) : (:dot,:mvec)
mulvec(a,b) = isfixed(a,b) ? (:($Sym.:∏),:svec) : (:*,:mvec)
isfixed(a,b) = isfixed(valuetype(a))&&isfixed(valuetype(b))
isfixed(a,b) = isfixed(valuetype(a))||isfixed(valuetype(b))
isfixed(::Type{Rational{BigInt}}) = true
isfixed(::Type{BigFloat}) = true
isfixed(::Type{BigInt}) = true
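Switching isfixed from && to || means one big-number operand now suffices to select the cache-safe svec path. An illustrative mixed-precision case (the routing itself is internal, so this only shows the kind of input affected):

```julia
using Grassmann
basis"++"
big(2.0)*v1 + 0.5v2  # BigFloat meets Float64: either side being "fixed"
                     # now forces the allocation-safe code path
```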
@@ -514,7 +543,8 @@ adder(a,b,op=:+) = adder(typeof(a),typeof(b),op)
@noinline function adder(a::Type{<:TensorTerm{V,G}},b::Type{<:Chain{V,G,T}},op,swap=false) where {V,G,T}
left,right,VEC = addvec(a,b,swap,op)
if binomial(mdims(V),G)<(1<<cache_limit)
$(insert_expr((:N,:ib,:t),:svec)...)
$(insert_expr((:N,:ib),:svec)...)
t = promote_type(valuetype(a),valuetype(b))
out = zeros(svec(N,G,Any))
X = UInt(basis(a))
for k ∈ 1:binomial(N,G)
@@ -523,7 +553,7 @@ adder(a,b,op=:+) = adder(typeof(a),typeof(b),op)
val = B==X ? Expr(:call,left,val,:(value(a,$t))) : val
@inbounds setblade!_pre(out,val,ib[k],Val{N}())
end
return :(Chain{V,G}($(Expr(:call,tvec(N,G,:T),out...))))
return :(Chain{V,G}($(Expr(:call,tvec(N,G,t),out...))))
else return if !swap; quote
$(insert_expr((:N,:t),VEC)...)
out = convert($VEC(N,G,t),$(bcast(right,:(value(b,$VEC(N,G,t)),))))
@@ -539,7 +569,8 @@ adder(a,b,op=:+) = adder(typeof(a),typeof(b),op)
@noinline function adder(a::Type{<:TensorTerm{V,L}},b::Type{<:Chain{V,G,T}},op,swap=false) where {V,G,T,L}
left,right,VEC = addvec(a,b,swap,op)
if mdims(V)<cache_limit
$(insert_expr((:N,:ib,:bn,:t),:svec)...)
$(insert_expr((:N,:ib,:bn),:svec)...)
t = promote_type(valuetype(a),valuetype(b))
out = zeros(svec(N,Any))
X = UInt(basis(a))
for k ∈ 1:binomial(N,G)
@@ -559,7 +590,7 @@ adder(a,b,op=:+) = adder(typeof(a),typeof(b),op)
end
end
end
return :(MultiVector{V}($(Expr(:call,tvec(N,:T),out...))))
return :(MultiVector{V}($(Expr(:call,tvec(N,t),out...))))
else return if !swap; quote
$(insert_expr((:N,:t,:out,:r,:bng),VEC)...)
@inbounds out[r+1:r+bng] = $(bcast(right,:(value(b,$VEC(N,G,t)),)))
@@ -575,7 +606,8 @@ adder(a,b,op=:+) = adder(typeof(a),typeof(b),op)
@noinline function adder(a::Type{<:TensorTerm{V,G}},b::Type{<:MultiVector{V,T}},op,swap=false) where {V,G,T}
left,right,VEC = addvec(a,b,swap,op)
if mdims(V)<cache_limit
$(insert_expr((:N,:bs,:bn,:t),:svec)...)
$(insert_expr((:N,:bs,:bn),:svec)...)
t = promote_type(valuetype(a),valuetype(b))
out = zeros(svec(N,Any))
X = UInt(basis(a))
for g ∈ 1:N+1
Expand All @@ -587,7 +619,7 @@ adder(a,b,op=:+) = adder(typeof(a),typeof(b),op)
@inbounds setmulti!_pre(out,val,B,Val(N))
end
end
return :(MultiVector{V}($(Expr(:call,tvec(N,:T),out...))))
return :(MultiVector{V}($(Expr(:call,tvec(N,t),out...))))
else return if !swap; quote
$(insert_expr((:N,:t),VEC)...)
out = convert($VEC(N,t),$(bcast(right,:(value(b,$VEC(N,t)),))))
@@ -668,8 +700,8 @@ adder(a,b,op=:+) = adder(typeof(a),typeof(b),op)
return MultiVector{V}(out)
end end
end
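The adder branches above now derive the cached output's scalar type with promote_type of both operands instead of reusing the Chain's T alone. A sketch of the visible effect:

```julia
using Grassmann
basis"++"
a = 1v1            # Int coefficient term
b = 0.5v1 + 2.0v2  # Float64 chain
valuetype(a + b)   # Float64, per promote_type(valuetype(a), valuetype(b))
```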
@noinline function product_contraction(a::Type{S},b::Type{<:Chain{V,G,T}},swap=false) where S<:TensorGraded{V,L} where {V,G,T,L}
MUL,VEC = mulvec(a,b)
@noinline function product_contraction(a::Type{S},b::Type{<:Chain{V,G,T}},swap=false,contr=:contraction) where S<:TensorGraded{V,L} where {V,G,T,L}
MUL,VEC = mulvec(a,b,contr)
(swap ? G<L : L<G) && (!istangent(V)) && (return g_zero(V))
GL = swap ? G-L : L-G
if binomial(mdims(V),G)*(S<:Chain ? binomial(mdims(V),L) : 1)<(1<<cache_limit)
@@ -827,12 +859,12 @@ for (op,po,GL,grass) ∈ ((:∧,:>,:(G+L),:exter),(:∨,:<,:(G+L-mdims(V)),:meet
$(insert_expr((:N,:t,),VEC)...)
ia = indexbasis(mdims(w),G)
ib = indexbasis(mdims(W),L)
out = zeros(μ ? $VEC(N,t) : $VEC(N,$$GL,t))
CA,CB = isdual(L),isdual(R)
for i ∈ 1:binomial(mdims(w),L)
out = zeros(μ ? $VEC(N,t) : $VEC(N,$$GL,t))
CA,CB = isdual(w),isdual(W)
for i ∈ 1:binomial(mdims(w),G)
@inbounds v,iai = a[i],ia[i]
x = CA ? dual(V,iai) : iai
v≠0 && for j ∈ 1:binomial(mdims(W),G)
v≠0 && for j ∈ 1:binomial(mdims(W),L)
X = @inbounds CB ? dual(V,ib[j]) : ib[j]
if μ
if @inbounds $$grassaddmulti!(V,out,x,X,derive_mul(V,x,X,v,b[j],$MUL))
@@ -885,9 +917,11 @@ for (op,product!) ∈ ((:∧,:exteraddmulti!),(:*,:geomaddmulti!),
prop = op≠:* ? Symbol(:product_,op) : :product
@eval $prop(a,b,swap=false) = $prop(typeof(a),typeof(b),swap)
@eval @noinline function $prop(a::Type{S},b::Type{<:MultiVector{V,T}},swap=false) where S<:TensorGraded{V,G} where {V,G,T}
MUL,VEC = mulvec(a,b)
MUL,VEC = mulvec(a,b,$(QuoteNode(op)))
if mdims(V)<cache_limit
$(insert_expr((:N,:t,:out,:ib,:bs,:bn,),:svec)...)
$(insert_expr((:N,:t,:ib,:bs,:bn,),:svec)...)
out = zeros(svec(N,Any))
t = promote_type(valuetype(a),valuetype(b))
for g ∈ 1:N+1
ia = indexbasis(N,g-1)
@inbounds for i ∈ 1:bn[g]

2 comments on commit b868731

@chakravala (Owner, Author)


@JuliaRegistrator register()

@JuliaRegistrator


Registration pull request created: JuliaRegistries/General/27847

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the GitHub interface, or via:

git tag -a v0.7.2 -m "<description of version>" b868731ad8974a0fb29975f4162335bb34f8c1a3
git push origin v0.7.2
