diff --git a/core/MarkovProcesses.jl b/core/MarkovProcesses.jl
index 623d1fd2b91945b1b7e6abe96672574ffea09b68..26aafb06fa0cedf3dbc14fe4c75dbf59ee489f08 100644
--- a/core/MarkovProcesses.jl
+++ b/core/MarkovProcesses.jl
@@ -13,7 +13,8 @@ export Distribution, Product, Uniform, Normal
 # Common types and constructors
 export Observations, AbstractTrajectory, Trajectory, SynchronizedTrajectory
 export Model, ContinuousTimeModel, SynchronizedModel, ParametricModel
-export LHA, StateLHA, Edge
+export VariableModel, ParameterModel, Transition
+export LHA, StateLHA, Edge, Location, VariableAutomaton
 
 # Trajectory related methods
 export +, -, δ, dist_lp
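
The export changes above promote the symbolic type aliases to the public API. A small downstream sketch of what that enables (the helper transition_label is hypothetical; only the exported aliases come from the package):

    using MarkovProcesses

    # With the aliases exported, user code can annotate symbol-based arguments
    # explicitly instead of falling back to bare Symbol or Union{Symbol,Nothing}.
    transition_label(tr::Transition) = tr === nothing ? "no transition" : String(tr)

    transition_label(:R1)      # "R1"
    transition_label(nothing)  # "no transition"
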
diff --git a/core/common.jl b/core/common.jl
index b3b129f10ac8c42bcbbe14d0e296198be87b2237..86cd7b91da1b6b5f444d73a92aeab24b19ac72bc 100644
--- a/core/common.jl
+++ b/core/common.jl
@@ -5,11 +5,11 @@ import Distributions: Distribution, Univariate, Continuous, UnivariateDistributi
 abstract type Model end 
 abstract type AbstractTrajectory end
 
+const VariableModel = Symbol
+const ParameterModel = Symbol
 const Transition = Union{Symbol,Nothing}
 const Location = Symbol
 const VariableAutomaton = Symbol
-const VariableModel = Symbol
-const ParameterModel = Symbol
 
 mutable struct ContinuousTimeModel <: Model
     name::String
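
For reference, the aliases touched in this hunk are all backed by Symbol (Transition additionally admits nothing). A self-contained sketch, with the constants copied from the hunk above and the comments as interpretation:

    const VariableModel = Symbol              # a model species, e.g. :S, :I, :R
    const ParameterModel = Symbol             # a model parameter, e.g. :ki, :kr
    const Transition = Union{Symbol,Nothing}  # a reaction label, or nothing when no reaction fired
    const Location = Symbol                   # an LHA location
    const VariableAutomaton = Symbol          # an LHA variable

    :S isa VariableModel       # true
    nothing isa Transition     # true
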
diff --git a/core/model.jl b/core/model.jl
index 88c3e103fe9efa0cb0249d0dc82125c83e4d5d4d..4d3547884863f9c2c38f9a9c0d633930fb1ee52f 100644
--- a/core/model.jl
+++ b/core/model.jl
@@ -341,13 +341,20 @@ end
 Distribute over workers the computation of the mean value
 of an LHA variable over `nbr_sim` simulations of the model.
 """
-function distribute_mean_value_lha(sm::SynchronizedModel, sym_var::Union{VariableAutomaton,Vector{VariableAutomaton}}, nbr_sim::Int)
+function distribute_mean_value_lha(sm::SynchronizedModel, sym_var::VariableAutomaton, nbr_sim::Int)
     sum_val = @distributed (+) for i = 1:nbr_sim 
         volatile_simulate(sm)[sym_var] 
     end
     return sum_val / nbr_sim
 end
 
+function distribute_mean_value_lha(sm::SynchronizedModel, sym_var::Vector{VariableAutomaton}, nbr_sim::Int)
+    sum_val = @distributed (+) for i = 1:nbr_sim 
+        volatile_simulate(sm)[sym_var]
+    end
+    return sum_val / nbr_sim
+end
+
 function mean_value_lha(sm::SynchronizedModel, sym_var::VariableAutomaton, nbr_sim::Int)
     sum_val = 0.0 
     for i = 1:nbr_sim 
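
The hunk above splits the Union-typed method of distribute_mean_value_lha into two methods, one per argument shape: a single automaton variable or a vector of them. A usage sketch, assuming sm is an already-built SynchronizedModel whose automaton defines variables :d and :n (names illustrative only):

    using Distributed
    addprocs(4)                    # spawn worker processes for @distributed
    @everywhere using MarkovProcesses

    # mean of one automaton variable over 1000 simulations of the synchronized model
    mean_d = distribute_mean_value_lha(sm, :d, 1000)

    # element-wise mean of several automaton variables at once
    mean_dn = distribute_mean_value_lha(sm, [:d, :n], 1000)
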
diff --git a/core/network_model.jl b/core/network_model.jl
index 3d70ac44e11fd3e6014b0da9cb4b8bbdb9d03216..b1938b0ef8876040c25b304385b3cbea21a4564a 100644
--- a/core/network_model.jl
+++ b/core/network_model.jl
@@ -102,7 +102,7 @@ macro network_model(expr_network,expr_name...)
     nbr_rand = rand(1:1000)
     nbr_reactions = length(list_expr_reactions)
     basename_func = "$(replace(model_name, ' '=>'_'))_$(nbr_rand)"
-    expr_model_f! = "function $(basename_func)_f!(xnplus1::Vector{Int}, l_t::Vector{Float64}, l_tr::Vector{Union{Nothing,String}}, xn::Vector{Int}, tn::Float64, p::Vector{Float64})\n\t"
+    expr_model_f! = "function $(basename_func)_f!(xnplus1::Vector{Int}, l_t::Vector{Float64}, l_tr::Vector{Transition}, xn::Vector{Int}, tn::Float64, p::Vector{Float64})\n\t"
     # Computation of nu and propensity functions in f!
     str_l_a = "l_a = ("
     str_test_isabsorbing = "@inbounds("
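
With this change the generated simulator stores transition labels using the package's Transition alias (Union{Symbol,Nothing}) rather than Union{Nothing,String}, so labels are interned Symbols and identity comparison is cheap. A minimal sketch (the reaction name :R1 is illustrative):

    using MarkovProcesses

    l_tr = Transition[nothing]   # no reaction has fired yet
    l_tr[1] = :R1                # labels are Symbols, not freshly allocated Strings
    l_tr[1] === :R1              # true
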
diff --git a/tests/unit/macro_network_model.jl b/tests/unit/macro_network_model.jl
index 9644a0e70a36702c724990f030bfbfcefde3b33c..4447a0707b9cb50298f7d14d8f53d88df5f69fda 100644
--- a/tests/unit/macro_network_model.jl
+++ b/tests/unit/macro_network_model.jl
@@ -7,6 +7,7 @@ model_SIR = @network_model begin
 end "SIR"
 set_x0!(model_SIR, [95,5,0])
 set_param!(model_SIR, [0.012, 0.05])
+simulate(model_SIR)
 
 model_unnamed_SIR = @network_model begin
     R1: (S+I => 2I, ki*S*I)
@@ -14,6 +15,7 @@ model_unnamed_SIR = @network_model begin
 end
 set_x0!(model_unnamed_SIR, [95,5,0])
 set_param!(model_unnamed_SIR, [0.012, 0.05])
+simulate(model_unnamed_SIR)
 
 model_ER = @network_model begin
     R1: (E+S => ES, k1*E*S)
@@ -22,6 +24,7 @@ model_ER = @network_model begin
 end "ER"
 set_x0!(model_ER, [100,100,0,0])
 set_param!(model_ER, [1.0,1.0,1.0])
+simulate(model_ER)
 
 return true