From 1cc3ea105b46bdc50c2b80863bb6277d556f5f93 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Thu, 3 Mar 2022 21:24:56 -0500 Subject: [PATCH 01/10] Initial transform data structure changes --- src/InfiniteOpt.jl | 37 +- src/TranscriptionOpt/derivatives.jl | 206 ++++++ src/TranscriptionOpt/measures.jl | 191 ++++++ src/TranscriptionOpt/supports.jl | 954 ++++++++++++++++++++++++++++ src/datatypes.jl | 871 ++++++++++--------------- src/general_variables.jl | 8 +- src/infinite_domains.jl | 342 ---------- src/objective.jl | 19 +- src/optimize.jl | 14 +- src/scalar_parameters.jl | 924 +++------------------------ src/show.jl | 121 ++-- 11 files changed, 1840 insertions(+), 1847 deletions(-) create mode 100644 src/TranscriptionOpt/derivatives.jl create mode 100644 src/TranscriptionOpt/supports.jl diff --git a/src/InfiniteOpt.jl b/src/InfiniteOpt.jl index ec7678559..18018b3be 100644 --- a/src/InfiniteOpt.jl +++ b/src/InfiniteOpt.jl @@ -7,7 +7,6 @@ Reexport.@reexport using JuMP # Import the necessary packages. import Distributions import DataStructures -import FastGaussQuadrature import AbstractTrees import LeftChildRightSiblingTrees import LinearAlgebra @@ -31,41 +30,41 @@ include("Collections/Collections.jl") include("datatypes.jl") include("infinite_domains.jl") include("scalar_parameters.jl") -include("array_parameters.jl") -include("variable_basics.jl") -include("infinite_variables.jl") -include("semi_infinite_variables.jl") -include("point_variables.jl") -include("finite_variables.jl") +# include("array_parameters.jl") +# include("variable_basics.jl") +# include("infinite_variables.jl") +# include("semi_infinite_variables.jl") +# include("point_variables.jl") +# include("finite_variables.jl") include("nlp.jl") include("expressions.jl") -include("measures.jl") +# include("measures.jl") # Import and export MeasureToolbox -include("MeasureToolbox/MeasureToolbox.jl") -Reexport.@reexport using .MeasureToolbox +# include("MeasureToolbox/MeasureToolbox.jl") +# Reexport.@reexport using .MeasureToolbox # import more core methods -include("derivatives.jl") -include("constraints.jl") +# include("derivatives.jl") +# include("constraints.jl") include("macros.jl") include("objective.jl") -include("measure_expansions.jl") -include("derivative_evaluations.jl") +# include("measure_expansions.jl") +# include("derivative_evaluations.jl") include("optimize.jl") -include("results.jl") +# include("results.jl") include("show.jl") -include("utilities.jl") +# include("utilities.jl") include("general_variables.jl") # Import and export TranscriptionOpt -include("TranscriptionOpt/TranscriptionOpt.jl") -Reexport.@reexport using .TranscriptionOpt +# include("TranscriptionOpt/TranscriptionOpt.jl") +# Reexport.@reexport using .TranscriptionOpt # Define additional stuff that should not be exported const _EXCLUDE_SYMBOLS = [Symbol(@__MODULE__), :eval, :include] -# Following JuMP, export everything that doesn't start with a _ +# Following JuMP, export everything that doesn't start with a `_` for sym in names(@__MODULE__, all = true) sym_string = string(sym) if sym in _EXCLUDE_SYMBOLS || startswith(sym_string, "_") || startswith(sym_string, "@_") diff --git a/src/TranscriptionOpt/derivatives.jl b/src/TranscriptionOpt/derivatives.jl new file mode 100644 index 000000000..ca1de570d --- /dev/null +++ b/src/TranscriptionOpt/derivatives.jl @@ -0,0 +1,206 @@ +################################################################################ +# BASIC DERIVATIVE EVALUATION TYPES 
+################################################################################
+"""
+    AbstractDerivativeMethod
+
+An abstract type for storing derivative evaluation data that is pertinent to its
+reformation/transcription.
+"""
+abstract type AbstractDerivativeMethod end
+
+"""
+    GenerativeDerivativeMethod <: AbstractDerivativeMethod
+
+An abstract type for derivative evaluation method types that will require support
+generation when employed (e.g., internal node points associated with orthogonal
+collocation). Such methods can be used with derivatives that depend on independent
+infinite parameters, but cannot be used for ones that depend on dependent parameters.
+"""
+abstract type GenerativeDerivativeMethod <: AbstractDerivativeMethod end
+
+"""
+    NonGenerativeDerivativeMethod <: AbstractDerivativeMethod
+
+An abstract type for derivative evaluation method types that do not require the
+definition of additional support points. Such methods are amenable to any
+derivative in InfiniteOpt including those with dependent infinite parameter
+dependencies.
+"""
+abstract type NonGenerativeDerivativeMethod <: AbstractDerivativeMethod end
+
+"""
+    FDTechnique
+
+An abstract data type for labels of specific techniques applied in the finite
+difference method in derivative evaluation.
+"""
+abstract type FDTechnique end
+
+"""
+    Forward <: FDTechnique
+
+A technique label for the finite difference method that implements a forward
+difference approximation.
+"""
+struct Forward <: FDTechnique end
+
+"""
+    Central <: FDTechnique
+
+A technique label for the finite difference method that implements a central
+difference approximation.
+"""
+struct Central <: FDTechnique end
+
+"""
+    Backward <: FDTechnique
+
+A technique label for the finite difference method that implements a backward
+difference approximation.
+"""
+struct Backward <: FDTechnique end
+
+"""
+    FiniteDifference{T <: FDTechnique} <: NonGenerativeDerivativeMethod
+
+A `DataType` for information about the finite difference method applied to
+a derivative evaluation. Note that the constructor is of the form:
+```julia
+    FiniteDifference([technique::FDTechnique = Backward()],
+                     [add_boundary_constr::Bool = true])
+```
+where `technique` is the indicated finite difference method to be applied and
+`add_boundary_constr` indicates if the finite difference equation corresponding to
+a boundary support should be included. Thus, for backward difference this
+corresponds to the terminal point and for forward difference this corresponds to
+the initial point. We recommend using `add_boundary_constr = false` when a final
+condition is given with a backward method or when an initial condition is given
+with a forward method. Note that this argument is ignored for central finite
+difference, which cannot include any boundary points.
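+
+For example, a forward scheme without the boundary equation could be constructed as
+follows (a sketch; the printed form shown is illustrative and may differ depending
+on the defined `show` methods):
+```julia-repl
+julia> FiniteDifference(Forward(), false)
+FiniteDifference(Forward, false)
+```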
+
+**Fields**
+- `technique::T`: Mathematical technique behind the finite difference approximation
+- `add_boundary_constraint::Bool`: Indicate if the boundary constraint should be
+  included in the transcription (e.g., the terminal boundary backward equation for
+  backward difference)
+"""
+struct FiniteDifference{T <: FDTechnique} <: NonGenerativeDerivativeMethod
+    technique::T
+    add_boundary_constraint::Bool
+    # set the constructor
+    function FiniteDifference(technique::T = Backward(),
+                              add_boundary_constr::Bool = true) where {T <: FDTechnique}
+        return new{T}(technique, add_boundary_constr)
+    end
+end
+
+################################################################################
+# DERIVATIVE METHOD FUNCTIONS
+################################################################################
+# Determine if any derivatives have derivative constraints
+function has_derivative_constraints(pref::IndependentParameterRef)::Bool
+    return _data_object(pref).has_deriv_constrs
+end
+
+# Make update function for whether it has derivative constraints
+function _set_has_derivative_constraints(pref::IndependentParameterRef,
+                                         status::Bool)::Nothing
+    _data_object(pref).has_deriv_constrs = status
+    return
+end
+
+"""
+    derivative_method(pref::IndependentParameterRef)::AbstractDerivativeMethod
+
+Returns the numerical derivative evaluation method employed with `pref` when it
+is used as an operator parameter in a derivative.
+
+**Example**
+```julia-repl
+julia> derivative_method(pref)
+FiniteDifference(Backward, true)
+```
+"""
+function derivative_method(pref::IndependentParameterRef)::AbstractDerivativeMethod
+    return _core_variable_object(pref).derivative_method
+end
+
+# Make method to reset derivative constraints (supports are handled separately)
+function _reset_derivative_constraints(pref::Union{IndependentParameterRef,
+                                                   DependentParameterRef})::Nothing
+    if has_derivative_constraints(pref)
+        @warn("Support/method changes will invalidate existing derivative evaluation " *
+              "constraints that have been added to the InfiniteModel. Thus, " *
+              "these are being deleted.")
+        for idx in _derivative_dependencies(pref)
+            delete_derivative_constraints(DerivativeRef(JuMP.owner_model(pref), idx))
+        end
+        _set_has_derivative_constraints(pref, false)
+    end
+    return
+end
+
+"""
+    set_derivative_method(pref::IndependentParameterRef,
+                          method::AbstractDerivativeMethod)::Nothing
+
+Specifies the desired derivative evaluation method `method` for derivatives that are
+taken with respect to `pref`. Any internal supports exclusively associated with
+the previous method will be deleted. Also, if any derivatives were evaluated
+manually, the associated derivative evaluation constraints will be deleted. Errors
+if the new derivative method generates supports that are incompatible with existing
+measures.
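+
+In addition to the example below, a non-generative method can be set directly. For
+instance (a hypothetical call; `t` is assumed to be an independent infinite
+parameter):
+```julia-repl
+julia> set_derivative_method(t, FiniteDifference(Forward()))
+
+```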
+ +**Example** +```julia-repl +julia> set_derivative_method(d, OrthogonalCollocation(2)) + +``` +""" +function set_derivative_method(pref::IndependentParameterRef, + method::NonGenerativeDerivativeMethod + )::Nothing + old_param = _core_variable_object(pref) + domain = _parameter_domain(pref) + supps = _parameter_supports(pref) + sig_figs = significant_digits(pref) + if isempty(_generative_measures(pref)) + _reset_generative_supports(pref) + new_param = IndependentParameter(domain, supps, sig_figs, method, + NoGenerativeSupports()) + else + info = generative_support_info(pref) + new_param = IndependentParameter(domain, supps, sig_figs, method, info) + end + _reset_derivative_constraints(pref) + _set_core_variable_object(pref, new_param) + if is_used(pref) + set_optimizer_model_ready(JuMP.owner_model(pref), false) + end + return +end + +# GenerativeDerivativeMethod +function set_derivative_method(pref::IndependentParameterRef, + method::GenerativeDerivativeMethod + )::Nothing + new_info = generative_support_info(method) + old_info = generative_support_info(pref) + if !isempty(_generative_measures(pref)) && new_info != old_info + error("Generative derivative method conflicts with existing generative " * + "measures.") + end + old_param = _core_variable_object(pref) + domain = _parameter_domain(pref) + supps = _parameter_supports(pref) + sig_figs = significant_digits(pref) + new_param = IndependentParameter(domain, supps, sig_figs, method, new_info) + _reset_derivative_constraints(pref) + _reset_generative_supports(pref) + _set_core_variable_object(pref, new_param) + if is_used(pref) + set_optimizer_model_ready(JuMP.owner_model(pref), false) + end + return +end \ No newline at end of file diff --git a/src/TranscriptionOpt/measures.jl b/src/TranscriptionOpt/measures.jl index 8c4a1d3cd..76bd526f1 100644 --- a/src/TranscriptionOpt/measures.jl +++ b/src/TranscriptionOpt/measures.jl @@ -1,3 +1,194 @@ +""" + DiscreteMeasureData{P <: Union{JuMP.AbstractVariableRef, + Vector{<:JuMP.AbstractVariableRef}}, + N, B <: Union{Float64, Vector{Float64}}, + F <: Function + } <: AbstractMeasureData + +A DataType for immutable measure abstraction data where the +abstraction is of the form: +``measure = \\int_{\\tau \\in T} f(\\tau) w(\\tau) d\\tau \\approx \\sum_{i = 1}^N \\alpha_i f(\\tau_i) w(\\tau_i)``. +The supports and coefficients are immutable (i.e., they will not change +even if supports are changed for the underlying infinite parameter.) This +type can be used for both 1-dimensional and multi-dimensional measures. + +**Fields** +- `parameter_refs::P`: The infinite parameter(s) over which the integration occurs. + These can be comprised of multiple independent parameters, + but dependent parameters cannot be mixed with other types. +- `coefficients::Vector{Float64}`: Coefficients ``\\alpha_i`` for the above + measure abstraction. +- `supports::Array{Float64, N}`: Supports points ``\\tau_i``. This is a `Vector` + if only one parameter is given, otherwise it is + a `Matrix` where the supports are stored column-wise. +- `label::DataType`: Label for the support points ``\\tau_i`` when stored in the + infinite parameter(s), stemming from [`AbstractSupportLabel`](@ref). +- `weight_function::F`: Weighting function ``w`` must map an individual + support value to a `Real` scalar value. +- `lower_bounds::B`: Lower bound in accordance with ``T``, this denotes the + intended interval of the measure and should be `NaN` if ignored +- `upper_bounds::B`: Same as above but the upper bound. 
+- `is_expect::Bool`: Is this data associated with an expectation call? +""" +struct DiscreteMeasureData{P <: Union{JuMP.AbstractVariableRef, + Vector{<:JuMP.AbstractVariableRef}}, + N, B <: Union{Float64, Vector{Float64}}, + F <: Function + } <: AbstractMeasureData + parameter_refs::P + coefficients::Vector{Float64} + supports::Array{Float64, N} # supports are stored column-wise + label::DataType # label that will used when the supports are added to the model + weight_function::F # single support --> weight value + lower_bounds::B + upper_bounds::B + is_expect::Bool + # scalar constructor + function DiscreteMeasureData( + param_ref::V, coeffs::Vector{<:Real}, + supps::Vector{<:Real}, + label::DataType, + weight_func::F, + lower_bound::Real, + upper_bound::Real, + expect::Bool + ) where {V <: JuMP.AbstractVariableRef, F <: Function} + return new{V, 1, Float64, F}(param_ref, coeffs, supps, label, weight_func, + lower_bound, upper_bound, expect) + end + # multi constructor + function DiscreteMeasureData( + param_refs::Vector{V}, + coeffs::Vector{<:Real}, + supps::Matrix{<:Real}, + label::DataType, + weight_func::F, + lower_bound::Vector{<:Real}, + upper_bound::Vector{<:Real}, + expect::Bool + ) where {V <: JuMP.AbstractVariableRef, F <: Function} + return new{Vector{V}, 2, Vector{Float64}, F}(param_refs, coeffs, supps, + label, weight_func, lower_bound, + upper_bound, expect) + end +end + +""" + FunctionalDiscreteMeasureData{P <: Union{JuMP.AbstractVariableRef, + Vector{<:JuMP.AbstractVariableRef}}, + B <: Union{Float64, Vector{Float64}}, + I <: AbstractGenerativeInfo, + F1 <: Function, + F2 <: Function + } <: AbstractMeasureData + +A DataType for mutable measure abstraction data where the +abstraction is of the form: +``measure = \\int_{\\tau \\in T} f(\\tau) w(\\tau) d\\tau \\approx \\sum_{i = 1}^N \\alpha_i f(\\tau_i) w(\\tau_i)``. +This abstraction is equivalent to that of [`DiscreteMeasureData`](@ref), but +the difference is that the supports are not fully known at the time of measure +creation. Thus, functions are stored that will be used to generate the +concrete support points ``\\tau_i`` and their coefficients ``\\alpha_i`` when +the measure is evaluated (expanded). These supports are identified/generated +in accordance with the `label` with a gaurantee that at least `num_supports` are +generated. For example, if `label = MCSample` and `num_supports = 100` then +the measure will use all of the supports stored in the `parameter_refs` with the +label `MCSample` and will ensure there are at least 100 are generated. This +type can be used for both 1-dimensional and multi-dimensional measures. + +For 1-dimensional measures over independent infinite parameters, the +`generative_supp_info` specifies the info needed to make generative supports based +on those with that exist with `label`. Note that only 1 kind of generative +supports are allowed for each infinite parameter. + +**Fields** +- `parameter_refs::P`: The infinite parameter(s) over which the integration occurs. + These can be comprised of multiple independent parameters, + but dependent parameters cannot be mixed with other types. +- `coeff_function::F1`: Coefficient generation function making ``\\alpha_i`` + for the above measure abstraction. It should take + all the supports as input (formatted as an Array) + and return the corresponding vector of coefficients. +- `min_num_supports::Int`: Specifies the minimum number of supports ``\\tau_i`` + desired in association with `parameter_refs` and `label`. 
+- `label::DataType`: Label for the support points ``\\tau_i`` which are/will be + stored in the infinite parameter(s), stemming from [`AbstractSupportLabel`](@ref). +- `generative_supp_info::I`: Information needed to generate supports based on other + existing ones. +- `weight_function::F2`: Weighting function ``w`` must map an individual + support value to a `Real` scalar value. +- `lower_bounds::B`: Lower bounds in accordance with ``T``, this denotes the + intended interval of the measure and should be `NaN` if ignored +- `upper_bounds::B`: Same as above but the upper bounds. +- `is_expect::Bool`: Is this data associated with an expectation call? +""" +struct FunctionalDiscreteMeasureData{P <: Union{JuMP.AbstractVariableRef, + Vector{<:JuMP.AbstractVariableRef}}, + B <: Union{Float64, Vector{Float64}}, + I <: AbstractGenerativeInfo, + F1 <: Function, + F2 <: Function + } <: AbstractMeasureData + parameter_refs::P + coeff_function::F1 # supports (excluding generative)--> coefficient vector (includes generative) + min_num_supports::Int # minimum number of supports + label::DataType # support label of included supports + generative_supp_info::I + weight_function::F2 # single support --> weight value + lower_bounds::B + upper_bounds::B + is_expect::Bool + # scalar constructor + function FunctionalDiscreteMeasureData( + param_ref::V, + coeff_func::F1, + num_supps::Int, + label::DataType, + gen_info::I, + weight_func::F2, + lower_bound::Real, + upper_bound::Real, + expect::Bool + ) where {V <: JuMP.AbstractVariableRef, I <: AbstractGenerativeInfo, + F1 <: Function, F2 <: Function} + return new{V, Float64, I, F1, F2}(param_ref, coeff_func, num_supps, label, + gen_info, weight_func, lower_bound, + upper_bound, expect) + end + # multi constructor + function FunctionalDiscreteMeasureData( + param_refs::Vector{V}, + coeff_func::F1, + num_supps::Int, + label::DataType, + weight_func::F2, + lower_bound::Vector{<:Real}, + upper_bound::Vector{<:Real}, + expect::Bool + ) where {V <: JuMP.AbstractVariableRef, F1 <: Function, F2 <: Function} + return new{Vector{V}, Vector{Float64}, NoGenerativeSupports, F1, F2}( + param_refs, coeff_func, num_supps, label, NoGenerativeSupports(), + weight_func, lower_bound, upper_bound, expect) + end +end + +# Convenient Dispatch constructor +function FunctionalDiscreteMeasureData( + param_refs::Vector{V}, + coeff_func::Function, + num_supps::Int, + label::DataType, + info::NoGenerativeSupports, + weight_func::Function, + lower_bound::Vector{<:Real}, + upper_bound::Vector{<:Real}, + expect::Bool + ) where {V <: JuMP.AbstractVariableRef} + return FunctionalDiscreteMeasureData(param_refs, coeff_func, num_supps, + label, weight_func, lower_bound, + upper_bound, expect) +end + """ InfiniteOpt.add_point_variable(model::JuMP.Model, var::InfiniteOpt.PointVariable, diff --git a/src/TranscriptionOpt/supports.jl b/src/TranscriptionOpt/supports.jl new file mode 100644 index 000000000..7fe84609c --- /dev/null +++ b/src/TranscriptionOpt/supports.jl @@ -0,0 +1,954 @@ +################################################################################ +# GENERATIVE SUPPORT INFORMATION TYPES +################################################################################ +""" + AbstractGenerativeInfo + +An abstract type for storing information about generating supports that are made +based on existing supports as required by certain measures and/or derivatives +that depend on a certain independent infinite parameter. Such as the case with +internal collocation supports. 
+""" +abstract type AbstractGenerativeInfo end + +""" + NoGenerativeSupports <: AbstractGenerativeInfo + +A `DataType` to signify that no generative supports will be generated for the +measures and/or the derivatives. Has no fields. +""" +struct NoGenerativeSupports <: AbstractGenerativeInfo end + +""" + UniformGenerativeInfo <: AbstractGenerativeInfo + +A `DataType` for generative supports that will be generated in a uniform manner +over finite elements (i.e., in between the existing supports). These generative +supports are described by the `support_basis` which lie in a nominal domain [0, 1]. +The constructor is of the form: +``` + UniformGenerativeInfo(support_basis::Vector{<:Real}, label::DataType, + [lb::Real = 0, ub::Real = 1]) +``` +where the `support_basis` is defined over [`lb`, `ub`]. + +**Fields** +- `support_basis::Vector{Float64}`: The basis of generative supports defined in + [0, 1] that will be transformed for each finite element. +- `label::DataType`: The unique label to be given to each generative support. +""" +struct UniformGenerativeInfo <: AbstractGenerativeInfo + support_basis::Vector{Float64} + label::DataType + function UniformGenerativeInfo(basis::Vector{<:Real}, label::DataType, + lb::Real = 0, ub::Real = 1) + if minimum(basis) < lb || maximum(basis) > ub + error("Support basis violate the given lower and upper bounds. " * + "Please specify the appropriate lower bound and upper bounds.") + end + return new((basis .- lb) ./ (ub - lb), label) + end +end + +# Extend Base.:(==) +function Base.:(==)(info1::UniformGenerativeInfo, info2::UniformGenerativeInfo) + return info1.support_basis == info2.support_basis && info1.label == info2.label +end + +################################################################################ +# SUPPORT AND LABEL GENERATION +################################################################################ +""" + AbstractSupportLabel + +An abstract type for support label types. These are used to distinguish different +kinds of supports that are added to infinite parameters. +""" +abstract type AbstractSupportLabel end + +""" + All <: AbstractSupportLabel + +This support label is unique in that it isn't associated with a particular set of +supports, but rather is used used to indicate that all supports should be used. +""" +struct All <: AbstractSupportLabel end + +# Filler label for NoGenerativeSupports +struct _NoLabel <: AbstractSupportLabel end + +""" + PublicLabel <: AbstractSupportLabel + +An abstract type used to denote that labels that should be given to the user by +default. +""" +abstract type PublicLabel <: AbstractSupportLabel end + +""" + UserDefined <: PublicLabel + +A support label for supports that are supplied by the user directly to an infinite +parameter. +""" +struct UserDefined <: PublicLabel end + +""" + UniformGrid <: PublicLabel + +A support label for supports that are generated uniformly accross a given interval. +""" +struct UniformGrid <: PublicLabel end + +""" + SampleLabel <: PublicLabel + +An abstract type for labels of supports that are generated via some sampling technique. +""" +abstract type SampleLabel <: PublicLabel end + +""" + MCSample <: SampleLabel + +A support label for supports that are generated via Monte Carlo Sampling. +""" +struct MCSample <: SampleLabel end + +""" + WeightedSample <: SampleLabel + +A support label for supports that are generated by sampling from a statistical +distribution. 
+""" +struct WeightedSample <: SampleLabel end + +""" + Mixture <: PublicLabel + +A support label for multi-dimensional supports that are generated from a variety +of methods. +""" +struct Mixture <: PublicLabel end + +""" + UniqueMeasure{S::Symbol} <: PublicLabel + +A support label for supports that are provided from the `DiscreteMeasureData` +associated with a measure where a unique label is generated to distinguish those +supports. This is done by invoking [`generate_unique_label`](@ref). +""" +struct UniqueMeasure{S} <: PublicLabel end + +""" + MeasureBound <: PublicLabel + +A support label for supports that are generated using the upper and lower bounds +for `FunctionalDiscreteMeasureData`. +""" +struct MeasureBound <: PublicLabel end + +""" + InternalLabel <: AbstractSupportLabel + +An abstract type for support labels that are associated with supports that should +not be reported to the user by default. +""" +abstract type InternalLabel <: AbstractSupportLabel end + +""" + generate_unique_label()::Type{UniqueMeasure} + +Generate and return a unique support label for measures. +""" +function generate_unique_label()::DataType + return UniqueMeasure{gensym()} +end + +# Define default values of num_supports keyword +const DefaultNumSupports = 10 + +# a user interface of generate_support_values +""" + generate_supports(domain::AbstractInfiniteDomain + [method::Type{<:AbstractSupportLabel}]; + [num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits] + )::Tuple{Array{<:Real}, DataType} + +Generate `num_supports` support values with `sig_digits` significant digits in +accordance with `domain` and return them along with the correct generation label(s). +`IntervalDomain`s generate supports uniformly with label `UniformGrid` and +distribution domains generate them randomly accordingly to the +underlying distribution. Moreover, `method` indicates the generation method that +should be used. These `methods` correspond to parameter support labels. Current +labels that can be used as generation methods include (but may not be defined +for certain domain types): +- [`MCSample`](@ref): Uniformly distributed Monte Carlo samples. +- [`WeightedSample`](@ref): Monte Carlo samples that are weighted by an underlying PDF. +- [`UniformGrid`](@ref): Samples that are generated uniformly over the domain. + +Extensions that employ user-defined infinite domain types and/or methods +should extend [`generate_support_values`](@ref) to enable this. Errors if the +`domain` type and /or methods are unrecognized. This is intended as an internal +method to be used by methods such as [`generate_and_add_supports!`](@ref). +""" +function generate_supports(domain::AbstractInfiniteDomain; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits + )::Tuple + return generate_support_values(domain, num_supports = num_supports, + sig_digits = sig_digits) +end + +# 2 arguments +function generate_supports(domain::AbstractInfiniteDomain, + method::Type{<:AbstractSupportLabel}; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits + )::Tuple + return generate_support_values(domain, method, + num_supports = num_supports, + sig_digits = sig_digits) +end + +""" + generate_support_values(domain::AbstractInfiniteDomain, + [method::Type{MyMethod} = MyMethod]; + [num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits] + )::Tuple{Array{<:Real}, Symbol} + +A multiple dispatch method for [`generate_supports`](@ref). 
This will return +a tuple where the first element are the supports and the second is their +label. This can be extended for user-defined infinite domains and/or generation +methods. When defining a new domain type the default method dispatch should +make `method` an optional argument (making it the default). Otherwise, other +method dispatches for a given domain must ensure that `method` is positional +argument without a default value (contrary to the definition above). Note that the +`method` must be a subtype of either [`PublicLabel`](@ref) or [`InternalLabel`](@ref). +""" +function generate_support_values(domain::AbstractInfiniteDomain, + args...; kwargs...) + if isempty(args) + error("`generate_support_values` has not been extended for infinite domains " * + "of type `$(typeof(domain))`. This automatic support generation is not " * + "implemented.") + else + error("`generate_support_values` has not been extended for infinite domains " * + "of type `$(typeof(domain))` with the generation method `$(args[1])`. " * + "This automatic support generation is not implemented.") + end +end + +# IntervalDomain and UniformGrid +function generate_support_values(domain::IntervalDomain, + method::Type{UniformGrid} = UniformGrid; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits, + )::Tuple{Vector{<:Real}, DataType} + lb = JuMP.lower_bound(domain) + ub = JuMP.upper_bound(domain) + new_supports = round.(range(lb, stop = ub, length = num_supports), + sigdigits = sig_digits) + return new_supports, method +end + +# IntervalDomain and MCSample +function generate_support_values(domain::IntervalDomain, + method::Type{MCSample}; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits, + )::Tuple{Vector{<:Real}, DataType} + lb = JuMP.lower_bound(domain) + ub = JuMP.upper_bound(domain) + dist = Distributions.Uniform(lb, ub) + new_supports = round.(Distributions.rand(dist, num_supports), + sigdigits = sig_digits) + return new_supports, method +end + +# UniDistributionDomain and MultiDistributionDomain (with multivariate only) +function generate_support_values( + domain::Union{UniDistributionDomain, MultiDistributionDomain{<:Distributions.MultivariateDistribution}}, + method::Type{WeightedSample} = WeightedSample; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits + )::Tuple{Array{<:Real}, DataType} + dist = domain.distribution + new_supports = round.(Distributions.rand(dist, num_supports), + sigdigits = sig_digits) + return new_supports, method +end + +# UniDistributionDomain and MCSample +function generate_support_values( + domain::UniDistributionDomain, + method::Type{MCSample}; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits + )::Tuple{Vector{Float64}, DataType} + return generate_support_values(domain, WeightedSample; num_supports = num_supports, + sig_digits = sig_digits)[1], method # TODO use an unwieghted sample... 
+end + +# MultiDistributionDomain (matrix-variate distribution) +function generate_support_values( + domain::MultiDistributionDomain{<:Distributions.MatrixDistribution}, + method::Type{WeightedSample} = WeightedSample; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits + )::Tuple{Array{Float64, 2}, DataType} + dist = domain.distribution + raw_supports = Distributions.rand(dist, num_supports) + new_supports = Array{Float64}(undef, length(dist), num_supports) + for i in 1:size(new_supports, 2) + new_supports[:, i] = round.(reduce(vcat, raw_supports[i]), + sigdigits = sig_digits) + end + return new_supports, method +end + +# Generate the supports for a collection domain +function _generate_collection_supports(domain::CollectionDomain, num_supports::Int, + sig_digits::Int)::Array{Float64, 2} + domains = collection_domains(domain) + # build the support array transpose to fill in column order (leverage locality) + trans_supports = Array{Float64, 2}(undef, num_supports, length(domains)) + for i in eachindex(domains) + @inbounds trans_supports[:, i] = generate_support_values(domains[i], + num_supports = num_supports, + sig_digits = sig_digits)[1] + end + return permutedims(trans_supports) +end + +function _generate_collection_supports(domain::CollectionDomain, + method::Type{<:AbstractSupportLabel}, + num_supports::Int, + sig_digits::Int)::Array{Float64, 2} + domains = collection_domains(domain) + # build the support array transpose to fill in column order (leverage locality) + trans_supports = Array{Float64, 2}(undef, num_supports, length(domains)) + for i in eachindex(domains) + @inbounds trans_supports[:, i] = generate_support_values(domains[i], + method, + num_supports = num_supports, + sig_digits = sig_digits)[1] + end + return permutedims(trans_supports) +end + +# CollectionDomain (IntervalDomains) +function generate_support_values(domain::CollectionDomain{IntervalDomain}, + method::Type{UniformGrid} = UniformGrid; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits + )::Tuple{Array{<:Real}, DataType} + new_supports = _generate_collection_supports(domain, num_supports, sig_digits) + return new_supports, method +end + +function generate_support_values(domain::CollectionDomain{IntervalDomain}, + method::Type{MCSample}; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits + )::Tuple{Array{<:Real}, DataType} + new_supports = _generate_collection_supports(domain, method, num_supports, sig_digits) + return new_supports, method +end + +# CollectionDomain (UniDistributionDomains) +function generate_support_values(domain::CollectionDomain{<:UniDistributionDomain}, + method::Type{WeightedSample} = WeightedSample; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits + )::Tuple{Array{<:Real}, DataType} + new_supports = _generate_collection_supports(domain, num_supports, sig_digits) + return new_supports, method +end + +# CollectionDomain (InfiniteScalarDomains) +function generate_support_values(domain::CollectionDomain, + method::Type{Mixture} = Mixture; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits + )::Tuple{Array{<:Real}, DataType} + new_supports = _generate_collection_supports(domain, num_supports, sig_digits) + return new_supports, method +end + +# CollectionDomain (InfiniteScalarDomains) using purely MC sampling +# this is useful for measure support generation +function generate_support_values(domain::CollectionDomain, + method::Type{MCSample}; + 
num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits + )::Tuple{Array{<:Real}, DataType} + new_supports = _generate_collection_supports(domain, method, num_supports, sig_digits) + return new_supports, method +end + +# For label All: dispatch to default methods +function generate_support_values(domain::AbstractInfiniteDomain, ::Type{All}; + num_supports::Int = DefaultNumSupports, + sig_digits::Int = DefaultSigDigits) + return generate_support_values(domain, num_supports = num_supports, + sig_digits = sig_digits) +end + +################################################################################ +# GENERATIVE SUPPORT FUNCTIONS +################################################################################ +# Extend copy for NoGenerativeSupports +function Base.copy(d::NoGenerativeSupports)::NoGenerativeSupports + return NoGenerativeSupports() +end + +# Extend copy for UniformGenerativeInfo +function Base.copy(d::UniformGenerativeInfo)::UniformGenerativeInfo + return UniformGenerativeInfo(copy(d.support_basis), d.label) +end + +""" + support_label(info::AbstractGenerativeInfo)::DataType + +Return the support label to be associated with generative supports produced in +accordance with `info`. This is intended an internal method that should be +extended for user defined types of [`AbstractGenerativeInfo`](@ref). +""" +function support_label(info::AbstractGenerativeInfo) + error("`support_label` not defined for generative support info type " * + "$(typeof(info)).") +end + +# UniformGenerativeInfo +function support_label(info::UniformGenerativeInfo)::DataType + return info.label +end + +# NoGenerativeSupports +function support_label(info::NoGenerativeSupports)::DataType + return _NoLabel +end + +""" + generative_support_info(pref::IndependentParameterRef)::AbstractGenerativeInfo + +Return the generative support information associated with `pref`. +""" +function generative_support_info(pref::IndependentParameterRef)::AbstractGenerativeInfo + return _core_variable_object(pref).generative_supp_info +end + +""" + has_generative_supports(pref::IndependentParameterRef)::Bool + +Return whether generative supports have been added to `pref` in accordance +with its generative support info. 
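+
+**Example** (a sketch; the value shown is illustrative and depends on whether
+generative supports have been created for the assumed parameter `t`)
+```julia-repl
+julia> has_generative_supports(t)
+false
+```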
+""" +function has_generative_supports(pref::IndependentParameterRef)::Bool + return _data_object(pref).has_generative_supports +end + +# Specify if a parameter has generative supports +function _set_has_generative_supports(pref::IndependentParameterRef, + status::Bool)::Nothing + _data_object(pref).has_generative_supports = status + return +end + +# Reset (remove) the generative supports if needed +function _reset_generative_supports(pref::IndependentParameterRef)::Nothing + if has_generative_supports(pref) + label = support_label(generative_support_info(pref)) + delete_supports(pref, label = label) # this also calls _set_has_generative_supports + end + return +end + +# Specify the generative_support_info +function _set_generative_support_info(pref::IndependentParameterRef, + info::AbstractGenerativeInfo)::Nothing + sig_digits = significant_digits(pref) + method = derivative_method(pref) + domain = _parameter_domain(pref) + supps = _parameter_supports(pref) + new_param = IndependentParameter(domain, supps, sig_digits, method, info) + _reset_generative_supports(pref) + _set_core_variable_object(pref, new_param) + if is_used(pref) + set_optimizer_model_ready(JuMP.owner_model(pref), false) + end + return +end + +""" + make_generative_supports(info::AbstractGenerativeInfo, + pref::IndependentParameterRef, + existing_supps::Vector{Float64} + )::Vector{Float64} + +Generate the generative supports for `pref` in accordance with `info` and the +`existing_supps` that `pref` has. The returned supports should not include +`existing_supps`. This is intended as internal method to enable +[`add_generative_supports`](@ref) and should be extended for any user defined +`info` types that are created to enable new measure and/or derivative evaluation +techniques that require the creation of generative supports. +""" +function make_generative_supports(info::AbstractGenerativeInfo, pref, supps) + error("`make_generative_supports` is not defined for generative support " * + "info of type $(typeof(info)).") +end + +# UniformGenerativeInfo +function make_generative_supports(info::UniformGenerativeInfo, + pref, supps)::Vector{Float64} + # collect the preliminaries + basis = info.support_basis + num_internal = length(basis) + num_existing = length(supps) + num_existing <= 1 && error("$(pref) does not have enough supports for " * + "creating generative supports.") + internal_nodes = Vector{Float64}(undef, num_internal * (num_existing - 1)) + # generate the internal node supports + for i in Iterators.take(eachindex(supps), num_existing - 1) + lb = supps[i] + ub = supps[i+1] + internal_nodes[(i-1)*num_internal+1:i*num_internal] = basis * (ub - lb) .+ lb + end + return internal_nodes +end + +## Define internal dispatch methods for adding generative supports +# AbstractGenerativeInfo +function _add_generative_supports(pref, info::AbstractGenerativeInfo)::Nothing + if !has_generative_supports(pref) + existing_supps = supports(pref, label = All) + supps = make_generative_supports(info, pref, existing_supps) + add_supports(pref, supps, label = support_label(info)) + _set_has_generative_supports(pref, true) + end + return +end + +# NoGenerativeSupports +function _add_generative_supports(pref, info::NoGenerativeSupports)::Nothing + return +end + +""" + add_generative_supports(pref::IndependentParameterRef)::Nothing + +Create generative supports for `pref` if needed in accordance with its +generative support info using [`make_generative_supports`](@ref) and add them to +`pref`. 
This is intended as an internal function, but can be useful user defined +optimizer model extensions that utlize our support system. +""" +function add_generative_supports(pref::IndependentParameterRef)::Nothing + info = generative_support_info(pref) + _add_generative_supports(pref, info) + return +end + +################################################################################ +# SUPPORT FUNCTIONS +################################################################################ +# Internal functions +function _parameter_supports(pref::IndependentParameterRef) + return _core_variable_object(pref).supports +end +function _parameter_support_values(pref::IndependentParameterRef)::Vector{Float64} + return collect(keys(_parameter_supports(pref))) +end +function _update_parameter_supports(pref::IndependentParameterRef, + supports::DataStructures.SortedDict{Float64, Set{DataType}})::Nothing + domain = _parameter_domain(pref) + method = derivative_method(pref) + sig_figs = significant_digits(pref) + info = generative_support_info(pref) + new_param = IndependentParameter(domain, supports, sig_figs, method, info) + _set_core_variable_object(pref, new_param) + _reset_derivative_constraints(pref) + _set_has_generative_supports(pref, false) + if is_used(pref) + set_optimizer_model_ready(JuMP.owner_model(pref), false) + end + return +end + +""" + has_internal_supports(pref::Union{IndependentParameterRef, DependentParameterRef})::Bool + +Indicate if `pref` has internal supports that will be hidden from the user by +default. +""" +function has_internal_supports( + pref::Union{IndependentParameterRef, DependentParameterRef} + )::Bool + return _data_object(pref).has_internal_supports +end + +# update has internal supports +function _set_has_internal_supports( + pref::Union{IndependentParameterRef, DependentParameterRef}, + status::Bool + )::Nothing + _data_object(pref).has_internal_supports = status + return +end + +""" + significant_digits(pref::IndependentParameterRef)::Int + +Return the number of significant digits enforced on the supports of `pref`. + +**Example** +```julia-repl +julia> significant_digits(t) +12 +``` +""" +function significant_digits(pref::IndependentParameterRef)::Int + return _core_variable_object(pref).sig_digits +end + +""" + num_supports(pref::IndependentParameterRef; + [label::Type{<:AbstractSupportLabel} = PublicLabel])::Int + +Return the number of support points associated with `pref`. By default, only the +number of public supports are counted. The full amount can be determined by setting +`label = All`. Moreover, the amount of labels that satisfy `label` is obtained +using an [`AbstractSupportLabel`](@ref). + +**Example** +```julia-repl +julia> num_supports(t) +2 +``` +""" +function num_supports(pref::IndependentParameterRef; + label::Type{<:AbstractSupportLabel} = PublicLabel)::Int + supports_dict = _parameter_supports(pref) + if label == All || (!has_internal_supports(pref) && label == PublicLabel) + return length(supports_dict) + else + return count(p -> any(v -> v <: label, p[2]), supports_dict) + end +end + +""" + has_supports(pref::IndependentParameterRef)::Bool + +Return true if `pref` has supports or false otherwise. + +**Example** +```julia-repl +julia> has_supports(t) +true +``` +""" +has_supports(pref::IndependentParameterRef)::Bool = !isempty(_parameter_supports(pref)) + +""" + supports(pref::IndependentParameterRef; + [label::Type{<:AbstractSupportLabel} = PublicLabel])::Vector{Float64} + +Return the support points associated with `pref`. 
Errors if there are no +supports. Users can query just support points generated by a certain method +using the keyword argument `label`. By default, the function returns all public +support points regardless of the associated label. The full collection is given by setting +`label = All`. Moreover, the amount of labels that satisfy `label` is obtained +using an [`AbstractSupportLabel`](@ref). + +**Example** +```julia-repl +julia> supports(t) +2-element Array{Float64,1}: + 0.0 + 1.0 +``` +""" +function supports(pref::IndependentParameterRef; + label::Type{<:AbstractSupportLabel} = PublicLabel)::Vector{Float64} + if label == All || (!has_internal_supports(pref) && label == PublicLabel) + return _parameter_support_values(pref) + else + return findall(x -> any(v -> v <: label, x), _parameter_supports(pref)) + end +end + +# Return a matrix os supports when given a vector of IndependentParameterRefs (for measures) +function supports(prefs::Vector{IndependentParameterRef}; + label::Type{<:AbstractSupportLabel} = PublicLabel, + use_combinatorics::Bool = true)::Matrix{Float64} + # generate the support matrix considering all the unique combinations + if use_combinatorics + supp_list = Tuple(supports(p, label = label) for p in prefs) + inds = CartesianIndices(ntuple(i -> 1:length(supp_list[i]), length(prefs))) + supps = Matrix{Float64}(undef, length(prefs), length(inds)) + for (k, idx) in enumerate(inds) + supps[:, k] = [supp_list[i][j] for (i, j) in enumerate(idx.I)] + end + return supps + # generate the support matrix while negating the unique combinations + else + num_supps = num_supports(first(prefs), label = label) + trans_supps = Matrix{Float64}(undef, num_supps, length(prefs)) + for i in eachindex(prefs) + supp = supports(prefs[i], label = label) + if length(supp) != num_supps + error("Cannot simultaneously query the supports of multiple " * + "independent parameters if the support dimensions do not match " * + "while ignoring the combinatorics. Try setting `use_combinatorics = true`.") + else + @inbounds trans_supps[:, i] = supp + end + end + return permutedims(trans_supps) + end +end + +""" + set_supports(pref::IndependentParameterRef, supports::Vector{<:Real}; + [force::Bool = false, + label::Type{<:AbstractSupportLabel} = UserDefined] + )::Nothing + +Specify the support points for `pref`. Errors if the supports violate the bounds +associated with the infinite domain. Warns if the points are not unique. If `force` +this will overwrite exisiting supports otherwise it will error if there are +existing supports. + +**Example** +```julia-repl +julia> set_supports(t, [0, 1]) + +julia> supports(t) +2-element Array{Int64,1}: + 0 + 1 +``` +""" +function set_supports(pref::IndependentParameterRef, supports::Vector{<:Real}; + force::Bool = false, + label::Type{<:AbstractSupportLabel} = UserDefined + )::Nothing + if has_supports(pref) && !force + error("Unable set supports for $pref since it already has supports." 
* + " Consider using `add_supports` or use `force = true` to " * + "overwrite the existing supports.") + end + domain = _parameter_domain(pref) + supports = round.(supports, sigdigits = significant_digits(pref)) + _check_supports_in_bounds(error, supports, domain) + supports_dict = DataStructures.SortedDict{Float64, Set{DataType}}( + i => Set([label]) for i in supports) + if length(supports_dict) != length(supports) + @warn("Support points are not unique, eliminating redundant points.") + end + _update_parameter_supports(pref, supports_dict) + _set_has_internal_supports(pref, label <: InternalLabel) + return +end + +""" + add_supports(pref::IndependentParameterRef, + supports::Union{Real, Vector{<:Real}}; + [label::Type{<:AbstractSupportLabel} = UserDefined])::Nothing + +Add additional support points for `pref` with identifying label `label`. + +**Example** +```julia-repl +julia> add_supports(t, 0.5) + +julia> supports(t) +3-element Array{Float64,1}: + 0.0 + 0.5 + 1.0 + +julia> add_supports(t, [0.25, 1]) + +julia> supports(t) +4-element Array{Float64,1}: + 0.0 + 0.25 + 0.5 + 1.0 +``` +""" +function add_supports(pref::IndependentParameterRef, + supports::Union{Real, Vector{<:Real}}; + label::Type{<:AbstractSupportLabel} = UserDefined, + check::Bool = true)::Nothing + domain = infinite_domain(pref) + supports = round.(supports, sigdigits = significant_digits(pref)) + check && _check_supports_in_bounds(error, supports, domain) + supports_dict = _parameter_supports(pref) + added_new_support = false + for s in supports + if haskey(supports_dict, s) + push!(supports_dict[s], label) + else + supports_dict[s] = Set([label]) + added_new_support = true + end + end + if label <: InternalLabel + _set_has_internal_supports(pref, true) + end + if added_new_support + _reset_derivative_constraints(pref) + _reset_generative_supports(pref) + if is_used(pref) + set_optimizer_model_ready(JuMP.owner_model(pref), false) + end + end + return +end + +""" + delete_supports(pref::IndependentParameterRef; + [label::Type{<:AbstractSupportLabel} = All])::Nothing + +Delete the support points for `pref`. If `label != All` then delete `label` and +any supports that solely depend on it. + +**Example** +```julia-repl +julia> delete_supports(t) + +julia> supports(t) +ERROR: Parameter t does not have supports. +``` +""" +function delete_supports(pref::IndependentParameterRef; + label::Type{<:AbstractSupportLabel} = All)::Nothing + supp_dict = _parameter_supports(pref) + if has_derivative_constraints(pref) + @warn("Deleting supports invalidated derivative evaluations. 
Thus, these " *
+              "are being deleted as well.")
+        for idx in _derivative_dependencies(pref)
+            delete_derivative_constraints(DerivativeRef(JuMP.owner_model(pref), idx))
+        end
+        _set_has_derivative_constraints(pref, false)
+    end
+    if label == All
+        if used_by_measure(pref)
+            error("Cannot delete the supports of $pref since it is used by " *
+                  "a measure.")
+        end
+        empty!(supp_dict)
+        _set_has_generative_supports(pref, false)
+        _set_has_internal_supports(pref, false)
+    else
+        if has_generative_supports(pref) && support_label(generative_support_info(pref)) != label
+            label = Union{label, support_label(generative_support_info(pref))}
+        end
+        _set_has_generative_supports(pref, false)
+        filter!(p -> !all(v -> v <: label, p[2]), supp_dict)
+        for (k, v) in supp_dict
+            filter!(l -> !(l <: label), v)
+        end
+        if has_internal_supports(pref) && num_supports(pref, label = InternalLabel) == 0
+            _set_has_internal_supports(pref, false)
+        end
+    end
+    if is_used(pref)
+        set_optimizer_model_ready(JuMP.owner_model(pref), false)
+    end
+    return
+end
+
+# Make dispatch for an array of parameters
+function delete_supports(prefs::AbstractArray{<:IndependentParameterRef};
+                         label::Type{<:AbstractSupportLabel} = All)::Nothing
+    delete_supports.(prefs, label = label)
+    return
+end
+
+"""
+    fill_in_supports!(pref::IndependentParameterRef;
+                      [num_supports::Int = DefaultNumSupports])::Nothing
+
+Automatically generate `num_supports` support points for a particular independent
+parameter `pref`. The supports are generated uniformly if the underlying infinite
+domain is an `IntervalDomain`, or they are generated randomly according to the
+distribution if the domain is a `UniDistributionDomain`. Nothing is added if
+supports already exist and `modify = false`. Extensions that use user-defined
+domain types should extend [`generate_and_add_supports!`](@ref) and/or
+[`generate_support_values`](@ref) as needed. Errors if the infinite domain type is
+not recognized.
+
+**Example**
+```julia-repl
+julia> fill_in_supports!(x, num_supports = 4)
+
+julia> supports(x)
+4-element Array{Number,1}:
+ 0.0
+ 0.333
+ 0.667
+ 1.0
+
+```
+"""
+function fill_in_supports!(pref::IndependentParameterRef;
+                           num_supports::Int = DefaultNumSupports,
+                           modify::Bool = true)::Nothing
+    domain = infinite_domain(pref)
+    current_amount = length(_parameter_supports(pref))
+    if (modify || current_amount == 0) && current_amount < num_supports
+        generate_and_add_supports!(pref, domain,
+                                   num_supports = num_supports - current_amount,
+                                   adding_extra = (current_amount > 0))
+    end
+    return
+end
+
+"""
+    generate_and_add_supports!(pref::IndependentParameterRef,
+                               domain::AbstractInfiniteDomain,
+                               [method::Type{<:AbstractSupportLabel}];
+                               [num_supports::Int = DefaultNumSupports])::Nothing
+
+Generate supports for independent parameter `pref` via [`generate_support_values`](@ref)
+and add them to `pref`. This is intended as an extendable internal method for
+[`fill_in_supports!`](@ref fill_in_supports!(::IndependentParameterRef)).
+Most extensions that employ user-defined infinite domains can typically enable this
+by extending [`generate_support_values`](@ref). Errors if the infinite domain type
+is not recognized.
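+
+For instance, a hypothetical internal call (assuming `t` is an independent parameter
+with an `IntervalDomain`) might look like:
+```julia-repl
+julia> generate_and_add_supports!(t, infinite_domain(t), num_supports = 5)
+
+```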
+""" +function generate_and_add_supports!(pref::IndependentParameterRef, + domain::AbstractInfiniteDomain; + num_supports::Int = DefaultNumSupports, + adding_extra::Bool = false)::Nothing + sig_digits = significant_digits(pref) + if isa(domain, IntervalDomain) && adding_extra + supports, label = generate_support_values(domain, MCSample, + num_supports = num_supports, + sig_digits = sig_digits) + else + supports, label = generate_supports(domain, + num_supports = num_supports, + sig_digits = sig_digits) + end + add_supports(pref, supports, label = label) + return +end + +# Dispatch with method +function generate_and_add_supports!(pref::IndependentParameterRef, + domain::AbstractInfiniteDomain, + method::Type{<:AbstractSupportLabel}; + num_supports::Int = DefaultNumSupports, + adding_extra::Bool = false)::Nothing + sig_digits = significant_digits(pref) + supports, label = generate_supports(domain, method, + num_supports = num_supports, + sig_digits = sig_digits) + add_supports(pref, supports, label = label) + return +end diff --git a/src/datatypes.jl b/src/datatypes.jl index 2158a4f93..d7953c618 100644 --- a/src/datatypes.jl +++ b/src/datatypes.jl @@ -290,160 +290,6 @@ struct CollectionDomain{T <: InfiniteScalarDomain} <: InfiniteArrayDomain domains::Vector{T} end -################################################################################ -# GENERATIVE SUPPORT INFORMATION TYPES -################################################################################ -""" - AbstractGenerativeInfo - -An abstract type for storing information about generating supports that are made -based on existing supports as required by certain measures and/or derivatives -that depend on a certain independent infinite parameter. Such as the case with -internal collocation supports. -""" -abstract type AbstractGenerativeInfo end - -""" - NoGenerativeSupports <: AbstractGenerativeInfo - -A `DataType` to signify that no generative supports will be generated for the -measures and/or the derivatives. Has no fields. -""" -struct NoGenerativeSupports <: AbstractGenerativeInfo end - -""" - UniformGenerativeInfo <: AbstractGenerativeInfo - -A `DataType` for generative supports that will be generated in a uniform manner -over finite elements (i.e., in between the existing supports). These generative -supports are described by the `support_basis` which lie in a nominal domain [0, 1]. -The constructor is of the form: -``` - UniformGenerativeInfo(support_basis::Vector{<:Real}, label::DataType, - [lb::Real = 0, ub::Real = 1]) -``` -where the `support_basis` is defined over [`lb`, `ub`]. - -**Fields** -- `support_basis::Vector{Float64}`: The basis of generative supports defined in - [0, 1] that will be transformed for each finite element. -- `label::DataType`: The unique label to be given to each generative support. -""" -struct UniformGenerativeInfo <: AbstractGenerativeInfo - support_basis::Vector{Float64} - label::DataType - function UniformGenerativeInfo(basis::Vector{<:Real}, label::DataType, - lb::Real = 0, ub::Real = 1) - if minimum(basis) < lb || maximum(basis) > ub - error("Support basis violate the given lower and upper bounds. 
" * - "Please specify the appropriate lower bound and upper bounds.") - end - return new((basis .- lb) ./ (ub - lb), label) - end -end - -# Extend Base.:(==) -function Base.:(==)(info1::UniformGenerativeInfo, info2::UniformGenerativeInfo)::Bool - return info1.support_basis == info2.support_basis && info1.label == info2.label -end - -################################################################################ -# BASIC DERIVATIVE EVALUATION TYPES -################################################################################ -""" - AbstractDerivativeMethod - -An abstract type for storing derivative evaluation data that is pertinent to its -reformation/transcription. -""" -abstract type AbstractDerivativeMethod end - -""" - GenerativeDerivativeMethod <: AbstractDerivativeMethod - -An abstract type for derivative evaluation method types that will require support -generation when employed (e.g., internal node points associated with orthogonal -collocation). Such methods can be used with derivatives that depend on independent -infinite parameters, but cannot be used for ones that depend on dependent parameters. -""" -abstract type GenerativeDerivativeMethod <: AbstractDerivativeMethod end - -""" - NonGenerativeDerivativeMethod <: AbstractDerivativeMethod - -An abstract type for derivative evaluation method types that do not require the -definition of additional support points. Such methods are amendable to any -derivative in InfiniteOpt including those with dependent infinite parameter -dependencies. -""" -abstract type NonGenerativeDerivativeMethod <: AbstractDerivativeMethod end - -""" - FDTechnique - -An abstract data type for labels of specific techniques applied in the finite -difference method in derivative evaluation. -""" -abstract type FDTechnique end - -""" - Forward <: FDTechnique - -A technique label for finite difference method that implements a forward -difference approximation. -""" -struct Forward <: FDTechnique end - -""" - Central <: FDTechnique - -A technique label for finite difference method that implements a central -difference approximation. -""" -struct Central <: FDTechnique end - -""" - Backward <: FDTechnique - -A technique label for finite difference method that implements a backward -difference approximation. -""" -struct Backward <: FDTechnique end - -""" - FiniteDifference{T <: FDTechnique} <: NonGenerativeDerivativeMethod - -A `DataType` for information about finite difference method applied to -a derivative evaluation. Note that the constructor is of the form: -```julia - FiniteDifference([technique::FDTechnique = Backward()], - [add_boundary_constr::Bool = true]) -``` -where `technique` is the indicated finite difference method to be applied and -`add_boundary_constr` indicates if the finite difference equation corresponding to -a boundary support should be included. Thus, for backward difference since -corresponds to the terminal point and for forward difference this corresponds to -the initial point. We recommend using `add_boundary_constr = false` when an final -condition is given with a backward method or when an initial condition is given -with a forward method. Note that this argument is ignored for central finite -difference which cannot include any boundary points. 
- -**Fields** -- `technique::T`: Mathematical technqiue behind finite difference -- `add_boundary_constraint::Bool`: Indicate if the boundary constraint should be - included in the transcription (e.g., the terminal boundary backward equation for - backward difference) -""" -struct FiniteDifference{T <: FDTechnique} <: NonGenerativeDerivativeMethod - technique::T - add_boundary_constraint::Bool - # set the constructor - function FiniteDifference(technique::T = Backward(), - add_boundary_constr::Bool = true) where {T <: FDTechnique} - return new{T}(technique, add_boundary_constr) - end -end - ################################################################################ # PARAMETER TYPES ################################################################################ @@ -462,31 +308,15 @@ An abstract type for scalar parameters used in InfiniteOpt. abstract type ScalarParameter <: InfOptParameter end """ - IndependentParameter{T <: InfiniteScalarDomain, - M <: AbstractDerivativeMethod, - I <: AbstractGenerativeInfo} <: ScalarParameter + IndependentParameter{T <: InfiniteScalarDomain} <: ScalarParameter A `DataType` for storing independent scalar infinite parameters. **Fields** - `domain::T`: The infinite domain that characterizes the parameter. -- `supports::DataStructures.SortedDict{Float64, Set{DataType}}`: The support points - used to discretize the parameter and their associated type labels stored as - `DataTypes`s which should be a subtype of [`AbstractSupportLabel`](@ref). -- `sig_digits::Int`: The number of significant digits used to round the support values. -- `derivative_method::M`: The derivative evaluation method used for derivatives that - are conducted with respect to this parameter. -- `gnerative_supp_info::I`: The info associated with any generative supports that will - need to be generated for measures and/or derivatives based on existing supports. -""" -struct IndependentParameter{T <: InfiniteScalarDomain, - M <: AbstractDerivativeMethod, - I <: AbstractGenerativeInfo} <: ScalarParameter +""" +struct IndependentParameter{T <: InfiniteScalarDomain} <: ScalarParameter domain::T - supports::DataStructures.SortedDict{Float64, Set{DataType}} # Support to label set - sig_digits::Int - derivative_method::M - generative_supp_info::I end """ @@ -510,18 +340,9 @@ A `DataType` for storing a collection of dependent infinite parameters. **Fields** - `domain::T`: The infinite domain that characterizes the parameters. -- `supports::Dict{Vector{Float64}, Set{DataType}}`: Support dictionary where keys - are supports and the values are the set of labels for each support. -- `sig_digits::Int`: The number of significant digits used to round the support values. -- `derivative_methods::Vector{M}`: The derivative evaluation methods associated with - each parameter. -""" -struct DependentParameters{T <: InfiniteArrayDomain, - M <: NonGenerativeDerivativeMethod} <: InfOptParameter +""" +struct DependentParameters{T <: InfiniteArrayDomain} <: InfOptParameter domain::T - supports::Dict{Vector{Float64}, Set{DataType}} # Support to label set - sig_digits::Int - derivative_methods::Vector{M} end # Define convenient alias for infinite types @@ -556,9 +377,6 @@ A mutable `DataType` for storing `ScalarParameter`s and their data. - `measure_indices::Vector{MeasureIndex}`: Indices of dependent measures. - `constraint_indices::Vector{InfOptConstraintIndex}`: Indices of dependent constraints. - `in_objective::Bool`: Is this used in objective? This should be true only for finite parameters. 
-- `generative_measures::Vector{MeasureIndex}`: Indices of measures that use `parameter.generative_supp_info`. -- `has_internal_supports::Bool`: Does this parameter have internal supports? -- `has_generative_supports::Bool`: Have any generative supports been added? - `has_deriv_constrs::Bool`: Have any derivative evaluation constraints been added to the infinite model associated with this parameter? """ @@ -573,10 +391,7 @@ mutable struct ScalarParameterData{P <: ScalarParameter} <: AbstractDataObject measure_indices::Vector{MeasureIndex} constraint_indices::Vector{InfOptConstraintIndex} in_objective::Bool - generative_measures::Vector{MeasureIndex} - has_internal_supports::Bool - has_generative_supports::Bool - has_deriv_constrs::Bool + has_deriv_constrs::Bool # TODO maybe remove this? end # Convenient constructor @@ -588,8 +403,7 @@ function ScalarParameterData(param::P, return ScalarParameterData{P}(param, object_num, parameter_num, name, ParameterFunctionIndex[], InfiniteVariableIndex[], DerivativeIndex[], MeasureIndex[], - InfOptConstraintIndex[], false, MeasureIndex[], - false, false, false) + InfOptConstraintIndex[], false, false) end """ @@ -612,7 +426,6 @@ A mutable `DataType` for storing [`DependentParameters`](@ref) and their data. - `measure_indices::Vector{Vector{MeasureIndex}}`: Indices of dependent measures. - `constraint_indices::Vector{Vector{InfOptConstraintIndex}}`: Indices of dependent constraints. -- `has_internal_supports::Bool`: Does this parameter have internal supports? - `has_deriv_constrs::Bool`: Have any derivative evaluation constraints been added to the infinite model associated with this parameter? """ @@ -626,8 +439,7 @@ mutable struct MultiParameterData{P <: DependentParameters} <: AbstractDataObjec derivative_indices::Vector{Vector{DerivativeIndex}} measure_indices::Vector{Vector{MeasureIndex}} constraint_indices::Vector{Vector{InfOptConstraintIndex}} - has_internal_supports::Bool - has_deriv_constrs::Vector{Bool} + has_deriv_constrs::Vector{Bool} # TODO maybe remove? end # Convenient constructor @@ -641,7 +453,7 @@ function MultiParameterData(params::P, [DerivativeIndex[] for i in eachindex(names)], [MeasureIndex[] for i in eachindex(names)], [InfOptConstraintIndex[] for i in eachindex(names)], - false, zeros(Bool, length(names))) + zeros(Bool, length(names))) end ################################################################################ @@ -854,6 +666,7 @@ end ################################################################################ # DERIVATIVE TYPES ################################################################################ +# TODO modify to store derivatives of arbitrary order """ Derivative{F <: Function, V <: GeneralVariableRef} <: JuMP.AbstractVariable @@ -889,212 +702,97 @@ end """ AbstractMeasureData -An abstract type to define data for measures to define the behavior of +An abstract type for measures to define the behavior of a [`Measure`](@ref). 
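+Concrete subtypes defined below include [`IntegralData`](@ref) and
+[`ExpectationData`](@ref).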
""" abstract type AbstractMeasureData end """ - DiscreteMeasureData{P <: Union{JuMP.AbstractVariableRef, - Vector{<:JuMP.AbstractVariableRef}}, - N, B <: Union{Float64, Vector{Float64}}, - F <: Function - } <: AbstractMeasureData + IntegralData{P <: Union{JuMP.AbstractVariableRef, Vector{<:JuMP.AbstractVariableRef}}, + D <: AbstractInfiniteDomain, + F <: Union{Nothing, Function}} <: AbstractMeasureData -A DataType for immutable measure abstraction data where the -abstraction is of the form: -``measure = \\int_{\\tau \\in T} f(\\tau) w(\\tau) d\\tau \\approx \\sum_{i = 1}^N \\alpha_i f(\\tau_i) w(\\tau_i)``. -The supports and coefficients are immutable (i.e., they will not change -even if supports are changed for the underlying infinite parameter.) This -type can be used for both 1-dimensional and multi-dimensional measures. +A `DataType` for defining integral measures and storing their needed canonical +information; namely, the infinite parameter(s) that act as the independent +variable, the domain of the integral, and a weighting function (if there is +one). **Fields** -- `parameter_refs::P`: The infinite parameter(s) over which the integration occurs. - These can be comprised of multiple independent parameters, - but dependent parameters cannot be mixed with other types. -- `coefficients::Vector{Float64}`: Coefficients ``\\alpha_i`` for the above - measure abstraction. -- `supports::Array{Float64, N}`: Supports points ``\\tau_i``. This is a `Vector` - if only one parameter is given, otherwise it is - a `Matrix` where the supports are stored column-wise. -- `label::DataType`: Label for the support points ``\\tau_i`` when stored in the - infinite parameter(s), stemming from [`AbstractSupportLabel`](@ref). -- `weight_function::F`: Weighting function ``w`` must map an individual - support value to a `Real` scalar value. -- `lower_bounds::B`: Lower bound in accordance with ``T``, this denotes the - intended interval of the measure and should be `NaN` if ignored -- `upper_bounds::B`: Same as above but the upper bound. -- `is_expect::Bool`: Is this data associated with an expectation call? -""" -struct DiscreteMeasureData{P <: Union{JuMP.AbstractVariableRef, - Vector{<:JuMP.AbstractVariableRef}}, - N, B <: Union{Float64, Vector{Float64}}, - F <: Function - } <: AbstractMeasureData +- `parameter_refs::P`: The infinite parameter(s) that act as the independent +variable(s). +- `domain::D`: The domain of the integral (must be a sub-domain of the parameter +domain). +- `weight_func::F`: A function of the form `w(d)::Float64`, where `d` are the +infinite parameters, that is multiplied against the integrant function. 
+""" +struct IntegralData{P <: Union{JuMP.AbstractVariableRef, + Vector{<:JuMP.AbstractVariableRef}}, + D <: AbstractInfiniteDomain, + F <: Union{Nothing, Function}} <: AbstractMeasureData parameter_refs::P - coefficients::Vector{Float64} - supports::Array{Float64, N} # supports are stored column-wise - label::DataType # label that will used when the supports are added to the model - weight_function::F # single support --> weight value - lower_bounds::B - upper_bounds::B - is_expect::Bool - # scalar constructor - function DiscreteMeasureData( - param_ref::V, coeffs::Vector{<:Real}, - supps::Vector{<:Real}, - label::DataType, - weight_func::F, - lower_bound::Real, - upper_bound::Real, - expect::Bool - ) where {V <: JuMP.AbstractVariableRef, F <: Function} - return new{V, 1, Float64, F}(param_ref, coeffs, supps, label, weight_func, - lower_bound, upper_bound, expect) + domain::D # needs to be a subset or equal to the parameter domain(s) + weight_func::F + + # Univariate constructor + function IntegralData( + pref::P, + domain::D, + func::F = nothing + ) where {P <: JuMP.AbstractVariableRef, + D <: InfiniteScalarDomain, + F <: Union{Nothing, Function}} + return new{P, D, F}(pref, domain, func) end - # multi constructor - function DiscreteMeasureData( - param_refs::Vector{V}, - coeffs::Vector{<:Real}, - supps::Matrix{<:Real}, - label::DataType, - weight_func::F, - lower_bound::Vector{<:Real}, - upper_bound::Vector{<:Real}, - expect::Bool - ) where {V <: JuMP.AbstractVariableRef, F <: Function} - return new{Vector{V}, 2, Vector{Float64}, F}(param_refs, coeffs, supps, - label, weight_func, lower_bound, - upper_bound, expect) + + # Multivariate constructor + function IntegralData( + prefs::P, + domain::D, + func::F = nothing + ) where {P <: Vector{<:JuMP.AbstractVariableRef}, + D <: InfiniteArrayDomain, + F <: Union{Nothing, Function}} + return new{P, D, F}(prefs, domain, func) end end """ - FunctionalDiscreteMeasureData{P <: Union{JuMP.AbstractVariableRef, - Vector{<:JuMP.AbstractVariableRef}}, - B <: Union{Float64, Vector{Float64}}, - I <: AbstractGenerativeInfo, - F1 <: Function, - F2 <: Function - } <: AbstractMeasureData - -A DataType for mutable measure abstraction data where the -abstraction is of the form: -``measure = \\int_{\\tau \\in T} f(\\tau) w(\\tau) d\\tau \\approx \\sum_{i = 1}^N \\alpha_i f(\\tau_i) w(\\tau_i)``. -This abstraction is equivalent to that of [`DiscreteMeasureData`](@ref), but -the difference is that the supports are not fully known at the time of measure -creation. Thus, functions are stored that will be used to generate the -concrete support points ``\\tau_i`` and their coefficients ``\\alpha_i`` when -the measure is evaluated (expanded). These supports are identified/generated -in accordance with the `label` with a gaurantee that at least `num_supports` are -generated. For example, if `label = MCSample` and `num_supports = 100` then -the measure will use all of the supports stored in the `parameter_refs` with the -label `MCSample` and will ensure there are at least 100 are generated. This -type can be used for both 1-dimensional and multi-dimensional measures. - -For 1-dimensional measures over independent infinite parameters, the -`generative_supp_info` specifies the info needed to make generative supports based -on those with that exist with `label`. Note that only 1 kind of generative -supports are allowed for each infinite parameter. 
+ ExpectationData{P <: Union{JuMP.AbstractVariableRef, Vector{<:JuMP.AbstractVariableRef}}, + F <: Union{Nothing, Function}} <: AbstractMeasureData + +A `DataType` for storing expectation operators. Principally, this includes +the infinite parameter(s) the expectation is with respect to and the underlying +probability density function. **Fields** -- `parameter_refs::P`: The infinite parameter(s) over which the integration occurs. - These can be comprised of multiple independent parameters, - but dependent parameters cannot be mixed with other types. -- `coeff_function::F1`: Coefficient generation function making ``\\alpha_i`` - for the above measure abstraction. It should take - all the supports as input (formatted as an Array) - and return the corresponding vector of coefficients. -- `min_num_supports::Int`: Specifies the minimum number of supports ``\\tau_i`` - desired in association with `parameter_refs` and `label`. -- `label::DataType`: Label for the support points ``\\tau_i`` which are/will be - stored in the infinite parameter(s), stemming from [`AbstractSupportLabel`](@ref). -- `generative_supp_info::I`: Information needed to generate supports based on other - existing ones. -- `weight_function::F2`: Weighting function ``w`` must map an individual - support value to a `Real` scalar value. -- `lower_bounds::B`: Lower bounds in accordance with ``T``, this denotes the - intended interval of the measure and should be `NaN` if ignored -- `upper_bounds::B`: Same as above but the upper bounds. -- `is_expect::Bool`: Is this data associated with an expectation call? -""" -struct FunctionalDiscreteMeasureData{P <: Union{JuMP.AbstractVariableRef, - Vector{<:JuMP.AbstractVariableRef}}, - B <: Union{Float64, Vector{Float64}}, - I <: AbstractGenerativeInfo, - F1 <: Function, - F2 <: Function - } <: AbstractMeasureData - parameter_refs::P - coeff_function::F1 # supports (excluding generative)--> coefficient vector (includes generative) - min_num_supports::Int # minimum number of supports - label::DataType # support label of included supports - generative_supp_info::I - weight_function::F2 # single support --> weight value - lower_bounds::B - upper_bounds::B - is_expect::Bool - # scalar constructor - function FunctionalDiscreteMeasureData( - param_ref::V, - coeff_func::F1, - num_supps::Int, - label::DataType, - gen_info::I, - weight_func::F2, - lower_bound::Real, - upper_bound::Real, - expect::Bool - ) where {V <: JuMP.AbstractVariableRef, I <: AbstractGenerativeInfo, - F1 <: Function, F2 <: Function} - return new{V, Float64, I, F1, F2}(param_ref, coeff_func, num_supps, label, - gen_info, weight_func, lower_bound, - upper_bound, expect) - end - # multi constructor - function FunctionalDiscreteMeasureData( - param_refs::Vector{V}, - coeff_func::F1, - num_supps::Int, - label::DataType, - weight_func::F2, - lower_bound::Vector{<:Real}, - upper_bound::Vector{<:Real}, - expect::Bool - ) where {V <: JuMP.AbstractVariableRef, F1 <: Function, F2 <: Function} - return new{Vector{V}, Vector{Float64}, NoGenerativeSupports, F1, F2}( - param_refs, coeff_func, num_supps, label, NoGenerativeSupports(), - weight_func, lower_bound, upper_bound, expect) +- `parameter_refs::P`: The infinite parameter(s) the expectation is over. +- `pdf::F`: The pdf function used by the expectation. 
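+
+**Example**
+A minimal sketch (here `ξ` is assumed to be a `GeneralVariableRef` for a random
+infinite parameter):
+```julia-repl
+julia> data = ExpectationData(ξ); # no explicit pdf is stored
+
+julia> data = ExpectationData(ξ, x -> exp(-abs(x)) / 2); # custom pdf over ξ
+```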
+""" +struct ExpectationData{P <: Union{JuMP.AbstractVariableRef, + Vector{<:JuMP.AbstractVariableRef}}, + F <: Union{Nothing, Function}} <: AbstractMeasureData + parameter_refs::P # these also define the domain implicitly + pdf::F + + # Expectation constructor + function ExpectationData( + prefs::P, + func::F = nothing + ) where {P <: Union{JuMP.AbstractVariableRef, Vector{<:JuMP.AbstractVariableRef}}, + F <: Union{Nothing, Function}} + return new{P, F}(prefs, func) end end -# Convenient Dispatch constructor -function FunctionalDiscreteMeasureData( - param_refs::Vector{V}, - coeff_func::Function, - num_supps::Int, - label::DataType, - info::NoGenerativeSupports, - weight_func::Function, - lower_bound::Vector{<:Real}, - upper_bound::Vector{<:Real}, - expect::Bool - ) where {V <: JuMP.AbstractVariableRef} - return FunctionalDiscreteMeasureData(param_refs, coeff_func, num_supps, - label, weight_func, lower_bound, - upper_bound, expect) -end - """ Measure{T <: JuMP.AbstractJuMPScalar, V <: AbstractMeasureData} -A `DataType` for measure abstractions. The abstraction is determined by `data` -and is enacted on `func` when the measure is evaluated (expended). +A `DataType` for measure objects. The type of measure is determined by `data` +and is enacted on `func` when the measure is evaluated (expanded). **Fields** - `func::T` The `InfiniteOpt` expression to be measured. -- `data::V` Data of the abstraction as described in a `AbstractMeasureData` - concrete subtype. +- `data::V` Data needed to describe the measure. - `object_nums::Vector{Int}`: The parameter object numbers of the evaluated measure expression (i.e., the object numbers of `func` excluding those that belong to `data`). @@ -1122,7 +820,6 @@ A mutable `DataType` for storing [`Measure`](@ref)s and their data. **Fields** - `measure::M`: The measure structure. -- `name::String`: The base name used for printing `name(meas_expr d(par))`. - `measure_indices::Vector{MeasureIndex}`: Indices of dependent measures. - `constraint_indices::Vector{InfOptConstraintIndex}`: Indices of dependent constraints. - `derivative_indices::Vector{DerivativeIndex}`: Indices of dependent derivatives. @@ -1130,15 +827,14 @@ A mutable `DataType` for storing [`Measure`](@ref)s and their data. """ mutable struct MeasureData{M <: Measure} <: AbstractDataObject measure::M - name::String measure_indices::Vector{MeasureIndex} constraint_indices::Vector{InfOptConstraintIndex} derivative_indices::Vector{DerivativeIndex} in_objective::Bool end -function MeasureData(measure::M, name::String = "measure") where {M <: Measure} - return MeasureData{M}(measure, name, MeasureIndex[], InfOptConstraintIndex[], +function MeasureData(measure::M) where {M <: Measure} + return MeasureData{M}(measure, MeasureIndex[], InfOptConstraintIndex[], DerivativeIndex[], false) end @@ -1187,65 +883,185 @@ mutable struct ConstraintData{C <: JuMP.AbstractConstraint} <: AbstractDataObjec is_info_constraint::Bool end +################################################################################ +# TRANSFORM API +################################################################################ +""" + AbstractTransformAttr + +An abstract type for attributes used by a transformation backend that are stored +in an `InfiniteModel`'s cache as the model is created. +""" +abstract type AbstractTransformAttr end + +""" + FiniteParameterAttr <: AbstractTransformAttr + +A finite parameter attribute that is used by a transformation backend. 
This +is intended to be used by transformation backends that require/allow additional +information about finite parameters to be specified by the user. +""" +abstract type FiniteParameterAttr <: AbstractTransformAttr end + +""" + InfiniteParameterAttr <: AbstractTransformAttr + +An infinite parameter attribute that is used by a transformation backend. This +is intended to be used by transformation backends that require/allow additional +information about infinite parameters to be specified by the user. +""" +abstract type InfiniteParameterAttr <: AbstractTransformAttr end + +""" + Supports <: InfiniteParameterAttr + +An attribute for storing support (discretization) points that will be used +by infinite parameter to approximate the infinite domain of a problem. +""" +struct Supports <: InfiniteParameterAttr end + +""" + VariableAttr <: AbstractTransformAttr + +A variable attribute that is used by a transformation backend. This is +intended to be used by transformation backends that require/allow additional +information about variables to be specified by the user. +""" +abstract type VariableAttr <: AbstractTransformAttr end + +""" + DerivativeAttr <: AbstractTransformAttr + +A derivative attribute that is used by a transformation backend. This is +intended to be used by transformation backends that require/allow additional +information about derivatives to be specified by the user. +""" +abstract type DerivativeAttr <: AbstractTransformAttr end + +""" + MeasureAttr <: AbstractTransformAttr + +A measure attribute that is used by a transformation backend. This is +intended to be used by transformation backends that require/allow additional +information about measure to be specified by the user. +""" +abstract type MeasureAttr <: AbstractTransformAttr end + +""" + ConstraintAttr <: AbstractTransformAttr + +A constraint attribute that is used by a transformation backend. This is +intended to be used by transformation backends that require/allow additional +information about constraints to be specified by the user. +""" +abstract type ConstraintAttr <: AbstractTransformAttr end + +""" + BoundaryCondition <: ConstraintAttr + +An attribute to indicate whether a constraint is a boundary condition. +""" +struct BoundaryCondition <: ConstraintAttr end # TODO maybe do something else + +""" + ModelAttr <: AbstractTransformAttr + +A model attribute that is used by a transformation backend. This is +intended to be used by transformation backends that require/allow additional +information about the model to be specified by the user. +""" +abstract type ModelAttr <: AbstractTransformAttr end + +""" + TransformAttrCache + +A convenient container for storing all the transformation attributes stored in +an `InfiniteModel` that can be used by the transformation backend. 
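+
+Each field is a dictionary keyed by an `(index, attribute)` tuple, except for
+`model`, which is keyed by the [`ModelAttr`](@ref) alone. A minimal sketch:
+```julia-repl
+julia> cache = TransformAttrCache(); # every attribute dictionary starts empty
+
+julia> isempty(cache.measures)
+true
+```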
+""" +struct TransformAttrCache + finite_params::Dict{Tuple{FiniteParameterIndex, FiniteParameterAttr}, Any} + indep_params::Dict{Tuple{IndependentParameterIndex, InfiniteParameterAttr}, Any} + depend_params::Dict{Tuple{DependentParametersIndex, InfiniteParameterAttr}, Any} # TODO fix this + infinite_vars::Dict{Tuple{InfiniteVariableIndex, VariableAttr}, Any} + semi_vars::Dict{Tuple{SemiInfiniteVariableIndex, VariableAttr}, Any} + point_vars::Dict{Tuple{PointVariableIndex, VariableAttr}, Any} + derivatives::Dict{Tuple{DerivativeIndex, DerivativeAttr}, Any} + measures::Dict{Tuple{MeasureIndex, MeasureAttr}, Any} + constraints::Dict{Tuple{InfOptConstraintIndex, ConstraintAttr}, Any} + model::Dict{ModelAttr, Any} + + # Constructor + function TransformAttrCache() + return new( + Dict{Tuple{FiniteParameterIndex, FiniteParameterAttr}, Any}(), + Dict{Tuple{IndependentParameterIndex, InfiniteParameterAttr}, Any}(), + Dict{Tuple{DependentParametersIndex, InfiniteParameterAttr}, Any}(), + Dict{Tuple{InfiniteVariableIndex, VariableAttr}, Any}(), + Dict{Tuple{SemiInfiniteVariableIndex, VariableAttr}, Any}(), + Dict{Tuple{PointVariableIndex, VariableAttr}, Any}(), + Dict{Tuple{DerivativeIndex, DerivativeAttr}, Any}(), + Dict{Tuple{MeasureIndex, MeasureAttr}, Any}(), + Dict{Tuple{InfOptConstraintIndex, ConstraintAttr}, Any}(), + Dict{ModelAttr, Any}() + ) + end +end + +""" + AbstractTransformBackend + +An abstract type for transformation interfaces/models that act as a backend for +`InfiniteModel`s. +""" +abstract type AbstractTransformBackend end + +# TODO maybe add more types if needed + +################################################################################ +# BASIC SUPPORT LABELS +################################################################################ +""" + All + +This abstract support label is unique in that it isn't associated with a particular set of +supports, but rather is used used to indicate that all supports should be used. +""" +abstract type All end + +""" + PublicLabel <: All + +An abstract label used to denote that labels that should be given to the user by +default. +""" +abstract type PublicLabel <: All end + +""" + UserDefined <: PublicLabel + +A support label for supports that are supplied by the user directly to an infinite +parameter. +""" +struct UserDefined <: PublicLabel end + +""" + InternalLabel <: All + +An abstract label for support labels that are associated with supports that should +not be reported to the user by default. +""" +abstract type InternalLabel <: All end + ################################################################################ # INFINITE MODEL ################################################################################ +const DefaultSigDigits = 12 + """ InfiniteModel <: JuMP.AbstractModel A `DataType` for storing all of the mathematical modeling information needed to model an optmization problem with an infinite-dimensional decision space. - -**Fields** -- `independent_params::MOIUC.CleverDict{IndependentParameterIndex, ScalarParameterData{IndependentParameter}}`: - The independent parameters and their mapping information. -- `dependent_params::MOIUC.CleverDict{DependentParametersIndex, MultiParameterData}`: - The dependent parameters and their mapping information. -- `finite_params::MOIUC.CleverDict{FiniteParameterIndex, ScalarParameterData{FiniteParameter}}`: - The finite parameters and their mapping information. 
-- `name_to_param::Union{Dict{String, AbstractInfOptIndex}, Nothing}`: - Field to help find a parameter given the name. -- `last_param_num::Int`: The last parameter number to be used. -- `param_object_indices::Vector{Union{IndependentParameterIndex, DependentParametersIndex}}`: - The collection of parameter object indices in creation order. -- `param_functions::MOIUC.CleverDict{ParameterFunctionIndex, ParameterFunctionData{ParameterFunction}}`: - The infinite parameter functions and their mapping information. -- `infinite_vars::MOIUC.CleverDict{InfiniteVariableIndex, <:VariableData{<:InfiniteVariable}}`: - The infinite variables and their mapping information. -- `semi_infinite_vars::MOIUC.CleverDict{SemiInfiniteVariableIndex, <:VariableData{<:SemiInfiniteVariable}}`: - The semi-infinite variables and their mapping information. -- `semi_lookup::Dict{<:Tuple, SemiInfiniteVariableIndex}`: Look-up if a variable already already exists. -- `point_vars::MOIUC.CleverDict{PointVariableIndex, <:VariableData{<:PointVariable}}`: - The point variables and their mapping information. -- `point_lookup::Dict{<:Tuple, PointVariableIndex}`: Look-up if a variable already exists. -- `finite_vars::MOIUC.CleverDict{FiniteVariableIndex, VariableData{JuMP.ScalarVariable{Float64, Float64, Float64, Float64}}}`: - The finite variables and their mapping information. -- `name_to_var::Union{Dict{String, AbstractInfOptIndex}, Nothing}`: - Field to help find a variable given the name. -- `derivatives::MOIUC.CleverDict{DerivativeIndex, <:VariableData{<:Derivative}}`: - The derivatives and their mapping information. -- `deriv_lookup::Dict{<:Tuple, DerivativeIndex}`: Map derivative variable-parameter - pairs to a derivative index to prevent duplicates. -- `measures::MOIUC.CleverDict{MeasureIndex, <:MeasureData}`: - The measures and their mapping information. -- `integral_defaults::Dict{Symbol}`: - The default keyword arguments for [`integral`](@ref). -- `constraints::MOIUC.CleverDict{InfOptConstraintIndex, <:ConstraintData}`: - The constraints and their mapping information. -- `constraint_restrictions::Dict{InfOptConstraintIndex, <:DomainRestrictions}` Map constraints - to their domain restrictions if they have any. -- `name_to_constr::Union{Dict{String, InfOptConstraintIndex}, Nothing}`: - Field to help find a constraint given the name. -- `objective_sense::MOI.OptimizationSense`: Objective sense. -- `objective_function::JuMP.AbstractJuMPScalar`: Finite scalar function. -- `objective_has_measures::Bool`: Does the objective contain measures? -- `registrations::Vector{RegisteredFunction}`: The nonlinear registered functions. -- `Dict{Tuple{Symbol, Int}, Function}`: Map a name and number of arguments to a registered function. -- `obj_dict::Dict{Symbol, Any}`: Store Julia symbols used with `InfiniteModel` -- `optimizer_constructor`: MOI optimizer constructor (e.g., Gurobi.Optimizer). -- `optimizer_model::JuMP.Model`: Model used to solve `InfiniteModel` -- `ready_to_optimize::Bool`: Is the optimizer_model up to date. -- `ext::Dict{Symbol, Any}`: Store arbitrary extension information. 
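+
+Transformation-related information is stored in the `transform_attrs` cache and
+the optional `transform_backend` (see [`TransformAttrCache`](@ref) and
+[`AbstractTransformBackend`](@ref)).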
""" mutable struct InfiniteModel <: JuMP.AbstractModel # Parameter Data @@ -1265,6 +1081,7 @@ mutable struct InfiniteModel <: JuMP.AbstractModel point_lookup::Dict{<:Tuple, PointVariableIndex} finite_vars::MOIUC.CleverDict{FiniteVariableIndex, VariableData{JuMP.ScalarVariable{Float64, Float64, Float64, Float64}}} name_to_var::Union{Dict{String, AbstractInfOptIndex}, Nothing} + significant_digits::Int # Derivative Data derivatives::MOIUC.CleverDict{DerivativeIndex, <:VariableData{<:Derivative}} @@ -1281,7 +1098,7 @@ mutable struct InfiniteModel <: JuMP.AbstractModel # Objective Data objective_sense::MOI.OptimizationSense objective_function::JuMP.AbstractJuMPScalar - objective_has_measures::Bool + objective_has_measures::Bool # TODO maybe remove this? # Function Registration registrations::Vector{Any} @@ -1290,129 +1107,76 @@ mutable struct InfiniteModel <: JuMP.AbstractModel # Objects obj_dict::Dict{Symbol, Any} - # Optimize Data - optimizer_constructor::Any - optimizer_model::JuMP.Model - ready_to_optimize::Bool + # Transformation/Optimize Data + transform_attrs::TransformAttrCache + transform_backend::Union{AbstractTransformBackend, Nothing} + backend_ready::Bool # Extensions ext::Dict{Symbol, Any} end """ - InfiniteModel([optimizer_constructor]; - [OptimizerModel::Function = TranscriptionModel, - add_bridges::Bool = true, optimizer_model_kwargs...]) + InfiniteModel([backend::AbstractTransformBackend]; [sig_digits::Int = DefaultSigDigits, kwargs..]) -Return a new infinite model where an optimizer is specified if an -`optimizer_constructor` is given. The optimizer -can also later be set with the [`JuMP.set_optimizer`](@ref) call. By default -the `optimizer_model` data field is initialized with a -[`TranscriptionModel`](@ref), but a different type of model can be assigned via -[`set_optimizer_model`](@ref) as can be required by extensions. +The core modeling object used to store infinite-dimensional optimization formulations. +Optionally, a transformation backend `backend` can be specified that will altimately +be used to transform and solve a model. Can specify the number of significant digits +that should be used to process supports (e.g., variable points). Certain backends +and/or extension packages might allow the specification of keyword arguments to +provide [`ModelAttr`](@ref)s. **Example** -```jldoctest -julia> using InfiniteOpt, JuMP, Ipopt; - +```julia-repl julia> model = InfiniteModel() -An InfiniteOpt Model -Feasibility problem with: -Finite Parameters: 0 -Infinite Parameters: 0 -Variables: 0 -Measures: 0 -Derivatives: 0 -Optimizer model backend information: -Model mode: AUTOMATIC -CachingOptimizer state: NO_OPTIMIZER -Solver name: No optimizer attached. - -julia> model = InfiniteModel(Ipopt.Optimizer) -An InfiniteOpt Model -Feasibility problem with: -Finite Parameters: 0 -Infinite Parameters: 0 -Variables: 0 -Measures: 0 -Derivatives: 0 -Optimizer model backend information: -Model mode: AUTOMATIC -CachingOptimizer state: EMPTY_OPTIMIZER -Solver name: Ipopt + +TODO finish ``` """ -function InfiniteModel(; - OptimizerModel::Function = TranscriptionModel, - kwargs... 
- )::InfiniteModel - return InfiniteModel(# Parameters - MOIUC.CleverDict{IndependentParameterIndex, ScalarParameterData{<:IndependentParameter}}(), - MOIUC.CleverDict{DependentParametersIndex, MultiParameterData}(), - MOIUC.CleverDict{FiniteParameterIndex, ScalarParameterData{FiniteParameter}}(), - nothing, 0, - Union{IndependentParameterIndex, DependentParametersIndex}[], - MOIUC.CleverDict{ParameterFunctionIndex, ParameterFunctionData{<:ParameterFunction}}(), - # Variables - MOIUC.CleverDict{InfiniteVariableIndex, VariableData{<:InfiniteVariable}}(), - MOIUC.CleverDict{SemiInfiniteVariableIndex, VariableData{SemiInfiniteVariable{GeneralVariableRef}}}(), - Dict{Tuple{GeneralVariableRef, Dict{Int, Float64}}, SemiInfiniteVariableIndex}(), - MOIUC.CleverDict{PointVariableIndex, VariableData{PointVariable{GeneralVariableRef}}}(), - Dict{Tuple{GeneralVariableRef, Vector{Float64}}, PointVariableIndex}(), - MOIUC.CleverDict{FiniteVariableIndex, VariableData{JuMP.ScalarVariable{Float64, Float64, Float64, Float64}}}(), - nothing, - # Derivatives - MOIUC.CleverDict{DerivativeIndex, VariableData{<:Derivative}}(), - Dict{Tuple{GeneralVariableRef, GeneralVariableRef}, DerivativeIndex}(), - # Measures - MOIUC.CleverDict{MeasureIndex, MeasureData{<:Measure}}(), - # Constraints - MOIUC.CleverDict{InfOptConstraintIndex, ConstraintData{<:JuMP.AbstractConstraint}}(), - Dict{InfOptConstraintIndex, DomainRestrictions{GeneralVariableRef}}(), - nothing, - # Objective - MOI.FEASIBILITY_SENSE, - zero(JuMP.GenericAffExpr{Float64, GeneralVariableRef}), - false, - # registration - RegisteredFunction[], - Dict{Tuple{Symbol, Int}, Function}(), - # Object dictionary - Dict{Symbol, Any}(), - # Optimize data - nothing, OptimizerModel(; kwargs...), false, - # Extensions - Dict{Symbol, Any}() - ) -end - -## Set the optimizer_constructor depending on what it is -# MOI.OptimizerWithAttributes -function _set_optimizer_constructor( - model::InfiniteModel, - constructor::MOI.OptimizerWithAttributes - )::Nothing - model.optimizer_constructor = constructor.optimizer_constructor - return -end - -# No attributes -function _set_optimizer_constructor(model::InfiniteModel, constructor)::Nothing - model.optimizer_constructor = constructor - return -end - -# Dispatch for InfiniteModel call with optimizer constructor -function InfiniteModel( - optimizer_constructor; - OptimizerModel::Function = TranscriptionModel, - kwargs... - )::InfiniteModel - model = InfiniteModel() - model.optimizer_model = OptimizerModel(optimizer_constructor; kwargs...) 
- _set_optimizer_constructor(model, optimizer_constructor) - return model -end +function InfiniteModel(; sig_digits::Int = DefaultSigDigits) + return InfiniteModel( + # Parameters + MOIUC.CleverDict{IndependentParameterIndex, ScalarParameterData{<:IndependentParameter}}(), + MOIUC.CleverDict{DependentParametersIndex, MultiParameterData}(), + MOIUC.CleverDict{FiniteParameterIndex, ScalarParameterData{FiniteParameter}}(), + nothing, 0, + Union{IndependentParameterIndex, DependentParametersIndex}[], + MOIUC.CleverDict{ParameterFunctionIndex, ParameterFunctionData{<:ParameterFunction}}(), + # Variables + MOIUC.CleverDict{InfiniteVariableIndex, VariableData{<:InfiniteVariable}}(), + MOIUC.CleverDict{SemiInfiniteVariableIndex, VariableData{SemiInfiniteVariable{GeneralVariableRef}}}(), + Dict{Tuple{GeneralVariableRef, Dict{Int, Float64}}, SemiInfiniteVariableIndex}(), + MOIUC.CleverDict{PointVariableIndex, VariableData{PointVariable{GeneralVariableRef}}}(), + Dict{Tuple{GeneralVariableRef, Vector{Float64}}, PointVariableIndex}(), + MOIUC.CleverDict{FiniteVariableIndex, VariableData{JuMP.ScalarVariable{Float64, Float64, Float64, Float64}}}(), + nothing, sig_digits, + # Derivatives + MOIUC.CleverDict{DerivativeIndex, VariableData{<:Derivative}}(), + Dict{Tuple{GeneralVariableRef, GeneralVariableRef}, DerivativeIndex}(), + # Measures + MOIUC.CleverDict{MeasureIndex, MeasureData{<:Measure}}(), + # Constraints + MOIUC.CleverDict{InfOptConstraintIndex, ConstraintData{<:JuMP.AbstractConstraint}}(), + Dict{InfOptConstraintIndex, DomainRestrictions{GeneralVariableRef}}(), + nothing, + # Objective + MOI.FEASIBILITY_SENSE, + zero(JuMP.GenericAffExpr{Float64, GeneralVariableRef}), + false, + # registration + RegisteredFunction[], + Dict{Tuple{Symbol, Int}, Function}(), + # Object dictionary + Dict{Symbol, Any}(), + # Transform data + TransformAttrCache(), nothing, false, + # Extensions + Dict{Symbol, Any}() + ) +end + +# TODO make constructor with transform method + # Define basic InfiniteModel extension functions Base.broadcastable(model::InfiniteModel) = Ref(model) @@ -1426,16 +1190,14 @@ registered to a specific symbol in the macros. For example, `@variable(model, x[1:2, 1:2])` registers the array of variables `x` to the symbol `:x`. """ -JuMP.object_dictionary(model::InfiniteModel)::Dict{Symbol, Any} = model.obj_dict +JuMP.object_dictionary(model::InfiniteModel) = model.obj_dict """ Base.empty!(model::InfiniteModel)::InfiniteModel -Clear out `model` of everything except the optimizer information and return the -cleared model. +Clear out `model` of everything. 
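+This includes resetting the transformation attribute cache and removing any
+transformation backend; the emptied `model` is returned.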
""" -function Base.empty!(model::InfiniteModel)::InfiniteModel - # Clear everything except the solver information +function Base.empty!(model::InfiniteModel) # parameters empty!(model.independent_params) empty!(model.dependent_params) @@ -1452,6 +1214,7 @@ function Base.empty!(model::InfiniteModel)::InfiniteModel empty!(model.point_lookup) empty!(model.finite_vars) model.name_to_var = nothing + model.significant_digits = DefaultSigDigits # derivatives and measures empty!(model.derivatives) empty!(model.deriv_lookup) @@ -1467,14 +1230,16 @@ function Base.empty!(model::InfiniteModel)::InfiniteModel # other stuff empty!(model.registrations) empty!(model.obj_dict) - empty!(model.optimizer_model) - model.ready_to_optimize = false + empty!(model.func_lookup) + model.transform_attrs = TransformAttrCache() + model.transform_backend = nothing + model.backend_ready = false empty!(model.ext) return model end # Define basic accessors -_last_param_num(model::InfiniteModel)::Int = model.last_param_num +_last_param_num(model::InfiniteModel) = model.last_param_num _param_object_indices(model::InfiniteModel) = model.param_object_indices ################################################################################ diff --git a/src/general_variables.jl b/src/general_variables.jl index 3a13d2056..84384cba9 100644 --- a/src/general_variables.jl +++ b/src/general_variables.jl @@ -553,7 +553,7 @@ function set_supports( pref::GeneralVariableRef, supports::Union{Real, Vector{<:Real}}; force::Bool = false, - label::Type{<:AbstractSupportLabel} = UserDefined + label::Type{<:All} = UserDefined )::Nothing return set_supports(dispatch_variable_ref(pref), supports, force = force, label = label) @@ -573,7 +573,7 @@ dependent infinite parameters. function set_supports( prefs::AbstractArray{<:GeneralVariableRef}, supports::Union{Array{<:Real, 2}, Vector{<:AbstractArray{<:Real}}}; - label::Type{<:AbstractSupportLabel} = UserDefined, + label::Type{<:All} = UserDefined, force::Bool = false )::Nothing return set_supports(dispatch_variable_ref.(prefs), supports, label = label, @@ -598,7 +598,7 @@ function add_supports( pref::GeneralVariableRef, supports::Union{Real, Vector{<:Real}}; check::Bool = true, - label::Type{<:AbstractSupportLabel} = UserDefined + label::Type{<:All} = UserDefined )::Nothing return add_supports(dispatch_variable_ref(pref), supports, check = check, label = label) @@ -617,7 +617,7 @@ dependent infinite parameters. function add_supports( prefs::AbstractArray{<:GeneralVariableRef}, supports::Union{Array{<:Real, 2}, Vector{<:AbstractArray{<:Real}}}; - label::Type{<:AbstractSupportLabel} = UserDefined, + label::Type{<:All} = UserDefined, check::Bool = true )::Nothing return add_supports(dispatch_variable_ref.(prefs), supports, label = label, diff --git a/src/infinite_domains.jl b/src/infinite_domains.jl index 5378b9c21..db3a63b82 100644 --- a/src/infinite_domains.jl +++ b/src/infinite_domains.jl @@ -312,345 +312,3 @@ function JuMP.set_upper_bound(domain::CollectionDomain, lower::Vector{<:Real}):: new_domains = [JuMP.set_upper_bound(domains[i], lower[i]) for i in eachindex(domains)] return CollectionDomain(new_domains) end - -################################################################################ -# SUPPORT AND LABEL GENERATION -################################################################################ -""" - AbstractSupportLabel - -An abstract type for support label types. These are used to distinguish different -kinds of supports that are added to infinite parameters. 
-""" -abstract type AbstractSupportLabel end - -""" - All <: AbstractSupportLabel - -This support label is unique in that it isn't associated with a particular set of -supports, but rather is used used to indicate that all supports should be used. -""" -struct All <: AbstractSupportLabel end - -# Filler label for NoGenerativeSupports -struct _NoLabel <: AbstractSupportLabel end - -""" - PublicLabel <: AbstractSupportLabel - -An abstract type used to denote that labels that should be given to the user by -default. -""" -abstract type PublicLabel <: AbstractSupportLabel end - -""" - UserDefined <: PublicLabel - -A support label for supports that are supplied by the user directly to an infinite -parameter. -""" -struct UserDefined <: PublicLabel end - -""" - UniformGrid <: PublicLabel - -A support label for supports that are generated uniformly accross a given interval. -""" -struct UniformGrid <: PublicLabel end - -""" - SampleLabel <: PublicLabel - -An abstract type for labels of supports that are generated via some sampling technique. -""" -abstract type SampleLabel <: PublicLabel end - -""" - MCSample <: SampleLabel - -A support label for supports that are generated via Monte Carlo Sampling. -""" -struct MCSample <: SampleLabel end - -""" - WeightedSample <: SampleLabel - -A support label for supports that are generated by sampling from a statistical -distribution. -""" -struct WeightedSample <: SampleLabel end - -""" - Mixture <: PublicLabel - -A support label for multi-dimensional supports that are generated from a variety -of methods. -""" -struct Mixture <: PublicLabel end - -""" - UniqueMeasure{S::Symbol} <: PublicLabel - -A support label for supports that are provided from the `DiscreteMeasureData` -associated with a measure where a unique label is generated to distinguish those -supports. This is done by invoking [`generate_unique_label`](@ref). -""" -struct UniqueMeasure{S} <: PublicLabel end - -""" - MeasureBound <: PublicLabel - -A support label for supports that are generated using the upper and lower bounds -for `FunctionalDiscreteMeasureData`. -""" -struct MeasureBound <: PublicLabel end - -""" - InternalLabel <: AbstractSupportLabel - -An abstract type for support labels that are associated with supports that should -not be reported to the user by default. -""" -abstract type InternalLabel <: AbstractSupportLabel end - -""" - generate_unique_label()::Type{UniqueMeasure} - -Generate and return a unique support label for measures. -""" -function generate_unique_label()::DataType - return UniqueMeasure{gensym()} -end - -# Define default values of sig_digits and num_supports keywords -const DefaultSigDigits = 12 -const DefaultNumSupports = 10 - -# a user interface of generate_support_values -""" - generate_supports(domain::AbstractInfiniteDomain - [method::Type{<:AbstractSupportLabel}]; - [num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits] - )::Tuple{Array{<:Real}, DataType} - -Generate `num_supports` support values with `sig_digits` significant digits in -accordance with `domain` and return them along with the correct generation label(s). -`IntervalDomain`s generate supports uniformly with label `UniformGrid` and -distribution domains generate them randomly accordingly to the -underlying distribution. Moreover, `method` indicates the generation method that -should be used. These `methods` correspond to parameter support labels. 
Current -labels that can be used as generation methods include (but may not be defined -for certain domain types): -- [`MCSample`](@ref): Uniformly distributed Monte Carlo samples. -- [`WeightedSample`](@ref): Monte Carlo samples that are weighted by an underlying PDF. -- [`UniformGrid`](@ref): Samples that are generated uniformly over the domain. - -Extensions that employ user-defined infinite domain types and/or methods -should extend [`generate_support_values`](@ref) to enable this. Errors if the -`domain` type and /or methods are unrecognized. This is intended as an internal -method to be used by methods such as [`generate_and_add_supports!`](@ref). -""" -function generate_supports(domain::AbstractInfiniteDomain; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits - )::Tuple - return generate_support_values(domain, num_supports = num_supports, - sig_digits = sig_digits) -end - -# 2 arguments -function generate_supports(domain::AbstractInfiniteDomain, - method::Type{<:AbstractSupportLabel}; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits - )::Tuple - return generate_support_values(domain, method, - num_supports = num_supports, - sig_digits = sig_digits) -end - -""" - generate_support_values(domain::AbstractInfiniteDomain, - [method::Type{MyMethod} = MyMethod]; - [num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits] - )::Tuple{Array{<:Real}, Symbol} - -A multiple dispatch method for [`generate_supports`](@ref). This will return -a tuple where the first element are the supports and the second is their -label. This can be extended for user-defined infinite domains and/or generation -methods. When defining a new domain type the default method dispatch should -make `method` an optional argument (making it the default). Otherwise, other -method dispatches for a given domain must ensure that `method` is positional -argument without a default value (contrary to the definition above). Note that the -`method` must be a subtype of either [`PublicLabel`](@ref) or [`InternalLabel`](@ref). -""" -function generate_support_values(domain::AbstractInfiniteDomain, - args...; kwargs...) - if isempty(args) - error("`generate_support_values` has not been extended for infinite domains " * - "of type `$(typeof(domain))`. This automatic support generation is not " * - "implemented.") - else - error("`generate_support_values` has not been extended for infinite domains " * - "of type `$(typeof(domain))` with the generation method `$(args[1])`. 
" * - "This automatic support generation is not implemented.") - end -end - -# IntervalDomain and UniformGrid -function generate_support_values(domain::IntervalDomain, - method::Type{UniformGrid} = UniformGrid; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits, - )::Tuple{Vector{<:Real}, DataType} - lb = JuMP.lower_bound(domain) - ub = JuMP.upper_bound(domain) - new_supports = round.(range(lb, stop = ub, length = num_supports), - sigdigits = sig_digits) - return new_supports, method -end - -# IntervalDomain and MCSample -function generate_support_values(domain::IntervalDomain, - method::Type{MCSample}; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits, - )::Tuple{Vector{<:Real}, DataType} - lb = JuMP.lower_bound(domain) - ub = JuMP.upper_bound(domain) - dist = Distributions.Uniform(lb, ub) - new_supports = round.(Distributions.rand(dist, num_supports), - sigdigits = sig_digits) - return new_supports, method -end - -# UniDistributionDomain and MultiDistributionDomain (with multivariate only) -function generate_support_values( - domain::Union{UniDistributionDomain, MultiDistributionDomain{<:Distributions.MultivariateDistribution}}, - method::Type{WeightedSample} = WeightedSample; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits - )::Tuple{Array{<:Real}, DataType} - dist = domain.distribution - new_supports = round.(Distributions.rand(dist, num_supports), - sigdigits = sig_digits) - return new_supports, method -end - -# UniDistributionDomain and MCSample -function generate_support_values( - domain::UniDistributionDomain, - method::Type{MCSample}; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits - )::Tuple{Vector{Float64}, DataType} - return generate_support_values(domain, WeightedSample; num_supports = num_supports, - sig_digits = sig_digits)[1], method # TODO use an unwieghted sample... 
-end - -# MultiDistributionDomain (matrix-variate distribution) -function generate_support_values( - domain::MultiDistributionDomain{<:Distributions.MatrixDistribution}, - method::Type{WeightedSample} = WeightedSample; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits - )::Tuple{Array{Float64, 2}, DataType} - dist = domain.distribution - raw_supports = Distributions.rand(dist, num_supports) - new_supports = Array{Float64}(undef, length(dist), num_supports) - for i in 1:size(new_supports, 2) - new_supports[:, i] = round.(reduce(vcat, raw_supports[i]), - sigdigits = sig_digits) - end - return new_supports, method -end - -# Generate the supports for a collection domain -function _generate_collection_supports(domain::CollectionDomain, num_supports::Int, - sig_digits::Int)::Array{Float64, 2} - domains = collection_domains(domain) - # build the support array transpose to fill in column order (leverage locality) - trans_supports = Array{Float64, 2}(undef, num_supports, length(domains)) - for i in eachindex(domains) - @inbounds trans_supports[:, i] = generate_support_values(domains[i], - num_supports = num_supports, - sig_digits = sig_digits)[1] - end - return permutedims(trans_supports) -end - -function _generate_collection_supports(domain::CollectionDomain, - method::Type{<:AbstractSupportLabel}, - num_supports::Int, - sig_digits::Int)::Array{Float64, 2} - domains = collection_domains(domain) - # build the support array transpose to fill in column order (leverage locality) - trans_supports = Array{Float64, 2}(undef, num_supports, length(domains)) - for i in eachindex(domains) - @inbounds trans_supports[:, i] = generate_support_values(domains[i], - method, - num_supports = num_supports, - sig_digits = sig_digits)[1] - end - return permutedims(trans_supports) -end - -# CollectionDomain (IntervalDomains) -function generate_support_values(domain::CollectionDomain{IntervalDomain}, - method::Type{UniformGrid} = UniformGrid; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits - )::Tuple{Array{<:Real}, DataType} - new_supports = _generate_collection_supports(domain, num_supports, sig_digits) - return new_supports, method -end - -function generate_support_values(domain::CollectionDomain{IntervalDomain}, - method::Type{MCSample}; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits - )::Tuple{Array{<:Real}, DataType} - new_supports = _generate_collection_supports(domain, method, num_supports, sig_digits) - return new_supports, method -end - -# CollectionDomain (UniDistributionDomains) -function generate_support_values(domain::CollectionDomain{<:UniDistributionDomain}, - method::Type{WeightedSample} = WeightedSample; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits - )::Tuple{Array{<:Real}, DataType} - new_supports = _generate_collection_supports(domain, num_supports, sig_digits) - return new_supports, method -end - -# CollectionDomain (InfiniteScalarDomains) -function generate_support_values(domain::CollectionDomain, - method::Type{Mixture} = Mixture; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits - )::Tuple{Array{<:Real}, DataType} - new_supports = _generate_collection_supports(domain, num_supports, sig_digits) - return new_supports, method -end - -# CollectionDomain (InfiniteScalarDomains) using purely MC sampling -# this is useful for measure support generation -function generate_support_values(domain::CollectionDomain, - method::Type{MCSample}; - 
num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits - )::Tuple{Array{<:Real}, DataType} - new_supports = _generate_collection_supports(domain, method, num_supports, sig_digits) - return new_supports, method -end - -# For label All: dispatch to default methods -function generate_support_values(domain::AbstractInfiniteDomain, ::Type{All}; - num_supports::Int = DefaultNumSupports, - sig_digits::Int = DefaultSigDigits) - return generate_support_values(domain, num_supports = num_supports, - sig_digits = sig_digits) -end diff --git a/src/objective.jl b/src/objective.jl index 7b21fd0ef..5b1dac149 100644 --- a/src/objective.jl +++ b/src/objective.jl @@ -13,7 +13,7 @@ julia> objective_sense(model) MIN_SENSE::OptimizationSense = 0 ``` """ -function JuMP.objective_sense(model::InfiniteModel)::MOI.OptimizationSense +function JuMP.objective_sense(model::InfiniteModel) return model.objective_sense end @@ -29,7 +29,7 @@ julia> objective_function(model) 1 ``` """ -function JuMP.objective_function(model::InfiniteModel)::JuMP.AbstractJuMPScalar +function JuMP.objective_function(model::InfiniteModel) return model.objective_function end @@ -55,8 +55,7 @@ julia> objective_function_type(model) GenericAffExpr{Float64,GeneralVariableRef} ``` """ -function JuMP.objective_function_type(model::InfiniteModel - )::Type{<:JuMP.AbstractJuMPScalar} +function JuMP.objective_function_type(model::InfiniteModel) return typeof(JuMP.objective_function(model)) end @@ -65,7 +64,7 @@ end Return `Bool` whether the objective function contains any measures. """ -function objective_has_measures(model::InfiniteModel)::Bool +function objective_has_measures(model::InfiniteModel) return model.objective_has_measures end @@ -91,7 +90,7 @@ julia> objective_function(model) function JuMP.set_objective_function( model::InfiniteModel, func::JuMP.AbstractJuMPScalar - )::Nothing + ) # gather the unique list of variable references for testing and mapping new_vrefs = _all_function_variables(func) # test in the model @@ -134,7 +133,7 @@ julia> objective_function(model) 3 ``` """ -function JuMP.set_objective_function(model::InfiniteModel, func::Real)::Nothing +function JuMP.set_objective_function(model::InfiniteModel, func::Real) # delete old mappings old_vrefs = _all_function_variables(JuMP.objective_function(model)) for vref in old_vrefs @@ -164,7 +163,7 @@ MIN_SENSE::OptimizationSense = 0 function JuMP.set_objective_sense( model::InfiniteModel, sense::MOI.OptimizationSense - )::Nothing + ) model.objective_sense = sense set_optimizer_model_ready(model, false) return @@ -190,7 +189,7 @@ function JuMP.set_objective( model::InfiniteModel, sense::MOI.OptimizationSense, func::Union{JuMP.AbstractJuMPScalar, Real} - )::Nothing + ) JuMP.set_objective_sense(model, sense) JuMP.set_objective_function(model, func) return @@ -235,7 +234,7 @@ function JuMP.set_objective_coefficient( model::InfiniteModel, variable::GeneralVariableRef, coeff::Real - )::Nothing + ) new_expr = _set_variable_coefficient!(JuMP.objective_function(model), variable, coeff) JuMP.set_objective_function(model, new_expr) diff --git a/src/optimize.jl b/src/optimize.jl index a44fdc441..a83e5d757 100644 --- a/src/optimize.jl +++ b/src/optimize.jl @@ -62,25 +62,25 @@ julia> optimizer_model_ready(model) false ``` """ -optimizer_model_ready(model::InfiniteModel)::Bool = model.ready_to_optimize +backend_ready(model::InfiniteModel) = model.ready_to_optimize """ - set_optimizer_model_ready(model::InfiniteModel, status::Bool) + set_backend_ready(model::InfiniteModel, 
status::Bool) Set the status of the optimizer model to whether it is up to date or not. Note is more intended as an internal function, but is useful for extensions. **Example** ```julia-repl -julia> set_optimizer_model_ready(model, true) +julia> set_backend_ready(model, true) -julia> optimizer_model_ready(model) +julia> backend_ready(model) true ``` """ -function set_optimizer_model_ready(model::InfiniteModel, status::Bool) - model.ready_to_optimize = status - return +function set_backend_ready(model::InfiniteModel, status::Bool) + model.backend_ready = status + return end """ diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl index 6ba985310..7e95055f0 100644 --- a/src/scalar_parameters.jl +++ b/src/scalar_parameters.jl @@ -2,55 +2,59 @@ # CORE DISPATCHVARIABLEREF METHOD EXTENSIONS ################################################################################ # Extend dispatch_variable_ref -function dispatch_variable_ref(model::InfiniteModel, - index::IndependentParameterIndex - )::IndependentParameterRef +function dispatch_variable_ref( + model::InfiniteModel, + index::IndependentParameterIndex + ) return IndependentParameterRef(model, index) end - -function dispatch_variable_ref(model::InfiniteModel, - index::FiniteParameterIndex - )::FiniteParameterRef +function dispatch_variable_ref( + model::InfiniteModel, + index::FiniteParameterIndex + ) return FiniteParameterRef(model, index) end # Extend _add_data_object -function _add_data_object(model::InfiniteModel, - object::ScalarParameterData{<:IndependentParameter} - )::IndependentParameterIndex +function _add_data_object( + model::InfiniteModel, + object::ScalarParameterData{<:IndependentParameter} + ) index = MOIUC.add_item(model.independent_params, object) push!(model.param_object_indices, index) return index end - -function _add_data_object(model::InfiniteModel, - object::ScalarParameterData{<:FiniteParameter} - )::FiniteParameterIndex +function _add_data_object( + model::InfiniteModel, + object::ScalarParameterData{<:FiniteParameter} + ) return MOIUC.add_item(model.finite_params, object) end # Extend _data_dictionary (type based) -function _data_dictionary(model::InfiniteModel, - ::Type{IndependentParameter})::MOIUC.CleverDict +function _data_dictionary( + model::InfiniteModel, + ::Type{IndependentParameter} + ) return model.independent_params end - -function _data_dictionary(model::InfiniteModel, - ::Type{FiniteParameter})::MOIUC.CleverDict +function _data_dictionary( + model::InfiniteModel, + ::Type{FiniteParameter} + ) return model.finite_params end # Extend _data_dictionary (ref based) -function _data_dictionary(pref::IndependentParameterRef)::MOIUC.CleverDict +function _data_dictionary(pref::IndependentParameterRef) return JuMP.owner_model(pref).independent_params end - -function _data_dictionary(pref::FiniteParameterRef)::MOIUC.CleverDict +function _data_dictionary(pref::FiniteParameterRef) return JuMP.owner_model(pref).finite_params end # Extend _data_object -function _data_object(pref::ScalarParameterRef)::AbstractDataObject +function _data_object(pref::ScalarParameterRef) object = get(_data_dictionary(pref), JuMP.index(pref), nothing) if isnothing(object) error("Invalid scalar parameter reference, cannot find ", @@ -64,7 +68,7 @@ end # CORE OBJECT METHODS ################################################################################ # Extend _core_variable_object for IndependentParameterRefs -function _core_variable_object(pref::IndependentParameterRef)::IndependentParameter +function 
_core_variable_object(pref::IndependentParameterRef) return _data_object(pref).parameter end @@ -99,7 +103,7 @@ function _adaptive_data_update( pref::ScalarParameterRef, param::P, data::ScalarParameterData{P} - )::Nothing where {P <: ScalarParameter} + ) where {P <: ScalarParameter} data.parameter = param return end @@ -109,101 +113,54 @@ function _adaptive_data_update( pref::ScalarParameterRef, param::P1, data::ScalarParameterData{P2} - )::Nothing where {P1, P2} + ) where {P1, P2} new_data = ScalarParameterData(param, data.object_num, data.parameter_num, data.name, data.parameter_func_indices, data.infinite_var_indices, data.derivative_indices, data.measure_indices, - data.constraint_indices, data.in_objective, - data.generative_measures, - data.has_internal_supports, - data.has_generative_supports, - data.has_deriv_constrs) + data.constraint_indices, data.in_objective) _data_dictionary(pref)[JuMP.index(pref)] = new_data return end # Extend _set_core_variable_object for ScalarParameterRefs -function _set_core_variable_object(pref::ScalarParameterRef, - param::ScalarParameter)::Nothing +function _set_core_variable_object( + pref::ScalarParameterRef, + param::ScalarParameter + ) _adaptive_data_update(pref, param, _data_object(pref)) return end ################################################################################ -# PARAMETER DEFINITION +# TRANSFORM ATTRIBUTES ################################################################################ -# Define the default derivative evaluation method -const DefaultDerivativeMethod = FiniteDifference() - -# Check that supports don't violate the domain bounds -function _check_supports_in_bounds(_error::Function, - supports::Union{<:Real, Vector{<:Real}}, - domain::AbstractInfiniteDomain)::Nothing - if !supports_in_domain(supports, domain) - _error("Supports violate the domain bounds.") - end - return -end +# TODO finish +################################################################################ +# PARAMETER DEFINITION +################################################################################ """ build_parameter( _error::Function, domain::InfiniteScalarDomain; - [num_supports::Int = 0, - supports::Union{Real, Vector{<:Real}} = Float64[], - sig_digits::Int = DefaultSigDigits, - derivative_method::AbstractDerivativeMethod = DefaultDerivativeMethod] + [kwargs...] )::IndependentParameter Returns a [`IndependentParameter`](@ref) given the appropriate information. -This is analagous to `JuMP.build_variable`. Errors if supports violate the -bounds associated with `domain`. This is meant to primarily serve as a -helper method for [`@infinite_parameter`](@ref). Here `derivative_method` -specifies the numerical evalution method that will be applied to derivatives that -are taken with respect to this infinite parameter. +This is analagous to `JuMP.build_variable`. This is meant to primarily serve as a +helper method for [`@infinite_parameter`](@ref). **Example** ```julia-repl -julia> param = build_parameter(error, IntervalDomain(0, 3), supports = Vector(0:3)); +julia> param = build_parameter(error, IntervalDomain(0, 3)); ``` """ -function build_parameter( - _error::Function, - domain::InfiniteScalarDomain; - num_supports::Int = 0, - supports::Union{Real, Vector{<:Real}} = Float64[], - sig_digits::Int = DefaultSigDigits, - derivative_method::AbstractDerivativeMethod = DefaultDerivativeMethod, - extra_kwargs... 
- ) - for (kwarg, _) in extra_kwargs - _error("Unrecognized keyword argument $kwarg") - end - label = UserDefined - length_supports = length(supports) - if !isempty(supports) - supports = round.(supports, sigdigits = sig_digits) - _check_supports_in_bounds(_error, supports, domain) - num_supports == 0 || @warn("Ignoring num_supports since supports is not empty.") - elseif num_supports != 0 - supports, label = generate_support_values(domain, num_supports = num_supports, - sig_digits = sig_digits) - end - supports_dict = DataStructures.SortedDict{Float64, Set{DataType}}( - i => Set([label]) for i in supports) - if length_supports != 0 && (length(supports_dict) != length_supports) - @warn("Support points are not unique, eliminating redundant points.") - end - return IndependentParameter(domain, supports_dict, sig_digits, derivative_method, - generative_support_info(derivative_method)) +function build_parameter(_error::Function, domain::InfiniteScalarDomain) + return IndependentParameter(domain) end # Fallback for bad domain types -function build_parameter( - _error::Function, - domain::AbstractInfiniteDomain, - kwargs... - ) +function build_parameter(_error::Function, domain::AbstractInfiniteDomain) _error("Expected scalar infinite domain for each independent parameter, ", "but got a domain of type `$(domain)`. If you are trying to use an ", "`InfiniteArrayDomain`, try setting `independent = false`.") @@ -222,14 +179,7 @@ julia> build_finite_parameter(error, 1) FiniteParameter(1.0) ``` """ -function build_parameter( - _error::Function, - value::Real; - extra_kwargs... - )::FiniteParameter - for (kwarg, _) in extra_kwargs - _error("Unrecognized keyword argument $kwarg") - end +function build_parameter(_error::Function, value::Real) return FiniteParameter(value) end @@ -261,7 +211,7 @@ function add_parameter( model::InfiniteModel, p::IndependentParameter, name::String = "" - )::GeneralVariableRef + ) # TODO add attribute kwargs obj_num = length(_param_object_indices(model)) + 1 param_num = model.last_param_num += 1 data_object = ScalarParameterData(p, obj_num, param_num, name) @@ -293,7 +243,7 @@ function add_parameter( model::InfiniteModel, p::FiniteParameter, name::String = "" - )::GeneralVariableRef + ) # TODO add attribute kwargs data_object = ScalarParameterData(p, -1, -1, name) obj_index = _add_data_object(model, data_object) model.name_to_param = nothing @@ -333,12 +283,6 @@ function _constraint_dependencies(pref::ScalarParameterRef return _data_object(pref).constraint_indices end -# Extend _generative_measures -function _generative_measures(pref::ScalarParameterRef - )::Vector{MeasureIndex} - return _data_object(pref).generative_measures -end - ################################################################################ # USED_BY FUNCTIONS ################################################################################ @@ -494,35 +438,31 @@ julia> name(t) "time" ``` """ -function JuMP.set_name(pref::ScalarParameterRef, name::String)::Nothing +function JuMP.set_name(pref::ScalarParameterRef, name::String) _data_object(pref).name = name JuMP.owner_model(pref).name_to_param = nothing return end # Make a parameter reference -function _make_parameter_ref(model::InfiniteModel, - index::AbstractInfOptIndex)::GeneralVariableRef +function _make_parameter_ref(model::InfiniteModel, index::AbstractInfOptIndex) return GeneralVariableRef(model, MOIUC.key_to_index(index), typeof(index)) end - -function _make_parameter_ref(model::InfiniteModel, - index::DependentParameterIndex - 
)::GeneralVariableRef +function _make_parameter_ref(model::InfiniteModel, index::DependentParameterIndex) return GeneralVariableRef(model, MOIUC.key_to_index(index.object_index), typeof(index), index.param_index) end # Get the name_to_param Dictionary -function _param_name_dict(model::InfiniteModel - )::Union{Dict{String, AbstractInfOptIndex}, Nothing} +function _param_name_dict(model::InfiniteModel) return model.name_to_param end # Update name_to_param -function _update_param_name_dict(model::InfiniteModel, +function _update_param_name_dict( + model::InfiniteModel, param_dict::MOIUC.CleverDict{K, V} - )::Nothing where {K, V <: ScalarParameterData} + ) where {K, V <: ScalarParameterData} name_dict = _param_name_dict(model) for (index, data_object) in param_dict param_name = data_object.name @@ -537,10 +477,9 @@ function _update_param_name_dict(model::InfiniteModel, model.name_to_param = name_dict return end - function _update_param_name_dict(model::InfiniteModel, param_dict::MOIUC.CleverDict{K, V} - )::Nothing where {K, V <: MultiParameterData} + ) where {K, V <: MultiParameterData} name_dict = _param_name_dict(model) for (index, data_object) in param_dict param_nums = data_object.parameter_nums @@ -573,8 +512,7 @@ julia> parameter_by_name(model, "t") t ``` """ -function parameter_by_name(model::InfiniteModel, - name::String)::Union{GeneralVariableRef, Nothing} +function parameter_by_name(model::InfiniteModel, name::String) if isnothing(_param_name_dict(model)) model.name_to_param = Dict{String, AbstractInfOptIndex}() _update_param_name_dict(model, model.independent_params) @@ -592,290 +530,21 @@ function parameter_by_name(model::InfiniteModel, end ################################################################################ -# GENERATIVE SUPPORT FUNCTIONS -################################################################################ -# Extend copy for NoGenerativeSupports -function Base.copy(d::NoGenerativeSupports)::NoGenerativeSupports - return NoGenerativeSupports() -end - -# Extend copy for UniformGenerativeInfo -function Base.copy(d::UniformGenerativeInfo)::UniformGenerativeInfo - return UniformGenerativeInfo(copy(d.support_basis), d.label) -end - -""" - support_label(info::AbstractGenerativeInfo)::DataType - -Return the support label to be associated with generative supports produced in -accordance with `info`. This is intended an internal method that should be -extended for user defined types of [`AbstractGenerativeInfo`](@ref). -""" -function support_label(info::AbstractGenerativeInfo) - error("`support_label` not defined for generative support info type " * - "$(typeof(info)).") -end - -# UniformGenerativeInfo -function support_label(info::UniformGenerativeInfo)::DataType - return info.label -end - -# NoGenerativeSupports -function support_label(info::NoGenerativeSupports)::DataType - return _NoLabel -end - -""" - generative_support_info(pref::IndependentParameterRef)::AbstractGenerativeInfo - -Return the generative support information associated with `pref`. -""" -function generative_support_info(pref::IndependentParameterRef)::AbstractGenerativeInfo - return _core_variable_object(pref).generative_supp_info -end - -""" - has_generative_supports(pref::IndependentParameterRef)::Bool - -Return whether generative supports have been added to `pref` in accordance -with its generative support info. 
-""" -function has_generative_supports(pref::IndependentParameterRef)::Bool - return _data_object(pref).has_generative_supports -end - -# Specify if a parameter has generative supports -function _set_has_generative_supports(pref::IndependentParameterRef, - status::Bool)::Nothing - _data_object(pref).has_generative_supports = status - return -end - -# Reset (remove) the generative supports if needed -function _reset_generative_supports(pref::IndependentParameterRef)::Nothing - if has_generative_supports(pref) - label = support_label(generative_support_info(pref)) - delete_supports(pref, label = label) # this also calls _set_has_generative_supports - end - return -end - -# Specify the generative_support_info -function _set_generative_support_info(pref::IndependentParameterRef, - info::AbstractGenerativeInfo)::Nothing - sig_digits = significant_digits(pref) - method = derivative_method(pref) - domain = _parameter_domain(pref) - supps = _parameter_supports(pref) - new_param = IndependentParameter(domain, supps, sig_digits, method, info) - _reset_generative_supports(pref) - _set_core_variable_object(pref, new_param) - if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) - end - return -end - -""" - make_generative_supports(info::AbstractGenerativeInfo, - pref::IndependentParameterRef, - existing_supps::Vector{Float64} - )::Vector{Float64} - -Generate the generative supports for `pref` in accordance with `info` and the -`existing_supps` that `pref` has. The returned supports should not include -`existing_supps`. This is intended as internal method to enable -[`add_generative_supports`](@ref) and should be extended for any user defined -`info` types that are created to enable new measure and/or derivative evaluation -techniques that require the creation of generative supports. -""" -function make_generative_supports(info::AbstractGenerativeInfo, pref, supps) - error("`make_generative_supports` is not defined for generative support " * - "info of type $(typeof(info)).") -end - -# UniformGenerativeInfo -function make_generative_supports(info::UniformGenerativeInfo, - pref, supps)::Vector{Float64} - # collect the preliminaries - basis = info.support_basis - num_internal = length(basis) - num_existing = length(supps) - num_existing <= 1 && error("$(pref) does not have enough supports for " * - "creating generative supports.") - internal_nodes = Vector{Float64}(undef, num_internal * (num_existing - 1)) - # generate the internal node supports - for i in Iterators.take(eachindex(supps), num_existing - 1) - lb = supps[i] - ub = supps[i+1] - internal_nodes[(i-1)*num_internal+1:i*num_internal] = basis * (ub - lb) .+ lb - end - return internal_nodes -end - -## Define internal dispatch methods for adding generative supports -# AbstractGenerativeInfo -function _add_generative_supports(pref, info::AbstractGenerativeInfo)::Nothing - if !has_generative_supports(pref) - existing_supps = supports(pref, label = All) - supps = make_generative_supports(info, pref, existing_supps) - add_supports(pref, supps, label = support_label(info)) - _set_has_generative_supports(pref, true) - end - return -end - -# NoGenerativeSupports -function _add_generative_supports(pref, info::NoGenerativeSupports)::Nothing - return -end - -""" - add_generative_supports(pref::IndependentParameterRef)::Nothing - -Create generative supports for `pref` if needed in accordance with its -generative support info using [`make_generative_supports`](@ref) and add them to -`pref`. 
This is intended as an internal function, but can be useful user defined -optimizer model extensions that utlize our support system. -""" -function add_generative_supports(pref::IndependentParameterRef)::Nothing - info = generative_support_info(pref) - _add_generative_supports(pref, info) - return -end - -################################################################################ -# DERIVATIVE METHOD FUNCTIONS -################################################################################ -# Determine if any derivatives have derivative constraints -function has_derivative_constraints(pref::IndependentParameterRef)::Bool - return _data_object(pref).has_deriv_constrs -end - -# Make update function for whether it has derivative supports -function _set_has_derivative_constraints(pref::IndependentParameterRef, - status::Bool)::Nothing - _data_object(pref).has_deriv_constrs = status - return -end - -""" - derivative_method(pref::IndependentParameterRef)::AbstractDerivativeMethod - -Returns the numerical derivative evaluation method employed with `pref` when it -is used as an operator parameter in a derivative. - -**Example** -```julia-repl -julia> derivative_method(pref) -FiniteDifference(Backward, true) -``` -""" -function derivative_method(pref::IndependentParameterRef)::AbstractDerivativeMethod - return _core_variable_object(pref).derivative_method -end - -# Make method to reset derivative constraints (supports are handled separately) -function _reset_derivative_constraints(pref::Union{IndependentParameterRef, - DependentParameterRef})::Nothing - if has_derivative_constraints(pref) - @warn("Support/method changes will invalidate existing derivative evaluation " * - "constraints that have been added to the InfiniteModel. Thus, " * - "these are being deleted.") - for idx in _derivative_dependencies(pref) - delete_derivative_constraints(DerivativeRef(JuMP.owner_model(pref), idx)) - end - _set_has_derivative_constraints(pref, false) - end - return -end - -""" - set_derivative_method(pref::IndependentParameterRef, - method::AbstractDerivativeMethod)::Nothing - -Specfies the desired derivative evaluation method `method` for derivatives that are -taken with respect to `pref`. Any internal supports exclusively associated with -the previous method will be deleted. Also, if any derivatives were evaluated -manually, the associated derivative evaluation constraints will be deleted. Errors -if new derivative method generates supports that are incompatible with existing -measures. 
- -**Example** -```julia-repl -julia> set_derivative_method(d, OrthogonalCollocation(2)) - -``` -""" -function set_derivative_method(pref::IndependentParameterRef, - method::NonGenerativeDerivativeMethod - )::Nothing - old_param = _core_variable_object(pref) - domain = _parameter_domain(pref) - supps = _parameter_supports(pref) - sig_figs = significant_digits(pref) - if isempty(_generative_measures(pref)) - _reset_generative_supports(pref) - new_param = IndependentParameter(domain, supps, sig_figs, method, - NoGenerativeSupports()) - else - info = generative_support_info(pref) - new_param = IndependentParameter(domain, supps, sig_figs, method, info) - end - _reset_derivative_constraints(pref) - _set_core_variable_object(pref, new_param) - if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) - end - return -end - -# GenerativeDerivativeMethod -function set_derivative_method(pref::IndependentParameterRef, - method::GenerativeDerivativeMethod - )::Nothing - new_info = generative_support_info(method) - old_info = generative_support_info(pref) - if !isempty(_generative_measures(pref)) && new_info != old_info - error("Generative derivative method conflicts with existing generative " * - "measures.") - end - old_param = _core_variable_object(pref) - domain = _parameter_domain(pref) - supps = _parameter_supports(pref) - sig_figs = significant_digits(pref) - new_param = IndependentParameter(domain, supps, sig_figs, method, new_info) - _reset_derivative_constraints(pref) - _reset_generative_supports(pref) - _set_core_variable_object(pref, new_param) - if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) - end - return -end - -################################################################################ -# SET FUNCTIONS +# DOMAIN FUNCTIONS ################################################################################ # Internal functions -function _parameter_domain(pref::IndependentParameterRef)::InfiniteScalarDomain +function _parameter_domain(pref::IndependentParameterRef) return _core_variable_object(pref).domain end -function _update_parameter_domain(pref::IndependentParameterRef, - domain::AbstractInfiniteDomain)::Nothing - # old supports will always be discarded - sig_digits = significant_digits(pref) - method = derivative_method(pref) - info = generative_support_info(pref) - new_param = IndependentParameter(domain, DataStructures.SortedDict{Float64, Set{DataType}}(), - sig_digits, method, info) +function _update_parameter_domain( + pref::IndependentParameterRef, + domain::AbstractInfiniteDomain + ) + new_param = IndependentParameter(domain) _set_core_variable_object(pref, new_param) - _reset_derivative_constraints(pref) - _set_has_generative_supports(pref, false) - _set_has_internal_supports(pref, false) + # TODO do something about the supports and other attributes if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_backend_ready(JuMP.owner_model(pref), false) end return end @@ -891,7 +560,7 @@ julia> infinite_domain(t) [0, 1] ``` """ -function infinite_domain(pref::IndependentParameterRef)::InfiniteScalarDomain +function infinite_domain(pref::IndependentParameterRef) return _parameter_domain(pref) end @@ -910,8 +579,10 @@ julia> infinite_domain(t) [0, 2] ``` """ -function set_infinite_domain(pref::IndependentParameterRef, - domain::InfiniteScalarDomain)::Nothing +function set_infinite_domain( + pref::IndependentParameterRef, + domain::InfiniteScalarDomain + ) if used_by_measure(pref) error("$pref is used by a 
measure so changing its " * "infinite domain is not allowed.") @@ -937,7 +608,7 @@ julia> has_lower_bound(t) true ``` """ -function JuMP.has_lower_bound(pref::IndependentParameterRef)::Bool +function JuMP.has_lower_bound(pref::IndependentParameterRef) domain = _parameter_domain(pref) return JuMP.has_lower_bound(domain) end @@ -955,7 +626,7 @@ julia> lower_bound(t) 0.0 ``` """ -function JuMP.lower_bound(pref::IndependentParameterRef)::Real +function JuMP.lower_bound(pref::IndependentParameterRef) domain = _parameter_domain(pref) if !JuMP.has_lower_bound(pref) error("Parameter $(pref) does not have a lower bound.") @@ -979,7 +650,7 @@ julia> lower_bound(t) -1.0 ``` """ -function JuMP.set_lower_bound(pref::IndependentParameterRef, lower::Real)::Nothing +function JuMP.set_lower_bound(pref::IndependentParameterRef, lower::Real) domain = _parameter_domain(pref) new_domain = JuMP.set_lower_bound(domain, lower) _update_parameter_domain(pref, new_domain) @@ -1000,7 +671,7 @@ julia> has_upper_bound(t) true ``` """ -function JuMP.has_upper_bound(pref::IndependentParameterRef)::Bool +function JuMP.has_upper_bound(pref::IndependentParameterRef) domain = _parameter_domain(pref) return JuMP.has_upper_bound(domain) end @@ -1020,7 +691,7 @@ julia> upper_bound(t) 1.0 ``` """ -function JuMP.upper_bound(pref::IndependentParameterRef)::Real +function JuMP.upper_bound(pref::IndependentParameterRef) domain = _parameter_domain(pref) if !JuMP.has_upper_bound(pref) error("Parameter $(pref) does not have a upper bound.") @@ -1044,332 +715,13 @@ julia> upper_bound(t) 2.0 ``` """ -function JuMP.set_upper_bound(pref::IndependentParameterRef, upper::Real)::Nothing +function JuMP.set_upper_bound(pref::IndependentParameterRef, upper::Real) domain = _parameter_domain(pref) new_domain = JuMP.set_upper_bound(domain, upper) _update_parameter_domain(pref, new_domain) return end -################################################################################ -# SUPPORT FUNCTIONS -################################################################################ -# Internal functions -function _parameter_supports(pref::IndependentParameterRef) - return _core_variable_object(pref).supports -end -function _parameter_support_values(pref::IndependentParameterRef)::Vector{Float64} - return collect(keys(_parameter_supports(pref))) -end -function _update_parameter_supports(pref::IndependentParameterRef, - supports::DataStructures.SortedDict{Float64, Set{DataType}})::Nothing - domain = _parameter_domain(pref) - method = derivative_method(pref) - sig_figs = significant_digits(pref) - info = generative_support_info(pref) - new_param = IndependentParameter(domain, supports, sig_figs, method, info) - _set_core_variable_object(pref, new_param) - _reset_derivative_constraints(pref) - _set_has_generative_supports(pref, false) - if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) - end - return -end - -""" - has_internal_supports(pref::Union{IndependentParameterRef, DependentParameterRef})::Bool - -Indicate if `pref` has internal supports that will be hidden from the user by -default. 
-""" -function has_internal_supports( - pref::Union{IndependentParameterRef, DependentParameterRef} - )::Bool - return _data_object(pref).has_internal_supports -end - -# update has internal supports -function _set_has_internal_supports( - pref::Union{IndependentParameterRef, DependentParameterRef}, - status::Bool - )::Nothing - _data_object(pref).has_internal_supports = status - return -end - -""" - significant_digits(pref::IndependentParameterRef)::Int - -Return the number of significant digits enforced on the supports of `pref`. - -**Example** -```julia-repl -julia> significant_digits(t) -12 -``` -""" -function significant_digits(pref::IndependentParameterRef)::Int - return _core_variable_object(pref).sig_digits -end - -""" - num_supports(pref::IndependentParameterRef; - [label::Type{<:AbstractSupportLabel} = PublicLabel])::Int - -Return the number of support points associated with `pref`. By default, only the -number of public supports are counted. The full amount can be determined by setting -`label = All`. Moreover, the amount of labels that satisfy `label` is obtained -using an [`AbstractSupportLabel`](@ref). - -**Example** -```julia-repl -julia> num_supports(t) -2 -``` -""" -function num_supports(pref::IndependentParameterRef; - label::Type{<:AbstractSupportLabel} = PublicLabel)::Int - supports_dict = _parameter_supports(pref) - if label == All || (!has_internal_supports(pref) && label == PublicLabel) - return length(supports_dict) - else - return count(p -> any(v -> v <: label, p[2]), supports_dict) - end -end - -""" - has_supports(pref::IndependentParameterRef)::Bool - -Return true if `pref` has supports or false otherwise. - -**Example** -```julia-repl -julia> has_supports(t) -true -``` -""" -has_supports(pref::IndependentParameterRef)::Bool = !isempty(_parameter_supports(pref)) - -""" - supports(pref::IndependentParameterRef; - [label::Type{<:AbstractSupportLabel} = PublicLabel])::Vector{Float64} - -Return the support points associated with `pref`. Errors if there are no -supports. Users can query just support points generated by a certain method -using the keyword argument `label`. By default, the function returns all public -support points regardless of the associated label. The full collection is given by setting -`label = All`. Moreover, the amount of labels that satisfy `label` is obtained -using an [`AbstractSupportLabel`](@ref). 
- -**Example** -```julia-repl -julia> supports(t) -2-element Array{Float64,1}: - 0.0 - 1.0 -``` -""" -function supports(pref::IndependentParameterRef; - label::Type{<:AbstractSupportLabel} = PublicLabel)::Vector{Float64} - if label == All || (!has_internal_supports(pref) && label == PublicLabel) - return _parameter_support_values(pref) - else - return findall(x -> any(v -> v <: label, x), _parameter_supports(pref)) - end -end - -# Return a matrix os supports when given a vector of IndependentParameterRefs (for measures) -function supports(prefs::Vector{IndependentParameterRef}; - label::Type{<:AbstractSupportLabel} = PublicLabel, - use_combinatorics::Bool = true)::Matrix{Float64} - # generate the support matrix considering all the unique combinations - if use_combinatorics - supp_list = Tuple(supports(p, label = label) for p in prefs) - inds = CartesianIndices(ntuple(i -> 1:length(supp_list[i]), length(prefs))) - supps = Matrix{Float64}(undef, length(prefs), length(inds)) - for (k, idx) in enumerate(inds) - supps[:, k] = [supp_list[i][j] for (i, j) in enumerate(idx.I)] - end - return supps - # generate the support matrix while negating the unique combinations - else - num_supps = num_supports(first(prefs), label = label) - trans_supps = Matrix{Float64}(undef, num_supps, length(prefs)) - for i in eachindex(prefs) - supp = supports(prefs[i], label = label) - if length(supp) != num_supps - error("Cannot simultaneously query the supports of multiple " * - "independent parameters if the support dimensions do not match " * - "while ignoring the combinatorics. Try setting `use_combinatorics = true`.") - else - @inbounds trans_supps[:, i] = supp - end - end - return permutedims(trans_supps) - end -end - -""" - set_supports(pref::IndependentParameterRef, supports::Vector{<:Real}; - [force::Bool = false, - label::Type{<:AbstractSupportLabel} = UserDefined] - )::Nothing - -Specify the support points for `pref`. Errors if the supports violate the bounds -associated with the infinite domain. Warns if the points are not unique. If `force` -this will overwrite exisiting supports otherwise it will error if there are -existing supports. - -**Example** -```julia-repl -julia> set_supports(t, [0, 1]) - -julia> supports(t) -2-element Array{Int64,1}: - 0 - 1 -``` -""" -function set_supports(pref::IndependentParameterRef, supports::Vector{<:Real}; - force::Bool = false, - label::Type{<:AbstractSupportLabel} = UserDefined - )::Nothing - if has_supports(pref) && !force - error("Unable set supports for $pref since it already has supports." * - " Consider using `add_supports` or use `force = true` to " * - "overwrite the existing supports.") - end - domain = _parameter_domain(pref) - supports = round.(supports, sigdigits = significant_digits(pref)) - _check_supports_in_bounds(error, supports, domain) - supports_dict = DataStructures.SortedDict{Float64, Set{DataType}}( - i => Set([label]) for i in supports) - if length(supports_dict) != length(supports) - @warn("Support points are not unique, eliminating redundant points.") - end - _update_parameter_supports(pref, supports_dict) - _set_has_internal_supports(pref, label <: InternalLabel) - return -end - -""" - add_supports(pref::IndependentParameterRef, - supports::Union{Real, Vector{<:Real}}; - [label::Type{<:AbstractSupportLabel} = UserDefined])::Nothing - -Add additional support points for `pref` with identifying label `label`. 
- -**Example** -```julia-repl -julia> add_supports(t, 0.5) - -julia> supports(t) -3-element Array{Float64,1}: - 0.0 - 0.5 - 1.0 - -julia> add_supports(t, [0.25, 1]) - -julia> supports(t) -4-element Array{Float64,1}: - 0.0 - 0.25 - 0.5 - 1.0 -``` -""" -function add_supports(pref::IndependentParameterRef, - supports::Union{Real, Vector{<:Real}}; - label::Type{<:AbstractSupportLabel} = UserDefined, - check::Bool = true)::Nothing - domain = infinite_domain(pref) - supports = round.(supports, sigdigits = significant_digits(pref)) - check && _check_supports_in_bounds(error, supports, domain) - supports_dict = _parameter_supports(pref) - added_new_support = false - for s in supports - if haskey(supports_dict, s) - push!(supports_dict[s], label) - else - supports_dict[s] = Set([label]) - added_new_support = true - end - end - if label <: InternalLabel - _set_has_internal_supports(pref, true) - end - if added_new_support - _reset_derivative_constraints(pref) - _reset_generative_supports(pref) - if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) - end - end - return -end - -""" - delete_supports(pref::IndependentParameterRef; - [label::Type{<:AbstractSupportLabel} = All])::Nothing - -Delete the support points for `pref`. If `label != All` then delete `label` and -any supports that solely depend on it. - -**Example** -```julia-repl -julia> delete_supports(t) - -julia> supports(t) -ERROR: Parameter t does not have supports. -``` -""" -function delete_supports(pref::IndependentParameterRef; - label::Type{<:AbstractSupportLabel} = All)::Nothing - supp_dict = _parameter_supports(pref) - if has_derivative_constraints(pref) - @warn("Deleting supports invalidated derivative evaluations. Thus, these " * - "are being deleted as well.") - for idx in _derivative_dependencies(pref) - delete_derivative_constraints(DerivativeRef(JuMP.owner_model(pref), idx)) - end - _set_has_derivative_constraints(pref, false) - end - if label == All - if used_by_measure(pref) - error("Cannot delete the supports of $pref since it is used by " * - "a measure.") - end - empty!(supp_dict) - _set_has_generative_supports(pref, false) - _set_has_internal_supports(pref, false) - else - if has_generative_supports(pref) && support_label(generative_support_info(pref)) != label - label = Union{label, support_label(generative_support_info(pref))} - end - _set_has_generative_supports(pref, false) - filter!(p -> !all(v -> v <: label, p[2]), supp_dict) - for (k, v) in supp_dict - filter!(l -> !(l <: label), v) - end - if has_internal_supports(pref) && num_supports(pref, label = InternalLabel) == 0 - _set_has_internal_supports(pref, false) - end - end - if is_used(pref) - set_optimizer_model_ready(JuMP.owner_model(pref), false) - end - return -end - -# Make dispatch for an array of parameters -function delete_supports(prefs::AbstractArray{<:IndependentParameterRef}; - label::Type{<:AbstractSupportLabel} = All)::Nothing - delete_supports.(prefs, label = label) - return -end - """ parameter_value(pref::FiniteParameterRef)::Float64 @@ -1382,7 +734,7 @@ julia> value(cost) 42.0 ``` """ -function parameter_value(pref::FiniteParameterRef)::Real +function parameter_value(pref::FiniteParameterRef) return _core_variable_object(pref).value end @@ -1400,105 +752,20 @@ julia> value(cost) 27.0 ``` """ -function JuMP.set_value(pref::FiniteParameterRef, value::Real)::Nothing +function JuMP.set_value(pref::FiniteParameterRef, value::Real) _data_object(pref).parameter = FiniteParameter(value) if is_used(pref) - 
set_optimizer_model_ready(JuMP.owner_model(pref), false) + set_backend_ready(JuMP.owner_model(pref), false) end return end -""" - fill_in_supports!(pref::IndependentParameterRef; - [num_supports::Int = DefaultNumSupports])::Nothing - -Automatically generate support points for a particular independent parameter `pref`. -Generating `num_supports` for the parameter. The supports are generated uniformly -if the underlying infinite domain is an `IntervalDomain` or they are generating randomly -accordingly to the distribution if the domain is a `UniDistributionDomain`. -Will add nothing if there are supports -and `modify = false`. Extensions that use user defined domain types should extend -[`generate_and_add_supports!`](@ref) and/or [`generate_support_values`](@ref) -as needed. Errors if the infinite domain type is not recognized. - -**Example** -```julia-repl -julia> fill_in_supports!(x, num_supports = 4) - -julia> supports(x) -4-element Array{Number,1}: - 0.0 - 0.333 - 0.667 - 1.0 - -``` -""" -function fill_in_supports!(pref::IndependentParameterRef; - num_supports::Int = DefaultNumSupports, - modify::Bool = true)::Nothing - domain = infinite_domain(pref) - current_amount = length(_parameter_supports(pref)) - if (modify || current_amount == 0) && current_amount < num_supports - generate_and_add_supports!(pref, domain, - num_supports = num_supports - current_amount, - adding_extra = (current_amount > 0)) - end - return -end - -""" - generate_and_add_supports!(pref::IndependentParameterRef, - domain::AbstractInfiniteDomain, - [method::Type{<:AbstractSupportLabel}]; - [num_supports::Int = DefaultNumSupports])::Nothing - -Generate supports for independent parameter `pref` via [`generate_support_values`](@ref) -and add them to `pref`. This is intended as an extendable internal method for -[`fill_in_supports!`](@ref fill_in_supports!(::IndependentParameterRef)). -Most extensions that empoy user-defined infinite domains can typically enable this -by extending [`generate_support_values`](@ref). Errors if the infinite domain type -is not recognized. 
-""" -function generate_and_add_supports!(pref::IndependentParameterRef, - domain::AbstractInfiniteDomain; - num_supports::Int = DefaultNumSupports, - adding_extra::Bool = false)::Nothing - sig_digits = significant_digits(pref) - if isa(domain, IntervalDomain) && adding_extra - supports, label = generate_support_values(domain, MCSample, - num_supports = num_supports, - sig_digits = sig_digits) - else - supports, label = generate_supports(domain, - num_supports = num_supports, - sig_digits = sig_digits) - end - add_supports(pref, supports, label = label) - return -end - -# Dispatch with method -function generate_and_add_supports!(pref::IndependentParameterRef, - domain::AbstractInfiniteDomain, - method::Type{<:AbstractSupportLabel}; - num_supports::Int = DefaultNumSupports, - adding_extra::Bool = false)::Nothing - sig_digits = significant_digits(pref) - supports, label = generate_supports(domain, method, - num_supports = num_supports, - sig_digits = sig_digits) - add_supports(pref, supports, label = label) - return -end - ################################################################################ # DELETE FUNCTIONS ################################################################################ # Check if parameter is used by measure data and error if it is to prevent bad # deleting behavior -function _check_param_in_data(pref::GeneralVariableRef, - data::AbstractMeasureData)::Nothing +function _check_param_in_data(pref::GeneralVariableRef, data::AbstractMeasureData) prefs = parameter_refs(data) if isequal(pref, prefs) || any(isequal(pref), prefs) error("Unable to delete `$pref` since it is used to evaluate measures.") @@ -1507,8 +774,7 @@ function _check_param_in_data(pref::GeneralVariableRef, end # Update the dependent measures -function _update_measures(model::InfiniteModel, - pref::GeneralVariableRef)::Nothing +function _update_measures(model::InfiniteModel, pref::GeneralVariableRef) for mindex in _measure_dependencies(pref) mref = dispatch_variable_ref(model, mindex) func = measure_function(mref) @@ -1525,8 +791,7 @@ function _update_measures(model::InfiniteModel, end # Update the dependent constraints -function _update_constraints(model::InfiniteModel, - pref::GeneralVariableRef)::Nothing +function _update_constraints(model::InfiniteModel, pref::GeneralVariableRef) for cindex in copy(_constraint_dependencies(pref)) cref = _make_constraint_ref(model, cindex) func = JuMP.jump_function(JuMP.constraint_object(cref)) @@ -1546,7 +811,7 @@ function _update_constraints(model::InfiniteModel, end # Remove given object/parameter number and update the list -function _update_number_list(nums::Vector{Int}, list::Vector{Int})::Nothing +function _update_number_list(nums::Vector{Int}, list::Vector{Int}) filter!(e -> !(e in nums), list) max_num = maximum(nums) for i in eachindex(list) @@ -1558,8 +823,11 @@ function _update_number_list(nums::Vector{Int}, list::Vector{Int})::Nothing end # Update the model with the removed parameter/object numbers -function _update_model_numbers(model::InfiniteModel, obj_num::Int, - param_nums::Vector{Int})::Nothing +function _update_model_numbers( + model::InfiniteModel, + obj_num::Int, + param_nums::Vector{Int} + ) # update the independent parameters for (_, object) in _data_dictionary(model, IndependentParameter) if object.object_num > obj_num @@ -1626,7 +894,7 @@ julia> delete(model, x) function JuMP.delete( model::InfiniteModel, pref::IndependentParameterRef - )::Nothing + ) @assert JuMP.is_valid(model, pref) "Parameter reference is invalid." 
gvref = _make_parameter_ref(JuMP.owner_model(pref), JuMP.index(pref)) # ensure deletion is okay (pref isn't used by measure data) @@ -1644,9 +912,9 @@ function JuMP.delete( error("Cannot delete `$pref` since it is used by an parameter ", "function(s).") end - # update optimizer model status + # update backend status if is_used(pref) - set_optimizer_model_ready(model, false) + set_backend_ready(model, false) end # delete dependence of measures on pref _update_measures(model, gvref) @@ -1668,9 +936,9 @@ end # FiniteParameterRef function JuMP.delete(model::InfiniteModel, pref::FiniteParameterRef)::Nothing @assert JuMP.is_valid(model, pref) "Parameter reference is invalid." - # update optimizer model status + # update backend status if is_used(pref) - set_optimizer_model_ready(model, false) + set_backend_ready(model, false) end gvref = _make_parameter_ref(model, JuMP.index(pref)) # delete dependence of measures on pref diff --git a/src/show.jl b/src/show.jl index 42d31d029..8bd9e9c00 100644 --- a/src/show.jl +++ b/src/show.jl @@ -229,53 +229,7 @@ end ################################################################################ # MEASURE STRING METHODS ################################################################################ -## Convert measure data into a useable string for measure printing -# 1-D DiscreteMeasureData/FunctionalDiscreteMeasureData -function measure_data_string(print_mode, - data::Union{DiscreteMeasureData{GeneralVariableRef}, - FunctionalDiscreteMeasureData{GeneralVariableRef}} - )::String - pref = parameter_refs(data) - lb = JuMP.lower_bound(data) - ub = JuMP.upper_bound(data) - nan_bound = isnan(lb) || isnan(ub) - if nan_bound - return JuMP.function_string(print_mode, pref) - else - domain = IntervalDomain(lb, ub) - return string(JuMP.function_string(print_mode, pref), " ", - in_domain_string(print_mode, domain)) - end -end -# Multi-D DiscreteMeasureData/FunctionalDiscreteMeasureData -function measure_data_string(print_mode, - data::Union{DiscreteMeasureData{Vector{GeneralVariableRef}}, - FunctionalDiscreteMeasureData{Vector{GeneralVariableRef}}} - )::String - prefs = parameter_refs(data) - lbs = JuMP.lower_bound(data) - ubs = JuMP.upper_bound(data) - has_bounds = !isnan(first(lbs)) && !isnan(first(ubs)) - homo_bounds = has_bounds && _allequal(lbs) && _allequal(ubs) - names = map(p -> _remove_name_index(p), prefs) - homo_names = _allequal(names) - num_prefs = length(prefs) - if homo_names && homo_bounds - domain = IntervalDomain(first(lbs), first(ubs)) - return string(first(names), " ", in_domain_string(print_mode, domain), - "^", num_prefs) - elseif has_bounds - str_list = [JuMP.function_string(print_mode, prefs[i]) * " " * - in_domain_string(print_mode, IntervalDomain(lbs[i], ubs[i])) - for i in eachindex(prefs)] - return _make_str_value(str_list)[2:end-1] - elseif homo_names - return first(names) - else - return _make_str_value(prefs) - end -end # extract the most compact parameter name possible function _get_root_parameter_name(data::AbstractMeasureData)::String @@ -288,23 +242,20 @@ function _get_root_parameter_name(data::AbstractMeasureData)::String end end -# Fallback for measure_data_string -function measure_data_string(print_mode, data::AbstractMeasureData)::String - return _get_root_parameter_name(data) -end # Make strings to represent measures in REPLMode function variable_string(m::Type{JuMP.REPLMode}, mref::MeasureRef)::String - data = measure_data(mref) - data_str = measure_data_string(m, data) - func_str = JuMP.function_string(m, 
measure_function(mref)) - name = JuMP.name(mref) - if name == "integral" - name = _math_symbol(m, :integral) - elseif name == "expect" - name = _math_symbol(m, :expect) - end - return string(name, "{", data_str, "}[", func_str, "]") + # data = measure_data(mref) + # data_str = measure_data_string(m, data) + # func_str = JuMP.function_string(m, measure_function(mref)) + # name = JuMP.name(mref) + # if name == "integral" + # name = _math_symbol(m, :integral) + # elseif name == "expect" + # name = _math_symbol(m, :expect) + # end + # return string(name, "{", data_str, "}[", func_str, "]") + return "TODO measure printing" end # Make strings to represent measures in IJuliaMode @@ -726,8 +677,9 @@ end # Show the backend information associated with the optimizer model function JuMP.show_backend_summary(io::IO, model::InfiniteModel) - println(io, "Optimizer model backend information: ") - JuMP.show_backend_summary(io, optimizer_model(model)) + println(io, "Transformation backend information: ") + println(io, "TODO implement transform printing") + # JuMP.show_backend_summary(io, optimizer_model(model)) return end @@ -763,28 +715,29 @@ function Base.show(io::IO, model::InfiniteModel) end println(io, " problem with:") # show finite parameter info - num_finite_params = num_parameters(model, FiniteParameter) - println(io, "Finite Parameter", _plural(num_finite_params), ": ", - num_finite_params) - # show infinite parameter info - num_infinite_params = num_parameters(model, InfiniteParameter) - println(io, "Infinite Parameter", _plural(num_infinite_params), ": ", - num_infinite_params) - # show variable info - num_vars = JuMP.num_variables(model) - println(io, "Variable", _plural(num_vars), ": ", num_vars) - # show the derivative info - num_derivs = num_derivatives(model) - println(io, "Derivative", _plural(num_derivs), ": ", num_derivs) - # show measure info - num_meas = num_measures(model) - println(io, "Measure", _plural(num_meas), ": ", num_meas) - # show objective function info - if sense != MOI.FEASIBILITY_SENSE - JuMP.show_objective_function_summary(io, model) - end - # show constraint info - JuMP.show_constraints_summary(io, model) + # num_finite_params = num_parameters(model, FiniteParameter) + # println(io, "Finite Parameter", _plural(num_finite_params), ": ", + # num_finite_params) + # # show infinite parameter info + # num_infinite_params = num_parameters(model, InfiniteParameter) + # println(io, "Infinite Parameter", _plural(num_infinite_params), ": ", + # num_infinite_params) + # # show variable info + # num_vars = JuMP.num_variables(model) + # println(io, "Variable", _plural(num_vars), ": ", num_vars) + # # show the derivative info + # num_derivs = num_derivatives(model) + # println(io, "Derivative", _plural(num_derivs), ": ", num_derivs) + # # show measure info + # num_meas = num_measures(model) + # println(io, "Measure", _plural(num_meas), ": ", num_meas) + # # show objective function info + # if sense != MOI.FEASIBILITY_SENSE + # JuMP.show_objective_function_summary(io, model) + # end + # # show constraint info + # JuMP.show_constraints_summary(io, model) + println(io, "TODO update model details printing") # show other info names_in_scope = sort!(collect(keys(JuMP.object_dictionary(model)))) if !isempty(names_in_scope) From 1ecdd6751ee09ea330f045297e2313619f9986f7 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Mon, 7 Mar 2022 17:13:12 -0500 Subject: [PATCH 02/10] add getters/setters for transform cache --- src/datatypes.jl | 12 ++-- src/general_variables.jl | 147 
++++++++++++++-------------------------
 src/scalar_parameters.jl |   5 +-
 src/transform.jl         |  34 +++++++++
 4 files changed, 94 insertions(+), 104 deletions(-)
 create mode 100644 src/transform.jl

diff --git a/src/datatypes.jl b/src/datatypes.jl
index d7953c618..9dee886a1 100644
--- a/src/datatypes.jl
+++ b/src/datatypes.jl
@@ -979,12 +979,12 @@ A convenient container for storing all the transformation attributes stored in
 an `InfiniteModel` that can be used by the transformation backend.
 """
 struct TransformAttrCache
-    finite_params::Dict{Tuple{FiniteParameterIndex, FiniteParameterAttr}, Any}
-    indep_params::Dict{Tuple{IndependentParameterIndex, InfiniteParameterAttr}, Any}
-    depend_params::Dict{Tuple{DependentParametersIndex, InfiniteParameterAttr}, Any} # TODO fix this
-    infinite_vars::Dict{Tuple{InfiniteVariableIndex, VariableAttr}, Any}
-    semi_vars::Dict{Tuple{SemiInfiniteVariableIndex, VariableAttr}, Any}
-    point_vars::Dict{Tuple{PointVariableIndex, VariableAttr}, Any}
+    finite_parameters::Dict{Tuple{FiniteParameterIndex, FiniteParameterAttr}, Any}
+    independent_parameters::Dict{Tuple{IndependentParameterIndex, InfiniteParameterAttr}, Any}
+    dependent_parameters::Dict{Tuple{DependentParametersIndex, InfiniteParameterAttr}, Any} # TODO maybe allow for parameter-wise attributes
+    infinite_variables::Dict{Tuple{InfiniteVariableIndex, VariableAttr}, Any}
+    semi_infinite_variables::Dict{Tuple{SemiInfiniteVariableIndex, VariableAttr}, Any}
+    point_variables::Dict{Tuple{PointVariableIndex, VariableAttr}, Any}
     derivatives::Dict{Tuple{DerivativeIndex, DerivativeAttr}, Any}
     measures::Dict{Tuple{MeasureIndex, MeasureAttr}, Any}
     constraints::Dict{Tuple{InfOptConstraintIndex, ConstraintAttr}, Any}
diff --git a/src/general_variables.jl b/src/general_variables.jl
index 84384cba9..1b279226f 100644
--- a/src/general_variables.jl
+++ b/src/general_variables.jl
@@ -458,10 +458,7 @@ function _object_number(pref::GeneralVariableRef)::Int
 end

 # Define 1 argument user method wrappers and their fallbacks
-for op = (:infinite_domain, :num_supports, :significant_digits, :has_supports,
-          :supports, :delete_supports, :fill_in_supports!, :parameter_value,
-          :derivative_method, :has_generative_supports, :has_internal_supports,
-          :add_generative_supports, :raw_function, :generative_support_info)
+for op = (:infinite_domain, :parameter_value, :raw_function)
     @eval begin
         # define the fallback method
         function $op(pref; kwargs...)
@@ -526,20 +523,20 @@ function set_infinite_domain(
 end

 # Better fallbacks for supports
-function supports(pref::DispatchVariableRef; kwargs...)
-    throw(ArgumentError("`supports` not defined for variable reference type(s) " *
-                        "`$(typeof(pref))`."))
-end
-function supports(prefs::AbstractArray; kwargs...)
-    throw(ArgumentError("`supports` not defined for variable reference type(s) " *
-                        "`$(typeof(prefs))`."))
-end
+# function supports(pref::DispatchVariableRef; kwargs...)
+#     throw(ArgumentError("`supports` not defined for variable reference type(s) " *
+#                         "`$(typeof(pref))`."))
+# end
+# function supports(prefs::AbstractArray; kwargs...)
+#     throw(ArgumentError("`supports` not defined for variable reference type(s) " *
+#                         "`$(typeof(prefs))`."))
+# end

 # Dispatch fallback
-function set_supports(pref, supports; kwargs...)
+# throw(ArgumentError("`set_supports` not defined for variable reference type(s) " * +# "`$(typeof(pref))`.")) +# end """ set_supports(pref::GeneralVariableRef, supports::Union{Real, Vector{<:Real}}; @@ -549,15 +546,15 @@ Set the support points associated with a single infinite parameter `pref`. An `ArgumentError` is thrown if `pref` is not an independent infinite parameter. """ -function set_supports( - pref::GeneralVariableRef, - supports::Union{Real, Vector{<:Real}}; - force::Bool = false, - label::Type{<:All} = UserDefined - )::Nothing - return set_supports(dispatch_variable_ref(pref), supports, - force = force, label = label) -end +# function set_supports( +# pref::GeneralVariableRef, +# supports::Union{Real, Vector{<:Real}}; +# force::Bool = false, +# label::Type{<:All} = UserDefined +# )::Nothing +# return set_supports(dispatch_variable_ref(pref), supports, +# force = force, label = label) +# end """ set_supports( @@ -570,21 +567,21 @@ Set the support points associated with dependent infinite parameters `prefs`. An `ArgumentError` is thrown if `prefs` is are not dependent infinite parameters. """ -function set_supports( - prefs::AbstractArray{<:GeneralVariableRef}, - supports::Union{Array{<:Real, 2}, Vector{<:AbstractArray{<:Real}}}; - label::Type{<:All} = UserDefined, - force::Bool = false - )::Nothing - return set_supports(dispatch_variable_ref.(prefs), supports, label = label, - force = force) -end +# function set_supports( +# prefs::AbstractArray{<:GeneralVariableRef}, +# supports::Union{Array{<:Real, 2}, Vector{<:AbstractArray{<:Real}}}; +# label::Type{<:All} = UserDefined, +# force::Bool = false +# )::Nothing +# return set_supports(dispatch_variable_ref.(prefs), supports, label = label, +# force = force) +# end # Dispatch fallback -function add_supports(pref, supports; kwargs...) - throw(ArgumentError("`add_supports` not defined for variable reference type(s) " * - "`$(typeof(pref))`.")) -end +# function add_supports(pref, supports; kwargs...) +# throw(ArgumentError("`add_supports` not defined for variable reference type(s) " * +# "`$(typeof(pref))`.")) +# end """ add_supports(pref::GeneralVariableRef, @@ -594,15 +591,15 @@ Add the support points `supports` to a single infinite parameter `pref`. An `ArgumentError` is thrown if `pref` is not an independent infinite parameter. """ -function add_supports( - pref::GeneralVariableRef, - supports::Union{Real, Vector{<:Real}}; - check::Bool = true, - label::Type{<:All} = UserDefined - )::Nothing - return add_supports(dispatch_variable_ref(pref), supports, - check = check, label = label) -end +# function add_supports( +# pref::GeneralVariableRef, +# supports::Union{Real, Vector{<:Real}}; +# check::Bool = true, +# label::Type{<:All} = UserDefined +# )::Nothing +# return add_supports(dispatch_variable_ref(pref), supports, +# check = check, label = label) +# end """ add_supports( @@ -614,15 +611,15 @@ Add the support points `supports` to the dependent infinite parameters `prefs`. An `ArgumentError` is thrown if `prefs` is are not dependent infinite parameters. 
""" -function add_supports( - prefs::AbstractArray{<:GeneralVariableRef}, - supports::Union{Array{<:Real, 2}, Vector{<:AbstractArray{<:Real}}}; - label::Type{<:All} = UserDefined, - check::Bool = true - )::Nothing - return add_supports(dispatch_variable_ref.(prefs), supports, label = label, - check = check) -end +# function add_supports( +# prefs::AbstractArray{<:GeneralVariableRef}, +# supports::Union{Array{<:Real, 2}, Vector{<:AbstractArray{<:Real}}}; +# label::Type{<:All} = UserDefined, +# check::Bool = true +# )::Nothing +# return add_supports(dispatch_variable_ref.(prefs), supports, label = label, +# check = check) +# end # Fallback function JuMP.set_value(vref::DispatchVariableRef, value::Real) @@ -642,44 +639,6 @@ function JuMP.set_value(vref::GeneralVariableRef, value::Real)::Nothing return JuMP.set_value(dispatch_variable_ref(vref), value) end -# Dispatch fallback -function set_derivative_method(pref::DispatchVariableRef, method) - throw(ArgumentError("`set_derivative_method` not defined for variable reference type(s) " * - "`$(typeof(pref))`.")) -end - -""" - set_derivative_method(pref::GeneralVariableRef, - method::AbstractDerivativeMethod - )::Nothing - -Specify the numerical derivative evaluation technique associated with `pref`. -An `ArgumentError` is thrown if `pref` is not an infinite parameter. -""" -function set_derivative_method( - pref::GeneralVariableRef, - method::AbstractDerivativeMethod - )::Nothing - return set_derivative_method(dispatch_variable_ref(pref), method) -end - -# Define parameter status setters -for op = (:_set_has_generative_supports, :_set_has_internal_supports, - :_set_has_derivative_constraints) - @eval begin - # define the fallback method - function $op(vref::DispatchVariableRef, status) - str = string("`$($op)` not defined for variable reference type ", - "`$(typeof(vref))`.") - throw(ArgumentError(str)) - end - # define the dispatch version - function $op(vref::GeneralVariableRef, status::Bool)::Nothing - return $op(dispatch_variable_ref(vref), status) - end - end -end - # Dispatch fallback function call_function(fref::DispatchVariableRef, support...) throw(ArgumentError("`call_function` not defined for variable reference type(s) " * diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl index 7e95055f0..96b6bbaef 100644 --- a/src/scalar_parameters.jl +++ b/src/scalar_parameters.jl @@ -141,10 +141,7 @@ end # PARAMETER DEFINITION ################################################################################ """ - build_parameter( - _error::Function, domain::InfiniteScalarDomain; - [kwargs...] - )::IndependentParameter + build_parameter(_error::Function, domain::InfiniteScalarDomain; [kwargs...])::IndependentParameter Returns a [`IndependentParameter`](@ref) given the appropriate information. This is analagous to `JuMP.build_variable`. 
This is meant to primarily serve as a diff --git a/src/transform.jl b/src/transform.jl new file mode 100644 index 000000000..7c8b0defd --- /dev/null +++ b/src/transform.jl @@ -0,0 +1,34 @@ +################################################################################ +# ATTRIBUTE CACHE METHODS +################################################################################ +## Extend Base.[get/set]index +# Non-model attributes +for (I, A, name) = ((:FiniteParameterIndex, :FiniteParameterAttr, :finite_parameters), + (:IndependentParameterIndex, :InfiniteParameterAttr, :independent_parameters), + (:DependentParametersIndex, :InfiniteParameterAttr, :dependent_parameters), + (:InfiniteVariableIndex, :VariableAttr, :infinite_variables), + (:SemiInfiniteVariableIndex, :VariableAttr, :semi_infinite_variables), + (:PointVariableIndex, :VariableAttr, :point_variables), + (:DerivativeIndex, :DerivativeAttr, :derivatives), + (:MeasureIndex, :MeasureAttr, :measures), + (:InfOptConstraintIndex, :ConstraintAttr, :constraints) + ) + @eval begin + function Base.getindex(cache::TransformAttrCache, idx::$I, attr::$A) + return cache.$(name)[idx, attr] + end + function Base.setindex(cache::TransformAttrCache, idx::$I, attr::$A, value) + cache.$(name)[idx, attr] = value + return + end + end +end + +# Model attributes +function Base.getindex(cache::TransformAttrCache, attr::ModelAttr) + return cache.model[attr] +end +function Base.setindex(cache::TransformAttrCache, attr::ModelAttr, value) + cache.model[attr] = value + return +end From 8f1dbbe67d26500208d3f2950b97232ae04c4222 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Fri, 18 Mar 2022 12:09:49 -0400 Subject: [PATCH 03/10] Added get/set api --- src/InfiniteOpt.jl | 3 +- src/transform.jl | 156 +++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 152 insertions(+), 7 deletions(-) diff --git a/src/InfiniteOpt.jl b/src/InfiniteOpt.jl index 18018b3be..2644f22de 100644 --- a/src/InfiniteOpt.jl +++ b/src/InfiniteOpt.jl @@ -51,6 +51,7 @@ include("macros.jl") include("objective.jl") # include("measure_expansions.jl") # include("derivative_evaluations.jl") +include("transform.jl") include("optimize.jl") # include("results.jl") include("show.jl") @@ -62,7 +63,7 @@ include("general_variables.jl") # Reexport.@reexport using .TranscriptionOpt # Define additional stuff that should not be exported -const _EXCLUDE_SYMBOLS = [Symbol(@__MODULE__), :eval, :include] +const _EXCLUDE_SYMBOLS = [Symbol(@__MODULE__), :eval, :include, :get, :set] # Following JuMP, export everything that doesn't start with a `_` for sym in names(@__MODULE__, all = true) diff --git a/src/transform.jl b/src/transform.jl index 7c8b0defd..0935f3c23 100644 --- a/src/transform.jl +++ b/src/transform.jl @@ -1,7 +1,7 @@ ################################################################################ # ATTRIBUTE CACHE METHODS ################################################################################ -## Extend Base.[get/set]index +## Extend basic Base functions # Non-model attributes for (I, A, name) = ((:FiniteParameterIndex, :FiniteParameterAttr, :finite_parameters), (:IndependentParameterIndex, :InfiniteParameterAttr, :independent_parameters), @@ -17,9 +17,14 @@ for (I, A, name) = ((:FiniteParameterIndex, :FiniteParameterAttr, :finite_parame function Base.getindex(cache::TransformAttrCache, idx::$I, attr::$A) return cache.$(name)[idx, attr] end - function Base.setindex(cache::TransformAttrCache, idx::$I, attr::$A, value) - cache.$(name)[idx, attr] = value - return + function 
Base.setindex!(cache::TransformAttrCache, value, idx::$I, attr::$A) + return cache.$(name)[idx, attr] = value + end + function Base.haskey(cache::TransformAttrCache, key::Tuple{$I, $A}) + return haskey(cache.$(name), key) + end + function Base.get(cache::TransformAttrCache, key::Tuple{$I, $A}, default) + return get(cache.$(name), key, default) end end end @@ -28,7 +33,146 @@ end function Base.getindex(cache::TransformAttrCache, attr::ModelAttr) return cache.model[attr] end -function Base.setindex(cache::TransformAttrCache, attr::ModelAttr, value) - cache.model[attr] = value +function Base.setindex!(cache::TransformAttrCache, value, attr::ModelAttr) + return cache.model[attr] = value +end +function Base.haskey(cache::TransformAttrCache, attr::ModelAttr) + return haskey(cache.model, attr) +end +function Base.get(cache::TransformAttrCache, attr::ModelAttr, default) + return get(cache.model, attr, default) +end + +################################################################################ +# BASIC ATTRIBUTE API +################################################################################ +## Define the basic getters/setters +# Fallbacks +""" + InfiniteOpt.get(model::InfiniteModel, [index], attribute)::AbstractTransformAttr + +Retrieve the value of the transformation backend attribute `attribute` that is +associated with object `index` in `model`. If `attribute` is a [`ModelAttr`](@ref) +then the `index` argument is omitted. Errors if the `index` and `model` have +no such attribute. This is intended for use by those writing transformation +backends for `InfiniteModel`s. +""" +function get(model::InfiniteModel, idx, attr) + error("Objects with indices of type `$(typeof(idx))` are not compatible with ", + "attributes of type `$(typeof(attr))`.") +end +function get(model::InfiniteModel, attr) + error("`InfiniteModel`s are not compatible with ", + "attributes of type `$(typeof(attr))`.") +end + +""" + attribute_value_type(attribute::AbstractTransformAttr)::DataType + +Returns the type of a value that an `attribute` can accept. This should be +extended for new [`AbstractTransformAttr`](@ref)s such that they can be +checked. Defaults to `Any` such that [`InfiniteOpt.set`](@ref) does not +check the value type of an attribute. +""" +attribute_value_type(::AbstractTransformAttr) = Any + +""" + InfiniteOpt.set(model::InfiniteModel, [index], attribute, value)::Nothing + +Set the transformation backend attribute `attribute` for object `index` in +`model` to `value`. If `attribute` is a [`ModelAttr`](@ref) then the `index` +argument is omitted. Errors if `value` is incompatible with `attribute`. This +is intended for use by those writing transformation backends for `InfiniteModel`s. 
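A minimal sketch of how a transformation backend might exercise this get/set API, assuming the work-in-progress branch builds; the `SolveTolerance` attribute below is purely illustrative and is not defined by this patch, while `ModelAttr`, `attribute_value_type`, `InfiniteOpt.set`, and `InfiniteOpt.get` are.

```julia
using InfiniteOpt

# Hypothetical backend-owned model-level attribute (not part of this patch)
struct SolveTolerance <: InfiniteOpt.ModelAttr end

# Restrict the accepted value type so `InfiniteOpt.set` can validate inputs
InfiniteOpt.attribute_value_type(::SolveTolerance) = Float64

model = InfiniteModel()
InfiniteOpt.set(model, SolveTolerance(), 1e-6) # stored in `model.transform_attrs`
InfiniteOpt.get(model, SolveTolerance())       # returns 1.0e-6 (errors if never set)
```

Since `get` and `set` are kept out of the export list, backend code is expected to call them with the `InfiniteOpt.` prefix as shown.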
+""" +function set(model::InfiniteModel, idx, attr, value) + error("Objects with indices of type `$(typeof(idx))` are not compatible with ", + "attributes of type `$(typeof(attr))`.") +end +function set(model::InfiniteModel, attr, value) + error("`InfiniteModel`s are not compatible with ", + "attributes of type `$(typeof(attr))`.") +end + +# TODO enable `set` to optionally update the transformation backend incrementally + +# Singular dispatch variables (and the dependent parameter & constraint getters) +for (I, A) = ((:FiniteParameterIndex, :FiniteParameterAttr), + (:IndependentParameterIndex, :InfiniteParameterAttr), + (:DependentParametersIndex, :InfiniteParameterAttr), + (:InfiniteVariableIndex, :VariableAttr), + (:SemiInfiniteVariableIndex, :VariableAttr), + (:PointVariableIndex, :VariableAttr), + (:DerivativeIndex, :DerivativeAttr), + (:MeasureIndex, :MeasureAttr), + (:InfOptConstraintIndex, :ConstraintAttr)) + @eval begin + if !($I in (DependentParametersIndex, InfOptConstraintIndex)) + function get(model::InfiniteModel, idx::$I, attr::$A) + value = get(model.transform_attrs, (idx, attr), nothing) + if isnothing(value) + error("$(dispatch_variable_ref(model, idx)) does not have transform ", + "attribute `$attr`.") + end + return value + end + end + function set(model::InfiniteModel, idx::$I, attr::$A, value) + if !isa(value, attribute_value_type(attr)) + error("Expected a value of type `$(attribute_value_type(attr))` for", + "the attribute `$(attr)`, but got `$(value)` of type ", + "`$(typeof(value))`.") + end + model.transform_attrs[idx, attr] = value + return + end + end +end + +# Additional getters for constraints, models, and dependent parameters +function get( + model::InfiniteModel, + idx::InfOptConstraintIndex, + attr::ConstraintAttr + ) + value = get(model.transform_attrs, (idx, attr), nothing) + if isnothing(value) + error("$(InfOptConstraintRef(model, idx)) does not have transform ", + "attribute `$attr`.") + end + return value +end +function get( + model::InfiniteModel, + idx::DependentParametersIndex, + attr::InfiniteParameterAttr + ) + value = get(model.transform_attrs, (idx, attr), nothing) + if isnothing(value) + error("The dependent parameter group with index `$idx` does not ", + "have transform attribute `$attr`.") + end + return value +end +function get(model::InfiniteModel, attr::ModelAttr) + value = get(model.transform_attrs, attr, nothing) + if isnothing(value) + error("The model does not have transform attribute `$attr`.") + end + return value +end + +# Set for models +function set(model::InfiniteModel, attr::ModelAttr, value) + if !isa(value, attribute_value_type(attr)) + error("Expected a value of type `$(attribute_value_type(attr))` for", + "the attribute `$(attr)`, but got `$(value)` of type ", + "`$(typeof(value))`.") + end + model.transform_attrs[attr] = value return end + +################################################################################ +# DEFAULT TRANSFORM ATTRIBUTES +################################################################################ +attribute_value_type(::Supports) = Union{Dict{Float64, DataType}, Dict{Vector{Float64}, DataType}} From 3ddaa99b4bf7e0eefc28a57dfe0c0008a729e2c9 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Fri, 18 Mar 2022 12:24:53 -0400 Subject: [PATCH 04/10] Resolve bug with Base.get --- src/array_parameters.jl | 4 ++-- src/constraints.jl | 8 ++++---- src/derivatives.jl | 4 ++-- src/expressions.jl | 8 ++++---- src/finite_variables.jl | 2 +- src/infinite_variables.jl | 2 +- src/measure_expansions.jl | 4 
++-- src/measures.jl | 4 ++-- src/nlp.jl | 6 +++--- src/optimize.jl | 4 ++-- src/point_variables.jl | 4 ++-- src/results.jl | 2 +- src/scalar_parameters.jl | 10 +++++++--- src/semi_infinite_variables.jl | 4 ++-- src/transform.jl | 17 ++++++----------- src/variable_basics.jl | 4 ++-- 16 files changed, 43 insertions(+), 44 deletions(-) diff --git a/src/array_parameters.jl b/src/array_parameters.jl index a04154b3d..c9fcd539d 100644 --- a/src/array_parameters.jl +++ b/src/array_parameters.jl @@ -30,7 +30,7 @@ end # Extend _data_object function _data_object(pref::DependentParameterRef)::MultiParameterData - object = get(_data_dictionary(pref), JuMP.index(pref).object_index, nothing) + object = Base.get(_data_dictionary(pref), JuMP.index(pref).object_index, nothing) if isnothing(object) error("Invalid dependent parameter reference, cannot find ", "corresponding parameter in the model. This is likely ", @@ -312,7 +312,7 @@ julia> name(pref) ``` """ function JuMP.name(pref::DependentParameterRef)::String - object = get(_data_dictionary(pref), JuMP.index(pref).object_index, nothing) + object = Base.get(_data_dictionary(pref), JuMP.index(pref).object_index, nothing) return isnothing(object) ? "" : object.names[_param_index(pref)] end diff --git a/src/constraints.jl b/src/constraints.jl index d4907f4e9..f3d44a21a 100644 --- a/src/constraints.jl +++ b/src/constraints.jl @@ -66,7 +66,7 @@ end # Extend _data_object function _data_object(cref::InfOptConstraintRef)::ConstraintData - object = get(_data_dictionary(cref), JuMP.index(cref), nothing) + object = Base.get(_data_dictionary(cref), JuMP.index(cref), nothing) if isnothing(object) error("Invalid constraint reference, cannot find corresponding ", "constraint in the model. This is likely caused by using the ", @@ -392,7 +392,7 @@ julia> name(cref) ``` """ function JuMP.name(cref::InfOptConstraintRef)::String - object = get(_data_dictionary(cref), JuMP.index(cref), nothing) + object = Base.get(_data_dictionary(cref), JuMP.index(cref), nothing) return isnothing(object) ? "" : object.name end @@ -587,7 +587,7 @@ function JuMP.constraint_by_name( end end end - index = get(model.name_to_constr, name, nothing) + index = Base.get(model.name_to_constr, name, nothing) if isnothing(index) return nothing elseif index == InfOptConstraintIndex(-1) @@ -809,7 +809,7 @@ Subdomain restrictions (1): t ∈ [0, 2] function domain_restrictions( cref::InfOptConstraintRef )::DomainRestrictions{GeneralVariableRef} - return get(JuMP.owner_model(cref).constraint_restrictions, JuMP.index(cref), + return Base.get(JuMP.owner_model(cref).constraint_restrictions, JuMP.index(cref), DomainRestrictions()) end diff --git a/src/derivatives.jl b/src/derivatives.jl index 86840dac6..a2e50a33a 100644 --- a/src/derivatives.jl +++ b/src/derivatives.jl @@ -29,7 +29,7 @@ end # Extend _data_object function _data_object(dref::DerivativeRef) - object = get(_data_dictionary(dref), JuMP.index(dref), nothing) + object = Base.get(_data_dictionary(dref), JuMP.index(dref), nothing) if isnothing(object) error("Invalid derivative reference, cannot find ", "corresponding derivative in the model. 
This is likely ", @@ -114,7 +114,7 @@ function _existing_derivative_index( pref::GeneralVariableRef )::Union{DerivativeIndex, Nothing} model = JuMP.owner_model(vref) - return get(model.deriv_lookup, (vref, pref), nothing) + return Base.get(model.deriv_lookup, (vref, pref), nothing) end # Get access the derivative constraint indices diff --git a/src/expressions.jl b/src/expressions.jl index 4897d07c2..f620709ce 100644 --- a/src/expressions.jl +++ b/src/expressions.jl @@ -26,7 +26,7 @@ end # Extend _data_object function _data_object(fref::ParameterFunctionRef) - object = get(_data_dictionary(fref), JuMP.index(fref), nothing) + object = Base.get(_data_dictionary(fref), JuMP.index(fref), nothing) if isnothing(object) error("Invalid parameter function reference, cannot find ", "corresponding object in the model. This is likely ", @@ -172,7 +172,7 @@ julia> name(fref) ``` """ function JuMP.name(fref::ParameterFunctionRef)::String - object = get(_data_dictionary(fref), JuMP.index(fref), nothing) + object = Base.get(_data_dictionary(fref), JuMP.index(fref), nothing) return isnothing(object) ? "" : object.name end @@ -874,7 +874,7 @@ function _affine_coefficient( func::GenericAffExpr, var::GeneralVariableRef )::Float64 - return get(func.terms, var, 0.0) + return Base.get(func.terms, var, 0.0) end # GenericQuadExpr @@ -882,7 +882,7 @@ function _affine_coefficient( func::GenericQuadExpr, var::GeneralVariableRef )::Float64 - return get(func.aff.terms, var, 0.0) + return Base.get(func.aff.terms, var, 0.0) end # Fallback diff --git a/src/finite_variables.jl b/src/finite_variables.jl index 119c896ee..d32813395 100644 --- a/src/finite_variables.jl +++ b/src/finite_variables.jl @@ -36,7 +36,7 @@ end function _data_object( vref::FiniteVariableRef )::VariableData{JuMP.ScalarVariable{Float64, Float64, Float64, Float64}} - object = get(_data_dictionary(vref), JuMP.index(vref), nothing) + object = Base.get(_data_dictionary(vref), JuMP.index(vref), nothing) if isnothing(object) error("Invalid finite variable reference, cannot find ", "corresponding variable in the model. This is likely ", diff --git a/src/infinite_variables.jl b/src/infinite_variables.jl index 6f3b45c8c..b7d8161d0 100644 --- a/src/infinite_variables.jl +++ b/src/infinite_variables.jl @@ -34,7 +34,7 @@ end # Extend _data_object function _data_object(vref::InfiniteVariableRef) - object = get(_data_dictionary(vref), JuMP.index(vref), nothing) + object = Base.get(_data_dictionary(vref), JuMP.index(vref), nothing) if isnothing(object) error("Invalid infinite variable reference, cannot find ", "corresponding variable in the model. 
This is likely ", diff --git a/src/measure_expansions.jl b/src/measure_expansions.jl index 1b94f3737..7fb61a182 100644 --- a/src/measure_expansions.jl +++ b/src/measure_expansions.jl @@ -39,7 +39,7 @@ function make_point_variable_ref( for i in eachindex(support) support[i] = round(support[i], sigdigits = significant_digits(prefs[i])) end - pindex = get(write_model.point_lookup, (ivref, support), nothing) + pindex = Base.get(write_model.point_lookup, (ivref, support), nothing) if isnothing(pindex) base_info = JuMP.VariableInfo(false, NaN, false, NaN, false, NaN, false, NaN, false, false) @@ -134,7 +134,7 @@ function make_semi_infinite_variable_ref( values::Vector{Float64} )::GeneralVariableRef eval_supps = Dict(indices[i] => values[i] for i in eachindex(indices)) - existing_index = get(write_model.semi_lookup, (ivref, eval_supps), nothing) + existing_index = Base.get(write_model.semi_lookup, (ivref, eval_supps), nothing) if isnothing(existing_index) var = JuMP.build_variable(error, ivref, eval_supps, check = false) return JuMP.add_variable(write_model, var, add_support = false) diff --git a/src/measures.jl b/src/measures.jl index 6d0a6980c..a9e03d704 100644 --- a/src/measures.jl +++ b/src/measures.jl @@ -34,7 +34,7 @@ end # Extend _data_object function _data_object(mref::MeasureRef) - object = get(_data_dictionary(mref), JuMP.index(mref), nothing) + object = Base.get(_data_dictionary(mref), JuMP.index(mref), nothing) if isnothing(object) error("Invalid measure reference, cannot find ", "corresponding measure in the model. This is likely ", @@ -1225,7 +1225,7 @@ Extend `JuMP.name` to return the name associated with a measure reference. """ function JuMP.name(mref::MeasureRef)::String - object = get(_data_dictionary(mref), JuMP.index(mref), nothing) + object = Base.get(_data_dictionary(mref), JuMP.index(mref), nothing) return isnothing(object) ? "" : object.name end diff --git a/src/nlp.jl b/src/nlp.jl index c0d065aa4..a61727fe5 100644 --- a/src/nlp.jl +++ b/src/nlp.jl @@ -128,7 +128,7 @@ function _is_zero(node::_LCRST.Node{NodeData}) return true elseif raw in (:/, :^) && _is_zero(node.child) return true - elseif all(_is_zero(n) for n in node) && iszero(get(_NativeNLPFunctions, (raw, length(collect(node))), (i...) -> true)((0.0 for n in node)...)) + elseif all(_is_zero(n) for n in node) && iszero(Base.get(_NativeNLPFunctions, (raw, length(collect(node))), (i...) 
-> true)((0.0 for n in node)...)) return true else return false @@ -1313,8 +1313,8 @@ function name_to_function(model::InfiniteModel, name::Symbol, num_args::Int) elseif name == :* return * else - return get(_NativeNLPFunctions, (name, num_args), - get(model.func_lookup, (name, num_args), nothing)) + return Base.get(_NativeNLPFunctions, (name, num_args), + Base.get(model.func_lookup, (name, num_args), nothing)) end end diff --git a/src/optimize.jl b/src/optimize.jl index 77e2432cb..f8c7b33c4 100644 --- a/src/optimize.jl +++ b/src/optimize.jl @@ -391,7 +391,7 @@ function JuMP.get_optimizer_attribute( model::InfiniteModel, attr::MOI.AbstractOptimizerAttribute ) - return MOI.get(optimizer_model(model), attr) + return MOI.Base.get(optimizer_model(model), attr) end """ @@ -454,7 +454,7 @@ julia> result_count(model) ``` """ function JuMP.result_count(model::InfiniteModel)::Int - return MOI.get(optimizer_model(model), MOI.ResultCount()) + return MOI.Base.get(optimizer_model(model), MOI.ResultCount()) end ################################################################################ diff --git a/src/point_variables.jl b/src/point_variables.jl index e191f522f..28ba07f0a 100644 --- a/src/point_variables.jl +++ b/src/point_variables.jl @@ -36,7 +36,7 @@ end function _data_object( vref::PointVariableRef )::VariableData{PointVariable{GeneralVariableRef}} - object = get(_data_dictionary(vref), JuMP.index(vref), nothing) + object = Base.get(_data_dictionary(vref), JuMP.index(vref), nothing) if isnothing(object) error("Invalid point variable reference, cannot find ", "corresponding variable in the model. This is likely ", @@ -430,7 +430,7 @@ function JuMP.add_variable( ivref = v.infinite_variable_ref divref = dispatch_variable_ref(ivref) JuMP.check_belongs_to_model(divref, model) - existing_index = get(model.point_lookup, (ivref, v.parameter_values), nothing) + existing_index = Base.get(model.point_lookup, (ivref, v.parameter_values), nothing) if isnothing(existing_index) data_object = VariableData(v, name) vindex = _add_data_object(model, data_object) diff --git a/src/results.jl b/src/results.jl index db664d521..5bf936b8d 100644 --- a/src/results.jl +++ b/src/results.jl @@ -87,7 +87,7 @@ end ## Define dispatch methods to collect value of parameters # InfiniteParameter function _get_value(pref, ::Type{<:InfiniteParameterIndex}, result; kwargs...) - label = get(kwargs, :label, PublicLabel) + label = Base.get(kwargs, :label, PublicLabel) return supports(pref, label = label) end diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl index 96b6bbaef..68728b6af 100644 --- a/src/scalar_parameters.jl +++ b/src/scalar_parameters.jl @@ -55,7 +55,7 @@ end # Extend _data_object function _data_object(pref::ScalarParameterRef) - object = get(_data_dictionary(pref), JuMP.index(pref), nothing) + object = Base.get(_data_dictionary(pref), JuMP.index(pref), nothing) if isnothing(object) error("Invalid scalar parameter reference, cannot find ", "corresponding parameter in the model. 
This is likely ", @@ -135,8 +135,12 @@ end ################################################################################ # TRANSFORM ATTRIBUTES ################################################################################ +# Enable Supports() value type checking for infinite parameters +attribute_value_type(::Supports) = Union{Dict{Float64, DataType}, Dict{Vector{Float64}, DataType}} + # TODO finish + ################################################################################ # PARAMETER DEFINITION ################################################################################ @@ -417,7 +421,7 @@ julia> name(t) ``` """ function JuMP.name(pref::ScalarParameterRef)::String - object = get(_data_dictionary(pref), JuMP.index(pref), nothing) + object = Base.get(_data_dictionary(pref), JuMP.index(pref), nothing) return isnothing(object) ? "" : object.name end @@ -516,7 +520,7 @@ function parameter_by_name(model::InfiniteModel, name::String) _update_param_name_dict(model, model.dependent_params) _update_param_name_dict(model, model.finite_params) end - index = get(_param_name_dict(model), name, nothing) + index = Base.get(_param_name_dict(model), name, nothing) if isnothing(index) return nothing elseif index == IndependentParameterIndex(-1) diff --git a/src/semi_infinite_variables.jl b/src/semi_infinite_variables.jl index 68e4d6137..7c0b6c8a8 100644 --- a/src/semi_infinite_variables.jl +++ b/src/semi_infinite_variables.jl @@ -36,7 +36,7 @@ end function _data_object( vref::SemiInfiniteVariableRef )::VariableData{SemiInfiniteVariable{GeneralVariableRef}} - object = get(_data_dictionary(vref), JuMP.index(vref), nothing) + object = Base.get(_data_dictionary(vref), JuMP.index(vref), nothing) if isnothing(object) error("Invalid point variable reference, cannot find ", "corresponding variable in the model. 
This is likely ", @@ -323,7 +323,7 @@ function JuMP.add_variable( divref = dispatch_variable_ref(ivref) eval_supps = var.eval_supports JuMP.check_belongs_to_model(divref, model) - existing_index = get(model.semi_lookup, (ivref, eval_supps), nothing) + existing_index = Base.get(model.semi_lookup, (ivref, eval_supps), nothing) if isnothing(existing_index) data_object = VariableData(var, name) vindex = _add_data_object(model, data_object) diff --git a/src/transform.jl b/src/transform.jl index 0935f3c23..541d7c3d9 100644 --- a/src/transform.jl +++ b/src/transform.jl @@ -24,7 +24,7 @@ for (I, A, name) = ((:FiniteParameterIndex, :FiniteParameterAttr, :finite_parame return haskey(cache.$(name), key) end function Base.get(cache::TransformAttrCache, key::Tuple{$I, $A}, default) - return get(cache.$(name), key, default) + return Base.get(cache.$(name), key, default) end end end @@ -40,7 +40,7 @@ function Base.haskey(cache::TransformAttrCache, attr::ModelAttr) return haskey(cache.model, attr) end function Base.get(cache::TransformAttrCache, attr::ModelAttr, default) - return get(cache.model, attr, default) + return Base.get(cache.model, attr, default) end ################################################################################ @@ -108,7 +108,7 @@ for (I, A) = ((:FiniteParameterIndex, :FiniteParameterAttr), @eval begin if !($I in (DependentParametersIndex, InfOptConstraintIndex)) function get(model::InfiniteModel, idx::$I, attr::$A) - value = get(model.transform_attrs, (idx, attr), nothing) + value = Base.get(model.transform_attrs, (idx, attr), nothing) if isnothing(value) error("$(dispatch_variable_ref(model, idx)) does not have transform ", "attribute `$attr`.") @@ -134,7 +134,7 @@ function get( idx::InfOptConstraintIndex, attr::ConstraintAttr ) - value = get(model.transform_attrs, (idx, attr), nothing) + value = Base.get(model.transform_attrs, (idx, attr), nothing) if isnothing(value) error("$(InfOptConstraintRef(model, idx)) does not have transform ", "attribute `$attr`.") @@ -146,7 +146,7 @@ function get( idx::DependentParametersIndex, attr::InfiniteParameterAttr ) - value = get(model.transform_attrs, (idx, attr), nothing) + value = Base.get(model.transform_attrs, (idx, attr), nothing) if isnothing(value) error("The dependent parameter group with index `$idx` does not ", "have transform attribute `$attr`.") @@ -154,7 +154,7 @@ function get( return value end function get(model::InfiniteModel, attr::ModelAttr) - value = get(model.transform_attrs, attr, nothing) + value = Base.get(model.transform_attrs, attr, nothing) if isnothing(value) error("The model does not have transform attribute `$attr`.") end @@ -171,8 +171,3 @@ function set(model::InfiniteModel, attr::ModelAttr, value) model.transform_attrs[attr] = value return end - -################################################################################ -# DEFAULT TRANSFORM ATTRIBUTES -################################################################################ -attribute_value_type(::Supports) = Union{Dict{Float64, DataType}, Dict{Vector{Float64}, DataType}} diff --git a/src/variable_basics.jl b/src/variable_basics.jl index fbc8e87a2..2e238dc0b 100644 --- a/src/variable_basics.jl +++ b/src/variable_basics.jl @@ -162,7 +162,7 @@ julia> name(vref) ``` """ function JuMP.name(vref::DecisionVariableRef)::String - object = get(_data_dictionary(vref), JuMP.index(vref), nothing) + object = Base.get(_data_dictionary(vref), JuMP.index(vref), nothing) return isnothing(object) ? 
"" : object.name end @@ -246,7 +246,7 @@ function JuMP.variable_by_name( _update_var_name_dict(model, model.point_vars) _update_var_name_dict(model, model.finite_vars) end - index = get(_var_name_dict(model), name, nothing) + index = Base.get(_var_name_dict(model), name, nothing) if isnothing(index) return nothing elseif index == FiniteVariableIndex(-1) From f5559af5b6a01795389b2c532f9e74ce3e3e0e60 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Wed, 23 Mar 2022 17:18:03 -0400 Subject: [PATCH 05/10] Can add parameter transform attributes via macros --- src/datatypes.jl | 17 ++++++++ src/scalar_parameters.jl | 72 ++++++++++++++++++++++++++++------ src/transform.jl | 83 +++++++++++++++++++++++++++++++++++++++- 3 files changed, 159 insertions(+), 13 deletions(-) diff --git a/src/datatypes.jl b/src/datatypes.jl index 9dee886a1..e5e19bba3 100644 --- a/src/datatypes.jl +++ b/src/datatypes.jl @@ -1015,6 +1015,23 @@ An abstract type for transformation interfaces/models that act as a backend for """ abstract type AbstractTransformBackend end + +""" + ObjectWithAttributes{O, D <: Dict} + +This serves as a wrapper type to store a modeling object (e.g., an infinite +parameter) and [`AbstractTransformAttr`](@ref)s that should be added when the +object is added to the model. + +**Fields** +- `object:O`: The modeling object to be added. +- `attributes::D`: The dictionary of attributes to be added (`attr` => `value`). +""" +struct ObjectWithAttributes{O, D <: Dict} + object::O + attributes::D +end + # TODO maybe add more types if needed ################################################################################ diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl index 68728b6af..e2aa208f4 100644 --- a/src/scalar_parameters.jl +++ b/src/scalar_parameters.jl @@ -133,13 +133,31 @@ function _set_core_variable_object( end ################################################################################ -# TRANSFORM ATTRIBUTES +# TRANSFORM ATTRIBUTE KEYWORD SUPPORT ################################################################################ -# Enable Supports() value type checking for infinite parameters -attribute_value_type(::Supports) = Union{Dict{Float64, DataType}, Dict{Vector{Float64}, DataType}} +# Store the keywords to be accepted by @infinite_parameter +const _InfiniteParameterKeywords = Dict{Symbol, Any}() + +# Add registeration method for @infinite_parameter +function register_transform_keyword( + kw::Symbol, + attr::InfiniteParameterAttr, + type = nothing + ) + return _add_transform_keyword(_InfiniteParameterKeywords, kw, attr, type) +end -# TODO finish +# Store the keywords to be accepted by @finite_parameter +const _FiniteParameterKeywords = Dict{Symbol, Any}() +# Add registeration method for @finite_parameter +function register_transform_keyword( + kw::Symbol, + attr::FiniteParameterAttr, + type = nothing + ) + return _add_transform_keyword(_FiniteParameterKeywords, kw, attr, type) +end ################################################################################ # PARAMETER DEFINITION @@ -156,8 +174,9 @@ helper method for [`@infinite_parameter`](@ref). julia> param = build_parameter(error, IntervalDomain(0, 3)); ``` """ -function build_parameter(_error::Function, domain::InfiniteScalarDomain) - return IndependentParameter(domain) +function build_parameter(_error::Function, domain::InfiniteScalarDomain; kwargs...) 
+ return _process_transform_kwargs(_error, _InfiniteParameterKeywords, kwargs, + IndependentParameter(domain)) end # Fallback for bad domain types @@ -168,7 +187,7 @@ function build_parameter(_error::Function, domain::AbstractInfiniteDomain) end """ - build_parameter(_error::Function, value::Real)::FiniteParameter + build_parameter(_error::Function, value::Real; [kwargs...])::FiniteParameter Returns a [`FiniteParameter`](@ref) given the appropriate information. This is analagous to `JuMP.build_variable`. This is meant to primarily serve as @@ -180,8 +199,9 @@ julia> build_finite_parameter(error, 1) FiniteParameter(1.0) ``` """ -function build_parameter(_error::Function, value::Real) - return FiniteParameter(value) +function build_parameter(_error::Function, value::Real; kwargs...) + return _process_transform_kwargs(_error, _FiniteParameterKeywords, kwargs, + FiniteParameter(value)) end # Generic fallback @@ -212,7 +232,7 @@ function add_parameter( model::InfiniteModel, p::IndependentParameter, name::String = "" - ) # TODO add attribute kwargs + ) obj_num = length(_param_object_indices(model)) + 1 param_num = model.last_param_num += 1 data_object = ScalarParameterData(p, obj_num, param_num, name) @@ -244,13 +264,43 @@ function add_parameter( model::InfiniteModel, p::FiniteParameter, name::String = "" - ) # TODO add attribute kwargs + ) data_object = ScalarParameterData(p, -1, -1, name) obj_index = _add_data_object(model, data_object) model.name_to_param = nothing return GeneralVariableRef(model, obj_index.value, typeof(obj_index)) end +""" + add_parameter(model::InfiniteModel, obj::ObjectWithAttributes{<:ScalarParameter}, + name::String = "")::GeneralVariableRef + +Add a parameter build with `build_parameter` that contains [`InfiniteParameterAttr`](@ref)s +that need to be added to `model` as well. +""" +function add_parameter( + model::InfiniteModel, + obj::ObjectWithAttributes{<:ScalarParameter}, + name::String = "" + ) + pref = add_parameter(model, obj.object, name) + idx = JuMP.index(pref) + for (a, v) in obj.attributes + InfiniteOpt.set(model, idx, a, v) + end + return pref +end + +################################################################################ +# BASIC SUPPORT API +################################################################################ +# Enable Supports() value type checking for infinite parameters +attribute_value_type(::Supports) = Union{Dict{Float64, DataType}, Dict{Vector{Float64}, DataType}} + +# TODO Is this absolutely needed, is there a more modular way to update them incrementally? 
+ +# TODO finish + ################################################################################ # PARAMETER DEPENDENCIES ################################################################################ diff --git a/src/transform.jl b/src/transform.jl index 541d7c3d9..0c1c73af3 100644 --- a/src/transform.jl +++ b/src/transform.jl @@ -118,7 +118,7 @@ for (I, A) = ((:FiniteParameterIndex, :FiniteParameterAttr), end function set(model::InfiniteModel, idx::$I, attr::$A, value) if !isa(value, attribute_value_type(attr)) - error("Expected a value of type `$(attribute_value_type(attr))` for", + error("Expected a value of type `$(attribute_value_type(attr))` for ", "the attribute `$(attr)`, but got `$(value)` of type ", "`$(typeof(value))`.") end @@ -164,10 +164,89 @@ end # Set for models function set(model::InfiniteModel, attr::ModelAttr, value) if !isa(value, attribute_value_type(attr)) - error("Expected a value of type `$(attribute_value_type(attr))` for", + error("Expected a value of type `$(attribute_value_type(attr))` for ", "the attribute `$(attr)`, but got `$(value)` of type ", "`$(typeof(value))`.") end model.transform_attrs[attr] = value return end + +################################################################################ +# KEYWORD INPUT API +################################################################################ +""" + register_transform_keyword(kw::Symbol, attr::AbstractTransformAttr, + [type = attribute_value_type(attr)])::Nothing + +Register a new [`AbstractTransformAttr`](@ref) to be specified via keyword +argument to the appropriate creation macro (e.g., `@infinite_parameter` for +[`InfiniteParameterAttr`](@ref)s). Using `type` indicates what type of input +of input is accepted for `kw`. Note that `type` need not equal +`attribute_value_type(attr)` if [`process_transform_value`](@ref) is extended +for that value type. This is intended for advanced users who are implementing +their own transformation backend. Errors if `kw` is already in use. +""" +function register_transform_keyword end + +# Helper function for keyword registration +function _add_transform_keyword(dict, kw, attr, type) + if haskey(dict, kw) + error("Cannot register $kw as a keyword argument, since it is already ", + "in use.") + end + if isnothing(type) + dict[kw] = (attr, attribute_value_type(attr)) + else + dict[kw] = (attr, type) + end + return +end + +""" + process_transform_value(_error::Function, attr, value, object) + +Process a transformation backend attribute `attr` with a raw `value` that will +be associated with `object` once it is added to the model. This is intended +to handle raw input `value`s collected via keyword arguments when `object` is +created (i.e., transformation keywords that have been registered via +[`register_transform_keyword`](@ref)). This provides extra flexibility allow +more convenient input for users. By default `value` is simply returned. This +should be extended for registered transformation attribute types that wish to +process input that doesn't readily match [`attribute_value_type`](@ref). Note +that this is intended as an advanced function for developers of transformation +backends. 
+""" +function process_transform_value(_error::Function, attr, value, object) + return value +end + +# Helper function for checking the transformation keywords +function _process_transform_kwargs(_error, dict, kwargs, obj) + for (k, v) in kwargs + if !haskey(dict, k) + _error("Unrecognized keyword argument `$k`.") + elseif !isa(v, dict[k][2]) + _error("The `$k` keyword argument should be of type ", + "`$(dict[k][2])` not of type `$(typeof(v))`.") + end + end + if isempty(kwargs) + return obj + else + processed_kwargs = Dict() + sizehint!(processed_kwargs, length(kwargs)) + for (k, v) in kwargs + attr = dict[k][1] + if haskey(processed_kwargs, attr) + kw_inds = findall(kw -> dict[kw][1] == attr, keys(kwargs)) + kws = keys(kwargs)[kw_inds] + _error("The following keyword arguments cannot be given ", + "simultaneously: '", join(kws[1:end-1], "', "), "' and '", + kws[end], "'.") + end + processed_kwargs[attr] = process_transform_value(_error, attr, v, obj) + end + return ObjectWithAttributes(obj, processed_kwargs) + end +end From 7637c4bd3304d219a8eabf2810b4badc37c4b8bb Mon Sep 17 00:00:00 2001 From: pulsipher Date: Wed, 23 Mar 2022 17:21:03 -0400 Subject: [PATCH 06/10] comment clarification --- src/transform.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transform.jl b/src/transform.jl index 0c1c73af3..e339ee09f 100644 --- a/src/transform.jl +++ b/src/transform.jl @@ -221,7 +221,7 @@ function process_transform_value(_error::Function, attr, value, object) return value end -# Helper function for checking the transformation keywords +# Helper function for checking the transformation keywords in build functions function _process_transform_kwargs(_error, dict, kwargs, obj) for (k, v) in kwargs if !haskey(dict, k) From acefc00925cd1543bd217cf71a1066ee2f54cbc5 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Fri, 25 Mar 2022 17:22:58 -0400 Subject: [PATCH 07/10] Added incremental attribute API --- src/datatypes.jl | 61 ++++------------------------------------ src/scalar_parameters.jl | 22 +++++---------- src/transform.jl | 56 ++++++++++++++++++++++++++++++++++-- 3 files changed, 66 insertions(+), 73 deletions(-) diff --git a/src/datatypes.jl b/src/datatypes.jl index e5e19bba3..88cb05423 100644 --- a/src/datatypes.jl +++ b/src/datatypes.jl @@ -912,14 +912,6 @@ information about infinite parameters to be specified by the user. """ abstract type InfiniteParameterAttr <: AbstractTransformAttr end -""" - Supports <: InfiniteParameterAttr - -An attribute for storing support (discretization) points that will be used -by infinite parameter to approximate the infinite domain of a problem. -""" -struct Supports <: InfiniteParameterAttr end - """ VariableAttr <: AbstractTransformAttr @@ -956,13 +948,6 @@ information about constraints to be specified by the user. """ abstract type ConstraintAttr <: AbstractTransformAttr end -""" - BoundaryCondition <: ConstraintAttr - -An attribute to indicate whether a constraint is a boundary condition. -""" -struct BoundaryCondition <: ConstraintAttr end # TODO maybe do something else - """ ModelAttr <: AbstractTransformAttr @@ -1007,15 +992,6 @@ struct TransformAttrCache end end -""" - AbstractTransformBackend - -An abstract type for transformation interfaces/models that act as a backend for -`InfiniteModel`s. 
-""" -abstract type AbstractTransformBackend end - - """ ObjectWithAttributes{O, D <: Dict} @@ -1032,42 +1008,15 @@ struct ObjectWithAttributes{O, D <: Dict} attributes::D end -# TODO maybe add more types if needed - -################################################################################ -# BASIC SUPPORT LABELS -################################################################################ -""" - All - -This abstract support label is unique in that it isn't associated with a particular set of -supports, but rather is used used to indicate that all supports should be used. -""" -abstract type All end - -""" - PublicLabel <: All - -An abstract label used to denote that labels that should be given to the user by -default. -""" -abstract type PublicLabel <: All end - -""" - UserDefined <: PublicLabel - -A support label for supports that are supplied by the user directly to an infinite -parameter. """ -struct UserDefined <: PublicLabel end + AbstractTransformBackend +An abstract type for transformation interfaces/models that act as a backend for +`InfiniteModel`s. """ - InternalLabel <: All +abstract type AbstractTransformBackend end -An abstract label for support labels that are associated with supports that should -not be reported to the user by default. -""" -abstract type InternalLabel <: All end +# TODO maybe add more types if needed ################################################################################ # INFINITE MODEL diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl index e2aa208f4..881564e85 100644 --- a/src/scalar_parameters.jl +++ b/src/scalar_parameters.jl @@ -78,22 +78,22 @@ function _core_variable_object(pref::FiniteParameterRef)::FiniteParameter end # Extend _parameter_number -function _parameter_number(pref::IndependentParameterRef)::Int +function _parameter_number(pref::IndependentParameterRef) return _data_object(pref).parameter_num end # Extend _parameter_numbers -function _parameter_numbers(pref::IndependentParameterRef)::Vector{Int} +function _parameter_numbers(pref::IndependentParameterRef) return [_parameter_number(pref)] end # Extend _object_number -function _object_number(pref::IndependentParameterRef)::Int +function _object_number(pref::IndependentParameterRef) return _data_object(pref).object_num end # Extend _object_numbers -function _object_numbers(pref::IndependentParameterRef)::Vector{Int} +function _object_numbers(pref::IndependentParameterRef) return [_object_number(pref)] end @@ -238,6 +238,7 @@ function add_parameter( data_object = ScalarParameterData(p, obj_num, param_num, name) obj_index = _add_data_object(model, data_object) model.name_to_param = nothing + _update_transform_attributes(model, p) return GeneralVariableRef(model, obj_index.value, typeof(obj_index)) end @@ -268,6 +269,7 @@ function add_parameter( data_object = ScalarParameterData(p, -1, -1, name) obj_index = _add_data_object(model, data_object) model.name_to_param = nothing + _update_transform_attributes(model, p) return GeneralVariableRef(model, obj_index.value, typeof(obj_index)) end @@ -291,16 +293,6 @@ function add_parameter( return pref end -################################################################################ -# BASIC SUPPORT API -################################################################################ -# Enable Supports() value type checking for infinite parameters -attribute_value_type(::Supports) = Union{Dict{Float64, DataType}, Dict{Vector{Float64}, DataType}} - -# TODO Is this absolutely needed, is there a more modular way to 
update them incrementally? - -# TODO finish - ################################################################################ # PARAMETER DEPENDENCIES ################################################################################ @@ -617,7 +609,7 @@ end """ set_infinite_domain(pref::IndependentParameterRef, - domain::InfiniteScalarDomain)::Nothing + domain::InfiniteScalarDomain)::Nothing Reset the infinite domain of `pref` with another `InfiniteScalarDomain`. An error will be thrown if `pref` is being used by some measure. diff --git a/src/transform.jl b/src/transform.jl index e339ee09f..23ace00d9 100644 --- a/src/transform.jl +++ b/src/transform.jl @@ -93,8 +93,6 @@ function set(model::InfiniteModel, attr, value) "attributes of type `$(typeof(attr))`.") end -# TODO enable `set` to optionally update the transformation backend incrementally - # Singular dispatch variables (and the dependent parameter & constraint getters) for (I, A) = ((:FiniteParameterIndex, :FiniteParameterAttr), (:IndependentParameterIndex, :InfiniteParameterAttr), @@ -250,3 +248,57 @@ function _process_transform_kwargs(_error, dict, kwargs, obj) return ObjectWithAttributes(obj, processed_kwargs) end end + +################################################################################ +# DYNAMIC ATTRIBUTE UPDATE API +################################################################################ +# Create storage container for transform attributes to be updated incrementally +const _IncrementalAttributes = Set{AbstractTransformAttr}() + +""" + register_attribute_to_update(attr::AbstractTransformAttr)::Nothing + +Register a transformation backend attribute `attr` that can be updated +incrementally when modeling objects are added to `InfiniteModel`s. This +must be implemented in combination with [`update_attribute_on_creation`](@ref). +Only individuals writing transformation backends should use this function. +""" +function register_attribute_to_update(attr::AbstractTransformAttr) + push!(_IncrementalAttributes, attr) + return +end + +""" + update_attribute_on_creation(model::InfiniteModel, attr::AbstractTransformAttr, obj)::Nothing + +Update a particular transformation backend attribute `attr` when an `InfiniteOpt` +modeling object `obj` is added to `model`. This enables transformation attributes +to be built-up incrementally as the `InfiniteModel` is created. This method is +only invoked for attributes that have been registered via +[`register_attribute_to_update`](@ref). By default, nothing is updated. Those +writing transformation backends should extend this based on particular +attribute-object combinations where an update should be made. For instance, we +could obtain the discrete supports with the point from a point variable when it +is created (this is what `TranscriptionOpt` does). + +Note this should NOT be used to modify/create an attribute for `obj` itself, it +should only be used to modify attributes associated with other modeling objects. +Instead use [`register_transform_keyword`](@ref) if you what to create an attribute +for `obj` on creation. 
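For instance, the following sketch registers a hypothetical model-level attribute that simply counts independent parameters as they are added; the `ParameterCounter` type is invented here, and the only hooks taken from this patch are `register_attribute_to_update` and `update_attribute_on_creation` (which `add_parameter` reaches via the `_update_transform_attributes` helper).

```julia
using InfiniteOpt

# Hypothetical model-level attribute tracked by a backend (not part of this patch)
struct ParameterCounter <: InfiniteOpt.ModelAttr end

# Bump the counter whenever an independent parameter object is added to the model
function InfiniteOpt.update_attribute_on_creation(
    model::InfiniteModel,
    attr::ParameterCounter,
    p::IndependentParameter
    )
    model.transform_attrs[attr] = Base.get(model.transform_attrs, attr, 0) + 1
    return
end

InfiniteOpt.register_attribute_to_update(ParameterCounter())
```

Note that, in line with the caution above, the method only touches a model-level attribute rather than creating an attribute for the added parameter itself.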
+""" +function update_attribute_on_creation( + model::InfiniteModel, + attr::AbstractTransformAttr, + obj + ) + return +end + +# Helper function for incrementally updating transform attributes when +# adding modeling objects to the model +function _update_transform_attributes(model, obj) + for attr in _IncrementalAttributes + update_attribute_on_creation(model, attr, obj) + end + return +end From 7e315fc95d72a331d8dd3910c3346c670122b59e Mon Sep 17 00:00:00 2001 From: pulsipher Date: Thu, 7 Apr 2022 09:06:24 -0400 Subject: [PATCH 08/10] minor cleanup --- src/constraints.jl | 96 ++++++++++++++++++++-------------------- src/scalar_parameters.jl | 1 - 2 files changed, 48 insertions(+), 49 deletions(-) diff --git a/src/constraints.jl b/src/constraints.jl index f3d44a21a..2a772433f 100644 --- a/src/constraints.jl +++ b/src/constraints.jl @@ -25,7 +25,7 @@ CachingOptimizer state: NO_OPTIMIZER Solver name: No optimizer attached. ``` """ -JuMP.owner_model(cref::InfOptConstraintRef)::InfiniteModel = cref.model +JuMP.owner_model(cref::InfOptConstraintRef) = cref.model """ JuMP.index(cref::InfOptConstraintRef)::InfOptConstraintIndex @@ -38,10 +38,10 @@ julia> index(cref) InfOptConstraintIndex(2) ``` """ -JuMP.index(cref::InfOptConstraintRef)::InfOptConstraintIndex = cref.index +JuMP.index(cref::InfOptConstraintRef) = cref.index # Extend Base and JuMP functions -function Base.:(==)(v::InfOptConstraintRef, w::InfOptConstraintRef)::Bool +function Base.:(==)(v::InfOptConstraintRef, w::InfOptConstraintRef) return v.model === w.model && v.index == w.index end Base.broadcastable(cref::InfOptConstraintRef) = Ref(cref) @@ -53,7 +53,7 @@ Base.broadcastable(cref::InfOptConstraintRef) = Ref(cref) function _add_data_object( model::InfiniteModel, object::ConstraintData - )::InfOptConstraintIndex + ) return MOIUC.add_item(model.constraints, object) end @@ -65,7 +65,7 @@ function _data_dictionary( end # Extend _data_object -function _data_object(cref::InfOptConstraintRef)::ConstraintData +function _data_object(cref::InfOptConstraintRef) object = Base.get(_data_dictionary(cref), JuMP.index(cref), nothing) if isnothing(object) error("Invalid constraint reference, cannot find corresponding ", @@ -78,7 +78,7 @@ end # Return the core constraint object function _core_constraint_object( cref::InfOptConstraintRef - )::JuMP.AbstractConstraint + ) return _data_object(cref).constraint end @@ -88,7 +88,7 @@ function _adaptive_data_update( cref::InfOptConstraintRef, c::C, data::ConstraintData{C} - )::Nothing where {C <: JuMP.AbstractConstraint} + ) where {C <: JuMP.AbstractConstraint} data.constraint = c return end @@ -98,7 +98,7 @@ function _adaptive_data_update( cref::InfOptConstraintRef, c::C1, data::ConstraintData{C2} - )::Nothing where {C1, C2} + ) where {C1, C2} new_data = ConstraintData(c, data.object_nums, data.name, data.measure_indices, data.is_info_constraint) _data_dictionary(cref)[JuMP.index(cref)] = new_data @@ -109,31 +109,31 @@ end function _set_core_constraint_object( cref::InfOptConstraintRef, constr::JuMP.AbstractConstraint - )::Nothing + ) _adaptive_data_update(cref, constr, _data_object(cref)) set_optimizer_model_ready(JuMP.owner_model(cref), false) return end # Extend _object_numbers -function _object_numbers(cref::InfOptConstraintRef)::Vector{Int} +function _object_numbers(cref::InfOptConstraintRef) return _data_object(cref).object_nums end # Extend _measure_dependencies function _measure_dependencies( cref::InfOptConstraintRef - )::Vector{MeasureIndex} + ) return _data_object(cref).measure_indices end # 
Return if this constraint is an info constraint -function _is_info_constraint(cref::InfOptConstraintRef)::Bool +function _is_info_constraint(cref::InfOptConstraintRef) return _data_object(cref).is_info_constraint end # Extend _delete_data_object -function _delete_data_object(cref::InfOptConstraintRef)::Nothing +function _delete_data_object(cref::InfOptConstraintRef) delete!(_data_dictionary(cref), JuMP.index(cref)) return end @@ -145,7 +145,7 @@ end function _check_restrictions( restrictions::DomainRestrictions{GeneralVariableRef}; _error = error - )::Nothing + ) depend_counter = Dict{DependentParameterRef, Int}() for (pref, domain) in restrictions # check that pref is an infinite parameter @@ -208,7 +208,7 @@ function JuMP.build_constraint( func, set, restrictions::DomainRestrictions - )::DomainRestrictedConstraint + ) # make the constraint and check the domain restrictions constr = JuMP.build_constraint(_error, func, set) _check_restrictions(restrictions, _error = _error) @@ -219,7 +219,7 @@ end function _validate_restrictions( model::InfiniteModel, restrictions::DomainRestrictions - )::Nothing + ) depend_supps = Dict{DependentParametersIndex, Matrix{Float64}}() for (pref, domain) in restrictions # check validity @@ -252,7 +252,7 @@ end function _update_var_constr_mapping( vrefs::Vector{GeneralVariableRef}, cref::InfOptConstraintRef - )::Nothing + ) for vref in vrefs dvref = dispatch_variable_ref(vref) push!(_constraint_dependencies(dvref), JuMP.index(cref)) @@ -293,7 +293,7 @@ function JuMP.add_constraint( c::JuMP.AbstractConstraint, name::String = ""; is_info_constr::Bool = false - )::InfOptConstraintRef + ) # gather the unique list of variable references for testing and mapping vrefs = _all_function_variables(JuMP.jump_function(c)) # test in the model @@ -321,7 +321,7 @@ function JuMP.add_constraint( model::InfiniteModel, c::DomainRestrictedConstraint, name::String = "" - )::InfOptConstraintRef + ) # test domain restrictions and add needed supports _validate_restrictions(model, c.restrictions) # add the underlying constraint @@ -350,7 +350,7 @@ true function JuMP.is_valid( model::InfiniteModel, cref::InfOptConstraintRef - )::Bool + ) return (model === JuMP.owner_model(cref) && JuMP.index(cref) in keys(_data_dictionary(cref))) end @@ -376,7 +376,7 @@ MathOptInterface.LessThan{Float64}(1.0)) """ function JuMP.constraint_object( cref::InfOptConstraintRef - )::JuMP.AbstractConstraint + ) return _core_constraint_object(cref) end @@ -391,7 +391,7 @@ julia> name(cref) "constr_name" ``` """ -function JuMP.name(cref::InfOptConstraintRef)::String +function JuMP.name(cref::InfOptConstraintRef) object = Base.get(_data_dictionary(cref), JuMP.index(cref), nothing) return isnothing(object) ? 
"" : object.name end @@ -412,7 +412,7 @@ julia> name(cref) function JuMP.set_name( cref::InfOptConstraintRef, name::String - )::Nothing + ) _data_object(cref).name = name JuMP.owner_model(cref).name_to_constr = nothing return @@ -429,7 +429,7 @@ end # Enforce that the MOI set is a traditional scalar one function _enforce_rhs_set( set::Union{MOI.LessThan{T}, MOI.GreaterThan{T}, MOI.EqualTo{T}} - )::Nothing where {T} + ) where {T} return end function _enforce_rhs_set(set) @@ -459,7 +459,7 @@ con : 2 x ≤ 4.0 function JuMP.set_normalized_rhs( cref::InfOptConstraintRef, value::Real - )::Nothing + ) old_constr = JuMP.constraint_object(cref) _enforce_rhs_set(JuMP.moi_set(old_constr)) new_set = _set_set_value(JuMP.moi_set(old_constr), value) @@ -474,7 +474,7 @@ end Return the right-hand side term of `cref` after JuMP has converted the constraint into its normalized form. """ -function JuMP.normalized_rhs(cref::InfOptConstraintRef)::Float64 +function JuMP.normalized_rhs(cref::InfOptConstraintRef) constr = JuMP.constraint_object(cref) set = JuMP.moi_set(constr) _enforce_rhs_set(set) @@ -494,7 +494,7 @@ will be translated by `-value`. For example, given a constraint `2x <= function JuMP.add_to_function_constant( cref::InfOptConstraintRef, value::Real - )::Nothing + ) current_value = JuMP.normalized_rhs(cref) JuMP.set_normalized_rhs(cref, current_value - value) return @@ -524,7 +524,7 @@ function JuMP.set_normalized_coefficient( cref::InfOptConstraintRef, variable::GeneralVariableRef, value::Real - )::Nothing + ) # update the constraint expression and update the constraint old_constr = JuMP.constraint_object(cref) new_func = _set_variable_coefficient!(JuMP.jump_function(old_constr), @@ -544,7 +544,7 @@ normalized the constraint into its standard form. function JuMP.normalized_coefficient( cref::InfOptConstraintRef, variable::GeneralVariableRef - )::Float64 + ) constr = JuMP.constraint_object(cref) func = JuMP.jump_function(constr) return _affine_coefficient(func, variable) # checks valid @@ -553,7 +553,7 @@ end # Return the appropriate constraint reference given the index and model function _make_constraint_ref(model::InfiniteModel, index::InfOptConstraintIndex - )::InfOptConstraintRef + ) return InfOptConstraintRef(model, index) end @@ -574,7 +574,7 @@ constr_name : x + pt = 3.0 function JuMP.constraint_by_name( model::InfiniteModel, name::String - )::Union{InfOptConstraintRef, Nothing} + ) if isnothing(model.name_to_constr) # Inspired from MOI/src/Utilities/model.jl model.name_to_constr = Dict{String, Int}() @@ -622,7 +622,7 @@ function JuMP.num_constraints( model::InfiniteModel, function_type, set_type - )::Int + ) counter = 0 for (index, data_object) in model.constraints if isa(JuMP.jump_function(data_object.constraint), function_type) && @@ -637,7 +637,7 @@ end function JuMP.num_constraints( model::InfiniteModel, function_type - )::Int + ) return JuMP.num_constraints(model, function_type, MOI.AbstractSet) end @@ -645,12 +645,12 @@ end function JuMP.num_constraints( model::InfiniteModel, set_type::Type{<:MOI.AbstractSet} - )::Int + ) return JuMP.num_constraints(model, Any, set_type) end # All the constraints -function JuMP.num_constraints(model::InfiniteModel)::Int +function JuMP.num_constraints(model::InfiniteModel) return length(model.constraints) end @@ -691,7 +691,7 @@ function JuMP.all_constraints( model::InfiniteModel, function_type, set_type - )::Vector{InfOptConstraintRef} + ) constr_list = Vector{InfOptConstraintRef}(undef, JuMP.num_constraints(model, function_type, set_type)) counter = 1 
@@ -709,7 +709,7 @@ end function JuMP.all_constraints( model::InfiniteModel, function_type - )::Vector{InfOptConstraintRef} + ) return JuMP.all_constraints(model, function_type, MOI.AbstractSet) end @@ -717,14 +717,14 @@ end function JuMP.all_constraints( model::InfiniteModel, set_type::Type{<:MOI.AbstractSet} - )::Vector{InfOptConstraintRef} + ) return JuMP.all_constraints(model, JuMP.AbstractJuMPScalar, set_type) end # All the constraints function JuMP.all_constraints( model::InfiniteModel - )::Vector{InfOptConstraintRef} + ) return [_make_constraint_ref(model, idx) for (idx, _) in model.constraints] end @@ -745,7 +745,7 @@ julia> all_constraints(model) """ function JuMP.list_of_constraint_types( model::InfiniteModel - )::Vector{Tuple{DataType, DataType}} + ) type_set = Set{Tuple{DataType, DataType}}() for (index, object) in model.constraints push!(type_set, (typeof(JuMP.jump_function(object.constraint)), @@ -769,7 +769,7 @@ julia> parameter_refs(cref) (t,) ``` """ -function parameter_refs(cref::InfOptConstraintRef)::Tuple +function parameter_refs(cref::InfOptConstraintRef) model = JuMP.owner_model(cref) obj_indices = _param_object_indices(model)[_object_numbers(cref)] return Tuple(_make_param_tuple_element(model, idx) for idx in obj_indices) @@ -790,7 +790,7 @@ julia> has_domain_restrictions(cref) true ``` """ -function has_domain_restrictions(cref::InfOptConstraintRef)::Bool +function has_domain_restrictions(cref::InfOptConstraintRef) return !isempty(domain_restrictions(cref)) end @@ -808,7 +808,7 @@ Subdomain restrictions (1): t ∈ [0, 2] """ function domain_restrictions( cref::InfOptConstraintRef - )::DomainRestrictions{GeneralVariableRef} + ) return Base.get(JuMP.owner_model(cref).constraint_restrictions, JuMP.index(cref), DomainRestrictions()) end @@ -834,7 +834,7 @@ function set_domain_restrictions( cref::InfOptConstraintRef, restrictions::DomainRestrictions{GeneralVariableRef}; force::Bool = false - )::Nothing + ) if has_domain_restrictions(cref) && !force error("$cref already has domain restrictions. Consider adding more using " * "`add_domain_restrictions` or overwriting them by setting " * @@ -855,7 +855,7 @@ end function _update_restrictions( old::DomainRestrictions{GeneralVariableRef}, new::DomainRestrictions{GeneralVariableRef} - )::Nothing + ) # check each new restriction for (pref, domain) in new # we have a new restriction @@ -901,7 +901,7 @@ Subdomain restrictions (1): t ∈ [0, 2] function add_domain_restrictions( cref::InfOptConstraintRef, new_restrictions::DomainRestrictions{GeneralVariableRef} - )::Nothing + ) # check the new restrictions _check_restrictions(new_restrictions) model = JuMP.owner_model(cref) @@ -936,7 +936,7 @@ c1 : y(x) ≤ 42, ∀ x[1] ∈ [-1, 1], x[2] ∈ [-1, 1] """ function delete_domain_restrictions( cref::InfOptConstraintRef - )::Nothing + ) # delete the restrictions if there are any delete!(JuMP.owner_model(cref).constraint_restrictions, JuMP.index(cref)) # update status @@ -972,7 +972,7 @@ Subject to function JuMP.delete( model::InfiniteModel, cref::InfOptConstraintRef - )::Nothing + ) # check valid reference @assert JuMP.is_valid(model, cref) "Invalid constraint reference." 
# update variable dependencies diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl index 881564e85..3ab89c651 100644 --- a/src/scalar_parameters.jl +++ b/src/scalar_parameters.jl @@ -449,7 +449,6 @@ end ################################################################################ # NAME METHODS ################################################################################ - """ JuMP.name(pref::Union{IndependentParameterRef, FiniteParameterRef})::String From 68cbf9c9485b7422d57735f2a92663e0232e2105 Mon Sep 17 00:00:00 2001 From: pulsipher Date: Fri, 15 Apr 2022 12:06:33 -0400 Subject: [PATCH 09/10] backend restructure --- src/datatypes.jl | 36 ++++++++++++++++++------------------ src/scalar_parameters.jl | 6 +++++- src/transform.jl | 33 ++++++++++++++++++++++++--------- 3 files changed, 47 insertions(+), 28 deletions(-) diff --git a/src/datatypes.jl b/src/datatypes.jl index 88cb05423..9cc1a2471 100644 --- a/src/datatypes.jl +++ b/src/datatypes.jl @@ -964,29 +964,29 @@ A convenient container for storing all the transformation attributes stored in an `InfiniteModel` that can be used by the transformation backend. """ struct TransformAttrCache - finite_parameters::Dict{Tuple{FiniteParameterIndex, FiniteParameterAttr}, Any} - independent_parameters::Dict{Tuple{IndependentParameterIndex, InfiniteParameterAttr}, Any} - dependent_params::Dict{Tuple{DependentParametersIndex, InfiniteParameterAttr}, Any} # TODO maybe allow for parameter-wsie attributes - infinite_variables::Dict{Tuple{InfiniteVariableIndex, VariableAttr}, Any} - semi_ifninite_variables::Dict{Tuple{SemiInfiniteVariableIndex, VariableAttr}, Any} - point_variables::Dict{Tuple{PointVariableIndex, VariableAttr}, Any} - derivatives::Dict{Tuple{DerivativeIndex, DerivativeAttr}, Any} - measures::Dict{Tuple{MeasureIndex, MeasureAttr}, Any} - constraints::Dict{Tuple{InfOptConstraintIndex, ConstraintAttr}, Any} + finite_parameters::Dict{FiniteParameterIndex, Dict{FiniteParameterAttr, Any}} + independent_parameters::Dict{IndependentParameterIndex, Dict{InfiniteParameterAttr, Any}} + dependent_params::Dict{DependentParametersIndex, Dict{InfiniteParameterAttr, Any}} # TODO maybe allow for parameter-wsie attributes + infinite_variables::Dict{InfiniteVariableIndex, Dict{VariableAttr, Any}} + semi_ifninite_variables::Dict{SemiInfiniteVariableIndex, Dict{VariableAttr, Any}} + point_variables::Dict{PointVariableIndex, Dict{VariableAttr, Any}} + derivatives::Dict{DerivativeIndex, Dict{DerivativeAttr, Any}} + measures::Dict{MeasureIndex, Dict{MeasureAttr, Any}} + constraints::Dict{InfOptConstraintIndex, Dict{ConstraintAttr, Any}} model::Dict{ModelAttr, Any} # Constructor function TransformAttrCache() return new( - Dict{Tuple{FiniteParameterIndex, FiniteParameterAttr}, Any}(), - Dict{Tuple{IndependentParameterIndex, InfiniteParameterAttr}, Any}(), - Dict{Tuple{DependentParametersIndex, InfiniteParameterAttr}, Any}(), - Dict{Tuple{InfiniteVariableIndex, VariableAttr}, Any}(), - Dict{Tuple{SemiInfiniteVariableIndex, VariableAttr}, Any}(), - Dict{Tuple{PointVariableIndex, VariableAttr}, Any}(), - Dict{Tuple{DerivativeIndex, DerivativeAttr}, Any}(), - Dict{Tuple{MeasureIndex, MeasureAttr}, Any}(), - Dict{Tuple{InfOptConstraintIndex, ConstraintAttr}, Any}(), + Dict{FiniteParameterIndex, Dict{FiniteParameterAttr, Any}}(), + Dict{IndependentParameterIndex, Dict{InfiniteParameterAttr, Any}}(), + Dict{DependentParametersIndex, Dict{InfiniteParameterAttr, Any}}(), + Dict{InfiniteVariableIndex, Dict{VariableAttr, Any}}(), + 
Dict{SemiInfiniteVariableIndex, Dict{VariableAttr, Any}}(),
+            Dict{PointVariableIndex, Dict{VariableAttr, Any}}(),
+            Dict{DerivativeIndex, Dict{DerivativeAttr, Any}}(),
+            Dict{MeasureIndex, Dict{MeasureAttr, Any}}(),
+            Dict{InfOptConstraintIndex, Dict{ConstraintAttr, Any}}(),
             Dict{ModelAttr, Any}()
             )
     end
diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl
index 881564e85..4ce587aac 100644
--- a/src/scalar_parameters.jl
+++ b/src/scalar_parameters.jl
@@ -952,7 +952,7 @@ function JuMP.delete(
     end
     # ensure pref is not used by a parameter function
     if used_by_parameter_function(pref)
-        error("Cannot delete `$pref` since it is used by an parameter ",
+        error("Cannot delete `$pref` since it is used by a parameter ",
              "function(s).")
     end
     # update backend status
@@ -973,6 +973,8 @@ function JuMP.delete(
     _delete_data_object(pref)
     # update the object numbers and parameter numbers
     _update_model_numbers(model, obj_num, param_nums)
+    # delete any backend information
+    delete!(model.transform_backend, JuMP.index(pref))
     return
 end
 
@@ -1001,5 +1003,7 @@ function JuMP.delete(model::InfiniteModel, pref::FiniteParameterRef)::Nothing
     end
     # delete parameter information stored in model
     _delete_data_object(pref)
+    # delete any backend information
+    delete!(model.transform_backend, JuMP.index(pref))
     return
 end
diff --git a/src/transform.jl b/src/transform.jl
index 23ace00d9..72fac727b 100644
--- a/src/transform.jl
+++ b/src/transform.jl
@@ -15,16 +15,31 @@ for (I, A, name) = ((:FiniteParameterIndex, :FiniteParameterAttr, :finite_parame
     )
     @eval begin
         function Base.getindex(cache::TransformAttrCache, idx::$I, attr::$A)
-            return cache.$(name)[idx, attr]
+            return cache.$(name)[idx][attr]
         end
         function Base.setindex!(cache::TransformAttrCache, value, idx::$I, attr::$A)
-            return cache.$(name)[idx, attr] = value
+            if !haskey(cache.$(name), idx)
+                cache.$(name)[idx] = Dict{$A, Any}(attr => value)
+                return value
+            else
+                return cache.$(name)[idx][attr] = value
+            end
         end
-        function Base.haskey(cache::TransformAttrCache, key::Tuple{$I, $A})
-            return haskey(cache.$(name), key)
+        function Base.haskey(cache::TransformAttrCache, idx::$I, attr::$A)
+            return haskey(cache.$(name), idx) && haskey(cache.$(name)[idx], attr)
         end
-        function Base.get(cache::TransformAttrCache, key::Tuple{$I, $A}, default)
-            return Base.get(cache.$(name), key, default)
+        function Base.get(cache::TransformAttrCache, idx::$I, attr::$A, default)
+            return Base.get(Base.get(cache.$(name), idx, Dict{$A, Any}()), attr, default)
+        end
+        function Base.delete!(cache::TransformAttrCache, idx::$I)
+            delete!(cache.$(name), idx)
+            return
+        end
+        function Base.delete!(cache::TransformAttrCache, idx::$I, attr::$A)
+            if haskey(cache.$(name), idx)
+                delete!(cache.$(name)[idx], attr)
+            end
+            return
         end
     end
 end
@@ -106,7 +121,7 @@ for (I, A) = ((:FiniteParameterIndex, :FiniteParameterAttr),
     @eval begin
         if !($I in (DependentParametersIndex, InfOptConstraintIndex))
             function get(model::InfiniteModel, idx::$I, attr::$A)
-                value = Base.get(model.transform_attrs, (idx, attr), nothing)
+                value = Base.get(model.transform_attrs, idx, attr, nothing)
                 if isnothing(value)
                     error("$(dispatch_variable_ref(model, idx)) does not have transform ",
                           "attribute `$attr`.")
@@ -132,7 +147,7 @@ function get(
     idx::InfOptConstraintIndex,
     attr::ConstraintAttr
     )
-    value = Base.get(model.transform_attrs, (idx, attr), nothing)
+    value = Base.get(model.transform_attrs, idx, attr, nothing)
     if isnothing(value)
         error("$(InfOptConstraintRef(model, idx)) does not have transform ",
               "attribute `$attr`.")
@@ -144,7 +159,7 @@ function get(
     idx::DependentParametersIndex,
     attr::InfiniteParameterAttr
     )
-    value = Base.get(model.transform_attrs, (idx, attr), nothing)
+    value = Base.get(model.transform_attrs, idx, attr, nothing)
     if isnothing(value)
         error("The dependent parameter group with index `$idx` does not ",
               "have transform attribute `$attr`.")

From bf483a8e6898f49651fa23253df3d1e69dcb4775 Mon Sep 17 00:00:00 2001
From: pulsipher
Date: Tue, 21 Jun 2022 17:26:47 -0400
Subject: [PATCH 10/10] Minor changes

---
 src/datatypes.jl         |  2 +-
 src/scalar_parameters.jl |  4 ++--
 src/transform.jl         | 12 ++++++------
 3 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/src/datatypes.jl b/src/datatypes.jl
index 9cc1a2471..ea3062bad 100644
--- a/src/datatypes.jl
+++ b/src/datatypes.jl
@@ -966,7 +966,7 @@ an `InfiniteModel` that can be used by the transformation backend.
 struct TransformAttrCache
     finite_parameters::Dict{FiniteParameterIndex, Dict{FiniteParameterAttr, Any}}
     independent_parameters::Dict{IndependentParameterIndex, Dict{InfiniteParameterAttr, Any}}
-    dependent_params::Dict{DependentParametersIndex, Dict{InfiniteParameterAttr, Any}} # TODO maybe allow for parameter-wsie attributes
+    dependent_params::Dict{DependentParametersIndex, Dict{InfiniteParameterAttr, Any}} # TODO maybe allow for parameter-wise attributes
     infinite_variables::Dict{InfiniteVariableIndex, Dict{VariableAttr, Any}}
     semi_ifninite_variables::Dict{SemiInfiniteVariableIndex, Dict{VariableAttr, Any}}
     point_variables::Dict{PointVariableIndex, Dict{VariableAttr, Any}}
diff --git a/src/scalar_parameters.jl b/src/scalar_parameters.jl
index 57cc873eb..01bb71e10 100644
--- a/src/scalar_parameters.jl
+++ b/src/scalar_parameters.jl
@@ -238,7 +238,7 @@ function add_parameter(
     data_object = ScalarParameterData(p, obj_num, param_num, name)
     obj_index = _add_data_object(model, data_object)
     model.name_to_param = nothing
-    _update_transform_attributes(model, p)
+    _update_transform_attributes_on_creation(model, p)
     return GeneralVariableRef(model, obj_index.value, typeof(obj_index))
 end
 
@@ -269,7 +269,7 @@ function add_parameter(
     data_object = ScalarParameterData(p, -1, -1, name)
     obj_index = _add_data_object(model, data_object)
     model.name_to_param = nothing
-    _update_transform_attributes(model, p)
+    _update_transform_attributes_on_creation(model, p)
     return GeneralVariableRef(model, obj_index.value, typeof(obj_index))
 end
 
diff --git a/src/transform.jl b/src/transform.jl
index 72fac727b..1c9c1cbca 100644
--- a/src/transform.jl
+++ b/src/transform.jl
@@ -192,7 +192,7 @@ end
     register_transform_keyword(kw::Symbol, attr::AbstractTransformAttr,
                                [type = attribute_value_type(attr)])::Nothing

-Register a new [`AbstractTransformAttr`](@ref) to be specified via keyword
+Register a new [`AbstractTransformAttr`](@ref) to be specified via a keyword
 argument to the appropriate creation macro (e.g., `@infinite_parameter` for
 [`InfiniteParameterAttr`](@ref)s). Using `type` indicates what type of input
 of input is accepted for `kw`. Note that `type` need not equal
@@ -223,7 +223,7 @@ Process a transformation backend attribute `attr` with a raw `value` that will
 be associated with `object` once it is added to the model. This is intended
 to handle raw input `value`s collected via keyword arguments when `object` is
 created (i.e., transformation keywords that have been registered via
-[`register_transform_keyword`](@ref)). This provides extra flexibility allow
+[`register_transform_keyword`](@ref)). This provides extra flexibility to allow
 more convenient input for users.
By default `value` is simply returned. This should be extended for registered transformation attribute types that wish to process input that doesn't readily match [`attribute_value_type`](@ref). Note @@ -293,13 +293,13 @@ only invoked for attributes that have been registered via [`register_attribute_to_update`](@ref). By default, nothing is updated. Those writing transformation backends should extend this based on particular attribute-object combinations where an update should be made. For instance, we -could obtain the discrete supports with the point from a point variable when it -is created (this is what `TranscriptionOpt` does). +could obtain the discrete support from a point variable when it is created (this +is what `TranscriptionOpt` does). Note this should NOT be used to modify/create an attribute for `obj` itself, it should only be used to modify attributes associated with other modeling objects. Instead use [`register_transform_keyword`](@ref) if you what to create an attribute -for `obj` on creation. +for `obj` on creation. """ function update_attribute_on_creation( model::InfiniteModel, @@ -311,7 +311,7 @@ end # Helper function for incrementally updating transform attributes when # adding modeling objects to the model -function _update_transform_attributes(model, obj) +function _update_transform_attributes_on_creation(model, obj) for attr in _IncrementalAttributes update_attribute_on_creation(model, attr, obj) end
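
A note on the cache layout introduced above: each field of `TransformAttrCache` now maps an object index to its own attribute dictionary, so clearing everything stored for one object is a single `delete!` on the outer dictionary. The standalone sketch below illustrates that nested-dictionary access pattern; `DemoIndex`, `DemoAttr`, `Supports`, `set_attr!`, and `get_attr` are placeholder names invented for illustration only and are not part of InfiniteOpt.

```julia
# Placeholder stand-ins for an InfiniteOpt index type and attribute types;
# these names are invented for illustration only.
struct DemoIndex
    value::Int
end
abstract type DemoAttr end
struct Supports <: DemoAttr end   # hypothetical attribute label

# Each object index owns its own attribute dictionary (the new layout).
cache = Dict{DemoIndex, Dict{DemoAttr, Any}}()

# setindex!-style insertion: create the inner dictionary on first use.
function set_attr!(cache, idx::DemoIndex, attr::DemoAttr, value)
    inner = get!(cache, idx, Dict{DemoAttr, Any}())
    inner[attr] = value
    return value
end

# get-style lookup whose default also covers a missing index.
function get_attr(cache, idx::DemoIndex, attr::DemoAttr, default)
    haskey(cache, idx) || return default
    return get(cache[idx], attr, default)
end

idx = DemoIndex(1)
set_attr!(cache, idx, Supports(), [0.0, 0.5, 1.0])
get_attr(cache, idx, Supports(), nothing)           # returns [0.0, 0.5, 1.0]
get_attr(cache, DemoIndex(2), Supports(), nothing)  # returns nothing

# Dropping the index clears every attribute stored for that object,
# mirroring the one-argument `Base.delete!(cache, idx)` method added above.
delete!(cache, idx)
```

Grouping attributes per index is what lets the `JuMP.delete` methods above discard all cached backend information for a parameter with one `delete!` call on the cache.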
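The `register_transform_keyword` and `update_attribute_on_creation` docstrings above describe a hook that fires whenever a modeling object is added: the model loops over the registered incremental attributes and gives each a chance to update itself. The following standalone sketch illustrates that dispatch pattern only; `HookAttr`, `SupportCount`, `DemoModel`, `update_on_creation`, and `notify_creation` are invented placeholder names, not InfiniteOpt's actual API.

```julia
# Invented placeholder types; not part of InfiniteOpt.
abstract type HookAttr end
struct SupportCount <: HookAttr end        # hypothetical registered attribute

mutable struct DemoModel
    attrs::Dict{HookAttr, Any}
end

# Registry of attributes that react whenever an object is created, analogous
# to the `_IncrementalAttributes` collection referenced in the patch.
const INCREMENTAL_ATTRS = HookAttr[SupportCount()]

# Fallback: by default nothing is updated (as the docstring above states).
update_on_creation(model::DemoModel, attr::HookAttr, obj) = nothing

# A backend-specific method: count point-like objects as they are added.
function update_on_creation(model::DemoModel, attr::SupportCount, obj::Float64)
    model.attrs[attr] = get(model.attrs, attr, 0) + 1
    return
end

# Helper mirroring `_update_transform_attributes_on_creation` in the patch.
function notify_creation(model::DemoModel, obj)
    for attr in INCREMENTAL_ATTRS
        update_on_creation(model, attr, obj)
    end
    return
end

model = DemoModel(Dict{HookAttr, Any}())
notify_creation(model, 0.5)    # a numeric "point" hits the SupportCount method
notify_creation(model, "x")    # other objects hit the no-op fallback
model.attrs[SupportCount()]    # evaluates to 1
```

The design point this mirrors is that the default method is a no-op, so a backend only pays for the attribute-object combinations it explicitly opts into.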