diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6ec08fa..9113638 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,9 @@
 
 Major changes in releases:
 
+## Version 0.1.15
+- `Tuning` renamed to `tuning` and small fixes in it, updates for Julia 1.11.
+
 ## Version 0.1.14
 - `Results` from `Scheduler` is extended with field `is_local_optimum` to be able
   to indicate if the solution is a local optimum in respect to the current method and it
diff --git a/MHLibDemos/Manifest.toml b/MHLibDemos/Manifest.toml
index 696555a..7458a0a 100644
--- a/MHLibDemos/Manifest.toml
+++ b/MHLibDemos/Manifest.toml
@@ -99,9 +99,9 @@ version = "0.2.2"
 
 [[deps.JuliaInterpreter]]
 deps = ["CodeTracking", "InteractiveUtils", "Random", "UUIDs"]
-git-tree-sha1 = "2984284a8abcfcc4784d95a9e2ea4e352dd8ede7"
+git-tree-sha1 = "fc8504eca188aaae4345649ca6105806bc584b70"
 uuid = "aa1ae85d-cabe-5617-a682-6adf51b2e16a"
-version = "0.9.36"
+version = "0.9.37"
 
 [[deps.LibGit2]]
 deps = ["Base64", "LibGit2_jll", "NetworkOptions", "Printf", "SHA"]
@@ -157,7 +157,7 @@ version = "3.0.5"
 deps = ["ArgParse", "DataStructures", "Logging", "Printf", "Random", "Reexport", "StatsBase"]
 path = ".."
 uuid = "0f3a0e6e-e0e7-4894-bd64-856f0ae3b635"
-version = "0.1.13"
+version = "0.1.15"
 
 [[deps.MacroTools]]
 deps = ["Markdown", "Random"]
@@ -239,9 +239,9 @@ version = "1.3.0"
 
 [[deps.Revise]]
 deps = ["CodeTracking", "Distributed", "FileWatching", "JuliaInterpreter", "LibGit2", "LoweredCodeUtils", "OrderedCollections", "REPL", "Requires", "UUIDs", "Unicode"]
-git-tree-sha1 = "7f4228017b83c66bd6aa4fddeb170ce487e53bc7"
+git-tree-sha1 = "834aedb1369919a7b2026d7e04c2d49a311d26f4"
 uuid = "295af30f-e4ad-537b-8983-00126c2a3abe"
-version = "3.6.2"
+version = "3.6.3"
 
 [[deps.SHA]]
 uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
diff --git a/Manifest.toml b/Manifest.toml
index cb1f710..f5d0407 100644
--- a/Manifest.toml
+++ b/Manifest.toml
@@ -2,7 +2,7 @@
 
 julia_version = "1.11.1"
 manifest_format = "2.0"
-project_hash = "b8b3564b02bd77ab3d1613a78103290e33d669ad"
+project_hash = "8a01778624db31048bc446d5aac140b2a907499f"
 
 [[deps.ArgParse]]
 deps = ["Logging", "TextWrap"]
diff --git a/Project.toml b/Project.toml
index b334e73..393b26b 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "MHLib"
 uuid = "0f3a0e6e-e0e7-4894-bd64-856f0ae3b635"
 authors = ["Guenther Raidl and others"]
-version = "0.1.14"
+version = "0.1.15"
 
 [deps]
 ArgParse = "c7e460c6-2fb9-53a9-8c5b-16f535851c63"
@@ -20,4 +20,4 @@ Reexport = "1.2"
 Logging = "1"
 Printf = "1"
 Random = "1"
-julia = "1.10"
+julia = "1.11"
diff --git a/README.md b/README.md
index 5bea1be..89dae64 100644
--- a/README.md
+++ b/README.md
@@ -44,7 +44,8 @@ installed via
 
 ## Major Components
 
-Note that `MHLib.jl` is still behind the capabilities of the Python `pymhlib`, however, much more performant.
+Note that `MHLib.jl` is still behind the capabilities of the Python `pymhlib`,
+however, it is much more performant.
 
 The main module provides the following types for candidate solutions and various functions for them:
 
@@ -91,7 +92,10 @@ Further modules:
 
 ## Demos
 
-For demonstration purposes subdirectory [`MHLibDemos`](MHLibDemos/README.md) provides a package (not officially registered at JuliaHub), with basic implementations for the following classical combinatorial optimization problems, to which some of MHLib's metaheuristics are applied:
+For demonstration purposes, subdirectory [`MHLibDemos`](MHLibDemos/README.md) provides
+a package (not officially registered at JuliaHub) with basic implementations for the
+following classical combinatorial optimization problems, to which some of
+MHLib's metaheuristics are applied:
 
 - `OneMax`: basic test problem in which the goal is to set all digits in a binary
   string to `true`
diff --git a/test/Manifest.toml b/test/Manifest.toml
index 405ce53..ef38b5b 100644
--- a/test/Manifest.toml
+++ b/test/Manifest.toml
@@ -108,9 +108,9 @@ version = "0.2.2"
 
 [[deps.JuliaInterpreter]]
 deps = ["CodeTracking", "InteractiveUtils", "Random", "UUIDs"]
-git-tree-sha1 = "2984284a8abcfcc4784d95a9e2ea4e352dd8ede7"
+git-tree-sha1 = "fc8504eca188aaae4345649ca6105806bc584b70"
 uuid = "aa1ae85d-cabe-5617-a682-6adf51b2e16a"
-version = "0.9.36"
+version = "0.9.37"
 
 [[deps.LibCURL]]
 deps = ["LibCURL_jll", "MozillaCACerts_jll"]
@@ -260,9 +260,9 @@ version = "1.3.0"
 
 [[deps.Revise]]
 deps = ["CodeTracking", "Distributed", "FileWatching", "JuliaInterpreter", "LibGit2", "LoweredCodeUtils", "OrderedCollections", "REPL", "Requires", "UUIDs", "Unicode"]
-git-tree-sha1 = "7f4228017b83c66bd6aa4fddeb170ce487e53bc7"
+git-tree-sha1 = "834aedb1369919a7b2026d7e04c2d49a311d26f4"
 uuid = "295af30f-e4ad-537b-8983-00126c2a3abe"
-version = "3.6.2"
+version = "3.6.3"
 
 [[deps.SHA]]
 uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
diff --git a/Tuning/Manifest.toml b/tuning/Manifest.toml
similarity index 100%
rename from Tuning/Manifest.toml
rename to tuning/Manifest.toml
diff --git a/Tuning/Project.toml b/tuning/Project.toml
similarity index 100%
rename from Tuning/Project.toml
rename to tuning/Project.toml
diff --git a/Tuning/README.md b/tuning/README.md
similarity index 88%
rename from Tuning/README.md
rename to tuning/README.md
index 5e1f1c8..e0d3d51 100644
--- a/Tuning/README.md
+++ b/tuning/README.md
@@ -18,7 +18,7 @@ command line argument:
 2) Here, for each call of the function to tune, a new Julia process is started. While
    this approach allows for parallelization, also using a compute cluster, it can be
    quite inefficient when the function to tune is comparably fast w.r.t. Julia's startup and
-   pre-compile time.
+   pre-compile times.
 
 3) Here, the main Python script spawns a fixed number of Julia "server" subprocesses
    realized by `julia_server.jl`, which accept function calls, perform them,
@@ -26,8 +26,9 @@ command line argument:
    In this way multiprocessing is utilized, but the overhead of starting a new Julia
    process for each function call is avoided.
 
-Note that the demo needs to be started from the `Tuning` subdirectory.
+Note that the demo needs to be started from the `tuning` subdirectory.
 
-Ensure that Python is installed with the packages `juliacall`, `smac` and `ConfigSpace`.
+Ensure that Python is installed with current versions of the packages `juliacall`, `smac`,
+and `ConfigSpace`.
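
As background for the tuning setup changed above, here is a minimal sketch (not part of this diff) of how the Julia demo function can be driven from Python via `juliacall`, in the spirit of the in-process variant of `tuning/tuning.py`. The file name `julia-function-to-tune.jl`, the function `f`, the search space values, and the wrapper signature are taken from this PR; everything else is illustrative and assumes `juliacall` and `ConfigSpace` are installed and the script is started from the `tuning` subdirectory, as the README above requires.

```python
# Illustrative sketch only -- not part of this diff.
# Assumes Julia plus the Python packages `juliacall` and `ConfigSpace` are
# installed and that this is run from the `tuning` subdirectory.
from ConfigSpace import Configuration, ConfigurationSpace
from juliacall import Main as jl

# Load the Julia demo function f(instance, seed, x, y, z) into the Julia Main module.
jl.include("julia-function-to-tune.jl")

# The same search space that tuning.py defines in its short form.
config_space = ConfigurationSpace({"x": (0.1, 4.0), "y": (1, 3), "z": ["opt1", "opt2"]})

def target(config: Configuration, instance: str, seed: int) -> float:
    """Wrapper with the signature SMAC's AlgorithmConfigurationFacade expects."""
    # Convert ConfigSpace values to plain Python types before handing them to Julia.
    return jl.f(instance, seed, float(config["x"]), int(config["y"]), str(config["z"]))
```

The explicit conversion to plain `float`/`int`/`str` mirrors what the updated wrapper in `tuning/tuning.py` below does before calling `jl.f`.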
diff --git a/Tuning/call-julia-function-f.jl b/tuning/call-julia-function-f.jl
similarity index 91%
rename from Tuning/call-julia-function-f.jl
rename to tuning/call-julia-function-f.jl
index 7ca66c3..e6be465 100755
--- a/Tuning/call-julia-function-f.jl
+++ b/tuning/call-julia-function-f.jl
@@ -16,7 +16,7 @@ include("julia-function-to-tune.jl")
 #    println(f, d)
 # end
 
-c = f(d["--instance"], parse(Int, d["--seed"]), 
+c = f(d["--instance"], parse(Int, d["--seed"]),
     parse(Float64, d["--x"]),
     parse(Int, d["--y"]),
     d["--z"],
diff --git a/Tuning/julia-function-to-tune.jl b/tuning/julia-function-to-tune.jl
similarity index 94%
rename from Tuning/julia-function-to-tune.jl
rename to tuning/julia-function-to-tune.jl
index d28b147..1a0c085 100644
--- a/Tuning/julia-function-to-tune.jl
+++ b/tuning/julia-function-to-tune.jl
@@ -5,7 +5,7 @@
 
 Demo function to tune with SMAC3 in different ways.
 """
-function f(instance::AbstractString, seed::Int, x::Float64, y::Int, z::String)::Float64
+function f(instance::AbstractString, seed::Int, x::Float64, y::Int, z::AbstractString)::Float64
     # just some busy waiting:
     xx=3
     for i in 1:10000000
diff --git a/Tuning/julia_server.py b/tuning/julia_server.py
similarity index 100%
rename from Tuning/julia_server.py
rename to tuning/julia_server.py
diff --git a/Tuning/tuning.py b/tuning/tuning.py
similarity index 80%
rename from Tuning/tuning.py
rename to tuning/tuning.py
index 5753109..2917c26 100755
--- a/Tuning/tuning.py
+++ b/tuning/tuning.py
@@ -1,15 +1,20 @@
 #!/usr/bin/env python3
 
 # Demo for using SMAC3 to optimize the configuration of a Julia algorithm.
-# Ensure that Python is installed with the packages `pyjulia`, `smac` and `ConfigSpace`
+# Ensure that Python is installed with the packages `juliacall`, `smac` and `ConfigSpace`.
+# You may provide 1, 2, or 3 as a command line argument to select the variant to use.
 
 import ConfigSpace
 from ConfigSpace import Configuration, ConfigurationSpace
-from smac import AlgorithmConfigurationFacade, Scenario
+from smac import AlgorithmConfigurationFacade, HyperparameterOptimizationFacade, Scenario
+from smac.intensifier.intensifier import Intensifier
 import os
 import sys
 
 
+# print(os.getcwd()); os.chdir("tuning") #!!!
+
+
 # simple way to specify a configuration space
 config_space = ConfigurationSpace({
     "x": (0.1, 4.0),
@@ -19,7 +24,7 @@
 
 # alternative long form, advanced configuration aspects possible:
 config_space2 = ConfigurationSpace()
-config_space2.add_hyperparameters([
+config_space2.add([
     ConfigSpace.UniformFloatHyperparameter("x", 0.1, 4.0),
     ConfigSpace.UniformIntegerHyperparameter("y", 1, 3),
     ConfigSpace.CategoricalHyperparameter("z", ["opt1", "opt2"]),
@@ -29,11 +34,11 @@
 instance_dir = "../test/data"
 # names of problem instances to be used for tuning
 instances = [fn for fn in os.listdir(instance_dir) if fn.startswith("maxsat")]
+instances = instances[:1]  # limit to the first instance for testing
 
-# features of the problem instances
+# a mapping of the problem instances to their features,
 # in the simplest case just the index, or otherwise some more relevant features
 features = {fn: [i] for (i, fn) in enumerate(instances)}
-# names of problem instances to be used for tuning
 
 # Scenario object specifying the optimization environment
 scenario = Scenario(config_space2, deterministic=False,
@@ -54,13 +59,15 @@
 
     # exemplary wrapper for Julia function to tune
    def f(config: Configuration, instance: str, seed: int) -> float:
-        print(f'f({instance}, {seed}, {config["x"]}, {config["y"]}, {config["z"]})',
-              end=" -> ")
-        res = jl.f(instance, seed, config["x"], config["y"], config["z"])
+        x = float(config["x"]); y = int(config["y"]); z = str(config["z"])
+        print(f'f({instance}, {seed}, {x}, {y}, {z})', end=" -> ")
+        res = jl.f(instance, seed, x, y, z)
         print(res)
         return res
 
     smac = AlgorithmConfigurationFacade(scenario, f, overwrite=True)
+    # smac = HyperparameterOptimizationFacade(scenario, f, overwrite=True)
+
 
 elif variant_to_use == "2":