Build:
2026-02-27 04:04.36: New job: test owl-opt.0.0.1 with ocaml-secondary-compiler.4.08.1, using opam dev
from https://github.com/ocaml/opam-repository.git#refs/pull/29451/head (09d7830dd4a7cd4cfc1725bd69ec5b222eae677d)
on debian-13-ocaml-5.4/amd64
To reproduce locally:
cd $(mktemp -d)
git clone --recursive "https://github.com/ocaml/opam-repository.git" && cd "opam-repository" && git fetch origin "refs/pull/29451/head" && git reset --hard 09d7830d
git fetch origin master
git merge --no-edit 5abb4f44e937819c2e438ab71bc23607a7cad3da
cat > ../Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-13-ocaml-5.4@sha256:4add1601135529e9f2e403a25c1c640231c0e871e87f88cf8feab4be5095104c
USER 1000:1000
WORKDIR /home/opam
RUN sudo ln -f /usr/bin/opam-dev /usr/bin/opam
RUN opam init --reinit -ni
RUN opam option solver=builtin-0install && opam config report
ENV OPAMDOWNLOADJOBS="1"
ENV OPAMERRLOGLEN="0"
ENV OPAMPRECISETRACKING="1"
ENV CI="true"
ENV OPAM_REPO_CI="true"
RUN rm -rf opam-repository/
COPY --chown=1000:1000 . opam-repository/
RUN opam repository set-url --strict default opam-repository/
RUN opam update --depexts || true
RUN opam pin add -k version -yn ocaml-secondary-compiler.4.08.1 4.08.1
RUN opam reinstall ocaml-secondary-compiler.4.08.1; \
res=$?; \
test "$res" != 31 && exit "$res"; \
export OPAMCLI=2.0; \
build_dir=$(opam var prefix)/.opam-switch/build; \
failed=$(ls "$build_dir"); \
partial_fails=""; \
for pkg in $failed; do \
if opam show -f x-ci-accept-failures: "$pkg" | grep -qF "\"debian-13\""; then \
echo "A package failed and has been disabled for CI using the 'x-ci-accept-failures' field."; \
fi; \
test "$pkg" != 'ocaml-secondary-compiler.4.08.1' && partial_fails="$partial_fails $pkg"; \
done; \
test "${partial_fails}" != "" && echo "opam-repo-ci detected dependencies failing: ${partial_fails}"; \
exit 1
RUN opam reinstall owl-opt.0.0.1; \
res=$?; \
test "$res" != 31 && exit "$res"; \
export OPAMCLI=2.0; \
build_dir=$(opam var prefix)/.opam-switch/build; \
failed=$(ls "$build_dir"); \
partial_fails=""; \
for pkg in $failed; do \
if opam show -f x-ci-accept-failures: "$pkg" | grep -qF "\"debian-13\""; then \
echo "A package failed and has been disabled for CI using the 'x-ci-accept-failures' field."; \
fi; \
test "$pkg" != 'owl-opt.0.0.1' && partial_fails="$partial_fails $pkg"; \
done; \
test "${partial_fails}" != "" && echo "opam-repo-ci detected dependencies failing: ${partial_fails}"; \
exit 1
RUN (opam reinstall --with-test owl-opt.0.0.1) || true
RUN opam reinstall --with-test --verbose owl-opt.0.0.1; \
res=$?; \
test "$res" != 31 && exit "$res"; \
export OPAMCLI=2.0; \
build_dir=$(opam var prefix)/.opam-switch/build; \
failed=$(ls "$build_dir"); \
partial_fails=""; \
for pkg in $failed; do \
if opam show -f x-ci-accept-failures: "$pkg" | grep -qF "\"debian-13\""; then \
echo "A package failed and has been disabled for CI using the 'x-ci-accept-failures' field."; \
fi; \
test "$pkg" != 'owl-opt.0.0.1' && partial_fails="$partial_fails $pkg"; \
done; \
test "${partial_fails}" != "" && echo "opam-repo-ci detected dependencies failing: ${partial_fails}"; \
exit 1
END-OF-DOCKERFILE
docker build -f ../Dockerfile .
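
Each 'opam reinstall' RUN step above follows the same failure-handling pattern. opam reserves exit code 31 for errors during package build/install actions, so any other non-zero status is propagated unchanged, while a 31 triggers a scan of the switch's build directory: whatever is left there failed to install, which lets the script distinguish a failure of the package under test from a failure in one of its dependencies. A condensed sketch of the pattern (the helper name is hypothetical, and the 'x-ci-accept-failures' check is elided):

    # Hypothetical condensed form of the failure handler in the RUN steps;
    # "$1" is the package under test, e.g. owl-opt.0.0.1.
    ci_install () {
      opam reinstall "$1"; res=$?
      # Anything but 31 (opam's package-operation error) passes through as-is.
      test "$res" != 31 && exit "$res"
      build_dir=$(opam var prefix)/.opam-switch/build
      # Build directories of successfully installed packages are cleaned up,
      # so the survivors are the packages that failed.
      for pkg in $(ls "$build_dir"); do
        test "$pkg" != "$1" && echo "opam-repo-ci detected dependencies failing: $pkg"
      done
      # The step still fails; the messages above only refine the CI report.
      exit 1
    }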
2026-02-27 04:04.36: Using cache hint "ocaml/opam:debian-13-ocaml-5.4@sha256:4add1601135529e9f2e403a25c1c640231c0e871e87f88cf8feab4be5095104c-ocaml-secondary-compiler.4.08.1-owl-opt.0.0.1-09d7830dd4a7cd4cfc1725bd69ec5b222eae677d"
2026-02-27 04:04.36: Using OBuilder spec:
((from ocaml/opam:debian-13-ocaml-5.4@sha256:4add1601135529e9f2e403a25c1c640231c0e871e87f88cf8feab4be5095104c)
(user (uid 1000) (gid 1000))
(workdir /home/opam)
(run (shell "sudo ln -f /usr/bin/opam-dev /usr/bin/opam"))
(run (network host)
(shell "opam init --reinit --config .opamrc-sandbox -ni"))
(run (shell "opam option solver=builtin-0install && opam config report"))
(env OPAMDOWNLOADJOBS 1)
(env OPAMERRLOGLEN 0)
(env OPAMPRECISETRACKING 1)
(env CI true)
(env OPAM_REPO_CI true)
(run (shell "rm -rf opam-repository/"))
(copy (src .) (dst opam-repository/))
(run (shell "opam repository set-url --strict default opam-repository/"))
(run (network host)
(shell "opam update --depexts || true"))
(run (shell "opam pin add -k version -yn ocaml-secondary-compiler.4.08.1 4.08.1"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam reinstall ocaml-secondary-compiler.4.08.1;\
\n res=$?;\
\n test \"$res\" != 31 && exit \"$res\";\
\n export OPAMCLI=2.0;\
\n build_dir=$(opam var prefix)/.opam-switch/build;\
\n failed=$(ls \"$build_dir\");\
\n partial_fails=\"\";\
\n for pkg in $failed; do\
\n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-13\\\"\"; then\
\n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\
\n fi;\
\n test \"$pkg\" != 'ocaml-secondary-compiler.4.08.1' && partial_fails=\"$partial_fails $pkg\";\
\n done;\
\n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\
\n exit 1"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam reinstall owl-opt.0.0.1;\
\n res=$?;\
\n test \"$res\" != 31 && exit \"$res\";\
\n export OPAMCLI=2.0;\
\n build_dir=$(opam var prefix)/.opam-switch/build;\
\n failed=$(ls \"$build_dir\");\
\n partial_fails=\"\";\
\n for pkg in $failed; do\
\n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-13\\\"\"; then\
\n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\
\n fi;\
\n test \"$pkg\" != 'owl-opt.0.0.1' && partial_fails=\"$partial_fails $pkg\";\
\n done;\
\n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\
\n exit 1"))
(run (network host)
(shell "(opam reinstall --with-test owl-opt.0.0.1) || true"))
(run (shell "opam reinstall --with-test --verbose owl-opt.0.0.1;\
\n res=$?;\
\n test \"$res\" != 31 && exit \"$res\";\
\n export OPAMCLI=2.0;\
\n build_dir=$(opam var prefix)/.opam-switch/build;\
\n failed=$(ls \"$build_dir\");\
\n partial_fails=\"\";\
\n for pkg in $failed; do\
\n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-13\\\"\"; then\
\n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\
\n fi;\
\n test \"$pkg\" != 'owl-opt.0.0.1' && partial_fails=\"$partial_fails $pkg\";\
\n done;\
\n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\
\n exit 1"))
)
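
The OBuilder spec mirrors the Dockerfile step for step, but each (run ...) stanza is content-addressed, which is what the later "---> using ... from cache" lines refer to. Two stanzas have no direct Dockerfile counterpart; an annotated excerpt (the comments are added here for explanation and are not part of the spec):

    (run
     (cache (opam-archives (target /home/opam/.opam/download-cache)))
       ; persistent cache mounted at the target path, shared across builds
       ; so package archives only need to be downloaded once
     (network host)
       ; grants network access for this step; steps that omit this stanza
       ; are expected to run offline
     (shell "opam reinstall ..."))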
2026-02-27 04:04.36: Waiting for resource in pool OCluster
2026-03-02 07:37.59: Waiting for worker…
2026-03-02 07:40.41: Got resource from pool OCluster
Building on laodoke.caelum.ci.dev
All commits already cached
Updating files: 100% (18334/18334), done.
HEAD is now at 5abb4f44e9 Merge pull request #29466 from mseri/release-doi2bib-0.9.1
Merge made by the 'ort' strategy.
.../ocaml-base-compiler.3.07+1/opam | 1 +
.../ocaml-base-compiler.3.07+2/opam | 1 +
.../ocaml-base-compiler/ocaml-base-compiler.3.07/opam | 1 +
.../ocaml-base-compiler.3.08.0/opam | 1 +
.../ocaml-base-compiler.3.08.1/opam | 1 +
.../ocaml-base-compiler.3.08.2/opam | 1 +
.../ocaml-base-compiler.3.08.3/opam | 1 +
.../ocaml-base-compiler.3.08.4/opam | 1 +
.../ocaml-base-compiler.3.09.0/opam | 1 +
.../ocaml-base-compiler.3.09.1/opam | 1 +
.../ocaml-base-compiler.3.09.2/opam | 1 +
.../ocaml-base-compiler.3.09.3/opam | 1 +
.../ocaml-base-compiler.3.10.0/opam | 1 +
.../ocaml-base-compiler.3.10.1/opam | 1 +
.../ocaml-base-compiler.3.10.2/opam | 1 +
.../ocaml-base-compiler.3.11.0/opam | 1 +
.../ocaml-base-compiler.3.11.1/opam | 1 +
.../ocaml-base-compiler.3.11.2/opam | 1 +
.../ocaml-base-compiler.3.12.0/opam | 1 +
.../ocaml-base-compiler.3.12.1/opam | 1 +
.../ocaml-base-compiler.4.00.0/opam | 1 +
.../ocaml-base-compiler.4.00.1/opam | 1 +
.../ocaml-base-compiler.4.01.0/opam | 1 +
.../ocaml-base-compiler.4.02.0/opam | 1 +
.../ocaml-base-compiler.4.02.1/opam | 1 +
.../ocaml-base-compiler.4.02.2/opam | 1 +
.../ocaml-base-compiler.4.02.3/opam | 1 +
.../ocaml-base-compiler.4.03.0/opam | 1 +
.../ocaml-base-compiler.4.04.0/opam | 1 +
.../ocaml-base-compiler.4.04.1/opam | 1 +
.../ocaml-base-compiler.4.04.2/opam | 1 +
.../ocaml-base-compiler.4.05.0/opam | 1 +
.../ocaml-base-compiler.4.06.0/opam | 1 +
.../ocaml-base-compiler.4.06.1/opam | 1 +
.../ocaml-base-compiler.4.07.0/opam | 1 +
.../ocaml-base-compiler.4.07.1/opam | 1 +
.../ocaml-base-compiler.4.08.0/opam | 1 +
.../ocaml-base-compiler.4.08.1/opam | 1 +
.../ocaml-base-compiler.4.09.0/opam | 1 +
.../ocaml-base-compiler.4.09.1/opam | 1 +
.../ocaml-base-compiler.4.10.0/opam | 1 +
.../ocaml-base-compiler.4.10.1/opam | 1 +
.../ocaml-base-compiler.4.10.2/opam | 1 +
.../ocaml-base-compiler.4.11.0/opam | 1 +
.../ocaml-base-compiler.4.11.1/opam | 1 +
.../ocaml-base-compiler.4.11.2/opam | 1 +
.../ocaml-base-compiler.4.12.0/opam | 1 +
.../ocaml-base-compiler.4.12.1/opam | 1 +
.../ocaml-base-compiler.4.13.0/opam | 1 +
.../ocaml-base-compiler.4.13.1/opam | 1 +
.../ocaml-base-compiler.4.14.0/opam | 1 +
.../ocaml-base-compiler.4.14.1/opam | 1 +
.../ocaml-base-compiler.4.14.2/opam | 1 +
.../ocaml-base-compiler.4.14.2~rc1/opam | 1 +
.../ocaml-base-compiler.4.14.3/opam | 1 +
.../ocaml-base-compiler/ocaml-base-compiler.5.0.0/opam | 5 ++++-
.../ocaml-base-compiler/ocaml-base-compiler.5.1.0/opam | 5 ++++-
.../ocaml-base-compiler/ocaml-base-compiler.5.1.1/opam | 5 ++++-
.../ocaml-base-compiler/ocaml-base-compiler.5.2.0/opam | 5 ++++-
.../ocaml-base-compiler/ocaml-base-compiler.5.2.1/opam | 5 ++++-
packages/ocaml-compiler/ocaml-compiler.5.3.0/opam | 1 +
packages/ocaml-compiler/ocaml-compiler.5.3/opam | 1 +
packages/ocaml-compiler/ocaml-compiler.5.4.0/opam | 1 +
.../ocaml-compiler/ocaml-compiler.5.4.0~alpha1/opam | 1 +
.../ocaml-compiler/ocaml-compiler.5.4.0~beta1/opam | 1 +
.../ocaml-compiler/ocaml-compiler.5.4.0~beta2/opam | 1 +
packages/ocaml-compiler/ocaml-compiler.5.4.0~rc1/opam | 1 +
packages/ocaml-compiler/ocaml-compiler.5.4.1/opam | 1 +
packages/ocaml-compiler/ocaml-compiler.5.4/opam | 1 +
.../ocaml-secondary-compiler.4.08.1-1/opam | 1 +
.../ocaml-secondary-compiler.4.08.1/opam | 1 +
.../ocaml-secondary-compiler.4.14.2/opam | 1 +
.../ocaml-variants.3.09.1+metaocaml/opam | 1 +
.../ocaml-variants.4.00.0+debug-runtime/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.00.1+BER/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.00.1+PIC/opam | 1 +
.../ocaml-variants.4.00.1+debug-runtime/opam | 1 +
.../ocaml-variants.4.00.1+open-types/opam | 1 +
.../ocaml-variants.4.00.1+raspberrypi/opam | 1 +
.../ocaml-variants.4.00.1+short-types/opam | 1 +
.../ocaml-variants/ocaml-variants.4.01.0+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.01.0+BER/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.01.0+PIC/opam | 1 +
.../ocaml-variants.4.01.0+armv6-freebsd/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.01.0+fp/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.01.0+lsb/opam | 1 +
.../ocaml-variants.4.01.0+musl+static/opam | 1 +
.../ocaml-variants/ocaml-variants.4.01.0+musl/opam | 1 +
.../ocaml-variants.4.01.0+open-types/opam | 1 +
.../ocaml-variants/ocaml-variants.4.01.0+profile/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.02.0+PIC/opam | 1 +
.../ocaml-variants.4.02.0+improved-errors/opam | 1 +
.../ocaml-variants/ocaml-variants.4.02.1+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.02.1+BER/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.02.1+PIC/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.02.1+fp/opam | 1 +
.../ocaml-variants.4.02.1+modular-implicits-ber/opam | 1 +
.../ocaml-variants.4.02.1+modular-implicits/opam | 1 +
.../ocaml-variants.4.02.1+musl+static/opam | 1 +
.../ocaml-variants/ocaml-variants.4.02.1+musl/opam | 1 +
.../ocaml-variants.4.02.2+improved-errors/opam | 1 +
.../ocaml-variants/ocaml-variants.4.02.3+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.02.3+PIC/opam | 1 +
.../ocaml-variants/ocaml-variants.4.02.3+buckle-1/opam | 1 +
.../ocaml-variants.4.02.3+buckle-master/opam | 1 +
.../ocaml-variants.4.02.3+bytecode-only/opam | 1 +
.../ocaml-variants.4.02.3+curried-constr/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.02.3+fp/opam | 1 +
.../ocaml-variants.4.02.3+musl+static/opam | 1 +
.../ocaml-variants/ocaml-variants.4.02.3+musl/opam | 1 +
.../ocaml-variants/ocaml-variants.4.02.4+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.03.0+32bit/opam | 1 +
.../ocaml-variants/ocaml-variants.4.03.0+fPIC/opam | 1 +
.../ocaml-variants/ocaml-variants.4.03.0+flambda/opam | 1 +
.../ocaml-variants.4.03.0+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.03.0+fp/opam | 1 +
.../ocaml-variants.4.03.0+statistical-memprof/opam | 1 +
.../ocaml-variants/ocaml-variants.4.03.1+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.04.0+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.04.0+BER/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.04.0+afl/opam | 1 +
.../ocaml-variants.4.04.0+bytecode-only/opam | 1 +
.../ocaml-variants/ocaml-variants.4.04.0+fPIC/opam | 1 +
.../ocaml-variants/ocaml-variants.4.04.0+flambda/opam | 1 +
.../ocaml-variants.4.04.0+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.04.0+fp/opam | 1 +
.../ocaml-variants.4.04.0+safe-string/opam | 1 +
.../ocaml-variants.4.04.0+spacetime/opam | 1 +
.../ocaml-variants.4.04.0+trunk+forced_lto/opam | 1 +
.../ocaml-variants/ocaml-variants.4.04.1+32bit/opam | 1 +
.../ocaml-variants.4.04.1+bytecode-only/opam | 1 +
.../ocaml-variants.4.04.1+copatterns/opam | 1 +
.../ocaml-variants/ocaml-variants.4.04.1+fPIC/opam | 1 +
.../ocaml-variants/ocaml-variants.4.04.1+flambda/opam | 1 +
.../ocaml-variants.4.04.1+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.04.1+fp/opam | 1 +
.../ocaml-variants.4.04.1+safe-string/opam | 1 +
.../ocaml-variants.4.04.1+spacetime/opam | 1 +
.../ocaml-variants/ocaml-variants.4.04.2+32bit/opam | 1 +
.../ocaml-variants.4.04.2+bytecode-only/opam | 1 +
.../ocaml-variants/ocaml-variants.4.04.2+fPIC/opam | 1 +
.../ocaml-variants/ocaml-variants.4.04.2+flambda/opam | 1 +
.../ocaml-variants.4.04.2+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.04.2+fp/opam | 1 +
.../ocaml-variants.4.04.2+safe-string/opam | 1 +
.../ocaml-variants.4.04.2+spacetime/opam | 1 +
.../ocaml-variants.4.04.2+statistical-memprof/opam | 1 +
.../ocaml-variants/ocaml-variants.4.04.3+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.05.0+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.05.0+afl/opam | 1 +
.../ocaml-variants.4.05.0+bytecode-only/opam | 1 +
.../ocaml-variants/ocaml-variants.4.05.0+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.05.0+lto/opam | 1 +
.../ocaml-variants.4.05.0+musl+flambda/opam | 1 +
.../ocaml-variants.4.05.0+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.05.0+safe-string/opam | 1 +
.../ocaml-variants.4.05.0+spacetime/opam | 1 +
.../ocaml-variants.4.05.0+statistical-memprof/opam | 1 +
.../ocaml-variants.4.05.1+trunk+afl/opam | 1 +
.../ocaml-variants.4.05.1+trunk+flambda/opam | 1 +
.../ocaml-variants.4.05.1+trunk+fp+flambda/opam | 1 +
.../ocaml-variants/ocaml-variants.4.05.1+trunk+fp/opam | 1 +
.../ocaml-variants.4.05.1+trunk+safe-string/opam | 1 +
.../ocaml-variants/ocaml-variants.4.05.1+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.06.0+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.06.0+afl/opam | 1 +
.../ocaml-variants.4.06.0+bytecode-only/opam | 1 +
.../ocaml-variants.4.06.0+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.06.0+flambda/opam | 1 +
.../ocaml-variants.4.06.0+force-safe-string/opam | 1 +
.../ocaml-variants.4.06.0+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.06.0+fp/opam | 1 +
.../ocaml-variants.4.06.0+musl+flambda/opam | 1 +
.../ocaml-variants.4.06.0+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.06.0+no-flat-float-array/opam | 1 +
.../ocaml-variants.4.06.0+spacetime/opam | 1 +
.../ocaml-variants.4.06.0+statistical-memprof/opam | 1 +
.../ocaml-variants/ocaml-variants.4.06.1+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.06.1+afl/opam | 1 +
.../ocaml-variants.4.06.1+bytecode-only/opam | 1 +
.../ocaml-variants.4.06.1+default-unsafe-string/opam | 1 +
.../ocaml-variants/ocaml-variants.4.06.1+flambda/opam | 1 +
.../ocaml-variants.4.06.1+force-safe-string/opam | 1 +
.../ocaml-variants.4.06.1+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.06.1+fp/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.06.1+lto/opam | 1 +
.../ocaml-variants.4.06.1+musl+flambda/opam | 1 +
.../ocaml-variants.4.06.1+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.06.1+no-flat-float-array/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.06.1+rescript/opam | 1 +
.../ocaml-variants.4.06.1+statistical-memprof/opam | 1 +
.../ocaml-variants/ocaml-variants.4.06.1+termux/opam | 1 +
.../ocaml-variants.4.06.2+trunk+afl/opam | 1 +
.../ocaml-variants.4.06.2+trunk+flambda/opam | 1 +
.../ocaml-variants.4.06.2+trunk+force-safe-string/opam | 1 +
.../ocaml-variants.4.06.2+trunk+fp+flambda/opam | 1 +
.../ocaml-variants/ocaml-variants.4.06.2+trunk+fp/opam | 1 +
.../ocaml-variants/ocaml-variants.4.06.2+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.07.0+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.07.0+afl/opam | 1 +
.../ocaml-variants.4.07.0+bytecode-only/opam | 1 +
.../ocaml-variants.4.07.0+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.07.0+flambda/opam | 1 +
.../ocaml-variants.4.07.0+force-safe-string/opam | 1 +
.../ocaml-variants.4.07.0+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.07.0+fp/opam | 1 +
.../ocaml-variants.4.07.0+no-flat-float-array/opam | 1 +
.../ocaml-variants.4.07.0+spacetime/opam | 1 +
.../ocaml-variants/ocaml-variants.4.07.1+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.07.1+BER/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.07.1+afl/opam | 1 +
.../ocaml-variants.4.07.1+bytecode-only/opam | 1 +
.../ocaml-variants.4.07.1+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.07.1+flambda/opam | 1 +
.../ocaml-variants.4.07.1+force-safe-string/opam | 1 +
.../ocaml-variants.4.07.1+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.07.1+fp/opam | 1 +
.../ocaml-variants.4.07.1+musl+flambda/opam | 1 +
.../ocaml-variants.4.07.1+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.07.1+no-flat-float-array/opam | 1 +
.../ocaml-variants.4.07.1+spacetime/opam | 1 +
.../ocaml-variants.4.07.1+statistical-memprof/opam | 1 +
.../ocaml-variants.4.07.2+trunk+afl/opam | 1 +
.../opam | 1 +
.../ocaml-variants.4.07.2+trunk+flambda/opam | 1 +
.../ocaml-variants.4.07.2+trunk+fp+flambda/opam | 1 +
.../ocaml-variants/ocaml-variants.4.07.2+trunk+fp/opam | 1 +
.../ocaml-variants/ocaml-variants.4.07.2+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.08.0+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.08.0+afl/opam | 1 +
.../ocaml-variants.4.08.0+bytecode-only/opam | 1 +
.../ocaml-variants.4.08.0+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.08.0+flambda/opam | 1 +
.../ocaml-variants.4.08.0+force-safe-string/opam | 1 +
.../ocaml-variants.4.08.0+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.08.0+fp/opam | 1 +
.../ocaml-variants.4.08.0+musl+flambda/opam | 1 +
.../ocaml-variants.4.08.0+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.08.0+no-flat-float-array/opam | 1 +
.../ocaml-variants.4.08.0+spacetime/opam | 1 +
.../ocaml-variants/ocaml-variants.4.08.1+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.08.1+afl/opam | 1 +
.../ocaml-variants.4.08.1+bytecode-only/opam | 1 +
.../ocaml-variants.4.08.1+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.08.1+flambda/opam | 1 +
.../ocaml-variants.4.08.1+force-safe-string/opam | 1 +
.../ocaml-variants.4.08.1+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.08.1+fp/opam | 1 +
.../ocaml-variants.4.08.1+musl+flambda/opam | 1 +
.../ocaml-variants.4.08.1+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.08.1+spacetime/opam | 1 +
.../ocaml-variants.4.08.2+trunk+afl/opam | 1 +
.../opam | 1 +
.../ocaml-variants.4.08.2+trunk+flambda/opam | 1 +
.../ocaml-variants.4.08.2+trunk+force-safe-string/opam | 1 +
.../ocaml-variants.4.08.2+trunk+fp+flambda/opam | 1 +
.../ocaml-variants/ocaml-variants.4.08.2+trunk+fp/opam | 1 +
.../ocaml-variants/ocaml-variants.4.08.2+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.09.0+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.09.0+afl/opam | 1 +
.../ocaml-variants.4.09.0+bytecode-only/opam | 1 +
.../ocaml-variants.4.09.0+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.09.0+flambda/opam | 1 +
.../ocaml-variants.4.09.0+force-safe-string/opam | 1 +
.../ocaml-variants.4.09.0+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.09.0+fp/opam | 1 +
.../ocaml-variants.4.09.0+musl+flambda/opam | 1 +
.../ocaml-variants.4.09.0+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.09.0+no-flat-float-array/opam | 1 +
.../ocaml-variants.4.09.0+spacetime/opam | 1 +
.../ocaml-variants/ocaml-variants.4.09.1+32bit/opam | 1 +
.../ocaml-variants.4.09.1+afl+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.09.1+afl/opam | 1 +
.../ocaml-variants.4.09.1+bytecode-only/opam | 1 +
.../ocaml-variants.4.09.1+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.09.1+flambda/opam | 1 +
.../ocaml-variants.4.09.1+force-safe-string/opam | 1 +
.../ocaml-variants.4.09.1+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.09.1+fp/opam | 1 +
.../ocaml-variants.4.09.1+musl+flambda/opam | 1 +
.../ocaml-variants.4.09.1+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.09.1+no-flat-float-array/opam | 1 +
.../ocaml-variants.4.09.1+spacetime/opam | 1 +
.../ocaml-variants.4.09.2+trunk+afl/opam | 1 +
.../opam | 1 +
.../ocaml-variants.4.09.2+trunk+flambda/opam | 1 +
.../ocaml-variants.4.09.2+trunk+fp+flambda/opam | 1 +
.../ocaml-variants/ocaml-variants.4.09.2+trunk+fp/opam | 1 +
.../ocaml-variants/ocaml-variants.4.09.2+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.0+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.10.0+afl/opam | 1 +
.../ocaml-variants.4.10.0+bytecode-only/opam | 1 +
.../ocaml-variants.4.10.0+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.0+flambda/opam | 1 +
.../ocaml-variants.4.10.0+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.10.0+fp/opam | 1 +
.../ocaml-variants.4.10.0+musl+flambda/opam | 1 +
.../ocaml-variants.4.10.0+musl+static+flambda/opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.0+nnpcheck/opam | 1 +
.../ocaml-variants.4.10.0+no-flat-float-array/opam | 1 +
.../ocaml-variants.4.10.0+spacetime/opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.1+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.10.1+afl/opam | 1 +
.../ocaml-variants.4.10.1+bytecode-only/opam | 1 +
.../ocaml-variants.4.10.1+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.1+flambda/opam | 1 +
.../ocaml-variants.4.10.1+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.10.1+fp/opam | 1 +
.../ocaml-variants.4.10.1+musl+flambda/opam | 1 +
.../ocaml-variants.4.10.1+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.10.1+no-flat-float-array/opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.1+rc1+afl/opam | 1 +
.../ocaml-variants.4.10.1+spacetime/opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.2+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.10.2+afl/opam | 1 +
.../ocaml-variants.4.10.2+bytecode-only/opam | 1 +
.../ocaml-variants.4.10.2+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.2+flambda/opam | 1 +
.../ocaml-variants.4.10.2+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.10.2+fp/opam | 1 +
.../ocaml-variants.4.10.2+musl+flambda/opam | 1 +
.../ocaml-variants.4.10.2+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.10.2+no-flat-float-array/opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.2+rescript/opam | 1 +
.../ocaml-variants.4.10.2+spacetime/opam | 1 +
.../ocaml-variants.4.10.3+trunk+afl/opam | 1 +
.../ocaml-variants.4.10.3+trunk+flambda/opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.3+trunk+fp/opam | 1 +
.../ocaml-variants/ocaml-variants.4.10.3+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.11.0+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.11.0+afl/opam | 1 +
.../ocaml-variants.4.11.0+bytecode-only/opam | 1 +
.../ocaml-variants.4.11.0+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.11.0+flambda/opam | 1 +
.../ocaml-variants.4.11.0+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.11.0+fp/opam | 1 +
.../ocaml-variants.4.11.0+musl+flambda/opam | 1 +
.../ocaml-variants.4.11.0+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.11.0+no-flat-float-array/opam | 1 +
.../ocaml-variants.4.11.0+spacetime/opam | 1 +
.../ocaml-variants/ocaml-variants.4.11.1+32bit/opam | 1 +
.../ocaml-variants.4.11.1+BER+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.11.1+BER/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.11.1+afl/opam | 1 +
.../ocaml-variants.4.11.1+bytecode-only/opam | 1 +
.../ocaml-variants.4.11.1+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.11.1+flambda/opam | 1 +
.../ocaml-variants.4.11.1+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.11.1+fp/opam | 1 +
.../ocaml-variants.4.11.1+musl+flambda/opam | 1 +
.../ocaml-variants.4.11.1+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.11.1+no-flat-float-array/opam | 1 +
.../ocaml-variants.4.11.1+spacetime/opam | 1 +
.../ocaml-variants/ocaml-variants.4.11.2+32bit/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.11.2+afl/opam | 1 +
.../ocaml-variants.4.11.2+bytecode-only/opam | 1 +
.../ocaml-variants.4.11.2+default-unsafe-string/opam | 1 +
.../opam | 1 +
.../ocaml-variants/ocaml-variants.4.11.2+flambda/opam | 1 +
.../ocaml-variants.4.11.2+fp+flambda/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.11.2+fp/opam | 1 +
.../ocaml-variants.4.11.2+musl+flambda/opam | 1 +
.../ocaml-variants.4.11.2+musl+static+flambda/opam | 1 +
.../ocaml-variants.4.11.2+no-flat-float-array/opam | 1 +
.../ocaml-variants.4.11.2+spacetime/opam | 1 +
.../ocaml-variants.4.11.3+trunk+afl/opam | 1 +
.../ocaml-variants.4.11.3+trunk+flambda/opam | 1 +
.../ocaml-variants/ocaml-variants.4.11.3+trunk+fp/opam | 1 +
.../ocaml-variants/ocaml-variants.4.11.3+trunk/opam | 1 +
.../ocaml-variants.4.12.0+domains+effects/opam | 1 +
.../ocaml-variants/ocaml-variants.4.12.0+domains/opam | 1 +
.../ocaml-variants/ocaml-variants.4.12.0+options/opam | 1 +
.../ocaml-variants/ocaml-variants.4.12.1+options/opam | 1 +
.../ocaml-variants/ocaml-variants.4.12.2+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.13.0+options/opam | 1 +
.../ocaml-variants/ocaml-variants.4.13.1+options/opam | 1 +
.../ocaml-variants/ocaml-variants.4.13.2+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.4.14.0+options/opam | 1 +
packages/ocaml-variants/ocaml-variants.4.14.1+BER/opam | 1 +
.../ocaml-variants/ocaml-variants.4.14.1+options/opam | 1 +
.../ocaml-variants/ocaml-variants.4.14.2+options/opam | 1 +
.../ocaml-variants.4.14.2~rc1+options/opam | 1 +
.../ocaml-variants/ocaml-variants.4.14.3+options/opam | 1 +
.../ocaml-variants/ocaml-variants.4.14.4+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.5.0.0+options/opam | 1 +
packages/ocaml-variants/ocaml-variants.5.0.0+tsan/opam | 1 +
.../ocaml-variants/ocaml-variants.5.0.1+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.5.1.0+options/opam | 1 +
packages/ocaml-variants/ocaml-variants.5.1.0+tsan/opam | 1 +
.../ocaml-variants.5.1.1+effect-syntax/opam | 1 +
.../ocaml-variants.5.1.1+flambda2+trunk/opam | 1 +
.../ocaml-variants/ocaml-variants.5.1.1+flambda2/opam | 1 +
.../ocaml-variants/ocaml-variants.5.1.1+options/opam | 1 +
packages/ocaml-variants/ocaml-variants.5.1.1+tsan/opam | 1 +
.../ocaml-variants/ocaml-variants.5.1.2+trunk/opam | 1 +
packages/ocaml-variants/ocaml-variants.5.2.0+msvc/opam | 1 +
.../ocaml-variants/ocaml-variants.5.2.0+options/opam | 1 +
.../ocaml-variants.5.2.0+statmemprof/opam | 1 +
.../ocaml-variants/ocaml-variants.5.2.1+options/opam | 1 +
.../ocaml-variants.5.2.1~rc1+options/opam | 1 +
.../ocaml-variants/ocaml-variants.5.2.2+trunk/opam | 1 +
packages/ocaml-variants/ocaml-variants.5.3.0+BER/opam | 1 +
packages/ocamlbuild/ocamlbuild.0.14.0/opam | 1 +
packages/ocamlbuild/ocamlbuild.0.14.1/opam | 1 +
packages/ocamlbuild/ocamlbuild.0.14.2+win/opam | 1 +
packages/ocamlbuild/ocamlbuild.0.14.2/opam | 1 +
packages/ocamlbuild/ocamlbuild.0.14.3+win/opam | 1 +
packages/ocamlbuild/ocamlbuild.0.14.3/opam | 1 +
packages/ocamlbuild/ocamlbuild.0.15.0/opam | 1 +
packages/ocamlbuild/ocamlbuild.0.16.1/opam | 1 +
packages/ocamlfind/ocamlfind.1.8.0/opam | 1 +
packages/ocamlfind/ocamlfind.1.8.1/opam | 1 +
packages/ocamlfind/ocamlfind.1.9.1/opam | 1 +
packages/ocamlfind/ocamlfind.1.9.2/opam | 1 +
packages/ocamlfind/ocamlfind.1.9.3/opam | 1 +
packages/ocamlfind/ocamlfind.1.9.5/opam | 1 +
packages/ocamlfind/ocamlfind.1.9.6/opam | 1 +
packages/ocamlfind/ocamlfind.1.9.8/opam | 1 +
packages/relocatable/relocatable.packages/opam | 18 ++++++++++++++++++
432 files changed, 464 insertions(+), 5 deletions(-)
create mode 100644 packages/relocatable/relocatable.packages/opam
(from ocaml/opam:debian-13-ocaml-5.4@sha256:4add1601135529e9f2e403a25c1c640231c0e871e87f88cf8feab4be5095104c)
2026-03-02 07:41.02 ---> using "3c18c9e472a4f76bc128dc0a5a1e21158ba3dbd0d6773ace6ec33f0cfe6fac9b" from cache
/: (user (uid 1000) (gid 1000))
/: (workdir /home/opam)
/home/opam: (run (shell "sudo ln -f /usr/bin/opam-dev /usr/bin/opam"))
2026-03-02 07:41.02 ---> using "7a3442ad99cd957e3ce65df65cad40aee4354c6d524c7813b8b589bc410c187e" from cache
/home/opam: (run (network host)
(shell "opam init --reinit --config .opamrc-sandbox -ni"))
Configuring from /home/opam/.opamrc-sandbox, then /home/opam/.opamrc, and finally from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.
Continue? [Y/n] y
This version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.
Format upgrade done.
<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
2026-03-02 07:41.02 ---> using "4214446d06a5b764dff5167574d07a5b4441731262795df63d7a3c5f64e1a189" from cache
/home/opam: (run (shell "opam option solver=builtin-0install && opam config report"))
Set to 'builtin-0install' the field solver in global configuration
# opam config report
# opam-version 2.5.0
# self-upgrade no
# system arch=x86_64 os=linux os-distribution=debian os-version=13
# solver builtin-0install
# install-criteria -changed,-count[avoid-version,solution]
# upgrade-criteria -count[avoid-version,solution]
# jobs 71
# repositories 1 (version-controlled)
# pinned 1 (version)
# current-switch 5.4
# invariant ["ocaml-base-compiler" {>= "5.4.0"}]
# compiler-packages ocaml-base-compiler.5.4.0, ocaml-compiler.5.4.0, ocaml-options-vanilla.1
# ocaml:native true
# ocaml:native-tools true
# ocaml:native-dynlink true
# ocaml:stubsdir /home/opam/.opam/5.4/lib/ocaml/stublibs:/home/opam/.opam/5.4/lib/ocaml
# ocaml:preinstalled false
# ocaml:compiler 5.4.0
2026-03-02 07:41.02 ---> using "8bf276ecf196c09ac4fb294f887dce47d9744c212aabbbad5d313f0a4179272a" from cache
/home/opam: (env OPAMDOWNLOADJOBS 1)
/home/opam: (env OPAMERRLOGLEN 0)
/home/opam: (env OPAMPRECISETRACKING 1)
/home/opam: (env CI true)
/home/opam: (env OPAM_REPO_CI true)
/home/opam: (run (shell "rm -rf opam-repository/"))
2026-03-02 07:41.02 ---> using "c04e3f046c8eaf07d3d8f4b30debe47a6f7742bcdbfbfeb4d464149613cd35cd" from cache
/home/opam: (copy (src .) (dst opam-repository/))
2026-03-02 07:41.03 ---> using "f28d2d55603b70a3956974bde95e68526d52d43abb5c3f086de712d28b994566" from cache
/home/opam: (run (shell "opam repository set-url --strict default opam-repository/"))
[default] Initialised
2026-03-02 07:41.03 ---> using "3775e9b8ca53e7a42e175e2f214e7d345a02bf22f9dd6d271ae720654f277192" from cache
/home/opam: (run (network host)
(shell "opam update --depexts || true"))
+ /usr/bin/sudo "apt-get" "update"
- Hit:1 http://deb.debian.org/debian trixie InRelease
- Get:2 http://deb.debian.org/debian trixie-updates InRelease [47.3 kB]
- Get:3 http://deb.debian.org/debian-security trixie-security InRelease [43.4 kB]
- Get:4 http://deb.debian.org/debian-security trixie-security/main amd64 Packages [113 kB]
- Fetched 203 kB in 0s (1230 kB/s)
- Reading package lists...
-
2026-03-02 07:41.04 ---> using "a6fcf1eb697bb2cbcd49a3ac591b228ed4dfb5d9211b39971561804e137638fa" from cache
/home/opam: (run (shell "opam pin add -k version -yn ocaml-secondary-compiler.4.08.1 4.08.1"))
ocaml-secondary-compiler is now pinned to version 4.08.1
2026-03-02 07:41.04 ---> using "26b9929e6d78f54d8f80fd7749e76b57b2a5087e9bbfce4cc04584f7009adae7" from cache
/home/opam: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam reinstall ocaml-secondary-compiler.4.08.1;\
\n res=$?;\
\n test \"$res\" != 31 && exit \"$res\";\
\n export OPAMCLI=2.0;\
\n build_dir=$(opam var prefix)/.opam-switch/build;\
\n failed=$(ls \"$build_dir\");\
\n partial_fails=\"\";\
\n for pkg in $failed; do\
\n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-13\\\"\"; then\
\n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\
\n fi;\
\n test \"$pkg\" != 'ocaml-secondary-compiler.4.08.1' && partial_fails=\"$partial_fails $pkg\";\
\n done;\
\n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\
\n exit 1"))
ocaml-secondary-compiler.4.08.1 is not installed. Install it? [Y/n] y
The following actions will be performed:
=== install 1 package
- install ocaml-secondary-compiler 4.08.1 (pinned)
<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved ocaml-secondary-compiler.4.08.1 (cached)
-> installed ocaml-secondary-compiler.4.08.1
Done.
# To update the current shell environment, run: eval $(opam env)
2026-03-02 07:41.04 ---> using "6e65f0f970e618e23463227013afdf4ddd2988a331944d724eff919672c1ffef" from cache
/home/opam: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam reinstall owl-opt.0.0.1;\
\n res=$?;\
\n test \"$res\" != 31 && exit \"$res\";\
\n export OPAMCLI=2.0;\
\n build_dir=$(opam var prefix)/.opam-switch/build;\
\n failed=$(ls \"$build_dir\");\
\n partial_fails=\"\";\
\n for pkg in $failed; do\
\n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-13\\\"\"; then\
\n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\
\n fi;\
\n test \"$pkg\" != 'owl-opt.0.0.1' && partial_fails=\"$partial_fails $pkg\";\
\n done;\
\n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\
\n exit 1"))
owl-opt.0.0.1 is not installed. Install it? [Y/n] y
The following actions will be performed:
=== install 22 packages
- install base v0.17.3 [required by owl, ppx-owl-opt]
- install camlzip 1.14 [required by npy]
- install conf-openblas 0.2.3 [required by owl]
- install conf-pkg-config 4 [required by conf-zlib]
- install conf-zlib 1 [required by camlzip]
- install csexp 1.5.2 [required by dune-configurator]
- install ctypes 0.24.0 [required by owl]
- install dune 3.21.1 [required by owl-opt]
- install dune-configurator 3.21.1 [required by owl]
- install integers 0.7.0 [required by ctypes]
- install npy 0.0.9 [required by owl]
- install ocaml-compiler-libs v0.17.0 [required by ppxlib]
- install ocaml_intrinsics_kernel v0.17.1 [required by base]
- install ocamlfind 1.9.8 [required by camlzip]
- install owl 1.2 [required by owl-opt]
- install owl-base 1.2 [required by owl]
- install owl-opt 0.0.1
- install ppx-owl-opt 0.0.1 [required by owl-opt]
- install ppx_derivers 1.2.1 [required by ppxlib]
- install ppxlib 0.37.0 [required by ppx-owl-opt]
- install sexplib0 v0.17.0 [required by base, ppxlib]
- install stdlib-shims 0.3.0 [required by ppxlib]
The following system packages will first need to be installed:
liblapacke-dev libopenblas-dev pkg-config zlib1g-dev
<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>
opam believes some required external dependencies are missing. opam can:
> 1. Run apt-get to install them (may need root/sudo access)
2. Display the recommended apt-get command and wait while you run it manually (e.g. in another terminal)
3. Continue anyway, and, upon success, permanently register that this external dependency is present, but not detectable
4. Abort the installation
[1/2/3/4] 1
+ /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "liblapacke-dev" "libopenblas-dev" "pkg-config" "zlib1g-dev"
- Selecting previously unselected package libblas3:amd64.
- (Reading database ... 20654 files and directories currently installed.)
- Preparing to unpack .../00-libblas3_3.12.1-6_amd64.deb ...
- Unpacking libblas3:amd64 (3.12.1-6) ...
- Selecting previously unselected package libblas-dev:amd64.
- Preparing to unpack .../01-libblas-dev_3.12.1-6_amd64.deb ...
- Unpacking libblas-dev:amd64 (3.12.1-6) ...
- Selecting previously unselected package libgfortran5:amd64.
- Preparing to unpack .../02-libgfortran5_14.2.0-19_amd64.deb ...
- Unpacking libgfortran5:amd64 (14.2.0-19) ...
- Selecting previously unselected package libopenblas0-pthread:amd64.
- Preparing to unpack .../03-libopenblas0-pthread_0.3.29+ds-3_amd64.deb ...
- Unpacking libopenblas0-pthread:amd64 (0.3.29+ds-3) ...
- Selecting previously unselected package liblapack3:amd64.
- Preparing to unpack .../04-liblapack3_3.12.1-6_amd64.deb ...
- Unpacking liblapack3:amd64 (3.12.1-6) ...
- Selecting previously unselected package libopenblas-pthread-dev:amd64.
- Preparing to unpack .../05-libopenblas-pthread-dev_0.3.29+ds-3_amd64.deb ...
- Unpacking libopenblas-pthread-dev:amd64 (0.3.29+ds-3) ...
- Selecting previously unselected package liblapack-dev:amd64.
- Preparing to unpack .../06-liblapack-dev_3.12.1-6_amd64.deb ...
- Unpacking liblapack-dev:amd64 (3.12.1-6) ...
- Selecting previously unselected package libtmglib3:amd64.
- Preparing to unpack .../07-libtmglib3_3.12.1-6_amd64.deb ...
- Unpacking libtmglib3:amd64 (3.12.1-6) ...
- Selecting previously unselected package liblapacke:amd64.
- Preparing to unpack .../08-liblapacke_3.12.1-6_amd64.deb ...
- Unpacking liblapacke:amd64 (3.12.1-6) ...
- Selecting previously unselected package libtmglib-dev:amd64.
- Preparing to unpack .../09-libtmglib-dev_3.12.1-6_amd64.deb ...
- Unpacking libtmglib-dev:amd64 (3.12.1-6) ...
- Selecting previously unselected package liblapacke-dev:amd64.
- Preparing to unpack .../10-liblapacke-dev_3.12.1-6_amd64.deb ...
- Unpacking liblapacke-dev:amd64 (3.12.1-6) ...
- Selecting previously unselected package libopenblas0:amd64.
- Preparing to unpack .../11-libopenblas0_0.3.29+ds-3_amd64.deb ...
- Unpacking libopenblas0:amd64 (0.3.29+ds-3) ...
- Selecting previously unselected package libopenblas-dev:amd64.
- Preparing to unpack .../12-libopenblas-dev_0.3.29+ds-3_amd64.deb ...
- Unpacking libopenblas-dev:amd64 (0.3.29+ds-3) ...
- Selecting previously unselected package libpkgconf3:amd64.
- Preparing to unpack .../13-libpkgconf3_1.8.1-4_amd64.deb ...
- Unpacking libpkgconf3:amd64 (1.8.1-4) ...
- Selecting previously unselected package pkgconf-bin.
- Preparing to unpack .../14-pkgconf-bin_1.8.1-4_amd64.deb ...
- Unpacking pkgconf-bin (1.8.1-4) ...
- Selecting previously unselected package pkgconf:amd64.
- Preparing to unpack .../15-pkgconf_1.8.1-4_amd64.deb ...
- Unpacking pkgconf:amd64 (1.8.1-4) ...
- Selecting previously unselected package pkg-config:amd64.
- Preparing to unpack .../16-pkg-config_1.8.1-4_amd64.deb ...
- Unpacking pkg-config:amd64 (1.8.1-4) ...
- Selecting previously unselected package zlib1g-dev:amd64.
- Preparing to unpack .../17-zlib1g-dev_1%3a1.3.dfsg+really1.3.1-1+b1_amd64.deb ...
- Unpacking zlib1g-dev:amd64 (1:1.3.dfsg+really1.3.1-1+b1) ...
- Setting up libblas3:amd64 (3.12.1-6) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/blas/libblas.so.3 to provide /usr/lib/x86_64-linux-gnu/libblas.so.3 (libblas.so.3-x86_64-linux-gnu) in auto mode
- Setting up libpkgconf3:amd64 (1.8.1-4) ...
- Setting up pkgconf-bin (1.8.1-4) ...
- Setting up libgfortran5:amd64 (14.2.0-19) ...
- Setting up zlib1g-dev:amd64 (1:1.3.dfsg+really1.3.1-1+b1) ...
- Setting up libblas-dev:amd64 (3.12.1-6) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/blas/libblas.so to provide /usr/lib/x86_64-linux-gnu/libblas.so (libblas.so-x86_64-linux-gnu) in auto mode
- Setting up liblapack3:amd64 (3.12.1-6) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/lapack/liblapack.so.3 to provide /usr/lib/x86_64-linux-gnu/liblapack.so.3 (liblapack.so.3-x86_64-linux-gnu) in auto mode
- Setting up libopenblas0-pthread:amd64 (0.3.29+ds-3) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so.3 to provide /usr/lib/x86_64-linux-gnu/libblas.so.3 (libblas.so.3-x86_64-linux-gnu) in auto mode
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/liblapack.so.3 to provide /usr/lib/x86_64-linux-gnu/liblapack.so.3 (liblapack.so.3-x86_64-linux-gnu) in auto mode
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblas.so.0 to provide /usr/lib/x86_64-linux-gnu/libopenblas.so.0 (libopenblas.so.0-x86_64-linux-gnu) in auto mode
- Setting up pkgconf:amd64 (1.8.1-4) ...
- Setting up libtmglib3:amd64 (3.12.1-6) ...
- Setting up liblapack-dev:amd64 (3.12.1-6) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/lapack/liblapack.so to provide /usr/lib/x86_64-linux-gnu/liblapack.so (liblapack.so-x86_64-linux-gnu) in auto mode
- Setting up pkg-config:amd64 (1.8.1-4) ...
- Setting up libopenblas0:amd64 (0.3.29+ds-3) ...
- Setting up liblapacke:amd64 (3.12.1-6) ...
- Setting up libtmglib-dev:amd64 (3.12.1-6) ...
- Setting up libopenblas-pthread-dev:amd64 (0.3.29+ds-3) ...
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/libblas.so to provide /usr/lib/x86_64-linux-gnu/libblas.so (libblas.so-x86_64-linux-gnu) in auto mode
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/liblapack.so to provide /usr/lib/x86_64-linux-gnu/liblapack.so (liblapack.so-x86_64-linux-gnu) in auto mode
- update-alternatives: using /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblas.so to provide /usr/lib/x86_64-linux-gnu/libopenblas.so (libopenblas.so-x86_64-linux-gnu) in auto mode
- Setting up liblapacke-dev:amd64 (3.12.1-6) ...
- Setting up libopenblas-dev:amd64 (0.3.29+ds-3) ...
- Processing triggers for libc-bin (2.41-12+deb13u1) ...
<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved base.v0.17.3 (cached)
-> retrieved camlzip.1.14 (cached)
-> retrieved conf-openblas.0.2.3 (cached)
-> retrieved csexp.1.5.2 (cached)
-> retrieved ctypes.0.24.0 (cached)
-> installed conf-pkg-config.4
-> installed conf-zlib.1
-> installed conf-openblas.0.2.3
-> retrieved dune.3.21.1, dune-configurator.3.21.1 (cached)
-> retrieved integers.0.7.0 (cached)
-> retrieved npy.0.0.9 (cached)
-> retrieved ocaml-compiler-libs.v0.17.0 (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached)
-> retrieved ocamlfind.1.9.8 (cached)
-> retrieved owl.1.2, owl-base.1.2 (cached)
-> retrieved owl-opt.0.0.1, ppx-owl-opt.0.0.1 (cached)
-> retrieved ppx_derivers.1.2.1 (cached)
-> installed ocamlfind.1.9.8
-> retrieved ppxlib.0.37.0 (cached)
-> retrieved sexplib0.v0.17.0 (cached)
-> retrieved stdlib-shims.0.3.0 (cached)
-> installed camlzip.1.14
-> installed dune.3.21.1
-> installed csexp.1.5.2
-> installed npy.0.0.9
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed ocaml-compiler-libs.v0.17.0
-> installed ppx_derivers.1.2.1
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed integers.0.7.0
-> installed dune-configurator.3.21.1
-> installed owl-base.1.2
-> installed ctypes.0.24.0
-> installed base.v0.17.3
-> installed ppxlib.0.37.0
-> installed ppx-owl-opt.0.0.1
-> installed owl.1.2
-> installed owl-opt.0.0.1
Done.
# To update the current shell environment, run: eval $(opam env)
2026-03-02 07:46.38 ---> saved as "1bbfd2739a042502f38ad1153db13f41e6242c72a1deb1d900478bfdb8336af6"
/home/opam: (run (network host)
(shell "(opam reinstall --with-test owl-opt.0.0.1) || true"))
The following actions will be performed:
=== recompile 1 package
- recompile owl-opt 0.0.1
<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved owl-opt.0.0.1 (https://opam.ocaml.org/cache)
-> removed owl-opt.0.0.1
-> installed owl-opt.0.0.1
Done.
# To update the current shell environment, run: eval $(opam env)
2026-03-02 07:47.00 ---> saved as "09525911c0e53df101b78d168cdc3df8b16c7bd430dba7645d74d38f71e621cc"
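
The verbose re-run below repeats the --with-test build so that the test output lands in the log; for owl-opt the test target runs the examples/opt/rmsprop.exe example, whose step/loss lines make up the rest of this log. For reference, the textbook RMSProp update that the example presumably exercises (rho is the decay rate, eta the learning rate, eps a small stabiliser, and g_t the current gradient):

    s_t     = rho * s_{t-1} + (1 - rho) * g_t^2
    theta_t = theta_{t-1} - eta * g_t / (sqrt(s_t) + eps)

The slowly but steadily decreasing loss printed below is consistent with this kind of fixed-step first-order update.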
/home/opam: (run (shell "opam reinstall --with-test --verbose owl-opt.0.0.1;\
\n res=$?;\
\n test \"$res\" != 31 && exit \"$res\";\
\n export OPAMCLI=2.0;\
\n build_dir=$(opam var prefix)/.opam-switch/build;\
\n failed=$(ls \"$build_dir\");\
\n partial_fails=\"\";\
\n for pkg in $failed; do\
\n if opam show -f x-ci-accept-failures: \"$pkg\" | grep -qF \"\\\"debian-13\\\"\"; then\
\n echo \"A package failed and has been disabled for CI using the 'x-ci-accept-failures' field.\";\
\n fi;\
\n test \"$pkg\" != 'owl-opt.0.0.1' && partial_fails=\"$partial_fails $pkg\";\
\n done;\
\n test \"${partial_fails}\" != \"\" && echo \"opam-repo-ci detected dependencies failing: ${partial_fails}\";\
\n exit 1"))
The following actions will be performed:
=== recompile 1 package
- recompile owl-opt 0.0.1
<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
Processing 1/4: [owl-opt.0.0.1: extract]
-> retrieved owl-opt.0.0.1 (cached)
Processing 2/4: [owl-opt: dune build]
+ /home/opam/.opam/opam-init/hooks/sandbox.sh "build" "dune" "build" "-p" "owl-opt" "-j" "71" (CWD=/home/opam/.opam/5.4/.opam-switch/build/owl-opt.0.0.1)
- File "dune-project", line 2, characters 11-14:
- 2 | (using fmt 1.1)
- ^^^
- Warning: Version 1.1 of integration with automatic formatters is not
- supported until version 1.7 of the dune language.
- Supported versions of this extension in version 1.5 of the dune language:
- - 1.0
Processing 2/4: [owl-opt: dune runtest]
+ /home/opam/.opam/opam-init/hooks/sandbox.sh "build" "dune" "runtest" "examples/opt" "-p" "owl-opt" "-j" "71" (CWD=/home/opam/.opam/5.4/.opam-switch/build/owl-opt.0.0.1)
- File "dune-project", line 2, characters 11-14:
- 2 | (using fmt 1.1)
- ^^^
- Warning: Version 1.1 of integration with automatic formatters is not
- supported until version 1.7 of the dune language.
- Supported versions of this extension in version 1.5 of the dune language:
- - 1.0
- (cd _build/default/examples/opt && ./rmsprop.exe)
-
step: 0 | loss: 9.118381293
step: 10 | loss: 9.103927978
step: 20 | loss: 9.093691219
step: 30 | loss: 9.084344667
step: 40 | loss: 9.075256754
step: 50 | loss: 9.066257005
step: 60 | loss: 9.057290584
step: 70 | loss: 9.048338894
step: 80 | loss: 9.039395519
step: 90 | loss: 9.030458239
step: 100 | loss: 9.021526292
step: 110 | loss: 9.012599419
step: 120 | loss: 9.003677540
step: 130 | loss: 8.994760637
step: 140 | loss: 8.985848713
step: 150 | loss: 8.976941777
step: 160 | loss: 8.968039844
step: 170 | loss: 8.959142927
step: 180 | loss: 8.950251040
step: 190 | loss: 8.941364198
step: 200 | loss: 8.932482416
step: 210 | loss: 8.923605708
step: 220 | loss: 8.914734088
step: 230 | loss: 8.905867572
step: 240 | loss: 8.897006175
step: 250 | loss: 8.888149910
step: 260 | loss: 8.879298793
step: 270 | loss: 8.870452839
step: 280 | loss: 8.861612063
step: 290 | loss: 8.852776479
step: 300 | loss: 8.843946103
step: 310 | loss: 8.835120949
step: 320 | loss: 8.826301032
step: 330 | loss: 8.817486368
step: 340 | loss: 8.808676972
step: 350 | loss: 8.799872859
step: 360 | loss: 8.791074043
step: 370 | loss: 8.782280541
step: 380 | loss: 8.773492366
step: 390 | loss: 8.764709536
step: 400 | loss: 8.755932063
step: 410 | loss: 8.747159965
step: 420 | loss: 8.738393256
step: 430 | loss: 8.729631952
step: 440 | loss: 8.720876068
step: 450 | loss: 8.712125619
step: 460 | loss: 8.703380620
step: 470 | loss: 8.694641087
step: 480 | loss: 8.685907036
step: 490 | loss: 8.677178481
step: 500 | loss: 8.668455439
step: 510 | loss: 8.659737923
step: 520 | loss: 8.651025951
step: 530 | loss: 8.642319536
step: 540 | loss: 8.633618696
step: 550 | loss: 8.624923443
step: 560 | loss: 8.616233795
step: 570 | loss: 8.607549767
step: 580 | loss: 8.598871373
step: 590 | loss: 8.590198629
step: 600 | loss: 8.581531550
step: 610 | loss: 8.572870151
step: 620 | loss: 8.564214447
step: 630 | loss: 8.555564453
step: 640 | loss: 8.546920184
step: 650 | loss: 8.538281654
step: 660 | loss: 8.529648879
step: 670 | loss: 8.521021873
step: 680 | loss: 8.512400650
step: 690 | loss: 8.503785224
step: 700 | loss: 8.495175610
step: 710 | loss: 8.486571821
step: 720 | loss: 8.477973870
step: 730 | loss: 8.469381771
step: 740 | loss: 8.460795536
step: 750 | loss: 8.452215179
step: 760 | loss: 8.443640711
step: 770 | loss: 8.435072143
step: 780 | loss: 8.426509486
step: 790 | loss: 8.417952751
step: 800 | loss: 8.409401946
step: 810 | loss: 8.400857081
step: 820 | loss: 8.392318162
step: 830 | loss: 8.383785194
step: 840 | loss: 8.375258182
step: 850 | loss: 8.366737128
step: 860 | loss: 8.358222031
step: 870 | loss: 8.349712886
step: 880 | loss: 8.341209685
step: 890 | loss: 8.332712415
step: 900 | loss: 8.324221054
step: 910 | loss: 8.315735572
step: 920 | loss: 8.307255925
step: 930 | loss: 8.298782049
step: 940 | loss: 8.290313853
step: 950 | loss: 8.281851201
step: 960 | loss: 8.273393889
step: 970 | loss: 8.264941607
step: 980 | loss: 8.256493897
step: 990 | loss: 8.248050193
step: 1000 | loss: 8.239610059
step: 1010 | loss: 8.231173368
step: 1020 | loss: 8.222740119
step: 1030 | loss: 8.214310322
step: 1040 | loss: 8.205883989
step: 1050 | loss: 8.197461130
step: 1060 | loss: 8.189041755
step: 1070 | loss: 8.180625876
step: 1080 | loss: 8.172213502
step: 1090 | loss: 8.163804646
step: 1100 | loss: 8.155399317
step: 1110 | loss: 8.146997526
step: 1120 | loss: 8.138599284
step: 1130 | loss: 8.130204602
step: 1140 | loss: 8.121813492
step: 1150 | loss: 8.113425964
step: 1160 | loss: 8.105042028
step: 1170 | loss: 8.096661697
step: 1180 | loss: 8.088284982
step: 1190 | loss: 8.079911893
step: 1200 | loss: 8.071542441
step: 1210 | loss: 8.063176638
step: 1220 | loss: 8.054814496
step: 1230 | loss: 8.046456025
step: 1240 | loss: 8.038101237
step: 1250 | loss: 8.029750143
step: 1260 | loss: 8.021402755
step: 1270 | loss: 8.013059084
step: 1280 | loss: 8.004719142
step: 1290 | loss: 7.996382940
step: 1300 | loss: 7.988050490
step: 1310 | loss: 7.979721804
step: 1320 | loss: 7.971396892
step: 1330 | loss: 7.963075768
step: 1340 | loss: 7.954758443
step: 1350 | loss: 7.946444928
step: 1360 | loss: 7.938135235
step: 1370 | loss: 7.929829377
step: 1380 | loss: 7.921527366
step: 1390 | loss: 7.913229212
step: 1400 | loss: 7.904934929
step: 1410 | loss: 7.896644528
step: 1420 | loss: 7.888358022
step: 1430 | loss: 7.880075422
step: 1440 | loss: 7.871796741
step: 1450 | loss: 7.863521992
step: 1460 | loss: 7.855251186
step: 1470 | loss: 7.846984336
step: 1480 | loss: 7.838721453
step: 1490 | loss: 7.830462552
step: 1500 | loss: 7.822207644
step: 1510 | loss: 7.813956741
step: 1520 | loss: 7.805709856
step: 1530 | loss: 7.797467002
step: 1540 | loss: 7.789228192
step: 1550 | loss: 7.780993438
step: 1560 | loss: 7.772762753
step: 1570 | loss: 7.764536149
step: 1580 | loss: 7.756313640
step: 1590 | loss: 7.748095239
step: 1600 | loss: 7.739880957
step: 1610 | loss: 7.731670809
step: 1620 | loss: 7.723464808
step: 1630 | loss: 7.715262966
step: 1640 | loss: 7.707065296
step: 1650 | loss: 7.698871812
step: 1660 | loss: 7.690682528
step: 1670 | loss: 7.682497455
step: 1680 | loss: 7.674316608
step: 1690 | loss: 7.666139999
step: 1700 | loss: 7.657967643
step: 1710 | loss: 7.649799553
step: 1720 | loss: 7.641635742
step: 1730 | loss: 7.633476223
step: 1740 | loss: 7.625321011
step: 1750 | loss: 7.617170119
step: 1760 | loss: 7.609023561
step: 1770 | loss: 7.600881350
step: 1780 | loss: 7.592743500
step: 1790 | loss: 7.584610025
step: 1800 | loss: 7.576480939
step: 1810 | loss: 7.568356256
step: 1820 | loss: 7.560235990
step: 1830 | loss: 7.552120155
step: 1840 | loss: 7.544008765
step: 1850 | loss: 7.535901833
step: 1860 | loss: 7.527799375
step: 1870 | loss: 7.519701405
step: 1880 | loss: 7.511607936
step: 1890 | loss: 7.503518983
step: 1900 | loss: 7.495434561
step: 1910 | loss: 7.487354684
step: 1920 | loss: 7.479279366
step: 1930 | loss: 7.471208622
step: 1940 | loss: 7.463142466
step: 1950 | loss: 7.455080914
step: 1960 | loss: 7.447023979
step: 1970 | loss: 7.438971677
step: 1980 | loss: 7.430924022
step: 1990 | loss: 7.422881029
step: 2000 | loss: 7.414842714
step: 2010 | loss: 7.406809090
step: 2020 | loss: 7.398780173
step: 2030 | loss: 7.390755978
step: 2040 | loss: 7.382736520
step: 2050 | loss: 7.374721814
step: 2060 | loss: 7.366711876
step: 2070 | loss: 7.358706720
step: 2080 | loss: 7.350706362
step: 2090 | loss: 7.342710817
step: 2100 | loss: 7.334720101
step: 2110 | loss: 7.326734229
step: 2120 | loss: 7.318753216
step: 2130 | loss: 7.310777079
step: 2140 | loss: 7.302805832
step: 2150 | loss: 7.294839491
step: 2160 | loss: 7.286878072
step: 2170 | loss: 7.278921591
step: 2180 | loss: 7.270970064
step: 2190 | loss: 7.263023506
step: 2200 | loss: 7.255081933
step: 2210 | loss: 7.247145361
step: 2220 | loss: 7.239213806
step: 2230 | loss: 7.231287285
step: 2240 | loss: 7.223365813
step: 2250 | loss: 7.215449406
step: 2260 | loss: 7.207538081
step: 2270 | loss: 7.199631854
step: 2280 | loss: 7.191730741
step: 2290 | loss: 7.183834759
step: 2300 | loss: 7.175943923
step: 2310 | loss: 7.168058251
step: 2320 | loss: 7.160177759
step: 2330 | loss: 7.152302463
step: 2340 | loss: 7.144432379
step: 2350 | loss: 7.136567526
step: 2360 | loss: 7.128707918
step: 2370 | loss: 7.120853573
step: 2380 | loss: 7.113004508
step: 2390 | loss: 7.105160740
step: 2400 | loss: 7.097322285
step: 2410 | loss: 7.089489160
step: 2420 | loss: 7.081661382
step: 2430 | loss: 7.073838968
step: 2440 | loss: 7.066021935
step: 2450 | loss: 7.058210300
step: 2460 | loss: 7.050404081
step: 2470 | loss: 7.042603294
step: 2480 | loss: 7.034807956
step: 2490 | loss: 7.027018085
step: 2500 | loss: 7.019233698
step: 2510 | loss: 7.011454813
step: 2520 | loss: 7.003681446
step: 2530 | loss: 6.995913614
step: 2540 | loss: 6.988151336
step: 2550 | loss: 6.980394629
step: 2560 | loss: 6.972643509
step: 2570 | loss: 6.964897995
step: 2580 | loss: 6.957158104
step: 2590 | loss: 6.949423853
step: 2600 | loss: 6.941695260
step: 2610 | loss: 6.933972342
step: 2620 | loss: 6.926255117
step: 2630 | loss: 6.918543603
step: 2640 | loss: 6.910837816
step: 2650 | loss: 6.903137774
step: 2660 | loss: 6.895443496
step: 2670 | loss: 6.887754998
step: 2680 | loss: 6.880072297
step: 2690 | loss: 6.872395412
step: 2700 | loss: 6.864724359
step: 2710 | loss: 6.857059157
step: 2720 | loss: 6.849399822
step: 2730 | loss: 6.841746371
step: 2740 | loss: 6.834098823
step: 2750 | loss: 6.826457193
step: 2760 | loss: 6.818821500
step: 2770 | loss: 6.811191759
step: 2780 | loss: 6.803567989
step: 2790 | loss: 6.795950205
step: 2800 | loss: 6.788338424
step: 2810 | loss: 6.780732663
step: 2820 | loss: 6.773132937
step: 2830 | loss: 6.765539263
step: 2840 | loss: 6.757951657
step: 2850 | loss: 6.750370134
step: 2860 | loss: 6.742794709
step: 2870 | loss: 6.735225397
step: 2880 | loss: 6.727662212
step: 2890 | loss: 6.720105167
step: 2900 | loss: 6.712554278
step: 2910 | loss: 6.705009555
step: 2920 | loss: 6.697471011
step: 2930 | loss: 6.689938656
step: 2940 | loss: 6.682412501
step: 2950 | loss: 6.674892555
step: 2960 | loss: 6.667378824
step: 2970 | loss: 6.659871314
step: 2980 | loss: 6.652370028
step: 2990 | loss: 6.644874967
step: 3000 | loss: 6.637386129
step: 3010 | loss: 6.629903508
step: 3020 | loss: 6.622427090
step: 3030 | loss: 6.614956860
step: 3040 | loss: 6.607492789
step: 3050 | loss: 6.600034839
step: 3060 | loss: 6.592582956
step: 3070 | loss: 6.585137062
step: 3080 | loss: 6.577697046
step: 3090 | loss: 6.570262746
step: 3100 | loss: 6.562833919
step: 3110 | loss: 6.555410199
step: 3120 | loss: 6.547991037
step: 3130 | loss: 6.540575724
step: 3140 | loss: 6.533163654
step: 3150 | loss: 6.525754606
step: 3160 | loss: 6.518348568
step: 3170 | loss: 6.510945548
step: 3180 | loss: 6.503545558
step: 3190 | loss: 6.496148607
step: 3200 | loss: 6.488754705
step: 3210 | loss: 6.481363862
step: 3220 | loss: 6.473976088
step: 3230 | loss: 6.466591393
step: 3240 | loss: 6.459209788
step: 3250 | loss: 6.451831282
step: 3260 | loss: 6.444455887
step: 3270 | loss: 6.437083611
step: 3280 | loss: 6.429714466
step: 3290 | loss: 6.422348462
step: 3300 | loss: 6.414985609
step: 3310 | loss: 6.407625917
step: 3320 | loss: 6.400269398
step: 3330 | loss: 6.392916060
step: 3340 | loss: 6.385565916
step: 3350 | loss: 6.378218975
step: 3360 | loss: 6.370875248
step: 3370 | loss: 6.363534746
step: 3380 | loss: 6.356197478
step: 3390 | loss: 6.348863457
step: 3400 | loss: 6.341532691
step: 3410 | loss: 6.334205193
step: 3420 | loss: 6.326880973
step: 3430 | loss: 6.319560041
step: 3440 | loss: 6.312242409
step: 3450 | loss: 6.304928086
step: 3460 | loss: 6.297617085
step: 3470 | loss: 6.290309415
step: 3480 | loss: 6.283005088
step: 3490 | loss: 6.275704115
step: 3500 | loss: 6.268406506
step: 3510 | loss: 6.261112272
step: 3520 | loss: 6.253821425
step: 3530 | loss: 6.246533974
step: 3540 | loss: 6.239249932
step: 3550 | loss: 6.231969310
step: 3560 | loss: 6.224692117
step: 3570 | loss: 6.217418366
step: 3580 | loss: 6.210148067
step: 3590 | loss: 6.202881231
step: 3600 | loss: 6.195617870
step: 3610 | loss: 6.188357994
step: 3620 | loss: 6.181101615
step: 3630 | loss: 6.173848743
step: 3640 | loss: 6.166599389
step: 3650 | loss: 6.159353565
step: 3660 | loss: 6.152111282
step: 3670 | loss: 6.144872550
step: 3680 | loss: 6.137637381
step: 3690 | loss: 6.130405786
step: 3700 | loss: 6.123177775
step: 3710 | loss: 6.115953359
step: 3720 | loss: 6.108732550
step: 3730 | loss: 6.101515357
step: 3740 | loss: 6.094301793
step: 3750 | loss: 6.087091867
step: 3760 | loss: 6.079885591
step: 3770 | loss: 6.072682975
step: 3780 | loss: 6.065484029
step: 3790 | loss: 6.058288763
step: 3800 | loss: 6.051097189
step: 3810 | loss: 6.043909317
step: 3820 | loss: 6.036725155
step: 3830 | loss: 6.029544715
step: 3840 | loss: 6.022368006
step: 3850 | loss: 6.015195038
step: 3860 | loss: 6.008025820
step: 3870 | loss: 6.000860360
step: 3880 | loss: 5.993698669
step: 3890 | loss: 5.986540754
step: 3900 | loss: 5.979386624
step: 3910 | loss: 5.972236287
step: 3920 | loss: 5.965089749
step: 3930 | loss: 5.957947019
step: 3940 | loss: 5.950808101
step: 3950 | loss: 5.943673003
step: 3960 | loss: 5.936541728
step: 3970 | loss: 5.929414282
step: 3980 | loss: 5.922290667
step: 3990 | loss: 5.915170885
step: 4000 | loss: 5.908054936
step: 4010 | loss: 5.900942821
step: 4020 | loss: 5.893834535
step: 4030 | loss: 5.886730074
step: 4040 | loss: 5.879629429
step: 4050 | loss: 5.872532589
step: 4060 | loss: 5.865439538
step: 4070 | loss: 5.858350255
step: 4080 | loss: 5.851264712
step: 4090 | loss: 5.844182871
step: 4100 | loss: 5.837104682
step: 4110 | loss: 5.830030079
step: 4120 | loss: 5.822958971
step: 4130 | loss: 5.815891233
step: 4140 | loss: 5.808826690
step: 4150 | loss: 5.801765081
step: 4160 | loss: 5.794706022
step: 4170 | loss: 5.787648938
step: 4180 | loss: 5.780593045
step: 4190 | loss: 5.773537573
step: 4200 | loss: 5.766482157
step: 4210 | loss: 5.759426742
step: 4220 | loss: 5.752371327
step: 4230 | loss: 5.745315912
step: 4240 | loss: 5.738260497
step: 4250 | loss: 5.731205082
step: 4260 | loss: 5.724149668
step: 4270 | loss: 5.717094253
step: 4280 | loss: 5.710038838
step: 4290 | loss: 5.702983423
step: 4300 | loss: 5.695928009
step: 4310 | loss: 5.688872594
step: 4320 | loss: 5.681817179
step: 4330 | loss: 5.674761765
step: 4340 | loss: 5.667706350
step: 4350 | loss: 5.660650936
step: 4360 | loss: 5.653595521
step: 4370 | loss: 5.646540107
step: 4380 | loss: 5.639484692
step: 4390 | loss: 5.632429278
step: 4400 | loss: 5.625373863
step: 4410 | loss: 5.618318449
step: 4420 | loss: 5.611263034
step: 4430 | loss: 5.604207620
step: 4440 | loss: 5.597152206
step: 4450 | loss: 5.590096792
step: 4460 | loss: 5.583041377
step: 4470 | loss: 5.575985963
step: 4480 | loss: 5.568930549
step: 4490 | loss: 5.561875135
step: 4500 | loss: 5.554819720
step: 4510 | loss: 5.547764306
step: 4520 | loss: 5.540708892
step: 4530 | loss: 5.533653478
step: 4540 | loss: 5.526598064
step: 4550 | loss: 5.519542650
step: 4560 | loss: 5.512487236
step: 4570 | loss: 5.505431822
step: 4580 | loss: 5.498376408
step: 4590 | loss: 5.491320994
step: 4600 | loss: 5.484265580
step: 4610 | loss: 5.477210167
step: 4620 | loss: 5.470154753
step: 4630 | loss: 5.463099339
step: 4640 | loss: 5.456043925
step: 4650 | loss: 5.448988512
step: 4660 | loss: 5.441933098
step: 4670 | loss: 5.434877684
step: 4680 | loss: 5.427822271
step: 4690 | loss: 5.420766857
step: 4700 | loss: 5.413711443
step: 4710 | loss: 5.406656030
step: 4720 | loss: 5.399600616
step: 4730 | loss: 5.392545203
step: 4740 | loss: 5.385489789
step: 4750 | loss: 5.378434376
step: 4760 | loss: 5.371378963
step: 4770 | loss: 5.364323549
step: 4780 | loss: 5.357268136
step: 4790 | loss: 5.350212723
step: 4800 | loss: 5.343157309
step: 4810 | loss: 5.336101896
step: 4820 | loss: 5.329046483
step: 4830 | loss: 5.321991070
step: 4840 | loss: 5.314935656
step: 4850 | loss: 5.307880243
step: 4860 | loss: 5.300824830
step: 4870 | loss: 5.293769417
step: 4880 | loss: 5.286714004
step: 4890 | loss: 5.279658591
step: 4900 | loss: 5.272603178
step: 4910 | loss: 5.265547765
step: 4920 | loss: 5.258492352
step: 4930 | loss: 5.251436940
step: 4940 | loss: 5.244381527
step: 4950 | loss: 5.237326114
step: 4960 | loss: 5.230270701
step: 4970 | loss: 5.223215289
step: 4980 | loss: 5.216159876
step: 4990 | loss: 5.209104463
step: 5000 | loss: 5.202049051
step: 5010 | loss: 5.194993638
step: 5020 | loss: 5.187938225
step: 5030 | loss: 5.180882813
step: 5040 | loss: 5.173827400
step: 5050 | loss: 5.166771988
step: 5060 | loss: 5.159716576
step: 5070 | loss: 5.152661163
step: 5080 | loss: 5.145605751
step: 5090 | loss: 5.138550339
step: 5100 | loss: 5.131494926
step: 5110 | loss: 5.124439514
step: 5120 | loss: 5.117384102
step: 5130 | loss: 5.110328690
step: 5140 | loss: 5.103273278
step: 5150 | loss: 5.096217865
step: 5160 | loss: 5.089162453
step: 5170 | loss: 5.082107041
step: 5180 | loss: 5.075051629
step: 5190 | loss: 5.067996217
step: 5200 | loss: 5.060940806
step: 5210 | loss: 5.053885394
step: 5220 | loss: 5.046829982
step: 5230 | loss: 5.039774570
step: 5240 | loss: 5.032719158
step: 5250 | loss: 5.025663747
step: 5260 | loss: 5.018608335
step: 5270 | loss: 5.011552923
step: 5280 | loss: 5.004497512
step: 5290 | loss: 4.997442100
step: 5300 | loss: 4.990386688
step: 5310 | loss: 4.983331277
step: 5320 | loss: 4.976275866
step: 5330 | loss: 4.969220454
step: 5340 | loss: 4.962165043
step: 5350 | loss: 4.955109631
step: 5360 | loss: 4.948054220
step: 5370 | loss: 4.940998809
step: 5380 | loss: 4.933943398
step: 5390 | loss: 4.926887986
step: 5400 | loss: 4.919832575
step: 5410 | loss: 4.912777164
step: 5420 | loss: 4.905721753
step: 5430 | loss: 4.898666342
step: 5440 | loss: 4.891610931
step: 5450 | loss: 4.884555520
step: 5460 | loss: 4.877500109
step: 5470 | loss: 4.870444698
step: 5480 | loss: 4.863389288
step: 5490 | loss: 4.856333877
step: 5500 | loss: 4.849278466
step: 5510 | loss: 4.842223055
step: 5520 | loss: 4.835167645
step: 5530 | loss: 4.828112234
step: 5540 | loss: 4.821056824
step: 5550 | loss: 4.814001413
step: 5560 | loss: 4.806946003
step: 5570 | loss: 4.799890592
step: 5580 | loss: 4.792835182
step: 5590 | loss: 4.785779771
step: 5600 | loss: 4.778724361
step: 5610 | loss: 4.771668951
step: 5620 | loss: 4.764613541
step: 5630 | loss: 4.757558130
step: 5640 | loss: 4.750502720
step: 5650 | loss: 4.743447310
step: 5660 | loss: 4.736391900
step: 5670 | loss: 4.729336490
step: 5680 | loss: 4.722281080
step: 5690 | loss: 4.715225670
step: 5700 | loss: 4.708170260
step: 5710 | loss: 4.701114851
step: 5720 | loss: 4.694059441
step: 5730 | loss: 4.687004031
step: 5740 | loss: 4.679948621
step: 5750 | loss: 4.672893212
step: 5760 | loss: 4.665837802
step: 5770 | loss: 4.658782393
step: 5780 | loss: 4.651726983
step: 5790 | loss: 4.644671574
step: 5800 | loss: 4.637616164
step: 5810 | loss: 4.630560755
step: 5820 | loss: 4.623505346
step: 5830 | loss: 4.616449937
step: 5840 | loss: 4.609394527
step: 5850 | loss: 4.602339118
step: 5860 | loss: 4.595283709
step: 5870 | loss: 4.588228300
step: 5880 | loss: 4.581172891
step: 5890 | loss: 4.574117482
step: 5900 | loss: 4.567062073
step: 5910 | loss: 4.560006664
step: 5920 | loss: 4.552951256
step: 5930 | loss: 4.545895847
step: 5940 | loss: 4.538840438
step: 5950 | loss: 4.531785029
step: 5960 | loss: 4.524729621
step: 5970 | loss: 4.517674212
step: 5980 | loss: 4.510618804
step: 5990 | loss: 4.503563395
step: 6000 | loss: 4.496507987
step: 6010 | loss: 4.489452579
step: 6020 | loss: 4.482397170
step: 6030 | loss: 4.475341762
step: 6040 | loss: 4.468286354
step: 6050 | loss: 4.461230946
step: 6060 | loss: 4.454175538
step: 6070 | loss: 4.447120130
step: 6080 | loss: 4.440064722
step: 6090 | loss: 4.433009314
step: 6100 | loss: 4.425953906
step: 6110 | loss: 4.418898498
step: 6120 | loss: 4.411843091
step: 6130 | loss: 4.404787683
step: 6140 | loss: 4.397732275
step: 6150 | loss: 4.390676868
step: 6160 | loss: 4.383621460
step: 6170 | loss: 4.376566053
step: 6180 | loss: 4.369510645
step: 6190 | loss: 4.362455238
step: 6200 | loss: 4.355399831
step: 6210 | loss: 4.348344423
step: 6220 | loss: 4.341289016
step: 6230 | loss: 4.334233609
step: 6240 | loss: 4.327178202
step: 6250 | loss: 4.320122795
step: 6260 | loss: 4.313067388
step: 6270 | loss: 4.306011981
step: 6280 | loss: 4.298956574
step: 6290 | loss: 4.291901168
step: 6300 | loss: 4.284845761
step: 6310 | loss: 4.277790354
step: 6320 | loss: 4.270734948
step: 6330 | loss: 4.263679541
step: 6340 | loss: 4.256624135
step: 6350 | loss: 4.249568728
step: 6360 | loss: 4.242513322
step: 6370 | loss: 4.235457916
step: 6380 | loss: 4.228402510
step: 6390 | loss: 4.221347104
step: 6400 | loss: 4.214291697
step: 6410 | loss: 4.207236291
step: 6420 | loss: 4.200180886
step: 6430 | loss: 4.193125480
step: 6440 | loss: 4.186070074
step: 6450 | loss: 4.179014668
step: 6460 | loss: 4.171959262
step: 6470 | loss: 4.164903857
step: 6480 | loss: 4.157848451
step: 6490 | loss: 4.150793046
step: 6500 | loss: 4.143737640
step: 6510 | loss: 4.136682235
step: 6520 | loss: 4.129626830
step: 6530 | loss: 4.122571424
step: 6540 | loss: 4.115516019
step: 6550 | loss: 4.108460614
step: 6560 | loss: 4.101405209
step: 6570 | loss: 4.094349804
step: 6580 | loss: 4.087294399
step: 6590 | loss: 4.080238995
step: 6600 | loss: 4.073183590
step: 6610 | loss: 4.066128185
step: 6620 | loss: 4.059072781
step: 6630 | loss: 4.052017376
step: 6640 | loss: 4.044961972
step: 6650 | loss: 4.037906567
step: 6660 | loss: 4.030851163
step: 6670 | loss: 4.023795759
step: 6680 | loss: 4.016740355
step: 6690 | loss: 4.009684951
step: 6700 | loss: 4.002629547
step: 6710 | loss: 3.995574143
step: 6720 | loss: 3.988518739
step: 6730 | loss: 3.981463335
step: 6740 | loss: 3.974407931
step: 6750 | loss: 3.967352528
step: 6760 | loss: 3.960297124
step: 6770 | loss: 3.953241721
step: 6780 | loss: 3.946186317
step: 6790 | loss: 3.939130914
step: 6800 | loss: 3.932075511
step: 6810 | loss: 3.925020108
step: 6820 | loss: 3.917964704
step: 6830 | loss: 3.910909301
step: 6840 | loss: 3.903853899
step: 6850 | loss: 3.896798496
step: 6860 | loss: 3.889743093
step: 6870 | loss: 3.882687690
step: 6880 | loss: 3.875632288
step: 6890 | loss: 3.868576885
step: 6900 | loss: 3.861521483
step: 6910 | loss: 3.854466080
step: 6920 | loss: 3.847410678
step: 6930 | loss: 3.840355276
step: 6940 | loss: 3.833299874
step: 6950 | loss: 3.826244472
step: 6960 | loss: 3.819189070
step: 6970 | loss: 3.812133668
step: 6980 | loss: 3.805078266
step: 6990 | loss: 3.798022865
step: 7000 | loss: 3.790967463
step: 7010 | loss: 3.783912062
step: 7020 | loss: 3.776856660
step: 7030 | loss: 3.769801259
step: 7040 | loss: 3.762745858
step: 7050 | loss: 3.755690456
step: 7060 | loss: 3.748635055
step: 7070 | loss: 3.741579654
step: 7080 | loss: 3.734524254
step: 7090 | loss: 3.727468853
step: 7100 | loss: 3.720413452
step: 7110 | loss: 3.713358052
step: 7120 | loss: 3.706302651
step: 7130 | loss: 3.699247251
step: 7140 | loss: 3.692191850
step: 7150 | loss: 3.685136450
step: 7160 | loss: 3.678081050
step: 7170 | loss: 3.671025650
step: 7180 | loss: 3.663970250
step: 7190 | loss: 3.656914850
step: 7200 | loss: 3.649859451
step: 7210 | loss: 3.642804051
step: 7220 | loss: 3.635748651
step: 7230 | loss: 3.628693252
step: 7240 | loss: 3.621637853
step: 7250 | loss: 3.614582453
step: 7260 | loss: 3.607527054
step: 7270 | loss: 3.600471655
step: 7280 | loss: 3.593416256
step: 7290 | loss: 3.586360857
step: 7300 | loss: 3.579305459
step: 7310 | loss: 3.572250060
step: 7320 | loss: 3.565194662
step: 7330 | loss: 3.558139263
step: 7340 | loss: 3.551083865
step: 7350 | loss: 3.544028467
step: 7360 | loss: 3.536973069
step: 7370 | loss: 3.529917671
step: 7380 | loss: 3.522862273
step: 7390 | loss: 3.515806875
step: 7400 | loss: 3.508751477
step: 7410 | loss: 3.501696080
step: 7420 | loss: 3.494640682
step: 7430 | loss: 3.487585285
step: 7440 | loss: 3.480529888
step: 7450 | loss: 3.473474491
step: 7460 | loss: 3.466419094
step: 7470 | loss: 3.459363697
step: 7480 | loss: 3.452308300
step: 7490 | loss: 3.445252904
step: 7500 | loss: 3.438197507
step: 7510 | loss: 3.431142111
step: 7520 | loss: 3.424086715
step: 7530 | loss: 3.417031318
step: 7540 | loss: 3.409975922
step: 7550 | loss: 3.402920526
step: 7560 | loss: 3.395865131
step: 7570 | loss: 3.388809735
step: 7580 | loss: 3.381754339
step: 7590 | loss: 3.374698944
step: 7600 | loss: 3.367643549
step: 7610 | loss: 3.360588154
step: 7620 | loss: 3.353532758
step: 7630 | loss: 3.346477364
step: 7640 | loss: 3.339421969
step: 7650 | loss: 3.332366574
step: 7660 | loss: 3.325311180
step: 7670 | loss: 3.318255785
step: 7680 | loss: 3.311200391
step: 7690 | loss: 3.304144997
step: 7700 | loss: 3.297089603
step: 7710 | loss: 3.290034209
step: 7720 | loss: 3.282978815
step: 7730 | loss: 3.275923422
step: 7740 | loss: 3.268868028
step: 7750 | loss: 3.261812635
step: 7760 | loss: 3.254757242
step: 7770 | loss: 3.247701849
step: 7780 | loss: 3.240646456
step: 7790 | loss: 3.233591063
step: 7800 | loss: 3.226535670
step: 7810 | loss: 3.219480278
step: 7820 | loss: 3.212424885
step: 7830 | loss: 3.205369493
step: 7840 | loss: 3.198314101
step: 7850 | loss: 3.191258709
step: 7860 | loss: 3.184203317
step: 7870 | loss: 3.177147926
step: 7880 | loss: 3.170092534
step: 7890 | loss: 3.163037143
step: 7900 | loss: 3.155981752
step: 7910 | loss: 3.148926361
step: 7920 | loss: 3.141870970
step: 7930 | loss: 3.134815579
step: 7940 | loss: 3.127760189
step: 7950 | loss: 3.120704798
step: 7960 | loss: 3.113649408
step: 7970 | loss: 3.106594018
step: 7980 | loss: 3.099538628
step: 7990 | loss: 3.092483238
step: 8000 | loss: 3.085427849
step: 8010 | loss: 3.078372459
step: 8020 | loss: 3.071317070
step: 8030 | loss: 3.064261681
step: 8040 | loss: 3.057206292
step: 8050 | loss: 3.050150903
step: 8060 | loss: 3.043095515
step: 8070 | loss: 3.036040126
step: 8080 | loss: 3.028984738
step: 8090 | loss: 3.021929350
step: 8100 | loss: 3.014873962
step: 8110 | loss: 3.007818574
step: 8120 | loss: 3.000763187
step: 8130 | loss: 2.993707799
step: 8140 | loss: 2.986652412
step: 8150 | loss: 2.979597025
step: 8160 | loss: 2.972541638
step: 8170 | loss: 2.965486252
step: 8180 | loss: 2.958430865
step: 8190 | loss: 2.951375479
step: 8200 | loss: 2.944320093
step: 8210 | loss: 2.937264707
step: 8220 | loss: 2.930209321
step: 8230 | loss: 2.923153935
step: 8240 | loss: 2.916098550
step: 8250 | loss: 2.909043165
step: 8260 | loss: 2.901987780
step: 8270 | loss: 2.894932395
step: 8280 | loss: 2.887877011
step: 8290 | loss: 2.880821626
step: 8300 | loss: 2.873766242
step: 8310 | loss: 2.866710858
step: 8320 | loss: 2.859655474
step: 8330 | loss: 2.852600091
step: 8340 | loss: 2.845544707
step: 8350 | loss: 2.838489324
step: 8360 | loss: 2.831433941
step: 8370 | loss: 2.824378559
step: 8380 | loss: 2.817323176
step: 8390 | loss: 2.810267794
step: 8400 | loss: 2.803212412
step: 8410 | loss: 2.796157030
step: 8420 | loss: 2.789101648
step: 8430 | loss: 2.782046267
step: 8440 | loss: 2.774990885
step: 8450 | loss: 2.767935504
step: 8460 | loss: 2.760880124
step: 8470 | loss: 2.753824743
step: 8480 | loss: 2.746769363
step: 8490 | loss: 2.739713983
step: 8500 | loss: 2.732658603
step: 8510 | loss: 2.725603223
step: 8520 | loss: 2.718547844
step: 8530 | loss: 2.711492465
step: 8540 | loss: 2.704437086
step: 8550 | loss: 2.697381707
step: 8560 | loss: 2.690326329
step: 8570 | loss: 2.683270951
step: 8580 | loss: 2.676215573
step: 8590 | loss: 2.669160195
step: 8600 | loss: 2.662104818
step: 8610 | loss: 2.655049441
step: 8620 | loss: 2.647994064
step: 8630 | loss: 2.640938687
step: 8640 | loss: 2.633883311
step: 8650 | loss: 2.626827935
step: 8660 | loss: 2.619772559
step: 8670 | loss: 2.612717183
step: 8680 | loss: 2.605661808
step: 8690 | loss: 2.598606433
step: 8700 | loss: 2.591551058
step: 8710 | loss: 2.584495683
step: 8720 | loss: 2.577440309
step: 8730 | loss: 2.570384935
step: 8740 | loss: 2.563329562
step: 8750 | loss: 2.556274188
step: 8760 | loss: 2.549218815
step: 8770 | loss: 2.542163442
step: 8780 | loss: 2.535108070
step: 8790 | loss: 2.528052698
step: 8800 | loss: 2.520997326
step: 8810 | loss: 2.513941954
step: 8820 | loss: 2.506886583
step: 8830 | loss: 2.499831212
step: 8840 | loss: 2.492775841
step: 8850 | loss: 2.485720470
step: 8860 | loss: 2.478665100
step: 8870 | loss: 2.471609731
step: 8880 | loss: 2.464554361
step: 8890 | loss: 2.457498992
step: 8900 | loss: 2.450443623
step: 8910 | loss: 2.443388255
step: 8920 | loss: 2.436332887
step: 8930 | loss: 2.429277519
step: 8940 | loss: 2.422222151
step: 8950 | loss: 2.415166784
step: 8960 | loss: 2.408111417
step: 8970 | loss: 2.401056051
step: 8980 | loss: 2.394000685
step: 8990 | loss: 2.386945319
step: 9000 | loss: 2.379889953
step: 9010 | loss: 2.372834588
step: 9020 | loss: 2.365779223
step: 9030 | loss: 2.358723859
step: 9040 | loss: 2.351668495
step: 9050 | loss: 2.344613131
step: 9060 | loss: 2.337557768
step: 9070 | loss: 2.330502405
step: 9080 | loss: 2.323447043
step: 9090 | loss: 2.316391681
step: 9100 | loss: 2.309336319
step: 9110 | loss: 2.302280958
step: 9120 | loss: 2.295225597
step: 9130 | loss: 2.288170236
step: 9140 | loss: 2.281114876
step: 9150 | loss: 2.274059516
step: 9160 | loss: 2.267004157
step: 9170 | loss: 2.259948798
step: 9180 | loss: 2.252893439
step: 9190 | loss: 2.245838081
step: 9200 | loss: 2.238782723
step: 9210 | loss: 2.231727366
step: 9220 | loss: 2.224672009
step: 9230 | loss: 2.217616653
step: 9240 | loss: 2.210561297
step: 9250 | loss: 2.203505941
step: 9260 | loss: 2.196450586
step: 9270 | loss: 2.189395232
step: 9280 | loss: 2.182339878
step: 9290 | loss: 2.175284524
step: 9300 | loss: 2.168229171
step: 9310 | loss: 2.161173818
step: 9320 | loss: 2.154118466
step: 9330 | loss: 2.147063114
step: 9340 | loss: 2.140007762
step: 9350 | loss: 2.132952412
step: 9360 | loss: 2.125897061
step: 9370 | loss: 2.118841711
step: 9380 | loss: 2.111786362
step: 9390 | loss: 2.104731013
step: 9400 | loss: 2.097675665
step: 9410 | loss: 2.090620317
step: 9420 | loss: 2.083564970
step: 9430 | loss: 2.076509623
step: 9440 | loss: 2.069454277
step: 9450 | loss: 2.062398931
step: 9460 | loss: 2.055343586
step: 9470 | loss: 2.048288241
step: 9480 | loss: 2.041232897
step: 9490 | loss: 2.034177554
step: 9500 | loss: 2.027122211
step: 9510 | loss: 2.020066869
step: 9520 | loss: 2.013011527
step: 9530 | loss: 2.005956186
step: 9540 | loss: 1.998900845
step: 9550 | loss: 1.991845505
step: 9560 | loss: 1.984790166
step: 9570 | loss: 1.977734827
step: 9580 | loss: 1.970679489
step: 9590 | loss: 1.963624151
step: 9600 | loss: 1.956568814
step: 9610 | loss: 1.949513478
step: 9620 | loss: 1.942458143
step: 9630 | loss: 1.935402808
step: 9640 | loss: 1.928347473
step: 9650 | loss: 1.921292140
step: 9660 | loss: 1.914236807
step: 9670 | loss: 1.907181475
step: 9680 | loss: 1.900126143
step: 9690 | loss: 1.893070812
step: 9700 | loss: 1.886015482
step: 9710 | loss: 1.878960153
step: 9720 | loss: 1.871904824
step: 9730 | loss: 1.864849496
step: 9740 | loss: 1.857794169
step: 9750 | loss: 1.850738842
step: 9760 | loss: 1.843683516
step: 9770 | loss: 1.836628191
step: 9780 | loss: 1.829572867
step: 9790 | loss: 1.822517544
step: 9800 | loss: 1.815462221
step: 9810 | loss: 1.808406899
step: 9820 | loss: 1.801351578
step: 9830 | loss: 1.794296258
step: 9840 | loss: 1.787240938
step: 9850 | loss: 1.780185620
step: 9860 | loss: 1.773130302
step: 9870 | loss: 1.766074985
step: 9880 | loss: 1.759019669
step: 9890 | loss: 1.751964354
step: 9900 | loss: 1.744909040
step: 9910 | loss: 1.737853727
step: 9920 | loss: 1.730798414
step: 9930 | loss: 1.723743103
step: 9940 | loss: 1.716687792
step: 9950 | loss: 1.709632483
step: 9960 | loss: 1.702577174
step: 9970 | loss: 1.695521866
step: 9980 | loss: 1.688466560
step: 9990 | loss: 1.681411254
step: 10000 | loss: 1.674355949
step: 10010 | loss: 1.667300645
step: 10020 | loss: 1.660245343
step: 10030 | loss: 1.653190041
step: 10040 | loss: 1.646134741
step: 10050 | loss: 1.639079441
step: 10060 | loss: 1.632024143
step: 10070 | loss: 1.624968846
step: 10080 | loss: 1.617913549
step: 10090 | loss: 1.610858254
step: 10100 | loss: 1.603802960
step: 10110 | loss: 1.596747668
step: 10120 | loss: 1.589692376
step: 10130 | loss: 1.582637086
step: 10140 | loss: 1.575581797
step: 10150 | loss: 1.568526509
step: 10160 | loss: 1.561471222
step: 10170 | loss: 1.554415937
step: 10180 | loss: 1.547360652
step: 10190 | loss: 1.540305369
step: 10200 | loss: 1.533250088
step: 10210 | loss: 1.526194808
step: 10220 | loss: 1.519139529
step: 10230 | loss: 1.512084251
step: 10240 | loss: 1.505028975
step: 10250 | loss: 1.497973700
step: 10260 | loss: 1.490918427
step: 10270 | loss: 1.483863155
step: 10280 | loss: 1.476807885
step: 10290 | loss: 1.469752616
step: 10300 | loss: 1.462697348
step: 10310 | loss: 1.455642083
step: 10320 | loss: 1.448586818
step: 10330 | loss: 1.441531555
step: 10340 | loss: 1.434476294
step: 10350 | loss: 1.427421035
step: 10360 | loss: 1.420365777
step: 10370 | loss: 1.413310521
step: 10380 | loss: 1.406255266
step: 10390 | loss: 1.399200013
step: 10400 | loss: 1.392144762
step: 10410 | loss: 1.385089513
step: 10420 | loss: 1.378034265
step: 10430 | loss: 1.370979020
step: 10440 | loss: 1.363923776
step: 10450 | loss: 1.356868534
step: 10460 | loss: 1.349813294
step: 10470 | loss: 1.342758056
step: 10480 | loss: 1.335702820
step: 10490 | loss: 1.328647586
step: 10500 | loss: 1.321592354
step: 10510 | loss: 1.314537124
step: 10520 | loss: 1.307481896
step: 10530 | loss: 1.300426670
step: 10540 | loss: 1.293371446
step: 10550 | loss: 1.286316225
step: 10560 | loss: 1.279261006
step: 10570 | loss: 1.272205789
step: 10580 | loss: 1.265150575
step: 10590 | loss: 1.258095362
step: 10600 | loss: 1.251040153
step: 10610 | loss: 1.243984945
step: 10620 | loss: 1.236929741
step: 10630 | loss: 1.229874538
step: 10640 | loss: 1.222819339
step: 10650 | loss: 1.215764142
step: 10660 | loss: 1.208708947
step: 10670 | loss: 1.201653755
step: 10680 | loss: 1.194598567
step: 10690 | loss: 1.187543380
step: 10700 | loss: 1.180488197
step: 10710 | loss: 1.173433017
step: 10720 | loss: 1.166377839
step: 10730 | loss: 1.159322665
step: 10740 | loss: 1.152267493
step: 10750 | loss: 1.145212325
step: 10760 | loss: 1.138157160
step: 10770 | loss: 1.131101998
step: 10780 | loss: 1.124046840
step: 10790 | loss: 1.116991685
step: 10800 | loss: 1.109936533
step: 10810 | loss: 1.102881385
step: 10820 | loss: 1.095826240
step: 10830 | loss: 1.088771099
step: 10840 | loss: 1.081715962
step: 10850 | loss: 1.074660829
step: 10860 | loss: 1.067605699
step: 10870 | loss: 1.060550573
step: 10880 | loss: 1.053495451
step: 10890 | loss: 1.046440334
step: 10900 | loss: 1.039385220
step: 10910 | loss: 1.032330111
step: 10920 | loss: 1.025275006
step: 10930 | loss: 1.018219906
step: 10940 | loss: 1.011164810
step: 10950 | loss: 1.004109719
step: 10960 | loss: 0.997054632
step: 10970 | loss: 0.989999551
step: 10980 | loss: 0.982944474
step: 10990 | loss: 0.975889402
step: 11000 | loss: 0.968834335
step: 11010 | loss: 0.961779274
step: 11020 | loss: 0.954724218
step: 11030 | loss: 0.947669168
step: 11040 | loss: 0.940614123
step: 11050 | loss: 0.933559084
step: 11060 | loss: 0.926504050
step: 11070 | loss: 0.919449023
step: 11080 | loss: 0.912394002
step: 11090 | loss: 0.905338987
step: 11100 | loss: 0.898283979
step: 11110 | loss: 0.891228977
step: 11120 | loss: 0.884173982
step: 11130 | loss: 0.877118994
step: 11140 | loss: 0.870064013
step: 11150 | loss: 0.863009040
step: 11160 | loss: 0.855954073
step: 11170 | loss: 0.848899115
step: 11180 | loss: 0.841844164
step: 11190 | loss: 0.834789221
step: 11200 | loss: 0.827734287
step: 11210 | loss: 0.820679360
step: 11220 | loss: 0.813624443
step: 11230 | loss: 0.806569534
step: 11240 | loss: 0.799514635
step: 11250 | loss: 0.792459744
step: 11260 | loss: 0.785404864
step: 11270 | loss: 0.778349993
step: 11280 | loss: 0.771295133
step: 11290 | loss: 0.764240282
step: 11300 | loss: 0.757185443
step: 11310 | loss: 0.750130615
step: 11320 | loss: 0.743075797
step: 11330 | loss: 0.736020992
step: 11340 | loss: 0.728966199
step: 11350 | loss: 0.721911417
step: 11360 | loss: 0.714856649
step: 11370 | loss: 0.707801894
step: 11380 | loss: 0.700747152
step: 11390 | loss: 0.693692424
step: 11400 | loss: 0.686637710
step: 11410 | loss: 0.679583011
step: 11420 | loss: 0.672528328
step: 11430 | loss: 0.665473660
step: 11440 | loss: 0.658419008
step: 11450 | loss: 0.651364373
step: 11460 | loss: 0.644309755
step: 11470 | loss: 0.637255155
step: 11480 | loss: 0.630200574
step: 11490 | loss: 0.623146012
step: 11500 | loss: 0.616091469
step: 11510 | loss: 0.609036947
step: 11520 | loss: 0.601982446
step: 11530 | loss: 0.594927967
step: 11540 | loss: 0.587873510
step: 11550 | loss: 0.580819078
step: 11560 | loss: 0.573764669
step: 11570 | loss: 0.566710286
step: 11580 | loss: 0.559655929
step: 11590 | loss: 0.552601599
step: 11600 | loss: 0.545547298
step: 11610 | loss: 0.538493026
step: 11620 | loss: 0.531438784
step: 11630 | loss: 0.524384575
step: 11640 | loss: 0.517330398
step: 11650 | loss: 0.510276256
step: 11660 | loss: 0.503222150
step: 11670 | loss: 0.496168082
step: 11680 | loss: 0.489114052
step: 11690 | loss: 0.482060063
step: 11700 | loss: 0.475006117
step: 11710 | loss: 0.467952216
step: 11720 | loss: 0.460898361
step: 11730 | loss: 0.453844554
step: 11740 | loss: 0.446790799
step: 11750 | loss: 0.439737097
step: 11760 | loss: 0.432683452
step: 11770 | loss: 0.425629865
step: 11780 | loss: 0.418576340
step: 11790 | loss: 0.411522880
step: 11800 | loss: 0.404469489
step: 11810 | loss: 0.397416170
step: 11820 | loss: 0.390362926
step: 11830 | loss: 0.383309763
step: 11840 | loss: 0.376256685
step: 11850 | loss: 0.369203696
step: 11860 | loss: 0.362150801
step: 11870 | loss: 0.355098007
step: 11880 | loss: 0.348045320
step: 11890 | loss: 0.340992745
step: 11900 | loss: 0.333940290
step: 11910 | loss: 0.326887964
step: 11920 | loss: 0.319835773
step: 11930 | loss: 0.312783728
step: 11940 | loss: 0.305731839
step: 11950 | loss: 0.298680116
step: 11960 | loss: 0.291628572
step: 11970 | loss: 0.284577219
step: 11980 | loss: 0.277526073
step: 11990 | loss: 0.270475149
step: 12000 | loss: 0.263424465
step: 12010 | loss: 0.256374040
step: 12020 | loss: 0.249323898
step: 12030 | loss: 0.242274062
step: 12040 | loss: 0.235224559
step: 12050 | loss: 0.228175421
step: 12060 | loss: 0.221126681
step: 12070 | loss: 0.214078379
step: 12080 | loss: 0.207030560
step: 12090 | loss: 0.199983274
step: 12100 | loss: 0.192936579
step: 12110 | loss: 0.185890541
step: 12120 | loss: 0.178845238
step: 12130 | loss: 0.171800758
step: 12140 | loss: 0.164757207
step: 12150 | loss: 0.157714708
step: 12160 | loss: 0.150673405
step: 12170 | loss: 0.143633473
step: 12180 | loss: 0.136595120
step: 12190 | loss: 0.129558600
step: 12200 | loss: 0.122524223
step: 12210 | loss: 0.115492372
step: 12220 | loss: 0.108463529
step: 12230 | loss: 0.101438304
step: 12240 | loss: 0.094417482
step: 12250 | loss: 0.087402095
step: 12260 | loss: 0.080393514
step: 12270 | loss: 0.073393616
step: 12280 | loss: 0.066405021
step: 12290 | loss: 0.059431506
step: 12300 | loss: 0.052478699
step: 12310 | loss: 0.045555347
step: 12320 | loss: 0.038675722
step: 12330 | loss: 0.031864541
step: 12340 | loss: 0.025167632
step: 12350 | loss: 0.018675972
step: 12360 | loss: 0.012571229
step: 12370 | loss: 0.007085659
step: 12380 | loss: 0.001999260
- final loss: 0.000993
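(Note: the step/loss lines above and below are the stdout of the package's example binaries under `opam reinstall --with-test`, each running an iterative optimiser until the training loss converges, here to 0.000993. The sketch below is a minimal, self-contained OCaml illustration of such a loop, using plain gradient descent on a one-dimensional quadratic; it only mimics the log format, since the actual owl-opt API used by examples/opt is not visible in this log, and all names in it are made up for illustration.)

(* Illustrative sketch only, not the owl-opt example: gradient descent
   on f(theta) = theta^2 / 2, printing progress every 10 steps in the
   same "step: N | loss: ..." format as the test output in this log. *)
let () =
  let lr = 0.01 in                 (* assumed learning rate *)
  let theta = ref 5.0 in           (* assumed starting point *)
  for step = 0 to 1000 do
    let loss = 0.5 *. !theta *. !theta in
    if step mod 10 = 0 then
      Printf.printf "step: %i | loss: %.9f\n" step loss;
    (* gradient of f at theta is theta itself *)
    theta := !theta -. (lr *. !theta)
  done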
- (cd _build/default/examples/opt && ./single.exe)
-
step: 0 | loss: 3.415466780
step: 10 | loss: 3.412111692
step: 20 | loss: 3.408388767
step: 30 | loss: 3.404671573
step: 40 | loss: 3.400960398
step: 50 | loss: 3.397255320
step: 60 | loss: 3.393556274
step: 70 | loss: 3.389863117
step: 80 | loss: 3.386175667
step: 90 | loss: 3.382493724
step: 100 | loss: 3.378817082
step: 110 | loss: 3.375145532
step: 120 | loss: 3.371478867
step: 130 | loss: 3.367816882
step: 140 | loss: 3.364159377
step: 150 | loss: 3.360506155
step: 160 | loss: 3.356857026
step: 170 | loss: 3.353211806
step: 180 | loss: 3.349570320
step: 190 | loss: 3.345932398
step: 200 | loss: 3.342297881
step: 210 | loss: 3.338666619
step: 220 | loss: 3.335038468
step: 230 | loss: 3.331413297
step: 240 | loss: 3.327790982
step: 250 | loss: 3.324171410
step: 260 | loss: 3.320554476
step: 270 | loss: 3.316940084
step: 280 | loss: 3.313328147
step: 290 | loss: 3.309718586
step: 300 | loss: 3.306111330
step: 310 | loss: 3.302506313
step: 320 | loss: 3.298903479
step: 330 | loss: 3.295302777
step: 340 | loss: 3.291704161
step: 350 | loss: 3.288107589
step: 360 | loss: 3.284513028
step: 370 | loss: 3.280920445
step: 380 | loss: 3.277329812
step: 390 | loss: 3.273741105
step: 400 | loss: 3.270154303
step: 410 | loss: 3.266569388
step: 420 | loss: 3.262986344
step: 430 | loss: 3.259405155
step: 440 | loss: 3.255825810
step: 450 | loss: 3.252248299
step: 460 | loss: 3.248672612
step: 470 | loss: 3.245098740
step: 480 | loss: 3.241526678
step: 490 | loss: 3.237956418
step: 500 | loss: 3.234387955
step: 510 | loss: 3.230821284
step: 520 | loss: 3.227256402
step: 530 | loss: 3.223693304
step: 540 | loss: 3.220131986
step: 550 | loss: 3.216572447
step: 560 | loss: 3.213014683
step: 570 | loss: 3.209458691
step: 580 | loss: 3.205904469
step: 590 | loss: 3.202352015
step: 600 | loss: 3.198801326
step: 610 | loss: 3.195252401
step: 620 | loss: 3.191705238
step: 630 | loss: 3.188159835
step: 640 | loss: 3.184616189
step: 650 | loss: 3.181074300
step: 660 | loss: 3.177534165
step: 670 | loss: 3.173995783
step: 680 | loss: 3.170459151
step: 690 | loss: 3.166924269
step: 700 | loss: 3.163391134
step: 710 | loss: 3.159859745
step: 720 | loss: 3.156330100
step: 730 | loss: 3.152802198
step: 740 | loss: 3.149276036
step: 750 | loss: 3.145751614
step: 760 | loss: 3.142228928
step: 770 | loss: 3.138707978
step: 780 | loss: 3.135188762
step: 790 | loss: 3.131671278
step: 800 | loss: 3.128155525
step: 810 | loss: 3.124641500
step: 820 | loss: 3.121129202
step: 830 | loss: 3.117618629
step: 840 | loss: 3.114109779
step: 850 | loss: 3.110602651
step: 860 | loss: 3.107097242
step: 870 | loss: 3.103593552
step: 880 | loss: 3.100091577
step: 890 | loss: 3.096591317
step: 900 | loss: 3.093092769
step: 910 | loss: 3.089595932
step: 920 | loss: 3.086100803
step: 930 | loss: 3.082607382
step: 940 | loss: 3.079115665
step: 950 | loss: 3.075625651
step: 960 | loss: 3.072137338
step: 970 | loss: 3.068650724
step: 980 | loss: 3.065165808
step: 990 | loss: 3.061682587
step: 1000 | loss: 3.058201059
step: 1010 | loss: 3.054721222
step: 1020 | loss: 3.051243074
step: 1030 | loss: 3.047766614
step: 1040 | loss: 3.044291839
step: 1050 | loss: 3.040818747
step: 1060 | loss: 3.037347337
step: 1070 | loss: 3.033877605
step: 1080 | loss: 3.030409550
step: 1090 | loss: 3.026943170
step: 1100 | loss: 3.023478462
step: 1110 | loss: 3.020015425
step: 1120 | loss: 3.016554057
step: 1130 | loss: 3.013094354
step: 1140 | loss: 3.009636316
step: 1150 | loss: 3.006179939
step: 1160 | loss: 3.002725222
step: 1170 | loss: 2.999272162
step: 1180 | loss: 2.995820756
step: 1190 | loss: 2.992371004
step: 1200 | loss: 2.988922902
step: 1210 | loss: 2.985476448
step: 1220 | loss: 2.982031640
step: 1230 | loss: 2.978588475
step: 1240 | loss: 2.975146950
step: 1250 | loss: 2.971707065
step: 1260 | loss: 2.968268815
step: 1270 | loss: 2.964832199
step: 1280 | loss: 2.961397214
step: 1290 | loss: 2.957963858
step: 1300 | loss: 2.954532128
step: 1310 | loss: 2.951102022
step: 1320 | loss: 2.947673537
step: 1330 | loss: 2.944246670
step: 1340 | loss: 2.940821419
step: 1350 | loss: 2.937397781
step: 1360 | loss: 2.933975754
step: 1370 | loss: 2.930555335
step: 1380 | loss: 2.927136522
step: 1390 | loss: 2.923719310
step: 1400 | loss: 2.920303699
step: 1410 | loss: 2.916889684
step: 1420 | loss: 2.913477264
step: 1430 | loss: 2.910066436
step: 1440 | loss: 2.906657196
step: 1450 | loss: 2.903249542
step: 1460 | loss: 2.899843470
step: 1470 | loss: 2.896438979
step: 1480 | loss: 2.893036065
step: 1490 | loss: 2.889634725
step: 1500 | loss: 2.886234956
step: 1510 | loss: 2.882836755
step: 1520 | loss: 2.879440119
step: 1530 | loss: 2.876045046
step: 1540 | loss: 2.872651531
step: 1550 | loss: 2.869259573
step: 1560 | loss: 2.865869167
step: 1570 | loss: 2.862480311
step: 1580 | loss: 2.859093001
step: 1590 | loss: 2.855707234
step: 1600 | loss: 2.852323008
step: 1610 | loss: 2.848940318
step: 1620 | loss: 2.845559162
step: 1630 | loss: 2.842179536
step: 1640 | loss: 2.838801436
step: 1650 | loss: 2.835424860
step: 1660 | loss: 2.832049805
step: 1670 | loss: 2.828676265
step: 1680 | loss: 2.825304239
step: 1690 | loss: 2.821933723
step: 1700 | loss: 2.818564713
step: 1710 | loss: 2.815197206
step: 1720 | loss: 2.811831197
step: 1730 | loss: 2.808466685
step: 1740 | loss: 2.805103664
step: 1750 | loss: 2.801742132
step: 1760 | loss: 2.798382084
step: 1770 | loss: 2.795023517
step: 1780 | loss: 2.791666428
step: 1790 | loss: 2.788310812
step: 1800 | loss: 2.784956666
step: 1810 | loss: 2.781603985
step: 1820 | loss: 2.778252767
step: 1830 | loss: 2.774903007
step: 1840 | loss: 2.771554702
step: 1850 | loss: 2.768207847
step: 1860 | loss: 2.764862438
step: 1870 | loss: 2.761518472
step: 1880 | loss: 2.758175945
step: 1890 | loss: 2.754834852
step: 1900 | loss: 2.751495189
step: 1910 | loss: 2.748156953
step: 1920 | loss: 2.744820140
step: 1930 | loss: 2.741484744
step: 1940 | loss: 2.738150763
step: 1950 | loss: 2.734818192
step: 1960 | loss: 2.731487026
step: 1970 | loss: 2.728157262
step: 1980 | loss: 2.724828895
step: 1990 | loss: 2.721501920
step: 2000 | loss: 2.718176335
step: 2010 | loss: 2.714852134
step: 2020 | loss: 2.711529312
step: 2030 | loss: 2.708207866
step: 2040 | loss: 2.704887792
step: 2050 | loss: 2.701569084
step: 2060 | loss: 2.698251738
step: 2070 | loss: 2.694935750
step: 2080 | loss: 2.691621115
step: 2090 | loss: 2.688307829
step: 2100 | loss: 2.684995887
step: 2110 | loss: 2.681685285
step: 2120 | loss: 2.678376018
step: 2130 | loss: 2.675068081
step: 2140 | loss: 2.671761470
step: 2150 | loss: 2.668456180
step: 2160 | loss: 2.665152206
step: 2170 | loss: 2.661849544
step: 2180 | loss: 2.658548189
step: 2190 | loss: 2.655248137
step: 2200 | loss: 2.651949381
step: 2210 | loss: 2.648651919
step: 2220 | loss: 2.645355744
step: 2230 | loss: 2.642060853
step: 2240 | loss: 2.638767239
step: 2250 | loss: 2.635474900
step: 2260 | loss: 2.632183828
step: 2270 | loss: 2.628894021
step: 2280 | loss: 2.625605472
step: 2290 | loss: 2.622318176
step: 2300 | loss: 2.619032130
step: 2310 | loss: 2.615747328
step: 2320 | loss: 2.612463765
step: 2330 | loss: 2.609181435
step: 2340 | loss: 2.605900335
step: 2350 | loss: 2.602620459
step: 2360 | loss: 2.599341802
step: 2370 | loss: 2.596064359
step: 2380 | loss: 2.592788125
step: 2390 | loss: 2.589513095
step: 2400 | loss: 2.586239263
step: 2410 | loss: 2.582966626
step: 2420 | loss: 2.579695178
step: 2430 | loss: 2.576424913
step: 2440 | loss: 2.573155827
step: 2450 | loss: 2.569887915
step: 2460 | loss: 2.566621172
step: 2470 | loss: 2.563355592
step: 2480 | loss: 2.560091170
step: 2490 | loss: 2.556827902
step: 2500 | loss: 2.553565782
step: 2510 | loss: 2.550304805
step: 2520 | loss: 2.547044966
step: 2530 | loss: 2.543786261
step: 2540 | loss: 2.540528683
step: 2550 | loss: 2.537272228
step: 2560 | loss: 2.534016891
step: 2570 | loss: 2.530762667
step: 2580 | loss: 2.527509551
step: 2590 | loss: 2.524257538
step: 2600 | loss: 2.521006622
step: 2610 | loss: 2.517756799
step: 2620 | loss: 2.514508064
step: 2630 | loss: 2.511260411
step: 2640 | loss: 2.508013836
step: 2650 | loss: 2.504768333
step: 2660 | loss: 2.501523899
step: 2670 | loss: 2.498280527
step: 2680 | loss: 2.495038213
step: 2690 | loss: 2.491796953
step: 2700 | loss: 2.488556740
step: 2710 | loss: 2.485317570
step: 2720 | loss: 2.482079439
step: 2730 | loss: 2.478842342
step: 2740 | loss: 2.475606273
step: 2750 | loss: 2.472371228
step: 2760 | loss: 2.469137203
step: 2770 | loss: 2.465904192
step: 2780 | loss: 2.462672191
step: 2790 | loss: 2.459441195
step: 2800 | loss: 2.456211199
step: 2810 | loss: 2.452982199
step: 2820 | loss: 2.449754191
step: 2830 | loss: 2.446527169
step: 2840 | loss: 2.443301130
step: 2850 | loss: 2.440076068
step: 2860 | loss: 2.436851980
step: 2870 | loss: 2.433628861
step: 2880 | loss: 2.430406706
step: 2890 | loss: 2.427185511
step: 2900 | loss: 2.423965272
step: 2910 | loss: 2.420745985
step: 2920 | loss: 2.417527645
step: 2930 | loss: 2.414310249
step: 2940 | loss: 2.411093792
step: 2950 | loss: 2.407878269
step: 2960 | loss: 2.404663678
step: 2970 | loss: 2.401450014
step: 2980 | loss: 2.398237273
step: 2990 | loss: 2.395025451
step: 3000 | loss: 2.391814545
step: 3010 | loss: 2.388604550
step: 3020 | loss: 2.385395463
step: 3030 | loss: 2.382187280
step: 3040 | loss: 2.378979998
step: 3050 | loss: 2.375773612
step: 3060 | loss: 2.372568120
step: 3070 | loss: 2.369363518
step: 3080 | loss: 2.366159802
step: 3090 | loss: 2.362956970
step: 3100 | loss: 2.359755017
step: 3110 | loss: 2.356553941
step: 3120 | loss: 2.353353739
step: 3130 | loss: 2.350154407
step: 3140 | loss: 2.346955942
step: 3150 | loss: 2.343758341
step: 3160 | loss: 2.340561601
step: 3170 | loss: 2.337365720
step: 3180 | loss: 2.334170695
step: 3190 | loss: 2.330976522
step: 3200 | loss: 2.327783200
step: 3210 | loss: 2.324590725
step: 3220 | loss: 2.321399095
step: 3230 | loss: 2.318208308
step: 3240 | loss: 2.315018360
step: 3250 | loss: 2.311829251
step: 3260 | loss: 2.308640976
step: 3270 | loss: 2.305453535
step: 3280 | loss: 2.302266925
step: 3290 | loss: 2.299081144
step: 3300 | loss: 2.295896190
step: 3310 | loss: 2.292712061
step: 3320 | loss: 2.289528755
step: 3330 | loss: 2.286346270
step: 3340 | loss: 2.283164605
step: 3350 | loss: 2.279983757
step: 3360 | loss: 2.276803726
step: 3370 | loss: 2.273624509
step: 3380 | loss: 2.270446106
step: 3390 | loss: 2.267268514
step: 3400 | loss: 2.264091733
step: 3410 | loss: 2.260915762
step: 3420 | loss: 2.257740598
step: 3430 | loss: 2.254566241
step: 3440 | loss: 2.251392690
step: 3450 | loss: 2.248219943
step: 3460 | loss: 2.245048001
step: 3470 | loss: 2.241876862
step: 3480 | loss: 2.238706525
step: 3490 | loss: 2.235536990
step: 3500 | loss: 2.232368255
step: 3510 | loss: 2.229200321
step: 3520 | loss: 2.226033187
step: 3530 | loss: 2.222866852
step: 3540 | loss: 2.219701316
step: 3550 | loss: 2.216536579
step: 3560 | loss: 2.213372641
step: 3570 | loss: 2.210209500
step: 3580 | loss: 2.207047158
step: 3590 | loss: 2.203885614
step: 3600 | loss: 2.200724868
step: 3610 | loss: 2.197564920
step: 3620 | loss: 2.194405770
step: 3630 | loss: 2.191247419
step: 3640 | loss: 2.188089866
step: 3650 | loss: 2.184933113
step: 3660 | loss: 2.181777159
step: 3670 | loss: 2.178622005
step: 3680 | loss: 2.175467652
step: 3690 | loss: 2.172314099
step: 3700 | loss: 2.169161348
step: 3710 | loss: 2.166009399
step: 3720 | loss: 2.162858253
step: 3730 | loss: 2.159707911
step: 3740 | loss: 2.156558373
step: 3750 | loss: 2.153409641
step: 3760 | loss: 2.150261715
step: 3770 | loss: 2.147114597
step: 3780 | loss: 2.143968287
step: 3790 | loss: 2.140822786
step: 3800 | loss: 2.137678097
step: 3810 | loss: 2.134534219
step: 3820 | loss: 2.131391154
step: 3830 | loss: 2.128248904
step: 3840 | loss: 2.125107469
step: 3850 | loss: 2.121966852
step: 3860 | loss: 2.118827054
step: 3870 | loss: 2.115688075
step: 3880 | loss: 2.112549918
step: 3890 | loss: 2.109412585
step: 3900 | loss: 2.106276076
step: 3910 | loss: 2.103140393
step: 3920 | loss: 2.100005539
step: 3930 | loss: 2.096871515
step: 3940 | loss: 2.093738323
step: 3950 | loss: 2.090605964
step: 3960 | loss: 2.087474440
step: 3970 | loss: 2.084343754
step: 3980 | loss: 2.081213907
step: 3990 | loss: 2.078084902
step: 4000 | loss: 2.074956739
step: 4010 | loss: 2.071829422
step: 4020 | loss: 2.068702952
step: 4030 | loss: 2.065577332
step: 4040 | loss: 2.062452563
step: 4050 | loss: 2.059328649
step: 4060 | loss: 2.056205590
step: 4070 | loss: 2.053083389
step: 4080 | loss: 2.049962049
step: 4090 | loss: 2.046841572
step: 4100 | loss: 2.043721960
step: 4110 | loss: 2.040603216
step: 4120 | loss: 2.037485342
step: 4130 | loss: 2.034368340
step: 4140 | loss: 2.031252213
step: 4150 | loss: 2.028136963
step: 4160 | loss: 2.025022593
step: 4170 | loss: 2.021909106
step: 4180 | loss: 2.018796503
step: 4190 | loss: 2.015684788
step: 4200 | loss: 2.012573963
step: 4210 | loss: 2.009464031
step: 4220 | loss: 2.006354994
step: 4230 | loss: 2.003246856
step: 4240 | loss: 2.000139618
step: 4250 | loss: 1.997033284
step: 4260 | loss: 1.993927856
step: 4270 | loss: 1.990823337
step: 4280 | loss: 1.987719730
step: 4290 | loss: 1.984617037
step: 4300 | loss: 1.981515263
step: 4310 | loss: 1.978414408
step: 4320 | loss: 1.975314477
step: 4330 | loss: 1.972215472
step: 4340 | loss: 1.969117396
step: 4350 | loss: 1.966020252
step: 4360 | loss: 1.962924043
step: 4370 | loss: 1.959828772
step: 4380 | loss: 1.956734442
step: 4390 | loss: 1.953641056
step: 4400 | loss: 1.950548617
step: 4410 | loss: 1.947457128
step: 4420 | loss: 1.944366592
step: 4430 | loss: 1.941277012
step: 4440 | loss: 1.938188391
step: 4450 | loss: 1.935100733
step: 4460 | loss: 1.932014041
step: 4470 | loss: 1.928928317
step: 4480 | loss: 1.925843565
step: 4490 | loss: 1.922759788
step: 4500 | loss: 1.919676989
step: 4510 | loss: 1.916595172
step: 4520 | loss: 1.913514340
step: 4530 | loss: 1.910434495
step: 4540 | loss: 1.907355642
step: 4550 | loss: 1.904277783
step: 4560 | loss: 1.901200923
step: 4570 | loss: 1.898125063
step: 4580 | loss: 1.895050208
step: 4590 | loss: 1.891976360
step: 4600 | loss: 1.888903524
step: 4610 | loss: 1.885831703
step: 4620 | loss: 1.882760899
step: 4630 | loss: 1.879691117
step: 4640 | loss: 1.876622359
step: 4650 | loss: 1.873554630
step: 4660 | loss: 1.870487932
step: 4670 | loss: 1.867422270
step: 4680 | loss: 1.864357646
step: 4690 | loss: 1.861294064
step: 4700 | loss: 1.858231528
step: 4710 | loss: 1.855170041
step: 4720 | loss: 1.852109607
step: 4730 | loss: 1.849050229
step: 4740 | loss: 1.845991911
step: 4750 | loss: 1.842934657
step: 4760 | loss: 1.839878469
step: 4770 | loss: 1.836823352
step: 4780 | loss: 1.833769309
step: 4790 | loss: 1.830716344
step: 4800 | loss: 1.827664460
step: 4810 | loss: 1.824613662
step: 4820 | loss: 1.821563953
step: 4830 | loss: 1.818515336
step: 4840 | loss: 1.815467816
step: 4850 | loss: 1.812421395
step: 4860 | loss: 1.809376078
step: 4870 | loss: 1.806331869
step: 4880 | loss: 1.803288771
step: 4890 | loss: 1.800246788
step: 4900 | loss: 1.797205924
step: 4910 | loss: 1.794166183
step: 4920 | loss: 1.791127568
step: 4930 | loss: 1.788090083
step: 4940 | loss: 1.785053732
step: 4950 | loss: 1.782018520
step: 4960 | loss: 1.778984449
step: 4970 | loss: 1.775951524
step: 4980 | loss: 1.772919749
step: 4990 | loss: 1.769889127
step: 5000 | loss: 1.766859663
step: 5010 | loss: 1.763831360
step: 5020 | loss: 1.760804223
step: 5030 | loss: 1.757778255
step: 5040 | loss: 1.754753460
step: 5050 | loss: 1.751729843
step: 5060 | loss: 1.748707407
step: 5070 | loss: 1.745686156
step: 5080 | loss: 1.742666095
step: 5090 | loss: 1.739647227
step: 5100 | loss: 1.736629557
step: 5110 | loss: 1.733613088
step: 5120 | loss: 1.730597825
step: 5130 | loss: 1.727583772
step: 5140 | loss: 1.724570932
step: 5150 | loss: 1.721559311
step: 5160 | loss: 1.718548911
step: 5170 | loss: 1.715539738
step: 5180 | loss: 1.712531795
step: 5190 | loss: 1.709525087
step: 5200 | loss: 1.706519618
step: 5210 | loss: 1.703515391
step: 5220 | loss: 1.700512412
step: 5230 | loss: 1.697510684
step: 5240 | loss: 1.694510212
step: 5250 | loss: 1.691510999
step: 5260 | loss: 1.688513051
step: 5270 | loss: 1.685516371
step: 5280 | loss: 1.682520963
step: 5290 | loss: 1.679526833
step: 5300 | loss: 1.676533984
step: 5310 | loss: 1.673542420
step: 5320 | loss: 1.670552146
step: 5330 | loss: 1.667563166
step: 5340 | loss: 1.664575485
step: 5350 | loss: 1.661589107
step: 5360 | loss: 1.658604036
step: 5370 | loss: 1.655620277
step: 5380 | loss: 1.652637833
step: 5390 | loss: 1.649656710
step: 5400 | loss: 1.646676912
step: 5410 | loss: 1.643698443
step: 5420 | loss: 1.640721308
step: 5430 | loss: 1.637745511
step: 5440 | loss: 1.634771056
step: 5450 | loss: 1.631797949
step: 5460 | loss: 1.628826193
step: 5470 | loss: 1.625855793
step: 5480 | loss: 1.622886753
step: 5490 | loss: 1.619919078
step: 5500 | loss: 1.616952773
step: 5510 | loss: 1.613987841
step: 5520 | loss: 1.611024288
step: 5530 | loss: 1.608062118
step: 5540 | loss: 1.605101336
step: 5550 | loss: 1.602141945
step: 5560 | loss: 1.599183951
step: 5570 | loss: 1.596227359
step: 5580 | loss: 1.593272172
step: 5590 | loss: 1.590318395
step: 5600 | loss: 1.587366034
step: 5610 | loss: 1.584415091
step: 5620 | loss: 1.581465573
step: 5630 | loss: 1.578517484
step: 5640 | loss: 1.575570828
step: 5650 | loss: 1.572625610
step: 5660 | loss: 1.569681834
step: 5670 | loss: 1.566739506
step: 5680 | loss: 1.563798629
step: 5690 | loss: 1.560859209
step: 5700 | loss: 1.557921250
step: 5710 | loss: 1.554984757
step: 5720 | loss: 1.552049734
step: 5730 | loss: 1.549116187
step: 5740 | loss: 1.546184119
step: 5750 | loss: 1.543253535
step: 5760 | loss: 1.540324441
step: 5770 | loss: 1.537396840
step: 5780 | loss: 1.534470738
step: 5790 | loss: 1.531546139
step: 5800 | loss: 1.528623048
step: 5810 | loss: 1.525701469
step: 5820 | loss: 1.522781407
step: 5830 | loss: 1.519862868
step: 5840 | loss: 1.516945855
step: 5850 | loss: 1.514030373
step: 5860 | loss: 1.511116427
step: 5870 | loss: 1.508204022
step: 5880 | loss: 1.505293163
step: 5890 | loss: 1.502383853
step: 5900 | loss: 1.499476098
step: 5910 | loss: 1.496569903
step: 5920 | loss: 1.493665272
step: 5930 | loss: 1.490762210
step: 5940 | loss: 1.487860722
step: 5950 | loss: 1.484960812
step: 5960 | loss: 1.482062485
step: 5970 | loss: 1.479165745
step: 5980 | loss: 1.476270598
step: 5990 | loss: 1.473377048
step: 6000 | loss: 1.470485100
step: 6010 | loss: 1.467594758
step: 6020 | loss: 1.464706028
step: 6030 | loss: 1.461818913
step: 6040 | loss: 1.458933419
step: 6050 | loss: 1.456049549
step: 6060 | loss: 1.453167310
step: 6070 | loss: 1.450286705
step: 6080 | loss: 1.447407739
step: 6090 | loss: 1.444530417
step: 6100 | loss: 1.441654744
step: 6110 | loss: 1.438780723
step: 6120 | loss: 1.435908360
step: 6130 | loss: 1.433037659
step: 6140 | loss: 1.430168625
step: 6150 | loss: 1.427301262
step: 6160 | loss: 1.424435576
step: 6170 | loss: 1.421571570
step: 6180 | loss: 1.418709249
step: 6190 | loss: 1.415848617
step: 6200 | loss: 1.412989680
step: 6210 | loss: 1.410132442
step: 6220 | loss: 1.407276907
step: 6230 | loss: 1.404423079
step: 6240 | loss: 1.401570964
step: 6250 | loss: 1.398720566
step: 6260 | loss: 1.395871888
step: 6270 | loss: 1.393024937
step: 6280 | loss: 1.390179715
step: 6290 | loss: 1.387336227
step: 6300 | loss: 1.384494479
step: 6310 | loss: 1.381654473
step: 6320 | loss: 1.378816215
step: 6330 | loss: 1.375979709
step: 6340 | loss: 1.373144959
step: 6350 | loss: 1.370311970
step: 6360 | loss: 1.367480745
step: 6370 | loss: 1.364651289
step: 6380 | loss: 1.361823607
step: 6390 | loss: 1.358997702
step: 6400 | loss: 1.356173578
step: 6410 | loss: 1.353351240
step: 6420 | loss: 1.350530692
step: 6430 | loss: 1.347711939
step: 6440 | loss: 1.344894983
step: 6450 | loss: 1.342079829
step: 6460 | loss: 1.339266482
step: 6470 | loss: 1.336454944
step: 6480 | loss: 1.333645221
step: 6490 | loss: 1.330837316
step: 6500 | loss: 1.328031233
step: 6510 | loss: 1.325226976
step: 6520 | loss: 1.322424548
step: 6530 | loss: 1.319623954
step: 6540 | loss: 1.316825198
step: 6550 | loss: 1.314028282
step: 6560 | loss: 1.311233212
step: 6570 | loss: 1.308439989
step: 6580 | loss: 1.305648619
step: 6590 | loss: 1.302859105
step: 6600 | loss: 1.300071450
step: 6610 | loss: 1.297285658
step: 6620 | loss: 1.294501732
step: 6630 | loss: 1.291719675
step: 6640 | loss: 1.288939492
step: 6650 | loss: 1.286161186
step: 6660 | loss: 1.283384759
step: 6670 | loss: 1.280610216
step: 6680 | loss: 1.277837559
step: 6690 | loss: 1.275066791
step: 6700 | loss: 1.272297916
step: 6710 | loss: 1.269530937
step: 6720 | loss: 1.266765856
step: 6730 | loss: 1.264002678
step: 6740 | loss: 1.261241404
step: 6750 | loss: 1.258482038
step: 6760 | loss: 1.255724582
step: 6770 | loss: 1.252969039
step: 6780 | loss: 1.250215413
step: 6790 | loss: 1.247463705
step: 6800 | loss: 1.244713918
step: 6810 | loss: 1.241966056
step: 6820 | loss: 1.239220119
step: 6830 | loss: 1.236476112
step: 6840 | loss: 1.233734035
step: 6850 | loss: 1.230993892
step: 6860 | loss: 1.228255685
step: 6870 | loss: 1.225519415
step: 6880 | loss: 1.222785086
step: 6890 | loss: 1.220052699
step: 6900 | loss: 1.217322256
step: 6910 | loss: 1.214593759
step: 6920 | loss: 1.211867210
step: 6930 | loss: 1.209142610
step: 6940 | loss: 1.206419962
step: 6950 | loss: 1.203699267
step: 6960 | loss: 1.200980527
step: 6970 | loss: 1.198263743
step: 6980 | loss: 1.195548916
step: 6990 | loss: 1.192836048
step: 7000 | loss: 1.190125141
step: 7010 | loss: 1.187416194
step: 7020 | loss: 1.184709210
step: 7030 | loss: 1.182004190
step: 7040 | loss: 1.179301134
step: 7050 | loss: 1.176600043
step: 7060 | loss: 1.173900918
step: 7070 | loss: 1.171203760
step: 7080 | loss: 1.168508570
step: 7090 | loss: 1.165815347
step: 7100 | loss: 1.163124092
step: 7110 | loss: 1.160434806
step: 7120 | loss: 1.157747489
step: 7130 | loss: 1.155062141
step: 7140 | loss: 1.152378762
step: 7150 | loss: 1.149697352
step: 7160 | loss: 1.147017910
step: 7170 | loss: 1.144340438
step: 7180 | loss: 1.141664933
step: 7190 | loss: 1.138991397
step: 7200 | loss: 1.136319827
[... 458 similar lines elided: loss decreases smoothly, by roughly 0.0025 per 10 steps ...]
step: 11790 | loss: 0.005003668
step: 11800 | loss: 0.002583635
- final loss: 0.000890
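Note: the trace above drives the loss down to a final value of 0.000890 over thousands of fixed-size steps. As a minimal illustrative sketch of the kind of loop that emits such a "step | loss" trace (this is not the example's real code, whose source does not appear in this log, and it does not use owl-opt's actual API; the toy objective, learning rate, and plain update rule are hypothetical stand-ins):

    (* Illustrative sketch only, not owl-opt's API. The quadratic objective,
       learning rate, and fixed-step update are hypothetical stand-ins; the
       point is the loop shape behind a trace like the one above. *)
    let () =
      let lr = 1e-3 in
      let f x = (x -. 3.) ** 2. in     (* toy objective, minimum at x = 3 *)
      let df x = 2. *. (x -. 3.) in    (* its gradient *)
      let x = ref 0. in
      for step = 1 to 11_800 do
        x := !x -. lr *. df !x;        (* fixed-step descent update *)
        if step mod 10 = 0 then
          Printf.printf "step: %i | loss: %.9f\n" step (f !x)
      done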
- (cd _build/default/examples/opt && ./gd.exe)
-
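Note: the gd.exe trace that follows shrinks the loss by an almost constant amount per iteration. Assuming a plain fixed-step gradient descent (suggested by the binary's name, though its source is not shown in this log), the per-iteration drop is roughly lr * ||grad||^2, which stays nearly constant while the gradient barely changes. A quick check against two values copied from the trace below:

    (* Back-of-envelope check using iter 2 and iter 1805 from the trace
       below: a near-constant slope is what fixed-step gradient descent
       produces while the gradient is nearly constant. *)
    let () =
      let l2 = 4.538567 and l1805 = 4.136532 in
      let slope = (l2 -. l1805) /. float_of_int (1805 - 2) in
      (* prints ~0.000223, matching the per-line differences below *)
      Printf.printf "mean loss decrease per iter: %.6f\n" slope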
iter: 0 | loss: 4.538790
iter: 1 | loss: 4.538790
iter: 2 | loss: 4.538567
[... 1801 similar lines elided: loss decreases linearly, by roughly 0.000223 per iteration ...]
iter: 1804 | loss: 4.136755
iter: 1805 | loss: 4.136532
iter: 1806 | loss: 4.136309
iter: 1807 | loss: 4.136086
iter: 1808 | loss: 4.135863
iter: 1809 | loss: 4.135640
iter: 1810 | loss: 4.135417
iter: 1811 | loss: 4.135194
iter: 1812 | loss: 4.134971
iter: 1813 | loss: 4.134748
iter: 1814 | loss: 4.134525
iter: 1815 | loss: 4.134302
iter: 1816 | loss: 4.134079
iter: 1817 | loss: 4.133856
iter: 1818 | loss: 4.133634
iter: 1819 | loss: 4.133411
iter: 1820 | loss: 4.133188
iter: 1821 | loss: 4.132965
iter: 1822 | loss: 4.132742
iter: 1823 | loss: 4.132519
iter: 1824 | loss: 4.132296
iter: 1825 | loss: 4.132073
iter: 1826 | loss: 4.131850
iter: 1827 | loss: 4.131627
iter: 1828 | loss: 4.131404
iter: 1829 | loss: 4.131181
iter: 1830 | loss: 4.130958
iter: 1831 | loss: 4.130735
iter: 1832 | loss: 4.130512
iter: 1833 | loss: 4.130289
iter: 1834 | loss: 4.130066
iter: 1835 | loss: 4.129843
iter: 1836 | loss: 4.129620
iter: 1837 | loss: 4.129397
iter: 1838 | loss: 4.129174
iter: 1839 | loss: 4.128951
iter: 1840 | loss: 4.128728
iter: 1841 | loss: 4.128505
iter: 1842 | loss: 4.128282
iter: 1843 | loss: 4.128059
iter: 1844 | loss: 4.127836
iter: 1845 | loss: 4.127613
iter: 1846 | loss: 4.127390
iter: 1847 | loss: 4.127167
iter: 1848 | loss: 4.126944
iter: 1849 | loss: 4.126721
iter: 1850 | loss: 4.126498
iter: 1851 | loss: 4.126275
iter: 1852 | loss: 4.126052
iter: 1853 | loss: 4.125829
iter: 1854 | loss: 4.125606
iter: 1855 | loss: 4.125383
iter: 1856 | loss: 4.125160
iter: 1857 | loss: 4.124937
iter: 1858 | loss: 4.124714
iter: 1859 | loss: 4.124491
iter: 1860 | loss: 4.124268
iter: 1861 | loss: 4.124045
iter: 1862 | loss: 4.123822
iter: 1863 | loss: 4.123599
iter: 1864 | loss: 4.123376
iter: 1865 | loss: 4.123153
iter: 1866 | loss: 4.122930
iter: 1867 | loss: 4.122707
iter: 1868 | loss: 4.122484
iter: 1869 | loss: 4.122261
iter: 1870 | loss: 4.122039
iter: 1871 | loss: 4.121816
iter: 1872 | loss: 4.121593
iter: 1873 | loss: 4.121370
iter: 1874 | loss: 4.121147
iter: 1875 | loss: 4.120924
iter: 1876 | loss: 4.120701
iter: 1877 | loss: 4.120478
iter: 1878 | loss: 4.120255
iter: 1879 | loss: 4.120032
iter: 1880 | loss: 4.119809
iter: 1881 | loss: 4.119586
iter: 1882 | loss: 4.119363
iter: 1883 | loss: 4.119140
iter: 1884 | loss: 4.118917
iter: 1885 | loss: 4.118694
iter: 1886 | loss: 4.118471
iter: 1887 | loss: 4.118248
iter: 1888 | loss: 4.118025
iter: 1889 | loss: 4.117802
iter: 1890 | loss: 4.117579
iter: 1891 | loss: 4.117356
iter: 1892 | loss: 4.117133
iter: 1893 | loss: 4.116910
iter: 1894 | loss: 4.116687
iter: 1895 | loss: 4.116464
iter: 1896 | loss: 4.116241
iter: 1897 | loss: 4.116018
iter: 1898 | loss: 4.115795
iter: 1899 | loss: 4.115572
iter: 1900 | loss: 4.115349
iter: 1901 | loss: 4.115126
iter: 1902 | loss: 4.114903
iter: 1903 | loss: 4.114680
iter: 1904 | loss: 4.114457
iter: 1905 | loss: 4.114234
iter: 1906 | loss: 4.114011
iter: 1907 | loss: 4.113788
iter: 1908 | loss: 4.113565
iter: 1909 | loss: 4.113342
iter: 1910 | loss: 4.113119
iter: 1911 | loss: 4.112896
iter: 1912 | loss: 4.112673
iter: 1913 | loss: 4.112450
iter: 1914 | loss: 4.112227
iter: 1915 | loss: 4.112004
iter: 1916 | loss: 4.111781
iter: 1917 | loss: 4.111558
iter: 1918 | loss: 4.111335
iter: 1919 | loss: 4.111112
iter: 1920 | loss: 4.110889
iter: 1921 | loss: 4.110666
iter: 1922 | loss: 4.110444
iter: 1923 | loss: 4.110221
iter: 1924 | loss: 4.109998
iter: 1925 | loss: 4.109775
iter: 1926 | loss: 4.109552
iter: 1927 | loss: 4.109329
iter: 1928 | loss: 4.109106
iter: 1929 | loss: 4.108883
iter: 1930 | loss: 4.108660
iter: 1931 | loss: 4.108437
iter: 1932 | loss: 4.108214
iter: 1933 | loss: 4.107991
iter: 1934 | loss: 4.107768
iter: 1935 | loss: 4.107545
iter: 1936 | loss: 4.107322
iter: 1937 | loss: 4.107099
iter: 1938 | loss: 4.106876
iter: 1939 | loss: 4.106653
iter: 1940 | loss: 4.106430
iter: 1941 | loss: 4.106207
iter: 1942 | loss: 4.105984
iter: 1943 | loss: 4.105761
iter: 1944 | loss: 4.105538
iter: 1945 | loss: 4.105315
iter: 1946 | loss: 4.105092
iter: 1947 | loss: 4.104869
iter: 1948 | loss: 4.104646
iter: 1949 | loss: 4.104423
iter: 1950 | loss: 4.104200
iter: 1951 | loss: 4.103977
iter: 1952 | loss: 4.103754
iter: 1953 | loss: 4.103531
iter: 1954 | loss: 4.103308
iter: 1955 | loss: 4.103085
iter: 1956 | loss: 4.102862
iter: 1957 | loss: 4.102639
iter: 1958 | loss: 4.102416
iter: 1959 | loss: 4.102193
iter: 1960 | loss: 4.101970
iter: 1961 | loss: 4.101747
iter: 1962 | loss: 4.101524
iter: 1963 | loss: 4.101301
iter: 1964 | loss: 4.101078
iter: 1965 | loss: 4.100855
iter: 1966 | loss: 4.100632
iter: 1967 | loss: 4.100409
iter: 1968 | loss: 4.100186
iter: 1969 | loss: 4.099963
iter: 1970 | loss: 4.099740
iter: 1971 | loss: 4.099517
iter: 1972 | loss: 4.099294
iter: 1973 | loss: 4.099071
iter: 1974 | loss: 4.098849
iter: 1975 | loss: 4.098626
iter: 1976 | loss: 4.098403
iter: 1977 | loss: 4.098180
iter: 1978 | loss: 4.097957
iter: 1979 | loss: 4.097734
iter: 1980 | loss: 4.097511
iter: 1981 | loss: 4.097288
iter: 1982 | loss: 4.097065
iter: 1983 | loss: 4.096842
iter: 1984 | loss: 4.096619
iter: 1985 | loss: 4.096396
iter: 1986 | loss: 4.096173
iter: 1987 | loss: 4.095950
iter: 1988 | loss: 4.095727
iter: 1989 | loss: 4.095504
iter: 1990 | loss: 4.095281
iter: 1991 | loss: 4.095058
iter: 1992 | loss: 4.094835
iter: 1993 | loss: 4.094612
iter: 1994 | loss: 4.094389
iter: 1995 | loss: 4.094166
iter: 1996 | loss: 4.093943
iter: 1997 | loss: 4.093720
iter: 1998 | loss: 4.093497
iter: 1999 | loss: 4.093274
iter: 2000 | loss: 4.093051
iter: 2001 | loss: 4.092828
iter: 2002 | loss: 4.092605
iter: 2003 | loss: 4.092382
iter: 2004 | loss: 4.092159
iter: 2005 | loss: 4.091936
iter: 2006 | loss: 4.091713
iter: 2007 | loss: 4.091490
iter: 2008 | loss: 4.091267
iter: 2009 | loss: 4.091044
iter: 2010 | loss: 4.090821
iter: 2011 | loss: 4.090598
iter: 2012 | loss: 4.090375
iter: 2013 | loss: 4.090152
iter: 2014 | loss: 4.089929
iter: 2015 | loss: 4.089706
iter: 2016 | loss: 4.089483
iter: 2017 | loss: 4.089260
iter: 2018 | loss: 4.089037
iter: 2019 | loss: 4.088814
iter: 2020 | loss: 4.088591
iter: 2021 | loss: 4.088368
iter: 2022 | loss: 4.088145
iter: 2023 | loss: 4.087922
iter: 2024 | loss: 4.087699
iter: 2025 | loss: 4.087476
iter: 2026 | loss: 4.087254
iter: 2027 | loss: 4.087031
iter: 2028 | loss: 4.086808
iter: 2029 | loss: 4.086585
iter: 2030 | loss: 4.086362
iter: 2031 | loss: 4.086139
iter: 2032 | loss: 4.085916
iter: 2033 | loss: 4.085693
iter: 2034 | loss: 4.085470
iter: 2035 | loss: 4.085247
iter: 2036 | loss: 4.085024
iter: 2037 | loss: 4.084801
iter: 2038 | loss: 4.084578
iter: 2039 | loss: 4.084355
iter: 2040 | loss: 4.084132
iter: 2041 | loss: 4.083909
iter: 2042 | loss: 4.083686
iter: 2043 | loss: 4.083463
iter: 2044 | loss: 4.083240
iter: 2045 | loss: 4.083017
iter: 2046 | loss: 4.082794
iter: 2047 | loss: 4.082571
iter: 2048 | loss: 4.082348
iter: 2049 | loss: 4.082125
iter: 2050 | loss: 4.081902
iter: 2051 | loss: 4.081679
iter: 2052 | loss: 4.081456
iter: 2053 | loss: 4.081233
iter: 2054 | loss: 4.081010
iter: 2055 | loss: 4.080787
iter: 2056 | loss: 4.080564
iter: 2057 | loss: 4.080341
iter: 2058 | loss: 4.080118
iter: 2059 | loss: 4.079895
iter: 2060 | loss: 4.079672
iter: 2061 | loss: 4.079449
iter: 2062 | loss: 4.079226
iter: 2063 | loss: 4.079003
iter: 2064 | loss: 4.078780
iter: 2065 | loss: 4.078557
iter: 2066 | loss: 4.078334
iter: 2067 | loss: 4.078111
iter: 2068 | loss: 4.077888
iter: 2069 | loss: 4.077665
iter: 2070 | loss: 4.077442
iter: 2071 | loss: 4.077219
iter: 2072 | loss: 4.076996
iter: 2073 | loss: 4.076773
iter: 2074 | loss: 4.076550
iter: 2075 | loss: 4.076327
iter: 2076 | loss: 4.076104
iter: 2077 | loss: 4.075881
iter: 2078 | loss: 4.075659
iter: 2079 | loss: 4.075436
iter: 2080 | loss: 4.075213
iter: 2081 | loss: 4.074990
iter: 2082 | loss: 4.074767
iter: 2083 | loss: 4.074544
iter: 2084 | loss: 4.074321
iter: 2085 | loss: 4.074098
iter: 2086 | loss: 4.073875
iter: 2087 | loss: 4.073652
iter: 2088 | loss: 4.073429
iter: 2089 | loss: 4.073206
iter: 2090 | loss: 4.072983
iter: 2091 | loss: 4.072760
iter: 2092 | loss: 4.072537
iter: 2093 | loss: 4.072314
iter: 2094 | loss: 4.072091
iter: 2095 | loss: 4.071868
iter: 2096 | loss: 4.071645
iter: 2097 | loss: 4.071422
iter: 2098 | loss: 4.071199
iter: 2099 | loss: 4.070976
iter: 2100 | loss: 4.070753
iter: 2101 | loss: 4.070530
iter: 2102 | loss: 4.070307
iter: 2103 | loss: 4.070084
iter: 2104 | loss: 4.069861
iter: 2105 | loss: 4.069638
iter: 2106 | loss: 4.069415
iter: 2107 | loss: 4.069192
iter: 2108 | loss: 4.068969
iter: 2109 | loss: 4.068746
iter: 2110 | loss: 4.068523
iter: 2111 | loss: 4.068300
iter: 2112 | loss: 4.068077
iter: 2113 | loss: 4.067854
iter: 2114 | loss: 4.067631
iter: 2115 | loss: 4.067408
iter: 2116 | loss: 4.067185
iter: 2117 | loss: 4.066962
iter: 2118 | loss: 4.066739
iter: 2119 | loss: 4.066516
iter: 2120 | loss: 4.066293
iter: 2121 | loss: 4.066070
iter: 2122 | loss: 4.065847
iter: 2123 | loss: 4.065624
iter: 2124 | loss: 4.065401
iter: 2125 | loss: 4.065178
iter: 2126 | loss: 4.064955
iter: 2127 | loss: 4.064732
iter: 2128 | loss: 4.064509
iter: 2129 | loss: 4.064286
iter: 2130 | loss: 4.064064
iter: 2131 | loss: 4.063841
iter: 2132 | loss: 4.063618
iter: 2133 | loss: 4.063395
iter: 2134 | loss: 4.063172
iter: 2135 | loss: 4.062949
iter: 2136 | loss: 4.062726
iter: 2137 | loss: 4.062503
iter: 2138 | loss: 4.062280
iter: 2139 | loss: 4.062057
iter: 2140 | loss: 4.061834
iter: 2141 | loss: 4.061611
iter: 2142 | loss: 4.061388
iter: 2143 | loss: 4.061165
iter: 2144 | loss: 4.060942
iter: 2145 | loss: 4.060719
iter: 2146 | loss: 4.060496
iter: 2147 | loss: 4.060273
iter: 2148 | loss: 4.060050
iter: 2149 | loss: 4.059827
iter: 2150 | loss: 4.059604
iter: 2151 | loss: 4.059381
iter: 2152 | loss: 4.059158
iter: 2153 | loss: 4.058935
iter: 2154 | loss: 4.058712
iter: 2155 | loss: 4.058489
iter: 2156 | loss: 4.058266
iter: 2157 | loss: 4.058043
iter: 2158 | loss: 4.057820
iter: 2159 | loss: 4.057597
iter: 2160 | loss: 4.057374
iter: 2161 | loss: 4.057151
iter: 2162 | loss: 4.056928
iter: 2163 | loss: 4.056705
iter: 2164 | loss: 4.056482
iter: 2165 | loss: 4.056259
iter: 2166 | loss: 4.056036
iter: 2167 | loss: 4.055813
iter: 2168 | loss: 4.055590
iter: 2169 | loss: 4.055367
iter: 2170 | loss: 4.055144
iter: 2171 | loss: 4.054921
iter: 2172 | loss: 4.054698
iter: 2173 | loss: 4.054475
iter: 2174 | loss: 4.054252
iter: 2175 | loss: 4.054029
iter: 2176 | loss: 4.053806
iter: 2177 | loss: 4.053583
iter: 2178 | loss: 4.053360
iter: 2179 | loss: 4.053137
iter: 2180 | loss: 4.052914
iter: 2181 | loss: 4.052691
iter: 2182 | loss: 4.052468
iter: 2183 | loss: 4.052246
iter: 2184 | loss: 4.052023
iter: 2185 | loss: 4.051800
iter: 2186 | loss: 4.051577
iter: 2187 | loss: 4.051354
iter: 2188 | loss: 4.051131
iter: 2189 | loss: 4.050908
iter: 2190 | loss: 4.050685
iter: 2191 | loss: 4.050462
iter: 2192 | loss: 4.050239
iter: 2193 | loss: 4.050016
iter: 2194 | loss: 4.049793
iter: 2195 | loss: 4.049570
iter: 2196 | loss: 4.049347
iter: 2197 | loss: 4.049124
iter: 2198 | loss: 4.048901
iter: 2199 | loss: 4.048678
iter: 2200 | loss: 4.048455
iter: 2201 | loss: 4.048232
iter: 2202 | loss: 4.048009
iter: 2203 | loss: 4.047786
iter: 2204 | loss: 4.047563
iter: 2205 | loss: 4.047340
iter: 2206 | loss: 4.047117
iter: 2207 | loss: 4.046894
iter: 2208 | loss: 4.046671
iter: 2209 | loss: 4.046448
iter: 2210 | loss: 4.046225
iter: 2211 | loss: 4.046002
iter: 2212 | loss: 4.045779
iter: 2213 | loss: 4.045556
iter: 2214 | loss: 4.045333
iter: 2215 | loss: 4.045110
iter: 2216 | loss: 4.044887
iter: 2217 | loss: 4.044664
iter: 2218 | loss: 4.044441
iter: 2219 | loss: 4.044218
iter: 2220 | loss: 4.043995
iter: 2221 | loss: 4.043772
iter: 2222 | loss: 4.043549
iter: 2223 | loss: 4.043326
iter: 2224 | loss: 4.043103
iter: 2225 | loss: 4.042880
iter: 2226 | loss: 4.042657
iter: 2227 | loss: 4.042434
iter: 2228 | loss: 4.042211
iter: 2229 | loss: 4.041988
iter: 2230 | loss: 4.041765
iter: 2231 | loss: 4.041542
iter: 2232 | loss: 4.041319
iter: 2233 | loss: 4.041096
iter: 2234 | loss: 4.040873
iter: 2235 | loss: 4.040651
iter: 2236 | loss: 4.040428
iter: 2237 | loss: 4.040205
iter: 2238 | loss: 4.039982
iter: 2239 | loss: 4.039759
iter: 2240 | loss: 4.039536
iter: 2241 | loss: 4.039313
iter: 2242 | loss: 4.039090
iter: 2243 | loss: 4.038867
iter: 2244 | loss: 4.038644
iter: 2245 | loss: 4.038421
iter: 2246 | loss: 4.038198
iter: 2247 | loss: 4.037975
iter: 2248 | loss: 4.037752
iter: 2249 | loss: 4.037529
iter: 2250 | loss: 4.037306
iter: 2251 | loss: 4.037083
iter: 2252 | loss: 4.036860
iter: 2253 | loss: 4.036637
iter: 2254 | loss: 4.036414
iter: 2255 | loss: 4.036191
iter: 2256 | loss: 4.035968
iter: 2257 | loss: 4.035745
iter: 2258 | loss: 4.035522
iter: 2259 | loss: 4.035299
iter: 2260 | loss: 4.035076
iter: 2261 | loss: 4.034853
iter: 2262 | loss: 4.034630
iter: 2263 | loss: 4.034407
iter: 2264 | loss: 4.034184
iter: 2265 | loss: 4.033961
iter: 2266 | loss: 4.033738
iter: 2267 | loss: 4.033515
iter: 2268 | loss: 4.033292
iter: 2269 | loss: 4.033069
iter: 2270 | loss: 4.032846
iter: 2271 | loss: 4.032623
iter: 2272 | loss: 4.032400
iter: 2273 | loss: 4.032177
iter: 2274 | loss: 4.031954
iter: 2275 | loss: 4.031731
iter: 2276 | loss: 4.031508
iter: 2277 | loss: 4.031285
iter: 2278 | loss: 4.031062
iter: 2279 | loss: 4.030839
iter: 2280 | loss: 4.030616
iter: 2281 | loss: 4.030393
iter: 2282 | loss: 4.030170
iter: 2283 | loss: 4.029947
iter: 2284 | loss: 4.029724
iter: 2285 | loss: 4.029501
iter: 2286 | loss: 4.029278
iter: 2287 | loss: 4.029056
iter: 2288 | loss: 4.028833
iter: 2289 | loss: 4.028610
iter: 2290 | loss: 4.028387
iter: 2291 | loss: 4.028164
iter: 2292 | loss: 4.027941
iter: 2293 | loss: 4.027718
iter: 2294 | loss: 4.027495
iter: 2295 | loss: 4.027272
iter: 2296 | loss: 4.027049
iter: 2297 | loss: 4.026826
iter: 2298 | loss: 4.026603
iter: 2299 | loss: 4.026380
iter: 2300 | loss: 4.026157
iter: 2301 | loss: 4.025934
iter: 2302 | loss: 4.025711
iter: 2303 | loss: 4.025488
iter: 2304 | loss: 4.025265
iter: 2305 | loss: 4.025042
iter: 2306 | loss: 4.024819
iter: 2307 | loss: 4.024596
iter: 2308 | loss: 4.024373
iter: 2309 | loss: 4.024150
iter: 2310 | loss: 4.023927
iter: 2311 | loss: 4.023704
iter: 2312 | loss: 4.023481
iter: 2313 | loss: 4.023258
iter: 2314 | loss: 4.023035
iter: 2315 | loss: 4.022812
iter: 2316 | loss: 4.022589
iter: 2317 | loss: 4.022366
iter: 2318 | loss: 4.022143
iter: 2319 | loss: 4.021920
iter: 2320 | loss: 4.021697
iter: 2321 | loss: 4.021474
iter: 2322 | loss: 4.021251
iter: 2323 | loss: 4.021028
iter: 2324 | loss: 4.020805
iter: 2325 | loss: 4.020582
iter: 2326 | loss: 4.020359
iter: 2327 | loss: 4.020136
iter: 2328 | loss: 4.019913
iter: 2329 | loss: 4.019690
iter: 2330 | loss: 4.019467
iter: 2331 | loss: 4.019244
iter: 2332 | loss: 4.019021
iter: 2333 | loss: 4.018798
iter: 2334 | loss: 4.018575
iter: 2335 | loss: 4.018352
iter: 2336 | loss: 4.018129
iter: 2337 | loss: 4.017906
iter: 2338 | loss: 4.017683
iter: 2339 | loss: 4.017461
iter: 2340 | loss: 4.017238
iter: 2341 | loss: 4.017015
iter: 2342 | loss: 4.016792
iter: 2343 | loss: 4.016569
iter: 2344 | loss: 4.016346
iter: 2345 | loss: 4.016123
iter: 2346 | loss: 4.015900
iter: 2347 | loss: 4.015677
iter: 2348 | loss: 4.015454
iter: 2349 | loss: 4.015231
iter: 2350 | loss: 4.015008
iter: 2351 | loss: 4.014785
iter: 2352 | loss: 4.014562
iter: 2353 | loss: 4.014339
iter: 2354 | loss: 4.014116
iter: 2355 | loss: 4.013893
iter: 2356 | loss: 4.013670
iter: 2357 | loss: 4.013447
iter: 2358 | loss: 4.013224
iter: 2359 | loss: 4.013001
iter: 2360 | loss: 4.012778
iter: 2361 | loss: 4.012555
iter: 2362 | loss: 4.012332
iter: 2363 | loss: 4.012109
iter: 2364 | loss: 4.011886
iter: 2365 | loss: 4.011663
iter: 2366 | loss: 4.011440
iter: 2367 | loss: 4.011217
iter: 2368 | loss: 4.010994
iter: 2369 | loss: 4.010771
iter: 2370 | loss: 4.010548
iter: 2371 | loss: 4.010325
iter: 2372 | loss: 4.010102
iter: 2373 | loss: 4.009879
iter: 2374 | loss: 4.009656
iter: 2375 | loss: 4.009433
iter: 2376 | loss: 4.009210
iter: 2377 | loss: 4.008987
iter: 2378 | loss: 4.008764
iter: 2379 | loss: 4.008541
iter: 2380 | loss: 4.008318
iter: 2381 | loss: 4.008095
iter: 2382 | loss: 4.007872
iter: 2383 | loss: 4.007649
iter: 2384 | loss: 4.007426
iter: 2385 | loss: 4.007203
iter: 2386 | loss: 4.006980
iter: 2387 | loss: 4.006757
iter: 2388 | loss: 4.006534
iter: 2389 | loss: 4.006311
iter: 2390 | loss: 4.006088
iter: 2391 | loss: 4.005866
iter: 2392 | loss: 4.005643
iter: 2393 | loss: 4.005420
iter: 2394 | loss: 4.005197
iter: 2395 | loss: 4.004974
iter: 2396 | loss: 4.004751
iter: 2397 | loss: 4.004528
iter: 2398 | loss: 4.004305
iter: 2399 | loss: 4.004082
iter: 2400 | loss: 4.003859
iter: 2401 | loss: 4.003636
iter: 2402 | loss: 4.003413
iter: 2403 | loss: 4.003190
iter: 2404 | loss: 4.002967
iter: 2405 | loss: 4.002744
iter: 2406 | loss: 4.002521
iter: 2407 | loss: 4.002298
iter: 2408 | loss: 4.002075
iter: 2409 | loss: 4.001852
iter: 2410 | loss: 4.001629
iter: 2411 | loss: 4.001406
iter: 2412 | loss: 4.001183
iter: 2413 | loss: 4.000960
iter: 2414 | loss: 4.000737
iter: 2415 | loss: 4.000514
iter: 2416 | loss: 4.000291
iter: 2417 | loss: 4.000068
iter: 2418 | loss: 3.999845
iter: 2419 | loss: 3.999622
iter: 2420 | loss: 3.999399
iter: 2421 | loss: 3.999176
iter: 2422 | loss: 3.998953
iter: 2423 | loss: 3.998730
iter: 2424 | loss: 3.998507
iter: 2425 | loss: 3.998284
iter: 2426 | loss: 3.998061
iter: 2427 | loss: 3.997838
iter: 2428 | loss: 3.997615
iter: 2429 | loss: 3.997392
iter: 2430 | loss: 3.997169
iter: 2431 | loss: 3.996946
iter: 2432 | loss: 3.996723
iter: 2433 | loss: 3.996500
iter: 2434 | loss: 3.996277
iter: 2435 | loss: 3.996054
iter: 2436 | loss: 3.995831
iter: 2437 | loss: 3.995608
iter: 2438 | loss: 3.995385
iter: 2439 | loss: 3.995162
iter: 2440 | loss: 3.994939
iter: 2441 | loss: 3.994716
iter: 2442 | loss: 3.994493
iter: 2443 | loss: 3.994271
iter: 2444 | loss: 3.994048
iter: 2445 | loss: 3.993825
iter: 2446 | loss: 3.993602
iter: 2447 | loss: 3.993379
iter: 2448 | loss: 3.993156
iter: 2449 | loss: 3.992933
iter: 2450 | loss: 3.992710
iter: 2451 | loss: 3.992487
iter: 2452 | loss: 3.992264
iter: 2453 | loss: 3.992041
iter: 2454 | loss: 3.991818
iter: 2455 | loss: 3.991595
iter: 2456 | loss: 3.991372
iter: 2457 | loss: 3.991149
iter: 2458 | loss: 3.990926
iter: 2459 | loss: 3.990703
iter: 2460 | loss: 3.990480
iter: 2461 | loss: 3.990257
iter: 2462 | loss: 3.990034
iter: 2463 | loss: 3.989811
iter: 2464 | loss: 3.989588
iter: 2465 | loss: 3.989365
iter: 2466 | loss: 3.989142
iter: 2467 | loss: 3.988919
iter: 2468 | loss: 3.988696
iter: 2469 | loss: 3.988473
iter: 2470 | loss: 3.988250
iter: 2471 | loss: 3.988027
iter: 2472 | loss: 3.987804
iter: 2473 | loss: 3.987581
iter: 2474 | loss: 3.987358
iter: 2475 | loss: 3.987135
iter: 2476 | loss: 3.986912
iter: 2477 | loss: 3.986689
iter: 2478 | loss: 3.986466
iter: 2479 | loss: 3.986243
iter: 2480 | loss: 3.986020
iter: 2481 | loss: 3.985797
iter: 2482 | loss: 3.985574
iter: 2483 | loss: 3.985351
iter: 2484 | loss: 3.985128
iter: 2485 | loss: 3.984905
iter: 2486 | loss: 3.984682
iter: 2487 | loss: 3.984459
iter: 2488 | loss: 3.984236
iter: 2489 | loss: 3.984013
iter: 2490 | loss: 3.983790
iter: 2491 | loss: 3.983567
iter: 2492 | loss: 3.983344
iter: 2493 | loss: 3.983121
iter: 2494 | loss: 3.982898
iter: 2495 | loss: 3.982676
iter: 2496 | loss: 3.982453
iter: 2497 | loss: 3.982230
iter: 2498 | loss: 3.982007
iter: 2499 | loss: 3.981784
iter: 2500 | loss: 3.981561
iter: 2501 | loss: 3.981338
iter: 2502 | loss: 3.981115
iter: 2503 | loss: 3.980892
iter: 2504 | loss: 3.980669
iter: 2505 | loss: 3.980446
iter: 2506 | loss: 3.980223
iter: 2507 | loss: 3.980000
iter: 2508 | loss: 3.979777
iter: 2509 | loss: 3.979554
iter: 2510 | loss: 3.979331
iter: 2511 | loss: 3.979108
iter: 2512 | loss: 3.978885
iter: 2513 | loss: 3.978662
iter: 2514 | loss: 3.978439
iter: 2515 | loss: 3.978216
iter: 2516 | loss: 3.977993
iter: 2517 | loss: 3.977770
iter: 2518 | loss: 3.977547
iter: 2519 | loss: 3.977324
iter: 2520 | loss: 3.977101
iter: 2521 | loss: 3.976878
iter: 2522 | loss: 3.976655
iter: 2523 | loss: 3.976432
iter: 2524 | loss: 3.976209
iter: 2525 | loss: 3.975986
iter: 2526 | loss: 3.975763
iter: 2527 | loss: 3.975540
iter: 2528 | loss: 3.975317
iter: 2529 | loss: 3.975094
iter: 2530 | loss: 3.974871
iter: 2531 | loss: 3.974648
iter: 2532 | loss: 3.974425
iter: 2533 | loss: 3.974202
iter: 2534 | loss: 3.973979
iter: 2535 | loss: 3.973756
iter: 2536 | loss: 3.973533
iter: 2537 | loss: 3.973310
iter: 2538 | loss: 3.973087
iter: 2539 | loss: 3.972864
iter: 2540 | loss: 3.972641
iter: 2541 | loss: 3.972418
iter: 2542 | loss: 3.972195
iter: 2543 | loss: 3.971972
iter: 2544 | loss: 3.971749
iter: 2545 | loss: 3.971526
iter: 2546 | loss: 3.971303
iter: 2547 | loss: 3.971081
iter: 2548 | loss: 3.970858
iter: 2549 | loss: 3.970635
iter: 2550 | loss: 3.970412
iter: 2551 | loss: 3.970189
iter: 2552 | loss: 3.969966
iter: 2553 | loss: 3.969743
iter: 2554 | loss: 3.969520
iter: 2555 | loss: 3.969297
iter: 2556 | loss: 3.969074
iter: 2557 | loss: 3.968851
iter: 2558 | loss: 3.968628
iter: 2559 | loss: 3.968405
iter: 2560 | loss: 3.968182
iter: 2561 | loss: 3.967959
iter: 2562 | loss: 3.967736
iter: 2563 | loss: 3.967513
iter: 2564 | loss: 3.967290
iter: 2565 | loss: 3.967067
iter: 2566 | loss: 3.966844
iter: 2567 | loss: 3.966621
iter: 2568 | loss: 3.966398
iter: 2569 | loss: 3.966175
iter: 2570 | loss: 3.965952
iter: 2571 | loss: 3.965729
iter: 2572 | loss: 3.965506
iter: 2573 | loss: 3.965283
iter: 2574 | loss: 3.965060
iter: 2575 | loss: 3.964837
iter: 2576 | loss: 3.964614
iter: 2577 | loss: 3.964391
iter: 2578 | loss: 3.964168
iter: 2579 | loss: 3.963945
iter: 2580 | loss: 3.963722
iter: 2581 | loss: 3.963499
iter: 2582 | loss: 3.963276
iter: 2583 | loss: 3.963053
iter: 2584 | loss: 3.962830
iter: 2585 | loss: 3.962607
iter: 2586 | loss: 3.962384
iter: 2587 | loss: 3.962161
iter: 2588 | loss: 3.961938
iter: 2589 | loss: 3.961715
iter: 2590 | loss: 3.961492
iter: 2591 | loss: 3.961269
iter: 2592 | loss: 3.961046
iter: 2593 | loss: 3.960823
iter: 2594 | loss: 3.960600
iter: 2595 | loss: 3.960377
iter: 2596 | loss: 3.960154
iter: 2597 | loss: 3.959931
iter: 2598 | loss: 3.959708
iter: 2599 | loss: 3.959486
iter: 2600 | loss: 3.959263
iter: 2601 | loss: 3.959040
iter: 2602 | loss: 3.958817
iter: 2603 | loss: 3.958594
iter: 2604 | loss: 3.958371
iter: 2605 | loss: 3.958148
iter: 2606 | loss: 3.957925
iter: 2607 | loss: 3.957702
iter: 2608 | loss: 3.957479
iter: 2609 | loss: 3.957256
iter: 2610 | loss: 3.957033
iter: 2611 | loss: 3.956810
iter: 2612 | loss: 3.956587
iter: 2613 | loss: 3.956364
iter: 2614 | loss: 3.956141
iter: 2615 | loss: 3.955918
iter: 2616 | loss: 3.955695
iter: 2617 | loss: 3.955472
iter: 2618 | loss: 3.955249
iter: 2619 | loss: 3.955026
iter: 2620 | loss: 3.954803
iter: 2621 | loss: 3.954580
iter: 2622 | loss: 3.954357
iter: 2623 | loss: 3.954134
iter: 2624 | loss: 3.953911
iter: 2625 | loss: 3.953688
iter: 2626 | loss: 3.953465
iter: 2627 | loss: 3.953242
iter: 2628 | loss: 3.953019
iter: 2629 | loss: 3.952796
iter: 2630 | loss: 3.952573
iter: 2631 | loss: 3.952350
iter: 2632 | loss: 3.952127
iter: 2633 | loss: 3.951904
iter: 2634 | loss: 3.951681
iter: 2635 | loss: 3.951458
iter: 2636 | loss: 3.951235
iter: 2637 | loss: 3.951012
iter: 2638 | loss: 3.950789
iter: 2639 | loss: 3.950566
iter: 2640 | loss: 3.950343
iter: 2641 | loss: 3.950120
iter: 2642 | loss: 3.949897
iter: 2643 | loss: 3.949674
iter: 2644 | loss: 3.949451
iter: 2645 | loss: 3.949228
iter: 2646 | loss: 3.949005
iter: 2647 | loss: 3.948782
iter: 2648 | loss: 3.948559
iter: 2649 | loss: 3.948336
iter: 2650 | loss: 3.948113
iter: 2651 | loss: 3.947891
iter: 2652 | loss: 3.947668
iter: 2653 | loss: 3.947445
iter: 2654 | loss: 3.947222
iter: 2655 | loss: 3.946999
iter: 2656 | loss: 3.946776
iter: 2657 | loss: 3.946553
iter: 2658 | loss: 3.946330
iter: 2659 | loss: 3.946107
iter: 2660 | loss: 3.945884
iter: 2661 | loss: 3.945661
iter: 2662 | loss: 3.945438
iter: 2663 | loss: 3.945215
iter: 2664 | loss: 3.944992
iter: 2665 | loss: 3.944769
iter: 2666 | loss: 3.944546
iter: 2667 | loss: 3.944323
iter: 2668 | loss: 3.944100
iter: 2669 | loss: 3.943877
iter: 2670 | loss: 3.943654
iter: 2671 | loss: 3.943431
iter: 2672 | loss: 3.943208
iter: 2673 | loss: 3.942985
iter: 2674 | loss: 3.942762
iter: 2675 | loss: 3.942539
iter: 2676 | loss: 3.942316
iter: 2677 | loss: 3.942093
iter: 2678 | loss: 3.941870
iter: 2679 | loss: 3.941647
iter: 2680 | loss: 3.941424
iter: 2681 | loss: 3.941201
iter: 2682 | loss: 3.940978
iter: 2683 | loss: 3.940755
iter: 2684 | loss: 3.940532
iter: 2685 | loss: 3.940309
iter: 2686 | loss: 3.940086
iter: 2687 | loss: 3.939863
iter: 2688 | loss: 3.939640
iter: 2689 | loss: 3.939417
iter: 2690 | loss: 3.939194
iter: 2691 | loss: 3.938971
iter: 2692 | loss: 3.938748
iter: 2693 | loss: 3.938525
iter: 2694 | loss: 3.938302
iter: 2695 | loss: 3.938079
iter: 2696 | loss: 3.937856
iter: 2697 | loss: 3.937633
iter: 2698 | loss: 3.937410
iter: 2699 | loss: 3.937187
iter: 2700 | loss: 3.936964
iter: 2701 | loss: 3.936741
iter: 2702 | loss: 3.936518
iter: 2703 | loss: 3.936296
iter: 2704 | loss: 3.936073
iter: 2705 | loss: 3.935850
iter: 2706 | loss: 3.935627
iter: 2707 | loss: 3.935404
iter: 2708 | loss: 3.935181
iter: 2709 | loss: 3.934958
iter: 2710 | loss: 3.934735
iter: 2711 | loss: 3.934512
iter: 2712 | loss: 3.934289
iter: 2713 | loss: 3.934066
iter: 2714 | loss: 3.933843
iter: 2715 | loss: 3.933620
iter: 2716 | loss: 3.933397
iter: 2717 | loss: 3.933174
iter: 2718 | loss: 3.932951
iter: 2719 | loss: 3.932728
iter: 2720 | loss: 3.932505
iter: 2721 | loss: 3.932282
iter: 2722 | loss: 3.932059
iter: 2723 | loss: 3.931836
iter: 2724 | loss: 3.931613
iter: 2725 | loss: 3.931390
iter: 2726 | loss: 3.931167
iter: 2727 | loss: 3.930944
iter: 2728 | loss: 3.930721
iter: 2729 | loss: 3.930498
iter: 2730 | loss: 3.930275
iter: 2731 | loss: 3.930052
iter: 2732 | loss: 3.929829
iter: 2733 | loss: 3.929606
iter: 2734 | loss: 3.929383
iter: 2735 | loss: 3.929160
iter: 2736 | loss: 3.928937
iter: 2737 | loss: 3.928714
iter: 2738 | loss: 3.928491
iter: 2739 | loss: 3.928268
iter: 2740 | loss: 3.928045
iter: 2741 | loss: 3.927822
iter: 2742 | loss: 3.927599
iter: 2743 | loss: 3.927376
iter: 2744 | loss: 3.927153
iter: 2745 | loss: 3.926930
iter: 2746 | loss: 3.926707
iter: 2747 | loss: 3.926484
iter: 2748 | loss: 3.926261
iter: 2749 | loss: 3.926038
iter: 2750 | loss: 3.925815
iter: 2751 | loss: 3.925592
iter: 2752 | loss: 3.925369
iter: 2753 | loss: 3.925146
iter: 2754 | loss: 3.924923
iter: 2755 | loss: 3.924701
iter: 2756 | loss: 3.924478
iter: 2757 | loss: 3.924255
iter: 2758 | loss: 3.924032
iter: 2759 | loss: 3.923809
iter: 2760 | loss: 3.923586
iter: 2761 | loss: 3.923363
iter: 2762 | loss: 3.923140
iter: 2763 | loss: 3.922917
iter: 2764 | loss: 3.922694
iter: 2765 | loss: 3.922471
iter: 2766 | loss: 3.922248
iter: 2767 | loss: 3.922025
iter: 2768 | loss: 3.921802
iter: 2769 | loss: 3.921579
iter: 2770 | loss: 3.921356
iter: 2771 | loss: 3.921133
iter: 2772 | loss: 3.920910
iter: 2773 | loss: 3.920687
iter: 2774 | loss: 3.920464
iter: 2775 | loss: 3.920241
iter: 2776 | loss: 3.920018
iter: 2777 | loss: 3.919795
iter: 2778 | loss: 3.919572
iter: 2779 | loss: 3.919349
iter: 2780 | loss: 3.919126
iter: 2781 | loss: 3.918903
iter: 2782 | loss: 3.918680
iter: 2783 | loss: 3.918457
iter: 2784 | loss: 3.918234
iter: 2785 | loss: 3.918011
iter: 2786 | loss: 3.917788
iter: 2787 | loss: 3.917565
iter: 2788 | loss: 3.917342
iter: 2789 | loss: 3.917119
iter: 2790 | loss: 3.916896
iter: 2791 | loss: 3.916673
iter: 2792 | loss: 3.916450
iter: 2793 | loss: 3.916227
iter: 2794 | loss: 3.916004
iter: 2795 | loss: 3.915781
iter: 2796 | loss: 3.915558
iter: 2797 | loss: 3.915335
iter: 2798 | loss: 3.915112
iter: 2799 | loss: 3.914889
iter: 2800 | loss: 3.914666
iter: 2801 | loss: 3.914443
iter: 2802 | loss: 3.914220
iter: 2803 | loss: 3.913997
iter: 2804 | loss: 3.913774
iter: 2805 | loss: 3.913551
iter: 2806 | loss: 3.913328
iter: 2807 | loss: 3.913106
iter: 2808 | loss: 3.912883
iter: 2809 | loss: 3.912660
iter: 2810 | loss: 3.912437
iter: 2811 | loss: 3.912214
iter: 2812 | loss: 3.911991
iter: 2813 | loss: 3.911768
iter: 2814 | loss: 3.911545
iter: 2815 | loss: 3.911322
iter: 2816 | loss: 3.911099
iter: 2817 | loss: 3.910876
iter: 2818 | loss: 3.910653
iter: 2819 | loss: 3.910430
iter: 2820 | loss: 3.910207
iter: 2821 | loss: 3.909984
iter: 2822 | loss: 3.909761
iter: 2823 | loss: 3.909538
iter: 2824 | loss: 3.909315
iter: 2825 | loss: 3.909092
iter: 2826 | loss: 3.908869
iter: 2827 | loss: 3.908646
iter: 2828 | loss: 3.908423
iter: 2829 | loss: 3.908200
iter: 2830 | loss: 3.907977
iter: 2831 | loss: 3.907754
iter: 2832 | loss: 3.907531
iter: 2833 | loss: 3.907308
iter: 2834 | loss: 3.907085
iter: 2835 | loss: 3.906862
iter: 2836 | loss: 3.906639
iter: 2837 | loss: 3.906416
iter: 2838 | loss: 3.906193
iter: 2839 | loss: 3.905970
iter: 2840 | loss: 3.905747
iter: 2841 | loss: 3.905524
iter: 2842 | loss: 3.905301
iter: 2843 | loss: 3.905078
iter: 2844 | loss: 3.904855
iter: 2845 | loss: 3.904632
iter: 2846 | loss: 3.904409
iter: 2847 | loss: 3.904186
iter: 2848 | loss: 3.903963
iter: 2849 | loss: 3.903740
iter: 2850 | loss: 3.903517
iter: 2851 | loss: 3.903294
iter: 2852 | loss: 3.903071
iter: 2853 | loss: 3.902848
iter: 2854 | loss: 3.902625
iter: 2855 | loss: 3.902402
iter: 2856 | loss: 3.902179
iter: 2857 | loss: 3.901956
iter: 2858 | loss: 3.901733
iter: 2859 | loss: 3.901511
iter: 2860 | loss: 3.901288
iter: 2861 | loss: 3.901065
iter: 2862 | loss: 3.900842
iter: 2863 | loss: 3.900619
iter: 2864 | loss: 3.900396
iter: 2865 | loss: 3.900173
iter: 2866 | loss: 3.899950
iter: 2867 | loss: 3.899727
iter: 2868 | loss: 3.899504
iter: 2869 | loss: 3.899281
iter: 2870 | loss: 3.899058
iter: 2871 | loss: 3.898835
iter: 2872 | loss: 3.898612
iter: 2873 | loss: 3.898389
iter: 2874 | loss: 3.898166
iter: 2875 | loss: 3.897943
iter: 2876 | loss: 3.897720
iter: 2877 | loss: 3.897497
iter: 2878 | loss: 3.897274
iter: 2879 | loss: 3.897051
iter: 2880 | loss: 3.896828
iter: 2881 | loss: 3.896605
iter: 2882 | loss: 3.896382
iter: 2883 | loss: 3.896159
iter: 2884 | loss: 3.895936
iter: 2885 | loss: 3.895713
iter: 2886 | loss: 3.895490
iter: 2887 | loss: 3.895267
iter: 2888 | loss: 3.895044
iter: 2889 | loss: 3.894821
iter: 2890 | loss: 3.894598
iter: 2891 | loss: 3.894375
iter: 2892 | loss: 3.894152
iter: 2893 | loss: 3.893929
iter: 2894 | loss: 3.893706
iter: 2895 | loss: 3.893483
iter: 2896 | loss: 3.893260
iter: 2897 | loss: 3.893037
iter: 2898 | loss: 3.892814
iter: 2899 | loss: 3.892591
iter: 2900 | loss: 3.892368
iter: 2901 | loss: 3.892145
iter: 2902 | loss: 3.891922
iter: 2903 | loss: 3.891699
iter: 2904 | loss: 3.891476
iter: 2905 | loss: 3.891253
iter: 2906 | loss: 3.891030
iter: 2907 | loss: 3.890807
iter: 2908 | loss: 3.890584
iter: 2909 | loss: 3.890361
iter: 2910 | loss: 3.890138
iter: 2911 | loss: 3.889915
iter: 2912 | loss: 3.889693
iter: 2913 | loss: 3.889470
iter: 2914 | loss: 3.889247
iter: 2915 | loss: 3.889024
iter: 2916 | loss: 3.888801
iter: 2917 | loss: 3.888578
iter: 2918 | loss: 3.888355
iter: 2919 | loss: 3.888132
iter: 2920 | loss: 3.887909
iter: 2921 | loss: 3.887686
iter: 2922 | loss: 3.887463
iter: 2923 | loss: 3.887240
iter: 2924 | loss: 3.887017
iter: 2925 | loss: 3.886794
iter: 2926 | loss: 3.886571
iter: 2927 | loss: 3.886348
iter: 2928 | loss: 3.886125
iter: 2929 | loss: 3.885902
iter: 2930 | loss: 3.885679
iter: 2931 | loss: 3.885456
iter: 2932 | loss: 3.885233
iter: 2933 | loss: 3.885010
iter: 2934 | loss: 3.884787
iter: 2935 | loss: 3.884564
iter: 2936 | loss: 3.884341
iter: 2937 | loss: 3.884118
iter: 2938 | loss: 3.883895
iter: 2939 | loss: 3.883672
iter: 2940 | loss: 3.883449
iter: 2941 | loss: 3.883226
iter: 2942 | loss: 3.883003
iter: 2943 | loss: 3.882780
iter: 2944 | loss: 3.882557
iter: 2945 | loss: 3.882334
iter: 2946 | loss: 3.882111
iter: 2947 | loss: 3.881888
iter: 2948 | loss: 3.881665
iter: 2949 | loss: 3.881442
iter: 2950 | loss: 3.881219
iter: 2951 | loss: 3.880996
iter: 2952 | loss: 3.880773
iter: 2953 | loss: 3.880550
iter: 2954 | loss: 3.880327
iter: 2955 | loss: 3.880104
iter: 2956 | loss: 3.879881
iter: 2957 | loss: 3.879658
iter: 2958 | loss: 3.879435
iter: 2959 | loss: 3.879212
iter: 2960 | loss: 3.878989
iter: 2961 | loss: 3.878766
iter: 2962 | loss: 3.878543
iter: 2963 | loss: 3.878320
iter: 2964 | loss: 3.878098
iter: 2965 | loss: 3.877875
iter: 2966 | loss: 3.877652
iter: 2967 | loss: 3.877429
iter: 2968 | loss: 3.877206
iter: 2969 | loss: 3.876983
iter: 2970 | loss: 3.876760
iter: 2971 | loss: 3.876537
iter: 2972 | loss: 3.876314
iter: 2973 | loss: 3.876091
iter: 2974 | loss: 3.875868
iter: 2975 | loss: 3.875645
iter: 2976 | loss: 3.875422
iter: 2977 | loss: 3.875199
iter: 2978 | loss: 3.874976
iter: 2979 | loss: 3.874753
iter: 2980 | loss: 3.874530
iter: 2981 | loss: 3.874307
iter: 2982 | loss: 3.874084
iter: 2983 | loss: 3.873861
iter: 2984 | loss: 3.873638
iter: 2985 | loss: 3.873415
iter: 2986 | loss: 3.873192
iter: 2987 | loss: 3.872969
iter: 2988 | loss: 3.872746
iter: 2989 | loss: 3.872523
iter: 2990 | loss: 3.872300
iter: 2991 | loss: 3.872077
iter: 2992 | loss: 3.871854
iter: 2993 | loss: 3.871631
iter: 2994 | loss: 3.871408
iter: 2995 | loss: 3.871185
iter: 2996 | loss: 3.870962
iter: 2997 | loss: 3.870739
iter: 2998 | loss: 3.870516
iter: 2999 | loss: 3.870293
iter: 3000 | loss: 3.870070
iter: 3001 | loss: 3.869847
iter: 3002 | loss: 3.869624
iter: 3003 | loss: 3.869401
iter: 3004 | loss: 3.869178
iter: 3005 | loss: 3.868955
iter: 3006 | loss: 3.868732
iter: 3007 | loss: 3.868509
iter: 3008 | loss: 3.868286
iter: 3009 | loss: 3.868063
iter: 3010 | loss: 3.867840
iter: 3011 | loss: 3.867617
iter: 3012 | loss: 3.867394
iter: 3013 | loss: 3.867171
iter: 3014 | loss: 3.866948
iter: 3015 | loss: 3.866725
iter: 3016 | loss: 3.866503
iter: 3017 | loss: 3.866280
iter: 3018 | loss: 3.866057
iter: 3019 | loss: 3.865834
iter: 3020 | loss: 3.865611
iter: 3021 | loss: 3.865388
iter: 3022 | loss: 3.865165
iter: 3023 | loss: 3.864942
iter: 3024 | loss: 3.864719
iter: 3025 | loss: 3.864496
iter: 3026 | loss: 3.864273
iter: 3027 | loss: 3.864050
iter: 3028 | loss: 3.863827
iter: 3029 | loss: 3.863604
iter: 3030 | loss: 3.863381
iter: 3031 | loss: 3.863158
iter: 3032 | loss: 3.862935
iter: 3033 | loss: 3.862712
iter: 3034 | loss: 3.862489
iter: 3035 | loss: 3.862266
iter: 3036 | loss: 3.862043
iter: 3037 | loss: 3.861820
iter: 3038 | loss: 3.861597
iter: 3039 | loss: 3.861374
iter: 3040 | loss: 3.861151
iter: 3041 | loss: 3.860928
iter: 3042 | loss: 3.860705
iter: 3043 | loss: 3.860482
iter: 3044 | loss: 3.860259
iter: 3045 | loss: 3.860036
iter: 3046 | loss: 3.859813
iter: 3047 | loss: 3.859590
iter: 3048 | loss: 3.859367
iter: 3049 | loss: 3.859144
iter: 3050 | loss: 3.858921
iter: 3051 | loss: 3.858698
iter: 3052 | loss: 3.858475
iter: 3053 | loss: 3.858252
iter: 3054 | loss: 3.858029
iter: 3055 | loss: 3.857806
iter: 3056 | loss: 3.857583
iter: 3057 | loss: 3.857360
iter: 3058 | loss: 3.857137
iter: 3059 | loss: 3.856914
iter: 3060 | loss: 3.856691
iter: 3061 | loss: 3.856468
iter: 3062 | loss: 3.856245
iter: 3063 | loss: 3.856022
iter: 3064 | loss: 3.855799
iter: 3065 | loss: 3.855576
iter: 3066 | loss: 3.855353
iter: 3067 | loss: 3.855130
iter: 3068 | loss: 3.854908
iter: 3069 | loss: 3.854685
iter: 3070 | loss: 3.854462
iter: 3071 | loss: 3.854239
iter: 3072 | loss: 3.854016
iter: 3073 | loss: 3.853793
iter: 3074 | loss: 3.853570
iter: 3075 | loss: 3.853347
iter: 3076 | loss: 3.853124
iter: 3077 | loss: 3.852901
iter: 3078 | loss: 3.852678
iter: 3079 | loss: 3.852455
iter: 3080 | loss: 3.852232
iter: 3081 | loss: 3.852009
iter: 3082 | loss: 3.851786
iter: 3083 | loss: 3.851563
iter: 3084 | loss: 3.851340
iter: 3085 | loss: 3.851117
iter: 3086 | loss: 3.850894
iter: 3087 | loss: 3.850671
iter: 3088 | loss: 3.850448
iter: 3089 | loss: 3.850225
iter: 3090 | loss: 3.850002
iter: 3091 | loss: 3.849779
iter: 3092 | loss: 3.849556
iter: 3093 | loss: 3.849333
iter: 3094 | loss: 3.849110
iter: 3095 | loss: 3.848887
iter: 3096 | loss: 3.848664
iter: 3097 | loss: 3.848441
iter: 3098 | loss: 3.848218
iter: 3099 | loss: 3.847995
iter: 3100 | loss: 3.847772
iter: 3101 | loss: 3.847549
iter: 3102 | loss: 3.847326
iter: 3103 | loss: 3.847103
iter: 3104 | loss: 3.846880
iter: 3105 | loss: 3.846657
iter: 3106 | loss: 3.846434
iter: 3107 | loss: 3.846211
iter: 3108 | loss: 3.845988
iter: 3109 | loss: 3.845765
iter: 3110 | loss: 3.845542
iter: 3111 | loss: 3.845319
iter: 3112 | loss: 3.845096
iter: 3113 | loss: 3.844873
iter: 3114 | loss: 3.844650
iter: 3115 | loss: 3.844427
iter: 3116 | loss: 3.844204
iter: 3117 | loss: 3.843981
iter: 3118 | loss: 3.843758
iter: 3119 | loss: 3.843535
iter: 3120 | loss: 3.843313
iter: 3121 | loss: 3.843090
iter: 3122 | loss: 3.842867
iter: 3123 | loss: 3.842644
iter: 3124 | loss: 3.842421
iter: 3125 | loss: 3.842198
iter: 3126 | loss: 3.841975
iter: 3127 | loss: 3.841752
iter: 3128 | loss: 3.841529
iter: 3129 | loss: 3.841306
iter: 3130 | loss: 3.841083
iter: 3131 | loss: 3.840860
iter: 3132 | loss: 3.840637
iter: 3133 | loss: 3.840414
iter: 3134 | loss: 3.840191
iter: 3135 | loss: 3.839968
iter: 3136 | loss: 3.839745
iter: 3137 | loss: 3.839522
iter: 3138 | loss: 3.839299
iter: 3139 | loss: 3.839076
iter: 3140 | loss: 3.838853
iter: 3141 | loss: 3.838630
iter: 3142 | loss: 3.838407
iter: 3143 | loss: 3.838184
iter: 3144 | loss: 3.837961
iter: 3145 | loss: 3.837738
iter: 3146 | loss: 3.837515
iter: 3147 | loss: 3.837292
iter: 3148 | loss: 3.837069
iter: 3149 | loss: 3.836846
iter: 3150 | loss: 3.836623
iter: 3151 | loss: 3.836400
iter: 3152 | loss: 3.836177
iter: 3153 | loss: 3.835954
iter: 3154 | loss: 3.835731
iter: 3155 | loss: 3.835508
iter: 3156 | loss: 3.835285
iter: 3157 | loss: 3.835062
iter: 3158 | loss: 3.834839
iter: 3159 | loss: 3.834616
iter: 3160 | loss: 3.834393
iter: 3161 | loss: 3.834170
iter: 3162 | loss: 3.833947
iter: 3163 | loss: 3.833724
iter: 3164 | loss: 3.833501
iter: 3165 | loss: 3.833278
iter: 3166 | loss: 3.833055
iter: 3167 | loss: 3.832832
iter: 3168 | loss: 3.832609
iter: 3169 | loss: 3.832386
iter: 3170 | loss: 3.832163
iter: 3171 | loss: 3.831940
iter: 3172 | loss: 3.831718
iter: 3173 | loss: 3.831495
iter: 3174 | loss: 3.831272
iter: 3175 | loss: 3.831049
iter: 3176 | loss: 3.830826
iter: 3177 | loss: 3.830603
iter: 3178 | loss: 3.830380
iter: 3179 | loss: 3.830157
iter: 3180 | loss: 3.829934
iter: 3181 | loss: 3.829711
iter: 3182 | loss: 3.829488
iter: 3183 | loss: 3.829265
iter: 3184 | loss: 3.829042
iter: 3185 | loss: 3.828819
iter: 3186 | loss: 3.828596
iter: 3187 | loss: 3.828373
iter: 3188 | loss: 3.828150
iter: 3189 | loss: 3.827927
iter: 3190 | loss: 3.827704
iter: 3191 | loss: 3.827481
iter: 3192 | loss: 3.827258
iter: 3193 | loss: 3.827035
iter: 3194 | loss: 3.826812
iter: 3195 | loss: 3.826589
iter: 3196 | loss: 3.826366
iter: 3197 | loss: 3.826143
iter: 3198 | loss: 3.825920
iter: 3199 | loss: 3.825697
iter: 3200 | loss: 3.825474
iter: 3201 | loss: 3.825251
iter: 3202 | loss: 3.825028
iter: 3203 | loss: 3.824805
iter: 3204 | loss: 3.824582
iter: 3205 | loss: 3.824359
iter: 3206 | loss: 3.824136
iter: 3207 | loss: 3.823913
iter: 3208 | loss: 3.823690
iter: 3209 | loss: 3.823467
iter: 3210 | loss: 3.823244
iter: 3211 | loss: 3.823021
iter: 3212 | loss: 3.822798
iter: 3213 | loss: 3.822575
iter: 3214 | loss: 3.822352
iter: 3215 | loss: 3.822129
iter: 3216 | loss: 3.821906
iter: 3217 | loss: 3.821683
iter: 3218 | loss: 3.821460
iter: 3219 | loss: 3.821237
iter: 3220 | loss: 3.821014
iter: 3221 | loss: 3.820791
iter: 3222 | loss: 3.820568
iter: 3223 | loss: 3.820345
iter: 3224 | loss: 3.820123
iter: 3225 | loss: 3.819900
iter: 3226 | loss: 3.819677
iter: 3227 | loss: 3.819454
iter: 3228 | loss: 3.819231
iter: 3229 | loss: 3.819008
iter: 3230 | loss: 3.818785
iter: 3231 | loss: 3.818562
iter: 3232 | loss: 3.818339
iter: 3233 | loss: 3.818116
iter: 3234 | loss: 3.817893
iter: 3235 | loss: 3.817670
iter: 3236 | loss: 3.817447
iter: 3237 | loss: 3.817224
iter: 3238 | loss: 3.817001
iter: 3239 | loss: 3.816778
iter: 3240 | loss: 3.816555
iter: 3241 | loss: 3.816332
iter: 3242 | loss: 3.816109
iter: 3243 | loss: 3.815886
iter: 3244 | loss: 3.815663
iter: 3245 | loss: 3.815440
iter: 3246 | loss: 3.815217
iter: 3247 | loss: 3.814994
iter: 3248 | loss: 3.814771
iter: 3249 | loss: 3.814548
iter: 3250 | loss: 3.814325
iter: 3251 | loss: 3.814102
iter: 3252 | loss: 3.813879
iter: 3253 | loss: 3.813656
iter: 3254 | loss: 3.813433
iter: 3255 | loss: 3.813210
iter: 3256 | loss: 3.812987
iter: 3257 | loss: 3.812764
iter: 3258 | loss: 3.812541
iter: 3259 | loss: 3.812318
iter: 3260 | loss: 3.812095
iter: 3261 | loss: 3.811872
iter: 3262 | loss: 3.811649
iter: 3263 | loss: 3.811426
iter: 3264 | loss: 3.811203
iter: 3265 | loss: 3.810980
iter: 3266 | loss: 3.810757
iter: 3267 | loss: 3.810534
iter: 3268 | loss: 3.810311
iter: 3269 | loss: 3.810088
iter: 3270 | loss: 3.809865
iter: 3271 | loss: 3.809642
iter: 3272 | loss: 3.809419
iter: 3273 | loss: 3.809196
iter: 3274 | loss: 3.808973
iter: 3275 | loss: 3.808750
iter: 3276 | loss: 3.808528
iter: 3277 | loss: 3.808305
iter: 3278 | loss: 3.808082
iter: 3279 | loss: 3.807859
iter: 3280 | loss: 3.807636
iter: 3281 | loss: 3.807413
iter: 3282 | loss: 3.807190
iter: 3283 | loss: 3.806967
iter: 3284 | loss: 3.806744
iter: 3285 | loss: 3.806521
iter: 3286 | loss: 3.806298
iter: 3287 | loss: 3.806075
iter: 3288 | loss: 3.805852
iter: 3289 | loss: 3.805629
iter: 3290 | loss: 3.805406
iter: 3291 | loss: 3.805183
iter: 3292 | loss: 3.804960
iter: 3293 | loss: 3.804737
iter: 3294 | loss: 3.804514
iter: 3295 | loss: 3.804291
iter: 3296 | loss: 3.804068
iter: 3297 | loss: 3.803845
iter: 3298 | loss: 3.803622
iter: 3299 | loss: 3.803399
iter: 3300 | loss: 3.803176
iter: 3301 | loss: 3.802953
iter: 3302 | loss: 3.802730
iter: 3303 | loss: 3.802507
iter: 3304 | loss: 3.802284
iter: 3305 | loss: 3.802061
iter: 3306 | loss: 3.801838
iter: 3307 | loss: 3.801615
iter: 3308 | loss: 3.801392
iter: 3309 | loss: 3.801169
iter: 3310 | loss: 3.800946
iter: 3311 | loss: 3.800723
iter: 3312 | loss: 3.800500
iter: 3313 | loss: 3.800277
iter: 3314 | loss: 3.800054
iter: 3315 | loss: 3.799831
iter: 3316 | loss: 3.799608
iter: 3317 | loss: 3.799385
iter: 3318 | loss: 3.799162
iter: 3319 | loss: 3.798939
iter: 3320 | loss: 3.798716
iter: 3321 | loss: 3.798493
iter: 3322 | loss: 3.798270
iter: 3323 | loss: 3.798047
iter: 3324 | loss: 3.797824
iter: 3325 | loss: 3.797601
iter: 3326 | loss: 3.797378
iter: 3327 | loss: 3.797155
iter: 3328 | loss: 3.796933
iter: 3329 | loss: 3.796710
iter: 3330 | loss: 3.796487
iter: 3331 | loss: 3.796264
iter: 3332 | loss: 3.796041
iter: 3333 | loss: 3.795818
iter: 3334 | loss: 3.795595
iter: 3335 | loss: 3.795372
iter: 3336 | loss: 3.795149
iter: 3337 | loss: 3.794926
iter: 3338 | loss: 3.794703
iter: 3339 | loss: 3.794480
iter: 3340 | loss: 3.794257
iter: 3341 | loss: 3.794034
iter: 3342 | loss: 3.793811
iter: 3343 | loss: 3.793588
iter: 3344 | loss: 3.793365
iter: 3345 | loss: 3.793142
iter: 3346 | loss: 3.792919
iter: 3347 | loss: 3.792696
iter: 3348 | loss: 3.792473
iter: 3349 | loss: 3.792250
iter: 3350 | loss: 3.792027
iter: 3351 | loss: 3.791804
iter: 3352 | loss: 3.791581
iter: 3353 | loss: 3.791358
iter: 3354 | loss: 3.791135
iter: 3355 | loss: 3.790912
iter: 3356 | loss: 3.790689
iter: 3357 | loss: 3.790466
iter: 3358 | loss: 3.790243
iter: 3359 | loss: 3.790020
iter: 3360 | loss: 3.789797
iter: 3361 | loss: 3.789574
iter: 3362 | loss: 3.789351
iter: 3363 | loss: 3.789128
iter: 3364 | loss: 3.788905
iter: 3365 | loss: 3.788682
iter: 3366 | loss: 3.788459
iter: 3367 | loss: 3.788236
iter: 3368 | loss: 3.788013
iter: 3369 | loss: 3.787790
iter: 3370 | loss: 3.787567
iter: 3371 | loss: 3.787344
iter: 3372 | loss: 3.787121
iter: 3373 | loss: 3.786898
iter: 3374 | loss: 3.786675
iter: 3375 | loss: 3.786452
iter: 3376 | loss: 3.786229
iter: 3377 | loss: 3.786006
iter: 3378 | loss: 3.785783
iter: 3379 | loss: 3.785560
iter: 3380 | loss: 3.785338
iter: 3381 | loss: 3.785115
iter: 3382 | loss: 3.784892
iter: 3383 | loss: 3.784669
iter: 3384 | loss: 3.784446
iter: 3385 | loss: 3.784223
iter: 3386 | loss: 3.784000
iter: 3387 | loss: 3.783777
iter: 3388 | loss: 3.783554
iter: 3389 | loss: 3.783331
iter: 3390 | loss: 3.783108
iter: 3391 | loss: 3.782885
iter: 3392 | loss: 3.782662
iter: 3393 | loss: 3.782439
iter: 3394 | loss: 3.782216
iter: 3395 | loss: 3.781993
iter: 3396 | loss: 3.781770
iter: 3397 | loss: 3.781547
iter: 3398 | loss: 3.781324
iter: 3399 | loss: 3.781101
iter: 3400 | loss: 3.780878
iter: 3401 | loss: 3.780655
iter: 3402 | loss: 3.780432
iter: 3403 | loss: 3.780209
iter: 3404 | loss: 3.779986
iter: 3405 | loss: 3.779763
iter: 3406 | loss: 3.779540
iter: 3407 | loss: 3.779317
iter: 3408 | loss: 3.779094
iter: 3409 | loss: 3.778871
iter: 3410 | loss: 3.778648
iter: 3411 | loss: 3.778425
iter: 3412 | loss: 3.778202
iter: 3413 | loss: 3.777979
iter: 3414 | loss: 3.777756
iter: 3415 | loss: 3.777533
iter: 3416 | loss: 3.777310
iter: 3417 | loss: 3.777087
iter: 3418 | loss: 3.776864
iter: 3419 | loss: 3.776641
iter: 3420 | loss: 3.776418
iter: 3421 | loss: 3.776195
iter: 3422 | loss: 3.775972
iter: 3423 | loss: 3.775749
iter: 3424 | loss: 3.775526
iter: 3425 | loss: 3.775303
iter: 3426 | loss: 3.775080
iter: 3427 | loss: 3.774857
iter: 3428 | loss: 3.774634
iter: 3429 | loss: 3.774411
iter: 3430 | loss: 3.774188
iter: 3431 | loss: 3.773965
iter: 3432 | loss: 3.773743
iter: 3433 | loss: 3.773520
iter: 3434 | loss: 3.773297
iter: 3435 | loss: 3.773074
iter: 3436 | loss: 3.772851
iter: 3437 | loss: 3.772628
iter: 3438 | loss: 3.772405
iter: 3439 | loss: 3.772182
iter: 3440 | loss: 3.771959
iter: 3441 | loss: 3.771736
iter: 3442 | loss: 3.771513
iter: 3443 | loss: 3.771290
iter: 3444 | loss: 3.771067
iter: 3445 | loss: 3.770844
iter: 3446 | loss: 3.770621
iter: 3447 | loss: 3.770398
iter: 3448 | loss: 3.770175
iter: 3449 | loss: 3.769952
iter: 3450 | loss: 3.769729
iter: 3451 | loss: 3.769506
iter: 3452 | loss: 3.769283
iter: 3453 | loss: 3.769060
iter: 3454 | loss: 3.768837
iter: 3455 | loss: 3.768614
iter: 3456 | loss: 3.768391
iter: 3457 | loss: 3.768168
iter: 3458 | loss: 3.767945
iter: 3459 | loss: 3.767722
iter: 3460 | loss: 3.767499
iter: 3461 | loss: 3.767276
iter: 3462 | loss: 3.767053
iter: 3463 | loss: 3.766830
iter: 3464 | loss: 3.766607
iter: 3465 | loss: 3.766384
iter: 3466 | loss: 3.766161
iter: 3467 | loss: 3.765938
iter: 3468 | loss: 3.765715
iter: 3469 | loss: 3.765492
iter: 3470 | loss: 3.765269
iter: 3471 | loss: 3.765046
iter: 3472 | loss: 3.764823
iter: 3473 | loss: 3.764600
iter: 3474 | loss: 3.764377
iter: 3475 | loss: 3.764154
iter: 3476 | loss: 3.763931
iter: 3477 | loss: 3.763708
iter: 3478 | loss: 3.763485
iter: 3479 | loss: 3.763262
iter: 3480 | loss: 3.763039
iter: 3481 | loss: 3.762816
iter: 3482 | loss: 3.762593
iter: 3483 | loss: 3.762370
iter: 3484 | loss: 3.762148
iter: 3485 | loss: 3.761925
iter: 3486 | loss: 3.761702
iter: 3487 | loss: 3.761479
iter: 3488 | loss: 3.761256
iter: 3489 | loss: 3.761033
iter: 3490 | loss: 3.760810
iter: 3491 | loss: 3.760587
iter: 3492 | loss: 3.760364
iter: 3493 | loss: 3.760141
iter: 3494 | loss: 3.759918
iter: 3495 | loss: 3.759695
iter: 3496 | loss: 3.759472
iter: 3497 | loss: 3.759249
iter: 3498 | loss: 3.759026
iter: 3499 | loss: 3.758803
iter: 3500 | loss: 3.758580
iter: 3501 | loss: 3.758357
iter: 3502 | loss: 3.758134
iter: 3503 | loss: 3.757911
iter: 3504 | loss: 3.757688
iter: 3505 | loss: 3.757465
iter: 3506 | loss: 3.757242
iter: 3507 | loss: 3.757019
iter: 3508 | loss: 3.756796
iter: 3509 | loss: 3.756573
iter: 3510 | loss: 3.756350
iter: 3511 | loss: 3.756127
iter: 3512 | loss: 3.755904
iter: 3513 | loss: 3.755681
iter: 3514 | loss: 3.755458
iter: 3515 | loss: 3.755235
iter: 3516 | loss: 3.755012
iter: 3517 | loss: 3.754789
iter: 3518 | loss: 3.754566
iter: 3519 | loss: 3.754343
iter: 3520 | loss: 3.754120
iter: 3521 | loss: 3.753897
iter: 3522 | loss: 3.753674
iter: 3523 | loss: 3.753451
iter: 3524 | loss: 3.753228
iter: 3525 | loss: 3.753005
iter: 3526 | loss: 3.752782
iter: 3527 | loss: 3.752559
iter: 3528 | loss: 3.752336
iter: 3529 | loss: 3.752113
iter: 3530 | loss: 3.751890
iter: 3531 | loss: 3.751667
iter: 3532 | loss: 3.751444
iter: 3533 | loss: 3.751221
iter: 3534 | loss: 3.750998
iter: 3535 | loss: 3.750775
iter: 3536 | loss: 3.750553
iter: 3537 | loss: 3.750330
iter: 3538 | loss: 3.750107
iter: 3539 | loss: 3.749884
iter: 3540 | loss: 3.749661
iter: 3541 | loss: 3.749438
iter: 3542 | loss: 3.749215
iter: 3543 | loss: 3.748992
iter: 3544 | loss: 3.748769
iter: 3545 | loss: 3.748546
iter: 3546 | loss: 3.748323
iter: 3547 | loss: 3.748100
iter: 3548 | loss: 3.747877
iter: 3549 | loss: 3.747654
iter: 3550 | loss: 3.747431
iter: 3551 | loss: 3.747208
iter: 3552 | loss: 3.746985
iter: 3553 | loss: 3.746762
iter: 3554 | loss: 3.746539
iter: 3555 | loss: 3.746316
iter: 3556 | loss: 3.746093
iter: 3557 | loss: 3.745870
iter: 3558 | loss: 3.745647
iter: 3559 | loss: 3.745424
iter: 3560 | loss: 3.745201
iter: 3561 | loss: 3.744978
iter: 3562 | loss: 3.744755
iter: 3563 | loss: 3.744532
iter: 3564 | loss: 3.744309
iter: 3565 | loss: 3.744086
iter: 3566 | loss: 3.743863
iter: 3567 | loss: 3.743640
iter: 3568 | loss: 3.743417
iter: 3569 | loss: 3.743194
iter: 3570 | loss: 3.742971
iter: 3571 | loss: 3.742748
iter: 3572 | loss: 3.742525
iter: 3573 | loss: 3.742302
iter: 3574 | loss: 3.742079
iter: 3575 | loss: 3.741856
iter: 3576 | loss: 3.741633
iter: 3577 | loss: 3.741410
iter: 3578 | loss: 3.741187
iter: 3579 | loss: 3.740964
iter: 3580 | loss: 3.740741
iter: 3581 | loss: 3.740518
iter: 3582 | loss: 3.740295
iter: 3583 | loss: 3.740072
iter: 3584 | loss: 3.739849
iter: 3585 | loss: 3.739626
iter: 3586 | loss: 3.739403
iter: 3587 | loss: 3.739180
iter: 3588 | loss: 3.738957
iter: 3589 | loss: 3.738735
iter: 3590 | loss: 3.738512
iter: 3591 | loss: 3.738289
iter: 3592 | loss: 3.738066
iter: 3593 | loss: 3.737843
iter: 3594 | loss: 3.737620
iter: 3595 | loss: 3.737397
iter: 3596 | loss: 3.737174
iter: 3597 | loss: 3.736951
iter: 3598 | loss: 3.736728
iter: 3599 | loss: 3.736505
iter: 3600 | loss: 3.736282
iter: 3601 | loss: 3.736059
iter: 3602 | loss: 3.735836
iter: 3603 | loss: 3.735613
iter: 3604 | loss: 3.735390
iter: 3605 | loss: 3.735167
iter: 3606 | loss: 3.734944
iter: 3607 | loss: 3.734721
iter: 3608 | loss: 3.734498
iter: 3609 | loss: 3.734275
iter
- ...TRUNCATED BY DUNE...
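(For context on the trace above: it is apparently printed by one of the owl-opt test examples invoked earlier in the log, which runs a first-order optimizer against a fixed objective and logs the loss once per iteration until dune truncates the output. The example's source is not part of this log, so what follows is only a minimal OCaml sketch of the kind of loop that produces such a trace; the objective, learning rate, and all names here are illustrative assumptions, not owl-opt's API.)

(* Minimal sketch (not owl-opt's actual example): plain gradient
   descent on a toy 1-D objective, logging in the same format as
   the trace above. Objective and learning rate are assumed. *)
let () =
  let f x = (x -. 3.0) ** 2.0 in    (* toy objective *)
  let f' x = 2.0 *. (x -. 3.0) in   (* its analytic gradient *)
  let eta = 1e-4 in                 (* assumed learning rate *)
  let x = ref 10.0 in
  for iter = 0 to 3609 do
    Printf.printf "iter: %d | loss: %f\n" iter (f !x);
    x := !x -. (eta *. f' !x)       (* gradient step *)
  done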
- (cd _build/default/examples/opt && ./adam.exe)
-
step: 0 | loss: 3.368183616
step: 10 | loss: 3.363613578
step: 20 | loss: 3.358539297
step: 30 | loss: 3.353469146
[... steps 40-2030 elided: the loss decreases smoothly throughout, passing 3.119422004 at step 500, 2.876571648 at step 1000, 2.639382945 at step 1500, and 2.408443282 at step 2000 ...]
step: 2040 | loss: 2.390256820
step: 2050 | loss: 2.385717109
step: 2060 | loss: 2.381180169
step: 2070 | loss: 2.376646003
step: 2080 | loss: 2.372114618
step: 2090 | loss: 2.367586016
step: 2100 | loss: 2.363060204
step: 2110 | loss: 2.358537184
step: 2120 | loss: 2.354016962
step: 2130 | loss: 2.349499542
step: 2140 | loss: 2.344984928
step: 2150 | loss: 2.340473124
step: 2160 | loss: 2.335964135
step: 2170 | loss: 2.331457965
step: 2180 | loss: 2.326954618
step: 2190 | loss: 2.322454098
step: 2200 | loss: 2.317956410
step: 2210 | loss: 2.313461556
step: 2220 | loss: 2.308969543
step: 2230 | loss: 2.304480373
step: 2240 | loss: 2.299994050
step: 2250 | loss: 2.295510579
step: 2260 | loss: 2.291029963
step: 2270 | loss: 2.286552206
step: 2280 | loss: 2.282077312
step: 2290 | loss: 2.277605285
step: 2300 | loss: 2.273136128
step: 2310 | loss: 2.268669845
step: 2320 | loss: 2.264206440
step: 2330 | loss: 2.259745916
step: 2340 | loss: 2.255288277
step: 2350 | loss: 2.250833526
step: 2360 | loss: 2.246381667
step: 2370 | loss: 2.241932704
step: 2380 | loss: 2.237486638
step: 2390 | loss: 2.233043475
step: 2400 | loss: 2.228603216
step: 2410 | loss: 2.224165866
step: 2420 | loss: 2.219731427
step: 2430 | loss: 2.215299902
step: 2440 | loss: 2.210871295
step: 2450 | loss: 2.206445608
step: 2460 | loss: 2.202022845
step: 2470 | loss: 2.197603007
step: 2480 | loss: 2.193186099
step: 2490 | loss: 2.188772122
step: 2500 | loss: 2.184361080
step: 2510 | loss: 2.179952975
step: 2520 | loss: 2.175547809
step: 2530 | loss: 2.171145585
step: 2540 | loss: 2.166746305
step: 2550 | loss: 2.162349973
step: 2560 | loss: 2.157956589
step: 2570 | loss: 2.153566157
step: 2580 | loss: 2.149178678
step: 2590 | loss: 2.144794154
step: 2600 | loss: 2.140412588
step: 2610 | loss: 2.136033982
step: 2620 | loss: 2.131658337
step: 2630 | loss: 2.127285655
step: 2640 | loss: 2.122915938
step: 2650 | loss: 2.118549188
step: 2660 | loss: 2.114185406
step: 2670 | loss: 2.109824593
step: 2680 | loss: 2.105466752
step: 2690 | loss: 2.101111883
step: 2700 | loss: 2.096759987
step: 2710 | loss: 2.092411067
step: 2720 | loss: 2.088065123
step: 2730 | loss: 2.083722155
step: 2740 | loss: 2.079382166
step: 2750 | loss: 2.075045156
step: 2760 | loss: 2.070711125
step: 2770 | loss: 2.066380075
step: 2780 | loss: 2.062052006
step: 2790 | loss: 2.057726919
step: 2800 | loss: 2.053404813
step: 2810 | loss: 2.049085690
step: 2820 | loss: 2.044769550
step: 2830 | loss: 2.040456393
step: 2840 | loss: 2.036146219
step: 2850 | loss: 2.031839027
step: 2860 | loss: 2.027534819
step: 2870 | loss: 2.023233593
step: 2880 | loss: 2.018935350
step: 2890 | loss: 2.014640090
step: 2900 | loss: 2.010347810
step: 2910 | loss: 2.006058513
step: 2920 | loss: 2.001772195
step: 2930 | loss: 1.997488858
step: 2940 | loss: 1.993208500
step: 2950 | loss: 1.988931120
step: 2960 | loss: 1.984656717
step: 2970 | loss: 1.980385290
step: 2980 | loss: 1.976116838
step: 2990 | loss: 1.971851360
step: 3000 | loss: 1.967588854
step: 3010 | loss: 1.963329318
step: 3020 | loss: 1.959072752
step: 3030 | loss: 1.954819153
step: 3040 | loss: 1.950568520
step: 3050 | loss: 1.946320851
step: 3060 | loss: 1.942076143
step: 3070 | loss: 1.937834395
step: 3080 | loss: 1.933595605
step: 3090 | loss: 1.929359770
step: 3100 | loss: 1.925126887
step: 3110 | loss: 1.920896955
step: 3120 | loss: 1.916669971
step: 3130 | loss: 1.912445932
step: 3140 | loss: 1.908224836
step: 3150 | loss: 1.904006678
step: 3160 | loss: 1.899791458
step: 3170 | loss: 1.895579170
step: 3180 | loss: 1.891369813
step: 3190 | loss: 1.887163383
step: 3200 | loss: 1.882959876
step: 3210 | loss: 1.878759289
step: 3220 | loss: 1.874561619
step: 3230 | loss: 1.870366861
step: 3240 | loss: 1.866175013
step: 3250 | loss: 1.861986069
step: 3260 | loss: 1.857800027
step: 3270 | loss: 1.853616881
step: 3280 | loss: 1.849436628
step: 3290 | loss: 1.845259264
step: 3300 | loss: 1.841084784
step: 3310 | loss: 1.836913184
step: 3320 | loss: 1.832744458
step: 3330 | loss: 1.828578603
step: 3340 | loss: 1.824415614
step: 3350 | loss: 1.820255485
step: 3360 | loss: 1.816098213
step: 3370 | loss: 1.811943791
step: 3380 | loss: 1.807792214
step: 3390 | loss: 1.803643478
step: 3400 | loss: 1.799497578
step: 3410 | loss: 1.795354506
step: 3420 | loss: 1.791214259
step: 3430 | loss: 1.787076831
step: 3440 | loss: 1.782942215
step: 3450 | loss: 1.778810406
step: 3460 | loss: 1.774681399
step: 3470 | loss: 1.770555187
step: 3480 | loss: 1.766431764
step: 3490 | loss: 1.762311124
step: 3500 | loss: 1.758193261
step: 3510 | loss: 1.754078169
step: 3520 | loss: 1.749965842
step: 3530 | loss: 1.745856272
step: 3540 | loss: 1.741749453
step: 3550 | loss: 1.737645380
step: 3560 | loss: 1.733544044
step: 3570 | loss: 1.729445440
step: 3580 | loss: 1.725349561
step: 3590 | loss: 1.721256400
step: 3600 | loss: 1.717165949
step: 3610 | loss: 1.713078202
step: 3620 | loss: 1.708993152
step: 3630 | loss: 1.704910791
step: 3640 | loss: 1.700831113
step: 3650 | loss: 1.696754110
step: 3660 | loss: 1.692679774
step: 3670 | loss: 1.688608099
step: 3680 | loss: 1.684539077
step: 3690 | loss: 1.680472701
step: 3700 | loss: 1.676408962
step: 3710 | loss: 1.672347854
step: 3720 | loss: 1.668289368
step: 3730 | loss: 1.664233497
step: 3740 | loss: 1.660180234
step: 3750 | loss: 1.656129570
step: 3760 | loss: 1.652081497
step: 3770 | loss: 1.648036008
step: 3780 | loss: 1.643993094
step: 3790 | loss: 1.639952749
step: 3800 | loss: 1.635914963
step: 3810 | loss: 1.631879728
step: 3820 | loss: 1.627847037
step: 3830 | loss: 1.623816882
step: 3840 | loss: 1.619789254
step: 3850 | loss: 1.615764144
step: 3860 | loss: 1.611741546
step: 3870 | loss: 1.607721450
step: 3880 | loss: 1.603703849
step: 3890 | loss: 1.599688733
step: 3900 | loss: 1.595676095
step: 3910 | loss: 1.591665927
step: 3920 | loss: 1.587658219
step: 3930 | loss: 1.583652964
step: 3940 | loss: 1.579650153
step: 3950 | loss: 1.575649777
step: 3960 | loss: 1.571651829
step: 3970 | loss: 1.567656299
step: 3980 | loss: 1.563663180
step: 3990 | loss: 1.559672462
step: 4000 | loss: 1.555684138
step: 4010 | loss: 1.551698198
step: 4020 | loss: 1.547714634
step: 4030 | loss: 1.543733438
step: 4040 | loss: 1.539754600
step: 4050 | loss: 1.535778114
step: 4060 | loss: 1.531803969
step: 4070 | loss: 1.527832157
step: 4080 | loss: 1.523862670
step: 4090 | loss: 1.519895498
step: 4100 | loss: 1.515930635
step: 4110 | loss: 1.511968070
step: 4120 | loss: 1.508007795
step: 4130 | loss: 1.504049802
step: 4140 | loss: 1.500094081
step: 4150 | loss: 1.496140625
step: 4160 | loss: 1.492189424
step: 4170 | loss: 1.488240471
step: 4180 | loss: 1.484293755
step: 4190 | loss: 1.480349270
step: 4200 | loss: 1.476407005
step: 4210 | loss: 1.472466952
step: 4220 | loss: 1.468529103
step: 4230 | loss: 1.464593449
step: 4240 | loss: 1.460659982
step: 4250 | loss: 1.456728691
step: 4260 | loss: 1.452799570
step: 4270 | loss: 1.448872609
step: 4280 | loss: 1.444947799
step: 4290 | loss: 1.441025133
step: 4300 | loss: 1.437104600
step: 4310 | loss: 1.433186192
step: 4320 | loss: 1.429269901
step: 4330 | loss: 1.425355718
step: 4340 | loss: 1.421443634
step: 4350 | loss: 1.417533640
step: 4360 | loss: 1.413625728
step: 4370 | loss: 1.409719889
step: 4380 | loss: 1.405816113
step: 4390 | loss: 1.401914393
step: 4400 | loss: 1.398014719
step: 4410 | loss: 1.394117083
step: 4420 | loss: 1.390221475
step: 4430 | loss: 1.386327887
step: 4440 | loss: 1.382436310
step: 4450 | loss: 1.378546735
step: 4460 | loss: 1.374659153
step: 4470 | loss: 1.370773556
step: 4480 | loss: 1.366889933
step: 4490 | loss: 1.363008277
step: 4500 | loss: 1.359128578
step: 4510 | loss: 1.355250827
step: 4520 | loss: 1.351375016
step: 4530 | loss: 1.347501135
step: 4540 | loss: 1.343629175
step: 4550 | loss: 1.339759127
step: 4560 | loss: 1.335890982
step: 4570 | loss: 1.332024731
step: 4580 | loss: 1.328160364
step: 4590 | loss: 1.324297873
step: 4600 | loss: 1.320437248
step: 4610 | loss: 1.316578481
step: 4620 | loss: 1.312721561
step: 4630 | loss: 1.308866479
step: 4640 | loss: 1.305013226
step: 4650 | loss: 1.301161794
step: 4660 | loss: 1.297312172
step: 4670 | loss: 1.293464351
step: 4680 | loss: 1.289618321
step: 4690 | loss: 1.285774074
step: 4700 | loss: 1.281931600
step: 4710 | loss: 1.278090890
step: 4720 | loss: 1.274251933
step: 4730 | loss: 1.270414720
step: 4740 | loss: 1.266579243
step: 4750 | loss: 1.262745490
step: 4760 | loss: 1.258913454
step: 4770 | loss: 1.255083123
step: 4780 | loss: 1.251254490
step: 4790 | loss: 1.247427543
step: 4800 | loss: 1.243602273
step: 4810 | loss: 1.239778672
step: 4820 | loss: 1.235956728
step: 4830 | loss: 1.232136433
step: 4840 | loss: 1.228317777
step: 4850 | loss: 1.224500749
step: 4860 | loss: 1.220685342
step: 4870 | loss: 1.216871544
step: 4880 | loss: 1.213059346
step: 4890 | loss: 1.209248739
step: 4900 | loss: 1.205439713
step: 4910 | loss: 1.201632258
step: 4920 | loss: 1.197826365
step: 4930 | loss: 1.194022023
step: 4940 | loss: 1.190219225
step: 4950 | loss: 1.186417959
step: 4960 | loss: 1.182618217
step: 4970 | loss: 1.178819988
step: 4980 | loss: 1.175023264
step: 4990 | loss: 1.171228035
step: 5000 | loss: 1.167434292
step: 5010 | loss: 1.163642025
step: 5020 | loss: 1.159851224
step: 5030 | loss: 1.156061882
step: 5040 | loss: 1.152273988
step: 5050 | loss: 1.148487533
step: 5060 | loss: 1.144702509
step: 5070 | loss: 1.140918905
step: 5080 | loss: 1.137136714
step: 5090 | loss: 1.133355926
step: 5100 | loss: 1.129576533
step: 5110 | loss: 1.125798525
step: 5120 | loss: 1.122021895
step: 5130 | loss: 1.118246633
step: 5140 | loss: 1.114472732
step: 5150 | loss: 1.110700182
step: 5160 | loss: 1.106928975
step: 5170 | loss: 1.103159104
step: 5180 | loss: 1.099390560
step: 5190 | loss: 1.095623335
step: 5200 | loss: 1.091857422
step: 5210 | loss: 1.088092812
step: 5220 | loss: 1.084329499
step: 5230 | loss: 1.080567474
step: 5240 | loss: 1.076806731
step: 5250 | loss: 1.073047262
step: 5260 | loss: 1.069289060
step: 5270 | loss: 1.065532118
step: 5280 | loss: 1.061776430
step: 5290 | loss: 1.058021989
step: 5300 | loss: 1.054268788
step: 5310 | loss: 1.050516822
step: 5320 | loss: 1.046766084
step: 5330 | loss: 1.043016568
step: 5340 | loss: 1.039268268
step: 5350 | loss: 1.035521180
step: 5360 | loss: 1.031775297
step: 5370 | loss: 1.028030614
step: 5380 | loss: 1.024287127
step: 5390 | loss: 1.020544830
step: 5400 | loss: 1.016803720
step: 5410 | loss: 1.013063790
step: 5420 | loss: 1.009325038
step: 5430 | loss: 1.005587459
step: 5440 | loss: 1.001851049
step: 5450 | loss: 0.998115805
step: 5460 | loss: 0.994381723
step: 5470 | loss: 0.990648801
step: 5480 | loss: 0.986917034
step: 5490 | loss: 0.983186422
step: 5500 | loss: 0.979456960
step: 5510 | loss: 0.975728646
step: 5520 | loss: 0.972001479
step: 5530 | loss: 0.968275457
step: 5540 | loss: 0.964550578
step: 5550 | loss: 0.960826841
step: 5560 | loss: 0.957104244
step: 5570 | loss: 0.953382787
step: 5580 | loss: 0.949662468
step: 5590 | loss: 0.945943289
step: 5600 | loss: 0.942225247
step: 5610 | loss: 0.938508344
step: 5620 | loss: 0.934792579
step: 5630 | loss: 0.931077954
step: 5640 | loss: 0.927364468
step: 5650 | loss: 0.923652123
step: 5660 | loss: 0.919940920
step: 5670 | loss: 0.916230861
step: 5680 | loss: 0.912521947
step: 5690 | loss: 0.908814180
step: 5700 | loss: 0.905107563
step: 5710 | loss: 0.901402098
step: 5720 | loss: 0.897697787
step: 5730 | loss: 0.893994634
step: 5740 | loss: 0.890292641
step: 5750 | loss: 0.886591812
step: 5760 | loss: 0.882892152
step: 5770 | loss: 0.879193663
step: 5780 | loss: 0.875496349
step: 5790 | loss: 0.871800216
step: 5800 | loss: 0.868105268
step: 5810 | loss: 0.864411509
step: 5820 | loss: 0.860718946
step: 5830 | loss: 0.857027582
step: 5840 | loss: 0.853337424
step: 5850 | loss: 0.849648477
step: 5860 | loss: 0.845960748
step: 5870 | loss: 0.842274243
step: 5880 | loss: 0.838588968
step: 5890 | loss: 0.834904930
step: 5900 | loss: 0.831222136
step: 5910 | loss: 0.827540593
step: 5920 | loss: 0.823860309
step: 5930 | loss: 0.820181291
step: 5940 | loss: 0.816503547
step: 5950 | loss: 0.812827086
step: 5960 | loss: 0.809151915
step: 5970 | loss: 0.805478044
step: 5980 | loss: 0.801805481
step: 5990 | loss: 0.798134234
step: 6000 | loss: 0.794464314
step: 6010 | loss: 0.790795730
step: 6020 | loss: 0.787128491
step: 6030 | loss: 0.783462608
step: 6040 | loss: 0.779798090
step: 6050 | loss: 0.776134948
step: 6060 | loss: 0.772473193
step: 6070 | loss: 0.768812834
step: 6080 | loss: 0.765153884
step: 6090 | loss: 0.761496354
step: 6100 | loss: 0.757840255
step: 6110 | loss: 0.754185598
step: 6120 | loss: 0.750532397
step: 6130 | loss: 0.746880662
step: 6140 | loss: 0.743230406
step: 6150 | loss: 0.739581642
step: 6160 | loss: 0.735934383
step: 6170 | loss: 0.732288641
step: 6180 | loss: 0.728644431
step: 6190 | loss: 0.725001764
step: 6200 | loss: 0.721360656
step: 6210 | loss: 0.717721120
step: 6220 | loss: 0.714083171
step: 6230 | loss: 0.710446822
step: 6240 | loss: 0.706812089
step: 6250 | loss: 0.703178986
step: 6260 | loss: 0.699547528
step: 6270 | loss: 0.695917732
step: 6280 | loss: 0.692289612
step: 6290 | loss: 0.688663184
step: 6300 | loss: 0.685038464
step: 6310 | loss: 0.681415470
step: 6320 | loss: 0.677794216
step: 6330 | loss: 0.674174721
step: 6340 | loss: 0.670557001
step: 6350 | loss: 0.666941074
step: 6360 | loss: 0.663326957
step: 6370 | loss: 0.659714668
step: 6380 | loss: 0.656104225
step: 6390 | loss: 0.652495647
step: 6400 | loss: 0.648888952
step: 6410 | loss: 0.645284160
step: 6420 | loss: 0.641681288
step: 6430 | loss: 0.638080358
step: 6440 | loss: 0.634481389
step: 6450 | loss: 0.630884400
step: 6460 | loss: 0.627289412
step: 6470 | loss: 0.623696446
step: 6480 | loss: 0.620105522
step: 6490 | loss: 0.616516663
step: 6500 | loss: 0.612929888
step: 6510 | loss: 0.609345221
step: 6520 | loss: 0.605762683
step: 6530 | loss: 0.602182296
step: 6540 | loss: 0.598604084
step: 6550 | loss: 0.595028069
step: 6560 | loss: 0.591454275
step: 6570 | loss: 0.587882725
step: 6580 | loss: 0.584313443
step: 6590 | loss: 0.580746455
step: 6600 | loss: 0.577181784
step: 6610 | loss: 0.573619455
step: 6620 | loss: 0.570059494
step: 6630 | loss: 0.566501926
step: 6640 | loss: 0.562946778
step: 6650 | loss: 0.559394076
step: 6660 | loss: 0.555843847
step: 6670 | loss: 0.552296117
step: 6680 | loss: 0.548750914
step: 6690 | loss: 0.545208266
step: 6700 | loss: 0.541668202
step: 6710 | loss: 0.538130749
step: 6720 | loss: 0.534595937
step: 6730 | loss: 0.531063795
step: 6740 | loss: 0.527534352
step: 6750 | loss: 0.524007639
step: 6760 | loss: 0.520483687
step: 6770 | loss: 0.516962525
step: 6780 | loss: 0.513444186
step: 6790 | loss: 0.509928700
step: 6800 | loss: 0.506416101
step: 6810 | loss: 0.502906420
step: 6820 | loss: 0.499399690
step: 6830 | loss: 0.495895945
step: 6840 | loss: 0.492395218
step: 6850 | loss: 0.488897543
step: 6860 | loss: 0.485402955
step: 6870 | loss: 0.481911488
step: 6880 | loss: 0.478423179
step: 6890 | loss: 0.474938062
step: 6900 | loss: 0.471456174
step: 6910 | loss: 0.467977551
step: 6920 | loss: 0.464502230
step: 6930 | loss: 0.461030250
step: 6940 | loss: 0.457561646
step: 6950 | loss: 0.454096459
step: 6960 | loss: 0.450634726
step: 6970 | loss: 0.447176486
step: 6980 | loss: 0.443721780
step: 6990 | loss: 0.440270646
step: 7000 | loss: 0.436823126
step: 7010 | loss: 0.433379259
step: 7020 | loss: 0.429939087
step: 7030 | loss: 0.426502652
step: 7040 | loss: 0.423069996
step: 7050 | loss: 0.419641160
step: 7060 | loss: 0.416216188
step: 7070 | loss: 0.412795122
step: 7080 | loss: 0.409378006
step: 7090 | loss: 0.405964884
step: 7100 | loss: 0.402555801
step: 7110 | loss: 0.399150799
step: 7120 | loss: 0.395749926
step: 7130 | loss: 0.392353225
step: 7140 | loss: 0.388960742
step: 7150 | loss: 0.385572523
step: 7160 | loss: 0.382188614
step: 7170 | loss: 0.378809062
step: 7180 | loss: 0.375433913
step: 7190 | loss: 0.372063214
step: 7200 | loss: 0.368697013
step: 7210 | loss: 0.365335357
step: 7220 | loss: 0.361978292
step: 7230 | loss: 0.358625868
step: 7240 | loss: 0.355278132
step: 7250 | loss: 0.351935131
step: 7260 | loss: 0.348596915
step: 7270 | loss: 0.345263531
step: 7280 | loss: 0.341935028
step: 7290 | loss: 0.338611454
step: 7300 | loss: 0.335292857
step: 7310 | loss: 0.331979285
step: 7320 | loss: 0.328670786
step: 7330 | loss: 0.325367409
step: 7340 | loss: 0.322069202
step: 7350 | loss: 0.318776211
step: 7360 | loss: 0.315488485
step: 7370 | loss: 0.312206071
step: 7380 | loss: 0.308929015
step: 7390 | loss: 0.305657364
step: 7400 | loss: 0.302391164
step: 7410 | loss: 0.299130461
step: 7420 | loss: 0.295875299
step: 7430 | loss: 0.292625722
step: 7440 | loss: 0.289381776
step: 7450 | loss: 0.286143502
step: 7460 | loss: 0.282910943
step: 7470 | loss: 0.279684140
step: 7480 | loss: 0.276463134
step: 7490 | loss: 0.273247964
step: 7500 | loss: 0.270038669
step: 7510 | loss: 0.266835286
step: 7520 | loss: 0.263637850
step: 7530 | loss: 0.260446397
step: 7540 | loss: 0.257260960
step: 7550 | loss: 0.254081571
step: 7560 | loss: 0.250908260
step: 7570 | loss: 0.247741055
step: 7580 | loss: 0.244579983
step: 7590 | loss: 0.241425070
step: 7600 | loss: 0.238276338
step: 7610 | loss: 0.235133808
step: 7620 | loss: 0.231997499
step: 7630 | loss: 0.228867428
step: 7640 | loss: 0.225743608
step: 7650 | loss: 0.222626052
step: 7660 | loss: 0.219514768
step: 7670 | loss: 0.216409763
step: 7680 | loss: 0.213311040
step: 7690 | loss: 0.210218601
step: 7700 | loss: 0.207132442
step: 7710 | loss: 0.204052558
step: 7720 | loss: 0.200978942
step: 7730 | loss: 0.197911580
step: 7740 | loss: 0.194850458
step: 7750 | loss: 0.191795556
step: 7760 | loss: 0.188746853
step: 7770 | loss: 0.185704322
step: 7780 | loss: 0.182667933
step: 7790 | loss: 0.179637653
step: 7800 | loss: 0.176613443
step: 7810 | loss: 0.173595263
step: 7820 | loss: 0.170583066
step: 7830 | loss: 0.167576803
step: 7840 | loss: 0.164576420
step: 7850 | loss: 0.161581859
step: 7860 | loss: 0.158593058
step: 7870 | loss: 0.155609950
step: 7880 | loss: 0.152632466
step: 7890 | loss: 0.149660530
step: 7900 | loss: 0.146694064
step: 7910 | loss: 0.143732986
step: 7920 | loss: 0.140777209
step: 7930 | loss: 0.137826642
step: 7940 | loss: 0.134881191
step: 7950 | loss: 0.131940758
step: 7960 | loss: 0.129005243
step: 7970 | loss: 0.126074539
step: 7980 | loss: 0.123148541
step: 7990 | loss: 0.120227136
step: 8000 | loss: 0.117310211
step: 8010 | loss: 0.114397651
step: 8020 | loss: 0.111489338
step: 8030 | loss: 0.108585151
step: 8040 | loss: 0.105684969
step: 8050 | loss: 0.102788669
step: 8060 | loss: 0.099896127
step: 8070 | loss: 0.097007218
step: 8080 | loss: 0.094121819
step: 8090 | loss: 0.091239804
step: 8100 | loss: 0.088361050
step: 8110 | loss: 0.085485434
step: 8120 | loss: 0.082612834
step: 8130 | loss: 0.079743131
step: 8140 | loss: 0.076876206
step: 8150 | loss: 0.074011946
step: 8160 | loss: 0.071150238
step: 8170 | loss: 0.068290974
step: 8180 | loss: 0.065434049
step: 8190 | loss: 0.062579363
step: 8200 | loss: 0.059726821
step: 8210 | loss: 0.056876330
step: 8220 | loss: 0.054027805
step: 8230 | loss: 0.051181166
step: 8240 | loss: 0.048336337
step: 8250 | loss: 0.045493248
step: 8260 | loss: 0.042651836
step: 8270 | loss: 0.039812043
step: 8280 | loss: 0.036973815
step: 8290 | loss: 0.034137107
step: 8300 | loss: 0.031301877
step: 8310 | loss: 0.028468087
step: 8320 | loss: 0.025635706
step: 8330 | loss: 0.022804707
step: 8340 | loss: 0.019975065
step: 8350 | loss: 0.017146759
step: 8360 | loss: 0.014319772
step: 8370 | loss: 0.011494087
step: 8380 | loss: 0.008669689
step: 8390 | loss: 0.005846564
step: 8400 | loss: 0.003024697
- final loss: 0.000768
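The trace above is the package's Adam example converging from a loss of
about 3.37 at step 0 to a final loss of 0.000768. As a reference for the
update rule behind those "step | loss" lines, here is a minimal,
self-contained OCaml sketch of Adam on a toy quadratic loss. It uses only
Stdlib; the loss function, the hyper-parameters (lr = 1e-2) and all names
are illustrative assumptions, not owl-opt's actual API (the real adam.exe
is built on Owl's algorithmic differentiation).

    (* Minimal Adam sketch on a toy quadratic loss; illustrative only,
       not the owl-opt API. *)
    let beta1 = 0.9   (* decay rate for the first-moment estimate  *)
    let beta2 = 0.999 (* decay rate for the second-moment estimate *)
    let eps = 1e-8
    let lr = 1e-2     (* learning rate, an assumed value *)

    (* Toy loss 0.5 * ||w - target||^2 and its gradient. *)
    let loss target w =
      Array.fold_left ( +. ) 0.
        (Array.mapi (fun i wi -> 0.5 *. ((wi -. target.(i)) ** 2.)) w)

    let grad target w = Array.mapi (fun i wi -> wi -. target.(i)) w

    let () =
      let target = [| 1.0; -2.0; 0.5 |] in
      let w = Array.make 3 0.0 in
      let m = Array.make 3 0.0 and v = Array.make 3 0.0 in
      for t = 1 to 2000 do
        let g = grad target w in
        Array.iteri
          (fun i gi ->
            m.(i) <- (beta1 *. m.(i)) +. ((1. -. beta1) *. gi);
            v.(i) <- (beta2 *. v.(i)) +. ((1. -. beta2) *. gi *. gi);
            (* bias-corrected moment estimates *)
            let mh = m.(i) /. (1. -. (beta1 ** float_of_int t)) in
            let vh = v.(i) /. (1. -. (beta2 ** float_of_int t)) in
            w.(i) <- w.(i) -. (lr *. mh /. (sqrt vh +. eps)))
          g;
        if t mod 100 = 0 then
          Printf.printf "step: %d | loss: %.9f\n" t (loss target w)
      done

Run directly with "ocaml adam_sketch.ml" (hypothetical filename); it prints
a monotonically decreasing trace with the same shape as the log above.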
- (cd _build/default/examples/opt && ./pair.exe)
-
step: 0 | loss: 3.399400262
step: 10 | loss: 3.395288871
step: 20 | loss: 3.390721363
step: 30 | loss: 3.386154683
step: 40 | loss: 3.381588892
step: 50 | loss: 3.377024028
step: 60 | loss: 3.372460111
step: 70 | loss: 3.367897150
step: 80 | loss: 3.363335153
step: 90 | loss: 3.358774123
step: 100 | loss: 3.354214061
step: 110 | loss: 3.349654971
step: 120 | loss: 3.345096854
step: 130 | loss: 3.340539711
step: 140 | loss: 3.335983546
step: 150 | loss: 3.331428359
step: 160 | loss: 3.326874154
step: 170 | loss: 3.322320930
step: 180 | loss: 3.317768692
step: 190 | loss: 3.313217439
step: 200 | loss: 3.308667176
step: 210 | loss: 3.304117902
step: 220 | loss: 3.299569621
step: 230 | loss: 3.295022334
step: 240 | loss: 3.290476044
step: 250 | loss: 3.285930751
step: 260 | loss: 3.281386459
step: 270 | loss: 3.276843170
step: 280 | loss: 3.272300885
step: 290 | loss: 3.267759606
step: 300 | loss: 3.263219336
step: 310 | loss: 3.258680077
step: 320 | loss: 3.254141830
step: 330 | loss: 3.249604598
step: 340 | loss: 3.245068384
step: 350 | loss: 3.240533188
step: 360 | loss: 3.235999013
step: 370 | loss: 3.231465863
step: 380 | loss: 3.226933737
step: 390 | loss: 3.222402640
step: 400 | loss: 3.217872572
step: 410 | loss: 3.213343537
step: 420 | loss: 3.208815536
step: 430 | loss: 3.204288572
step: 440 | loss: 3.199762647
step: 450 | loss: 3.195237764
step: 460 | loss: 3.190713924
step: 470 | loss: 3.186191130
step: 480 | loss: 3.181669384
step: 490 | loss: 3.177148688
step: 500 | loss: 3.172629046
step: 510 | loss: 3.168110458
step: 520 | loss: 3.163592929
step: 530 | loss: 3.159076459
step: 540 | loss: 3.154561052
step: 550 | loss: 3.150046709
step: 560 | loss: 3.145533434
step: 570 | loss: 3.141021229
step: 580 | loss: 3.136510096
step: 590 | loss: 3.132000038
step: 600 | loss: 3.127491057
step: 610 | loss: 3.122983155
step: 620 | loss: 3.118476336
step: 630 | loss: 3.113970602
step: 640 | loss: 3.109465954
step: 650 | loss: 3.104962397
step: 660 | loss: 3.100459933
step: 670 | loss: 3.095958563
step: 680 | loss: 3.091458291
step: 690 | loss: 3.086959120
step: 700 | loss: 3.082461051
step: 710 | loss: 3.077964089
step: 720 | loss: 3.073468234
step: 730 | loss: 3.068973491
step: 740 | loss: 3.064479861
step: 750 | loss: 3.059987348
step: 760 | loss: 3.055495955
step: 770 | loss: 3.051005683
step: 780 | loss: 3.046516536
step: 790 | loss: 3.042028517
step: 800 | loss: 3.037541628
step: 810 | loss: 3.033055872
step: 820 | loss: 3.028571253
step: 830 | loss: 3.024087772
step: 840 | loss: 3.019605433
step: 850 | loss: 3.015124239
step: 860 | loss: 3.010644193
step: 870 | loss: 3.006165297
step: 880 | loss: 3.001687555
step: 890 | loss: 2.997210969
step: 900 | loss: 2.992735542
step: 910 | loss: 2.988261278
step: 920 | loss: 2.983788180
step: 930 | loss: 2.979316250
step: 940 | loss: 2.974845492
step: 950 | loss: 2.970375908
step: 960 | loss: 2.965907502
step: 970 | loss: 2.961440277
step: 980 | loss: 2.956974235
step: 990 | loss: 2.952509381
step: 1000 | loss: 2.948045717
step: 1010 | loss: 2.943583246
step: 1020 | loss: 2.939121972
step: 1030 | loss: 2.934661898
step: 1040 | loss: 2.930203027
step: 1050 | loss: 2.925745362
step: 1060 | loss: 2.921288906
step: 1070 | loss: 2.916833664
step: 1080 | loss: 2.912379637
step: 1090 | loss: 2.907926830
step: 1100 | loss: 2.903475246
step: 1110 | loss: 2.899024888
step: 1120 | loss: 2.894575759
step: 1130 | loss: 2.890127863
step: 1140 | loss: 2.885681204
step: 1150 | loss: 2.881235784
step: 1160 | loss: 2.876791608
step: 1170 | loss: 2.872348678
step: 1180 | loss: 2.867906998
step: 1190 | loss: 2.863466572
step: 1200 | loss: 2.859027403
step: 1210 | loss: 2.854589495
step: 1220 | loss: 2.850152851
step: 1230 | loss: 2.845717474
step: 1240 | loss: 2.841283370
step: 1250 | loss: 2.836850540
step: 1260 | loss: 2.832418990
step: 1270 | loss: 2.827988721
step: 1280 | loss: 2.823559739
step: 1290 | loss: 2.819132046
step: 1300 | loss: 2.814705647
step: 1310 | loss: 2.810280545
step: 1320 | loss: 2.805856745
step: 1330 | loss: 2.801434249
step: 1340 | loss: 2.797013061
step: 1350 | loss: 2.792593186
step: 1360 | loss: 2.788174627
step: 1370 | loss: 2.783757389
step: 1380 | loss: 2.779341474
step: 1390 | loss: 2.774926887
step: 1400 | loss: 2.770513632
step: 1410 | loss: 2.766101713
step: 1420 | loss: 2.761691133
step: 1430 | loss: 2.757281898
step: 1440 | loss: 2.752874010
step: 1450 | loss: 2.748467473
step: 1460 | loss: 2.744062293
step: 1470 | loss: 2.739658472
step: 1480 | loss: 2.735256015
step: 1490 | loss: 2.730854927
step: 1500 | loss: 2.726455210
step: 1510 | loss: 2.722056870
step: 1520 | loss: 2.717659911
step: 1530 | loss: 2.713264336
step: 1540 | loss: 2.708870150
step: 1550 | loss: 2.704477357
step: 1560 | loss: 2.700085962
step: 1570 | loss: 2.695695969
step: 1580 | loss: 2.691307381
step: 1590 | loss: 2.686920204
step: 1600 | loss: 2.682534442
step: 1610 | loss: 2.678150098
step: 1620 | loss: 2.673767178
step: 1630 | loss: 2.669385686
step: 1640 | loss: 2.665005627
step: 1650 | loss: 2.660627004
step: 1660 | loss: 2.656249822
step: 1670 | loss: 2.651874087
step: 1680 | loss: 2.647499801
step: 1690 | loss: 2.643126970
step: 1700 | loss: 2.638755599
step: 1710 | loss: 2.634385692
step: 1720 | loss: 2.630017253
step: 1730 | loss: 2.625650287
step: 1740 | loss: 2.621284800
step: 1750 | loss: 2.616920795
step: 1760 | loss: 2.612558277
step: 1770 | loss: 2.608197252
step: 1780 | loss: 2.603837723
step: 1790 | loss: 2.599479696
step: 1800 | loss: 2.595123175
step: 1810 | loss: 2.590768165
step: 1820 | loss: 2.586414671
step: 1830 | loss: 2.582062698
step: 1840 | loss: 2.577712251
step: 1850 | loss: 2.573363335
step: 1860 | loss: 2.569015954
step: 1870 | loss: 2.564670114
step: 1880 | loss: 2.560325819
step: 1890 | loss: 2.555983075
step: 1900 | loss: 2.551641886
step: 1910 | loss: 2.547302259
step: 1920 | loss: 2.542964196
step: 1930 | loss: 2.538627705
step: 1940 | loss: 2.534292790
step: 1950 | loss: 2.529959456
step: 1960 | loss: 2.525627708
step: 1970 | loss: 2.521297551
step: 1980 | loss: 2.516968991
step: 1990 | loss: 2.512642034
step: 2000 | loss: 2.508316683
step: 2010 | loss: 2.503992945
step: 2020 | loss: 2.499670824
step: 2030 | loss: 2.495350327
step: 2040 | loss: 2.491031458
step: 2050 | loss: 2.486714223
step: 2060 | loss: 2.482398628
step: 2070 | loss: 2.478084677
step: 2080 | loss: 2.473772376
step: 2090 | loss: 2.469461731
step: 2100 | loss: 2.465152748
step: 2110 | loss: 2.460845430
step: 2120 | loss: 2.456539786
step: 2130 | loss: 2.452235819
step: 2140 | loss: 2.447933535
step: 2150 | loss: 2.443632941
step: 2160 | loss: 2.439334041
step: 2170 | loss: 2.435036841
step: 2180 | loss: 2.430741348
step: 2190 | loss: 2.426447567
step: 2200 | loss: 2.422155503
step: 2210 | loss: 2.417865162
step: 2220 | loss: 2.413576551
step: 2230 | loss: 2.409289675
step: 2240 | loss: 2.405004539
step: 2250 | loss: 2.400721150
step: 2260 | loss: 2.396439514
step: 2270 | loss: 2.392159636
step: 2280 | loss: 2.387881523
step: 2290 | loss: 2.383605180
step: 2300 | loss: 2.379330613
step: 2310 | loss: 2.375057829
step: 2320 | loss: 2.370786833
step: 2330 | loss: 2.366517632
step: 2340 | loss: 2.362250231
step: 2350 | loss: 2.357984637
step: 2360 | loss: 2.353720856
step: 2370 | loss: 2.349458894
step: 2380 | loss: 2.345198757
step: 2390 | loss: 2.340940452
step: 2400 | loss: 2.336683984
step: 2410 | loss: 2.332429360
step: 2420 | loss: 2.328176587
step: 2430 | loss: 2.323925670
step: 2440 | loss: 2.319676615
step: 2450 | loss: 2.315429430
step: 2460 | loss: 2.311184120
step: 2470 | loss: 2.306940693
step: 2480 | loss: 2.302699154
step: 2490 | loss: 2.298459509
step: 2500 | loss: 2.294221766
step: 2510 | loss: 2.289985931
step: 2520 | loss: 2.285752010
step: 2530 | loss: 2.281520009
step: 2540 | loss: 2.277289937
step: 2550 | loss: 2.273061798
step: 2560 | loss: 2.268835600
step: 2570 | loss: 2.264611349
step: 2580 | loss: 2.260389052
step: 2590 | loss: 2.256168716
step: 2600 | loss: 2.251950347
step: 2610 | loss: 2.247733952
step: 2620 | loss: 2.243519537
step: 2630 | loss: 2.239307111
step: 2640 | loss: 2.235096679
step: 2650 | loss: 2.230888248
step: 2660 | loss: 2.226681824
step: 2670 | loss: 2.222477416
step: 2680 | loss: 2.218275030
step: 2690 | loss: 2.214074672
step: 2700 | loss: 2.209876350
step: 2710 | loss: 2.205680071
step: 2720 | loss: 2.201485841
step: 2730 | loss: 2.197293667
step: 2740 | loss: 2.193103557
step: 2750 | loss: 2.188915518
step: 2760 | loss: 2.184729556
step: 2770 | loss: 2.180545679
step: 2780 | loss: 2.176363894
step: 2790 | loss: 2.172184208
step: 2800 | loss: 2.168006628
step: 2810 | loss: 2.163831161
step: 2820 | loss: 2.159657814
step: 2830 | loss: 2.155486596
step: 2840 | loss: 2.151317512
step: 2850 | loss: 2.147150570
step: 2860 | loss: 2.142985777
step: 2870 | loss: 2.138823141
step: 2880 | loss: 2.134662669
step: 2890 | loss: 2.130504369
step: 2900 | loss: 2.126348247
step: 2910 | loss: 2.122194311
step: 2920 | loss: 2.118042568
step: 2930 | loss: 2.113893026
step: 2940 | loss: 2.109745692
step: 2950 | loss: 2.105600573
step: 2960 | loss: 2.101457678
step: 2970 | loss: 2.097317013
step: 2980 | loss: 2.093178586
step: 2990 | loss: 2.089042405
step: 3000 | loss: 2.084908476
step: 3010 | loss: 2.080776808
step: 3020 | loss: 2.076647408
step: 3030 | loss: 2.072520283
step: 3040 | loss: 2.068395442
step: 3050 | loss: 2.064272891
step: 3060 | loss: 2.060152639
step: 3070 | loss: 2.056034693
step: 3080 | loss: 2.051919060
step: 3090 | loss: 2.047805749
step: 3100 | loss: 2.043694766
step: 3110 | loss: 2.039586120
step: 3120 | loss: 2.035479819
step: 3130 | loss: 2.031375869
step: 3140 | loss: 2.027274279
step: 3150 | loss: 2.023175057
step: 3160 | loss: 2.019078209
step: 3170 | loss: 2.014983745
step: 3180 | loss: 2.010891671
step: 3190 | loss: 2.006801995
step: 3200 | loss: 2.002714726
step: 3210 | loss: 1.998629870
step: 3220 | loss: 1.994547437
step: 3230 | loss: 1.990467433
step: 3240 | loss: 1.986389866
step: 3250 | loss: 1.982314744
step: 3260 | loss: 1.978242076
step: 3270 | loss: 1.974171868
step: 3280 | loss: 1.970104129
step: 3290 | loss: 1.966038867
step: 3300 | loss: 1.961976089
step: 3310 | loss: 1.957915803
step: 3320 | loss: 1.953858017
step: 3330 | loss: 1.949802739
step: 3340 | loss: 1.945749977
step: 3350 | loss: 1.941699739
step: 3360 | loss: 1.937652032
step: 3370 | loss: 1.933606865
step: 3380 | loss: 1.929564245
step: 3390 | loss: 1.925524181
step: 3400 | loss: 1.921486679
step: 3410 | loss: 1.917451748
step: 3420 | loss: 1.913419397
step: 3430 | loss: 1.909389631
step: 3440 | loss: 1.905362461
step: 3450 | loss: 1.901337893
step: 3460 | loss: 1.897315935
step: 3470 | loss: 1.893296596
step: 3480 | loss: 1.889279882
step: 3490 | loss: 1.885265803
step: 3500 | loss: 1.881254365
step: 3510 | loss: 1.877245577
step: 3520 | loss: 1.873239446
step: 3530 | loss: 1.869235980
step: 3540 | loss: 1.865235188
step: 3550 | loss: 1.861237077
step: 3560 | loss: 1.857241654
step: 3570 | loss: 1.853248927
step: 3580 | loss: 1.849258905
step: 3590 | loss: 1.845271595
step: 3600 | loss: 1.841287005
step: 3610 | loss: 1.837305143
step: 3620 | loss: 1.833326015
step: 3630 | loss: 1.829349631
step: 3640 | loss: 1.825375997
step: 3650 | loss: 1.821405122
step: 3660 | loss: 1.817437013
step: 3670 | loss: 1.813471677
step: 3680 | loss: 1.809509123
step: 3690 | loss: 1.805549358
step: 3700 | loss: 1.801592389
step: 3710 | loss: 1.797638224
step: 3720 | loss: 1.793686871
step: 3730 | loss: 1.789738337
step: 3740 | loss: 1.785792630
step: 3750 | loss: 1.781849757
step: 3760 | loss: 1.777909725
step: 3770 | loss: 1.773972542
step: 3780 | loss: 1.770038216
step: 3790 | loss: 1.766106754
step: 3800 | loss: 1.762178162
step: 3810 | loss: 1.758252449
step: 3820 | loss: 1.754329622
step: 3830 | loss: 1.750409687
step: 3840 | loss: 1.746492653
step: 3850 | loss: 1.742578525
step: 3860 | loss: 1.738667313
step: 3870 | loss: 1.734759021
step: 3880 | loss: 1.730853659
step: 3890 | loss: 1.726951232
step: 3900 | loss: 1.723051747
step: 3910 | loss: 1.719155212
step: 3920 | loss: 1.715261634
step: 3930 | loss: 1.711371018
step: 3940 | loss: 1.707483373
step: 3950 | loss: 1.703598705
step: 3960 | loss: 1.699717021
step: 3970 | loss: 1.695838326
step: 3980 | loss: 1.691962629
step: 3990 | loss: 1.688089935
step: 4000 | loss: 1.684220252
step: 4010 | loss: 1.680353584
step: 4020 | loss: 1.676489940
step: 4030 | loss: 1.672629325
step: 4040 | loss: 1.668771746
step: 4050 | loss: 1.664917209
step: 4060 | loss: 1.661065720
step: 4070 | loss: 1.657217285
step: 4080 | loss: 1.653371911
step: 4090 | loss: 1.649529603
step: 4100 | loss: 1.645690368
step: 4110 | loss: 1.641854211
step: 4120 | loss: 1.638021138
step: 4130 | loss: 1.634191155
step: 4140 | loss: 1.630364268
step: 4150 | loss: 1.626540482
step: 4160 | loss: 1.622719803
step: 4170 | loss: 1.618902237
step: 4180 | loss: 1.615087788
step: 4190 | loss: 1.611276463
step: 4200 | loss: 1.607468266
step: 4210 | loss: 1.603663203
step: 4220 | loss: 1.599861278
step: 4230 | loss: 1.596062498
step: 4240 | loss: 1.592266867
step: 4250 | loss: 1.588474389
step: 4260 | loss: 1.584685070
step: 4270 | loss: 1.580898915
step: 4280 | loss: 1.577115928
step: 4290 | loss: 1.573336113
step: 4300 | loss: 1.569559476
step: 4310 | loss: 1.565786020
step: 4320 | loss: 1.562015750
step: 4330 | loss: 1.558248670
step: 4340 | loss: 1.554484785
step: 4350 | loss: 1.550724098
step: 4360 | loss: 1.546966613
step: 4370 | loss: 1.543212335
step: 4380 | loss: 1.539461266
step: 4390 | loss: 1.535713412
step: 4400 | loss: 1.531968774
step: 4410 | loss: 1.528227357
step: 4420 | loss: 1.524489165
step: 4430 | loss: 1.520754199
step: 4440 | loss: 1.517022465
step: 4450 | loss: 1.513293964
step: 4460 | loss: 1.509568700
step: 4470 | loss: 1.505846675
step: 4480 | loss: 1.502127893
step: 4490 | loss: 1.498412356
step: 4500 | loss: 1.494700067
step: 4510 | loss: 1.490991028
step: 4520 | loss: 1.487285241
step: 4530 | loss: 1.483582709
step: 4540 | loss: 1.479883434
step: 4550 | loss: 1.476187418
step: 4560 | loss: 1.472494662
step: 4570 | loss: 1.468805169
step: 4580 | loss: 1.465118941
step: 4590 | loss: 1.461435979
step: 4600 | loss: 1.457756283
step: 4610 | loss: 1.454079857
step: 4620 | loss: 1.450406701
step: 4630 | loss: 1.446736816
step: 4640 | loss: 1.443070203
step: 4650 | loss: 1.439406863
step: 4660 | loss: 1.435746798
step: 4670 | loss: 1.432090006
step: 4680 | loss: 1.428436490
step: 4690 | loss: 1.424786249
step: 4700 | loss: 1.421139285
step: 4710 | loss: 1.417495596
step: 4720 | loss: 1.413855184
step: 4730 | loss: 1.410218047
step: 4740 | loss: 1.406584187
step: 4750 | loss: 1.402953603
step: 4760 | loss: 1.399326294
step: 4770 | loss: 1.395702259
step: 4780 | loss: 1.392081500
step: 4790 | loss: 1.388464013
step: 4800 | loss: 1.384849800
step: 4810 | loss: 1.381238858
step: 4820 | loss: 1.377631187
step: 4830 | loss: 1.374026785
step: 4840 | loss: 1.370425652
step: 4850 | loss: 1.366827786
step: 4860 | loss: 1.363233185
step: 4870 | loss: 1.359641847
step: 4880 | loss: 1.356053772
step: 4890 | loss: 1.352468956
step: 4900 | loss: 1.348887399
step: 4910 | loss: 1.345309098
step: 4920 | loss: 1.341734050
step: 4930 | loss: 1.338162255
step: 4940 | loss: 1.334593708
step: 4950 | loss: 1.331028408
step: 4960 | loss: 1.327466353
step: 4970 | loss: 1.323907539
step: 4980 | loss: 1.320351964
step: 4990 | loss: 1.316799624
step: 5000 | loss: 1.313250518
step: 5010 | loss: 1.309704641
step: 5020 | loss: 1.306161991
step: 5030 | loss: 1.302622564
step: 5040 | loss: 1.299086358
step: 5050 | loss: 1.295553368
step: 5060 | loss: 1.292023591
step: 5070 | loss: 1.288497024
step: 5080 | loss: 1.284973663
step: 5090 | loss: 1.281453505
step: 5100 | loss: 1.277936545
step: 5110 | loss: 1.274422779
step: 5120 | loss: 1.270912205
step: 5130 | loss: 1.267404817
step: 5140 | loss: 1.263900612
step: 5150 | loss: 1.260399585
step: 5160 | loss: 1.256901733
step: 5170 | loss: 1.253407051
step: 5180 | loss: 1.249915536
step: 5190 | loss: 1.246427182
step: 5200 | loss: 1.242941986
step: 5210 | loss: 1.239459942
step: 5220 | loss: 1.235981048
step: 5230 | loss: 1.232505297
step: 5240 | loss: 1.229032687
step: 5250 | loss: 1.225563212
step: 5260 | loss: 1.222096867
step: 5270 | loss: 1.218633650
step: 5280 | loss: 1.215173554
step: 5290 | loss: 1.211716575
step: 5300 | loss: 1.208262710
step: 5310 | loss: 1.204811952
step: 5320 | loss: 1.201364299
step: 5330 | loss: 1.197919745
step: 5340 | loss: 1.194478287
step: 5350 | loss: 1.191039918
step: 5360 | loss: 1.187604636
step: 5370 | loss: 1.184172436
step: 5380 | loss: 1.180743314
step: 5390 | loss: 1.177317264
step: 5400 | loss: 1.173894284
step: 5410 | loss: 1.170474368
step: 5420 | loss: 1.167057513
step: 5430 | loss: 1.163643715
step: 5440 | loss: 1.160232970
step: 5450 | loss: 1.156825273
step: 5460 | loss: 1.153420621
step: 5470 | loss: 1.150019010
step: 5480 | loss: 1.146620437
step: 5490 | loss: 1.143224897
step: 5500 | loss: 1.139832388
step: 5510 | loss: 1.136442906
step: 5520 | loss: 1.133056448
step: 5530 | loss: 1.129673011
step: 5540 | loss: 1.126292592
step: 5550 | loss: 1.122915187
step: 5560 | loss: 1.119540794
step: 5570 | loss: 1.116169411
step: 5580 | loss: 1.112801035
step: 5590 | loss: 1.109435663
step: 5600 | loss: 1.106073294
step: 5610 | loss: 1.102713925
step: 5620 | loss: 1.099357555
step: 5630 | loss: 1.096004182
step: 5640 | loss: 1.092653804
step: 5650 | loss: 1.089306420
step: 5660 | loss: 1.085962029
step: 5670 | loss: 1.082620630
step: 5680 | loss: 1.079282223
step: 5690 | loss: 1.075946805
step: 5700 | loss: 1.072614379
step: 5710 | loss: 1.069284942
step: 5720 | loss: 1.065958495
step: 5730 | loss: 1.062635038
step: 5740 | loss: 1.059314573
step: 5750 | loss: 1.055997099
step: 5760 | loss: 1.052682617
step: 5770 | loss: 1.049371129
step: 5780 | loss: 1.046062636
step: 5790 | loss: 1.042757139
step: 5800 | loss: 1.039454641
step: 5810 | loss: 1.036155143
step: 5820 | loss: 1.032858649
step: 5830 | loss: 1.029565160
step: 5840 | loss: 1.026274679
step: 5850 | loss: 1.022987210
step: 5860 | loss: 1.019702756
step: 5870 | loss: 1.016421321
step: 5880 | loss: 1.013142908
step: 5890 | loss: 1.009867522
step: 5900 | loss: 1.006595166
step: 5910 | loss: 1.003325847
step: 5920 | loss: 1.000059569
step: 5930 | loss: 0.996796336
step: 5940 | loss: 0.993536155
step: 5950 | loss: 0.990279032
step: 5960 | loss: 0.987024972
step: 5970 | loss: 0.983773983
step: 5980 | loss: 0.980526069
step: 5990 | loss: 0.977281240
step: 6000 | loss: 0.974039501
step: 6010 | loss: 0.970800860
step: 6020 | loss: 0.967565326
step: 6030 | loss: 0.964332905
step: 6040 | loss: 0.961103607
step: 6050 | loss: 0.957877440
step: 6060 | loss: 0.954654413
step: 6070 | loss: 0.951434535
step: 6080 | loss: 0.948217816
step: 6090 | loss: 0.945004265
step: 6100 | loss: 0.941793892
step: 6110 | loss: 0.938586709
step: 6120 | loss: 0.935382724
step: 6130 | loss: 0.932181949
step: 6140 | loss: 0.928984396
step: 6150 | loss: 0.925790075
step: 6160 | loss: 0.922598999
step: 6170 | loss: 0.919411178
step: 6180 | loss: 0.916226626
step: 6190 | loss: 0.913045355
step: 6200 | loss: 0.909867376
step: 6210 | loss: 0.906692705
step: 6220 | loss: 0.903521352
step: 6230 | loss: 0.900353333
step: 6240 | loss: 0.897188660
step: 6250 | loss: 0.894027348
step: 6260 | loss: 0.890869410
step: 6270 | loss: 0.887714861
step: 6280 | loss: 0.884563716
step: 6290 | loss: 0.881415990
step: 6300 | loss: 0.878271696
step: 6310 | loss: 0.875130852
step: 6320 | loss: 0.871993472
step: 6330 | loss: 0.868859573
step: 6340 | loss: 0.865729169
step: 6350 | loss: 0.862602277
step: 6360 | loss: 0.859478913
step: 6370 | loss: 0.856359095
step: 6380 | loss: 0.853242837
step: 6390 | loss: 0.850130159
step: 6400 | loss: 0.847021075
step: 6410 | loss: 0.843915605
step: 6420 | loss: 0.840813764
step: 6430 | loss: 0.837715572
step: 6440 | loss: 0.834621044
step: 6450 | loss: 0.831530200
step: 6460 | loss: 0.828443057
step: 6470 | loss: 0.825359633
step: 6480 | loss: 0.822279947
step: 6490 | loss: 0.819204017
step: 6500 | loss: 0.816131862
step: 6510 | loss: 0.813063500
step: 6520 | loss: 0.809998951
step: 6530 | loss: 0.806938233
step: 6540 | loss: 0.803881364
step: 6550 | loss: 0.800828365
step: 6560 | loss: 0.797779255
step: 6570 | loss: 0.794734052
step: 6580 | loss: 0.791692777
step: 6590 | loss: 0.788655448
step: 6600 | loss: 0.785622085
step: 6610 | loss: 0.782592709
step: 6620 | loss: 0.779567338
step: 6630 | loss: 0.776545992
step: 6640 | loss: 0.773528691
step: 6650 | loss: 0.770515455
step: 6660 | loss: 0.767506304
step: 6670 | loss: 0.764501257
step: 6680 | loss: 0.761500335
step: 6690 | loss: 0.758503558
step: 6700 | loss: 0.755510945
step: 6710 | loss: 0.752522517
step: 6720 | loss: 0.749538293
step: 6730 | loss: 0.746558293
step: 6740 | loss: 0.743582538
step: 6750 | loss: 0.740611047
step: 6760 | loss: 0.737643840
step: 6770 | loss: 0.734680938
step: 6780 | loss: 0.731722359
step: 6790 | loss: 0.728768124
step: 6800 | loss: 0.725818253
step: 6810 | loss: 0.722872765
step: 6820 | loss: 0.719931680
step: 6830 | loss: 0.716995017
step: 6840 | loss: 0.714062796
step: 6850 | loss: 0.711135036
step: 6860 | loss: 0.708211757
step: 6870 | loss: 0.705292977
step: 6880 | loss: 0.702378717
step: 6890 | loss: 0.699468994
step: 6900 | loss: 0.696563828
step: 6910 | loss: 0.693663237
step: 6920 | loss: 0.690767241
step: 6930 | loss: 0.687875856
step: 6940 | loss: 0.684989103
step: 6950 | loss: 0.682106998
step: 6960 | loss: 0.679229560
step: 6970 | loss: 0.676356807
step: 6980 | loss: 0.673488755
step: 6990 | loss: 0.670625423
step: 7000 | loss: 0.667766828
step: 7010 | loss: 0.664912987
step: 7020 | loss: 0.662063916
step: 7030 | loss: 0.659219631
step: 7040 | loss: 0.656380150
step: 7050 | loss: 0.653545488
step: 7060 | loss: 0.650715662
step: 7070 | loss: 0.647890685
step: 7080 | loss: 0.645070574
step: 7090 | loss: 0.642255344
step: 7100 | loss: 0.639445009
step: 7110 | loss: 0.636639584
step: 7120 | loss: 0.633839082
step: 7130 | loss: 0.631043517
step: 7140 | loss: 0.628252903
step: 7150 | loss: 0.625467253
step: 7160 | loss: 0.622686578
step: 7170 | loss: 0.619910893
step: 7180 | loss: 0.617140208
step: 7190 | loss: 0.614374534
step: 7200 | loss: 0.611613884
step: 7210 | loss: 0.608858268
step: 7220 | loss: 0.606107695
step: 7230 | loss: 0.603362177
step: 7240 | loss: 0.600621722
step: 7250 | loss: 0.597886339
step: 7260 | loss: 0.595156038
step: 7270 | loss: 0.592430825
step: 7280 | loss: 0.589710709
step: 7290 | loss: 0.586995696
step: 7300 | loss: 0.584285793
step: 7310 | loss: 0.581581007
step: 7320 | loss: 0.578881342
step: 7330 | loss: 0.576186805
step: 7340 | loss: 0.573497398
step: 7350 | loss: 0.570813126
step: 7360 | loss: 0.568133993
step: 7370 | loss: 0.565460002
step: 7380 | loss: 0.562791153
step: 7390 | loss: 0.560127450
step: 7400 | loss: 0.557468892
step: 7410 | loss: 0.554815481
step: 7420 | loss: 0.552167216
step: 7430 | loss: 0.549524096
step: 7440 | loss: 0.546886120
step: 7450 | loss: 0.544253285
step: 7460 | loss: 0.541625589
step: 7470 | loss: 0.539003027
step: 7480 | loss: 0.536385597
step: 7490 | loss: 0.533773293
step: 7500 | loss: 0.531166108
step: 7510 | loss: 0.528564038
step: 7520 | loss: 0.525967075
step: 7530 | loss: 0.523375211
step: 7540 | loss: 0.520788439
step: 7550 | loss: 0.518206747
step: 7560 | loss: 0.515630128
step: 7570 | loss: 0.513058569
step: 7580 | loss: 0.510492060
step: 7590 | loss: 0.507930589
step: 7600 | loss: 0.505374142
step: 7610 | loss: 0.502822705
step: 7620 | loss: 0.500276265
step: 7630 | loss: 0.497734806
step: 7640 | loss: 0.495198312
step: 7650 | loss: 0.492666766
step: 7660 | loss: 0.490140151
step: 7670 | loss: 0.487618447
step: 7680 | loss: 0.485101637
step: 7690 | loss: 0.482589699
step: 7700 | loss: 0.480082614
step: 7710 | loss: 0.477580359
step: 7720 | loss: 0.475082913
step: 7730 | loss: 0.472590252
step: 7740 | loss: 0.470102352
step: 7750 | loss: 0.467619188
step: 7760 | loss: 0.465140736
step: 7770 | loss: 0.462666968
step: 7780 | loss: 0.460197859
step: 7790 | loss: 0.457733379
step: 7800 | loss: 0.455273500
step: 7810 | loss: 0.452818194
step: 7820 | loss: 0.450367429
step: 7830 | loss: 0.447921176
step: 7840 | loss: 0.445479402
step: 7850 | loss: 0.443042076
step: 7860 | loss: 0.440609164
step: 7870 | loss: 0.438180633
step: 7880 | loss: 0.435756448
step: 7890 | loss: 0.433336574
step: 7900 | loss: 0.430920976
step: 7910 | loss: 0.428509616
step: 7920 | loss: 0.426102458
step: 7930 | loss: 0.423699465
step: 7940 | loss: 0.421300597
step: 7950 | loss: 0.418905816
step: 7960 | loss: 0.416515082
step: 7970 | loss: 0.414128356
step: 7980 | loss: 0.411745596
step: 7990 | loss: 0.409366761
step: 8000 | loss: 0.406991810
step: 8010 | loss: 0.404620701
step: 8020 | loss: 0.402253389
step: 8030 | loss: 0.399889834
step: 8040 | loss: 0.397529990
step: 8050 | loss: 0.395173814
step: 8060 | loss: 0.392821261
step: 8070 | loss: 0.390472286
step: 8080 | loss: 0.388126845
step: 8090 | loss: 0.385784891
step: 8100 | loss: 0.383446379
step: 8110 | loss: 0.381111263
step: 8120 | loss: 0.378779495
step: 8130 | loss: 0.376451031
step: 8140 | loss: 0.374125821
step: 8150 | loss: 0.371803821
step: 8160 | loss: 0.369484981
step: 8170 | loss: 0.367169255
step: 8180 | loss: 0.364856596
step: 8190 | loss: 0.362546955
step: 8200 | loss: 0.360240285
step: 8210 | loss: 0.357936539
step: 8220 | loss: 0.355635667
step: 8230 | loss: 0.353337624
step: 8240 | loss: 0.351042360
step: 8250 | loss: 0.348749828
step: 8260 | loss: 0.346459981
step: 8270 | loss: 0.344172772
step: 8280 | loss: 0.341888152
step: 8290 | loss: 0.339606074
step: 8300 | loss: 0.337326492
step: 8310 | loss: 0.335049359
step: 8320 | loss: 0.332774627
step: 8330 | loss: 0.330502252
step: 8340 | loss: 0.328232186
step: 8350 | loss: 0.325964384
step: 8360 | loss: 0.323698800
step: 8370 | loss: 0.321435390
step: 8380 | loss: 0.319174108
step: 8390 | loss: 0.316914911
step: 8400 | loss: 0.314657754
step: 8410 | loss: 0.312402593
step: 8420 | loss: 0.310149387
step: 8430 | loss: 0.307898092
step: 8440 | loss: 0.305648665
step: 8450 | loss: 0.303401067
step: 8460 | loss: 0.301155254
step: 8470 | loss: 0.298911188
step: 8480 | loss: 0.296668827
step: 8490 | loss: 0.294428133
step: 8500 | loss: 0.292189067
step: 8510 | loss: 0.289951590
step: 8520 | loss: 0.287715665
step: 8530 | loss: 0.285481255
step: 8540 | loss: 0.283248323
step: 8550 | loss: 0.281016834
step: 8560 | loss: 0.278786754
step: 8570 | loss: 0.276558046
step: 8580 | loss: 0.274330678
step: 8590 | loss: 0.272104617
step: 8600 | loss: 0.269879831
step: 8610 | loss: 0.267656287
step: 8620 | loss: 0.265433955
step: 8630 | loss: 0.263212804
step: 8640 | loss: 0.260992806
step: 8650 | loss: 0.258773931
step: 8660 | loss: 0.256556150
step: 8670 | loss: 0.254339438
step: 8680 | loss: 0.252123766
step: 8690 | loss: 0.249909109
step: 8700 | loss: 0.247695441
step: 8710 | loss: 0.245482738
step: 8720 | loss: 0.243270977
step: 8730 | loss: 0.241060133
step: 8740 | loss: 0.238850184
step: 8750 | loss: 0.236641108
step: 8760 | loss: 0.234432885
step: 8770 | loss: 0.232225493
step: 8780 | loss: 0.230018912
step: 8790 | loss: 0.227813124
step: 8800 | loss: 0.225608109
step: 8810 | loss: 0.223403850
step: 8820 | loss: 0.221200329
step: 8830 | loss: 0.218997529
step: 8840 | loss: 0.216795435
step: 8850 | loss: 0.214594030
step: 8860 | loss: 0.212393300
step: 8870 | loss: 0.210193229
step: 8880 | loss: 0.207993804
step: 8890 | loss: 0.205795012
step: 8900 | loss: 0.203596839
step: 8910 | loss: 0.201399272
step: 8920 | loss: 0.199202301
step: 8930 | loss: 0.197005913
step: 8940 | loss: 0.194810096
step: 8950 | loss: 0.192614842
step: 8960 | loss: 0.190420138
step: 8970 | loss: 0.188225975
step: 8980 | loss: 0.186032344
step: 8990 | loss: 0.183839236
step: 9000 | loss: 0.181646641
step: 9010 | loss: 0.179454551
step: 9020 | loss: 0.177262959
step: 9030 | loss: 0.175071855
step: 9040 | loss: 0.172881233
step: 9050 | loss: 0.170691086
step: 9060 | loss: 0.168501406
step: 9070 | loss: 0.166312187
step: 9080 | loss: 0.164123421
step: 9090 | loss: 0.161935104
step: 9100 | loss: 0.159747227
step: 9110 | loss: 0.157559787
step: 9120 | loss: 0.155372777
step: 9130 | loss: 0.153186191
step: 9140 | loss: 0.151000024
step: 9150 | loss: 0.148814271
step: 9160 | loss: 0.146628927
step: 9170 | loss: 0.144443987
step: 9180 | loss: 0.142259446
step: 9190 | loss: 0.140075299
step: 9200 | loss: 0.137891542
step: 9210 | loss: 0.135708170
step: 9220 | loss: 0.133525179
step: 9230 | loss: 0.131342565
step: 9240 | loss: 0.129160324
step: 9250 | loss: 0.126978450
step: 9260 | loss: 0.124796942
step: 9270 | loss: 0.122615793
step: 9280 | loss: 0.120435001
step: 9290 | loss: 0.118254562
step: 9300 | loss: 0.116074471
step: 9310 | loss: 0.113894726
step: 9320 | loss: 0.111715322
step: 9330 | loss: 0.109536256
step: 9340 | loss: 0.107357524
step: 9350 | loss: 0.105179123
step: 9360 | loss: 0.103001050
step: 9370 | loss: 0.100823300
step: 9380 | loss: 0.098645871
step: 9390 | loss: 0.096468759
step: 9400 | loss: 0.094291961
step: 9410 | loss: 0.092115474
step: 9420 | loss: 0.089939294
step: 9430 | loss: 0.087763418
step: 9440 | loss: 0.085587843
step: 9450 | loss: 0.083412566
step: 9460 | loss: 0.081237584
step: 9470 | loss: 0.079062893
step: 9480 | loss: 0.076888492
step: 9490 | loss: 0.074714376
step: 9500 | loss: 0.072540543
step: 9510 | loss: 0.070366989
step: 9520 | loss: 0.068193713
step: 9530 | loss: 0.066020711
step: 9540 | loss: 0.063847980
step: 9550 | loss: 0.061675518
step: 9560 | loss: 0.059503321
step: 9570 | loss: 0.057331387
step: 9580 | loss: 0.055159713
step: 9590 | loss: 0.052988298
step: 9600 | loss: 0.050817137
step: 9610 | loss: 0.048646228
step: 9620 | loss: 0.046475569
step: 9630 | loss: 0.044305157
step: 9640 | loss: 0.042134989
step: 9650 | loss: 0.039965064
step: 9660 | loss: 0.037795378
step: 9670 | loss: 0.035625930
step: 9680 | loss: 0.033456716
step: 9690 | loss: 0.031287734
step: 9700 | loss: 0.029118982
step: 9710 | loss: 0.026950458
step: 9720 | loss: 0.024782159
step: 9730 | loss: 0.022614082
step: 9740 | loss: 0.020446227
step: 9750 | loss: 0.018278589
step: 9760 | loss: 0.016111168
step: 9770 | loss: 0.013943960
step: 9780 | loss: 0.011776965
step: 9790 | loss: 0.009610178
step: 9800 | loss: 0.007443599
step: 9810 | loss: 0.005277226
step: 9820 | loss: 0.003111055
step: 9830 | loss: 0.000945085
- final loss: 0.000945
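This second trace is from pair.exe, which, judging by its name, optimises a
pair of parameters jointly; the log shows the same steady descent to a final
loss of 0.000945. As a hedged sketch of that idea (plain gradient descent on
a pair (a, b) fitting a line; an assumption about what "pair" refers to, not
the actual example's code or owl-opt's API), printing the same
"step | loss" format:

    (* Jointly optimise the pair (a, b) of y = a*x + b by gradient
       descent on mean squared error; illustrative assumption only. *)
    let () =
      let xs = [| 0.; 1.; 2.; 3. |] in
      let ys = Array.map (fun x -> (2. *. x) +. 1.) xs in  (* y = 2x + 1 *)
      let n = float_of_int (Array.length xs) in
      let a = ref 0. and b = ref 0. in
      let lr = 0.02 in
      for step = 0 to 2000 do
        let loss = ref 0. and ga = ref 0. and gb = ref 0. in
        Array.iteri
          (fun i x ->
            let e = (!a *. x) +. !b -. ys.(i) in
            loss := !loss +. (e *. e /. n);
            ga := !ga +. (2. *. e *. x /. n);   (* d loss / d a *)
            gb := !gb +. (2. *. e /. n))        (* d loss / d b *)
          xs;
        a := !a -. (lr *. !ga);
        b := !b -. (lr *. !gb);
        if step mod 200 = 0 then
          Printf.printf "step: %d | loss: %.9f\n" step !loss
      done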
-> compiled owl-opt.0.0.1
-> removed owl-opt.0.0.1
-> installed owl-opt.0.0.1
Done.
# To update the current shell environment, run: eval $(opam env)
2026-03-02 07:47.19 ---> saved as "52071fcd499bf8038604a6dc49ebf1b6470485883f48642d1f37301ca9444957"
2026-03-02 07:47.57: Job succeeded