diff --git a/GeneralisedFilters/Project.toml b/GeneralisedFilters/Project.toml
index abec5f91..5a8f4418 100644
--- a/GeneralisedFilters/Project.toml
+++ b/GeneralisedFilters/Project.toml
@@ -4,13 +4,21 @@ version = "0.4.2"
 authors = ["THargreaves ", "Charles Knipp ", "FredericWantiez ", "Hong Ge "]
 
 [deps]
+ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
 AcceleratedKernels = "6a4ca0a5-0e36-4168-a932-d9be78d558f1"
+Bijectors = "76274a88-744f-5084-9051-94815aaf08c4"
 CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
+ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
 DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
+DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
+ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
+LogDensityProblemsAD = "996a588d-648d-4e1f-a8f0-a84b347e47b1"
 LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688"
+MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d"
 OffsetArrays = "6fe1bfb0-de20-5000-8ca7-80f57d26f881"
 PDMats = "90014a1f-27ba-587c-ab20-58faa44d9150"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
@@ -18,15 +26,27 @@ SSMProblems = "26aad666-b158-4e64-9d35-0e672562fa48"
 StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
 Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
 StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
+Turing = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"
+Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 
 [compat]
-AbstractMCMC = "5"
+ADTypes = "1.21.0"
+AbstractMCMC = "5.9.0"
 AcceleratedKernels = "0.3, 0.4"
+AdvancedHMC = "0.8.3"
+AdvancedMH = "0.8.9"
 Aqua = "0.8"
+Bijectors = "0.15.16"
 CUDA = "5"
+ChainRulesCore = "1.26.0"
 DataStructures = "0.18.20, 0.19"
 Distributions = "0.25"
+DynamicPPL = "0.39.13"
+ForwardDiff = "1.3.2"
+LogDensityProblems = "2.2.0"
+LogDensityProblemsAD = "1.13.1"
 LogExpFunctions = "0.3"
+MCMCChains = "7.7.0"
 OffsetArrays = "1.14.1"
 PDMats = "0.11.35"
 SSMProblems = "0.6"
@@ -34,9 +54,13 @@ StaticArrays = "1.9.17"
 Statistics = "1"
 StatsBase = "0.34.3"
 Test = "1"
+Turing = "0.42.6"
+Zygote = "0.7.10"
 julia = "1.10"
 
 [extras]
+AdvancedHMC = "0bf59076-c3b1-5ca4-86bd-e02cd72cde3d"
+AdvancedMH = "5b7e9947-ddc0-4b3f-9b55-0d8042f74170"
 Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
 FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000"
 JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b"
@@ -47,4 +71,4 @@ TestItemRunner = "f8b46487-2199-4994-9208-9a1283c18c0a"
 TestItems = "1c621080-faea-4a02-84b6-bbd5e436b8fe"
 
 [targets]
-test = ["Aqua", "FiniteDifferences", "PDMats", "StableRNGs", "Test", "TestItemRunner", "TestItems", "JET"]
+test = ["AdvancedHMC", "AdvancedMH", "Aqua", "FiniteDifferences", "PDMats", "StableRNGs", "Test", "TestItemRunner", "TestItems", "JET"]
diff --git a/GeneralisedFilters/examples/dfsv/5_Industry_Portfolios.csv b/GeneralisedFilters/examples/dfsv/5_Industry_Portfolios.csv
new file mode 100644
index 00000000..c963f44f
--- /dev/null
+++ b/GeneralisedFilters/examples/dfsv/5_Industry_Portfolios.csv
@@ -0,0 +1,5216 @@
+This file was created using the 202512 CRSP database.
+It contains value- and equal-weighted returns for 5 industry portfolios.
+
+The portfolios are constructed at the end of June.
+
+The annual returns are from January to December.
+
+Missing data are indicated by -99.99 or -999.
+
+
+  Average Value Weighted Returns -- Monthly
+,Cnsmr,Manuf,HiTec,Hlth,Other
+192607, 5.24, 2.75, 1.82, 1.85, 2.14
+192608, 2.77, 2.30, 2.41, 4.17, 4.35
[... monthly value-weighted rows for 192609-202511 elided ...]
+202512, -0.25, -0.28, -0.46, -1.53, 1.97
+
+
+  Average Equal Weighted Returns -- Monthly
+,Cnsmr,Manuf,HiTec,Hlth,Other
+192607, 1.71, 1.29, 1.98, 2.53, 0.10
+192608, 2.75, 2.32, 2.30, 6.03, 4.45
[... monthly equal-weighted rows from 192609 onwards elided; the quoted hunk breaks off mid-row at 195807 ...]
3.86, 4.17, 5.53 +195808, 5.05, 1.83, 4.93, 3.29, 2.29 +195809, 5.86, 4.43, 5.18, 11.11, 5.84 +195810, 2.84, 2.93, 6.23, 1.31, 3.73 +195811, 4.54, 3.05, 10.22, 8.60, 4.41 +195812, 3.54, 4.22, 5.00, 4.95, 2.80 +195901, 5.26, 4.10, 2.66, -0.09, 4.11 +195902, 3.64, 1.82, 8.82, 4.88, 1.46 +195903, 1.42, 0.99, 3.05, 4.51, 0.30 +195904, 2.29, 1.99, 9.40, 11.06, 0.85 +195905, 0.63, 0.99, 0.45, -2.47, 0.89 +195906, 0.71, 0.32, 1.13, 6.27, -0.29 +195907, 3.98, 3.34, 2.14, 3.79, 0.92 +195908, -0.20, -1.61, -3.97, 0.02, -2.08 +195909, -3.01, -5.23, -5.88, -6.10, -4.10 +195910, 1.99, 1.59, 6.90, 5.48, 1.14 +195911, 0.91, 1.86, 5.08, 4.79, -0.49 +195912, 2.60, 1.96, 3.95, -0.92, 1.91 +196001, -3.06, -4.16, -7.64, -4.72, -2.60 +196002, 1.06, 0.30, 5.47, 1.21, -0.05 +196003, -2.98, -2.12, -2.32, 0.16, -2.64 +196004, -0.94, -2.54, -2.32, 1.17, -2.44 +196005, 1.55, 1.56, 11.03, 11.27, 0.06 +196006, 3.29, 1.74, 1.42, 3.32, 1.79 +196007, -1.32, -1.63, -5.74, -4.15, -1.59 +196008, 3.90, 3.27, 5.66, 1.92, 4.06 +196009, -4.78, -5.97, -10.18, -8.56, -5.79 +196010, -1.58, -2.15, -6.44, -4.32, -2.07 +196011, 4.87, 4.17, 6.32, 4.22, 5.25 +196012, 2.89, 4.69, 3.63, 6.04, 2.64 +196101, 8.00, 8.36, 4.76, 7.78, 9.63 +196102, 6.42, 5.84, 7.65, 6.56, 5.47 +196103, 6.42, 3.97, 6.33, 6.50, 4.63 +196104, 0.67, 0.63, 2.25, -2.72, 1.22 +196105, 4.04, 4.54, 3.24, 1.57, 5.03 +196106, -3.17, -4.31, -5.71, -3.36, -4.40 +196107, 0.91, 1.53, 0.85, 1.81, 0.34 +196108, 2.94, 1.62, -0.41, 4.30, 3.32 +196109, -1.60, -3.12, -5.96, -2.45, -3.18 +196110, 3.38, 1.80, -0.26, 4.03, 2.15 +196111, 6.41, 3.64, 6.05, 2.80, 4.78 +196112, -0.66, 0.19, -0.09, -2.59, -1.67 +196201, -1.58, -0.41, -2.63, -3.71, 1.44 +196202, 1.02, 1.92, 0.26, 2.00, 1.55 +196203, -0.46, -0.26, -1.30, 0.11, -1.44 +196204, -7.11, -6.14, -9.52, -10.12, -6.61 +196205, -9.49, -9.32, -12.89, -12.40, -9.54 +196206, -8.34, -7.89, -11.06, -12.17, -8.71 +196207, 6.23, 5.77, 9.46, 5.77, 7.20 +196208, 2.74, 2.48, 4.60, -3.78, 4.41 +196209, -6.49, -5.23, -9.54, -5.57, -6.52 +196210, -3.14, -1.54, -4.04, 0.73, -2.10 +196211, 11.70, 13.13, 16.88, 19.67, 17.43 +196212, -0.69, -0.86, -2.67, -0.26, -0.52 +196301, 8.21, 7.71, 7.78, 7.00, 8.50 +196302, -1.78, -1.09, -3.99, -2.11, -0.59 +196303, 1.55, 2.61, -0.26, 1.27, 2.85 +196304, 3.87, 3.51, 3.86, 7.48, 4.21 +196305, 2.40, 3.16, 3.69, -0.16, 5.36 +196306, -1.57, -1.57, -1.81, -1.79, -1.50 +196307, -0.34, -0.37, -2.36, 0.70, -1.28 +196308, 3.74, 4.52, 3.58, 7.26, 4.50 +196309, -1.33, -1.07, 1.03, -1.42, -1.25 +196310, 1.81, 0.42, 3.79, 3.90, 1.10 +196311, -2.32, -0.75, -2.55, -1.51, -2.10 +196312, -1.35, 0.16, -3.18, -0.36, 0.61 +196401, 3.20, 3.67, 2.31, 4.02, 2.97 +196402, 2.06, 2.33, 1.31, 0.76, 5.19 +196403, 1.71, 3.50, 2.20, -1.49, 3.28 +196404, 0.94, 0.14, -1.63, -1.30, -0.13 +196405, 0.19, 1.21, -0.95, -0.15, 1.05 +196406, 1.94, 1.07, 0.57, -0.36, -0.48 +196407, 2.89, 3.46, 1.56, 4.60, 2.49 +196408, -0.37, -0.15, -1.70, -2.44, -1.38 +196409, 3.60, 3.13, 4.36, 5.53, 5.29 +196410, 2.08, 1.79, 1.18, 2.05, 2.86 +196411, 0.72, -0.20, -1.52, 1.85, -0.69 +196412, -1.68, -1.31, -0.60, 3.61, -1.70 +196501, 7.87, 6.84, 9.65, 7.67, 6.10 +196502, 3.55, 3.00, 3.33, 2.03, 3.89 +196503, 1.80, 1.37, 1.87, 0.69, 1.07 +196504, 4.25, 4.32, 5.63, 2.92, 3.02 +196505, 0.55, -1.06, 0.19, -1.72, -1.59 +196506, -8.34, -7.89, -10.64, -6.58, -8.12 +196507, 2.04, 3.00, 4.82, 4.28, 3.08 +196508, 3.75, 3.47, 8.46, 6.72, 4.33 +196509, 2.24, 2.82, 5.51, 3.32, 1.85 +196510, 5.30, 5.88, 8.43, 4.15, 6.15 +196511, 3.26, 4.06, 13.12, 4.24, 4.91 +196512, 3.80, 4.06, 
3.37, 5.01, 6.51 +196601, 6.38, 6.07, 5.94, 4.77, 7.24 +196602, 2.99, 2.33, 6.32, -0.48, 3.49 +196603, -1.39, -0.94, -1.77, -1.51, 0.54 +196604, 3.57, 4.68, 10.42, 4.85, 4.23 +196605, -8.44, -8.04, -12.99, -7.07, -9.17 +196606, -0.87, -0.77, 2.03, 2.19, -0.89 +196607, -1.81, -1.31, -2.11, -1.44, -1.65 +196608, -10.11, -9.56, -9.01, -9.53, -11.82 +196609, -1.97, -1.34, -3.72, -1.51, -2.22 +196610, -2.46, 0.88, -7.07, 2.46, -2.47 +196611, 1.54, 2.92, 12.18, 7.50, 7.36 +196612, -0.15, 2.55, 4.83, 1.94, 0.39 +196701, 17.19, 15.76, 22.86, 10.45, 20.10 +196702, 5.39, 3.20, 5.29, 3.03, 4.62 +196703, 4.15, 6.32, 7.69, 8.91, 6.66 +196704, 5.12, 3.87, 6.16, 5.10, 2.34 +196705, -0.58, -0.96, -2.52, -4.54, 0.47 +196706, 8.25, 7.50, 13.53, 6.63, 7.99 +196707, 9.04, 7.71, 4.95, 5.13, 11.30 +196708, 1.91, 0.13, -2.43, 2.06, 1.17 +196709, 8.42, 4.73, 8.87, 6.59, 4.93 +196710, -0.75, -3.19, -1.96, -6.10, -1.33 +196711, -0.21, 0.19, -0.57, 1.20, 0.25 +196712, 7.71, 7.91, 9.68, 1.81, 9.14 +196801, 4.59, 1.03, -3.46, -5.51, 6.14 +196802, -5.03, -5.57, -8.65, -5.51, -4.78 +196803, 0.02, -1.64, -1.12, -0.08, -1.22 +196804, 15.95, 12.32, 16.82, 10.97, 14.52 +196805, 11.06, 7.23, 9.16, 5.81, 11.24 +196806, 1.65, 2.15, -1.83, 0.60, 2.47 +196807, -2.68, -2.46, -6.69, -4.02, -0.97 +196808, 4.38, 3.10, 2.38, 2.06, 4.47 +196809, 7.87, 5.52, 6.47, 1.63, 9.71 +196810, 2.22, 0.57, -2.43, -0.31, 0.80 +196811, 6.56, 7.19, 7.86, 8.46, 8.11 +196812, 0.45, 0.74, -0.64, -0.74, 0.84 +196901, -0.57, 0.06, -2.37, -0.57, 1.02 +196902, -8.72, -8.17, -10.18, -6.36, -9.18 +196903, 1.71, 2.44, 0.87, 3.53, 1.87 +196904, 0.42, -0.67, 1.24, 3.97, 2.22 +196905, 0.39, 0.90, -0.33, 2.46, -0.57 +196906, -11.24, -10.69, -12.39, -8.16, -12.69 +196907, -10.13, -8.68, -11.94, -4.15, -10.51 +196908, 5.27, 3.88, 6.38, 10.39, 6.87 +196909, -2.27, -3.37, -2.40, 1.96, -1.25 +196910, 9.67, 7.85, 16.56, 11.17, 7.98 +196911, -5.65, -5.41, -4.81, -1.53, -7.04 +196912, -7.52, -5.49, -7.27, 0.67, -9.05 +197001, -3.74, -3.19, -5.81, -4.01, -5.13 +197002, 2.50, 4.48, 3.78, 4.15, 6.96 +197003, -2.04, -1.23, -6.28, -5.10, -2.47 +197004, -15.66, -14.27, -21.01, -17.16, -18.30 +197005, -9.65, -7.97, -10.30, -10.33, -9.40 +197006, -6.99, -6.94, -10.48, -5.70, -8.24 +197007, 6.57, 6.58, 0.79, 4.34, 5.80 +197008, 5.71, 5.98, 7.70, 4.92, 6.84 +197009, 13.46, 10.18, 20.76, 12.09, 13.20 +197010, -4.83, -5.15, -12.17, -4.55, -6.45 +197011, 0.02, 1.29, -2.55, -0.09, 0.25 +197012, 11.06, 7.67, 4.91, 9.37, 6.92 +197101, 14.04, 11.64, 17.95, 8.35, 16.09 +197102, 7.33, 3.16, 4.84, 6.83, 4.01 +197103, 6.08, 3.93, 9.06, 4.73, 6.25 +197104, 1.79, 2.62, 3.33, 4.29, 3.67 +197105, -4.86, -4.95, -6.36, -0.63, -6.10 +197106, -1.89, -1.64, -3.22, -0.36, -1.69 +197107, -5.83, -4.48, -8.91, -6.06, -5.89 +197108, 3.96, 3.83, 5.01, 4.74, 5.11 +197109, 0.72, -1.50, -2.93, 1.33, -0.01 +197110, -5.59, -5.42, -7.64, -4.88, -6.31 +197111, -3.57, -3.14, -2.57, -2.90, -4.03 +197112, 12.60, 10.90, 15.58, 11.78, 11.07 +197201, 10.11, 8.82, 17.69, 6.69, 11.79 +197202, 4.82, 3.71, 5.99, 7.78, 2.56 +197203, -0.40, -0.18, 0.22, 0.59, 0.11 +197204, -0.22, 0.52, 0.24, -0.68, -0.10 +197205, -2.45, -1.59, -1.42, 0.16, -2.29 +197206, -4.42, -2.59, -3.33, -1.30, -3.72 +197207, -4.61, -1.55, -2.68, -2.11, -4.31 +197208, -0.78, 2.99, 0.73, 3.71, 0.78 +197209, -3.87, -2.36, -3.89, -3.78, -3.38 +197210, -1.87, -0.55, -2.05, -4.60, -2.11 +197211, 6.05, 5.83, 2.78, -0.33, 4.29 +197212, -2.69, -1.37, -2.68, -0.11, -3.07 +197301, -5.89, -2.03, -3.85, -8.11, -4.62 +197302, -8.41, -6.31, -9.14, -7.50, -7.06 
+197303, -4.25, -1.49, -3.49, -3.25, -2.12 +197304, -8.83, -4.83, -8.69, -10.31, -8.50 +197305, -9.63, -6.77, -9.77, -8.67, -8.92 +197306, -4.88, -2.42, -2.62, -4.26, -2.87 +197307, 11.87, 9.48, 15.73, 14.44, 9.88 +197308, -5.39, -3.34, -6.09, -6.12, -4.47 +197309, 6.16, 8.91, 11.33, 8.15, 5.98 +197310, -1.61, 2.15, 1.30, 0.74, -1.13 +197311, -20.28, -15.76, -21.94, -14.82, -16.41 +197312, -6.09, -1.97, -4.96, -5.71, -4.53 +197401, 14.52, 12.31, 15.04, 9.35, 12.85 +197402, 1.26, 0.77, -1.27, -0.78, 0.71 +197403, 2.12, 1.02, 0.53, -0.84, 0.57 +197404, -4.75, -4.64, -6.02, -5.61, -5.79 +197405, -6.23, -6.52, -7.01, -5.76, -7.97 +197406, -3.04, -1.52, -4.10, -3.12, -3.94 +197407, -5.64, -3.47, -7.55, -7.21, -5.73 +197408, -7.76, -7.14, -8.30, -7.91, -9.09 +197409, -8.57, -7.35, -10.88, -11.78, -7.10 +197410, 6.93, 11.87, 6.19, 20.19, 8.42 +197411, -6.20, -3.49, -5.91, -1.60, -5.21 +197412, -8.48, -6.24, -10.19, -8.52, -7.81 +197501, 31.37, 25.12, 34.52, 25.99, 29.05 +197502, 4.91, 4.80, 7.96, 11.81, 5.90 +197503, 11.65, 5.57, 11.64, 4.99, 7.09 +197504, 3.45, 4.80, 6.73, 3.80, 1.68 +197505, 6.89, 7.49, 10.93, 8.60, 7.22 +197506, 8.02, 7.90, 8.85, 5.48, 5.82 +197507, 0.84, -1.62, -2.45, -2.82, -0.77 +197508, -4.73, -3.76, -6.18, -7.71, -5.01 +197509, -2.61, -3.30, -4.49, -5.02, -4.42 +197510, 3.97, 1.25, 0.31, 1.06, 0.76 +197511, 3.89, 1.58, 1.72, 2.11, 0.76 +197512, -0.77, -0.81, -2.05, -0.72, 0.29 +197601, 20.87, 18.63, 25.22, 17.48, 17.09 +197602, 13.09, 8.40, 13.52, 7.14, 10.65 +197603, 1.57, 1.43, 2.99, 2.14, 2.61 +197604, -1.32, 0.05, -1.06, -3.91, -0.10 +197605, -3.96, 0.33, -3.42, -1.83, -1.86 +197606, 1.19, 3.70, 5.90, 2.64, 2.73 +197607, -0.62, 0.81, -1.23, 0.80, 0.84 +197608, -2.80, -1.14, -2.89, -1.59, -1.23 +197609, 0.82, 2.74, 1.61, 2.03, 1.61 +197610, -2.82, -1.98, -3.83, -0.95, -1.38 +197611, 2.68, 2.46, 2.84, 0.99, 2.74 +197612, 9.51, 9.17, 12.44, 9.22, 10.20 +197701, 2.16, 4.46, 5.57, 3.68, 3.65 +197702, -0.28, -0.04, 0.50, -0.49, 0.86 +197703, -0.13, 0.88, 1.98, 0.71, 1.68 +197704, 0.74, 2.29, 1.43, -1.20, 2.23 +197705, -0.02, 0.05, 0.23, 0.11, 0.85 +197706, 4.54, 5.67, 6.14, 7.07, 5.32 +197707, 1.09, -0.17, 1.19, 0.97, 2.26 +197708, -0.14, -1.34, 0.50, -0.37, 0.04 +197709, 1.94, 0.53, 0.88, 4.80, 1.58 +197710, -1.97, -3.08, -2.54, -0.32, -2.14 +197711, 6.63, 6.91, 9.12, 8.15, 7.05 +197712, 1.59, 1.74, 3.32, 3.85, 2.36 +197801, -1.11, -2.30, -2.27, -1.40, -0.22 +197802, 3.10, 2.08, 2.56, 1.86, 2.49 +197803, 6.88, 5.29, 9.19, 8.94, 6.86 +197804, 7.77, 7.25, 11.59, 10.77, 7.77 +197805, 7.27, 5.73, 11.67, 8.23, 6.22 +197806, 0.45, 0.40, 0.59, 0.77, 1.86 +197807, 3.17, 5.39, 7.06, 8.88, 5.65 +197808, 9.75, 6.97, 16.78, 9.70, 9.14 +197809, 0.91, -0.13, 0.81, -0.21, 0.55 +197810, -18.96, -17.18, -23.68, -22.80, -16.42 +197811, 4.12, 4.92, 8.05, 7.83, 2.74 +197812, -0.13, 1.04, 4.00, 3.34, 1.65 +197901, 9.34, 9.50, 11.82, 9.01, 8.42 +197902, -2.73, -1.46, -3.53, -4.45, -1.11 +197903, 6.69, 9.35, 10.99, 7.23, 7.49 +197904, 2.32, 2.59, 2.98, 1.07, 3.11 +197905, -1.31, -0.72, -1.34, -1.40, -0.64 +197906, 2.57, 7.25, 5.20, 4.51, 5.91 +197907, 1.59, 2.59, 1.89, 2.82, 2.71 +197908, 7.09, 8.06, 9.37, 7.62, 6.73 +197909, -0.81, 0.19, -0.32, 1.32, -0.23 +197910, -9.68, -9.32, -9.27, -9.28, -10.10 +197911, 4.02, 7.74, 10.84, 9.69, 6.00 +197912, 4.43, 7.56, 9.80, 7.18, 6.15 +198001, 7.46, 11.37, 13.55, 8.24, 9.73 +198002, -1.33, -1.02, -0.73, -3.26, -0.65 +198003, -15.20, -16.37, -18.79, -13.89, -16.01 +198004, 3.54, 5.46, 5.70, 6.20, 5.91 +198005, 7.80, 5.98, 6.76, 10.76, 7.61 +198006, 3.71, 
4.33, 3.19, 1.94, 5.16 +198007, 8.30, 8.04, 16.42, 13.13, 7.88 +198008, 6.26, 5.60, 12.95, 8.66, 5.67 +198009, 3.76, 5.53, 6.23, 6.59, 4.58 +198010, 1.87, 4.47, 7.25, 6.40, 4.71 +198011, 2.47, 9.40, 10.57, 7.68, 6.28 +198012, -3.08, -3.80, -3.90, -2.12, -2.57 +198101, 3.01, -0.98, -2.50, 2.80, 1.12 +198102, 1.51, 0.19, -2.13, -1.80, 1.09 +198103, 7.94, 6.40, 8.28, 11.61, 7.56 +198104, 4.67, -0.14, 5.91, 7.11, 2.89 +198105, 2.71, 2.19, 7.69, 3.72, 1.72 +198106, 1.69, -1.21, -5.35, -2.22, 0.11 +198107, -2.25, -0.06, -5.36, -4.18, -2.05 +198108, -6.49, -7.05, -11.88, -10.82, -6.51 +198109, -6.33, -10.61, -9.79, -8.32, -8.56 +198110, 6.04, 6.40, 10.53, 10.00, 6.99 +198111, 1.21, 1.77, 2.28, 0.67, 2.44 +198112, -0.82, -2.71, -1.92, -1.58, -2.17 +198201, -0.30, -2.81, -1.04, -0.89, -2.06 +198202, -2.36, -6.38, -5.72, -3.02, -4.12 +198203, 1.24, -2.33, -1.19, -0.09, -1.11 +198204, 6.00, 3.08, 8.20, 9.68, 5.33 +198205, -1.57, -2.40, -3.47, -3.78, -2.45 +198206, -0.66, -4.77, -4.85, -2.61, -3.28 +198207, 0.79, -4.02, -1.24, 0.35, -1.59 +198208, 4.68, 6.71, 6.04, 6.12, 7.18 +198209, 5.29, -0.16, 5.10, 5.54, 4.10 +198210, 12.71, 9.98, 17.91, 15.71, 13.31 +198211, 10.69, 3.94, 13.81, 14.28, 10.35 +198212, 1.99, 1.42, 3.91, 4.04, 1.74 +198301, 8.36, 10.95, 16.11, 12.07, 10.50 +198302, 7.99, 2.39, 8.48, 7.97, 4.87 +198303, 7.68, 3.04, 2.84, 5.88, 6.30 +198304, 8.81, 6.39, 6.77, 8.71, 8.97 +198305, 7.84, 9.96, 11.67, 7.81, 8.71 +198306, 6.73, 3.83, 5.91, 5.78, 4.12 +198307, -1.30, -0.22, -5.99, -6.83, -1.31 +198308, -4.01, -1.95, -5.47, -4.56, -3.27 +198309, 1.49, 0.38, -1.37, -2.47, 0.61 +198310, -4.55, -4.48, -10.60, -10.90, -4.68 +198311, 3.81, 1.93, 5.00, 0.71, 2.66 +198312, -2.07, -1.48, -3.87, -5.83, -1.35 +198401, -0.83, 2.29, -1.99, -1.78, 0.66 +198402, -5.44, -4.65, -9.31, -6.20, -4.63 +198403, 1.13, 1.71, -0.62, 0.39, 0.44 +198404, -1.67, -1.10, -2.66, -0.96, -2.55 +198405, -3.94, -4.03, -6.38, -7.36, -5.30 +198406, 2.85, 0.40, 2.46, 2.87, 1.02 +198407, -3.48, -4.84, -7.22, -5.48, -3.77 +198408, 8.69, 8.75, 11.31, 13.42, 7.24 +198409, 0.11, 0.24, -2.87, -2.43, 1.38 +198410, -2.18, -3.15, -4.12, -2.24, -0.66 +198411, -2.79, -2.84, -5.37, -4.91, -2.47 +198412, 0.66, 0.23, 0.50, -0.24, 1.15 +198501, 11.18, 9.26, 17.83, 16.84, 11.74 +198502, 4.23, 4.06, 5.94, 7.91, 4.57 +198503, -1.05, -0.82, -3.90, -2.06, 0.58 +198504, -1.63, -1.36, -4.12, 0.65, 0.16 +198505, 3.12, 1.57, 1.12, 2.91, 3.07 +198506, 1.21, -0.82, -2.04, 1.69, 1.96 +198507, 1.00, 1.72, 2.87, 0.93, 2.33 +198508, -0.51, -0.85, -1.60, -1.82, 1.09 +198509, -4.40, -5.29, -8.21, -7.18, -5.31 +198510, 1.92, 1.28, 0.81, 2.28, 2.81 +198511, 5.75, 4.66, 5.20, 6.04, 5.73 +198512, 3.89, 1.11, 3.49, 3.85, 3.62 +198601, 4.07, 2.08, 3.93, 5.47, 6.29 +198602, 7.53, 3.67, 5.79, 3.46, 8.15 +198603, 6.17, 3.92, 3.16, 9.50, 4.68 +198604, 1.94, -0.23, 2.88, 1.17, 1.59 +198605, 5.10, 3.46, 2.94, 4.96, 3.96 +198606, 2.70, 0.75, -2.16, 2.27, 1.50 +198607, -8.01, -6.94, -9.53, -8.54, -6.32 +198608, 0.42, 3.70, 1.12, 0.28, 2.10 +198609, -6.87, -4.75, -7.74, -9.15, -6.12 +198610, 3.46, 1.96, 2.97, 3.29, 1.82 +198611, 0.35, -0.92, 0.15, -1.13, -1.80 +198612, -4.42, -2.77, -4.17, -5.20, -3.32 +198701, 9.11, 13.54, 14.03, 11.93, 10.67 +198702, 7.12, 6.46, 10.22, 14.52, 5.75 +198703, 2.62, 5.71, 1.23, 3.07, 2.65 +198704, -2.75, -1.30, -0.77, -2.73, -2.52 +198705, -0.61, 2.41, 0.61, -1.12, -0.93 +198706, 3.45, 3.47, 0.59, 1.93, 2.25 +198707, 3.31, 6.49, 2.03, -0.25, 2.44 +198708, 2.15, 0.85, 1.81, -0.02, 2.16 +198709, -2.06, -1.29, -0.88, -2.21, -1.76 +198710, 
-29.09, -26.15, -30.68, -32.89, -24.70 +198711, -6.69, -4.13, -7.63, -6.02, -4.22 +198712, 2.96, 2.95, 6.35, 1.23, 1.33 +198801, 7.13, 7.80, 7.03, 12.27, 8.28 +198802, 8.89, 6.10, 7.16, 7.59, 4.57 +198803, 4.34, 4.23, 3.62, 5.89, 2.14 +198804, 1.86, 2.38, 1.99, 1.43, -0.01 +198805, -1.21, -0.80, -3.61, -2.22, -1.54 +198806, 5.06, 4.49, 6.53, 4.45, 4.58 +198807, 0.20, 0.06, -1.24, 0.17, 0.36 +198808, -2.03, -2.64, -3.39, -1.89, -1.65 +198809, 3.54, 1.60, 2.26, 2.09, 1.32 +198810, -0.64, -1.03, -2.91, -3.78, -0.91 +198811, -4.14, -3.15, -4.75, -6.14, -3.37 +198812, 2.06, 2.44, 3.88, 3.29, 0.56 +198901, 5.48, 6.39, 7.67, 6.06, 6.12 +198902, 0.94, 0.65, -0.83, -1.24, 0.92 +198903, 2.71, 2.11, 1.39, 2.35, 0.82 +198904, 3.73, 3.76, 3.77, 3.50, 2.19 +198905, 3.31, 2.71, 3.32, 4.79, 2.34 +198906, -1.60, -0.83, -2.15, -4.49, 0.11 +198907, 3.71, 3.38, 0.92, 4.50, 2.90 +198908, 1.14, 2.35, 1.81, 1.79, 2.45 +198909, -0.68, 0.48, 0.33, 3.27, 0.00 +198910, -6.82, -4.43, -4.54, -5.13, -5.41 +198911, -1.63, 0.15, -2.06, 0.33, -1.14 +198912, -0.95, 1.03, -1.19, -1.38, -3.85 +199001, -5.67, -4.38, -5.66, -5.68, -5.09 +199002, 1.83, 2.90, 3.04, 1.45, 0.85 +199003, 3.55, 3.88, 3.86, 4.74, -0.19 +199004, -2.34, -2.76, -2.25, -0.91, -3.25 +199005, 4.29, 4.93, 7.83, 8.05, 2.09 +199006, 0.56, 0.03, 1.09, 4.70, -0.99 +199007, -4.23, -0.44, -5.61, -0.49, -3.58 +199008, -13.45, -8.23, -14.51, -10.57, -11.36 +199009, -10.15, -4.71, -9.21, -8.03, -8.82 +199010, -6.38, -5.69, -4.82, -4.07, -7.14 +199011, 3.97, 2.69, 4.46, 6.64, 3.44 +199012, -0.25, -0.81, -0.21, 3.48, -0.90 +199101, 9.11, 5.23, 14.78, 13.42, 7.25 +199102, 12.72, 10.36, 13.97, 21.23, 14.94 +199103, 8.89, 4.51, 11.35, 15.82, 7.12 +199104, 2.34, 1.76, 5.61, 4.01, 2.81 +199105, 4.67, 2.87, 1.25, 7.20, 2.50 +199106, -2.27, -3.62, -5.94, -4.40, -2.94 +199107, 2.14, 4.18, 3.39, 7.81, 3.05 +199108, 3.09, 2.15, 3.83, 6.14, 2.51 +199109, 0.63, -0.02, 2.15, 7.58, -0.86 +199110, 1.11, 2.10, 3.41, 10.18, 1.37 +199111, -2.80, -1.89, -3.74, -0.12, -2.48 +199112, 3.53, 1.95, 3.37, 10.71, 3.39 +199201, 13.11, 10.51, 19.47, 24.49, 18.60 +199202, 7.01, 3.97, 7.19, -0.86, 7.04 +199203, -0.25, -0.42, -1.96, -7.05, -0.24 +199204, -4.42, -1.46, -4.91, -11.28, -1.23 +199205, -1.26, 1.40, -1.33, -1.35, 2.26 +199206, -4.74, -3.86, -6.58, -6.04, -2.25 +199207, 2.31, 2.61, 1.27, 3.62, 3.69 +199208, -1.68, -2.29, -4.25, -4.26, -2.12 +199209, 1.57, 1.81, 1.88, -1.00, 1.43 +199210, 2.75, 0.71, 3.64, 3.24, 1.31 +199211, 8.44, 4.71, 10.39, 12.11, 5.60 +199212, 3.29, 2.78, 4.65, 0.64, 5.23 +199301, 6.42, 5.93, 7.46, 3.17, 9.11 +199302, -2.59, 2.51, -2.28, -12.79, 0.83 +199303, 3.29, 3.36, 1.63, 0.25, 3.87 +199304, -2.68, -0.47, -3.39, -3.80, -0.85 +199305, 4.76, 3.64, 6.83, 6.37, 1.85 +199306, 0.19, 1.25, 3.16, -0.11, 1.48 +199307, 0.50, 1.28, 1.14, -2.35, 3.09 +199308, 2.77, 4.56, 6.91, 1.77, 3.51 +199309, 2.35, 0.86, 3.65, 4.51, 3.40 +199310, 4.16, 2.14, 6.03, 6.91, 3.41 +199311, -0.70, -3.11, -2.21, -4.41, -2.73 +199312, 1.01, 1.75, 1.14, -0.99, 1.43 +199401, 2.49, 5.44, 5.69, 6.40, 5.38 +199402, -0.30, 0.33, 0.14, -2.84, -1.39 +199403, -3.82, -3.98, -4.74, -6.82, -3.61 +199404, -1.73, 0.22, -2.19, -4.59, -0.41 +199405, -1.74, 0.06, -0.94, -0.16, 1.43 +199406, -3.74, -1.48, -5.05, -4.82, -0.83 +199407, 0.81, 2.65, 2.53, -0.53, 0.95 +199408, 4.27, 2.65, 5.85, 6.99, 2.49 +199409, 0.16, 0.61, 3.06, 1.17, -0.23 +199410, -0.14, 0.49, 4.09, -1.59, -1.77 +199411, -4.32, -4.92, -2.76, -2.59, -5.19 +199412, -3.44, -0.27, 0.63, -4.15, -0.52 +199501, 1.78, 1.13, 3.83, 7.22, 4.00 +199502, 2.38, 
2.68, 4.37, 2.78, 3.27 +199503, 0.32, 1.59, 3.70, 3.05, 1.61 +199504, 1.55, 2.41, 4.01, 2.22, 2.16 +199505, -0.11, 2.46, 2.31, 3.79, 2.18 +199506, 3.49, 2.82, 10.50, 10.00, 4.22 +199507, 4.59, 4.15, 9.69, 9.96, 4.92 +199508, 1.69, 1.54, 4.41, 7.78, 4.30 +199509, 1.80, 1.57, 4.14, 5.74, 2.89 +199510, -6.25, -4.89, -5.49, -6.31, -1.84 +199511, 0.46, 1.50, 2.50, 1.21, 2.09 +199512, -1.84, 1.20, -1.01, 7.31, 1.25 +199601, 1.49, 2.84, 0.24, 9.71, 3.11 +199602, 3.52, 2.50, 6.99, 5.01, 2.31 +199603, 4.96, 2.95, 0.64, 3.13, 2.65 +199604, 5.27, 5.66, 11.94, 7.26, 2.76 +199605, 8.41, 4.98, 12.41, 7.29, 4.51 +199606, -2.97, -1.50, -7.40, -6.60, -0.74 +199607, -8.61, -6.68, -13.61, -14.47, -5.36 +199608, 4.41, 3.77, 4.90, 5.47, 4.47 +199609, 1.51, 3.15, 4.45, 3.88, 2.98 +199610, -3.14, 0.75, -5.64, -7.35, -0.39 +199611, 1.82, 4.27, 2.19, 0.95, 2.55 +199612, -2.03, 0.99, -2.67, 1.09, 0.25 +199701, 4.81, 6.06, 8.58, 9.31, 5.46 +199702, -0.25, -2.69, -6.10, -1.45, 1.70 +199703, -4.12, -2.58, -9.01, -9.64, -3.17 +199704, -2.68, -1.14, -3.38, -6.69, -0.64 +199705, 7.08, 9.10, 14.48, 10.22, 6.33 +199706, 4.36, 4.09, 3.19, 2.62, 5.69 +199707, 3.15, 5.76, 6.75, 1.25, 5.86 +199708, 2.34, 3.89, 6.89, 2.81, 1.65 +199709, 7.49, 7.37, 9.46, 11.19, 8.75 +199710, -2.16, -2.06, -4.84, -2.33, 0.04 +199711, -1.29, -1.98, -4.46, -4.72, -0.10 +199712, -4.68, -2.92, -7.28, -6.75, 2.34 +199801, 1.29, -0.40, 3.28, 2.39, 1.05 +199802, 7.03, 5.19, 7.95, 4.99, 5.49 +199803, 5.07, 4.02, 4.97, 3.74, 5.64 +199804, 3.19, 1.29, 4.31, -0.49, 3.17 +199805, -2.69, -4.47, -7.72, -3.95, -3.58 +199806, -1.93, -4.13, -2.27, -6.22, -1.46 +199807, -5.33, -7.73, -6.37, -5.44, -4.00 +199808, -19.31, -16.96, -23.67, -24.55, -18.54 +199809, 0.23, 3.55, 6.29, 8.27, 3.47 +199810, 3.52, 3.48, 5.64, 4.01, 1.16 +199811, 11.06, 1.88, 16.43, 7.13, 4.75 +199812, 4.24, -2.14, 5.44, 1.69, 1.46 +199901, 5.46, 2.41, 15.53, 9.16, 4.52 +199902, -4.12, -5.95, -7.06, -7.33, -2.53 +199903, -3.10, 0.87, 0.29, -2.01, -2.04 +199904, 7.83, 12.17, 10.48, 3.22, 7.21 +199905, 3.43, 2.98, 6.49, 6.11, 1.51 +199906, 2.74, 3.53, 7.11, 3.17, 2.40 +199907, -1.09, 1.28, 1.88, 4.13, 0.06 +199908, -6.74, -2.46, -1.17, -1.44, -4.33 +199909, -3.06, -2.92, 2.07, -1.29, -2.87 +199910, -2.56, -3.36, 4.73, -2.46, -0.08 +199911, 4.40, 1.60, 26.75, 12.96, 2.78 +199912, 1.75, 3.04, 24.51, 15.46, 1.74 +200001, 1.23, 3.65, 11.10, 23.75, -0.76 +200002, 2.97, 3.52, 32.67, 42.79, 1.11 +200003, 4.40, 6.13, -3.21, -8.86, 1.77 +200004, -5.32, -1.81, -19.83, -13.52, -3.70 +200005, -4.82, -0.41, -14.16, -7.22, -2.37 +200006, 2.50, 2.39, 16.34, 18.90, 2.51 +200007, -1.50, -0.79, -8.24, -2.43, 1.88 +200008, 2.75, 6.44, 9.07, 12.32, 4.07 +200009, -3.62, -2.09, -11.15, 0.09, -0.88 +200010, -5.23, -3.53, -12.63, -7.91, -3.51 +200011, -9.31, -5.85, -26.51, -13.33, -5.72 +200012, -3.00, 5.76, -9.01, -3.41, 2.78 +200101, 24.41, 10.63, 47.40, 21.92, 15.48 +200102, -2.16, -1.35, -21.01, -8.98, -1.86 +200103, -3.81, -2.82, -15.98, -9.63, -2.47 +200104, 5.40, 5.55, 12.32, 12.17, 4.53 +200105, 7.80, 5.09, 4.02, 13.58, 6.93 +200106, 2.39, -2.77, -0.02, 7.25, 2.65 +200107, -0.51, -2.69, -9.61, -2.84, 1.32 +200108, -0.54, -3.11, -10.04, -2.91, -1.25 +200109, -12.70, -12.55, -19.02, -11.83, -9.44 +200110, 4.73, 4.60, 19.08, 12.15, 2.33 +200111, 7.59, 3.41, 15.09, 7.04, 5.03 +200112, 5.95, 4.94, 8.78, 6.48, 5.37 +200201, 4.54, 0.63, 1.89, -3.50, 3.21 +200202, -0.65, 0.63, -11.74, -7.63, -0.20 +200203, 8.28, 10.35, 9.89, 6.75, 6.45 +200204, 4.79, 1.23, -6.61, -6.39, 3.72 +200205, 0.98, -3.06, -6.73, -4.26, -0.05 
+200206, -4.67, -4.81, -13.89, -9.87, -2.76 +200207, -12.91, -12.36, -14.53, -11.11, -9.01 +200208, -0.97, 0.28, 0.72, -0.74, 0.50 +200209, -7.59, -8.15, -13.86, -8.44, -5.28 +200210, 1.19, 2.25, 11.66, 4.32, 1.67 +200211, 9.27, 7.98, 27.29, 11.66, 6.62 +200212, -4.39, -2.72, -11.04, -7.17, -1.36 +200301, -2.35, -1.19, 1.59, 1.55, 0.88 +200302, -3.97, -2.41, -3.14, -2.24, -1.60 +200303, 0.12, -0.44, 0.69, 4.37, 1.90 +200304, 9.15, 6.84, 12.64, 15.20, 8.56 +200305, 8.54, 12.08, 22.67, 23.53, 8.77 +200306, 5.70, 3.04, 5.88, 9.06, 4.19 +200307, 6.96, 3.39, 11.71, 6.48, 6.29 +200308, 5.69, 5.90, 6.77, 4.52, 2.80 +200309, 0.58, 0.33, 3.42, 7.31, 1.79 +200310, 8.83, 7.56, 11.77, 4.96, 7.38 +200311, 3.21, 3.71, 4.87, 2.69, 3.91 +200312, 2.39, 8.37, 0.93, 4.06, 3.06 +200401, 4.63, 4.80, 12.07, 11.93, 4.21 +200402, 3.43, 2.36, -2.50, 0.35, 2.18 +200403, 0.76, 0.79, -2.09, 0.51, 1.18 +200404, -1.13, -0.75, -6.93, -1.07, -3.23 +200405, -0.87, 0.49, 1.68, -4.18, 0.28 +200406, 3.33, 5.97, 0.98, -0.11, 2.72 +200407, -5.07, -2.96, -12.66, -9.82, -3.00 +200408, -2.97, -1.45, -3.03, 0.07, 0.11 +200409, 3.02, 5.68, 4.26, 4.17, 3.53 +200410, 1.96, 0.71, 3.33, -1.07, 2.30 +200411, 8.81, 10.61, 9.95, 8.14, 6.89 +200412, 5.46, 4.19, 8.55, 7.54, 4.72 +200501, -1.94, -1.64, -7.38, -3.97, -2.13 +200502, 1.06, 5.49, -0.27, -1.97, 0.74 +200503, -0.88, -1.02, -4.45, -7.23, -2.15 +200504, -5.43, -6.45, -8.12, -3.40, -4.28 +200505, 5.44, 4.43, 6.34, 4.74, 3.94 +200506, 4.16, 4.84, 2.07, 3.26, 2.99 +200507, 6.60, 8.21, 6.73, 8.64, 4.89 +200508, -3.32, 0.84, -1.11, -0.99, -1.11 +200509, -1.92, 2.65, 1.07, 0.40, 0.55 +200510, -3.39, -5.84, -3.39, -4.49, -1.08 +200511, 3.80, 3.33, 4.59, 2.20, 3.42 +200512, 0.14, 1.94, 1.13, -0.09, 0.48 +200601, 5.97, 11.53, 8.65, 11.48, 4.60 +200602, 1.24, -1.89, 0.88, 2.06, 0.66 +200603, 4.07, 5.13, 5.29, 2.56, 3.51 +200604, 1.20, 3.82, 0.24, -3.57, 0.65 +200605, -3.81, -4.87, -7.47, -7.61, -2.17 +200606, -0.57, 0.15, -2.78, -1.91, 0.43 +200607, -3.78, -2.11, -5.05, -4.63, -2.36 +200608, 1.35, 0.54, 4.37, 3.73, 1.25 +200609, 3.19, -1.84, 1.34, -0.30, 1.62 +200610, 5.28, 6.25, 4.33, 6.42, 3.76 +200611, 1.36, 4.30, 2.42, 2.58, 2.35 +200612, 2.04, -0.17, 1.43, 1.37, 1.85 +200701, 3.29, 1.57, 2.75, 4.68, 1.22 +200702, 0.53, 1.62, 0.25, 0.17, -0.89 +200703, 1.07, 2.10, 0.47, 0.10, -0.96 +200704, 1.80, 4.22, 2.19, 4.94, 0.54 +200705, 2.29, 4.07, 2.85, -0.19, 2.35 +200706, -0.83, 0.63, 1.22, -3.52, -1.68 +200707, -5.74, -3.75, -3.23, -5.02, -6.81 +200708, -2.49, -1.83, -1.71, 0.02, -0.11 +200709, -0.53, 3.12, 2.55, 2.98, 0.41 +200710, -0.43, 2.77, 2.87, 2.84, -1.24 +200711, -9.06, -7.60, -9.78, -8.09, -7.49 +200712, -2.71, 1.64, -0.47, -2.54, -3.01 +200801, -4.50, -6.86, -9.89, -5.24, -0.97 +200802, -4.00, 1.10, -3.25, -1.44, -4.34 +200803, -1.49, -1.89, -3.03, -5.79, -2.01 +200804, 1.87, 5.96, 2.71, 2.23, 0.08 +200805, 2.27, 8.50, 5.02, 3.34, -0.20 +200806, -11.54, -3.91, -8.84, -6.87, -13.14 +200807, 0.75, -3.88, -1.30, 6.64, 2.84 +200808, 3.79, 1.07, 2.50, -0.11, 3.43 +200809, -8.70, -16.51, -14.75, -12.86, -6.12 +200810, -24.25, -24.93, -23.04, -18.95, -18.07 +200811, -17.58, -14.41, -13.94, -14.07, -12.52 +200812, 5.10, -0.42, 3.43, 6.56, 1.80 +200901, -5.56, -5.81, 0.47, 6.08, -9.12 +200902, -9.56, -16.49, -8.91, -11.84, -13.19 +200903, 15.21, 8.98, 12.37, 15.06, 14.48 +200904, 33.19, 23.25, 20.94, 14.20, 18.31 +200905, 8.38, 11.47, 8.41, 18.22, 6.84 +200906, 3.56, 0.29, 6.56, 10.49, 0.16 +200907, 12.21, 10.08, 8.54, 8.78, 6.27 +200908, 8.26, 5.96, 7.66, 5.57, 6.14 +200909, 6.10, 10.45, 
11.13, 8.35, 2.23 +200910, -3.61, -5.20, -6.03, -10.43, -7.10 +200911, 2.01, 3.32, 2.94, 3.18, -0.61 +200912, 6.39, 8.22, 9.12, 8.08, 3.66 +201001, -0.75, -2.49, -3.17, -0.31, 1.96 +201002, 5.67, 4.08, 4.87, 2.25, 2.81 +201003, 10.08, 7.15, 8.38, 7.65, 7.10 +201004, 7.40, 8.23, 6.01, 7.01, 9.33 +201005, -7.30, -9.31, -7.09, -9.12, -8.47 +201006, -9.11, -6.75, -6.14, -7.71, -8.69 +201007, 6.21, 7.89, 6.65, 3.15, 4.08 +201008, -7.81, -6.44, -6.23, -5.45, -7.39 +201009, 12.31, 10.76, 13.82, 11.22, 7.99 +201010, 4.01, 6.23, 5.81, 2.77, 2.91 +201011, 3.71, 4.58, 1.47, 0.03, 1.09 +201012, 5.60, 8.90, 7.89, 10.33, 9.03 +201101, -0.56, 3.59, 3.22, 1.20, 1.72 +201102, 4.34, 5.73, 5.61, 4.37, 2.96 +201103, 1.20, 3.28, 0.83, 2.48, -0.08 +201104, 2.31, 0.37, 1.59, 4.88, -0.03 +201105, -1.04, -3.33, -1.88, 0.11, -2.69 +201106, -1.35, -2.68, -2.84, -4.27, -1.94 +201107, -1.90, -0.91, -4.80, -3.25, -2.01 +201108, -8.80, -9.70, -9.89, -11.25, -8.16 +201109, -9.25, -14.22, -11.11, -9.43, -9.81 +201110, 12.35, 16.37, 11.94, 9.79, 10.78 +201111, -2.62, 0.15, -3.59, -2.91, -1.14 +201112, 0.54, -1.11, -1.01, -0.85, 1.24 +201201, 7.45, 8.23, 10.53, 11.87, 8.59 +201202, 4.94, 3.82, 4.01, 3.85, 3.60 +201203, 3.79, -0.30, 2.43, 4.21, 5.10 +201204, -0.61, -0.97, -2.90, -1.55, -0.66 +201205, -5.63, -10.67, -8.26, -5.52, -5.12 +201206, 2.62, 3.08, 4.03, 7.12, 3.69 +201207, -0.83, -0.84, -2.20, 1.09, -1.38 +201208, 3.21, 2.01, 3.33, 1.80, 2.70 +201209, 3.43, 3.59, 3.00, 6.13, 3.92 +201210, -1.25, -1.39, -4.50, -5.22, 0.34 +201211, 2.17, 0.16, -0.43, 0.75, -0.25 +201212, 1.99, 3.08, 2.91, -0.65, 3.58 +201301, 6.39, 7.03, 6.75, 9.07, 7.65 +201302, 0.91, 0.46, 1.10, -0.27, 2.06 +201303, 5.58, 3.25, 4.47, 5.21, 4.30 +201304, 1.43, -1.19, -0.24, 1.07, -0.83 +201305, 5.46, 3.80, 5.59, 4.75, 4.76 +201306, 0.34, -1.67, 1.03, -1.41, 0.16 +201307, 6.02, 6.75, 7.94, 10.67, 6.93 +201308, -3.26, -1.52, -1.47, -1.71, -2.53 +201309, 5.43, 5.46, 6.35, 9.43, 4.29 +201310, 3.31, 4.69, 2.53, -1.95, 3.00 +201311, 3.26, 2.21, 4.25, 8.40, 4.23 +201312, 1.76, 3.16, 3.82, 4.09, 2.10 +201401, -4.37, -1.46, 1.37, 9.07, -1.73 +201402, 4.92, 5.44, 4.42, 5.98, 3.51 +201403, 1.33, 1.48, -1.36, -3.90, 1.11 +201404, -3.04, -0.34, -6.29, -7.80, -2.63 +201405, 0.16, -0.60, 0.40, 0.88, 0.16 +201406, 2.42, 4.98, 5.17, 7.29, 3.50 +201407, -4.53, -5.77, -4.94, -7.03, -3.33 +201408, 5.68, 4.28, 3.36, 4.77, 3.38 +201409, -3.98, -7.74, -4.39, -4.93, -3.44 +201410, 3.48, -0.88, 1.68, 5.41, 4.12 +201411, 2.28, -5.21, 1.52, 0.16, 0.29 +201412, 1.47, -1.83, 1.80, 5.29, 3.00 +201501, -2.56, -5.74, -3.48, 2.72, -5.12 +201502, 4.72, 7.82, 7.25, 9.23, 5.20 +201503, 1.52, -2.64, -1.04, 1.10, 0.99 +201504, -1.75, 4.15, 1.25, -4.22, -0.15 +201505, 0.23, -2.50, 2.06, 5.33, 0.35 +201506, -0.31, -3.69, -1.14, 2.76, 1.26 +201507, -1.67, -8.64, -3.74, -0.54, -1.31 +201508, -4.18, -2.10, -5.69, -7.18, -3.92 +201509, -5.22, -9.33, -3.25, -11.58, -2.49 +201510, 4.22, 7.40, 6.08, 2.97, 4.74 +201511, -0.28, -1.70, 2.33, 10.10, 2.32 +201512, -3.48, -6.70, -3.58, -5.73, -4.25 +201601, -6.42, -8.61, -8.29, -18.69, -7.61 +201602, 3.35, -0.09, 0.59, -3.04, 0.26 +201603, 7.09, 13.38, 6.63, 7.77, 7.38 +201604, 0.36, 8.22, 1.37, 5.36, 3.51 +201605, -2.35, -1.79, 3.22, 1.24, 1.10 +201606, 1.09, 1.64, 1.61, -4.28, -1.31 +201607, 6.13, 4.38, 6.87, 5.92, 5.35 +201608, 1.51, 2.88, 2.36, 0.71, 2.79 +201609, -0.64, 1.63, 2.05, 7.92, 0.89 +201610, -4.34, -4.44, -5.27, -13.72, -2.11 +201611, 7.78, 11.72, 5.23, 5.07, 12.77 +201612, 1.92, 1.58, 0.77, -2.71, 5.55 +201701, -2.27, 2.05, 3.39, 4.81, 
0.73 +201702, 0.00, -0.38, 1.92, 6.23, 1.47 +201703, 0.63, -0.43, 1.75, 2.39, -0.57 +201704, 1.67, -0.68, 2.18, -3.42, 0.69 +201705, -2.16, -2.57, 1.51, -5.15, -2.61 +201706, 0.92, 0.85, 1.08, 10.29, 4.84 +201707, -1.71, 0.92, 1.61, -2.71, 0.56 +201708, -3.52, -3.86, -1.01, 1.32, -1.83 +201709, 6.30, 8.56, 5.68, 8.47, 7.13 +201710, -1.88, 0.86, 1.50, -2.44, 0.87 +201711, 6.27, 3.22, 1.83, 2.45, 3.28 +201712, 2.69, 2.05, 1.13, 0.76, 0.03 +201801, 1.78, 0.60, 3.98, 6.97, 2.66 +201802, -4.61, -6.83, -1.80, -3.60, -3.76 +201803, 0.76, 2.08, 0.10, 0.30, 1.36 +201804, 0.54, 1.71, -0.11, -0.32, 1.02 +201805, 3.84, 4.93, 5.77, 7.56, 3.62 +201806, 4.08, 2.14, 1.50, -2.37, -0.04 +201807, 0.15, 1.57, -0.36, -1.60, 1.30 +201808, 4.75, -0.86, 6.75, 5.93, 1.50 +201809, -1.35, -0.53, -1.50, -1.13, -2.66 +201810, -8.38, -11.49, -10.09, -14.58, -8.13 +201811, -1.05, -1.90, 0.64, -0.42, 0.58 +201812, -12.85, -14.16, -10.31, -18.47, -11.10 +201901, 12.08, 14.02, 15.52, 18.44, 10.71 +201902, 4.31, 4.36, 6.47, 6.52, 4.92 +201903, -3.26, -2.03, -0.84, 3.04, -3.49 +201904, 2.15, 2.77, 4.21, -2.59, 4.65 +201905, -10.44, -11.14, -8.01, -8.10, -6.01 +201906, 6.30, 6.70, 4.95, 4.25, 5.63 +201907, -0.04, -2.93, 1.18, -5.21, 1.12 +201908, -3.85, -8.41, -5.44, -4.89, -4.50 +201909, 5.44, 5.44, 0.52, -3.50, 3.37 +201910, -0.04, -1.97, 0.18, -0.27, 1.92 +201911, 2.78, -0.21, 4.39, 8.58, 2.94 +201912, 2.30, 7.14, 3.49, 9.66, 2.83 +202001, -4.11, -6.27, 1.17, 4.81, -3.25 +202002, -9.95, -11.27, -7.05, -0.99, -8.67 +202003, -23.67, -27.06, -18.92, -17.39, -24.90 +202004, 21.69, 26.65, 20.94, 21.41, 12.08 +202005, 8.27, 3.87, 9.87, 13.88, 4.45 +202006, 7.94, 6.65, 10.34, 4.99, 5.52 +202007, 6.19, 3.02, 9.66, 3.43, 0.46 +202008, 9.46, 3.39, 3.91, 1.30, 5.90 +202009, -1.90, -4.30, -2.85, -1.25, -3.54 +202010, 2.35, 0.77, 0.23, -2.71, 4.27 +202011, 22.44, 22.98, 21.99, 18.44, 18.31 +202012, 5.79, 9.02, 11.46, 9.88, 8.55 +202101, 17.60, 10.11, 9.54, 19.01, 4.24 +202102, 7.50, 12.56, 7.48, 6.37, 11.72 +202103, 5.77, 3.53, 0.86, -5.05, 6.52 +202104, 3.22, 1.42, 2.71, -2.18, 2.73 +202105, 4.79, 5.24, 0.13, -3.26, 2.79 +202106, 1.23, 2.65, 7.48, 5.32, -1.38 +202107, -3.81, -3.90, -2.78, -8.79, -1.82 +202108, -0.41, 0.90, 2.34, 4.94, 2.92 +202109, -4.55, 0.88, -4.71, -5.58, -1.10 +202110, 2.73, 5.04, 4.10, -3.78, 3.72 +202111, -3.08, -4.28, -6.68, -9.76, -3.00 +202112, 1.78, 2.49, -1.18, -5.71, 2.48 +202201, -8.45, -2.77, -11.30, -15.42, -4.57 +202202, -0.46, 4.18, -0.54, -4.70, 0.31 +202203, -0.82, 5.93, 0.62, 3.28, 0.78 +202204, -6.83, -6.21, -13.72, -20.01, -7.92 +202205, -2.36, 3.20, -2.90, -7.35, -0.07 +202206, -8.39, -11.46, -9.77, 1.97, -8.14 +202207, 7.91, 10.07, 10.24, 7.03, 6.69 +202208, -0.59, 0.34, -1.26, 2.61, -1.47 +202209, -13.91, -13.38, -12.04, -10.61, -7.53 +202210, 9.23, 12.83, 6.12, 1.14, 7.39 +202211, 3.71, 3.04, -1.12, -3.54, 1.77 +202212, -9.31, -6.08, -7.94, -4.19, -3.99 +202301, 17.77, 11.58, 19.58, 19.66, 9.88 +202302, -4.43, -2.85, -4.14, -6.86, -1.65 +202303, -5.60, -4.99, -3.35, -7.92, -7.97 +202304, -2.60, -2.93, -5.77, 1.73, -1.87 +202305, -3.47, -3.97, 5.82, 1.90, -2.79 +202306, 8.07, 10.08, 6.17, 1.03, 6.42 +202307, 5.45, 7.27, 5.58, 0.05, 6.82 +202308, -6.67, -5.03, -7.50, -8.71, -5.58 +202309, -7.17, -4.22, -8.88, -8.94, -5.30 +202310, -8.30, -6.29, -9.14, -10.46, -5.19 +202311, 6.79, 4.48, 10.93, 7.86, 9.59 +202312, 10.05, 8.18, 10.30, 16.94, 10.33 +202401, -5.17, -5.47, -3.29, 0.09, -3.61 +202402, 4.11, 4.61, 3.95, 11.74, 2.06 +202403, 1.11, 5.96, 0.84, 1.79, 2.87 +202404, -7.21, -3.51, -7.57, 
-11.06, -5.55 +202405, 13.20, 4.95, 3.13, 1.67, 3.58 +202406, -4.07, -4.89, -0.41, -7.13, -0.85 +202407, 4.34, 6.64, 5.35, 8.09, 9.28 +202408, -2.58, -3.05, -2.95, -4.26, -1.51 +202409, 0.64, 2.90, 0.60, -2.60, 0.22 +202410, -4.37, -1.77, 0.61, 0.74, 0.30 +202411, 7.32, 14.40, 13.29, 2.75, 10.63 +202412, -4.47, -5.32, 5.67, -6.72, -6.15 +202501, 1.39, 2.13, 0.68, 1.58, 1.58 +202502, -5.32, -5.15, -8.88, -8.60, -2.84 +202503, -8.30, -6.23, -9.83, -12.81, -6.76 +202504, -3.29, -4.80, 0.01, 2.77, -0.54 +202505, 4.85, 7.86, 13.84, -0.92, 7.02 +202506, 3.55, 5.83, 8.76, 4.82, 4.78 +202507, 3.21, 4.79, 1.47, 6.10, 0.79 +202508, 4.99, 5.21, 4.32, 9.12, 7.15 +202509, -0.61, 1.82, 5.38, 6.69, 0.46 +202510, -5.94, -0.09, 4.77, 6.13, -4.16 +202511, -0.38, -0.92, -7.34, 1.76, 1.31 +202512, -3.72, -1.17, -1.57, -3.83, 0.68 + + + Average Value Weighted Returns -- Annual +,Cnsmr,Manuf,HiTec,Hlth,Other + 1927, 49.28, 24.90, 37.96, 46.56, 27.66 + 1928, 38.66, 46.58, 44.67, 43.54, 18.90 + 1929, -35.17, -7.53, 8.77, -25.22, -6.69 + 1930, -21.74, -30.86, -24.87, -13.68, -32.35 + 1931, -31.24, -46.13, -37.14, -15.32, -59.42 + 1932, -14.96, -2.52, -12.27, -23.57, -9.81 + 1933, 77.24, 56.47, 26.47, 15.02, 62.01 + 1934, 13.94, -2.60, 6.31, 22.41, -2.17 + 1935, 34.80, 47.99, 66.32, 19.08, 36.12 + 1936, 21.60, 39.79, 29.80, 16.57, 36.85 + 1937, -38.18, -34.85, -22.72, -25.24, -41.66 + 1938, 39.66, 28.17, 15.95, 38.03, 22.25 + 1939, 8.82, -0.90, 6.55, 16.22, -4.24 + 1940, -7.33, -8.45, -3.03, -19.19, -5.52 + 1941, -15.97, -6.64, -15.09, -6.23, -7.41 + 1942, 23.19, 11.93, 13.79, 17.31, 27.11 + 1943, 33.26, 24.47, 30.41, 20.10, 32.29 + 1944, 26.44, 18.18, 14.79, 10.73, 34.03 + 1945, 42.67, 38.01, 27.59, 46.12, 45.88 + 1946, -11.03, 0.21, -12.04, 27.97, -16.77 + 1947, 3.78, 7.57, -1.35, -19.30, -5.27 + 1948, -1.39, 2.71, 6.00, -5.25, 2.79 + 1949, 27.86, 18.26, 12.99, 39.92, 16.87 + 1950, 20.85, 38.90, 14.99, 17.41, 32.21 + 1951, 10.16, 26.66, 17.34, 31.11, 13.34 + 1952, 17.53, 11.50, 15.02, -12.22, 21.52 + 1953, -3.00, 1.51, 8.03, 0.78, -6.87 + 1954, 41.97, 53.89, 44.01, 35.21, 60.05 + 1955, 27.09, 28.73, 15.79, 21.08, 14.97 + 1956, -3.04, 12.28, 10.44, 15.63, 0.48 + 1957, -9.94, -12.96, 4.17, 28.53, -18.24 + 1958, 53.66, 39.38, 49.30, 74.57, 57.79 + 1959, 20.73, 7.55, 25.13, 29.60, 2.76 + 1960, 1.20, -3.64, 13.98, 5.64, 0.54 + 1961, 41.70, 22.10, 25.97, 26.83, 29.74 + 1962, -12.89, -4.92, -16.52, -20.90, -7.29 + 1963, 25.68, 18.83, 23.76, 20.70, 22.05 + 1964, 21.56, 18.63, 2.63, 16.51, 13.81 + 1965, 15.15, 13.14, 10.21, 32.18, 24.03 + 1966, -21.32, -7.35, 1.36, -4.84, -3.63 + 1967, 36.09, 23.34, 33.17, 27.73, 31.14 + 1968, 17.73, 14.31, 3.02, 10.27, 32.23 + 1969, -9.74, -16.20, 0.14, 22.68, -19.14 + 1970, 8.22, 5.32, -12.05, -6.20, -6.53 + 1971, 24.37, 11.78, 9.55, 23.41, 22.09 + 1972, 11.24, 19.79, 18.74, 28.68, 8.98 + 1973, -33.04, -8.74, -18.62, -15.04, -23.66 + 1974, -30.56, -26.19, -24.85, -22.21, -32.80 + 1975, 61.03, 37.23, 38.10, 9.40, 30.86 + 1976, 24.17, 28.04, 32.35, 0.68, 32.97 + 1977, -5.46, -5.17, 1.71, -4.38, 1.04 + 1978, 3.35, 6.71, 12.57, 16.77, 11.32 + 1979, 14.33, 31.31, 6.93, 19.79, 30.68 + 1980, 17.59, 42.03, 24.30, 30.10, 34.32 + 1981, 9.65, -10.99, -0.57, 4.00, 2.73 + 1982, 51.93, 4.15, 35.31, 31.96, 21.12 + 1983, 27.47, 25.40, 18.35, 2.97, 22.96 + 1984, 4.56, 7.57, -1.53, 1.93, 3.72 + 1985, 36.52, 27.38, 25.72, 40.62, 40.04 + 1986, 21.78, 20.59, 2.69, 27.55, 12.86 + 1987, 2.94, 4.12, 9.27, 4.54, -7.30 + 1988, 28.69, 16.41, 9.66, 12.85, 18.06 + 1989, 25.73, 29.91, 25.00, 44.85, 28.07 + 1990, 
-2.48, -3.84, -9.47, 16.90, -19.00 + 1991, 46.60, 22.34, 20.82, 63.45, 43.45 + 1992, 12.47, 8.49, 13.33, -15.13, 22.52 + 1993, 4.98, 15.17, 20.30, -7.09, 13.25 + 1994, -4.76, 1.02, 7.34, 7.40, -4.31 + 1995, 24.76, 30.67, 41.59, 56.66, 45.00 + 1996, 19.51, 22.63, 19.03, 17.00, 25.22 + 1997, 31.42, 23.45, 26.79, 37.60, 42.00 + 1998, 26.06, 5.16, 57.87, 38.15, 8.19 + 1999, 0.09, 17.37, 68.09, -2.66, 2.66 + 2000, -0.44, 11.14, -38.13, 35.41, 11.88 + 2001, 3.09, -7.51, -23.13, -12.01, -4.96 + 2002, -14.31, -10.29, -36.33, -22.66, -15.28 + 2003, 26.91, 29.76, 38.89, 21.28, 33.39 + 2004, 11.19, 21.98, 6.05, 0.30, 15.20 + 2005, 2.49, 16.58, -0.52, 4.64, 6.91 + 2006, 14.13, 20.53, 14.96, 8.02, 15.80 + 2007, 2.29, 24.16, 11.76, 3.67, -8.69 + 2008, -25.22, -35.52, -40.27, -17.37, -46.81 + 2009, 30.37, 22.64, 49.16, 20.26, 18.96 + 2010, 20.99, 21.02, 16.73, 4.60, 16.65 + 2011, 9.13, 4.47, 0.44, 10.56, -11.06 + 2012, 16.18, 8.80, 17.01, 20.44, 22.99 + 2013, 33.00, 29.44, 33.11, 41.58, 41.04 + 2014, 12.66, 3.26, 14.40, 24.61, 11.43 + 2015, 7.17, -11.92, 3.58, 5.84, -0.52 + 2016, 5.29, 20.84, 14.18, -2.10, 20.02 + 2017, 18.94, 17.46, 28.81, 22.30, 22.05 + 2018, -3.68, -11.20, -0.52, 4.71, -9.84 + 2019, 26.05, 20.91, 42.23, 20.09, 30.58 + 2020, 37.88, -0.27, 40.67, 18.73, 8.99 + 2021, 20.93, 27.48, 27.41, 13.58, 26.10 + 2022, -24.95, 5.42, -31.65, -5.73, -13.77 + 2023, 26.26, 5.99, 50.76, 4.45, 19.33 + 2024, 26.52, 9.99, 34.56, 6.29, 23.97 + 2025, 7.58, 14.12, 23.97, 23.30, 12.70 + + + Average Equal Weighted Returns -- Annual +,Cnsmr,Manuf,HiTec,Hlth,Other + 1927, 25.89, 29.84, 37.11, 56.50, 35.76 + 1928, 38.09, 51.39, 58.57, 30.56, 30.25 + 1929, -42.39, -27.14, -6.47, -17.78, -22.14 + 1930, -34.42, -40.11, -39.90, -16.14, -38.60 + 1931, -35.27, -48.64, -48.11, -37.51, -53.60 + 1932, 8.03, 16.70, -8.08, -14.71, 10.11 + 1933, 147.57, 138.19, 134.66, 17.13, 116.83 + 1934, 31.06, 17.68, 23.70, 6.05, 4.45 + 1935, 43.12, 71.30, 91.40, 39.68, 58.76 + 1936, 43.07, 61.10, 37.59, 23.79, 58.41 + 1937, -48.41, -43.00, -44.86, -36.25, -49.44 + 1938, 36.80, 42.14, 45.44, 32.25, 21.35 + 1939, 11.11, 1.28, -5.79, 4.04, -1.75 + 1940, -5.88, -5.78, -1.04, -10.76, -17.25 + 1941, -7.56, -13.47, -8.66, 10.49, -2.96 + 1942, 28.36, 24.77, 51.22, 27.57, 67.62 + 1943, 71.54, 47.01, 48.44, 39.15, 88.87 + 1944, 45.33, 35.50, 33.12, 21.81, 51.06 + 1945, 69.87, 58.12, 38.24, 60.89, 68.32 + 1946, -8.58, -4.42, -21.44, 21.17, -21.34 + 1947, -2.38, 4.14, 0.71, -20.89, -4.24 + 1948, -8.23, -1.29, 4.25, -6.81, 2.76 + 1949, 26.40, 18.83, 12.39, 35.17, 21.18 + 1950, 27.88, 40.67, 34.71, 21.77, 47.51 + 1951, 6.31, 22.67, 22.61, 23.00, 11.43 + 1952, 6.65, 9.33, 18.22, -4.71, 16.40 + 1953, -4.46, -0.99, -0.54, 3.77, -6.87 + 1954, 42.14, 59.49, 68.46, 50.87, 73.73 + 1955, 14.79, 24.34, 16.14, 30.52, 19.33 + 1956, -0.65, 13.48, 6.31, 10.22, 1.07 + 1957, -7.84, -17.34, -11.87, 24.20, -20.02 + 1958, 63.63, 53.28, 79.47, 85.65, 63.26 + 1959, 21.89, 12.43, 37.93, 34.60, 4.50 + 1960, 2.39, -3.42, -3.48, 6.12, -3.86 + 1961, 38.56, 26.79, 19.12, 26.02, 29.91 + 1962, -16.38, -9.98, -23.63, -21.44, -6.31 + 1963, 13.07, 18.14, 9.21, 21.37, 21.72 + 1964, 18.56, 20.14, 7.11, 17.59, 20.07 + 1965, 33.48, 33.25, 66.00, 37.01, 34.82 + 1966, -13.10, -3.70, 1.64, 0.84, -6.66 + 1967, 87.00, 66.05, 95.07, 46.68, 89.83 + 1968, 55.92, 33.02, 16.09, 12.60, 62.55 + 1969, -26.91, -25.54, -26.45, 12.10, -28.50 + 1970, -7.44, -5.57, -31.61, -14.96, -13.76 + 1971, 24.69, 13.97, 21.95, 28.83, 21.27 + 1972, -1.47, 11.56, 10.23, 5.36, -0.54 + 1973, -46.46, -23.89, -38.50, 
-39.37, -38.35 + 1974, -25.09, -15.64, -35.12, -24.39, -28.19 + 1975, 84.35, 57.14, 82.56, 53.48, 54.87 + 1976, 41.69, 52.41, 60.57, 37.64, 51.29 + 1977, 17.08, 18.90, 31.61, 29.87, 28.62 + 1978, 21.84, 18.17, 48.30, 35.51, 28.95 + 1979, 24.45, 50.44, 56.97, 39.08, 38.35 + 1980, 25.76, 42.29, 69.67, 58.87, 41.86 + 1981, 12.46, -6.90, -6.94, 4.50, 3.43 + 1982, 44.37, 0.93, 40.63, 52.62, 28.90 + 1983, 47.25, 33.93, 29.37, 16.41, 40.86 + 1984, -7.40, -7.50, -24.56, -15.31, -7.82 + 1985, 26.65, 14.71, 16.18, 34.75, 36.48 + 1986, 11.58, 3.26, -1.97, 4.67, 11.97 + 1987, -15.56, 3.24, -10.41, -19.36, -10.89 + 1988, 27.16, 22.97, 16.66, 24.02, 14.65 + 1989, 9.05, 18.82, 8.14, 14.52, 7.14 + 1990, -26.34, -12.76, -21.81, -2.65, -30.69 + 1991, 51.16, 33.05, 65.30, 154.60, 44.57 + 1992, 27.62, 21.58, 30.18, 7.93, 44.91 + 1993, 20.78, 26.09, 33.55, -3.10, 31.82 + 1994, -11.28, 1.35, 5.68, -13.59, -4.04 + 1995, 9.84, 19.43, 51.12, 69.01, 35.63 + 1996, 14.28, 25.64, 11.75, 13.13, 20.35 + 1997, 13.88, 24.10, 11.50, 3.20, 38.78 + 1998, 2.94, -17.25, 8.47, -12.52, -3.97 + 1999, 3.91, 12.75, 130.49, 44.35, 8.05 + 2000, -18.25, 13.22, -40.66, 29.92, -3.29 + 2001, 40.48, 7.08, 12.17, 45.69, 30.14 + 2002, -4.42, -9.50, -30.51, -32.86, 2.37 + 2003, 53.74, 57.31, 111.56, 115.40, 59.18 + 2004, 22.60, 34.05, 11.33, 15.62, 23.63 + 2005, 3.56, 16.88, -4.05, -3.92, 6.00 + 2006, 18.41, 21.61, 13.28, 11.29, 17.11 + 2007, -12.71, 8.17, -0.78, -4.42, -16.74 + 2008, -47.87, -47.04, -50.88, -40.37, -41.32 + 2009, 98.31, 61.30, 96.95, 99.46, 26.21 + 2010, 30.84, 34.62, 34.15, 21.18, 21.11 + 2011, -6.34, -5.50, -13.27, -10.57, -10.14 + 2012, 22.70, 8.98, 11.16, 24.99, 26.10 + 2013, 42.82, 37.03, 50.63, 57.31, 42.03 + 2014, 5.22, -8.31, 1.98, 14.13, 7.74 + 2015, -8.90, -22.70, -3.78, 2.59, -2.95 + 2016, 15.44, 32.36, 17.28, -11.92, 30.76 + 2017, 6.56, 10.55, 24.90, 24.05, 15.13 + 2018, -13.08, -22.14, -6.81, -22.61, -13.86 + 2019, 17.15, 11.61, 27.69, 25.35, 25.42 + 2020, 41.67, 17.23, 68.63, 63.02, 12.64 + 2021, 35.68, 41.69, 19.46, -11.35, 33.11 + 2022, -28.55, -3.94, -37.93, -42.27, -16.83 + 2023, 6.29, 9.21, 16.03, 0.97, 10.57 + 2024, 0.89, 14.23, 19.21, -6.96, 10.26 + 2025, -10.19, 8.40, 9.17, 10.86, 8.87 + + + Number of Firms in Portfolios +,Cnsmr,Manuf,HiTec,Hlth,Other +192607, 169, 199, 23, 7, 113 +192608, 168, 199, 23, 7, 113 +192609, 168, 199, 23, 7, 113 +192610, 168, 198, 23, 7, 113 +192611, 168, 198, 23, 7, 113 +192612, 168, 198, 23, 7, 113 +192701, 167, 198, 23, 7, 112 +192702, 167, 197, 23, 7, 112 +192703, 165, 197, 23, 7, 111 +192704, 165, 197, 23, 7, 111 +192705, 165, 197, 22, 7, 110 +192706, 165, 197, 22, 7, 110 +192707, 187, 207, 24, 7, 122 +192708, 187, 207, 24, 7, 122 +192709, 187, 206, 24, 7, 122 +192710, 187, 206, 24, 7, 122 +192711, 187, 206, 24, 7, 122 +192712, 186, 204, 23, 7, 122 +192801, 186, 204, 23, 7, 122 +192802, 185, 204, 23, 7, 122 +192803, 185, 204, 23, 7, 122 +192804, 185, 202, 23, 6, 122 +192805, 185, 202, 23, 6, 122 +192806, 185, 202, 22, 6, 122 +192807, 202, 218, 23, 6, 122 +192808, 201, 218, 23, 6, 122 +192809, 200, 218, 23, 6, 122 +192810, 200, 218, 23, 6, 122 +192811, 200, 218, 23, 6, 121 +192812, 200, 218, 23, 6, 121 +192901, 199, 218, 23, 6, 121 +192902, 197, 218, 23, 5, 120 +192903, 197, 218, 23, 5, 120 +192904, 197, 218, 22, 5, 120 +192905, 197, 218, 22, 5, 120 +192906, 196, 217, 22, 5, 120 +192907, 233, 254, 24, 6, 133 +192908, 232, 253, 24, 6, 133 +192909, 232, 252, 24, 6, 133 +192910, 232, 252, 24, 6, 132 +192911, 231, 250, 24, 6, 131 +192912, 231, 250, 24, 6, 131 +193001, 230, 
250, 24, 6, 130 +193002, 230, 249, 24, 6, 129 +193003, 228, 249, 24, 6, 129 +193004, 228, 248, 24, 5, 129 +193005, 227, 246, 24, 5, 128 +193006, 227, 245, 24, 5, 128 +193007, 240, 279, 31, 5, 144 +193008, 240, 278, 31, 5, 144 +193009, 240, 277, 31, 5, 144 +193010, 240, 276, 31, 5, 144 +193011, 240, 274, 31, 5, 144 +193012, 239, 274, 31, 5, 144 +193101, 239, 274, 31, 5, 144 +193102, 239, 273, 31, 5, 143 +193103, 239, 271, 31, 5, 143 +193104, 238, 271, 31, 5, 142 +193105, 238, 271, 30, 5, 142 +193106, 237, 271, 30, 5, 142 +193107, 243, 284, 30, 6, 142 +193108, 242, 284, 30, 6, 142 +193109, 242, 284, 30, 6, 141 +193110, 242, 283, 30, 6, 141 +193111, 241, 282, 30, 6, 140 +193112, 241, 282, 30, 6, 140 +193201, 240, 280, 30, 6, 139 +193202, 240, 280, 30, 6, 139 +193203, 240, 280, 30, 6, 139 +193204, 238, 279, 30, 6, 139 +193205, 237, 278, 30, 6, 135 +193206, 236, 277, 30, 6, 135 +193207, 233, 279, 29, 6, 134 +193208, 233, 279, 29, 6, 134 +193209, 231, 279, 29, 6, 134 +193210, 230, 278, 29, 6, 134 +193211, 230, 278, 29, 6, 134 +193212, 229, 278, 29, 6, 134 +193301, 227, 277, 29, 6, 133 +193302, 227, 277, 29, 6, 133 +193303, 226, 277, 29, 6, 133 +193304, 225, 276, 29, 6, 133 +193305, 225, 276, 29, 6, 133 +193306, 225, 274, 29, 6, 133 +193307, 222, 277, 31, 6, 139 +193308, 222, 277, 31, 6, 139 +193309, 222, 276, 31, 6, 139 +193310, 222, 276, 31, 6, 138 +193311, 222, 276, 31, 6, 138 +193312, 222, 276, 31, 6, 138 +193401, 222, 276, 30, 6, 138 +193402, 222, 276, 30, 6, 137 +193403, 222, 276, 30, 6, 137 +193404, 222, 276, 30, 6, 137 +193405, 222, 276, 30, 6, 137 +193406, 222, 274, 30, 6, 137 +193407, 227, 275, 31, 9, 143 +193408, 226, 275, 31, 9, 143 +193409, 226, 275, 31, 9, 143 +193410, 226, 275, 31, 9, 143 +193411, 226, 274, 31, 9, 139 +193412, 226, 273, 31, 9, 139 +193501, 226, 273, 31, 9, 139 +193502, 226, 273, 31, 9, 138 +193503, 224, 273, 31, 9, 138 +193504, 224, 273, 31, 9, 138 +193505, 223, 273, 31, 9, 138 +193506, 223, 273, 30, 9, 137 +193507, 227, 277, 30, 9, 142 +193508, 224, 275, 30, 9, 140 +193509, 224, 274, 30, 9, 140 +193510, 223, 274, 30, 9, 138 +193511, 223, 274, 30, 9, 138 +193512, 223, 274, 30, 9, 138 +193601, 223, 273, 30, 9, 138 +193602, 223, 273, 30, 9, 138 +193603, 223, 272, 30, 9, 138 +193604, 221, 272, 30, 9, 138 +193605, 221, 271, 30, 9, 138 +193606, 220, 271, 30, 9, 138 +193607, 238, 286, 31, 10, 144 +193608, 238, 286, 31, 10, 144 +193609, 238, 285, 31, 10, 143 +193610, 238, 285, 31, 10, 143 +193611, 238, 283, 31, 10, 142 +193612, 238, 283, 31, 10, 142 +193701, 238, 282, 31, 10, 142 +193702, 237, 282, 31, 10, 139 +193703, 237, 282, 31, 10, 139 +193704, 237, 281, 31, 10, 139 +193705, 237, 279, 31, 10, 139 +193706, 237, 279, 31, 10, 139 +193707, 250, 301, 34, 12, 146 +193708, 250, 301, 34, 12, 146 +193709, 250, 301, 34, 12, 146 +193710, 250, 300, 34, 12, 146 +193711, 250, 300, 34, 12, 146 +193712, 250, 300, 34, 12, 146 +193801, 250, 300, 34, 12, 146 +193802, 250, 300, 34, 12, 146 +193803, 250, 300, 34, 12, 146 +193804, 250, 300, 34, 12, 146 +193805, 250, 300, 34, 12, 145 +193806, 249, 299, 34, 12, 144 +193807, 253, 316, 35, 12, 149 +193808, 253, 314, 35, 12, 149 +193809, 251, 313, 35, 12, 148 +193810, 251, 313, 35, 12, 148 +193811, 251, 313, 35, 12, 147 +193812, 251, 313, 35, 12, 146 +193901, 250, 312, 35, 12, 143 +193902, 250, 312, 35, 12, 143 +193903, 250, 312, 35, 12, 143 +193904, 250, 312, 35, 12, 143 +193905, 250, 312, 35, 12, 142 +193906, 250, 312, 35, 12, 142 +193907, 251, 315, 36, 12, 141 +193908, 250, 314, 36, 12, 140 +193909, 250, 314, 36, 12, 138 +193910, 250, 313, 
36, 12, 136 +193911, 250, 313, 36, 12, 135 +193912, 250, 313, 36, 12, 134 +194001, 250, 313, 36, 12, 134 +194002, 250, 312, 36, 12, 134 +194003, 250, 312, 36, 12, 134 +194004, 250, 312, 36, 12, 134 +194005, 250, 312, 36, 12, 134 +194006, 248, 312, 36, 12, 134 +194007, 254, 322, 36, 13, 140 +194008, 254, 322, 36, 13, 140 +194009, 253, 322, 36, 13, 139 +194010, 253, 322, 36, 13, 137 +194011, 253, 322, 36, 13, 136 +194012, 253, 322, 36, 13, 136 +194101, 251, 321, 36, 13, 136 +194102, 251, 321, 36, 13, 136 +194103, 251, 320, 36, 13, 136 +194104, 251, 320, 36, 13, 136 +194105, 251, 320, 36, 13, 136 +194106, 251, 320, 36, 13, 136 +194107, 253, 331, 38, 13, 140 +194108, 253, 331, 38, 13, 140 +194109, 253, 331, 38, 13, 139 +194110, 253, 331, 38, 13, 139 +194111, 253, 331, 38, 13, 139 +194112, 253, 331, 38, 13, 138 +194201, 253, 331, 38, 13, 138 +194202, 253, 331, 38, 13, 138 +194203, 253, 331, 38, 13, 138 +194204, 252, 331, 38, 13, 138 +194205, 252, 331, 38, 13, 138 +194206, 252, 331, 38, 13, 138 +194207, 253, 337, 39, 14, 141 +194208, 253, 336, 39, 14, 140 +194209, 253, 336, 39, 14, 140 +194210, 253, 336, 39, 14, 140 +194211, 252, 336, 39, 14, 140 +194212, 252, 336, 39, 14, 140 +194301, 252, 335, 39, 14, 140 +194302, 252, 335, 39, 14, 140 +194303, 252, 334, 39, 14, 140 +194304, 252, 333, 39, 14, 140 +194305, 252, 332, 39, 14, 140 +194306, 252, 332, 39, 14, 140 +194307, 255, 333, 40, 14, 139 +194308, 254, 333, 40, 14, 139 +194309, 254, 333, 40, 14, 139 +194310, 254, 333, 40, 14, 139 +194311, 254, 332, 40, 14, 139 +194312, 254, 331, 40, 14, 139 +194401, 254, 331, 40, 14, 137 +194402, 254, 330, 40, 14, 135 +194403, 254, 330, 40, 14, 134 +194404, 254, 330, 40, 14, 134 +194405, 254, 330, 40, 14, 134 +194406, 254, 329, 40, 14, 134 +194407, 257, 345, 43, 15, 138 +194408, 257, 345, 43, 15, 137 +194409, 257, 345, 43, 15, 137 +194410, 256, 345, 43, 15, 137 +194411, 255, 345, 43, 15, 136 +194412, 255, 345, 43, 15, 136 +194501, 255, 345, 43, 15, 135 +194502, 254, 345, 43, 15, 135 +194503, 254, 345, 43, 15, 135 +194504, 254, 345, 43, 15, 135 +194505, 254, 345, 43, 15, 135 +194506, 254, 345, 43, 15, 135 +194507, 266, 353, 46, 16, 140 +194508, 266, 353, 46, 16, 140 +194509, 266, 353, 46, 16, 140 +194510, 266, 353, 46, 16, 140 +194511, 265, 353, 46, 16, 140 +194512, 265, 352, 46, 16, 139 +194601, 265, 352, 46, 16, 139 +194602, 265, 351, 46, 16, 139 +194603, 264, 351, 46, 16, 139 +194604, 264, 351, 46, 16, 139 +194605, 264, 351, 46, 16, 139 +194606, 264, 351, 46, 16, 139 +194607, 274, 374, 48, 18, 144 +194608, 274, 374, 48, 18, 144 +194609, 273, 374, 48, 18, 144 +194610, 273, 374, 48, 18, 144 +194611, 272, 374, 48, 18, 144 +194612, 272, 374, 47, 18, 144 +194701, 272, 374, 47, 18, 144 +194702, 272, 374, 47, 18, 144 +194703, 272, 374, 47, 18, 144 +194704, 272, 374, 47, 18, 144 +194705, 272, 374, 47, 18, 144 +194706, 272, 374, 47, 18, 144 +194707, 289, 396, 50, 18, 148 +194708, 289, 395, 50, 18, 148 +194709, 289, 395, 50, 18, 148 +194710, 289, 395, 50, 18, 148 +194711, 289, 394, 50, 18, 148 +194712, 289, 394, 50, 18, 148 +194801, 289, 394, 50, 18, 148 +194802, 289, 394, 50, 18, 148 +194803, 289, 394, 50, 18, 148 +194804, 289, 394, 50, 18, 148 +194805, 289, 394, 50, 18, 148 +194806, 289, 394, 50, 18, 148 +194807, 300, 413, 51, 18, 156 +194808, 300, 412, 51, 18, 156 +194809, 300, 412, 51, 18, 156 +194810, 300, 411, 51, 18, 156 +194811, 300, 411, 51, 18, 156 +194812, 300, 411, 51, 18, 156 +194901, 300, 411, 51, 18, 156 +194902, 300, 411, 51, 18, 156 +194903, 300, 411, 51, 18, 156 +194904, 300, 411, 51, 18, 156 +194905, 
300, 411, 51, 18, 156 +194906, 300, 411, 51, 18, 156 +194907, 304, 426, 53, 18, 160 +194908, 304, 425, 53, 18, 160 +194909, 304, 425, 53, 18, 160 +194910, 304, 425, 53, 18, 160 +194911, 304, 425, 53, 18, 160 +194912, 304, 425, 53, 18, 160 +195001, 304, 425, 53, 18, 159 +195002, 302, 425, 53, 18, 159 +195003, 302, 425, 53, 18, 159 +195004, 302, 425, 53, 18, 159 +195005, 302, 425, 53, 18, 159 +195006, 302, 425, 52, 18, 159 +195007, 303, 444, 52, 18, 161 +195008, 303, 443, 52, 18, 161 +195009, 303, 443, 52, 18, 161 +195010, 303, 443, 52, 18, 161 +195011, 303, 443, 52, 18, 161 +195012, 303, 443, 52, 18, 161 +195101, 302, 442, 52, 18, 161 +195102, 302, 442, 52, 18, 160 +195103, 302, 442, 52, 18, 160 +195104, 302, 442, 52, 18, 160 +195105, 302, 442, 52, 18, 160 +195106, 302, 442, 52, 18, 160 +195107, 307, 462, 54, 19, 163 +195108, 307, 462, 54, 19, 163 +195109, 306, 462, 54, 19, 163 +195110, 306, 461, 54, 19, 163 +195111, 306, 461, 54, 19, 163 +195112, 306, 459, 54, 19, 163 +195201, 306, 459, 54, 19, 163 +195202, 306, 459, 54, 19, 163 +195203, 306, 459, 54, 19, 163 +195204, 306, 459, 54, 19, 162 +195205, 305, 459, 54, 19, 162 +195206, 305, 459, 54, 19, 162 +195207, 309, 467, 57, 19, 162 +195208, 309, 467, 57, 19, 162 +195209, 309, 467, 57, 19, 162 +195210, 309, 467, 57, 19, 162 +195211, 309, 467, 57, 18, 162 +195212, 309, 466, 57, 18, 162 +195301, 309, 466, 57, 18, 162 +195302, 309, 466, 57, 18, 162 +195303, 308, 466, 56, 18, 162 +195304, 306, 466, 55, 18, 162 +195305, 306, 466, 55, 18, 162 +195306, 306, 465, 55, 17, 159 +195307, 307, 479, 58, 18, 163 +195308, 307, 477, 58, 18, 163 +195309, 306, 477, 58, 18, 163 +195310, 304, 477, 58, 18, 163 +195311, 303, 477, 58, 18, 163 +195312, 303, 476, 58, 18, 163 +195401, 303, 476, 58, 18, 163 +195402, 303, 476, 58, 18, 163 +195403, 303, 476, 58, 18, 163 +195404, 303, 476, 58, 18, 163 +195405, 303, 475, 58, 18, 163 +195406, 302, 473, 58, 18, 163 +195407, 301, 483, 59, 18, 164 +195408, 301, 483, 59, 18, 164 +195409, 301, 482, 59, 18, 164 +195410, 301, 482, 59, 18, 164 +195411, 300, 481, 59, 18, 162 +195412, 300, 480, 59, 18, 162 +195501, 300, 480, 59, 18, 162 +195502, 300, 479, 59, 18, 162 +195503, 298, 478, 59, 18, 162 +195504, 298, 476, 59, 18, 162 +195505, 298, 476, 59, 17, 161 +195506, 298, 475, 58, 17, 160 +195507, 302, 492, 60, 17, 161 +195508, 302, 491, 58, 17, 161 +195509, 302, 490, 58, 17, 160 +195510, 301, 488, 58, 17, 160 +195511, 300, 487, 58, 17, 160 +195512, 300, 487, 58, 17, 160 +195601, 298, 485, 58, 17, 160 +195602, 296, 485, 58, 17, 160 +195603, 295, 485, 58, 17, 160 +195604, 295, 485, 58, 17, 160 +195605, 295, 485, 58, 17, 160 +195606, 293, 485, 58, 17, 160 +195607, 294, 498, 60, 17, 160 +195608, 293, 495, 60, 17, 160 +195609, 292, 494, 60, 17, 159 +195610, 292, 493, 60, 17, 159 +195611, 292, 490, 60, 17, 159 +195612, 291, 490, 60, 17, 159 +195701, 291, 490, 60, 17, 159 +195702, 291, 486, 60, 17, 159 +195703, 291, 486, 60, 17, 159 +195704, 291, 486, 60, 17, 159 +195705, 290, 486, 60, 17, 159 +195706, 288, 485, 60, 17, 159 +195707, 293, 498, 70, 17, 162 +195708, 293, 498, 69, 17, 162 +195709, 292, 497, 68, 17, 162 +195710, 292, 496, 68, 17, 161 +195711, 290, 496, 67, 17, 161 +195712, 290, 496, 66, 17, 161 +195801, 290, 494, 66, 17, 159 +195802, 290, 492, 66, 17, 159 +195803, 289, 492, 66, 17, 159 +195804, 287, 490, 66, 17, 158 +195805, 287, 490, 66, 17, 158 +195806, 286, 489, 66, 17, 158 +195807, 282, 499, 73, 20, 170 +195808, 282, 499, 72, 20, 169 +195809, 282, 499, 72, 20, 168 +195810, 282, 498, 72, 20, 166 +195811, 282, 497, 72, 20, 
164 +195812, 282, 497, 72, 20, 164 +195901, 282, 496, 72, 20, 164 +195902, 282, 496, 72, 20, 164 +195903, 280, 496, 72, 20, 164 +195904, 280, 495, 71, 20, 162 +195905, 280, 494, 71, 20, 160 +195906, 280, 494, 71, 20, 160 +195907, 284, 500, 78, 22, 163 +195908, 284, 500, 78, 22, 162 +195909, 284, 500, 78, 22, 161 +195910, 284, 500, 78, 22, 161 +195911, 283, 500, 77, 22, 161 +195912, 283, 499, 77, 22, 161 +196001, 283, 499, 76, 22, 160 +196002, 283, 499, 75, 22, 160 +196003, 283, 499, 75, 22, 159 +196004, 282, 499, 75, 22, 159 +196005, 281, 498, 75, 22, 158 +196006, 281, 496, 74, 22, 158 +196007, 291, 514, 83, 23, 168 +196008, 290, 513, 83, 23, 166 +196009, 289, 513, 83, 23, 166 +196010, 289, 512, 83, 23, 166 +196011, 286, 512, 83, 23, 165 +196012, 285, 512, 83, 23, 164 +196101, 284, 512, 83, 23, 164 +196102, 282, 511, 83, 22, 164 +196103, 279, 511, 83, 22, 163 +196104, 279, 510, 83, 22, 163 +196105, 278, 509, 83, 22, 163 +196106, 278, 509, 83, 22, 163 +196107, 291, 525, 90, 22, 166 +196108, 290, 522, 90, 22, 166 +196109, 289, 521, 90, 22, 164 +196110, 288, 521, 90, 22, 163 +196111, 287, 520, 90, 22, 163 +196112, 287, 520, 89, 22, 162 +196201, 287, 519, 88, 22, 161 +196202, 287, 519, 88, 22, 161 +196203, 286, 517, 88, 22, 161 +196204, 286, 517, 88, 22, 161 +196205, 286, 513, 88, 22, 161 +196206, 285, 512, 88, 22, 161 +196207, 296, 536, 98, 23, 167 +196208, 296, 536, 98, 23, 165 +196209, 296, 536, 98, 23, 165 +196210, 296, 535, 98, 23, 165 +196211, 295, 535, 98, 23, 165 +196212, 294, 534, 98, 23, 165 +196301, 294, 533, 98, 23, 165 +196302, 294, 533, 98, 23, 164 +196303, 294, 532, 98, 23, 164 +196304, 293, 532, 98, 23, 164 +196305, 293, 531, 98, 23, 163 +196306, 291, 529, 97, 23, 162 +196307, 557, 867, 155, 36, 351 +196308, 556, 861, 154, 36, 349 +196309, 555, 860, 154, 36, 345 +196310, 554, 855, 154, 36, 345 +196311, 553, 852, 153, 36, 342 +196312, 552, 850, 153, 36, 341 +196401, 549, 843, 153, 36, 340 +196402, 547, 837, 153, 36, 340 +196403, 547, 836, 153, 36, 340 +196404, 545, 833, 153, 36, 339 +196405, 544, 831, 152, 36, 337 +196406, 541, 827, 151, 36, 336 +196407, 586, 865, 166, 39, 351 +196408, 582, 863, 166, 38, 349 +196409, 581, 862, 166, 38, 348 +196410, 579, 857, 166, 38, 347 +196411, 579, 854, 166, 38, 344 +196412, 578, 852, 166, 38, 343 +196501, 576, 849, 164, 38, 343 +196502, 575, 844, 163, 38, 342 +196503, 571, 841, 163, 38, 341 +196504, 570, 839, 160, 38, 338 +196505, 568, 837, 160, 38, 338 +196506, 567, 835, 160, 38, 335 +196507, 603, 870, 176, 40, 363 +196508, 603, 868, 176, 39, 361 +196509, 602, 866, 175, 39, 360 +196510, 598, 862, 174, 39, 359 +196511, 593, 860, 174, 39, 358 +196512, 592, 858, 174, 39, 356 +196601, 589, 856, 173, 39, 356 +196602, 586, 853, 172, 38, 353 +196603, 584, 847, 172, 38, 351 +196604, 583, 846, 172, 38, 351 +196605, 580, 843, 172, 38, 351 +196606, 574, 840, 171, 38, 350 +196607, 611, 872, 185, 39, 365 +196608, 607, 868, 185, 39, 364 +196609, 607, 867, 184, 39, 360 +196610, 606, 866, 184, 39, 359 +196611, 606, 862, 184, 39, 357 +196612, 602, 859, 184, 39, 355 +196701, 599, 857, 183, 39, 351 +196702, 597, 856, 183, 39, 349 +196703, 596, 854, 183, 39, 348 +196704, 594, 851, 181, 39, 348 +196705, 592, 850, 179, 39, 346 +196706, 587, 845, 177, 39, 343 +196707, 612, 892, 193, 40, 353 +196708, 607, 885, 192, 40, 349 +196709, 604, 881, 192, 40, 348 +196710, 600, 872, 191, 40, 346 +196711, 596, 867, 188, 40, 345 +196712, 592, 864, 187, 40, 344 +196801, 590, 859, 186, 39, 343 +196802, 586, 846, 184, 38, 342 +196803, 584, 842, 183, 38, 339 +196804, 582, 841, 
183, 38, 337 +196805, 579, 836, 183, 38, 334 +196806, 576, 831, 183, 38, 333 +196807, 617, 863, 212, 44, 349 +196808, 605, 859, 210, 44, 344 +196809, 604, 858, 210, 44, 342 +196810, 602, 854, 209, 44, 339 +196811, 598, 847, 208, 44, 339 +196812, 595, 842, 207, 44, 339 +196901, 592, 834, 207, 44, 338 +196902, 586, 830, 207, 44, 335 +196903, 583, 826, 207, 44, 334 +196904, 581, 824, 206, 44, 330 +196905, 578, 823, 204, 44, 330 +196906, 577, 819, 202, 44, 329 +196907, 642, 858, 229, 49, 374 +196908, 642, 854, 228, 49, 372 +196909, 641, 852, 226, 49, 371 +196910, 639, 850, 226, 49, 367 +196911, 639, 849, 225, 49, 367 +196912, 638, 845, 224, 49, 366 +197001, 632, 844, 223, 49, 366 +197002, 630, 842, 223, 49, 366 +197003, 629, 840, 223, 49, 366 +197004, 629, 840, 223, 49, 362 +197005, 629, 838, 221, 49, 361 +197006, 627, 833, 221, 49, 361 +197007, 693, 871, 226, 53, 409 +197008, 691, 869, 225, 53, 407 +197009, 689, 867, 224, 53, 405 +197010, 689, 867, 221, 53, 404 +197011, 689, 864, 221, 53, 404 +197012, 688, 863, 220, 52, 403 +197101, 685, 858, 219, 52, 401 +197102, 681, 857, 219, 51, 400 +197103, 678, 855, 219, 51, 399 +197104, 676, 855, 218, 51, 396 +197105, 674, 852, 217, 51, 396 +197106, 674, 848, 215, 51, 393 +197107, 718, 870, 221, 60, 439 +197108, 716, 869, 221, 60, 439 +197109, 715, 868, 221, 60, 438 +197110, 715, 868, 219, 60, 438 +197111, 715, 868, 219, 60, 438 +197112, 714, 865, 219, 60, 438 +197201, 714, 864, 218, 60, 436 +197202, 713, 860, 218, 60, 436 +197203, 713, 860, 218, 60, 435 +197204, 712, 860, 218, 60, 434 +197205, 711, 859, 217, 60, 432 +197206, 711, 857, 217, 60, 429 +197207, 773, 882, 221, 70, 488 +197208, 770, 880, 221, 70, 486 +197209, 764, 879, 221, 70, 483 +197210, 763, 879, 221, 70, 483 +197211, 760, 879, 221, 70, 483 +197212, 759, 877, 221, 70, 481 +197301, 758, 875, 221, 70, 481 +197302, 754, 873, 220, 70, 480 +197303, 751, 870, 219, 70, 479 +197304, 750, 868, 219, 70, 477 +197305, 748, 868, 219, 70, 476 +197306, 742, 864, 217, 70, 474 +197307, 1315, 1321, 475, 138, 1937 +197308, 1304, 1316, 471, 136, 1924 +197309, 1291, 1310, 468, 136, 1907 +197310, 1284, 1306, 465, 133, 1899 +197311, 1276, 1303, 461, 133, 1891 +197312, 1262, 1290, 453, 133, 1864 +197401, 1247, 1283, 449, 130, 1836 +197402, 1243, 1279, 447, 128, 1823 +197403, 1238, 1276, 445, 128, 1817 +197404, 1236, 1275, 439, 128, 1807 +197405, 1229, 1270, 436, 127, 1804 +197406, 1218, 1261, 434, 125, 1794 +197407, 1198, 1272, 432, 131, 1740 +197408, 1193, 1269, 431, 130, 1730 +197409, 1184, 1264, 426, 129, 1720 +197410, 1172, 1264, 421, 129, 1715 +197411, 1163, 1261, 417, 129, 1706 +197412, 1158, 1254, 415, 129, 1699 +197501, 1156, 1245, 412, 129, 1689 +197502, 1148, 1241, 410, 126, 1681 +197503, 1147, 1239, 408, 125, 1678 +197504, 1146, 1235, 406, 125, 1674 +197505, 1143, 1230, 406, 125, 1667 +197506, 1142, 1229, 405, 125, 1666 +197507, 1140, 1260, 402, 134, 1649 +197508, 1137, 1257, 402, 134, 1644 +197509, 1134, 1251, 400, 134, 1639 +197510, 1132, 1250, 399, 133, 1637 +197511, 1129, 1249, 399, 131, 1631 +197512, 1123, 1246, 398, 131, 1627 +197601, 1118, 1244, 396, 129, 1622 +197602, 1114, 1237, 395, 129, 1614 +197603, 1111, 1233, 395, 129, 1610 +197604, 1103, 1232, 394, 129, 1607 +197605, 1097, 1230, 390, 128, 1603 +197606, 1095, 1222, 389, 128, 1598 +197607, 1141, 1277, 412, 135, 1662 +197608, 1138, 1275, 410, 135, 1659 +197609, 1130, 1268, 410, 135, 1655 +197610, 1128, 1266, 408, 135, 1649 +197611, 1126, 1260, 408, 135, 1646 +197612, 1123, 1257, 406, 134, 1645 +197701, 1121, 1251, 403, 131, 1638 +197702, 
1113, 1246, 399, 130, 1632 +197703, 1109, 1241, 397, 129, 1628 +197704, 1101, 1236, 397, 128, 1625 +197705, 1100, 1232, 396, 127, 1622 +197706, 1096, 1222, 395, 125, 1620 +197707, 1139, 1278, 431, 135, 1680 +197708, 1129, 1272, 429, 133, 1671 +197709, 1118, 1267, 426, 132, 1664 +197710, 1114, 1262, 424, 132, 1656 +197711, 1108, 1256, 422, 131, 1650 +197712, 1104, 1253, 421, 130, 1648 +197801, 1098, 1241, 418, 128, 1643 +197802, 1089, 1233, 413, 125, 1630 +197803, 1083, 1230, 412, 125, 1621 +197804, 1079, 1226, 411, 125, 1615 +197805, 1074, 1217, 408, 124, 1612 +197806, 1066, 1203, 407, 123, 1606 +197807, 1075, 1240, 422, 125, 1660 +197808, 1071, 1233, 421, 124, 1654 +197809, 1060, 1224, 419, 124, 1646 +197810, 1053, 1221, 418, 122, 1637 +197811, 1044, 1217, 416, 122, 1629 +197812, 1041, 1212, 414, 122, 1620 +197901, 1037, 1208, 410, 121, 1613 +197902, 1026, 1198, 408, 121, 1603 +197903, 1020, 1192, 407, 120, 1591 +197904, 1017, 1187, 405, 119, 1588 +197905, 1011, 1184, 401, 118, 1577 +197906, 1009, 1180, 400, 117, 1569 +197907, 1028, 1237, 442, 127, 1654 +197908, 1017, 1229, 442, 127, 1642 +197909, 1011, 1221, 441, 126, 1634 +197910, 1006, 1218, 439, 124, 1625 +197911, 1002, 1214, 438, 124, 1622 +197912, 995, 1210, 433, 124, 1615 +198001, 989, 1204, 428, 123, 1609 +198002, 980, 1194, 425, 123, 1589 +198003, 974, 1190, 423, 122, 1584 +198004, 970, 1187, 422, 122, 1573 +198005, 963, 1181, 420, 119, 1567 +198006, 958, 1177, 416, 117, 1557 +198007, 995, 1266, 463, 136, 1649 +198008, 991, 1265, 463, 136, 1640 +198009, 988, 1259, 459, 135, 1626 +198010, 978, 1252, 456, 134, 1619 +198011, 973, 1250, 455, 134, 1610 +198012, 971, 1245, 452, 133, 1604 +198101, 964, 1241, 447, 133, 1593 +198102, 952, 1238, 442, 132, 1587 +198103, 944, 1231, 441, 131, 1579 +198104, 940, 1223, 438, 130, 1575 +198105, 933, 1217, 437, 128, 1570 +198106, 928, 1212, 437, 127, 1569 +198107, 989, 1388, 580, 158, 1738 +198108, 982, 1386, 578, 156, 1730 +198109, 975, 1380, 577, 156, 1728 +198110, 968, 1379, 575, 155, 1716 +198111, 961, 1373, 572, 154, 1707 +198112, 956, 1370, 569, 154, 1701 +198201, 948, 1363, 567, 152, 1695 +198202, 944, 1360, 563, 152, 1685 +198203, 941, 1354, 561, 152, 1676 +198204, 934, 1350, 559, 150, 1663 +198205, 928, 1343, 556, 149, 1646 +198206, 918, 1341, 555, 149, 1634 +198207, 965, 1445, 657, 179, 1715 +198208, 959, 1439, 654, 175, 1706 +198209, 955, 1433, 652, 174, 1696 +198210, 951, 1421, 649, 172, 1674 +198211, 944, 1413, 645, 171, 1658 +198212, 938, 1411, 643, 170, 1639 +198301, 936, 1400, 639, 168, 1624 +198302, 926, 1390, 637, 167, 1603 +198303, 922, 1380, 636, 166, 1598 +198304, 918, 1376, 634, 164, 1590 +198305, 916, 1369, 632, 162, 1583 +198306, 912, 1364, 627, 162, 1571 +198307, 1008, 1418, 757, 204, 1735 +198308, 1006, 1413, 754, 204, 1724 +198309, 1000, 1398, 752, 202, 1710 +198310, 994, 1386, 748, 202, 1700 +198311, 993, 1378, 745, 202, 1692 +198312, 989, 1375, 743, 202, 1683 +198401, 981, 1363, 740, 201, 1676 +198402, 973, 1352, 738, 198, 1662 +198403, 959, 1344, 736, 196, 1655 +198404, 956, 1336, 735, 196, 1647 +198405, 951, 1328, 732, 196, 1638 +198406, 943, 1325, 731, 193, 1627 +198407, 1114, 1442, 991, 290, 1871 +198408, 1104, 1433, 990, 288, 1856 +198409, 1099, 1428, 988, 287, 1843 +198410, 1082, 1411, 982, 285, 1823 +198411, 1074, 1403, 972, 282, 1803 +198412, 1066, 1384, 965, 281, 1790 +198501, 1059, 1373, 963, 277, 1782 +198502, 1049, 1363, 957, 273, 1757 +198503, 1043, 1350, 953, 272, 1749 +198504, 1036, 1339, 946, 269, 1742 +198505, 1025, 1336, 939, 268, 1730 +198506, 1007, 
1326, 934, 268, 1714 +198507, 1091, 1402, 1039, 317, 1776 +198508, 1082, 1394, 1030, 314, 1760 +198509, 1073, 1382, 1020, 313, 1739 +198510, 1063, 1375, 1015, 312, 1725 +198511, 1052, 1364, 1005, 310, 1718 +198512, 1040, 1351, 998, 307, 1705 +198601, 1027, 1340, 990, 303, 1693 +198602, 1022, 1331, 982, 299, 1677 +198603, 1015, 1324, 979, 296, 1666 +198604, 1008, 1314, 966, 292, 1653 +198605, 996, 1300, 960, 291, 1638 +198606, 989, 1288, 954, 288, 1620 +198607, 1155, 1410, 1049, 364, 1797 +198608, 1141, 1392, 1040, 361, 1785 +198609, 1134, 1378, 1032, 358, 1768 +198610, 1125, 1367, 1023, 356, 1752 +198611, 1113, 1353, 1012, 351, 1742 +198612, 1102, 1340, 1003, 346, 1724 +198701, 1084, 1310, 988, 345, 1691 +198702, 1068, 1303, 977, 341, 1675 +198703, 1067, 1294, 976, 338, 1670 +198704, 1062, 1283, 971, 334, 1658 +198705, 1051, 1272, 970, 334, 1650 +198706, 1047, 1267, 964, 334, 1638 +198707, 1221, 1357, 1096, 412, 2072 +198708, 1207, 1353, 1089, 410, 2057 +198709, 1196, 1343, 1077, 408, 2045 +198710, 1189, 1331, 1070, 406, 2039 +198711, 1175, 1321, 1062, 405, 2021 +198712, 1161, 1311, 1058, 399, 2003 +198801, 1145, 1304, 1049, 397, 1987 +198802, 1139, 1297, 1042, 394, 1971 +198803, 1131, 1286, 1029, 392, 1950 +198804, 1120, 1280, 1024, 384, 1936 +198805, 1100, 1271, 1016, 384, 1925 +198806, 1076, 1258, 1004, 382, 1903 +198807, 1200, 1308, 1083, 405, 2091 +198808, 1189, 1297, 1077, 403, 2078 +198809, 1178, 1286, 1067, 397, 2053 +198810, 1168, 1276, 1057, 391, 2034 +198811, 1159, 1268, 1052, 390, 2020 +198812, 1147, 1250, 1043, 386, 2005 +198901, 1127, 1238, 1028, 383, 1977 +198902, 1116, 1230, 1018, 381, 1960 +198903, 1108, 1224, 1004, 380, 1945 +198904, 1099, 1218, 991, 378, 1930 +198905, 1092, 1213, 985, 375, 1915 +198906, 1078, 1204, 974, 372, 1905 +198907, 1140, 1262, 1011, 401, 1991 +198908, 1133, 1255, 1005, 399, 1981 +198909, 1122, 1245, 996, 397, 1956 +198910, 1108, 1236, 989, 396, 1935 +198911, 1100, 1228, 986, 392, 1923 +198912, 1087, 1223, 975, 391, 1901 +199001, 1078, 1215, 970, 388, 1884 +199002, 1071, 1204, 963, 385, 1865 +199003, 1061, 1200, 957, 383, 1852 +199004, 1055, 1192, 950, 381, 1837 +199005, 1045, 1188, 941, 377, 1823 +199006, 1039, 1180, 933, 375, 1806 +199007, 1103, 1230, 998, 430, 1946 +199008, 1095, 1223, 989, 427, 1927 +199009, 1086, 1214, 981, 426, 1906 +199010, 1079, 1210, 976, 424, 1892 +199011, 1067, 1203, 971, 420, 1877 +199012, 1061, 1195, 962, 416, 1862 +199101, 1060, 1185, 954, 416, 1850 +199102, 1054, 1180, 950, 414, 1828 +199103, 1048, 1175, 945, 411, 1819 +199104, 1043, 1168, 938, 409, 1808 +199105, 1034, 1156, 932, 407, 1788 +199106, 1025, 1149, 928, 405, 1773 +199107, 1070, 1214, 971, 460, 1876 +199108, 1060, 1202, 962, 459, 1854 +199109, 1054, 1198, 957, 456, 1842 +199110, 1050, 1198, 948, 455, 1831 +199111, 1045, 1196, 940, 452, 1819 +199112, 1036, 1193, 933, 452, 1806 +199201, 1031, 1188, 921, 450, 1785 +199202, 1026, 1182, 916, 448, 1769 +199203, 1017, 1178, 911, 443, 1760 +199204, 1011, 1174, 904, 442, 1739 +199205, 1006, 1165, 900, 439, 1721 +199206, 997, 1154, 891, 434, 1704 +199207, 1149, 1204, 977, 565, 1841 +199208, 1139, 1194, 969, 557, 1820 +199209, 1126, 1186, 958, 555, 1809 +199210, 1119, 1182, 955, 549, 1797 +199211, 1109, 1177, 949, 542, 1777 +199212, 1102, 1175, 944, 542, 1768 +199301, 1100, 1173, 939, 541, 1750 +199302, 1097, 1170, 931, 540, 1733 +199303, 1091, 1168, 928, 537, 1722 +199304, 1087, 1165, 921, 535, 1704 +199305, 1084, 1162, 920, 532, 1697 +199306, 1083, 1158, 917, 528, 1687 +199307, 1219, 1211, 1039, 599, 1909 +199308, 
1211, 1205, 1035, 596, 1897 +199309, 1207, 1200, 1032, 590, 1886 +199310, 1204, 1197, 1027, 586, 1880 +199311, 1199, 1194, 1024, 584, 1868 +199312, 1194, 1185, 1022, 582, 1863 +199401, 1189, 1183, 1014, 580, 1843 +199402, 1183, 1180, 1012, 579, 1823 +199403, 1180, 1175, 1005, 574, 1811 +199404, 1178, 1172, 1002, 572, 1797 +199405, 1177, 1166, 995, 567, 1786 +199406, 1174, 1160, 989, 563, 1775 +199407, 1365, 1318, 1162, 631, 2086 +199408, 1361, 1311, 1160, 627, 2074 +199409, 1355, 1304, 1153, 626, 2056 +199410, 1346, 1301, 1143, 622, 2041 +199411, 1341, 1296, 1138, 614, 2029 +199412, 1334, 1291, 1132, 611, 2017 +199501, 1324, 1282, 1118, 606, 2003 +199502, 1317, 1278, 1111, 602, 1978 +199503, 1313, 1271, 1103, 598, 1963 +199504, 1304, 1263, 1094, 593, 1948 +199505, 1294, 1259, 1093, 590, 1930 +199506, 1280, 1250, 1087, 583, 1914 +199507, 1400, 1308, 1220, 629, 2116 +199508, 1391, 1306, 1210, 621, 2100 +199509, 1379, 1305, 1201, 613, 2088 +199510, 1372, 1300, 1194, 608, 2077 +199511, 1363, 1294, 1187, 604, 2057 +199512, 1355, 1287, 1181, 595, 2043 +199601, 1348, 1284, 1169, 592, 2025 +199602, 1337, 1276, 1163, 588, 1991 +199603, 1328, 1267, 1153, 585, 1978 +199604, 1328, 1257, 1150, 581, 1961 +199605, 1324, 1251, 1144, 578, 1939 +199606, 1316, 1241, 1136, 575, 1924 +199607, 1439, 1336, 1417, 705, 2216 +199608, 1428, 1333, 1409, 698, 2197 +199609, 1420, 1322, 1398, 693, 2183 +199610, 1406, 1317, 1393, 689, 2171 +199611, 1395, 1308, 1386, 688, 2151 +199612, 1387, 1303, 1379, 686, 2132 +199701, 1375, 1295, 1371, 679, 2113 +199702, 1365, 1287, 1360, 674, 2095 +199703, 1354, 1285, 1350, 671, 2083 +199704, 1344, 1276, 1338, 666, 2061 +199705, 1336, 1275, 1328, 662, 2042 +199706, 1330, 1270, 1318, 659, 2027 +199707, 1448, 1349, 1532, 738, 2282 +199708, 1432, 1339, 1521, 735, 2252 +199709, 1418, 1321, 1510, 730, 2228 +199710, 1401, 1315, 1501, 720, 2214 +199711, 1389, 1301, 1495, 713, 2193 +199712, 1381, 1293, 1482, 708, 2175 +199801, 1364, 1280, 1462, 702, 2143 +199802, 1347, 1267, 1448, 698, 2126 +199803, 1334, 1263, 1435, 694, 2100 +199804, 1317, 1254, 1420, 686, 2069 +199805, 1306, 1242, 1411, 683, 2040 +199806, 1295, 1236, 1398, 678, 2018 +199807, 1412, 1277, 1556, 733, 2247 +199808, 1394, 1267, 1540, 728, 2216 +199809, 1379, 1257, 1524, 720, 2191 +199810, 1361, 1245, 1497, 713, 2173 +199811, 1341, 1229, 1468, 696, 2141 +199812, 1331, 1224, 1453, 687, 2119 +199901, 1315, 1215, 1435, 678, 2087 +199902, 1299, 1201, 1418, 670, 2061 +199903, 1283, 1190, 1406, 665, 2039 +199904, 1262, 1174, 1381, 654, 2015 +199905, 1247, 1161, 1366, 649, 1997 +199906, 1235, 1152, 1347, 646, 1979 +199907, 1250, 1165, 1507, 641, 2111 +199908, 1239, 1154, 1489, 632, 2080 +199909, 1227, 1131, 1471, 626, 2061 +199910, 1213, 1121, 1453, 616, 2042 +199911, 1205, 1109, 1435, 612, 2014 +199912, 1198, 1090, 1415, 596, 1990 +200001, 1179, 1081, 1394, 590, 1968 +200002, 1170, 1075, 1377, 586, 1947 +200003, 1161, 1064, 1358, 580, 1928 +200004, 1154, 1054, 1345, 578, 1889 +200005, 1144, 1047, 1331, 575, 1867 +200006, 1134, 1040, 1316, 570, 1855 +200007, 1181, 1043, 1689, 606, 1960 +200008, 1167, 1035, 1671, 603, 1941 +200009, 1152, 1027, 1655, 596, 1927 +200010, 1140, 1009, 1638, 588, 1911 +200011, 1116, 998, 1617, 582, 1880 +200012, 1097, 984, 1603, 569, 1856 +200101, 1077, 974, 1583, 557, 1838 +200102, 1063, 957, 1565, 553, 1807 +200103, 1051, 953, 1546, 548, 1787 +200104, 1039, 945, 1523, 544, 1771 +200105, 1027, 934, 1492, 537, 1747 +200106, 1012, 920, 1464, 533, 1730 +200107, 1004, 961, 1584, 576, 1760 +200108, 992, 957, 
1554, 572, 1743 +200109, 972, 947, 1509, 569, 1723 +200110, 962, 942, 1497, 566, 1705 +200111, 953, 936, 1483, 564, 1689 +200112, 940, 926, 1458, 560, 1664 +200201, 931, 922, 1442, 558, 1641 +200202, 920, 914, 1427, 556, 1620 +200203, 913, 908, 1412, 554, 1599 +200204, 903, 902, 1398, 553, 1584 +200205, 897, 893, 1379, 547, 1569 +200206, 893, 890, 1359, 546, 1559 +200207, 905, 880, 1387, 579, 1604 +200208, 901, 870, 1375, 571, 1598 +200209, 895, 864, 1349, 568, 1579 +200210, 892, 860, 1333, 566, 1569 +200211, 884, 851, 1315, 561, 1555 +200212, 881, 843, 1301, 560, 1545 +200301, 876, 838, 1284, 557, 1536 +200302, 870, 830, 1268, 553, 1519 +200303, 867, 825, 1254, 548, 1511 +200304, 861, 818, 1230, 537, 1495 +200305, 850, 815, 1220, 532, 1490 +200306, 843, 812, 1209, 531, 1478 +200307, 838, 830, 1195, 531, 1527 +200308, 835, 824, 1184, 521, 1511 +200309, 830, 820, 1169, 517, 1497 +200310, 824, 817, 1158, 516, 1491 +200311, 819, 813, 1146, 515, 1482 +200312, 815, 811, 1134, 514, 1466 +200401, 808, 805, 1123, 513, 1455 +200402, 801, 802, 1116, 510, 1442 +200403, 798, 800, 1108, 507, 1433 +200404, 796, 796, 1095, 507, 1427 +200405, 787, 794, 1089, 505, 1413 +200406, 781, 790, 1086, 503, 1398 +200407, 806, 800, 1133, 555, 1489 +200408, 801, 793, 1126, 553, 1468 +200409, 793, 792, 1117, 549, 1460 +200410, 790, 788, 1116, 549, 1448 +200411, 784, 785, 1109, 549, 1436 +200412, 778, 782, 1099, 548, 1426 +200501, 773, 777, 1092, 545, 1414 +200502, 773, 771, 1088, 542, 1394 +200503, 769, 769, 1082, 541, 1381 +200504, 762, 761, 1080, 540, 1373 +200505, 756, 758, 1073, 538, 1366 +200506, 751, 754, 1064, 535, 1355 +200507, 790, 789, 1125, 578, 1447 +200508, 785, 786, 1115, 571, 1441 +200509, 776, 781, 1101, 568, 1432 +200510, 772, 778, 1092, 564, 1425 +200511, 761, 772, 1083, 560, 1420 +200512, 756, 771, 1072, 554, 1414 +200601, 748, 765, 1066, 553, 1401 +200602, 743, 763, 1054, 547, 1390 +200603, 736, 760, 1039, 544, 1385 +200604, 730, 760, 1032, 541, 1376 +200605, 727, 755, 1026, 535, 1369 +200606, 722, 752, 1017, 531, 1361 +200607, 766, 798, 1057, 576, 1451 +200608, 761, 793, 1048, 573, 1440 +200609, 758, 790, 1040, 572, 1433 +200610, 752, 788, 1036, 572, 1427 +200611, 748, 783, 1025, 571, 1418 +200612, 739, 781, 1020, 564, 1407 +200701, 737, 774, 1009, 559, 1397 +200702, 731, 769, 997, 553, 1384 +200703, 728, 765, 990, 551, 1371 +200704, 723, 761, 981, 549, 1365 +200705, 720, 751, 971, 543, 1352 +200706, 714, 747, 961, 542, 1344 +200707, 731, 787, 1021, 585, 1422 +200708, 728, 784, 1011, 579, 1412 +200709, 720, 781, 1001, 573, 1397 +200710, 715, 779, 989, 570, 1389 +200711, 707, 776, 980, 565, 1373 +200712, 697, 771, 970, 557, 1363 +200801, 691, 766, 961, 554, 1350 +200802, 690, 764, 953, 549, 1344 +200803, 688, 758, 949, 545, 1332 +200804, 686, 753, 942, 540, 1320 +200805, 680, 748, 936, 533, 1310 +200806, 673, 744, 926, 524, 1303 +200807, 699, 786, 966, 561, 1371 +200808, 693, 785, 959, 555, 1356 +200809, 688, 780, 947, 550, 1351 +200810, 683, 777, 935, 546, 1343 +200811, 675, 775, 926, 539, 1331 +200812, 668, 769, 917, 537, 1321 +200901, 666, 765, 910, 531, 1311 +200902, 661, 762, 908, 524, 1300 +200903, 656, 759, 901, 520, 1292 +200904, 654, 753, 895, 512, 1283 +200905, 650, 746, 886, 509, 1273 +200906, 648, 741, 881, 500, 1267 +200907, 643, 747, 881, 506, 1289 +200908, 642, 744, 870, 502, 1284 +200909, 638, 741, 865, 498, 1277 +200910, 635, 740, 864, 493, 1268 +200911, 633, 737, 856, 488, 1260 +200912, 628, 731, 847, 483, 1251 +201001, 625, 728, 841, 477, 1242 +201002, 622, 728, 834, 473, 1231 
+201003, 620, 725, 826, 470, 1224 +201004, 614, 720, 823, 465, 1218 +201005, 614, 717, 812, 465, 1208 +201006, 612, 714, 809, 463, 1200 +201007, 627, 753, 848, 494, 1239 +201008, 624, 748, 838, 485, 1222 +201009, 621, 746, 832, 482, 1220 +201010, 619, 739, 827, 479, 1217 +201011, 617, 738, 815, 474, 1209 +201012, 614, 733, 808, 469, 1201 +201101, 612, 724, 798, 462, 1189 +201102, 610, 720, 793, 459, 1181 +201103, 608, 715, 786, 455, 1179 +201104, 601, 711, 781, 452, 1173 +201105, 597, 709, 771, 448, 1161 +201106, 593, 706, 765, 446, 1154 +201107, 606, 725, 803, 465, 1182 +201108, 603, 722, 796, 465, 1172 +201109, 602, 720, 791, 463, 1170 +201110, 601, 717, 786, 462, 1165 +201111, 599, 715, 780, 458, 1158 +201112, 597, 709, 774, 455, 1148 +201201, 593, 702, 769, 453, 1140 +201202, 591, 698, 761, 451, 1133 +201203, 589, 696, 754, 446, 1128 +201204, 588, 692, 750, 443, 1123 +201205, 585, 690, 747, 439, 1117 +201206, 583, 686, 744, 438, 1108 +201207, 598, 700, 791, 451, 1128 +201208, 594, 693, 782, 451, 1124 +201209, 591, 689, 775, 442, 1122 +201210, 587, 687, 773, 438, 1112 +201211, 581, 680, 768, 437, 1104 +201212, 580, 677, 761, 436, 1100 +201301, 579, 670, 753, 430, 1093 +201302, 574, 669, 751, 428, 1091 +201303, 568, 668, 748, 427, 1085 +201304, 567, 667, 743, 423, 1079 +201305, 563, 663, 735, 422, 1072 +201306, 559, 659, 731, 421, 1065 +201307, 577, 671, 766, 459, 1093 +201308, 575, 667, 759, 458, 1086 +201309, 572, 664, 753, 456, 1084 +201310, 570, 662, 747, 455, 1081 +201311, 566, 658, 743, 449, 1079 +201312, 556, 656, 736, 448, 1074 +201401, 555, 654, 730, 447, 1070 +201402, 553, 654, 727, 442, 1066 +201403, 549, 648, 723, 439, 1060 +201404, 548, 644, 722, 438, 1058 +201405, 546, 642, 719, 437, 1053 +201406, 543, 639, 718, 435, 1048 +201407, 568, 670, 776, 537, 1096 +201408, 563, 669, 771, 535, 1092 +201409, 559, 668, 770, 533, 1088 +201410, 558, 667, 767, 530, 1084 +201411, 554, 665, 761, 529, 1080 +201412, 552, 663, 758, 524, 1074 +201501, 549, 661, 750, 523, 1070 +201502, 546, 659, 745, 520, 1066 +201503, 542, 656, 740, 517, 1061 +201504, 539, 653, 736, 515, 1055 +201505, 538, 653, 731, 513, 1048 +201506, 537, 652, 726, 509, 1047 +201507, 555, 665, 760, 595, 1106 +201508, 551, 658, 752, 594, 1100 +201509, 546, 656, 749, 592, 1097 +201510, 545, 649, 746, 590, 1093 +201511, 542, 645, 737, 585, 1084 +201512, 542, 643, 733, 582, 1079 +201601, 540, 640, 727, 580, 1068 +201602, 537, 629, 722, 578, 1060 +201603, 535, 626, 716, 574, 1052 +201604, 534, 621, 713, 574, 1044 +201605, 528, 615, 710, 573, 1041 +201606, 524, 612, 700, 568, 1031 +201607, 541, 618, 715, 624, 1071 +201608, 538, 615, 710, 616, 1063 +201609, 536, 614, 704, 612, 1053 +201610, 534, 608, 696, 607, 1044 +201611, 530, 606, 690, 602, 1041 +201612, 529, 604, 679, 598, 1038 +201701, 527, 602, 672, 594, 1031 +201702, 526, 599, 667, 593, 1029 +201703, 524, 597, 662, 589, 1024 +201704, 520, 595, 659, 585, 1018 +201705, 518, 590, 654, 582, 1011 +201706, 516, 587, 651, 581, 1002 +201707, 529, 618, 687, 617, 1049 +201708, 526, 617, 681, 617, 1047 +201709, 525, 615, 673, 614, 1045 +201710, 520, 610, 666, 613, 1040 +201711, 520, 608, 658, 607, 1029 +201712, 517, 605, 650, 603, 1019 +201801, 512, 603, 647, 598, 1009 +201802, 508, 603, 645, 598, 1004 +201803, 507, 600, 640, 592, 999 +201804, 503, 598, 637, 589, 996 +201805, 499, 594, 634, 585, 988 +201806, 498, 592, 631, 584, 986 +201807, 512, 609, 673, 647, 1045 +201808, 511, 607, 668, 643, 1039 +201809, 511, 606, 664, 641, 1036 +201810, 510, 605, 662, 639, 1033 +201811, 505, 601, 
657, 636, 1026 +201812, 502, 595, 655, 633, 1020 +201901, 499, 594, 648, 630, 1016 +201902, 497, 592, 644, 628, 1009 +201903, 497, 588, 636, 626, 1006 +201904, 494, 586, 629, 623, 1001 +201905, 491, 583, 626, 619, 996 +201906, 489, 581, 625, 619, 992 +201907, 502, 601, 655, 697, 1031 +201908, 502, 598, 649, 695, 1023 +201909, 500, 594, 644, 694, 1018 +201910, 496, 592, 639, 693, 1012 +201911, 493, 588, 634, 691, 1008 +201912, 490, 585, 633, 686, 1001 +202001, 488, 580, 628, 682, 992 +202002, 487, 576, 628, 676, 986 +202003, 483, 573, 627, 673, 983 +202004, 481, 571, 622, 673, 982 +202005, 481, 567, 616, 669, 979 +202006, 476, 564, 614, 668, 975 +202007, 487, 566, 633, 729, 1016 +202008, 486, 561, 632, 725, 1008 +202009, 483, 559, 627, 725, 1002 +202010, 482, 556, 624, 724, 1000 +202011, 482, 552, 622, 718, 994 +202012, 482, 550, 621, 715, 993 +202101, 479, 550, 618, 711, 992 +202102, 476, 547, 616, 708, 991 +202103, 476, 546, 613, 705, 988 +202104, 476, 544, 611, 702, 986 +202105, 475, 543, 602, 699, 983 +202106, 474, 543, 598, 699, 978 +202107, 514, 572, 707, 846, 1209 +202108, 512, 571, 703, 843, 1204 +202109, 512, 569, 700, 841, 1199 +202110, 510, 568, 696, 840, 1194 +202111, 509, 566, 691, 838, 1183 +202112, 509, 561, 688, 834, 1172 +202201, 507, 559, 685, 831, 1166 +202202, 506, 559, 685, 830, 1156 +202203, 506, 557, 682, 830, 1148 +202204, 504, 553, 678, 826, 1145 +202205, 502, 551, 675, 825, 1138 +202206, 500, 549, 672, 821, 1131 +202207, 550, 584, 779, 939, 1319 +202208, 548, 578, 774, 936, 1315 +202209, 547, 577, 771, 930, 1309 +202210, 546, 576, 763, 928, 1306 +202211, 545, 574, 755, 924, 1291 +202212, 544, 572, 752, 921, 1283 +202301, 541, 570, 747, 915, 1221 +202302, 539, 569, 743, 904, 1213 +202303, 537, 568, 736, 896, 1205 +202304, 536, 564, 732, 887, 1185 +202305, 536, 563, 731, 880, 1175 +202306, 533, 562, 726, 875, 1163 +202307, 541, 582, 745, 911, 1201 +202308, 537, 580, 745, 902, 1185 +202309, 533, 577, 739, 895, 1170 +202310, 532, 575, 735, 887, 1164 +202311, 529, 572, 730, 876, 1154 +202312, 525, 566, 722, 865, 1148 +202401, 524, 565, 714, 856, 1126 +202402, 523, 564, 710, 851, 1121 +202403, 519, 564, 705, 841, 1116 +202404, 513, 562, 701, 834, 1105 +202405, 513, 559, 699, 825, 1092 +202406, 510, 556, 698, 814, 1081 +202407, 528, 561, 709, 832, 1111 +202408, 526, 554, 705, 828, 1098 +202409, 522, 550, 698, 822, 1086 +202410, 517, 548, 694, 815, 1075 +202411, 512, 545, 689, 809, 1065 +202412, 511, 541, 685, 806, 1052 +202501, 506, 538, 683, 796, 1042 +202502, 505, 532, 679, 795, 1033 +202503, 500, 532, 671, 785, 1025 +202504, 500, 531, 667, 780, 1014 +202505, 495, 526, 661, 771, 1000 +202506, 493, 526, 659, 762, 994 +202507, 489, 537, 669, 759, 983 +202508, 487, 531, 664, 748, 975 +202509, 484, 529, 662, 739, 971 +202510, 478, 529, 660, 735, 965 +202511, 475, 527, 656, 727, 956 +202512, 473, 527, 652, 726, 948 + + + Average Firm Size +,Cnsmr,Manuf,HiTec,Hlth,Other +192607, 39.34, 54.81, 146.41, 25.52, 51.64 +192608, 40.13, 56.21, 149.03, 25.82, 52.59 +192609, 41.09, 57.30, 152.42, 26.73, 54.67 +192610, 41.82, 56.67, 152.25, 26.87, 54.60 +192611, 40.06, 55.11, 148.76, 26.54, 52.86 +192612, 41.16, 56.32, 153.15, 27.81, 53.74 +192701, 42.67, 57.40, 152.96, 27.62, 55.52 +192702, 42.06, 57.64, 153.61, 28.85, 56.17 +192703, 44.54, 59.71, 159.88, 29.16, 59.33 +192704, 45.05, 58.52, 163.90, 29.40, 59.71 +192705, 46.54, 57.71, 171.48, 30.03, 60.68 +192706, 49.12, 60.79, 180.25, 31.08, 64.37 +192707, 45.58, 62.30, 161.80, 33.05, 59.42 +192708, 49.44, 66.85, 174.22, 36.12, 62.80 
+192709, 51.73, 68.21, 178.96, 36.02, 61.95 +192710, 54.58, 71.17, 184.25, 37.99, 64.44 +192711, 52.88, 67.50, 175.83, 39.62, 61.86 +192712, 55.53, 72.64, 194.69, 40.86, 64.40 +192801, 57.26, 74.14, 195.33, 40.55, 65.21 +192802, 56.78, 74.12, 195.23, 41.46, 64.42 +192803, 55.69, 72.98, 194.25, 39.95, 62.90 +192804, 63.18, 78.46, 212.50, 37.75, 66.36 +192805, 63.34, 84.36, 218.12, 38.56, 68.73 +192806, 65.17, 84.16, 235.37, 41.86, 68.53 +192807, 60.76, 79.40, 228.46, 62.28, 68.91 +192808, 62.16, 80.05, 227.76, 63.68, 68.49 +192809, 67.81, 84.71, 243.97, 72.50, 71.44 +192810, 70.11, 88.24, 242.90, 71.51, 71.98 +192811, 71.20, 90.19, 248.61, 71.46, 71.19 +192812, 76.27, 104.15, 281.81, 74.61, 77.98 +192901, 75.52, 104.37, 290.03, 74.09, 77.79 +192902, 76.51, 110.50, 323.97, 89.60, 82.53 +192903, 74.90, 111.68, 319.93, 89.08, 82.32 +192904, 71.98, 112.42, 342.76, 87.52, 79.93 +192905, 72.74, 114.96, 351.27, 88.71, 80.42 +192906, 65.57, 108.10, 338.09, 81.19, 79.74 +192907, 66.86, 121.73, 368.59, 85.62, 88.62 +192908, 67.17, 126.93, 409.03, 87.74, 94.24 +192909, 70.20, 139.09, 457.52, 89.94, 101.14 +192910, 65.98, 133.13, 432.22, 83.30, 93.66 +192911, 52.43, 104.94, 333.97, 68.18, 82.73 +192912, 45.67, 91.35, 284.16, 62.27, 74.67 +193001, 45.65, 92.29, 299.15, 62.54, 73.77 +193002, 48.06, 98.14, 318.07, 64.01, 77.81 +193003, 48.30, 99.92, 341.49, 65.05, 80.08 +193004, 49.94, 110.24, 367.85, 75.54, 83.56 +193005, 49.15, 109.42, 364.77, 74.54, 78.34 +193006, 49.81, 106.86, 355.78, 72.83, 77.20 +193007, 43.71, 89.28, 256.01, 61.53, 65.61 +193008, 45.97, 93.51, 263.12, 65.50, 67.62 +193009, 46.37, 92.66, 268.05, 69.96, 66.97 +193010, 40.83, 79.79, 232.99, 63.53, 59.56 +193011, 37.66, 72.86, 207.66, 60.71, 54.39 +193012, 38.31, 69.62, 200.67, 58.24, 51.35 +193101, 35.86, 63.84, 184.38, 53.87, 46.29 +193102, 38.08, 66.81, 193.51, 58.26, 52.02 +193103, 42.10, 75.14, 219.45, 62.22, 54.65 +193104, 40.96, 69.51, 204.99, 63.00, 49.30 +193105, 38.42, 61.06, 193.76, 60.68, 44.25 +193106, 34.23, 52.36, 172.87, 53.86, 36.32 +193107, 37.77, 59.44, 210.27, 60.13, 45.88 +193108, 36.75, 54.98, 194.94, 61.66, 41.11 +193109, 36.31, 55.71, 198.83, 61.28, 39.62 +193110, 25.71, 36.96, 143.67, 42.90, 28.76 +193111, 28.33, 40.44, 153.37, 47.70, 29.32 +193112, 26.45, 37.32, 138.34, 45.75, 23.57 +193201, 23.56, 31.55, 122.22, 43.55, 19.24 +193202, 23.37, 30.79, 113.93, 42.36, 20.57 +193203, 24.15, 32.65, 125.03, 42.61, 20.40 +193204, 21.66, 29.36, 108.83, 39.56, 16.98 +193205, 18.17, 23.98, 91.14, 31.70, 13.18 +193206, 13.94, 19.44, 76.47, 20.28, 9.08 +193207, 13.76, 19.49, 74.78, 22.20, 8.93 +193208, 17.62, 26.34, 93.02, 28.65, 13.92 +193209, 22.80, 35.93, 124.47, 38.31, 22.05 +193210, 23.15, 34.07, 120.42, 33.56, 21.01 +193211, 19.73, 30.16, 106.13, 27.30, 17.23 +193212, 18.51, 27.97, 102.53, 28.19, 15.98 +193301, 19.26, 29.18, 105.05, 29.95, 16.75 +193302, 19.65, 28.84, 105.55, 29.57, 18.17 +193303, 16.81, 23.75, 93.34, 25.90, 15.10 +193304, 18.39, 24.08, 89.56, 24.92, 15.55 +193305, 26.37, 34.65, 112.03, 34.15, 20.35 +193306, 30.59, 42.46, 132.50, 41.71, 26.73 +193307, 35.25, 47.64, 135.93, 46.26, 29.25 +193308, 32.33, 41.84, 126.48, 37.39, 27.30 +193309, 36.07, 47.37, 136.09, 36.78, 31.26 +193310, 32.36, 42.58, 122.48, 36.16, 26.22 +193311, 29.37, 39.49, 112.57, 33.96, 23.21 +193312, 32.71, 43.50, 121.75, 36.66, 24.36 +193401, 33.22, 44.45, 121.69, 32.24, 25.40 +193402, 37.30, 49.60, 133.58, 37.63, 30.32 +193403, 36.25, 47.96, 131.92, 35.78, 29.65 +193404, 36.44, 47.21, 132.80, 35.44, 29.82 +193405, 36.05, 45.88, 
130.58, 38.25, 29.31 +193406, 33.42, 42.23, 123.79, 37.62, 27.09 +193407, 33.31, 43.39, 121.17, 25.40, 27.44 +193408, 30.72, 38.14, 112.36, 24.91, 22.52 +193409, 32.27, 40.32, 116.89, 23.85, 23.47 +193410, 32.42, 39.79, 116.31, 23.97, 23.28 +193411, 32.42, 38.74, 114.86, 23.78, 23.34 +193412, 35.81, 42.19, 119.39, 24.46, 24.55 +193501, 35.66, 42.25, 118.97, 24.53, 24.46 +193502, 34.07, 40.86, 119.09, 24.31, 22.92 +193503, 34.14, 39.26, 119.89, 25.05, 21.71 +193504, 32.27, 38.09, 115.55, 24.18, 20.41 +193505, 34.35, 42.26, 126.64, 24.94, 22.06 +193506, 34.83, 43.68, 138.29, 23.64, 23.02 +193507, 36.52, 46.75, 145.65, 24.53, 24.52 +193508, 39.95, 49.91, 156.32, 25.30, 26.82 +193509, 40.66, 51.05, 162.08, 25.08, 27.44 +193510, 42.26, 51.57, 169.53, 25.30, 27.20 +193511, 44.99, 56.11, 178.88, 26.79, 27.85 +193512, 45.67, 57.83, 193.08, 26.97, 31.41 +193601, 46.68, 61.83, 193.88, 27.54, 32.19 +193602, 47.65, 67.33, 203.99, 28.48, 36.00 +193603, 48.25, 69.09, 211.49, 29.17, 37.34 +193604, 49.93, 70.32, 205.48, 30.14, 36.10 +193605, 46.49, 63.93, 191.04, 27.35, 33.39 +193606, 48.63, 66.58, 202.31, 28.37, 35.77 +193607, 48.96, 65.28, 198.73, 46.06, 35.41 +193608, 51.02, 70.00, 211.91, 48.19, 38.71 +193609, 50.98, 70.43, 216.19, 47.21, 39.62 +193610, 51.52, 71.12, 213.39, 47.34, 39.82 +193611, 54.35, 77.30, 224.65, 49.11, 43.28 +193612, 55.67, 79.16, 234.17, 50.45, 43.19 +193701, 52.38, 81.50, 234.60, 48.53, 42.36 +193702, 54.29, 84.45, 243.18, 50.21, 43.39 +193703, 54.52, 85.48, 237.70, 48.07, 45.38 +193704, 52.99, 86.50, 227.74, 45.40, 46.45 +193705, 49.14, 80.04, 213.06, 43.88, 43.25 +193706, 47.87, 79.60, 215.43, 44.19, 41.69 +193707, 43.73, 74.46, 195.10, 38.25, 37.47 +193708, 47.55, 81.69, 207.38, 39.82, 40.63 +193709, 45.81, 76.79, 199.64, 40.03, 37.14 +193710, 40.36, 64.68, 181.62, 35.49, 31.06 +193711, 36.70, 57.43, 170.22, 34.21, 27.93 +193712, 32.19, 51.73, 162.22, 32.13, 25.64 +193801, 29.54, 49.79, 157.42, 31.08, 23.50 +193802, 31.15, 49.11, 153.53, 32.15, 23.39 +193803, 32.47, 53.02, 154.49, 34.70, 24.46 +193804, 25.69, 39.60, 118.30, 29.46, 17.12 +193805, 29.16, 45.36, 135.92, 32.93, 19.72 +193806, 28.30, 42.72, 134.68, 33.27, 19.01 +193807, 34.00, 52.66, 155.01, 36.38, 23.25 +193808, 37.62, 56.19, 160.24, 38.47, 25.53 +193809, 37.49, 53.62, 159.50, 38.39, 24.66 +193810, 37.56, 54.25, 159.78, 38.16, 24.23 +193811, 39.86, 58.83, 170.42, 40.29, 27.04 +193812, 38.91, 57.47, 167.17, 40.65, 26.43 +193901, 39.66, 60.51, 170.15, 40.78, 28.31 +193902, 37.90, 55.93, 165.55, 40.37, 25.91 +193903, 39.42, 57.23, 170.73, 41.66, 27.06 +193904, 35.21, 49.35, 155.27, 39.63, 22.84 +193905, 35.28, 48.93, 155.91, 40.01, 22.58 +193906, 37.68, 51.72, 166.25, 41.98, 24.32 +193907, 36.00, 48.18, 152.23, 42.48, 22.37 +193908, 39.69, 53.11, 166.84, 43.77, 25.12 +193909, 36.52, 49.70, 156.91, 41.17, 22.47 +193910, 39.92, 61.40, 165.40, 43.86, 28.51 +193911, 40.76, 59.87, 167.57, 43.93, 28.24 +193912, 39.59, 56.08, 165.91, 44.25, 26.75 +194001, 40.68, 57.43, 169.52, 45.59, 26.72 +194002, 40.32, 55.51, 166.38, 44.75, 25.94 +194003, 40.68, 56.22, 166.87, 45.20, 26.03 +194004, 41.64, 57.00, 168.33, 45.83, 26.58 +194005, 41.79, 57.08, 167.58, 45.67, 26.37 +194006, 31.97, 43.90, 139.20, 35.71, 19.85 +194007, 33.54, 45.44, 146.34, 35.44, 21.33 +194008, 34.41, 46.92, 150.88, 35.24, 22.00 +194009, 35.46, 47.41, 152.29, 34.22, 22.69 +194010, 36.37, 48.17, 154.60, 35.58, 23.43 +194011, 36.54, 50.23, 157.55, 34.50, 24.83 +194012, 35.67, 48.49, 155.49, 33.64, 24.48 +194101, 35.45, 49.04, 155.65, 33.33, 24.23 +194102, 
33.92, 46.28, 152.07, 32.58, 23.79 +194103, 33.15, 45.38, 149.67, 30.82, 23.60 +194104, 33.02, 45.63, 149.72, 31.15, 23.86 +194105, 30.81, 43.49, 138.74, 29.51, 22.78 +194106, 30.49, 44.49, 138.71, 28.83, 22.46 +194107, 31.86, 46.01, 139.49, 30.51, 22.44 +194108, 33.34, 49.54, 140.07, 32.94, 24.05 +194109, 33.54, 48.43, 141.25, 32.75, 24.16 +194110, 34.12, 47.14, 139.33, 32.62, 23.35 +194111, 31.70, 44.87, 131.40, 31.24, 22.35 +194112, 30.10, 43.97, 126.58, 30.11, 21.80 +194201, 27.55, 42.12, 118.14, 29.52, 20.32 +194202, 28.33, 41.40, 117.61, 28.47, 22.22 +194203, 27.64, 39.73, 115.91, 26.71, 21.71 +194204, 26.21, 36.84, 106.48, 25.17, 19.58 +194205, 24.92, 35.20, 100.25, 23.91, 19.12 +194206, 27.16, 36.13, 107.93, 27.22, 19.67 +194207, 27.82, 36.91, 106.13, 27.47, 19.43 +194208, 28.56, 38.15, 109.61, 27.78, 20.72 +194209, 28.76, 38.59, 111.36, 28.28, 21.11 +194210, 28.92, 39.68, 113.50, 28.30, 21.65 +194211, 30.11, 42.84, 120.54, 29.35, 23.53 +194212, 30.59, 41.76, 123.23, 29.64, 22.65 +194301, 32.05, 44.04, 124.98, 32.13, 23.28 +194302, 34.33, 47.26, 133.08, 32.80, 24.87 +194303, 35.75, 50.07, 141.30, 33.84, 27.12 +194304, 37.60, 53.11, 144.94, 35.29, 29.67 +194305, 37.91, 53.02, 148.89, 35.66, 30.29 +194306, 40.26, 55.53, 156.68, 37.64, 31.84 +194307, 41.34, 55.95, 155.72, 37.63, 31.72 +194308, 39.40, 53.06, 150.14, 36.26, 29.99 +194309, 40.13, 53.25, 151.06, 36.19, 30.31 +194310, 40.80, 54.32, 154.20, 37.57, 30.79 +194311, 40.30, 53.82, 152.34, 37.63, 30.36 +194312, 38.02, 49.81, 146.21, 35.97, 27.57 +194401, 40.47, 52.72, 152.20, 37.25, 29.62 +194402, 41.30, 53.47, 153.35, 37.24, 31.31 +194403, 41.51, 53.17, 152.42, 36.08, 32.10 +194404, 42.76, 54.10, 154.01, 36.88, 32.76 +194405, 42.05, 53.09, 152.18, 35.89, 31.94 +194406, 44.35, 55.54, 156.79, 37.75, 33.12 +194407, 47.12, 57.19, 153.63, 38.71, 34.15 +194408, 46.16, 56.24, 152.00, 38.11, 34.06 +194409, 47.34, 56.48, 153.75, 38.21, 34.08 +194410, 47.64, 56.19, 152.04, 38.31, 33.95 +194411, 47.34, 56.38, 153.02, 38.58, 34.37 +194412, 47.87, 56.47, 154.42, 38.52, 35.40 +194501, 49.00, 58.67, 156.73, 39.11, 38.37 +194502, 49.87, 60.78, 155.47, 39.22, 38.53 +194503, 52.24, 64.74, 162.53, 40.70, 41.44 +194504, 50.18, 61.46, 157.87, 39.88, 39.80 +194505, 54.34, 66.37, 166.05, 42.29, 43.29 +194506, 55.54, 66.21, 170.49, 43.84, 44.25 +194507, 53.70, 66.01, 161.93, 42.93, 45.43 +194508, 53.11, 64.10, 162.43, 42.12, 42.86 +194509, 57.29, 67.60, 171.16, 45.80, 43.79 +194510, 59.98, 70.85, 174.18, 48.71, 46.32 +194511, 62.77, 73.46, 176.92, 51.79, 48.92 +194512, 64.99, 77.26, 182.08, 55.94, 53.26 +194601, 66.29, 77.61, 183.19, 56.72, 53.10 +194602, 70.86, 82.40, 190.75, 61.01, 56.93 +194603, 66.50, 77.40, 180.86, 58.17, 53.30 +194604, 70.45, 82.79, 182.39, 66.55, 55.47 +194605, 73.15, 87.30, 185.35, 76.20, 56.38 +194606, 75.37, 90.68, 192.54, 80.19, 58.05 +194607, 69.66, 85.87, 184.56, 72.08, 57.00 +194608, 67.83, 83.67, 181.47, 69.62, 54.05 +194609, 63.30, 78.07, 168.70, 67.05, 50.21 +194610, 56.35, 70.42, 155.20, 62.20, 42.77 +194611, 55.62, 69.49, 148.89, 61.90, 42.52 +194612, 53.88, 69.79, 151.45, 62.43, 42.26 +194701, 55.93, 73.55, 156.14, 66.71, 43.06 +194702, 57.51, 73.69, 160.87, 63.03, 43.18 +194703, 57.25, 72.34, 157.37, 60.13, 42.43 +194704, 55.44, 71.68, 152.40, 59.19, 40.77 +194705, 51.81, 68.68, 148.40, 53.97, 38.18 +194706, 50.97, 67.59, 147.14, 52.25, 37.12 +194707, 52.73, 70.01, 139.59, 55.77, 36.87 +194708, 55.13, 72.46, 144.35, 57.07, 39.02 +194709, 53.75, 71.15, 141.46, 54.62, 37.60 +194710, 53.25, 70.46, 141.38, 53.03, 37.03 
+194711, 54.32, 72.89, 142.66, 53.54, 37.32 +194712, 52.60, 70.78, 140.07, 51.58, 36.20 +194801, 52.81, 73.46, 140.39, 52.96, 37.26 +194802, 50.76, 69.90, 136.59, 48.42, 36.85 +194803, 47.69, 66.39, 133.06, 46.45, 35.16 +194804, 50.43, 72.33, 139.92, 49.22, 38.37 +194805, 51.16, 75.86, 141.32, 50.15, 39.59 +194806, 55.25, 80.72, 151.73, 53.29, 41.97 +194807, 53.88, 80.12, 147.65, 51.99, 40.54 +194808, 51.51, 75.36, 141.56, 49.66, 38.46 +194809, 51.61, 74.49, 143.50, 48.46, 38.86 +194810, 49.77, 72.02, 139.51, 46.79, 37.28 +194811, 52.14, 76.98, 144.87, 49.09, 38.99 +194812, 47.32, 67.92, 136.42, 45.80, 34.54 +194901, 47.80, 70.20, 139.42, 47.62, 35.10 +194902, 48.90, 69.71, 137.57, 49.67, 35.12 +194903, 47.16, 66.97, 134.36, 48.94, 33.79 +194904, 48.62, 69.59, 137.46, 51.44, 35.09 +194905, 47.33, 68.41, 133.75, 51.44, 34.51 +194906, 46.44, 65.39, 128.63, 50.20, 33.33 +194907, 46.65, 63.89, 127.15, 51.13, 32.36 +194908, 49.44, 67.46, 132.83, 54.08, 33.67 +194909, 50.35, 69.19, 133.70, 53.90, 33.94 +194910, 51.45, 71.48, 135.35, 54.70, 34.88 +194911, 52.82, 73.99, 139.06, 56.42, 35.64 +194912, 53.23, 73.87, 142.55, 58.60, 35.76 +195001, 56.06, 76.60, 147.92, 63.48, 38.02 +195002, 56.88, 77.59, 152.64, 64.03, 39.16 +195003, 57.43, 78.01, 155.54, 64.95, 39.35 +195004, 57.36, 79.18, 159.25, 62.86, 38.80 +195005, 59.07, 83.13, 164.86, 66.21, 39.33 +195006, 60.81, 87.55, 169.52, 68.50, 39.57 +195007, 57.47, 82.21, 167.23, 65.89, 35.66 +195008, 56.77, 84.58, 163.71, 61.16, 38.25 +195009, 59.14, 88.14, 169.63, 63.09, 38.88 +195010, 63.04, 91.55, 171.51, 66.76, 41.19 +195011, 63.08, 91.60, 170.38, 65.39, 40.45 +195012, 61.40, 94.16, 171.35, 67.78, 41.54 +195101, 62.65, 100.30, 173.97, 71.58, 45.71 +195102, 66.17, 106.11, 182.31, 73.65, 48.77 +195103, 66.95, 106.90, 186.50, 74.45, 48.33 +195104, 65.87, 103.78, 182.59, 75.49, 46.55 +195105, 66.83, 111.00, 186.14, 83.19, 48.22 +195106, 64.35, 107.74, 182.78, 84.23, 46.32 +195107, 61.04, 106.10, 183.04, 83.48, 43.30 +195108, 62.79, 115.42, 190.71, 95.33, 46.42 +195109, 65.00, 119.87, 198.40, 93.68, 47.73 +195110, 65.86, 119.84, 199.70, 91.77, 49.24 +195111, 63.92, 117.57, 192.01, 86.12, 47.37 +195112, 63.82, 116.87, 194.30, 85.22, 47.91 +195201, 64.52, 121.59, 197.88, 89.28, 48.53 +195202, 64.18, 124.49, 196.36, 89.71, 50.07 +195203, 62.46, 119.80, 192.09, 85.58, 49.04 +195204, 64.90, 125.34, 196.21, 85.69, 51.84 +195205, 63.16, 117.24, 191.97, 80.83, 50.01 +195206, 64.39, 120.77, 195.44, 82.53, 51.10 +195207, 66.79, 127.11, 206.06, 82.53, 53.31 +195208, 67.81, 128.19, 207.60, 78.77, 53.84 +195209, 67.85, 125.33, 208.10, 77.97, 53.48 +195210, 66.61, 121.89, 206.60, 74.38, 52.50 +195211, 66.40, 120.98, 206.32, 72.15, 51.50 +195212, 69.50, 126.98, 219.32, 76.93, 54.79 +195301, 71.68, 130.59, 221.51, 75.15, 56.50 +195302, 72.18, 129.77, 219.98, 71.14, 56.98 +195303, 71.79, 128.34, 223.31, 72.29, 56.49 +195304, 70.14, 126.78, 223.07, 71.82, 55.40 +195305, 69.33, 122.07, 221.36, 67.94, 53.51 +195306, 69.01, 122.17, 222.67, 68.70, 54.82 +195307, 67.46, 119.47, 227.19, 64.10, 52.91 +195308, 68.57, 123.52, 230.77, 66.38, 53.56 +195309, 65.19, 116.91, 224.59, 66.11, 49.62 +195310, 65.20, 116.84, 227.75, 66.14, 48.90 +195311, 67.30, 123.15, 237.00, 67.76, 50.60 +195312, 67.28, 126.01, 244.62, 72.65, 51.53 +195401, 66.76, 126.28, 243.23, 71.26, 49.93 +195402, 69.59, 133.16, 253.70, 75.24, 53.14 +195403, 69.30, 134.35, 262.99, 74.73, 53.61 +195404, 70.41, 139.76, 274.28, 74.70, 54.30 +195405, 71.94, 146.86, 289.80, 75.45, 55.77 +195406, 73.83, 151.62, 290.45, 
76.35, 57.86 +195407, 75.49, 152.88, 308.26, 76.63, 57.67 +195408, 80.79, 160.21, 313.49, 82.87, 61.50 +195409, 78.23, 155.43, 302.97, 80.36, 59.78 +195410, 84.48, 165.58, 314.23, 82.36, 62.12 +195411, 83.11, 162.53, 309.62, 80.88, 62.61 +195412, 87.00, 178.31, 335.55, 90.36, 69.04 +195501, 91.01, 188.62, 340.72, 95.59, 75.09 +195502, 91.64, 188.91, 348.55, 92.69, 75.65 +195503, 91.79, 194.81, 362.93, 94.93, 78.09 +195504, 90.98, 195.09, 357.45, 100.44, 78.49 +195505, 94.70, 200.44, 371.34, 104.73, 81.51 +195506, 94.18, 202.00, 379.21, 107.62, 82.90 +195507, 102.47, 218.95, 393.31, 107.44, 84.77 +195508, 112.26, 220.69, 397.99, 107.43, 83.49 +195509, 109.32, 222.58, 394.38, 107.14, 84.03 +195510, 114.27, 220.94, 384.63, 103.47, 81.47 +195511, 110.87, 215.99, 372.10, 104.16, 79.14 +195512, 116.97, 230.09, 393.49, 109.84, 84.95 +195601, 115.32, 236.47, 401.79, 116.35, 83.97 +195602, 110.43, 230.47, 393.90, 108.58, 81.51 +195603, 113.81, 239.11, 406.59, 117.02, 83.77 +195604, 116.24, 258.61, 428.84, 125.23, 88.26 +195605, 113.12, 262.02, 427.40, 128.50, 88.91 +195606, 108.19, 246.12, 412.05, 126.40, 83.58 +195607, 115.79, 254.70, 439.40, 133.25, 87.23 +195608, 120.37, 270.89, 457.44, 137.18, 89.23 +195609, 118.98, 259.69, 444.61, 132.39, 85.81 +195610, 115.74, 244.97, 418.98, 127.21, 81.69 +195611, 115.51, 247.62, 423.74, 127.75, 82.40 +195612, 112.16, 249.15, 427.42, 125.03, 81.86 +195701, 112.70, 259.06, 439.16, 131.95, 82.95 +195702, 109.40, 249.61, 430.86, 128.82, 82.28 +195703, 106.73, 242.65, 429.63, 127.81, 79.71 +195704, 107.77, 247.89, 439.83, 137.97, 81.16 +195705, 112.54, 259.09, 460.85, 145.24, 83.60 +195706, 113.95, 269.90, 477.26, 151.46, 84.20 +195707, 113.37, 269.10, 445.91, 163.80, 82.54 +195708, 116.29, 270.11, 454.42, 167.85, 83.31 +195709, 112.60, 254.24, 440.96, 162.07, 78.21 +195710, 108.17, 237.65, 417.83, 156.90, 73.00 +195711, 104.89, 226.49, 415.06, 155.54, 67.83 +195712, 102.71, 232.04, 438.37, 167.68, 67.62 +195801, 98.47, 222.34, 428.93, 165.87, 64.05 +195802, 104.65, 231.75, 446.63, 165.08, 70.95 +195803, 104.42, 224.98, 443.16, 172.52, 69.25 +195804, 108.63, 233.55, 448.36, 186.23, 71.09 +195805, 112.84, 240.68, 453.43, 198.12, 74.15 +195806, 116.59, 244.53, 460.85, 200.30, 76.53 +195807, 122.17, 253.21, 444.98, 201.85, 76.49 +195808, 128.35, 264.41, 462.11, 213.63, 80.46 +195809, 130.37, 267.53, 471.78, 217.15, 81.85 +195810, 138.76, 278.07, 495.19, 242.23, 85.18 +195811, 141.52, 285.48, 510.89, 244.55, 89.14 +195812, 145.39, 291.43, 527.38, 262.76, 92.83 +195901, 150.04, 303.56, 591.25, 273.91, 95.25 +195902, 152.30, 305.61, 593.25, 272.12, 97.14 +195903, 153.49, 303.97, 622.55, 281.72, 98.43 +195904, 151.90, 306.28, 631.33, 293.38, 98.27 +195905, 159.68, 314.24, 674.28, 327.99, 101.14 +195906, 163.92, 318.35, 689.82, 323.64, 101.33 +195907, 169.78, 320.22, 651.99, 348.12, 100.90 +195908, 178.45, 332.80, 655.28, 364.37, 100.16 +195909, 179.37, 325.72, 646.78, 350.51, 97.99 +195910, 172.86, 307.73, 624.96, 332.63, 93.66 +195911, 174.69, 311.17, 653.92, 340.10, 93.77 +195912, 175.15, 313.52, 684.28, 357.78, 92.76 +196001, 180.43, 321.32, 715.35, 350.70, 93.82 +196002, 167.22, 298.17, 684.58, 328.22, 89.48 +196003, 168.22, 296.33, 728.92, 328.33, 89.56 +196004, 164.72, 290.10, 732.53, 332.88, 87.16 +196005, 164.84, 282.20, 731.34, 338.23, 86.55 +196006, 168.57, 286.19, 791.26, 383.70, 86.26 +196007, 171.64, 289.80, 736.80, 374.20, 89.90 +196008, 169.39, 282.98, 718.73, 359.46, 88.97 +196009, 175.28, 289.11, 743.97, 367.40, 91.19 +196010, 165.25, 274.64, 684.81, 
334.88, 85.76 +196011, 167.10, 274.22, 672.70, 322.90, 85.37 +196012, 174.88, 283.88, 710.22, 334.53, 90.92 +196101, 179.28, 297.13, 757.93, 354.83, 94.32 +196102, 190.22, 318.23, 797.20, 391.33, 101.55 +196103, 201.39, 328.17, 810.72, 410.72, 107.34 +196104, 210.45, 333.54, 847.34, 425.80, 110.87 +196105, 208.86, 336.59, 852.66, 424.22, 111.40 +196106, 214.10, 344.75, 860.16, 432.90, 115.49 +196107, 209.06, 329.10, 802.07, 411.58, 111.82 +196108, 217.31, 338.75, 834.03, 425.68, 113.11 +196109, 224.31, 344.84, 850.90, 441.11, 119.75 +196110, 228.18, 331.92, 839.75, 435.24, 117.43 +196111, 237.32, 340.85, 855.61, 450.78, 119.96 +196112, 251.34, 351.36, 908.76, 463.38, 124.46 +196201, 249.76, 350.97, 927.73, 453.68, 122.34 +196202, 237.15, 341.89, 876.71, 431.45, 120.44 +196203, 236.67, 350.42, 890.46, 446.69, 121.97 +196204, 236.25, 349.18, 877.54, 448.02, 119.47 +196205, 222.79, 331.45, 807.20, 396.94, 110.89 +196206, 203.50, 303.91, 729.84, 353.09, 101.56 +196207, 184.51, 271.42, 609.11, 312.24, 94.81 +196208, 195.24, 286.37, 665.11, 336.17, 101.14 +196209, 199.68, 291.49, 678.42, 308.54, 104.50 +196210, 190.09, 280.06, 626.69, 295.47, 98.23 +196211, 188.63, 280.22, 635.08, 302.34, 98.58 +196212, 205.58, 310.03, 700.04, 354.01, 112.54 +196301, 209.07, 315.27, 701.88, 356.42, 113.68 +196302, 221.43, 329.66, 736.10, 376.21, 120.11 +196303, 215.05, 322.28, 710.25, 365.42, 118.71 +196304, 224.27, 333.94, 725.61, 369.24, 121.92 +196305, 233.97, 349.21, 762.28, 396.86, 129.76 +196306, 244.37, 352.35, 780.15, 394.80, 134.59 +196307, 148.29, 234.75, 396.37, 318.47, 77.38 +196308, 147.74, 236.26, 397.24, 319.97, 76.48 +196309, 156.28, 247.21, 413.58, 349.45, 81.15 +196310, 155.60, 242.78, 419.35, 334.86, 78.41 +196311, 164.98, 245.23, 443.21, 345.88, 79.89 +196312, 158.59, 242.90, 451.81, 338.73, 79.75 +196401, 160.95, 251.12, 456.30, 343.34, 81.82 +196402, 163.50, 259.98, 470.72, 355.87, 82.69 +196403, 165.54, 263.76, 471.44, 360.58, 86.75 +196404, 169.09, 269.47, 474.66, 353.09, 88.80 +196405, 171.50, 271.43, 472.86, 347.79, 89.35 +196406, 174.97, 275.88, 477.90, 348.49, 91.33 +196407, 167.74, 270.98, 461.25, 350.57, 90.72 +196408, 174.25, 278.59, 452.30, 376.45, 91.96 +196409, 174.59, 274.24, 441.04, 359.31, 88.53 +196410, 180.89, 285.31, 439.69, 368.72, 91.98 +196411, 181.63, 289.39, 435.55, 378.99, 93.40 +196412, 181.27, 290.83, 427.26, 385.53, 91.73 +196501, 183.39, 290.47, 439.79, 401.60, 89.84 +196502, 191.21, 301.81, 456.02, 432.06, 94.24 +196503, 192.71, 301.86, 460.08, 441.17, 96.40 +196504, 192.94, 297.28, 462.24, 436.59, 96.54 +196505, 205.05, 304.60, 478.40, 449.17, 98.68 +196506, 200.67, 303.13, 484.66, 442.18, 97.67 +196507, 180.95, 279.00, 433.73, 406.50, 90.59 +196508, 182.93, 283.52, 439.24, 432.63, 93.93 +196509, 188.51, 289.25, 453.93, 458.43, 98.63 +196510, 195.62, 300.18, 469.23, 468.32, 100.42 +196511, 204.43, 309.05, 474.23, 483.02, 104.59 +196512, 200.27, 309.93, 467.05, 498.97, 107.26 +196601, 203.61, 315.27, 463.57, 517.83, 110.52 +196602, 203.98, 319.05, 466.01, 543.63, 116.88 +196603, 201.17, 312.52, 476.30, 533.61, 119.58 +196604, 195.69, 307.25, 461.98, 524.49, 116.25 +196605, 198.06, 315.92, 482.86, 524.00, 119.62 +196606, 186.30, 300.70, 460.51, 494.75, 110.17 +196607, 174.60, 288.92, 440.84, 497.65, 110.79 +196608, 171.77, 287.27, 434.41, 488.79, 108.83 +196609, 158.50, 264.18, 410.96, 447.32, 96.28 +196610, 156.17, 263.45, 403.77, 441.84, 94.47 +196611, 157.94, 279.42, 422.14, 463.01, 97.31 +196612, 156.40, 280.36, 436.82, 492.32, 105.04 +196701, 153.65, 283.05, 
445.67, 491.35, 107.89 +196702, 170.56, 303.38, 482.96, 522.47, 121.26 +196703, 171.49, 303.50, 501.39, 527.82, 120.67 +196704, 178.03, 316.88, 527.71, 568.91, 125.73 +196705, 193.35, 325.81, 555.80, 593.89, 128.89 +196706, 183.54, 314.07, 529.54, 560.48, 125.55 +196707, 183.23, 307.80, 526.73, 586.44, 131.77 +196708, 199.05, 324.72, 527.14, 608.32, 142.28 +196709, 198.41, 321.74, 519.05, 610.20, 141.86 +196710, 205.73, 333.98, 549.81, 627.54, 143.01 +196711, 200.47, 321.33, 567.47, 595.61, 133.78 +196712, 200.07, 323.78, 573.40, 601.20, 133.24 +196801, 204.37, 338.37, 584.10, 626.19, 139.82 +196802, 200.52, 328.67, 561.10, 592.27, 137.34 +196803, 195.82, 317.29, 539.78, 565.21, 131.04 +196804, 198.45, 317.09, 551.23, 565.21, 130.67 +196805, 221.88, 340.66, 599.52, 631.98, 147.04 +196806, 227.02, 348.12, 610.67, 651.09, 155.77 +196807, 222.01, 349.14, 550.05, 625.43, 163.06 +196808, 215.63, 347.28, 532.42, 599.42, 159.94 +196809, 219.22, 352.90, 534.16, 601.09, 162.37 +196810, 233.06, 365.95, 551.92, 606.01, 173.05 +196811, 239.55, 371.48, 541.87, 605.38, 174.68 +196812, 245.83, 394.13, 582.99, 652.35, 187.67 +196901, 237.81, 384.57, 548.48, 644.73, 180.39 +196902, 236.32, 384.67, 534.04, 642.85, 182.63 +196903, 224.40, 363.46, 512.64, 607.74, 166.81 +196904, 232.84, 374.24, 531.50, 627.39, 168.75 +196905, 238.13, 376.81, 563.10, 658.00, 171.56 +196906, 237.81, 378.86, 558.38, 667.04, 167.02 +196907, 211.17, 341.90, 504.97, 595.72, 178.64 +196908, 195.33, 320.90, 481.23, 579.00, 162.41 +196909, 204.28, 332.49, 507.20, 630.89, 176.66 +196910, 204.15, 316.56, 500.60, 643.55, 174.69 +196911, 216.38, 330.65, 528.01, 700.85, 188.39 +196912, 207.43, 317.19, 522.96, 701.13, 177.84 +197001, 202.52, 311.36, 512.15, 730.42, 170.75 +197002, 190.16, 286.55, 476.20, 678.05, 155.48 +197003, 199.62, 301.65, 495.53, 713.84, 169.67 +197004, 201.16, 301.73, 490.43, 677.02, 166.67 +197005, 180.94, 275.17, 433.50, 608.34, 143.54 +197006, 166.74, 262.26, 398.68, 558.94, 131.20 +197007, 152.08, 245.59, 353.45, 525.38, 119.60 +197008, 166.16, 264.93, 373.04, 536.43, 129.35 +197009, 173.91, 277.55, 394.32, 544.27, 136.05 +197010, 182.44, 285.14, 422.06, 587.35, 146.34 +197011, 180.34, 282.52, 406.32, 587.21, 137.46 +197012, 190.80, 295.85, 425.52, 606.97, 142.61 +197101, 203.02, 314.80, 449.45, 638.98, 154.46 +197102, 214.98, 326.10, 480.87, 672.24, 166.54 +197103, 221.60, 330.19, 476.95, 679.39, 170.40 +197104, 232.36, 340.78, 504.56, 703.65, 181.23 +197105, 245.06, 351.41, 515.93, 733.12, 188.93 +197106, 236.57, 339.50, 486.79, 715.75, 180.90 +197107, 230.92, 341.21, 478.91, 717.02, 169.36 +197108, 222.31, 328.62, 445.75, 700.70, 162.68 +197109, 236.50, 335.61, 458.75, 721.35, 173.93 +197110, 236.55, 332.10, 457.82, 725.67, 172.81 +197111, 225.97, 317.10, 441.89, 707.07, 165.35 +197112, 225.52, 314.26, 444.86, 709.33, 164.60 +197201, 245.17, 343.23, 490.39, 764.77, 177.28 +197202, 250.35, 350.01, 519.36, 784.02, 183.01 +197203, 258.61, 359.01, 523.50, 838.02, 187.07 +197204, 263.97, 357.88, 526.32, 841.43, 191.30 +197205, 264.84, 359.49, 529.77, 850.37, 195.31 +197206, 264.80, 365.65, 542.58, 877.95, 193.95 +197207, 247.55, 355.98, 532.11, 790.93, 185.77 +197208, 243.11, 356.15, 536.87, 815.22, 180.30 +197209, 245.96, 374.34, 552.84, 822.05, 187.25 +197210, 242.52, 371.09, 554.79, 818.11, 183.59 +197211, 241.70, 379.71, 545.91, 830.16, 185.47 +197212, 257.32, 398.01, 564.57, 823.58, 195.12 +197301, 260.72, 400.58, 574.14, 854.02, 193.27 +197302, 248.13, 395.22, 580.29, 843.57, 179.02 +197303, 234.33, 379.26, 
558.58, 835.64, 168.07 +197304, 227.02, 381.55, 554.20, 841.83, 167.89 +197305, 212.30, 370.21, 530.52, 795.02, 154.96 +197306, 206.66, 363.86, 521.06, 801.88, 147.39 +197307, 121.94, 250.38, 254.93, 420.84, 73.87 +197308, 133.23, 259.24, 265.06, 456.14, 81.08 +197309, 128.79, 253.26, 254.98, 423.46, 79.37 +197310, 138.04, 269.76, 257.99, 436.19, 85.28 +197311, 134.01, 273.06, 261.25, 443.99, 84.59 +197312, 112.10, 242.84, 240.36, 408.42, 73.86 +197401, 111.09, 252.99, 234.23, 398.78, 76.32 +197402, 116.52, 249.00, 238.87, 394.08, 77.49 +197403, 116.16, 246.85, 241.37, 388.12, 78.27 +197404, 114.07, 241.76, 235.61, 392.57, 75.54 +197405, 110.70, 230.28, 227.08, 389.58, 70.78 +197406, 110.45, 220.32, 218.93, 393.99, 64.85 +197407, 110.34, 216.50, 217.58, 373.34, 63.65 +197408, 97.77, 207.28, 200.56, 322.64, 59.59 +197409, 87.94, 186.86, 194.41, 297.45, 53.23 +197410, 77.47, 168.12, 172.61, 251.11, 48.00 +197411, 85.81, 196.47, 196.99, 325.16, 57.75 +197412, 81.48, 186.65, 187.68, 319.87, 56.07 +197501, 79.22, 183.19, 183.88, 308.98, 53.61 +197502, 96.90, 206.27, 209.62, 313.69, 63.37 +197503, 100.93, 215.01, 229.11, 365.30, 65.03 +197504, 109.01, 219.05, 230.28, 369.65, 67.58 +197505, 112.58, 235.12, 237.08, 371.84, 69.89 +197506, 116.64, 248.58, 247.36, 394.66, 73.68 +197507, 129.27, 264.15, 262.62, 383.32, 79.61 +197508, 122.68, 250.19, 243.00, 337.00, 74.78 +197509, 119.55, 247.42, 234.12, 324.34, 70.83 +197510, 116.06, 238.11, 231.82, 304.86, 66.16 +197511, 127.53, 245.85, 249.81, 342.09, 68.74 +197512, 132.76, 248.77, 260.09, 358.29, 71.13 +197601, 130.69, 247.29, 258.62, 340.28, 70.79 +197602, 146.30, 280.09, 297.96, 366.47, 80.06 +197603, 149.29, 277.76, 301.63, 341.04, 82.37 +197604, 153.51, 285.32, 308.07, 362.91, 83.74 +197605, 151.65, 285.12, 306.26, 349.84, 82.51 +197606, 145.69, 286.64, 302.14, 338.48, 81.03 +197607, 145.30, 290.42, 311.88, 335.43, 82.89 +197608, 143.67, 287.85, 310.29, 334.03, 82.74 +197609, 144.15, 285.32, 311.39, 339.49, 81.62 +197610, 147.20, 293.95, 319.91, 351.72, 82.35 +197611, 145.33, 286.85, 312.40, 338.08, 81.99 +197612, 146.99, 287.23, 318.06, 305.88, 83.29 +197701, 155.15, 305.41, 331.42, 330.84, 88.96 +197702, 147.80, 296.74, 326.57, 303.16, 85.32 +197703, 144.20, 288.79, 325.07, 303.40, 83.72 +197704, 140.96, 286.92, 323.08, 299.69, 83.41 +197705, 141.12, 291.30, 317.98, 285.93, 85.18 +197706, 138.55, 287.23, 314.59, 282.43, 84.15 +197707, 144.87, 299.60, 308.95, 281.48, 85.42 +197708, 144.88, 293.79, 308.61, 278.95, 85.23 +197709, 146.81, 283.09, 305.88, 289.87, 84.15 +197710, 147.40, 283.62, 307.67, 287.05, 84.09 +197711, 142.45, 272.74, 296.67, 283.21, 79.79 +197712, 146.79, 281.93, 306.25, 300.81, 84.73 +197801, 146.26, 285.72, 314.83, 306.86, 84.89 +197802, 137.66, 268.98, 304.06, 303.79, 80.52 +197803, 137.43, 262.92, 300.18, 292.75, 80.45 +197804, 143.37, 270.92, 302.37, 304.58, 84.60 +197805, 156.47, 291.79, 331.64, 334.69, 92.46 +197806, 159.44, 298.38, 331.70, 352.12, 94.71 +197807, 156.71, 292.66, 322.80, 353.39, 91.74 +197808, 163.50, 309.56, 342.03, 387.24, 97.95 +197809, 170.22, 319.85, 357.59, 388.89, 104.22 +197810, 168.68, 320.81, 349.99, 382.06, 102.81 +197811, 148.74, 287.25, 321.23, 332.56, 88.41 +197812, 150.28, 297.19, 332.43, 347.71, 90.08 +197901, 150.87, 301.15, 347.87, 361.28, 91.13 +197902, 158.62, 317.71, 362.77, 366.03, 96.58 +197903, 151.52, 309.05, 351.45, 349.71, 92.98 +197904, 159.46, 329.88, 369.07, 369.47, 100.26 +197905, 161.66, 332.15, 370.31, 372.47, 101.72 +197906, 157.62, 324.16, 362.34, 363.01, 101.11 
+197907, 160.77, 332.80, 340.41, 356.39, 105.28 +197908, 164.27, 339.44, 336.94, 356.48, 109.01 +197909, 175.66, 361.41, 348.19, 387.34, 115.05 +197910, 174.32, 366.15, 342.05, 391.11, 113.98 +197911, 158.92, 343.64, 319.41, 373.49, 102.21 +197912, 162.33, 365.37, 339.90, 403.96, 109.17 +198001, 167.99, 375.27, 342.06, 415.13, 113.50 +198002, 175.74, 408.80, 357.50, 409.34, 119.84 +198003, 166.05, 421.93, 344.64, 390.70, 114.84 +198004, 151.45, 363.15, 312.35, 379.01, 100.67 +198005, 156.31, 385.73, 323.37, 402.38, 106.73 +198006, 167.68, 403.32, 337.04, 433.83, 115.09 +198007, 168.56, 404.29, 319.12, 383.14, 115.79 +198008, 185.09, 423.89, 351.38, 416.47, 123.71 +198009, 186.91, 431.45, 366.58, 417.49, 127.32 +198010, 187.89, 449.00, 370.89, 434.82, 132.02 +198011, 187.00, 464.88, 372.36, 428.42, 134.18 +198012, 191.02, 536.00, 395.27, 467.80, 143.08 +198101, 190.95, 502.68, 384.54, 488.68, 143.11 +198102, 193.16, 472.70, 370.64, 483.44, 138.44 +198103, 200.37, 478.30, 371.10, 492.94, 140.38 +198104, 217.45, 489.07, 387.00, 517.60, 152.54 +198105, 222.03, 467.92, 395.47, 540.53, 151.60 +198106, 227.34, 458.57, 411.77, 560.58, 155.30 +198107, 215.49, 423.67, 329.48, 441.15, 143.07 +198108, 210.14, 433.81, 322.20, 427.90, 139.45 +198109, 196.57, 407.60, 301.72, 392.22, 130.18 +198110, 191.27, 369.09, 297.61, 389.41, 121.29 +198111, 199.81, 386.33, 314.98, 422.50, 129.81 +198112, 204.95, 406.12, 318.65, 436.78, 136.29 +198201, 202.38, 389.59, 317.37, 437.05, 130.93 +198202, 202.21, 370.14, 325.23, 442.78, 126.26 +198203, 197.59, 342.57, 304.93, 432.61, 121.24 +198204, 204.16, 330.66, 300.19, 416.25, 119.09 +198205, 216.63, 343.33, 314.30, 449.44, 121.57 +198206, 211.36, 335.61, 293.56, 432.95, 114.71 +198207, 211.37, 306.19, 258.39, 374.43, 109.98 +198208, 211.71, 292.61, 261.88, 385.01, 106.83 +198209, 235.16, 325.02, 291.36, 432.41, 118.74 +198210, 243.22, 327.41, 295.68, 442.57, 124.16 +198211, 277.67, 356.12, 339.04, 481.12, 144.35 +198212, 299.63, 357.54, 368.07, 506.36, 155.31 +198301, 300.74, 366.63, 375.13, 514.35, 152.69 +198302, 306.52, 383.73, 412.23, 523.51, 156.99 +198303, 322.45, 389.09, 419.45, 538.27, 164.08 +198304, 340.45, 399.13, 422.54, 568.80, 171.93 +198305, 369.09, 428.32, 452.55, 604.55, 186.53 +198306, 366.53, 433.05, 461.78, 584.80, 191.42 +198307, 365.14, 439.60, 429.65, 531.60, 195.05 +198308, 353.66, 432.14, 408.30, 500.98, 188.22 +198309, 346.07, 444.36, 407.24, 498.34, 186.62 +198310, 359.89, 447.13, 416.00, 504.47, 189.75 +198311, 357.22, 442.56, 389.99, 492.98, 182.40 +198312, 369.82, 450.87, 399.90, 475.69, 191.18 +198401, 365.47, 449.04, 398.84, 460.22, 186.60 +198402, 353.53, 459.03, 386.21, 459.94, 183.85 +198403, 336.29, 439.42, 365.42, 452.93, 174.85 +198404, 339.67, 446.90, 364.13, 449.17, 179.18 +198405, 342.95, 453.79, 364.81, 459.06, 175.80 +198406, 329.03, 430.26, 345.77, 432.57, 163.92 +198407, 299.94, 398.50, 254.48, 312.15, 169.36 +198408, 301.32, 386.64, 251.59, 301.91, 166.79 +198409, 329.64, 429.32, 281.86, 332.65, 183.71 +198410, 332.85, 435.19, 276.05, 319.95, 188.34 +198411, 336.79, 429.65, 273.46, 328.31, 190.83 +198412, 329.83, 428.18, 265.41, 327.33, 190.28 +198501, 337.68, 435.23, 273.51, 338.88, 197.08 +198502, 367.97, 464.97, 304.47, 370.75, 218.24 +198503, 375.55, 474.26, 303.33, 382.68, 222.85 +198504, 376.25, 477.62, 295.13, 398.39, 222.20 +198505, 372.84, 481.57, 289.68, 392.38, 226.34 +198506, 405.91, 499.04, 301.46, 427.15, 241.35 +198507, 379.43, 460.85, 283.56, 376.50, 249.97 +198508, 375.16, 461.76, 290.21, 377.74, 249.84 
+198509, 373.27, 462.93, 287.11, 374.03, 249.87 +198510, 354.56, 446.57, 272.63, 360.19, 237.71 +198511, 371.14, 469.53, 283.83, 362.18, 253.33 +198512, 400.07, 493.43, 308.60, 381.50, 272.63 +198601, 425.59, 503.92, 332.33, 399.64, 284.31 +198602, 433.93, 506.37, 326.52, 408.70, 295.57 +198603, 477.45, 539.61, 344.71, 435.16, 320.60 +198604, 519.18, 565.55, 351.45, 492.70, 338.93 +198605, 520.89, 557.07, 364.58, 496.58, 328.11 +198606, 560.76, 585.57, 372.09, 536.02, 346.10 +198607, 491.45, 541.09, 340.89, 477.05, 354.42 +198608, 453.96, 525.42, 319.89, 454.75, 333.00 +198609, 469.58, 574.74, 342.57, 474.58, 356.75 +198610, 428.47, 542.54, 313.60, 416.65, 325.85 +198611, 462.31, 569.03, 321.83, 455.26, 340.46 +198612, 471.60, 581.53, 335.70, 464.85, 339.11 +198701, 453.99, 578.82, 326.89, 459.61, 332.66 +198702, 524.53, 656.13, 370.82, 525.30, 371.87 +198703, 557.91, 664.16, 394.95, 579.85, 388.80 +198704, 571.79, 691.80, 404.69, 586.83, 388.09 +198705, 568.89, 681.56, 414.63, 572.18, 370.70 +198706, 572.13, 683.55, 420.72, 577.61, 371.56 +198707, 551.64, 676.02, 405.26, 494.61, 314.50 +198708, 588.96, 701.49, 416.70, 525.27, 326.66 +198709, 615.73, 721.92, 445.97, 536.69, 339.32 +198710, 588.04, 716.61, 444.31, 524.06, 332.55 +198711, 444.50, 568.11, 348.92, 414.54, 255.31 +198712, 410.26, 533.91, 316.05, 379.93, 238.63 +198801, 445.84, 573.74, 345.41, 401.79, 251.41 +198802, 465.92, 599.30, 352.94, 429.75, 267.74 +198803, 502.84, 626.52, 373.94, 448.09, 278.55 +198804, 494.44, 621.60, 358.08, 431.95, 274.48 +198805, 506.11, 634.87, 366.60, 423.08, 274.16 +198806, 512.85, 637.24, 369.53, 422.00, 277.11 +198807, 436.20, 675.34, 430.49, 417.39, 245.88 +198808, 439.83, 672.47, 419.52, 419.48, 245.03 +198809, 426.16, 654.06, 396.76, 419.55, 244.10 +198810, 457.20, 668.25, 417.38, 438.32, 255.20 +198811, 477.89, 676.66, 423.56, 444.21, 255.67 +198812, 470.15, 671.21, 417.71, 436.97, 250.19 +198901, 471.55, 690.95, 434.58, 447.46, 250.65 +198902, 498.60, 732.53, 470.87, 479.60, 267.93 +198903, 491.68, 718.30, 462.48, 473.61, 267.53 +198904, 510.57, 730.46, 464.63, 488.74, 277.67 +198905, 536.49, 763.77, 499.42, 517.75, 291.46 +198906, 542.82, 791.89, 521.91, 536.61, 303.71 +198907, 506.49, 756.68, 483.06, 485.13, 301.31 +198908, 556.18, 805.29, 513.93, 532.84, 324.92 +198909, 565.26, 828.76, 524.27, 535.97, 337.32 +198910, 559.55, 823.33, 531.43, 549.16, 344.78 +198911, 542.40, 810.37, 515.70, 522.08, 328.90 +198912, 551.82, 835.40, 521.61, 545.51, 328.84 +199001, 558.26, 871.65, 537.34, 551.81, 324.88 +199002, 509.85, 824.21, 493.85, 514.96, 295.74 +199003, 519.19, 839.35, 503.42, 505.50, 303.54 +199004, 544.59, 856.07, 528.24, 526.52, 303.15 +199005, 540.26, 826.28, 517.53, 530.28, 294.13 +199006, 594.61, 884.48, 575.89, 599.13, 323.22 +199007, 569.23, 843.53, 539.21, 608.71, 305.28 +199008, 558.28, 866.54, 509.49, 627.85, 296.25 +199009, 503.57, 796.47, 452.24, 585.63, 263.98 +199010, 471.82, 762.46, 443.40, 562.10, 236.66 +199011, 474.50, 761.01, 445.19, 574.39, 221.81 +199012, 511.99, 793.26, 475.07, 634.63, 245.97 +199101, 528.89, 813.36, 490.46, 651.10, 259.72 +199102, 567.95, 833.47, 526.19, 684.98, 278.54 +199103, 624.20, 884.28, 551.55, 758.03, 306.65 +199104, 662.50, 900.74, 554.21, 808.16, 317.04 +199105, 667.32, 915.80, 551.64, 800.81, 323.65 +199106, 714.46, 947.06, 559.00, 838.99, 341.90 +199107, 668.13, 860.00, 534.78, 723.66, 311.65 +199108, 706.25, 897.84, 559.07, 786.36, 329.55 +199109, 736.21, 914.96, 569.19, 812.62, 341.56 +199110, 714.45, 907.76, 562.23, 810.61, 340.26 
+199111, 708.18, 922.00, 576.24, 868.51, 349.19 +199112, 701.75, 878.59, 552.78, 844.96, 331.61 +199201, 798.29, 947.24, 609.74, 986.36, 376.76 +199202, 796.55, 942.23, 623.36, 935.17, 383.23 +199203, 823.80, 947.96, 633.96, 917.64, 395.99 +199204, 817.27, 935.64, 616.69, 866.44, 389.45 +199205, 819.10, 981.34, 633.29, 851.75, 392.16 +199206, 829.32, 991.72, 630.59, 868.92, 400.29 +199207, 745.97, 943.91, 601.03, 676.77, 401.69 +199208, 780.29, 991.08, 629.42, 725.64, 417.47 +199209, 780.62, 978.36, 616.88, 701.03, 404.42 +199210, 800.96, 990.76, 633.75, 669.01, 420.00 +199211, 816.93, 988.98, 640.73, 699.87, 435.82 +199212, 866.18, 999.55, 676.37, 732.02, 463.47 +199301, 871.94, 1017.36, 696.10, 722.15, 484.31 +199302, 879.88, 1031.61, 722.35, 672.86, 503.43 +199303, 868.66, 1066.54, 740.72, 613.92, 506.90 +199304, 886.07, 1092.57, 761.84, 619.06, 528.55 +199305, 822.69, 1096.39, 736.55, 630.11, 512.60 +199306, 853.11, 1112.56, 785.35, 655.60, 522.19 +199307, 772.52, 1077.85, 750.48, 573.46, 526.72 +199308, 772.68, 1087.47, 745.11, 541.30, 539.76 +199309, 801.26, 1126.95, 790.08, 560.18, 560.02 +199310, 795.48, 1117.45, 795.26, 561.51, 570.41 +199311, 840.85, 1125.03, 812.34, 596.33, 558.33 +199312, 842.81, 1103.22, 787.38, 602.28, 542.85 +199401, 847.18, 1130.98, 801.18, 617.40, 560.83 +199402, 862.32, 1168.81, 829.15, 627.87, 587.40 +199403, 858.82, 1142.18, 819.88, 584.93, 567.24 +199404, 817.60, 1095.22, 790.42, 545.24, 543.52 +199405, 821.76, 1112.60, 792.87, 560.59, 553.32 +199406, 807.93, 1114.75, 811.11, 585.96, 572.03 +199407, 704.19, 973.64, 707.29, 537.94, 505.68 +199408, 716.33, 1019.93, 731.98, 546.65, 520.86 +199409, 744.32, 1049.07, 774.74, 605.15, 540.80 +199410, 733.36, 1028.21, 765.27, 616.77, 516.19 +199411, 744.09, 1052.35, 796.38, 626.64, 517.92 +199412, 724.12, 1000.06, 771.47, 630.69, 491.94 +199501, 723.10, 1025.18, 792.31, 637.15, 500.59 +199502, 730.79, 1036.39, 804.94, 678.87, 526.46 +199503, 749.82, 1082.43, 845.22, 695.80, 554.14 +199504, 773.62, 1113.77, 874.27, 716.23, 565.88 +199505, 781.06, 1145.40, 922.67, 734.23, 579.32 +199506, 815.17, 1191.38, 944.27, 753.11, 607.30 +199507, 804.89, 1145.83, 969.69, 758.49, 572.97 +199508, 828.12, 1175.14, 1039.32, 795.46, 601.11 +199509, 826.30, 1167.21, 1060.94, 810.59, 625.66 +199510, 860.52, 1200.81, 1095.20, 874.31, 653.05 +199511, 840.33, 1188.68, 1103.67, 887.46, 640.79 +199512, 887.86, 1250.49, 1110.31, 927.32, 681.43 +199601, 897.41, 1282.28, 1105.69, 985.88, 686.45 +199602, 916.30, 1315.97, 1132.54, 1047.45, 710.62 +199603, 952.75, 1322.35, 1155.75, 1035.98, 727.58 +199604, 976.41, 1365.00, 1123.82, 1043.49, 744.07 +199605, 999.93, 1386.90, 1201.47, 1041.41, 736.81 +199606, 1052.62, 1415.51, 1237.73, 1084.72, 750.04 +199607, 980.27, 1374.93, 1054.81, 969.77, 735.16 +199608, 929.81, 1314.36, 972.09, 913.58, 704.04 +199609, 957.00, 1358.43, 1000.37, 958.42, 734.15 +199610, 994.72, 1419.15, 1080.09, 1033.02, 771.11 +199611, 1003.56, 1468.50, 1063.20, 1024.13, 805.32 +199612, 1050.57, 1547.48, 1174.10, 1093.28, 869.05 +199701, 1033.36, 1541.58, 1165.41, 1073.87, 856.08 +199702, 1072.02, 1604.48, 1242.44, 1181.53, 907.11 +199703, 1110.76, 1584.64, 1192.18, 1195.29, 932.68 +199704, 1076.68, 1567.74, 1123.18, 1112.12, 884.59 +199705, 1120.85, 1614.64, 1186.79, 1183.33, 934.70 +199706, 1189.48, 1729.44, 1301.41, 1269.78, 993.90 +199707, 1144.81, 1725.90, 1215.07, 1267.15, 966.11 +199708, 1221.81, 1846.76, 1367.15, 1297.22, 1060.59 +199709, 1185.13, 1777.04, 1334.61, 1224.04, 1012.43 +199710, 1260.19, 1856.92, 
1402.53, 1318.76, 1096.38 +199711, 1235.62, 1795.97, 1329.96, 1327.43, 1069.37 +199712, 1299.30, 1846.48, 1394.00, 1352.66, 1102.56 +199801, 1338.62, 1874.04, 1385.27, 1406.74, 1164.57 +199802, 1341.86, 1854.67, 1467.22, 1493.62, 1140.72 +199803, 1464.30, 1973.78, 1593.56, 1580.54, 1239.64 +199804, 1572.55, 2081.35, 1670.57, 1654.39, 1327.43 +199805, 1561.23, 2116.52, 1720.77, 1691.77, 1353.28 +199806, 1600.81, 2062.81, 1639.61, 1659.13, 1320.02 +199807, 1531.15, 2050.67, 1718.55, 1659.53, 1329.58 +199808, 1497.94, 1936.86, 1756.51, 1665.15, 1304.65 +199809, 1290.60, 1728.19, 1484.79, 1468.25, 1033.01 +199810, 1321.23, 1818.31, 1688.35, 1657.29, 1069.92 +199811, 1484.23, 1954.37, 1824.53, 1750.81, 1120.96 +199812, 1580.38, 2004.03, 2006.19, 1879.99, 1179.98 +199901, 1672.88, 2047.57, 2332.04, 1996.22, 1225.91 +199902, 1694.16, 1982.46, 2662.43, 2012.85, 1237.44 +199903, 1661.96, 1942.77, 2440.47, 2027.70, 1248.32 +199904, 1692.60, 2069.68, 2566.25, 2110.09, 1289.93 +199905, 1755.72, 2302.50, 2679.11, 2007.78, 1386.24 +199906, 1737.36, 2268.14, 2698.96, 1970.86, 1341.13 +199907, 1805.88, 2273.32, 2931.21, 2088.35, 1499.77 +199908, 1742.56, 2277.48, 2907.32, 2021.91, 1418.72 +199909, 1670.19, 2311.36, 2979.04, 2113.68, 1352.28 +199910, 1632.43, 2245.19, 3040.48, 1991.16, 1295.89 +199911, 1708.15, 2339.36, 3154.44, 2211.15, 1449.69 +199912, 1741.48, 2335.91, 3528.21, 2295.96, 1422.75 +200001, 1817.99, 2417.59, 4163.54, 2184.96, 1447.55 +200002, 1685.70, 2302.16, 3989.61, 2365.49, 1384.31 +200003, 1601.89, 2208.32, 4560.20, 2326.37, 1291.03 +200004, 1785.11, 2400.49, 4816.43, 2332.04, 1499.84 +200005, 1755.71, 2421.52, 4365.68, 2404.04, 1469.57 +200006, 1757.65, 2453.23, 3906.06, 2509.97, 1525.51 +200007, 1606.37, 2033.64, 4053.91, 2810.12, 1761.61 +200008, 1620.95, 2019.64, 3787.80, 2635.70, 1868.12 +200009, 1611.02, 2193.50, 4187.93, 2752.71, 2050.72 +200010, 1660.61, 2204.84, 3674.28, 2881.52, 2066.13 +200011, 1705.98, 2230.91, 3483.36, 2962.98, 2064.77 +200012, 1719.36, 2193.60, 2732.58, 3045.55, 1950.36 +200101, 1825.20, 2358.93, 2565.23, 3211.79, 2074.22 +200102, 1915.86, 2295.92, 3008.17, 2957.43, 2043.77 +200103, 1875.00, 2258.17, 2339.00, 2964.35, 1960.11 +200104, 1820.38, 2199.71, 2090.21, 2727.90, 1879.81 +200105, 1914.16, 2398.47, 2432.04, 2876.77, 2015.19 +200106, 1976.58, 2454.25, 2388.94, 2972.33, 2105.76 +200107, 1971.00, 2252.67, 2514.50, 2859.32, 2092.46 +200108, 2042.72, 2233.95, 2406.07, 2942.37, 2061.96 +200109, 1975.02, 2200.05, 2160.25, 2855.59, 1959.80 +200110, 1831.46, 1987.28, 1863.65, 2851.50, 1811.89 +200111, 1870.91, 2026.29, 2021.82, 2894.30, 1804.93 +200112, 2035.28, 2090.26, 2324.26, 3054.85, 1958.41 +200201, 2100.75, 2175.69, 2346.62, 2976.49, 2050.04 +200202, 2167.31, 2162.26, 2316.67, 2910.00, 2037.82 +200203, 2218.26, 2245.14, 2089.70, 2901.38, 2054.60 +200204, 2306.00, 2397.57, 2212.64, 2920.71, 2179.66 +200205, 2321.44, 2367.93, 1969.02, 2726.61, 2137.18 +200206, 2321.15, 2329.10, 1943.01, 2664.70, 2133.37 +200207, 2211.18, 2334.52, 1691.52, 2311.15, 2080.35 +200208, 2005.30, 2118.16, 1529.30, 2257.74, 1952.57 +200209, 2054.40, 2122.39, 1546.88, 2280.64, 1993.36 +200210, 1874.26, 1927.37, 1333.18, 2152.53, 1789.33 +200211, 1980.40, 1996.08, 1619.76, 2292.17, 1916.77 +200212, 2012.85, 2098.55, 1864.41, 2379.33, 2004.29 +200301, 1924.61, 2080.09, 1661.42, 2291.13, 1915.19 +200302, 1852.18, 2018.81, 1642.34, 2288.54, 1884.67 +200303, 1816.74, 1994.66, 1642.00, 2261.03, 1839.55 +200304, 1845.65, 2045.19, 1658.48, 2388.95, 1866.60 +200305, 2008.26, 2137.80, 
1831.65, 2395.06, 2088.20 +200306, 2138.30, 2279.37, 2011.54, 2489.83, 2212.24 +200307, 2146.18, 2302.04, 2123.47, 2739.13, 2188.87 +200308, 2187.61, 2336.40, 2216.56, 2801.21, 2285.06 +200309, 2295.56, 2440.22, 2348.41, 2736.43, 2312.61 +200310, 2249.42, 2396.41, 2317.39, 2747.07, 2321.43 +200311, 2436.99, 2538.86, 2519.01, 2774.27, 2478.25 +200312, 2487.24, 2574.94, 2575.10, 2813.05, 2526.69 +200401, 2526.09, 2808.05, 2669.90, 2973.35, 2648.07 +200402, 2551.91, 2814.68, 2786.56, 3075.20, 2743.00 +200403, 2685.17, 2888.93, 2738.94, 3113.09, 2805.80 +200404, 2686.72, 2897.68, 2701.89, 2988.37, 2790.91 +200405, 2693.33, 2903.65, 2601.29, 3102.12, 2651.73 +200406, 2691.39, 2942.78, 2681.87, 3095.21, 2718.46 +200407, 2674.51, 3075.20, 2740.51, 2841.74, 2711.50 +200408, 2559.99, 3061.56, 2569.15, 2681.47, 2631.00 +200409, 2565.60, 3083.80, 2501.62, 2735.11, 2691.04 +200410, 2592.28, 3212.97, 2575.28, 2694.20, 2730.07 +200411, 2674.05, 3222.83, 2670.92, 2627.78, 2766.69 +200412, 2805.88, 3439.40, 2802.34, 2643.68, 2900.81 +200501, 2931.64, 3485.40, 2892.03, 2802.45, 3032.34 +200502, 2898.98, 3518.32, 2722.03, 2698.11, 3002.16 +200503, 2928.10, 3814.64, 2731.16, 2759.52, 3026.92 +200504, 2907.75, 3776.95, 2668.00, 2746.75, 2960.40 +200505, 2809.22, 3636.13, 2576.52, 2860.39, 2917.28 +200506, 2965.16, 3725.84, 2761.64, 2908.72, 3030.77 +200507, 2908.81, 3641.93, 2664.35, 2710.51, 3027.35 +200508, 3052.68, 3843.49, 2818.27, 2835.84, 3107.54 +200509, 2963.84, 3880.03, 2780.35, 2834.58, 3066.25 +200510, 2938.23, 4035.75, 2800.51, 2794.79, 3113.61 +200511, 2965.82, 3744.15, 2759.55, 2719.04, 3154.90 +200512, 3053.56, 3842.04, 2925.35, 2779.49, 3311.44 +200601, 3064.52, 3886.55, 2880.36, 2850.31, 3329.09 +200602, 3125.14, 4174.58, 3016.00, 2926.04, 3377.05 +200603, 3187.22, 4068.48, 3035.44, 2965.12, 3412.97 +200604, 3265.03, 4153.72, 3127.75, 2934.49, 3488.22 +200605, 3253.41, 4276.69, 3121.43, 2840.49, 3568.02 +200606, 3247.23, 4160.26, 2943.09, 2786.35, 3465.54 +200607, 3076.42, 4130.35, 2892.72, 2621.61, 3301.58 +200608, 3052.47, 4182.26, 2841.02, 2735.41, 3291.18 +200609, 3138.17, 4167.26, 3028.51, 2795.54, 3354.84 +200610, 3254.69, 4139.69, 3145.96, 2837.97, 3472.34 +200611, 3384.50, 4360.27, 3307.38, 2889.17, 3573.17 +200612, 3387.22, 4583.78, 3399.93, 2875.92, 3630.73 +200701, 3438.45, 4598.15, 3415.63, 2894.25, 3756.09 +200702, 3554.57, 4669.47, 3433.79, 3017.93, 3821.31 +200703, 3527.51, 4685.65, 3361.62, 2932.95, 3755.95 +200704, 3543.53, 4834.80, 3420.70, 2929.52, 3753.58 +200705, 3658.60, 5116.71, 3581.37, 3150.87, 3904.19 +200706, 3770.97, 5360.11, 3779.70, 3183.48, 4037.56 +200707, 3589.42, 5011.84, 3785.22, 2836.67, 3678.79 +200708, 3409.27, 4970.72, 3753.98, 2732.75, 3474.81 +200709, 3487.39, 4999.25, 3855.16, 2808.90, 3531.48 +200710, 3549.42, 5299.65, 4009.20, 2901.09, 3626.89 +200711, 3625.86, 5406.51, 4200.52, 2953.65, 3648.77 +200712, 3579.26, 5257.26, 3893.80, 2964.46, 3433.47 +200801, 3519.40, 5441.99, 3965.11, 2851.91, 3329.92 +200802, 3421.59, 5016.53, 3542.10, 2731.05, 3256.65 +200803, 3327.22, 5161.06, 3419.15, 2709.54, 3015.11 +200804, 3364.41, 5133.46, 3457.92, 2669.79, 2971.11 +200805, 3478.25, 5532.50, 3675.33, 2711.92, 3090.91 +200806, 3576.26, 5754.66, 3909.54, 2774.27, 3026.00 +200807, 3068.04, 5238.34, 3386.01, 2531.68, 2457.80 +200808, 3137.97, 4826.38, 3333.99, 2727.09, 2585.36 +200809, 3269.11, 4863.65, 3448.43, 2792.05, 2602.09 +200810, 3123.87, 4250.98, 3053.33, 2636.20, 2437.56 +200811, 2641.01, 3502.78, 2530.66, 2368.48, 1966.74 +200812, 2415.76, 3404.66, 
2325.15, 2183.08, 1723.73 +200901, 2485.18, 3388.76, 2395.83, 2334.84, 1737.59 +200902, 2317.27, 3210.89, 2274.16, 2310.52, 1417.01 +200903, 2205.29, 2788.82, 2160.39, 2066.64, 1213.13 +200904, 2392.16, 2952.73, 2390.10, 2056.23, 1378.87 +200905, 2621.37, 3231.26, 2690.63, 2043.50, 1613.29 +200906, 2679.26, 3476.37, 2770.74, 2202.01, 1734.73 +200907, 2685.98, 3353.01, 2865.73, 2259.28, 1798.68 +200908, 2894.23, 3622.87, 3103.34, 2397.27, 1970.01 +200909, 2978.76, 3682.49, 3176.73, 2461.30, 2130.69 +200910, 3075.33, 3876.46, 3353.75, 2532.57, 2209.56 +200911, 3074.09, 3852.29, 3305.48, 2324.33, 2096.34 +200912, 3242.83, 4077.70, 3482.17, 2429.94, 2227.26 +201001, 3325.52, 4164.65, 3712.30, 2517.32, 2255.99 +201002, 3273.12, 3990.99, 3444.94, 2536.37, 2255.17 +201003, 3406.22, 4124.79, 3617.10, 2555.01, 2314.60 +201004, 3623.52, 4333.08, 3878.34, 2667.66, 2512.18 +201005, 3675.76, 4480.36, 4017.30, 2607.51, 2577.28 +201006, 3468.78, 4091.03, 3727.03, 2400.34, 2361.38 +201007, 3157.93, 3754.20, 3400.66, 2343.76, 2327.21 +201008, 3369.69, 4095.58, 3683.29, 2428.24, 2513.85 +201009, 3267.77, 3921.74, 3494.89, 2392.06, 2335.98 +201010, 3588.71, 4296.09, 3914.13, 2608.39, 2537.84 +201011, 3712.30, 4485.82, 4194.50, 2668.72, 2597.45 +201012, 3822.29, 4599.78, 4179.94, 2592.71, 2621.55 +201101, 3977.10, 4983.40, 4479.39, 2769.96, 2884.80 +201102, 3931.78, 5213.39, 4636.05, 2764.21, 2963.12 +201103, 4052.25, 5475.10, 4829.81, 2869.45, 3050.85 +201104, 4149.79, 5598.27, 4811.60, 2940.03, 3028.96 +201105, 4358.18, 5751.62, 4999.71, 3103.41, 3088.18 +201106, 4422.76, 5603.77, 4961.95, 3169.26, 3016.05 +201107, 4261.73, 5428.93, 4624.90, 2998.91, 2986.52 +201108, 4219.75, 5313.29, 4577.41, 2886.55, 2894.32 +201109, 4098.82, 4943.00, 4301.73, 2794.55, 2658.38 +201110, 3885.79, 4453.66, 4089.88, 2685.84, 2368.56 +201111, 4254.00, 5060.86, 4559.07, 2840.24, 2702.74 +201112, 4277.05, 5154.31, 4513.93, 2874.95, 2663.75 +201201, 4342.35, 5180.29, 4512.55, 2983.27, 2716.24 +201202, 4494.59, 5359.94, 4871.93, 3072.99, 2924.00 +201203, 4667.51, 5583.15, 5204.84, 3129.02, 3054.29 +201204, 4852.63, 5550.04, 5446.55, 3265.67, 3210.29 +201205, 4835.65, 5517.90, 5386.39, 3305.88, 3169.52 +201206, 4677.52, 5071.28, 5042.98, 3195.30, 2936.01 +201207, 4560.82, 5036.26, 4966.10, 3257.10, 3018.07 +201208, 4640.20, 5130.96, 5053.09, 3306.36, 2984.19 +201209, 4737.43, 5229.45, 5285.43, 3361.24, 3076.98 +201210, 4833.42, 5371.13, 5397.92, 3544.97, 3217.61 +201211, 4835.63, 5360.39, 5120.95, 3474.59, 3265.67 +201212, 4941.86, 5364.61, 5213.38, 3524.65, 3251.08 +201301, 4878.25, 5504.00, 5279.74, 3536.31, 3373.35 +201302, 5162.76, 5854.87, 5444.59, 3832.21, 3602.87 +201303, 5285.50, 5911.40, 5508.19, 3885.49, 3663.30 +201304, 5535.16, 6082.63, 5729.57, 4170.19, 3828.30 +201305, 5747.81, 6150.34, 5857.98, 4284.05, 3879.89 +201306, 5840.70, 6244.81, 6048.60, 4335.70, 4128.98 +201307, 5554.21, 6035.93, 5662.35, 3952.67, 3974.78 +201308, 5849.05, 6391.03, 5961.60, 4272.84, 4219.17 +201309, 5643.40, 6249.76, 5939.41, 4126.44, 4064.06 +201310, 5851.15, 6470.49, 6207.33, 4302.18, 4223.94 +201311, 6152.78, 6787.82, 6459.17, 4482.79, 4391.04 +201312, 6408.49, 6887.80, 6684.77, 4684.20, 4602.29 +201401, 6473.71, 7084.95, 6989.25, 4713.80, 4750.18 +201402, 6100.82, 6784.52, 6868.03, 4834.70, 4556.20 +201403, 6436.40, 7170.25, 7205.95, 5140.23, 4728.14 +201404, 6475.35, 7303.04, 7175.76, 5013.20, 4827.20 +201405, 6495.12, 7536.14, 7142.10, 4998.25, 4745.89 +201406, 6614.59, 7615.69, 7384.37, 5099.66, 4843.31 +201407, 6659.15, 7433.83, 
7102.03, 4229.40, 4681.68 +201408, 6502.20, 7111.66, 7151.84, 4171.62, 4586.71 +201409, 6861.36, 7380.59, 7386.69, 4395.12, 4784.45 +201410, 6743.07, 7046.56, 7297.94, 4391.59, 4740.36 +201411, 6972.09, 7119.22, 7456.79, 4655.84, 4912.61 +201412, 7416.71, 6970.45, 7749.25, 4807.12, 5042.06 +201501, 7420.86, 7010.57, 7690.91, 4770.33, 5110.59 +201502, 7378.60, 6805.82, 7436.40, 4848.08, 4797.86 +201503, 7835.10, 7061.33, 8081.91, 5055.22, 5150.87 +201504, 7799.30, 6949.92, 7917.37, 4950.39, 5141.95 +201505, 7730.64, 7057.09, 8109.41, 4873.16, 5198.13 +201506, 7806.79, 6955.43, 8265.59, 5094.28, 5284.16 +201507, 7357.68, 6627.91, 7619.26, 4298.74, 4992.57 +201508, 7623.39, 6447.95, 7776.08, 4453.40, 5105.59 +201509, 7274.97, 6115.33, 7287.63, 4071.83, 4792.73 +201510, 7148.90, 5927.59, 7198.10, 3771.55, 4653.85 +201511, 7584.97, 6469.53, 7989.08, 4072.48, 4984.67 +201512, 7609.33, 6456.08, 8053.90, 4077.00, 5057.60 +201601, 7613.92, 6165.19, 7844.75, 4094.43, 4962.68 +201602, 7373.31, 6054.13, 7506.69, 3680.98, 4523.16 +201603, 7427.67, 6081.68, 7463.32, 3649.20, 4521.78 +201604, 7816.22, 6606.69, 8079.39, 3787.22, 4864.59 +201605, 7886.61, 6852.64, 7835.99, 3978.99, 4984.33 +201606, 7947.57, 6859.12, 8139.38, 4100.71, 5075.47 +201607, 7834.53, 6963.51, 7940.20, 3695.27, 4674.43 +201608, 8080.13, 7030.69, 8473.60, 3964.80, 4878.44 +201609, 8011.73, 7036.92, 8580.91, 3853.16, 5064.28 +201610, 7909.36, 7113.95, 8739.87, 3878.55, 5032.04 +201611, 7751.90, 6944.42, 8665.30, 3610.59, 5070.34 +201612, 7859.16, 7312.57, 8916.26, 3711.16, 5656.02 +201701, 7970.75, 7435.23, 9117.03, 3750.49, 5849.47 +201702, 8093.31, 7492.59, 9563.76, 3790.19, 5907.73 +201703, 8360.25, 7599.08, 9974.50, 4061.70, 6139.25 +201704, 8446.49, 7597.37, 10169.72, 4078.95, 6059.90 +201705, 8601.68, 7655.36, 10440.36, 4130.22, 6114.07 +201706, 8791.99, 7680.88, 10781.28, 4115.77, 6124.08 +201707, 8303.06, 7424.71, 9825.43, 4134.42, 6151.99 +201708, 8148.04, 7584.72, 10284.59, 4159.25, 6246.53 +201709, 7996.64, 7523.83, 10588.14, 4274.24, 6208.58 +201710, 8162.98, 7658.92, 10713.97, 4364.79, 6510.37 +201711, 8292.66, 7841.99, 11316.24, 4279.21, 6711.26 +201712, 8790.30, 8088.47, 11572.99, 4404.25, 7023.23 +201801, 9042.07, 8232.88, 11687.31, 4389.12, 7173.41 +201802, 9671.02, 8426.78, 12520.72, 4664.95, 7628.16 +201803, 9175.44, 7931.98, 12442.56, 4531.39, 7389.86 +201804, 8986.11, 7955.04, 12100.84, 4405.78, 7197.01 +201805, 9124.91, 8107.05, 12060.94, 4427.33, 7289.22 +201806, 9178.37, 8295.53, 12758.11, 4517.21, 7395.19 +201807, 8950.42, 8294.51, 12236.72, 4218.04, 6896.42 +201808, 9158.43, 8571.60, 12611.95, 4481.35, 7183.68 +201809, 9568.56, 8480.91, 13493.65, 4684.70, 7362.67 +201810, 9596.59, 8611.97, 13554.36, 4793.25, 7258.17 +201811, 9073.85, 7800.38, 12588.55, 4379.96, 6801.16 +201812, 9307.52, 7986.29, 12457.00, 4665.84, 6948.94 +201901, 8312.98, 7262.68, 11495.58, 4293.47, 6207.76 +201902, 9023.67, 7916.18, 12550.26, 4521.15, 6845.29 +201903, 9116.40, 8249.01, 13337.08, 4663.27, 7055.91 +201904, 9402.38, 8322.81, 13764.33, 4703.68, 6979.34 +201905, 9841.67, 8548.93, 14604.67, 4578.92, 7473.27 +201906, 9237.26, 7837.51, 13451.18, 4412.69, 7026.93 +201907, 9649.62, 8130.41, 13804.37, 4251.34, 7165.82 +201908, 9755.13, 8126.64, 14208.78, 4151.53, 7336.97 +201909, 9712.74, 7905.65, 13967.60, 4121.60, 7011.97 +201910, 9893.01, 8224.19, 14236.98, 4083.05, 7151.97 +201911, 10036.40, 8194.20, 14763.17, 4277.39, 7389.24 +201912, 10301.93, 8326.98, 15484.64, 4417.15, 7797.56 +202001, 10549.29, 8635.91, 16113.09, 4582.39, 
8001.38 +202002, 10660.85, 8413.40, 16516.52, 4501.40, 7914.24 +202003, 9949.66, 7551.11, 15372.35, 4270.75, 7140.44 +202004, 8974.92, 6104.36, 13879.91, 4048.89, 5760.46 +202005, 10390.38, 6907.08, 15863.06, 4606.73, 6431.95 +202006, 10923.15, 7227.68, 17052.44, 4795.23, 6739.28 +202007, 11138.84, 7368.24, 17631.20, 4579.28, 6443.49 +202008, 12195.37, 7670.26, 18821.20, 4796.44, 6717.62 +202009, 13499.81, 7894.90, 20852.49, 4902.69, 7199.87 +202010, 12957.01, 7736.72, 19916.44, 4830.42, 6991.91 +202011, 12593.34, 7765.73, 19561.32, 4592.14, 6870.77 +202012, 14014.55, 8770.22, 21748.16, 5041.05, 7957.57 +202101, 14593.86, 8979.31, 22940.60, 5301.20, 8405.00 +202102, 14736.79, 8896.98, 23023.48, 5489.26, 8169.94 +202103, 14424.73, 9409.04, 23493.60, 5431.60, 8983.04 +202104, 15166.85, 10117.88, 23778.54, 5439.45, 9461.78 +202105, 16068.56, 10376.56, 25535.69, 5592.10, 10019.54 +202106, 15812.46, 10601.50, 25441.17, 5575.85, 10331.45 +202107, 15280.76, 10399.39, 23584.77, 5123.35, 8576.64 +202108, 15331.12, 10380.83, 24434.53, 5228.10, 8553.42 +202109, 15579.62, 10464.38, 25523.95, 5382.60, 8809.03 +202110, 15082.25, 10166.14, 24096.89, 5055.55, 8618.18 +202111, 16424.14, 10786.24, 25931.04, 5176.02, 9262.83 +202112, 16486.25, 10630.50, 26208.39, 4954.16, 8820.22 +202201, 16772.44, 11272.28, 26773.94, 5290.40, 9265.24 +202202, 15552.77, 11167.57, 24631.85, 4823.57, 9013.54 +202203, 15189.68, 11264.92, 23619.73, 4768.47, 8895.51 +202204, 15878.92, 11854.07, 24381.41, 5020.87, 9001.15 +202205, 14574.32, 11400.57, 21411.42, 4675.58, 8209.60 +202206, 13988.56, 11932.21, 21187.02, 4730.91, 8291.23 +202207, 11540.77, 9944.76, 16873.55, 4200.23, 6402.72 +202208, 13191.45, 10856.48, 18752.37, 4320.59, 6983.58 +202209, 12757.37, 10737.07, 17821.31, 4118.76, 6827.51 +202210, 11788.81, 9555.40, 15870.48, 4044.61, 6279.70 +202211, 12237.10, 10869.63, 16824.08, 4401.87, 7059.73 +202212, 12503.69, 11488.22, 17718.18, 4643.24, 7508.53 +202301, 11369.80, 11195.77, 16418.77, 4569.53, 7438.42 +202302, 12444.88, 11607.02, 18126.47, 4563.42, 8041.39 +202303, 12228.09, 11125.38, 18134.03, 4389.74, 7882.69 +202304, 12546.03, 11261.39, 19976.16, 4532.49, 7585.91 +202305, 12530.04, 11349.04, 20049.85, 4747.25, 7778.10 +202306, 12653.56, 10556.81, 21623.94, 4579.49, 7619.78 +202307, 13457.34, 10931.62, 22075.76, 4674.71, 7751.05 +202308, 13869.41, 11406.84, 22950.85, 4700.45, 8266.84 +202309, 13682.52, 11200.69, 22758.57, 4713.63, 8068.90 +202310, 13016.70, 10775.96, 21511.07, 4519.55, 7823.57 +202311, 12626.23, 10492.52, 21165.72, 4359.13, 7669.24 +202312, 13680.94, 11091.53, 23764.23, 4658.72, 8494.09 +202401, 14428.83, 11521.79, 25098.64, 4976.81, 9217.66 +202402, 14113.19, 11381.06, 26022.52, 5168.01, 9341.36 +202403, 15268.12, 11820.87, 27676.36, 5454.07, 9800.68 +202404, 15677.62, 12597.68, 28451.66, 5596.40, 10329.93 +202405, 15106.23, 12382.94, 27229.85, 5317.76, 9960.93 +202406, 15443.12, 12680.76, 29390.15, 5530.54, 10367.59 +202407, 15420.51, 12179.92, 30613.57, 5596.51, 10088.29 +202408, 15883.76, 12727.15, 30310.60, 5659.18, 10825.19 +202409, 16153.05, 12915.24, 31005.86, 6044.00, 11260.41 +202410, 17004.91, 13169.31, 32119.16, 5976.97, 11436.69 +202411, 16868.08, 12912.95, 32289.65, 5823.02, 11658.88 +202412, 18725.11, 13755.72, 34125.30, 5800.18, 12973.11 +202501, 18792.44, 12674.82, 34525.82, 5535.33, 12151.62 +202502, 19694.36, 13188.76, 34814.20, 5881.22, 13013.24 +202503, 18806.79, 13157.74, 34028.21, 6149.00, 13010.76 +202504, 17584.20, 12974.85, 31193.73, 5926.06, 12553.13 +202505, 17958.43, 
12427.22, 31687.68, 5925.20, 12516.69 +202506, 19186.66, 12992.86, 35035.05, 5745.20, 13064.82 +202507, 19502.53, 13679.42, 37444.31, 5907.05, 13409.49 +202508, 19767.71, 14087.95, 39600.28, 5887.36, 13335.63 +202509, 20287.62, 14281.81, 40113.28, 6226.56, 13930.12 +202510, 20960.96, 14574.90, 42908.36, 6381.16, 14034.83 +202511, 21303.82, 14835.48, 45355.67, 6713.09, 13741.89 +202512, 21448.16, 14930.17, 44453.17, 7439.90, 14065.69 + + + Sum of BE / Sum of ME +,Cnsmr,Manuf,HiTec,Hlth,Other + 1926, 0.57, 0.95, 0.69, 0.74, 1.32 + 1927, 0.58, 0.98, 0.71, 0.58, 1.38 + 1928, 0.43, 0.81, 0.57, 0.34, 1.13 + 1929, 0.34, 0.61, 0.44, 0.27, 1.03 + 1930, 0.58, 0.68, 0.46, 0.38, 1.19 + 1931, 0.78, 0.98, 0.69, 0.49, 1.68 + 1932, 1.11, 1.96, 1.22, 0.56, 4.16 + 1933, 1.27, 1.92, 1.33, 0.60, 4.84 + 1934, 0.72, 1.23, 1.08, 0.38, 3.02 + 1935, 0.69, 1.27, 1.08, 0.33, 3.03 + 1936, 0.55, 0.84, 0.68, 0.47, 2.29 + 1937, 0.50, 0.67, 0.57, 0.28, 1.70 + 1938, 0.88, 1.11, 0.73, 0.36, 2.92 + 1939, 0.66, 0.87, 0.66, 0.29, 2.47 + 1940, 0.65, 0.91, 0.67, 0.28, 2.58 + 1941, 0.75, 1.08, 0.73, 0.39, 2.87 + 1942, 0.99, 1.28, 0.93, 0.44, 3.34 + 1943, 0.88, 1.27, 0.89, 0.39, 2.92 + 1944, 0.74, 1.10, 0.70, 0.38, 2.50 + 1945, 0.63, 0.99, 0.66, 0.41, 2.05 + 1946, 0.49, 0.80, 0.61, 0.33, 1.46 + 1947, 0.63, 0.86, 0.66, 0.36, 1.89 + 1948, 0.74, 0.90, 0.77, 0.47, 2.16 + 1949, 0.85, 1.03, 0.85, 0.58, 2.35 + 1950, 0.79, 1.01, 0.83, 0.46, 2.26 + 1951, 0.79, 0.88, 0.83, 0.45, 1.88 + 1952, 0.80, 0.79, 0.80, 0.39, 1.84 + 1953, 0.77, 0.79, 0.77, 0.51, 1.71 + 1954, 0.86, 0.85, 0.79, 0.54, 2.02 + 1955, 0.65, 0.62, 0.62, 0.45, 1.38 + 1956, 0.63, 0.55, 0.59, 0.41, 1.30 + 1957, 0.69, 0.56, 0.61, 0.41, 1.44 + 1958, 0.84, 0.70, 0.64, 0.35, 1.90 + 1959, 0.59, 0.55, 0.48, 0.23, 1.33 + 1960, 0.55, 0.56, 0.41, 0.20, 1.36 + 1961, 0.60, 0.62, 0.39, 0.21, 1.33 + 1962, 0.46, 0.55, 0.34, 0.19, 1.11 + 1963, 0.54, 0.61, 0.46, 0.29, 1.25 + 1964, 0.48, 0.57, 0.40, 0.26, 1.03 + 1965, 0.43, 0.51, 0.44, 0.23, 1.00 + 1966, 0.42, 0.50, 0.43, 0.19, 0.84 + 1967, 0.58, 0.58, 0.46, 0.21, 0.92 + 1968, 0.47, 0.51, 0.37, 0.19, 0.72 + 1969, 0.44, 0.50, 0.39, 0.20, 0.63 + 1970, 0.52, 0.64, 0.43, 0.19, 0.78 + 1971, 0.52, 0.66, 0.52, 0.23, 0.85 + 1972, 0.45, 0.65, 0.51, 0.21, 0.71 + 1973, 0.44, 0.58, 0.47, 0.19, 0.61 + 1974, 0.73, 0.74, 0.67, 0.25, 0.83 + 1975, 1.18, 1.17, 1.02, 0.37, 1.34 + 1976, 0.75, 0.96, 0.83, 0.38, 1.13 + 1977, 0.71, 0.86, 0.72, 0.42, 0.99 + 1978, 0.86, 1.03, 0.80, 0.51, 1.12 + 1979, 0.97, 1.11, 0.81, 0.50, 1.17 + 1980, 0.99, 1.01, 0.89, 0.49, 1.07 + 1981, 0.92, 0.83, 0.79, 0.44, 0.96 + 1982, 0.94, 1.10, 0.88, 0.47, 1.08 + 1983, 0.68, 1.18, 0.72, 0.41, 0.98 + 1984, 0.59, 1.02, 0.61, 0.44, 0.90 + 1985, 0.66, 1.09, 0.65, 0.47, 0.98 + 1986, 0.57, 0.93, 0.58, 0.39, 0.77 + 1987, 0.54, 0.82, 0.62, 0.33, 0.72 + 1988, 0.60, 0.85, 0.67, 0.31, 0.80 + 1989, 0.54, 0.81, 0.64, 0.31, 0.77 + 1990, 0.47, 0.68, 0.53, 0.24, 0.64 + 1991, 0.52, 0.78, 0.63, 0.23, 0.84 + 1992, 0.37, 0.66, 0.51, 0.17, 0.64 + 1993, 0.31, 0.61, 0.44, 0.23, 0.57 + 1994, 0.28, 0.44, 0.30, 0.25, 0.56 + 1995, 0.35, 0.47, 0.32, 0.26, 0.62 + 1996, 0.33, 0.40, 0.25, 0.20, 0.53 + 1997, 0.31, 0.36, 0.26, 0.19, 0.48 + 1998, 0.25, 0.32, 0.22, 0.15, 0.39 + 1999, 0.22, 0.32, 0.17, 0.11, 0.41 + 2000, 0.24, 0.33, 0.14, 0.12, 0.35 + 2001, 0.27, 0.34, 0.30, 0.10, 0.36 + 2002, 0.27, 0.39, 0.32, 0.14, 0.43 + 2003, 0.32, 0.43, 0.43, 0.20, 0.53 + 2004, 0.30, 0.38, 0.34, 0.21, 0.45 + 2005, 0.31, 0.37, 0.33, 0.22, 0.46 + 2006, 0.34, 0.36, 0.34, 0.23, 0.47 + 2007, 0.30, 0.37, 0.35, 0.24, 0.46 + 2008, 0.31, 0.35, 
0.32, 0.25, 0.54 + 2009, 0.41, 0.53, 0.52, 0.33, 0.75 + 2010, 0.33, 0.47, 0.37, 0.37, 0.73 + 2011, 0.32, 0.44, 0.36, 0.37, 0.72 + 2012, 0.32, 0.48, 0.39, 0.35, 0.89 + 2013, 0.29, 0.48, 0.35, 0.32, 0.79 + 2014, 0.25, 0.41, 0.29, 0.24, 0.62 + 2015, 0.22, 0.41, 0.27, 0.20, 0.58 + 2016, 0.22, 0.44, 0.29, 0.21, 0.62 + 2017, 0.23, 0.37, 0.27, 0.22, 0.55 + 2018, 0.20, 0.36, 0.23, 0.19, 0.50 + 2019, 0.24, 0.44, 0.26, 0.18, 0.57 + 2020, 0.19, 0.38, 0.21, 0.17, 0.50 + 2021, 0.15, 0.35, 0.16, 0.17, 0.49 + 2022, 0.14, 0.31, 0.14, 0.19, 0.42 + 2023, 0.19, 0.34, 0.22, 0.21, 0.46 + 2024, 0.17, 0.35, 0.16, 0.20, 0.44 + 2025, 0.14, 0.34, 0.13, 0.19, 0.39 + + + Value-Weighted Average of BE/ME +,Cnsmr,Manuf,HiTec,Hlth,Other + 1926, 0.59, 0.93, 0.69, 0.73, 1.31 + 1927, 0.55, 0.98, 0.69, 0.57, 1.43 + 1928, 0.41, 0.80, 0.56, 0.32, 1.15 + 1929, 0.33, 0.57, 0.43, 0.25, 1.00 + 1930, 0.57, 0.66, 0.44, 0.39, 1.16 + 1931, 0.80, 0.96, 0.71, 0.46, 1.74 + 1932, 1.11, 2.09, 1.26, 0.55, 3.82 + 1933, 1.57, 2.39, 1.42, 0.63, 5.97 + 1934, 0.77, 1.26, 1.07, 0.37, 3.06 + 1935, 0.67, 1.24, 1.08, 0.33, 2.80 + 1936, 0.54, 0.84, 0.69, 0.32, 2.37 + 1937, 0.50, 0.68, 0.55, 0.25, 1.79 + 1938, 0.87, 1.10, 0.76, 0.35, 2.78 + 1939, 0.66, 0.85, 0.68, 0.28, 2.15 + 1940, 0.64, 0.90, 0.67, 0.28, 2.42 + 1941, 0.78, 1.09, 0.74, 0.40, 3.33 + 1942, 0.99, 1.32, 0.96, 0.45, 3.71 + 1943, 0.95, 1.37, 0.92, 0.42, 3.73 + 1944, 0.75, 1.11, 0.71, 0.37, 2.73 + 1945, 0.65, 1.02, 0.67, 0.42, 2.26 + 1946, 0.50, 0.79, 0.60, 0.31, 1.44 + 1947, 0.62, 0.86, 0.65, 0.36, 1.84 + 1948, 0.74, 0.92, 0.78, 0.48, 2.34 + 1949, 0.84, 1.01, 0.85, 0.56, 2.16 + 1950, 0.79, 1.01, 0.85, 0.46, 2.35 + 1951, 0.79, 0.87, 0.83, 0.43, 1.83 + 1952, 0.79, 0.79, 0.79, 0.40, 1.97 + 1953, 0.78, 0.79, 0.78, 0.51, 1.72 + 1954, 0.85, 0.88, 0.75, 0.55, 2.01 + 1955, 0.64, 0.63, 0.61, 0.45, 1.43 + 1956, 0.58, 0.56, 0.58, 0.40, 1.29 + 1957, 0.72, 0.56, 0.57, 0.40, 1.44 + 1958, 0.86, 0.73, 0.64, 0.36, 1.94 + 1959, 0.62, 0.55, 0.47, 0.23, 1.34 + 1960, 0.53, 0.55, 0.41, 0.21, 1.33 + 1961, 0.61, 0.64, 0.39, 0.21, 1.34 + 1962, 0.47, 0.57, 0.35, 0.19, 1.21 + 1963, 0.55, 0.63, 0.45, 0.29, 1.38 + 1964, 0.48, 0.57, 0.39, 0.26, 1.13 + 1965, 0.43, 0.52, 0.43, 0.24, 0.97 + 1966, 0.42, 0.49, 0.42, 0.19, 0.81 + 1967, 0.60, 0.59, 0.44, 0.21, 0.96 + 1968, 0.48, 0.53, 0.37, 0.19, 0.78 + 1969, 0.43, 0.50, 0.39, 0.20, 0.60 + 1970, 0.53, 0.67, 0.48, 0.19, 0.74 + 1971, 0.51, 0.67, 0.52, 0.23, 0.85 + 1972, 0.43, 0.63, 0.48, 0.20, 0.69 + 1973, 0.43, 0.59, 0.47, 0.17, 0.61 + 1974, 0.77, 0.77, 0.68, 0.25, 0.88 + 1975, 1.22, 1.21, 1.05, 0.41, 1.39 + 1976, 0.83, 1.01, 0.83, 0.40, 1.20 + 1977, 0.74, 0.90, 0.74, 0.44, 1.04 + 1978, 0.88, 1.04, 0.81, 0.51, 1.14 + 1979, 1.00, 1.15, 0.81, 0.51, 1.21 + 1980, 0.98, 1.02, 0.91, 0.49, 1.08 + 1981, 0.96, 0.90, 0.86, 0.43, 1.02 + 1982, 0.97, 1.17, 0.88, 0.46, 1.10 + 1983, 0.70, 1.21, 0.68, 0.41, 1.00 + 1984, 0.60, 1.05, 0.51, 0.45, 0.91 + 1985, 0.65, 1.11, 0.68, 0.48, 1.00 + 1986, 0.56, 0.93, 0.61, 0.38, 0.78 + 1987, 0.55, 0.83, 0.60, 0.32, 0.75 + 1988, 0.63, 0.88, 0.69, 0.34, 0.88 + 1989, 0.54, 0.85, 0.66, 0.32, 0.82 + 1990, 0.47, 0.68, 0.55, 0.24, 0.66 + 1991, 0.54, 0.80, 0.66, 0.23, 0.92 + 1992, 0.44, 0.68, 0.55, 0.17, 0.73 + 1993, 0.33, 0.63, 0.47, 0.23, 0.60 + 1994, 0.30, 0.44, 0.33, 0.27, 0.58 + 1995, 0.36, 0.49, 0.34, 0.26, 0.64 + 1996, 0.35, 0.40, 0.25, 0.20, 0.53 + 1997, 0.31, 0.36, 0.27, 0.19, 0.48 + 1998, 0.26, 0.32, 0.22, 0.14, 0.39 + 1999, 0.22, 0.33, 0.17, 0.11, 0.40 + 2000, 0.25, 0.35, 0.13, 0.12, 0.33 + 2001, 0.32, 0.37, 0.32, 0.12, 0.38 + 2002, 0.29, 0.40, 0.31, 
0.14, 0.43 + 2003, 0.33, 0.46, 0.44, 0.20, 0.54 + 2004, 0.31, 0.39, 0.32, 0.21, 0.45 + 2005, 0.31, 0.38, 0.32, 0.23, 0.47 + 2006, 0.33, 0.37, 0.34, 0.24, 0.47 + 2007, 0.33, 0.37, 0.36, 0.24, 0.45 + 2008, 0.33, 0.35, 0.32, 0.25, 0.52 + 2009, 0.47, 0.57, 0.52, 0.35, 0.85 + 2010, 0.35, 0.47, 0.37, 0.37, 0.83 + 2011, 0.33, 0.44, 0.36, 0.38, 0.71 + 2012, 0.32, 0.47, 0.39, 0.36, 0.92 + 2013, 0.32, 0.48, 0.38, 0.33, 0.81 + 2014, 0.27, 0.42, 0.30, 0.25, 0.61 + 2015, 0.24, 0.41, 0.27, 0.21, 0.59 + 2016, 0.24, 0.48, 0.30, 0.22, 0.61 + 2017, 0.25, 0.37, 0.27, 0.23, 0.54 + 2018, 0.22, 0.39, 0.23, 0.20, 0.47 + 2019, 0.24, 0.45, 0.26, 0.21, 0.56 + 2020, 0.20, 0.37, 0.19, 0.19, 0.44 + 2021, 0.19, 0.41, 0.16, 0.17, 0.50 + 2022, 0.17, 0.37, 0.15, 0.19, 0.43 + 2023, 0.24, 0.35, 0.21, 0.21, 0.44 + 2024, 0.19, 0.37, 0.14, 0.19, 0.44 + 2025, 0.16, 0.35, 0.13, 0.20, 0.39 + +Copyright 2025 Eugene F. Fama and Kenneth R. French diff --git a/GeneralisedFilters/examples/dfsv/Project.toml b/GeneralisedFilters/examples/dfsv/Project.toml new file mode 100644 index 00000000..e0369bed --- /dev/null +++ b/GeneralisedFilters/examples/dfsv/Project.toml @@ -0,0 +1,17 @@ +[deps] +ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b" +AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001" +AdvancedHMC = "0bf59076-c3b1-5ca4-86bd-e02cd72cde3d" +CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b" +CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0" +ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" +DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" +Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f" +DistributionsAD = "ced4e74d-a319-5a8a-b0ac-84af2272839c" +GeneralisedFilters = "3ef92589-7ab8-43f9-b5b9-a3a0c86ecbb7" +MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d" +PDMats = "90014a1f-27ba-587c-ab20-58faa44d9150" +SSMProblems = "26aad666-b158-4e64-9d35-0e672562fa48" +StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" +Turing = "fce5fe82-541a-59a6-adf8-730c64b5f9a0" +Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" diff --git a/GeneralisedFilters/examples/dfsv/script.jl b/GeneralisedFilters/examples/dfsv/script.jl new file mode 100644 index 00000000..b5ed2fe7 --- /dev/null +++ b/GeneralisedFilters/examples/dfsv/script.jl @@ -0,0 +1,364 @@ +# # Dynamic Factor Stochastic Volatility +# +# This example demonstrates Rao-Blackwellised Particle Gibbs (RBPG) with ancestor +# sampling and NUTS parameter updates, applied to a multivariate Dynamic Factor +# Stochastic Volatility (DFSV) model for monthly US industry portfolio returns. +# +# The model decomposes return volatility into: +# - A **common volatility factor** $g_t$ capturing economy-wide variance regimes, and +# - **Idiosyncratic stochastic volatility** $u_{i,t}$ per industry. +# +# Latent **linear factors** $f_t \in \mathbb{R}^2$ drive cross-industry return +# correlations and are integrated out analytically via Rao-Blackwellisation, +# greatly reducing the effective particle dimension. 
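+#
+# For intuition, a single step of the generative process reads as follows. This is a
+# standalone sketch with made-up parameter values (`Λ`, `a`, and the AR coefficients
+# here are illustrative, not estimates), independent of the implementation below:
+#
+# ```julia
+# Λ = [1.0 0.0; 0.4 1.0; 0.3 0.5; 0.2 0.6; 0.5 0.2]  # factor loadings (5 × 2)
+# a = fill(-1.0, 5)                                   # baseline log-volatility levels
+# g, u, f = 0.0, zeros(5), zeros(2)                   # previous latent states
+# g = 0.95 * g + 0.2 * randn()                        # common log-vol factor, AR(1)
+# u = 0.9 .* u .+ 0.1 .* randn(5)                     # idiosyncratic log-vols, AR(1)
+# f = 0.8 .* f .+ 0.3 .* randn(2)                     # latent return factors, VAR(1)
+# y = Λ * f .+ exp.((a .+ g .+ u) ./ 2) .* randn(5)   # returns: variance exp(a + g + u)
+# ```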
+ +using GeneralisedFilters +using SSMProblems +using Distributions +using DistributionsAD +using PDMats +using StaticArrays +using LinearAlgebra +using Random +using Statistics + +using AbstractMCMC: AbstractMCMC +using AdvancedHMC: NUTS, HMC +using ADTypes: ADTypes +using MCMCChains: MCMCChains +using Turing: @model +using Zygote +import ChainRulesCore: ChainRulesCore, NoTangent + +const GF = GeneralisedFilters + +DFSV_PATH = joinpath(@__DIR__, "..", "..", "..", "GeneralisedFilters", "examples", "dfsv"); #hide +# DFSV_PATH = joinpath(@__DIR__) +includet(joinpath(DFSV_PATH, "utilities.jl")); #hide + +# ## Model +# +# Let $y_t \in \mathbb{R}^5$ denote monthly (demeaned) log-returns. The state +# vector splits into two components: +# +# **Nonlinear (particle) state**: $x_t = [g_t,\, u_{1:5,t}] \in \mathbb{R}^6$ +# - $g_t$: common log-volatility factor, $\text{AR}(1)$ with mean zero +# - $u_{i,t}$: industry-level idiosyncratic log-vol, shared AR(1) parameters +# +# **Linear (Rao-Blackwellised) state**: $f_t \in \mathbb{R}^2$, latent return +# factors with diagonal $\text{VAR}(1)$ dynamics. +# +# ### Observation model (conditionally Gaussian) +# +# ```math +# y_t \mid f_t, x_t \;\sim\; \mathcal{N}\!\bigl(\Lambda f_t,\; +# \mathrm{diag}(\exp(a + g_t \mathbf{1} + u_t))\bigr) +# ``` +# +# The per-series log-vol $h_{i,t} = a_i + g_t + u_{i,t}$ enters through an $\exp$ +# link, making the system nonlinear. Conditional on $x_{0:T}$, however, the model +# in $(f_t, y_t)$ is linear-Gaussian, enabling Kalman filtering. +# +# The loadings $\Lambda \in \mathbb{R}^{5 \times 2}$ use a lower-triangular +# identification constraint (first two diagonal entries fixed to 1) to remove +# rotational indeterminacy. +# +# ### Volatility dynamics (outer / particle) +# +# ```math +# g_t = \phi_g\, g_{t-1} + \omega_t, \quad \omega_t \sim \mathcal{N}(0, \sigma_g^2) +# ``` +# ```math +# u_{i,t} = \phi_u\, u_{i,t-1} + \zeta_{i,t}, \quad \zeta_{i,t} \sim \mathcal{N}(0, \sigma_u^2) +# ``` +# +# with stationary initialisations $g_0 \sim \mathcal{N}(0, \sigma_g^2 / (1-\phi_g^2))$ +# and similarly for $u_{i,0}$. +# +# ### Factor dynamics (inner / Rao-Blackwellised) +# +# ```math +# f_t = A f_{t-1} + \eta_t, \quad \eta_t \sim \mathcal{N}(0, Q) +# ``` +# with $A = \mathrm{diag}(\rho_1, \rho_2)$ and $Q = \mathrm{diag}(q_1^2, q_2^2)$. + +# ## Implementation +# +# ### Outer (volatility) model + +# Prior for the outer state $x_0 = [g_0, u_{1:5,0}]$, with stationary variances. +struct VolPrior{T<:Real} <: StatePrior + φ_g::T + σ_g::T + φ_u::T + σ_u::T +end + +function SSMProblems.distribution(p::VolPrior{T}; kwargs...) where {T} + σ²_g = p.σ_g^2 / (1 - p.φ_g^2) + σ²_u = p.σ_u^2 / (1 - p.φ_u^2) + μ₀ = @SVector zeros(T, 6) + vars = SVector{6,T}(σ²_g, σ²_u, σ²_u, σ²_u, σ²_u, σ²_u) + return MvNormal(μ₀, PDiagMat(vars)) +end + +# Transition density for the outer state. +struct VolDynamics{T<:Real} <: LatentDynamics + φ_g::T + σ_g::T + φ_u::T + σ_u::T +end + +function SSMProblems.distribution( + d::VolDynamics{T}, ::Integer, state::AbstractVector; kwargs... +) where {T} + g, u₁, u₂, u₃, u₄, u₅ = state + μ = SVector{6,T}(d.φ_g * g, d.φ_u * u₁, d.φ_u * u₂, d.φ_u * u₃, d.φ_u * u₄, d.φ_u * u₅) + vars = SVector{6,T}(d.σ_g^2, d.σ_u^2, d.σ_u^2, d.σ_u^2, d.σ_u^2, d.σ_u^2) + return MvNormal(μ, PDiagMat(vars)) +end + +# ### Inner (factor) model + +# Diffuse prior for the initial factor state. +struct FactorPrior <: GaussianPrior end + +GF.calc_μ0(::FactorPrior; kwargs...) = @SVector zeros(2) +# GF.calc_Σ0(::FactorPrior; kwargs...) 
= PDMat(SMatrix{2,2,Float64}(10.0 * I)) +GF.calc_Σ0(::FactorPrior; kwargs...) = _pdmat(SMatrix{2,2,Float64}(10.0 * I)) + +# Diagonal VAR(1) factor dynamics. +struct FactorDynamics{T<:Real} <: LinearGaussianLatentDynamics + ρ::SVector{2,T} + q::SVector{2,T} +end + +function GF.calc_A(d::FactorDynamics{T}, ::Integer; kwargs...) where {T} + return SMatrix{2,2,T,4}(d.ρ[1], zero(T), zero(T), d.ρ[2]) +end + +GF.calc_b(::FactorDynamics{T}, ::Integer; kwargs...) where {T} = @SVector zeros(T, 2) + +function GF.calc_Q(d::FactorDynamics{T}, ::Integer; kwargs...) where {T} + return _pdmat(SMatrix{2,2,T,4}(d.q[1]^2, zero(T), zero(T), d.q[2]^2)) +end + +# Observation process: $y_t = \Lambda f_t + \varepsilon_t$ with time-varying +# diagonal noise covariance driven by the current outer (volatility) state. +struct FactorObservation{T<:Real} <: LinearGaussianObservationProcess + Λ::SMatrix{5,2,T,10} # lower-triangular loadings + a::SVector{5,T} # per-industry log-vol levels +end + +GF.calc_H(obs::FactorObservation, ::Integer; kwargs...) = obs.Λ +GF.calc_c(::FactorObservation{T}, ::Integer; kwargs...) where {T} = @SVector zeros(T, 5) + +# Hard upper bound on log-volatility fed into exp(). exp(20) ≈ 5e8, which is already +# far beyond any plausible monthly-return variance; this prevents particle drift from +# producing an ill-conditioned innovation covariance S = HΣH' + R. +const LOG_VOL_MAX = 20.0 + +# PDiagMat constructor with rrule so the SMatrix{5,5} cotangent from the KF rrule +# propagates correctly back through the diagonal-exponential parameterisation. +# A small jitter (1e-6) floors R to keep S = HΣH' + R positive-definite when extreme +# NUTS proposals drive exp(h) toward zero (H is 5×2, so HΣH' is rank-deficient alone). +function _make_obs_cov(h::SVector{N,T}) where {N,T} + h_clamped = min.(h, T(LOG_VOL_MAX)) + return PDiagMat(exp.(h_clamped) .+ T(1e-6)) +end + +function ChainRulesCore.rrule(::typeof(_make_obs_cov), h::SVector{N,T}) where {N,T} + h_clamped = min.(h, T(LOG_VOL_MAX)) + evh = exp.(h_clamped) + R = PDiagMat(evh .+ T(1e-6)) + function _make_obs_cov_pullback(∂R) + d = ChainRulesCore.unthunk(∂R) + # Zero gradient for clamped components (clamp is not differentiable at the boundary, + # but for stability we treat it as a hard stop). + ∂h = SVector{N,T}( + ntuple(i -> h[i] < T(LOG_VOL_MAX) ? d[i, i] * evh[i] : zero(T), N) + ) + return (NoTangent(), ∂h) + end + return R, _make_obs_cov_pullback +end + +# Transparent PDMat wrapper with rrule to bridge the SMatrix cotangent from the KF rrule. +_pdmat(A::AM) where {AM<:AbstractMatrix} = PDMat(A) + +function ChainRulesCore.rrule(::typeof(_pdmat), A::AM) where {AM<:AbstractMatrix} + P = _pdmat(A) + pullback(∂P) = (NoTangent(), ChainRulesCore.unthunk(∂P)) + return P, pullback +end + +function GF.calc_R(obs::FactorObservation{T}, ::Integer; new_outer, kwargs...) where {T} + g, u₁, u₂, u₃, u₄, u₅ = new_outer + h = obs.a .+ g .+ SVector{5,T}(u₁, u₂, u₃, u₄, u₅) + return _make_obs_cov(h) +end + +# ### Model constructor +# +# Assembles the `HierarchicalSSM` from raw (unconstrained) parameter vectors. +# Stability constraints are enforced via $\tanh$ (AR coefficients) and $\exp$ +# (positive scales). The loadings use a lower-triangular identification: +# $\Lambda_{1,1} = \Lambda_{2,2} = 1$, $\Lambda_{1,2} = 0$; the 7 remaining +# entries are free. 
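+#
+# For reference, with hypothetical values `λ_free = 0.1, 0.2, ..., 0.7` the
+# column-major `SMatrix` constructor used below produces
+#
+# ```julia
+# using StaticArrays
+# λ = collect(0.1:0.1:0.7)
+# Λ = SMatrix{5,2}(1.0, λ[1], λ[2], λ[3], λ[4], 0.0, 1.0, λ[5], λ[6], λ[7])
+# # 5×2 SMatrix{5, 2, Float64, 10}:
+# #  1.0  0.0
+# #  0.1  1.0
+# #  0.2  0.5
+# #  0.3  0.6
+# #  0.4  0.7
+# ```
+#
+# so the first series loads only on the first factor with unit weight and the second
+# series has unit weight on the second factor, pinning down rotation and scale.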
+ +function build_dfsv(λ_free, ρ_raw, log_q, atanh_φ_g, log_σ_g, atanh_φ_u, log_σ_u, a) + T = promote_type(eltype(λ_free), typeof(atanh_φ_g)) + + ρ = tanh.(ρ_raw) + q = exp.(log_q) + φ_g = tanh(atanh_φ_g) + σ_g = exp(log_σ_g) + φ_u = tanh(atanh_φ_u) + σ_u = exp(log_σ_u) + + # Column-major layout: Λ[:,1] = [1, λ[1..4]], Λ[:,2] = [0, 1, λ[5..7]] + Λ = SMatrix{5,2,T}( + one(T), + λ_free[1], + λ_free[2], + λ_free[3], + λ_free[4], + zero(T), + one(T), + λ_free[5], + λ_free[6], + λ_free[7], + ) + + outer_prior = VolPrior(φ_g, σ_g, φ_u, σ_u) + outer_dyn = VolDynamics(φ_g, σ_g, φ_u, σ_u) + + factor_dyn = FactorDynamics(SVector{2,T}(ρ[1], ρ[2]), SVector{2,T}(q[1], q[2])) + factor_obs = FactorObservation(Λ, SVector{5,T}(a)) + + return HierarchicalSSM(outer_prior, outer_dyn, FactorPrior(), factor_dyn, factor_obs) +end; + +# ## Data +# +# We use monthly value-weighted returns from the Ken French Data Library for the +# 5-industry classification (Consumer, Manufacturing, High-Tech, Healthcare, Other), +# covering January 1985 – December 2024 (480 months). + +df = load_industry_data(; date_from=198501, date_to=202412) +dates = _yyyymm_to_date.(df.date) +Y = Matrix{Float64}(df[:, INDUSTRIES]) + +# Demean each series to remove the constant return component. +Y .-= mean(Y; dims=1) + +plot_returns(df) + +# Convert to a vector of observation vectors for the filter. +ys = [Vector{Float64}(Y[t, :]) for t in 1:size(Y, 1)]; + +# ## Inference +# +# ### Model specification +# +# The 20-dimensional parameter block for NUTS consists of: +# - 7 free loadings ($\lambda_{21}, \lambda_{31..51}, \lambda_{32..52}$) +# - 2 factor AR coefficients (raw, via $\tanh$) +# - 2 factor innovation log-scales +# - Common vol AR and log-scale ($\phi_g, \sigma_g$, raw) +# - Shared idiosyncratic vol AR and log-scale ($\phi_u, \sigma_u$, raw) +# - 5 per-industry log-vol levels $a_i$ +# +# All AR parameters use the reparameterisation $\phi = \tanh(\tilde\phi)$ with a +# $\mathcal{N}(1.5, 0.7^2)$ prior on $\tilde\phi$, centering the prior on +# persistent ($\phi \approx 0.9$) processes as is typical for stochastic volatility. + +@model function dfsv(ys) + λ_free ~ MvNormal(zeros(7), I) + ρ_raw ~ MvNormal(zeros(2), I) + log_q ~ MvNormal(zeros(2), 0.5^2 * I) + atanh_φ_g ~ Normal(1.5, 0.7) + log_σ_g ~ Normal(log(0.15), 0.7) + atanh_φ_u ~ Normal(1.5, 0.7) + log_σ_u ~ Normal(log(0.15), 0.7) + a ~ MvNormal(zeros(5), 2.0^2 * I) + + ssm = build_dfsv(λ_free, ρ_raw, log_q, atanh_φ_g, log_σ_g, atanh_φ_u, log_σ_u, a) + return x ~ SSMTrajectory(ssm, KF(), ys) +end + +# ### Sampler +# +# The particle Gibbs sampler alternates between: +# 1. A conditional SMC sweep over the volatility path $x_{0:T}$ using the +# Rao-Blackwellised particle filter with **ancestor sampling** (PGAS), and +# 2. A NUTS step over the 20-dimensional parameter block, conditioned on the +# current volatility path. +# +# Ancestor sampling (CSMCAS) significantly improves mixing of the reference +# trajectory at negligible extra cost. Zygote is used as the AD backend because +# the Kalman filter log-likelihood has an analytical reverse-mode rule that Zygote +# picks up via ChainRules. 
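+
+# As a quick illustration of the reparameterised persistence prior above (an aside;
+# the figures in the comment are Monte Carlo approximations), draws of
+# $\phi = \tanh(\tilde\phi)$ with $\tilde\phi \sim \mathcal{N}(1.5, 0.7^2)$ concentrate
+# on strongly persistent processes:
+
+let φ̃ = rand(MersenneTwister(1), Normal(1.5, 0.7), 100_000)
+    φ = tanh.(φ̃)
+    (mean_φ=mean(φ), prob_φ_gt_0_8=mean(φ .> 0.8))  # ≈ (0.82, 0.72)
+end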
+
+rng = MersenneTwister(42)
+
+N_particles = 1000
+N_iter = 50
+N_adapts = 10
+
+model = dfsv(ys)
+
+sampler = NUTS(0.85)
+# sampler = HMC(0.01, 10)  # cheaper fixed-step alternative for quick runs
+pg = ParticleGibbs(
+    CSMCAS(RBPF(BF(N_particles), KF())), sampler; adtype=ADTypes.AutoZygote()
+)
+
+chain = AbstractMCMC.sample(
+    rng, model, pg, N_iter; n_adapts=N_adapts, progress=true, chain_type=MCMCChains.Chains
+)
+
+# ## Results

+# ### Posterior parameter traces
+#
+# The traces (post-adaptation) illustrate NUTS mixing across the 20-dimensional
+# parameter block. Random-walk Metropolis-Hastings would mix slowly, with high
+# autocorrelation, on a block of this size; NUTS largely avoids this by exploiting
+# gradient information.

+plot_chains(chain; burnin=N_adapts)

+# ### Posterior volatility paths
+#
+# To recover the smoothed volatility paths, we re-run a forward filter using the
+# posterior mean parameters and collect the particle states via a callback.

+post = MCMCChains.summarize(chain[(N_adapts + 1):end])

+function posterior_mean_params(chain, burnin)
+    get_mean(k) = mean(Array(chain[(burnin + 1):end, k, 1]))
+    λ_free = [get_mean("λ_free[$i]") for i in 1:7]
+    ρ_raw = [get_mean("ρ_raw[$i]") for i in 1:2]
+    log_q = [get_mean("log_q[$i]") for i in 1:2]
+    atanh_φ_g = get_mean(:atanh_φ_g)
+    log_σ_g = get_mean(:log_σ_g)
+    atanh_φ_u = get_mean(:atanh_φ_u)
+    log_σ_u = get_mean(:log_σ_u)
+    a = [get_mean("a[$i]") for i in 1:5]
+    return λ_free, ρ_raw, log_q, atanh_φ_g, log_σ_g, atanh_φ_u, log_σ_u, a
+end

+params = posterior_mean_params(chain, N_adapts)
+ssm_post = build_dfsv(params...)

+cb = GF.AncestorCallback(nothing)
+states, _ = GF.filter(rng, ssm_post, RBPF(BF(N_particles), KF()), ys; callback=cb)

+# Collect particle outer states at each time step from the ancestry tree.
+paths = GF.get_ancestry(cb.tree)
+T_len = length(ys)
+vol_paths = [[path[t].x for path in paths] for t in 1:T_len]

+plot_volatilities(vol_paths, dates) diff --git a/GeneralisedFilters/examples/dfsv/utilities.jl b/GeneralisedFilters/examples/dfsv/utilities.jl new file mode 100644 index 00000000..2f1684b5 --- /dev/null +++ b/GeneralisedFilters/examples/dfsv/utilities.jl @@ -0,0 +1,82 @@ +using CSV, DataFrames, Dates, Statistics, CairoMakie

+const DATA_PATH = joinpath(@__DIR__, "5_Industry_Portfolios.csv")
+const INDUSTRIES = ["Cnsmr", "Manuf", "HiTec", "Hlth", "Other"]

+# Parse the Ken French 5 Industry Portfolios CSV. The file has multiple sections
+# separated by blank lines; we want the first (monthly value-weighted returns).
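+# `skipto` jumps past the file preamble to the first data row, `limit` stops reading
+# before the next section begins, and `missingstring` maps the sentinel codes noted in
+# the preamble (-99.99 and -999) to `missing` so `dropmissing!` can remove them.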
+function load_industry_data(; date_from=198501, date_to=202412) + df = CSV.read( + DATA_PATH, + DataFrame; + header=["date", INDUSTRIES...], + skipto=13, + limit=1200, + missingstring=["-99.99", "-999"], + types=Dict(:date => Int, (Symbol(i) => Float64 for i in INDUSTRIES)...), + silencewarnings=true, + ) + dropmissing!(df) + filter!(row -> date_from <= row.date <= date_to, df) + return df +end + +function _yyyymm_to_date(yyyymm::Integer) + y, m = divrem(yyyymm, 100) + return Date(y, m, 1) +end + +function plot_returns(df) + dates = _yyyymm_to_date.(df.date) + fig = Figure(; size=(1100, 500), fontsize=14) + ax = Axis( + fig[1, 1]; + title="Monthly Value-Weighted Returns — French 5 Industry Portfolios", + xlabel="Date", + ) + colors = Makie.wong_colors() + for (i, ind) in enumerate(INDUSTRIES) + lines!(ax, dates, df[!, ind]; label=ind, color=colors[i], linewidth=0.8) + end + axislegend(ax; position=:rt) + return fig +end + +function plot_volatilities(vol_paths, dates) + T = length(dates) + # vol_paths: Vector of length T, each element a vector of SVector{6} particle states + # Each outer state is [g, u₁, ..., u₅] + + fig = Figure(; size=(1100, 700), fontsize=13) + + # Common volatility factor — full width, top row + ax_g = Axis(fig[1, 1]; title="Common Volatility Factor gₜ", xlabel="Date") + g_mean = [mean(getindex.(vol_paths[t], 1)) for t in 1:T] + lines!(ax_g, dates, g_mean; color=:black) + + # Idiosyncratic log-vols in a nested 2×3 grid so they don't share column + # boundaries with ax_g above. + industry_grid = fig[2, 1] = GridLayout() + colors = Makie.wong_colors() + for (i, ind) in enumerate(INDUSTRIES) + row = 1 + div(i - 1, 3) + col = (i - 1) % 3 + 1 + ax = Axis(industry_grid[row, col]; title="$ind log-vol u_{$(i),t}", xlabel="Date") + u_mean = [mean(getindex.(vol_paths[t], i + 1)) for t in 1:T] + lines!(ax, dates, u_mean; color=colors[i]) + end + + return fig +end + +function plot_chains(chain; burnin=0) + param_names = string.(names(chain, :parameters)) + n_iter = size(chain, 1) - burnin + fig = Figure(; size=(1100, 80 * length(param_names)), fontsize=11) + for (i, pname) in enumerate(param_names) + ax = Axis(fig[i, 1]; ylabel=pname) + samples = Array(chain[(burnin + 1):end, pname, 1]) + lines!(ax, 1:n_iter, samples; linewidth=0.6) + end + return fig +end diff --git a/GeneralisedFilters/src/GFTest/models/dummy_linear_gaussian.jl b/GeneralisedFilters/src/GFTest/models/dummy_linear_gaussian.jl index 00dc4b6b..5d13774d 100644 --- a/GeneralisedFilters/src/GFTest/models/dummy_linear_gaussian.jl +++ b/GeneralisedFilters/src/GFTest/models/dummy_linear_gaussian.jl @@ -14,7 +14,7 @@ dynamics, this model can be used in Rao-Blackwellised settings. """ -export InnerDynamics, create_dummy_linear_gaussian_model +export InnerDynamics, create_dummy_linear_gaussian_model, with_inner_drift """ Inner dynamics of the dummy linear Gaussian model. @@ -152,3 +152,32 @@ function create_dummy_linear_gaussian_model( return full_model, hier_model end + +""" + with_inner_drift(model::HierarchicalSSM, b) + +Return a copy of a dummy linear Gaussian `HierarchicalSSM` with inner drift replaced by `b`. +The helper preserves the existing inner drift container type (e.g. `Vector`/`SVector`). 
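+
+For example, given a hierarchical model `hier_model` built by
+`create_dummy_linear_gaussian_model`, `with_inner_drift(hier_model, [0.5, -0.5])`
+returns an otherwise identical model whose inner drift is `[0.5, -0.5]`, converted to
+match the container type of the original drift (an `SVector` drift stays an `SVector`).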
+""" +function with_inner_drift(model::HierarchicalSSM, b::AbstractVector) + inner_dyn = model.inner_model.dyn + b_typed = _convert_like(b, inner_dyn.b) + new_inner_dyn = InnerDynamics(inner_dyn.A, b_typed, inner_dyn.C, inner_dyn.Q) + return HierarchicalSSM( + model.outer_prior, + model.outer_dyn, + model.inner_model.prior, + new_inner_dyn, + model.inner_model.obs, + ) +end + +function _convert_like( + x::AbstractVector, template::StaticArrays.StaticVector{N,T} +) where {N,T} + return SVector{N,T}(x) +end + +function _convert_like(x::AbstractVector, template::AbstractVector{T}) where {T} + return Vector{T}(x) +end diff --git a/GeneralisedFilters/src/GFTest/models/linear_gaussian.jl b/GeneralisedFilters/src/GFTest/models/linear_gaussian.jl index 401d5e9b..2e7347a1 100644 --- a/GeneralisedFilters/src/GFTest/models/linear_gaussian.jl +++ b/GeneralisedFilters/src/GFTest/models/linear_gaussian.jl @@ -1,6 +1,8 @@ using StaticArrays import PDMats: PDMat +export augment_drift_model, augmented_kf_drift_posterior + function create_linear_gaussian_model( rng::AbstractRNG, Dx::Integer, @@ -214,6 +216,33 @@ end ## GRADIENT TEST HELPERS ## +""" + setup_kf_rrule_params(rng, Dx, Dy, T) + +Extract all parameters needed for testing `kf_loglikelihood` and its rrule. +Returns `(μ0, Σ0, A, b, Q, H, c, R, ys_vec)` as dense (non-static) arrays. +""" +function setup_kf_rrule_params(rng::AbstractRNG, Dx::Integer, Dy::Integer, T::Integer) + model = create_linear_gaussian_model(rng, Dx, Dy) + _, _, ys = SSMProblems.sample(rng, model, T) + + pr = SSMProblems.prior(model) + dy = SSMProblems.dyn(model) + ob = SSMProblems.obs(model) + + μ0 = Vector(GeneralisedFilters.calc_μ0(pr)) + Σ0 = PDMat(Matrix(GeneralisedFilters.calc_Σ0(pr))) + A = Matrix(GeneralisedFilters.calc_A(dy, 1)) + b = Vector(GeneralisedFilters.calc_b(dy, 1)) + Q = PDMat(Matrix(GeneralisedFilters.calc_Q(dy, 1))) + H = Matrix(GeneralisedFilters.calc_H(ob, 1)) + c = Vector(GeneralisedFilters.calc_c(ob, 1)) + R = PDMat(Matrix(GeneralisedFilters.calc_R(ob, 1))) + ys_vec = [Vector(y) for y in ys] + + return μ0, Σ0, A, b, Q, H, c, R, ys_vec +end + """ setup_gradient_test(rng; D=2, T=3) @@ -255,6 +284,8 @@ function setup_gradient_test(rng::AbstractRNG; D::Int=2, T::Int=3) ∂R_total = @SMatrix zeros(D, D) ∂A_total = @SMatrix zeros(D, D) ∂b_total = @SVector zeros(D) + ∂H_total = @SMatrix zeros(D, D) + ∂c_total = @SVector zeros(D) for t in T:-1:1 ∂μ_pred, ∂Σ_pred = GeneralisedFilters.backward_gradient_update( @@ -266,6 +297,8 @@ function setup_gradient_test(rng::AbstractRNG; D::Int=2, T::Int=3) ∂μ_pred, ∂Σ_pred, μ_prevs[t], Σ_prevs[t], A ) ∂b_total += GeneralisedFilters.gradient_b(∂μ_pred) + ∂H_total += GeneralisedFilters.gradient_H(∂μ, ∂Σ, caches[t], caches[t].Σ_pred, H) + ∂c_total += GeneralisedFilters.gradient_c(∂μ, caches[t]) ∂μ, ∂Σ = GeneralisedFilters.backward_gradient_predict(∂μ_pred, ∂Σ_pred, A) end @@ -292,6 +325,8 @@ function setup_gradient_test(rng::AbstractRNG; D::Int=2, T::Int=3) ∂R_total, ∂A_total, ∂b_total, + ∂H_total, + ∂c_total, ∂μ0, ∂Σ0, ) @@ -362,6 +397,23 @@ function make_nll_func(model, ys, param::Symbol) _, ll = GeneralisedFilters.filter(m, GeneralisedFilters.KF(), ys) return -ll end + elseif param == :H + return function (x) + H_new = reshape(x, Dy, Dx) + m = GeneralisedFilters.create_homogeneous_linear_gaussian_model( + μ0, Σ0, A, b, Q, H_new, c, R + ) + _, ll = GeneralisedFilters.filter(m, GeneralisedFilters.KF(), ys) + return -ll + end + elseif param == :c + return function (x) + m = 
GeneralisedFilters.create_homogeneous_linear_gaussian_model( + μ0, Σ0, A, b, Q, H, x, R + ) + _, ll = GeneralisedFilters.filter(m, GeneralisedFilters.KF(), ys) + return -ll + end elseif param == :μ0 return function (x) m = GeneralisedFilters.create_homogeneous_linear_gaussian_model( @@ -383,3 +435,195 @@ function make_nll_func(model, ys, param::Symbol) error("Unknown parameter: $param") end end + +""" + augment_drift_model(model, drift_indices; σ²_b=4.0, μ_b=nothing, ε=1e-12, static_arrays=nothing) + +Construct an augmented linear Gaussian model where selected drift components are treated as +latent constants. If `drift_indices = [i1, ..., ik]`, then the augmented state is +`[x; b_unknown]` with `b_unknown' = b_unknown + ϵ`, `ϵ ~ N(0, εI)`. + +Returns a named tuple with: +- `model`: augmented linear Gaussian model +- `drift_slice`: index range for unknown drift in the augmented state +- `drift_indices`: validated drift indices in the original state +""" +function augment_drift_model( + model, + drift_indices; + σ²_b::Union{Real,AbstractMatrix}=4.0, + μ_b::Union{Nothing,AbstractVector}=nothing, + ε::Real=1e-12, + static_arrays::Union{Nothing,Bool}=nothing, +) + pr = SSMProblems.prior(model) + dy = SSMProblems.dyn(model) + ob = SSMProblems.obs(model) + + μ0_raw = GeneralisedFilters.calc_μ0(pr) + Σ0_raw = GeneralisedFilters.calc_Σ0(pr) + A_raw = GeneralisedFilters.calc_A(dy, 1) + b_raw = GeneralisedFilters.calc_b(dy, 1) + Q_raw = GeneralisedFilters.calc_Q(dy, 1) + H_raw = GeneralisedFilters.calc_H(ob, 1) + c_raw = GeneralisedFilters.calc_c(ob, 1) + R_raw = GeneralisedFilters.calc_R(ob, 1) + + σ²_b_eltype = σ²_b isa Real ? typeof(σ²_b) : eltype(σ²_b) + μ_b_eltype = isnothing(μ_b) ? eltype(b_raw) : eltype(μ_b) + T = promote_type( + eltype(μ0_raw), + eltype(Σ0_raw), + eltype(A_raw), + eltype(b_raw), + eltype(Q_raw), + eltype(H_raw), + eltype(c_raw), + eltype(R_raw), + σ²_b_eltype, + μ_b_eltype, + typeof(ε), + ) + + μ0 = Vector{T}(μ0_raw) + Σ0 = Matrix{T}(Σ0_raw) + A = Matrix{T}(A_raw) + b = Vector{T}(b_raw) + Q = Matrix{T}(Q_raw) + H = Matrix{T}(H_raw) + c = Vector{T}(c_raw) + R = Matrix{T}(R_raw) + + Dx = length(μ0) + Dy = length(c) + idx = _collect_drift_indices(drift_indices) + _validate_drift_indices(idx, Dx) + K = length(idx) + + μ_b_vec = if isnothing(μ_b) + zeros(T, K) + else + μ_vec = Vector{T}(μ_b) + length(μ_vec) == K || throw(ArgumentError("μ_b must have length $K.")) + μ_vec + end + Σ_b = _drift_prior_covariance(σ²_b, K, T) + + b_fixed = copy(b) + b_fixed[idx] .= zero(T) + + A_aug = zeros(T, Dx + K, Dx + K) + A_aug[1:Dx, 1:Dx] = A + for (j, i) in enumerate(idx) + A_aug[i, Dx + j] = one(T) + end + @inbounds for j in 1:K + A_aug[Dx + j, Dx + j] = one(T) + end + + b_aug = vcat(b_fixed, zeros(T, K)) + + Q_aug = zeros(T, Dx + K, Dx + K) + Q_aug[1:Dx, 1:Dx] = Q + @inbounds for j in 1:K + Q_aug[Dx + j, Dx + j] = T(ε) + end + + H_aug = zeros(T, Dy, Dx + K) + H_aug[:, 1:Dx] = H + + μ0_aug = vcat(μ0, μ_b_vec) + Σ0_aug = zeros(T, Dx + K, Dx + K) + Σ0_aug[1:Dx, 1:Dx] = Σ0 + Σ0_aug[(Dx + 1):end, (Dx + 1):end] = Σ_b + + use_static = isnothing(static_arrays) ? 
_has_static_lg_arrays(model) : static_arrays + + μ0_out = _maybe_static_vector(μ0_aug, use_static) + A_out = _maybe_static_matrix(A_aug, use_static) + b_out = _maybe_static_vector(b_aug, use_static) + H_out = _maybe_static_matrix(H_aug, use_static) + c_out = _maybe_static_vector(c, use_static) + Σ0_out = _maybe_static_matrix((Σ0_aug + Σ0_aug') / 2, use_static) + Q_out = _maybe_static_matrix((Q_aug + Q_aug') / 2, use_static) + R_out = _maybe_static_matrix((R + R') / 2, use_static) + + aug_model = create_homogeneous_linear_gaussian_model( + μ0_out, PDMat(Σ0_out), A_out, b_out, PDMat(Q_out), H_out, c_out, PDMat(R_out) + ) + + return (model=aug_model, drift_slice=(Dx + 1):(Dx + K), drift_indices=idx) +end + +""" + augmented_kf_drift_posterior(model, observations, drift_indices; kwargs...) + +Run a Kalman filter on the augmented model from [`augment_drift_model`](@ref) and return +posterior mean/std for unknown drift components. +""" +function augmented_kf_drift_posterior(model, observations, drift_indices; kwargs...) + aug = augment_drift_model(model, drift_indices; kwargs...) + state, ll = GeneralisedFilters.filter(aug.model, GeneralisedFilters.KF(), observations) + Σ = Matrix(state.Σ) + μ_post = state.μ[aug.drift_slice] + σ_post = sqrt.(diag(Σ)[aug.drift_slice]) + return (; + state, + log_likelihood=ll, + mean=μ_post, + std=σ_post, + augmented_model=aug.model, + drift_slice=aug.drift_slice, + ) +end + +function _collect_drift_indices(drift_indices::Integer) + return [Int(drift_indices)] +end + +function _collect_drift_indices(drift_indices) + return collect(Int, drift_indices) +end + +function _validate_drift_indices(idx::AbstractVector{<:Integer}, Dx::Integer) + isempty(idx) && throw(ArgumentError("drift_indices cannot be empty.")) + any(i -> i < 1 || i > Dx, idx) && + throw(ArgumentError("drift_indices must be between 1 and $Dx.")) + length(unique(idx)) == length(idx) || + throw(ArgumentError("drift_indices must not contain duplicates.")) + return nothing +end + +function _drift_prior_covariance(σ²_b::Real, K::Integer, ::Type{T}) where {T} + Σ = zeros(T, K, K) + @inbounds for i in 1:K + Σ[i, i] = T(σ²_b) + end + return Σ +end + +function _drift_prior_covariance(σ²_b::AbstractMatrix, K::Integer, ::Type{T}) where {T} + size(σ²_b) == (K, K) || throw(ArgumentError("σ²_b matrix must have size ($K, $K).")) + return Matrix{T}(σ²_b) +end + +function _has_static_lg_arrays(model) + return GeneralisedFilters.calc_μ0(SSMProblems.prior(model)) isa StaticArray || + GeneralisedFilters.calc_A(SSMProblems.dyn(model), 1) isa StaticArray || + GeneralisedFilters.calc_H(SSMProblems.obs(model), 1) isa StaticArray +end + +function _maybe_static_vector(x::AbstractVector, static_arrays::Bool) + if static_arrays + return SVector{length(x),eltype(x)}(x) + end + return x +end + +function _maybe_static_matrix(X::AbstractMatrix, static_arrays::Bool) + if static_arrays + nr, nc = size(X) + return SMatrix{nr,nc,eltype(X)}(X) + end + return X +end diff --git a/GeneralisedFilters/src/GFTest/utils.jl b/GeneralisedFilters/src/GFTest/utils.jl index f61a95e2..a1518132 100644 --- a/GeneralisedFilters/src/GFTest/utils.jl +++ b/GeneralisedFilters/src/GFTest/utils.jl @@ -1,3 +1,16 @@ +""" + make_pd(M) + +Project a square matrix to the nearest positive-definite matrix by symmetrising and +clipping negative eigenvalues to `1e-8`. Returns a `PDMat`. 
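+
+All eigenvalues below `1e-8` are raised to `1e-8` (not only negative ones), so the result
+is strictly positive definite. For example:
+
+```julia
+M = [1.0 2.0; 2.0 1.0]  # symmetric but indefinite: eigenvalues -1 and 3
+P = make_pd(M)          # PDMat with eigenvalues ≈ 1e-8 and 3
+```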
+""" +function make_pd(M) + M_sym = (M + M') / 2 + λ, V = eigen(M_sym) + λ_clipped = max.(λ, 1e-8) + return PDMat(Symmetric(V * Diagonal(λ_clipped) * V')) +end + function rand_cov( rng::AbstractRNG, T::Type{<:Real}, d::Int; scale=T(1.0), var_range=(T(0.8), T(1.2)) ) diff --git a/GeneralisedFilters/src/GeneralisedFilters.jl b/GeneralisedFilters/src/GeneralisedFilters.jl index 1fdf3024..407137bb 100644 --- a/GeneralisedFilters/src/GeneralisedFilters.jl +++ b/GeneralisedFilters/src/GeneralisedFilters.jl @@ -1,7 +1,9 @@ module GeneralisedFilters using AbstractMCMC: AbstractMCMC, AbstractSampler +using ADTypes: ADTypes import Distributions: MvNormal, params +using DynamicPPL: DynamicPPL import Random: AbstractRNG, default_rng, rand import SSMProblems: prior, dyn, obs using OffsetArrays @@ -153,6 +155,16 @@ include("algorithms/rbpf.jl") include("ancestor_sampling.jl") +# Conditional SMC (particle Gibbs trajectory sampling) +include("algorithms/csmc.jl") + +# Integrations (log-density interface for particle Gibbs) +include("integrations/logdensity.jl") +include("integrations/kalman_rrule.jl") +include("integrations/ssm_trajectory.jl") +include("integrations/particle_gibbs.jl") +include("integrations/turing.jl") + # Unit-testing helper module include("GFTest/GFTest.jl") diff --git a/GeneralisedFilters/src/algorithms/kalman.jl b/GeneralisedFilters/src/algorithms/kalman.jl index e24dd2d1..0f5d2afd 100644 --- a/GeneralisedFilters/src/algorithms/kalman.jl +++ b/GeneralisedFilters/src/algorithms/kalman.jl @@ -79,23 +79,6 @@ function update( return state, ll end -function kalman_update(state, obs_params, observation, jitter) - μ, Σ = params(state) - H, c, R = obs_params - - z = _compute_innovation(μ, H, c, observation) - S = _compute_innovation_cov(Σ, H, R) - K = _compute_kalman_gain(Σ, H, S) - _, Σ̂_raw = _compute_joseph_update(Σ, K, H, R) - - μ̂ = μ + K * z - Σ̂ = _apply_jitter_and_wrap(Σ̂_raw, jitter) - - ll = logpdf(MvNormal(z, S), zero(z)) - - return MvNormal(μ̂, Σ̂), ll -end - ## KALMAN SMOOTHER ######################################################################### struct KalmanSmoother <: AbstractSmoother end diff --git a/GeneralisedFilters/src/algorithms/kalman_gradient.jl b/GeneralisedFilters/src/algorithms/kalman_gradient.jl index c2c2a6f7..ade3510f 100644 --- a/GeneralisedFilters/src/algorithms/kalman_gradient.jl +++ b/GeneralisedFilters/src/algorithms/kalman_gradient.jl @@ -5,6 +5,29 @@ export update_with_cache export backward_gradient_update, backward_gradient_predict export gradient_Q, gradient_R, gradient_A, gradient_b, gradient_H, gradient_c, gradient_y +## CORE UPDATE WITH CACHE ###################################################################### + +# Raw-parameter update step that all higher-level update functions delegate to. +# Returns the filtered state, log-likelihood increment, and gradient cache. 
+function _kalman_update_cached(state::MvNormal, H, c, R, y, jitter) + μ_pred, Σ_pred = params(state) + z = _compute_innovation(μ_pred, H, c, y) + S = _compute_innovation_cov(Σ_pred, H, R) + K = _compute_kalman_gain(Σ_pred, H, S) + I_KH, Σ_filt_raw = _compute_joseph_update(Σ_pred, K, H, R) + μ_filt = μ_pred + K * z + Σ_filt = _apply_jitter_and_wrap(Σ_filt_raw, jitter) + ll = logpdf(MvNormal(z, S), zero(z)) + cache = KalmanGradientCache(μ_pred, Σ_pred, μ_filt, Σ_filt, S, K, z, I_KH) + return MvNormal(μ_filt, Σ_filt), ll, cache +end + +function kalman_update(state, obs_params, observation, jitter) + H, c, R = obs_params + state, ll, _ = _kalman_update_cached(state, H, c, R, observation, jitter) + return state, ll +end + """ KalmanGradientCache @@ -53,23 +76,8 @@ function update_with_cache( observation::AbstractVector; kwargs..., ) - μ_pred, Σ_pred = params(state) H, c, R = calc_params(obs, iter; kwargs...) - - z = _compute_innovation(μ_pred, H, c, observation) - S = _compute_innovation_cov(Σ_pred, H, R) - K = _compute_kalman_gain(Σ_pred, H, S) - I_KH, Σ_filt_raw = _compute_joseph_update(Σ_pred, K, H, R) - - μ_filt = μ_pred + K * z - Σ_filt = _apply_jitter_and_wrap(Σ_filt_raw, algo.jitter) - - filtered_state = MvNormal(μ_filt, Σ_filt) - ll = logpdf(MvNormal(H * μ_pred + c, S), observation) - - cache = KalmanGradientCache(μ_pred, Σ_pred, μ_filt, Σ_filt, S, K, z, I_KH) - - return filtered_state, ll, cache + return _kalman_update_cached(state, H, c, R, observation, algo.jitter) end ## BACKWARD GRADIENT PROPAGATION ############################################################## @@ -208,29 +216,35 @@ function gradient_b(∂μ_pred::AbstractVector) end """ - gradient_H(∂μ_filt, ∂Σ_filt, cache, Σ_pred) + gradient_H(∂μ_filt, ∂Σ_filt, cache, Σ_pred, H) Compute gradient of NLL w.r.t. observation matrix H. -Derived via chain rule through z = y - H*μ_pred - c, S = H*Σ_pred*H' + R, and the update. 
+Derived via chain rule using the information form P_filt⁻¹ = P_pred⁻¹ + H'R⁻¹H to decouple +P_filt from K, then tracing H's effect through: +- NLL local term (via z and S) +- Filtered mean (via z = y - Hμ_pred - c, and K = P_filt H' R⁻¹) +- Filtered covariance (via the information form) """ function gradient_H( - ∂μ_filt::AbstractVector, ∂Σ_filt::AbstractMatrix, cache::KalmanGradientCache, Σ_pred + ∂μ_filt::AbstractVector, ∂Σ_filt::AbstractMatrix, cache::KalmanGradientCache, Σ_pred, H ) - μ_pred, z, S, K, I_KH = cache.μ_pred, cache.z, cache.S, cache.K, cache.I_KH + μ_pred, μ_filt, z, S, K, I_KH = cache.μ_pred, + cache.μ_filt, cache.z, cache.S, cache.K, + cache.I_KH S_inv_z = S \ z - S_inv = S \ I + S_inv = inv(S) + P_filt = I_KH * Σ_pred # Local NLL derivative: l = 0.5*(log|S| + z'S⁻¹z) - # ∂l/∂H = S⁻¹*H*Σ_pred - S⁻¹*z*μ_pred' - S⁻¹*z*z'*S⁻¹*H*Σ_pred - ∂l_∂H = S_inv * Σ_pred - S_inv_z * μ_pred' - (S_inv_z * S_inv_z') * Σ_pred + ∂l_∂H = S_inv * H * Σ_pred - S_inv_z * μ_pred' - (S_inv_z * S_inv_z') * H * Σ_pred - # Contribution through filtered mean: μ_filt = μ_pred + K*z - # ∂z/∂H = -μ_pred' (outer product form for each element) - ∂via_μ = -K' * ∂μ_filt * μ_pred' + # Contribution through filtered mean: + # δμ_filt = P_filt*δH'*S⁻¹*z - K*δH*μ_filt + ∂via_μ = S_inv_z * ∂μ_filt' * P_filt - K' * ∂μ_filt * μ_filt' - # Contribution through filtered covariance via I_KH - ∂via_Σ = -K' * ∂Σ_filt * I_KH * μ_pred' - I_KH' * ∂Σ_filt * K * μ_pred' + # Contribution through filtered covariance (information form) + ∂via_Σ = -2 * K' * ∂Σ_filt * P_filt return ∂l_∂H + ∂via_μ + ∂via_Σ end diff --git a/GeneralisedFilters/src/integrations/kalman_rrule.jl b/GeneralisedFilters/src/integrations/kalman_rrule.jl new file mode 100644 index 00000000..ea4145ae --- /dev/null +++ b/GeneralisedFilters/src/integrations/kalman_rrule.jl @@ -0,0 +1,94 @@ +using ChainRulesCore: ChainRulesCore, NoTangent, @not_implemented + +""" + ChainRulesCore.rrule(::typeof(kf_loglikelihood), μ0, Σ0, As, bs, Qs, Hs, cs, Rs, ys) + +Reverse-mode AD rule for the Kalman filter log-likelihood. The forward pass runs the KF +with gradient caching; the pullback computes analytical gradients using the backward recursion +from `kalman_gradient.jl`. 
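+
+For example, assuming the filter parameters and observations are in scope under the
+argument names above, any reverse-mode backend that consumes ChainRules (e.g. Zygote)
+picks up this rule automatically:
+
+```julia
+using Zygote
+
+∂μ0, = Zygote.gradient(μ -> kf_loglikelihood(μ, Σ0, As, bs, Qs, Hs, cs, Rs, ys), μ0)
+```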
+""" +function ChainRulesCore.rrule( + ::typeof(kf_loglikelihood), μ0, Σ0, As, bs, Qs, Hs, cs, Rs, ys +) + T = length(ys) + + # Forward pass with caching + state = MvNormal(μ0, Σ0) + μ_prevs = Vector{typeof(μ0)}(undef, T) + Σ_prevs = Vector{typeof(Σ0)}(undef, T) + ll = zero(eltype(μ0)) + + # Compute first cache to get concrete type, then allocate typed vector + μ_prevs[1], Σ_prevs[1] = params(state) + state = kalman_predict(state, (As[1], bs[1], Qs[1])) + state, ll_inc, first_cache = _kalman_update_cached( + state, Hs[1], cs[1], Rs[1], ys[1], nothing + ) + ll += ll_inc + caches = Vector{typeof(first_cache)}(undef, T) + caches[1] = first_cache + + for t in 2:T + μ_prevs[t], Σ_prevs[t] = params(state) + state = kalman_predict(state, (As[t], bs[t], Qs[t])) + state, ll_inc, caches[t] = _kalman_update_cached( + state, Hs[t], cs[t], Rs[t], ys[t], nothing + ) + ll += ll_inc + end + + function kf_loglikelihood_pullback(Δll) + # Backward pass: compute NLL gradients, then negate and scale by Δll + ∂μ = zero(μ0) + ∂Σ = zero(As[1]) + + ∂As = similar(As) + ∂bs = similar(bs) + ∂Qs = similar(As) + ∂Hs = similar(Hs) + ∂cs = similar(cs) + ∂Rs = Vector{typeof(zero(cs[1]) * zero(cs[1])')}(undef, T) + + for t in T:-1:1 + H, R = Hs[t], Rs[t] + cache = caches[t] + + # Obs parameter gradients (NLL convention) + ∂cs[t] = gradient_c(∂μ, cache) + ∂Hs[t] = gradient_H(∂μ, ∂Σ, cache, cache.Σ_pred, Hs[t]) + ∂Rs[t] = gradient_R(∂μ, ∂Σ, cache) + + # Propagate through update step + ∂μ_pred, ∂Σ_pred = backward_gradient_update(∂μ, ∂Σ, cache, H, R) + + # Dynamics parameter gradients (NLL convention) + ∂bs[t] = gradient_b(∂μ_pred) + ∂As[t] = gradient_A(∂μ_pred, ∂Σ_pred, μ_prevs[t], Σ_prevs[t], As[t]) + ∂Qs[t] = gradient_Q(∂Σ_pred) + + # Propagate through predict step + ∂μ, ∂Σ = backward_gradient_predict(∂μ_pred, ∂Σ_pred, As[t]) + end + + # Initial state gradients + ∂μ0_nll = ∂μ + ∂Σ0_nll = ∂Σ + + # Convert NLL gradients → LL gradients and scale by Δll + s = -Δll + return ( + NoTangent(), + s * ∂μ0_nll, + s * ∂Σ0_nll, + s .* ∂As, + s .* ∂bs, + s .* ∂Qs, + s .* ∂Hs, + s .* ∂cs, + s .* ∂Rs, + @not_implemented("Gradient w.r.t. observations not supported"), + ) + end + + return ll, kf_loglikelihood_pullback +end diff --git a/GeneralisedFilters/src/integrations/logdensity.jl b/GeneralisedFilters/src/integrations/logdensity.jl new file mode 100644 index 00000000..742f8d66 --- /dev/null +++ b/GeneralisedFilters/src/integrations/logdensity.jl @@ -0,0 +1,242 @@ +using LogDensityProblems: LogDensityProblems +import Distributions: logpdf + +export trajectory_logdensity, inner_loglikelihood, kf_loglikelihood +export ParameterisedSSM, SSMParameterLogDensity + +## TRAJECTORY LOG-DENSITY ###################################################################### + +""" + trajectory_logdensity(model::StateSpaceModel, trajectory, observations) + +Compute the joint log-density of a trajectory and observations under a regular SSM: + + log p(x₀) + Σ_t [log p(xₜ | xₜ₋₁) + log p(yₜ | xₜ)] + +The `trajectory` should be an OffsetVector indexed from 0 (matching the prior at time 0). 
+""" +function trajectory_logdensity( + model::StateSpaceModel, trajectory, observations::AbstractVector +) + T = length(observations) + ll = logpdf(SSMProblems.distribution(prior(model)), trajectory[0]) + for t in 1:T + ll += SSMProblems.logdensity(dyn(model), t, trajectory[t - 1], trajectory[t]) + ll += SSMProblems.logdensity(obs(model), t, trajectory[t], observations[t]) + end + return ll +end + +""" + trajectory_logdensity(model::HierarchicalSSM, af::AbstractFilter, outer_trajectory, observations) + +Compute the joint log-density of an outer trajectory and observations under a hierarchical SSM: + + log p(u₀) + Σ_t log p(uₜ | uₜ₋₁) + log p(y₁:T | u₀:T) + +The last term is the marginal log-likelihood of the inner model conditioned on the outer +trajectory, computed by `inner_loglikelihood` using the analytical filter `af`. + +The `outer_trajectory` should be an OffsetVector indexed from 0. +""" +function trajectory_logdensity( + model::HierarchicalSSM, + af::AbstractFilter, + outer_trajectory, + observations::AbstractVector, +) + T = length(observations) + + # Outer prior + transitions + ll = logpdf(SSMProblems.distribution(model.outer_prior), outer_trajectory[0]) + for t in 1:T + ll += SSMProblems.logdensity( + model.outer_dyn, t, outer_trajectory[t - 1], outer_trajectory[t] + ) + end + + # Inner marginal log-likelihood via analytical filter + ll += inner_loglikelihood(af, model.inner_model, outer_trajectory, observations) + + return ll +end + +## INNER LOG-LIKELIHOOD ######################################################################## + +""" + inner_loglikelihood(af::AbstractFilter, inner_model, outer_trajectory, observations) + +Compute the marginal log-likelihood log p(y₁:T | u₀:T) of the inner model conditioned on +the outer trajectory, using the analytical filter `af`. + +Dispatches on the filter type to select the appropriate algorithm. +""" +function inner_loglikelihood end + +""" + inner_loglikelihood(af::KalmanFilter, inner_model, states, observations) + +KalmanFilter specialization: extracts linear-Gaussian parameters at each timestep and +delegates to `kf_loglikelihood`. + +`states` is a 1-indexed vector with `states[1] = u₀` and `states[t+1] = uₜ`. +For OffsetVector trajectories (0-indexed), a converting wrapper is provided. +""" +function inner_loglikelihood( + ::KalmanFilter, + inner_model::StateSpaceModel, + states::AbstractVector, + observations::AbstractVector, +) + T = length(observations) + inner_dyn = inner_model.dyn + inner_obs = inner_model.obs + inner_pr = inner_model.prior + + μ0 = calc_μ0(inner_pr; new_outer=states[1]) + Σ0 = calc_Σ0(inner_pr; new_outer=states[1]) + + As = map(t -> calc_A(inner_dyn, t; prev_outer=states[t], new_outer=states[t + 1]), 1:T) + bs = map(t -> calc_b(inner_dyn, t; prev_outer=states[t], new_outer=states[t + 1]), 1:T) + Qs = map(t -> calc_Q(inner_dyn, t; prev_outer=states[t], new_outer=states[t + 1]), 1:T) + Hs = map(t -> calc_H(inner_obs, t; new_outer=states[t + 1]), 1:T) + cs = map(t -> calc_c(inner_obs, t; new_outer=states[t + 1]), 1:T) + Rs = map(t -> calc_R(inner_obs, t; new_outer=states[t + 1]), 1:T) + + return kf_loglikelihood(μ0, Σ0, As, bs, Qs, Hs, cs, Rs, observations) +end + +# Wrapper for OffsetVector trajectories (0-indexed), used by trajectory_logdensity. 
+function inner_loglikelihood( + af::KalmanFilter, + inner_model::StateSpaceModel, + outer_trajectory::OffsetVector, + observations::AbstractVector, +) + T = length(observations) + return inner_loglikelihood( + af, inner_model, [outer_trajectory[t] for t in 0:T], observations + ) +end + +## KF LOG-LIKELIHOOD ########################################################################### + +""" + kf_loglikelihood(μ0, Σ0, As, bs, Qs, Hs, cs, Rs, ys) + +Compute the marginal log-likelihood of observations under a linear-Gaussian model via the +Kalman filter forward pass. + +Accepts PDMat natively for `Σ0`, `Qs`, `Rs`. A `ChainRulesCore.rrule` is registered for this +function to enable efficient reverse-mode AD gradients using the analytical backward recursion +from `kalman_gradient.jl`. + +# Arguments +- `μ0`: Initial mean vector +- `Σ0`: Initial covariance (AbstractPDMat or AbstractMatrix) +- `As`: Vector of transition matrices, one per timestep +- `bs`: Vector of transition offsets, one per timestep +- `Qs`: Vector of process noise covariances, one per timestep +- `Hs`: Vector of observation matrices, one per timestep +- `cs`: Vector of observation offsets, one per timestep +- `Rs`: Vector of observation noise covariances, one per timestep +- `ys`: Vector of observations + +# Returns +Total log-likelihood: log p(y₁:T) = Σ_t log p(yₜ | y₁:ₜ₋₁) +""" +function kf_loglikelihood(μ0, Σ0, As, bs, Qs, Hs, cs, Rs, ys) + T = length(ys) + state = MvNormal(μ0, Σ0) + ll = zero(eltype(μ0)) + + for t in 1:T + state = kalman_predict(state, (As[t], bs[t], Qs[t])) + state, ll_inc, _ = _kalman_update_cached(state, Hs[t], cs[t], Rs[t], ys[t], nothing) + ll += ll_inc + end + + return ll +end + +## PARAMETERISED SSM ########################################################################### + +""" + ParameterisedSSM(build, observations) + +A parameterised state-space model that maps parameter vectors to concrete SSMs. + +# Fields +- `build`: A callable `θ -> AbstractStateSpaceModel` that constructs an SSM from parameters. + Fixed model components should be captured via closure. +- `observations`: The observation sequence y₁:T. + +# Example +```julia +function build_model(θ, fixed) + b = θ[1:2] + dyn = HomogeneousLinearGaussianLatentDynamics(fixed.A, b, fixed.Q) + return StateSpaceModel(fixed.prior, dyn, fixed.obs) +end + +pssm = ParameterisedSSM(θ -> build_model(θ, fixed), observations) +model = pssm.build(θ) # returns a concrete SSM +``` +""" +struct ParameterisedSSM{F,YT} + build::F + observations::YT +end + +## SSM PARAMETER LOG-DENSITY ################################################################### + +""" + SSMParameterLogDensity(prior, param_model, af, trajectory) + SSMParameterLogDensity(prior, param_model, trajectory) + +Log-density for SSM parameters θ conditioned on a fixed trajectory: + + log p(θ | trajectory, y) ∝ log p(θ) + log p(trajectory, y | θ) + +Implements the `LogDensityProblems` interface. The `trajectory` field should be a `Ref` so +it can be mutated between Gibbs iterations. 
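+
+A typical Gibbs sweep mutates the `Ref` between parameter updates. An illustrative sketch
+(`nuts_step` and `sample_trajectory` are placeholders, not package functions):
+
+```julia
+ld = SSMParameterLogDensity(prior, pssm, Ref(init_traj))
+for _ in 1:n_iters
+    θ = nuts_step(ld)                       # parameter update targeting this log-density
+    ld.trajectory[] = sample_trajectory(θ)  # CSMC update; mutate the Ref in place
+end
+```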
+ +# Fields +- `prior`: Prior distribution on θ (any Distributions.jl distribution) +- `param_model`: A `ParameterisedSSM` mapping θ to an SSM +- `af`: Inner analytical filter for HierarchicalSSM (e.g., `KalmanFilter()`), or `nothing` + for regular SSMs +- `trajectory`: A `Ref` holding the current reference trajectory (OffsetVector indexed from 0) +""" +struct SSMParameterLogDensity{PT,MT<:ParameterisedSSM,AFT,TT} + prior::PT + param_model::MT + af::AFT + trajectory::TT +end + +function SSMParameterLogDensity(prior, param_model::ParameterisedSSM, trajectory) + return SSMParameterLogDensity(prior, param_model, nothing, trajectory) +end + +function LogDensityProblems.capabilities(::Type{<:SSMParameterLogDensity}) + return LogDensityProblems.LogDensityOrder{0}() +end + +function LogDensityProblems.dimension(ld::SSMParameterLogDensity) + return length(ld.prior) +end + +function LogDensityProblems.logdensity(ld::SSMParameterLogDensity{<:Any,<:Any,Nothing}, θ) + model = ld.param_model.build(θ) + return logpdf(ld.prior, θ) + + trajectory_logdensity(model, ld.trajectory[], ld.param_model.observations) +end + +function LogDensityProblems.logdensity( + ld::SSMParameterLogDensity{<:Any,<:Any,<:AbstractFilter}, θ +) + model = ld.param_model.build(θ) + return logpdf(ld.prior, θ) + + trajectory_logdensity(model, ld.af, ld.trajectory[], ld.param_model.observations) +end diff --git a/GeneralisedFilters/src/integrations/particle_gibbs.jl b/GeneralisedFilters/src/integrations/particle_gibbs.jl new file mode 100644 index 00000000..9b7e84c5 --- /dev/null +++ b/GeneralisedFilters/src/integrations/particle_gibbs.jl @@ -0,0 +1,259 @@ +using LogDensityProblemsAD: LogDensityProblemsAD +using MCMCChains: MCMCChains + +export ParticleGibbs, ParticleGibbsModel + +## TYPES ##################################################################################### + +""" + ParticleGibbs{CS, PS, ADT} <: AbstractMCMC.AbstractSampler + +Particle Gibbs sampler that alternates between a parameter update (e.g., NUTS) and a +trajectory update (conditional SMC). + +# Fields +- `csmc::CS`: Conditional SMC sampler for trajectory updates (e.g., `CSMCAS(RBPF(BF(200), KF()))`) +- `param::PS`: Parameter sampler (e.g., `AdvancedHMC.NUTS(0.8)`) +- `adtype::ADT`: AD backend (`ADTypes.AbstractADType`). `nothing` uses AdvancedHMC's default + (ForwardDiff). For HierarchicalSSM models, specify a reverse-mode backend that uses + ChainRules (e.g., `AutoZygote()`). Requires the corresponding package to be loaded. + +# Examples +```julia + +# Regular SSM +ParticleGibbs(CSMC(BF(100)), NUTS(0.8)) + +# Hierarchical SSM (needs reverse-mode AD for KF rrule) +ParticleGibbs(CSMCAS(RBPF(BF(200), KF())), NUTS(0.8); adtype=AutoZygote()) +``` +""" +struct ParticleGibbs{CS<:ConditionalSMC,PS,ADT<:Union{Nothing,ADTypes.AbstractADType}} <: + AbstractMCMC.AbstractSampler + csmc::CS + param::PS + adtype::ADT +end + +function ParticleGibbs(csmc::ConditionalSMC, param; adtype=nothing) + return ParticleGibbs(csmc, param, adtype) +end + +""" + ParticleGibbsModel{PT, MT} <: AbstractMCMC.AbstractModel + +Model for particle Gibbs inference, combining a prior on parameters with a parameterised SSM. 
+ +# Fields +- `prior::PT`: Prior distribution on θ (any Distributions.jl distribution) +- `param_model::MT`: A `ParameterisedSSM` mapping θ to a concrete SSM + +# Examples +```julia +pssm = ParameterisedSSM(θ -> build_model(θ, fixed), observations) +model = ParticleGibbsModel(MvNormal(zeros(d), 4.0*I), pssm) +``` +""" +struct ParticleGibbsModel{PT,MT<:ParameterisedSSM} <: AbstractMCMC.AbstractModel + prior::PT + param_model::MT +end + +""" + ParticleGibbsState{VT, TT, PS, LDT} + +Internal state of the particle Gibbs sampler. + +# Fields +- `θ`: Current parameter vector +- `trajectory`: Current reference trajectory (OffsetVector) +- `param_state`: Parameter sampler state (e.g., `AdvancedHMC.HMCState`) +- `log_density`: `AbstractMCMC.LogDensityModel` wrapping the SSM log-density (persisted + so the trajectory `Ref` can be updated between steps) +""" +struct ParticleGibbsState{VT,TT,PS,LDT} + θ::VT + trajectory::TT + param_state::PS + log_density::LDT +end + +""" + ParticleGibbsTransition{VT, NT} + +A single transition of the particle Gibbs sampler, containing the parameter values and +diagnostics from the parameter sampler. +""" +struct ParticleGibbsTransition{VT,NT<:NamedTuple} + θ::VT + stat::NT +end + +## INNER FILTER EXTRACTION #################################################################### + +_get_inner_filter(::AbstractParticleFilter) = nothing +_get_inner_filter(pf::RBPF) = pf.af + +# Extract outer trajectory for the log-density (which only needs x, not the inner distribution) +_outer_trajectory(trajectory, ::Nothing) = trajectory +_outer_trajectory(trajectory, ::AbstractFilter) = map(s -> s.x, trajectory) + +## LOG-DENSITY MODEL CONSTRUCTION ############################################################# + +function _get_traj_ref(ld_model::AbstractMCMC.LogDensityModel) + ld = ld_model.logdensity + # Unwrap LogDensityProblemsAD wrapper if present + inner = if hasproperty(ld, :ℓ) + ld.ℓ + else + ld + end + return inner.trajectory +end + +function _create_log_density_model( + model::ParticleGibbsModel, af, trajectory, adtype::Nothing +) + if !isnothing(af) + throw( + ArgumentError( + "HierarchicalSSM models require a reverse-mode AD backend for gradient " * + "computation (the ChainRules rrule on kf_loglikelihood is not picked up " * + "by ForwardDiff). 
Specify `adtype=AutoZygote()` (or another reverse-mode " * + "backend) when constructing ParticleGibbs, and load the corresponding " * + "package (e.g., `using Zygote`).", + ), + ) + end + ld = SSMParameterLogDensity(model.prior, model.param_model, Ref(trajectory)) + return AbstractMCMC.LogDensityModel(ld) +end + +function _create_log_density_model( + model::ParticleGibbsModel, af, trajectory, adtype::ADTypes.AbstractADType +) + ld = if isnothing(af) + SSMParameterLogDensity(model.prior, model.param_model, Ref(trajectory)) + else + SSMParameterLogDensity(model.prior, model.param_model, af, Ref(trajectory)) + end + ld_with_grad = LogDensityProblemsAD.ADgradient(adtype, ld) + return AbstractMCMC.LogDensityModel(ld_with_grad) +end + +## ABSTRACTMCMC INTERFACE ##################################################################### + +function AbstractMCMC.step( + rng::AbstractRNG, + model::ParticleGibbsModel, + pg::ParticleGibbs; + initial_params=nothing, + kwargs..., +) + # Sample initial θ + θ = if isnothing(initial_params) + Vector{Float64}(rand(rng, model.prior)) + else + Vector{Float64}(initial_params) + end + + # Build SSM and run unconditional CSMC for initial trajectory + ssm = model.param_model.build(θ) + af = _get_inner_filter(pg.csmc.pf) + trajectory, _ = _csmc_sample(rng, ssm, pg.csmc, model.param_model.observations, nothing) + + # Create log-density model (uses outer-only trajectory for hierarchical models) + outer_traj = _outer_trajectory(trajectory, af) + ld_model = _create_log_density_model(model, af, outer_traj, pg.adtype) + + # Run initial parameter step + _, param_state = AbstractMCMC.step(rng, ld_model, pg.param; initial_params=θ, kwargs...) + + # Extract new θ and run CSMC + θ_new = AbstractMCMC.getparams(param_state) + ssm_new = model.param_model.build(θ_new) + trajectory_new, _ = _csmc_sample( + rng, ssm_new, pg.csmc, model.param_model.observations, trajectory + ) + + # Update trajectory ref with outer-only trajectory + traj_ref = _get_traj_ref(ld_model) + traj_ref[] = _outer_trajectory(trajectory_new, af) + + transition = ParticleGibbsTransition(θ_new, AbstractMCMC.getstats(param_state)) + state = ParticleGibbsState(θ_new, trajectory_new, param_state, ld_model) + + return transition, state +end + +function AbstractMCMC.step( + rng::AbstractRNG, + model::ParticleGibbsModel, + pg::ParticleGibbs, + state::ParticleGibbsState; + kwargs..., +) + # Update trajectory ref so the log-density reflects the current trajectory + af = _get_inner_filter(pg.csmc.pf) + traj_ref = _get_traj_ref(state.log_density) + traj_ref[] = _outer_trajectory(state.trajectory, af) + + # Run parameter step (picks up updated trajectory Ref) + _, param_state = AbstractMCMC.step( + rng, state.log_density, pg.param, state.param_state; kwargs... 
+ ) + + # Extract new θ and run CSMC (pass full trajectory for conditioning) + θ_new = AbstractMCMC.getparams(param_state) + ssm_new = model.param_model.build(θ_new) + trajectory_new, _ = _csmc_sample( + rng, ssm_new, pg.csmc, model.param_model.observations, state.trajectory + ) + + transition = ParticleGibbsTransition(θ_new, AbstractMCMC.getstats(param_state)) + new_state = ParticleGibbsState(θ_new, trajectory_new, param_state, state.log_density) + + return transition, new_state +end + +## CHAIN OUTPUT ############################################################################### + +function _build_chains(ts::Vector{<:ParticleGibbsTransition}, names::Vector{Symbol}) + n_samples = length(ts) + d = length(first(ts).θ) + + vals = Matrix{Float64}(undef, n_samples, d) + for (i, t) in enumerate(ts) + vals[i, :] = t.θ + end + + int_names = collect(Symbol, keys(first(ts).stat)) + internals = Matrix{Float64}(undef, n_samples, length(int_names)) + for (i, t) in enumerate(ts) + for (j, v) in enumerate(values(t.stat)) + internals[i, j] = Float64(v) + end + end + + all_vals = hcat(vals, internals) + all_names = vcat(names, int_names) + return MCMCChains.Chains(all_vals, all_names, (parameters=names, internals=int_names)) +end + +function AbstractMCMC.bundle_samples( + ts::Vector{<:ParticleGibbsTransition}, + ::ParticleGibbsModel, + ::ParticleGibbs, + state, + ::Type{MCMCChains.Chains}; + param_names=nothing, + kwargs..., +) + d = length(first(ts).θ) + names = if isnothing(param_names) + [Symbol("θ[$i]") for i in 1:d] + else + Symbol.(param_names) + end + return _build_chains(ts, names) +end diff --git a/GeneralisedFilters/src/integrations/ssm_trajectory.jl b/GeneralisedFilters/src/integrations/ssm_trajectory.jl new file mode 100644 index 00000000..2360f82c --- /dev/null +++ b/GeneralisedFilters/src/integrations/ssm_trajectory.jl @@ -0,0 +1,136 @@ +import Distributions: ContinuousMultivariateDistribution, _logpdf, _rand! +using Bijectors: Bijectors +import Bijectors: bijector + +export SSMTrajectory + +## SSMTrajectory DISTRIBUTION #################################################################### + +# NOTE: ContinuousMultivariateDistribution is a simplification. For discrete trajectories +# (HMMs), this would need to be generalized. + +""" + SSMTrajectory{MT, AFT, YT} <: ContinuousMultivariateDistribution + +A distribution over state-space model trajectories. Used in Turing `@model` blocks to mark +the trajectory variable for `ParticleGibbs`: + +```julia +# Regular SSM (no inner filter needed) +x ~ SSMTrajectory(ssm, ys) + +# HierarchicalSSM (inner analytical filter required for logpdf) +x ~ SSMTrajectory(ssm, KF(), ys) +``` + +The `logpdf` computes the joint log-density of the trajectory and observations: +- Regular SSM: `log p(x₀) + Σ_t [log p(xₜ|xₜ₋₁) + log p(yₜ|xₜ)]` +- HierarchicalSSM: outer transitions + inner marginal likelihood via `af` + +The inner analytical filter `af` must match the filter used in the `RBPF` within the +`ParticleGibbs` sampler (e.g., both `KF()` or both `KF(jitter=1e-8)`). +""" +struct SSMTrajectory{MT<:AbstractStateSpaceModel,AFT,YT} <: + ContinuousMultivariateDistribution + model::MT + af::AFT + observations::YT +end + +# Convenience constructor for regular SSMs (no inner analytical filter needed). 
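+# Equivalent to `SSMTrajectory(model, nothing, observations)`; this is the
+# `x ~ SSMTrajectory(ssm, ys)` form shown in the docstring above.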
+function SSMTrajectory(model::AbstractStateSpaceModel, observations)
+    return SSMTrajectory(model, nothing, observations)
+end
+
+# Trajectories are unconstrained; Bijectors ≥ 0.13 represents the identity bijector by
+# the `identity` function (`Bijectors.Identity` no longer exists).
+bijector(::SSMTrajectory) = identity
+
+## DIMENSIONS ####################################################################################
+
+function _state_dim(d::SSMTrajectory{<:StateSpaceModel})
+    return length(SSMProblems.distribution(prior(d.model)))
+end
+function _state_dim(d::SSMTrajectory{<:HierarchicalSSM})
+    return length(SSMProblems.distribution(d.model.outer_prior))
+end
+
+Base.length(d::SSMTrajectory) = (length(d.observations) + 1) * _state_dim(d)
+Base.eltype(::Type{<:SSMTrajectory}) = Float64
+
+## FLATTEN / UNFLATTEN ###########################################################################
+
+function _unflatten_trajectory(x_flat::AbstractVector, T::Integer, Dx::Integer)
+    states = [x_flat[((i * Dx) + 1):((i + 1) * Dx)] for i in 0:T]
+    return OffsetVector(states, -1)
+end
+
+function _flatten_trajectory(traj, T::Integer, Dx::Integer)
+    x_flat = Vector{Float64}(undef, (T + 1) * Dx)
+    for i in 0:T
+        x_flat[((i * Dx) + 1):((i + 1) * Dx)] = traj[i]
+    end
+    return x_flat
+end
+
+## LOGPDF ########################################################################################
+
+# These methods inline the trajectory_logdensity computation with 1-based indexing to avoid
+# constructing an OffsetVector, which Zygote cannot differentiate through.
+
+function _logpdf(d::SSMTrajectory{<:StateSpaceModel}, x_flat::AbstractVector{<:Real})
+    T = length(d.observations)
+    Dx = _state_dim(d)
+    # 1-indexed: states[1] = x₀, states[t+1] = xₜ
+    states = [x_flat[((i - 1) * Dx + 1):(i * Dx)] for i in 1:(T + 1)]
+
+    m = d.model
+    ll = logpdf(SSMProblems.distribution(prior(m)), states[1])
+    for t in 1:T
+        ll += SSMProblems.logdensity(dyn(m), t, states[t], states[t + 1])
+        ll += SSMProblems.logdensity(obs(m), t, states[t + 1], d.observations[t])
+    end
+    return ll
+end
+
+function _logpdf(d::SSMTrajectory{<:HierarchicalSSM}, x_flat::AbstractVector{<:Real})
+    T = length(d.observations)
+    Dx = _state_dim(d)
+    # 1-indexed: states[1] = u₀, states[t+1] = uₜ
+    states = [x_flat[((i - 1) * Dx + 1):(i * Dx)] for i in 1:(T + 1)]
+
+    m = d.model
+    ll = logpdf(SSMProblems.distribution(m.outer_prior), states[1])
+    for t in 1:T
+        ll += SSMProblems.logdensity(m.outer_dyn, t, states[t], states[t + 1])
+    end
+
+    ll += inner_loglikelihood(d.af, m.inner_model, states, d.observations)
+    return ll
+end
+
+## RAND ##########################################################################################
+
+function _rand!(
+    rng::AbstractRNG, d::SSMTrajectory{<:StateSpaceModel}, x::AbstractVector{<:Real}
+)
+    T = length(d.observations)
+    Dx = _state_dim(d)
+    x0, xs, _ = SSMProblems.sample(rng, d.model, T)
+    x[1:Dx] = x0
+    for t in 1:T
+        x[((t * Dx) + 1):((t + 1) * Dx)] = xs[t]
+    end
+    return x
+end
+
+function _rand!(
+    rng::AbstractRNG, d::SSMTrajectory{<:HierarchicalSSM}, x::AbstractVector{<:Real}
+)
+    T = length(d.observations)
+    Dx = _state_dim(d)
+    x0, _, xs, _, _ = SSMProblems.sample(rng, d.model, T)
+    x[1:Dx] = x0
+    for t in 1:T
+        x[((t * Dx) + 1):((t + 1) * Dx)] = xs[t]
+    end
+    return x
+end
diff --git a/GeneralisedFilters/src/integrations/turing.jl b/GeneralisedFilters/src/integrations/turing.jl
new file mode 100644
index 00000000..80119cd5
--- /dev/null
+++ b/GeneralisedFilters/src/integrations/turing.jl
@@ -0,0 +1,268 @@
+## CSMC CONTEXT ################################################################################
+
+"""
+    CSMCContext <: 
DynamicPPL.AbstractContext + +A DynamicPPL leaf context that intercepts `x ~ SSMTrajectory(...)` during model evaluation, +capturing the distribution and variable name so the sampler can extract the SSM and run CSMC. + +For non-SSMTrajectory variables, delegates to `DefaultContext`. +""" +struct CSMCContext <: DynamicPPL.AbstractContext + ssm_dist::Ref{Any} + traj_vn::Ref{Any} +end + +CSMCContext() = CSMCContext(Ref{Any}(nothing), Ref{Any}(nothing)) + +function DynamicPPL.tilde_assume!!( + ctx::CSMCContext, + dist::SSMTrajectory, + vn::DynamicPPL.VarName, + vi::DynamicPPL.AbstractVarInfo, +) + ctx.ssm_dist[] = dist + ctx.traj_vn[] = vn + x = DynamicPPL.getindex_internal(vi, vn) + return x, vi +end + +function DynamicPPL.tilde_assume!!( + ::CSMCContext, + dist::Distributions.Distribution, + vn::DynamicPPL.VarName, + vi::DynamicPPL.AbstractVarInfo, +) + return DynamicPPL.tilde_assume!!(DynamicPPL.DefaultContext(), dist, vn, vi) +end + +function DynamicPPL.tilde_observe!!( + ::CSMCContext, + right::Distributions.Distribution, + left, + vn::Union{DynamicPPL.VarName,Nothing}, + vi::DynamicPPL.AbstractVarInfo, +) + return DynamicPPL.tilde_observe!!(DynamicPPL.DefaultContext(), right, left, vn, vi) +end + +## TURING STATE ################################################################################ + +struct ParticleGibbsTuringState{VIT,TT,VNT,PS,CVI} + vi::VIT + trajectory::TT + traj_vn::VNT + param_state::PS + cond_vi_linked::CVI +end + +## HELPERS ##################################################################################### + +function _discover_ssm(model::DynamicPPL.Model, vi::DynamicPPL.AbstractVarInfo) + ctx = CSMCContext() + discovery_model = DynamicPPL.setleafcontext(model, ctx) + DynamicPPL.evaluate!!(discovery_model, vi) + return ctx.ssm_dist[]::SSMTrajectory, ctx.traj_vn[]::DynamicPPL.VarName +end + +function _condition_on_trajectory( + model::DynamicPPL.Model, traj_vn::DynamicPPL.VarName, traj_flat::AbstractVector +) + return DynamicPPL.condition(model, traj_vn => traj_flat) +end + +""" + _make_conditioned_ldf(cond_model, vi, traj_vn, adtype) + +Create a `LogDensityFunction` for the conditioned model (trajectory fixed) plus +a linked VarInfo for parameter recovery after NUTS steps. + +Returns `(ldf, cond_vi_linked)`. +""" +function _make_conditioned_ldf( + cond_model::DynamicPPL.Model, + vi::DynamicPPL.AbstractVarInfo, + traj_vn::DynamicPPL.VarName, + adtype, +) + param_vns = Base.filter(vn -> vn != traj_vn, keys(vi)) + cond_vi = DynamicPPL.subset(vi, param_vns) + cond_vi_linked = DynamicPPL.link(cond_vi, cond_model) + ldf = DynamicPPL.LogDensityFunction( + cond_model, DynamicPPL.getlogjoint_internal, cond_vi_linked; adtype=adtype + ) + return ldf, cond_vi_linked +end + +""" + _recover_params(cond_vi_linked, cond_model, θ_new) + +Given a linked VarInfo template and new unconstrained parameters from NUTS, +recover the constrained parameter values as a NamedTuple. +""" +function _recover_params(cond_vi_linked, cond_model, θ_new) + vi_updated = DynamicPPL.unflatten(cond_vi_linked, θ_new) + vi_constrained = DynamicPPL.invlink(vi_updated, cond_model) + return DynamicPPL.values_as(vi_constrained, NamedTuple) +end + +## ABSTRACTMCMC INTERFACE ###################################################################### + +function AbstractMCMC.step( + rng::AbstractRNG, + model::DynamicPPL.Model, + pg::ParticleGibbs; + initial_params=nothing, + kwargs..., +) + # 1. Create VarInfo (samples all variables from prior) + vi = DynamicPPL.VarInfo(rng, model) + + # 2. 
Discover trajectory variable + ssm_dist, traj_vn = _discover_ssm(model, vi) + + # 3. Run unconditional CSMC for initial trajectory + af = _get_inner_filter(pg.csmc.pf) + trajectory, _ = _csmc_sample( + rng, ssm_dist.model, pg.csmc, ssm_dist.observations, nothing + ) + + # 4. Flatten trajectory and update VarInfo + T_len = length(ssm_dist.observations) + Dx = _state_dim(ssm_dist) + outer_traj = _outer_trajectory(trajectory, af) + traj_flat = _flatten_trajectory(outer_traj, T_len, Dx) + vi = DynamicPPL.setindex!!(vi, traj_flat, traj_vn) + vi = last(DynamicPPL.evaluate!!(model, vi)) + + # 5. Condition on trajectory, create LogDensityFunction for parameter step + cond_model = _condition_on_trajectory(model, traj_vn, traj_flat) + ldf, cond_vi_linked = _make_conditioned_ldf(cond_model, vi, traj_vn, pg.adtype) + ld_model = AbstractMCMC.LogDensityModel(ldf) + + # 6. Initial parameter step + θ = cond_vi_linked[:] + _, param_state = AbstractMCMC.step(rng, ld_model, pg.param; initial_params=θ, kwargs...) + + # 7. Update VarInfo with new parameters, discover new SSM + θ_new = AbstractMCMC.getparams(param_state) + param_vals = _recover_params(cond_vi_linked, cond_model, θ_new) + for (k, v) in pairs(param_vals) + vn = DynamicPPL.VarName{k}() + vi = DynamicPPL.setindex!!(vi, v, vn) + end + vi = last(DynamicPPL.evaluate!!(model, vi)) + + # 8. Discover new SSM and run CSMC + ssm_dist_new, _ = _discover_ssm(model, vi) + trajectory_new, _ = _csmc_sample( + rng, ssm_dist_new.model, pg.csmc, ssm_dist_new.observations, trajectory + ) + + # 9. Update trajectory in VarInfo + outer_traj_new = _outer_trajectory(trajectory_new, af) + traj_flat_new = _flatten_trajectory(outer_traj_new, T_len, Dx) + vi = DynamicPPL.setindex!!(vi, traj_flat_new, traj_vn) + vi = last(DynamicPPL.evaluate!!(model, vi)) + + transition = ParticleGibbsTransition(θ_new, AbstractMCMC.getstats(param_state)) + state = ParticleGibbsTuringState( + vi, trajectory_new, traj_vn, param_state, cond_vi_linked + ) + + return transition, state +end + +function AbstractMCMC.step( + rng::AbstractRNG, + model::DynamicPPL.Model, + pg::ParticleGibbs, + state::ParticleGibbsTuringState; + kwargs..., +) + af = _get_inner_filter(pg.csmc.pf) + vi = state.vi + traj_vn = state.traj_vn + + # 1. Get trajectory dimensions + ssm_dist, _ = _discover_ssm(model, vi) + T_len = length(ssm_dist.observations) + Dx = _state_dim(ssm_dist) + + # 2. Condition on current trajectory + outer_traj = _outer_trajectory(state.trajectory, af) + traj_flat = _flatten_trajectory(outer_traj, T_len, Dx) + cond_model = _condition_on_trajectory(model, traj_vn, traj_flat) + + # 3. Create LogDensityFunction (new each step since trajectory changed) + ldf, cond_vi_linked = _make_conditioned_ldf(cond_model, vi, traj_vn, pg.adtype) + ld_model = AbstractMCMC.LogDensityModel(ldf) + + # 4. Parameter step (preserves adaptation via state.param_state) + _, param_state = AbstractMCMC.step( + rng, ld_model, pg.param, state.param_state; kwargs... + ) + + # 5. Extract new θ and update VarInfo + θ_new = AbstractMCMC.getparams(param_state) + param_vals = _recover_params(cond_vi_linked, cond_model, θ_new) + for (k, v) in pairs(param_vals) + vn = DynamicPPL.VarName{k}() + vi = DynamicPPL.setindex!!(vi, v, vn) + end + vi = last(DynamicPPL.evaluate!!(model, vi)) + + # 6. Discover new SSM and run CSMC + ssm_dist_new, _ = _discover_ssm(model, vi) + trajectory_new, _ = _csmc_sample( + rng, ssm_dist_new.model, pg.csmc, ssm_dist_new.observations, state.trajectory + ) + + # 7. 
Update trajectory in VarInfo + outer_traj_new = _outer_trajectory(trajectory_new, af) + traj_flat_new = _flatten_trajectory(outer_traj_new, T_len, Dx) + vi = DynamicPPL.setindex!!(vi, traj_flat_new, traj_vn) + vi = last(DynamicPPL.evaluate!!(model, vi)) + + transition = ParticleGibbsTransition(θ_new, AbstractMCMC.getstats(param_state)) + new_state = ParticleGibbsTuringState( + vi, trajectory_new, traj_vn, param_state, cond_vi_linked + ) + + return transition, new_state +end + +## CHAIN OUTPUT ################################################################################ + +function AbstractMCMC.bundle_samples( + ts::Vector{<:ParticleGibbsTransition}, + ::DynamicPPL.Model, + ::ParticleGibbs, + state::ParticleGibbsTuringState, + ::Type{MCMCChains.Chains}; + param_names=nothing, + kwargs..., +) + names = if isnothing(param_names) + _turing_param_names(state) + else + Symbol.(param_names) + end + return _build_chains(ts, names) +end + +function _turing_param_names(state::ParticleGibbsTuringState) + vi = state.cond_vi_linked + nt = DynamicPPL.values_as(vi, NamedTuple) + names = Symbol[] + for (k, v) in pairs(nt) + if v isa AbstractArray && length(v) > 1 + for i in 1:length(v) + push!(names, Symbol("$(k)[$i]")) + end + else + push!(names, k) + end + end + return names +end diff --git a/GeneralisedFilters/test/algorithms/csmc.jl b/GeneralisedFilters/test/algorithms/csmc.jl index 403dcc38..cf82a404 100644 --- a/GeneralisedFilters/test/algorithms/csmc.jl +++ b/GeneralisedFilters/test/algorithms/csmc.jl @@ -5,21 +5,64 @@ @testitem "CSMC" begin using GeneralisedFilters using StableRNGs - using PDMats - using LinearAlgebra using LogExpFunctions: logsumexp - using Random: randexp - using StatsBase: sample, weights + using StatsBase: sample - using OffsetArrays + SEED = 1234 + Dx = 1 + Dy = 1 + K = 10 + t_smooth = 2 + N_particles = 10 + N_burnin = 1000 + N_sample = 100000 + + rng = StableRNG(SEED) + model = GeneralisedFilters.GFTest.create_linear_gaussian_model(rng, Dx, Dy) + _, _, ys = sample(rng, model, K) + + # Kalman smoother ground truth + state, ks_ll = GeneralisedFilters.smooth( + rng, model, KalmanSmoother(), ys; t_smooth=t_smooth + ) + + csmc = CSMC(BF(N_particles; threshold=0.6)) + trajectory_samples = [] + lls = Float64[] + + let ref_traj = nothing + for i in 1:(N_burnin + N_sample) + traj, ll = GeneralisedFilters._csmc_sample(rng, model, csmc, ys, ref_traj) + ref_traj = traj + if i > N_burnin + push!(trajectory_samples, traj) + push!(lls, ll) + end + end + end + + # 1/Ẑ is an unbiased estimate of 1/Z (Elements of SMC, Section 5.2) + log_recip_likelihood_estimate = logsumexp(-lls) - log(length(lls)) + + csmc_mean = sum(getindex.(trajectory_samples, t_smooth)) / N_sample + @test csmc_mean ≈ state.μ rtol = 1e-3 + @test log_recip_likelihood_estimate ≈ -ks_ll rtol = 1e-3 +end + +## Ancestor Sampling CSMC ################################################################## + +@testitem "CSMC-AS" begin + using GeneralisedFilters + using StableRNGs + using LogExpFunctions: logsumexp + using StatsBase: sample SEED = 1234 Dx = 1 Dy = 1 K = 10 t_smooth = 2 - T = Float64 - N_particles = 10 # Use small particle number so impact of ref state is significant + N_particles = 10 N_burnin = 1000 N_sample = 100000 @@ -27,35 +70,27 @@ model = GeneralisedFilters.GFTest.create_linear_gaussian_model(rng, Dx, Dy) _, _, ys = sample(rng, model, K) - # Kalman smoother + # Kalman smoother ground truth state, ks_ll = GeneralisedFilters.smooth( rng, model, KalmanSmoother(), ys; t_smooth=t_smooth ) - N_steps = N_burnin + 
N_sample - bf = BF(N_particles; threshold=0.6) + csmc = CSMCAS(BF(N_particles; threshold=0.6)) trajectory_samples = [] - lls = [] + lls = Float64[] - # Run CSMC chain let ref_traj = nothing - for i in 1:N_steps - cb = GeneralisedFilters.DenseAncestorCallback(nothing) - bf_state, ll = GeneralisedFilters.filter( - rng, model, bf, ys; ref_state=ref_traj, callback=cb - ) - ws = weights(bf_state) - sampled_idx = sample(rng, 1:N_particles, ws) - ref_traj = GeneralisedFilters.get_ancestry(cb.container, sampled_idx) + for i in 1:(N_burnin + N_sample) + traj, ll = GeneralisedFilters._csmc_sample(rng, model, csmc, ys, ref_traj) + ref_traj = traj if i > N_burnin - push!(trajectory_samples, ref_traj) + push!(trajectory_samples, traj) push!(lls, ll) end end end - # The CSMC estimate of the evidence Z = p(y_{1:T}) is biased but 1 / ̂Z is actually an - # unbiased estimate of 1 / Z. See Elements of Sequential Monte Carlo (Section 5.2) + # 1/Ẑ is an unbiased estimate of 1/Z (Elements of SMC, Section 5.2) log_recip_likelihood_estimate = logsumexp(-lls) - log(length(lls)) csmc_mean = sum(getindex.(trajectory_samples, t_smooth)) / N_sample @@ -68,14 +103,9 @@ end @testitem "RBCSMC" begin using GeneralisedFilters using StableRNGs - using PDMats - using LinearAlgebra - using Random: randexp - using StatsBase: sample, weights using StaticArrays using Statistics - - using OffsetArrays + using StatsBase: sample SEED = 1234 D_outer = 1 @@ -84,7 +114,7 @@ end K = 5 t_smooth = 2 T = Float64 - N_particles = 10 # Use small particle number so impact of ref state is significant + N_particles = 10 N_burnin = 1000 N_sample = 10000 @@ -93,37 +123,27 @@ end rng, D_outer, D_inner, D_obs, T; static_arrays=true ) _, _, ys = sample(rng, full_model, K) - # Convert to static arrays ys = [SVector{1,T}(y) for y in ys] - # Kalman smoother + # Kalman smoother ground truth state, _ = GeneralisedFilters.smooth( rng, full_model, KalmanSmoother(), ys; t_smooth=t_smooth ) - N_steps = N_burnin + N_sample - rbpf = RBPF(BF(N_particles; threshold=0.8), KalmanFilter()) + csmc = CSMC(RBPF(BF(N_particles; threshold=0.8), KalmanFilter())) trajectory_samples = [] - cb = GeneralisedFilters.DenseAncestorCallback(nothing) let ref_traj = nothing - for i in 1:N_steps - bf_state, _ = GeneralisedFilters.filter( - rng, hier_model, rbpf, ys; ref_state=ref_traj, callback=cb - ) - ws = weights(bf_state) - sampled_idx = sample(rng, 1:N_particles, ws) - - full_traj = GeneralisedFilters.get_ancestry(cb.container, sampled_idx) + for i in 1:(N_burnin + N_sample) + traj, _ = GeneralisedFilters._csmc_sample(rng, hier_model, csmc, ys, ref_traj) + ref_traj = traj if i > N_burnin - push!(trajectory_samples, deepcopy(full_traj)) + push!(trajectory_samples, deepcopy(traj)) end - # Reference trajectory should only be nonlinear state for RBPF - ref_traj = getproperty.(full_traj, :x) end end - # Extract inner and outer trajectories + # Extract outer trajectories at t_smooth x_trajectories = getproperty.(getindex.(trajectory_samples, t_smooth), :x) # Smooth the inner (z) component using backward_smooth @@ -131,10 +151,8 @@ end z_smoothed_means = Vector{T}(undef, N_sample) for i in 1:N_sample smoothed_z = trajectory_samples[i][K].z - for t in (K - 1):-1:t_smooth filtered_z = trajectory_samples[i][t].z - # Pass prev_outer to condition the inner dynamics on the outer trajectory smoothed_z = backward_smooth( inner_dyn, KF(), @@ -144,11 +162,9 @@ end prev_outer=trajectory_samples[i][t].x, ) end - z_smoothed_means[i] = only(smoothed_z.μ) end - # Compare to ground truth @test 
state.μ[1] ≈ only(mean(x_trajectories)) rtol = 1e-2 @test state.μ[2] ≈ mean(z_smoothed_means) rtol = 1e-3 end @@ -158,18 +174,8 @@ end @testitem "RBCSMC-AS" begin using GeneralisedFilters using StableRNGs - using PDMats - using LinearAlgebra - using Random: randexp - using StatsBase: sample, weights - using StaticArrays using Statistics - using LogExpFunctions - - import SSMProblems: prior, dyn, obs - import GeneralisedFilters: resampler, resample, move, RBState, InformationLikelihood - - using OffsetArrays + using StatsBase: sample SEED = 1234 D_outer = 1 @@ -188,93 +194,25 @@ end ) _, _, ys = sample(rng, full_model, K) - # Kalman smoother + # Kalman smoother ground truth state, _ = GeneralisedFilters.smooth( rng, full_model, KalmanSmoother(), ys; t_smooth=t_smooth ) - N_steps = N_burnin + N_sample - rbpf = RBPF(BF(N_particles; threshold=0.8), KalmanFilter()) + csmc = CSMCAS(RBPF(BF(N_particles; threshold=0.8), KalmanFilter())) trajectory_samples = [] - let ref_traj = nothing, - predictive_likelihoods = Vector{InformationLikelihood{Vector{T},PDMat{T,Matrix{T}}}}( - undef, K - ) - - for i in 1:N_steps - cb = GeneralisedFilters.DenseAncestorCallback(nothing) - - # Manual filtering with ancestor resampling - bf_state = initialise(rng, prior(hier_model), rbpf; ref_state=ref_traj) - - # Post-Init callback - cb(hier_model, rbpf, bf_state, ys, PostInit) - - for t in 1:K - bf_state = resample(rng, resampler(rbpf), bf_state; ref_state=ref_traj) - - ancestor_idx = nothing - if !isnothing(ref_traj) - ref_rb_state = RBState(ref_traj[t], predictive_likelihoods[t]) - ancestor_weights = map(bf_state.particles) do particle - ancestor_weight(particle, dyn(hier_model), rbpf, t, ref_rb_state) - end - ancestor_idx = sample( - rng, 1:N_particles, weights(softmax(ancestor_weights)) - ) - end - - bf_state, ll = move( - rng, hier_model, rbpf, t, bf_state, ys[t]; ref_state=ref_traj - ) - - # Set ancestor index - if !isnothing(ref_traj) - bf_state.particles[end] = GeneralisedFilters.Particle( - bf_state.particles[end].state, - bf_state.particles[end].log_w, - ancestor_idx, - ) - end - - # Manually trigger callback - cb(hier_model, rbpf, t, bf_state, ys[t], PostUpdate) - end - - ws = weights(bf_state) - sampled_idx = sample(rng, 1:N_particles, ws) - - full_traj = GeneralisedFilters.get_ancestry(cb.container, sampled_idx) + let ref_traj = nothing + for i in 1:(N_burnin + N_sample) + traj, _ = GeneralisedFilters._csmc_sample(rng, hier_model, csmc, ys, ref_traj) + ref_traj = traj if i > N_burnin - push!(trajectory_samples, deepcopy(full_traj)) - end - # Reference trajectory should only be nonlinear state for RBPF - ref_traj = getproperty.(full_traj, :x) - - bip = BackwardInformationPredictor(; initial_jitter=1e-8) - - pred_lik = backward_initialise(rng, hier_model.inner_model.obs, bip, K, ys[K]) - predictive_likelihoods[K] = deepcopy(pred_lik) - for t in (K - 1):-1:1 - pred_lik = backward_predict( - rng, - hier_model.inner_model.dyn, - bip, - t, - pred_lik; - prev_outer=ref_traj[t], - new_outer=ref_traj[t + 1], - ) - pred_lik = backward_update( - hier_model.inner_model.obs, bip, t, pred_lik, ys[t] - ) - predictive_likelihoods[t] = deepcopy(pred_lik) + push!(trajectory_samples, deepcopy(traj)) end end end - # Extract inner and outer trajectories + # Extract outer trajectories at t_smooth x_trajectories = getproperty.(getindex.(trajectory_samples, t_smooth), :x) # Smooth the inner (z) component using backward_smooth @@ -282,10 +220,8 @@ end z_smoothed_means = Vector{T}(undef, N_sample) for i in 1:N_sample smoothed_z = 
trajectory_samples[i][K].z - for t in (K - 1):-1:t_smooth filtered_z = trajectory_samples[i][t].z - # Pass prev_outer to condition the inner dynamics on the outer trajectory smoothed_z = backward_smooth( inner_dyn, KF(), @@ -295,11 +231,9 @@ end prev_outer=trajectory_samples[i][t].x, ) end - z_smoothed_means[i] = only(smoothed_z.μ) end - # Compare to ground truth @test state.μ[1] ≈ only(mean(x_trajectories)) rtol = 1e-2 @test state.μ[2] ≈ mean(z_smoothed_means) rtol = 1e-3 end @@ -307,14 +241,8 @@ end @testitem "Discrete RBCSMC-AS" begin using GeneralisedFilters using StableRNGs - using StatsBase: sample, weights + using StatsBase: sample using Statistics - using LogExpFunctions - - import SSMProblems: prior, dyn, obs - import GeneralisedFilters: resampler, resample, move, RBState, DiscreteLikelihood - - using OffsetArrays SEED = 1234 K_outer = 3 @@ -334,7 +262,6 @@ end # Ground truth: smoothed distribution from joint model joint_smoothed, _ = smooth(rng, joint_model, DiscreteSmoother(), ys; t_smooth=t_smooth) - # Extract marginals from joint smoothed distribution true_outer_marginal = zeros(K_outer) true_inner_marginal = zeros(K_inner) for i in 1:K_outer @@ -345,71 +272,15 @@ end end end - N_steps = N_burnin + N_sample - rbpf = RBPF(BF(N_particles; threshold=0.8), DiscreteFilter()) + csmc = CSMCAS(RBPF(BF(N_particles; threshold=0.8), DiscreteFilter())) trajectory_samples = [] - let ref_traj = nothing, - predictive_likelihoods = Vector{DiscreteLikelihood{Vector{Float64}}}(undef, T) - - for i in 1:N_steps - cb = GeneralisedFilters.DenseAncestorCallback(nothing) - - bf_state = initialise(rng, prior(hier_model), rbpf; ref_state=ref_traj) - cb(hier_model, rbpf, bf_state, ys, PostInit) - - for t in 1:T - bf_state = resample(rng, resampler(rbpf), bf_state; ref_state=ref_traj) - - ancestor_idx = nothing - if !isnothing(ref_traj) - ref_rb_state = RBState(ref_traj[t], predictive_likelihoods[t]) - ancestor_weights = map(bf_state.particles) do particle - ancestor_weight(particle, dyn(hier_model), rbpf, t, ref_rb_state) - end - ancestor_idx = sample( - rng, 1:N_particles, weights(softmax(ancestor_weights)) - ) - end - - bf_state, _ = move( - rng, hier_model, rbpf, t, bf_state, ys[t]; ref_state=ref_traj - ) - - if !isnothing(ref_traj) - bf_state.particles[end] = GeneralisedFilters.Particle( - bf_state.particles[end].state, - bf_state.particles[end].log_w, - ancestor_idx, - ) - end - - cb(hier_model, rbpf, t, bf_state, ys[t], PostUpdate) - end - - ws = weights(bf_state) - sampled_idx = sample(rng, 1:N_particles, ws) - - full_traj = GeneralisedFilters.get_ancestry(cb.container, sampled_idx) + let ref_traj = nothing + for i in 1:(N_burnin + N_sample) + traj, _ = GeneralisedFilters._csmc_sample(rng, hier_model, csmc, ys, ref_traj) + ref_traj = traj if i > N_burnin - push!(trajectory_samples, deepcopy(full_traj)) - end - ref_traj = getproperty.(full_traj, :x) - - # Compute backward predictive likelihoods for next iteration - bdp = BackwardDiscretePredictor() - pred_lik = GeneralisedFilters.backward_initialise( - rng, hier_model.inner_model.obs, bdp, T, ys[T]; num_states=K_inner - ) - predictive_likelihoods[T] = deepcopy(pred_lik) - for t in (T - 1):-1:1 - pred_lik = GeneralisedFilters.backward_predict( - rng, hier_model.inner_model.dyn, bdp, t, pred_lik - ) - pred_lik = GeneralisedFilters.backward_update( - hier_model.inner_model.obs, bdp, t, pred_lik, ys[t] - ) - predictive_likelihoods[t] = deepcopy(pred_lik) + push!(trajectory_samples, deepcopy(traj)) end end end @@ -455,102 +326,110 @@ end @test 
csmc_inner_marginal ≈ true_inner_marginal rtol = 0.05 end +## CSMC AbstractMCMC Interface ############################################################## + +@testitem "CSMC AbstractMCMC interface" begin + using GeneralisedFilters + using AbstractMCMC: AbstractMCMC + using StableRNGs + using StatsBase: sample + + rng = StableRNG(1234) + model = GeneralisedFilters.GFTest.create_linear_gaussian_model(rng, 1, 1) + _, _, ys = sample(rng, model, 5) + + csmc_model = CSMCModel(model, ys) + csmc = CSMC(BF(10)) + + # Initial step (unconditional) + transition, state = AbstractMCMC.step(rng, csmc_model, csmc) + @test transition isa CSMCState + @test state isa CSMCState + @test length(state.trajectory) == 6 # T+1 elements (indices 0:5) + + # Subsequent step (conditional) + transition2, state2 = AbstractMCMC.step(rng, csmc_model, csmc, state) + @test state2 isa CSMCState + @test length(state2.trajectory) == 6 + + # CSMC-BS + csmc_bs = CSMCBS(BF(10)) + _, bs_state = AbstractMCMC.step(rng, csmc_model, csmc_bs) + @test bs_state isa CSMCState + @test length(bs_state.trajectory) == 6 + _, bs_state2 = AbstractMCMC.step(rng, csmc_model, csmc_bs, bs_state) + @test bs_state2 isa CSMCState + + # Works with RBPF too + _, hier_model = GeneralisedFilters.GFTest.create_dummy_linear_gaussian_model( + rng, 1, 1, 1; static_arrays=false + ) + _, _, _, _, rb_ys = sample(rng, hier_model, 5) + + rb_model = CSMCModel(hier_model, rb_ys) + rb_csmc = CSMCAS(RBPF(BF(10; threshold=0.8), KalmanFilter())) + + _, rb_state = AbstractMCMC.step(rng, rb_model, rb_csmc) + @test rb_state isa CSMCState + _, rb_state2 = AbstractMCMC.step(rng, rb_model, rb_csmc, rb_state) + @test rb_state2 isa CSMCState +end + ## Backward Simulation ###################################################################### -@testitem "Backward simulation" begin +@testitem "CSMC-BS" begin using GeneralisedFilters using StableRNGs - using PDMats - using LinearAlgebra - using StatsBase: sample, weights + using StatsBase: sample using Statistics - using LogExpFunctions - - import SSMProblems: dyn, obs, prior - import GeneralisedFilters: resample, resampler, move, Particle + using LogExpFunctions: logsumexp SEED = 1234 Dx = 1 Dy = 1 K = 5 t_smooth = 3 - T = Float64 N_particles = 50 - N_trajectories = 1000 + N_burnin = 200 + N_sample = 800 rng = StableRNG(SEED) model = GeneralisedFilters.GFTest.create_linear_gaussian_model(rng, Dx, Dy) _, _, ys = sample(rng, model, K) # Kalman smoother ground truth - ks_state, _ = GeneralisedFilters.smooth( + ks_state, ks_ll = GeneralisedFilters.smooth( rng, model, KalmanSmoother(), ys; t_smooth=t_smooth ) - # Run forward filter manually and store particle states at each time step - bf = BF(N_particles) - - # Storage for particle states at time steps 1:K - particle_states = Vector{Vector{Particle{Vector{T},T,Int}}}(undef, K) - - # Forward filtering pass - final_state = let state = initialise(rng, prior(model), bf) - for t in 1:K - state = resample(rng, resampler(bf), state) - state, _ = move(rng, model, bf, t, state, ys[t]) - particle_states[t] = deepcopy(collect(state.particles)) - end - state - end - - # Backward simulation: sample M trajectories - trajectory_samples = Vector{Vector{T}}(undef, N_trajectories) - - for m in 1:N_trajectories - # Sample from final distribution - final_ws = weights(final_state) - idx = sample(rng, 1:N_particles, final_ws) - - # Initialize trajectory with sampled final state - traj = Vector{Vector{T}}(undef, K) - traj[K] = particle_states[K][idx].state - - # Backward simulation pass - resample ancestors 
## Backward Simulation ######################################################################

-@testitem "Backward simulation" begin
+@testitem "CSMC-BS" begin
    using GeneralisedFilters
    using StableRNGs
-    using PDMats
-    using LinearAlgebra
-    using StatsBase: sample, weights
+    using StatsBase: sample
    using Statistics
-    using LogExpFunctions
-
-    import SSMProblems: dyn, obs, prior
-    import GeneralisedFilters: resample, resampler, move, Particle
+    using LogExpFunctions: logsumexp

    SEED = 1234
    Dx = 1
    Dy = 1
    K = 5
    t_smooth = 3
-    T = Float64
    N_particles = 50
-    N_trajectories = 1000
+    N_burnin = 200
+    N_sample = 800

    rng = StableRNG(SEED)
    model = GeneralisedFilters.GFTest.create_linear_gaussian_model(rng, Dx, Dy)
    _, _, ys = sample(rng, model, K)

    # Kalman smoother ground truth
-    ks_state, _ = GeneralisedFilters.smooth(
+    ks_state, ks_ll = GeneralisedFilters.smooth(
        rng, model, KalmanSmoother(), ys; t_smooth=t_smooth
    )

-    # Run forward filter manually and store particle states at each time step
-    bf = BF(N_particles)
-
-    # Storage for particle states at time steps 1:K
-    particle_states = Vector{Vector{Particle{Vector{T},T,Int}}}(undef, K)
-
-    # Forward filtering pass
-    final_state = let state = initialise(rng, prior(model), bf)
-        for t in 1:K
-            state = resample(rng, resampler(bf), state)
-            state, _ = move(rng, model, bf, t, state, ys[t])
-            particle_states[t] = deepcopy(collect(state.particles))
-        end
-        state
-    end
-
-    # Backward simulation: sample M trajectories
-    trajectory_samples = Vector{Vector{T}}(undef, N_trajectories)
-
-    for m in 1:N_trajectories
-        # Sample from final distribution
-        final_ws = weights(final_state)
-        idx = sample(rng, 1:N_particles, final_ws)
-
-        # Initialize trajectory with sampled final state
-        traj = Vector{Vector{T}}(undef, K)
-        traj[K] = particle_states[K][idx].state
-
-        # Backward simulation pass - resample ancestors using backward weights
-        for t in (K - 1):-1:1
-            particles_t = particle_states[t]
+    csmc = CSMCBS(BF(N_particles))
+    trajectory_samples = []
+    lls = Float64[]

-            # Compute backward weights: w_t^i * f(x_{t+1} | x_t^i)
-            ref_state = traj[t + 1]
-            backward_ws = map(particles_t) do particle
-                ancestor_weight(particle, dyn(model), bf, t + 1, ref_state)
+    let ref_traj = nothing
+        for i in 1:(N_burnin + N_sample)
+            traj, ll = GeneralisedFilters._csmc_sample(rng, model, csmc, ys, ref_traj)
+            ref_traj = traj
+            if i > N_burnin
+                push!(trajectory_samples, traj)
+                push!(lls, ll)
            end
-
-            # Sample new ancestor
-            idx = sample(rng, 1:N_particles, weights(softmax(backward_ws)))
-            traj[t] = particles_t[idx].state
        end
-
-        trajectory_samples[m] = [traj[t][1] for t in 1:K]
    end

-    # Extract samples at t_smooth and compare to Kalman smoother
-    bs_mean = mean(getindex.(trajectory_samples, t_smooth))
+    # 1/Ẑ is an unbiased estimate of 1/Z (Elements of SMC, Section 5.2)
+    log_recip_likelihood_estimate = logsumexp(-lls) - log(length(lls))
+
+    bs_mean = mean(first.(getindex.(trajectory_samples, t_smooth)))
    @test bs_mean ≈ only(ks_state.μ) rtol = 5e-2
+    @test log_recip_likelihood_estimate ≈ -ks_ll rtol = 1e-2
end
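
# Why logsumexp(-lls) - log(length(lls)) above estimates log(1/Z): each conditional
# sweep returns log(Ẑᵢ), and the sample mean of 1/Ẑᵢ computed in log space is
# log(Σᵢ exp(-log Ẑᵢ)) - log(N). A numerically equivalent (but overflow-prone)
# spelling, runnable on its own:
using LogExpFunctions: logsumexp

lls = [10.0, 10.5, 9.8]                        # stand-in values of log(Ẑᵢ)
naive = log(sum(exp.(-lls)) / length(lls))     # can under/overflow for extreme lls
stable = logsumexp(-lls) - log(length(lls))    # the form used in the test
@assert naive ≈ stable
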

-@testitem "RB backward simulation" begin
+@testitem "RBCSMC-BS" begin
    using GeneralisedFilters
    using StableRNGs
-    using PDMats
-    using LinearAlgebra
-    using StatsBase: sample, weights
+    using StatsBase: sample
    using Statistics
-    using LogExpFunctions
-    using Distributions: MvNormal
-
-    import SSMProblems: dyn, obs, prior
-    import GeneralisedFilters:
-        RBState, InformationLikelihood, resample, resampler, move, Particle

    SEED = 1234
    D_outer = 1
@@ -560,7 +439,8 @@ end
    t_smooth = 2
    T = Float64
    N_particles = 50
-    N_trajectories = 1000
+    N_burnin = 200
+    N_sample = 1000

    rng = StableRNG(SEED)
    full_model, hier_model = GeneralisedFilters.GFTest.create_dummy_linear_gaussian_model(
@@ -568,103 +448,46 @@ end
    )
    _, _, ys = sample(rng, full_model, K)

-    # Kalman smoother ground truth on full model
+    # Kalman smoother ground truth
    ks_state, _ = GeneralisedFilters.smooth(
        rng, full_model, KalmanSmoother(), ys; t_smooth=t_smooth
    )

-    # Run RBPF forward filter manually and store particle states
-    rbpf = RBPF(BF(N_particles), KalmanFilter())
-
-    # Initialize and run first step to get concrete types
-    init_state = initialise(rng, prior(hier_model), rbpf)
-    init_state = resample(rng, resampler(rbpf), init_state)
-    init_state, _ = move(rng, hier_model, rbpf, 1, init_state, ys[1])
-
-    # Storage for particle states at time steps 1:K
-    particle_states = Vector{typeof(collect(init_state.particles))}(undef, K)
-    particle_states[1] = deepcopy(collect(init_state.particles))
-
-    # Forward filtering pass for remaining steps
-    final_state = let state = init_state
-        for t in 2:K
-            state = resample(rng, resampler(rbpf), state)
-            state, _ = move(rng, hier_model, rbpf, t, state, ys[t])
-            particle_states[t] = deepcopy(collect(state.particles))
-        end
-        state
-    end
-
-    # Backward simulation: sample M trajectories
-    x_samples = Vector{T}(undef, N_trajectories)
-    z_samples = Vector{T}(undef, N_trajectories)
-
-    for m in 1:N_trajectories
-        # Sample from final distribution
-        final_ws = weights(final_state)
-        idx = sample(rng, 1:N_particles, final_ws)
-
-        # Initialize trajectory with sampled final state
-        traj = Vector{eltype(particle_states[1]).parameters[1]}(undef, K)
-        traj[K] = particle_states[K][idx].state
-
-        # Extract outer trajectory for computing backward likelihoods
-        outer_traj = Vector{Vector{T}}(undef, K)
-        outer_traj[K] = traj[K].x
-
-        # Compute backward predictive likelihoods for this trajectory
-        bip = BackwardInformationPredictor(; initial_jitter=1e-8)
-        pred_lik = backward_initialise(rng, hier_model.inner_model.obs, bip, K, ys[K])
-        predictive_likelihoods = Vector{typeof(pred_lik)}(undef, K)
-        predictive_likelihoods[K] = deepcopy(pred_lik)
-
-        # Backward simulation pass
-        for t in (K - 1):-1:1
-            particles_t = particle_states[t]
-
-            # Build reference state with backward predictive likelihood
-            ref_rb_state = RBState(outer_traj[t + 1], predictive_likelihoods[t + 1])
+    csmc = CSMCBS(RBPF(BF(N_particles), KalmanFilter()))
+    trajectory_samples = []

-            # Compute backward weights using ancestor_weight
-            backward_ws = map(particles_t) do particle
-                ancestor_weight(particle, dyn(hier_model), rbpf, t, ref_rb_state)
+    let ref_traj = nothing
+        for i in 1:(N_burnin + N_sample)
+            traj, _ = GeneralisedFilters._csmc_sample(rng, hier_model, csmc, ys, ref_traj)
+            ref_traj = traj
+            if i > N_burnin
+                push!(trajectory_samples, deepcopy(traj))
            end
-
-            # Sample new ancestor
-            new_idx = sample(rng, 1:N_particles, weights(softmax(backward_ws)))
-            traj[t] = particles_t[new_idx].state
-            outer_traj[t] = traj[t].x
-
-            # Compute backward predictive likelihood at time t
-            pred_lik = backward_predict(
-                rng,
-                hier_model.inner_model.dyn,
-                bip,
-                t,
-                predictive_likelihoods[t + 1];
-                prev_outer=outer_traj[t],
-                new_outer=outer_traj[t + 1],
-            )
-            pred_lik = backward_update(hier_model.inner_model.obs, bip, t, pred_lik, ys[t])
-            predictive_likelihoods[t] = deepcopy(pred_lik)
        end
+    end

-        # Store outer state sample at t_smooth
-        x_samples[m] = only(traj[t_smooth].x)
+    # Extract outer trajectories at t_smooth
+    x_trajectories = getproperty.(getindex.(trajectory_samples, t_smooth), :x)

-        # Smooth the inner (z) component using backward_smooth
-        inner_dyn = hier_model.inner_model.dyn
-        smoothed_z = traj[K].z
+    # Smooth the inner (z) component using backward_smooth
+    inner_dyn = hier_model.inner_model.dyn
+    z_smoothed_means = Vector{T}(undef, N_sample)
+    for i in 1:N_sample
+        smoothed_z = trajectory_samples[i][K].z
        for t in (K - 1):-1:t_smooth
-            filtered_z = traj[t].z
+            filtered_z = trajectory_samples[i][t].z
            smoothed_z = backward_smooth(
-                inner_dyn, KF(), t, filtered_z, smoothed_z; prev_outer=traj[t].x
+                inner_dyn,
+                KF(),
+                t,
+                filtered_z,
+                smoothed_z;
+                prev_outer=trajectory_samples[i][t].x,
            )
        end
-        z_samples[m] = only(smoothed_z.μ)
+        z_smoothed_means[i] = only(smoothed_z.μ)
    end

-    # Compare to ground truth
-    @test ks_state.μ[1] ≈ mean(x_samples) rtol = 5e-2
-    @test ks_state.μ[2] ≈ mean(z_samples) rtol = 5e-2
+    @test ks_state.μ[1] ≈ only(mean(x_trajectories)) rtol = 5e-2
+    @test ks_state.μ[2] ≈ mean(z_smoothed_means) rtol = 5e-2
end
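
# Rao-Blackwellised smoothing identity behind the test above: the posterior factorises
# as p(x_{0:K}, z_{0:K} | y_{1:K}) = p(x_{0:K} | y_{1:K}) p(z_{0:K} | x_{0:K}, y_{1:K}),
# so E[z_t | y_{1:K}] is estimated by averaging the exact conditional smoothed means
# E[z_t | x_{0:K}, y_{1:K}] (computed by the backward_smooth pass) over the sampled
# outer trajectories — which is why mean(z_smoothed_means) is compared against the
# joint Kalman smoother.
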
diff --git a/GeneralisedFilters/test/components/kalman_gradient.jl b/GeneralisedFilters/test/components/kalman_gradient.jl
index 72e46178..23c575af 100644
--- a/GeneralisedFilters/test/components/kalman_gradient.jl
+++ b/GeneralisedFilters/test/components/kalman_gradient.jl
@@ -79,6 +79,45 @@ end
    @test s.b isa SVector{2,Float64}
end

+@testitem "Kalman gradient: ∂H" begin
+    using GeneralisedFilters
+    using FiniteDifferences
+    using LinearAlgebra
+    using PDMats
+    using StableRNGs
+    using StaticArrays
+    using SSMProblems
+
+    rng = StableRNG(1234)
+    s = GeneralisedFilters.GFTest.setup_gradient_test(rng)
+    fdm = central_fdm(5, 1)
+
+    nll_H = GeneralisedFilters.GFTest.make_nll_func(s.model, s.ys, :H)
+    H_vec = vec(Matrix(s.H))
+    ∂H_fd = reshape(FiniteDifferences.grad(fdm, nll_H, H_vec)[1], s.D, s.D)
+    @test Matrix(s.∂H_total) ≈ ∂H_fd rtol = 1e-4
+    @test s.H isa SMatrix{2,2,Float64,4}
+end
+
+@testitem "Kalman gradient: ∂c" begin
+    using GeneralisedFilters
+    using FiniteDifferences
+    using LinearAlgebra
+    using PDMats
+    using StableRNGs
+    using StaticArrays
+    using SSMProblems
+
+    rng = StableRNG(1234)
+    s = GeneralisedFilters.GFTest.setup_gradient_test(rng)
+    fdm = central_fdm(5, 1)
+
+    nll_c = GeneralisedFilters.GFTest.make_nll_func(s.model, s.ys, :c)
+    ∂c_fd = FiniteDifferences.grad(fdm, nll_c, Vector(s.c))[1]
+    @test Vector(s.∂c_total) ≈ ∂c_fd rtol = 1e-4
+    @test s.c isa SVector{2,Float64}
+end
+
@testitem "Kalman gradient: ∂μ0" begin
    using GeneralisedFilters
    using FiniteDifferences
diff --git a/GeneralisedFilters/test/integrations/logdensity.jl b/GeneralisedFilters/test/integrations/logdensity.jl
new file mode 100644
index 00000000..deeffeba
--- /dev/null
+++ b/GeneralisedFilters/test/integrations/logdensity.jl
@@ -0,0 +1,564 @@
+"""Tests for the log-density interface (trajectory_logdensity, kf_loglikelihood, rrule)."""
+
+## Regular SSM trajectory_logdensity ###########################################################
+
+@testitem "trajectory_logdensity: regular SSM" begin
+    using GeneralisedFilters
+    using SSMProblems
+    using StableRNGs
+    using Distributions
+    using OffsetArrays
+
+    let
+        rng = StableRNG(1234)
+        Dx, Dy, T = 2, 2, 5
+        model = GeneralisedFilters.GFTest.create_linear_gaussian_model(rng, Dx, Dy)
+
+        x0, xs, ys = SSMProblems.sample(rng, model, T)
+        trajectory = OffsetVector(vcat([x0], xs), -1)
+
+        ll = trajectory_logdensity(model, trajectory, ys)
+
+        ll_manual = logpdf(SSMProblems.distribution(SSMProblems.prior(model)), x0)
+        for t in 1:T
+            ll_manual += SSMProblems.logdensity(
+                SSMProblems.dyn(model), t, trajectory[t - 1], trajectory[t]
+            )
+            ll_manual += SSMProblems.logdensity(
+                SSMProblems.obs(model), t, trajectory[t], ys[t]
+            )
+        end
+
+        @test ll ≈ ll_manual
+    end
+end
+
+## HierarchicalSSM trajectory_logdensity #######################################################
+
+@testitem "trajectory_logdensity: HierarchicalSSM" begin
+    using GeneralisedFilters
+    using SSMProblems
+    using StableRNGs
+    using Distributions
+    using OffsetArrays
+
+    let
+        rng = StableRNG(1234)
+        D_outer, D_inner, D_obs, T = 2, 2, 2, 5
+
+        full_model, hier_model = GeneralisedFilters.GFTest.create_dummy_linear_gaussian_model(
+            rng, D_outer, D_inner, D_obs; static_arrays=false
+        )
+        x0, z0, xs, zs, ys = SSMProblems.sample(rng, hier_model, T)
+        outer_traj = OffsetVector(vcat([x0], xs), -1)
+
+        ll = trajectory_logdensity(hier_model, KF(), outer_traj, ys)
+
+        ll_manual = logpdf(SSMProblems.distribution(hier_model.outer_prior), outer_traj[0])
+        for t in 1:T
+            ll_manual += SSMProblems.logdensity(
+                hier_model.outer_dyn, t, outer_traj[t - 1], outer_traj[t]
+            )
+        end
+
+        inner_model = hier_model.inner_model
+        state = GeneralisedFilters.initialise(
+            rng, inner_model.prior, KF(); new_outer=outer_traj[0]
+        )
+        ll_inner = 0.0
+        for t in 1:T
+            state = GeneralisedFilters.predict(
+                rng,
+                inner_model.dyn,
+                KF(),
+                t,
+                state,
+                nothing;
+                prev_outer=outer_traj[t - 1],
+                new_outer=outer_traj[t],
+            )
+            state, ll_inc = GeneralisedFilters.update(
+                inner_model.obs, KF(), t, state, ys[t]; new_outer=outer_traj[t]
+            )
+            ll_inner += ll_inc
+        end
+        ll_manual += ll_inner
+
+        @test ll ≈ ll_manual
+    end
+end
+
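# Trajectory convention used by trajectory_logdensity in both tests above: states live in
# a 0-indexed OffsetVector, with index 0 holding the initial state and index t the state
# after transition t. A standalone illustration:
using OffsetArrays

x0 = [0.0, 0.0]
xs = [[1.0, 1.0], [2.0, 2.0], [3.0, 3.0]]
trajectory = OffsetVector(vcat([x0], xs), -1)  # shift indices 1:4 down to 0:3

@assert trajectory[0] == x0        # initial state
@assert trajectory[3] == xs[end]   # state at the final time T = 3
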
+## kf_loglikelihood value ######################################################################
+
+@testitem "kf_loglikelihood value" begin
+    using GeneralisedFilters
+    using SSMProblems
+    using StableRNGs
+    using PDMats
+
+    rng = StableRNG(1234)
+    Dx, Dy, T = 2, 2, 5
+    model = GeneralisedFilters.GFTest.create_linear_gaussian_model(rng, Dx, Dy)
+    _, _, ys = SSMProblems.sample(rng, model, T)
+
+    # Extract parameters
+    pr = SSMProblems.prior(model)
+    dy = SSMProblems.dyn(model)
+    ob = SSMProblems.obs(model)
+
+    μ0 = GeneralisedFilters.calc_μ0(pr)
+    Σ0 = GeneralisedFilters.calc_Σ0(pr)
+    A = GeneralisedFilters.calc_A(dy, 1)
+    b = GeneralisedFilters.calc_b(dy, 1)
+    Q = GeneralisedFilters.calc_Q(dy, 1)
+    H = GeneralisedFilters.calc_H(ob, 1)
+    c = GeneralisedFilters.calc_c(ob, 1)
+    R = GeneralisedFilters.calc_R(ob, 1)
+
+    # Homogeneous: same params at each timestep
+    As = fill(A, T)
+    bs = fill(b, T)
+    Qs = fill(Q, T)
+    Hs = fill(H, T)
+    cs = fill(c, T)
+    Rs = fill(R, T)
+
+    ll_kf = kf_loglikelihood(μ0, Σ0, As, bs, Qs, Hs, cs, Rs, ys)
+
+    # Compare against filter()
+    _, ll_filter = GeneralisedFilters.filter(model, KF(), ys)
+
+    @test ll_kf ≈ ll_filter
+end
+
+## kf_loglikelihood rrule gradients ############################################################
+
+@testitem "kf_loglikelihood rrule: ∂b" begin
+    using GeneralisedFilters
+    using ChainRulesCore
+    using FiniteDifferences
+    using StableRNGs
+    using PDMats
+
+    rng = StableRNG(1234)
+    Dx, Dy, T = 2, 2, 3
+    μ0, Σ0, A, b, Q, H, c, R, ys_vec = GeneralisedFilters.GFTest.setup_kf_rrule_params(
+        rng, Dx, Dy, T
+    )
+
+    As = fill(A, T)
+    Qs = fill(Q, T)
+    Hs = fill(H, T)
+    Rs = fill(R, T)
+    cs_arr = fill(c, T)
+
+    # rrule
+    _, pullback = ChainRulesCore.rrule(
+        kf_loglikelihood, μ0, Σ0, As, fill(b, T), Qs, Hs, cs_arr, Rs, ys_vec
+    )
+    cotangents = pullback(1.0)
+    ∂bs_rrule = cotangents[5]  # index 5: bs (after NoTangent, μ0, Σ0, As)
+
+    # Finite differences
+    fdm = central_fdm(5, 1)
+    function ll_b(b_vec)
+        return kf_loglikelihood(μ0, Σ0, As, fill(b_vec, T), Qs, Hs, cs_arr, Rs, ys_vec)
+    end
+    ∂b_fd = FiniteDifferences.grad(fdm, ll_b, b)[1]
+
+    # Sum rrule gradients over timesteps (homogeneous model → all the same)
+    ∂b_rrule_total = sum(∂bs_rrule)
+    @test ∂b_rrule_total ≈ ∂b_fd rtol = 1e-4
+end
+
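# Because kf_loglikelihood ships a ChainRulesCore.rrule, reverse-mode engines that consume
# ChainRules (e.g. Zygote, added as a dependency in this diff) can differentiate it with
# no extra glue. A hedged sketch, reusing the parameter setup from the test above:
using Zygote

grads = Zygote.gradient(μ0) do μ0
    kf_loglikelihood(μ0, Σ0, As, fill(b, T), Qs, Hs, cs_arr, Rs, ys_vec)
end
# grads[1] is ∂ll/∂μ0, and should match the finite-difference check in the ∂μ0 test below.
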
+@testitem "kf_loglikelihood rrule: ∂c" begin
+    using GeneralisedFilters
+    using ChainRulesCore
+    using FiniteDifferences
+    using StableRNGs
+    using PDMats
+
+    rng = StableRNG(1234)
+    Dx, Dy, T = 2, 2, 3
+    μ0, Σ0, A, b, Q, H, c, R, ys_vec = GeneralisedFilters.GFTest.setup_kf_rrule_params(
+        rng, Dx, Dy, T
+    )
+
+    As = fill(A, T)
+    bs = fill(b, T)
+    Qs = fill(Q, T)
+    Hs = fill(H, T)
+    Rs = fill(R, T)
+
+    _, pullback = ChainRulesCore.rrule(
+        kf_loglikelihood, μ0, Σ0, As, bs, Qs, Hs, fill(c, T), Rs, ys_vec
+    )
+    cotangents = pullback(1.0)
+    ∂cs_rrule = cotangents[8]  # index 8: cs
+
+    fdm = central_fdm(5, 1)
+    function ll_c(c_vec)
+        return kf_loglikelihood(μ0, Σ0, As, bs, Qs, Hs, fill(c_vec, T), Rs, ys_vec)
+    end
+    ∂c_fd = FiniteDifferences.grad(fdm, ll_c, c)[1]
+
+    ∂c_rrule_total = sum(∂cs_rrule)
+    @test ∂c_rrule_total ≈ ∂c_fd rtol = 1e-4
+end
+
+@testitem "kf_loglikelihood rrule: ∂A" begin
+    using GeneralisedFilters
+    using ChainRulesCore
+    using FiniteDifferences
+    using StableRNGs
+    using PDMats
+
+    rng = StableRNG(1234)
+    Dx, Dy, T = 2, 2, 3
+    μ0, Σ0, A, b, Q, H, c, R, ys_vec = GeneralisedFilters.GFTest.setup_kf_rrule_params(
+        rng, Dx, Dy, T
+    )
+
+    bs = fill(b, T)
+    Qs = fill(Q, T)
+    Hs = fill(H, T)
+    cs = fill(c, T)
+    Rs = fill(R, T)
+
+    _, pullback = ChainRulesCore.rrule(
+        kf_loglikelihood, μ0, Σ0, fill(A, T), bs, Qs, Hs, cs, Rs, ys_vec
+    )
+    cotangents = pullback(1.0)
+    ∂As_rrule = cotangents[4]
+
+    fdm = central_fdm(5, 1)
+    function ll_A(A_vec)
+        A_mat = reshape(A_vec, size(A))
+        return kf_loglikelihood(μ0, Σ0, fill(A_mat, T), bs, Qs, Hs, cs, Rs, ys_vec)
+    end
+    ∂A_fd = reshape(FiniteDifferences.grad(fdm, ll_A, vec(A))[1], size(A))
+
+    ∂A_rrule_total = sum(∂As_rrule)
+    @test ∂A_rrule_total ≈ ∂A_fd rtol = 1e-4
+end
+
+@testitem "kf_loglikelihood rrule: ∂μ0" begin
+    using GeneralisedFilters
+    using ChainRulesCore
+    using FiniteDifferences
+    using StableRNGs
+    using PDMats
+
+    rng = StableRNG(1234)
+    Dx, Dy, T = 2, 2, 3
+    μ0, Σ0, A, b, Q, H, c, R, ys_vec = GeneralisedFilters.GFTest.setup_kf_rrule_params(
+        rng, Dx, Dy, T
+    )
+
+    As = fill(A, T)
+    bs = fill(b, T)
+    Qs = fill(Q, T)
+    Hs = fill(H, T)
+    cs = fill(c, T)
+    Rs = fill(R, T)
+
+    _, pullback = ChainRulesCore.rrule(
+        kf_loglikelihood, μ0, Σ0, As, bs, Qs, Hs, cs, Rs, ys_vec
+    )
+    cotangents = pullback(1.0)
+    ∂μ0_rrule = cotangents[2]
+
+    fdm = central_fdm(5, 1)
+    function ll_μ0(μ0_vec)
+        return kf_loglikelihood(μ0_vec, Σ0, As, bs, Qs, Hs, cs, Rs, ys_vec)
+    end
+    ∂μ0_fd = FiniteDifferences.grad(fdm, ll_μ0, μ0)[1]
+
+    @test ∂μ0_rrule ≈ ∂μ0_fd rtol = 1e-4
+end
+
+@testitem "kf_loglikelihood rrule: ∂Q" begin
+    using GeneralisedFilters
+    using ChainRulesCore
+    using FiniteDifferences
+    using StableRNGs
+    using PDMats
+
+    rng = StableRNG(1234)
+    Dx, Dy, T = 2, 2, 3
+    μ0, Σ0, A, b, Q, H, c, R, ys_vec = GeneralisedFilters.GFTest.setup_kf_rrule_params(
+        rng, Dx, Dy, T
+    )
+
+    As = fill(A, T)
+    bs = fill(b, T)
+    Hs = fill(H, T)
+    cs = fill(c, T)
+    Rs = fill(R, T)
+
+    _, pullback = ChainRulesCore.rrule(
+        kf_loglikelihood, μ0, Σ0, As, bs, fill(Q, T), Hs, cs, Rs, ys_vec
+    )
+    cotangents = pullback(1.0)
+    ∂Qs_rrule = cotangents[6]
+
+    fdm = central_fdm(5, 1)
+    function ll_Q(Q_vec)
+        Q_new = GeneralisedFilters.GFTest.make_pd(reshape(Q_vec, Dx, Dx))
+        return kf_loglikelihood(μ0, Σ0, As, bs, fill(Q_new, T), Hs, cs, Rs, ys_vec)
+    end
+    ∂Q_fd = reshape(FiniteDifferences.grad(fdm, ll_Q, vec(Matrix(Q)))[1], Dx, Dx)
+
+    ∂Q_rrule_total = sum(∂Qs_rrule)
+    @test ∂Q_rrule_total ≈ ∂Q_fd rtol = 1e-3
+end
+
+@testitem "kf_loglikelihood rrule: ∂R" begin
+    using GeneralisedFilters
+    using ChainRulesCore
+    using FiniteDifferences
+    using StableRNGs
+    using PDMats
+
+    rng = StableRNG(1234)
+    Dx, Dy, T = 2, 2, 3
+    μ0, Σ0, A, b, Q, H, c, R, ys_vec = GeneralisedFilters.GFTest.setup_kf_rrule_params(
+        rng, Dx, Dy, T
+    )
+
+    As = fill(A, T)
+    bs = fill(b, T)
+    Qs = fill(Q, T)
+    Hs = fill(H, T)
+    cs = fill(c, T)
+
+    _, pullback = ChainRulesCore.rrule(
+        kf_loglikelihood, μ0, Σ0, As, bs, Qs, Hs, cs, fill(R, T), ys_vec
+    )
+    cotangents = pullback(1.0)
+    ∂Rs_rrule = cotangents[9]
+
+    fdm = central_fdm(5, 1)
+    function ll_R(R_vec)
+        R_new = GeneralisedFilters.GFTest.make_pd(reshape(R_vec, Dy, Dy))
+        return kf_loglikelihood(μ0, Σ0, As, bs, Qs, Hs, cs, fill(R_new, T), ys_vec)
+    end
+    ∂R_fd = reshape(FiniteDifferences.grad(fdm, ll_R, vec(Matrix(R)))[1], Dy, Dy)
+
+    ∂R_rrule_total = sum(∂Rs_rrule)
+    @test ∂R_rrule_total ≈ ∂R_fd rtol = 1e-4
+end
+
+@testitem "kf_loglikelihood rrule: ∂H" begin
+    using GeneralisedFilters
+    using ChainRulesCore
+    using FiniteDifferences
+    using StableRNGs
+    using PDMats
+
+    rng = StableRNG(1234)
+    Dx, Dy, T = 2, 2, 3
+    μ0, Σ0, A, b, Q, H, c, R, ys_vec = GeneralisedFilters.GFTest.setup_kf_rrule_params(
+        rng, Dx, Dy, T
+    )
+
+    As = fill(A, T)
+    bs = fill(b, T)
+    Qs = fill(Q, T)
+    cs = fill(c, T)
+    Rs = fill(R, T)
+
+    _, pullback = ChainRulesCore.rrule(
+        kf_loglikelihood, μ0, Σ0, As, bs, Qs, fill(H, T), cs, Rs, ys_vec
+    )
+    cotangents = pullback(1.0)
+    ∂Hs_rrule = cotangents[7]
+
+    fdm = central_fdm(5, 1)
+    function ll_H(H_vec)
+        H_mat = reshape(H_vec, size(H))
+        return kf_loglikelihood(μ0, Σ0, As, bs, Qs, fill(H_mat, T), cs, Rs, ys_vec)
+    end
+    ∂H_fd = reshape(FiniteDifferences.grad(fdm, ll_H, vec(H))[1], size(H))
+
+    ∂H_rrule_total = sum(∂Hs_rrule)
+    @test ∂H_rrule_total ≈ ∂H_fd rtol = 1e-4
+end
+
+## kf_loglikelihood with StaticArrays ##########################################################
+
+@testitem "kf_loglikelihood: StaticArrays value + rrule" begin
+    using GeneralisedFilters
+    using SSMProblems
+    using ChainRulesCore
+    using FiniteDifferences
+    using StableRNGs
+    using PDMats
+    using StaticArrays
+    using LinearAlgebra
+
+    rng = StableRNG(1234)
+    Dx, Dy, T = 2, 2, 3
+
+    # Build static array parameters
+    μ0 = @SVector randn(rng, Dx)
+    Σ0_mat = let M = @SMatrix randn(rng, Dx, Dx)
+        PDMat(Symmetric(M * M' + 0.1I))
+    end
+    A = @SMatrix randn(rng, Dx, Dx)
+    b = @SVector randn(rng, Dx)
+    Q = let M = @SMatrix randn(rng, Dx, Dx)
+        PDMat(Symmetric(M * M' + 0.1I))
+    end
+    H = @SMatrix randn(rng, Dy, Dx)
+    c = @SVector randn(rng, Dy)
+    R = let M = @SMatrix randn(rng, Dy, Dy)
+        PDMat(Symmetric(M * M' + 0.1I))
+    end
+
+    As = fill(A, T)
+    bs_arr = fill(b, T)
+    Qs = fill(Q, T)
+    Hs = fill(H, T)
+    cs_arr = fill(c, T)
+    Rs = fill(R, T)
+
+    # Generate observations
+    ys = [SVector{Dy}(randn(rng, Dy)) for _ in 1:T]
+
+    # Value: compare against regular arrays
+    ll_static = kf_loglikelihood(μ0, Σ0_mat, As, bs_arr, Qs, Hs, cs_arr, Rs, ys)
+    ll_dense = kf_loglikelihood(
+        Vector(μ0),
+        PDMat(Matrix(Σ0_mat)),
+        [Matrix(A) for A in As],
+        [Vector(b) for b in bs_arr],
+        [PDMat(Matrix(Q)) for Q in Qs],
+        [Matrix(H) for H in Hs],
+        [Vector(c) for c in cs_arr],
+        [PDMat(Matrix(R)) for R in Rs],
+        [Vector(y) for y in ys],
+    )
+    @test ll_static ≈ ll_dense
+
+    # rrule: check output types are static
+    _, pullback = ChainRulesCore.rrule(
+        kf_loglikelihood, μ0, Σ0_mat, As, bs_arr, Qs, Hs, cs_arr, Rs, ys
+    )
+    cotangents = pullback(1.0)
+    ∂μ0 = cotangents[2]
+    ∂Σ0 = cotangents[3]
+    ∂As = cotangents[4]
+    ∂bs = cotangents[5]
+
+    @test ∂μ0 isa SVector{Dx}
+    @test ∂Σ0 isa SMatrix{Dx,Dx}
+    @test eltype(∂As) <: SMatrix{Dx,Dx}
+    @test eltype(∂bs) <: SVector{Dx}
+
+    # rrule: check gradient correctness for b
+    fdm = central_fdm(5, 1)
+    function ll_b(b_vec)
+        b_s = SVector{Dx}(b_vec)
+        return kf_loglikelihood(μ0, Σ0_mat, As, fill(b_s, T), Qs, Hs, cs_arr, Rs, ys)
+    end
+    ∂b_fd = FiniteDifferences.grad(fdm, ll_b, Vector(b))[1]
+    @test sum(∂bs) ≈ SVector{Dx}(∂b_fd) rtol = 1e-4
+end
+
+## SSMParameterLogDensity ######################################################################
+
+@testitem "SSMParameterLogDensity: regular SSM" begin
+    using GeneralisedFilters
+    using SSMProblems
+    using LogDensityProblems
+    using StableRNGs
+    using Distributions
+    using PDMats
+    using LinearAlgebra
+    using OffsetArrays
+
+    rng = StableRNG(1234)
+
+    # Simple 1D model: x_t = a * x_{t-1} + b + noise, y_t = x_t + noise
+    # Unknown parameter: b (drift)
+    a = 0.8
+    q² = 0.1
+    r² = 0.5
+    σ₀² = 1.0
+    T_len = 5
+
+    function build_ssm(θ)
+        return create_homogeneous_linear_gaussian_model(
+            [0.0],
+            PDMat([σ₀²;;]),
+            [a;;],
+            [θ[1]],
+            PDMat([q²;;]),
+            [1.0;;],
+            [0.0],
+            PDMat([r²;;]),
+        )
+    end
+
+    true_b = 1.0
+    true_ssm = build_ssm([true_b])
+    _, _, ys = SSMProblems.sample(rng, true_ssm, T_len)
+
+    # Sample a trajectory
+    x0, xs, _ = SSMProblems.sample(rng, true_ssm, T_len)
+    trajectory = OffsetVector(vcat([x0], xs), -1)
+
+    prior = MvNormal([0.0], [4.0;;])
+    pssm = ParameterisedSSM(build_ssm, ys)
+    ld = SSMParameterLogDensity(prior, pssm, Ref(trajectory))
+
+    θ_test = [0.5]
+    ll = LogDensityProblems.logdensity(ld, θ_test)
+
+    # Manual
+    model = build_ssm(θ_test)
+    ll_expected = logpdf(prior, θ_test) + trajectory_logdensity(model, trajectory, ys)
+
+    @test ll ≈ ll_expected
+    @test LogDensityProblems.dimension(ld) == 1
+end
+
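# SSMParameterLogDensity implements the LogDensityProblems interface, so gradients for
# HMC-style samplers can be obtained through LogDensityProblemsAD (both are dependencies
# in this diff). A hedged sketch continuing from the setup above, assuming the target is
# ForwardDiff-differentiable:
using LogDensityProblemsAD
using ForwardDiff

ad_ld = LogDensityProblemsAD.ADgradient(:ForwardDiff, ld)
lp, ∇lp = LogDensityProblems.logdensity_and_gradient(ad_ld, θ_test)
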
+@testitem "SSMParameterLogDensity: HierarchicalSSM" begin
+    using GeneralisedFilters
+    using SSMProblems
+    using LogDensityProblems
+    using StableRNGs
+    using Distributions
+    using PDMats
+    using LinearAlgebra
+    using OffsetArrays
+
+    rng = StableRNG(1234)
+
+    D_outer, D_inner, D_obs = 1, 1, 1
+    T_len = 5
+
+    _, hier_model = GeneralisedFilters.GFTest.create_dummy_linear_gaussian_model(
+        rng, D_outer, D_inner, D_obs; static_arrays=false
+    )
+    x0, _, xs, _, ys = SSMProblems.sample(rng, hier_model, T_len)
+    outer_traj = OffsetVector(vcat([x0], xs), -1)
+
+    # Parameterise the model by b (inner dynamics offset)
+    fixed_model = hier_model
+    build_hier(θ) = GeneralisedFilters.GFTest.with_inner_drift(fixed_model, θ)
+
+    prior = MvNormal(zeros(D_inner), 4.0 * I)
+    pssm = ParameterisedSSM(build_hier, ys)
+    ld = SSMParameterLogDensity(prior, pssm, KF(), Ref(outer_traj))
+
+    θ_test = [0.5]
+    ll = LogDensityProblems.logdensity(ld, θ_test)
+
+    model = build_hier(θ_test)
+    ll_expected = logpdf(prior, θ_test) + trajectory_logdensity(model, KF(), outer_traj, ys)
+
+    @test ll ≈ ll_expected
+    @test LogDensityProblems.dimension(ld) == 1
+end
diff --git a/GeneralisedFilters/test/integrations/particle_gibbs.jl b/GeneralisedFilters/test/integrations/particle_gibbs.jl
new file mode 100644
index 00000000..b7bd3f6d
--- /dev/null
+++ b/GeneralisedFilters/test/integrations/particle_gibbs.jl
@@ -0,0 +1,288 @@
+"""Tests for the ParticleGibbs sampler."""
+
+## NUTS: smoke test ############################################################################
+
+@testitem "ParticleGibbs NUTS: smoke test" begin
+    using GeneralisedFilters
+    using AbstractMCMC: AbstractMCMC
+    using AdvancedHMC: NUTS
+    using MCMCChains: MCMCChains
+    using StableRNGs
+    using Distributions
+    using PDMats
+    using LinearAlgebra
+    using SSMProblems
+    using ForwardDiff
+
+    rng = StableRNG(1234)
+
+    # Simple 1D model: x_t = a*x_{t-1} + b + noise, y_t = x_t + noise
+    a = 0.8
+    q² = 0.1
+    r² = 0.5
+    σ₀² = 1.0
+
+    function build_ssm(θ)
+        return create_homogeneous_linear_gaussian_model(
+            [0.0],
+            PDMat([σ₀²;;]),
+            [a;;],
+            [θ[1]],
+            PDMat([q²;;]),
+            [1.0;;],
+            [0.0],
+            PDMat([r²;;]),
+        )
+    end
+
+    true_ssm = build_ssm([1.0])
+    _, _, ys = SSMProblems.sample(rng, true_ssm, 5)
+
+    prior = MvNormal([0.0], [4.0;;])
+    pssm = ParameterisedSSM(build_ssm, ys)
+    model = ParticleGibbsModel(prior, pssm)
+    pg = ParticleGibbs(CSMC(BF(10)), NUTS(0.8))
+
+    # Initial step
+    transition, state = AbstractMCMC.step(rng, model, pg; n_adapts=5)
+    @test transition isa GeneralisedFilters.ParticleGibbsTransition
+    @test state isa GeneralisedFilters.ParticleGibbsState
+    @test length(transition.θ) == 1
+    @test haskey(transition.stat, :acceptance_rate)
+    @test haskey(transition.stat, :step_size)
+    @test haskey(transition.stat, :tree_depth)
+
+    # Subsequent step
+    transition2, state2 = AbstractMCMC.step(rng, model, pg, state; n_adapts=5)
+    @test transition2 isa GeneralisedFilters.ParticleGibbsTransition
+    @test state2 isa GeneralisedFilters.ParticleGibbsState
+
+    # Chain output via sample
+    chain = AbstractMCMC.sample(
+        rng, model, pg, 20; n_adapts=10, progress=false, chain_type=MCMCChains.Chains
+    )
+    @test chain isa MCMCChains.Chains
+    @test size(chain, 1) == 20
+    @test length(names(chain, :parameters)) == 1
+
+    # Custom parameter names
+    chain_named = AbstractMCMC.sample(
+        rng,
+        model,
+        pg,
+        20;
+        n_adapts=10,
+        progress=false,
+        chain_type=MCMCChains.Chains,
+        param_names=["b"],
+    )
+    @test :b in names(chain_named, :parameters)
+end
+
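# Structure of the sampler exercised above: ParticleGibbs alternates two Gibbs blocks per
# iteration — (1) trajectory | θ via a conditional SMC sweep (the CSMC component), and
# (2) θ | trajectory via the inner kernel (NUTS here, RWMH below), whose target
# p(θ | x_{0:T}, y_{1:T}) ∝ p(θ) p(x_{0:T}, y_{1:T} | θ) is exactly the density that
# trajectory_logdensity evaluates.
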
+## MH: smoke test #############################################################################
+
+@testitem "ParticleGibbs MH: smoke test" begin
+    using GeneralisedFilters
+    using AbstractMCMC: AbstractMCMC
+    using AdvancedMH: RWMH
+    using MCMCChains: MCMCChains
+    using StableRNGs
+    using Distributions
+    using PDMats
+    using LinearAlgebra
+    using SSMProblems
+
+    rng = StableRNG(1234)
+
+    a = 0.8
+    q² = 0.1
+    r² = 0.5
+    σ₀² = 1.0
+
+    function build_ssm_mh(θ)
+        return create_homogeneous_linear_gaussian_model(
+            [0.0],
+            PDMat([σ₀²;;]),
+            [a;;],
+            [θ[1]],
+            PDMat([q²;;]),
+            [1.0;;],
+            [0.0],
+            PDMat([r²;;]),
+        )
+    end
+
+    true_ssm = build_ssm_mh([1.0])
+    _, _, ys = SSMProblems.sample(rng, true_ssm, 5)
+
+    prior = MvNormal([0.0], [4.0;;])
+    pssm = ParameterisedSSM(build_ssm_mh, ys)
+    model = ParticleGibbsModel(prior, pssm)
+    pg = ParticleGibbs(CSMC(BF(10)), RWMH(MvNormal(zeros(1), 0.5 * I)))
+
+    # Initial step
+    transition, state = AbstractMCMC.step(rng, model, pg)
+    @test transition isa GeneralisedFilters.ParticleGibbsTransition
+    @test state isa GeneralisedFilters.ParticleGibbsState
+    @test length(transition.θ) == 1
+    @test haskey(transition.stat, :accepted)
+
+    # Subsequent step
+    transition2, state2 = AbstractMCMC.step(rng, model, pg, state)
+    @test transition2 isa GeneralisedFilters.ParticleGibbsTransition
+
+    # Chain output
+    chain = AbstractMCMC.sample(
+        rng, model, pg, 20; progress=false, chain_type=MCMCChains.Chains
+    )
+    @test chain isa MCMCChains.Chains
+    @test size(chain, 1) == 20
+    @test :accepted in names(chain, :internals)
+end
+
+## NUTS: HierarchicalSSM against augmented KF ###################################################
+
+@testitem "ParticleGibbs NUTS: HierarchicalSSM" begin
+    using GeneralisedFilters
+    using AbstractMCMC: AbstractMCMC
+    using AdvancedHMC: NUTS
+    using MCMCChains: MCMCChains
+    using ADTypes: ADTypes
+    using StableRNGs
+    using Distributions
+    using PDMats
+    using LinearAlgebra
+    using Statistics
+    using SSMProblems
+    using Zygote
+
+    rng = StableRNG(42)
+
+    # Dimensions
+    Dx, Dz, Dy = 1, 1, 1
+    T_len = 10
+    N_particles = 50
+    N_iter = 5000
+    N_adapts = 500
+    σ²_b = 4.0
+
+    # Generate a random hierarchical model and fix everything except the inner drift b
+    full_model, hier_model = GeneralisedFilters.GFTest.create_dummy_linear_gaussian_model(
+        rng, Dx, Dz, Dy; static_arrays=true
+    )
+    _, _, _, _, ys = SSMProblems.sample(rng, hier_model, T_len)
+
+    # Parameterise: θ controls inner dynamics drift b
+    fixed = hier_model
+    build_hier(θ) = GeneralisedFilters.GFTest.with_inner_drift(fixed, θ)
+
+    # Augmented KF ground truth using full linear Gaussian model with unknown inner drift
+    drift_indices = (Dx + 1):(Dx + Dz)
+    kf_post = GeneralisedFilters.GFTest.augmented_kf_drift_posterior(
+        full_model, ys, drift_indices; σ²_b=σ²_b, ε=1e-12
+    )
+    kf_mean = kf_post.mean
+    kf_std = kf_post.std
+
+    # Particle Gibbs with RBPF
+    prior = MvNormal(zeros(Dz), σ²_b * I)
+    pssm = ParameterisedSSM(build_hier, ys)
+    model = ParticleGibbsModel(prior, pssm)
+    pg = ParticleGibbs(
+        CSMC(RBPF(BF(N_particles), KF())), NUTS(0.8); adtype=ADTypes.AutoZygote()
+    )
+
+    chain = AbstractMCMC.sample(
+        rng,
+        model,
+        pg,
+        N_iter;
+        n_adapts=N_adapts,
+        progress=false,
+        chain_type=MCMCChains.Chains,
+        param_names=["b_z"],
+    )
+
+    post_samples = Array(chain[:b_z])[(N_adapts + 1):end]
+
+    @test mean(post_samples) ≈ kf_mean[1] rtol = 1e-1
+    @test std(post_samples) ≈ kf_std[1] rtol = 1e-1
+end
+
+## NUTS: regular SSM against augmented KF ######################################################
+
+@testitem "ParticleGibbs NUTS: regular SSM" begin
+    using GeneralisedFilters
+    using AbstractMCMC: AbstractMCMC
+    using AdvancedHMC: NUTS
+    using MCMCChains: MCMCChains
+    using StableRNGs
+    using Distributions
+    using PDMats
+    using LinearAlgebra
+    using Statistics
+    using SSMProblems
+    using ForwardDiff
+
+    rng = StableRNG(42)
+
+    # Model parameters
+    a = 0.8
+    q² = 0.1
+    r² = 0.5
+    σ₀² = 1.0
+    σ_b² = 4.0
+    T_len = 10
+    N_particles = 50
+    N_iter = 5000
+    N_adapts = 500
+
+    function build_ssm(θ)
+        return create_homogeneous_linear_gaussian_model(
+            [0.0],
+            PDMat([σ₀²;;]),
+            [a;;],
+            [θ[1]],
+            PDMat([q²;;]),
+            [1.0;;],
+            [0.0],
+            PDMat([r²;;]),
+        )
+    end
+
+    # Generate data
+    true_b = 1.5
+    true_ssm = build_ssm([true_b])
+    _, _, ys = SSMProblems.sample(rng, true_ssm, T_len)
+
+    # Augmented KF ground truth
+    ref_model = build_ssm([0.0])
+    kf_post = GeneralisedFilters.GFTest.augmented_kf_drift_posterior(
+        ref_model, ys, 1; σ²_b=σ_b², ε=1e-12
+    )
+    kf_mean = kf_post.mean[1]
+    kf_std = kf_post.std[1]
+
+    # Particle Gibbs
+    prior = MvNormal([0.0], [σ_b²;;])
+    pssm = ParameterisedSSM(build_ssm, ys)
+    model = ParticleGibbsModel(prior, pssm)
+    pg = ParticleGibbs(CSMC(BF(N_particles)), NUTS(0.8))
+
+    chain = AbstractMCMC.sample(
+        rng,
+        model,
+        pg,
+        N_iter;
+        n_adapts=N_adapts,
+        progress=false,
+        chain_type=MCMCChains.Chains,
+        param_names=["b"],
+    )
+
+    # Compare posterior statistics (discard warmup)
+    post_samples = Array(chain[Symbol("b")])[(N_adapts + 1):end]
+
+    @test mean(post_samples) ≈ kf_mean rtol = 0.1
+    @test std(post_samples) ≈ kf_std rtol = 0.2
+end
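
# The Turing bridge tested next wraps the same machinery: SSMTrajectory(ssm, ys) (or
# SSMTrajectory(ssm, algo, ys) for Rao-Blackwellised models) lets the latent trajectory
# appear as a single ~ statement inside a @model, with ParticleGibbs splitting sampling
# between the trajectory and the remaining parameters. Minimal pattern, with `param_prior`
# and `build_model` as hypothetical stand-ins for a user's prior and model constructor:
using Turing: @model

@model function generic_ssm(ys)
    θ ~ param_prior               # hypothetical parameter prior
    ssm = build_model(θ)          # hypothetical SSM constructor
    x ~ SSMTrajectory(ssm, ys)    # trajectory sampled by the CSMC block
end
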
smoke test" begin + using GeneralisedFilters + using AbstractMCMC: AbstractMCMC + using AdvancedMH: RWMH + using MCMCChains: MCMCChains + using Turing: @model + using StableRNGs + using Distributions + using PDMats + using LinearAlgebra + using SSMProblems + + rng = StableRNG(1234) + + a = 0.8 + q² = 0.1 + r² = 0.5 + σ₀² = 1.0 + T_len = 5 + N_particles = 10 + N_iter = 10 + + function build_ssm_mh(drift) + return create_homogeneous_linear_gaussian_model( + [0.0], + PDMat([σ₀²;;]), + [a;;], + [drift[1]], + PDMat([q²;;]), + [1.0;;], + [0.0], + PDMat([r²;;]), + ) + end + + true_ssm = build_ssm_mh([1.5]) + _, _, ys = SSMProblems.sample(rng, true_ssm, T_len) + + @model function drift_model_mh(ys) + b ~ MvNormal([0.0], 4.0 * I) + ssm = build_ssm_mh(b) + x ~ SSMTrajectory(ssm, ys) + end + + m = drift_model_mh(ys) + pg = ParticleGibbs(CSMC(BF(N_particles)), RWMH(MvNormal(zeros(1), 0.5 * I))) + + chain = AbstractMCMC.sample( + rng, m, pg, N_iter; progress=false, chain_type=MCMCChains.Chains + ) + + @test chain isa MCMCChains.Chains + @test size(chain, 1) == N_iter + @test :accepted in names(chain, :internals) +end + +## NUTS: regular SSM against augmented KF ###################################################### + +@testitem "ParticleGibbs Turing NUTS: regular SSM" begin + using GeneralisedFilters + using AbstractMCMC: AbstractMCMC + using AdvancedHMC: NUTS + using MCMCChains: MCMCChains + using Turing: @model + using StableRNGs + using Distributions + using PDMats + using LinearAlgebra + using Statistics + using SSMProblems + using ForwardDiff + + rng = StableRNG(42) + + a = 0.8 + q² = 0.1 + r² = 0.5 + σ₀² = 1.0 + σ_b² = 4.0 + T_len = 10 + N_particles = 50 + N_iter = 5000 + N_adapts = 500 + + function build_ssm_reg(drift) + return create_homogeneous_linear_gaussian_model( + [0.0], + PDMat([σ₀²;;]), + [a;;], + [drift[1]], + PDMat([q²;;]), + [1.0;;], + [0.0], + PDMat([r²;;]), + ) + end + + true_b = 1.5 + true_ssm = build_ssm_reg([true_b]) + _, _, ys = SSMProblems.sample(rng, true_ssm, T_len) + + # Augmented KF ground truth + ref_model = build_ssm_reg([0.0]) + kf_post = GeneralisedFilters.GFTest.augmented_kf_drift_posterior( + ref_model, ys, 1; σ²_b=σ_b², ε=1e-12 + ) + kf_mean = kf_post.mean[1] + kf_std = kf_post.std[1] + + @model function drift_model_reg(ys) + b ~ MvNormal([0.0], σ_b² * I) + ssm = build_ssm_reg(b) + x ~ SSMTrajectory(ssm, ys) + end + + m = drift_model_reg(ys) + pg = ParticleGibbs(CSMC(BF(N_particles)), NUTS(0.8)) + + chain = AbstractMCMC.sample( + rng, m, pg, N_iter; n_adapts=N_adapts, progress=false, chain_type=MCMCChains.Chains + ) + + # Find the parameter column (not trajectory) + post_samples = Array(chain[:b])[(N_adapts + 1):end] + + @test mean(post_samples) ≈ kf_mean rtol = 0.1 + @test std(post_samples) ≈ kf_std rtol = 0.2 +end + +## NUTS: HierarchicalSSM against augmented KF ################################################## + +@testitem "ParticleGibbs Turing NUTS: HierarchicalSSM" begin + using GeneralisedFilters + using AbstractMCMC: AbstractMCMC + using AdvancedHMC: NUTS + using MCMCChains: MCMCChains + using ADTypes: ADTypes + using Turing: @model + using StableRNGs + using Distributions + using PDMats + using LinearAlgebra + using Statistics + using SSMProblems + using Zygote + + rng = StableRNG(42) + + Dx, Dz, Dy = 1, 1, 1 + T_len = 10 + N_particles = 50 + N_iter = 5000 + N_adapts = 500 + σ²_b = 4.0 + + full_model, hier_model = GeneralisedFilters.GFTest.create_dummy_linear_gaussian_model( + rng, Dx, Dz, Dy; static_arrays=true + ) + _, _, _, _, ys = 
+## NUTS: regular SSM against augmented KF ######################################################
+
+@testitem "ParticleGibbs Turing NUTS: regular SSM" begin
+    using GeneralisedFilters
+    using AbstractMCMC: AbstractMCMC
+    using AdvancedHMC: NUTS
+    using MCMCChains: MCMCChains
+    using Turing: @model
+    using StableRNGs
+    using Distributions
+    using PDMats
+    using LinearAlgebra
+    using Statistics
+    using SSMProblems
+    using ForwardDiff
+
+    rng = StableRNG(42)
+
+    a = 0.8
+    q² = 0.1
+    r² = 0.5
+    σ₀² = 1.0
+    σ_b² = 4.0
+    T_len = 10
+    N_particles = 50
+    N_iter = 5000
+    N_adapts = 500
+
+    function build_ssm_reg(drift)
+        return create_homogeneous_linear_gaussian_model(
+            [0.0],
+            PDMat([σ₀²;;]),
+            [a;;],
+            [drift[1]],
+            PDMat([q²;;]),
+            [1.0;;],
+            [0.0],
+            PDMat([r²;;]),
+        )
+    end
+
+    true_b = 1.5
+    true_ssm = build_ssm_reg([true_b])
+    _, _, ys = SSMProblems.sample(rng, true_ssm, T_len)
+
+    # Augmented KF ground truth
+    ref_model = build_ssm_reg([0.0])
+    kf_post = GeneralisedFilters.GFTest.augmented_kf_drift_posterior(
+        ref_model, ys, 1; σ²_b=σ_b², ε=1e-12
+    )
+    kf_mean = kf_post.mean[1]
+    kf_std = kf_post.std[1]
+
+    @model function drift_model_reg(ys)
+        b ~ MvNormal([0.0], σ_b² * I)
+        ssm = build_ssm_reg(b)
+        x ~ SSMTrajectory(ssm, ys)
+    end
+
+    m = drift_model_reg(ys)
+    pg = ParticleGibbs(CSMC(BF(N_particles)), NUTS(0.8))
+
+    chain = AbstractMCMC.sample(
+        rng, m, pg, N_iter; n_adapts=N_adapts, progress=false, chain_type=MCMCChains.Chains
+    )
+
+    # Find the parameter column (not trajectory)
+    post_samples = Array(chain[:b])[(N_adapts + 1):end]
+
+    @test mean(post_samples) ≈ kf_mean rtol = 0.1
+    @test std(post_samples) ≈ kf_std rtol = 0.2
+end
+
+## NUTS: HierarchicalSSM against augmented KF ##################################################
+
+@testitem "ParticleGibbs Turing NUTS: HierarchicalSSM" begin
+    using GeneralisedFilters
+    using AbstractMCMC: AbstractMCMC
+    using AdvancedHMC: NUTS
+    using MCMCChains: MCMCChains
+    using ADTypes: ADTypes
+    using Turing: @model
+    using StableRNGs
+    using Distributions
+    using PDMats
+    using LinearAlgebra
+    using Statistics
+    using SSMProblems
+    using Zygote
+
+    rng = StableRNG(42)
+
+    Dx, Dz, Dy = 1, 1, 1
+    T_len = 10
+    N_particles = 50
+    N_iter = 5000
+    N_adapts = 500
+    σ²_b = 4.0
+
+    full_model, hier_model = GeneralisedFilters.GFTest.create_dummy_linear_gaussian_model(
+        rng, Dx, Dz, Dy; static_arrays=true
+    )
+    _, _, _, _, ys = SSMProblems.sample(rng, hier_model, T_len)
+
+    fixed = hier_model
+
+    # Augmented KF ground truth
+    drift_indices = (Dx + 1):(Dx + Dz)
+    kf_post = GeneralisedFilters.GFTest.augmented_kf_drift_posterior(
+        full_model, ys, drift_indices; σ²_b=σ²_b, ε=1e-12
+    )
+    kf_mean = kf_post.mean
+    kf_std = kf_post.std
+
+    @model function drift_model_hier(ys)
+        b ~ MvNormal(zeros(Dz), σ²_b * I)
+        ssm = GeneralisedFilters.GFTest.with_inner_drift(fixed, b)
+        x ~ SSMTrajectory(ssm, KF(), ys)
+    end
+
+    m = drift_model_hier(ys)
+    pg = ParticleGibbs(
+        CSMC(RBPF(BF(N_particles), KF())), NUTS(0.8); adtype=ADTypes.AutoZygote()
+    )
+
+    chain = AbstractMCMC.sample(
+        rng, m, pg, N_iter; n_adapts=N_adapts, progress=false, chain_type=MCMCChains.Chains
+    )
+
+    post_samples = Array(chain[:b])[(N_adapts + 1):end]
+
+    @test mean(post_samples) ≈ kf_mean[1] rtol = 1e-1
+    @test std(post_samples) ≈ kf_std[1] rtol = 1e-1
+end
diff --git a/GeneralisedFilters/test/runtests.jl b/GeneralisedFilters/test/runtests.jl
index f9bf51ab..a945fc24 100644
--- a/GeneralisedFilters/test/runtests.jl
+++ b/GeneralisedFilters/test/runtests.jl
@@ -15,6 +15,11 @@ include("algorithms/csmc.jl")
include("components/resamplers.jl")
include("components/kalman_gradient.jl")

+# Integration tests
+include("integrations/logdensity.jl")
+include("integrations/particle_gibbs.jl")
+include("integrations/turing.jl")
+
# Quality tests
include("support/type_stability.jl")
include("support/aqua.jl")
diff --git a/GeneralisedFilters/test/support/type_stability.jl b/GeneralisedFilters/test/support/type_stability.jl
index c3cf282a..e4f0aa91 100644
--- a/GeneralisedFilters/test/support/type_stability.jl
+++ b/GeneralisedFilters/test/support/type_stability.jl
@@ -20,8 +20,8 @@
    # resample (fails test_op)
    rs = GF.resampler(algo)
-    @test_opt skip=true GF.maybe_resample(rng, rs, init_state)
-    @test_call skip=true GF.maybe_resample(rng, rs, init_state)
+    @test_opt skip = true GF.maybe_resample(rng, rs, init_state)
+    @test_call skip = true GF.maybe_resample(rng, rs, init_state)
    state = GF.maybe_resample(rng, rs, init_state)

    # predict