From a1d278f6b11eedbb3347a4d8a2b9516545fe9eb3 Mon Sep 17 00:00:00 2001
From: Daniel Apatin
Date: Mon, 15 Dec 2025 11:48:11 +0300
Subject: [PATCH 1/2] add gopter to vendor

---
 go.mod | 2 +
 go.sum | 603 ++++++++++++++++++
 vendor/github.com/leanovate/gopter/.gitignore | 30 +
 .../github.com/leanovate/gopter/.travis.yml | 12 +
 .../github.com/leanovate/gopter/CHANGELOG.md | 65 ++
 vendor/github.com/leanovate/gopter/LICENSE | 21 +
 vendor/github.com/leanovate/gopter/Makefile | 41 ++
 vendor/github.com/leanovate/gopter/README.md | 45 ++
 .../github.com/leanovate/gopter/bi_mapper.go | 111 ++++
 .../leanovate/gopter/derived_gen.go | 114 ++++
 vendor/github.com/leanovate/gopter/doc.go | 35 +
 vendor/github.com/leanovate/gopter/flag.go | 23 +
 .../leanovate/gopter/formated_reporter.go | 144 +++++
 vendor/github.com/leanovate/gopter/gen.go | 262 ++++++++
 .../leanovate/gopter/gen/array_of.go | 62 ++
 .../leanovate/gopter/gen/array_shrink.go | 53 ++
 .../github.com/leanovate/gopter/gen/bool.go | 10 +
 .../leanovate/gopter/gen/complex.go | 49 ++
 .../leanovate/gopter/gen/complex_shrink.go | 27 +
 .../github.com/leanovate/gopter/gen/const.go | 11 +
 vendor/github.com/leanovate/gopter/gen/doc.go | 4 +
 .../github.com/leanovate/gopter/gen/fail.go | 15 +
 .../github.com/leanovate/gopter/gen/floats.go | 69 ++
 .../leanovate/gopter/gen/floats_shrink.go | 49 ++
 .../leanovate/gopter/gen/frequency.go | 33 +
 .../leanovate/gopter/gen/integers.go | 225 +++++++
 .../leanovate/gopter/gen/integers_shrink.go | 95 +++
 .../github.com/leanovate/gopter/gen/map_of.go | 87 +++
 .../leanovate/gopter/gen/map_shrink.go | 149 +++++
 .../github.com/leanovate/gopter/gen/one_of.go | 29 +
 .../github.com/leanovate/gopter/gen/ptr_of.go | 41 ++
 .../leanovate/gopter/gen/ptr_shrink.go | 45 ++
 .../github.com/leanovate/gopter/gen/regex.go | 78 +++
 .../leanovate/gopter/gen/retry_until.go | 21 +
 .../github.com/leanovate/gopter/gen/sized.go | 20 +
 .../leanovate/gopter/gen/slice_of.go | 106 +++
 .../leanovate/gopter/gen/slice_shrink.go | 105 +++
 .../leanovate/gopter/gen/string_shrink.go | 11 +
 .../leanovate/gopter/gen/strings.go | 158 +++++
 .../github.com/leanovate/gopter/gen/struct.go | 172 +++++
 .../github.com/leanovate/gopter/gen/time.go | 37 ++
 .../leanovate/gopter/gen/time_shrink.go | 27 +
 .../leanovate/gopter/gen/weighted.go | 44 ++
 .../leanovate/gopter/gen_parameters.go | 81 +++
 .../github.com/leanovate/gopter/gen_result.go | 55 ++
 .../leanovate/gopter/locked_source.go | 77 +++
 vendor/github.com/leanovate/gopter/prop.go | 113 ++++
 .../gopter/prop/check_condition_func.go | 50 ++
 .../leanovate/gopter/prop/convert_result.go | 37 ++
 .../github.com/leanovate/gopter/prop/doc.go | 4 +
 .../github.com/leanovate/gopter/prop/error.go | 14 +
 .../leanovate/gopter/prop/forall.go | 130 ++++
 .../leanovate/gopter/prop/forall_no_shrink.go | 63 ++
 .../github.com/leanovate/gopter/prop_arg.go | 36 ++
 .../leanovate/gopter/prop_result.go | 109 ++++
 .../github.com/leanovate/gopter/properties.go | 59 ++
 .../github.com/leanovate/gopter/reporter.go | 7 +
 vendor/github.com/leanovate/gopter/runner.go | 77 +++
 vendor/github.com/leanovate/gopter/shrink.go | 185 ++++++
 .../leanovate/gopter/test_parameters.go | 48 ++
 .../leanovate/gopter/test_result.go | 52 ++
 vendor/modules.txt | 5 +
 62 files changed, 4542 insertions(+)
 create mode 100644 vendor/github.com/leanovate/gopter/.gitignore
 create mode 100644 vendor/github.com/leanovate/gopter/.travis.yml
 create mode 100644 vendor/github.com/leanovate/gopter/CHANGELOG.md
 create mode 100644 vendor/github.com/leanovate/gopter/LICENSE
 create mode 100644 vendor/github.com/leanovate/gopter/Makefile
 create mode 100644 vendor/github.com/leanovate/gopter/README.md
 create mode 100644 vendor/github.com/leanovate/gopter/bi_mapper.go
 create mode 100644 vendor/github.com/leanovate/gopter/derived_gen.go
 create mode 100644 vendor/github.com/leanovate/gopter/doc.go
 create mode 100644 vendor/github.com/leanovate/gopter/flag.go
 create mode 100644 vendor/github.com/leanovate/gopter/formated_reporter.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/array_of.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/array_shrink.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/bool.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/complex.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/complex_shrink.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/const.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/doc.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/fail.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/floats.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/floats_shrink.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/frequency.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/integers.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/integers_shrink.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/map_of.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/map_shrink.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/one_of.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/ptr_of.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/ptr_shrink.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/regex.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/retry_until.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/sized.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/slice_of.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/slice_shrink.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/string_shrink.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/strings.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/struct.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/time.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/time_shrink.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen/weighted.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen_parameters.go
 create mode 100644 vendor/github.com/leanovate/gopter/gen_result.go
 create mode 100644 vendor/github.com/leanovate/gopter/locked_source.go
 create mode 100644 vendor/github.com/leanovate/gopter/prop.go
 create mode 100644 vendor/github.com/leanovate/gopter/prop/check_condition_func.go
 create mode 100644 vendor/github.com/leanovate/gopter/prop/convert_result.go
 create mode 100644 vendor/github.com/leanovate/gopter/prop/doc.go
 create mode 100644 vendor/github.com/leanovate/gopter/prop/error.go
 create mode 100644 vendor/github.com/leanovate/gopter/prop/forall.go
 create mode 100644 vendor/github.com/leanovate/gopter/prop/forall_no_shrink.go
 create mode 100644 vendor/github.com/leanovate/gopter/prop_arg.go
 create mode 100644 vendor/github.com/leanovate/gopter/prop_result.go
 create mode 100644 vendor/github.com/leanovate/gopter/properties.go
 create mode 100644 vendor/github.com/leanovate/gopter/reporter.go
 create mode 100644
vendor/github.com/leanovate/gopter/runner.go create mode 100644 vendor/github.com/leanovate/gopter/shrink.go create mode 100644 vendor/github.com/leanovate/gopter/test_parameters.go create mode 100644 vendor/github.com/leanovate/gopter/test_result.go diff --git a/go.mod b/go.mod index be07803..2d067c2 100644 --- a/go.mod +++ b/go.mod @@ -9,3 +9,5 @@ require ( ) require github.com/google/uuid v1.6.0 + +require github.com/leanovate/gopter v0.2.11 // indirect diff --git a/go.sum b/go.sum index b190e0a..64c8385 100644 --- a/go.sum +++ b/go.sum @@ -1,8 +1,611 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage 
v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fogleman/gg v1.3.0 
h1:/7zJX8F6AaYQc57WQCyN9cAIz+4bCJGO9B+dyW29am8= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod 
h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= 
+github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod 
h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/leanovate/gopter v0.2.11 h1:vRjThO1EKPb/1NsDXuDrzldR28RLkBflWYcU9CvzWu4= +github.com/leanovate/gopter v0.2.11/go.mod h1:aK3tzZP/C+p1m3SPRE4SYZFGP7jjkuSI4f7Xvpt0S9c= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= +github.com/neelance/sourcemap v0.0.0-20200213170602-2833bce08e4c/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= +github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod 
h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/smarty/assertions v1.15.0/go.mod h1:yABtdzeQs6l1brC900WlRNwj6ZR55d7B+E8C6HtKdec= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/smartystreets/goconvey v1.8.1/go.mod h1:+/u4qLyY6x1jReYOp7GOM2FSt8aP9CzCZL03bI28W60= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= 
+golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.18.0 h1:jGzIakQa/ZXI1I0Fxvaa9W7yP25TqT6cHIHn+6CqvSQ= golang.org/x/image v0.18.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= 
+golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod 
h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors 
v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= 
+google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= 
+google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/vendor/github.com/leanovate/gopter/.gitignore b/vendor/github.com/leanovate/gopter/.gitignore new file mode 100644 index 0000000..45371a9 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/.gitignore @@ -0,0 +1,30 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof + +bin/ +*.iml +.idea/ +coverage.txt +.pkg.coverage diff --git a/vendor/github.com/leanovate/gopter/.travis.yml b/vendor/github.com/leanovate/gopter/.travis.yml new file mode 100644 index 0000000..954d526 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/.travis.yml @@ -0,0 +1,12 @@ +sudo: false +language: go +go: +- 1.x +script: make all coverage refreshGodoc +before_install: + - pip install --user codecov +after_success: + - codecov +notifications: + slack: + secure: M0PgOUB0Kzn0maWtd6NNtiKYINxMY/7zgbbDpb8mAa6NTPYuypEYkUgmo6HC74BzDWSjkJaLQOeZrumrOuJUKbGdT+eEYR1pXColp2qb/WxnSCAwlL9iM/k7pj6nIRUdlP7l6WX0QB/DNh+BC/9STHrcSKjBpUu38oO9CwT7klSj2hfPMjzcx7EO4f8pjSfwCrIyYbANKxLzP0lr4PcbdY/ZeGbc8R5/m9torzPjS2YXDl0tQQ7pvSS8UVToLfL0m+omp9A/lOu0n6FpdNIkof2Eu9qWJqsI7jy+Pi+8DGbfEyxSLKAhDiTn0nfO/5nwqWIBhUaVACBDxpaH6ewpiuMbs4RO+wNaEEuVEH8QMKZOx9PGgnzNJ3zZ5Hfm+FP8zBrwrKlsjUoy31waGFjgua2ne4X0wa+Ld4iFEsj+XoMKa1oxRKRXYFhyEywalwgBVjXH2+ZCMlFGV3QxaV5gVuYcfEuNQ4pOlJpk+WSgm7yfXEX2qosOk2p91yGyX2Msbe3B7Ov3PXVzs2CshIsYasHr46pLplMvG6Z+712TPsrFS0zhb8FAsm/Vd7xX2xxmNS/uffh3RgFzeZxg8S9/ObVq+JBkZAtK4j0SwLVsOkjI4W3yUVgfxvhnAM1iLzzeSyD64BSo1VyUZu1eSJ9YxJ1+K6ldo0u0hj2VHwO1vUE= diff --git 
a/vendor/github.com/leanovate/gopter/CHANGELOG.md b/vendor/github.com/leanovate/gopter/CHANGELOG.md new file mode 100644 index 0000000..40e7cc3 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/CHANGELOG.md @@ -0,0 +1,65 @@ +# Change log + +## [Unreleased] +### Additions +- `gopter.GenParameters` now has a `CloneWithSeed(seed int64)` function to + temparary copies to create rerunable sections of code. +- Added `gopter.Gen.MapResult` for power-user mappings +- Added `gopter.DeriveGen` to derive a generator and it's shrinker from a + bi-directional mapping (`gopter.BiMapper`) + +### Changed +- Refactored `commands` package under the hood to allow the use of mutable state. + Re-runability of commands is provided by invoking the `commands.GenInitialState` + generator with the same `gopter.GenParameters`. Of course `commands.GenInitialState` + is supposed to create the same state for the same parameters every time. +- Fixed a bug in `commands` that might lead to shrinked command sequences not + satisfying the precondtions. +- `commands.Command.PostCondition` was called with the state before running the command. It makes + much more sense to first do `commands.Command.NextState` and then `commands.Command.PostCondition` +- `commands.Commands.NewSystemUnderTest` now takes has an argument `initialState commands.State` to + allow implementators to create/bootstrap a system under test based on an arbitrary initial state. + So far examples were just using a constant initial state ... which is a bit boring. +- Fixed: Actually use `commands.Commands.InitialPreCondition` as sieve for + `commands.Commands.GenInitialState` +- Gen.Map and Shrink.Map now accept `interface{}` instead of `func (interface{}) interface{}` + + This allows cleaner mapping functions without type conversion. E.g. instead of + + ```Go + gen.AnyString().Map(function (v interface{}) interface{} { + return strings.ToUpper(v.(string)) + }) + ``` + you can (and should) now write + + ```Go + gen.AnyString().Map(function (v string) string { + return strings.ToUpper(v) + }) + ``` +- Correspondingly Gen.SuchThat now also ccept `interface{}` instead of `func (interface{}) bool` + + This allows cleaner sieve functions without type conversion. E.g. instead of + + ```Go + gen.AnyString().SuchThat(function (v interface{}) bool { + return HasPrefix(v.(string), "P") + }) + ``` + you can (and should) now write + + ```Go + gen.AnyString().SuchThat(function (v string) bool { + return HasPrefix(v, "P") + }) + ``` +- Gen.FlatMap now has a second parameter `resultType reflect.Type` defining the result type of the mapped generator +- Reason for these changes: The original `Map` and `FlatMap` had a recurring issue with empty results. If the original generator created an empty result there was no clean way to determine the result type of the mapped generator. The new version fixes this by extracting the return type of the mapping functions. + +## [0.1] - 2016-04-30 +### Added +- Initial implementation. 
+ +[Unreleased]: https://github.com/leanovate/gopter/compare/v0.1...HEAD +[0.1]: https://github.com/leanovate/gopter/tree/v0.1 diff --git a/vendor/github.com/leanovate/gopter/LICENSE b/vendor/github.com/leanovate/gopter/LICENSE new file mode 100644 index 0000000..072084c --- /dev/null +++ b/vendor/github.com/leanovate/gopter/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 leanovate + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/leanovate/gopter/Makefile b/vendor/github.com/leanovate/gopter/Makefile new file mode 100644 index 0000000..2846b24 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/Makefile @@ -0,0 +1,41 @@ +PACKAGES=$(shell go list ./...) + +all: format + @go get github.com/smartystreets/goconvey + @go build -v ./... + +format: + @echo "--> Running go fmt" + @gofmt -s -w . + +test: + @echo "--> Running tests" + @go test -v ./... -count=1 + @$(MAKE) vet + +coverage: + @echo "--> Running tests with coverage" + @echo "" > coverage.txt + for pkg in $(shell go list ./...); do \ + (go test -coverprofile=.pkg.coverage -covermode=atomic -v $$pkg && \ + cat .pkg.coverage >> coverage.txt) || exit 1; \ + done + @rm .pkg.coverage + @$(MAKE) vet + +vet: + @go vet 2>/dev/null ; if [ $$? -eq 3 ]; then \ + go get golang.org/x/tools/cmd/vet; \ + fi + @echo "--> Running go vet $(VETARGS)" + @find . -name "*.go" | grep -v "./Godeps/" | xargs go vet $(VETARGS); if [ $$? -eq 1 ]; then \ + echo ""; \ + echo "Vet found suspicious constructs. Please check the reported constructs"; \ + echo "and fix them if necessary before submitting the code for reviewal."; \ + fi + +refreshGodoc: + @echo "--> Refreshing godoc.org" + for pkg in $(shell go list ./...); do \ + curl -d "path=$$pkg" https://godoc.org/-/refresh ; \ + done diff --git a/vendor/github.com/leanovate/gopter/README.md b/vendor/github.com/leanovate/gopter/README.md new file mode 100644 index 0000000..103f536 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/README.md @@ -0,0 +1,45 @@ +# GOPTER + +... 
the GOlang Property TestER +[![Build Status](https://travis-ci.org/leanovate/gopter.svg?branch=master)](https://travis-ci.org/leanovate/gopter) +[![codecov](https://codecov.io/gh/leanovate/gopter/branch/master/graph/badge.svg)](https://codecov.io/gh/leanovate/gopter) +[![GoDoc](https://godoc.org/github.com/leanovate/gopter?status.png)](https://godoc.org/github.com/leanovate/gopter) +[![Go Report Card](https://goreportcard.com/badge/github.com/leanovate/gopter)](https://goreportcard.com/report/github.com/leanovate/gopter) + +[Change Log](CHANGELOG.md) + +## Synopsis + +Gopter tries to bring the goodness of [ScalaCheck](https://www.scalacheck.org/) (and implicitly, the goodness of [QuickCheck](http://hackage.haskell.org/package/QuickCheck)) to Go. +It can also be seen as a more sophisticated version of the testing/quick package. + +Main differences to ScalaCheck: + +* It is Go ... duh +* ... nevertheless: Do not expect the same typesafety and elegance as in ScalaCheck. +* For simplicity [Shrink](https://javadoc.io/doc/org.scalacheck/scalacheck_2.11/1.14.1/index.html#org.scalacheck.Shrink) has become part of the generators. They can still be easily changed if necessary. +* There is no [Pretty](https://javadoc.io/doc/org.scalacheck/scalacheck_2.11/1.14.1/index.html#org.scalacheck.util.Pretty) ... so far gopter feels quite comfortable being ugly. +* A generator for regex matches +* No parallel commands ... yet? + +Main differences to the testing/quick package: + +* Much tighter control over generators +* Shrinkers, i.e. automatically find the minimum value falsifying a property +* A generator for regex matches (already mentioned that ... but it's cool) +* Support for stateful tests + +## Documentation + +Current godocs: + +* [gopter](https://godoc.org/github.com/leanovate/gopter): Main interfaces +* [gopter/gen](https://godoc.org/github.com/leanovate/gopter/gen): All commonly used generators +* [gopter/prop](https://godoc.org/github.com/leanovate/gopter/prop): Common helpers to create properties from a condition function and specific generators +* [gopter/arbitrary](https://godoc.org/github.com/leanovate/gopter/arbitrary): Helpers automatically combine generators for arbitrary types +* [gopter/commands](https://godoc.org/github.com/leanovate/gopter/commands): Helpers to create stateful tests based on arbitrary commands +* [gopter/convey](https://godoc.org/github.com/leanovate/gopter/convey): Helpers used by gopter inside goconvey tests + +## License + +[MIT Licence](http://opensource.org/licenses/MIT) diff --git a/vendor/github.com/leanovate/gopter/bi_mapper.go b/vendor/github.com/leanovate/gopter/bi_mapper.go new file mode 100644 index 0000000..9df1129 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/bi_mapper.go @@ -0,0 +1,111 @@ +package gopter + +import ( + "fmt" + "reflect" +) + +// BiMapper is a bi-directional (or bijective) mapper of a tuple of values (up) +// to another tuple of values (down). +type BiMapper struct { + UpTypes []reflect.Type + DownTypes []reflect.Type + Downstream reflect.Value + Upstream reflect.Value +} + +// NewBiMapper creates a BiMapper of two functions `downstream` and its +// inverse `upstream`. +// That is: The return values of `downstream` must match the parameters of +// `upstream` and vice versa. 
+func NewBiMapper(downstream interface{}, upstream interface{}) *BiMapper { + downstreamVal := reflect.ValueOf(downstream) + if downstreamVal.Kind() != reflect.Func { + panic("downstream has to be a function") + } + upstreamVal := reflect.ValueOf(upstream) + if upstreamVal.Kind() != reflect.Func { + panic("upstream has to be a function") + } + + downstreamType := downstreamVal.Type() + upTypes := make([]reflect.Type, downstreamType.NumIn()) + for i := 0; i < len(upTypes); i++ { + upTypes[i] = downstreamType.In(i) + } + downTypes := make([]reflect.Type, downstreamType.NumOut()) + for i := 0; i < len(downTypes); i++ { + downTypes[i] = downstreamType.Out(i) + } + + upstreamType := upstreamVal.Type() + if len(upTypes) != upstreamType.NumOut() { + panic(fmt.Sprintf("upstream is expected to have %d return values", len(upTypes))) + } + for i, upType := range upTypes { + if upstreamType.Out(i) != upType { + panic(fmt.Sprintf("upstream has wrong return type %d: %v != %v", i, upstreamType.Out(i), upType)) + } + } + if len(downTypes) != upstreamType.NumIn() { + panic(fmt.Sprintf("upstream is expected to have %d parameters", len(downTypes))) + } + for i, downType := range downTypes { + if upstreamType.In(i) != downType { + panic(fmt.Sprintf("upstream has wrong parameter type %d: %v != %v", i, upstreamType.In(i), downType)) + } + } + + return &BiMapper{ + UpTypes: upTypes, + DownTypes: downTypes, + Downstream: downstreamVal, + Upstream: upstreamVal, + } +} + +// ConvertUp calls the Upstream function on the arguments in the down array +// and returns the results. +func (b *BiMapper) ConvertUp(down []interface{}) []interface{} { + if len(down) != len(b.DownTypes) { + panic(fmt.Sprintf("Expected %d values != %d", len(b.DownTypes), len(down))) + } + downVals := make([]reflect.Value, len(b.DownTypes)) + for i, val := range down { + if val == nil { + downVals[i] = reflect.Zero(b.DownTypes[i]) + } else { + downVals[i] = reflect.ValueOf(val) + } + } + upVals := b.Upstream.Call(downVals) + up := make([]interface{}, len(upVals)) + for i, upVal := range upVals { + up[i] = upVal.Interface() + } + + return up +} + +// ConvertDown calls the Downstream function on the elements of the up array +// and returns the results. +func (b *BiMapper) ConvertDown(up []interface{}) []interface{} { + if len(up) != len(b.UpTypes) { + panic(fmt.Sprintf("Expected %d values != %d", len(b.UpTypes), len(up))) + } + upVals := make([]reflect.Value, len(b.UpTypes)) + for i, val := range up { + if val == nil { + upVals[i] = reflect.Zero(b.UpTypes[i]) + } else { + upVals[i] = reflect.ValueOf(val) + } + } + downVals := b.Downstream.Call(upVals) + down := make([]interface{}, len(downVals)) + for i, downVal := range downVals { + down[i] = downVal.Interface() + } + + return down +} diff --git a/vendor/github.com/leanovate/gopter/derived_gen.go b/vendor/github.com/leanovate/gopter/derived_gen.go new file mode 100644 index 0000000..74933b4 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/derived_gen.go @@ -0,0 +1,114 @@ +package gopter + +import ( + "fmt" + "reflect" +) + +type derivedGen struct { + biMapper *BiMapper + upGens []Gen + resultType reflect.Type +} + +func (d *derivedGen) Generate(genParams *GenParameters) *GenResult { + labels := []string{} + up := make([]interface{}, len(d.upGens)) + shrinkers := make([]Shrinker, len(d.upGens)) + sieves := make([]func(v interface{}) bool, len(d.upGens)) + + var ok bool + for i, gen := range d.upGens { + result := gen(genParams) + labels = append(labels, result.Labels...) 
+ shrinkers[i] = result.Shrinker + sieves[i] = result.Sieve + up[i], ok = result.Retrieve() + if !ok { + return &GenResult{ + Shrinker: d.Shrinker(result.Shrinker), + Result: nil, + Labels: result.Labels, + ResultType: d.resultType, + Sieve: d.Sieve(sieves...), + } + } + } + down := d.biMapper.ConvertDown(up) + if len(down) == 1 { + return &GenResult{ + Shrinker: d.Shrinker(CombineShrinker(shrinkers...)), + Result: down[0], + Labels: labels, + ResultType: reflect.TypeOf(down[0]), + Sieve: d.Sieve(sieves...), + } + } + return &GenResult{ + Shrinker: d.Shrinker(CombineShrinker(shrinkers...)), + Result: down, + Labels: labels, + ResultType: reflect.TypeOf(down), + Sieve: d.Sieve(sieves...), + } +} + +func (d *derivedGen) Sieve(baseSieve ...func(interface{}) bool) func(interface{}) bool { + return func(down interface{}) bool { + if down == nil { + return false + } + downs, ok := down.([]interface{}) + if !ok { + downs = []interface{}{down} + } + ups := d.biMapper.ConvertUp(downs) + for i, up := range ups { + if baseSieve[i] != nil && !baseSieve[i](up) { + return false + } + } + return true + } +} + +func (d *derivedGen) Shrinker(baseShrinker Shrinker) func(down interface{}) Shrink { + return func(down interface{}) Shrink { + downs, ok := down.([]interface{}) + if !ok { + downs = []interface{}{down} + } + ups := d.biMapper.ConvertUp(downs) + upShrink := baseShrinker(ups) + + return upShrink.Map(func(shrunkUps []interface{}) interface{} { + downs := d.biMapper.ConvertDown(shrunkUps) + if len(downs) == 1 { + return downs[0] + } + return downs + }) + } +} + +// DeriveGen derives a generator with shrinkers from a sequence of other +// generators mapped by a bijective function (BiMapper) +func DeriveGen(downstream interface{}, upstream interface{}, gens ...Gen) Gen { + biMapper := NewBiMapper(downstream, upstream) + + if len(gens) != len(biMapper.UpTypes) { + panic(fmt.Sprintf("Expected %d generators != %d", len(biMapper.UpTypes), len(gens))) + } + + resultType := reflect.TypeOf([]interface{}{}) + if len(biMapper.DownTypes) == 1 { + resultType = biMapper.DownTypes[0] + } + + derived := &derivedGen{ + biMapper: biMapper, + upGens: gens, + resultType: resultType, + } + return derived.Generate +} diff --git a/vendor/github.com/leanovate/gopter/doc.go b/vendor/github.com/leanovate/gopter/doc.go new file mode 100644 index 0000000..5f188bb --- /dev/null +++ b/vendor/github.com/leanovate/gopter/doc.go @@ -0,0 +1,35 @@ +/* +Package gopter contain the main interfaces of the GOlang Property TestER. + +A simple property test might look like this: + + func TestSqrt(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("greater one of all greater one", prop.ForAll( + func(v float64) bool { + return math.Sqrt(v) >= 1 + }, + gen.Float64Range(1, math.MaxFloat64), + )) + + properties.Property("squared is equal to value", prop.ForAll( + func(v float64) bool { + r := math.Sqrt(v) + return math.Abs(r*r-v) < 1e-10*v + }, + gen.Float64Range(0, math.MaxFloat64), + )) + + properties.TestingRun(t) + } + +Generally a property is just a function that takes GenParameters and produces +a PropResult: + + type Prop func(*GenParameters) *PropResult + +but usually you will use prop.ForAll, prop.ForAllNoShrink or arbitrary.ForAll. +There is also the commands package, which can be helpful for stateful testing. 
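+
+A condition function may also take several arguments, one for each generator;
+a minimal sketch using the same helpers as above:
+
+	properties.Property("concatenation adds lengths", prop.ForAll(
+		func(a string, b string) bool {
+			// byte length of the concatenation equals the sum of byte lengths
+			return len(a+b) == len(a)+len(b)
+		},
+		gen.AnyString(),
+		gen.AnyString(),
+	))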
+*/ +package gopter diff --git a/vendor/github.com/leanovate/gopter/flag.go b/vendor/github.com/leanovate/gopter/flag.go new file mode 100644 index 0000000..b32b574 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/flag.go @@ -0,0 +1,23 @@ +package gopter + +import "sync/atomic" + +// Flag is a convenient helper for an atomic boolean +type Flag struct { + flag int32 +} + +// Get the value of the flag +func (f *Flag) Get() bool { + return atomic.LoadInt32(&f.flag) > 0 +} + +// Set the the flag +func (f *Flag) Set() { + atomic.StoreInt32(&f.flag, 1) +} + +// Unset the flag +func (f *Flag) Unset() { + atomic.StoreInt32(&f.flag, 0) +} diff --git a/vendor/github.com/leanovate/gopter/formated_reporter.go b/vendor/github.com/leanovate/gopter/formated_reporter.go new file mode 100644 index 0000000..280708f --- /dev/null +++ b/vendor/github.com/leanovate/gopter/formated_reporter.go @@ -0,0 +1,144 @@ +package gopter + +import ( + "fmt" + "io" + "os" + "strings" + "unicode" +) + +const newLine = "\n" + +// FormatedReporter reports test results in a human readable manager. +type FormatedReporter struct { + verbose bool + width int + output io.Writer +} + +// NewFormatedReporter create a new formated reporter +// verbose toggles verbose output of the property results +// width is the maximal width per line +// output is the writer were the report will be written to +func NewFormatedReporter(verbose bool, width int, output io.Writer) Reporter { + return &FormatedReporter{ + verbose: verbose, + width: width, + output: output, + } +} + +// ConsoleReporter creates a FormatedReporter writing to the console (i.e. stdout) +func ConsoleReporter(verbose bool) Reporter { + return NewFormatedReporter(verbose, 75, os.Stdout) +} + +// ReportTestResult reports a single property result +func (r *FormatedReporter) ReportTestResult(propName string, result *TestResult) { + if result.Passed() { + fmt.Fprintln(r.output, r.formatLines(fmt.Sprintf("+ %s: %s", propName, r.reportResult(result)), "", "")) + } else { + fmt.Fprintln(r.output, r.formatLines(fmt.Sprintf("! %s: %s", propName, r.reportResult(result)), "", "")) + } +} + +func (r *FormatedReporter) reportResult(result *TestResult) string { + status := "" + switch result.Status { + case TestProved: + status = "OK, proved property.\n" + r.reportPropArgs(result.Args) + case TestPassed: + status = fmt.Sprintf("OK, passed %d tests.", result.Succeeded) + case TestFailed: + status = fmt.Sprintf("Falsified after %d passed tests.\n%s%s", result.Succeeded, r.reportLabels(result.Labels), r.reportPropArgs(result.Args)) + case TestExhausted: + status = fmt.Sprintf("Gave up after only %d passed tests. 
%d tests were discarded.", result.Succeeded, result.Discarded) + case TestError: + if r.verbose { + status = fmt.Sprintf("Error on property evaluation after %d passed tests: %s\n%s\n%s", result.Succeeded, result.Error.Error(), result.ErrorStack, r.reportPropArgs(result.Args)) + } else { + status = fmt.Sprintf("Error on property evaluation after %d passed tests: %s\n%s", result.Succeeded, result.Error.Error(), r.reportPropArgs(result.Args)) + } + } + + if r.verbose { + return concatLines(status, fmt.Sprintf("Elapsed time: %s", result.Time.String())) + } + return status +} + +func (r *FormatedReporter) reportLabels(labels []string) string { + if labels != nil && len(labels) > 0 { + return fmt.Sprintf("> Labels of failing property: %s\n", strings.Join(labels, newLine)) + } + return "" +} + +func (r *FormatedReporter) reportPropArgs(p PropArgs) string { + result := "" + for i, arg := range p { + if result != "" { + result += newLine + } + result += r.reportPropArg(i, arg) + } + return result +} + +func (r *FormatedReporter) reportPropArg(idx int, propArg *PropArg) string { + label := propArg.Label + if label == "" { + label = fmt.Sprintf("ARG_%d", idx) + } + result := fmt.Sprintf("%s: %s", label, propArg.ArgFormatted) + if propArg.Shrinks > 0 { + result += fmt.Sprintf("\n%s_ORIGINAL (%d shrinks): %s", label, propArg.Shrinks, propArg.OrigArgFormatted) + } + + return result +} + +func (r *FormatedReporter) formatLines(str, lead, trail string) string { + result := "" + for _, line := range strings.Split(str, "\n") { + if result != "" { + result += newLine + } + result += r.breakLine(lead+line+trail, " ") + } + return result +} + +func (r *FormatedReporter) breakLine(str, lead string) string { + if len(str) <= r.width { + return str + } + + result := "" + for len(str) > r.width { + idx := strings.LastIndexFunc(str[0:r.width], func(ch rune) bool { + return unicode.IsSpace(ch) + }) + if idx <= 0 { + idx = r.width + } + result += str[0:idx] + "\n" + lead + str = str[idx:] + } + result += str + return result +} + +func concatLines(strs ...string) string { + result := "" + for _, str := range strs { + if str != "" { + if result != "" { + result += "\n" + } + result += str + } + } + return result +} diff --git a/vendor/github.com/leanovate/gopter/gen.go b/vendor/github.com/leanovate/gopter/gen.go new file mode 100644 index 0000000..346bc68 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen.go @@ -0,0 +1,262 @@ +package gopter + +import ( + "fmt" + "reflect" +) + +// Gen generator of arbitrary values. +// Usually properties are checked by verifing a condition holds true for +// arbitrary input parameters generated by a Gen. +// +// IMPORTANT: Even though a generator is supposed to generate random values, it +// should do this in a reproducible way. Therefore a generator has to create the +// same result for the same GenParameters, i.e. ensure that you just use the +// RNG provided by GenParameters and no external one. +// If you just plug generators together you do not have to worry about this. +type Gen func(*GenParameters) *GenResult + +var ( + // DefaultGenParams can be used as default für *GenParameters + DefaultGenParams = DefaultGenParameters() + MinGenParams = MinGenParameters() +) + +// Sample generate a sample value. +// Depending on the state of the RNG the generate might fail to provide a sample +func (g Gen) Sample() (interface{}, bool) { + return g(DefaultGenParameters()).Retrieve() +} + +// WithLabel adds a label to a generated value. 
+// Labels are usually used for reporting for the arguments of a property check. +func (g Gen) WithLabel(label string) Gen { + return func(genParams *GenParameters) *GenResult { + result := g(genParams) + result.Labels = append(result.Labels, label) + return result + } +} + +// SuchThat creates a derived generator by adding a sieve. +// f: has to be a function with one parameter (matching the generated value) returning a bool. +// All generated values are expected to satisfy +// +// f(value) == true. +// +// Use this care, if the sieve to to fine the generator will have many misses which results +// in an undecided property. +func (g Gen) SuchThat(f interface{}) Gen { + checkVal := reflect.ValueOf(f) + checkType := checkVal.Type() + + if checkVal.Kind() != reflect.Func { + panic(fmt.Sprintf("Param of SuchThat has to be a func, but is %v", checkType.Kind())) + } + if checkType.NumIn() != 1 { + panic(fmt.Sprintf("Param of SuchThat has to be a func with one param, but is %v", checkType.NumIn())) + } else { + genResultType := g(MinGenParams).ResultType + if !genResultType.AssignableTo(checkType.In(0)) { + panic(fmt.Sprintf("Param of SuchThat has to be a func with one param assignable to %v, but is %v", genResultType, checkType.In(0))) + } + } + if checkType.NumOut() != 1 { + panic(fmt.Sprintf("Param of SuchThat has to be a func with one return value, but is %v", checkType.NumOut())) + } else if checkType.Out(0).Kind() != reflect.Bool { + panic(fmt.Sprintf("Param of SuchThat has to be a func with one return value of bool, but is %v", checkType.Out(0).Kind())) + } + sieve := func(v interface{}) bool { + valueOf := reflect.ValueOf(v) + if !valueOf.IsValid() { + return false + } + return checkVal.Call([]reflect.Value{valueOf})[0].Bool() + } + + return func(genParams *GenParameters) *GenResult { + result := g(genParams) + prevSieve := result.Sieve + if prevSieve == nil { + result.Sieve = sieve + } else { + result.Sieve = func(value interface{}) bool { + return prevSieve(value) && sieve(value) + } + } + return result + } +} + +// WithShrinker creates a derived generator with a specific shrinker +func (g Gen) WithShrinker(shrinker Shrinker) Gen { + return func(genParams *GenParameters) *GenResult { + result := g(genParams) + if shrinker == nil { + result.Shrinker = NoShrinker + } else { + result.Shrinker = shrinker + } + return result + } +} + +// Map creates a derived generator by mapping all generatored values with a given function. +// f: has to be a function with one parameter (matching the generated value) and a single return. 
+// Note: The derived generator will not have a sieve or shrinker unless you are mapping to the same type +// Note: The mapping function may have a second parameter "*GenParameters" +// Note: The first parameter of the mapping function and its return may be a *GenResult (this makes MapResult obsolete) +func (g Gen) Map(f interface{}) Gen { + mapperVal := reflect.ValueOf(f) + mapperType := mapperVal.Type() + needsGenParameters := false + genResultInput := false + genResultOutput := false + + if mapperVal.Kind() != reflect.Func { + panic(fmt.Sprintf("Param of Map has to be a func, but is %v", mapperType.Kind())) + } + if mapperType.NumIn() != 1 && mapperType.NumIn() != 2 { + panic(fmt.Sprintf("Param of Map has to be a func with one or two params, but is %v", mapperType.NumIn())) + } else { + if mapperType.NumIn() == 2 { + if !reflect.TypeOf(&GenParameters{}).AssignableTo(mapperType.In(1)) { + panic("Second parameter of mapper function has to be a *GenParameters") + } + needsGenParameters = true + } + genResultType := g(MinGenParams).ResultType + if reflect.TypeOf(&GenResult{}).AssignableTo(mapperType.In(0)) { + genResultInput = true + } else if !genResultType.AssignableTo(mapperType.In(0)) { + panic(fmt.Sprintf("Param of Map has to be a func with one param assignable to %v, but is %v", genResultType, mapperType.In(0))) + } + } + if mapperType.NumOut() != 1 { + panic(fmt.Sprintf("Param of Map has to be a func with one return value, but is %v", mapperType.NumOut())) + } else if reflect.TypeOf(&GenResult{}).AssignableTo(mapperType.Out(0)) { + genResultOutput = true + } + + return func(genParams *GenParameters) *GenResult { + result := g(genParams) + if genResultInput { + var mapped reflect.Value + if needsGenParameters { + mapped = mapperVal.Call([]reflect.Value{reflect.ValueOf(result), reflect.ValueOf(genParams)})[0] + } else { + mapped = mapperVal.Call([]reflect.Value{reflect.ValueOf(result)})[0] + } + if genResultOutput { + return mapped.Interface().(*GenResult) + } + return &GenResult{ + Shrinker: NoShrinker, + Result: mapped.Interface(), + Labels: result.Labels, + ResultType: mapperType.Out(0), + } + } + value, ok := result.RetrieveAsValue() + if ok { + var mapped reflect.Value + shrinker := NoShrinker + if needsGenParameters { + mapped = mapperVal.Call([]reflect.Value{value, reflect.ValueOf(genParams)})[0] + } else { + mapped = mapperVal.Call([]reflect.Value{value})[0] + } + if genResultOutput { + return mapped.Interface().(*GenResult) + } + if mapperType.In(0) == mapperType.Out(0) { + shrinker = result.Shrinker + } + return &GenResult{ + Shrinker: shrinker, + Result: mapped.Interface(), + Labels: result.Labels, + ResultType: mapperType.Out(0), + } + } + return &GenResult{ + Shrinker: NoShrinker, + Result: nil, + Labels: result.Labels, + ResultType: mapperType.Out(0), + } + } +} + +// FlatMap creates a derived generator by passing a generated value to a function which itself +// creates a generator. +func (g Gen) FlatMap(f func(interface{}) Gen, resultType reflect.Type) Gen { + return func(genParams *GenParameters) *GenResult { + result := g(genParams) + value, ok := result.Retrieve() + if ok { + return f(value)(genParams) + } + return &GenResult{ + Shrinker: NoShrinker, + Result: nil, + Labels: result.Labels, + ResultType: resultType, + } + } +} + +// MapResult creates a derived generator by mapping the GenResult directly. +// Contrary to `Map` and `FlatMap` this also allow the conversion of +// shrinkers and sieves, but implementation is more cumbersome. 
+// Deprecation note: Map now has the same functionality +func (g Gen) MapResult(f func(*GenResult) *GenResult) Gen { + return func(genParams *GenParameters) *GenResult { + return f(g(genParams)) + } +} + +// CombineGens creates a generators from a list of generators. +// The result type will be a []interface{} containing the generated values of each generators in +// the list. +// Note: The combined generator will not have a sieve or shrinker. +func CombineGens(gens ...Gen) Gen { + return func(genParams *GenParameters) *GenResult { + labels := []string{} + values := make([]interface{}, len(gens)) + shrinkers := make([]Shrinker, len(gens)) + sieves := make([]func(v interface{}) bool, len(gens)) + + var ok bool + for i, gen := range gens { + result := gen(genParams) + labels = append(labels, result.Labels...) + shrinkers[i] = result.Shrinker + sieves[i] = result.Sieve + values[i], ok = result.Retrieve() + if !ok { + return &GenResult{ + Shrinker: NoShrinker, + Result: nil, + Labels: result.Labels, + ResultType: reflect.TypeOf(values), + } + } + } + return &GenResult{ + Shrinker: CombineShrinker(shrinkers...), + Result: values, + Labels: labels, + ResultType: reflect.TypeOf(values), + Sieve: func(v interface{}) bool { + values := v.([]interface{}) + for i, value := range values { + if sieves[i] != nil && !sieves[i](value) { + return false + } + } + return true + }, + } + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/array_of.go b/vendor/github.com/leanovate/gopter/gen/array_of.go new file mode 100644 index 0000000..3c4267e --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/array_of.go @@ -0,0 +1,62 @@ +package gen + +import ( + "reflect" + + "github.com/leanovate/gopter" +) + +// ArrayOfN generates an array of generated elements with definied length +func ArrayOfN(desiredlen int, elementGen gopter.Gen, typeOverrides ...reflect.Type) gopter.Gen { + var typeOverride reflect.Type + if len(typeOverrides) > 1 { + panic("too many type overrides specified, at most 1 may be provided.") + } else if len(typeOverrides) == 1 { + typeOverride = typeOverrides[0] + } + return func(genParams *gopter.GenParameters) *gopter.GenResult { + result, elementSieve, elementShrinker := genArray(elementGen, genParams, desiredlen, typeOverride) + + genResult := gopter.NewGenResult(result.Interface(), ArrayShrinkerOne(elementShrinker)) + if elementSieve != nil { + genResult.Sieve = func(v interface{}) bool { + rv := reflect.ValueOf(v) + return rv.Len() == desiredlen && forAllSieve(elementSieve)(v) + } + } else { + genResult.Sieve = func(v interface{}) bool { + return reflect.ValueOf(v).Len() == desiredlen + } + } + return genResult + } +} + +func genArray(elementGen gopter.Gen, genParams *gopter.GenParameters, desiredlen int, typeOverride reflect.Type) (reflect.Value, func(interface{}) bool, gopter.Shrinker) { + element := elementGen(genParams) + elementSieve := element.Sieve + elementShrinker := element.Shrinker + + sliceType := typeOverride + if sliceType == nil { + sliceType = element.ResultType + } + + arrayType := reflect.ArrayOf(desiredlen, sliceType) + result := reflect.New(arrayType).Elem() + + for i := 0; i < desiredlen; i++ { + value, ok := element.Retrieve() + + if ok { + if value == nil { + result.Index(i).Set(reflect.Zero(sliceType)) + } else { + result.Index(i).Set(reflect.ValueOf(value)) + } + } + element = elementGen(genParams) + } + + return result, elementSieve, elementShrinker +} diff --git a/vendor/github.com/leanovate/gopter/gen/array_shrink.go 
b/vendor/github.com/leanovate/gopter/gen/array_shrink.go new file mode 100644 index 0000000..b902ef1 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/array_shrink.go @@ -0,0 +1,53 @@ +package gen + +import ( + "fmt" + "reflect" + + "github.com/leanovate/gopter" +) + +type arrayShrinkOne struct { + original reflect.Value + index int + elementShrink gopter.Shrink +} + +func (s *arrayShrinkOne) Next() (interface{}, bool) { + value, ok := s.elementShrink() + if !ok { + return nil, false + } + result := reflect.New(s.original.Type()).Elem() + reflect.Copy(result, s.original) + if value == nil { + result.Index(s.index).Set(reflect.Zero(s.original.Type().Elem())) + } else { + result.Index(s.index).Set(reflect.ValueOf(value)) + } + + return result.Interface(), true +} + +// ArrayShrinkerOne creates an array shrinker from a shrinker for the elements of the slice. +// The length of the array will remains unchanged, instead each element is shrunk after the +// other. +func ArrayShrinkerOne(elementShrinker gopter.Shrinker) gopter.Shrinker { + return func(v interface{}) gopter.Shrink { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Array { + panic(fmt.Sprintf("%#v is not an array", v)) + } + + shrinks := make([]gopter.Shrink, 0, rv.Len()) + for i := 0; i < rv.Len(); i++ { + arrayShrinkOne := &arrayShrinkOne{ + original: rv, + index: i, + elementShrink: elementShrinker(rv.Index(i).Interface()), + } + shrinks = append(shrinks, arrayShrinkOne.Next) + } + return gopter.ConcatShrinks(shrinks...) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/bool.go b/vendor/github.com/leanovate/gopter/gen/bool.go new file mode 100644 index 0000000..019f93b --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/bool.go @@ -0,0 +1,10 @@ +package gen + +import "github.com/leanovate/gopter" + +// Bool generates an arbitrary bool value +func Bool() gopter.Gen { + return func(genParams *gopter.GenParameters) *gopter.GenResult { + return gopter.NewGenResult(genParams.NextBool(), gopter.NoShrinker) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/complex.go b/vendor/github.com/leanovate/gopter/gen/complex.go new file mode 100644 index 0000000..2bc57a0 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/complex.go @@ -0,0 +1,49 @@ +package gen + +import "github.com/leanovate/gopter" + +// Complex128Box generate complex128 numbers within a rectangle/box in the complex plane +func Complex128Box(min, max complex128) gopter.Gen { + return gopter.CombineGens( + Float64Range(real(min), real(max)), + Float64Range(imag(min), imag(max)), + ).Map(func(values []interface{}) complex128 { + return complex(values[0].(float64), values[1].(float64)) + }).SuchThat(func(v complex128) bool { + return real(v) >= real(min) && real(v) <= real(max) && + imag(v) >= imag(min) && imag(v) <= imag(max) + }).WithShrinker(Complex128Shrinker) +} + +// Complex128 generate arbitrary complex128 numbers +func Complex128() gopter.Gen { + return gopter.CombineGens( + Float64(), + Float64(), + ).Map(func(values []interface{}) complex128 { + return complex(values[0].(float64), values[1].(float64)) + }).WithShrinker(Complex128Shrinker) +} + +// Complex64Box generate complex64 numbers within a rectangle/box in the complex plane +func Complex64Box(min, max complex64) gopter.Gen { + return gopter.CombineGens( + Float32Range(real(min), real(max)), + Float32Range(imag(min), imag(max)), + ).Map(func(values []interface{}) complex64 { + return complex(values[0].(float32), values[1].(float32)) + }).SuchThat(func(v complex64) bool { 
+ return real(v) >= real(min) && real(v) <= real(max) && + imag(v) >= imag(min) && imag(v) <= imag(max) + }).WithShrinker(Complex64Shrinker) +} + +// Complex64 generate arbitrary complex64 numbers +func Complex64() gopter.Gen { + return gopter.CombineGens( + Float32(), + Float32(), + ).Map(func(values []interface{}) complex64 { + return complex(values[0].(float32), values[1].(float32)) + }).WithShrinker(Complex64Shrinker) +} diff --git a/vendor/github.com/leanovate/gopter/gen/complex_shrink.go b/vendor/github.com/leanovate/gopter/gen/complex_shrink.go new file mode 100644 index 0000000..68ec8bd --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/complex_shrink.go @@ -0,0 +1,27 @@ +package gen + +import "github.com/leanovate/gopter" + +// Complex128Shrinker is a shrinker for complex128 numbers +func Complex128Shrinker(v interface{}) gopter.Shrink { + c := v.(complex128) + realShrink := Float64Shrinker(real(c)).Map(func(r float64) complex128 { + return complex(r, imag(c)) + }) + imagShrink := Float64Shrinker(imag(c)).Map(func(i float64) complex128 { + return complex(real(c), i) + }) + return realShrink.Interleave(imagShrink) +} + +// Complex64Shrinker is a shrinker for complex64 numbers +func Complex64Shrinker(v interface{}) gopter.Shrink { + c := v.(complex64) + realShrink := Float64Shrinker(float64(real(c))).Map(func(r float64) complex64 { + return complex(float32(r), imag(c)) + }) + imagShrink := Float64Shrinker(float64(imag(c))).Map(func(i float64) complex64 { + return complex(real(c), float32(i)) + }) + return realShrink.Interleave(imagShrink) +} diff --git a/vendor/github.com/leanovate/gopter/gen/const.go b/vendor/github.com/leanovate/gopter/gen/const.go new file mode 100644 index 0000000..c213e16 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/const.go @@ -0,0 +1,11 @@ +package gen + +import "github.com/leanovate/gopter" + +// Const creates a generator for a constant value +// Not the most exciting generator, but can be helpful from time to time +func Const(value interface{}) gopter.Gen { + return func(*gopter.GenParameters) *gopter.GenResult { + return gopter.NewGenResult(value, gopter.NoShrinker) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/doc.go b/vendor/github.com/leanovate/gopter/gen/doc.go new file mode 100644 index 0000000..4859239 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/doc.go @@ -0,0 +1,4 @@ +/* +Package gen contains all commonly used generators and shrinkers. 
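+
+For instance (a minimal, illustrative sketch combining generators defined in
+this package):
+
+	gen.SliceOf(gen.UInt8Range(0, 9)).SuchThat(func(v []uint8) bool {
+		// only keep non-empty slices of small digits
+		return len(v) > 0
+	})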
+*/ +package gen diff --git a/vendor/github.com/leanovate/gopter/gen/fail.go b/vendor/github.com/leanovate/gopter/gen/fail.go new file mode 100644 index 0000000..9d0efc7 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/fail.go @@ -0,0 +1,15 @@ +package gen + +import ( + "reflect" + + "github.com/leanovate/gopter" +) + +// Fail is a generator that always fails to generate a value +// Useful as fallback +func Fail(resultType reflect.Type) gopter.Gen { + return func(*gopter.GenParameters) *gopter.GenResult { + return gopter.NewEmptyResult(resultType) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/floats.go b/vendor/github.com/leanovate/gopter/gen/floats.go new file mode 100644 index 0000000..901cac6 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/floats.go @@ -0,0 +1,69 @@ +package gen + +import ( + "math" + "reflect" + + "github.com/leanovate/gopter" +) + +// Float64Range generates float64 numbers within a given range +func Float64Range(min, max float64) gopter.Gen { + d := max - min + if d < 0 || d > math.MaxFloat64 { + return Fail(reflect.TypeOf(float64(0))) + } + + return func(genParams *gopter.GenParameters) *gopter.GenResult { + genResult := gopter.NewGenResult(min+genParams.Rng.Float64()*d, Float64Shrinker) + genResult.Sieve = func(v interface{}) bool { + return v.(float64) >= min && v.(float64) <= max + } + return genResult + } +} + +// Float64 generates arbitrary float64 numbers that do not contain NaN or Inf +func Float64() gopter.Gen { + return gopter.CombineGens( + Int64Range(0, 1), + Int64Range(0, 0x7fe), + Int64Range(0, 0xfffffffffffff), + ).Map(func(values []interface{}) float64 { + sign := uint64(values[0].(int64)) + exponent := uint64(values[1].(int64)) + mantissa := uint64(values[2].(int64)) + + return math.Float64frombits((sign << 63) | (exponent << 52) | mantissa) + }).WithShrinker(Float64Shrinker) +} + +// Float32Range generates float32 numbers within a given range +func Float32Range(min, max float32) gopter.Gen { + d := max - min + if d < 0 || d > math.MaxFloat32 { + return Fail(reflect.TypeOf(float32(0))) + } + return func(genParams *gopter.GenParameters) *gopter.GenResult { + genResult := gopter.NewGenResult(min+genParams.Rng.Float32()*d, Float32Shrinker) + genResult.Sieve = func(v interface{}) bool { + return v.(float32) >= min && v.(float32) <= max + } + return genResult + } +} + +// Float32 generates arbitrary float32 numbers that do not contain NaN or Inf +func Float32() gopter.Gen { + return gopter.CombineGens( + Int32Range(0, 1), + Int32Range(0, 0xfe), + Int32Range(0, 0x7fffff), + ).Map(func(values []interface{}) float32 { + sign := uint32(values[0].(int32)) + exponent := uint32(values[1].(int32)) + mantissa := uint32(values[2].(int32)) + + return math.Float32frombits((sign << 31) | (exponent << 23) | mantissa) + }).WithShrinker(Float32Shrinker) +} diff --git a/vendor/github.com/leanovate/gopter/gen/floats_shrink.go b/vendor/github.com/leanovate/gopter/gen/floats_shrink.go new file mode 100644 index 0000000..00cd540 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/floats_shrink.go @@ -0,0 +1,49 @@ +package gen + +import ( + "math" + + "github.com/leanovate/gopter" +) + +type float64Shrink struct { + original float64 + half float64 +} + +func (s *float64Shrink) isZeroOrVeryClose() bool { + if s.half == 0 { + return true + } + muliple := s.half * 100000 + return math.Abs(muliple) < 1 && muliple != 0 +} + +func (s *float64Shrink) Next() (interface{}, bool) { + if s.isZeroOrVeryClose() { + return nil, false + } + value := 
s.original - s.half + s.half /= 2 + return value, true +} + +// Float64Shrinker is a shrinker for float64 numbers +func Float64Shrinker(v interface{}) gopter.Shrink { + negShrink := float64Shrink{ + original: -v.(float64), + half: -v.(float64), + } + posShrink := float64Shrink{ + original: v.(float64), + half: v.(float64) / 2, + } + return gopter.Shrink(negShrink.Next).Interleave(gopter.Shrink(posShrink.Next)) +} + +// Float32Shrinker is a shrinker for float32 numbers +func Float32Shrinker(v interface{}) gopter.Shrink { + return Float64Shrinker(float64(v.(float32))).Map(func(e float64) float32 { + return float32(e) + }) +} diff --git a/vendor/github.com/leanovate/gopter/gen/frequency.go b/vendor/github.com/leanovate/gopter/gen/frequency.go new file mode 100644 index 0000000..5f5c707 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/frequency.go @@ -0,0 +1,33 @@ +package gen + +import ( + "sort" + + "github.com/leanovate/gopter" +) + +// Frequency combines multiple weighted generators of the the same result type +// The generators from weightedGens will be used accrding to the weight, i.e. generators +// with a hight weight will be used more often than generators with a low weight. +func Frequency(weightedGens map[int]gopter.Gen) gopter.Gen { + if len(weightedGens) == 0 { + return Fail(nil) + } + weights := make(sort.IntSlice, 0, len(weightedGens)) + max := 0 + for weight := range weightedGens { + if weight > max { + max = weight + } + weights = append(weights, weight) + } + weights.Sort() + return func(genParams *gopter.GenParameters) *gopter.GenResult { + idx := weights.Search(genParams.Rng.Intn(max + 1)) + gen := weightedGens[weights[idx]] + + result := gen(genParams) + result.Sieve = nil + return result + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/integers.go b/vendor/github.com/leanovate/gopter/gen/integers.go new file mode 100644 index 0000000..518530a --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/integers.go @@ -0,0 +1,225 @@ +package gen + +import ( + "math" + "reflect" + + "github.com/leanovate/gopter" +) + +// Int64Range generates int64 numbers within a given range +func Int64Range(min, max int64) gopter.Gen { + if max < min { + return Fail(reflect.TypeOf(int64(0))) + } + if max == math.MaxInt64 && min == math.MinInt64 { // Check for range overflow + return func(genParams *gopter.GenParameters) *gopter.GenResult { + return gopter.NewGenResult(genParams.NextInt64(), Int64Shrinker) + } + } + + rangeSize := uint64(max - min + 1) + return func(genParams *gopter.GenParameters) *gopter.GenResult { + var nextResult = uint64(min) + (genParams.NextUint64() % rangeSize) + genResult := gopter.NewGenResult(int64(nextResult), Int64Shrinker) + genResult.Sieve = func(v interface{}) bool { + return v.(int64) >= min && v.(int64) <= max + } + return genResult + } +} + +// UInt64Range generates uint64 numbers within a given range +func UInt64Range(min, max uint64) gopter.Gen { + if max < min { + return Fail(reflect.TypeOf(uint64(0))) + } + d := max - min + 1 + if d == 0 { // Check overflow (i.e. 
max = MaxInt64, min = MinInt64) + return func(genParams *gopter.GenParameters) *gopter.GenResult { + return gopter.NewGenResult(genParams.NextUint64(), UInt64Shrinker) + } + } + return func(genParams *gopter.GenParameters) *gopter.GenResult { + genResult := gopter.NewGenResult(min+genParams.NextUint64()%d, UInt64Shrinker) + genResult.Sieve = func(v interface{}) bool { + return v.(uint64) >= min && v.(uint64) <= max + } + return genResult + } +} + +// Int64 generates an arbitrary int64 number +func Int64() gopter.Gen { + return Int64Range(math.MinInt64, math.MaxInt64) +} + +// UInt64 generates an arbitrary Uint64 number +func UInt64() gopter.Gen { + return UInt64Range(0, math.MaxUint64) +} + +// Int32Range generates int32 numbers within a given range +func Int32Range(min, max int32) gopter.Gen { + return Int64Range(int64(min), int64(max)). + Map(int64To32). + WithShrinker(Int32Shrinker). + SuchThat(func(v int32) bool { + return v >= min && v <= max + }) +} + +// UInt32Range generates uint32 numbers within a given range +func UInt32Range(min, max uint32) gopter.Gen { + return UInt64Range(uint64(min), uint64(max)). + Map(uint64To32). + WithShrinker(UInt32Shrinker). + SuchThat(func(v uint32) bool { + return v >= min && v <= max + }) +} + +// Int32 generate arbitrary int32 numbers +func Int32() gopter.Gen { + return Int32Range(math.MinInt32, math.MaxInt32) +} + +// UInt32 generate arbitrary int32 numbers +func UInt32() gopter.Gen { + return UInt32Range(0, math.MaxUint32) +} + +// Int16Range generates int16 numbers within a given range +func Int16Range(min, max int16) gopter.Gen { + return Int64Range(int64(min), int64(max)). + Map(int64To16). + WithShrinker(Int16Shrinker). + SuchThat(func(v int16) bool { + return v >= min && v <= max + }) +} + +// UInt16Range generates uint16 numbers within a given range +func UInt16Range(min, max uint16) gopter.Gen { + return UInt64Range(uint64(min), uint64(max)). + Map(uint64To16). + WithShrinker(UInt16Shrinker). + SuchThat(func(v uint16) bool { + return v >= min && v <= max + }) +} + +// Int16 generate arbitrary int16 numbers +func Int16() gopter.Gen { + return Int16Range(math.MinInt16, math.MaxInt16) +} + +// UInt16 generate arbitrary uint16 numbers +func UInt16() gopter.Gen { + return UInt16Range(0, math.MaxUint16) +} + +// Int8Range generates int8 numbers within a given range +func Int8Range(min, max int8) gopter.Gen { + return Int64Range(int64(min), int64(max)). + Map(int64To8). + WithShrinker(Int8Shrinker). + SuchThat(func(v int8) bool { + return v >= min && v <= max + }) +} + +// UInt8Range generates uint8 numbers within a given range +func UInt8Range(min, max uint8) gopter.Gen { + return UInt64Range(uint64(min), uint64(max)). + Map(uint64To8). + WithShrinker(UInt8Shrinker). + SuchThat(func(v uint8) bool { + return v >= min && v <= max + }) +} + +// Int8 generate arbitrary int8 numbers +func Int8() gopter.Gen { + return Int8Range(math.MinInt8, math.MaxInt8) +} + +// UInt8 generate arbitrary uint8 numbers +func UInt8() gopter.Gen { + return UInt8Range(0, math.MaxUint8) +} + +// IntRange generates int numbers within a given range +func IntRange(min, max int) gopter.Gen { + return Int64Range(int64(min), int64(max)). + Map(int64ToInt). + WithShrinker(IntShrinker). + SuchThat(func(v int) bool { + return v >= min && v <= max + }) +} + +// Int generate arbitrary int numbers +func Int() gopter.Gen { + return Int64Range(math.MinInt32, math.MaxInt32). + Map(int64ToInt). 
+ WithShrinker(IntShrinker) +} + +// UIntRange generates uint numbers within a given range +func UIntRange(min, max uint) gopter.Gen { + return UInt64Range(uint64(min), uint64(max)). + Map(uint64ToUint). + WithShrinker(UIntShrinker). + SuchThat(func(v uint) bool { + return v >= min && v <= max + }) +} + +// UInt generate arbitrary uint numbers +func UInt() gopter.Gen { + return UInt64Range(0, math.MaxUint32). + Map(uint64ToUint). + WithShrinker(UIntShrinker) +} + +// Size just extracts the MaxSize field of the GenParameters. +// This can be helpful to generate limited integer value in a more structued +// manner. +func Size() gopter.Gen { + return func(genParams *gopter.GenParameters) *gopter.GenResult { + return gopter.NewGenResult(genParams.MaxSize, IntShrinker) + } +} + +func int64To32(value int64) int32 { + return int32(value) +} + +func uint64To32(value uint64) uint32 { + return uint32(value) +} + +func int64To16(value int64) int16 { + return int16(value) +} + +func uint64To16(value uint64) uint16 { + return uint16(value) +} + +func int64To8(value int64) int8 { + return int8(value) +} + +func uint64To8(value uint64) uint8 { + return uint8(value) +} + +func int64ToInt(value int64) int { + return int(value) +} + +func uint64ToUint(value uint64) uint { + return uint(value) +} diff --git a/vendor/github.com/leanovate/gopter/gen/integers_shrink.go b/vendor/github.com/leanovate/gopter/gen/integers_shrink.go new file mode 100644 index 0000000..ede4189 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/integers_shrink.go @@ -0,0 +1,95 @@ +package gen + +import ( + "github.com/leanovate/gopter" +) + +type int64Shrink struct { + original int64 + half int64 +} + +func (s *int64Shrink) Next() (interface{}, bool) { + if s.half == 0 { + return nil, false + } + value := s.original - s.half + s.half /= 2 + return value, true +} + +type uint64Shrink struct { + original uint64 + half uint64 +} + +func (s *uint64Shrink) Next() (interface{}, bool) { + if s.half == 0 { + return nil, false + } + value := s.original - s.half + s.half >>= 1 + return value, true +} + +// Int64Shrinker is a shrinker for int64 numbers +func Int64Shrinker(v interface{}) gopter.Shrink { + negShrink := int64Shrink{ + original: -v.(int64), + half: -v.(int64), + } + posShrink := int64Shrink{ + original: v.(int64), + half: v.(int64) / 2, + } + return gopter.Shrink(negShrink.Next).Interleave(gopter.Shrink(posShrink.Next)) +} + +// UInt64Shrinker is a shrinker for uint64 numbers +func UInt64Shrinker(v interface{}) gopter.Shrink { + shrink := uint64Shrink{ + original: v.(uint64), + half: v.(uint64), + } + return shrink.Next +} + +// Int32Shrinker is a shrinker for int32 numbers +func Int32Shrinker(v interface{}) gopter.Shrink { + return Int64Shrinker(int64(v.(int32))).Map(int64To32) +} + +// UInt32Shrinker is a shrinker for uint32 numbers +func UInt32Shrinker(v interface{}) gopter.Shrink { + return UInt64Shrinker(uint64(v.(uint32))).Map(uint64To32) +} + +// Int16Shrinker is a shrinker for int16 numbers +func Int16Shrinker(v interface{}) gopter.Shrink { + return Int64Shrinker(int64(v.(int16))).Map(int64To16) +} + +// UInt16Shrinker is a shrinker for uint16 numbers +func UInt16Shrinker(v interface{}) gopter.Shrink { + return UInt64Shrinker(uint64(v.(uint16))).Map(uint64To16) +} + +// Int8Shrinker is a shrinker for int8 numbers +func Int8Shrinker(v interface{}) gopter.Shrink { + return Int64Shrinker(int64(v.(int8))).Map(int64To8) +} + +// UInt8Shrinker is a shrinker for uint8 numbers +func UInt8Shrinker(v interface{}) gopter.Shrink 
{ + return UInt64Shrinker(uint64(v.(uint8))).Map(uint64To8) +} + +// IntShrinker is a shrinker for int numbers +func IntShrinker(v interface{}) gopter.Shrink { + return Int64Shrinker(int64(v.(int))).Map(int64ToInt) +} + +// UIntShrinker is a shrinker for uint numbers +func UIntShrinker(v interface{}) gopter.Shrink { + return UInt64Shrinker(uint64(v.(uint))).Map(uint64ToUint) +} diff --git a/vendor/github.com/leanovate/gopter/gen/map_of.go b/vendor/github.com/leanovate/gopter/gen/map_of.go new file mode 100644 index 0000000..17a3e21 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/map_of.go @@ -0,0 +1,87 @@ +package gen + +import ( + "reflect" + + "github.com/leanovate/gopter" +) + +// MapOf generates an arbitrary map of generated kay values. +// genParams.MaxSize sets an (exclusive) upper limit on the size of the map +// genParams.MinSize sets an (inclusive) lower limit on the size of the map +func MapOf(keyGen, elementGen gopter.Gen) gopter.Gen { + return func(genParams *gopter.GenParameters) *gopter.GenResult { + len := 0 + if genParams.MaxSize > 0 || genParams.MinSize > 0 { + if genParams.MinSize > genParams.MaxSize { + panic("GenParameters.MinSize must be <= GenParameters.MaxSize") + } + + if genParams.MaxSize == genParams.MinSize { + len = genParams.MaxSize + } else { + len = genParams.Rng.Intn(genParams.MaxSize-genParams.MinSize) + genParams.MinSize + } + } + + result, keySieve, keyShrinker, elementSieve, elementShrinker := genMap(keyGen, elementGen, genParams, len) + + genResult := gopter.NewGenResult(result.Interface(), MapShrinker(keyShrinker, elementShrinker)) + if keySieve != nil || elementSieve != nil { + genResult.Sieve = forAllKeyValueSieve(keySieve, elementSieve) + } + return genResult + } +} + +func genMap(keyGen, elementGen gopter.Gen, genParams *gopter.GenParameters, len int) (reflect.Value, func(interface{}) bool, gopter.Shrinker, func(interface{}) bool, gopter.Shrinker) { + element := elementGen(genParams) + elementSieve := element.Sieve + elementShrinker := element.Shrinker + + key := keyGen(genParams) + keySieve := key.Sieve + keyShrinker := key.Shrinker + + result := reflect.MakeMapWithSize(reflect.MapOf(key.ResultType, element.ResultType), len) + + for i := 0; i < len; i++ { + keyValue, keyOk := key.Retrieve() + elementValue, elementOk := element.Retrieve() + + if keyOk && elementOk { + if key == nil { + if elementValue == nil { + result.SetMapIndex(reflect.Zero(key.ResultType), reflect.Zero(element.ResultType)) + } else { + result.SetMapIndex(reflect.Zero(key.ResultType), reflect.ValueOf(elementValue)) + } + } else { + if elementValue == nil { + result.SetMapIndex(reflect.ValueOf(keyValue), reflect.Zero(element.ResultType)) + } else { + result.SetMapIndex(reflect.ValueOf(keyValue), reflect.ValueOf(elementValue)) + } + } + } + key = keyGen(genParams) + element = elementGen(genParams) + } + + return result, keySieve, keyShrinker, elementSieve, elementShrinker +} + +func forAllKeyValueSieve(keySieve, elementSieve func(interface{}) bool) func(interface{}) bool { + return func(v interface{}) bool { + rv := reflect.ValueOf(v) + for _, key := range rv.MapKeys() { + if keySieve != nil && !keySieve(key.Interface()) { + return false + } + if elementSieve != nil && !elementSieve(rv.MapIndex(key).Interface()) { + return false + } + } + return true + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/map_shrink.go b/vendor/github.com/leanovate/gopter/gen/map_shrink.go new file mode 100644 index 0000000..b0787fc --- /dev/null +++ 
b/vendor/github.com/leanovate/gopter/gen/map_shrink.go @@ -0,0 +1,149 @@ +package gen + +import ( + "fmt" + "reflect" + + "github.com/leanovate/gopter" +) + +type mapShrinkOne struct { + original reflect.Value + key reflect.Value + keyShrink gopter.Shrink + elementShrink gopter.Shrink + state bool + keyExhausted bool + lastKey interface{} + elementExhausted bool + lastElement interface{} +} + +func (s *mapShrinkOne) nextKeyValue() (interface{}, interface{}, bool) { + for !s.keyExhausted && !s.elementExhausted { + s.state = !s.state + if s.state && !s.keyExhausted { + value, ok := s.keyShrink() + if ok { + s.lastKey = value + return s.lastKey, s.lastElement, true + } + s.keyExhausted = true + } else if !s.state && !s.elementExhausted { + value, ok := s.elementShrink() + if ok { + s.lastElement = value + return s.lastKey, s.lastElement, true + } + s.elementExhausted = true + } + } + return nil, nil, false +} + +func (s *mapShrinkOne) Next() (interface{}, bool) { + nextKey, nextValue, ok := s.nextKeyValue() + if !ok { + return nil, false + } + result := reflect.MakeMapWithSize(s.original.Type(), s.original.Len()) + for _, key := range s.original.MapKeys() { + if !reflect.DeepEqual(key.Interface(), s.key.Interface()) { + result.SetMapIndex(key, s.original.MapIndex(key)) + } + } + result.SetMapIndex(reflect.ValueOf(nextKey), reflect.ValueOf(nextValue)) + + return result.Interface(), true +} + +// MapShrinkerOne creates a map shrinker from a shrinker for the key values of a map. +// The length of the map will remain (mostly) unchanged, instead each key value pair is +// shrunk after the other. +func MapShrinkerOne(keyShrinker, elementShrinker gopter.Shrinker) gopter.Shrinker { + return func(v interface{}) gopter.Shrink { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Map { + panic(fmt.Sprintf("%#v is not a map", v)) + } + + keys := rv.MapKeys() + shrinks := make([]gopter.Shrink, 0, len(keys)) + for _, key := range keys { + mapShrinkOne := &mapShrinkOne{ + original: rv, + key: key, + keyShrink: keyShrinker(key.Interface()), + lastKey: key.Interface(), + elementShrink: elementShrinker(rv.MapIndex(key).Interface()), + lastElement: rv.MapIndex(key).Interface(), + } + shrinks = append(shrinks, mapShrinkOne.Next) + } + return gopter.ConcatShrinks(shrinks...) + } +} + +type mapShrink struct { + original reflect.Value + originalKeys []reflect.Value + length int + offset int + chunkLength int +} + +func (s *mapShrink) Next() (interface{}, bool) { + if s.chunkLength == 0 { + return nil, false + } + keys := make([]reflect.Value, 0, s.length-s.chunkLength) + keys = append(keys, s.originalKeys[0:s.offset]...) + s.offset += s.chunkLength + if s.offset < s.length { + keys = append(keys, s.originalKeys[s.offset:s.length]...) + } else { + s.offset = 0 + s.chunkLength >>= 1 + } + + result := reflect.MakeMapWithSize(s.original.Type(), len(keys)) + for _, key := range keys { + result.SetMapIndex(key, s.original.MapIndex(key)) + } + + return result.Interface(), true +} + +// MapShrinker creates a map shrinker from shrinker for the key values. 
+// The length of the map will be shrunk as well +func MapShrinker(keyShrinker, elementShrinker gopter.Shrinker) gopter.Shrinker { + return func(v interface{}) gopter.Shrink { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Map { + panic(fmt.Sprintf("%#v is not a Map", v)) + } + keys := rv.MapKeys() + mapShrink := &mapShrink{ + original: rv, + originalKeys: keys, + offset: 0, + length: rv.Len(), + chunkLength: rv.Len() >> 1, + } + + shrinks := make([]gopter.Shrink, 0, rv.Len()+1) + shrinks = append(shrinks, mapShrink.Next) + for _, key := range keys { + mapShrinkOne := &mapShrinkOne{ + original: rv, + key: key, + keyShrink: keyShrinker(key.Interface()), + lastKey: key.Interface(), + elementShrink: elementShrinker(rv.MapIndex(key).Interface()), + lastElement: rv.MapIndex(key).Interface(), + } + shrinks = append(shrinks, mapShrinkOne.Next) + } + return gopter.ConcatShrinks(shrinks...) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/one_of.go b/vendor/github.com/leanovate/gopter/gen/one_of.go new file mode 100644 index 0000000..9cb5370 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/one_of.go @@ -0,0 +1,29 @@ +package gen + +import ( + "reflect" + + "github.com/leanovate/gopter" +) + +// OneConstOf generate one of a list of constant values +func OneConstOf(consts ...interface{}) gopter.Gen { + if len(consts) == 0 { + return Fail(reflect.TypeOf(nil)) + } + return func(genParams *gopter.GenParameters) *gopter.GenResult { + idx := genParams.Rng.Intn(len(consts)) + return gopter.NewGenResult(consts[idx], gopter.NoShrinker) + } +} + +// OneGenOf generate one value from a a list of generators +func OneGenOf(gens ...gopter.Gen) gopter.Gen { + if len(gens) == 0 { + return Fail(reflect.TypeOf(nil)) + } + return func(genParams *gopter.GenParameters) *gopter.GenResult { + idx := genParams.Rng.Intn(len(gens)) + return gens[idx](genParams) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/ptr_of.go b/vendor/github.com/leanovate/gopter/gen/ptr_of.go new file mode 100644 index 0000000..24d5c3d --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/ptr_of.go @@ -0,0 +1,41 @@ +package gen + +import ( + "reflect" + + "github.com/leanovate/gopter" +) + +// PtrOf generates either a pointer to a generated element or a nil pointer +func PtrOf(elementGen gopter.Gen) gopter.Gen { + return func(genParams *gopter.GenParameters) *gopter.GenResult { + element := elementGen(genParams) + elementShrinker := element.Shrinker + elementSieve := element.Sieve + value, ok := element.Retrieve() + if !ok || genParams.NextBool() { + result := gopter.NewEmptyResult(reflect.PtrTo(element.ResultType)) + result.Sieve = func(v interface{}) bool { + if elementSieve == nil { + return true + } + r := reflect.ValueOf(v) + return !r.IsValid() || r.IsNil() || elementSieve(r.Elem().Interface()) + } + return result + } + // To get the right pointer type we have to create a slice with one element + slice := reflect.MakeSlice(reflect.SliceOf(element.ResultType), 0, 1) + slice = reflect.Append(slice, reflect.ValueOf(value)) + + result := gopter.NewGenResult(slice.Index(0).Addr().Interface(), PtrShrinker(elementShrinker)) + result.Sieve = func(v interface{}) bool { + if elementSieve == nil { + return true + } + r := reflect.ValueOf(v) + return !r.IsValid() || r.IsNil() || elementSieve(r.Elem().Interface()) + } + return result + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/ptr_shrink.go b/vendor/github.com/leanovate/gopter/gen/ptr_shrink.go new file mode 100644 index 0000000..f3cdc40 --- /dev/null 
+++ b/vendor/github.com/leanovate/gopter/gen/ptr_shrink.go @@ -0,0 +1,45 @@ +package gen + +import ( + "reflect" + + "github.com/leanovate/gopter" +) + +type nilShrink struct { + done bool +} + +func (s *nilShrink) Next() (interface{}, bool) { + if !s.done { + s.done = true + return nil, true + } + return nil, false +} + +// PtrShrinker convert a value shrinker to a pointer to value shrinker +func PtrShrinker(elementShrinker gopter.Shrinker) gopter.Shrinker { + return func(v interface{}) gopter.Shrink { + if v == nil { + return gopter.NoShrink + } + elem := reflect.ValueOf(v).Elem() + if !elem.IsValid() || !elem.CanInterface() { + return gopter.NoShrink + } + rt := reflect.TypeOf(v) + elementShink := elementShrinker(reflect.ValueOf(v).Elem().Interface()) + + nilShrink := &nilShrink{} + return gopter.ConcatShrinks( + nilShrink.Next, + elementShink.Map(func(elem interface{}) interface{} { + slice := reflect.MakeSlice(reflect.SliceOf(rt.Elem()), 0, 1) + slice = reflect.Append(slice, reflect.ValueOf(elem)) + + return slice.Index(0).Addr().Interface() + }), + ) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/regex.go b/vendor/github.com/leanovate/gopter/gen/regex.go new file mode 100644 index 0000000..ec27512 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/regex.go @@ -0,0 +1,78 @@ +package gen + +import ( + "reflect" + "regexp" + "regexp/syntax" + "strings" + + "github.com/leanovate/gopter" +) + +// RegexMatch generates matches for a given regular expression +// regexStr is supposed to conform to the perl regular expression syntax +func RegexMatch(regexStr string) gopter.Gen { + regexSyntax, err1 := syntax.Parse(regexStr, syntax.Perl) + regex, err2 := regexp.Compile(regexStr) + if err1 != nil || err2 != nil { + return Fail(reflect.TypeOf("")) + } + return regexMatchGen(regexSyntax.Simplify()).SuchThat(func(v string) bool { + return regex.MatchString(v) + }).WithShrinker(StringShrinker) +} + +func regexMatchGen(regex *syntax.Regexp) gopter.Gen { + switch regex.Op { + case syntax.OpLiteral: + return Const(string(regex.Rune)) + case syntax.OpCharClass: + gens := make([]gopter.Gen, 0, len(regex.Rune)/2) + for i := 0; i+1 < len(regex.Rune); i += 2 { + gens = append(gens, RuneRange(regex.Rune[i], regex.Rune[i+1]).Map(runeToString)) + } + return OneGenOf(gens...) + case syntax.OpAnyChar: + return Rune().Map(runeToString) + case syntax.OpAnyCharNotNL: + return RuneNoControl().Map(runeToString) + case syntax.OpCapture: + return regexMatchGen(regex.Sub[0]) + case syntax.OpStar: + elementGen := regexMatchGen(regex.Sub[0]) + return SliceOf(elementGen).Map(func(v []string) string { + return strings.Join(v, "") + }) + case syntax.OpPlus: + elementGen := regexMatchGen(regex.Sub[0]) + return gopter.CombineGens(elementGen, SliceOf(elementGen)).Map(func(vs []interface{}) string { + return vs[0].(string) + strings.Join(vs[1].([]string), "") + }) + case syntax.OpQuest: + elementGen := regexMatchGen(regex.Sub[0]) + return OneGenOf(Const(""), elementGen) + case syntax.OpConcat: + gens := make([]gopter.Gen, len(regex.Sub)) + for i, sub := range regex.Sub { + gens[i] = regexMatchGen(sub) + } + return gopter.CombineGens(gens...).Map(func(v []interface{}) string { + result := "" + for _, str := range v { + result += str.(string) + } + return result + }) + case syntax.OpAlternate: + gens := make([]gopter.Gen, len(regex.Sub)) + for i, sub := range regex.Sub { + gens[i] = regexMatchGen(sub) + } + return OneGenOf(gens...) 
+ } + return Const("") +} + +func runeToString(v rune) string { + return string(v) +} diff --git a/vendor/github.com/leanovate/gopter/gen/retry_until.go b/vendor/github.com/leanovate/gopter/gen/retry_until.go new file mode 100644 index 0000000..de9cf1d --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/retry_until.go @@ -0,0 +1,21 @@ +package gen + +import "github.com/leanovate/gopter" + +// RetryUntil creates a generator that retries a given generator until a condition in met. +// condition: has to be a function with one parameter (matching the generated value of gen) returning a bool. +// Note: The new generator will only create an empty result once maxRetries is reached. +// Depending on the hit-ratio of the condition is may result in long running tests, use with care. +func RetryUntil(gen gopter.Gen, condition interface{}, maxRetries int) gopter.Gen { + genWithSieve := gen.SuchThat(condition) + return func(genParams *gopter.GenParameters) *gopter.GenResult { + for i := 0; i < maxRetries; i++ { + result := genWithSieve(genParams) + if _, ok := result.Retrieve(); ok { + return result + } + } + resultType := gen(genParams).ResultType + return gopter.NewEmptyResult(resultType) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/sized.go b/vendor/github.com/leanovate/gopter/gen/sized.go new file mode 100644 index 0000000..ca46c27 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/sized.go @@ -0,0 +1,20 @@ +package gen + +import ( + "github.com/leanovate/gopter" +) + +// Sized derives a generator from based on size +// This honors the `MinSize` and `MaxSize` of the `GenParameters` of the test suite. +// Keep an eye on memory consumption, by default MaxSize is 100. +func Sized(f func(int) gopter.Gen) gopter.Gen { + return func(params *gopter.GenParameters) *gopter.GenResult { + var size int + if params.MaxSize == params.MinSize { + size = params.MaxSize + } else { + size = params.Rng.Intn(params.MaxSize-params.MinSize) + params.MinSize + } + return f(size)(params) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/slice_of.go b/vendor/github.com/leanovate/gopter/gen/slice_of.go new file mode 100644 index 0000000..fa18ad5 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/slice_of.go @@ -0,0 +1,106 @@ +package gen + +import ( + "reflect" + + "github.com/leanovate/gopter" +) + +// SliceOf generates an arbitrary slice of generated elements +// genParams.MaxSize sets an (exclusive) upper limit on the size of the slice +// genParams.MinSize sets an (inclusive) lower limit on the size of the slice +func SliceOf(elementGen gopter.Gen, typeOverrides ...reflect.Type) gopter.Gen { + var typeOverride reflect.Type + if len(typeOverrides) > 1 { + panic("too many type overrides specified, at most 1 may be provided.") + } else if len(typeOverrides) == 1 { + typeOverride = typeOverrides[0] + } + return func(genParams *gopter.GenParameters) *gopter.GenResult { + len := 0 + if genParams.MaxSize > 0 || genParams.MinSize > 0 { + if genParams.MinSize > genParams.MaxSize { + panic("GenParameters.MinSize must be <= GenParameters.MaxSize") + } + + if genParams.MaxSize == genParams.MinSize { + len = genParams.MaxSize + } else { + len = genParams.Rng.Intn(genParams.MaxSize-genParams.MinSize) + genParams.MinSize + } + } + result, elementSieve, elementShrinker := genSlice(elementGen, genParams, len, typeOverride) + + genResult := gopter.NewGenResult(result.Interface(), SliceShrinker(elementShrinker)) + if elementSieve != nil { + genResult.Sieve = forAllSieve(elementSieve) + } + 
return genResult + } +} + +// SliceOfN generates a slice of generated elements with definied length +func SliceOfN(desiredlen int, elementGen gopter.Gen, typeOverrides ...reflect.Type) gopter.Gen { + var typeOverride reflect.Type + if len(typeOverrides) > 1 { + panic("too many type overrides specified, at most 1 may be provided.") + } else if len(typeOverrides) == 1 { + typeOverride = typeOverrides[0] + } + return func(genParams *gopter.GenParameters) *gopter.GenResult { + result, elementSieve, elementShrinker := genSlice(elementGen, genParams, desiredlen, typeOverride) + + genResult := gopter.NewGenResult(result.Interface(), SliceShrinkerOne(elementShrinker)) + if elementSieve != nil { + genResult.Sieve = func(v interface{}) bool { + rv := reflect.ValueOf(v) + return rv.Len() == desiredlen && forAllSieve(elementSieve)(v) + } + } else { + genResult.Sieve = func(v interface{}) bool { + return reflect.ValueOf(v).Len() == desiredlen + } + } + return genResult + } +} + +func genSlice(elementGen gopter.Gen, genParams *gopter.GenParameters, desiredlen int, typeOverride reflect.Type) (reflect.Value, func(interface{}) bool, gopter.Shrinker) { + element := elementGen(genParams) + elementSieve := element.Sieve + elementShrinker := element.Shrinker + + sliceType := typeOverride + if sliceType == nil { + sliceType = element.ResultType + } + + result := reflect.MakeSlice(reflect.SliceOf(sliceType), 0, desiredlen) + + for i := 0; i < desiredlen; i++ { + value, ok := element.Retrieve() + + if ok { + if value == nil { + result = reflect.Append(result, reflect.Zero(sliceType)) + } else { + result = reflect.Append(result, reflect.ValueOf(value)) + } + } + element = elementGen(genParams) + } + + return result, elementSieve, elementShrinker +} + +func forAllSieve(elementSieve func(interface{}) bool) func(interface{}) bool { + return func(v interface{}) bool { + rv := reflect.ValueOf(v) + for i := rv.Len() - 1; i >= 0; i-- { + if !elementSieve(rv.Index(i).Interface()) { + return false + } + } + return true + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/slice_shrink.go b/vendor/github.com/leanovate/gopter/gen/slice_shrink.go new file mode 100644 index 0000000..5a056e4 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/slice_shrink.go @@ -0,0 +1,105 @@ +package gen + +import ( + "fmt" + "reflect" + + "github.com/leanovate/gopter" +) + +type sliceShrinkOne struct { + original reflect.Value + index int + elementShrink gopter.Shrink +} + +func (s *sliceShrinkOne) Next() (interface{}, bool) { + value, ok := s.elementShrink() + if !ok { + return nil, false + } + result := reflect.MakeSlice(s.original.Type(), s.original.Len(), s.original.Len()) + reflect.Copy(result, s.original) + if value == nil { + result.Index(s.index).Set(reflect.Zero(s.original.Type().Elem())) + } else { + result.Index(s.index).Set(reflect.ValueOf(value)) + } + + return result.Interface(), true +} + +// SliceShrinkerOne creates a slice shrinker from a shrinker for the elements of the slice. +// The length of the slice will remains unchanged, instead each element is shrunk after the +// other. 
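// --- Editorial sketch, not part of the vendored upstream sources ---
// SliceOf and SliceOfN above are ordinary gopter.Gen functions, so a concrete
// slice can be produced by calling one with generator parameters and unpacking
// the GenResult. Package and function names below are placeholders; the
// concrete sizes and ranges are arbitrary.

package example

import (
	"fmt"

	"github.com/leanovate/gopter"
	"github.com/leanovate/gopter/gen"
)

func generateFixedLengthSlice() {
	params := gopter.DefaultGenParameters()
	// SliceOfN pins the length at 5; its sieve rejects any other length,
	// so even shrunk values keep exactly 5 elements in [0, 100].
	intsGen := gen.SliceOfN(5, gen.IntRange(0, 100))
	if value, ok := intsGen(params).Retrieve(); ok {
		fmt.Println(value.([]int))
	}
}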
+func SliceShrinkerOne(elementShrinker gopter.Shrinker) gopter.Shrinker { + return func(v interface{}) gopter.Shrink { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Slice { + panic(fmt.Sprintf("%#v is not a slice", v)) + } + + shrinks := make([]gopter.Shrink, 0, rv.Len()) + for i := 0; i < rv.Len(); i++ { + sliceShrinkOne := &sliceShrinkOne{ + original: rv, + index: i, + elementShrink: elementShrinker(rv.Index(i).Interface()), + } + shrinks = append(shrinks, sliceShrinkOne.Next) + } + return gopter.ConcatShrinks(shrinks...) + } +} + +type sliceShrink struct { + original reflect.Value + length int + offset int + chunkLength int +} + +func (s *sliceShrink) Next() (interface{}, bool) { + if s.chunkLength == 0 { + return nil, false + } + value := reflect.AppendSlice(reflect.MakeSlice(s.original.Type(), 0, s.length-s.chunkLength), s.original.Slice(0, s.offset)) + s.offset += s.chunkLength + if s.offset < s.length { + value = reflect.AppendSlice(value, s.original.Slice(s.offset, s.length)) + } else { + s.offset = 0 + s.chunkLength >>= 1 + } + + return value.Interface(), true +} + +// SliceShrinker creates a slice shrinker from a shrinker for the elements of the slice. +// The length of the slice will be shrunk as well +func SliceShrinker(elementShrinker gopter.Shrinker) gopter.Shrinker { + return func(v interface{}) gopter.Shrink { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Slice { + panic(fmt.Sprintf("%#v is not a slice", v)) + } + sliceShrink := &sliceShrink{ + original: rv, + offset: 0, + length: rv.Len(), + chunkLength: rv.Len() >> 1, + } + + shrinks := make([]gopter.Shrink, 0, rv.Len()+1) + shrinks = append(shrinks, sliceShrink.Next) + for i := 0; i < rv.Len(); i++ { + sliceShrinkOne := &sliceShrinkOne{ + original: rv, + index: i, + elementShrink: elementShrinker(rv.Index(i).Interface()), + } + shrinks = append(shrinks, sliceShrinkOne.Next) + } + return gopter.ConcatShrinks(shrinks...) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/string_shrink.go b/vendor/github.com/leanovate/gopter/gen/string_shrink.go new file mode 100644 index 0000000..019ee13 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/string_shrink.go @@ -0,0 +1,11 @@ +package gen + +import "github.com/leanovate/gopter" + +var runeSliceShrinker = SliceShrinker(gopter.NoShrinker) + +// StringShrinker is a shrinker for strings. +// It is very similar to a slice shrinker just that the elements themselves will not be shrunk. 
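// --- Editorial sketch, not part of the vendored upstream sources ---
// A gopter.Shrink is just a func() (interface{}, bool) iterator, so a
// shrinker's candidates can be walked by hand. Here SliceShrinker only
// shortens the slice, because gopter.NoShrinker leaves the elements untouched
// (the same combination StringShrinker below uses for runes). Names are
// placeholders.

package example

import (
	"fmt"

	"github.com/leanovate/gopter"
	"github.com/leanovate/gopter/gen"
)

func walkSliceShrinks() {
	shrinker := gen.SliceShrinker(gopter.NoShrinker)
	shrink := shrinker([]int{1, 2, 3, 4})
	for value, ok := shrink(); ok; value, ok = shrink() {
		fmt.Println(value.([]int)) // e.g. [3 4], [1 2], [2 3 4], ...
	}
}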
+func StringShrinker(v interface{}) gopter.Shrink { + return runeSliceShrinker([]rune(v.(string))).Map(runesToString) +} diff --git a/vendor/github.com/leanovate/gopter/gen/strings.go b/vendor/github.com/leanovate/gopter/gen/strings.go new file mode 100644 index 0000000..79a69ef --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/strings.go @@ -0,0 +1,158 @@ +package gen + +import ( + "reflect" + "unicode" + "unicode/utf8" + + "github.com/leanovate/gopter" +) + +// RuneRange generates runes within a given range +func RuneRange(min, max rune) gopter.Gen { + return genRune(Int64Range(int64(min), int64(max))) +} + +// Rune generates an arbitrary character rune +func Rune() gopter.Gen { + return genRune(Frequency(map[int]gopter.Gen{ + 0xD800: Int64Range(0, 0xD800), + utf8.MaxRune - 0xDFFF: Int64Range(0xDFFF, int64(utf8.MaxRune)), + })) +} + +// RuneNoControl generates an arbitrary character rune that is not a control character +func RuneNoControl() gopter.Gen { + return genRune(Frequency(map[int]gopter.Gen{ + 0xD800: Int64Range(32, 0xD800), + utf8.MaxRune - 0xDFFF: Int64Range(0xDFFF, int64(utf8.MaxRune)), + })) +} + +func genRune(int64Gen gopter.Gen) gopter.Gen { + return int64Gen.Map(func(value int64) rune { + return rune(value) + }).SuchThat(func(v rune) bool { + return utf8.ValidRune(v) + }) +} + +// NumChar generates arbitrary numberic character runes +func NumChar() gopter.Gen { + return RuneRange('0', '9') +} + +// AlphaUpperChar generates arbitrary uppercase alpha character runes +func AlphaUpperChar() gopter.Gen { + return RuneRange('A', 'Z') +} + +// AlphaLowerChar generates arbitrary lowercase alpha character runes +func AlphaLowerChar() gopter.Gen { + return RuneRange('a', 'z') +} + +// AlphaChar generates arbitrary character runes (upper- and lowercase) +func AlphaChar() gopter.Gen { + return Frequency(map[int]gopter.Gen{ + 0: AlphaUpperChar(), + 9: AlphaLowerChar(), + }) +} + +// AlphaNumChar generates arbitrary alpha-numeric character runes +func AlphaNumChar() gopter.Gen { + return Frequency(map[int]gopter.Gen{ + 0: NumChar(), + 9: AlphaChar(), + }) +} + +// UnicodeChar generates arbitrary character runes with a given unicode table +func UnicodeChar(table *unicode.RangeTable) gopter.Gen { + if table == nil || len(table.R16)+len(table.R32) == 0 { + return Fail(reflect.TypeOf(rune('a'))) + } + return func(genParams *gopter.GenParameters) *gopter.GenResult { + tableIdx := genParams.Rng.Intn(len(table.R16) + len(table.R32)) + + var selectedRune rune + if tableIdx < len(table.R16) { + r := table.R16[tableIdx] + runeOffset := uint16(genParams.Rng.Int63n(int64((r.Hi-r.Lo+1)/r.Stride))) * r.Stride + selectedRune = rune(runeOffset + r.Lo) + } else { + r := table.R32[tableIdx-len(table.R16)] + runeOffset := uint32(genParams.Rng.Int63n(int64((r.Hi-r.Lo+1)/r.Stride))) * r.Stride + selectedRune = rune(runeOffset + r.Lo) + } + genResult := gopter.NewGenResult(selectedRune, gopter.NoShrinker) + genResult.Sieve = func(v interface{}) bool { + return unicode.Is(table, v.(rune)) + } + return genResult + } +} + +// AnyString generates an arbitrary string +func AnyString() gopter.Gen { + return genString(Rune(), utf8.ValidRune) +} + +// AlphaString generates an arbitrary string with letters +func AlphaString() gopter.Gen { + return genString(AlphaChar(), unicode.IsLetter) +} + +// NumString generates an arbitrary string with digits +func NumString() gopter.Gen { + return genString(NumChar(), unicode.IsDigit) +} + +// Identifier generates an arbitrary identifier string +// Identitiers are 
supporsed to start with a lowercase letter and contain only +// letters and digits +func Identifier() gopter.Gen { + return gopter.CombineGens( + AlphaLowerChar(), + SliceOf(AlphaNumChar()), + ).Map(func(values []interface{}) string { + first := values[0].(rune) + tail := values[1].([]rune) + result := make([]rune, 0, len(tail)+1) + return string(append(append(result, first), tail...)) + }).SuchThat(func(str string) bool { + if len(str) < 1 || !unicode.IsLower(([]rune(str))[0]) { + return false + } + for _, ch := range str { + if !unicode.IsLetter(ch) && !unicode.IsDigit(ch) { + return false + } + } + return true + }).WithShrinker(StringShrinker) +} + +// UnicodeString generates an arbitrary string from a given +// unicode table. +func UnicodeString(table *unicode.RangeTable) gopter.Gen { + return genString(UnicodeChar(table), func(ch rune) bool { + return unicode.Is(table, ch) + }) +} + +func genString(runeGen gopter.Gen, runeSieve func(ch rune) bool) gopter.Gen { + return SliceOf(runeGen).Map(runesToString).SuchThat(func(v string) bool { + for _, ch := range v { + if !runeSieve(ch) { + return false + } + } + return true + }).WithShrinker(StringShrinker) +} + +func runesToString(v []rune) string { + return string(v) +} diff --git a/vendor/github.com/leanovate/gopter/gen/struct.go b/vendor/github.com/leanovate/gopter/gen/struct.go new file mode 100644 index 0000000..7940109 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/struct.go @@ -0,0 +1,172 @@ +package gen + +import ( + "fmt" + "reflect" + + "github.com/leanovate/gopter" +) + +// Struct generates a given struct type. +// rt has to be the reflect type of the struct, gens contains a map of field generators. +// Note that the result types of the generators in gen have to match the type of the corresponding +// field in the struct. Also note that only public fields of a struct can be generated +func Struct(rt reflect.Type, gens map[string]gopter.Gen) gopter.Gen { + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + if rt.Kind() != reflect.Struct { + return Fail(rt) + } + fieldGens := []gopter.Gen{} + fieldTypes := []reflect.Type{} + assignable := reflect.New(rt).Elem() + for i := 0; i < rt.NumField(); i++ { + fieldName := rt.Field(i).Name + if !assignable.Field(i).CanSet() { + continue + } + + gen := gens[fieldName] + if gen != nil { + fieldGens = append(fieldGens, gen) + fieldTypes = append(fieldTypes, rt.Field(i).Type) + } + } + + buildStructType := reflect.FuncOf(fieldTypes, []reflect.Type{rt}, false) + unbuildStructType := reflect.FuncOf([]reflect.Type{rt}, fieldTypes, false) + + buildStructFunc := reflect.MakeFunc(buildStructType, func(args []reflect.Value) []reflect.Value { + result := reflect.New(rt) + for i := 0; i < rt.NumField(); i++ { + if _, ok := gens[rt.Field(i).Name]; !ok { + continue + } + if !assignable.Field(i).CanSet() { + continue + } + result.Elem().Field(i).Set(args[0]) + args = args[1:] + } + return []reflect.Value{result.Elem()} + }) + unbuildStructFunc := reflect.MakeFunc(unbuildStructType, func(args []reflect.Value) []reflect.Value { + s := args[0] + results := []reflect.Value{} + for i := 0; i < s.NumField(); i++ { + if _, ok := gens[rt.Field(i).Name]; !ok { + continue + } + if !assignable.Field(i).CanSet() { + continue + } + results = append(results, s.Field(i)) + } + return results + }) + + return gopter.DeriveGen( + buildStructFunc.Interface(), + unbuildStructFunc.Interface(), + fieldGens..., + ) +} + +// StructPtr generates pointers to a given struct type. 
+// Note that StructPtr does not generate nil, if you want to include nil in your +// testing you should combine gen.PtrOf with gen.Struct. +// rt has to be the reflect type of the struct, gens contains a map of field generators. +// Note that the result types of the generators in gen have to match the type of the corresponding +// field in the struct. Also note that only public fields of a struct can be generated +func StructPtr(rt reflect.Type, gens map[string]gopter.Gen) gopter.Gen { + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + + buildPtrType := reflect.FuncOf([]reflect.Type{rt}, []reflect.Type{reflect.PtrTo(rt)}, false) + unbuildPtrType := reflect.FuncOf([]reflect.Type{reflect.PtrTo(rt)}, []reflect.Type{rt}, false) + + buildPtrFunc := reflect.MakeFunc(buildPtrType, func(args []reflect.Value) []reflect.Value { + sp := reflect.New(rt) + sp.Elem().Set(args[0]) + return []reflect.Value{sp} + }) + unbuildPtrFunc := reflect.MakeFunc(unbuildPtrType, func(args []reflect.Value) []reflect.Value { + return []reflect.Value{args[0].Elem()} + }) + + return gopter.DeriveGen( + buildPtrFunc.Interface(), + unbuildPtrFunc.Interface(), + Struct(rt, gens), + ) +} + +// checkFieldsMatch panics unless the keys in gens exactly match the public +// fields on rt. With an extra bool argument of value "true", it only panics if +// there's a key in gens which is not a field on rt. +func checkFieldsMatch( + rt reflect.Type, + gens map[string]gopter.Gen, + allowFieldsWithNoGenerator ...bool, +) { + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + } + fields := make(map[string]bool, rt.NumField()) + for i := 0; i < rt.NumField(); i++ { + fields[rt.Field(i).Name] = true + } + for field := range gens { + if _, ok := fields[field]; !ok { + panic(fmt.Errorf("generator for non-existent field %s on struct %s", + field, rt.Name())) + } + delete(fields, field) + } + if len(allowFieldsWithNoGenerator) > 0 && allowFieldsWithNoGenerator[0] { + return // Don't check that every field is present in gens + } + if len(allowFieldsWithNoGenerator) > 1 { + panic("expect at most one boolean argument in StrictStruct/StrictStructPtr") + } + if len(fields) != 0 { // Check that every field is present in gens + var missingFields []string + for field := range fields { + missingFields = append(missingFields, field) + } + panic(fmt.Errorf("generator missing for fields %v on struct %s", + missingFields, rt.Name())) + } +} + +// StrictStruct behaves the same as Struct, except it requires the keys in gens +// to exactly match the public fields of rt. It panics if gens contains extra +// keys, or has missing keys. +// +// If given a third true argument, it only requires the keys of gens to be +// fields of rt. In that case, unspecified fields will remain unset. +func StrictStruct( + rt reflect.Type, + gens map[string]gopter.Gen, + allowFieldsWithNoGenerator ...bool, +) gopter.Gen { + checkFieldsMatch(rt, gens, allowFieldsWithNoGenerator...) + return Struct(rt, gens) +} + +// StrictStructPtr behaves the same as StructPtr, except it requires the keys in +// gens to exactly match the public fields of rt. It panics if gens contains +// extra keys, or has missing keys. +// +// If given a third true argument, it only requires the keys of gens to be +// fields of rt. In that case, unspecified fields will remain unset. +func StrictStructPtr( + rt reflect.Type, + gens map[string]gopter.Gen, + allowFieldsWithNoGenerator ...bool, +) gopter.Gen { + checkFieldsMatch(rt, gens, allowFieldsWithNoGenerator...) 
+ return StructPtr(rt, gens) +} diff --git a/vendor/github.com/leanovate/gopter/gen/time.go b/vendor/github.com/leanovate/gopter/gen/time.go new file mode 100644 index 0000000..72e658c --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/time.go @@ -0,0 +1,37 @@ +package gen + +import ( + "time" + + "github.com/leanovate/gopter" +) + +// Time generates an arbitrary time.Time within year [0, 9999] +func Time() gopter.Gen { + return func(genParams *gopter.GenParameters) *gopter.GenResult { + sec := genParams.Rng.Int63n(253402214400) // Ensure year in [0, 9999] + usec := genParams.Rng.Int63n(1000000000) + + return gopter.NewGenResult(time.Unix(sec, usec), TimeShrinker) + } +} + +// AnyTime generates an arbitrary time.Time struct (might be way out of bounds of any reason) +func AnyTime() gopter.Gen { + return func(genParams *gopter.GenParameters) *gopter.GenResult { + sec := genParams.NextInt64() + usec := genParams.NextInt64() + + return gopter.NewGenResult(time.Unix(sec, usec), TimeShrinker) + } +} + +// TimeRange generates an arbitrary time.Time with a range +// from defines the start of the time range +// duration defines the overall duration of the time range +func TimeRange(from time.Time, duration time.Duration) gopter.Gen { + return func(genParams *gopter.GenParameters) *gopter.GenResult { + v := from.Add(time.Duration(genParams.Rng.Int63n(int64(duration)))) + return gopter.NewGenResult(v, TimeShrinker) + } +} diff --git a/vendor/github.com/leanovate/gopter/gen/time_shrink.go b/vendor/github.com/leanovate/gopter/gen/time_shrink.go new file mode 100644 index 0000000..f7b76e5 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/time_shrink.go @@ -0,0 +1,27 @@ +package gen + +import ( + "time" + + "github.com/leanovate/gopter" +) + +// TimeShrinker is a shrinker for time.Time structs +func TimeShrinker(v interface{}) gopter.Shrink { + t := v.(time.Time) + sec := t.Unix() + nsec := int64(t.Nanosecond()) + secShrink := uint64Shrink{ + original: uint64(sec), + half: uint64(sec), + } + nsecShrink := uint64Shrink{ + original: uint64(nsec), + half: uint64(nsec), + } + return gopter.Shrink(secShrink.Next).Map(func(v uint64) time.Time { + return time.Unix(int64(v), nsec) + }).Interleave(gopter.Shrink(nsecShrink.Next).Map(func(v uint64) time.Time { + return time.Unix(sec, int64(v)) + })) +} diff --git a/vendor/github.com/leanovate/gopter/gen/weighted.go b/vendor/github.com/leanovate/gopter/gen/weighted.go new file mode 100644 index 0000000..af97c50 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen/weighted.go @@ -0,0 +1,44 @@ +package gen + +import ( + "fmt" + "sort" + + "github.com/leanovate/gopter" +) + +// WeightedGen adds a weight number to a generator. +// To be used as parameter to gen.Weighted +type WeightedGen struct { + Weight int + Gen gopter.Gen +} + +// Weighted combines multiple generators, where each generator has a weight. +// The weight of a generator is proportional to the probability that the +// generator gets selected. 
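// --- Editorial sketch, not part of the vendored upstream sources ---
// Unlike Frequency, which takes a map keyed by weight, Weighted takes an
// explicit slice of WeightedGen entries, so duplicate weights and a stable
// order are possible. With the weights below roughly nine out of ten generated
// runes should be lowercase letters. Names and weights are placeholders.

package example

import (
	"github.com/leanovate/gopter"
	"github.com/leanovate/gopter/gen"
)

func weightedRuneGen() gopter.Gen {
	return gen.Weighted([]gen.WeightedGen{
		{Weight: 9, Gen: gen.AlphaLowerChar()}, // selected ~90% of the time
		{Weight: 1, Gen: gen.NumChar()},        // selected ~10% of the time
	})
}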
+func Weighted(weightedGens []WeightedGen) gopter.Gen { + if len(weightedGens) == 0 { + panic("weightedGens must be non-empty") + } + weights := make(sort.IntSlice, 0, len(weightedGens)) + + totalWeight := 0 + for _, weightedGen := range weightedGens { + w := weightedGen.Weight + if w <= 0 { + panic(fmt.Sprintf( + "weightedGens must have positive weights; got %d", + w)) + } + totalWeight += weightedGen.Weight + weights = append(weights, totalWeight) + } + return func(genParams *gopter.GenParameters) *gopter.GenResult { + idx := weights.Search(1 + genParams.Rng.Intn(totalWeight)) + gen := weightedGens[idx].Gen + result := gen(genParams) + result.Sieve = nil + return result + } +} diff --git a/vendor/github.com/leanovate/gopter/gen_parameters.go b/vendor/github.com/leanovate/gopter/gen_parameters.go new file mode 100644 index 0000000..ee00496 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen_parameters.go @@ -0,0 +1,81 @@ +package gopter + +import ( + "math/rand" + "time" +) + +// GenParameters encapsulates the parameters for all generators. +type GenParameters struct { + MinSize int + MaxSize int + MaxShrinkCount int + Rng *rand.Rand +} + +// WithSize modifies the size parameter. The size parameter defines an upper bound for the size of +// generated slices or strings. +func (p *GenParameters) WithSize(size int) *GenParameters { + newParameters := *p + newParameters.MaxSize = size + return &newParameters +} + +// NextBool create a random boolean using the underlying Rng. +func (p *GenParameters) NextBool() bool { + return p.Rng.Int63()&1 == 0 +} + +// NextInt64 create a random int64 using the underlying Rng. +func (p *GenParameters) NextInt64() int64 { + v := p.Rng.Int63() + if p.NextBool() { + return -v + } + return v +} + +// NextUint64 create a random uint64 using the underlying Rng. +func (p *GenParameters) NextUint64() uint64 { + first := uint64(p.Rng.Int63()) + second := uint64(p.Rng.Int63()) + + return (first << 1) ^ second +} + +// CloneWithSeed clone the current parameters with a new seed. +// This is useful to create subsections that can rerun (provided you keep the +// seed) +func (p *GenParameters) CloneWithSeed(seed int64) *GenParameters { + return &GenParameters{ + MinSize: p.MinSize, + MaxSize: p.MaxSize, + MaxShrinkCount: p.MaxShrinkCount, + Rng: rand.New(NewLockedSource(seed)), + } +} + +// DefaultGenParameters creates default GenParameters. +func DefaultGenParameters() *GenParameters { + seed := time.Now().UnixNano() + + return &GenParameters{ + MinSize: 0, + MaxSize: 100, + MaxShrinkCount: 1000, + Rng: rand.New(NewLockedSource(seed)), + } +} + +// MinGenParameters creates minimal GenParameters. +// Note: Most likely you do not want to use these for actual testing +func MinGenParameters() *GenParameters { + seed := time.Now().UnixNano() + + return &GenParameters{ + MinSize: 0, + MaxSize: 0, + MaxShrinkCount: 0, + Rng: rand.New(NewLockedSource(seed)), + } +} diff --git a/vendor/github.com/leanovate/gopter/gen_result.go b/vendor/github.com/leanovate/gopter/gen_result.go new file mode 100644 index 0000000..f692f4e --- /dev/null +++ b/vendor/github.com/leanovate/gopter/gen_result.go @@ -0,0 +1,55 @@ +package gopter + +import "reflect" + +// GenResult contains the result of a generator. +type GenResult struct { + Labels []string + Shrinker Shrinker + ResultType reflect.Type + Result interface{} + Sieve func(interface{}) bool +} + +// NewGenResult creates a new generator result from for a concrete value and +// shrinker. 
+// Note: The concrete value "result" not be nil +func NewGenResult(result interface{}, shrinker Shrinker) *GenResult { + return &GenResult{ + Shrinker: shrinker, + ResultType: reflect.TypeOf(result), + Result: result, + } +} + +// NewEmptyResult creates an empty generator result. +// Unless the sieve does not explicitly allow it, empty (i.e. nil-valued) +// results are considered invalid. +func NewEmptyResult(resultType reflect.Type) *GenResult { + return &GenResult{ + ResultType: resultType, + Shrinker: NoShrinker, + } +} + +// Retrieve gets the concrete generator result. +// If the result is invalid or does not pass the sieve there is no concrete +// value and the property using the generator should be undecided. +func (r *GenResult) Retrieve() (interface{}, bool) { + if (r.Sieve == nil && r.Result != nil) || (r.Sieve != nil && r.Sieve(r.Result)) { + return r.Result, true + } + return nil, false +} + +// RetrieveAsValue get the concrete generator result as reflect value. +// If the result is invalid or does not pass the sieve there is no concrete +// value and the property using the generator should be undecided. +func (r *GenResult) RetrieveAsValue() (reflect.Value, bool) { + if r.Result != nil && (r.Sieve == nil || r.Sieve(r.Result)) { + return reflect.ValueOf(r.Result), true + } else if r.Result == nil && r.Sieve != nil && r.Sieve(r.Result) { + return reflect.Zero(r.ResultType), true + } + return reflect.Zero(r.ResultType), false +} diff --git a/vendor/github.com/leanovate/gopter/locked_source.go b/vendor/github.com/leanovate/gopter/locked_source.go new file mode 100644 index 0000000..34f5241 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/locked_source.go @@ -0,0 +1,77 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// Taken from golang lockedSource implementation https://github.com/golang/go/blob/master/src/math/rand/rand.go#L371-L410 + +package gopter + +import ( + "math/rand" + "sync" +) + +type lockedSource struct { + lk sync.Mutex + src rand.Source64 +} + +// NewLockedSource takes a seed and returns a new +// lockedSource for use with rand.New +func NewLockedSource(seed int64) *lockedSource { + return &lockedSource{ + src: rand.NewSource(seed).(rand.Source64), + } +} + +func (r *lockedSource) Int63() (n int64) { + r.lk.Lock() + n = r.src.Int63() + r.lk.Unlock() + return +} + +func (r *lockedSource) Uint64() (n uint64) { + r.lk.Lock() + n = r.src.Uint64() + r.lk.Unlock() + return +} + +func (r *lockedSource) Seed(seed int64) { + r.lk.Lock() + r.src.Seed(seed) + r.lk.Unlock() +} + +// seedPos implements Seed for a lockedSource without a race condition. +func (r *lockedSource) seedPos(seed int64, readPos *int8) { + r.lk.Lock() + r.src.Seed(seed) + *readPos = 0 + r.lk.Unlock() +} + +// read implements Read for a lockedSource without a race condition. 
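// --- Editorial sketch, not part of the vendored upstream sources ---
// The locked source exists so a single seeded rand.Rand can be shared safely
// by concurrent workers. Fixing the seed makes a generator run reproducible,
// which is how a failing case can be replayed. The seed value and function
// name are placeholders.

package example

import (
	"math/rand"

	"github.com/leanovate/gopter"
)

func reproducibleParams(seed int64) *gopter.GenParameters {
	params := gopter.DefaultGenParameters()
	// Same seed, same sequence of generated values.
	params.Rng = rand.New(gopter.NewLockedSource(seed))
	return params
	// CloneWithSeed offers the same effect:
	// gopter.DefaultGenParameters().CloneWithSeed(seed)
}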
+func (r *lockedSource) read(p []byte, readVal *int64, readPos *int8) (n int, err error) { + r.lk.Lock() + n, err = read(p, r.src.Int63, readVal, readPos) + r.lk.Unlock() + return +} + +func read(p []byte, int63 func() int64, readVal *int64, readPos *int8) (n int, err error) { + pos := *readPos + val := *readVal + for n = 0; n < len(p); n++ { + if pos == 0 { + val = int63() + pos = 7 + } + p[n] = byte(val) + val >>= 8 + pos-- + } + *readPos = pos + *readVal = val + return +} diff --git a/vendor/github.com/leanovate/gopter/prop.go b/vendor/github.com/leanovate/gopter/prop.go new file mode 100644 index 0000000..6ea5afc --- /dev/null +++ b/vendor/github.com/leanovate/gopter/prop.go @@ -0,0 +1,113 @@ +package gopter + +import ( + "fmt" + "math" + "runtime/debug" +) + +// Prop represent some kind of property that (drums please) can and should be checked +type Prop func(*GenParameters) *PropResult + +// SaveProp creates s save property by handling all panics from an inner property +func SaveProp(prop Prop) Prop { + return func(genParams *GenParameters) (result *PropResult) { + defer func() { + if r := recover(); r != nil { + result = &PropResult{ + Status: PropError, + Error: fmt.Errorf("Check paniced: %v", r), + ErrorStack: debug.Stack(), + } + } + }() + + return prop(genParams) + } +} + +// Check the property using specific parameters +func (prop Prop) Check(parameters *TestParameters) *TestResult { + iterations := math.Ceil(float64(parameters.MinSuccessfulTests) / float64(parameters.Workers)) + sizeStep := float64(parameters.MaxSize-parameters.MinSize) / (iterations * float64(parameters.Workers)) + + genParameters := GenParameters{ + MinSize: parameters.MinSize, + MaxSize: parameters.MaxSize, + MaxShrinkCount: parameters.MaxShrinkCount, + Rng: parameters.Rng, + } + runner := &runner{ + parameters: parameters, + worker: func(workerIdx int, shouldStop shouldStop) *TestResult { + var n int + var d int + + isExhaused := func() bool { + return n+d > parameters.MinSuccessfulTests && + 1.0+float64(parameters.Workers*n)*parameters.MaxDiscardRatio < float64(d) + } + + for !shouldStop() && n < int(iterations) { + size := float64(parameters.MinSize) + (sizeStep * float64(workerIdx+(parameters.Workers*(n+d)))) + propResult := prop(genParameters.WithSize(int(size))) + + switch propResult.Status { + case PropUndecided: + d++ + if isExhaused() { + return &TestResult{ + Status: TestExhausted, + Succeeded: n, + Discarded: d, + } + } + case PropTrue: + n++ + case PropProof: + n++ + return &TestResult{ + Status: TestProved, + Succeeded: n, + Discarded: d, + Labels: propResult.Labels, + Args: propResult.Args, + } + case PropFalse: + return &TestResult{ + Status: TestFailed, + Succeeded: n, + Discarded: d, + Labels: propResult.Labels, + Args: propResult.Args, + } + case PropError: + return &TestResult{ + Status: TestError, + Succeeded: n, + Discarded: d, + Labels: propResult.Labels, + Error: propResult.Error, + ErrorStack: propResult.ErrorStack, + Args: propResult.Args, + } + } + } + + if isExhaused() { + return &TestResult{ + Status: TestExhausted, + Succeeded: n, + Discarded: d, + } + } + return &TestResult{ + Status: TestPassed, + Succeeded: n, + Discarded: d, + } + }, + } + + return runner.runWorkers() +} diff --git a/vendor/github.com/leanovate/gopter/prop/check_condition_func.go b/vendor/github.com/leanovate/gopter/prop/check_condition_func.go new file mode 100644 index 0000000..ea3dbe9 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/prop/check_condition_func.go @@ -0,0 +1,50 @@ +package prop + 
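// --- Editorial sketch, not part of the vendored upstream sources ---
// How the pieces above usually fit together in a test: ForAll builds a Prop
// from a condition plus generators, Properties collects named props, and
// TestingRun drives Prop.Check with the configured TestParameters. The
// property name and the checked condition are made up for illustration.

package example

import (
	"testing"

	"github.com/leanovate/gopter"
	"github.com/leanovate/gopter/gen"
	"github.com/leanovate/gopter/prop"
)

func TestAdditionCommutes(t *testing.T) {
	properties := gopter.NewProperties(gopter.DefaultTestParameters())
	properties.Property("a+b == b+a", prop.ForAll(
		func(a, b int) bool { return a+b == b+a },
		gen.Int(),
		gen.Int(),
	))
	// On failure the shrunk counterexample arguments are reported.
	properties.TestingRun(t)
}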
+import ( + "errors" + "fmt" + "reflect" + "runtime/debug" + + "github.com/leanovate/gopter" +) + +func checkConditionFunc(check interface{}, numArgs int) (func([]reflect.Value) *gopter.PropResult, error) { + checkVal := reflect.ValueOf(check) + checkType := checkVal.Type() + + if checkType.Kind() != reflect.Func { + return nil, fmt.Errorf("First param of ForrAll has to be a func: %v", checkVal.Kind()) + } + if checkType.NumIn() != numArgs { + return nil, fmt.Errorf("Number of parameters does not match number of generators: %d != %d", checkType.NumIn(), numArgs) + } + if checkType.NumOut() == 0 { + return nil, errors.New("At least one output parameters is required") + } else if checkType.NumOut() > 2 { + return nil, fmt.Errorf("No more than 2 output parameters are allowed: %d", checkType.NumOut()) + } else if checkType.NumOut() == 2 && !checkType.Out(1).Implements(typeOfError) { + return nil, fmt.Errorf("No 2 output has to be error: %v", checkType.Out(1).Kind()) + } else if checkType.NumOut() == 2 { + return func(values []reflect.Value) *gopter.PropResult { + results := checkVal.Call(values) + if results[1].IsNil() { + return convertResult(results[0].Interface(), nil) + } + return convertResult(results[0].Interface(), results[1].Interface().(error)) + }, nil + } + return func(values []reflect.Value) (result *gopter.PropResult) { + defer func() { + if r := recover(); r != nil { + result = &gopter.PropResult{ + Status: gopter.PropError, + Error: fmt.Errorf("Check paniced: %v", r), + ErrorStack: debug.Stack(), + } + } + }() + results := checkVal.Call(values) + return convertResult(results[0].Interface(), nil) + }, nil +} diff --git a/vendor/github.com/leanovate/gopter/prop/convert_result.go b/vendor/github.com/leanovate/gopter/prop/convert_result.go new file mode 100644 index 0000000..12e38ce --- /dev/null +++ b/vendor/github.com/leanovate/gopter/prop/convert_result.go @@ -0,0 +1,37 @@ +package prop + +import ( + "fmt" + + "github.com/leanovate/gopter" +) + +func convertResult(result interface{}, err error) *gopter.PropResult { + if err != nil { + return &gopter.PropResult{ + Status: gopter.PropError, + Error: err, + } + } + switch result.(type) { + case bool: + if result.(bool) { + return &gopter.PropResult{Status: gopter.PropTrue} + } + return &gopter.PropResult{Status: gopter.PropFalse} + case string: + if result.(string) == "" { + return &gopter.PropResult{Status: gopter.PropTrue} + } + return &gopter.PropResult{ + Status: gopter.PropFalse, + Labels: []string{result.(string)}, + } + case *gopter.PropResult: + return result.(*gopter.PropResult) + } + return &gopter.PropResult{ + Status: gopter.PropError, + Error: fmt.Errorf("Invalid check result: %#v", result), + } +} diff --git a/vendor/github.com/leanovate/gopter/prop/doc.go b/vendor/github.com/leanovate/gopter/prop/doc.go new file mode 100644 index 0000000..256ab2e --- /dev/null +++ b/vendor/github.com/leanovate/gopter/prop/doc.go @@ -0,0 +1,4 @@ +/* +Package prop contains the most common implementations of a gopter.Prop. +*/ +package prop diff --git a/vendor/github.com/leanovate/gopter/prop/error.go b/vendor/github.com/leanovate/gopter/prop/error.go new file mode 100644 index 0000000..0c91d53 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/prop/error.go @@ -0,0 +1,14 @@ +package prop + +import "github.com/leanovate/gopter" + +// ErrorProp creates a property that will always fail with an error. 
+// Mostly used as a fallback when setup/initialization fails +func ErrorProp(err error) gopter.Prop { + return func(genParams *gopter.GenParameters) *gopter.PropResult { + return &gopter.PropResult{ + Status: gopter.PropError, + Error: err, + } + } +} diff --git a/vendor/github.com/leanovate/gopter/prop/forall.go b/vendor/github.com/leanovate/gopter/prop/forall.go new file mode 100644 index 0000000..00c6343 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/prop/forall.go @@ -0,0 +1,130 @@ +package prop + +import ( + "fmt" + "reflect" + + "github.com/leanovate/gopter" +) + +var typeOfError = reflect.TypeOf((*error)(nil)).Elem() + +/* +ForAll creates a property that requires the check condition to be true for all values, if the +condition falsiies the generated values will be shrunk. + +"condition" has to be a function with the same number of parameters as the provided +generators "gens". The function may return a simple bool (true means that the +condition has passed), a string (empty string means that condition has passed), +a *PropResult, or one of former combined with an error. +*/ +func ForAll(condition interface{}, gens ...gopter.Gen) gopter.Prop { + callCheck, err := checkConditionFunc(condition, len(gens)) + if err != nil { + return ErrorProp(err) + } + + return gopter.SaveProp(func(genParams *gopter.GenParameters) *gopter.PropResult { + genResults := make([]*gopter.GenResult, len(gens)) + values := make([]reflect.Value, len(gens)) + valuesFormated := make([]string, len(gens)) + var ok bool + for i, gen := range gens { + result := gen(genParams) + genResults[i] = result + values[i], ok = result.RetrieveAsValue() + if !ok { + return &gopter.PropResult{ + Status: gopter.PropUndecided, + } + } + valuesFormated[i] = fmt.Sprintf("%+v", values[i].Interface()) + } + result := callCheck(values) + if result.Success() { + for i, genResult := range genResults { + result = result.AddArgs(gopter.NewPropArg(genResult, 0, values[i].Interface(), valuesFormated[i], values[i].Interface(), valuesFormated[i])) + } + } else { + for i, genResult := range genResults { + nextResult, nextValue := shrinkValue(genParams.MaxShrinkCount, genResult, values[i].Interface(), valuesFormated[i], result, + func(v interface{}) *gopter.PropResult { + shrunkOne := make([]reflect.Value, len(values)) + copy(shrunkOne, values) + if v == nil { + shrunkOne[i] = reflect.Zero(values[i].Type()) + } else { + shrunkOne[i] = reflect.ValueOf(v) + } + return callCheck(shrunkOne) + }) + result = nextResult + if nextValue == nil { + values[i] = reflect.Zero(values[i].Type()) + } else { + values[i] = reflect.ValueOf(nextValue) + } + } + } + return result + }) +} + +// ForAll1 legacy interface to be removed in the future +func ForAll1(gen gopter.Gen, check func(v interface{}) (interface{}, error)) gopter.Prop { + checkFunc := func(v interface{}) *gopter.PropResult { + return convertResult(check(v)) + } + return gopter.SaveProp(func(genParams *gopter.GenParameters) *gopter.PropResult { + genResult := gen(genParams) + value, ok := genResult.Retrieve() + if !ok { + return &gopter.PropResult{ + Status: gopter.PropUndecided, + } + } + valueFormated := fmt.Sprintf("%+v", value) + result := checkFunc(value) + if result.Success() { + return result.AddArgs(gopter.NewPropArg(genResult, 0, value, valueFormated, value, valueFormated)) + } + + result, _ = shrinkValue(genParams.MaxShrinkCount, genResult, value, valueFormated, result, checkFunc) + return result + }) +} + +func shrinkValue(maxShrinkCount int, genResult *gopter.GenResult, origValue 
interface{}, orgiValueFormated string, + firstFail *gopter.PropResult, check func(interface{}) *gopter.PropResult) (*gopter.PropResult, interface{}) { + lastFail := firstFail + lastValue := origValue + lastValueFormated := orgiValueFormated + + shrinks := 0 + shrink := genResult.Shrinker(lastValue).Filter(genResult.Sieve) + nextResult, nextValue, nextValueFormated := firstFailure(shrink, check) + for nextResult != nil && shrinks < maxShrinkCount { + shrinks++ + lastValue = nextValue + lastValueFormated = nextValueFormated + lastFail = nextResult + + shrink = genResult.Shrinker(lastValue).Filter(genResult.Sieve) + nextResult, nextValue, nextValueFormated = firstFailure(shrink, check) + } + + return lastFail.WithArgs(firstFail.Args).AddArgs(gopter.NewPropArg(genResult, shrinks, lastValue, lastValueFormated, origValue, orgiValueFormated)), lastValue +} + +func firstFailure(shrink gopter.Shrink, check func(interface{}) *gopter.PropResult) (*gopter.PropResult, interface{}, string) { + value, ok := shrink() + for ok { + valueFormated := fmt.Sprintf("%+v", value) + result := check(value) + if !result.Success() { + return result, value, valueFormated + } + value, ok = shrink() + } + return nil, nil, "" +} diff --git a/vendor/github.com/leanovate/gopter/prop/forall_no_shrink.go b/vendor/github.com/leanovate/gopter/prop/forall_no_shrink.go new file mode 100644 index 0000000..affcd3b --- /dev/null +++ b/vendor/github.com/leanovate/gopter/prop/forall_no_shrink.go @@ -0,0 +1,63 @@ +package prop + +import ( + "fmt" + "reflect" + + "github.com/leanovate/gopter" +) + +/* +ForAllNoShrink creates a property that requires the check condition to be true for all values. +As the name suggests the generated values will not be shrunk if the condition falsiies. + +"condition" has to be a function with the same number of parameters as the provided +generators "gens". The function may return a simple bool (true means that the +condition has passed), a string (empty string means that condition has passed), +a *PropResult, or one of former combined with an error. 
+*/ +func ForAllNoShrink(condition interface{}, gens ...gopter.Gen) gopter.Prop { + callCheck, err := checkConditionFunc(condition, len(gens)) + if err != nil { + return ErrorProp(err) + } + + return gopter.SaveProp(func(genParams *gopter.GenParameters) *gopter.PropResult { + genResults := make([]*gopter.GenResult, len(gens)) + values := make([]reflect.Value, len(gens)) + valuesFormated := make([]string, len(gens)) + var ok bool + for i, gen := range gens { + result := gen(genParams) + genResults[i] = result + values[i], ok = result.RetrieveAsValue() + if !ok { + return &gopter.PropResult{ + Status: gopter.PropUndecided, + } + } + valuesFormated[i] = fmt.Sprintf("%+v", values[i].Interface()) + } + result := callCheck(values) + for i, genResult := range genResults { + result = result.AddArgs(gopter.NewPropArg(genResult, 0, values[i].Interface(), valuesFormated[i], values[i].Interface(), valuesFormated[i])) + } + return result + }) +} + +// ForAllNoShrink1 creates a property that requires the check condition to be true for all values +// As the name suggests the generated values will not be shrunk if the condition falsiies +func ForAllNoShrink1(gen gopter.Gen, check func(interface{}) (interface{}, error)) gopter.Prop { + return gopter.SaveProp(func(genParams *gopter.GenParameters) *gopter.PropResult { + genResult := gen(genParams) + value, ok := genResult.Retrieve() + if !ok { + return &gopter.PropResult{ + Status: gopter.PropUndecided, + } + } + valueFormated := fmt.Sprintf("%+v", value) + return convertResult(check(value)).AddArgs(gopter.NewPropArg(genResult, 0, value, valueFormated, value, valueFormated)) + }) +} diff --git a/vendor/github.com/leanovate/gopter/prop_arg.go b/vendor/github.com/leanovate/gopter/prop_arg.go new file mode 100644 index 0000000..cd7af6f --- /dev/null +++ b/vendor/github.com/leanovate/gopter/prop_arg.go @@ -0,0 +1,36 @@ +package gopter + +import ( + "fmt" + "strings" +) + +// PropArg contains information about the specific values for a certain property check. +// This is mostly used for reporting when a property has falsified. +type PropArg struct { + Arg interface{} + ArgFormatted string + OrigArg interface{} + OrigArgFormatted string + Label string + Shrinks int +} + +func (p *PropArg) String() string { + return fmt.Sprintf("%v", p.Arg) +} + +// PropArgs is a list of PropArg. +type PropArgs []*PropArg + +// NewPropArg creates a new PropArg. +func NewPropArg(genResult *GenResult, shrinks int, value interface{}, valueFormated string, origValue interface{}, origValueFormated string) *PropArg { + return &PropArg{ + Label: strings.Join(genResult.Labels, ", "), + Arg: value, + ArgFormatted: valueFormated, + OrigArg: origValue, + OrigArgFormatted: origValueFormated, + Shrinks: shrinks, + } +} diff --git a/vendor/github.com/leanovate/gopter/prop_result.go b/vendor/github.com/leanovate/gopter/prop_result.go new file mode 100644 index 0000000..0a8bbeb --- /dev/null +++ b/vendor/github.com/leanovate/gopter/prop_result.go @@ -0,0 +1,109 @@ +package gopter + +type propStatus int + +const ( + // PropProof THe property was proved (i.e. 
it is known to be correct and will be always true) + PropProof propStatus = iota + // PropTrue The property was true this time + PropTrue + // PropFalse The property was false this time + PropFalse + // PropUndecided The property has no clear outcome this time + PropUndecided + // PropError The property has generated an error + PropError +) + +func (s propStatus) String() string { + switch s { + case PropProof: + return "PROOF" + case PropTrue: + return "TRUE" + case PropFalse: + return "FALSE" + case PropUndecided: + return "UNDECIDED" + case PropError: + return "ERROR" + } + return "" +} + +// PropResult contains the result of a property +type PropResult struct { + Status propStatus + Error error + ErrorStack []byte + Args []*PropArg + Labels []string +} + +// NewPropResult create a PropResult with label +func NewPropResult(success bool, label string) *PropResult { + if success { + return &PropResult{ + Status: PropTrue, + Labels: []string{label}, + Args: make([]*PropArg, 0), + } + } + return &PropResult{ + Status: PropFalse, + Labels: []string{label}, + Args: make([]*PropArg, 0), + } +} + +// Success checks if the result was successful +func (r *PropResult) Success() bool { + return r.Status == PropTrue || r.Status == PropProof +} + +// WithArgs sets argument descriptors to the PropResult for reporting +func (r *PropResult) WithArgs(args []*PropArg) *PropResult { + r.Args = args + return r +} + +// AddArgs add argument descriptors to the PropResult for reporting +func (r *PropResult) AddArgs(args ...*PropArg) *PropResult { + r.Args = append(r.Args, args...) + return r +} + +// And combines two PropResult by an and operation. +// The resulting PropResult will be only true if both PropResults are true. +func (r *PropResult) And(other *PropResult) *PropResult { + switch { + case r.Status == PropError: + return r + case other.Status == PropError: + return other + case r.Status == PropFalse: + return r + case other.Status == PropFalse: + return other + case r.Status == PropUndecided: + return r + case other.Status == PropUndecided: + return other + case r.Status == PropProof: + return r.mergeWith(other, other.Status) + case other.Status == PropProof: + return r.mergeWith(other, r.Status) + case r.Status == PropTrue && other.Status == PropTrue: + return r.mergeWith(other, PropTrue) + default: + return r + } +} + +func (r *PropResult) mergeWith(other *PropResult, status propStatus) *PropResult { + return &PropResult{ + Status: status, + Args: append(append(make([]*PropArg, 0, len(r.Args)+len(other.Args)), r.Args...), other.Args...), + Labels: append(append(make([]string, 0, len(r.Labels)+len(other.Labels)), r.Labels...), other.Labels...), + } +} diff --git a/vendor/github.com/leanovate/gopter/properties.go b/vendor/github.com/leanovate/gopter/properties.go new file mode 100644 index 0000000..af82354 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/properties.go @@ -0,0 +1,59 @@ +package gopter + +import "testing" + +// Properties is a collection of properties that should be checked in a test +type Properties struct { + parameters *TestParameters + props map[string]Prop + propNames []string +} + +// NewProperties create new Properties with given test parameters. 
+// If parameters is nil default test parameters will be used +func NewProperties(parameters *TestParameters) *Properties { + if parameters == nil { + parameters = DefaultTestParameters() + } + return &Properties{ + parameters: parameters, + props: make(map[string]Prop, 0), + propNames: make([]string, 0), + } +} + +// Property add/defines a property in a test. +func (p *Properties) Property(name string, prop Prop) { + p.propNames = append(p.propNames, name) + p.props[name] = prop +} + +// Run checks all definied propertiesand reports the result +func (p *Properties) Run(reporter Reporter) bool { + success := true + for _, propName := range p.propNames { + prop := p.props[propName] + + result := prop.Check(p.parameters) + + reporter.ReportTestResult(propName, result) + if !result.Passed() { + success = false + } + } + return success +} + +// TestingRun checks all definied properties with a testing.T context. +// This the preferred wait to run property tests as part of a go unit test. +func (p *Properties) TestingRun(t *testing.T, opts ...interface{}) { + reporter := ConsoleReporter(true) + for _, opt := range opts { + if r, ok := opt.(Reporter); ok { + reporter = r + } + } + if !p.Run(reporter) { + t.Errorf("failed with initial seed: %d", p.parameters.Seed()) + } +} diff --git a/vendor/github.com/leanovate/gopter/reporter.go b/vendor/github.com/leanovate/gopter/reporter.go new file mode 100644 index 0000000..40655aa --- /dev/null +++ b/vendor/github.com/leanovate/gopter/reporter.go @@ -0,0 +1,7 @@ +package gopter + +// Reporter is a simple interface to report/format the results of a property check. +type Reporter interface { + // ReportTestResult reports a single property result + ReportTestResult(propName string, result *TestResult) +} diff --git a/vendor/github.com/leanovate/gopter/runner.go b/vendor/github.com/leanovate/gopter/runner.go new file mode 100644 index 0000000..2464f89 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/runner.go @@ -0,0 +1,77 @@ +package gopter + +import ( + "sync" + "time" +) + +type shouldStop func() bool + +type worker func(int, shouldStop) *TestResult + +type runner struct { + sync.RWMutex + parameters *TestParameters + worker worker +} + +func (r *runner) mergeCheckResults(r1, r2 *TestResult) *TestResult { + var result TestResult + + switch { + case r1 == nil: + return r2 + case r1.Status != TestPassed && r1.Status != TestExhausted: + result = *r1 + case r2.Status != TestPassed && r2.Status != TestExhausted: + result = *r2 + default: + result.Status = TestExhausted + + if r1.Succeeded+r2.Succeeded >= r.parameters.MinSuccessfulTests && + float64(r1.Discarded+r2.Discarded) <= float64(r1.Succeeded+r2.Succeeded)*r.parameters.MaxDiscardRatio { + result.Status = TestPassed + } + } + + result.Succeeded = r1.Succeeded + r2.Succeeded + result.Discarded = r1.Discarded + r2.Discarded + + return &result +} + +func (r *runner) runWorkers() *TestResult { + var stopFlag Flag + defer stopFlag.Set() + + start := time.Now() + if r.parameters.Workers < 2 { + result := r.worker(0, stopFlag.Get) + result.Time = time.Since(start) + return result + } + var waitGroup sync.WaitGroup + waitGroup.Add(r.parameters.Workers) + results := make(chan *TestResult, r.parameters.Workers) + combinedResult := make(chan *TestResult) + + go func() { + var combined *TestResult + for result := range results { + combined = r.mergeCheckResults(combined, result) + } + combinedResult <- combined + }() + for i := 0; i < r.parameters.Workers; i++ { + go func(workerIdx int) { + defer 
waitGroup.Done() + results <- r.worker(workerIdx, stopFlag.Get) + }(i) + } + waitGroup.Wait() + close(results) + + result := <-combinedResult + result.Time = time.Since(start) + return result +} diff --git a/vendor/github.com/leanovate/gopter/shrink.go b/vendor/github.com/leanovate/gopter/shrink.go new file mode 100644 index 0000000..ebc3858 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/shrink.go @@ -0,0 +1,185 @@ +package gopter + +import ( + "fmt" + "reflect" +) + +// Shrink is a stream of shrunk down values. +// Once the result of a shrink is false, it is considered to be exhausted. +// Important notes for implementors: +// - Ensure that the returned stream is finite, even though shrinking will +// eventually be aborted, infinite streams may result in very slow running +// test. +// - Ensure that modifications to the returned value will not affect the +// internal state of your Shrink. If in doubt return by value not by reference +type Shrink func() (interface{}, bool) + +// Filter creates a shrink filtered by a condition +func (s Shrink) Filter(condition func(interface{}) bool) Shrink { + if condition == nil { + return s + } + return func() (interface{}, bool) { + value, ok := s() + for ok && !condition(value) { + value, ok = s() + } + return value, ok + } +} + +// Map creates a shrink by applying a converter to each element of a shrink. +// f: has to be a function with one parameter (matching the generated value) and a single return. +func (s Shrink) Map(f interface{}) Shrink { + mapperVal := reflect.ValueOf(f) + mapperType := mapperVal.Type() + + if mapperVal.Kind() != reflect.Func { + panic(fmt.Sprintf("Param of Map has to be a func, but is %v", mapperType.Kind())) + } + if mapperType.NumIn() != 1 { + panic(fmt.Sprintf("Param of Map has to be a func with one param, but is %v", mapperType.NumIn())) + } + if mapperType.NumOut() != 1 { + panic(fmt.Sprintf("Param of Map has to be a func with one return value, but is %v", mapperType.NumOut())) + } + + return func() (interface{}, bool) { + value, ok := s() + if ok { + return mapperVal.Call([]reflect.Value{reflect.ValueOf(value)})[0].Interface(), ok + } + return nil, false + } +} + +// All collects all shrinks as a slice. 
Use with care as this might create +// large results depending on the complexity of the shrink +func (s Shrink) All() []interface{} { + result := []interface{}{} + value, ok := s() + for ok { + result = append(result, value) + value, ok = s() + } + return result +} + +type concatedShrink struct { + index int + shrinks []Shrink +} + +func (c *concatedShrink) Next() (interface{}, bool) { + for c.index < len(c.shrinks) { + value, ok := c.shrinks[c.index]() + if ok { + return value, ok + } + c.index++ + } + return nil, false +} + +// ConcatShrinks concats an array of shrinks to a single shrinks +func ConcatShrinks(shrinks ...Shrink) Shrink { + concated := &concatedShrink{ + index: 0, + shrinks: shrinks, + } + return concated.Next +} + +type interleaved struct { + first Shrink + second Shrink + firstExhausted bool + secondExhaused bool + state bool +} + +func (i *interleaved) Next() (interface{}, bool) { + for !i.firstExhausted || !i.secondExhaused { + i.state = !i.state + if i.state && !i.firstExhausted { + value, ok := i.first() + if ok { + return value, true + } + i.firstExhausted = true + } else if !i.state && !i.secondExhaused { + value, ok := i.second() + if ok { + return value, true + } + i.secondExhaused = true + } + } + return nil, false +} + +// Interleave this shrink with another +// Both shrinks are expected to produce the same result +func (s Shrink) Interleave(other Shrink) Shrink { + interleaved := &interleaved{ + first: s, + second: other, + } + return interleaved.Next +} + +// Shrinker creates a shrink for a given value +type Shrinker func(value interface{}) Shrink + +type elementShrink struct { + original []interface{} + index int + elementShrink Shrink +} + +func (e *elementShrink) Next() (interface{}, bool) { + element, ok := e.elementShrink() + if !ok { + return nil, false + } + shrunk := make([]interface{}, len(e.original)) + copy(shrunk, e.original) + shrunk[e.index] = element + + return shrunk, true +} + +// CombineShrinker create a shrinker by combining a list of shrinkers. +// The resulting shrinker will shrink an []interface{} where each element will be shrunk by +// the corresonding shrinker in 'shrinkers'. +// This method is implicitly used by CombineGens. +func CombineShrinker(shrinkers ...Shrinker) Shrinker { + return func(v interface{}) Shrink { + values := v.([]interface{}) + shrinks := make([]Shrink, 0, len(values)) + for i, shrinker := range shrinkers { + if i >= len(values) { + break + } + shrink := &elementShrink{ + original: values, + index: i, + elementShrink: shrinker(values[i]), + } + shrinks = append(shrinks, shrink.Next) + } + return ConcatShrinks(shrinks...) + } +} + +// NoShrink is an empty shrink. +var NoShrink = Shrink(func() (interface{}, bool) { + return nil, false +}) + +// NoShrinker is a shrinker for NoShrink, i.e. a Shrinker that will not shrink any values. +// This is the default Shrinker if none is provided. 
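As a small illustration of the Shrink contract above (a finite stream returning (value, ok) until exhausted), the following self-contained sketch builds a hand-rolled shrink and uses the Filter and All combinators; candidatesShrink and the candidate values are purely illustrative, not part of gopter:

    package main

    import (
        "fmt"

        "github.com/leanovate/gopter"
    )

    // candidatesShrink builds a finite Shrink over a fixed candidate list
    // (illustrative only; real shrinkers derive candidates from the failing value).
    func candidatesShrink(candidates []interface{}) gopter.Shrink {
        i := 0
        return func() (interface{}, bool) {
            if i >= len(candidates) {
                return nil, false // stream exhausted
            }
            v := candidates[i]
            i++
            return v, true
        }
    }

    func main() {
        shrink := candidatesShrink([]interface{}{8, 4, 2, 1, 0})
        // Keep only even candidates, then drain the (finite) stream into a slice.
        evens := shrink.Filter(func(v interface{}) bool { return v.(int)%2 == 0 }).All()
        fmt.Println(evens) // [8 4 2 0]
    }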
+var NoShrinker = Shrinker(func(value interface{}) Shrink { + return NoShrink +}) diff --git a/vendor/github.com/leanovate/gopter/test_parameters.go b/vendor/github.com/leanovate/gopter/test_parameters.go new file mode 100644 index 0000000..5234c59 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/test_parameters.go @@ -0,0 +1,48 @@ +package gopter + +import ( + "math/rand" + "time" +) + +// TestParameters to run property tests +type TestParameters struct { + MinSuccessfulTests int + // MinSize is an (inclusive) lower limit on the size of the parameters + MinSize int + // MaxSize is an (exclusive) upper limit on the size of the parameters + MaxSize int + MaxShrinkCount int + seed int64 + Rng *rand.Rand + Workers int + MaxDiscardRatio float64 +} + +func (t *TestParameters) Seed() int64 { + return t.seed +} + +func (t *TestParameters) SetSeed(seed int64) { + t.seed = seed + t.Rng.Seed(seed) +} + +// DefaultTestParameterWithSeeds creates reasonable default Parameters for most cases based on a fixed RNG-seed +func DefaultTestParametersWithSeed(seed int64) *TestParameters { + return &TestParameters{ + MinSuccessfulTests: 100, + MinSize: 0, + MaxSize: 100, + MaxShrinkCount: 1000, + seed: seed, + Rng: rand.New(NewLockedSource(seed)), + Workers: 1, + MaxDiscardRatio: 5, + } +} + +// DefaultTestParameterWithSeeds creates reasonable default Parameters for most cases with an undefined RNG-seed +func DefaultTestParameters() *TestParameters { + return DefaultTestParametersWithSeed(time.Now().UnixNano()) +} diff --git a/vendor/github.com/leanovate/gopter/test_result.go b/vendor/github.com/leanovate/gopter/test_result.go new file mode 100644 index 0000000..ce4ac41 --- /dev/null +++ b/vendor/github.com/leanovate/gopter/test_result.go @@ -0,0 +1,52 @@ +package gopter + +import "time" + +type testStatus int + +const ( + // TestPassed indicates that the property check has passed. + TestPassed testStatus = iota + // TestProved indicates that the property has been proved. + TestProved + // TestFailed indicates that the property check has failed. + TestFailed + // TestExhausted indicates that the property check has exhausted, i.e. the generators have + // generated too many empty results. + TestExhausted + // TestError indicates that the property check has finished with an error. + TestError +) + +func (s testStatus) String() string { + switch s { + case TestPassed: + return "PASSED" + case TestProved: + return "PROVED" + case TestFailed: + return "FAILED" + case TestExhausted: + return "EXHAUSTED" + case TestError: + return "ERROR" + } + return "" +} + +// TestResult contains the result of a property property check. 
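For reproducibility, the TestParameters defined below can be pinned to a fixed RNG seed instead of the default time-based one; a short sketch under that assumption (the seed value and test name are placeholders):

    package example_test

    import (
        "testing"

        "github.com/leanovate/gopter"
    )

    func TestWithFixedSeed(t *testing.T) {
        // A fixed seed makes a failing run reproducible; this is the same seed value
        // that Properties.TestingRun reports when a property fails.
        parameters := gopter.DefaultTestParametersWithSeed(1234)
        parameters.MinSuccessfulTests = 500 // tighten the default of 100

        properties := gopter.NewProperties(parameters)
        // register properties here as in the ForAll example above
        properties.TestingRun(t)
    }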
+type TestResult struct { + Status testStatus + Succeeded int + Discarded int + Labels []string + Error error + ErrorStack []byte + Args PropArgs + Time time.Duration +} + +// Passed checks if the check has passed +func (r *TestResult) Passed() bool { + return r.Status == TestPassed || r.Status == TestProved +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 9786230..f6cf25e 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -8,6 +8,11 @@ github.com/golang/freetype/truetype # github.com/google/uuid v1.6.0 ## explicit github.com/google/uuid +# github.com/leanovate/gopter v0.2.11 +## explicit; go 1.12 +github.com/leanovate/gopter +github.com/leanovate/gopter/gen +github.com/leanovate/gopter/prop # golang.org/x/image v0.18.0 ## explicit; go 1.18 golang.org/x/image/draw From 8fbe1b1251f49ba490e4852ec3c063e9c131b018 Mon Sep 17 00:00:00 2001 From: Daniel Apatin Date: Mon, 15 Dec 2025 18:52:31 +0300 Subject: [PATCH 2/2] add multiaccount --- .gitignore | 2 +- config.json | 2 +- handlers/auth.go | 491 ++++- handlers/cameras.go | 38 +- handlers/const.go | 50 + handlers/factory.go | 79 + handlers/finances.go | 37 +- handlers/handlers.go | 26 + handlers/places.go | 40 +- main.go | 140 +- multiconfig/account.go | 397 ++++ multiconfig/account_test.go | 2995 +++++++++++++++++++++++++++++++ multiconfig/aggregator.go | 381 ++++ multiconfig/data_merger.go | 348 ++++ multiconfig/data_merger_test.go | 937 ++++++++++ multiconfig/interfaces.go | 64 + multiconfig/legacy_adapter.go | 95 + multiconfig/migration.go | 202 +++ multiconfig/test_factory.go | 82 + templates/accounts.html | 51 +- templates/home.html | 264 ++- templates/login.html | 16 +- 22 files changed, 6637 insertions(+), 100 deletions(-) create mode 100644 handlers/factory.go create mode 100644 multiconfig/account.go create mode 100644 multiconfig/account_test.go create mode 100644 multiconfig/aggregator.go create mode 100644 multiconfig/data_merger.go create mode 100644 multiconfig/data_merger_test.go create mode 100644 multiconfig/interfaces.go create mode 100644 multiconfig/legacy_adapter.go create mode 100644 multiconfig/migration.go create mode 100644 multiconfig/test_factory.go diff --git a/.gitignore b/.gitignore index c20e8da..37248f1 100644 --- a/.gitignore +++ b/.gitignore @@ -13,7 +13,7 @@ # Dependency directories (remove the comment below to include it) # vendor/ -domru +/domru deploy.sh .DS_Store diff --git a/config.json b/config.json index d51ee02..b82a81b 100644 --- a/config.json +++ b/config.json @@ -1,6 +1,6 @@ { "name": "Domofon", - "version": "0.3.6", + "version": "0.4.0", "slug": "domofon", "description": "", "startup": "application", diff --git a/handlers/auth.go b/handlers/auth.go index 86b8ec7..7eae895 100644 --- a/handlers/auth.go +++ b/handlers/auth.go @@ -9,7 +9,10 @@ import ( "log" "net/http" "strconv" + "strings" "time" + + "github.com/ad/domru/multiconfig" ) // LoginHandler ... 
@@ -173,9 +176,21 @@ func (h *Handler) HomeHandler(w http.ResponseWriter, r *http.Request) { ingressPath := r.Header.Get("X-Ingress-Path") // log.Println(r.Method, "/", ingressPath) - if h.Config.Token == "" || h.Config.RefreshToken == "" { - http.Redirect(w, r, ingressPath+"/login", http.StatusSeeOther) - return + // Check if we have multi-account support + isMultiAccount := h.MultiAccountAggregator != nil + + // For multi-account, check if we have any accounts + if isMultiAccount { + if !h.LegacyAdapter.HasPrimaryAccount() { + http.Redirect(w, r, ingressPath+"/login", http.StatusSeeOther) + return + } + } else { + // Legacy single-account check + if h.Config.Token == "" || h.Config.RefreshToken == "" { + http.Redirect(w, r, ingressPath+"/login", http.StatusSeeOther) + return + } } w.Header().Set("Content-Type", "text/html") @@ -189,32 +204,108 @@ func (h *Handler) HomeHandler(w http.ResponseWriter, r *http.Request) { } var cameras Cameras + var places Places + var finances *Finances + + // Multi-account data + var multiAccountCameras []MultiAccountCamera + var multiAccountPlaces []MultiAccountPlace + var multiAccountFinances []MultiAccountFinance + + if isMultiAccount { + // Get aggregated data from all accounts + dataMerger := h.MultiAccountAggregator.GetDataMerger() + + // Get cameras from all accounts + if aggCameras, err := dataMerger.GetAggregatedCameras(); err == nil { + for _, camera := range aggCameras { + multiAccountCameras = append(multiAccountCameras, MultiAccountCamera{ + ID: camera.ID, + Name: camera.Name, + IsActive: camera.IsActive, + AccountID: camera.AccountID, + AccountName: camera.AccountName, + }) + } + } else { + loginError = "multi-account cameras error: " + err.Error() + } + + // Get places from all accounts + if aggPlaces, err := dataMerger.GetAggregatedPlaces(); err == nil { + for _, place := range aggPlaces { + multiAccountPlaces = append(multiAccountPlaces, MultiAccountPlace{ + ID: place.ID, + Place: place.Place, + Subscriber: place.Subscriber, + Blocked: place.Blocked, + AccountID: place.AccountID, + AccountName: place.AccountName, + }) + } + } else { + loginError = "multi-account places error: " + err.Error() + } + // Get finances from all accounts + if aggFinances, err := dataMerger.GetAggregatedFinances(); err == nil { + for _, finance := range aggFinances { + multiAccountFinances = append(multiAccountFinances, MultiAccountFinance{ + Balance: finance.Balance, + BlockType: finance.BlockType, + AmountSum: finance.AmountSum, + TargetDate: finance.TargetDate, + PaymentLink: finance.PaymentLink, + Blocked: finance.Blocked, + AccountID: finance.AccountID, + AccountName: finance.AccountName, + }) + } + } else { + loginError = "multi-account finances error: " + err.Error() + } + + // For backward compatibility, also populate single-account data from primary account + if primaryConfig, err := h.LegacyAdapter.GetPrimaryConfig(); err == nil { + h.Config = primaryConfig // Update current config + } + } + + // Get single-account data (for backward compatibility or single-account mode) if loginError == "" { camerasData, err := h.Cameras() if err != nil { - loginError = "cameras (" + camerasData + ") got " + err.Error() + if !isMultiAccount { + loginError = "cameras (" + camerasData + ") got " + err.Error() + } } else { if err := json.Unmarshal([]byte(camerasData), &cameras); err != nil { - loginError = "cameras (" + camerasData + ") Unmarshal got " + err.Error() + if !isMultiAccount { + loginError = "cameras (" + camerasData + ") Unmarshal got " + err.Error() + } } } } 
- var places Places - if loginError == "" { placesData, err := h.Places() if err != nil { - loginError = "places (" + placesData + ") got " + err.Error() + if !isMultiAccount { + loginError = "places (" + placesData + ") got " + err.Error() + } } else { if err := json.Unmarshal([]byte(placesData), &places); err != nil { - loginError = "places (" + placesData + ") Unmarshal got " + err.Error() + if !isMultiAccount { + loginError = "places (" + placesData + ") Unmarshal got " + err.Error() + } } } } - finances, _ := h.GetFinances() + finances, _ = h.GetFinances() + if finances == nil { + finances = &Finances{} + } data := HomePageData{ HassioIngress: ingressPath, @@ -227,6 +318,12 @@ func (h *Handler) HomeHandler(w http.ResponseWriter, r *http.Request) { Cameras: cameras, Places: places, Finances: *finances, + + // Multi-account data + MultiAccountCameras: multiAccountCameras, + MultiAccountPlaces: multiAccountPlaces, + MultiAccountFinances: multiAccountFinances, + IsMultiAccount: isMultiAccount, } var tmpl []byte @@ -485,6 +582,380 @@ func (h *Handler) AccountsHandler(w http.ResponseWriter, r *http.Request) { } } +// MultiAccountsHandler handles listing all accounts in multi-account mode +func (h *Handler) MultiAccountsHandler(w http.ResponseWriter, r *http.Request) { + // Only work in multi-account mode + if h.MultiAccountAggregator == nil { + http.Error(w, "Multi-account mode not enabled", http.StatusServiceUnavailable) + return + } + + switch r.Method { + case http.MethodGet: + h.handleListAccounts(w, r) + case http.MethodPost: + h.handleAddAccount(w, r) + default: + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + } +} + +// AccountByIDHandler handles operations on specific accounts +func (h *Handler) AccountByIDHandler(w http.ResponseWriter, r *http.Request) { + // Only work in multi-account mode + if h.MultiAccountAggregator == nil { + http.Error(w, "Multi-account mode not enabled", http.StatusServiceUnavailable) + return + } + + // Extract account ID from URL path + path := r.URL.Path + // Expected format: /accounts/{id} or /accounts/{id}/action + parts := strings.Split(strings.Trim(path, "/"), "/") + if len(parts) < 2 || parts[0] != "accounts" { + http.Error(w, "Invalid URL format", http.StatusBadRequest) + return + } + + accountID := parts[1] + if accountID == "" { + http.Error(w, "Account ID is required", http.StatusBadRequest) + return + } + + // Check for specific actions + if len(parts) >= 3 { + action := parts[2] + switch action { + case "enable": + h.handleEnableAccount(w, r, accountID) + case "disable": + h.handleDisableAccount(w, r, accountID) + default: + http.Error(w, "Unknown action", http.StatusBadRequest) + } + return + } + + // Handle basic CRUD operations + switch r.Method { + case http.MethodGet: + h.handleGetAccount(w, r, accountID) + case http.MethodPut: + h.handleUpdateAccount(w, r, accountID) + case http.MethodDelete: + h.handleDeleteAccount(w, r, accountID) + default: + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + } +} + +// handleListAccounts returns all accounts +func (h *Handler) handleListAccounts(w http.ResponseWriter, r *http.Request) { + accounts := h.MultiAccountAggregator.GetAccounts() + + // Convert to response format (without sensitive tokens) + type AccountResponse struct { + ID string `json:"id"` + Name string `json:"name"` + Login int `json:"login"` + UUID string `json:"uuid"` + Enabled bool `json:"enabled"` + } + + response := make([]AccountResponse, 0, len(accounts)) + for _, account := range accounts { + 
response = append(response, AccountResponse{ + ID: account.ID, + Name: account.Name, + Login: account.Login, + UUID: account.UUID, + Enabled: account.Enabled, + }) + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(response); err != nil { + log.Printf("Failed to encode accounts response: %v", err) + http.Error(w, "Internal server error", http.StatusInternalServerError) + } +} + +// handleAddAccount adds a new account +func (h *Handler) handleAddAccount(w http.ResponseWriter, r *http.Request) { + type AddAccountRequest struct { + Name string `json:"name"` + Login int `json:"login"` + Token string `json:"token"` + RefreshToken string `json:"refresh_token"` + UUID string `json:"uuid"` + Operator int `json:"operator"` + Enabled bool `json:"enabled"` + } + + var req AddAccountRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "Invalid JSON", http.StatusBadRequest) + return + } + + // Validate required fields + if req.Login == 0 { + http.Error(w, "Login is required", http.StatusBadRequest) + return + } + if req.UUID == "" { + http.Error(w, "UUID is required", http.StatusBadRequest) + return + } + + // Create new account + account := &multiconfig.AccountConfig{ + Name: req.Name, + Login: req.Login, + Token: req.Token, + RefreshToken: req.RefreshToken, + UUID: req.UUID, + Operator: req.Operator, + Enabled: req.Enabled, + } + + // Add account + if err := h.MultiAccountAggregator.AddAccount(account); err != nil { + log.Printf("Failed to add account: %v", err) + http.Error(w, "Failed to add account", http.StatusInternalServerError) + return + } + + // Return created account (without sensitive tokens) + type AccountResponse struct { + ID string `json:"id"` + Name string `json:"name"` + Login int `json:"login"` + UUID string `json:"uuid"` + Enabled bool `json:"enabled"` + } + + response := AccountResponse{ + ID: account.ID, + Name: account.Name, + Login: account.Login, + UUID: account.UUID, + Enabled: account.Enabled, + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + if err := json.NewEncoder(w).Encode(response); err != nil { + log.Printf("Failed to encode account response: %v", err) + } +} + +// handleGetAccount returns a specific account +func (h *Handler) handleGetAccount(w http.ResponseWriter, r *http.Request, accountID string) { + account, err := h.MultiAccountAggregator.GetAccount(accountID) + if err != nil { + http.Error(w, "Account not found", http.StatusNotFound) + return + } + + // Return account (without sensitive tokens) + type AccountResponse struct { + ID string `json:"id"` + Name string `json:"name"` + Login int `json:"login"` + UUID string `json:"uuid"` + Enabled bool `json:"enabled"` + } + + response := AccountResponse{ + ID: account.ID, + Name: account.Name, + Login: account.Login, + UUID: account.UUID, + Enabled: account.Enabled, + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(response); err != nil { + log.Printf("Failed to encode account response: %v", err) + http.Error(w, "Internal server error", http.StatusInternalServerError) + } +} + +// handleUpdateAccount updates an existing account +func (h *Handler) handleUpdateAccount(w http.ResponseWriter, r *http.Request, accountID string) { + // Check if account exists + existingAccount, err := h.MultiAccountAggregator.GetAccount(accountID) + if err != nil { + http.Error(w, "Account not found", http.StatusNotFound) + return + } + + type UpdateAccountRequest 
struct { + Name *string `json:"name,omitempty"` + Token *string `json:"token,omitempty"` + RefreshToken *string `json:"refresh_token,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + } + + var req UpdateAccountRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "Invalid JSON", http.StatusBadRequest) + return + } + + // Update fields if provided + updated := false + if req.Name != nil && *req.Name != existingAccount.Name { + // Update name by removing and re-adding account with new name + updatedAccount := *existingAccount + updatedAccount.Name = *req.Name + + if err := h.MultiAccountAggregator.RemoveAccount(accountID); err != nil { + log.Printf("Failed to remove account for name update: %v", err) + http.Error(w, "Failed to update account", http.StatusInternalServerError) + return + } + + if err := h.MultiAccountAggregator.AddAccount(&updatedAccount); err != nil { + log.Printf("Failed to re-add account after name update: %v", err) + http.Error(w, "Failed to update account", http.StatusInternalServerError) + return + } + updated = true + } + + if req.Token != nil || req.RefreshToken != nil { + token := existingAccount.Token + refreshToken := existingAccount.RefreshToken + + if req.Token != nil { + token = *req.Token + } + if req.RefreshToken != nil { + refreshToken = *req.RefreshToken + } + + if err := h.MultiAccountAggregator.UpdateAccountTokens(accountID, token, refreshToken); err != nil { + log.Printf("Failed to update account tokens: %v", err) + http.Error(w, "Failed to update account tokens", http.StatusInternalServerError) + return + } + updated = true + } + + if req.Enabled != nil { + if *req.Enabled != existingAccount.Enabled { + if *req.Enabled { + if err := h.MultiAccountAggregator.EnableAccount(accountID); err != nil { + log.Printf("Failed to enable account: %v", err) + http.Error(w, "Failed to enable account", http.StatusInternalServerError) + return + } + } else { + if err := h.MultiAccountAggregator.DisableAccount(accountID); err != nil { + log.Printf("Failed to disable account: %v", err) + http.Error(w, "Failed to disable account", http.StatusInternalServerError) + return + } + } + updated = true + } + } + + if !updated { + http.Error(w, "No changes provided", http.StatusBadRequest) + return + } + + // Return updated account + updatedAccount, err := h.MultiAccountAggregator.GetAccount(accountID) + if err != nil { + log.Printf("Failed to get updated account: %v", err) + http.Error(w, "Internal server error", http.StatusInternalServerError) + return + } + + type AccountResponse struct { + ID string `json:"id"` + Name string `json:"name"` + Login int `json:"login"` + UUID string `json:"uuid"` + Enabled bool `json:"enabled"` + } + + response := AccountResponse{ + ID: updatedAccount.ID, + Name: updatedAccount.Name, + Login: updatedAccount.Login, + UUID: updatedAccount.UUID, + Enabled: updatedAccount.Enabled, + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(response); err != nil { + log.Printf("Failed to encode account response: %v", err) + } +} + +// handleDeleteAccount deletes an account +func (h *Handler) handleDeleteAccount(w http.ResponseWriter, r *http.Request, accountID string) { + if err := h.MultiAccountAggregator.RemoveAccount(accountID); err != nil { + if strings.Contains(err.Error(), "not found") { + http.Error(w, "Account not found", http.StatusNotFound) + } else { + log.Printf("Failed to delete account: %v", err) + http.Error(w, "Failed to delete account", 
http.StatusInternalServerError) + } + return + } + + w.WriteHeader(http.StatusNoContent) +} + +// handleEnableAccount enables an account +func (h *Handler) handleEnableAccount(w http.ResponseWriter, r *http.Request, accountID string) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + if err := h.MultiAccountAggregator.EnableAccount(accountID); err != nil { + if strings.Contains(err.Error(), "not found") { + http.Error(w, "Account not found", http.StatusNotFound) + } else { + log.Printf("Failed to enable account: %v", err) + http.Error(w, "Failed to enable account", http.StatusInternalServerError) + } + return + } + + w.WriteHeader(http.StatusOK) + w.Write([]byte(`{"status": "enabled"}`)) +} + +// handleDisableAccount disables an account +func (h *Handler) handleDisableAccount(w http.ResponseWriter, r *http.Request, accountID string) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + if err := h.MultiAccountAggregator.DisableAccount(accountID); err != nil { + if strings.Contains(err.Error(), "not found") { + http.Error(w, "Account not found", http.StatusNotFound) + } else { + log.Printf("Failed to disable account: %v", err) + http.Error(w, "Failed to disable account", http.StatusInternalServerError) + } + return + } + + w.WriteHeader(http.StatusOK) + w.Write([]byte(`{"status": "disabled"}`)) +} + // LoginSMSHandler ... func (h *Handler) LoginSMSHandler(w http.ResponseWriter, r *http.Request) { // log.Println("/sms") diff --git a/handlers/cameras.go b/handlers/cameras.go index 6ced649..a8f0f67 100644 --- a/handlers/cameras.go +++ b/handlers/cameras.go @@ -2,6 +2,7 @@ package handlers import ( "context" + "encoding/json" "log" "net/http" "strconv" @@ -68,14 +69,37 @@ func (h *Handler) Cameras() (string, error) { func (h *Handler) CamerasHandler(w http.ResponseWriter, r *http.Request) { // log.Println("/camerasHandler") - data, err := h.Cameras() - if err != nil { - log.Println("camerasHandler", err.Error()) - } - w.Header().Set("Content-Type", "application/json") - if _, err := w.Write([]byte(data)); err != nil { - log.Println("camerasHandler", err.Error()) + // Check if we have multi-account support + if h.MultiAccountAggregator != nil { + // Multi-account mode: return aggregated cameras from all accounts + dataMerger := h.MultiAccountAggregator.GetDataMerger() + cameras, err := dataMerger.GetAggregatedCameras() + if err != nil { + log.Println("multi-account camerasHandler", err.Error()) + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + // Convert to JSON and return + if jsonData, err := json.Marshal(cameras); err != nil { + log.Println("camerasHandler JSON marshal error", err.Error()) + http.Error(w, err.Error(), http.StatusInternalServerError) + } else { + if _, err := w.Write(jsonData); err != nil { + log.Println("camerasHandler write error", err.Error()) + } + } + } else { + // Legacy single-account mode + data, err := h.Cameras() + if err != nil { + log.Println("camerasHandler", err.Error()) + } + + if _, err := w.Write([]byte(data)); err != nil { + log.Println("camerasHandler", err.Error()) + } } } diff --git a/handlers/const.go b/handlers/const.go index 4171890..b3f957d 100644 --- a/handlers/const.go +++ b/handlers/const.go @@ -117,6 +117,56 @@ type HomePageData struct { Cameras Cameras Places Places Finances Finances + + // Multi-account data + MultiAccountCameras []MultiAccountCamera 
`json:"multi_account_cameras,omitempty"` + MultiAccountPlaces []MultiAccountPlace `json:"multi_account_places,omitempty"` + MultiAccountFinances []MultiAccountFinance `json:"multi_account_finances,omitempty"` + IsMultiAccount bool `json:"is_multi_account"` +} + +// MultiAccountCamera represents a camera with account information for templates +type MultiAccountCamera struct { + ID int `json:"ID"` + Name string `json:"Name"` + IsActive int `json:"IsActive"` + AccountID string `json:"account_id"` + AccountName string `json:"account_name"` +} + +// MultiAccountPlace represents a place with account information for templates +type MultiAccountPlace struct { + ID int `json:"id"` + Place struct { + ID int `json:"id"` + Address struct { + VisibleAddress string `json:"visibleAddress"` + } `json:"address"` + AccessControls []struct { + ID int `json:"id"` + Name string `json:"name"` + } `json:"accessControls"` + } `json:"place"` + Subscriber struct { + ID int `json:"id"` + Name string `json:"name"` + AccountID string `json:"accountId"` + } `json:"subscriber"` + Blocked bool `json:"blocked"` + AccountID string `json:"account_id"` + AccountName string `json:"account_name"` +} + +// MultiAccountFinance represents finances with account information for templates +type MultiAccountFinance struct { + Balance float64 `json:"balance"` + BlockType string `json:"blockType"` + AmountSum float64 `json:"amountSum"` + TargetDate string `json:"targetDate"` + PaymentLink string `json:"paymentLink"` + Blocked bool `json:"blocked"` + AccountID string `json:"account_id"` + AccountName string `json:"account_name"` } type HAConfig struct { diff --git a/handlers/factory.go b/handlers/factory.go new file mode 100644 index 0000000..f50fa61 --- /dev/null +++ b/handlers/factory.go @@ -0,0 +1,79 @@ +package handlers + +import ( + "embed" + + "github.com/ad/domru/config" + "github.com/ad/domru/multiconfig" +) + +// HandlerFactory implements multiconfig.HandlerFactory +type HandlerFactory struct{} + +// NewHandlerFactory creates a new handler factory +func NewHandlerFactory() *HandlerFactory { + return &HandlerFactory{} +} + +// CreateHandler creates an API handler for an account +func (f *HandlerFactory) CreateHandler(accountConfig *multiconfig.AccountConfig, templateFs embed.FS) multiconfig.APIHandler { + // Convert AccountConfig to legacy Config for handlers + legacyConfig := &config.Config{ + Token: accountConfig.Token, + RefreshToken: accountConfig.RefreshToken, + Login: accountConfig.Login, + Operator: accountConfig.Operator, + UUID: accountConfig.UUID, + Port: 18000, // Default port + } + + // Create handler with the legacy config + handler := NewHandlers(legacyConfig, templateFs) + + // Return a wrapper that implements APIHandler + return &HandlerAPIWrapper{handler: handler} +} + +// HandlerAPIWrapper wraps Handler to implement multiconfig.APIHandler +type HandlerAPIWrapper struct { + handler *Handler +} + +// Cameras implements multiconfig.APIHandler +func (w *HandlerAPIWrapper) Cameras() (string, error) { + return w.handler.Cameras() +} + +// Places implements multiconfig.APIHandler +func (w *HandlerAPIWrapper) Places() (string, error) { + return w.handler.Places() +} + +// GetFinances implements multiconfig.APIHandler +func (w *HandlerAPIWrapper) GetFinances() (*multiconfig.FinanceData, error) { + finances, err := w.handler.GetFinances() + if err != nil { + return nil, err + } + + // Convert to multiconfig.FinanceData + return &multiconfig.FinanceData{ + Balance: finances.Balance, + BlockType: finances.BlockType, + 
AmountSum: finances.AmountSum, + TargetDate: finances.TargetDate, + PaymentLink: finances.PaymentLink, + Blocked: finances.Blocked, + }, nil +} + +// UpdateTokens implements multiconfig.APIHandler +func (w *HandlerAPIWrapper) UpdateTokens(token, refreshToken string) { + w.handler.Config.Token = token + w.handler.Config.RefreshToken = refreshToken +} + +// GetHandler returns the underlying handler for backward compatibility +func (w *HandlerAPIWrapper) GetHandler() *Handler { + return w.handler +} \ No newline at end of file diff --git a/handlers/finances.go b/handlers/finances.go index 14b54c5..4768d6f 100644 --- a/handlers/finances.go +++ b/handlers/finances.go @@ -75,15 +75,38 @@ func (h *Handler) Finances() ([]byte, error) { func (h *Handler) FinancesHandler(w http.ResponseWriter, r *http.Request) { // log.Println("/financesHandler") - data, err := h.Finances() - if err != nil { - log.Println("financesHandler", err.Error()) - } - w.Header().Set("Content-Type", "application/json") - if _, err := w.Write(data); err != nil { - log.Println("financesHandler", err.Error()) + // Check if we have multi-account support + if h.MultiAccountAggregator != nil { + // Multi-account mode: return aggregated finances from all accounts + dataMerger := h.MultiAccountAggregator.GetDataMerger() + finances, err := dataMerger.GetAggregatedFinances() + if err != nil { + log.Println("multi-account financesHandler", err.Error()) + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + // Convert to JSON and return + if jsonData, err := json.Marshal(finances); err != nil { + log.Println("financesHandler JSON marshal error", err.Error()) + http.Error(w, err.Error(), http.StatusInternalServerError) + } else { + if _, err := w.Write(jsonData); err != nil { + log.Println("financesHandler write error", err.Error()) + } + } + } else { + // Legacy single-account mode + data, err := h.Finances() + if err != nil { + log.Println("financesHandler", err.Error()) + } + + if _, err := w.Write(data); err != nil { + log.Println("financesHandler", err.Error()) + } } } diff --git a/handlers/handlers.go b/handlers/handlers.go index 3cf1ba2..8a212a8 100644 --- a/handlers/handlers.go +++ b/handlers/handlers.go @@ -8,6 +8,7 @@ import ( "strings" "github.com/ad/domru/config" + "github.com/ad/domru/multiconfig" ) type Handler struct { @@ -15,6 +16,10 @@ type Handler struct { UserAccounts []Account Account *Account + // Multi-account support + MultiAccountAggregator *multiconfig.MultiAccountAggregator + LegacyAdapter *multiconfig.LegacyConfigAdapter + TemplateFs embed.FS } @@ -27,6 +32,27 @@ func NewHandlers(config *config.Config, templateFs embed.FS) (h *Handler) { return h } +// NewMultiAccountHandlers creates a new handler with multi-account support +func NewMultiAccountHandlers(aggregator *multiconfig.MultiAccountAggregator, templateFs embed.FS) (h *Handler) { + legacyAdapter := multiconfig.NewLegacyConfigAdapter(aggregator) + + // Get primary config for backward compatibility + primaryConfig, err := legacyAdapter.GetPrimaryConfig() + if err != nil { + // Create empty config if no primary account exists + primaryConfig = &config.Config{Port: 18000} + } + + h = &Handler{ + Config: primaryConfig, + MultiAccountAggregator: aggregator, + LegacyAdapter: legacyAdapter, + TemplateFs: templateFs, + } + + return h +} + // Header ... 
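For context, a minimal client-side sketch of how an account could be registered through the new multi-account endpoint added in this patch (POST /accounts handled by MultiAccountsHandler). The request fields mirror AddAccountRequest; login and uuid are required. The base URL assumes the default port 18000, and the concrete name, login, UUID and operator values are placeholders:

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "net/http"
    )

    func main() {
        // Body mirrors AddAccountRequest accepted by POST /accounts in this patch.
        body, _ := json.Marshal(map[string]interface{}{
            "name":          "Flat on Lenina st.",                    // placeholder
            "login":         79001234567,                             // placeholder, required
            "token":         "",                                      // may be set later via PUT /accounts/{id}
            "refresh_token": "",
            "uuid":          "7e0cfcd2-5d8a-4f0e-9c9f-2f9a0f3a1a11",  // placeholder device UUID, required
            "operator":      2,                                       // placeholder
            "enabled":       true,
        })

        // localhost:18000 is an assumption based on the default port used in main.go.
        resp, err := http.Post("http://localhost:18000/accounts", "application/json", bytes.NewReader(body))
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()
        fmt.Println("status:", resp.StatusCode) // 201 Created on success
    }

Enabling, disabling and deleting an existing account follow the same pattern against /accounts/{id}/enable, /accounts/{id}/disable and DELETE /accounts/{id}.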
type Header struct { http.Header diff --git a/handlers/places.go b/handlers/places.go index d93f2e0..461ee2a 100644 --- a/handlers/places.go +++ b/handlers/places.go @@ -2,6 +2,7 @@ package handlers import ( "context" + "encoding/json" "log" "net/http" "strconv" @@ -66,15 +67,38 @@ func (h *Handler) Places() (string, error) { func (h *Handler) PlacesHandler(w http.ResponseWriter, r *http.Request) { // log.Println("/placesHandler") - data, err := h.Places() - if err != nil { - data = err.Error() - log.Println("placesHandler", err.Error()) - } - w.Header().Set("Content-Type", "application/json") - if _, err := w.Write([]byte(data)); err != nil { - log.Println("placesHandler", err.Error()) + // Check if we have multi-account support + if h.MultiAccountAggregator != nil { + // Multi-account mode: return aggregated places from all accounts + dataMerger := h.MultiAccountAggregator.GetDataMerger() + places, err := dataMerger.GetAggregatedPlaces() + if err != nil { + log.Println("multi-account placesHandler", err.Error()) + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + // Convert to JSON and return + if jsonData, err := json.Marshal(places); err != nil { + log.Println("placesHandler JSON marshal error", err.Error()) + http.Error(w, err.Error(), http.StatusInternalServerError) + } else { + if _, err := w.Write(jsonData); err != nil { + log.Println("placesHandler write error", err.Error()) + } + } + } else { + // Legacy single-account mode + data, err := h.Places() + if err != nil { + data = err.Error() + log.Println("placesHandler", err.Error()) + } + + if _, err := w.Write([]byte(data)); err != nil { + log.Println("placesHandler", err.Error()) + } } } diff --git a/main.go b/main.go index 8bf3a0d..3aee307 100644 --- a/main.go +++ b/main.go @@ -4,22 +4,152 @@ import ( "embed" "log" "net/http" + "os" + "path/filepath" "strconv" + "strings" "github.com/ad/domru/config" "github.com/ad/domru/handlers" + "github.com/ad/domru/multiconfig" ) //go:embed templates/* var templateFs embed.FS func main() { - // Init Config - addonConfig := config.InitConfig() + log.Println("Starting Domofon application...") + + // Determine configuration directory + configDir := "/share/domofon" + if homeDir, err := os.UserHomeDir(); err == nil { + configDir = filepath.Join(homeDir, ".domofon") + } + log.Printf("Using configuration directory: %s", configDir) + + // Initialize multi-account system + log.Println("Initializing multi-account system...") + + // Ensure configuration directory exists + if err := os.MkdirAll(configDir, 0755); err != nil { + log.Printf("WARNING: Failed to create configuration directory %s: %v", configDir, err) + log.Println("This may cause issues with configuration persistence") + } + + storage := multiconfig.NewAccountStorage(configDir) + factory := &handlers.HandlerFactory{} + aggregator := multiconfig.NewMultiAccountAggregator(storage, factory, templateFs) + + // Check for migration from legacy config + legacyConfigPath := filepath.Join(configDir, "account.json") + migration := multiconfig.NewMigrationService(storage, legacyConfigPath) + + if migration.NeedsMigration() { + log.Println("Legacy configuration detected, starting migration process...") + log.Printf("Legacy config path: %s", legacyConfigPath) + log.Printf("Target config directory: %s", configDir) + + if err := migration.MigrateFromLegacy(); err != nil { + log.Printf("ERROR: Migration failed: %v", err) + log.Println("Migration process encountered an error, falling back to legacy mode...") + log.Println("Your 
original configuration has been preserved and will continue to work") + + // Fallback to legacy mode + addonConfig := config.InitConfig() + if addonConfig == nil { + log.Fatal("FATAL: Failed to initialize legacy configuration and migration failed") + } + h := handlers.NewHandlers(addonConfig, templateFs) + log.Println("Successfully initialized legacy mode as fallback") + initializeLegacyMode(h, addonConfig) + return + } + + log.Println("Migration completed successfully!") + log.Println("Your account has been migrated to the new multi-account format") + log.Println("A backup of your original configuration has been created") + + // Clean up old backup files to prevent disk space issues + if err := migration.CleanupBackups(); err != nil { + log.Printf("WARNING: Failed to cleanup old migration backups: %v", err) + } + } else { + log.Println("No migration needed - using existing multi-account configuration") + } + + // Load accounts after migration (if any) + if err := aggregator.LoadAccounts(); err != nil { + log.Printf("ERROR: Failed to load accounts configuration: %v", err) + + // Check if we have legacy config to fall back to + if _, err := os.Stat(legacyConfigPath); err == nil { + log.Println("Multi-account configuration failed, attempting fallback to legacy mode...") + addonConfig := config.InitConfig() + if addonConfig == nil { + log.Fatal("FATAL: Both multi-account and legacy configurations failed to load") + } + h := handlers.NewHandlers(addonConfig, templateFs) + log.Println("Successfully fell back to legacy mode") + initializeLegacyMode(h, addonConfig) + return + } + + log.Println("No existing accounts found, starting in multi-account mode with empty configuration") + log.Println("You can add accounts through the web interface at /accounts") + } + + // Initialize multi-account handlers + h := handlers.NewMultiAccountHandlers(aggregator, templateFs) + + // In multi-account mode, token refresh is handled per account by the aggregator + // No need for global token refresh logic + + // Register existing endpoints + http.HandleFunc("/", h.HomeHandler) + http.HandleFunc("/login", h.LoginHandler) + http.HandleFunc("/login/address", h.LoginAddressHandler) + http.HandleFunc("/sms", h.LoginSMSHandler) - // Init Handlers - h := handlers.NewHandlers(addonConfig, templateFs) + http.HandleFunc("/cameras", h.CamerasHandler) + http.HandleFunc("/door", h.DoorHandler) + http.HandleFunc("/events/last", h.LastEventHandler) + http.HandleFunc("/events", h.EventsHandler) + http.HandleFunc("/finances", h.FinancesHandler) + http.HandleFunc("/operators", h.OperatorsHandler) + http.HandleFunc("/places", h.PlacesHandler) + http.HandleFunc("/snapshot", h.SnapshotHandler) + http.HandleFunc("/stream", h.StreamHandler) + + // Register new multi-account management endpoints + http.HandleFunc("/accounts", h.MultiAccountsHandler) + http.HandleFunc("/accounts/", func(w http.ResponseWriter, r *http.Request) { + // Route to AccountByIDHandler for paths like /accounts/{id} and /accounts/{id}/action + if strings.HasPrefix(r.URL.Path, "/accounts/") && r.URL.Path != "/accounts/" { + h.AccountByIDHandler(w, r) + } else { + http.NotFound(w, r) + } + }) + + // Get port from primary account or use default + port := 18000 + if primaryAccount, err := aggregator.GetPrimaryAccount(); err == nil { + // Port is not stored in AccountConfig, use default + log.Printf("Starting multi-account mode with primary account: %s", primaryAccount.Name) + } else { + log.Println("Starting multi-account mode with no primary account") + } + + 
log.Printf("Starting server on port %d in multi-account mode", port) + + if err := http.ListenAndServe(":"+strconv.Itoa(port), nil); err != nil { + panic("ListenAndServe: " + err.Error()) + } +} +// initializeLegacyMode initializes the server in legacy single-account mode +func initializeLegacyMode(h *handlers.Handler, addonConfig *config.Config) { + // Handle token refresh for legacy mode switch { case addonConfig.Token != "" || addonConfig.RefreshToken != "": if addonConfig.RefreshToken != "" { @@ -39,11 +169,11 @@ func main() { log.Println("auth/refresh token or login and password must be provided") } + // Register legacy endpoints (no multi-account management endpoints) http.HandleFunc("/", h.HomeHandler) http.HandleFunc("/login", h.LoginHandler) http.HandleFunc("/login/address", h.LoginAddressHandler) http.HandleFunc("/sms", h.LoginSMSHandler) - // http.HandleFunc("/network", h.HANetworkHandler) http.HandleFunc("/cameras", h.CamerasHandler) http.HandleFunc("/door", h.DoorHandler) diff --git a/multiconfig/account.go b/multiconfig/account.go new file mode 100644 index 0000000..ff83579 --- /dev/null +++ b/multiconfig/account.go @@ -0,0 +1,397 @@ +package multiconfig + +import ( + "encoding/json" + "fmt" + "log" + "os" + "path/filepath" + "time" + + "github.com/google/uuid" +) + +// AccountConfig represents a single account configuration +type AccountConfig struct { + ID string `json:"id"` + Token string `json:"token"` + RefreshToken string `json:"refresh"` + Login int `json:"login"` + Operator int `json:"operator"` + UUID string `json:"uuid"` + Name string `json:"name"` + Enabled bool `json:"enabled"` +} + +// MultiAccountConfig represents the complete multi-account configuration +type MultiAccountConfig struct { + Accounts map[string]*AccountConfig `json:"accounts"` + PrimaryAccount string `json:"primary_account"` + Version string `json:"version"` +} + +// AccountStorage handles reading and writing account configurations +type AccountStorage struct { + configDir string + configFile string + backupDir string +} + +// NewAccountStorage creates a new AccountStorage instance +func NewAccountStorage(configDir string) *AccountStorage { + return &AccountStorage{ + configDir: configDir, + configFile: filepath.Join(configDir, "accounts.json"), + backupDir: filepath.Join(configDir, "backups"), + } +} + +// LoadAccounts loads all accounts from the configuration file +func (s *AccountStorage) LoadAccounts() (map[string]*AccountConfig, error) { + log.Printf("[AccountStorage] Loading accounts from %s", s.configFile) + config, err := s.loadConfig() + if err != nil { + log.Printf("[AccountStorage] Failed to load accounts: %v", err) + return nil, err + } + log.Printf("[AccountStorage] Successfully loaded %d accounts", len(config.Accounts)) + return config.Accounts, nil +} + +// loadConfig loads the complete configuration including primary account info +func (s *AccountStorage) loadConfig() (*MultiAccountConfig, error) { + log.Printf("[AccountStorage] Loading configuration from %s", s.configFile) + + // Ensure directory exists + if err := s.ensureDir(s.configDir); err != nil { + log.Printf("[AccountStorage] Failed to create config directory %s: %v", s.configDir, err) + return nil, fmt.Errorf("failed to create config directory: %w", err) + } + + // Check if file exists + if _, err := os.Stat(s.configFile); os.IsNotExist(err) { + log.Printf("[AccountStorage] Config file does not exist, returning empty configuration") + // Return empty config if file doesn't exist + return &MultiAccountConfig{ + Accounts: 
make(map[string]*AccountConfig), + Version: "2.0", + }, nil + } + + // Read file + data, err := os.ReadFile(s.configFile) + if err != nil { + log.Printf("[AccountStorage] Failed to read config file %s: %v", s.configFile, err) + return nil, fmt.Errorf("failed to read config file: %w", err) + } + + // Parse JSON + var config MultiAccountConfig + if err := json.Unmarshal(data, &config); err != nil { + log.Printf("[AccountStorage] Failed to parse JSON from %s: %v", s.configFile, err) + return nil, fmt.Errorf("invalid configuration format: %w", err) + } + + // Validate configuration + if err := s.validateConfig(&config); err != nil { + log.Printf("[AccountStorage] Configuration validation failed: %v", err) + return nil, fmt.Errorf("configuration validation failed: %w", err) + } + + log.Printf("[AccountStorage] Successfully loaded and validated configuration") + return &config, nil +} + +// SaveAccounts saves all accounts to the configuration file +func (s *AccountStorage) SaveAccounts(accounts map[string]*AccountConfig) error { + log.Printf("[AccountStorage] Saving %d accounts to %s", len(accounts), s.configFile) + + // Ensure directory exists + if err := s.ensureDir(s.configDir); err != nil { + log.Printf("[AccountStorage] Failed to create config directory %s: %v", s.configDir, err) + return fmt.Errorf("failed to create config directory: %w", err) + } + + // Try to preserve existing primary account + primaryAccount := "" + + // First, try to load existing config to get current primary account + if existingConfig, err := s.loadConfig(); err == nil && existingConfig.PrimaryAccount != "" { + // Check if existing primary account is still enabled + if account, exists := accounts[existingConfig.PrimaryAccount]; exists && account.Enabled { + primaryAccount = existingConfig.PrimaryAccount + log.Printf("[AccountStorage] Preserving existing primary account: %s", primaryAccount) + } + } + + // If no valid existing primary account, find first enabled account + if primaryAccount == "" { + for id, account := range accounts { + if account.Enabled { + primaryAccount = id + log.Printf("[AccountStorage] Setting new primary account: %s", primaryAccount) + break + } + } + } + + config := MultiAccountConfig{ + Accounts: accounts, + PrimaryAccount: primaryAccount, + Version: "2.0", + } + + // Marshal to JSON + data, err := json.MarshalIndent(config, "", " ") + if err != nil { + log.Printf("[AccountStorage] Failed to marshal config to JSON: %v", err) + return fmt.Errorf("failed to marshal config: %w", err) + } + + // Write with retry logic + log.Printf("[AccountStorage] Writing configuration with retry logic") + if err := s.writeWithRetry(s.configFile, data); err != nil { + log.Printf("[AccountStorage] Failed to save accounts after retries: %v", err) + return err + } + + log.Printf("[AccountStorage] Successfully saved accounts to %s", s.configFile) + return nil +} + +// AddAccount adds a new account with a unique ID +func (s *AccountStorage) AddAccount(account *AccountConfig) error { + log.Printf("[AccountStorage] Adding new account with login %d", account.Login) + + accounts, err := s.LoadAccounts() + if err != nil { + log.Printf("[AccountStorage] Failed to load existing accounts: %v", err) + return fmt.Errorf("failed to load existing accounts: %w", err) + } + + // Generate unique ID if not provided + if account.ID == "" { + account.ID = uuid.New().String() + log.Printf("[AccountStorage] Generated new account ID: %s", account.ID) + } + + // Ensure ID is unique + for accounts[account.ID] != nil { + account.ID = 
uuid.New().String() + log.Printf("[AccountStorage] ID collision detected, generated new ID: %s", account.ID) + } + + // Set default values + if account.UUID == "" { + account.UUID = uuid.New().String() + log.Printf("[AccountStorage] Generated device UUID: %s", account.UUID) + } + if account.Name == "" { + account.Name = fmt.Sprintf("Account %d", account.Login) + log.Printf("[AccountStorage] Generated account name: %s", account.Name) + } + + accounts[account.ID] = account + log.Printf("[AccountStorage] Account %s added to collection, saving to storage", account.ID) + + if err := s.SaveAccounts(accounts); err != nil { + log.Printf("[AccountStorage] Failed to save accounts after adding %s: %v", account.ID, err) + return err + } + + log.Printf("[AccountStorage] Successfully added account %s", account.ID) + return nil +} + +// RemoveAccount removes an account by ID +func (s *AccountStorage) RemoveAccount(accountID string) error { + log.Printf("[AccountStorage] Removing account %s", accountID) + + accounts, err := s.LoadAccounts() + if err != nil { + log.Printf("[AccountStorage] Failed to load existing accounts: %v", err) + return fmt.Errorf("failed to load existing accounts: %w", err) + } + + // Check if account exists before removal + if _, exists := accounts[accountID]; !exists { + log.Printf("[AccountStorage] Account %s not found for removal", accountID) + return fmt.Errorf("account %s not found", accountID) + } + + delete(accounts, accountID) + log.Printf("[AccountStorage] Account %s removed from collection, saving to storage", accountID) + + if err := s.SaveAccounts(accounts); err != nil { + log.Printf("[AccountStorage] Failed to save accounts after removing %s: %v", accountID, err) + return err + } + + log.Printf("[AccountStorage] Successfully removed account %s", accountID) + return nil +} + +// validateConfig validates the configuration structure and data +func (s *AccountStorage) validateConfig(config *MultiAccountConfig) error { + if config.Accounts == nil { + return fmt.Errorf("accounts map cannot be nil") + } + + // Validate each account + for id, account := range config.Accounts { + if account == nil { + return fmt.Errorf("account %s cannot be nil", id) + } + if account.ID != id { + return fmt.Errorf("account ID mismatch: key=%s, account.ID=%s", id, account.ID) + } + if account.Login == 0 { + return fmt.Errorf("account %s has invalid login", id) + } + if account.UUID == "" { + return fmt.Errorf("account %s missing UUID", id) + } + } + + // Validate primary account exists if specified + if config.PrimaryAccount != "" { + if _, exists := config.Accounts[config.PrimaryAccount]; !exists { + return fmt.Errorf("primary account %s does not exist", config.PrimaryAccount) + } + } + + return nil +} + +// writeWithRetry writes data to file with retry logic +func (s *AccountStorage) writeWithRetry(filename string, data []byte) error { + const maxRetries = 3 + const retryDelay = 100 * time.Millisecond + + log.Printf("[AccountStorage] Writing file %s with retry logic (max %d retries)", filename, maxRetries) + + var lastErr error + for i := 0; i < maxRetries; i++ { + if err := os.WriteFile(filename, data, 0644); err != nil { + lastErr = err + log.Printf("[AccountStorage] Write attempt %d failed for %s: %v", i+1, filename, err) + if i < maxRetries-1 { + sleepDuration := retryDelay * time.Duration(i+1) // Exponential backoff + log.Printf("[AccountStorage] Retrying write in %v", sleepDuration) + time.Sleep(sleepDuration) + } + continue + } + if i > 0 { + log.Printf("[AccountStorage] Write succeeded 
on attempt %d for %s", i+1, filename) + } else { + log.Printf("[AccountStorage] Write succeeded on first attempt for %s", filename) + } + return nil + } + + log.Printf("[AccountStorage] All write attempts failed for %s: %v", filename, lastErr) + return fmt.Errorf("failed to write file after %d retries: %w", maxRetries, lastErr) +} + +// ensureDir creates directory if it doesn't exist +func (s *AccountStorage) ensureDir(dirPath string) error { + if _, err := os.Stat(dirPath); os.IsNotExist(err) { + log.Printf("[AccountStorage] Creating directory structure: %s", dirPath) + if err := os.MkdirAll(dirPath, 0755); err != nil { + log.Printf("[AccountStorage] Failed to create directory %s: %v", dirPath, err) + return err + } + log.Printf("[AccountStorage] Successfully created directory: %s", dirPath) + } else { + log.Printf("[AccountStorage] Directory already exists: %s", dirPath) + } + return nil +} + +// CreateBackup creates a backup of the current configuration +func (s *AccountStorage) CreateBackup() error { + log.Printf("[AccountStorage] Creating backup of configuration") + + if _, err := os.Stat(s.configFile); os.IsNotExist(err) { + log.Printf("[AccountStorage] No configuration file to backup") + return nil // No file to backup + } + + // Ensure backup directory exists + if err := s.ensureDir(s.backupDir); err != nil { + log.Printf("[AccountStorage] Failed to create backup directory: %v", err) + return fmt.Errorf("failed to create backup directory: %w", err) + } + + // Read current config + data, err := os.ReadFile(s.configFile) + if err != nil { + log.Printf("[AccountStorage] Failed to read config for backup: %v", err) + return fmt.Errorf("failed to read config for backup: %w", err) + } + + // Create backup filename with timestamp + timestamp := time.Now().Format("20060102_150405") + backupFile := filepath.Join(s.backupDir, fmt.Sprintf("accounts_%s.json", timestamp)) + + // Write backup + if err := os.WriteFile(backupFile, data, 0644); err != nil { + log.Printf("[AccountStorage] Failed to write backup file %s: %v", backupFile, err) + return fmt.Errorf("failed to write backup file: %w", err) + } + + log.Printf("[AccountStorage] Successfully created backup: %s", backupFile) + return nil +} + +// RestoreFromBackup attempts to restore from the most recent backup +func (s *AccountStorage) RestoreFromBackup() error { + log.Printf("[AccountStorage] Attempting to restore from backup") + + // Find most recent backup + backupFiles, err := filepath.Glob(filepath.Join(s.backupDir, "accounts_*.json")) + if err != nil || len(backupFiles) == 0 { + log.Printf("[AccountStorage] No backup files found in %s", s.backupDir) + return fmt.Errorf("no backup files found") + } + + // Get the most recent backup (files are sorted by name which includes timestamp) + var mostRecent string + for _, file := range backupFiles { + if file > mostRecent { + mostRecent = file + } + } + + log.Printf("[AccountStorage] Using most recent backup: %s", mostRecent) + + // Read backup + data, err := os.ReadFile(mostRecent) + if err != nil { + log.Printf("[AccountStorage] Failed to read backup file %s: %v", mostRecent, err) + return fmt.Errorf("failed to read backup file: %w", err) + } + + // Validate backup before restoring + var config MultiAccountConfig + if err := json.Unmarshal(data, &config); err != nil { + log.Printf("[AccountStorage] Backup file %s is corrupted: %v", mostRecent, err) + return fmt.Errorf("backup file is corrupted: %w", err) + } + + if err := s.validateConfig(&config); err != nil { + log.Printf("[AccountStorage] 
Backup file %s validation failed: %v", mostRecent, err) + return fmt.Errorf("backup file validation failed: %w", err) + } + + // Restore backup + log.Printf("[AccountStorage] Restoring configuration from backup") + if err := s.writeWithRetry(s.configFile, data); err != nil { + log.Printf("[AccountStorage] Failed to restore backup: %v", err) + return err + } + + log.Printf("[AccountStorage] Successfully restored configuration from backup %s", mostRecent) + return nil +} \ No newline at end of file diff --git a/multiconfig/account_test.go b/multiconfig/account_test.go new file mode 100644 index 0000000..4e1ec03 --- /dev/null +++ b/multiconfig/account_test.go @@ -0,0 +1,2995 @@ +package multiconfig + +import ( + "embed" + "encoding/json" + "fmt" + "os" + "path/filepath" + "testing" + + "github.com/ad/domru/config" + "github.com/leanovate/gopter" + "github.com/leanovate/gopter/gen" + "github.com/leanovate/gopter/prop" +) + +func TestProperty_AccountStorageUniqueIdentifiers(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("account storage with unique identifiers", prop.ForAll( + func(accounts []AccountConfig) bool { + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "account_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + + // Convert slice to map and add accounts + accountMap := make(map[string]*AccountConfig) + usedIDs := make(map[string]bool) + + for i := range accounts { + account := &accounts[i] + + // Ensure account has required fields for validation + if account.Login == 0 { + account.Login = 79123456789 + i // Valid phone number format + } + if account.UUID == "" { + account.UUID = "test-uuid-" + string(rune('a'+i)) + } + + // Add account using AddAccount method which ensures unique IDs + err := storage.AddAccount(account) + if err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + + // Track the ID that was assigned + usedIDs[account.ID] = true + accountMap[account.ID] = account + } + + // Load accounts back and verify uniqueness + loadedAccounts, err := storage.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Check that all IDs are unique + loadedIDs := make(map[string]bool) + for id := range loadedAccounts { + if loadedIDs[id] { + t.Logf("Duplicate ID found: %s", id) + return false + } + loadedIDs[id] = true + } + + // Verify that the number of loaded accounts matches what we added + if len(loadedAccounts) != len(accountMap) { + t.Logf("Account count mismatch: expected %d, got %d", len(accountMap), len(loadedAccounts)) + return false + } + + return true + }, + genAccountSlice(), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_ConfigurationValidationOnRead(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("configuration validation on read", prop.ForAll( + func(corruptionType int) bool { + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "validation_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + configFile := filepath.Join(tempDir, "accounts.json") + + // Create different types of invalid configurations based on corruptionType + var invalidJSON string + switch corruptionType % 6 { + case 0: + // Invalid JSON syntax + 
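+ // Note: case 0 exercises the JSON-parse path in loadConfig (surfacing as an
+ // "invalid configuration format" error), while cases 1-5 below are well-formed
+ // JSON that should instead be rejected by validateConfig ("configuration
+ // validation failed"). The error-substring check further down accepts either
+ // failure mode.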
invalidJSON = `{"accounts": {invalid json}` + case 1: + // Nil accounts map + invalidJSON = `{"accounts": null, "version": "2.0"}` + case 2: + // Account with mismatched ID + invalidJSON = `{"accounts": {"id1": {"id": "id2", "login": 79123456789, "uuid": "test-uuid"}}, "version": "2.0"}` + case 3: + // Account with invalid login (zero) + invalidJSON = `{"accounts": {"id1": {"id": "id1", "login": 0, "uuid": "test-uuid"}}, "version": "2.0"}` + case 4: + // Account with missing UUID + invalidJSON = `{"accounts": {"id1": {"id": "id1", "login": 79123456789, "uuid": ""}}, "version": "2.0"}` + case 5: + // Primary account that doesn't exist + invalidJSON = `{"accounts": {"id1": {"id": "id1", "login": 79123456789, "uuid": "test-uuid"}}, "primary_account": "nonexistent", "version": "2.0"}` + } + + // Write invalid configuration + err = os.WriteFile(configFile, []byte(invalidJSON), 0644) + if err != nil { + t.Logf("Failed to write invalid config: %v", err) + return false + } + + // Attempt to load accounts - should fail with validation error + _, err = storage.LoadAccounts() + if err == nil { + t.Logf("Expected validation error but got none for corruption type %d", corruptionType%6) + return false + } + + // Verify error message indicates validation failure + errorMsg := err.Error() + validationErrors := []string{ + "invalid configuration format", + "configuration validation failed", + "accounts map cannot be nil", + "account ID mismatch", + "has invalid login", + "missing UUID", + "primary account", + "does not exist", + } + + foundValidationError := false + for _, validationError := range validationErrors { + if len(errorMsg) > 0 && contains(errorMsg, validationError) { + foundValidationError = true + break + } + } + + if !foundValidationError { + t.Logf("Error message doesn't indicate validation failure: %s", errorMsg) + return false + } + + return true + }, + gen.IntRange(0, 100), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +// Helper function to check if string contains substring +func contains(s, substr string) bool { + return len(s) >= len(substr) && (len(substr) == 0 || findSubstring(s, substr)) +} + +func findSubstring(s, substr string) bool { + for i := 0; i <= len(s)-len(substr); i++ { + match := true + for j := 0; j < len(substr); j++ { + if s[i+j] != substr[j] { + match = false + break + } + } + if match { + return true + } + } + return false +} + +// Generator for account slices +func genAccountSlice() gopter.Gen { + return gen.SliceOfN(5, genAccount()) // Generate up to 5 accounts +} + +// Generator for individual accounts +func genAccount() gopter.Gen { + return gopter.CombineGens( + gen.AlphaString(), // Name + gen.Bool(), // Enabled + gen.AlphaString(), // Token + gen.AlphaString(), // RefreshToken + gen.IntRange(1, 2), // Operator (0 or 1) + ).Map(func(values []interface{}) AccountConfig { + return AccountConfig{ + Name: values[0].(string), + Enabled: values[1].(bool), + Token: values[2].(string), + RefreshToken: values[3].(string), + Operator: values[4].(int), + // ID, Login, and UUID will be set by the test or AddAccount method + } + }) +} + +// Unit tests for basic functionality +func TestAccountStorage_Basic(t *testing.T) { + tempDir, err := os.MkdirTemp("", "account_basic_test_*") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + + // Test loading from non-existent file + accounts, err := storage.LoadAccounts() + if err != nil { + t.Fatalf("Expected no 
error for non-existent file, got: %v", err) + } + if len(accounts) != 0 { + t.Fatalf("Expected empty accounts map, got %d accounts", len(accounts)) + } + + // Test adding an account + account := &AccountConfig{ + Login: 79123456789, + UUID: "test-uuid", + Name: "Test Account", + Enabled: true, + Token: "test-token", + RefreshToken: "test-refresh", + Operator: 0, + } + + err = storage.AddAccount(account) + if err != nil { + t.Fatalf("Failed to add account: %v", err) + } + + // Verify account was added with unique ID + if account.ID == "" { + t.Fatalf("Account ID should be generated") + } + + // Load and verify + accounts, err = storage.LoadAccounts() + if err != nil { + t.Fatalf("Failed to load accounts: %v", err) + } + + if len(accounts) != 1 { + t.Fatalf("Expected 1 account, got %d", len(accounts)) + } + + loadedAccount := accounts[account.ID] + if loadedAccount == nil { + t.Fatalf("Account not found by ID") + } + + if loadedAccount.Login != account.Login { + t.Fatalf("Login mismatch: expected %d, got %d", account.Login, loadedAccount.Login) + } +} + +func TestAccountStorage_WriteRetry(t *testing.T) { + tempDir, err := os.MkdirTemp("", "retry_test_*") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + + // Test successful write + data := []byte(`{"test": "data"}`) + testFile := filepath.Join(tempDir, "test.json") + + err = storage.writeWithRetry(testFile, data) + if err != nil { + t.Fatalf("Write should succeed: %v", err) + } + + // Verify file was written + readData, err := os.ReadFile(testFile) + if err != nil { + t.Fatalf("Failed to read written file: %v", err) + } + + if string(readData) != string(data) { + t.Fatalf("Data mismatch: expected %s, got %s", string(data), string(readData)) + } +} + +func TestAccountStorage_DirectoryCreation(t *testing.T) { + tempDir, err := os.MkdirTemp("", "dir_test_*") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tempDir) + + // Create storage with nested directory path + nestedDir := filepath.Join(tempDir, "nested", "config") + storage := NewAccountStorage(nestedDir) + + // Add account should create directory structure + account := &AccountConfig{ + Login: 79123456789, + UUID: "test-uuid", + Name: "Test Account", + Enabled: true, + } + + err = storage.AddAccount(account) + if err != nil { + t.Fatalf("Failed to add account with directory creation: %v", err) + } + + // Verify directory was created + if _, err := os.Stat(nestedDir); os.IsNotExist(err) { + t.Fatalf("Directory should have been created: %s", nestedDir) + } + + // Verify config file exists + configFile := filepath.Join(nestedDir, "accounts.json") + if _, err := os.Stat(configFile); os.IsNotExist(err) { + t.Fatalf("Config file should have been created: %s", configFile) + } +} + +func TestProperty_MigrationDataPreservation(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("migration data preservation", prop.ForAll( + func(legacyConfig config.Config) bool { + // Ensure valid legacy config + if legacyConfig.Login == 0 { + legacyConfig.Login = 79123456789 + } + if legacyConfig.UUID == "" { + legacyConfig.UUID = "test-uuid-migration" + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "migration_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + // Create legacy config file + legacyPath := filepath.Join(tempDir, 
"account.json") + legacyData, err := json.Marshal(legacyConfig) + if err != nil { + t.Logf("Failed to marshal legacy config: %v", err) + return false + } + + err = os.WriteFile(legacyPath, legacyData, 0644) + if err != nil { + t.Logf("Failed to write legacy config: %v", err) + return false + } + + // Create storage and migration service + storage := NewAccountStorage(tempDir) + migration := NewMigrationService(storage, legacyPath) + + // Perform migration + err = migration.MigrateFromLegacy() + if err != nil { + t.Logf("Migration failed: %v", err) + return false + } + + // Load migrated accounts + accounts, err := storage.LoadAccounts() + if err != nil { + t.Logf("Failed to load migrated accounts: %v", err) + return false + } + + // Should have exactly one account + if len(accounts) != 1 { + t.Logf("Expected 1 account after migration, got %d", len(accounts)) + return false + } + + // Get the migrated account + var migratedAccount *AccountConfig + for _, account := range accounts { + migratedAccount = account + break + } + + // Verify all original data is preserved + if migratedAccount.Token != legacyConfig.Token { + t.Logf("Token not preserved: expected %s, got %s", legacyConfig.Token, migratedAccount.Token) + return false + } + + if migratedAccount.RefreshToken != legacyConfig.RefreshToken { + t.Logf("RefreshToken not preserved: expected %s, got %s", legacyConfig.RefreshToken, migratedAccount.RefreshToken) + return false + } + + if migratedAccount.Login != legacyConfig.Login { + t.Logf("Login not preserved: expected %d, got %d", legacyConfig.Login, migratedAccount.Login) + return false + } + + if migratedAccount.Operator != legacyConfig.Operator { + t.Logf("Operator not preserved: expected %d, got %d", legacyConfig.Operator, migratedAccount.Operator) + return false + } + + if migratedAccount.UUID != legacyConfig.UUID { + t.Logf("UUID not preserved: expected %s, got %s", legacyConfig.UUID, migratedAccount.UUID) + return false + } + + // Verify account has required new fields + if migratedAccount.ID == "" { + t.Logf("Migrated account missing ID") + return false + } + + if migratedAccount.Name == "" { + t.Logf("Migrated account missing Name") + return false + } + + if !migratedAccount.Enabled { + t.Logf("Migrated account should be enabled by default") + return false + } + + return true + }, + genLegacyConfig(), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_MigrationBackupCreation(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("migration backup creation", prop.ForAll( + func(legacyConfig config.Config) bool { + // Ensure valid legacy config + if legacyConfig.Login == 0 { + legacyConfig.Login = 79123456789 + } + if legacyConfig.UUID == "" { + legacyConfig.UUID = "test-uuid-backup" + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "backup_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + // Create legacy config file + legacyPath := filepath.Join(tempDir, "account.json") + legacyData, err := json.Marshal(legacyConfig) + if err != nil { + t.Logf("Failed to marshal legacy config: %v", err) + return false + } + + err = os.WriteFile(legacyPath, legacyData, 0644) + if err != nil { + t.Logf("Failed to write legacy config: %v", err) + return false + } + + // Create storage and migration service + storage := NewAccountStorage(tempDir) + migration := NewMigrationService(storage, legacyPath) + + // Perform migration + 
err = migration.MigrateFromLegacy() + if err != nil { + t.Logf("Migration failed: %v", err) + return false + } + + // Check that backup was created + backupDir := filepath.Join(tempDir, "migration_backups") + backupFiles, err := filepath.Glob(filepath.Join(backupDir, "legacy_account_*.json")) + if err != nil { + t.Logf("Failed to list backup files: %v", err) + return false + } + + if len(backupFiles) == 0 { + t.Logf("No backup files found") + return false + } + + // Verify backup contains original data + backupData, err := os.ReadFile(backupFiles[0]) + if err != nil { + t.Logf("Failed to read backup file: %v", err) + return false + } + + var backupConfig config.Config + err = json.Unmarshal(backupData, &backupConfig) + if err != nil { + t.Logf("Failed to parse backup config: %v", err) + return false + } + + // Verify backup contains original data + if backupConfig.Token != legacyConfig.Token || + backupConfig.RefreshToken != legacyConfig.RefreshToken || + backupConfig.Login != legacyConfig.Login || + backupConfig.Operator != legacyConfig.Operator || + backupConfig.UUID != legacyConfig.UUID { + t.Logf("Backup data doesn't match original") + return false + } + + // Verify original legacy file was removed after successful migration + if _, err := os.Stat(legacyPath); !os.IsNotExist(err) { + t.Logf("Legacy file should be removed after successful migration") + return false + } + + return true + }, + genLegacyConfig(), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_MigrationFailureRecovery(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("migration failure recovery", prop.ForAll( + func(legacyConfig config.Config) bool { + // Ensure valid legacy config + if legacyConfig.Login == 0 { + legacyConfig.Login = 79123456789 + } + if legacyConfig.UUID == "" { + legacyConfig.UUID = "test-uuid-recovery" + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "recovery_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + // Create legacy config file + legacyPath := filepath.Join(tempDir, "account.json") + legacyData, err := json.Marshal(legacyConfig) + if err != nil { + t.Logf("Failed to marshal legacy config: %v", err) + return false + } + + err = os.WriteFile(legacyPath, legacyData, 0644) + if err != nil { + t.Logf("Failed to write legacy config: %v", err) + return false + } + + // Create a read-only directory to force migration failure + readOnlyDir := filepath.Join(tempDir, "readonly") + err = os.MkdirAll(readOnlyDir, 0444) // Read-only directory + if err != nil { + t.Logf("Failed to create read-only dir: %v", err) + return false + } + + // Create storage pointing to read-only directory (will cause save failure) + storage := NewAccountStorage(readOnlyDir) + migration := NewMigrationService(storage, legacyPath) + + // Attempt migration - should fail + err = migration.MigrateFromLegacy() + if err == nil { + t.Logf("Expected migration to fail due to read-only directory") + return false + } + + // Verify original legacy file still exists (recovery) + if _, err := os.Stat(legacyPath); os.IsNotExist(err) { + t.Logf("Legacy file should still exist after failed migration") + return false + } + + // Verify original data is intact + recoveredData, err := os.ReadFile(legacyPath) + if err != nil { + t.Logf("Failed to read recovered legacy file: %v", err) + return false + } + + var recoveredConfig config.Config + err = 
json.Unmarshal(recoveredData, &recoveredConfig) + if err != nil { + t.Logf("Failed to parse recovered config: %v", err) + return false + } + + // Verify all data is preserved in recovery + if recoveredConfig.Token != legacyConfig.Token || + recoveredConfig.RefreshToken != legacyConfig.RefreshToken || + recoveredConfig.Login != legacyConfig.Login || + recoveredConfig.Operator != legacyConfig.Operator || + recoveredConfig.UUID != legacyConfig.UUID { + t.Logf("Recovered data doesn't match original") + return false + } + + return true + }, + genLegacyConfig(), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +// Generator for legacy config +func genLegacyConfig() gopter.Gen { + return gopter.CombineGens( + gen.AlphaString(), // Token + gen.AlphaString(), // RefreshToken + gen.IntRange(0, 2), // Operator + gen.IntRange(8000, 9000), // Port + ).Map(func(values []interface{}) config.Config { + return config.Config{ + Token: values[0].(string), + RefreshToken: values[1].(string), + Operator: values[2].(int), + Port: values[3].(int), + // Login and UUID will be set by the test to ensure validity + } + }) +} + +// Unit tests for migration service +func TestMigrationService_NeedsMigration(t *testing.T) { + tempDir, err := os.MkdirTemp("", "migration_needs_test_*") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + legacyPath := filepath.Join(tempDir, "account.json") + migration := NewMigrationService(storage, legacyPath) + + // Test 1: No legacy file exists + if migration.NeedsMigration() { + t.Fatalf("Should not need migration when no legacy file exists") + } + + // Test 2: Legacy file exists, no new format + legacyConfig := config.Config{ + Token: "test-token", + RefreshToken: "test-refresh", + Login: 79123456789, + Operator: 0, + UUID: "test-uuid", + } + legacyData, _ := json.Marshal(legacyConfig) + os.WriteFile(legacyPath, legacyData, 0644) + + if !migration.NeedsMigration() { + t.Fatalf("Should need migration when legacy file exists and new format doesn't") + } + + // Test 3: Both legacy and new format exist + accounts := map[string]*AccountConfig{ + "test-id": { + ID: "test-id", + Login: 79123456789, + UUID: "test-uuid", + Enabled: true, + }, + } + storage.SaveAccounts(accounts) + + if migration.NeedsMigration() { + t.Fatalf("Should not need migration when new format already exists") + } +} + +func TestMigrationService_MigrateFromLegacy_Success(t *testing.T) { + tempDir, err := os.MkdirTemp("", "migration_success_test_*") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tempDir) + + // Create legacy config + legacyPath := filepath.Join(tempDir, "account.json") + legacyConfig := config.Config{ + Token: "test-token", + RefreshToken: "test-refresh", + Login: 79123456789, + Operator: 1, + UUID: "test-uuid", + Port: 18000, + } + legacyData, _ := json.Marshal(legacyConfig) + os.WriteFile(legacyPath, legacyData, 0644) + + // Perform migration + storage := NewAccountStorage(tempDir) + migration := NewMigrationService(storage, legacyPath) + + err = migration.MigrateFromLegacy() + if err != nil { + t.Fatalf("Migration should succeed: %v", err) + } + + // Verify migration results + accounts, err := storage.LoadAccounts() + if err != nil { + t.Fatalf("Failed to load migrated accounts: %v", err) + } + + if len(accounts) != 1 { + t.Fatalf("Expected 1 account, got %d", len(accounts)) + } + + var account *AccountConfig + for _, acc := range accounts { + 
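+ // The accounts map is keyed by the generated account ID, which this test does
+ // not know ahead of time, so grab the single entry (the length check above
+ // guarantees there is exactly one).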
account = acc + break + } + + // Verify all data preserved + if account.Token != legacyConfig.Token { + t.Fatalf("Token not preserved") + } + if account.RefreshToken != legacyConfig.RefreshToken { + t.Fatalf("RefreshToken not preserved") + } + if account.Login != legacyConfig.Login { + t.Fatalf("Login not preserved") + } + if account.Operator != legacyConfig.Operator { + t.Fatalf("Operator not preserved") + } + if account.UUID != legacyConfig.UUID { + t.Fatalf("UUID not preserved") + } + + // Verify new fields + if account.ID == "" { + t.Fatalf("ID should be generated") + } + if account.Name == "" { + t.Fatalf("Name should be generated") + } + if !account.Enabled { + t.Fatalf("Account should be enabled by default") + } + + // Verify legacy file was removed + if _, err := os.Stat(legacyPath); !os.IsNotExist(err) { + t.Fatalf("Legacy file should be removed after successful migration") + } + + // Verify backup was created + backupDir := filepath.Join(tempDir, "migration_backups") + backupFiles, _ := filepath.Glob(filepath.Join(backupDir, "legacy_account_*.json")) + if len(backupFiles) == 0 { + t.Fatalf("Backup should be created") + } +} + +func TestMigrationService_InvalidLegacyConfig(t *testing.T) { + tempDir, err := os.MkdirTemp("", "migration_invalid_test_*") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + legacyPath := filepath.Join(tempDir, "account.json") + migration := NewMigrationService(storage, legacyPath) + + // Test invalid JSON + os.WriteFile(legacyPath, []byte(`{invalid json`), 0644) + err = migration.MigrateFromLegacy() + if err == nil { + t.Fatalf("Should fail with invalid JSON") + } + + // Test missing required fields + invalidConfig := config.Config{ + Token: "test-token", + // Missing Login and UUID + } + invalidData, _ := json.Marshal(invalidConfig) + os.WriteFile(legacyPath, invalidData, 0644) + err = migration.MigrateFromLegacy() + if err == nil { + t.Fatalf("Should fail with missing required fields") + } +} +func TestProperty_TokenIsolationBetweenAccounts(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("token isolation between accounts", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 2 accounts to test isolation + if len(accounts) < 2 { + return true // Skip test for insufficient accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "token_isolation_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + + // Prepare accounts with unique tokens and required fields + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + + // Ensure unique tokens for isolation testing + account.Token = fmt.Sprintf("unique-token-%d", i) + account.RefreshToken = fmt.Sprintf("unique-refresh-%d", i) + account.Enabled = true + + // Add account + err := aggregator.AddAccount(account) + if err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + } + + // Load accounts to initialize clients + err = aggregator.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts: 
%v", err) + return false + } + + // Get all clients + clients := aggregator.GetAllClients() + if len(clients) != len(accounts) { + t.Logf("Expected %d clients, got %d", len(accounts), len(clients)) + return false + } + + // Verify token isolation - each client should have its own unique tokens + seenTokens := make(map[string]string) // token -> accountID + seenRefreshTokens := make(map[string]string) // refreshToken -> accountID + + for accountID, client := range clients { + config := client.GetConfig() + + // Check if token is unique + if existingAccountID, exists := seenTokens[config.Token]; exists { + t.Logf("Token collision: accounts %s and %s have same token %s", + accountID, existingAccountID, config.Token) + return false + } + seenTokens[config.Token] = accountID + + // Check if refresh token is unique + if existingAccountID, exists := seenRefreshTokens[config.RefreshToken]; exists { + t.Logf("Refresh token collision: accounts %s and %s have same refresh token %s", + accountID, existingAccountID, config.RefreshToken) + return false + } + seenRefreshTokens[config.RefreshToken] = accountID + + // Verify token matches what we set + expectedToken := fmt.Sprintf("unique-token-%d", findAccountIndex(accounts, accountID)) + expectedRefresh := fmt.Sprintf("unique-refresh-%d", findAccountIndex(accounts, accountID)) + + if config.Token != expectedToken { + t.Logf("Token mismatch for account %s: expected %s, got %s", + accountID, expectedToken, config.Token) + return false + } + + if config.RefreshToken != expectedRefresh { + t.Logf("Refresh token mismatch for account %s: expected %s, got %s", + accountID, expectedRefresh, config.RefreshToken) + return false + } + } + + // Test token updates don't affect other accounts + clientList := make([]*AccountAPIClient, 0, len(clients)) + accountIDs := make([]string, 0, len(clients)) + for id, client := range clients { + clientList = append(clientList, client) + accountIDs = append(accountIDs, id) + } + + if len(clientList) >= 2 { + // Update tokens for first client + newToken := "updated-token-123" + newRefresh := "updated-refresh-123" + clientList[0].UpdateTokens(newToken, newRefresh) + + // Verify first client has new tokens + config1 := clientList[0].GetConfig() + if config1.Token != newToken || config1.RefreshToken != newRefresh { + t.Logf("Token update failed for first client") + return false + } + + // Verify other clients still have original tokens + for i := 1; i < len(clientList); i++ { + config := clientList[i].GetConfig() + expectedToken := fmt.Sprintf("unique-token-%d", findAccountIndex(accounts, accountIDs[i])) + expectedRefresh := fmt.Sprintf("unique-refresh-%d", findAccountIndex(accounts, accountIDs[i])) + + if config.Token != expectedToken { + t.Logf("Token isolation violated: client %d token changed unexpectedly", i) + return false + } + if config.RefreshToken != expectedRefresh { + t.Logf("Refresh token isolation violated: client %d refresh token changed unexpectedly", i) + return false + } + } + } + + return true + }, + gen.SliceOfN(4, genAccount()), // Generate 2-4 accounts for isolation testing + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_AccountDeletionConsistency(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("account deletion consistency", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 1 account to test deletion + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for 
testing + tempDir, err := os.MkdirTemp("", "deletion_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + + // Prepare and add accounts + accountIDs := make([]string, 0, len(accounts)) + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + account.Enabled = true + + // Add account + err := aggregator.AddAccount(account) + if err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + + accountIDs = append(accountIDs, account.ID) + } + + // Load accounts to initialize clients + err = aggregator.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Verify all accounts are present initially + allAccounts := aggregator.GetAccounts() + if len(allAccounts) != len(accounts) { + t.Logf("Initial account count mismatch: expected %d, got %d", len(accounts), len(allAccounts)) + return false + } + + // Verify all clients are present initially + allClients := aggregator.GetAllClients() + if len(allClients) != len(accounts) { + t.Logf("Initial client count mismatch: expected %d, got %d", len(accounts), len(allClients)) + return false + } + + // Delete each account one by one and verify consistency + for i, accountID := range accountIDs { + // Delete the account + err := aggregator.RemoveAccount(accountID) + if err != nil { + t.Logf("Failed to remove account %s: %v", accountID, err) + return false + } + + // Verify account is no longer in accounts list + remainingAccounts := aggregator.GetAccounts() + if _, exists := remainingAccounts[accountID]; exists { + t.Logf("Account %s still exists in accounts list after deletion", accountID) + return false + } + + // Verify account count decreased + expectedCount := len(accounts) - (i + 1) + if len(remainingAccounts) != expectedCount { + t.Logf("Account count after deletion %d: expected %d, got %d", + i+1, expectedCount, len(remainingAccounts)) + return false + } + + // Verify client is no longer in client pool + remainingClients := aggregator.GetAllClients() + if _, exists := remainingClients[accountID]; exists { + t.Logf("Client for account %s still exists after deletion", accountID) + return false + } + + // Verify client count decreased + if len(remainingClients) != expectedCount { + t.Logf("Client count after deletion %d: expected %d, got %d", + i+1, expectedCount, len(remainingClients)) + return false + } + + // Verify GetClient returns error for deleted account + _, err = aggregator.GetClient(accountID) + if err == nil { + t.Logf("GetClient should return error for deleted account %s", accountID) + return false + } + + // Verify GetAccount returns error for deleted account + _, err = aggregator.GetAccount(accountID) + if err == nil { + t.Logf("GetAccount should return error for deleted account %s", accountID) + return false + } + + // Verify remaining accounts are still accessible + for _, remainingID := range accountIDs[i+1:] { + _, err := aggregator.GetAccount(remainingID) + if err != nil { + t.Logf("Remaining account %s should still be accessible: %v", remainingID, err) + return false + } + + _, err = aggregator.GetClient(remainingID) + if err != nil { + t.Logf("Remaining 
client %s should still be accessible: %v", remainingID, err) + return false + } + } + } + + // After deleting all accounts, verify empty state + finalAccounts := aggregator.GetAccounts() + if len(finalAccounts) != 0 { + t.Logf("Expected no accounts after deleting all, got %d", len(finalAccounts)) + return false + } + + finalClients := aggregator.GetAllClients() + if len(finalClients) != 0 { + t.Logf("Expected no clients after deleting all, got %d", len(finalClients)) + return false + } + + // Verify GetPrimaryAccount returns error when no accounts exist + _, err = aggregator.GetPrimaryAccount() + if err == nil { + t.Logf("GetPrimaryAccount should return error when no accounts exist") + return false + } + + return true + }, + gen.SliceOfN(3, genAccount()), // Generate 1-3 accounts for deletion testing + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +// Helper function to find account index by ID +func findAccountIndex(accounts []AccountConfig, accountID string) int { + for i, account := range accounts { + if account.ID == accountID { + return i + } + } + return -1 +} +// Unit tests for MultiAccountAggregator +func TestMultiAccountAggregator_Basic(t *testing.T) { + tempDir, err := os.MkdirTemp("", "aggregator_test_*") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + + // Test loading empty accounts + err = aggregator.LoadAccounts() + if err != nil { + t.Fatalf("Failed to load empty accounts: %v", err) + } + + accounts := aggregator.GetAccounts() + if len(accounts) != 0 { + t.Fatalf("Expected 0 accounts, got %d", len(accounts)) + } + + // Test adding account + account := &AccountConfig{ + Login: 79123456789, + UUID: "test-uuid", + Name: "Test Account", + Enabled: true, + Token: "test-token", + RefreshToken: "test-refresh", + Operator: 0, + } + + err = aggregator.AddAccount(account) + if err != nil { + t.Fatalf("Failed to add account: %v", err) + } + + // Verify account was added + accounts = aggregator.GetAccounts() + if len(accounts) != 1 { + t.Fatalf("Expected 1 account, got %d", len(accounts)) + } + + // Test getting specific account + retrievedAccount, err := aggregator.GetAccount(account.ID) + if err != nil { + t.Fatalf("Failed to get account: %v", err) + } + + if retrievedAccount.Login != account.Login { + t.Fatalf("Account data mismatch") + } + + // Test getting client + client, err := aggregator.GetClient(account.ID) + if err != nil { + t.Fatalf("Failed to get client: %v", err) + } + + if client.GetAccountID() != account.ID { + t.Fatalf("Client account ID mismatch") + } + + // Test getting primary account + primaryAccount, err := aggregator.GetPrimaryAccount() + if err != nil { + t.Fatalf("Failed to get primary account: %v", err) + } + + if primaryAccount.ID != account.ID { + t.Fatalf("Primary account mismatch") + } +} + +func TestMultiAccountAggregator_EnableDisable(t *testing.T) { + tempDir, err := os.MkdirTemp("", "enable_disable_test_*") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + + // Add account + account := &AccountConfig{ + Login: 79123456789, + UUID: "test-uuid", + Name: "Test Account", + Enabled: 
true, + Token: "test-token", + RefreshToken: "test-refresh", + Operator: 0, + } + + err = aggregator.AddAccount(account) + if err != nil { + t.Fatalf("Failed to add account: %v", err) + } + + // Verify client exists + _, err = aggregator.GetClient(account.ID) + if err != nil { + t.Fatalf("Client should exist for enabled account: %v", err) + } + + // Disable account + err = aggregator.DisableAccount(account.ID) + if err != nil { + t.Fatalf("Failed to disable account: %v", err) + } + + // Verify client is removed + _, err = aggregator.GetClient(account.ID) + if err == nil { + t.Fatalf("Client should not exist for disabled account") + } + + // Verify account is still in storage but disabled + retrievedAccount, err := aggregator.GetAccount(account.ID) + if err != nil { + t.Fatalf("Account should still exist when disabled: %v", err) + } + + if retrievedAccount.Enabled { + t.Fatalf("Account should be disabled") + } + + // Re-enable account + err = aggregator.EnableAccount(account.ID) + if err != nil { + t.Fatalf("Failed to enable account: %v", err) + } + + // Verify client is recreated + _, err = aggregator.GetClient(account.ID) + if err != nil { + t.Fatalf("Client should exist for re-enabled account: %v", err) + } + + // Verify account is enabled + retrievedAccount, err = aggregator.GetAccount(account.ID) + if err != nil { + t.Fatalf("Failed to get account: %v", err) + } + + if !retrievedAccount.Enabled { + t.Fatalf("Account should be enabled") + } +} + +func TestAccountAPIClient_Basic(t *testing.T) { + account := &AccountConfig{ + ID: "test-id", + Login: 79123456789, + UUID: "test-uuid", + Name: "Test Account", + Enabled: true, + Token: "test-token", + RefreshToken: "test-refresh", + Operator: 0, + } + + factory := &MockHandlerFactory{} + var templateFs embed.FS + client := NewAccountAPIClient(account, factory, templateFs) + + // Test basic getters + if client.GetAccountID() != account.ID { + t.Fatalf("Account ID mismatch") + } + + if client.GetAccountName() != account.Name { + t.Fatalf("Account name mismatch") + } + + if !client.IsEnabled() { + t.Fatalf("Account should be enabled") + } + + // Test config retrieval + config := client.GetConfig() + if config.Token != account.Token { + t.Fatalf("Token mismatch") + } + + // Test token update + newToken := "new-token" + newRefresh := "new-refresh" + client.UpdateTokens(newToken, newRefresh) + + updatedConfig := client.GetConfig() + if updatedConfig.Token != newToken { + t.Fatalf("Token not updated") + } + + if updatedConfig.RefreshToken != newRefresh { + t.Fatalf("Refresh token not updated") + } + + // Note: Handler config verification removed since APIHandler interface doesn't expose Config +} + +func TestAccountClientPool_Basic(t *testing.T) { + pool := NewAccountClientPool() + + // Test empty pool + clients := pool.GetAllClients() + if len(clients) != 0 { + t.Fatalf("Expected empty pool, got %d clients", len(clients)) + } + + // Test getting non-existent client + _, err := pool.GetClient("non-existent") + if err == nil { + t.Fatalf("Should return error for non-existent client") + } + + // Add client + account := &AccountConfig{ + ID: "test-id", + Login: 79123456789, + UUID: "test-uuid", + Name: "Test Account", + Enabled: true, + Token: "test-token", + RefreshToken: "test-refresh", + Operator: 0, + } + + factory := &MockHandlerFactory{} + var templateFs embed.FS + client := NewAccountAPIClient(account, factory, templateFs) + pool.AddClient(account.ID, client) + + // Test getting client + retrievedClient, err := pool.GetClient(account.ID) + if 
err != nil { + t.Fatalf("Failed to get client: %v", err) + } + + if retrievedClient.GetAccountID() != account.ID { + t.Fatalf("Client ID mismatch") + } + + // Test getting all clients + clients = pool.GetAllClients() + if len(clients) != 1 { + t.Fatalf("Expected 1 client, got %d", len(clients)) + } + + // Remove client + pool.RemoveClient(account.ID) + + // Verify client is removed + _, err = pool.GetClient(account.ID) + if err == nil { + t.Fatalf("Client should be removed") + } + + clients = pool.GetAllClients() + if len(clients) != 0 { + t.Fatalf("Expected empty pool after removal, got %d clients", len(clients)) + } +} +func TestProperty_LegacyAPIPrimaryAccountCompatibility(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("legacy API primary account compatibility", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 1 account to test legacy compatibility + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "legacy_compat_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + + // Prepare and add accounts + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + account.Enabled = true + account.Token = fmt.Sprintf("token-%d", i) + account.RefreshToken = fmt.Sprintf("refresh-%d", i) + + // Add account + err := aggregator.AddAccount(account) + if err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + } + + // Load accounts to initialize clients + err = aggregator.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Create legacy adapter + adapter := NewLegacyConfigAdapter(aggregator) + + // Get the actual primary account from the system + actualPrimaryAccount, err := aggregator.GetPrimaryAccount() + if err != nil { + t.Logf("Failed to get actual primary account: %v", err) + return false + } + + // Test GetPrimaryConfig returns data in legacy format + legacyConfig, err := adapter.GetPrimaryConfig() + if err != nil { + t.Logf("Failed to get primary config: %v", err) + return false + } + + // Verify legacy config matches actual primary account data + if legacyConfig.Token != actualPrimaryAccount.Token { + t.Logf("Legacy token mismatch: expected %s, got %s", + actualPrimaryAccount.Token, legacyConfig.Token) + return false + } + + if legacyConfig.RefreshToken != actualPrimaryAccount.RefreshToken { + t.Logf("Legacy refresh token mismatch: expected %s, got %s", + actualPrimaryAccount.RefreshToken, legacyConfig.RefreshToken) + return false + } + + if legacyConfig.Login != actualPrimaryAccount.Login { + t.Logf("Legacy login mismatch: expected %d, got %d", + actualPrimaryAccount.Login, legacyConfig.Login) + return false + } + + if legacyConfig.Operator != actualPrimaryAccount.Operator { + t.Logf("Legacy operator mismatch: expected %d, got %d", + actualPrimaryAccount.Operator, legacyConfig.Operator) + return false + } + + if legacyConfig.UUID != actualPrimaryAccount.UUID { + t.Logf("Legacy UUID mismatch: expected %s, got %s", + actualPrimaryAccount.UUID, 
legacyConfig.UUID) + return false + } + + // Verify legacy config has default port for backward compatibility + if legacyConfig.Port != 18000 { + t.Logf("Legacy port should be 18000 for compatibility, got %d", legacyConfig.Port) + return false + } + + // Test that legacy config is a proper config.Config type + // This ensures existing handlers can use it without modification + if legacyConfig == nil { + t.Logf("Legacy config should not be nil") + return false + } + + // Test GetAccountConfig for specific account + if len(accounts) > 1 { + // Find a non-primary account to test specific account retrieval + var testAccount *AccountConfig + for i := range accounts { + account := &accounts[i] + if account.ID != actualPrimaryAccount.ID { + testAccount = account + break + } + } + + if testAccount != nil { + specificLegacyConfig, err := adapter.GetAccountConfig(testAccount.ID) + if err != nil { + t.Logf("Failed to get specific account config: %v", err) + return false + } + + // Verify specific account data + if specificLegacyConfig.Token != testAccount.Token { + t.Logf("Specific account token mismatch: expected %s, got %s", + testAccount.Token, specificLegacyConfig.Token) + return false + } + + if specificLegacyConfig.Login != testAccount.Login { + t.Logf("Specific account login mismatch: expected %d, got %d", + testAccount.Login, specificLegacyConfig.Login) + return false + } + } + } + + // Test HasPrimaryAccount + if !adapter.HasPrimaryAccount() { + t.Logf("Should have primary account when accounts exist") + return false + } + + // Test GetPrimaryAccountID + primaryID, err := adapter.GetPrimaryAccountID() + if err != nil { + t.Logf("Failed to get primary account ID: %v", err) + return false + } + + if primaryID != actualPrimaryAccount.ID { + t.Logf("Primary account ID mismatch: expected %s, got %s", + actualPrimaryAccount.ID, primaryID) + return false + } + + // Test token updates through legacy adapter + newToken := "updated-legacy-token" + newRefresh := "updated-legacy-refresh" + + err = adapter.UpdatePrimaryAccountTokens(newToken, newRefresh) + if err != nil { + t.Logf("Failed to update primary account tokens: %v", err) + return false + } + + // Reload accounts to ensure we get fresh data + err = aggregator.LoadAccounts() + if err != nil { + t.Logf("Failed to reload accounts after token update: %v", err) + return false + } + + // Verify tokens were updated + updatedLegacyConfig, err := adapter.GetPrimaryConfig() + if err != nil { + t.Logf("Failed to get updated primary config: %v", err) + return false + } + + if updatedLegacyConfig.Token != newToken { + t.Logf("Token update failed: expected %s, got %s", newToken, updatedLegacyConfig.Token) + return false + } + + if updatedLegacyConfig.RefreshToken != newRefresh { + t.Logf("Refresh token update failed: expected %s, got %s", newRefresh, updatedLegacyConfig.RefreshToken) + return false + } + + // Verify the update persisted in the underlying account + updatedAccount, err := aggregator.GetAccount(actualPrimaryAccount.ID) + if err != nil { + t.Logf("Failed to get updated account: %v", err) + return false + } + + if updatedAccount.Token != newToken { + t.Logf("Account token not updated: expected %s, got %s", newToken, updatedAccount.Token) + return false + } + + if updatedAccount.RefreshToken != newRefresh { + t.Logf("Account refresh token not updated: expected %s, got %s", newRefresh, updatedAccount.RefreshToken) + return false + } + + // Test behavior when no accounts exist + // Remove all accounts + allAccounts := aggregator.GetAccounts() + for 
accountID := range allAccounts { + err := aggregator.RemoveAccount(accountID) + if err != nil { + t.Logf("Failed to remove account %s: %v", accountID, err) + return false + } + } + + // Verify HasPrimaryAccount returns false + if adapter.HasPrimaryAccount() { + t.Logf("Should not have primary account when no accounts exist") + return false + } + + // Verify GetPrimaryConfig returns error + _, err = adapter.GetPrimaryConfig() + if err == nil { + t.Logf("GetPrimaryConfig should return error when no accounts exist") + return false + } + + // Verify GetPrimaryAccountID returns error + _, err = adapter.GetPrimaryAccountID() + if err == nil { + t.Logf("GetPrimaryAccountID should return error when no accounts exist") + return false + } + + return true + }, + gen.SliceOfN(3, genAccount()), // Generate 1-3 accounts for legacy compatibility testing + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_AccountParameterAcceptance(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("account parameter acceptance", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 1 account to test parameter acceptance + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "param_acceptance_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + + // Prepare and add accounts + accountIDs := make([]string, 0, len(accounts)) + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + account.Enabled = true + account.Token = fmt.Sprintf("token-%d", i) + account.RefreshToken = fmt.Sprintf("refresh-%d", i) + + // Add account + err := aggregator.AddAccount(account) + if err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + + accountIDs = append(accountIDs, account.ID) + } + + // Load accounts to initialize clients + err = aggregator.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Test 1: Valid account ID parameter should be accepted + for _, accountID := range accountIDs { + // Test GetAccount accepts valid account ID + account, err := aggregator.GetAccount(accountID) + if err != nil { + t.Logf("Valid account ID %s should be accepted by GetAccount: %v", accountID, err) + return false + } + if account == nil { + t.Logf("GetAccount should return account for valid ID %s", accountID) + return false + } + if account.ID != accountID { + t.Logf("Returned account ID mismatch: expected %s, got %s", accountID, account.ID) + return false + } + + // Test GetClient accepts valid account ID + client, err := aggregator.GetClient(accountID) + if err != nil { + t.Logf("Valid account ID %s should be accepted by GetClient: %v", accountID, err) + return false + } + if client == nil { + t.Logf("GetClient should return client for valid ID %s", accountID) + return false + } + if client.GetAccountID() != accountID { + t.Logf("Returned client account ID mismatch: expected %s, got %s", accountID, client.GetAccountID()) + return false + } + + // Test EnableAccount accepts valid account ID 
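+ // The accounts were added with Enabled=true, so this call also implicitly
+ // requires EnableAccount to succeed for an already-enabled account rather
+ // than returning an error.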
+ err = aggregator.EnableAccount(accountID) + if err != nil { + t.Logf("Valid account ID %s should be accepted by EnableAccount: %v", accountID, err) + return false + } + + // Test DisableAccount accepts valid account ID + err = aggregator.DisableAccount(accountID) + if err != nil { + t.Logf("Valid account ID %s should be accepted by DisableAccount: %v", accountID, err) + return false + } + + // Re-enable for further tests + err = aggregator.EnableAccount(accountID) + if err != nil { + t.Logf("Failed to re-enable account %s: %v", accountID, err) + return false + } + + // Test UpdateAccountTokens accepts valid account ID + newToken := fmt.Sprintf("new-token-%s", accountID) + newRefresh := fmt.Sprintf("new-refresh-%s", accountID) + err = aggregator.UpdateAccountTokens(accountID, newToken, newRefresh) + if err != nil { + t.Logf("Valid account ID %s should be accepted by UpdateAccountTokens: %v", accountID, err) + return false + } + + // Verify tokens were updated + updatedAccount, err := aggregator.GetAccount(accountID) + if err != nil { + t.Logf("Failed to get updated account: %v", err) + return false + } + if updatedAccount.Token != newToken { + t.Logf("Token update failed: expected %s, got %s", newToken, updatedAccount.Token) + return false + } + if updatedAccount.RefreshToken != newRefresh { + t.Logf("Refresh token update failed: expected %s, got %s", newRefresh, updatedAccount.RefreshToken) + return false + } + } + + // Test 2: Invalid account ID parameters should be properly rejected + invalidAccountIDs := []string{ + "non-existent-id", + "", + "invalid-uuid-format", + "12345", + "test-invalid", + } + + for _, invalidID := range invalidAccountIDs { + // Test GetAccount rejects invalid account ID + _, err := aggregator.GetAccount(invalidID) + if err == nil { + t.Logf("Invalid account ID %s should be rejected by GetAccount", invalidID) + return false + } + + // Test GetClient rejects invalid account ID + _, err = aggregator.GetClient(invalidID) + if err == nil { + t.Logf("Invalid account ID %s should be rejected by GetClient", invalidID) + return false + } + + // Test EnableAccount rejects invalid account ID + err = aggregator.EnableAccount(invalidID) + if err == nil { + t.Logf("Invalid account ID %s should be rejected by EnableAccount", invalidID) + return false + } + + // Test DisableAccount rejects invalid account ID + err = aggregator.DisableAccount(invalidID) + if err == nil { + t.Logf("Invalid account ID %s should be rejected by DisableAccount", invalidID) + return false + } + + // Test UpdateAccountTokens rejects invalid account ID + err = aggregator.UpdateAccountTokens(invalidID, "token", "refresh") + if err == nil { + t.Logf("Invalid account ID %s should be rejected by UpdateAccountTokens", invalidID) + return false + } + + // Test RemoveAccount rejects invalid account ID + err = aggregator.RemoveAccount(invalidID) + if err == nil { + t.Logf("Invalid account ID %s should be rejected by RemoveAccount", invalidID) + return false + } + } + + // Test 3: Empty or nil parameters should be handled gracefully + // Test UpdateAccountTokens with empty tokens + if len(accountIDs) > 0 { + firstAccountID := accountIDs[0] + + // Empty token should be accepted (might be valid for logout scenarios) + err = aggregator.UpdateAccountTokens(firstAccountID, "", "") + if err != nil { + t.Logf("Empty tokens should be accepted by UpdateAccountTokens: %v", err) + return false + } + + // Verify empty tokens were set + account, err := aggregator.GetAccount(firstAccountID) + if err != nil { + t.Logf("Failed 
to get account after empty token update: %v", err) + return false + } + if account.Token != "" || account.RefreshToken != "" { + t.Logf("Empty tokens should be set correctly") + return false + } + } + + return true + }, + gen.SliceOfN(3, genAccount()), // Generate 1-3 accounts for parameter testing + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_AccountSpecificOperationExecution(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("account-specific operation execution", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 2 accounts to test account-specific operations + if len(accounts) < 2 { + return true // Skip test for insufficient accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "operation_execution_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + + // Prepare and add accounts with unique identifiable data + accountIDs := make([]string, 0, len(accounts)) + for i := range accounts { + account := &accounts[i] + + // Ensure required fields with unique values + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + account.Enabled = true + account.Token = fmt.Sprintf("unique-token-%d", i) + account.RefreshToken = fmt.Sprintf("unique-refresh-%d", i) + account.Name = fmt.Sprintf("Account-%d", i) + + // Add account + err := aggregator.AddAccount(account) + if err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + + accountIDs = append(accountIDs, account.ID) + } + + // Load accounts to initialize clients + err = aggregator.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Test 1: Operations on specific accounts should only affect those accounts + firstAccountID := accountIDs[0] + secondAccountID := accountIDs[1] + + // Get initial state of both accounts + secondAccount, err := aggregator.GetAccount(secondAccountID) + if err != nil { + t.Logf("Failed to get second account: %v", err) + return false + } + + // Test token update on first account only + newToken := "updated-token-first" + newRefresh := "updated-refresh-first" + err = aggregator.UpdateAccountTokens(firstAccountID, newToken, newRefresh) + if err != nil { + t.Logf("Failed to update tokens for first account: %v", err) + return false + } + + // Verify first account was updated + updatedFirstAccount, err := aggregator.GetAccount(firstAccountID) + if err != nil { + t.Logf("Failed to get updated first account: %v", err) + return false + } + if updatedFirstAccount.Token != newToken { + t.Logf("First account token not updated: expected %s, got %s", newToken, updatedFirstAccount.Token) + return false + } + if updatedFirstAccount.RefreshToken != newRefresh { + t.Logf("First account refresh token not updated: expected %s, got %s", newRefresh, updatedFirstAccount.RefreshToken) + return false + } + + // Verify second account was NOT affected + unchangedSecondAccount, err := aggregator.GetAccount(secondAccountID) + if err != nil { + t.Logf("Failed to get second account after first account update: %v", err) + return false + } + if unchangedSecondAccount.Token != secondAccount.Token { + t.Logf("Second account 
token should not change: expected %s, got %s", + secondAccount.Token, unchangedSecondAccount.Token) + return false + } + if unchangedSecondAccount.RefreshToken != secondAccount.RefreshToken { + t.Logf("Second account refresh token should not change: expected %s, got %s", + secondAccount.RefreshToken, unchangedSecondAccount.RefreshToken) + return false + } + + // Test 2: Enable/Disable operations should be account-specific + // Disable first account + err = aggregator.DisableAccount(firstAccountID) + if err != nil { + t.Logf("Failed to disable first account: %v", err) + return false + } + + // Verify first account is disabled + disabledFirstAccount, err := aggregator.GetAccount(firstAccountID) + if err != nil { + t.Logf("Failed to get disabled first account: %v", err) + return false + } + if disabledFirstAccount.Enabled { + t.Logf("First account should be disabled") + return false + } + + // Verify second account is still enabled + stillEnabledSecondAccount, err := aggregator.GetAccount(secondAccountID) + if err != nil { + t.Logf("Failed to get second account after first account disable: %v", err) + return false + } + if !stillEnabledSecondAccount.Enabled { + t.Logf("Second account should still be enabled") + return false + } + + // Verify first account client is removed but second account client still exists + _, err = aggregator.GetClient(firstAccountID) + if err == nil { + t.Logf("First account client should be removed when disabled") + return false + } + + _, err = aggregator.GetClient(secondAccountID) + if err != nil { + t.Logf("Second account client should still exist: %v", err) + return false + } + + // Test 3: Account removal should only affect the specified account + // Remove first account + err = aggregator.RemoveAccount(firstAccountID) + if err != nil { + t.Logf("Failed to remove first account: %v", err) + return false + } + + // Verify first account is completely removed + _, err = aggregator.GetAccount(firstAccountID) + if err == nil { + t.Logf("First account should be removed") + return false + } + + // Verify second account still exists and is accessible + finalSecondAccount, err := aggregator.GetAccount(secondAccountID) + if err != nil { + t.Logf("Second account should still exist after first account removal: %v", err) + return false + } + if finalSecondAccount.ID != secondAccountID { + t.Logf("Second account ID should be unchanged: expected %s, got %s", + secondAccountID, finalSecondAccount.ID) + return false + } + + // Verify account count decreased by exactly one + remainingAccounts := aggregator.GetAccounts() + expectedCount := len(accounts) - 1 + if len(remainingAccounts) != expectedCount { + t.Logf("Account count after removal: expected %d, got %d", expectedCount, len(remainingAccounts)) + return false + } + + // Test 4: Operations should work correctly with remaining accounts + // Test that we can still perform operations on remaining accounts + for _, accountID := range accountIDs[1:] { // Skip first account (removed) + // Test GetAccount still works + _, err := aggregator.GetAccount(accountID) + if err != nil { + t.Logf("Remaining account %s should be accessible: %v", accountID, err) + return false + } + + // Test token update still works + testToken := fmt.Sprintf("test-token-%s", accountID) + testRefresh := fmt.Sprintf("test-refresh-%s", accountID) + err = aggregator.UpdateAccountTokens(accountID, testToken, testRefresh) + if err != nil { + t.Logf("Token update should work for remaining account %s: %v", accountID, err) + return false + } + + // Verify update 
worked + updatedAccount, err := aggregator.GetAccount(accountID) + if err != nil { + t.Logf("Failed to get updated remaining account %s: %v", accountID, err) + return false + } + if updatedAccount.Token != testToken { + t.Logf("Token update failed for remaining account %s: expected %s, got %s", + accountID, testToken, updatedAccount.Token) + return false + } + + // Test enable/disable still works + err = aggregator.DisableAccount(accountID) + if err != nil { + t.Logf("Disable should work for remaining account %s: %v", accountID, err) + return false + } + + err = aggregator.EnableAccount(accountID) + if err != nil { + t.Logf("Enable should work for remaining account %s: %v", accountID, err) + return false + } + } + + return true + }, + gen.SliceOfN(4, genAccount()), // Generate 2-4 accounts for operation testing + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_ImmediatePersistenceOfChanges(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("immediate persistence of changes", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 1 account to test persistence + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "persistence_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + + // Prepare and add accounts + accountMap := make(map[string]*AccountConfig) + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + account.Enabled = true + account.Token = fmt.Sprintf("token-%d", i) + account.RefreshToken = fmt.Sprintf("refresh-%d", i) + + // Add account + err := storage.AddAccount(account) + if err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + + accountMap[account.ID] = account + } + + // Test 1: Verify accounts are immediately readable after addition + loadedAccounts, err := storage.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts immediately after addition: %v", err) + return false + } + + if len(loadedAccounts) != len(accountMap) { + t.Logf("Account count mismatch after addition: expected %d, got %d", len(accountMap), len(loadedAccounts)) + return false + } + + // Verify all added accounts are present and correct + for id, originalAccount := range accountMap { + loadedAccount, exists := loadedAccounts[id] + if !exists { + t.Logf("Account %s not found after immediate load", id) + return false + } + + if loadedAccount.Token != originalAccount.Token { + t.Logf("Token mismatch for account %s: expected %s, got %s", + id, originalAccount.Token, loadedAccount.Token) + return false + } + + if loadedAccount.Login != originalAccount.Login { + t.Logf("Login mismatch for account %s: expected %d, got %d", + id, originalAccount.Login, loadedAccount.Login) + return false + } + } + + // Test 2: Modify accounts and verify immediate persistence + modifiedAccounts := make(map[string]*AccountConfig) + for id, account := range loadedAccounts { + // Create modified copy + modified := *account + modified.Token = "modified-" + account.Token + modified.RefreshToken = "modified-" + account.RefreshToken + modified.Name = "Modified " + account.Name + modifiedAccounts[id] = &modified + } + + // Save modifications + 
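// For orientation: the AccountStorage surface that these persistence, backup and
// retry properties rely on is defined elsewhere in this patch; the shape assumed
// from its usage in this test file is roughly:
//
//	NewAccountStorage(dir string) *AccountStorage
//	(*AccountStorage) AddAccount(a *AccountConfig) error
//	(*AccountStorage) SaveAccounts(accounts map[string]*AccountConfig) error
//	(*AccountStorage) LoadAccounts() (map[string]*AccountConfig, error)
//	(*AccountStorage) RemoveAccount(id string) error
//	(*AccountStorage) CreateBackup() error
//	(*AccountStorage) RestoreFromBackup() error
//	(*AccountStorage) writeWithRetry(path string, data []byte) error
//
// These signatures are inferred from the calls made in this file and are not
// authoritative; the real declarations are part of the same change.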
err = storage.SaveAccounts(modifiedAccounts) + if err != nil { + t.Logf("Failed to save modified accounts: %v", err) + return false + } + + // Immediately load and verify changes are persisted + reloadedAccounts, err := storage.LoadAccounts() + if err != nil { + t.Logf("Failed to reload accounts after modification: %v", err) + return false + } + + if len(reloadedAccounts) != len(modifiedAccounts) { + t.Logf("Account count mismatch after modification: expected %d, got %d", + len(modifiedAccounts), len(reloadedAccounts)) + return false + } + + // Verify all modifications are persisted + for id, modifiedAccount := range modifiedAccounts { + reloadedAccount, exists := reloadedAccounts[id] + if !exists { + t.Logf("Modified account %s not found after reload", id) + return false + } + + if reloadedAccount.Token != modifiedAccount.Token { + t.Logf("Modified token not persisted for account %s: expected %s, got %s", + id, modifiedAccount.Token, reloadedAccount.Token) + return false + } + + if reloadedAccount.RefreshToken != modifiedAccount.RefreshToken { + t.Logf("Modified refresh token not persisted for account %s: expected %s, got %s", + id, modifiedAccount.RefreshToken, reloadedAccount.RefreshToken) + return false + } + + if reloadedAccount.Name != modifiedAccount.Name { + t.Logf("Modified name not persisted for account %s: expected %s, got %s", + id, modifiedAccount.Name, reloadedAccount.Name) + return false + } + } + + // Test 3: Remove accounts and verify immediate persistence + if len(accountMap) > 1 { + // Remove first account + var firstAccountID string + for id := range accountMap { + firstAccountID = id + break + } + + err = storage.RemoveAccount(firstAccountID) + if err != nil { + t.Logf("Failed to remove account %s: %v", firstAccountID, err) + return false + } + + // Immediately verify removal is persisted + afterRemovalAccounts, err := storage.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts after removal: %v", err) + return false + } + + if len(afterRemovalAccounts) != len(accountMap)-1 { + t.Logf("Account count after removal: expected %d, got %d", + len(accountMap)-1, len(afterRemovalAccounts)) + return false + } + + // Verify removed account is not present + if _, exists := afterRemovalAccounts[firstAccountID]; exists { + t.Logf("Removed account %s still exists after immediate load", firstAccountID) + return false + } + + // Verify remaining accounts are still present + for id := range accountMap { + if id == firstAccountID { + continue // Skip removed account + } + if _, exists := afterRemovalAccounts[id]; !exists { + t.Logf("Remaining account %s not found after removal", id) + return false + } + } + } + + // Test 4: Test persistence across multiple storage instances + // Create a new storage instance pointing to the same directory + storage2 := NewAccountStorage(tempDir) + + // Load accounts with new storage instance + crossInstanceAccounts, err := storage2.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts with new storage instance: %v", err) + return false + } + + // Verify data is consistent across storage instances + currentAccounts, err := storage.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts with original storage: %v", err) + return false + } + + if len(crossInstanceAccounts) != len(currentAccounts) { + t.Logf("Account count mismatch across storage instances: expected %d, got %d", + len(currentAccounts), len(crossInstanceAccounts)) + return false + } + + for id, currentAccount := range currentAccounts { + 
crossInstanceAccount, exists := crossInstanceAccounts[id] + if !exists { + t.Logf("Account %s not found in cross-instance load", id) + return false + } + + if crossInstanceAccount.Token != currentAccount.Token { + t.Logf("Token mismatch across instances for account %s: expected %s, got %s", + id, currentAccount.Token, crossInstanceAccount.Token) + return false + } + + if crossInstanceAccount.Login != currentAccount.Login { + t.Logf("Login mismatch across instances for account %s: expected %d, got %d", + id, currentAccount.Login, crossInstanceAccount.Login) + return false + } + } + + return true + }, + gen.SliceOfN(3, genAccount()), // Generate 1-3 accounts for persistence testing + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_BackupRecoveryOnCorruption(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("backup recovery on corruption", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 1 account to test backup recovery + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "backup_recovery_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + + // Prepare and add accounts to create initial valid state + accountMap := make(map[string]*AccountConfig) + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + account.Enabled = true + account.Token = fmt.Sprintf("token-%d", i) + account.RefreshToken = fmt.Sprintf("refresh-%d", i) + + // Add account + err := storage.AddAccount(account) + if err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + + accountMap[account.ID] = account + } + + // Create a backup of the current valid configuration + err = storage.CreateBackup() + if err != nil { + t.Logf("Failed to create backup: %v", err) + return false + } + + // Verify backup was created + backupFiles, err := filepath.Glob(filepath.Join(storage.backupDir, "accounts_*.json")) + if err != nil || len(backupFiles) == 0 { + t.Logf("Backup file not created") + return false + } + + // Test 1: Corrupt the main configuration file with invalid JSON + configFile := storage.configFile + corruptData := []byte(`{"accounts": {invalid json syntax}`) + err = os.WriteFile(configFile, corruptData, 0644) + if err != nil { + t.Logf("Failed to write corrupt data: %v", err) + return false + } + + // Verify loading fails due to corruption + _, err = storage.LoadAccounts() + if err == nil { + t.Logf("Loading should fail with corrupted config") + return false + } + + // Attempt recovery from backup + err = storage.RestoreFromBackup() + if err != nil { + t.Logf("Failed to restore from backup: %v", err) + return false + } + + // Verify recovery was successful - should be able to load accounts + recoveredAccounts, err := storage.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts after recovery: %v", err) + return false + } + + // Verify all original accounts are recovered + if len(recoveredAccounts) != len(accountMap) { + t.Logf("Account count after recovery: expected %d, got %d", + len(accountMap), len(recoveredAccounts)) + return false + } + + for id, originalAccount := range accountMap { + recoveredAccount, exists := 
recoveredAccounts[id] + if !exists { + t.Logf("Account %s not found after recovery", id) + return false + } + + if recoveredAccount.Token != originalAccount.Token { + t.Logf("Token mismatch after recovery for account %s: expected %s, got %s", + id, originalAccount.Token, recoveredAccount.Token) + return false + } + + if recoveredAccount.Login != originalAccount.Login { + t.Logf("Login mismatch after recovery for account %s: expected %d, got %d", + id, originalAccount.Login, recoveredAccount.Login) + return false + } + + if recoveredAccount.UUID != originalAccount.UUID { + t.Logf("UUID mismatch after recovery for account %s: expected %s, got %s", + id, originalAccount.UUID, recoveredAccount.UUID) + return false + } + } + + // Test 2: Corrupt with invalid account structure + invalidStructureData := []byte(`{"accounts": {"test": null}, "version": "2.0"}`) + err = os.WriteFile(configFile, invalidStructureData, 0644) + if err != nil { + t.Logf("Failed to write invalid structure data: %v", err) + return false + } + + // Verify loading fails + _, err = storage.LoadAccounts() + if err == nil { + t.Logf("Loading should fail with invalid structure") + return false + } + + // Restore from backup again + err = storage.RestoreFromBackup() + if err != nil { + t.Logf("Failed to restore from backup (second time): %v", err) + return false + } + + // Verify recovery works again + secondRecoveredAccounts, err := storage.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts after second recovery: %v", err) + return false + } + + if len(secondRecoveredAccounts) != len(accountMap) { + t.Logf("Account count after second recovery: expected %d, got %d", + len(accountMap), len(secondRecoveredAccounts)) + return false + } + + // Test 3: Test recovery when no backup exists + // Remove all backup files + backupFiles, _ = filepath.Glob(filepath.Join(storage.backupDir, "accounts_*.json")) + for _, backupFile := range backupFiles { + os.Remove(backupFile) + } + + // Corrupt config again + err = os.WriteFile(configFile, corruptData, 0644) + if err != nil { + t.Logf("Failed to write corrupt data for no-backup test: %v", err) + return false + } + + // Attempt recovery should fail when no backup exists + err = storage.RestoreFromBackup() + if err == nil { + t.Logf("Recovery should fail when no backup exists") + return false + } + + // Verify error message indicates no backup found + if !contains(err.Error(), "no backup files found") { + t.Logf("Error should indicate no backup found: %s", err.Error()) + return false + } + + return true + }, + gen.SliceOfN(3, genAccount()), // Generate 1-3 accounts for backup recovery testing + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_WriteOperationRetryOnFailure(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("write operation retry on failure", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 1 account to test write retry + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "write_retry_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + + // Prepare accounts + accountMap := make(map[string]*AccountConfig) + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if 
account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + account.Enabled = true + account.Token = fmt.Sprintf("token-%d", i) + account.RefreshToken = fmt.Sprintf("refresh-%d", i) + + accountMap[account.ID] = account + } + + // Test 1: Normal write should succeed without retry + err = storage.SaveAccounts(accountMap) + if err != nil { + t.Logf("Normal write should succeed: %v", err) + return false + } + + // Verify accounts were saved + loadedAccounts, err := storage.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts after normal write: %v", err) + return false + } + + if len(loadedAccounts) != len(accountMap) { + t.Logf("Account count mismatch after normal write: expected %d, got %d", + len(accountMap), len(loadedAccounts)) + return false + } + + // Test 2: Test writeWithRetry method directly with valid data + testData := []byte(`{"test": "data"}`) + testFile := filepath.Join(tempDir, "test_retry.json") + + err = storage.writeWithRetry(testFile, testData) + if err != nil { + t.Logf("writeWithRetry should succeed with valid data: %v", err) + return false + } + + // Verify file was written correctly + readData, err := os.ReadFile(testFile) + if err != nil { + t.Logf("Failed to read test file: %v", err) + return false + } + + if string(readData) != string(testData) { + t.Logf("Data mismatch: expected %s, got %s", string(testData), string(readData)) + return false + } + + // Test 3: Test retry behavior with temporary permission issues + // Create a file and make it temporarily read-only to simulate write failure + readOnlyFile := filepath.Join(tempDir, "readonly_test.json") + err = os.WriteFile(readOnlyFile, []byte("initial"), 0644) + if err != nil { + t.Logf("Failed to create initial readonly file: %v", err) + return false + } + + // Make file read-only + err = os.Chmod(readOnlyFile, 0444) + if err != nil { + t.Logf("Failed to make file read-only: %v", err) + return false + } + + // Attempt write - should fail even with retry + err = storage.writeWithRetry(readOnlyFile, []byte("new data")) + if err == nil { + t.Logf("Write should fail for read-only file") + return false + } + + // Verify error message indicates retry failure + if !contains(err.Error(), "failed to write file after") || !contains(err.Error(), "retries") { + t.Logf("Error should indicate retry failure: %s", err.Error()) + return false + } + + // Restore write permissions and verify retry would work + err = os.Chmod(readOnlyFile, 0644) + if err != nil { + t.Logf("Failed to restore write permissions: %v", err) + return false + } + + // Now write should succeed + err = storage.writeWithRetry(readOnlyFile, []byte("new data")) + if err != nil { + t.Logf("Write should succeed after restoring permissions: %v", err) + return false + } + + // Test 4: Test that retry logic is actually used by checking timing + // This is a behavioral test - we can't easily simulate transient failures, + // but we can verify the retry mechanism exists by testing with invalid paths + invalidPath := filepath.Join("/nonexistent/path/file.json") + + err = storage.writeWithRetry(invalidPath, []byte("test")) + if err == nil { + t.Logf("Write to invalid path should fail") + return false + } + + // The error should indicate multiple retries were attempted + if !contains(err.Error(), "retries") { + t.Logf("Error should indicate retries were attempted: %s", err.Error()) + return false + } + + // Test 5: Verify SaveAccounts uses retry logic + // Create accounts with a problematic directory structure + problematicDir := 
filepath.Join(tempDir, "problematic") + problematicStorage := NewAccountStorage(problematicDir) + + // First, create the directory as a file to cause issues + err = os.WriteFile(problematicDir, []byte("not a directory"), 0644) + if err != nil { + t.Logf("Failed to create problematic file: %v", err) + return false + } + + // Attempt to save accounts - should fail due to directory conflict + err = problematicStorage.SaveAccounts(accountMap) + if err == nil { + t.Logf("SaveAccounts should fail with directory conflict") + return false + } + + // Remove the problematic file and create proper directory + err = os.Remove(problematicDir) + if err != nil { + t.Logf("Failed to remove problematic file: %v", err) + return false + } + + // Now SaveAccounts should succeed + err = problematicStorage.SaveAccounts(accountMap) + if err != nil { + t.Logf("SaveAccounts should succeed after fixing directory: %v", err) + return false + } + + // Verify accounts were saved correctly + finalAccounts, err := problematicStorage.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts from problematic storage: %v", err) + return false + } + + if len(finalAccounts) != len(accountMap) { + t.Logf("Final account count mismatch: expected %d, got %d", + len(accountMap), len(finalAccounts)) + return false + } + + return true + }, + gen.SliceOfN(3, genAccount()), // Generate 1-3 accounts for write retry testing + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_DirectoryStructureCreation(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("directory structure creation", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 1 account to test directory creation + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary base directory for testing + baseDir, err := os.MkdirTemp("", "dir_creation_test_*") + if err != nil { + t.Logf("Failed to create base temp dir: %v", err) + return false + } + defer os.RemoveAll(baseDir) + + // Test 1: Create storage with nested directory path that doesn't exist + nestedPath := filepath.Join(baseDir, "level1", "level2", "level3", "config") + storage := NewAccountStorage(nestedPath) + + // Verify directory doesn't exist initially + if _, err := os.Stat(nestedPath); !os.IsNotExist(err) { + t.Logf("Nested directory should not exist initially") + return false + } + + // Prepare account + account := &accounts[0] + if account.Login == 0 { + account.Login = 79123456789 + } + if account.UUID == "" { + account.UUID = "test-uuid-dir" + } + account.Enabled = true + account.Token = "test-token" + account.RefreshToken = "test-refresh" + + // Add account - this should create the directory structure + err = storage.AddAccount(account) + if err != nil { + t.Logf("Failed to add account with nested directory: %v", err) + return false + } + + // Verify directory structure was created + if _, err := os.Stat(nestedPath); os.IsNotExist(err) { + t.Logf("Nested directory should be created: %s", nestedPath) + return false + } + + // Verify config file was created + configFile := filepath.Join(nestedPath, "accounts.json") + if _, err := os.Stat(configFile); os.IsNotExist(err) { + t.Logf("Config file should be created: %s", configFile) + return false + } + + // Verify account was saved correctly + loadedAccounts, err := storage.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts from nested directory: %v", err) + return false + } + + if len(loadedAccounts) != 1 { + 
t.Logf("Expected 1 account in nested directory, got %d", len(loadedAccounts)) + return false + } + + // Test 2: Test backup directory creation + err = storage.CreateBackup() + if err != nil { + t.Logf("Failed to create backup: %v", err) + return false + } + + // Verify backup directory was created + backupDir := filepath.Join(nestedPath, "backups") + if _, err := os.Stat(backupDir); os.IsNotExist(err) { + t.Logf("Backup directory should be created: %s", backupDir) + return false + } + + // Verify backup file was created + backupFiles, err := filepath.Glob(filepath.Join(backupDir, "accounts_*.json")) + if err != nil || len(backupFiles) == 0 { + t.Logf("Backup file should be created in backup directory") + return false + } + + // Test 3: Test with very deep nested structure + veryDeepPath := filepath.Join(baseDir, "a", "b", "c", "d", "e", "f", "g", "config") + deepStorage := NewAccountStorage(veryDeepPath) + + // Add multiple accounts to test directory creation with multiple operations + accountMap := make(map[string]*AccountConfig) + for i := range accounts { + acc := &accounts[i] + + // Ensure required fields + if acc.Login == 0 { + acc.Login = 79123456789 + i + } + if acc.UUID == "" { + acc.UUID = fmt.Sprintf("test-uuid-deep-%d", i) + } + acc.Enabled = true + acc.Token = fmt.Sprintf("token-deep-%d", i) + acc.RefreshToken = fmt.Sprintf("refresh-deep-%d", i) + + err = deepStorage.AddAccount(acc) + if err != nil { + t.Logf("Failed to add account %d to deep directory: %v", i, err) + return false + } + + accountMap[acc.ID] = acc + } + + // Verify very deep directory structure was created + if _, err := os.Stat(veryDeepPath); os.IsNotExist(err) { + t.Logf("Very deep directory should be created: %s", veryDeepPath) + return false + } + + // Verify all accounts were saved + deepAccounts, err := deepStorage.LoadAccounts() + if err != nil { + t.Logf("Failed to load accounts from deep directory: %v", err) + return false + } + + if len(deepAccounts) != len(accountMap) { + t.Logf("Account count mismatch in deep directory: expected %d, got %d", + len(accountMap), len(deepAccounts)) + return false + } + + // Test 4: Test directory creation with special characters (if supported by OS) + specialPath := filepath.Join(baseDir, "config-with-dashes", "under_scores", "config") + specialStorage := NewAccountStorage(specialPath) + + err = specialStorage.AddAccount(account) + if err != nil { + t.Logf("Failed to add account to directory with special characters: %v", err) + return false + } + + // Verify special character directory was created + if _, err := os.Stat(specialPath); os.IsNotExist(err) { + t.Logf("Directory with special characters should be created: %s", specialPath) + return false + } + + // Test 5: Test that existing directories are not affected + existingDir := filepath.Join(baseDir, "existing") + err = os.MkdirAll(existingDir, 0755) + if err != nil { + t.Logf("Failed to create existing directory: %v", err) + return false + } + + // Create a test file in existing directory + testFile := filepath.Join(existingDir, "existing_file.txt") + err = os.WriteFile(testFile, []byte("existing content"), 0644) + if err != nil { + t.Logf("Failed to create existing file: %v", err) + return false + } + + // Create storage in existing directory + existingStorage := NewAccountStorage(existingDir) + err = existingStorage.AddAccount(account) + if err != nil { + t.Logf("Failed to add account to existing directory: %v", err) + return false + } + + // Verify existing file is still there + if _, err := os.Stat(testFile); 
os.IsNotExist(err) { + t.Logf("Existing file should not be affected: %s", testFile) + return false + } + + // Verify existing file content is unchanged + content, err := os.ReadFile(testFile) + if err != nil || string(content) != "existing content" { + t.Logf("Existing file content should be unchanged") + return false + } + + // Test 6: Test permission handling (create directory with proper permissions) + permissionPath := filepath.Join(baseDir, "permission_test", "config") + permissionStorage := NewAccountStorage(permissionPath) + + err = permissionStorage.AddAccount(account) + if err != nil { + t.Logf("Failed to add account for permission test: %v", err) + return false + } + + // Verify directory has proper permissions (readable and writable) + info, err := os.Stat(permissionPath) + if err != nil { + t.Logf("Failed to stat permission test directory: %v", err) + return false + } + + if !info.IsDir() { + t.Logf("Permission test path should be a directory") + return false + } + + // Verify we can write to the directory (permissions are adequate) + testPermFile := filepath.Join(permissionPath, "perm_test.txt") + err = os.WriteFile(testPermFile, []byte("test"), 0644) + if err != nil { + t.Logf("Should be able to write to created directory: %v", err) + return false + } + + return true + }, + gen.SliceOfN(3, genAccount()), // Generate 1-3 accounts for directory creation testing + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} \ No newline at end of file diff --git a/multiconfig/aggregator.go b/multiconfig/aggregator.go new file mode 100644 index 0000000..039fa87 --- /dev/null +++ b/multiconfig/aggregator.go @@ -0,0 +1,381 @@ +package multiconfig + +import ( + "embed" + "fmt" + "sync" +) + +// MultiAccountAggregator manages multiple accounts and provides aggregated data access +type MultiAccountAggregator struct { + storage *AccountStorage + clientPool *AccountClientPool + dataMerger *DataMerger + mutex sync.RWMutex + accounts map[string]*AccountConfig + handlerFactory HandlerFactory + templateFs embed.FS +} + +// NewMultiAccountAggregator creates a new multi-account aggregator +func NewMultiAccountAggregator(storage *AccountStorage, handlerFactory HandlerFactory, templateFs embed.FS) *MultiAccountAggregator { + aggregator := &MultiAccountAggregator{ + storage: storage, + clientPool: NewAccountClientPool(), + accounts: make(map[string]*AccountConfig), + handlerFactory: handlerFactory, + templateFs: templateFs, + } + aggregator.dataMerger = NewDataMerger(aggregator) + return aggregator +} + +// LoadAccounts loads all accounts from storage and initializes clients +func (m *MultiAccountAggregator) LoadAccounts() error { + m.mutex.Lock() + defer m.mutex.Unlock() + + accounts, err := m.storage.LoadAccounts() + if err != nil { + return fmt.Errorf("failed to load accounts: %w", err) + } + + m.accounts = accounts + + // Initialize clients for enabled accounts + for id, account := range accounts { + if account.Enabled { + client := NewAccountAPIClient(account, m.handlerFactory, m.templateFs) + m.clientPool.AddClient(id, client) + } + } + + return nil +} + +// GetAccounts returns a copy of all accounts +func (m *MultiAccountAggregator) GetAccounts() map[string]*AccountConfig { + m.mutex.RLock() + defer m.mutex.RUnlock() + + result := make(map[string]*AccountConfig) + for id, account := range m.accounts { + // Create a copy to prevent external modification + accountCopy := *account + result[id] = &accountCopy + } + return result +} + +// GetAccount returns a specific account by ID +func 
(m *MultiAccountAggregator) GetAccount(accountID string) (*AccountConfig, error) { + m.mutex.RLock() + defer m.mutex.RUnlock() + + account, exists := m.accounts[accountID] + if !exists { + return nil, fmt.Errorf("account %s not found", accountID) + } + + // Return a copy to prevent external modification + accountCopy := *account + return &accountCopy, nil +} + +// AddAccount adds a new account +func (m *MultiAccountAggregator) AddAccount(account *AccountConfig) error { + m.mutex.Lock() + defer m.mutex.Unlock() + + // Add to storage + if err := m.storage.AddAccount(account); err != nil { + return fmt.Errorf("failed to add account to storage: %w", err) + } + + // Add to local cache + m.accounts[account.ID] = account + + // Initialize client if account is enabled + if account.Enabled { + client := NewAccountAPIClient(account, m.handlerFactory, m.templateFs) + m.clientPool.AddClient(account.ID, client) + } + + return nil +} + +// RemoveAccount removes an account by ID +func (m *MultiAccountAggregator) RemoveAccount(accountID string) error { + m.mutex.Lock() + defer m.mutex.Unlock() + + // Check if account exists + if _, exists := m.accounts[accountID]; !exists { + return fmt.Errorf("account %s not found", accountID) + } + + // Remove from storage + if err := m.storage.RemoveAccount(accountID); err != nil { + return fmt.Errorf("failed to remove account from storage: %w", err) + } + + // Remove from local cache + delete(m.accounts, accountID) + + // Remove client + m.clientPool.RemoveClient(accountID) + + return nil +} + +// EnableAccount enables an account and initializes its client +func (m *MultiAccountAggregator) EnableAccount(accountID string) error { + m.mutex.Lock() + defer m.mutex.Unlock() + + account, exists := m.accounts[accountID] + if !exists { + return fmt.Errorf("account %s not found", accountID) + } + + if account.Enabled { + return nil // Already enabled + } + + // Update account status + account.Enabled = true + + // Save to storage + if err := m.storage.SaveAccounts(m.accounts); err != nil { + account.Enabled = false // Rollback + return fmt.Errorf("failed to save account status: %w", err) + } + + // Initialize client + client := NewAccountAPIClient(account, m.handlerFactory, m.templateFs) + m.clientPool.AddClient(accountID, client) + + return nil +} + +// DisableAccount disables an account and removes its client +func (m *MultiAccountAggregator) DisableAccount(accountID string) error { + m.mutex.Lock() + defer m.mutex.Unlock() + + account, exists := m.accounts[accountID] + if !exists { + return fmt.Errorf("account %s not found", accountID) + } + + if !account.Enabled { + return nil // Already disabled + } + + // Update account status + account.Enabled = false + + // Save to storage + if err := m.storage.SaveAccounts(m.accounts); err != nil { + account.Enabled = true // Rollback + return fmt.Errorf("failed to save account status: %w", err) + } + + // Remove client + m.clientPool.RemoveClient(accountID) + + return nil +} + +// GetEnabledAccounts returns only enabled accounts +func (m *MultiAccountAggregator) GetEnabledAccounts() map[string]*AccountConfig { + m.mutex.RLock() + defer m.mutex.RUnlock() + + result := make(map[string]*AccountConfig) + for id, account := range m.accounts { + if account.Enabled { + // Create a copy to prevent external modification + accountCopy := *account + result[id] = &accountCopy + } + } + return result +} + +// GetPrimaryAccount returns the primary account for legacy compatibility +func (m *MultiAccountAggregator) GetPrimaryAccount() 
(*AccountConfig, error) { + m.mutex.RLock() + defer m.mutex.RUnlock() + + // Load the current configuration to get the primary account ID + config, err := m.storage.loadConfig() + if err != nil { + return nil, fmt.Errorf("failed to load config: %w", err) + } + + // If primary account is specified and exists, return it + if config.PrimaryAccount != "" { + if account, exists := m.accounts[config.PrimaryAccount]; exists && account.Enabled { + // Return a copy to prevent external modification + accountCopy := *account + return &accountCopy, nil + } + } + + // Fallback: find first enabled account as primary + for _, account := range m.accounts { + if account.Enabled { + // Return a copy to prevent external modification + accountCopy := *account + return &accountCopy, nil + } + } + + return nil, fmt.Errorf("no enabled accounts found") +} + +// GetClient returns the API client for a specific account +func (m *MultiAccountAggregator) GetClient(accountID string) (*AccountAPIClient, error) { + return m.clientPool.GetClient(accountID) +} + +// GetAllClients returns all active API clients +func (m *MultiAccountAggregator) GetAllClients() map[string]*AccountAPIClient { + return m.clientPool.GetAllClients() +} + +// AccountClientPool manages API clients for multiple accounts +type AccountClientPool struct { + clients map[string]*AccountAPIClient + mutex sync.RWMutex +} + +// NewAccountClientPool creates a new client pool +func NewAccountClientPool() *AccountClientPool { + return &AccountClientPool{ + clients: make(map[string]*AccountAPIClient), + } +} + +// AddClient adds a client for an account +func (p *AccountClientPool) AddClient(accountID string, client *AccountAPIClient) { + p.mutex.Lock() + defer p.mutex.Unlock() + p.clients[accountID] = client +} + +// RemoveClient removes a client for an account +func (p *AccountClientPool) RemoveClient(accountID string) { + p.mutex.Lock() + defer p.mutex.Unlock() + delete(p.clients, accountID) +} + +// GetClient returns the client for a specific account +func (p *AccountClientPool) GetClient(accountID string) (*AccountAPIClient, error) { + p.mutex.RLock() + defer p.mutex.RUnlock() + + client, exists := p.clients[accountID] + if !exists { + return nil, fmt.Errorf("client for account %s not found", accountID) + } + return client, nil +} + +// GetAllClients returns all clients +func (p *AccountClientPool) GetAllClients() map[string]*AccountAPIClient { + p.mutex.RLock() + defer p.mutex.RUnlock() + + result := make(map[string]*AccountAPIClient) + for id, client := range p.clients { + result[id] = client + } + return result +} + +// AccountAPIClient wraps an API handler for individual account operations +type AccountAPIClient struct { + config *AccountConfig + handler APIHandler + factory HandlerFactory +} + +// NewAccountAPIClient creates a new API client for an account +func NewAccountAPIClient(account *AccountConfig, factory HandlerFactory, templateFs embed.FS) *AccountAPIClient { + // Create handler using the factory + handler := factory.CreateHandler(account, templateFs) + + return &AccountAPIClient{ + config: account, + handler: handler, + factory: factory, + } +} + +// GetConfig returns the account configuration +func (c *AccountAPIClient) GetConfig() *AccountConfig { + // Return a copy to prevent external modification + configCopy := *c.config + return &configCopy +} + +// GetHandler returns the underlying handler +func (c *AccountAPIClient) GetHandler() APIHandler { + return c.handler +} + +// UpdateTokens updates the authentication tokens for this client 
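// The handler side of this call is assumed rather than shown at this point: from
// the way the data merger and the tests use it, the APIHandler interface (defined
// elsewhere in this patch) looks roughly like
//
//	type APIHandler interface {
//		Cameras() (string, error)       // JSON payload, decoded into CamerasResponse
//		Places() (string, error)        // JSON payload, decoded into PlacesResponse
//		GetFinances() (Finances, error) // return type name assumed; fields inferred from use
//		UpdateTokens(token, refreshToken string)
//	}
//
// with HandlerFactory.CreateHandler(account, templateFs) producing one handler per
// account, so the UpdateTokens method below keeps the wrapped handler in sync.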
+func (c *AccountAPIClient) UpdateTokens(token, refreshToken string) { + c.config.Token = token + c.config.RefreshToken = refreshToken + + // Update the handler's tokens as well + c.handler.UpdateTokens(token, refreshToken) +} + +// IsEnabled returns whether this account is enabled +func (c *AccountAPIClient) IsEnabled() bool { + return c.config.Enabled +} + +// GetAccountID returns the account ID +func (c *AccountAPIClient) GetAccountID() string { + return c.config.ID +} + +// GetAccountName returns the account name +func (c *AccountAPIClient) GetAccountName() string { + return c.config.Name +} + +// GetDataMerger returns the data merger instance +func (m *MultiAccountAggregator) GetDataMerger() *DataMerger { + return m.dataMerger +} + +// UpdateAccountTokens updates the authentication tokens for a specific account +func (m *MultiAccountAggregator) UpdateAccountTokens(accountID, token, refreshToken string) error { + m.mutex.Lock() + defer m.mutex.Unlock() + + // Check if account exists + account, exists := m.accounts[accountID] + if !exists { + return fmt.Errorf("account %s not found", accountID) + } + + // Update tokens in the account configuration + account.Token = token + account.RefreshToken = refreshToken + + // Update tokens in the client if it exists + if client, err := m.clientPool.GetClient(accountID); err == nil { + client.UpdateTokens(token, refreshToken) + } + + // Save the updated accounts + return m.storage.SaveAccounts(m.accounts) +} \ No newline at end of file diff --git a/multiconfig/data_merger.go b/multiconfig/data_merger.go new file mode 100644 index 0000000..74c4d5e --- /dev/null +++ b/multiconfig/data_merger.go @@ -0,0 +1,348 @@ +package multiconfig + +import ( + "encoding/json" + "fmt" + "sync" +) + +// DataMerger component for combining data from multiple accounts +type DataMerger struct { + aggregator *MultiAccountAggregator +} + +// NewDataMerger creates a new DataMerger instance +func NewDataMerger(aggregator *MultiAccountAggregator) *DataMerger { + return &DataMerger{ + aggregator: aggregator, + } +} + +// CameraWithAccount represents a camera with account information +type CameraWithAccount struct { + ID int `json:"ID"` + Name string `json:"Name"` + IsActive int `json:"IsActive"` + AccountID string `json:"account_id"` + AccountName string `json:"account_name"` +} + +// PlaceWithAccount represents a place with account information +type PlaceWithAccount struct { + ID int `json:"id"` + Place struct { + ID int `json:"id"` + Address struct { + VisibleAddress string `json:"visibleAddress"` + } `json:"address"` + AccessControls []struct { + ID int `json:"id"` + Name string `json:"name"` + } `json:"accessControls"` + } `json:"place"` + Subscriber struct { + ID int `json:"id"` + Name string `json:"name"` + AccountID string `json:"accountId"` + } `json:"subscriber"` + Blocked bool `json:"blocked"` + AccountID string `json:"account_id"` + AccountName string `json:"account_name"` +} + +// FinanceWithAccount represents finances with account information +type FinanceWithAccount struct { + Balance float64 `json:"balance"` + BlockType string `json:"blockType"` + AmountSum float64 `json:"amountSum"` + TargetDate string `json:"targetDate"` + PaymentLink string `json:"paymentLink"` + Blocked bool `json:"blocked"` + AccountID string `json:"account_id"` + AccountName string `json:"account_name"` +} + +// EventWithAccount represents an event with account information +type EventWithAccount struct { + ID string `json:"id,omitempty"` + PlaceID int `json:"placeId,omitempty"` + 
EventTypeName string `json:"eventTypeName,omitempty"` + Timestamp string `json:"timestamp,omitempty"` + Message string `json:"message,omitempty"` + Source struct { + Type string `json:"type,omitempty"` + ID int `json:"id,omitempty"` + } `json:"source,omitempty"` + Value struct { + Type string `json:"type,omitempty"` + Value bool `json:"value,omitempty"` + } `json:"value,omitempty"` + EventStatusValue interface{} `json:"eventStatusValue,omitempty"` + Actions []interface{} `json:"actions,omitempty"` + AccountID string `json:"account_id"` + AccountName string `json:"account_name"` +} + +// AggregatedData contains all aggregated data from multiple accounts +type AggregatedData struct { + Cameras []CameraWithAccount `json:"cameras"` + Places []PlaceWithAccount `json:"places"` + Finances []FinanceWithAccount `json:"finances"` + Events []EventWithAccount `json:"events"` +} + +// GetAggregatedCameras returns cameras from all enabled accounts with account labels +func (m *DataMerger) GetAggregatedCameras() ([]CameraWithAccount, error) { + var result []CameraWithAccount + var wg sync.WaitGroup + var mu sync.Mutex + var errors []error + + clients := m.aggregator.GetAllClients() + + for accountID, client := range clients { + if !client.IsEnabled() { + continue + } + + wg.Add(1) + go func(accID string, c *AccountAPIClient) { + defer wg.Done() + + camerasJSON, err := c.GetHandler().Cameras() + if err != nil { + mu.Lock() + errors = append(errors, fmt.Errorf("failed to get cameras for account %s: %w", accID, err)) + mu.Unlock() + return + } + + // Skip if token expired + if camerasJSON == "token can't be refreshed" { + return + } + + var cameras CamerasResponse + if err := json.Unmarshal([]byte(camerasJSON), &cameras); err != nil { + mu.Lock() + errors = append(errors, fmt.Errorf("failed to unmarshal cameras for account %s: %w", accID, err)) + mu.Unlock() + return + } + + mu.Lock() + for _, camera := range cameras.Data { + result = append(result, CameraWithAccount{ + ID: camera.ID, + Name: camera.Name, + IsActive: camera.IsActive, + AccountID: accID, + AccountName: c.GetAccountName(), + }) + } + mu.Unlock() + }(accountID, client) + } + + wg.Wait() + + if len(errors) > 0 && len(result) == 0 { + return nil, fmt.Errorf("failed to get cameras from any account: %v", errors) + } + + return result, nil +} + +// GetAggregatedPlaces returns places from all enabled accounts with account identification +func (m *DataMerger) GetAggregatedPlaces() ([]PlaceWithAccount, error) { + var result []PlaceWithAccount + var wg sync.WaitGroup + var mu sync.Mutex + var errors []error + + clients := m.aggregator.GetAllClients() + + for accountID, client := range clients { + if !client.IsEnabled() { + continue + } + + wg.Add(1) + go func(accID string, c *AccountAPIClient) { + defer wg.Done() + + placesJSON, err := c.GetHandler().Places() + if err != nil { + mu.Lock() + errors = append(errors, fmt.Errorf("failed to get places for account %s: %w", accID, err)) + mu.Unlock() + return + } + + // Skip if token expired + if placesJSON == "token can't be refreshed" { + return + } + + var places PlacesResponse + if err := json.Unmarshal([]byte(placesJSON), &places); err != nil { + mu.Lock() + errors = append(errors, fmt.Errorf("failed to unmarshal places for account %s: %w", accID, err)) + mu.Unlock() + return + } + + mu.Lock() + for _, place := range places.Data { + result = append(result, PlaceWithAccount{ + ID: place.ID, + Place: place.Place, + Subscriber: place.Subscriber, + Blocked: place.Blocked, + AccountID: accID, + AccountName: 
c.GetAccountName(), + }) + } + mu.Unlock() + }(accountID, client) + } + + wg.Wait() + + if len(errors) > 0 && len(result) == 0 { + return nil, fmt.Errorf("failed to get places from any account: %v", errors) + } + + return result, nil +} + +// GetAggregatedFinances returns finances from all enabled accounts with account labels +func (m *DataMerger) GetAggregatedFinances() ([]FinanceWithAccount, error) { + var result []FinanceWithAccount + var wg sync.WaitGroup + var mu sync.Mutex + var errors []error + + clients := m.aggregator.GetAllClients() + + for accountID, client := range clients { + if !client.IsEnabled() { + continue + } + + wg.Add(1) + go func(accID string, c *AccountAPIClient) { + defer wg.Done() + + finances, err := c.GetHandler().GetFinances() + if err != nil { + mu.Lock() + errors = append(errors, fmt.Errorf("failed to get finances for account %s: %w", accID, err)) + mu.Unlock() + return + } + + mu.Lock() + result = append(result, FinanceWithAccount{ + Balance: finances.Balance, + BlockType: finances.BlockType, + AmountSum: finances.AmountSum, + TargetDate: finances.TargetDate, + PaymentLink: finances.PaymentLink, + Blocked: finances.Blocked, + AccountID: accID, + AccountName: c.GetAccountName(), + }) + mu.Unlock() + }(accountID, client) + } + + wg.Wait() + + if len(errors) > 0 && len(result) == 0 { + return nil, fmt.Errorf("failed to get finances from any account: %v", errors) + } + + return result, nil +} + +// GetAggregatedEvents returns events from all enabled accounts for a specific place with account labels +func (m *DataMerger) GetAggregatedEvents(placeID string) ([]EventWithAccount, error) { + var result []EventWithAccount + + // Note: Events aggregation is not implemented yet as it requires HTTP request context + // This would need to be implemented at the handler level where proper HTTP requests + // can be constructed with the placeID parameter + + return result, nil +} + +// GetAllAggregatedData returns all aggregated data from all enabled accounts +func (m *DataMerger) GetAllAggregatedData() (*AggregatedData, error) { + var wg sync.WaitGroup + var mu sync.Mutex + var errors []error + + data := &AggregatedData{ + Cameras: make([]CameraWithAccount, 0), + Places: make([]PlaceWithAccount, 0), + Finances: make([]FinanceWithAccount, 0), + Events: make([]EventWithAccount, 0), + } + + // Get cameras + wg.Add(1) + go func() { + defer wg.Done() + cameras, err := m.GetAggregatedCameras() + if err != nil { + mu.Lock() + errors = append(errors, fmt.Errorf("failed to get aggregated cameras: %w", err)) + mu.Unlock() + return + } + mu.Lock() + data.Cameras = cameras + mu.Unlock() + }() + + // Get places + wg.Add(1) + go func() { + defer wg.Done() + places, err := m.GetAggregatedPlaces() + if err != nil { + mu.Lock() + errors = append(errors, fmt.Errorf("failed to get aggregated places: %w", err)) + mu.Unlock() + return + } + mu.Lock() + data.Places = places + mu.Unlock() + }() + + // Get finances + wg.Add(1) + go func() { + defer wg.Done() + finances, err := m.GetAggregatedFinances() + if err != nil { + mu.Lock() + errors = append(errors, fmt.Errorf("failed to get aggregated finances: %w", err)) + mu.Unlock() + return + } + mu.Lock() + data.Finances = finances + mu.Unlock() + }() + + wg.Wait() + + if len(errors) > 0 { + return data, fmt.Errorf("errors occurred while aggregating data: %v", errors) + } + + return data, nil +} \ No newline at end of file diff --git a/multiconfig/data_merger_test.go b/multiconfig/data_merger_test.go new file mode 100644 index 0000000..56f8c22 --- 
/dev/null +++ b/multiconfig/data_merger_test.go @@ -0,0 +1,937 @@ +package multiconfig + +import ( + "embed" + "fmt" + "os" + "testing" + + "github.com/leanovate/gopter" + "github.com/leanovate/gopter/gen" + "github.com/leanovate/gopter/prop" +) + +func TestProperty_SimultaneousMultiAccountDataDisplay(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("simultaneous multi-account data display", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 1 account to test data display + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "data_display_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + dataMerger := aggregator.GetDataMerger() + + // Prepare and add accounts + enabledAccountCount := 0 + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + // Always set unique name to avoid duplicates from generator + account.Name = fmt.Sprintf("Test Account %d", i) + if account.ID == "" { + account.ID = fmt.Sprintf("account-%d", i) + } + + // Enable account for testing + account.Enabled = true + enabledAccountCount++ + + if err := aggregator.AddAccount(account); err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + } + + if enabledAccountCount == 0 { + return true // Skip if no enabled accounts + } + + // Load accounts into aggregator + if err := aggregator.LoadAccounts(); err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Test simultaneous data display from all accounts + cameras, err := dataMerger.GetAggregatedCameras() + if err != nil { + // Error is acceptable if no valid data is available + t.Logf("Cameras aggregation error (acceptable): %v", err) + } + + places, err := dataMerger.GetAggregatedPlaces() + if err != nil { + // Error is acceptable if no valid data is available + t.Logf("Places aggregation error (acceptable): %v", err) + } + + finances, err := dataMerger.GetAggregatedFinances() + if err != nil { + // Error is acceptable if no valid data is available + t.Logf("Finances aggregation error (acceptable): %v", err) + } + + // Verify that data from all accounts is displayed simultaneously + // Each data item should have account identification + for _, camera := range cameras { + if camera.AccountID == "" || camera.AccountName == "" { + t.Logf("Camera missing account identification: %+v", camera) + return false + } + } + + for _, place := range places { + if place.AccountID == "" || place.AccountName == "" { + t.Logf("Place missing account identification: %+v", place) + return false + } + } + + for _, finance := range finances { + if finance.AccountID == "" || finance.AccountName == "" { + t.Logf("Finance missing account identification: %+v", finance) + return false + } + } + + return true + }, + genAccountSlice(), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_CameraGroupingByAccount(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("camera grouping by account", prop.ForAll( + func(accounts 
[]AccountConfig) bool { + // Need at least 2 accounts to test grouping + if len(accounts) < 2 { + return true // Skip test for insufficient accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "camera_grouping_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + dataMerger := aggregator.GetDataMerger() + + // Prepare and add accounts with unique names + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + // Always set unique name to avoid duplicates from generator + account.Name = fmt.Sprintf("Test Account %d", i) + if account.ID == "" { + account.ID = fmt.Sprintf("account-%d", i) + } + + // Enable account for testing + account.Enabled = true + + if err := aggregator.AddAccount(account); err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + } + + // Load accounts into aggregator + if err := aggregator.LoadAccounts(); err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Test camera grouping by account + cameras, err := dataMerger.GetAggregatedCameras() + if err != nil { + // Error is acceptable if no valid data is available + t.Logf("Cameras aggregation error (acceptable): %v", err) + return true + } + + // Group cameras by account + camerasByAccount := make(map[string][]CameraWithAccount) + for _, camera := range cameras { + camerasByAccount[camera.AccountID] = append(camerasByAccount[camera.AccountID], camera) + } + + // Verify that cameras from different accounts have different account names + if len(camerasByAccount) > 1 { + accountIDs := make([]string, 0, len(camerasByAccount)) + for accountID := range camerasByAccount { + accountIDs = append(accountIDs, accountID) + } + + for i := 0; i < len(accountIDs); i++ { + for j := i + 1; j < len(accountIDs); j++ { + if accountIDs[i] == accountIDs[j] { + t.Logf("Duplicate account ID in camera grouping: %s", accountIDs[i]) + return false + } + + // Verify cameras from different accounts have different account names + cameras1 := camerasByAccount[accountIDs[i]] + cameras2 := camerasByAccount[accountIDs[j]] + + if len(cameras1) > 0 && len(cameras2) > 0 { + if cameras1[0].AccountName == cameras2[0].AccountName { + t.Logf("Cameras from different accounts have same AccountName: %s", + cameras1[0].AccountName) + return false + } + } + } + } + } + + return true + }, + genAccountSlice(), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_PlaceIdentificationByAccount(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("place identification by account", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 2 accounts to test identification + if len(accounts) < 2 { + return true // Skip test for insufficient accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "place_identification_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := 
NewMultiAccountAggregator(storage, factory, templateFs) + dataMerger := aggregator.GetDataMerger() + + // Prepare and add accounts with unique names + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + // Always set unique name to avoid duplicates from generator + account.Name = fmt.Sprintf("Test Account %d", i) + if account.ID == "" { + account.ID = fmt.Sprintf("account-%d", i) + } + + // Enable account for testing + account.Enabled = true + + if err := aggregator.AddAccount(account); err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + } + + // Load accounts into aggregator + if err := aggregator.LoadAccounts(); err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Test place identification by account + places, err := dataMerger.GetAggregatedPlaces() + if err != nil { + // Error is acceptable if no valid data is available + t.Logf("Places aggregation error (acceptable): %v", err) + return true + } + + // Group places by account + placesByAccount := make(map[string][]PlaceWithAccount) + for _, place := range places { + placesByAccount[place.AccountID] = append(placesByAccount[place.AccountID], place) + } + + // Verify that places from different accounts have different account names + if len(placesByAccount) > 1 { + accountIDs := make([]string, 0, len(placesByAccount)) + for accountID := range placesByAccount { + accountIDs = append(accountIDs, accountID) + } + + for i := 0; i < len(accountIDs); i++ { + for j := i + 1; j < len(accountIDs); j++ { + if accountIDs[i] == accountIDs[j] { + t.Logf("Duplicate account ID in place identification: %s", accountIDs[i]) + return false + } + + // Verify places from different accounts have different account names + places1 := placesByAccount[accountIDs[i]] + places2 := placesByAccount[accountIDs[j]] + + if len(places1) > 0 && len(places2) > 0 { + if places1[0].AccountName == places2[0].AccountName { + t.Logf("Places from different accounts have same AccountName: %s", + places1[0].AccountName) + return false + } + } + } + } + } + + return true + }, + genAccountSlice(), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_MultiAccountAPIDataAggregation(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("multi-account API data aggregation", prop.ForAll( + func(accounts []AccountConfig, requestAccountID string) bool { + // Need at least 1 account to test aggregation + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "api_aggregation_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + + // Prepare and add accounts + enabledAccountCount := 0 + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + // Always set unique name to avoid duplicates from generator + account.Name = fmt.Sprintf("Test Account %d", i) + if account.ID == "" { + account.ID = 
fmt.Sprintf("account-%d", i) + } + + // Enable account for testing + account.Enabled = true + enabledAccountCount++ + + if err := aggregator.AddAccount(account); err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + } + + if enabledAccountCount == 0 { + return true // Skip if no enabled accounts + } + + // Load accounts into aggregator + if err := aggregator.LoadAccounts(); err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Test API data aggregation behavior + dataMerger := aggregator.GetDataMerger() + + // Test cameras aggregation + cameras, err := dataMerger.GetAggregatedCameras() + if err != nil { + // Error is acceptable if no valid data is available + t.Logf("Cameras aggregation error (acceptable): %v", err) + } + + // Test places aggregation + places, err := dataMerger.GetAggregatedPlaces() + if err != nil { + // Error is acceptable if no valid data is available + t.Logf("Places aggregation error (acceptable): %v", err) + } + + // Test finances aggregation + finances, err := dataMerger.GetAggregatedFinances() + if err != nil { + // Error is acceptable if no valid data is available + t.Logf("Finances aggregation error (acceptable): %v", err) + } + + // Verify that data from all accounts is properly aggregated + // Each data item should have account identification + for _, camera := range cameras { + if camera.AccountID == "" || camera.AccountName == "" { + t.Logf("Camera missing account identification: %+v", camera) + return false + } + } + + for _, place := range places { + if place.AccountID == "" || place.AccountName == "" { + t.Logf("Place missing account identification: %+v", place) + return false + } + } + + for _, finance := range finances { + if finance.AccountID == "" || finance.AccountName == "" { + t.Logf("Finance missing account identification: %+v", finance) + return false + } + } + + // Test account-specific data retrieval if requestAccountID is valid + if requestAccountID != "" { + // Check if the requested account exists + if _, err := aggregator.GetAccount(requestAccountID); err == nil { + // Account exists, verify we can get its specific data + client, err := aggregator.GetClient(requestAccountID) + if err == nil && client.IsEnabled() { + // Verify client can provide account-specific data + if client.GetAccountID() != requestAccountID { + t.Logf("Client account ID mismatch: expected %s, got %s", requestAccountID, client.GetAccountID()) + return false + } + } + } + } + + return true + }, + genAccountSlice(), + gen.AlphaString(), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_NewAPIAggregatedDataConsistency(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("new API aggregated data consistency", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 1 account to test consistency + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "api_consistency_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + + // Prepare and add accounts + enabledAccountCount := 0 + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 
79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + // Always set unique name to avoid duplicates from generator + account.Name = fmt.Sprintf("Test Account %d", i) + if account.ID == "" { + account.ID = fmt.Sprintf("account-%d", i) + } + + // Enable account for testing + account.Enabled = true + enabledAccountCount++ + + if err := aggregator.AddAccount(account); err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + } + + if enabledAccountCount == 0 { + return true // Skip if no enabled accounts + } + + // Load accounts into aggregator + if err := aggregator.LoadAccounts(); err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Test new API aggregated data consistency + dataMerger := aggregator.GetDataMerger() + + // Get all aggregated data + allData, err := dataMerger.GetAllAggregatedData() + if err != nil { + // Error is acceptable if no valid data is available + t.Logf("All data aggregation error (acceptable): %v", err) + } + + // Verify consistency: all data should be from enabled accounts only + enabledAccounts := aggregator.GetEnabledAccounts() + + // Check cameras consistency + for _, camera := range allData.Cameras { + if _, exists := enabledAccounts[camera.AccountID]; !exists { + t.Logf("Camera from disabled account found: %s", camera.AccountID) + return false + } + // Verify account name matches + if enabledAccounts[camera.AccountID].Name != camera.AccountName { + t.Logf("Camera account name mismatch: expected %s, got %s", + enabledAccounts[camera.AccountID].Name, camera.AccountName) + return false + } + } + + // Check places consistency + for _, place := range allData.Places { + if _, exists := enabledAccounts[place.AccountID]; !exists { + t.Logf("Place from disabled account found: %s", place.AccountID) + return false + } + // Verify account name matches + if enabledAccounts[place.AccountID].Name != place.AccountName { + t.Logf("Place account name mismatch: expected %s, got %s", + enabledAccounts[place.AccountID].Name, place.AccountName) + return false + } + } + + // Check finances consistency + for _, finance := range allData.Finances { + if _, exists := enabledAccounts[finance.AccountID]; !exists { + t.Logf("Finance from disabled account found: %s", finance.AccountID) + return false + } + // Verify account name matches + if enabledAccounts[finance.AccountID].Name != finance.AccountName { + t.Logf("Finance account name mismatch: expected %s, got %s", + enabledAccounts[finance.AccountID].Name, finance.AccountName) + return false + } + } + + return true + }, + genAccountSlice(), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_AccountSpecificAuthenticationErrorIndication(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("account-specific authentication error indication", prop.ForAll( + func(accounts []AccountConfig, invalidTokenAccountIndex int) bool { + // Need at least 1 account to test error indication + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "auth_error_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + + // Prepare and add accounts + 
enabledAccountCount := 0 + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + // Always set unique name to avoid duplicates from generator + account.Name = fmt.Sprintf("Test Account %d", i) + if account.ID == "" { + account.ID = fmt.Sprintf("account-%d", i) + } + + // Set valid token by default + account.Token = "valid-token" + account.RefreshToken = "valid-refresh-token" + + // Invalidate token for one specific account if index is valid + if i == invalidTokenAccountIndex%len(accounts) { + account.Token = "invalid-token" + account.RefreshToken = "invalid-refresh-token" + } + + // Enable account for testing + account.Enabled = true + enabledAccountCount++ + + if err := aggregator.AddAccount(account); err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + } + + if enabledAccountCount == 0 { + return true // Skip if no enabled accounts + } + + // Load accounts into aggregator + if err := aggregator.LoadAccounts(); err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Test authentication error indication + dataMerger := aggregator.GetDataMerger() + + // Try to get data from all accounts + cameras, _ := dataMerger.GetAggregatedCameras() + places, _ := dataMerger.GetAggregatedPlaces() + finances, _ := dataMerger.GetAggregatedFinances() + + // Verify that accounts with invalid tokens don't affect other accounts + // The system should continue to work for accounts with valid tokens + + // Count accounts that should have valid data + validAccountsCount := 0 + invalidAccountID := "" + + for i, account := range accounts { + if i == invalidTokenAccountIndex%len(accounts) { + invalidAccountID = account.ID + } else { + validAccountsCount++ + } + } + + // If there are valid accounts, we should not see data from the invalid account + if validAccountsCount > 0 && invalidAccountID != "" { + // Check that invalid account data is not included + for _, camera := range cameras { + if camera.AccountID == invalidAccountID { + t.Logf("Data from invalid account found in cameras: %s", invalidAccountID) + return false + } + } + + for _, place := range places { + if place.AccountID == invalidAccountID { + t.Logf("Data from invalid account found in places: %s", invalidAccountID) + return false + } + } + + for _, finance := range finances { + if finance.AccountID == invalidAccountID { + t.Logf("Data from invalid account found in finances: %s", invalidAccountID) + return false + } + } + } + + // Verify that the system can identify which account has authentication issues + clients := aggregator.GetAllClients() + for accountID, client := range clients { + if accountID == invalidAccountID { + // This account should have authentication issues + // We can't directly test HTTP errors here, but we can verify + // that the client exists and has the invalid token + if client.GetConfig().Token != "invalid-token" { + t.Logf("Expected invalid token for account %s", accountID) + return false + } + } + } + + return true + }, + genAccountSlice(), + gen.IntRange(0, 100), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} + +func TestProperty_MultiAccountDataLabelingConsistency(t *testing.T) { + properties := gopter.NewProperties(nil) + + properties.Property("multi-account data labeling consistency", prop.ForAll( + func(accounts []AccountConfig) bool { + // Need at least 1 account to test labeling 
consistency + if len(accounts) == 0 { + return true // Skip test for no accounts + } + + // Create temporary directory for testing + tempDir, err := os.MkdirTemp("", "data_labeling_test_*") + if err != nil { + t.Logf("Failed to create temp dir: %v", err) + return false + } + defer os.RemoveAll(tempDir) + + storage := NewAccountStorage(tempDir) + factory := &MockHandlerFactory{} + var templateFs embed.FS + aggregator := NewMultiAccountAggregator(storage, factory, templateFs) + dataMerger := aggregator.GetDataMerger() + + // Prepare and add accounts with unique names + enabledAccountCount := 0 + for i := range accounts { + account := &accounts[i] + + // Ensure required fields + if account.Login == 0 { + account.Login = 79123456789 + i + } + if account.UUID == "" { + account.UUID = fmt.Sprintf("test-uuid-%d", i) + } + // Always set unique name to avoid duplicates from generator + account.Name = fmt.Sprintf("Test Account %d", i) + if account.ID == "" { + account.ID = fmt.Sprintf("account-%d", i) + } + + // Enable account for testing + account.Enabled = true + enabledAccountCount++ + + if err := aggregator.AddAccount(account); err != nil { + t.Logf("Failed to add account: %v", err) + return false + } + } + + if enabledAccountCount == 0 { + return true // Skip if no enabled accounts + } + + // Load accounts into aggregator + if err := aggregator.LoadAccounts(); err != nil { + t.Logf("Failed to load accounts: %v", err) + return false + } + + // Test multi-account data labeling consistency + // Get all aggregated data + allData, err := dataMerger.GetAllAggregatedData() + if err != nil { + // Error is acceptable if no valid data is available + t.Logf("All data aggregation error (acceptable): %v", err) + } + + // Verify that each data item is clearly labeled with account information + // Check cameras labeling consistency + for _, camera := range allData.Cameras { + // Each camera must have both AccountID and AccountName + if camera.AccountID == "" { + t.Logf("Camera missing AccountID: %+v", camera) + return false + } + if camera.AccountName == "" { + t.Logf("Camera missing AccountName: %+v", camera) + return false + } + + // Verify that the AccountID corresponds to an actual account + account, err := aggregator.GetAccount(camera.AccountID) + if err != nil { + t.Logf("Camera references non-existent account ID %s: %+v", camera.AccountID, camera) + return false + } + + // Verify that AccountName matches the actual account name + if camera.AccountName != account.Name { + t.Logf("Camera AccountName mismatch: expected %s, got %s", account.Name, camera.AccountName) + return false + } + } + + // Check places labeling consistency + for _, place := range allData.Places { + // Each place must have both AccountID and AccountName + if place.AccountID == "" { + t.Logf("Place missing AccountID: %+v", place) + return false + } + if place.AccountName == "" { + t.Logf("Place missing AccountName: %+v", place) + return false + } + + // Verify that the AccountID corresponds to an actual account + account, err := aggregator.GetAccount(place.AccountID) + if err != nil { + t.Logf("Place references non-existent account ID %s: %+v", place.AccountID, place) + return false + } + + // Verify that AccountName matches the actual account name + if place.AccountName != account.Name { + t.Logf("Place AccountName mismatch: expected %s, got %s", account.Name, place.AccountName) + return false + } + } + + // Check finances labeling consistency + for _, finance := range allData.Finances { + // Each finance must have both AccountID and 
AccountName + if finance.AccountID == "" { + t.Logf("Finance missing AccountID: %+v", finance) + return false + } + if finance.AccountName == "" { + t.Logf("Finance missing AccountName: %+v", finance) + return false + } + + // Verify that the AccountID corresponds to an actual account + account, err := aggregator.GetAccount(finance.AccountID) + if err != nil { + t.Logf("Finance references non-existent account ID %s: %+v", finance.AccountID, finance) + return false + } + + // Verify that AccountName matches the actual account name + if finance.AccountName != account.Name { + t.Logf("Finance AccountName mismatch: expected %s, got %s", account.Name, finance.AccountName) + return false + } + } + + // Verify consistency across different data types for the same account + // All data items from the same account should have identical AccountID and AccountName + accountLabels := make(map[string]string) // AccountID -> AccountName + + // Collect labels from cameras + for _, camera := range allData.Cameras { + if existingName, exists := accountLabels[camera.AccountID]; exists { + if existingName != camera.AccountName { + t.Logf("Inconsistent AccountName for account %s: camera has %s, expected %s", + camera.AccountID, camera.AccountName, existingName) + return false + } + } else { + accountLabels[camera.AccountID] = camera.AccountName + } + } + + // Verify labels from places match + for _, place := range allData.Places { + if existingName, exists := accountLabels[place.AccountID]; exists { + if existingName != place.AccountName { + t.Logf("Inconsistent AccountName for account %s: place has %s, expected %s", + place.AccountID, place.AccountName, existingName) + return false + } + } else { + accountLabels[place.AccountID] = place.AccountName + } + } + + // Verify labels from finances match + for _, finance := range allData.Finances { + if existingName, exists := accountLabels[finance.AccountID]; exists { + if existingName != finance.AccountName { + t.Logf("Inconsistent AccountName for account %s: finance has %s, expected %s", + finance.AccountID, finance.AccountName, existingName) + return false + } + } else { + accountLabels[finance.AccountID] = finance.AccountName + } + } + + return true + }, + genAccountSlice(), + )) + + properties.TestingRun(t, gopter.ConsoleReporter(false)) +} \ No newline at end of file diff --git a/multiconfig/interfaces.go b/multiconfig/interfaces.go new file mode 100644 index 0000000..4a80ffb --- /dev/null +++ b/multiconfig/interfaces.go @@ -0,0 +1,64 @@ +package multiconfig + +import "embed" + +// APIHandler defines the interface for API operations needed by multiconfig +type APIHandler interface { + Cameras() (string, error) + Places() (string, error) + GetFinances() (*FinanceData, error) + UpdateTokens(token, refreshToken string) +} + +// FinanceData represents the finance data structure +type FinanceData struct { + Balance float64 `json:"balance"` + BlockType string `json:"blockType"` + AmountSum float64 `json:"amountSum"` + TargetDate string `json:"targetDate"` + PaymentLink string `json:"paymentLink"` + Blocked bool `json:"blocked"` +} + +// CameraData represents camera data structure +type CameraData struct { + ID int `json:"ID"` + Name string `json:"Name"` + IsActive int `json:"IsActive"` +} + +// CamerasResponse represents the cameras API response +type CamerasResponse struct { + Data []CameraData `json:"data"` +} + +// PlaceData represents place data structure +type PlaceData struct { + ID int `json:"id"` + Place struct { + ID int `json:"id"` + Address struct { + 
VisibleAddress string `json:"visibleAddress"` + } `json:"address"` + AccessControls []struct { + ID int `json:"id"` + Name string `json:"name"` + } `json:"accessControls"` + } `json:"place"` + Subscriber struct { + ID int `json:"id"` + Name string `json:"name"` + AccountID string `json:"accountId"` + } `json:"subscriber"` + Blocked bool `json:"blocked"` +} + +// PlacesResponse represents the places API response +type PlacesResponse struct { + Data []PlaceData `json:"data"` +} + +// HandlerFactory creates API handlers for accounts +type HandlerFactory interface { + CreateHandler(config *AccountConfig, templateFs embed.FS) APIHandler +} \ No newline at end of file diff --git a/multiconfig/legacy_adapter.go b/multiconfig/legacy_adapter.go new file mode 100644 index 0000000..e2a6081 --- /dev/null +++ b/multiconfig/legacy_adapter.go @@ -0,0 +1,95 @@ +package multiconfig + +import ( + "fmt" + + "github.com/ad/domru/config" +) + +// LegacyConfigAdapter provides backward compatibility by adapting multi-account system +// to the legacy single-account interface +type LegacyConfigAdapter struct { + aggregator *MultiAccountAggregator +} + +// NewLegacyConfigAdapter creates a new legacy configuration adapter +func NewLegacyConfigAdapter(aggregator *MultiAccountAggregator) *LegacyConfigAdapter { + return &LegacyConfigAdapter{ + aggregator: aggregator, + } +} + +// GetPrimaryConfig returns the primary account configuration in legacy format +// This method ensures existing handlers continue working without changes +func (a *LegacyConfigAdapter) GetPrimaryConfig() (*config.Config, error) { + primaryAccount, err := a.aggregator.GetPrimaryAccount() + if err != nil { + return nil, fmt.Errorf("failed to get primary account: %w", err) + } + + // Convert AccountConfig to legacy Config format + legacyConfig := &config.Config{ + Token: primaryAccount.Token, + RefreshToken: primaryAccount.RefreshToken, + Login: primaryAccount.Login, + Operator: primaryAccount.Operator, + UUID: primaryAccount.UUID, + Port: 18000, // Default port for backward compatibility + } + + return legacyConfig, nil +} + +// GetAccountConfig returns a specific account configuration in legacy format +// This allows handlers to work with specific accounts when needed +func (a *LegacyConfigAdapter) GetAccountConfig(accountID string) (*config.Config, error) { + account, err := a.aggregator.GetAccount(accountID) + if err != nil { + return nil, fmt.Errorf("failed to get account %s: %w", accountID, err) + } + + // Convert AccountConfig to legacy Config format + legacyConfig := &config.Config{ + Token: account.Token, + RefreshToken: account.RefreshToken, + Login: account.Login, + Operator: account.Operator, + UUID: account.UUID, + Port: 18000, // Default port for backward compatibility + } + + return legacyConfig, nil +} + +// UpdatePrimaryAccountTokens updates the tokens for the primary account +// This maintains compatibility with existing token refresh logic +func (a *LegacyConfigAdapter) UpdatePrimaryAccountTokens(token, refreshToken string) error { + primaryAccount, err := a.aggregator.GetPrimaryAccount() + if err != nil { + return fmt.Errorf("failed to get primary account: %w", err) + } + + // Use the aggregator's method to update account tokens + return a.aggregator.UpdateAccountTokens(primaryAccount.ID, token, refreshToken) +} + +// UpdateAccountTokens updates the tokens for a specific account +func (a *LegacyConfigAdapter) UpdateAccountTokens(accountID, token, refreshToken string) error { + // Use the aggregator's method to update account 
tokens + return a.aggregator.UpdateAccountTokens(accountID, token, refreshToken) +} + +// HasPrimaryAccount checks if there is a primary account available +func (a *LegacyConfigAdapter) HasPrimaryAccount() bool { + _, err := a.aggregator.GetPrimaryAccount() + return err == nil +} + +// GetPrimaryAccountID returns the ID of the primary account +func (a *LegacyConfigAdapter) GetPrimaryAccountID() (string, error) { + primaryAccount, err := a.aggregator.GetPrimaryAccount() + if err != nil { + return "", fmt.Errorf("failed to get primary account: %w", err) + } + return primaryAccount.ID, nil +} \ No newline at end of file diff --git a/multiconfig/migration.go b/multiconfig/migration.go new file mode 100644 index 0000000..2aec64f --- /dev/null +++ b/multiconfig/migration.go @@ -0,0 +1,202 @@ +package multiconfig + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "time" + + "github.com/ad/domru/config" + "github.com/google/uuid" +) + +// MigrationService handles migration from legacy single-account configuration +type MigrationService struct { + storage *AccountStorage + legacyPath string + backupDir string +} + +// NewMigrationService creates a new migration service +func NewMigrationService(storage *AccountStorage, legacyPath string) *MigrationService { + return &MigrationService{ + storage: storage, + legacyPath: legacyPath, + backupDir: filepath.Join(filepath.Dir(storage.configFile), "migration_backups"), + } +} + +// NeedsMigration checks if migration is needed (legacy file exists and new format doesn't) +func (m *MigrationService) NeedsMigration() bool { + // Check if legacy file exists + if _, err := os.Stat(m.legacyPath); os.IsNotExist(err) { + return false + } + + // Check if new format already exists + if _, err := os.Stat(m.storage.configFile); err == nil { + return false // New format already exists, no migration needed + } + + return true +} + +// MigrateFromLegacy migrates from legacy configuration to multi-account format +func (m *MigrationService) MigrateFromLegacy() error { + // Check if migration is needed + if !m.NeedsMigration() { + return nil + } + + // Read legacy configuration + legacyConfig, err := m.readLegacyConfig() + if err != nil { + return fmt.Errorf("failed to read legacy config: %w", err) + } + + // Create backup of legacy configuration + backupPath, err := m.createLegacyBackup() + if err != nil { + return fmt.Errorf("failed to create backup: %w", err) + } + + // Convert to new format + account, err := m.convertLegacyToAccount(legacyConfig) + if err != nil { + return fmt.Errorf("failed to convert legacy config: %w", err) + } + + // Save in new format + accounts := map[string]*AccountConfig{ + account.ID: account, + } + + if err := m.storage.SaveAccounts(accounts); err != nil { + // Migration failed, attempt to restore + if restoreErr := m.restoreLegacyFromBackup(backupPath); restoreErr != nil { + return fmt.Errorf("migration failed and restore failed: migration error: %w, restore error: %v", err, restoreErr) + } + return fmt.Errorf("migration failed, legacy config restored: %w", err) + } + + // Migration successful, remove legacy file + if err := os.Remove(m.legacyPath); err != nil { + // Log warning but don't fail migration + fmt.Printf("Warning: failed to remove legacy config file: %v\n", err) + } + + return nil +} + +// readLegacyConfig reads the legacy configuration file +func (m *MigrationService) readLegacyConfig() (*config.Config, error) { + data, err := os.ReadFile(m.legacyPath) + if err != nil { + return nil, fmt.Errorf("failed to read legacy 
file: %w", err) + } + + var legacyConfig config.Config + if err := json.Unmarshal(data, &legacyConfig); err != nil { + return nil, fmt.Errorf("failed to parse legacy config: %w", err) + } + + return &legacyConfig, nil +} + +// convertLegacyToAccount converts legacy config to new account format +func (m *MigrationService) convertLegacyToAccount(legacyConfig *config.Config) (*AccountConfig, error) { + // Generate unique ID for the account + accountID := uuid.New().String() + + // Preserve all data from legacy config + account := &AccountConfig{ + ID: accountID, + Token: legacyConfig.Token, + RefreshToken: legacyConfig.RefreshToken, + Login: legacyConfig.Login, + Operator: legacyConfig.Operator, + UUID: legacyConfig.UUID, + Name: fmt.Sprintf("Migrated Account (%d)", legacyConfig.Login), + Enabled: true, // Default to enabled for migrated accounts + } + + // Validate required fields + if account.Login == 0 { + return nil, fmt.Errorf("legacy config has invalid login") + } + if account.UUID == "" { + return nil, fmt.Errorf("legacy config missing UUID") + } + + return account, nil +} + +// createLegacyBackup creates a backup of the legacy configuration +func (m *MigrationService) createLegacyBackup() (string, error) { + // Ensure backup directory exists + if err := os.MkdirAll(m.backupDir, 0755); err != nil { + return "", fmt.Errorf("failed to create backup directory: %w", err) + } + + // Read legacy file + data, err := os.ReadFile(m.legacyPath) + if err != nil { + return "", fmt.Errorf("failed to read legacy file for backup: %w", err) + } + + // Create backup filename with timestamp + timestamp := time.Now().Format("20060102_150405") + backupPath := filepath.Join(m.backupDir, fmt.Sprintf("legacy_account_%s.json", timestamp)) + + // Write backup + if err := os.WriteFile(backupPath, data, 0644); err != nil { + return "", fmt.Errorf("failed to write backup file: %w", err) + } + + return backupPath, nil +} + +// restoreLegacyFromBackup restores legacy configuration from backup +func (m *MigrationService) restoreLegacyFromBackup(backupPath string) error { + // Read backup + data, err := os.ReadFile(backupPath) + if err != nil { + return fmt.Errorf("failed to read backup file: %w", err) + } + + // Restore to original location + if err := os.WriteFile(m.legacyPath, data, 0644); err != nil { + return fmt.Errorf("failed to restore legacy file: %w", err) + } + + return nil +} + +// GetLegacyConfigPath returns the path to the legacy configuration file +func (m *MigrationService) GetLegacyConfigPath() string { + return m.legacyPath +} + +// CleanupBackups removes old migration backup files (keeps last 5) +func (m *MigrationService) CleanupBackups() error { + backupFiles, err := filepath.Glob(filepath.Join(m.backupDir, "legacy_account_*.json")) + if err != nil { + return fmt.Errorf("failed to list backup files: %w", err) + } + + // Keep only the 5 most recent backups + if len(backupFiles) <= 5 { + return nil + } + + // Sort files by name (which includes timestamp) + // Remove oldest files + for i := 0; i < len(backupFiles)-5; i++ { + if err := os.Remove(backupFiles[i]); err != nil { + fmt.Printf("Warning: failed to remove old backup %s: %v\n", backupFiles[i], err) + } + } + + return nil +} \ No newline at end of file diff --git a/multiconfig/test_factory.go b/multiconfig/test_factory.go new file mode 100644 index 0000000..d2514b3 --- /dev/null +++ b/multiconfig/test_factory.go @@ -0,0 +1,82 @@ +package multiconfig + +import ( + "embed" + "fmt" +) + +// MockHandlerFactory is a test implementation of 
HandlerFactory +type MockHandlerFactory struct{} + +// CreateHandler creates a mock API handler for testing +func (f *MockHandlerFactory) CreateHandler(accountConfig *AccountConfig, templateFs embed.FS) APIHandler { + return &MockAPIHandler{ + config: accountConfig, + } +} + +// MockAPIHandler is a test implementation of APIHandler +type MockAPIHandler struct { + config *AccountConfig +} + +// Cameras returns mock camera data specific to the account +func (m *MockAPIHandler) Cameras() (string, error) { + // Return different data based on account ID to ensure proper isolation + if m.config.Token == "invalid-token" { + return "Not authorized", fmt.Errorf("invalid token") + } + + // Return account-specific camera data + cameraID := 1 + if m.config.ID != "" { + // Use account ID to generate unique camera ID + for _, char := range m.config.ID { + cameraID += int(char) + } + } + + return fmt.Sprintf(`{"data":[{"ID":%d,"Name":"Camera-%s","IsActive":1}]}`, cameraID, m.config.ID), nil +} + +// Places returns mock place data specific to the account +func (m *MockAPIHandler) Places() (string, error) { + // Return different data based on account ID to ensure proper isolation + if m.config.Token == "invalid-token" { + return "Not authorized", fmt.Errorf("invalid token") + } + + // Return account-specific place data + placeID := 1 + if m.config.ID != "" { + // Use account ID to generate unique place ID + for _, char := range m.config.ID { + placeID += int(char) + } + } + + return fmt.Sprintf(`{"data":[{"id":%d,"place":{"id":%d,"address":{"visibleAddress":"Address-%s"},"accessControls":[]},"subscriber":{"id":1,"name":"User-%s","accountId":"%s"},"blocked":false}]}`, placeID, placeID, m.config.ID, m.config.ID, m.config.ID), nil +} + +// GetFinances returns mock finance data specific to the account +func (m *MockAPIHandler) GetFinances() (*FinanceData, error) { + // Return error for invalid tokens + if m.config.Token == "invalid-token" { + return nil, fmt.Errorf("invalid token") + } + + return &FinanceData{ + Balance: 100.0, + BlockType: "none", + AmountSum: 0.0, + TargetDate: "2024-12-31", + PaymentLink: "http://test.com", + Blocked: false, + }, nil +} + +// UpdateTokens updates the mock tokens +func (m *MockAPIHandler) UpdateTokens(token, refreshToken string) { + m.config.Token = token + m.config.RefreshToken = refreshToken +} \ No newline at end of file diff --git a/templates/accounts.html b/templates/accounts.html index 158111d..dca8501 100644 --- a/templates/accounts.html +++ b/templates/accounts.html @@ -151,23 +151,40 @@

{{ .Phone }}

{{ .LoginError }}
- + +
+

Выберите аккаунт для добавления:

+ +
+ + +
diff --git a/templates/home.html b/templates/home.html index b59603b..f4fe518 100644 --- a/templates/home.html +++ b/templates/home.html @@ -51,6 +51,52 @@ pre { text-align: left; } + +/* Multi-account styles */ +.account-group { + border: 1px solid #ddd; + border-radius: 8px; + margin: 15px 0; + padding: 10px; + background-color: #fafafa; +} + +.account-status-section, .cameras-section, .places-section, .config-section { + margin: 20px 0; + padding: 15px; + border: 1px solid #e0e0e0; + border-radius: 5px; + background-color: #f9f9f9; +} + +.account-status-section h3, .cameras-section h3, .places-section h3, .config-section h3 { + margin-top: 0; + color: #5b5983; + border-bottom: 2px solid #5b5983; + padding-bottom: 5px; +} + +.account-group h4 { + margin: 0 0 10px 0; + border-radius: 5px; + font-size: 1.1em; +} + +.account-error { + background-color: #f8d7da; + color: #721c24; + padding: 10px; + border-radius: 5px; + margin: 10px 0; +} + +.account-success { + background-color: #d4edda; + color: #155724; + padding: 10px; + border-radius: 5px; + margin: 10px 0; +} @@ -60,53 +106,184 @@ {{ .LoginError }} {{ else }} -
-
-
-
Номер телефона:
-
+{{ .Phone }}
-
-
-
Токен доступа:
-
{{ .Token }}
-
-
-
Токен обновления:
-
{{ .RefreshToken }}
-
- {{ with .Finances }} -
-
Статус:
-
{{ .BlockType }}
-
-
-
Баланс:
-
{{ .Balance }}₽
+ + {{ if .IsMultiAccount }} + +

Мульти-аккаунт режим

+ + + + + + {{ if .MultiAccountCameras }} +
+

Камеры по аккаунтам

+ {{ $currentAccount := "" }} + {{ range .MultiAccountCameras }} + {{ if ne .AccountName $currentAccount }} + {{ if ne $currentAccount "" }}
{{ end }} + {{ end }} +
+ {{ end }} + + +
+

Конфигурация Home Assistant

- Камера +
+# Мульти-аккаунт конфигурация
+rest_command:
+{{ range .MultiAccountPlaces }}
+    domru_open_door_{{ .AccountID }}_{{ .Place.ID }}:
+        url: http://{{ $.HostIP }}:{{ $.Port }}/door?placeID={{ .Place.ID }}&accessControlID={{ (index .Place.AccessControls 0).ID }}
+{{ end }}
+
+camera:
+{{ range .MultiAccountCameras }}
+    - platform: generic
+      name: domofon_{{ .AccountID }}_{{ .ID }}
+      still_image_url: http://{{ $.HostIP }}:{{ $.Port }}/snapshot?placeID={{ .ID }}&accessControlID=1
+      stream_source: http://{{ $.HostIP }}:{{ $.Port }}/stream?cameraID={{ .ID }}
+{{ end }}
+                        
-
-
-
-
+            
+ + {{ else }} + +
+
+
+
Номер телефона:
+
+{{ .Phone }}
+
+
+
Токен доступа:
+
{{ .Token }}
+
+
+
Токен обновления:
+
{{ .RefreshToken }}
+
+ {{ with .Finances }} +
+
Статус:
+
{{ .BlockType }}
+
+
+
Баланс:
+
{{ .Balance }}₽
+
+ {{ end }} + {{ with .Places.Data }} + {{ range $index, $element := . }} +
+
Адрес:
+
+ {{ (index $element.Place.AccessControls 0).Name }} + +
+
+
+
Видеопоток:
+
http://{{ $.HostIP }}:{{ $.Port }}/stream?cameraID={{ (index $.Cameras.Data 0).ID }}
+
+
+
+
+ Камера +
+
+
+
+
+
 rest_command:
     domru_open_door:
         url: http://{{ $.HostIP }}:{{ $.Port }}/door?placeID={{ $element.Place.ID }}&accessControlID={{ (index $element.Place.AccessControls 0).ID }}
@@ -116,13 +293,14 @@
         name: domofon
         still_image_url: http://{{ $.HostIP }}:{{ $.Port }}/snapshot?placeID={{ $element.Place.ID }}&accessControlID={{ (index $element.Place.AccessControls 0).ID }}
         stream_source: http://{{ $.HostIP }}:{{ $.Port }}/stream?cameraID={{ (index $.Cameras.Data 0).ID }}
-                        
+ +
+ {{ end }} + {{ end }}
- {{ end }} - {{ end }}
-
+ {{ end }} {{ end }} diff --git a/templates/login.html b/templates/login.html index 6cd522c..49a38e3 100644 --- a/templates/login.html +++ b/templates/login.html @@ -149,7 +149,21 @@
-

Вход

+

Вход в систему

+ + +
+

+ Теперь вы можете управлять несколькими аккаунтами домофонов одновременно! +

+
    +
  • Добавляйте несколько аккаунтов поочередно
  • +
  • Просматривайте данные от всех аккаунтов в одном интерфейсе
  • +
  • Четкая группировка по аккаунтам
  • +
  • Индикаторы статуса для каждого аккаунта
  • +
+
+