diff --git a/compute/simulation_test.go b/compute/simulation_test.go index 5e60739..188ced8 100644 --- a/compute/simulation_test.go +++ b/compute/simulation_test.go @@ -101,6 +101,30 @@ func Test_StreamAbstract_MultiFrequency(t *testing.T) { //compute consequences. StreamAbstractMultiFrequency(hazardProviders, frequencies, nsp, w) } + +func Test_StreamAbstract_MultiHazard(t *testing.T) { + //initialize the NSI API structure provider + nsp := structureprovider.InitNSISPwithOcctypeFilePath("/workspaces/go-consequences/data/lifecycle/occtypes_reconstruction.json") + now := time.Now() + fmt.Println(now) + + root := "/workspaces/go-consequences/data/lifecycle/" + filepath := root + "test_arrival-depth-duration_hazards.json" + w, _ := resultswriters.InitSpatialResultsWriter(root+"multihazardtest_consequences.json", "results", "GeoJSON") + //w := consequences.InitSummaryResultsWriterFromFile(root + "_consequences_SUMMARY.json") + //create a result writer based on the name of the depth grid. + //w, _ := resultswriters.InitGpkResultsWriter(root+"_consequences_nsi.gpkg", "nsi_result") + defer w.Close() + //initialize a hazard provider based on the depth grid. + dfr, err := hazardproviders.InitADDMHP(filepath) + if err != nil { + panic(err) + } + //compute consequences. 
+ StreamAbstract(dfr, nsp, w) + fmt.Println(time.Since(now)) +} + func Test_Config(t *testing.T) { config := Config{ StructureProviderInfo: structureprovider.StructureProviderInfo{ diff --git a/hazardproviders/interfaces.go b/hazardproviders/interfaces.go index b7b298f..19c4585 100644 --- a/hazardproviders/interfaces.go +++ b/hazardproviders/interfaces.go @@ -48,6 +48,25 @@ func ArrivalAndDurationHazardFunction() HazardFunction { } } +func ArrivalDepthAndDurationHazardFunction() HazardFunction { + return func(valueIn hazards.HazardData, hazard hazards.HazardEvent) (hazards.HazardEvent, error) { + d := hazards.ArrivalDepthandDurationEvent{} + d.SetDepth(valueIn.Depth) + d.SetDuration(valueIn.Duration) + d.SetArrivalTime(valueIn.ArrivalTime) + return d, nil + } +} + +func DepthAndDurationHazardFunction() HazardFunction { + return func(valueIn hazards.HazardData, hazard hazards.HazardEvent) (hazards.HazardEvent, error) { + d := hazards.ArrivalDepthandDurationEvent{} + d.SetDepth(valueIn.Depth) + d.SetDuration(valueIn.Duration) + return d, nil + } +} + // NoHazardFoundError is an error for a situation where no hazard could be computed for the given args type NoHazardFoundError struct { Input string diff --git a/hazardproviders/json_multi_hazard.go b/hazardproviders/json_multi_hazard.go new file mode 100644 index 0000000..0a7a8f7 --- /dev/null +++ b/hazardproviders/json_multi_hazard.go @@ -0,0 +1,106 @@ +package hazardproviders + +import ( + "encoding/json" + "fmt" + "os" + "time" + + "github.com/USACE/go-consequences/geography" + "github.com/USACE/go-consequences/hazards" +) + +type jsonArrivalDepthDurationMultiHazardProvider struct { + arrivals []time.Time + depthCRs []cogReader + durations []float64 + process HazardFunction +} + +type ADDProperties struct { // will try to not use this + Year float64 `json:"year"` + Month float64 `json:"month"` + Day float64 `json:"day"` + Depth float64 `json:"depth"` + Depthgrid string `json:"depthgrid"` + Duration float64 
`json:"duration"` + Xmin float64 `json:"xmin"` + Xmax float64 `json:"xmax"` + Ymin float64 `json:"ymin"` + Ymax float64 `json:"ymax"` +} + +type ADDEvents struct { + Events []ADDProperties `json:"events"` +} + +func InitADDMHP(fp string) (jsonArrivalDepthDurationMultiHazardProvider, error) { + fmt.Println("Connecting to: " + fp) + c, err := os.ReadFile(fp) + if err != nil { + panic(err) + } + + var events ADDEvents + json.Unmarshal(c, &events) + + arrivalTimes := make([]time.Time, len(events.Events)) + durations := make([]float64, len(events.Events)) + depthCRs := make([]cogReader, len(events.Events)) + + for i, e := range events.Events { + at := time.Date(int(e.Year), time.Month(e.Month), int(e.Day), 0, 0, 0, 0, time.UTC) + cr, err := initCR(e.Depthgrid) + if err != nil { + panic(err) + } + + arrivalTimes[i] = at + durations[i] = e.Duration + depthCRs[i] = cr + } + + return jsonArrivalDepthDurationMultiHazardProvider{ + arrivals: arrivalTimes, + depthCRs: depthCRs, + durations: durations, + process: ArrivalDepthAndDurationHazardFunction(), + }, nil +} + +func (j jsonArrivalDepthDurationMultiHazardProvider) Close() { + for _, c := range j.depthCRs { + c.Close() + } +} + +func (j jsonArrivalDepthDurationMultiHazardProvider) Hazard(l geography.Location) (hazards.HazardEvent, error) { + var hm hazards.ArrivalDepthandDurationEventMulti + for i, cr := range j.depthCRs { + d, err := cr.ProvideValue(l) + if err != nil { + return hm, err + } + hd := hazards.HazardData{ + Depth: d, + Velocity: 0, + ArrivalTime: j.arrivals[i], + Erosion: 0, + Duration: j.durations[i], + WaveHeight: 0, + Salinity: false, + Qualitative: "", + } + var h hazards.HazardEvent + h, err = j.process(hd, h) + hm.Append(h.(hazards.ArrivalDepthandDurationEvent)) + } + return &hm, nil +} + +func (j jsonArrivalDepthDurationMultiHazardProvider) HazardBoundary() (geography.BBox, error) { + // We'll probably want to do something different here. 
+ // Probably allow user to define study bbox with a + // shapefile/geojson/directly entering bbox coords + return j.depthCRs[0].GetBoundingBox() +} diff --git a/hazardproviders/json_multi_hazard_test.go b/hazardproviders/json_multi_hazard_test.go new file mode 100644 index 0000000..9fbb064 --- /dev/null +++ b/hazardproviders/json_multi_hazard_test.go @@ -0,0 +1,42 @@ +package hazardproviders + +import ( + "fmt" + "testing" + + "github.com/USACE/go-consequences/geography" + "github.com/USACE/go-consequences/hazards" +) + +func TestInitADDMHP(t *testing.T) { + file := "/workspaces/go-consequences/data/lifecycle/test_arrival-depth-duration_hazards.json" + + ADDMHP, err := InitADDMHP(file) + if err != nil { + panic(err) + } + + loc := geography.Location{ + X: -71.481, + Y: 43.001, + SRID: "", + } + + haz, err := ADDMHP.Hazard(loc) + h := haz.(hazards.ArrivalDepthandDurationEventMulti) + if err != nil { + panic(err) + } + + for { + fmt.Printf( + "%d: Depth: %3.2f, Duration: %3.2f, Arrival: %v\n", + h.Index(), h.Depth(), h.Duration(), h.ArrivalTime(), + ) + if h.HasNext() { + h.Increment() + } else { + break + } + } +} diff --git a/hazards/flood.go b/hazards/flood.go index c07f77f..0a94504 100644 --- a/hazards/flood.go +++ b/hazards/flood.go @@ -1,7 +1,9 @@ package hazards import ( + "errors" "fmt" + "sort" "strings" "time" ) @@ -443,3 +445,125 @@ func (d MultiParameterEvent) MarshalJSON() ([]byte, error) { s += "}}" return []byte(s), nil } + +// ArrivalandDurationEventMulti describes a series of events with an arrival time, depth and a duration in days +type ArrivalDepthandDurationEventMulti struct { + index int + Events []ArrivalDepthandDurationEvent +} + +func (h ArrivalDepthandDurationEventMulti) Depth() float64 { + return h.Events[h.index].Depth() +} + +func (h ArrivalDepthandDurationEventMulti) Velocity() float64 { + return h.Events[h.index].Velocity() +} + +func (h ArrivalDepthandDurationEventMulti) ArrivalTime() time.Time { + return 
h.Events[h.index].ArrivalTime() +} + +func (h ArrivalDepthandDurationEventMulti) Erosion() float64 { + return h.Events[h.index].Erosion() +} + +func (h ArrivalDepthandDurationEventMulti) Duration() float64 { + return h.Events[h.index].Duration() +} + +func (h ArrivalDepthandDurationEventMulti) WaveHeight() float64 { + return h.Events[h.index].WaveHeight() +} + +func (h ArrivalDepthandDurationEventMulti) Salinity() bool { + return h.Events[h.index].Salinity() +} + +func (h ArrivalDepthandDurationEventMulti) Qualitative() string { + return h.Events[h.index].Qualitative() +} + +func (h ArrivalDepthandDurationEventMulti) DV() float64 { + return h.Events[h.index].DV() +} + +func (h ArrivalDepthandDurationEventMulti) Parameters() Parameter { + return h.Events[h.index].Parameters() +} + +func (h ArrivalDepthandDurationEventMulti) Has(p Parameter) bool { + return h.Events[h.index].Has(p) +} + +func (h ArrivalDepthandDurationEventMulti) Index() int { + return h.index +} + +func (h ArrivalDepthandDurationEventMulti) HasNext() bool { + return h.index < (len(h.Events) - 1) +} + +func (h ArrivalDepthandDurationEventMulti) HasPrevious() bool { + return h.index > 0 +} + +func (h ArrivalDepthandDurationEventMulti) This() HazardEvent { + return h.Events[h.index] +} + +func (h ArrivalDepthandDurationEventMulti) Next() (HazardEvent, error) { + var err error = nil + if h.HasNext() { + return h.Events[h.index+1], err + } else { + return ArrivalDepthandDurationEvent{}, errors.New("hazards: ArrivalDepthandDurationEventMulti does not have Next event") + } +} + +func (h ArrivalDepthandDurationEventMulti) Previous() (HazardEvent, error) { + var err error = nil + if h.HasPrevious() { + return h.Events[h.index-1], err + } else { + return ArrivalDepthandDurationEvent{}, errors.New("hazards: ArrivalDepthandDurationEventMulti does not have Previous event") + } +} + +func (h *ArrivalDepthandDurationEventMulti) Increment() { + if h.HasNext() { + h.index++ + } +} + +func (h 
*ArrivalDepthandDurationEventMulti) ResetIndex() { + h.index = 0 +} + +func (h *ArrivalDepthandDurationEventMulti) Append(n HazardEvent) { + newEvent := n.(ArrivalDepthandDurationEvent) + h.Events = append(h.Events, newEvent) +} + +func (h ArrivalDepthandDurationEventMulti) Sort() { // ensure the hazard events are in order of arrival time + sort.Sort(h) +} + +func (h ArrivalDepthandDurationEventMulti) IsSorted() bool { + return sort.IsSorted(h) +} + +// Len is part of sort.Interface. +func (h ArrivalDepthandDurationEventMulti) Len() int { + return len(h.Events) +} + +// Swap is part of sort.Interface. +func (h ArrivalDepthandDurationEventMulti) Swap(i, j int) { + h.Events[i], h.Events[j] = h.Events[j], h.Events[i] +} + +// Less is part of sort.Interface +func (h ArrivalDepthandDurationEventMulti) Less(i, j int) bool { + return h.Events[i].ArrivalTime().Before(h.Events[j].ArrivalTime()) +} diff --git a/hazards/flood_test.go b/hazards/flood_test.go index 0cb54ff..3a3eefe 100644 --- a/hazards/flood_test.go +++ b/hazards/flood_test.go @@ -108,3 +108,81 @@ func TestMarshalParameterJSON(t *testing.T) { b, _ := json.Marshal(d) fmt.Println(string(b)) } + +func TestADDMulti(t *testing.T) { + // create a series of hazardEvents + var d1 = hazards.ArrivalDepthandDurationEvent{} + d1.SetDuration(0) + d1.SetDepth(1.0) + t1 := time.Date(1984, time.Month(1), 1, 0, 0, 0, 0, time.UTC) + d1.SetArrivalTime(t1) + + var d2 = hazards.ArrivalDepthandDurationEvent{} + d2.SetDuration(5.0) + d2.SetDepth(1.0) + t2 := time.Date(1984, time.Month(1), 11, 0, 0, 0, 0, time.UTC) + d2.SetArrivalTime(t2) + + var d3 = hazards.ArrivalDepthandDurationEvent{} + d3.SetDuration(0.0) + d3.SetDepth(1.0) + t3 := time.Date(1984, time.Month(1), 21, 0, 0, 0, 0, time.UTC) + d3.SetArrivalTime(t3) + + var d4 = hazards.ArrivalDepthandDurationEvent{} + d4.SetDuration(0.0) + d4.SetDepth(2.0) + t4 := time.Date(1985, time.Month(1), 1, 0, 0, 0, 0, time.UTC) + d4.SetArrivalTime(t4) + + var d5 = 
hazards.ArrivalDepthandDurationEvent{} + d5.SetDuration(0.0) + d5.SetDepth(2.0) + t5 := time.Date(1985, time.Month(1), 11, 0, 0, 0, 0, time.UTC) + d5.SetArrivalTime(t5) + + events := []hazards.ArrivalDepthandDurationEvent{d1, d2, d3, d4, d5} + addm := hazards.ArrivalDepthandDurationEventMulti{Events: events} + + depths := []float64{} + expected := []float64{1.0, 1.0, 1.0, 2.0, 2.0} + + for { + depths = append(depths, addm.Depth()) + if addm.HasNext() { + addm.Increment() + } else { + break + } + } + + // check that we properly read all depths and iterated through the events + for i := range expected { + if expected[i] != depths[i] { + t.Errorf("Expected: %v. Got: %v\n", expected[i], depths[i]) + } + } + + // check that we can reset the index + addm.ResetIndex() + if addm.Index() != 0 { + t.Errorf("Index not reset") + } + // check that we can read the Parameters for the events + for { + if !addm.Has(hazards.Depth) { + t.Errorf("Event didn't have Depth") + } + if !addm.Has(hazards.Duration) { + t.Errorf("Event didn't have Duration") + } + if !addm.Has(hazards.ArrivalTime) { + t.Errorf("Event didn't have ArrivalTime") + } + if addm.HasNext() { + addm.Increment() + } else { + break + } + } +} diff --git a/hazards/interfaces.go b/hazards/interfaces.go index 98a9c3f..23a4f13 100644 --- a/hazards/interfaces.go +++ b/hazards/interfaces.go @@ -233,3 +233,23 @@ func (p *Parameter) UnmarshalJSON(b []byte) error { *p = toParameter(s) return nil } + +type MultiHazardEvent interface { + HazardEvent + Index() int + HasNext() bool + HasPrevious() bool + This() HazardEvent + Next() (HazardEvent, error) + Previous() (HazardEvent, error) + Increment() + ResetIndex() + Append(HazardEvent) + Sort() + IsSorted() bool +} + +// type MultiHazardFrequencyEvent interface { +// MultiHazardEvent +// Frequency() float64 +// } diff --git a/notes.md b/notes.md new file mode 100644 index 0000000..8037aa4 --- /dev/null +++ b/notes.md @@ -0,0 +1,66 @@ +# Reconcstruction/Life Cycle Implementation + 
+## 1. Implement basic reconstruction as a component damage function (COMPLETE) + +- current component damage functions exist for "structure" and "contents" + +- the existing format can be leveraged to return a reconstruction time in days for a given input + - rather than using the hazard intensity (e.g. flood depth) to return a percent loss, we use the pct_loss as the input. Return value is number of days to return to 0 damage. e.g.: + | damcat | dmg_pct | t_rebuild_min | t_rebuild_mostlikely | t_rebuild_max | + |---------|-----------|----------------|------------------------|-----------------| + | RES | 0 | 0 | 0 | 0 | + | RES | .5 | 13.5 | 15 | 16.5 | + | RES | 1 | 27 | 30 | 33 | + + + +## 2. Implement MultiHazard + +- `computeConsequencesMulti(events []hazards.HazardEvent, s StructureDeterministic) ([]consequences.Result, error){}` + - this function implements the logic to compute losses and reconstruction across a series of Hazards based on the implementation in g2crm + - Downside is that the use of slices means we can't go through the `Compute()` method on the structure + +- `computeConsequencesMultiHazard(event hazards.MultiHazardEvent, s StructureDeterministic) (consequences.Result, error)` + - this function implements the same logic as the function above, but takes a new `MultiHazardEvent` interface. + - the `MultiHazardEvent` interface also satisfies the `HazardEvent` interface so both types can be passed to `Compute()` + +- added logic to Compute to call `computeConsequencesMultiHazard` if the `HazardEvent` paramter can be asserted as a `MultiHazardEvent` + - what happens if we pass `MultiHazardEvent` to the `Compute` method of a different receptor that we haven't provided multi-hazard logic for? + - It should work fine, but Compute will only run on the first `HazardEvent` in the `MultiHazardEvent`. + +## G2CRM Implementation - Structure.cs + +### Rebuild + +- param "rebuildFactor" - amount of structure to rebuild. 
+ - range: 0.0-1.0 + - rebuildFactor < 1.0 ==> partial rebuild due to storm occuring during rebuild + - e.g. rebuildFactor = 0.75 means rebuild 75% of the structure damage + - **Where does this comefrom?** + +- structureAmountToRebuild = CalculateStructureDepreciatedReplacementValue(year) - CalculateCurrentStructureValue(year) + - = CSDRV(year) - CCSV(year) + - = CSDRV(year) - (CSDRV(year) * (1 - CurrentStructureDamageFactor) + - **structureAmountToRebuild = Structure Value * pct_damage** + - ==> So rebuild the damage that occurred from the event + +- `if (rebuildFactor < 1.0) { structureAmountToRebuild *= rebuildFactor }` + - When function is called we know rebuildFactor. e.g. We know we want to repair 75% of the damage. + - but why is it only partial rebuild? where is rebuildFactor calculated? What does this represent? + - + +### Damage + +- structureModifier ==> pct_damage + - From damage function triangular distribution (`G2CRM.Core.Math.TriangularDistribution.triangularDegen()`) + +- structureAmountToDamage = CalculateCurrentStructureValue(year) * structureModifier + - ==> like go-consequences `sval * sdampercent` + + + +## General Brainstorming + +- In `compute-configuration.go` the `Computable` struct includes a `ComputeLifeloss` bool. We could add another bool for `ComputeReconstruction` that would enable that calculation without making it a default. 
+ + diff --git a/structures/structure.go b/structures/structure.go index c416286..4dc6d8f 100644 --- a/structures/structure.go +++ b/structures/structure.go @@ -6,6 +6,7 @@ import ( "math" "math/rand" "strings" + "time" "github.com/USACE/go-consequences/consequences" "github.com/USACE/go-consequences/geography" @@ -127,10 +128,10 @@ func (s StructureStochastic) Compute(d hazards.HazardEvent) (consequences.Result // Compute implements the consequences.Receptor interface on StrucutreDeterminstic func (s StructureDeterministic) Compute(d hazards.HazardEvent) (consequences.Result, error) { - /*add, addok := d.(hazards.ArrivalDepthandDurationEvent) - if addok { - return computeConsequencesWithReconstruction(add, s) - }*/ + addMulti, ok := d.(hazards.MultiHazardEvent) + if ok { + return computeConsequencesMultiHazard(addMulti, s) + } return computeConsequences(d, s) } @@ -148,6 +149,7 @@ func (s StructureDeterministic) Clone() StructureDeterministic { NumStories: s.NumStories, BaseStructure: BaseStructure{Name: s.Name, CBFips: s.CBFips, X: s.X, Y: s.Y, DamCat: s.DamCat, GroundElevation: s.GroundElevation}} } + func computeConsequences(e hazards.HazardEvent, s StructureDeterministic) (consequences.Result, error) { header := []string{"fd_id", "x", "y", "hazard", "damage category", "occupancy type", "structure damage", "content damage", "pop2amu65", "pop2amo65", "pop2pmu65", "pop2pmo65", "cbfips", "s_dam_per", "c_dam_per"} results := []interface{}{"updateme", 0.0, 0.0, e, "dc", "ot", 0.0, 0.0, 0, 0, 0, 0, "CENSUSBLOCKFIPS", 0, 0} @@ -163,6 +165,7 @@ func computeConsequences(e hazards.HazardEvent, s StructureDeterministic) (conse if cderr != nil { return ret, cderr } + if sDamFun.DamageDriver == hazards.Depth { damagefunctionMax := 24.0 //default in case it doesnt cast to paired data. 
damagefunctionMax = sDamFun.DamageFunction.Xvals[len(sDamFun.DamageFunction.Xvals)-1] @@ -228,55 +231,451 @@ func computeConsequences(e hazards.HazardEvent, s StructureDeterministic) (conse return ret, err } -/* -func computeConsequencesWithReconstruction(e hazards.ArrivalDepthandDurationEvent, s StructureDeterministic) (consequences.Result, error) { - header := []string{"fd_id", "x", "y", "hazard", "damage category", "occupancy type", "structure damage", "content damage", "pop2amu65", "pop2amo65", "pop2pmu65", "pop2pmo65", "cbfips", "daystoreconstruction", "rebuilddate"} - results := []interface{}{"updateme", 0.0, 0.0, e, "dc", "ot", 0.0, 0.0, 0, 0, 0, 0, "CENSUSBLOCKFIPS", 0.0, time.Now()} +func computeConsequencesWithReconstruction(e hazards.HazardEvent, s StructureDeterministic) (consequences.Result, error) { + // NOTE: This version gets reconstruction as a damage function on the structure's occtype + + header := []string{"fd_id", "x", "y", "hazard", "damage category", "occupancy type", "structure damage", "content damage", "pop2amu65", "pop2amo65", "pop2pmu65", "pop2pmo65", "cbfips", "s_dam_per", "c_dam_per", "reconstruction_days"} + results := []interface{}{"updateme", 0.0, 0.0, e, "dc", "ot", 0.0, 0.0, 0, 0, 0, 0, "CENSUSBLOCKFIPS", 0, 0, 0.0} var ret = consequences.Result{Headers: header, Result: results} var err error = nil + sval := s.StructVal + conval := s.ContVal + sDamFun, sderr := s.OccType.GetComponentDamageFunctionForHazard("structure", e) + if sderr != nil { + return ret, sderr + } + cDamFun, cderr := s.OccType.GetComponentDamageFunctionForHazard("contents", e) + if cderr != nil { + return ret, cderr + } - if e.Has(hazards.Depth) { //currently the damage functions are depth based, so depth is required, the getstructuredamagefunctionforhazard method chooses approprate damage functions for a hazard. 
- if e.Depth() < 0.0 { - err = errors.New("depth above ground was less than zero") - } - if e.Depth() > 9999.0 { - err = errors.New("depth above ground was greater than 9999") + rDamFun, rderr := s.OccType.GetComponentDamageFunctionForHazard("reconstruction", e) + if rderr != nil { + return ret, rderr + } + + //TODO: Do we want to return the date that construction will be complete? Only useful if event has arrival time + if sDamFun.DamageDriver == hazards.Depth { + damagefunctionMax := 24.0 //default in case it doesnt cast to paired data. + damagefunctionMax = sDamFun.DamageFunction.Xvals[len(sDamFun.DamageFunction.Xvals)-1] + representativeStories := math.Ceil(damagefunctionMax / 9.0) + if s.NumStories > int32(representativeStories) { + //there is great potential that the value of the structure is not representative of the damage function range. + modifier := representativeStories / float64(s.NumStories) + sval *= modifier + conval *= modifier } - depthAboveFFE := e.Depth() - s.FoundHt - damagePercent := s.OccType.GetStructureDamageFunctionForHazard(e).SampleValue(depthAboveFFE) / 100 //assumes what type the damage array is in - cdamagePercent := s.OccType.GetContentDamageFunctionForHazard(e).SampleValue(depthAboveFFE) / 100 - reconstructiondays := 0.0 - switch s.DamCat { - case "RES": - reconstructiondays = 30.0 - case "COM": - reconstructiondays = 90.0 - case "IND": - reconstructiondays = 270.0 - case "PUB": - reconstructiondays = 360.0 + } //else dont modify value because damage is not driven by depth + if e.Has(sDamFun.DamageDriver) && e.Has(cDamFun.DamageDriver) && e.Has(rDamFun.DamageDriver) { + //they exist! 
+ sdampercent := 0.0 + cdampercent := 0.0 + reconstruction_days := 0.0 + + switch sDamFun.DamageDriver { + case hazards.Depth: + depthAboveFFE := e.Depth() - s.FoundHt + sdampercent = sDamFun.DamageFunction.SampleValue(depthAboveFFE) / 100 //assumes what type the damage array is in + cdampercent = cDamFun.DamageFunction.SampleValue(depthAboveFFE) / 100 + duration := 0.0 + if e.Duration() > 0.0 { // nodata value for e.Duration == -901.0 + duration = e.Duration() + } + reconstruction_days = rDamFun.DamageFunction.SampleValue(sdampercent) + duration + case hazards.Erosion: + sdampercent = sDamFun.DamageFunction.SampleValue(e.Erosion()) / 100 //assumes what type the damage array is in + cdampercent = cDamFun.DamageFunction.SampleValue(e.Erosion()) / 100 + reconstruction_days = rDamFun.DamageFunction.SampleValue(sdampercent) default: - reconstructiondays = 180.0 + return consequences.Result{}, errors.New("structures: could not understand the damage driver") } + ret.Result[0] = s.BaseStructure.Name ret.Result[1] = s.BaseStructure.X ret.Result[2] = s.BaseStructure.Y ret.Result[3] = e ret.Result[4] = s.BaseStructure.DamCat ret.Result[5] = s.OccType.Name - ret.Result[6] = damagePercent * s.StructVal - ret.Result[7] = cdamagePercent * s.ContVal + ret.Result[6] = sdampercent * sval + ret.Result[7] = cdampercent * conval ret.Result[8] = s.Pop2amu65 ret.Result[9] = s.Pop2amo65 ret.Result[10] = s.Pop2pmu65 ret.Result[11] = s.Pop2pmo65 ret.Result[12] = s.CBFips - rebuilddays := (damagePercent * reconstructiondays) + e.Duration() - ret.Result[13] = rebuilddays - ret.Result[14] = e.ArrivalTime().AddDate(0, 0, int(rebuilddays)) //rounds to int + ret.Result[13] = sdampercent + ret.Result[14] = cdampercent + ret.Result[15] = math.Ceil(reconstruction_days) + } else { - err = errors.New("Hazard did not contain depth") + err = errors.New("structure: hazard did not contain valid parameters to impact a structure") + } + return ret, err +} + +func computeConsequencesMulti(events 
[]hazards.HazardEvent, s StructureDeterministic) ([]consequences.Result, error) { + + var ret = make([]consequences.Result, len(events)) + var err error = nil + + // get damage functions for structure based on first hazard event (assumes same parameters) to prevent repeated lookups + sDamFun, sderr := s.OccType.GetComponentDamageFunctionForHazard("structure", events[0]) + if sderr != nil { + return ret, sderr + } + cDamFun, cderr := s.OccType.GetComponentDamageFunctionForHazard("contents", events[0]) + if cderr != nil { + return ret, cderr + } + rDamFun, rderr := s.OccType.GetComponentDamageFunctionForHazard("reconstruction", events[0]) + if rderr != nil { + return ret, rderr + } + + sval := s.StructVal + svalcurr := sval + sDamageFactor := 0.0 // this is the current pct_damage to the structure + conval := s.ContVal + convalcurr := conval + cDamageFactor := 0.0 // this is the current pct_damage to the contents + + if sDamFun.DamageDriver == hazards.Depth { + damagefunctionMax := 24.0 //default in case it doesnt cast to paired data. + damagefunctionMax = sDamFun.DamageFunction.Xvals[len(sDamFun.DamageFunction.Xvals)-1] + representativeStories := math.Ceil(damagefunctionMax / 9.0) + if s.NumStories > int32(representativeStories) { + //there is great potential that the value of the structure is not representative of the damage function range. 
+ modifier := representativeStories / float64(s.NumStories) + sval *= modifier + conval *= modifier + } + } //else dont modify value because damage is not driven by depth + + for i, e := range events { + if !e.Has(hazards.ArrivalTime) { + return ret, errors.New("structures: hazard event does not have ArrivalTime") + } + + if i > 0 { + // update structure value and damagefactor to reflect construction progress from previous event + tc0, err := ret[i-1].Fetch("completion_date") // + if err != nil { + return ret, errors.New("structures: unable to get completion date for previous hazard event") + } + last_completion_time := tc0.(time.Time) + + // calculate reconstruction progress assuming linear rebuild + t0 := events[i-1].ArrivalTime().AddDate(0, 0, int(events[i-1].Duration())) // this is the time reconstruction began + pct_complete := (float64(e.ArrivalTime().Sub(t0)) / float64(last_completion_time.Sub(t0))) // this is the percentage of the reconstruction that is complete + + if pct_complete > 1.0 { + pct_complete = 1.0 + } + + sPctloss_rebuilt := sDamageFactor * pct_complete + cPctloss_rebuilt := cDamageFactor * pct_complete + + // update structure damage factor to reflect completed construction + sDamageFactor = sDamageFactor - sPctloss_rebuilt + if sDamageFactor < 0.0 { + sDamageFactor = 0 + } + + cDamageFactor = cDamageFactor - cPctloss_rebuilt + if cDamageFactor < 0.0 { + cDamageFactor = 0 + } + + // update structure value to reflect completed construction + svalcurr = sval * (1 - sDamageFactor) + convalcurr = conval * (1 - cDamageFactor) + } + + header := []string{"fd_id", "structure damage", "content damage", "s_dam_per", "c_dam_per", "reconstruction_days", "completion_date", "structure_value", "content_value"} + values := []interface{}{"updateme", 0.0, 0.0, 0.0, 0.0, 0.0, time.Time{}, 0.0, 0.0} + result := consequences.Result{Headers: header, Result: values} + + if e.Has(sDamFun.DamageDriver) && e.Has(cDamFun.DamageDriver) && e.Has(rDamFun.DamageDriver) { + 
//they exist! + sdampercent := 0.0 + sdamage := 0.0 + cdampercent := 0.0 + cdamage := 0.0 + + reconstruction_days := 0.0 + completion_date := time.Time{} + + switch sDamFun.DamageDriver { + case hazards.Depth: + depthAboveFFE := e.Depth() - s.FoundHt + sdampercent = sDamFun.DamageFunction.SampleValue(depthAboveFFE) / 100 //assumes what type the damage array is in + cdampercent = cDamFun.DamageFunction.SampleValue(depthAboveFFE) / 100 + sdamage = svalcurr * sdampercent + cdamage = convalcurr * cdampercent + + sDamageFactor = 1 - (1-sDamageFactor)*(1-sdampercent) + cDamageFactor = 1 - (1-cDamageFactor)*(1-cdampercent) + // total time to complete reconstruction consists of three parts + // 1. Time between the start and end of the event. This is e.Duration() + // 2. Time between the end of the event and the beginning of reconstruction. + // - In reality, this would depend on a lot but simplest assumption is that reconstruction can begin as soon as event ends. + // 3. Time between reconstruction start and reconstruction end. This is the value returned from the damage function + + arrival := e.ArrivalTime() // do we need a check that the ArrivalTime is not just the default time.Time{}? 
+
+ duration := 0.0
+ if e.Duration() > 0.0 { // nodata value for e.Duration == -901.0
+ duration = e.Duration()
+ }
+
+ // calculate reconstruction_days based on damageFactor to account for potential remaining damage from previous events
+ reconstruction_days = math.Ceil(rDamFun.DamageFunction.SampleValue(sDamageFactor) + duration)
+ completion_date = arrival.AddDate(0, 0, int(reconstruction_days))
+
+ case hazards.Erosion:
+ sdampercent = sDamFun.DamageFunction.SampleValue(e.Erosion()) / 100 //assumes what type the damage array is in
+ cdampercent = cDamFun.DamageFunction.SampleValue(e.Erosion()) / 100
+ sdamage = svalcurr * sdampercent
+ cdamage = convalcurr * cdampercent
+
+ sDamageFactor = 1 - (1-sDamageFactor)*(1-sdampercent)
+ cDamageFactor = 1 - (1-cDamageFactor)*(1-cdampercent)
+ arrival := e.ArrivalTime()
+ // calculate reconstruction_days based on damageFactor to account for potential remaining damage from previous events
+ reconstruction_days = math.Ceil(rDamFun.DamageFunction.SampleValue(sDamageFactor))
+ completion_date = arrival.AddDate(0, 0, int(reconstruction_days))
+
+ default:
+ return ret, errors.New("structures: could not understand the damage driver")
+ }
+
+ svalcurr = svalcurr * (1 - sDamageFactor)
+ convalcurr = convalcurr * (1 - cDamageFactor)
+
+ result.Result[0] = s.BaseStructure.Name
+ result.Result[1] = sdamage
+ result.Result[2] = cdamage
+ result.Result[3] = sdampercent
+ result.Result[4] = cdampercent
+ result.Result[5] = math.Ceil(reconstruction_days)
+ result.Result[6] = completion_date
+ result.Result[7] = svalcurr
+ result.Result[8] = convalcurr
+
+ } else {
+ err = errors.New("structure: hazard did not contain valid parameters to impact a structure")
+ }
+ ret[i] = result
+ }
+ return ret, err
+}
+
+func computeConsequencesMultiHazard(event hazards.MultiHazardEvent, s StructureDeterministic) (consequences.Result, error) {
+ // this function needs to return a single result. 
Not a slice of results
+ // Make a nested Result where each column is itself a Result
+
+ // Rethinking the results format. The current version repeats the structure info for each result.
+ // The main result body can include the structure info, and we can simply store the details of the iteration results in a column as json.
+ mainHeader := []string{
+ "fd_id", "x", "y", "damage category", "occupancy type",
+ "pop2amu65", "pop2amo65", "pop2pmu65", "pop2pmo65", "cbfips",
+ "original structure value", "original content value", "final structure value", "final content value", //
+ "original ffe", "final ffe", "times rebuilt", "times raised", "StructureTotalLoss", "ContentsTotalLoss",
+ "hazard results",
+ }
+ subResultsHeader := make([]string, 0)
+ subResultsResult := make([]interface{}, 0)
+ subResult := consequences.Result{Headers: subResultsHeader, Result: subResultsResult}
+ mainResults := []interface{}{
+ "updateme", 0.0, 0.0, "damcat", "occtype",
+ 0.0, 0.0, 0.0, 0.0, "cbfips",
+ 0.0, 0.0, 0.0, 0.0,
+ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
+ subResult,
+ }
+ var ret = consequences.Result{Headers: mainHeader, Result: mainResults}
+ var err error = nil
+
+ // damage functions for structure
+ sDamFun, sderr := s.OccType.GetComponentDamageFunctionForHazard("structure", event)
+ if sderr != nil {
+ return ret, sderr
+ }
+ cDamFun, cderr := s.OccType.GetComponentDamageFunctionForHazard("contents", event)
+ if cderr != nil {
+ return ret, cderr
+ }
+ rDamFun, rderr := s.OccType.GetComponentDamageFunctionForHazard("reconstruction", event)
+ if rderr != nil {
+ return ret, rderr
+ }
+
+ // variables for tracking value and damage across hazards
+ sval := s.StructVal
+ svalcurr := sval
+ sDamageFactor := 0.0 // this is the current pct_damage to the structure
+ conval := s.ContVal
+ convalcurr := conval
+ cDamageFactor := 0.0 // this is the current pct_damage to the contents
+
+ // adjust value for tall structures
+ if sDamFun.DamageDriver == hazards.Depth {
+ damagefunctionMax := 
24.0 //default in case it doesnt cast to paired data. + damagefunctionMax = sDamFun.DamageFunction.Xvals[len(sDamFun.DamageFunction.Xvals)-1] + representativeStories := math.Ceil(damagefunctionMax / 9.0) + if s.NumStories > int32(representativeStories) { + //there is great potential that the value of the structure is not representative of the damage function range. + modifier := representativeStories / float64(s.NumStories) + sval *= modifier + conval *= modifier + } + } //else dont modify value because damage is not driven by depth + + for { + // Calculate reconstruction from previous hazard if we aren't on the first one + if event.HasPrevious() { + + pr, err := subResult.Fetch(fmt.Sprintf("%d", event.Index()-1)) + // pr, err := ret.Fetch(fmt.Sprintf("%d", event.Index()-1)) + if err != nil { + return ret, fmt.Errorf("structures: Unable to fetch previous result at index = %v", event.Index()) + } + previous_result := pr.(consequences.Result) + tc0, err := previous_result.Fetch("completion_date") + if err != nil { + return ret, errors.New("structures: unable to get completion date for previous hazard event") + } + last_completion_time := tc0.(time.Time) + + previous_event, err := event.Previous() + if err != nil { + return ret, err + } + + // calculate reconstruction progress assuming linear rebuild + t0 := previous_event.ArrivalTime().AddDate(0, 0, int(previous_event.Duration())) // this is the time reconstruction began + pct_complete := (float64(event.ArrivalTime().Sub(t0)) / float64(last_completion_time.Sub(t0))) // this is the percentage of the reconstruction that is complete + + if pct_complete > 1.0 { + pct_complete = 1.0 + } + + sPctloss_rebuilt := sDamageFactor * pct_complete + cPctloss_rebuilt := cDamageFactor * pct_complete + + // update structure damage factor to reflect completed construction + sDamageFactor = sDamageFactor - sPctloss_rebuilt + if sDamageFactor < 0.0 { + sDamageFactor = 0 + } + + cDamageFactor = cDamageFactor - cPctloss_rebuilt + if 
cDamageFactor < 0.0 { + cDamageFactor = 0 + } + + // update structure value to reflect completed construction + svalcurr = sval * (1 - sDamageFactor) + convalcurr = conval * (1 - cDamageFactor) + } + + header := []string{"hazard", "structure damage", "content damage", "s_dam_per", "c_dam_per", "reconstruction_days", "completion_date", "structure_value", "content_value"} + values := []interface{}{event.This(), 0.0, 0.0, 0.0, 0.0, 0.0, time.Time{}, 0.0, 0.0} + result := consequences.Result{Headers: header, Result: values} + + if event.Has(sDamFun.DamageDriver) && event.Has(cDamFun.DamageDriver) && event.Has(rDamFun.DamageDriver) { + //they exist! + sdampercent := 0.0 + sdamage := 0.0 + cdampercent := 0.0 + cdamage := 0.0 + + reconstruction_days := 0.0 + completion_date := time.Time{} + + switch sDamFun.DamageDriver { + case hazards.Depth: + depthAboveFFE := event.Depth() - s.FoundHt + sdampercent = sDamFun.DamageFunction.SampleValue(depthAboveFFE) / 100 //assumes what type the damage array is in + cdampercent = cDamFun.DamageFunction.SampleValue(depthAboveFFE) / 100 + sdamage = svalcurr * sdampercent + cdamage = convalcurr * cdampercent + + sDamageFactor = 1 - (1-sDamageFactor)*(1-sdampercent) + cDamageFactor = 1 - (1-cDamageFactor)*(1-cdampercent) + // total time to complete reconstruction consists of three parts + // 1. Time between the start and end of the event. This is e.Duration() + // 2. Time between the end of the event and the beginning of reconstruction. + // - In reality, this would depend on a lot but simplest assumption is that reconstruction can begin as soon as event ends. + // 3. Time between reconstruction start and reconstruction end. This is the value returned from the damage function + + arrival := event.ArrivalTime() // do we need a check that the ArrivalTime is not just the default time.Time{}? 
+
+ duration := 0.0
+ if event.Duration() > 0.0 { // nodata value for e.Duration == -901.0
+ duration = event.Duration()
+ }
+
+ // calculate reconstruction_days based on damageFactor to account for potential remaining damage from previous events
+ reconstruction_days = math.Ceil(rDamFun.DamageFunction.SampleValue(sDamageFactor) + duration)
+ completion_date = arrival.AddDate(0, 0, int(reconstruction_days))
+
+ case hazards.Erosion:
+ sdampercent = sDamFun.DamageFunction.SampleValue(event.Erosion()) / 100 //assumes what type the damage array is in
+ cdampercent = cDamFun.DamageFunction.SampleValue(event.Erosion()) / 100
+ sdamage = svalcurr * sdampercent
+ cdamage = convalcurr * cdampercent
+
+ sDamageFactor = 1 - (1-sDamageFactor)*(1-sdampercent)
+ cDamageFactor = 1 - (1-cDamageFactor)*(1-cdampercent)
+ arrival := event.ArrivalTime()
+ // calculate reconstruction_days based on damageFactor to account for potential remaining damage from previous events
+ reconstruction_days = math.Ceil(rDamFun.DamageFunction.SampleValue(sDamageFactor))
+ completion_date = arrival.AddDate(0, 0, int(reconstruction_days))
+
+ default:
+ return ret, errors.New("structures: could not understand the damage driver")
+ }
+
+ svalcurr = svalcurr * (1 - sDamageFactor)
+ convalcurr = convalcurr * (1 - cDamageFactor)
+
+ result.Result[1] = sdamage
+ result.Result[2] = cdamage
+ result.Result[3] = sdampercent
+ result.Result[4] = cdampercent
+ result.Result[5] = math.Ceil(reconstruction_days)
+ result.Result[6] = completion_date
+ result.Result[7] = svalcurr
+ result.Result[8] = convalcurr
+
+ } else {
+ err = errors.New("structure: hazard did not contain valid parameters to impact a structure")
+ }
+ subResultsHeader = append(subResultsHeader, fmt.Sprintf("%d", event.Index()))
+ subResultsResult = append(subResultsResult, result)
+ subResult = consequences.Result{Headers: subResultsHeader, Result: subResultsResult}
+ ret.Result[0] = s.BaseStructure.Name
+ ret.Result[1] = s.BaseStructure.X
+ 
ret.Result[2] = s.BaseStructure.Y + ret.Result[3] = s.BaseStructure.DamCat + ret.Result[4] = s.OccType.Name + ret.Result[5] = s.Pop2amu65 + ret.Result[6] = s.Pop2amo65 + ret.Result[7] = s.Pop2pmu65 + ret.Result[8] = s.Pop2pmo65 + ret.Result[9] = s.CBFips + ret.Result[10] = sval + ret.Result[11] = conval + ret.Result[12] = svalcurr + ret.Result[13] = convalcurr + ret.Result[20] = subResult + + if event.HasNext() { + event.Increment() // go to the next event and restart loop + } else { + break + } } return ret, err } -*/ diff --git a/structures/structures_test.go b/structures/structures_test.go index 621d70c..feb6677 100644 --- a/structures/structures_test.go +++ b/structures/structures_test.go @@ -1,9 +1,11 @@ package structures import ( + "fmt" "math" "math/rand" "testing" + "time" "github.com/HydrologicEngineeringCenter/go-statistics/paireddata" "github.com/HydrologicEngineeringCenter/go-statistics/statistics" @@ -183,7 +185,6 @@ func TestComputeConsequences_erosion(t *testing.T) { } } -/* func TestComputeConsequencesWithReconstruction(t *testing.T) { //build a basic structure with a defined depth damage relationship. 
@@ -200,7 +201,24 @@ func TestComputeConsequencesWithReconstruction(t *testing.T) { cm := make(map[hazards.Parameter]DamageFunction) var cdf = DamageFunctionFamily{DamageFunctions: cm} cdf.DamageFunctions[hazards.Default] = pddf - var o = OccupancyTypeDeterministic{Name: "test", StructureDFF: sdf, ContentDFF: cdf} + + xr := []float64{0.0, 1.0} + yr := []float64{0, 100.0} + pdr := paireddata.PairedData{Xvals: xr, Yvals: yr} + pdrdf := DamageFunction{} + pdrdf.DamageFunction = pdr + pdrdf.DamageDriver = hazards.Depth + pdrdf.Source = "created for this test" + dm := make(map[hazards.Parameter]DamageFunction) + var rdf = DamageFunctionFamily{DamageFunctions: dm} + rdf.DamageFunctions[hazards.Default] = pdrdf + + components := make(map[string]DamageFunctionFamily) + components["structure"] = sdf + components["contents"] = cdf + components["reconstruction"] = rdf + + var o = OccupancyTypeDeterministic{Name: "test", ComponentDamageFunctions: components} var s = StructureDeterministic{OccType: o, StructVal: 100.0, ContVal: 100.0, FoundHt: 0.0, BaseStructure: BaseStructure{DamCat: "category"}} //test depth values @@ -212,29 +230,270 @@ func TestComputeConsequencesWithReconstruction(t *testing.T) { expectedResults := []float64{0.0, 0.0, 10.0, 10.001, 22.5, 25.0, 27.5, 39.9, 40.0, 40.0} for idx := range depths { d.SetDepth(depths[idx]) - r, err := s.Compute(d) + r, err := computeConsequencesWithReconstruction(d, s) if err != nil { panic(err) } - out, err := r.Fetch("daystoreconstruction") + + out, err := r.Fetch("reconstruction_days") if err != nil { panic(err) } got := out.(float64) - - diff := (expectedResults[idx]*1.8 + d.Duration()) - got //180.0/100=1.8 + fmt.Printf("Reconstruction time was %3.2f days.\n", got) + diff := (expectedResults[idx] + d.Duration()) - got //180.0/100=1.8 if math.Abs(diff) > .0000000000001 { t.Errorf("Compute(%f) = %f; expected %f", depths[idx], got, expectedResults[idx]*1.8+d.Duration()) } - out2, err := r.Fetch("rebuilddate") + + } + +} + +func 
TestComputeConsequencesMulti(t *testing.T) { + + //build a basic structure with a defined depth damage relationship. + x := []float64{1.0, 2.0, 3.0, 4.0} + y := []float64{10.0, 20.0, 30.0, 40.0} + pd := paireddata.PairedData{Xvals: x, Yvals: y} + pddf := DamageFunction{} + pddf.DamageFunction = pd + pddf.DamageDriver = hazards.Depth + pddf.Source = "created for this test" + sm := make(map[hazards.Parameter]DamageFunction) + var sdf = DamageFunctionFamily{DamageFunctions: sm} + sdf.DamageFunctions[hazards.Default] = pddf + cm := make(map[hazards.Parameter]DamageFunction) + var cdf = DamageFunctionFamily{DamageFunctions: cm} + cdf.DamageFunctions[hazards.Default] = pddf + + xr := []float64{0.0, 1.0} + yr := []float64{0, 100.0} + pdr := paireddata.PairedData{Xvals: xr, Yvals: yr} + pdrdf := DamageFunction{} + pdrdf.DamageFunction = pdr + pdrdf.DamageDriver = hazards.Depth + pdrdf.Source = "created for this test" + dm := make(map[hazards.Parameter]DamageFunction) + var rdf = DamageFunctionFamily{DamageFunctions: dm} + rdf.DamageFunctions[hazards.Default] = pdrdf + + components := make(map[string]DamageFunctionFamily) + components["structure"] = sdf + components["contents"] = cdf + components["reconstruction"] = rdf + + var o = OccupancyTypeDeterministic{Name: "test", ComponentDamageFunctions: components} + var s = StructureDeterministic{OccType: o, StructVal: 100.0, ContVal: 100.0, FoundHt: 0.0, BaseStructure: BaseStructure{DamCat: "category"}} + + // create a series of hazardEvents + var d1 = hazards.ArrivalDepthandDurationEvent{} + d1.SetDuration(0) + d1.SetDepth(1.0) + t1 := time.Date(1984, time.Month(1), 1, 0, 0, 0, 0, time.UTC) + d1.SetArrivalTime(t1) + + var d2 = hazards.ArrivalDepthandDurationEvent{} + d2.SetDuration(5.0) + d2.SetDepth(1.0) + t2 := time.Date(1984, time.Month(1), 11, 0, 0, 0, 0, time.UTC) + d2.SetArrivalTime(t2) + + var d3 = hazards.ArrivalDepthandDurationEvent{} + d3.SetDuration(0.0) + d3.SetDepth(1.0) + t3 := time.Date(1984, time.Month(1), 21, 
0, 0, 0, 0, time.UTC) + d3.SetArrivalTime(t3) + + var d4 = hazards.ArrivalDepthandDurationEvent{} + d4.SetDuration(0.0) + d4.SetDepth(2.0) + t4 := time.Date(1985, time.Month(1), 1, 0, 0, 0, 0, time.UTC) + d4.SetArrivalTime(t4) + + var d5 = hazards.ArrivalDepthandDurationEvent{} + d5.SetDuration(0.0) + d5.SetDepth(2.0) + t5 := time.Date(1985, time.Month(1), 11, 0, 0, 0, 0, time.UTC) + d5.SetArrivalTime(t5) + + events := []hazards.HazardEvent{d1, d2, d3, d4, d5} + + et1 := time.Date(1984, time.Month(1), 11, 0, 0, 0, 0, time.UTC) + et2 := time.Date(1984, time.Month(1), 26, 0, 0, 0, 0, time.UTC) + et3 := time.Date(1984, time.Month(2), 5, 0, 0, 0, 0, time.UTC) + et4 := time.Date(1985, time.Month(1), 21, 0, 0, 0, 0, time.UTC) + et5 := time.Date(1985, time.Month(2), 8, 0, 0, 0, 0, time.UTC) + + expectedResults := []time.Time{et1, et2, et3, et4, et5} + expectedDmgs := []float64{10.0, 10.0, 9.5, 20.0, 18.0} + // event 1 (10% dmg) arrives first (obviously). No adjustments required. dmg = 100*0.1 = 10.0 + // event 2 (10% dmg) arrives after event 1 completes reconstruction. No adjustments required. dmg = 100*0.1 = 10.0 + // event 3 interrupts event 2 reconstruction @ 50% complete + // event 2 had dmg=10, so structure was repaired by 5 when event 3 hit. + // structure value when event 3 hits is 100-(10-5)=95. + // event 3 does 10% damage ==> expected damage = 0.1*95 = 9.5 + // damageFactor to calculate reconstruction = 1 - (1-sDamageFactor)*(1-sdampercent) = 1 - (1-0.05)*(1-.1) = 0.145 ==> 14.5 reconstruction days (rounds to 15) + // completion date should be 1984/2/5 + // event 4 occurs after event 3 reconstruction complete. No adjustments required. dmg = 100*0.2 = 20 + // event 5 interrupts event 4 reconstruction @ 50% complete + // structure value when event 5 hits is 100 - (20-10) = 90. 
+ // event 5 does 20% damage ==> expected damage = 0.2*90 = 18.0 + // damageFactor to calculate reconstruction = 1 - (1-sDamageFactor)*(1-sdampercent) = 1 - (1-0.1)*(1-.2) = 0.28 ==> 28 reconstruction days + + results, err := computeConsequencesMulti(events, s) + if err != nil { + panic(err) + } + + for idx := range results { + out, err := results[idx].Fetch("completion_date") + if err != nil { + panic(err) + } + + dif := expectedResults[idx].Sub(out.(time.Time)) + fmt.Printf("Completion date was %v. Expected: %v. Diff: %v\n", out, expectedResults[idx], dif) + if math.Abs(float64(dif)) > float64(time.Minute) { // if the error is greater than about 1 minute + t.Errorf("Completion date was %v. Expected: %v. Diff: %v\n", out, expectedResults[idx], dif) + + } + + dmgout, err := results[idx].Fetch("structure damage") if err != nil { panic(err) } - gotdate := out2.(time.Time) - if !gotdate.Equal(d.ArrivalTime().AddDate(0, 0, int(expectedResults[idx]*1.8+d.Duration()))) { - t.Errorf("Compute(%f) = %s; expected %s", depths[idx], gotdate, d.ArrivalTime().AddDate(0, 0, int(expectedResults[idx]*1.8+d.Duration()))) + fmt.Printf("Damage was %3.2f. Expected: %3.2f\n", dmgout, expectedDmgs[idx]) + if math.Abs(dmgout.(float64)-float64(expectedDmgs[idx])) > 0.000000001 { + t.Errorf("Damage was %3.2f. Expected: %3.2f\n", dmgout, expectedDmgs[idx]) } + } } -*/ + +func TestComputeConsequencesMultiHazard(t *testing.T) { + //build a basic structure with a defined depth damage relationship. 
+ x := []float64{1.0, 2.0, 3.0, 4.0} + y := []float64{10.0, 20.0, 30.0, 40.0} + pd := paireddata.PairedData{Xvals: x, Yvals: y} + pddf := DamageFunction{} + pddf.DamageFunction = pd + pddf.DamageDriver = hazards.Depth + pddf.Source = "created for this test" + sm := make(map[hazards.Parameter]DamageFunction) + var sdf = DamageFunctionFamily{DamageFunctions: sm} + sdf.DamageFunctions[hazards.Default] = pddf + cm := make(map[hazards.Parameter]DamageFunction) + var cdf = DamageFunctionFamily{DamageFunctions: cm} + cdf.DamageFunctions[hazards.Default] = pddf + + xr := []float64{0.0, 1.0} + yr := []float64{0, 100.0} + pdr := paireddata.PairedData{Xvals: xr, Yvals: yr} + pdrdf := DamageFunction{} + pdrdf.DamageFunction = pdr + pdrdf.DamageDriver = hazards.Depth + pdrdf.Source = "created for this test" + dm := make(map[hazards.Parameter]DamageFunction) + var rdf = DamageFunctionFamily{DamageFunctions: dm} + rdf.DamageFunctions[hazards.Default] = pdrdf + + components := make(map[string]DamageFunctionFamily) + components["structure"] = sdf + components["contents"] = cdf + components["reconstruction"] = rdf + + var o = OccupancyTypeDeterministic{Name: "test", ComponentDamageFunctions: components} + var s = StructureDeterministic{OccType: o, StructVal: 100.0, ContVal: 100.0, FoundHt: 0.0, BaseStructure: BaseStructure{DamCat: "category"}} + + // create a series of hazardEvents + var d1 = hazards.ArrivalDepthandDurationEvent{} + d1.SetDuration(0) + d1.SetDepth(1.0) + t1 := time.Date(1984, time.Month(1), 1, 0, 0, 0, 0, time.UTC) + d1.SetArrivalTime(t1) + + var d2 = hazards.ArrivalDepthandDurationEvent{} + d2.SetDuration(5.0) + d2.SetDepth(1.0) + t2 := time.Date(1984, time.Month(1), 11, 0, 0, 0, 0, time.UTC) + d2.SetArrivalTime(t2) + + var d3 = hazards.ArrivalDepthandDurationEvent{} + d3.SetDuration(0.0) + d3.SetDepth(1.0) + t3 := time.Date(1984, time.Month(1), 21, 0, 0, 0, 0, time.UTC) + d3.SetArrivalTime(t3) + + var d4 = hazards.ArrivalDepthandDurationEvent{} + 
d4.SetDuration(0.0) + d4.SetDepth(2.0) + t4 := time.Date(1985, time.Month(1), 1, 0, 0, 0, 0, time.UTC) + d4.SetArrivalTime(t4) + + var d5 = hazards.ArrivalDepthandDurationEvent{} + d5.SetDuration(0.0) + d5.SetDepth(2.0) + t5 := time.Date(1985, time.Month(1), 11, 0, 0, 0, 0, time.UTC) + d5.SetArrivalTime(t5) + + events := []hazards.ArrivalDepthandDurationEvent{d5, d1, d2, d3, d4} + + addMulti := &hazards.ArrivalDepthandDurationEventMulti{Events: events} //need to use the pointer reference because methods on MultiHazardEvent require pointers + + // test that we can sort events by ArrivalTime + if !addMulti.IsSorted() { + fmt.Println("Sorting...") + addMulti.Sort() + } + + et1 := time.Date(1984, time.Month(1), 11, 0, 0, 0, 0, time.UTC) + et2 := time.Date(1984, time.Month(1), 26, 0, 0, 0, 0, time.UTC) + et3 := time.Date(1984, time.Month(2), 5, 0, 0, 0, 0, time.UTC) + et4 := time.Date(1985, time.Month(1), 21, 0, 0, 0, 0, time.UTC) + et5 := time.Date(1985, time.Month(2), 8, 0, 0, 0, 0, time.UTC) + + expectedResults := []time.Time{et1, et2, et3, et4, et5} + expectedDmgs := []float64{10.0, 10.0, 9.5, 20.0, 18.0} + + results, err := s.Compute(addMulti) + if err != nil { + panic(err) + } + + hr, err := results.Fetch("hazard results") + if err != nil { + panic(err) + } + hazardResults := hr.(consequences.Result) + + for i := range expectedResults { + r, err := hazardResults.Fetch(fmt.Sprintf("%d", i)) + if err != nil { + panic(err) + } + result := r.(consequences.Result) + out, err := result.Fetch("completion_date") + if err != nil { + panic(err) + } + + dif := expectedResults[i].Sub(out.(time.Time)) + fmt.Printf("Completion date was %v. Expected: %v. Diff: %v\n", out, expectedResults[i], dif) + if math.Abs(float64(dif)) > float64(time.Minute) { // if the error is greater than about 1 minute + t.Errorf("Completion date was %v. Expected: %v. 
Diff: %v\n", out, expectedResults[i], dif) + + } + + dmgout, err := result.Fetch("structure damage") + if err != nil { + panic(err) + } + fmt.Printf("Damage was %3.2f. Expected: %3.2f\n", dmgout, expectedDmgs[i]) + if math.Abs(dmgout.(float64)-float64(expectedDmgs[i])) > 0.000000001 { + t.Errorf("Damage was %3.2f. Expected: %3.2f\n", dmgout, expectedDmgs[i]) + } + + } +}