11 changes: 10 additions & 1 deletion internal/generated/gqlout/generated.go

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions schema/graphql/schema.graphqls
@@ -1928,6 +1928,8 @@ input StopBuffer {
input CensusDatasetGeographyFilter {
"Geographies with these integer IDs"
ids: [Int!]
"Search within this dataset"
dataset: String
"Search within this layer"
layer: String
"Search for geographies matching this string"
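On the Go side this new field surfaces as an optional Dataset value on model.CensusDatasetGeographyFilter, alongside the existing Layer and Search fields (see the dbfinder changes below). A minimal construction sketch, assuming gqlgen's usual mapping of an optional String to *string; the dataset and layer names are placeholders, not values from this PR:

// Sketch only: "acs2022" and "tract" are illustrative placeholder names.
dataset := "acs2022"
layer := "tract"
filter := &model.CensusDatasetGeographyFilter{
	Dataset: &dataset, // new: restrict the search to a single named dataset
	Layer:   &layer,   // existing: restrict the search to a single layer
}
_ = filter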
89 changes: 52 additions & 37 deletions server/finders/dbfinder/census.go
@@ -67,8 +67,9 @@ func (f *Finder) CensusGeographiesByEntityIDs(ctx context.Context, limit *int, w
}
forStopids := func(stopIds []int) *model.CensusDatasetGeographyFilter {
return &model.CensusDatasetGeographyFilter{
Layer: where.Layer,
Search: where.Search,
Dataset: where.Dataset,
Layer: where.Layer,
Search: where.Search,
Location: &model.CensusDatasetGeographyLocationFilter{
StopBuffer: &model.StopBuffer{
StopIds: stopIds,
@@ -109,12 +110,12 @@ func (f *Finder) CensusGeographiesByEntityIDs(ctx context.Context, limit *int, w
return ret, nil
}

func (f *Finder) CensusValuesByGeographyIDs(ctx context.Context, limit *int, tableNames []string, keys []string) ([][]*model.CensusValue, error) {
func (f *Finder) CensusValuesByGeographyIDs(ctx context.Context, limit *int, datasetName string, tableNames []string, keys []string) ([][]*model.CensusValue, error) {
var ents []*model.CensusValue
err := dbutil.Select(
ctx,
f.db,
censusValueSelect(limit, "", tableNames, keys),
censusValueSelect(limit, datasetName, tableNames, keys),
&ents,
)
return arrangeGroup(keys, ents, func(ent *model.CensusValue) string { return ent.Geoid }), err
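The finder now takes the dataset name explicitly instead of passing an empty string into censusValueSelect, so values are resolved against a specific dataset. A hedged call sketch, with illustrative placeholder values (f is the *Finder receiver from above):

// Sketch only: dataset, table, and geoid values are placeholders.
values, err := f.CensusValuesByGeographyIDs(
	ctx,
	nil,                     // limit: nil falls through to the default cap
	"acs2022",               // dataset name, newly required by this signature
	[]string{"b01001"},      // table names
	[]string{"06001400100"}, // geoids; results are grouped back by Geoid
)
_, _ = values, err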
@@ -192,17 +193,10 @@ func (f *Finder) CensusSourceLayersBySourceIDs(ctx context.Context, keys []int)

func (f *Finder) CensusGeographiesByDatasetIDs(ctx context.Context, limit *int, p *model.CensusDatasetGeographyFilter, keys []int) ([][]*model.CensusGeography, error) {
var ents []*model.CensusGeography
q := censusDatasetGeographySelect(limit, p, getCensusGeographySelectFields(ctx))
q := censusDatasetGeographySelect(limit, p, getCensusGeographySelectFields(ctx)).Where(sq.Eq{"tlcd.id": keys})
err := dbutil.Select(ctx,
f.db,
lateralWrap(
q,
"tl_census_datasets",
"id",
"tlcd",
"id",
keys,
),
q,
&ents,
)
return arrangeGroup(keys, ents, func(ent *model.CensusGeography) int { return ent.DatasetID }), err
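Design note: the lateral-join wrapper keyed on tl_census_datasets.id is no longer needed, because censusDatasetGeographySelect (below) now joins tl_census_datasets as tlcd directly, so the per-dataset restriction can be a plain filter on the joined table. In squirrel, sq.Eq with a slice value renders as an IN clause, so the added .Where is roughly equivalent to:

// Illustrative only: sq.Eq with a slice expands to an IN clause.
// .Where(sq.Eq{"tlcd.id": []int{1, 2, 3}})  =>  WHERE tlcd.id IN (1, 2, 3)
// arrangeGroup then buckets the returned geographies back by DatasetID.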
@@ -364,77 +358,97 @@ func censusDatasetGeographySelect(limit *int, where *model.CensusDatasetGeograph
q := sq.StatementBuilder.
Select(cols...).
From("tl_census_geographies tlcg").
Join("tl_census_sources tlcs on tlcs.id = tlcg.source_id").
Join("tl_census_datasets tlcd on tlcd.id = tlcs.dataset_id").
Join("tl_census_layers tlcl on tlcl.id = tlcg.layer_id").
Join("tl_census_datasets tlcd on tlcd.id = tlcl.dataset_id").
Join("tl_census_sources tlcs on tlcs.id = tlcg.source_id").
Limit(finderCheckLimit(limit))

if where != nil && where.Location != nil {
// qJoin must have a buffer and match_entity_id column
loc := where.Location
found := true
areaIntersection := true
qBufferUse := false
var qBuffer sq.SelectBuilder
qPointsUse := false
var qPoints sq.SelectBuilder

loc := where.Location
if loc.Bbox != nil {
qBufferUse = true
qBuffer = sq.StatementBuilder.Select().
Column("ST_MakeEnvelope(?,?,?,?,4326) as buffer", loc.Bbox.MinLon, loc.Bbox.MinLat, loc.Bbox.MaxLon, loc.Bbox.MaxLat).
Column("0 as match_entity_id")
} else if loc.Within != nil && loc.Within.Valid {
jj, _ := geojson.Marshal(loc.Within.Val)
qBufferUse = true
qBuffer = sq.StatementBuilder.Select().
Column("ST_GeomFromGeoJSON(?) as buffer", string(jj)).
Column("0 as match_entity_id")
} else if loc.Near != nil {
radius := checkFloat(&loc.Near.Radius, 0, 1_000_000)
qBufferUse = true
qBuffer = sq.StatementBuilder.Select().
Column("ST_Buffer(ST_MakePoint(?,?)::geography, ?) as buffer", loc.Near.Lon, loc.Near.Lat, radius).
Column("0 as match_entity_id")
} else if loc.StopBuffer != nil && len(loc.StopBuffer.StopIds) > 0 {
radius := checkFloat(loc.StopBuffer.Radius, 0, 1_000)
if radius == 0 {
areaIntersection = false
qBuffer = sq.StatementBuilder.Select().
qPointsUse = true
qPoints = sq.StatementBuilder.Select().
Column("gtfs_stops.geometry as buffer").
Column("gtfs_stops.id as match_entity_id").
From("gtfs_stops").
Where(In("gtfs_stops.id", loc.StopBuffer.StopIds))
} else {
qBuffer = sq.StatementBuilder.Select().
Column("0 as match_entity_id").
Column("ST_Buffer(ST_Collect(ST_Buffer(gtfs_stops.geometry::geography, ?)::geometry), 0) as buffer", radius).
// Add this as a pre-CTE
qBufferUse = true
qBufferOuter := sq.StatementBuilder.Select().
Column("ST_Union(ST_Buffer(gtfs_stops.geometry::geography, ?)::geometry) as buffer", radius).
From("gtfs_stops").
Where(In("gtfs_stops.id", loc.StopBuffer.StopIds))
qBuffer = sq.StatementBuilder.Select().
Column("0 as match_entity_id").
Column("(ST_Dump(buffer)).geom as buffer").
From("buffer_outer")
q = q.WithCTE(sq.CTE{
Alias: "buffer_outer",
Materialized: true,
Expression: qBufferOuter,
})
}
} else {
found = false
}
if found {
if qBufferUse {
q = q.WithCTE(sq.CTE{
Alias: "buffer",
Materialized: true,
Expression: qBuffer,
})
q = q.Join("buffer ON tlcg.geometry && buffer.buffer")
// Buffer radius > 0: use area approximation (better performance)
if areaIntersection {
q = q.Where(sq.Expr("ST_Area(ST_Intersection(tlcg.geometry, buffer.buffer)) > 0"))
} else {
q = q.Where(sq.Expr("ST_Intersects(tlcg.geometry, buffer.buffer)"))
}
q = q.Join("buffer ON tlcg.geometry && buffer.buffer").
Where(sq.Expr("ST_Area(ST_Intersection(tlcg.geometry, buffer.buffer)) > 0"))
if fields.intersectionArea {
q = q.Column("ST_Area(ST_Intersection(tlcg.geometry, buffer.buffer)) as intersection_area")
}
if fields.intersectionGeometry {
q = q.Column("ST_Intersection(tlcg.geometry, buffer.buffer) as intersection_geometry")
}
}
if qPointsUse {
q = q.WithCTE(sq.CTE{
Alias: "buffer",
Materialized: true,
Expression: qPoints,
})
q = q.Join("buffer ON tlcg.geometry && buffer.buffer").
Column("buffer.match_entity_id").
Where(sq.Expr("ST_Intersects(tlcg.geometry, buffer.buffer)"))
}
if loc.Focus != nil {
orderBy = sq.Expr("ST_Distance(tlcg.geometry, ST_MakePoint(?,?))", loc.Focus.Lon, loc.Focus.Lat)
}
}

// Check layer, dataset
if where != nil {
if where.Dataset != nil {
q = q.Where(sq.Eq{"tlcd.name": *where.Dataset})
}
if where.Layer != nil {
q = q.Where(sq.Eq{"tlcl.name": where.Layer})
}
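For orientation, the rewritten location handling now has two stop-buffer branches. With a zero radius, a materialized buffer CTE of raw stop point geometries is joined and matched with ST_Intersects, and buffer.match_entity_id carries the matching stop's id into each geography row. With a positive radius (and for the bbox, polygon, and near cases), a single buffer geometry is built instead; for stops, per-stop buffers are unioned in a materialized buffer_outer pre-CTE, ST_Dump splits that union back into individual polygons, and geographies are kept when their actual intersection area with a buffer polygon is positive (the && join condition narrows candidates by bounding box first). A rough sketch of the SQL this builder produces for the positive-radius stop case, reconstructed from the calls above; CTE order, the column list, and placeholders are approximations:

// Approximate query shape only, not generated output.
const bufferedStopQueryShape = `
WITH buffer_outer AS MATERIALIZED (
    SELECT ST_Union(ST_Buffer(gtfs_stops.geometry::geography, $1)::geometry) AS buffer
    FROM gtfs_stops
    WHERE gtfs_stops.id IN (...)
), buffer AS MATERIALIZED (
    SELECT 0 AS match_entity_id, (ST_Dump(buffer)).geom AS buffer
    FROM buffer_outer
)
SELECT ...
FROM tl_census_geographies tlcg
JOIN tl_census_layers tlcl ON tlcl.id = tlcg.layer_id
JOIN tl_census_datasets tlcd ON tlcd.id = tlcl.dataset_id
JOIN tl_census_sources tlcs ON tlcs.id = tlcg.source_id
JOIN buffer ON tlcg.geometry && buffer.buffer
WHERE ST_Area(ST_Intersection(tlcg.geometry, buffer.buffer)) > 0
LIMIT ...
`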
@@ -456,11 +470,12 @@ func getBufferStopIds(ctx context.Context, db tldb.Ext, entityType string, entit
Select("id").
Distinct().Options("on (gtfs_stops.id)").
From("gtfs_stops")
if entityType == "route" {
switch entityType {
case "route":
q = q.Join("tl_route_stops ON tl_route_stops.stop_id = gtfs_stops.id").Where(sq.Eq{"tl_route_stops.route_id": entityId})
} else if entityType == "agency" {
case "agency":
q = q.Join("tl_route_stops ON tl_route_stops.stop_id = gtfs_stops.id").Where(sq.Eq{"tl_route_stops.agency_id": entityId})
} else if entityType == "stop" {
case "stop":
// No need to query, just return the single stop ID
return []int{entityId}, nil
}
11 changes: 0 additions & 11 deletions server/finders/dbfinder/finder.go
@@ -162,17 +162,6 @@ func tzTruncate(s time.Time, loc *time.Location) *tt.Date {
return ptr(tt.NewDate(time.Date(s.Year(), s.Month(), s.Day(), 0, 0, 0, 0, loc)))
}

func checkRange(limit *int, min, max int) uint64 {
if limit == nil {
return uint64(max)
} else if *limit >= max {
return uint64(max)
} else if *limit < min {
return uint64(min)
}
return uint64(*limit)
}

func checkFloat(v *float64, min float64, max float64) float64 {
if v == nil || *v < min {
return min
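checkFloat clamps an optional float into [min, max], treating nil as min; it is what bounds the census radii above (0 to 1,000,000 m for near, 0 to 1,000 m for stop buffers). A small usage sketch, assuming the elided tail of the function clamps values above max, mirroring the removed checkRange:

// nil and below-range values collapse to min; above-range values collapse to max (assumed).
r := checkFloat(nil, 0, 1_000) // 0
v := 2500.0
r = checkFloat(&v, 0, 1_000) // 1000, assuming max clamping in the elided tail
_ = r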
19 changes: 13 additions & 6 deletions server/gql/census_resolver_test.go
@@ -1,6 +1,7 @@
package gql

import (
"fmt"
"testing"

"github.com/stretchr/testify/assert"
@@ -310,27 +311,27 @@ func TestCensusResolver(t *testing.T) {
},
{
name: "agency intersection areas - county",
query: `query { agencies(where:{agency_id:"BART"}) { agency_id census_geographies(where:{layer:"county", radius:1000.0}) { name geometry_area intersection_geometry intersection_area } } }`,
query: `query { agencies(where:{agency_id:"BART"}) { agency_id census_geographies(where:{layer:"county", radius:1000.0}) { name geoid geometry_area intersection_geometry intersection_area } } }`,
vars: vars,
f: func(t *testing.T, jj string) {
testIntersectionArea(
t,
gjson.Get(jj, "agencies.0.census_geographies").Array(),
1,
18,
countyArea,
65341022.43,
)
},
},
{
name: "agency intersection areas - tract",
query: `query { agencies(where:{agency_id:"BART"}) { agency_id census_geographies(where:{layer:"tract", radius:100.0}) { name geometry_area intersection_geometry intersection_area } } }`,
query: `query { agencies(where:{agency_id:"BART"}) { agency_id census_geographies(where:{layer:"tract", radius:100.0}) { name geoid geometry_area intersection_geometry intersection_area } } }`,
vars: vars,
f: func(t *testing.T, jj string) {
testIntersectionArea(
t,
gjson.Get(jj, "agencies.0.census_geographies").Array(),
37,
39,
73325034.5592,
687170.8023156085,
)
@@ -340,11 +341,17 @@ func TestCensusResolver(t *testing.T) {
queryTestcases(t, c, testcases)
}
func testIntersectionArea(t *testing.T, a []gjson.Result, expectCount int, expectGeometryArea float64, expectIntersectionArea float64) {
// Only count each geometry once
geometryAreas := map[string]float64{}
intersectionArea := 0.0
geometryArea := 0.0
for _, v := range a {
intersectionArea += v.Get("intersection_area").Float()
geometryArea += v.Get("geometry_area").Float()
geometryAreas[v.Get("geoid").String()] = v.Get("geometry_area").Float()
}
fmt.Printf("areas: %+v\n", geometryAreas)
geometryArea := 0.0
for _, v := range geometryAreas {
geometryArea += v
}
assert.InDelta(t, expectIntersectionArea, intersectionArea, 1.0, "expected intersection area")
assert.InDelta(t, expectGeometryArea, geometryArea, 1.0, "expected geometry area")
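Why the geoid map: because the dumped buffer yields one result row per intersecting buffer polygon, a single geography can now appear several times for one agency (hence the expected counts rising from 1 to 18 and from 37 to 39), so summing geometry_area over raw rows would count the same county or tract repeatedly. Keying areas by geoid counts each geography once, while intersection_area is still summed over every row. A stripped-down sketch of the same pattern:

// Minimal sketch: sum intersection areas over all rows, but count each
// geography's own area only once, keyed by geoid.
rows := []struct {
	Geoid            string
	GeometryArea     float64
	IntersectionArea float64
}{
	{"06001", 100, 10},
	{"06001", 100, 15}, // same county, matched by a second buffer polygon
	{"06075", 50, 5},
}
intersection, areas := 0.0, map[string]float64{}
for _, r := range rows {
	intersection += r.IntersectionArea
	areas[r.Geoid] = r.GeometryArea
}
total := 0.0
for _, a := range areas {
	total += a
}
// intersection == 30, total == 150 (not 250)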
13 changes: 8 additions & 5 deletions server/gql/loaders.go
@@ -180,15 +180,18 @@ func NewLoaders(dbf model.Finder, batchSize int, stopTimeBatchSize int) *Loaders
},
),
CensusValuesByGeographyIDs: withWaitAndCapacityGroup(waitTime, batchSize,
func(ctx context.Context, limit *int, tableNames string, keys []string) ([][]*model.CensusValue, error) {
func(ctx context.Context, limit *int, param *censusValueLoaderParam, keys []string) ([][]*model.CensusValue, error) {
var tnames []string
for _, t := range strings.Split(tableNames, ",") {
for _, t := range strings.Split(param.TableNames, ",") {
tnames = append(tnames, strings.ToLower(strings.TrimSpace(t)))
}
return dbf.CensusValuesByGeographyIDs(ctx, limit, tnames, keys)
if param.Dataset == nil {
return nil, nil
}
return dbf.CensusValuesByGeographyIDs(ctx, limit, *param.Dataset, tnames, keys)
},
func(p censusValueLoaderParam) (string, string, *int) {
return p.Geoid, p.TableNames, p.Limit
func(p censusValueLoaderParam) (string, *censusValueLoaderParam, *int) {
return p.Geoid, &censusValueLoaderParam{TableNames: p.TableNames, Dataset: p.Dataset}, p.Limit
},
),
FeedFetchesByFeedIDs: withWaitAndCapacityGroup(waitTime, batchSize, dbf.FeedFetchesByFeedIDs,
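The loader now batches on a composite parameter rather than a raw table-name string: rows are keyed by Geoid, the (TableNames, Dataset) pair selects the batch group, and requests without a dataset short-circuit to a nil result. A sketch of censusValueLoaderParam as this diff implies it; the exact field set and ordering are assumptions inferred from the uses above:

// Assumed shape, inferred from p.Geoid, p.TableNames, p.Dataset, and p.Limit above.
type censusValueLoaderParam struct {
	Dataset    *string // nil short-circuits the loader to a nil result
	TableNames string  // comma-separated; lower-cased and trimmed before querying
	Limit      *int
	Geoid      string  // per-row key used to group returned values
}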
2 changes: 1 addition & 1 deletion server/model/finders.go
@@ -63,7 +63,7 @@ type EntityLoader interface {
CensusSourceLayersBySourceIDs(context.Context, []int) ([][]*CensusLayer, []error)
CensusSourcesByDatasetIDs(context.Context, *int, *CensusSourceFilter, []int) ([][]*CensusSource, error)
CensusTableByIDs(context.Context, []int) ([]*CensusTable, []error)
CensusValuesByGeographyIDs(context.Context, *int, []string, []string) ([][]*CensusValue, error)
CensusValuesByGeographyIDs(context.Context, *int, string, []string, []string) ([][]*CensusValue, error)
FeedFetchesByFeedIDs(context.Context, *int, *FeedFetchFilter, []int) ([][]*FeedFetch, error)
FeedInfosByFeedVersionIDs(context.Context, *int, []int) ([][]*FeedInfo, error)
FeedsByIDs(context.Context, []int) ([]*Feed, []error)
2 changes: 2 additions & 0 deletions server/model/models_gen.go

Some generated files are not rendered by default.
