diff --git a/config/account.go b/config/account.go index a54da82d212..147f35cc0de 100644 --- a/config/account.go +++ b/config/account.go @@ -42,6 +42,7 @@ type Account struct { DefaultBidLimit int `mapstructure:"default_bid_limit" json:"default_bid_limit"` BidAdjustments *openrtb_ext.ExtRequestPrebidBidAdjustments `mapstructure:"bidadjustments" json:"bidadjustments"` Privacy AccountPrivacy `mapstructure:"privacy" json:"privacy"` + GeoLocation AccountGeoLocation `mapstructure:"geolocation" json:"geolocation"` } // CookieSync represents the account-level defaults for the cookie sync endpoint. @@ -156,9 +157,10 @@ type AccountGDPR struct { Purpose9 AccountGDPRPurpose `mapstructure:"purpose9" json:"purpose9"` Purpose10 AccountGDPRPurpose `mapstructure:"purpose10" json:"purpose10"` // Hash table of purpose configs for convenient purpose config lookup - PurposeConfigs map[consentconstants.Purpose]*AccountGDPRPurpose - PurposeOneTreatment AccountGDPRPurposeOneTreatment `mapstructure:"purpose_one_treatment" json:"purpose_one_treatment"` - SpecialFeature1 AccountGDPRSpecialFeature `mapstructure:"special_feature1" json:"special_feature1"` + PurposeConfigs map[consentconstants.Purpose]*AccountGDPRPurpose + PurposeOneTreatment AccountGDPRPurposeOneTreatment `mapstructure:"purpose_one_treatment" json:"purpose_one_treatment"` + SpecialFeature1 AccountGDPRSpecialFeature `mapstructure:"special_feature1" json:"special_feature1"` + ConsentStringMeansInScope *bool `mapstructure:"consent_string_means_in_scope" json:"consent_string_means_in_scope"` } // EnabledForChannelType indicates whether GDPR is turned on at the account level for the specified channel type @@ -351,6 +353,14 @@ type CookieDeprecation struct { TTLSec int `mapstructure:"ttl_sec"` } +type AccountGeoLocation struct { + Enabled bool `mapstructure:"enabled" json:"enabled,omitempty"` +} + +func (g *AccountGeoLocation) IsGeoLocationEnabled() bool { + return g.Enabled +} + // AccountDSA represents DSA configuration type AccountDSA struct { Default string `mapstructure:"default" json:"default"` diff --git a/config/account_test.go b/config/account_test.go index 97bb5bd0d3f..10fc6945375 100644 --- a/config/account_test.go +++ b/config/account_test.go @@ -1014,3 +1014,27 @@ func TestIPMaskingValidate(t *testing.T) { }) } } + +func TestGeoLocation(t *testing.T) { + tests := []struct { + geoloc *AccountGeoLocation + expected bool + }{ + { + geoloc: &AccountGeoLocation{ + Enabled: true, + }, + expected: true, + }, + { + geoloc: &AccountGeoLocation{ + Enabled: false, + }, + expected: false, + }, + } + + for _, test := range tests { + assert.Equal(t, test.expected, test.geoloc.IsGeoLocationEnabled()) + } +} diff --git a/config/config.go b/config/config.go index 30f8d69adc1..15171af1b4d 100644 --- a/config/config.go +++ b/config/config.go @@ -103,6 +103,7 @@ type Configuration struct { Hooks Hooks `mapstructure:"hooks"` Validations Validations `mapstructure:"validations"` PriceFloors PriceFloors `mapstructure:"price_floors"` + GeoLocation GeoLocation `mapstructure:"geolocation"` } type Admin struct { @@ -253,8 +254,9 @@ type GDPR struct { // If the gdpr flag is unset in a request, but geo.country is set, we will assume GDPR applies if and only // if the country matches one on this list. 
If both the GDPR flag and country are not set, we default // to DefaultValue - EEACountries []string `mapstructure:"eea_countries"` - EEACountriesMap map[string]struct{} + EEACountries []string `mapstructure:"eea_countries"` + EEACountriesMap map[string]struct{} + ConsentStringMeansInScope bool `mapstructure:"consent_string_means_in_scope"` } func (cfg *GDPR) validate(v *viper.Viper, errs []error) []error { @@ -659,6 +661,27 @@ type DefReqFiles struct { FileName string `mapstructure:"name"` } +type GeoLocation struct { + Enabled bool `mapstructure:"enabled"` + Type string `mapstructure:"type"` + Maxmind GeoLocationMaxmind `mapstructure:"maxmind"` +} + +type GeoLocationMaxmind struct { + RemoteFileSyncer MaxmindRemoteFileSyncer `mapstructure:"remote_file_syncer"` +} + +type MaxmindRemoteFileSyncer struct { + HttpClient HTTPClient `mapstructure:"http_client"` + DownloadURL string `mapstructure:"download_url"` + SaveFilePath string `mapstructure:"save_filepath"` + TmpFilePath string `mapstructure:"tmp_filepath"` + RetryCount int `mapstructure:"retry_count"` + RetryIntervalMillis int `mapstructure:"retry_interval_ms"` + TimeoutMillis int `mapstructure:"timeout_ms"` + UpdateIntervalMillis int `mapstructure:"update_interval_ms"` +} + type Debug struct { TimeoutNotification TimeoutNotification `mapstructure:"timeout_notification"` OverrideToken string `mapstructure:"override_token"` @@ -1140,6 +1163,7 @@ func SetupViper(v *viper.Viper, filename string, bidderInfos BidderInfos) { "FIN", "FRA", "GUF", "DEU", "GIB", "GRC", "GLP", "GGY", "HUN", "ISL", "IRL", "IMN", "ITA", "JEY", "LVA", "LIE", "LTU", "LUX", "MLT", "MTQ", "MYT", "NLD", "NOR", "POL", "PRT", "REU", "ROU", "BLM", "MAF", "SPM", "SVK", "SVN", "ESP", "SWE", "GBR"}) + v.SetDefault("gdpr.consent_string_means_in_scope", false) v.SetDefault("ccpa.enforce", false) v.SetDefault("lmt.enforce", true) v.SetDefault("currency_converter.fetch_url", "https://cdn.jsdelivr.net/gh/prebid/currency-file@1/latest.json") @@ -1170,6 +1194,7 @@ func SetupViper(v *viper.Viper, filename string, bidderInfos BidderInfos) { v.SetDefault("account_defaults.privacy.privacysandbox.topicsdomain", "") v.SetDefault("account_defaults.privacy.privacysandbox.cookiedeprecation.enabled", false) v.SetDefault("account_defaults.privacy.privacysandbox.cookiedeprecation.ttl_sec", 604800) + v.SetDefault("account_defaults.geolocation.enabled", false) v.SetDefault("account_defaults.events_enabled", false) v.BindEnv("account_defaults.privacy.dsa.default") @@ -1187,6 +1212,20 @@ func SetupViper(v *viper.Viper, filename string, bidderInfos BidderInfos) { v.SetDefault("price_floors.fetcher.http_client.idle_connection_timeout_seconds", 60) v.SetDefault("price_floors.fetcher.max_retries", 10) + v.SetDefault("geolocation.enabled", false) + v.SetDefault("geolocation.type", "maxmind") + v.SetDefault("geolocation.maxmind.remote_file_syncer.download_url", "https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz") + v.SetDefault("geolocation.maxmind.remote_file_syncer.save_filepath", "/var/tmp/prebid/GeoLite2-City.tar.gz") + v.SetDefault("geolocation.maxmind.remote_file_syncer.tmp_filepath", "/var/tmp/prebid/tmp/GeoLite2-City.tar.gz") + v.SetDefault("geolocation.maxmind.remote_file_syncer.retry_count", 3) + v.SetDefault("geolocation.maxmind.remote_file_syncer.retry_interval_ms", 3000) + v.SetDefault("geolocation.maxmind.remote_file_syncer.timeout_ms", 300000) + v.SetDefault("geolocation.maxmind.remote_file_syncer.update_interval_ms", 0) + 
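For reference, a minimal sketch (not part of the diff itself) of how the maxmind remote-file-syncer defaults registered in SetupViper surface on the new GeoLocation/GeoLocationMaxmind/MaxmindRemoteFileSyncer structs added in config/config.go; the standalone main package and the printed line are illustrative assumptions, and the values simply mirror the defaults above.

```go
package main

import (
	"fmt"

	"github.com/prebid/prebid-server/v3/config"
)

func main() {
	// Struct literal mirroring the viper defaults for the geolocation section.
	geoCfg := config.GeoLocation{
		Enabled: false,
		Type:    "maxmind",
		Maxmind: config.GeoLocationMaxmind{
			RemoteFileSyncer: config.MaxmindRemoteFileSyncer{
				HttpClient: config.HTTPClient{
					MaxConnsPerHost:     0,
					MaxIdleConns:        40,
					MaxIdleConnsPerHost: 2,
					IdleConnTimeout:     60,
				},
				DownloadURL:          "https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz",
				SaveFilePath:         "/var/tmp/prebid/GeoLite2-City.tar.gz",
				TmpFilePath:          "/var/tmp/prebid/tmp/GeoLite2-City.tar.gz",
				RetryCount:           3,
				RetryIntervalMillis:  3000,
				TimeoutMillis:        300000,
				UpdateIntervalMillis: 0,
			},
		},
	}
	fmt.Printf("geolocation enabled=%v type=%s\n", geoCfg.Enabled, geoCfg.Type)
}
```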
v.SetDefault("geolocation.maxmind.remote_file_syncer.http_client.max_connections_per_host", 0) + v.SetDefault("geolocation.maxmind.remote_file_syncer.http_client.max_idle_connections", 40) + v.SetDefault("geolocation.maxmind.remote_file_syncer.http_client.max_idle_connections_per_host", 2) + v.SetDefault("geolocation.maxmind.remote_file_syncer.http_client.idle_connection_timeout_seconds", 60) + v.SetDefault("account_defaults.events_enabled", false) v.SetDefault("compression.response.enable_gzip", false) v.SetDefault("compression.request.enable_gzip", false) diff --git a/config/config_test.go b/config/config_test.go index a68f478a642..1f871749d68 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -209,9 +209,24 @@ func TestDefaults(t *testing.T) { cmpStrings(t, "account_defaults.privacy.topicsdomain", "", cfg.AccountDefaults.Privacy.PrivacySandbox.TopicsDomain) cmpBools(t, "account_defaults.privacy.privacysandbox.cookiedeprecation.enabled", false, cfg.AccountDefaults.Privacy.PrivacySandbox.CookieDeprecation.Enabled) cmpInts(t, "account_defaults.privacy.privacysandbox.cookiedeprecation.ttl_sec", 604800, cfg.AccountDefaults.Privacy.PrivacySandbox.CookieDeprecation.TTLSec) + cmpBools(t, "account_defaults.geolocation.enabled", false, cfg.AccountDefaults.GeoLocation.Enabled) cmpBools(t, "account_defaults.events.enabled", false, cfg.AccountDefaults.Events.Enabled) + cmpBools(t, "geolocation.enabled", false, cfg.GeoLocation.Enabled) + cmpStrings(t, "geolocation.type", "maxmind", cfg.GeoLocation.Type) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.http_client.max_connections_per_host", 0, cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.MaxConnsPerHost) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.http_client.max_idle_connections", 40, cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.MaxIdleConns) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.http_client.max_idle_connections_per_host", 2, cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.MaxIdleConnsPerHost) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.http_client.idle_connection_timeout_seconds", 60, cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.IdleConnTimeout) + cmpStrings(t, "geolocation.maxmind.remote_file_syncer.download_url", "https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz", cfg.GeoLocation.Maxmind.RemoteFileSyncer.DownloadURL) + cmpStrings(t, "geolocation.maxmind.remote_file_syncer.save_filepath", "/var/tmp/prebid/GeoLite2-City.tar.gz", cfg.GeoLocation.Maxmind.RemoteFileSyncer.SaveFilePath) + cmpStrings(t, "geolocation.maxmind.remote_file_syncer.tmp_filepath", "/var/tmp/prebid/tmp/GeoLite2-City.tar.gz", cfg.GeoLocation.Maxmind.RemoteFileSyncer.TmpFilePath) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.retry_count", 3, cfg.GeoLocation.Maxmind.RemoteFileSyncer.RetryCount) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.retry_interval_ms", 3000, cfg.GeoLocation.Maxmind.RemoteFileSyncer.RetryIntervalMillis) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.timeout_ms", 300000, cfg.GeoLocation.Maxmind.RemoteFileSyncer.TimeoutMillis) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.update_interval_ms", 0, cfg.GeoLocation.Maxmind.RemoteFileSyncer.UpdateIntervalMillis) + cmpBools(t, "hooks.enabled", false, cfg.Hooks.Enabled) cmpStrings(t, "validations.banner_creative_max_size", "skip", cfg.Validations.BannerCreativeMaxSize) cmpStrings(t, "validations.secure_markup", "skip", cfg.Validations.SecureMarkup) @@ -227,6 +242,8 @@ func TestDefaults(t 
*testing.T) { cmpInts(t, "account_defaults.privacy.ipv6.anon_keep_bits", 56, cfg.AccountDefaults.Privacy.IPv6Config.AnonKeepBits) cmpInts(t, "account_defaults.privacy.ipv4.anon_keep_bits", 24, cfg.AccountDefaults.Privacy.IPv4Config.AnonKeepBits) + cmpBools(t, "gdpr.consent_string_means_in_scope", false, cfg.GDPR.ConsentStringMeansInScope) + //Assert purpose VendorExceptionMap hash tables were built correctly cmpBools(t, "analytics.agma.enabled", false, cfg.Analytics.Agma.Enabled) cmpStrings(t, "analytics.agma.endpoint.timeout", "2s", cfg.Analytics.Agma.Endpoint.Timeout) @@ -350,6 +367,7 @@ gdpr: default_value: "1" non_standard_publishers: ["pub1", "pub2"] eea_countries: ["eea1", "eea2"] + consent_string_means_in_scope: false tcf2: purpose1: enforce_vendors: false @@ -494,6 +512,23 @@ price_floors: max_idle_connections_per_host: 2 idle_connection_timeout_seconds: 10 max_retries: 5 +geolocation: + enabled: false + type: maxmind + maxmind: + remote_file_syncer: + http_client: + max_connections_per_host: 0 + max_idle_connections: 40 + max_idle_connections_per_host: 2 + idle_connection_timeout_seconds: 60 + download_url: "https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz" + save_filepath: "/var/tmp/prebid/GeoLite2-City.tar.gz" + tmp_filepath: "/var/tmp/prebid/tmp/GeoLite2-City.tar.gz" + retry_count: 3 + retry_interval_ms: 3000 + timeout_ms: 300000 + update_interval_ms: 0 account_defaults: events: enabled: true @@ -541,6 +576,8 @@ account_defaults: cookiedeprecation: enabled: true ttl_sec: 86400 + geolocation: + enabled: false tmax_adjustments: enabled: true bidder_response_duration_min_ms: 700 @@ -631,6 +668,7 @@ func TestFullConfig(t *testing.T) { cmpInts(t, "http_client_cache.idle_connection_timeout_seconds", 3, cfg.CacheClient.IdleConnTimeout) cmpInts(t, "gdpr.host_vendor_id", 15, cfg.GDPR.HostVendorID) cmpStrings(t, "gdpr.default_value", "1", cfg.GDPR.DefaultValue) + cmpBools(t, "gdpr.consent_string_means_in_scope", false, cfg.GDPR.ConsentStringMeansInScope) cmpStrings(t, "host_schain_node.asi", "pbshostcompany.com", cfg.HostSChainNode.ASI) cmpStrings(t, "host_schain_node.sid", "00001", cfg.HostSChainNode.SID) cmpStrings(t, "host_schain_node.rid", "BidRequest", cfg.HostSChainNode.RID) @@ -890,6 +928,21 @@ func TestFullConfig(t *testing.T) { cmpStrings(t, "analytics.agma.accounts.0.publisher_id", "publisher-id", cfg.Analytics.Agma.Accounts[0].PublisherId) cmpStrings(t, "analytics.agma.accounts.0.code", "agma-code", cfg.Analytics.Agma.Accounts[0].Code) cmpStrings(t, "analytics.agma.accounts.0.site_app_id", "site-or-app-id", cfg.Analytics.Agma.Accounts[0].SiteAppId) + + cmpBools(t, "account_defaults.geolocation.enabled", false, cfg.GeoLocation.Enabled) + cmpBools(t, "geolocation.enabled", false, cfg.GeoLocation.Enabled) + cmpStrings(t, "geolocation.type", "maxmind", cfg.GeoLocation.Type) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.http_client.max_connections_per_host", 0, cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.MaxConnsPerHost) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.http_client.max_idle_connections", 40, cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.MaxIdleConns) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.http_client.max_idle_connections_per_host", 2, cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.MaxIdleConnsPerHost) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.http_client.idle_connection_timeout_seconds", 60, cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.IdleConnTimeout) + cmpStrings(t, 
"geolocation.maxmind.remote_file_syncer.download_url", "https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz", cfg.GeoLocation.Maxmind.RemoteFileSyncer.DownloadURL) + cmpStrings(t, "geolocation.maxmind.remote_file_syncer.save_filepath", "/var/tmp/prebid/GeoLite2-City.tar.gz", cfg.GeoLocation.Maxmind.RemoteFileSyncer.SaveFilePath) + cmpStrings(t, "geolocation.maxmind.remote_file_syncer.tmp_filepath", "/var/tmp/prebid/tmp/GeoLite2-City.tar.gz", cfg.GeoLocation.Maxmind.RemoteFileSyncer.TmpFilePath) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.retry_count", 3, cfg.GeoLocation.Maxmind.RemoteFileSyncer.RetryCount) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.retry_interval_ms", 3000, cfg.GeoLocation.Maxmind.RemoteFileSyncer.RetryIntervalMillis) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.timeout_ms", 300000, cfg.GeoLocation.Maxmind.RemoteFileSyncer.TimeoutMillis) + cmpInts(t, "geolocation.maxmind.remote_file_syncer.update_interval_ms", 0, cfg.GeoLocation.Maxmind.RemoteFileSyncer.UpdateIntervalMillis) } func TestValidateConfig(t *testing.T) { diff --git a/config/countrycode/countrycode.go b/config/countrycode/countrycode.go new file mode 100644 index 00000000000..0292f3ba72b --- /dev/null +++ b/config/countrycode/countrycode.go @@ -0,0 +1,63 @@ +package countrycode + +import ( + "strings" +) + +type CountryCode struct { + map2To3 map[string]string + map3To2 map[string]string +} + +func New() *CountryCode { + return &CountryCode{ + map2To3: make(map[string]string), + map3To2: make(map[string]string), + } +} + +// Load loads country code mapping data +func (c *CountryCode) Load(data string) { + toAlpha2 := make(map[string]string) + toAlpha3 := make(map[string]string) + for _, line := range strings.Split(data, "\n") { + if line == "" { + continue + } + fields := strings.Split(line, ",") + if len(fields) < 2 { + continue + } + alpha2 := strings.TrimSpace(fields[0]) + alpha3 := strings.TrimSpace(fields[1]) + toAlpha2[alpha3] = alpha2 + toAlpha3[alpha2] = alpha3 + } + + c.map2To3 = toAlpha3 + c.map3To2 = toAlpha2 +} + +// ToAlpha3 converts country code alpha2 to alpha3 +func (c *CountryCode) ToAlpha3(alpha2 string) string { + return c.map2To3[alpha2] +} + +// ToAlpha2 converts country code alpha3 to alpha2 +func (c *CountryCode) ToAlpha2(alpha3 string) string { + return c.map3To2[alpha3] +} + +var defaultCountryCode = New() + +func Load(data string) { + defaultCountryCode.Load(data) +} + +func ToAlpha3(alpha2 string) string { + return defaultCountryCode.ToAlpha3(alpha2) +} + +func ToAlpha2(alpha3 string) string { + return defaultCountryCode.ToAlpha2(alpha3) +} diff --git a/config/countrycode/countrycode_test.go b/config/countrycode/countrycode_test.go new file mode 100644 index 00000000000..196b936d56c --- /dev/null +++ b/config/countrycode/countrycode_test.go @@ -0,0 +1,60 @@ +package countrycode + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCountryCode(t *testing.T) { + Load(` +AD,AND +AE,ARE +AF,AFG +`) + assert.Equal(t, "AND", ToAlpha3("AD"), "map AD to AND") + assert.Equal(t, "AE", ToAlpha2("ARE"), "map ARE to AE") +} + +func TestCountryCodeToAlpha3(t *testing.T) { + c := New() + c.Load(` +AD,AND +AE,ARE +AF,AFG +`) + tests := []struct { + input string + expected string + }{ + {"AD", "AND"}, + {"AE", "ARE"}, + {"XX", ""}, + {"", ""}, + } + + for _, test := range tests { + assert.Equal(t, test.expected, c.ToAlpha3(test.input), "map %s to alpha3", test.input) + } +} + +func TestCountryCodeToAlpha2(t *testing.T) { + c := 
New() + c.Load(` +AD,AND +AE,ARE +AF,AFG +`) + tests := []struct { + input string + expected string + }{ + {"AND", "AD"}, + {"ARE", "AE"}, + {"", ""}, + } + + for _, test := range tests { + assert.Equal(t, test.expected, c.ToAlpha2(test.input), "map %s to alpha2", test.input) + } +} diff --git a/country-codes.csv b/country-codes.csv new file mode 100644 index 00000000000..2895f157982 --- /dev/null +++ b/country-codes.csv @@ -0,0 +1,244 @@ +AF,AFG +AL,ALB +DZ,DZA +AS,ASM +AD,AND +AO,AGO +AI,AIA +AQ,ATA +AG,ATG +AR,ARG +AM,ARM +AW,ABW +AU,AUS +AT,AUT +AZ,AZE +BS,BHS +BH,BHR +BD,BGD +BB,BRB +BY,BLR +BE,BEL +BZ,BLZ +BJ,BEN +BM,BMU +BT,BTN +BO,BOL +BA,BIH +BW,BWA +BV,BVT +BR,BRA +IO,IOT +BN,BRN +BG,BGR +BF,BFA +BI,BDI +KH,KHM +CM,CMR +CA,CAN +CV,CPV +KY,CYM +CF,CAF +TD,TCD +CL,CHL +CN,CHN +CX,CXR +CC,CCK +CO,COL +KM,COM +CG,COG +CD,COD +CK,COK +CR,CRI +CI,CIV +HR,HRV +CU,CUB +CY,CYP +CZ,CZE +DK,DNK +DJ,DJI +DM,DMA +DO,DOM +EC,ECU +EG,EGY +SV,SLV +GQ,GNQ +ER,ERI +EE,EST +ET,ETH +FK,FLK +FO,FRO +FJ,FJI +FI,FIN +FR,FRA +GF,GUF +PF,PYF +TF,ATF +GA,GAB +GM,GMB +GE,GEO +DE,DEU +GH,GHA +GI,GIB +GR,GRC +GL,GRL +GD,GRD +GP,GLP +GU,GUM +GT,GTM +GG,GGY +GN,GIN +GW,GNB +GY,GUY +HT,HTI +HM,HMD +VA,VAT +HN,HND +HK,HKG +HU,HUN +IS,ISL +IN,IND +ID,IDN +IR,IRN +IQ,IRQ +IE,IRL +IM,IMN +IL,ISR +IT,ITA +JM,JAM +JP,JPN +JE,JEY +JO,JOR +KZ,KAZ +KE,KEN +KI,KIR +KP,PRK +KR,KOR +KW,KWT +KG,KGZ +LA,LAO +LV,LVA +LB,LBN +LS,LSO +LR,LBR +LY,LBY +LI,LIE +LT,LTU +LU,LUX +MO,MAC +MK,MKD +MG,MDG +MW,MWI +MY,MYS +MV,MDV +ML,MLI +MT,MLT +MH,MHL +MQ,MTQ +MR,MRT +MU,MUS +YT,MYT +MX,MEX +FM,FSM +MD,MDA +MC,MCO +MN,MNG +ME,MNE +MS,MSR +MA,MAR +MZ,MOZ +MM,MMR +NA,NAM +NR,NRU +NP,NPL +NL,NLD +AN,ANT +NC,NCL +NZ,NZL +NI,NIC +NE,NER +NG,NGA +NU,NIU +NF,NFK +MP,MNP +NO,NOR +OM,OMN +PK,PAK +PW,PLW +PS,PSE +PA,PAN +PG,PNG +PY,PRY +PE,PER +PH,PHL +PN,PCN +PL,POL +PT,PRT +PR,PRI +QA,QAT +RE,REU +RO,ROU +RU,RUS +RW,RWA +SH,SHN +KN,KNA +LC,LCA +PM,SPM +VC,VCT +WS,WSM +SM,SMR +ST,STP +SA,SAU +SN,SEN +RS,SRB +SC,SYC +SL,SLE +SG,SGP +SK,SVK +SI,SVN +SB,SLB +SO,SOM +ZA,ZAF +GS,SGS +ES,ESP +LK,LKA +SD,SDN +SR,SUR +SJ,SJM +SZ,SWZ +SE,SWE +CH,CHE +SY,SYR +TW,TWN +TJ,TJK +TZ,TZA +TH,THA +TL,TLS +TG,TGO +TK,TKL +TO,TON +TT,TTO +TN,TUN +TR,TUR +TM,TKM +TC,TCA +TV,TUV +UG,UGA +UA,UKR +AE,ARE +GB,GBR +UK,GBR +US,USA +UM,UMI +UY,URY +UZ,UZB +VU,VUT +VE,VEN +VN,VNM +VG,VGB +VI,VIR +WF,WLF +EH,ESH +YE,YEM +ZM,ZMB +ZW,ZWE diff --git a/endpoints/openrtb2/auction_benchmark_test.go b/endpoints/openrtb2/auction_benchmark_test.go index 9697687407e..4a846307921 100644 --- a/endpoints/openrtb2/auction_benchmark_test.go +++ b/endpoints/openrtb2/auction_benchmark_test.go @@ -99,6 +99,7 @@ func BenchmarkOpenrtbEndpoint(b *testing.B) { &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil, + &exchange.NilGeoLocationResolver{}, ) endpoint, _ := NewEndpoint( diff --git a/endpoints/openrtb2/test_utils.go b/endpoints/openrtb2/test_utils.go index e869ded80b8..74315fd3eb8 100644 --- a/endpoints/openrtb2/test_utils.go +++ b/endpoints/openrtb2/test_utils.go @@ -1254,6 +1254,7 @@ func buildTestExchange(testCfg *testConfigValues, adapterMap map[openrtb_ext.Bid &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil, + &exchange.NilGeoLocationResolver{}, ) testExchange = &exchangeTestWrapper{ diff --git a/errortypes/severity.go b/errortypes/severity.go index 5f9cd80dd28..b589e92a6f8 100644 --- a/errortypes/severity.go +++ b/errortypes/severity.go @@ -39,6 +39,16 @@ func ContainsFatalError(errors []error) bool { return false } +// FirstFatalError returns the first 
Fatal error found +func FirstFatalError(errors []error) error { + for _, err := range errors { + if isFatal(err) { + return err + } + } + return nil +} + // FatalOnly returns a new error list with only the fatal severity errors. func FatalOnly(errs []error) []error { errsFatal := make([]error, 0, len(errs)) diff --git a/errortypes/severity_test.go b/errortypes/severity_test.go index 8330316a8d2..f6fa04de2dc 100644 --- a/errortypes/severity_test.go +++ b/errortypes/severity_test.go @@ -56,6 +56,34 @@ func TestContainsFatalError(t *testing.T) { } } +func TestFirstFatalErrors(t *testing.T) { + fatalError := &stubError{severity: SeverityFatal} + fatalError2 := &stubError{severity: SeverityFatal} + notFatalError := &stubError{severity: SeverityWarning} + unknownSeverityError := errors.New("anyError") + + tests := []struct { + errors []error + first error + }{ + {[]error{}, nil}, + {[]error{fatalError}, fatalError}, + {[]error{fatalError2}, fatalError2}, + {[]error{notFatalError}, nil}, + {[]error{unknownSeverityError}, unknownSeverityError}, + {[]error{notFatalError, unknownSeverityError}, unknownSeverityError}, + {[]error{fatalError, fatalError2}, fatalError}, + {[]error{fatalError2, fatalError}, fatalError2}, + {[]error{notFatalError, fatalError, fatalError2}, fatalError}, + {[]error{fatalError2, unknownSeverityError, fatalError}, fatalError}, + {[]error{notFatalError, fatalError2, unknownSeverityError, fatalError}, fatalError2}, + } + + for _, test := range tests { + assert.Equal(t, test.first, FirstFatalError(test.errors), "FirstFatalError(%v)", test.errors) + } +} + func TestFatalOnly(t *testing.T) { fatalError := &stubError{severity: SeverityFatal} notFatalError := &stubError{severity: SeverityWarning} diff --git a/exchange/exchange.go b/exchange/exchange.go index 9ab91ee9ea3..97008c76dd1 100644 --- a/exchange/exchange.go +++ b/exchange/exchange.go @@ -43,6 +43,9 @@ import ( "github.com/buger/jsonparser" "github.com/gofrs/uuid" "github.com/golang/glog" + gdprAPI "github.com/prebid/go-gdpr/api" + "github.com/prebid/go-gdpr/vendorconsent" + gpplib "github.com/prebid/go-gpp" "github.com/prebid/openrtb/v20/openrtb2" "github.com/prebid/openrtb/v20/openrtb3" ) @@ -85,6 +88,8 @@ type exchange struct { macroReplacer macros.Replacer priceFloorEnabled bool priceFloorFetcher floors.FloorFetcher + geoLocationEnabled bool + geoLocationResolver GeoLocationResolver } // Container to pass out response ext data from the GetAllBids goroutines back into the main thread @@ -136,7 +141,7 @@ func (randomDeduplicateBidBooleanGenerator) Generate() bool { return rand.Intn(100) < 50 } -func NewExchange(adapters map[openrtb_ext.BidderName]AdaptedBidder, cache prebid_cache_client.Client, cfg *config.Configuration, requestValidator ortb.RequestValidator, syncersByBidder map[string]usersync.Syncer, metricsEngine metrics.MetricsEngine, infos config.BidderInfos, gdprPermsBuilder gdpr.PermissionsBuilder, currencyConverter *currency.RateConverter, categoriesFetcher stored_requests.CategoryFetcher, adsCertSigner adscert.Signer, macroReplacer macros.Replacer, priceFloorFetcher floors.FloorFetcher) Exchange { +func NewExchange(adapters map[openrtb_ext.BidderName]AdaptedBidder, cache prebid_cache_client.Client, cfg *config.Configuration, requestValidator ortb.RequestValidator, syncersByBidder map[string]usersync.Syncer, metricsEngine metrics.MetricsEngine, infos config.BidderInfos, gdprPermsBuilder gdpr.PermissionsBuilder, currencyConverter *currency.RateConverter, categoriesFetcher stored_requests.CategoryFetcher, 
adsCertSigner adscert.Signer, macroReplacer macros.Replacer, priceFloorFetcher floors.FloorFetcher, geoLocationResolver GeoLocationResolver) Exchange { bidderToSyncerKey := map[string]string{} for bidder, syncer := range syncersByBidder { bidderToSyncerKey[bidder] = syncer.Key() @@ -184,6 +189,8 @@ func NewExchange(adapters map[openrtb_ext.BidderName]AdaptedBidder, cache prebid macroReplacer: macroReplacer, priceFloorEnabled: cfg.PriceFloors.Enabled, priceFloorFetcher: priceFloorFetcher, + geoLocationEnabled: cfg.GeoLocation.Enabled, + geoLocationResolver: geoLocationResolver, } } @@ -224,6 +231,46 @@ type AuctionRequest struct { TmaxAdjustments *TmaxAdjustmentsPreprocessed } +// RequestPrivacy holds privacies of request +type RequestPrivacy struct { + // GDPR + Consent string + ParsedConsent gdprAPI.VendorConsents + GDPRDefaultValue gdpr.Signal + GDPRSignal gdpr.Signal + GDPRChannelEnabled bool + GDPREnforced bool + + // LMT + LMTEnforced bool + + // CCPA + CCPAProvided bool + CCPAEnforced bool + + // COPPA + COPPAEnforced bool + + // GPP + ParsedGPP gpplib.GppContainer +} + +func (p *RequestPrivacy) MakePrivacyLabels() (labels metrics.PrivacyLabels) { + if p == nil { + return + } + labels.CCPAProvided = p.CCPAProvided + labels.CCPAEnforced = p.CCPAEnforced + labels.COPPAEnforced = p.COPPAEnforced + labels.LMTEnforced = p.LMTEnforced + labels.GDPREnforced = p.GDPREnforced + if p.GDPREnforced && p.ParsedConsent != nil { + version := int(p.ParsedConsent.Version()) + labels.GDPRTCFVersion = metrics.TCFVersionToValue(version) + } + return +} + // BidderRequest holds the bidder specific request and all other // information needed to process that bidder request. type BidderRequest struct { @@ -241,6 +288,8 @@ func (e *exchange) HoldAuction(ctx context.Context, r *AuctionRequest, debugLog return nil, nil } + errs := EnrichGeoLocation(ctx, r.BidRequestWrapper, r.Account, e.geoLocationResolver) + err := r.HookExecutor.ExecuteProcessedAuctionStage(r.BidRequestWrapper) if err != nil { return nil, err @@ -272,12 +321,22 @@ func (e *exchange) HoldAuction(ctx context.Context, r *AuctionRequest, debugLog _, targData.cacheHost, targData.cachePath = e.cache.GetExtCacheData() } + requestPrivacy, privacyErrs := e.extractRequestPrivacy(r) + if errf := errortypes.FirstFatalError(privacyErrs); errf != nil { + return nil, errf + } + errs = append(errs, privacyErrs...) + + geoPrivacyErrs := EnrichGeoLocationWithPrivacy(ctx, r.BidRequestWrapper, r.Account, e.geoLocationResolver, requestPrivacy, r.TCF2Config) + errs = append(errs, geoPrivacyErrs...) + // Get currency rates conversions for the auction conversions := currency.GetAuctionCurrencyRates(e.currencyConverter, requestExtPrebid.CurrencyConversions) var floorErrs []error if e.priceFloorEnabled { floorErrs = floors.EnrichWithPriceFloors(r.BidRequestWrapper, r.Account, conversions, e.priceFloorFetcher) + errs = append(errs, floorErrs...) 
} responseDebugAllow, accountDebugAllow, debugLog := getDebugInfo(r.BidRequestWrapper.Test, requestExtPrebid, r.Account.DebugAllow, debugLog) @@ -317,16 +376,9 @@ func (e *exchange) HoldAuction(ctx context.Context, r *AuctionRequest, debugLog recordImpMetrics(r.BidRequestWrapper, e.me) // Make our best guess if GDPR applies - gdprDefaultValue := e.parseGDPRDefaultValue(r.BidRequestWrapper) - gdprSignal, err := getGDPR(r.BidRequestWrapper) - if err != nil { - return nil, err - } - channelEnabled := r.TCF2Config.ChannelEnabled(channelTypeMap[r.LegacyLabels.RType]) - gdprEnforced := enforceGDPR(gdprSignal, gdprDefaultValue, channelEnabled) dsaWriter := dsa.Writer{ Config: r.Account.Privacy.DSA, - GDPRInScope: gdprEnforced, + GDPRInScope: requestPrivacy.GDPREnforced, } if err := dsaWriter.Write(r.BidRequestWrapper); err != nil { return nil, err @@ -342,13 +394,13 @@ func (e *exchange) HoldAuction(ctx context.Context, r *AuctionRequest, debugLog Prebid: *requestExtPrebid, SChain: requestExt.GetSChain(), } - bidderRequests, privacyLabels, errs := e.requestSplitter.cleanOpenRTBRequests(ctx, *r, requestExtLegacy, gdprSignal, gdprEnforced, bidAdjustmentFactors) - for _, err := range errs { + bidderRequests, cleanErrs := e.requestSplitter.cleanOpenRTBRequests(ctx, *r, requestExtLegacy, requestPrivacy, bidAdjustmentFactors) + for _, err := range cleanErrs { if errortypes.ReadCode(err) == errortypes.InvalidImpFirstPartyDataErrorCode { return nil, err } } - errs = append(errs, floorErrs...) + errs = append(errs, cleanErrs...) mergedBidAdj, err := bidadjustment.Merge(r.BidRequestWrapper, r.Account.BidAdjustments) if err != nil { @@ -359,7 +411,7 @@ func (e *exchange) HoldAuction(ctx context.Context, r *AuctionRequest, debugLog } bidAdjustmentRules := bidadjustment.BuildRules(mergedBidAdj) - e.me.RecordRequestPrivacy(privacyLabels) + e.me.RecordRequestPrivacy(requestPrivacy.MakePrivacyLabels()) if len(r.StoredAuctionResponses) > 0 || len(r.StoredBidResponses) > 0 { e.me.RecordStoredResponse(r.PubID) @@ -571,9 +623,20 @@ func buildMultiBidMap(prebid *openrtb_ext.ExtRequestPrebid) map[string]openrtb_e return multiBidMap } -func (e *exchange) parseGDPRDefaultValue(r *openrtb_ext.RequestWrapper) gdpr.Signal { +func (e *exchange) parseGDPRDefaultValue(r *openrtb_ext.RequestWrapper, account config.Account, parsedConsent gdprAPI.VendorConsents) gdpr.Signal { gdprDefaultValue := e.gdprDefaultValue + // requests may have consent without gdpr signal. 
check if setting is enabled to assume gdpr applies + if parsedConsent != nil && parsedConsent.Version() > 0 { + if account.GDPR.ConsentStringMeansInScope != nil { + if *account.GDPR.ConsentStringMeansInScope { + gdprDefaultValue = gdpr.SignalYes + } + } else if e.privacyConfig.GDPR.ConsentStringMeansInScope { + gdprDefaultValue = gdpr.SignalYes + } + } + var geo *openrtb2.Geo if r.User != nil && r.User.Geo != nil { geo = r.User.Geo @@ -595,6 +658,52 @@ func (e *exchange) parseGDPRDefaultValue(r *openrtb_ext.RequestWrapper) gdpr.Sig return gdprDefaultValue } +func (e *exchange) extractRequestPrivacy(r *AuctionRequest) (p *RequestPrivacy, errs []error) { + req := r.BidRequestWrapper + + var gpp gpplib.GppContainer + if req.BidRequest.Regs != nil && len(req.BidRequest.Regs.GPP) > 0 { + var gppErrs []error + gpp, gppErrs = gpplib.Parse(req.BidRequest.Regs.GPP) + if len(gppErrs) > 0 { + errs = append(errs, gppErrs[0]) + } + } + + consent, err := getConsent(req, gpp) + if err != nil { + errs = append(errs, err) + } + parsedConsent, err := vendorconsent.ParseString(consent) + if err != nil { + parsedConsent = nil + } + + gdprDefaultValue := e.parseGDPRDefaultValue(req, r.Account, parsedConsent) + gdprSignal, err := getGDPR(req) + if err != nil { + errs = append(errs, err) + return + } + channelEnabled := r.TCF2Config.ChannelEnabled(channelTypeMap[r.LegacyLabels.RType]) + gdprEnforced := enforceGDPR(gdprSignal, gdprDefaultValue, channelEnabled) + + lmtEnforcer := extractLMT(req.BidRequest, e.privacyConfig) + + p = &RequestPrivacy{ + Consent: consent, + ParsedConsent: parsedConsent, + GDPRDefaultValue: gdprDefaultValue, + GDPRSignal: gdprSignal, + GDPRChannelEnabled: channelEnabled, + GDPREnforced: gdprEnforced, + COPPAEnforced: req.BidRequest.Regs != nil && req.BidRequest.Regs.COPPA == 1, + LMTEnforced: lmtEnforcer.ShouldEnforce(unknownBidder), + ParsedGPP: gpp, + } + return +} + func recordImpMetrics(r *openrtb_ext.RequestWrapper, metricsEngine metrics.MetricsEngine) { for _, impInRequest := range r.GetImp() { var impLabels metrics.ImpLabels = metrics.ImpLabels{ diff --git a/exchange/exchange_test.go b/exchange/exchange_test.go index 87b53b101e0..7dc246987af 100644 --- a/exchange/exchange_test.go +++ b/exchange/exchange_test.go @@ -19,6 +19,9 @@ import ( "time" "github.com/buger/jsonparser" + "github.com/prebid/go-gdpr/vendorconsent" + gpplib "github.com/prebid/go-gpp" + gppConstants "github.com/prebid/go-gpp/constants" "github.com/prebid/openrtb/v20/openrtb2" "github.com/prebid/prebid-server/v3/adapters" "github.com/prebid/prebid-server/v3/config" @@ -84,7 +87,7 @@ func TestNewExchange(t *testing.T) { }, }.Builder - e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil).(*exchange) + e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil, nil).(*exchange) for _, bidderName := range knownAdapters { if _, ok := e.adapterMap[bidderName]; !ok { if biddersInfo[string(bidderName)].IsEnabled() { @@ -134,7 +137,7 @@ func TestCharacterEscape(t *testing.T) { }, }.Builder - e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, 
biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil).(*exchange) + e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil, nil).(*exchange) // 3) Build all the parameters e.buildBidResponse(ctx.Background(), liveA... ) needs //liveAdapters []openrtb_ext.BidderName, @@ -1237,7 +1240,7 @@ func TestGetBidCacheInfoEndToEnd(t *testing.T) { }, }.Builder - e := NewExchange(adapters, pbc, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil).(*exchange) + e := NewExchange(adapters, pbc, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil, nil).(*exchange) // 3) Build all the parameters e.buildBidResponse(ctx.Background(), liveA... ) needs liveAdapters := []openrtb_ext.BidderName{bidderName} @@ -1596,7 +1599,7 @@ func TestBidResponseCurrency(t *testing.T) { }, }.Builder - e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil).(*exchange) + e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil, nil).(*exchange) liveAdapters := make([]openrtb_ext.BidderName, 1) liveAdapters[0] = "appnexus" @@ -1744,7 +1747,7 @@ func TestBidResponseImpExtInfo(t *testing.T) { t.Fatalf("Error intializing adapters: %v", adaptersErr) } - e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, nil, gdprPermsBuilder, nil, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil).(*exchange) + e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, nil, gdprPermsBuilder, nil, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil, nil).(*exchange) liveAdapters := make([]openrtb_ext.BidderName, 1) liveAdapters[0] = "appnexus" @@ -1838,7 +1841,7 @@ func TestRaceIntegration(t *testing.T) { }, }.Builder - ex := NewExchange(adapters, &wellBehavedCache{}, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, &nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil).(*exchange) + ex := NewExchange(adapters, &wellBehavedCache{}, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, &nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil, nil).(*exchange) _, err = ex.HoldAuction(context.Background(), auctionRequest, &debugLog) if err != nil { t.Errorf("HoldAuction 
returned unexpected error: %v", err) @@ -1936,7 +1939,7 @@ func TestPanicRecovery(t *testing.T) { }, }.Builder - e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil).(*exchange) + e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil, nil).(*exchange) chBids := make(chan *bidResponseWrapper, 1) panicker := func(bidderRequest BidderRequest, conversions currency.Conversions) { @@ -2006,7 +2009,7 @@ func TestPanicRecoveryHighLevel(t *testing.T) { allowAllBidders: true, }, }.Builder - e := NewExchange(adapters, &mockCache{}, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, categoriesFetcher, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil).(*exchange) + e := NewExchange(adapters, &mockCache{}, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, categoriesFetcher, &adscert.NilSigner{}, macros.NewStringIndexBasedReplacer(), nil, nil).(*exchange) e.adapterMap[openrtb_ext.BidderBeachfront] = panicingAdapter{} e.adapterMap[openrtb_ext.BidderAppnexus] = panicingAdapter{} @@ -4581,7 +4584,7 @@ func TestPassExperimentConfigsToHoldAuction(t *testing.T) { }, }.Builder - e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &signer, macros.NewStringIndexBasedReplacer(), nil).(*exchange) + e := NewExchange(adapters, nil, cfg, &mockRequestValidator{}, map[string]usersync.Syncer{}, &metricsConf.NilMetricsEngine{}, biddersInfo, gdprPermsBuilder, currencyConverter, nilCategoryFetcher{}, &signer, macros.NewStringIndexBasedReplacer(), nil, nil).(*exchange) // Define mock incoming bid requeset mockBidRequest := &openrtb2.BidRequest{ @@ -6358,3 +6361,515 @@ type mockRequestValidator struct { func (mrv *mockRequestValidator) ValidateImp(imp *openrtb_ext.ImpWrapper, cfg ortb.ValidationConfig, index int, aliases map[string]string, hasStoredResponses bool, storedBidResponses stored_responses.ImpBidderStoredResp) []error { return mrv.errors } + +func TestParseGDPRDefaultValue(t *testing.T) { + var ( + boolTrue = true + boolFalse = false + ) + + tests := []struct { + name string + defaultValue gdpr.Signal + privacyConfig config.Privacy + req *openrtb2.BidRequest + account config.Account + consent string + output gdpr.Signal + }{ + { + "Exchange default value is SignalYes, no other settings", + gdpr.SignalYes, + config.Privacy{}, + &openrtb2.BidRequest{}, + config.Account{}, + "", + gdpr.SignalYes, + }, + { + "Exchange default value is SignalNo, no other settings", + gdpr.SignalNo, + config.Privacy{}, + &openrtb2.BidRequest{}, + config.Account{}, + "", + gdpr.SignalNo, + }, + { + "Exchange default value is SignalNo, User is in EEA, Device is not in EEA", + gdpr.SignalNo, + config.Privacy{ + GDPR: config.GDPR{ + EEACountriesMap: map[string]struct{}{"ALA": {}}, + }, + }, + &openrtb2.BidRequest{ + Device: &openrtb2.Device{Geo: &openrtb2.Geo{Country: "USA"}}, + User: 
&openrtb2.User{Geo: &openrtb2.Geo{Country: "ALA"}}, + }, + config.Account{ + GDPR: config.AccountGDPR{ + ConsentStringMeansInScope: nil, + }, + }, + "", + gdpr.SignalYes, + }, + { + "Exchange default value is SignalNo, User is not in EEA, Device is not in EEA", + gdpr.SignalNo, + config.Privacy{ + GDPR: config.GDPR{ + EEACountriesMap: map[string]struct{}{"ALA": {}}, + }, + }, + &openrtb2.BidRequest{ + Device: &openrtb2.Device{Geo: &openrtb2.Geo{Country: "USA"}}, + User: &openrtb2.User{Geo: &openrtb2.Geo{Country: "USA"}}, + }, + config.Account{ + GDPR: config.AccountGDPR{ + ConsentStringMeansInScope: nil, + }, + }, + "", + gdpr.SignalNo, + }, + { + "Exchange default value is SignalNo, Device is in EEA", + gdpr.SignalNo, + config.Privacy{ + GDPR: config.GDPR{ + EEACountriesMap: map[string]struct{}{"ALA": {}}, + }, + }, + &openrtb2.BidRequest{ + Device: &openrtb2.Device{Geo: &openrtb2.Geo{Country: "ALA"}}, + }, + config.Account{ + GDPR: config.AccountGDPR{ + ConsentStringMeansInScope: nil, + }, + }, + "", + gdpr.SignalYes, + }, + { + "Exchange default value is SignalNo, Device is not in EEA", + gdpr.SignalNo, + config.Privacy{ + GDPR: config.GDPR{ + EEACountriesMap: map[string]struct{}{"ALA": {}}, + }, + }, + &openrtb2.BidRequest{ + Device: &openrtb2.Device{Geo: &openrtb2.Geo{Country: "USA"}}, + }, + config.Account{ + GDPR: config.AccountGDPR{ + ConsentStringMeansInScope: nil, + }, + }, + "", + gdpr.SignalNo, + }, + { + "Exchange default value is SignalNo, with consent, and means in scope", + gdpr.SignalNo, + config.Privacy{}, + &openrtb2.BidRequest{}, + config.Account{ + GDPR: config.AccountGDPR{ + ConsentStringMeansInScope: &boolTrue, + }, + }, + "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA", + gdpr.SignalYes, + }, + { + "Exchange default value is SignalNo, with consent, and not means in scope", + gdpr.SignalNo, + config.Privacy{}, + &openrtb2.BidRequest{}, + config.Account{ + GDPR: config.AccountGDPR{ + ConsentStringMeansInScope: &boolFalse, + }, + }, + "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA", + gdpr.SignalNo, + }, + { + "Exchange default value is SignalNo, without consent, and means in scope", + gdpr.SignalNo, + config.Privacy{}, + &openrtb2.BidRequest{}, + config.Account{ + GDPR: config.AccountGDPR{ + ConsentStringMeansInScope: &boolTrue, + }, + }, + "", + gdpr.SignalNo, + }, + { + "Exchange default value is SignalNo, with invalid consent, and means in scope", + gdpr.SignalNo, + config.Privacy{}, + &openrtb2.BidRequest{}, + config.Account{ + GDPR: config.AccountGDPR{ + ConsentStringMeansInScope: &boolTrue, + }, + }, + "invalid consent", + gdpr.SignalNo, + }, + { + "Exchange default value is SignalNo, with consent, default means in scope", + gdpr.SignalNo, + config.Privacy{ + GDPR: config.GDPR{ + ConsentStringMeansInScope: true, + }, + }, + &openrtb2.BidRequest{}, + config.Account{ + GDPR: config.AccountGDPR{ + ConsentStringMeansInScope: nil, + }, + }, + "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA", + gdpr.SignalYes, + }, + { + "Exchange default value is SignalNo, with consent, and means in scope, in EEA", + gdpr.SignalNo, + config.Privacy{ + GDPR: config.GDPR{ + EEACountriesMap: map[string]struct{}{"ALA": {}}, + ConsentStringMeansInScope: true, + }, + }, + &openrtb2.BidRequest{ + Device: &openrtb2.Device{Geo: &openrtb2.Geo{Country: "USA"}}, + }, + config.Account{}, + "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA", + gdpr.SignalNo, + }, + { + "Exchange default value is SignalNo, with consent, and means in scope, in EEA unknown", + gdpr.SignalNo, + config.Privacy{ + GDPR: 
config.GDPR{ + EEACountriesMap: map[string]struct{}{"ALA": {}}, + ConsentStringMeansInScope: true, + }, + }, + &openrtb2.BidRequest{ + Device: &openrtb2.Device{Geo: &openrtb2.Geo{Country: ""}}, + }, + config.Account{}, + "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA", + gdpr.SignalYes, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + e := new(exchange) + e.gdprDefaultValue = test.defaultValue + e.privacyConfig = test.privacyConfig + req := &openrtb_ext.RequestWrapper{BidRequest: test.req} + parsedConsent, _ := vendorconsent.ParseString(test.consent) + + assert.Equal(t, test.output, e.parseGDPRDefaultValue(req, test.account, parsedConsent)) + }) + } +} + +func TestExtractRequestPrivacyGDPR(t *testing.T) { + var ( + SignalNo int8 = 0 + SignalYes int8 = 1 + SignalInvalid int8 = 9 + BoolTrue = true + ) + + tests := []struct { + name string + req *openrtb2.BidRequest + account config.Account + tcf2config gdpr.TCF2ConfigReader + legacyLabels metrics.Labels + requestPrivacy *RequestPrivacy + errsCount int + errsHaveFatal bool + }{ + { + "Request without consent, default no, signal ambiguous, channel disabled", + &openrtb2.BidRequest{}, + config.Account{}, + gdpr.NewTCF2Config(config.TCF2{Enabled: false}, config.AccountGDPR{}), + metrics.Labels{}, + &RequestPrivacy{ + Consent: "", + GDPRDefaultValue: gdpr.SignalNo, + GDPRSignal: gdpr.SignalAmbiguous, + GDPRChannelEnabled: false, + GDPREnforced: false, + }, + 0, + false, + }, + { + "Request without consent, default no, signal ambiguous, channel enabled", + &openrtb2.BidRequest{}, + config.Account{}, + gdpr.NewTCF2Config(config.TCF2{Enabled: true}, config.AccountGDPR{}), + metrics.Labels{}, + &RequestPrivacy{ + Consent: "", + GDPRDefaultValue: gdpr.SignalNo, + GDPRSignal: gdpr.SignalAmbiguous, + GDPRChannelEnabled: true, + GDPREnforced: false, + }, + 0, + false, + }, + { + "Request with consent, default no, signal yes", + &openrtb2.BidRequest{ + Regs: &openrtb2.Regs{GDPR: &SignalYes}, + User: &openrtb2.User{Consent: "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA"}, + }, + config.Account{}, + gdpr.NewTCF2Config(config.TCF2{Enabled: true}, config.AccountGDPR{}), + metrics.Labels{}, + &RequestPrivacy{ + Consent: "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA", + GDPRDefaultValue: gdpr.SignalNo, + GDPRSignal: gdpr.SignalYes, + GDPRChannelEnabled: true, + GDPREnforced: true, + }, + 0, + false, + }, + { + "Request with consent, default no, signal no", + &openrtb2.BidRequest{ + Regs: &openrtb2.Regs{GDPR: &SignalNo}, + User: &openrtb2.User{Consent: "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA"}, + }, + config.Account{}, + gdpr.NewTCF2Config(config.TCF2{Enabled: true}, config.AccountGDPR{}), + metrics.Labels{}, + &RequestPrivacy{ + Consent: "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA", + GDPRDefaultValue: gdpr.SignalNo, + GDPRSignal: gdpr.SignalNo, + GDPRChannelEnabled: true, + GDPREnforced: false, + }, + 0, + false, + }, + { + "Request with consent, default yes, signal yes", + &openrtb2.BidRequest{ + Regs: &openrtb2.Regs{GDPR: &SignalYes}, + User: &openrtb2.User{Consent: "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA"}, + }, + config.Account{GDPR: config.AccountGDPR{ConsentStringMeansInScope: &BoolTrue}}, + gdpr.NewTCF2Config(config.TCF2{Enabled: true}, config.AccountGDPR{}), + metrics.Labels{}, + &RequestPrivacy{ + Consent: "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA", + GDPRDefaultValue: gdpr.SignalYes, + GDPRSignal: gdpr.SignalYes, + GDPRChannelEnabled: true, + GDPREnforced: true, + }, + 0, + false, + }, + { + 
"Request with consent, default no, signal invalid, should return fatal error", + &openrtb2.BidRequest{ + Regs: &openrtb2.Regs{GDPR: &SignalInvalid}, + User: &openrtb2.User{Consent: "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA"}, + }, + config.Account{}, + gdpr.NewTCF2Config(config.TCF2{Enabled: true}, config.AccountGDPR{}), + metrics.Labels{}, + nil, + 1, + true, + }, + { + "Request without consent, default no, signal no, but gpp exists", + &openrtb2.BidRequest{ + Regs: &openrtb2.Regs{GPP: "DBACNYA~CPXxRfAPXxRfAAfKABENB-CgAAAAAAAAAAYgAAAAAAAA~1NYN", GPPSID: []int8{2, 6}}, + User: &openrtb2.User{}, + }, + config.Account{}, + gdpr.NewTCF2Config(config.TCF2{Enabled: true}, config.AccountGDPR{}), + metrics.Labels{}, + &RequestPrivacy{ + Consent: "CPXxRfAPXxRfAAfKABENB-CgAAAAAAAAAAYgAAAAAAAA", + GDPRDefaultValue: gdpr.SignalNo, + GDPRSignal: gdpr.SignalYes, + GDPRChannelEnabled: true, + GDPREnforced: true, + }, + 0, + false, + }, + { + "Request without consent, default no, signal no, but invalid gpp exists", + &openrtb2.BidRequest{ + Regs: &openrtb2.Regs{GPP: "CPXxRfAPXxRfAAfKABENB-CgAAAAAAAAAAYgAAAAAAAA~1NYN", GPPSID: []int8{2, 6}}, + User: &openrtb2.User{}, + }, + config.Account{}, + gdpr.NewTCF2Config(config.TCF2{Enabled: true}, config.AccountGDPR{}), + metrics.Labels{}, + &RequestPrivacy{ + Consent: "", + GDPRDefaultValue: gdpr.SignalNo, + GDPRSignal: gdpr.SignalYes, + GDPRChannelEnabled: true, + GDPREnforced: true, + }, + 1, + false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + req := test.req + if req.User != nil && test.requestPrivacy != nil { + test.requestPrivacy.ParsedConsent, _ = vendorconsent.ParseString(req.User.Consent) + } + + if req.Regs != nil && req.Regs.GPP != "" && test.requestPrivacy != nil { + test.requestPrivacy.ParsedGPP, _ = gpplib.Parse(req.Regs.GPP) + for _, section := range test.requestPrivacy.ParsedGPP.Sections { + if section.GetID() == gppConstants.SectionTCFEU2 { + test.requestPrivacy.ParsedConsent, _ = vendorconsent.ParseString(section.GetValue()) + } + } + } + + e := new(exchange) + auctionRequest := new(AuctionRequest) + auctionRequest.BidRequestWrapper = &openrtb_ext.RequestWrapper{BidRequest: req} + auctionRequest.Account = test.account + auctionRequest.TCF2Config = test.tcf2config + auctionRequest.LegacyLabels = test.legacyLabels + + output, errs := e.extractRequestPrivacy(auctionRequest) + assert.True(t, reflect.DeepEqual(test.requestPrivacy, output), "expected output to match. 
Expected: %+v, Got: %+v", test.requestPrivacy, output) + assert.Equal(t, test.errsCount, len(errs)) + if test.errsHaveFatal { + assert.True(t, errortypes.FirstFatalError(errs) != nil) + } + }) + } +} + +func TestExtractRequestPrivacyLMT(t *testing.T) { + var ( + Lmt0 int8 = 0 + Lmt1 int8 = 1 + ) + tests := []struct { + name string + req *openrtb2.BidRequest + privacyConfig config.Privacy + expected bool + }{ + { + "Request device lmt is 0", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{Lmt: &Lmt0}, + }, + config.Privacy{ + LMT: config.LMT{Enforce: true}, + }, + false, + }, + { + "Request device lmt is 1", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{Lmt: &Lmt1}, + }, + config.Privacy{ + LMT: config.LMT{Enforce: true}, + }, + true, + }, + { + "Request device lmt is nil", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{Lmt: nil}, + }, + config.Privacy{ + LMT: config.LMT{Enforce: true}, + }, + false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + e := new(exchange) + e.privacyConfig = test.privacyConfig + auctionRequest := new(AuctionRequest) + auctionRequest.BidRequestWrapper = &openrtb_ext.RequestWrapper{BidRequest: test.req} + auctionRequest.TCF2Config = gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}) + + output, _ := e.extractRequestPrivacy(auctionRequest) + assert.Equal(t, test.expected, output.LMTEnforced) + }) + } +} + +func TestExtractRequestPrivacyCOPPA(t *testing.T) { + tests := []struct { + name string + req *openrtb2.BidRequest + expected bool + }{ + { + "Request COPPA is 0", + &openrtb2.BidRequest{ + Regs: &openrtb2.Regs{COPPA: 0}, + }, + false, + }, + { + "Request COPPA is 1", + &openrtb2.BidRequest{ + Regs: &openrtb2.Regs{COPPA: 1}, + }, + true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + e := new(exchange) + auctionRequest := new(AuctionRequest) + auctionRequest.BidRequestWrapper = &openrtb_ext.RequestWrapper{BidRequest: test.req} + auctionRequest.TCF2Config = gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}) + + output, _ := e.extractRequestPrivacy(auctionRequest) + assert.Equal(t, test.expected, output.COPPAEnforced) + }) + } +} diff --git a/exchange/geolocation.go b/exchange/geolocation.go new file mode 100644 index 00000000000..6c51674a8fc --- /dev/null +++ b/exchange/geolocation.go @@ -0,0 +1,168 @@ +package exchange + +import ( + "context" + "errors" + + tcf2 "github.com/prebid/go-gdpr/vendorconsent/tcf2" + "github.com/prebid/openrtb/v20/openrtb2" + "github.com/prebid/prebid-server/v3/config" + "github.com/prebid/prebid-server/v3/config/countrycode" + "github.com/prebid/prebid-server/v3/gdpr" + "github.com/prebid/prebid-server/v3/geolocation" + "github.com/prebid/prebid-server/v3/metrics" + "github.com/prebid/prebid-server/v3/openrtb_ext" + "github.com/prebid/prebid-server/v3/privacy" + "github.com/prebid/prebid-server/v3/util/iputil" +) + +type GeoLocationResolver interface { + Lookup(ctx context.Context, ip string, country string) (*geolocation.GeoInfo, error) +} + +type geoLocationResolver struct { + geoloc geolocation.GeoLocation + me metrics.MetricsEngine +} + +func (g *geoLocationResolver) Lookup(ctx context.Context, ip string, country string) (*geolocation.GeoInfo, error) { + if g.geoloc == nil || ip == "" || country != "" { + return nil, errors.New("geolocation lookup skipped") + } + geoinfo, err := g.geoloc.Lookup(ctx, ip) + g.me.RecordGeoLocationRequest(err == nil) + return geoinfo, err +} + +func NewGeoLocationResolver(geoloc geolocation.GeoLocation, me 
metrics.MetricsEngine) *geoLocationResolver { + return &geoLocationResolver{ + geoloc: geoloc, + me: me, + } +} + +func countryFromDevice(device *openrtb2.Device) string { + if device == nil || device.Geo == nil { + return "" + } + return device.Geo.Country +} + +func EnrichGeoLocation(ctx context.Context, req *openrtb_ext.RequestWrapper, account config.Account, geoResolver GeoLocationResolver) (errs []error) { + if !account.GeoLocation.IsGeoLocationEnabled() { + return nil + } + + device := req.BidRequest.Device + if device == nil { + return []error{errors.New("device is nil")} + } + + ip := device.IP + if ip == "" { + ip = device.IPv6 + } + country := countryFromDevice(device) + geoinfo, err := geoResolver.Lookup(ctx, ip, country) + if err != nil { + return []error{err} + } + + updateDeviceGeo(req.BidRequest, geoinfo) + + return +} + +func EnrichGeoLocationWithPrivacy( + ctx context.Context, + req *openrtb_ext.RequestWrapper, + account config.Account, + geoResolver GeoLocationResolver, + requestPrivacy *RequestPrivacy, + tcf2Config gdpr.TCF2ConfigReader, +) (errs []error) { + if !account.GeoLocation.IsGeoLocationEnabled() { + return nil + } + + device := req.BidRequest.Device + if device == nil { + return []error{errors.New("device is nil")} + } + + if requestPrivacy.GDPREnforced { + return + } + + country := countryFromDevice(device) + ip := maybeMaskIP(device, account.Privacy, requestPrivacy, tcf2Config) + geoinfo, err := geoResolver.Lookup(ctx, ip, country) + if err != nil { + return []error{err} + } + + updateDeviceGeo(req.BidRequest, geoinfo) + + return +} + +func maybeMaskIP(device *openrtb2.Device, accountPrivacy config.AccountPrivacy, requestPrivacy *RequestPrivacy, tcf2Config gdpr.TCF2ConfigReader) string { + if device == nil { + return "" + } + + shouldBeMasked := shouldMaskIP(requestPrivacy, tcf2Config) + if device.IP != "" { + if shouldBeMasked { + return privacy.ScrubIP(device.IP, accountPrivacy.IPv4Config.AnonKeepBits, iputil.IPv4BitSize) + } + return device.IP + } else if device.IPv6 != "" { + if shouldBeMasked { + return privacy.ScrubIP(device.IPv6, accountPrivacy.IPv6Config.AnonKeepBits, iputil.IPv6BitSize) + } + return device.IPv6 + } + return "" +} + +func shouldMaskIP(requestPrivacy *RequestPrivacy, tcf2Config gdpr.TCF2ConfigReader) bool { + if requestPrivacy.COPPAEnforced || requestPrivacy.LMTEnforced { + return true + } + if requestPrivacy.ParsedConsent != nil { + cm, ok := requestPrivacy.ParsedConsent.(tcf2.ConsentMetadata) + return ok && !tcf2Config.FeatureOneEnforced() && !cm.SpecialFeatureOptIn(1) + } + return false +} + +func updateDeviceGeo(req *openrtb2.BidRequest, geoinfo *geolocation.GeoInfo) { + if req.Device == nil || geoinfo == nil { + return + } + + device := *req.Device + if device.Geo == nil { + device.Geo = &openrtb2.Geo{} + } + + geo := device.Geo + if alpha3 := countrycode.ToAlpha3(geoinfo.Country); alpha3 != "" { + geo.Country = alpha3 + } + if geoinfo.Region != "" { + geo.Region = geoinfo.Region + } + if offset, err := geolocation.TimezoneToUTCOffset(geoinfo.TimeZone); err == nil { + geo.UTCOffset = int64(offset) + } + + req.Device = &device +} + +type NilGeoLocationResolver struct{} + +func (g *NilGeoLocationResolver) Lookup(ctx context.Context, ip string, country string) (*geolocation.GeoInfo, error) { + return &geolocation.GeoInfo{}, nil +} diff --git a/exchange/geolocation_test.go b/exchange/geolocation_test.go new file mode 100644 index 00000000000..8d7392fa103 --- /dev/null +++ b/exchange/geolocation_test.go @@ -0,0 +1,643 @@ +package 
exchange + +import ( + "context" + "sync/atomic" + "testing" + + "github.com/prebid/go-gdpr/vendorconsent" + "github.com/prebid/openrtb/v20/openrtb2" + "github.com/prebid/prebid-server/v3/config" + "github.com/prebid/prebid-server/v3/config/countrycode" + "github.com/prebid/prebid-server/v3/gdpr" + "github.com/prebid/prebid-server/v3/geolocation" + "github.com/prebid/prebid-server/v3/geolocation/geolocationtest" + "github.com/prebid/prebid-server/v3/metrics" + "github.com/prebid/prebid-server/v3/openrtb_ext" + "github.com/prebid/prebid-server/v3/util/jsonutil" + "github.com/stretchr/testify/assert" +) + +type mockMetrics struct { + metrics.MetricsEngineMock + success int64 + fail int64 +} + +func (m *mockMetrics) RecordGeoLocationRequest(success bool) { + if success { + atomic.AddInt64(&m.success, 1) + } else { + atomic.AddInt64(&m.fail, 1) + } +} + +type mockGeoLocationResolverResult struct { + geo *geolocation.GeoInfo + err error +} + +type mockGeoLocationResolver struct { + data map[string]mockGeoLocationResolverResult +} + +func (g *mockGeoLocationResolver) Lookup(ctx context.Context, ip string, country string) (*geolocation.GeoInfo, error) { + if ip == "" || country != "" { + return &geolocation.GeoInfo{}, assert.AnError + } + + if g.data == nil { + return &geolocation.GeoInfo{}, nil + } + if result, ok := g.data[ip]; ok { + return result.geo, result.err + } + return &geolocation.GeoInfo{}, nil +} + +func makeMockGeoLocationResolver(data map[string]mockGeoLocationResolverResult) GeoLocationResolver { + return &mockGeoLocationResolver{data: data} +} + +func TestGeoLocationResolver(t *testing.T) { + me := &mockMetrics{} + geoservice := geolocationtest.NewMockGeoLocation(map[string]*geolocation.GeoInfo{ + "1.1.1.1": {Country: "CN"}, + "1.1.1.2": {Country: "US"}, + }) + tests := []struct { + name string + geoloc geolocation.GeoLocation + ip string + country string + geoCountry string + geoErr bool + }{ + { + "Resolver is nil", + nil, "1.1.1.1", "", "", true, + }, + { + "Lookup empty IP", + geoservice, "", "", "", true, + }, + { + "Lookup valid IP, country has value", + geoservice, "1.1.1.1", "CN", "", true, + }, + { + "Lookup unknown IP", + geoservice, "2.2.2.2", "", "", true, + }, + { + "Lookup successful, response country is CN", + geoservice, "1.1.1.1", "", "CN", false, + }, + { + "Lookup successful, response country is US", + geoservice, "1.1.1.2", "", "US", false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + resolver := NewGeoLocationResolver(test.geoloc, me) + geo, err := resolver.Lookup(context.Background(), test.ip, test.country) + if test.geoErr { + assert.Error(t, err, "geolocation should return error") + } else { + assert.NoError(t, err, "geolocation should not return error. 
Error: %v", err) + assert.Equal(t, test.geoCountry, geo.Country) + } + }) + } + + assert.Equal(t, int64(2), me.success, "metrics success count should be 2") + assert.Equal(t, int64(1), me.fail, "metrics fail count should be 1") +} + +func TestEnrichGeoLocation(t *testing.T) { + countrycode.Load("CN,CHN\n") + + resolver := makeMockGeoLocationResolver(map[string]mockGeoLocationResolverResult{ + "1.1.1.1": { + geo: &geolocation.GeoInfo{Country: "CN", Region: "Shanghai", TimeZone: "Asia/Shanghai"}, + err: nil, + }, + "1111:2222:3333:4400::": { + geo: &geolocation.GeoInfo{Country: "CN", Region: "Sichuan", TimeZone: "UTC"}, + err: nil, + }, + "2.2.2.2": { + geo: nil, + err: assert.AnError, + }, + }) + tests := []struct { + name string + req *openrtb2.BidRequest + account config.Account + resolver GeoLocationResolver + expectedCountry string + expectedRegion string + expectedUTCOffset int64 + errsCount int + }{ + { + "Enrich device. geoLocation is disabled", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IP: "1.1.1.1"}, + }, + config.Account{GeoLocation: config.AccountGeoLocation{Enabled: false}}, + resolver, + "", + "", + 0, + 0, + }, + { + "Enrich device. GeoLocation is enabled, IPv4 is used", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IP: "1.1.1.1", IPv6: "1111:2222:3333:4400::"}, + }, + config.Account{GeoLocation: config.AccountGeoLocation{Enabled: true}}, + resolver, + "CHN", + "Shanghai", + 480, + 0, + }, + { + "Enrich device. GeoLocation is enabled, IPv6 is used", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IPv6: "1111:2222:3333:4400::"}, + }, + config.Account{GeoLocation: config.AccountGeoLocation{Enabled: true}}, + resolver, + "CHN", + "Sichuan", + 0, + 0, + }, + { + "Enrich device. device is nil", + &openrtb2.BidRequest{}, + config.Account{GeoLocation: config.AccountGeoLocation{Enabled: true}}, + resolver, + "", + "", + 0, + 1, + }, + { + "Enrich device. country exists", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IP: "1.1.1.1", Geo: &openrtb2.Geo{Country: "USA"}}, + }, + config.Account{GeoLocation: config.AccountGeoLocation{Enabled: true}}, + resolver, + "USA", + "", + 0, + 1, + }, + { + "Enrich device. 
resolver returns error", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IP: "2.2.2.2"}, + }, + config.Account{GeoLocation: config.AccountGeoLocation{Enabled: true}}, + resolver, + "", + "", + 0, + 1, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + req := &openrtb_ext.RequestWrapper{BidRequest: test.req} + errs := EnrichGeoLocation(context.Background(), req, test.account, test.resolver) + assert.Equal(t, test.errsCount, len(errs), "errors count should be %d", test.errsCount) + + var ( + country string + region string + utcoffset int64 + ) + if req.BidRequest.Device != nil && req.BidRequest.Device.Geo != nil { + country = req.BidRequest.Device.Geo.Country + region = req.BidRequest.Device.Geo.Region + utcoffset = req.BidRequest.Device.Geo.UTCOffset + } + assert.Equal(t, test.expectedCountry, country, "country should be %s", test.expectedCountry) + assert.Equal(t, test.expectedRegion, region, "region should be %s", test.expectedRegion) + assert.Equal(t, test.expectedUTCOffset, utcoffset, "utc offset should be %d", test.expectedUTCOffset) + }) + } +} + +func TestEnrichGeoLocationWithPrivacy(t *testing.T) { + countrycode.Load("CN,CHN\n") + + resolver := makeMockGeoLocationResolver(map[string]mockGeoLocationResolverResult{ + "1.1.1.0": { + geo: &geolocation.GeoInfo{Country: "CN", Region: "", TimeZone: "UTC"}, + err: nil, + }, + "1.1.1.1": { + geo: &geolocation.GeoInfo{Country: "CN", Region: "Shanghai", TimeZone: "Asia/Shanghai"}, + err: nil, + }, + "1111:2222:3333:4400::": { + geo: &geolocation.GeoInfo{Country: "CN", Region: "Sichuan", TimeZone: "UTC"}, + err: nil, + }, + "2.2.2.2": { + geo: nil, + err: assert.AnError, + }, + }) + tests := []struct { + name string + req *openrtb2.BidRequest + account config.Account + resolver GeoLocationResolver + requestPrivacy *RequestPrivacy + tcf2config gdpr.TCF2ConfigReader + expectedCountry string + expectedRegion string + expectedUTCOffset int64 + errsCount int + }{ + { + "Enrich device. geoLocation is disabled", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IP: "1.1.1.1"}, + }, + config.Account{ + Privacy: config.AccountPrivacy{IPv4Config: config.IPv4{AnonKeepBits: 24}, IPv6Config: config.IPv6{AnonKeepBits: 56}}, + GeoLocation: config.AccountGeoLocation{Enabled: false}, + }, + resolver, + &RequestPrivacy{GDPREnforced: false, LMTEnforced: false}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "", + "", + 0, + 0, + }, + { + "Enrich device. GDPR not enforced, LMT not enforced, device is nil", + &openrtb2.BidRequest{}, + config.Account{ + Privacy: config.AccountPrivacy{IPv4Config: config.IPv4{AnonKeepBits: 24}, IPv6Config: config.IPv6{AnonKeepBits: 56}}, + GeoLocation: config.AccountGeoLocation{Enabled: true}, + }, + resolver, + &RequestPrivacy{GDPREnforced: false, LMTEnforced: false}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "", + "", + 0, + 1, + }, + { + "Enrich device. GDPR not enforced, LMT not enforced, resolver returns error", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IP: "2.2.2.2"}, + }, + config.Account{ + Privacy: config.AccountPrivacy{IPv4Config: config.IPv4{AnonKeepBits: 24}, IPv6Config: config.IPv6{AnonKeepBits: 56}}, + GeoLocation: config.AccountGeoLocation{Enabled: true}, + }, + resolver, + &RequestPrivacy{GDPREnforced: false, LMTEnforced: false}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "", + "", + 0, + 1, + }, + { + "Enrich device. 
GDPR enforced, LMT not enforced, should not enrich", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IP: "1.1.1.1"}, + }, + config.Account{ + Privacy: config.AccountPrivacy{IPv4Config: config.IPv4{AnonKeepBits: 24}, IPv6Config: config.IPv6{AnonKeepBits: 56}}, + GeoLocation: config.AccountGeoLocation{Enabled: true}, + }, + resolver, + &RequestPrivacy{GDPREnforced: true, LMTEnforced: false}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "", + "", + 0, + 0, + }, + { + "Enrich device. GDPR not enforced, LMT not enforced, country exists", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IP: "1.1.1.1", Geo: &openrtb2.Geo{Country: "USA"}}, + }, + config.Account{ + Privacy: config.AccountPrivacy{IPv4Config: config.IPv4{AnonKeepBits: 24}, IPv6Config: config.IPv6{AnonKeepBits: 56}}, + GeoLocation: config.AccountGeoLocation{Enabled: true}, + }, + resolver, + &RequestPrivacy{GDPREnforced: false, LMTEnforced: false}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "USA", + "", + 0, + 1, + }, + { + "Enrich device. GDPR not enforced, LMT not enforced, IPv4 is used", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IP: "1.1.1.1", IPv6: "1111:2222:3333:4400::"}, + }, + config.Account{ + Privacy: config.AccountPrivacy{IPv4Config: config.IPv4{AnonKeepBits: 24}, IPv6Config: config.IPv6{AnonKeepBits: 56}}, + GeoLocation: config.AccountGeoLocation{Enabled: true}, + }, + resolver, + &RequestPrivacy{GDPREnforced: false, LMTEnforced: false}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "CHN", + "Shanghai", + 480, + 0, + }, + { + "Enrich device. GDPR not enforced, LMT enforced, IPv4 is used", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IP: "1.1.1.1", IPv6: "1111:2222:3333:4400::"}, + }, + config.Account{ + Privacy: config.AccountPrivacy{IPv4Config: config.IPv4{AnonKeepBits: 24}, IPv6Config: config.IPv6{AnonKeepBits: 56}}, + GeoLocation: config.AccountGeoLocation{Enabled: true}, + }, + resolver, + &RequestPrivacy{GDPREnforced: false, LMTEnforced: true}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "CHN", + "", + 0, + 0, + }, + { + "Enrich device. 
GDPR not enforced, LMT enforced, IPv6 is used", + &openrtb2.BidRequest{ + Device: &openrtb2.Device{IPv6: "1111:2222:3333:4400::"}, + }, + config.Account{ + Privacy: config.AccountPrivacy{IPv4Config: config.IPv4{AnonKeepBits: 24}, IPv6Config: config.IPv6{AnonKeepBits: 56}}, + GeoLocation: config.AccountGeoLocation{Enabled: true}, + }, + resolver, + &RequestPrivacy{GDPREnforced: false, LMTEnforced: true}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "CHN", + "Sichuan", + 0, + 0, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + req := &openrtb_ext.RequestWrapper{BidRequest: test.req} + errs := EnrichGeoLocationWithPrivacy(context.Background(), req, test.account, test.resolver, test.requestPrivacy, test.tcf2config) + assert.Equal(t, test.errsCount, len(errs), "errors count should be %d", test.errsCount) + + var ( + country string + region string + utcoffset int64 + ) + if req.BidRequest.Device != nil && req.BidRequest.Device.Geo != nil { + country = req.BidRequest.Device.Geo.Country + region = req.BidRequest.Device.Geo.Region + utcoffset = req.BidRequest.Device.Geo.UTCOffset + } + assert.Equal(t, test.expectedCountry, country, "country should be %s", test.expectedCountry) + assert.Equal(t, test.expectedRegion, region, "region should be %s", test.expectedRegion) + assert.Equal(t, test.expectedUTCOffset, utcoffset, "utc offset should be %d", test.expectedUTCOffset) + }) + } +} + +func TestCountryFromDevice(t *testing.T) { + tests := []struct { + device *openrtb2.Device + country string + }{ + {nil, ""}, + {&openrtb2.Device{}, ""}, + {&openrtb2.Device{Geo: &openrtb2.Geo{}}, ""}, + {&openrtb2.Device{Geo: &openrtb2.Geo{Country: "US"}}, "US"}, + } + + for _, test := range tests { + assert.Equal(t, test.country, countryFromDevice(test.device)) + } +} + +func TestMaybeMaskIP(t *testing.T) { + tests := []struct { + name string + device *openrtb2.Device + accountPrivacy config.AccountPrivacy + reqPrivacy *RequestPrivacy + tcf2Config gdpr.TCF2ConfigReader + output string + }{ + { + "Device is nil, ip should be empty", + nil, + config.AccountPrivacy{}, + &RequestPrivacy{LMTEnforced: false}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "", + }, + { + "IPv4 and IPv6 both empty, ip should be empty", + &openrtb2.Device{IP: "", IPv6: ""}, + config.AccountPrivacy{}, + &RequestPrivacy{LMTEnforced: false}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "", + }, + { + "IPv4 with no privacy", + &openrtb2.Device{IP: "1.1.1.1", IPv6: "1111:2222:3333:4444:5555:6666:7777:8888"}, + config.AccountPrivacy{IPv4Config: config.IPv4{AnonKeepBits: 24}}, + &RequestPrivacy{LMTEnforced: false}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "1.1.1.1", + }, + { + "IPv6 with no privacy", + &openrtb2.Device{IPv6: "1111:2222:3333:4444:5555:6666:7777:8888"}, + config.AccountPrivacy{IPv6Config: config.IPv6{AnonKeepBits: 56}}, + &RequestPrivacy{LMTEnforced: false}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "1111:2222:3333:4444:5555:6666:7777:8888", + }, + { + "IPv4 and IPv6 with privacy, IPv4 is preferred", + &openrtb2.Device{IP: "1.1.1.1", IPv6: "1111:2222:3333:4444:5555:6666:7777:8888"}, + config.AccountPrivacy{IPv4Config: config.IPv4{AnonKeepBits: 24}}, + &RequestPrivacy{LMTEnforced: true}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "1.1.1.0", + }, + { + "IPv6 with privacy", + &openrtb2.Device{IPv6: "1111:2222:3333:4444:5555:6666:7777:8888"}, + config.AccountPrivacy{IPv6Config: config.IPv6{AnonKeepBits: 
56}}, + &RequestPrivacy{LMTEnforced: true}, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + "1111:2222:3333:4400::", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + ip := maybeMaskIP(test.device, test.accountPrivacy, test.reqPrivacy, test.tcf2Config) + assert.Equal(t, test.output, ip) + }) + } +} + +func TestShouldMaskIP(t *testing.T) { + tests := []struct { + desc string + reqPrivacy *RequestPrivacy + tcf2Config gdpr.TCF2ConfigReader + output bool + }{ + { + "Nothing enforced", + &RequestPrivacy{ + COPPAEnforced: false, + LMTEnforced: false, + Consent: "", + }, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + false, + }, + { + "COPPA enforced", + &RequestPrivacy{ + COPPAEnforced: true, + LMTEnforced: false, + Consent: "", + }, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + true, + }, + { + "LMT enforced", + &RequestPrivacy{ + COPPAEnforced: false, + LMTEnforced: true, + Consent: "", + }, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + true, + }, + { + "TCF2 without SP1 consent enforced", + &RequestPrivacy{ + COPPAEnforced: false, + LMTEnforced: false, + Consent: "CPuKGCPPuKGCPNEAAAENCZCAAAAAAAAAAAAAAAAAAAAA", + }, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + true, + }, + { + "TCF2 with SP1 consent enforced", + &RequestPrivacy{ + COPPAEnforced: false, + LMTEnforced: false, + Consent: "CQDkxqbQDkxqbHcAAAENCZCIAAAAAAAAAAAAAAAAAAAA.II7Nd_X__bX9n-_7_6ft0eY1f9_r37uQzDhfNs-8F3L_W_LwX32E7NF36tq4KmR4ku1bBIQNtHMnUDUmxaolVrzHsak2cpyNKJ_JkknsZe2dYGF9Pn9lD-YKZ7_5_9_f52T_9_9_-39z3_9f___dv_-__-vjf_599n_v9fV_78_Kf9______-____________8A", + }, + gdpr.NewTCF2Config(config.TCF2{}, config.AccountGDPR{}), + false, + }, + { + "TCF2 with SP1 host enforced", + &RequestPrivacy{ + COPPAEnforced: false, + LMTEnforced: false, + Consent: "", + }, + gdpr.NewTCF2Config( + config.TCF2{SpecialFeature1: config.TCF2SpecialFeature{Enforce: true}}, + config.AccountGDPR{}, + ), + false, + }, + } + + for _, test := range tests { + t.Run(test.desc, func(t *testing.T) { + if test.reqPrivacy.Consent != "" { + parsedConsent, err := vendorconsent.ParseString(test.reqPrivacy.Consent) + assert.NoError(t, err, "Failed to parse consent string") + test.reqPrivacy.ParsedConsent = parsedConsent + } + assert.Equal(t, test.output, shouldMaskIP(test.reqPrivacy, test.tcf2Config)) + }) + } +} + +func TestUpdateDeviceGeo(t *testing.T) { + countrycode.Load("CN,CHN\n") + + tests := []struct { + device *openrtb2.Device + geoinfo *geolocation.GeoInfo + expectedDevice *openrtb2.Device + }{ + { + nil, + &geolocation.GeoInfo{Country: "CN"}, + nil, + }, + { + &openrtb2.Device{}, + nil, + &openrtb2.Device{}, + }, + { + &openrtb2.Device{Geo: &openrtb2.Geo{}}, + nil, + &openrtb2.Device{Geo: &openrtb2.Geo{}}, + }, + { + &openrtb2.Device{}, + &geolocation.GeoInfo{Country: "CN", Region: "Shanghai", TimeZone: "Asia/Shanghai"}, + &openrtb2.Device{Geo: &openrtb2.Geo{Country: "CHN", Region: "Shanghai", UTCOffset: 480}}, + }, + // bad geo info + { + &openrtb2.Device{Geo: &openrtb2.Geo{Country: "CN", Region: "Chongqing", UTCOffset: 420}}, + &geolocation.GeoInfo{Country: "", Region: "", TimeZone: "UNKNOWN"}, + &openrtb2.Device{Geo: &openrtb2.Geo{Country: "CN", Region: "Chongqing", UTCOffset: 420}}, + }, + } + + for _, test := range tests { + req := &openrtb2.BidRequest{Device: test.device} + updateDeviceGeo(req, test.geoinfo) + expected, _ := jsonutil.Marshal(test.expectedDevice) + updated, _ := jsonutil.Marshal(req.Device) + assert.Equal(t, string(expected), 
string(updated), "device should be %s", string(expected)) + } +} diff --git a/exchange/utils.go b/exchange/utils.go index 8c8e63a0147..e68ba2dee1f 100644 --- a/exchange/utils.go +++ b/exchange/utils.go @@ -8,7 +8,6 @@ import ( "math/rand" "strings" - "github.com/prebid/go-gdpr/vendorconsent" gpplib "github.com/prebid/go-gpp" gppConstants "github.com/prebid/go-gpp/constants" "github.com/prebid/openrtb/v20/openrtb2" @@ -59,10 +58,9 @@ type requestSplitter struct { func (rs *requestSplitter) cleanOpenRTBRequests(ctx context.Context, auctionReq AuctionRequest, requestExt *openrtb_ext.ExtRequest, - gdprSignal gdpr.Signal, - gdprEnforced bool, + requestPrivacy *RequestPrivacy, bidAdjustmentFactors map[string]float64, -) (bidderRequests []BidderRequest, privacyLabels metrics.PrivacyLabels, errs []error) { +) (bidderRequests []BidderRequest, errs []error) { req := auctionReq.BidRequestWrapper if err := PreloadExts(req); err != nil { return @@ -105,50 +103,21 @@ func (rs *requestSplitter) cleanOpenRTBRequests(ctx context.Context, return } - var gpp gpplib.GppContainer - if req.BidRequest.Regs != nil && len(req.BidRequest.Regs.GPP) > 0 { - var gppErrs []error - gpp, gppErrs = gpplib.Parse(req.BidRequest.Regs.GPP) - if len(gppErrs) > 0 { - errs = append(errs, gppErrs[0]) - } - } - - consent, err := getConsent(req, gpp) + ccpaEnforcer, err := extractCCPA(req.BidRequest, rs.privacyConfig, &auctionReq.Account, requestAliases, channelTypeMap[auctionReq.LegacyLabels.RType], requestPrivacy.ParsedGPP) if err != nil { errs = append(errs, err) } - ccpaEnforcer, err := extractCCPA(req.BidRequest, rs.privacyConfig, &auctionReq.Account, requestAliases, channelTypeMap[auctionReq.LegacyLabels.RType], gpp) - if err != nil { - errs = append(errs, err) - } - - lmtEnforcer := extractLMT(req.BidRequest, rs.privacyConfig) - - // request level privacy policies - coppa := req.BidRequest.Regs != nil && req.BidRequest.Regs.COPPA == 1 - lmt := lmtEnforcer.ShouldEnforce(unknownBidder) - - privacyLabels.CCPAProvided = ccpaEnforcer.CanEnforce() - privacyLabels.CCPAEnforced = ccpaEnforcer.ShouldEnforce(unknownBidder) - privacyLabels.COPPAEnforced = coppa - privacyLabels.LMTEnforced = lmt + requestPrivacy.CCPAProvided = ccpaEnforcer.CanEnforce() + requestPrivacy.CCPAEnforced = ccpaEnforcer.ShouldEnforce(unknownBidder) var gdprPerms gdpr.Permissions = &gdpr.AlwaysAllow{} - if gdprEnforced { - privacyLabels.GDPREnforced = true - parsedConsent, err := vendorconsent.ParseString(consent) - if err == nil { - version := int(parsedConsent.Version()) - privacyLabels.GDPRTCFVersion = metrics.TCFVersionToValue(version) - } - + if requestPrivacy.GDPREnforced { gdprRequestInfo := gdpr.RequestInfo{ AliasGVLIDs: requestAliasesGVLIDs, - Consent: consent, - GDPRSignal: gdprSignal, + Consent: requestPrivacy.Consent, + GDPRSignal: requestPrivacy.GDPRSignal, PublisherID: auctionReq.LegacyLabels.PubID, } gdprPerms = rs.gdprPermsBuilder(auctionReq.TCF2Config, gdprRequestInfo) @@ -201,15 +170,15 @@ func (rs *requestSplitter) cleanOpenRTBRequests(ctx context.Context, applyFPD(auctionReq.FirstPartyData, coreBidder, openrtb_ext.BidderName(bidder), isRequestAlias, reqWrapperCopy, fpdUserEIDsPresent) // privacy scrubbing - if err := rs.applyPrivacy(reqWrapperCopy, coreBidder, bidder, auctionReq, auctionPermissions, ccpaEnforcer, lmt, coppa); err != nil { + if err := rs.applyPrivacy(reqWrapperCopy, coreBidder, bidder, auctionReq, auctionPermissions, ccpaEnforcer, requestPrivacy.LMTEnforced, requestPrivacy.COPPAEnforced); err != nil { errs = append(errs, err) 
continue } // GPP downgrade: always downgrade unless we can confirm GPP is supported if shouldSetLegacyPrivacy(rs.bidderInfo, string(coreBidder)) { - setLegacyGDPRFromGPP(reqWrapperCopy, gpp) - setLegacyUSPFromGPP(reqWrapperCopy, gpp) + setLegacyGDPRFromGPP(reqWrapperCopy, requestPrivacy.ParsedGPP) + setLegacyUSPFromGPP(reqWrapperCopy, requestPrivacy.ParsedGPP) } // remove imps with stored responses so they aren't sent to the bidder diff --git a/exchange/utils_test.go b/exchange/utils_test.go index 67ad46dd725..2e6e97c1984 100644 --- a/exchange/utils_test.go +++ b/exchange/utils_test.go @@ -7,6 +7,7 @@ import ( "sort" "testing" + "github.com/prebid/go-gdpr/vendorconsent" gpplib "github.com/prebid/go-gpp" "github.com/prebid/go-gpp/constants" "github.com/prebid/openrtb/v20/openrtb2" @@ -764,7 +765,7 @@ func TestCleanOpenRTBRequests(t *testing.T) { hostSChainNode: nil, bidderInfo: config.BidderInfos{}, } - bidderRequests, _, err := reqSplitter.cleanOpenRTBRequests(context.Background(), test.req, nil, gdpr.SignalNo, false, map[string]float64{}) + bidderRequests, err := reqSplitter.cleanOpenRTBRequests(context.Background(), test.req, nil, &RequestPrivacy{COPPAEnforced: test.applyCOPPA}, map[string]float64{}) if test.hasError { assert.NotNil(t, err, "Error shouldn't be nil") } else { @@ -830,7 +831,7 @@ func TestCleanOpenRTBRequestsWithFPD(t *testing.T) { bidderInfo: config.BidderInfos{}, } - bidderRequests, _, err := reqSplitter.cleanOpenRTBRequests(context.Background(), test.req, nil, gdpr.SignalNo, false, map[string]float64{}) + bidderRequests, err := reqSplitter.cleanOpenRTBRequests(context.Background(), test.req, nil, &RequestPrivacy{}, map[string]float64{}) assert.Empty(t, err, "No errors should be returned") for _, bidderRequest := range bidderRequests { bidderName := bidderRequest.BidderName @@ -1145,7 +1146,7 @@ func TestCleanOpenRTBRequestsWithBidResponses(t *testing.T) { bidderInfo: config.BidderInfos{}, } - actualBidderRequests, _, err := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, gdpr.SignalNo, false, map[string]float64{}) + actualBidderRequests, err := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, &RequestPrivacy{}, map[string]float64{}) assert.Empty(t, err, "No errors should be returned") assert.Len(t, actualBidderRequests, len(test.expectedBidderRequests), "result len doesn't match for testCase %s", test.description) for _, actualBidderRequest := range actualBidderRequests { @@ -1317,7 +1318,8 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { bidderInfo: config.BidderInfos{}, } - bidderRequests, privacyLabels, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, gdpr.SignalNo, false, map[string]float64{}) + requestPrivacy := &RequestPrivacy{} + bidderRequests, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, requestPrivacy, map[string]float64{}) result := bidderRequests[0] assert.Nil(t, errs) @@ -1330,7 +1332,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { assert.NotEqual(t, result.BidRequest.Device.DIDMD5, "", test.description+":Device.DIDMD5") metricsMock.AssertNotCalled(t, "RecordAdapterBuyerUIDScrubbed", openrtb_ext.BidderAppnexus) } - assert.Equal(t, test.expectPrivacyLabels, privacyLabels, test.description+":PrivacyLabels") + assert.Equal(t, test.expectPrivacyLabels, requestPrivacy.MakePrivacyLabels(), test.description+":PrivacyLabels") } } @@ -1396,7 +1398,7 @@ func TestCleanOpenRTBRequestsCCPAErrors(t *testing.T) { bidderInfo: config.BidderInfos{}, } 
- _, _, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, &reqExtStruct, gdpr.SignalNo, false, map[string]float64{}) + _, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, &reqExtStruct, &RequestPrivacy{}, map[string]float64{}) assert.ElementsMatch(t, []error{test.expectError}, errs, test.description) } @@ -1455,7 +1457,8 @@ func TestCleanOpenRTBRequestsCOPPA(t *testing.T) { bidderInfo: config.BidderInfos{}, } - bidderRequests, privacyLabels, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, gdpr.SignalNo, false, map[string]float64{}) + requestPrivacy := &RequestPrivacy{COPPAEnforced: test.coppa == 1} + bidderRequests, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, requestPrivacy, map[string]float64{}) result := bidderRequests[0] assert.Nil(t, errs) @@ -1466,7 +1469,7 @@ func TestCleanOpenRTBRequestsCOPPA(t *testing.T) { assert.NotEqual(t, result.BidRequest.User.BuyerUID, "", test.description+":User.BuyerUID") assert.NotEqual(t, result.BidRequest.User.Yob, int64(0), test.description+":User.Yob") } - assert.Equal(t, test.expectPrivacyLabels, privacyLabels, test.description+":PrivacyLabels") + assert.Equal(t, test.expectPrivacyLabels, requestPrivacy.MakePrivacyLabels(), test.description+":PrivacyLabels") } } @@ -1616,7 +1619,7 @@ func TestCleanOpenRTBRequestsSChain(t *testing.T) { bidderInfo: config.BidderInfos{"appnexus": config.BidderInfo{OpenRTB: &config.OpenRTBInfo{Version: test.ortbVersion}}}, } - bidderRequests, _, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, extRequest, gdpr.SignalNo, false, map[string]float64{}) + bidderRequests, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, extRequest, &RequestPrivacy{}, map[string]float64{}) if test.hasError == true { assert.NotNil(t, errs) assert.Len(t, bidderRequests, 0) @@ -1687,7 +1690,7 @@ func TestCleanOpenRTBRequestsBidderParams(t *testing.T) { bidderInfo: config.BidderInfos{}, } - bidderRequests, _, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, extRequest, gdpr.SignalNo, false, map[string]float64{}) + bidderRequests, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, extRequest, &RequestPrivacy{}, map[string]float64{}) if test.hasError == true { assert.NotNil(t, errs) assert.Len(t, bidderRequests, 0) @@ -2279,7 +2282,9 @@ func TestCleanOpenRTBRequestsLMT(t *testing.T) { bidderInfo: config.BidderInfos{}, } - results, privacyLabels, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, gdpr.SignalNo, false, map[string]float64{}) + lmtEnforcer := extractLMT(req, privacyConfig) + requestPrivacy := &RequestPrivacy{LMTEnforced: lmtEnforcer.ShouldEnforce(unknownBidder)} + results, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, requestPrivacy, map[string]float64{}) result := results[0] assert.Nil(t, errs) @@ -2290,7 +2295,7 @@ func TestCleanOpenRTBRequestsLMT(t *testing.T) { assert.NotEqual(t, result.BidRequest.User.BuyerUID, "", test.description+":User.BuyerUID") assert.NotEqual(t, result.BidRequest.Device.DIDMD5, "", test.description+":Device.DIDMD5") } - assert.Equal(t, test.expectPrivacyLabels, privacyLabels, test.description+":PrivacyLabels") + assert.Equal(t, test.expectPrivacyLabels, requestPrivacy.MakePrivacyLabels(), test.description+":PrivacyLabels") } } @@ -2392,7 +2397,15 @@ func TestCleanOpenRTBRequestsGDPR(t *testing.T) { bidderInfo: config.BidderInfos{}, 
} - results, privacyLabels, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, test.gdprSignal, test.gdprEnforced, map[string]float64{}) + parsedConsent, _ := vendorconsent.ParseString(test.gdprConsent) + + requestPrivacy := &RequestPrivacy{ + GDPRSignal: test.gdprSignal, + GDPREnforced: test.gdprEnforced, + Consent: test.gdprConsent, + ParsedConsent: parsedConsent, + } + results, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, requestPrivacy, map[string]float64{}) result := results[0] if test.expectError { @@ -2410,7 +2423,7 @@ assert.NotEqual(t, result.BidRequest.Device.DIDMD5, "", test.description+":Device.DIDMD5") metricsMock.AssertNotCalled(t, "RecordAdapterBuyerUIDScrubbed", openrtb_ext.BidderAppnexus) } - assert.Equal(t, test.expectPrivacyLabels, privacyLabels, test.description+":PrivacyLabels") + assert.Equal(t, test.expectPrivacyLabels, requestPrivacy.MakePrivacyLabels(), test.description+":PrivacyLabels") } } @@ -2487,7 +2500,11 @@ func TestCleanOpenRTBRequestsGDPRBlockBidRequest(t *testing.T) { bidderInfo: config.BidderInfos{}, } - results, _, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, gdpr.SignalYes, test.gdprEnforced, map[string]float64{}) + requestPrivacy := &RequestPrivacy{ + GDPRSignal: gdpr.SignalYes, + GDPREnforced: test.gdprEnforced, + } + results, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, requestPrivacy, map[string]float64{}) // extract bidder name from each request in the results bidders := []openrtb_ext.BidderName{} @@ -2567,6 +2584,11 @@ func TestCleanOpenRTBRequestsWithOpenRTBDowngrade(t *testing.T) { }, }.Builder + var gpp gpplib.GppContainer + if test.req.BidRequestWrapper.BidRequest.Regs != nil && len(test.req.BidRequestWrapper.BidRequest.Regs.GPP) > 0 { + gpp, _ = gpplib.Parse(test.req.BidRequestWrapper.BidRequest.Regs.GPP) + } + reqSplitter := &requestSplitter{ bidderToSyncerKey: map[string]string{}, me: &metrics.MetricsEngineMock{}, @@ -2575,7 +2597,10 @@ func TestCleanOpenRTBRequestsWithOpenRTBDowngrade(t *testing.T) { hostSChainNode: nil, bidderInfo: test.bidderInfos, } - bidderRequests, _, err := reqSplitter.cleanOpenRTBRequests(context.Background(), test.req, nil, gdpr.SignalNo, false, map[string]float64{}) + requestPrivacy := &RequestPrivacy{ + ParsedGPP: gpp, + } + bidderRequests, err := reqSplitter.cleanOpenRTBRequests(context.Background(), test.req, nil, requestPrivacy, map[string]float64{}) assert.Nil(t, err, "Err should be nil") bidRequest := bidderRequests[0] assert.Equal(t, test.expectRegs, bidRequest.BidRequest.Regs) @@ -3267,7 +3292,7 @@ func TestCleanOpenRTBRequestsSChainMultipleBidders(t *testing.T) { hostSChainNode: nil, bidderInfo: config.BidderInfos{"appnexus": ortb26enabled, "axonix": ortb26enabled}, } - bidderRequests, _, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, extRequest, gdpr.SignalNo, false, map[string]float64{}) + bidderRequests, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, extRequest, &RequestPrivacy{}, map[string]float64{}) assert.Nil(t, errs) assert.Len(t, bidderRequests, 2, "Bid request count is not 2") @@ -3416,7 +3441,7 @@ func TestCleanOpenRTBRequestsBidAdjustment(t *testing.T) { hostSChainNode: nil, bidderInfo: config.BidderInfos{}, } - results, _, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, gdpr.SignalNo, false, test.bidAdjustmentFactor) +
results, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, &RequestPrivacy{}, test.bidAdjustmentFactor) result := results[0] assert.Nil(t, errs) assert.Equal(t, test.expectedImp, result.BidRequest.Imp, test.description) @@ -3888,7 +3913,7 @@ func TestCleanOpenRTBRequestsFilterBidderRequestExt(t *testing.T) { bidderInfo: config.BidderInfos{}, } - bidderRequests, _, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, extRequest, gdpr.SignalNo, false, map[string]float64{}) + bidderRequests, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, extRequest, &RequestPrivacy{}, map[string]float64{}) assert.Equal(t, test.wantError, len(errs) != 0, test.desc) sort.Slice(bidderRequests, func(i, j int) bool { return bidderRequests[i].BidderCoreName < bidderRequests[j].BidderCoreName @@ -4972,7 +4997,7 @@ func TestCleanOpenRTBRequestsActivities(t *testing.T) { bidderInfo: config.BidderInfos{"appnexus": config.BidderInfo{OpenRTB: &config.OpenRTBInfo{Version: test.ortbVersion}}}, } - bidderRequests, _, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, gdpr.SignalNo, false, map[string]float64{}) + bidderRequests, errs := reqSplitter.cleanOpenRTBRequests(context.Background(), auctionReq, nil, &RequestPrivacy{}, map[string]float64{}) assert.Empty(t, errs) assert.Len(t, bidderRequests, test.expectedReqNumber) diff --git a/geolocation/geoinfo.go b/geolocation/geoinfo.go new file mode 100644 index 00000000000..5740bb7093f --- /dev/null +++ b/geolocation/geoinfo.go @@ -0,0 +1,36 @@ +package geolocation + +type GeoInfo struct { + // Name of the geo location data provider. + Vendor string + + // Continent code in two-letter format. + Continent string + + // Country code in ISO-3166-1-alpha-2 format. + Country string + + // Region code in ISO-3166-2 format. + Region string + + // Numeric region code. + RegionCode int + + City string + + // Google Metro code. + MetroGoogle string + + // Nielsen Designated Market Areas (DMA's). + MetroNielsen int + + Zip string + + ConnectionSpeed string + + Lat float64 + + Lon float64 + + TimeZone string +} diff --git a/geolocation/geolocation.go b/geolocation/geolocation.go new file mode 100644 index 00000000000..69a85b1cc33 --- /dev/null +++ b/geolocation/geolocation.go @@ -0,0 +1,43 @@ +package geolocation + +import ( + "context" + "errors" + "time" + + "github.com/prebid/prebid-server/v3/util/timeutil" +) + +var ( + ErrDatabaseUnavailable = errors.New("database is unavailable") + ErrLookupIPInvalid = errors.New("lookup IP is invalid") + ErrLookupTimeout = errors.New("lookup timeout") +) + +// Retrieves geolocation information by IP address. +// +// Provided default implementation - MaxMind +// Each vendor (host company) might provide its own implementation. +type GeoLocation interface { + Lookup(ctx context.Context, ip string) (*GeoInfo, error) +} + +type NilGeoLocation struct{} + +func (g *NilGeoLocation) Lookup(ctx context.Context, ip string) (*GeoInfo, error) { + return &GeoInfo{}, nil +} + +func NewNilGeoLocation() *NilGeoLocation { + return &NilGeoLocation{} +} + +// TimezoneToUTCOffset returns UTC offset of timezone in minutes. 
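+// The offset is derived from time.Now in the named location, so it reflects any daylight saving time in effect at the moment of the call; an error is returned when the timezone name cannot be loaded.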
+func TimezoneToUTCOffset(name string) (int, error) { + loc, err := timeutil.LoadLocation(name) + if err != nil { + return 0, err + } + _, offset := time.Now().In(loc).Zone() + return offset / 60, nil +} diff --git a/geolocation/geolocation_test.go b/geolocation/geolocation_test.go new file mode 100644 index 00000000000..5cda272de2a --- /dev/null +++ b/geolocation/geolocation_test.go @@ -0,0 +1,38 @@ +package geolocation + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTimezoneToUTCOffset(t *testing.T) { + tests := []struct { + timezone string + offset int + failed bool + }{ + {"Asia/Shanghai", 8 * 60, false}, + {"Asia/Tokyo", 9 * 60, false}, + {"UTC", 0, false}, + {"Unknown", 0, true}, + } + + for _, test := range tests { + offset, err := TimezoneToUTCOffset(test.timezone) + if test.failed { + assert.Error(t, err, "timezone %s should be invalid", test.timezone) + } else { + assert.NoError(t, err, "timezone %s should be valid", test.timezone) + assert.Equal(t, test.offset, offset, "timezone %s should have offset minutes %d", test.timezone, test.offset) + } + } +} + +func TestNilGeoLocation(t *testing.T) { + loc := NewNilGeoLocation() + geo, err := loc.Lookup(context.Background(), "") + assert.NoError(t, err, "nil geolocation should not return error") + assert.NotNil(t, geo, "nil geolocation should return empty geo info") +} diff --git a/geolocation/geolocationtest/geolocationtest.go b/geolocation/geolocationtest/geolocationtest.go new file mode 100644 index 00000000000..73fbab4c2ca --- /dev/null +++ b/geolocation/geolocationtest/geolocationtest.go @@ -0,0 +1,44 @@ +package geolocationtest + +import ( + "context" + "errors" + "sync" + + "github.com/prebid/prebid-server/v3/geolocation" +) + +type MockGeoLocation struct { + mu sync.RWMutex + data map[string]*geolocation.GeoInfo +} + +func NewMockGeoLocation(data map[string]*geolocation.GeoInfo) *MockGeoLocation { + if data == nil { + data = make(map[string]*geolocation.GeoInfo) + } + return &MockGeoLocation{ + data: data, + } +} + +func (m *MockGeoLocation) Add(ip string, info *geolocation.GeoInfo) { + m.mu.Lock() + defer m.mu.Unlock() + m.data[ip] = info +} + +func (m *MockGeoLocation) Remove(ip string, info *geolocation.GeoInfo) { + m.mu.Lock() + defer m.mu.Unlock() + delete(m.data, ip) +} + +func (m *MockGeoLocation) Lookup(ctx context.Context, ip string) (*geolocation.GeoInfo, error) { + m.mu.RLock() + defer m.mu.RUnlock() + if info, ok := m.data[ip]; ok { + return info, nil + } + return nil, errors.New("not found") +} diff --git a/geolocation/maxmind/maxmind.go b/geolocation/maxmind/maxmind.go new file mode 100644 index 00000000000..da2ea90dca3 --- /dev/null +++ b/geolocation/maxmind/maxmind.go @@ -0,0 +1,97 @@ +package maxmind + +import ( + "archive/tar" + "bytes" + "compress/gzip" + "context" + "errors" + "io" + "net" + "os" + "sync/atomic" + + "github.com/prebid/prebid-server/v3/geolocation" + + geoip2 "github.com/oschwald/geoip2-golang" +) + +const Vendor = "maxmind" + +const DatabaseFileName = "GeoLite2-City.mmdb" + +// GeoLocation implements the geolocation.GeoLocation interface.
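+// The geoip2 reader is held behind an atomic.Pointer, so Lookup can keep serving requests while SetDataPath swaps in a freshly loaded database.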
+type GeoLocation struct { + reader atomic.Pointer[geoip2.Reader] +} + +func (g *GeoLocation) Lookup(_ context.Context, ipAddress string) (*geolocation.GeoInfo, error) { + ip := net.ParseIP(ipAddress) + if len(ip) == 0 { + return nil, geolocation.ErrLookupIPInvalid + } + + reader := g.reader.Load() + if reader == nil { + return nil, geolocation.ErrDatabaseUnavailable + } + + record, err := reader.City(ip) + if err != nil { + return nil, err + } + + info := &geolocation.GeoInfo{ + Vendor: Vendor, + Continent: record.Continent.Code, + Country: record.Country.IsoCode, + Zip: record.Postal.Code, + Lat: record.Location.Latitude, + Lon: record.Location.Longitude, + TimeZone: record.Location.TimeZone, + } + if len(record.Subdivisions) > 0 { + info.Region = record.Subdivisions[0].IsoCode + } + if len(record.City.Names) > 0 { + info.City = record.City.Names["en"] + } + return info, nil +} + +// SetDataPath loads data and updates the reader. +func (g *GeoLocation) SetDataPath(filepath string) error { + file, err := os.Open(filepath) + if err != nil { + return err + } + defer file.Close() + + gzipReader, err := gzip.NewReader(file) + if err != nil { + return err + } + defer gzipReader.Close() + + tarReader := tar.NewReader(gzipReader) + for { + header, err := tarReader.Next() + // io.EOF and other errors + if err != nil { + return errors.New("failed to read tar file: " + err.Error()) + } + + if header.Name == DatabaseFileName { + buf := new(bytes.Buffer) + if _, err := io.Copy(buf, tarReader); err != nil { + return err + } + reader, err := geoip2.FromBytes(buf.Bytes()) + if err != nil { + return err + } + g.reader.Store(reader) + return nil + } + } +} diff --git a/geolocation/maxmind/maxmind_test.go b/geolocation/maxmind/maxmind_test.go new file mode 100644 index 00000000000..a881fc6fb2e --- /dev/null +++ b/geolocation/maxmind/maxmind_test.go @@ -0,0 +1,147 @@ +package maxmind + +import ( + "context" + "testing" + + "github.com/prebid/prebid-server/v3/geolocation" + + "github.com/stretchr/testify/assert" +) + +// File is only for testing purposes, never used in the production environment. +// File is taken from the official MaxMind repository. 
+// https://github.com/maxmind/MaxMind-DB/blob/main/test-data/GeoLite2-City-Test.mmdb +const testDataPath = "./test-data/GeoLite2-City.tar.gz" + +const ( + testIP = "2.125.160.216" + testIPv6 = "2001:480::" +) + +func TestGeoLocationNoReader(t *testing.T) { + geo := &GeoLocation{} + _, err := geo.Lookup(context.Background(), testIP) + assert.Error(t, err, "should return error if data path is not set") +} + +func TestGeoLocationSetDataPath(t *testing.T) { + geo := &GeoLocation{} + tests := []struct { + name string + path string + failed bool + }{ + { + "File not exists", + "no_file", + true, + }, + { + "File is not a tar.gz archive", + "./test-data/nothing.mmdb", + true, + }, + { + "Archive does not contain GeoLite2-City.mmdb", + "./test-data/nothing.tar.gz", + true, + }, + { + "Archive contains GeoLite2-City.mmdb, but GeoLite2-City.mmdb has bad data", + "./test-data/GeoLite2-City-Bad-Data.tar.gz", + true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := geo.SetDataPath(test.path) + if test.failed { + assert.Error(t, err, "data path %s should return error", test.path) + } else { + assert.NoError(t, err, "data path %s should not return error", test.path) + } + }) + } +} + +func TestGeoLocationLookup(t *testing.T) { + geo := &GeoLocation{} + err := geo.SetDataPath(testDataPath) + assert.NoError(t, err, "geolocation should load data from %s", testDataPath) + + tests := []struct { + name string + ip string + expected *geolocation.GeoInfo + failed bool + }{ + { + "Lookup empty IP", + "", + nil, + true, + }, + { + "Lookup incorrect IP", + "bad ip", + nil, + true, + }, + { + "Lookup valid IPv4", + testIP, + &geolocation.GeoInfo{ + Vendor: Vendor, + Continent: "EU", + Country: "GB", + Region: "ENG", + RegionCode: 0, + City: "Boxford", + Zip: "OX1", + Lat: 51.75, + Lon: -1.25, + TimeZone: "Europe/London", + }, + false, + }, + { + "Lookup valid IPv6", + testIPv6, + &geolocation.GeoInfo{ + Vendor: Vendor, + Continent: "NA", + Country: "US", + Region: "CA", + RegionCode: 0, + City: "San Diego", + Zip: "92101", + Lat: 32.7203, + Lon: -117.1552, + TimeZone: "America/Los_Angeles", + }, + false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + geoInfo, err := geo.Lookup(context.Background(), test.ip) + if test.failed { + assert.Error(t, err, "geolocation lookup should return error. IP: %s", test.ip) + } else { + assert.NoError(t, err, "geolocation lookup should not return error. IP: %s", test.ip) + assert.Equal(t, test.expected, geoInfo, "geolocation should be equal. 
IP: %s", test.ip) + } + }) + } +} + +func TestGeoLocationReaderClosed(t *testing.T) { + geo := &GeoLocation{} + geo.SetDataPath(testDataPath) + geo.reader.Load().Close() + _, err := geo.Lookup(context.Background(), testIP) + assert.Error(t, err, "should return error if reader is closed") +} diff --git a/geolocation/maxmind/test-data/GeoLite2-City-Bad-Data.tar.gz b/geolocation/maxmind/test-data/GeoLite2-City-Bad-Data.tar.gz new file mode 100644 index 00000000000..72831ae2d04 Binary files /dev/null and b/geolocation/maxmind/test-data/GeoLite2-City-Bad-Data.tar.gz differ diff --git a/geolocation/maxmind/test-data/GeoLite2-City.tar.gz b/geolocation/maxmind/test-data/GeoLite2-City.tar.gz new file mode 100644 index 00000000000..f229e56d949 Binary files /dev/null and b/geolocation/maxmind/test-data/GeoLite2-City.tar.gz differ diff --git a/geolocation/maxmind/test-data/nothing.mmdb b/geolocation/maxmind/test-data/nothing.mmdb new file mode 100644 index 00000000000..e69de29bb2d diff --git a/geolocation/maxmind/test-data/nothing.tar.gz b/geolocation/maxmind/test-data/nothing.tar.gz new file mode 100644 index 00000000000..1bc5cf38586 Binary files /dev/null and b/geolocation/maxmind/test-data/nothing.tar.gz differ diff --git a/go.mod b/go.mod index e844fe5bf25..d5c856b0644 100644 --- a/go.mod +++ b/go.mod @@ -24,6 +24,7 @@ require ( github.com/lib/pq v1.10.4 github.com/mitchellh/copystructure v1.2.0 github.com/modern-go/reflect2 v1.0.2 + github.com/oschwald/geoip2-golang v1.11.0 github.com/pkg/errors v0.9.1 github.com/prebid/go-gdpr v1.12.0 github.com/prebid/go-gpp v0.2.0 @@ -33,7 +34,7 @@ require ( github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 github.com/rs/cors v1.11.0 github.com/spf13/viper v1.12.0 - github.com/stretchr/testify v1.8.1 + github.com/stretchr/testify v1.9.0 github.com/vrischmann/go-metrics-influxdb v0.1.1 github.com/xeipuuv/gojsonschema v1.2.0 github.com/yudai/gojsondiff v1.0.0 @@ -58,6 +59,7 @@ require ( github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/oschwald/maxminddb-golang v1.13.1 // indirect github.com/pelletier/go-toml v1.9.5 // indirect github.com/pelletier/go-toml/v2 v2.0.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect @@ -68,7 +70,7 @@ require ( github.com/spf13/cast v1.5.0 // indirect github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/spf13/pflag v1.0.5 // indirect - github.com/stretchr/objx v0.5.0 // indirect + github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.3.0 // indirect github.com/tidwall/gjson v1.17.1 // indirect github.com/tidwall/match v1.1.1 // indirect @@ -79,7 +81,7 @@ require ( github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 // indirect github.com/yudai/pp v2.0.1+incompatible // indirect golang.org/x/crypto v0.21.0 // indirect - golang.org/x/sys v0.18.0 // indirect + golang.org/x/sys v0.21.0 // indirect google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect google.golang.org/protobuf v1.33.0 // indirect gopkg.in/ini.v1 v1.66.4 // indirect diff --git a/go.sum b/go.sum index a6a6226c616..d6867966d4e 100644 --- a/go.sum +++ b/go.sum @@ -386,6 +386,10 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.11.0 h1:+CqWgvj0OZycCaqclBD1pxKHAU+tOkHmQIWvDHq2aug= github.com/onsi/gomega 
v1.11.0/go.mod h1:azGKhqFUon9Vuj0YmTfLSmx0FUwqXYSTl5re8lQLTUg= +github.com/oschwald/geoip2-golang v1.11.0 h1:hNENhCn1Uyzhf9PTmquXENiWS6AlxAEnBII6r8krA3w= +github.com/oschwald/geoip2-golang v1.11.0/go.mod h1:P9zG+54KPEFOliZ29i7SeYZ/GM6tfEL+rgSn03hYuUo= +github.com/oschwald/maxminddb-golang v1.13.1 h1:G3wwjdN9JmIK2o/ermkHM+98oX5fS+k5MbwsmL4MRQE= +github.com/oschwald/maxminddb-golang v1.13.1/go.mod h1:K4pgV9N/GcK694KSTmVSDTODk4IsCNThNdTmnaBZ/F8= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= @@ -474,8 +478,9 @@ github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiu github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -484,8 +489,9 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/subosito/gotenv v1.3.0 h1:mjC+YW8QpAdXibNi+vNWgzmgBH4+5l5dCXv8cNysBLI= github.com/subosito/gotenv v1.3.0/go.mod h1:YzJjq/33h7nrwdY+iHMhEOEEbW0ovIz0tB6t6PwAXzs= @@ -744,8 +750,8 @@ golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= -golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= +golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term 
v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= diff --git a/main.go b/main.go index 0063b4ee0b6..48ce8456ba9 100644 --- a/main.go +++ b/main.go @@ -1,6 +1,7 @@ package main import ( + _ "embed" "flag" "net/http" "path/filepath" @@ -9,6 +10,7 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prebid/prebid-server/v3/config" + "github.com/prebid/prebid-server/v3/config/countrycode" "github.com/prebid/prebid-server/v3/currency" "github.com/prebid/prebid-server/v3/openrtb_ext" "github.com/prebid/prebid-server/v3/router" @@ -55,10 +57,17 @@ func main() { } } +// CountryCodeData will be packaged into the binary file at compile time. +// +//go:embed country-codes.csv +var CountryCodeData string + const configFileName = "pbs" const infoDirectory = "./static/bidder-info" func loadConfig(bidderInfos config.BidderInfos) (*config.Configuration, error) { + countrycode.Load(CountryCodeData) + v := viper.New() config.SetupViper(v, configFileName, bidderInfos) return config.New(v, bidderInfos, openrtb_ext.NormalizeBidderName) diff --git a/main_test.go b/main_test.go index 494e6d67746..06dc87fad6e 100644 --- a/main_test.go +++ b/main_test.go @@ -65,3 +65,7 @@ func TestViperEnv(t *testing.T) { assert.Equal(t, 60, v.Get("host_cookie.ttl_days"), "Config With Underscores") assert.ElementsMatch(t, []string{"1.1.1.1/24", "2.2.2.2/24"}, v.Get("request_validation.ipv4_private_networks"), "Arrays") } + +func TestCountryCodeData(t *testing.T) { + assert.NotEqual(t, "", CountryCodeData, "CountryCodeData should not be empty") +} diff --git a/metrics/config/metrics.go b/metrics/config/metrics.go index dcb330c47f9..5859a1844bb 100644 --- a/metrics/config/metrics.go +++ b/metrics/config/metrics.go @@ -371,6 +371,12 @@ func (me *MultiMetricsEngine) RecordModuleTimeout(labels metrics.ModuleLabels) { } } +func (me *MultiMetricsEngine) RecordGeoLocationRequest(success bool) { + for _, thisME := range *me { + thisME.RecordGeoLocationRequest(success) + } +} + // NilMetricsEngine implements the MetricsEngine interface where no metrics are actually captured. This is // used if no metric backend is configured and also for tests. 
type NilMetricsEngine struct{} @@ -546,3 +552,6 @@ func (me *NilMetricsEngine) RecordModuleExecutionError(labels metrics.ModuleLabe func (me *NilMetricsEngine) RecordModuleTimeout(labels metrics.ModuleLabels) { } + +func (me *NilMetricsEngine) RecordGeoLocationRequest(success bool) { +} diff --git a/metrics/go_metrics.go b/metrics/go_metrics.go index 428b1891e69..485aebf4245 100644 --- a/metrics/go_metrics.go +++ b/metrics/go_metrics.go @@ -82,6 +82,10 @@ type Metrics struct { // Module metrics ModuleMetrics map[string]map[string]*ModuleMetrics + // GeoLocation metrics + GeoLocationRequestsSuccess metrics.Meter + GeoLocationRequestsFailure metrics.Meter + OverheadTimer map[OverheadType]metrics.Timer } @@ -206,6 +210,9 @@ func NewBlankMetrics(registry metrics.Registry, exchanges []string, disabledMetr ModuleMetrics: make(map[string]map[string]*ModuleMetrics), + GeoLocationRequestsSuccess: blankMeter, + GeoLocationRequestsFailure: blankMeter, + exchanges: exchanges, modules: getModuleNames(moduleStageNames), @@ -376,6 +383,9 @@ func NewMetrics(registry metrics.Registry, exchanges []openrtb_ext.BidderName, d registerModuleMetrics(registry, module, stages, newMetrics.ModuleMetrics[module]) } + newMetrics.GeoLocationRequestsSuccess = metrics.GetOrRegisterMeter("geolocation_requests.ok", registry) + newMetrics.GeoLocationRequestsFailure = metrics.GetOrRegisterMeter("geolocation_requests.failed", registry) + return newMetrics } @@ -1164,3 +1174,11 @@ func (me *Metrics) getModuleMetric(labels ModuleLabels) (*ModuleMetrics, error) return mm, nil } + +func (me *Metrics) RecordGeoLocationRequest(success bool) { + if success { + me.GeoLocationRequestsSuccess.Mark(1) + } else { + me.GeoLocationRequestsFailure.Mark(1) + } +} diff --git a/metrics/go_metrics_test.go b/metrics/go_metrics_test.go index 791b4ee0ec8..90801f95ba6 100644 --- a/metrics/go_metrics_test.go +++ b/metrics/go_metrics_test.go @@ -91,6 +91,9 @@ func TestNewMetrics(t *testing.T) { ensureContainsModuleMetrics(t, registry, fmt.Sprintf("modules.module.%s.stage.%s", module, stage), m.ModuleMetrics[module][stage]) } } + + ensureContains(t, registry, "geolocation_requests.ok", m.GeoLocationRequestsSuccess) + ensureContains(t, registry, "geolocation_requests.failed", m.GeoLocationRequestsFailure) } func TestRecordBidType(t *testing.T) { @@ -1312,3 +1315,35 @@ func TestRecordAdapterRequest(t *testing.T) { }) } } + +func TestRecordGeoLocationRequestMetric(t *testing.T) { + testCases := []struct { + description string + requestSuccess bool + expectedSuccessRequestsCount int64 + expectedFailedRequestsCount int64 + }{ + { + description: "Record GeoLocation failed request, expected success request count is 0 and failed request count is 1", + requestSuccess: false, + expectedSuccessRequestsCount: 0, + expectedFailedRequestsCount: 1, + }, + { + description: "Record GeoLocation successful request, expected success request count is 1 and failed request count is 0", + requestSuccess: true, + expectedSuccessRequestsCount: 1, + expectedFailedRequestsCount: 0, + }, + } + + for _, test := range testCases { + registry := metrics.NewRegistry() + m := NewMetrics(registry, []openrtb_ext.BidderName{openrtb_ext.BidderName("AnyName")}, config.DisabledMetrics{}, nil, nil) + + m.RecordGeoLocationRequest(test.requestSuccess) + + assert.Equal(t, test.expectedSuccessRequestsCount, m.GeoLocationRequestsSuccess.Count(), test.description) + assert.Equal(t, test.expectedFailedRequestsCount, m.GeoLocationRequestsFailure.Count(), test.description) + } +} diff --git 
a/metrics/metrics.go b/metrics/metrics.go index 45484cab7fd..4b6ae528297 100644 --- a/metrics/metrics.go +++ b/metrics/metrics.go @@ -472,4 +472,5 @@ type MetricsEngine interface { RecordModuleSuccessRejected(labels ModuleLabels) RecordModuleExecutionError(labels ModuleLabels) RecordModuleTimeout(labels ModuleLabels) + RecordGeoLocationRequest(success bool) } diff --git a/metrics/metrics_mock.go b/metrics/metrics_mock.go index e979c6db72e..474683cfca4 100644 --- a/metrics/metrics_mock.go +++ b/metrics/metrics_mock.go @@ -226,3 +226,7 @@ func (me *MetricsEngineMock) RecordModuleExecutionError(labels ModuleLabels) { func (me *MetricsEngineMock) RecordModuleTimeout(labels ModuleLabels) { me.Called(labels) } + +func (me *MetricsEngineMock) RecordGeoLocationRequest(success bool) { + me.Called(success) +} diff --git a/metrics/prometheus/preload.go b/metrics/prometheus/preload.go index c59dfeece02..53ea0885c12 100644 --- a/metrics/prometheus/preload.go +++ b/metrics/prometheus/preload.go @@ -172,6 +172,10 @@ func preloadLabelValues(m *Metrics, syncerKeys []string, moduleStageNames map[st successLabel: boolValues, }) + preloadLabelValuesForCounter(m.geoLocationRequests, map[string][]string{ + successLabel: boolValues, + }) + if !m.metricsDisabled.AdapterConnectionMetrics { preloadLabelValuesForCounter(m.adapterCreatedConnections, map[string][]string{ adapterLabel: adapterValues, diff --git a/metrics/prometheus/prometheus.go b/metrics/prometheus/prometheus.go index 7273a97f07d..f5404b1e11d 100644 --- a/metrics/prometheus/prometheus.go +++ b/metrics/prometheus/prometheus.go @@ -58,6 +58,7 @@ type Metrics struct { adsCertRequests *prometheus.CounterVec adsCertSignTimer prometheus.Histogram bidderServerResponseTimer prometheus.Histogram + geoLocationRequests *prometheus.CounterVec // Adapter Metrics adapterBids *prometheus.CounterVec @@ -507,6 +508,11 @@ func NewMetrics(cfg config.PrometheusMetrics, disabledMetrics config.DisabledMet createModulesMetrics(cfg, reg, &metrics, moduleStageNames, standardTimeBuckets) + metrics.geoLocationRequests = newCounter(cfg, reg, + "geolocation_requests", + "Count of geolocation requests, and whether they were successful.", + []string{successLabel}) + metrics.Gatherer = reg metricsPrefix := "" @@ -1089,3 +1095,15 @@ func (m *Metrics) RecordModuleTimeout(labels metrics.ModuleLabels) { stageLabel: labels.Stage, }).Inc() } + +func (m *Metrics) RecordGeoLocationRequest(success bool) { + if success { + m.geoLocationRequests.With(prometheus.Labels{ + successLabel: requestSuccessful, + }).Inc() + } else { + m.geoLocationRequests.With(prometheus.Labels{ + successLabel: requestFailed, + }).Inc() + } +} diff --git a/metrics/prometheus/prometheus_test.go b/metrics/prometheus/prometheus_test.go index 3ec5969bb6f..7cec1fa7469 100644 --- a/metrics/prometheus/prometheus_test.go +++ b/metrics/prometheus/prometheus_test.go @@ -2028,3 +2028,32 @@ func TestRecordModuleMetrics(t *testing.T) { } } } + +func TestRecordGeoLocationRequestMetric(t *testing.T) { + testCases := []struct { + description string + requestSuccess bool + expectedSuccessRequestsCount float64 + expectedFailedRequestsCount float64 + }{ + { + description: "Record failed request, expected success request count is 0 and failed request count is 1", + requestSuccess: false, + expectedSuccessRequestsCount: 0, + expectedFailedRequestsCount: 1, + }, + { + description: "Record successful request, expected success request count is 1 and failed request count is 0", + requestSuccess: true, + expectedSuccessRequestsCount: 1, +
expectedFailedRequestsCount: 0, + }, + } + + for _, test := range testCases { + m := createMetricsForTesting() + m.RecordGeoLocationRequest(test.requestSuccess) + assertCounterVecValue(t, test.description, "successfully geolocation requests", m.geoLocationRequests, test.expectedSuccessRequestsCount, prometheus.Labels{successLabel: requestSuccessful}) + assertCounterVecValue(t, test.description, "unsuccessfully geolocation requests", m.geoLocationRequests, test.expectedFailedRequestsCount, prometheus.Labels{successLabel: requestFailed}) + } +} diff --git a/privacy/scrubber.go b/privacy/scrubber.go index 8f4a9b103f6..8ea35e24b7d 100644 --- a/privacy/scrubber.go +++ b/privacy/scrubber.go @@ -110,8 +110,8 @@ func scrubGeoFull(reqWrapper *openrtb_ext.RequestWrapper) { func scrubDeviceIP(reqWrapper *openrtb_ext.RequestWrapper, ipConf IPConf) { if reqWrapper.Device != nil { - reqWrapper.Device.IP = scrubIP(reqWrapper.Device.IP, ipConf.IPV4.AnonKeepBits, iputil.IPv4BitSize) - reqWrapper.Device.IPv6 = scrubIP(reqWrapper.Device.IPv6, ipConf.IPV6.AnonKeepBits, iputil.IPv6BitSize) + reqWrapper.Device.IP = ScrubIP(reqWrapper.Device.IP, ipConf.IPV4.AnonKeepBits, iputil.IPv4BitSize) + reqWrapper.Device.IPv6 = ScrubIP(reqWrapper.Device.IPv6, ipConf.IPV6.AnonKeepBits, iputil.IPv6BitSize) } } @@ -146,7 +146,7 @@ func ScrubGeoAndDeviceIP(reqWrapper *openrtb_ext.RequestWrapper, ipConf IPConf) scrubGEO(reqWrapper) } -func scrubIP(ip string, ones, bits int) string { +func ScrubIP(ip string, ones, bits int) string { if ip == "" { return "" } diff --git a/privacy/scrubber_test.go b/privacy/scrubber_test.go index ccb065bccbc..60a1d7e3a3c 100644 --- a/privacy/scrubber_test.go +++ b/privacy/scrubber_test.go @@ -409,7 +409,7 @@ func TestScrubIP(t *testing.T) { for _, test := range testCases { t.Run(test.IP, func(t *testing.T) { // bits: ipv6 - 128, ipv4 - 32 - result := scrubIP(test.IP, test.maskBits, test.bits) + result := ScrubIP(test.IP, test.maskBits, test.bits) assert.Equal(t, test.cleanedIP, result) }) } diff --git a/router/router.go b/router/router.go index d412b5152e2..036ae89e324 100644 --- a/router/router.go +++ b/router/router.go @@ -23,6 +23,8 @@ import ( "github.com/prebid/prebid-server/v3/experiment/adscert" "github.com/prebid/prebid-server/v3/floors" "github.com/prebid/prebid-server/v3/gdpr" + "github.com/prebid/prebid-server/v3/geolocation" + "github.com/prebid/prebid-server/v3/geolocation/maxmind" "github.com/prebid/prebid-server/v3/hooks" "github.com/prebid/prebid-server/v3/macros" "github.com/prebid/prebid-server/v3/metrics" @@ -38,6 +40,7 @@ import ( storedRequestsConf "github.com/prebid/prebid-server/v3/stored_requests/config" "github.com/prebid/prebid-server/v3/usersync" "github.com/prebid/prebid-server/v3/util/jsonutil" + "github.com/prebid/prebid-server/v3/util/remotefilesyncer" "github.com/prebid/prebid-server/v3/util/uuidutil" "github.com/prebid/prebid-server/v3/version" @@ -230,10 +233,46 @@ func New(cfg *config.Configuration, rateConvertor *currency.RateConverter) (r *R requestValidator := ortb.NewRequestValidator(activeBidders, disabledBidders, paramsValidator) priceFloorFetcher := floors.NewPriceFloorFetcher(cfg.PriceFloors, floorFechterHttpClient, r.MetricsEngine) + var geolocationService geolocation.GeoLocation + if cfg.GeoLocation.Enabled { + switch cfg.GeoLocation.Type { + case maxmind.Vendor: + maxmindGeo := &maxmind.GeoLocation{} + maxmindSyncerHttpClient := &http.Client{ + Transport: &http.Transport{ + Proxy: http.ProxyFromEnvironment, + MaxConnsPerHost: 
cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.MaxConnsPerHost, + MaxIdleConns: cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.MaxIdleConns, + MaxIdleConnsPerHost: cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.MaxIdleConnsPerHost, + IdleConnTimeout: time.Duration(cfg.GeoLocation.Maxmind.RemoteFileSyncer.HttpClient.IdleConnTimeout) * time.Second, + }, + } + maxmindSyncer, err := remotefilesyncer.NewRemoteFileSyncer(remotefilesyncer.Options{ + Processor: maxmindGeo, + Client: maxmindSyncerHttpClient, + DownloadURL: cfg.GeoLocation.Maxmind.RemoteFileSyncer.DownloadURL, + SaveFilePath: cfg.GeoLocation.Maxmind.RemoteFileSyncer.SaveFilePath, + TmpFilePath: cfg.GeoLocation.Maxmind.RemoteFileSyncer.TmpFilePath, + RetryCount: cfg.GeoLocation.Maxmind.RemoteFileSyncer.RetryCount, + RetryInterval: time.Duration(cfg.GeoLocation.Maxmind.RemoteFileSyncer.RetryIntervalMillis) * time.Millisecond, + Timeout: time.Duration(cfg.GeoLocation.Maxmind.RemoteFileSyncer.TimeoutMillis) * time.Millisecond, + UpdateInterval: time.Duration(cfg.GeoLocation.Maxmind.RemoteFileSyncer.UpdateIntervalMillis) * time.Millisecond, + }) + if err != nil { + return nil, err + } + _ = maxmindSyncer.Start() + geolocationService = maxmindGeo + default: + return nil, fmt.Errorf("Unknown geolocation type: %s", cfg.GeoLocation.Type) + } + } + geolocationResolver := exchange.NewGeoLocationResolver(geolocationService, r.MetricsEngine) + tmaxAdjustments := exchange.ProcessTMaxAdjustments(cfg.TmaxAdjustments) planBuilder := hooks.NewExecutionPlanBuilder(cfg.Hooks, repo) macroReplacer := macros.NewStringIndexBasedReplacer() - theExchange := exchange.NewExchange(adapters, cacheClient, cfg, requestValidator, syncersByBidder, r.MetricsEngine, cfg.BidderInfos, gdprPermsBuilder, rateConvertor, categoriesFetcher, adsCertSigner, macroReplacer, priceFloorFetcher) + theExchange := exchange.NewExchange(adapters, cacheClient, cfg, requestValidator, syncersByBidder, r.MetricsEngine, cfg.BidderInfos, gdprPermsBuilder, rateConvertor, categoriesFetcher, adsCertSigner, macroReplacer, priceFloorFetcher, geolocationResolver) var uuidGenerator uuidutil.UUIDRandomGenerator openrtbEndpoint, err := openrtb2.NewEndpoint(uuidGenerator, theExchange, requestValidator, fetcher, accounts, cfg, r.MetricsEngine, analyticsRunner, disabledBidders, defReqJSON, activeBidders, storedRespFetcher, planBuilder, tmaxAdjustments) if err != nil { diff --git a/util/remotefilesyncer/remotefilesyncer.go b/util/remotefilesyncer/remotefilesyncer.go new file mode 100644 index 00000000000..d5b551febaa --- /dev/null +++ b/util/remotefilesyncer/remotefilesyncer.go @@ -0,0 +1,284 @@ +package remotefilesyncer + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "sync/atomic" + "time" + + "github.com/prebid/prebid-server/v3/util/task" + + "github.com/golang/glog" +) + +var ( + ErrSyncInProgress = errors.New("sync in progress") +) + +type RemoteFileProcessor interface { + SetDataPath(datapath string) error +} + +type HTTPClient interface { + Do(req *http.Request) (*http.Response, error) +} + +type Options struct { + Processor RemoteFileProcessor + Client HTTPClient + DownloadURL string + SaveFilePath string + TmpFilePath string + RetryCount int + RetryInterval time.Duration + Timeout time.Duration + UpdateInterval time.Duration +} + +func (o Options) Validate() error { + if o.Processor == nil || o.Client == nil { + return fmt.Errorf("processor and client must not be nil") + } + if o.DownloadURL == "" || o.SaveFilePath == "" || o.TmpFilePath == 
"" { + return fmt.Errorf("downloadURL, saveFilePath and tmpFilePath must not be empty") + } + if o.RetryCount < 0 || o.RetryInterval < 0 { + return fmt.Errorf("retryCount and retryInterval must not be negative") + } + if o.Timeout < 0 || o.UpdateInterval < 0 { + return fmt.Errorf("timeout and updateInterval must not be negative") + } + return nil +} + +type RemoteFileSyncer struct { + ttask *task.TickerTask + done chan struct{} + processor RemoteFileProcessor + client HTTPClient + syncing atomic.Bool + downloadURL string + saveFilePath string + tmpFilePath string + retryCount int + retryInterval time.Duration + timeout time.Duration + updateInterval time.Duration +} + +func NewRemoteFileSyncer(opt Options) (*RemoteFileSyncer, error) { + if err := opt.Validate(); err != nil { + return nil, err + } + if err := createAndCheckWritePermissionsFor(opt.SaveFilePath); err != nil { + return nil, err + } + if err := createAndCheckWritePermissionsFor(opt.TmpFilePath); err != nil { + return nil, err + } + + syncer := &RemoteFileSyncer{ + done: make(chan struct{}), + processor: opt.Processor, + client: opt.Client, + downloadURL: opt.DownloadURL, + saveFilePath: opt.SaveFilePath, + tmpFilePath: opt.TmpFilePath, + retryCount: opt.RetryCount, + retryInterval: opt.RetryInterval, + timeout: opt.Timeout, + updateInterval: opt.UpdateInterval, + } + syncer.ttask = task.NewTickerTaskWithOptions(task.Options{ + Interval: opt.UpdateInterval, + Runner: task.NewFuncRunner(func() error { + err := syncer.updateIfNeeded() + if err != nil { + glog.Errorf("updateIfNeeded error: %v", err) + } + return nil + }), + SkipInitialRun: true, + }) + return syncer, nil +} + +// Start sync now and starts a ticker to sync the file periodically. +func (s *RemoteFileSyncer) Start() error { + if _, err := os.Stat(s.saveFilePath); errors.Is(err, os.ErrNotExist) { + errRun := s.run() + if errRun != nil { + glog.Errorf("run error: %v", errRun) + return errRun + } + } else { + errPSF := s.processSavedFile() + if errPSF != nil { + glog.Errorf("process saved file error: %v", errPSF) + return errPSF + } + } + + s.ttask.Start() + + return nil +} + +// Stop the ticker and close the done channel. +func (s *RemoteFileSyncer) Stop() { + if s.ttask != nil { + s.ttask.Stop() + } + close(s.done) +} + +// run starts the job. +// there is only one syncing process allowed at a time. 
+func (s *RemoteFileSyncer) run() error {
+    if !s.syncing.CompareAndSwap(false, true) {
+        return ErrSyncInProgress
+    }
+    defer s.syncing.Store(false)
+
+    for retries := 0; ; retries++ {
+        err := s.sync()
+        if err == nil {
+            if errPSF := s.processSavedFile(); errPSF == nil {
+                break
+            } else {
+                glog.Infof("process saved file error: %v", errPSF)
+            }
+        } else {
+            glog.Infof("sync file error: %v", err)
+        }
+
+        if retries >= s.retryCount {
+            return fmt.Errorf("sync file max retries exceeded (%d)", s.retryCount)
+        }
+
+        select {
+        case <-time.After(s.retryInterval):
+            continue
+        case <-s.done:
+            return errors.New("sync file stopped")
+        }
+    }
+
+    return nil
+}
+
+func (s *RemoteFileSyncer) sync() error {
+    err := downloadFileFromURL(s.client, s.downloadURL, s.tmpFilePath, s.timeout)
+    if err != nil {
+        return err
+    }
+
+    err = os.Rename(s.tmpFilePath, s.saveFilePath)
+    if err != nil {
+        _ = os.Remove(s.tmpFilePath)
+        return err
+    }
+    return nil
+}
+
+func (s *RemoteFileSyncer) processSavedFile() error {
+    if err := s.processor.SetDataPath(s.saveFilePath); err != nil {
+        _ = os.Remove(s.saveFilePath)
+        return err
+    }
+    return nil
+}
+
+func (s *RemoteFileSyncer) updateIfNeeded() error {
+    fileinfo, err := os.Stat(s.saveFilePath)
+    if err != nil {
+        if errors.Is(err, os.ErrNotExist) {
+            return s.run()
+        }
+        // Propagate unexpected stat errors instead of dereferencing a nil FileInfo.
+        return err
+    }
+
+    remoteSize, err := remoteFileSize(s.client, s.downloadURL, s.timeout)
+    if err != nil {
+        return err
+    }
+    if remoteSize != fileinfo.Size() {
+        return s.run()
+    }
+    return nil
+}
+
+func createAndCheckWritePermissionsFor(datapath string) error {
+    dir := filepath.Dir(datapath)
+    if err := os.MkdirAll(dir, os.ModePerm); err != nil {
+        return err
+    }
+
+    temp, err := os.CreateTemp(dir, "permission_test_")
+    if err != nil {
+        return fmt.Errorf("no write permission in directory: %v", err)
+    }
+    defer os.Remove(temp.Name())
+    defer temp.Close()
+
+    r, err := os.Open(temp.Name())
+    if err != nil {
+        return fmt.Errorf("no read permission in directory: %v", err)
+    }
+    defer r.Close()
+
+    return nil
+}
+
+// downloadFileFromURL downloads a file to datapath, overwriting it if it already exists.
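+// A minimal illustrative call (URL, path and timeout values are hypothetical):
+//
+//	client := &http.Client{}
+//	err := downloadFileFromURL(client, "https://example.com/GeoLite2-City.tar.gz",
+//		"/var/tmp/prebid/tmp/GeoLite2-City.tar.gz", 5*time.Minute)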
+func downloadFileFromURL(client HTTPClient, url string, datapath string, timeout time.Duration) error { + ctx, cancel := context.WithTimeout(context.Background(), timeout) + defer cancel() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return err + } + resp, err := client.Do(req) + if err != nil { + return err + } + defer func() { + if resp.Body != nil { + _, _ = io.ReadAll(resp.Body) + resp.Body.Close() + } + }() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("bad status code: %d", resp.StatusCode) + } + + output, err := os.Create(datapath) + if err != nil { + return err + } + defer output.Close() + + if _, err := io.Copy(output, resp.Body); err != nil { + return err + } + return nil +} + +func remoteFileSize(client HTTPClient, url string, timeout time.Duration) (int64, error) { + ctx, cancel := context.WithTimeout(context.Background(), timeout) + defer cancel() + + req, err := http.NewRequestWithContext(ctx, http.MethodHead, url, nil) + if err != nil { + return 0, err + } + resp, err := client.Do(req) + if err != nil { + return 0, err + } + return resp.ContentLength, nil +} diff --git a/util/remotefilesyncer/remotefilesyncer_test.go b/util/remotefilesyncer/remotefilesyncer_test.go new file mode 100644 index 00000000000..bbffb04e465 --- /dev/null +++ b/util/remotefilesyncer/remotefilesyncer_test.go @@ -0,0 +1,915 @@ +package remotefilesyncer + +import ( + "bytes" + "io" + "net/http" + "os" + "path/filepath" + "sync/atomic" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +type testRemoteFileProcessor struct { + setDataPath func(datapath string) error +} + +func (p *testRemoteFileProcessor) SetDataPath(datapath string) error { + return p.setDataPath(datapath) +} + +func makeTestRemoteFileProcessor(setDataPath func(datapath string) error) RemoteFileProcessor { + return &testRemoteFileProcessor{setDataPath: setDataPath} +} + +func makeTestRemoteFileProcessorOK() RemoteFileProcessor { + return makeTestRemoteFileProcessor(func(datapath string) error { + return nil + }) +} + +type testHTTPClient struct { + do func(req *http.Request) (*http.Response, error) +} + +func (c *testHTTPClient) Do(req *http.Request) (*http.Response, error) { + return c.do(req) +} + +func makeTestHTTPClient(do func(req *http.Request) (*http.Response, error)) HTTPClient { + return &testHTTPClient{do: do} +} + +func makeTestHTTPClient200() HTTPClient { + return makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewReader([]byte("imdata"))), + }, nil + }) +} + +func createFile(datapath string, content string) { + file, _ := os.Create(datapath) + file.Write([]byte(content)) + file.Close() +} + +func createTempFile(dir string, content string) (string, error) { + file, err := os.CreateTemp(dir, "") + if err != nil { + return "", err + } + defer file.Close() + file.Write([]byte(content)) + return file.Name(), nil +} + +func assertFileContent(t *testing.T, datapath string, content string) { + file, err := os.Open(datapath) + assert.NoError(t, err, "File should exist") + defer file.Close() + buf := new(bytes.Buffer) + io.Copy(buf, file) + assert.Equal(t, content, buf.String(), "File content should be correct") +} + +func TestOptionsValidate(t *testing.T) { + dir := t.TempDir() + + tests := []struct { + processor RemoteFileProcessor + client HTTPClient + downloadURL string + saveFilePath string + tmpFilePath string + retryCount int + retryInterval time.Duration + 
timeout time.Duration + updateInterval time.Duration + hasError bool + }{ + { + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(dir, "foo"), + filepath.Join(dir, "tmp"), + 0, + 10 * time.Second, + 10 * time.Second, + 0, + false, + }, + { + nil, + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(dir, "foo"), + filepath.Join(dir, "tmp"), + 0, + 10 * time.Second, + 10 * time.Second, + 0, + true, + }, + { + makeTestRemoteFileProcessorOK(), + nil, + "http://example.com", + filepath.Join(dir, "foo"), + filepath.Join(dir, "tmp"), + 0, + 10 * time.Second, + 10 * time.Second, + 0, + true, + }, + { + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "", + filepath.Join(dir, "foo"), + filepath.Join(dir, "tmp"), + 0, + 10 * time.Second, + 10 * time.Second, + 0, + true, + }, + { + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + "", + filepath.Join(dir, "tmp"), + 0, + 10 * time.Second, + 10 * time.Second, + 0, + true, + }, + { + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(dir, "foo"), + "", + 0, + 10 * time.Second, + 10 * time.Second, + 0, + true, + }, + { + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(dir, "foo"), + filepath.Join(dir, "tmp"), + -1, + 10 * time.Second, + 10 * time.Second, + 0, + true, + }, + { + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(dir, "foo"), + filepath.Join(dir, "tmp"), + 0, + -10 * time.Second, + 10 * time.Second, + 0, + true, + }, + { + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(dir, "foo"), + filepath.Join(dir, "tmp"), + 0, + 10 * time.Second, + -10 * time.Second, + 0, + true, + }, + { + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(dir, "foo"), + filepath.Join(dir, "tmp"), + 0, + 10 * time.Second, + 10 * time.Second, + -1, + true, + }, + } + + for _, test := range tests { + t.Run("OptionsValidate", func(t *testing.T) { + opts := Options{ + Processor: test.processor, + Client: test.client, + DownloadURL: test.downloadURL, + SaveFilePath: test.saveFilePath, + TmpFilePath: test.tmpFilePath, + RetryCount: test.retryCount, + RetryInterval: test.retryInterval, + Timeout: test.timeout, + UpdateInterval: test.updateInterval, + } + err := opts.Validate() + if test.hasError { + assert.Error(t, err, "Options.Validate() should return error") + } else { + assert.NoError(t, err, "Options.Validate() should not return error. 
Error: %v", err) + } + }) + } +} + +func TestNewRemoteFileSyncer(t *testing.T) { + dir := t.TempDir() + readdir := filepath.Join(dir, "readonly") + os.MkdirAll(readdir, 0555) + + tests := []struct { + name string + processor RemoteFileProcessor + client HTTPClient + downloadURL string + saveFilePath string + tmpFilePath string + retryCount int + retryInterval time.Duration + timeout time.Duration + updateInterval time.Duration + hasError bool + }{ + { + "New syncer, successful", + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(dir, "foo"), + filepath.Join(dir, "tmp"), + 0, + 10 * time.Second, + 10 * time.Second, + 0, + false, + }, + { + "New syncer, invalid options", + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(dir, "foo"), + filepath.Join(dir, "tmp"), + 0, + 10 * time.Second, + 10 * time.Second, + -1, + true, + }, + { + "New syncer, read-only save file path", + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(readdir, "foo"), + filepath.Join(dir, "tmp"), + 0, + 10 * time.Second, + 10 * time.Second, + 0, + true, + }, + { + "New syncer, read-only tmp file path", + makeTestRemoteFileProcessorOK(), + makeTestHTTPClient200(), + "http://example.com", + filepath.Join(dir, "foo"), + filepath.Join(readdir, "tmp"), + 0, + 10 * time.Second, + 10 * time.Second, + 0, + true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + opts := Options{ + Processor: test.processor, + Client: test.client, + DownloadURL: test.downloadURL, + SaveFilePath: test.saveFilePath, + TmpFilePath: test.tmpFilePath, + RetryCount: test.retryCount, + RetryInterval: test.retryInterval, + Timeout: test.timeout, + UpdateInterval: test.updateInterval, + } + syncer, err := NewRemoteFileSyncer(opts) + if test.hasError { + assert.Error(t, err, "NewRemoteFileSyncer should return error") + } else { + assert.NoError(t, err, "NewRemoteFileSyncer should not return error. 
Error: %v", err) + assert.NotNil(t, syncer.ttask) + } + }) + } +} + +func TestRemoteFileSyncerStart(t *testing.T) { + const filecontent = "imdata" + var ( + processorCalled int64 = 0 + clientHeadCalled int64 = 0 + clientGetCalled int64 = 0 + ) + syncer, _ := NewRemoteFileSyncer(Options{ + Processor: makeTestRemoteFileProcessor(func(datapath string) error { + atomic.AddInt64(&processorCalled, 1) + return nil + }), + Client: makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + switch req.Method { + case http.MethodHead: + atomic.AddInt64(&clientHeadCalled, 1) + return &http.Response{ + StatusCode: 200, + ContentLength: int64(len(filecontent)), + }, nil + case http.MethodGet: + atomic.AddInt64(&clientGetCalled, 1) + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewReader([]byte(filecontent))), + }, nil + } + return nil, assert.AnError + }), + DownloadURL: "http://example.com", + SaveFilePath: filepath.Join(t.TempDir(), "foo"), + TmpFilePath: filepath.Join(t.TempDir(), "tmp"), + RetryCount: 0, + RetryInterval: 10 * time.Second, + Timeout: 10 * time.Second, + UpdateInterval: 10 * time.Millisecond, + }) + err := syncer.Start() + defer syncer.Stop() + + // wait for updating to be checked + <-time.After(20 * time.Millisecond) + + assert.NoError(t, err) + assert.Equal(t, int64(1), atomic.LoadInt64(&processorCalled), "Processor should be called once") + assert.Equal(t, int64(1), atomic.LoadInt64(&clientGetCalled), "HTTPClient should be called once for GET") + assert.True(t, atomic.LoadInt64(&clientHeadCalled) >= 1, "HTTPClient should be called at least once for HEAD") + assertFileContent(t, syncer.saveFilePath, filecontent) +} + +func TestRemoteFileSyncerStartIsSyncing(t *testing.T) { + syncer, _ := NewRemoteFileSyncer(Options{ + Processor: makeTestRemoteFileProcessorOK(), + Client: makeTestHTTPClient200(), + DownloadURL: "http://example.com", + SaveFilePath: filepath.Join(t.TempDir(), "foo"), + TmpFilePath: filepath.Join(t.TempDir(), "tmp"), + RetryCount: 0, + RetryInterval: 10 * time.Second, + Timeout: 10 * time.Second, + UpdateInterval: 0, + }) + syncer.syncing.Store(true) // Set syncing to true to know run called + err := syncer.Start() + defer syncer.Stop() + + assert.Equal(t, ErrSyncInProgress, err) + assert.NoFileExists(t, syncer.saveFilePath) +} + +func TestRemoteFileSyncerStartFileExists(t *testing.T) { + dir := t.TempDir() + datapath, _ := createTempFile(dir, "imdata") + + var ( + processorCalled int64 = 0 + clientCalled int64 = 0 + ) + syncer, _ := NewRemoteFileSyncer(Options{ + Processor: makeTestRemoteFileProcessor(func(datapath string) error { + atomic.AddInt64(&processorCalled, 1) + return nil + }), + Client: makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + atomic.AddInt64(&clientCalled, 1) + return nil, nil + }), + DownloadURL: "http://example.com", + SaveFilePath: datapath, + TmpFilePath: filepath.Join(t.TempDir(), "tmp"), + RetryCount: 0, + RetryInterval: 10 * time.Second, + Timeout: 10 * time.Second, + UpdateInterval: 0, + }) + err := syncer.Start() + defer syncer.Stop() + + assert.NoError(t, err) + assert.Equal(t, int64(1), atomic.LoadInt64(&processorCalled), "Processor should be called once") + assert.Equal(t, int64(0), atomic.LoadInt64(&clientCalled), "HTTPClient should not be called") +} + +func TestRemoteFileSyncerStartFileExistsInvalid(t *testing.T) { + dir := t.TempDir() + datapath, _ := createTempFile(dir, "imdata") + + var ( + processorCalled int64 = 0 + clientCalled int64 = 0 + ) + syncer, _ := 
NewRemoteFileSyncer(Options{ + Processor: makeTestRemoteFileProcessor(func(datapath string) error { + atomic.AddInt64(&processorCalled, 1) + return assert.AnError + }), + Client: makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + atomic.AddInt64(&clientCalled, 1) + return nil, nil + }), + DownloadURL: "http://example.com", + SaveFilePath: datapath, + TmpFilePath: filepath.Join(t.TempDir(), "tmp"), + RetryCount: 0, + RetryInterval: 10 * time.Second, + Timeout: 10 * time.Second, + UpdateInterval: 0, + }) + err := syncer.Start() + defer syncer.Stop() + + assert.Error(t, err) + assert.Equal(t, int64(1), atomic.LoadInt64(&processorCalled), "Processor should be called once") + assert.Equal(t, int64(0), atomic.LoadInt64(&clientCalled), "HTTPClient should not be called") +} + +func TestRemoteFileSyncerStop(t *testing.T) { + syncer, _ := NewRemoteFileSyncer(Options{ + Processor: makeTestRemoteFileProcessorOK(), + Client: makeTestHTTPClient200(), + DownloadURL: "http://example.com", + SaveFilePath: filepath.Join(t.TempDir(), "foo"), + TmpFilePath: filepath.Join(t.TempDir(), "tmp"), + RetryCount: 0, + RetryInterval: 10 * time.Second, + Timeout: 10 * time.Second, + UpdateInterval: 0, + }) + syncer.Stop() + + _, ok := <-syncer.done + assert.False(t, ok, "done channel should be closed") + _, ok = <-syncer.ttask.Done() + assert.False(t, ok, "TickerTask should be closed") +} + +func TestRemoteFileSyncerRunRetryWhenSyncErr(t *testing.T) { + dir := t.TempDir() + + var ( + clientCalled int64 = 0 + ) + syncer, _ := NewRemoteFileSyncer(Options{ + Processor: makeTestRemoteFileProcessorOK(), + Client: makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + atomic.AddInt64(&clientCalled, 1) + return nil, assert.AnError + }), + DownloadURL: "http://example.com", + SaveFilePath: filepath.Join(dir, "foo"), + TmpFilePath: filepath.Join(dir, "tmp"), + RetryCount: 2, + RetryInterval: 10 * time.Millisecond, + Timeout: 10 * time.Millisecond, + UpdateInterval: 0, + }) + err := syncer.run() + assert.Error(t, err) + assert.Equal(t, int64(3), clientCalled, "HTTPClient should be called 1 + 2 times") +} + +func TestRemoteFileSyncerRunRetryWhenProcessSavedFileErr(t *testing.T) { + dir := t.TempDir() + + var ( + processorCalled int64 = 0 + ) + syncer, _ := NewRemoteFileSyncer(Options{ + Processor: makeTestRemoteFileProcessor(func(datapath string) error { + atomic.AddInt64(&processorCalled, 1) + return assert.AnError + }), + Client: makeTestHTTPClient200(), + DownloadURL: "http://example.com", + SaveFilePath: filepath.Join(dir, "foo"), + TmpFilePath: filepath.Join(dir, "tmp"), + RetryCount: 2, + RetryInterval: 10 * time.Millisecond, + Timeout: 10 * time.Millisecond, + UpdateInterval: 0, + }) + err := syncer.run() + assert.Error(t, err) + assert.Equal(t, int64(3), processorCalled, "processor should be called 1 + 2 times") +} + +func TestRemoteFileSyncerRunRetryWhenTaskStop(t *testing.T) { + dir := t.TempDir() + + var ( + clientCalled int64 = 0 + ) + syncer, _ := NewRemoteFileSyncer(Options{ + Processor: makeTestRemoteFileProcessorOK(), + Client: makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + atomic.AddInt64(&clientCalled, 1) + return nil, assert.AnError + }), + DownloadURL: "http://example.com", + SaveFilePath: filepath.Join(dir, "foo"), + TmpFilePath: filepath.Join(dir, "tmp"), + RetryCount: 2, + RetryInterval: 10 * time.Millisecond, + Timeout: 10 * time.Millisecond, + UpdateInterval: 0, + }) + syncer.Stop() + err := syncer.run() + assert.Error(t, err) + assert.Equal(t, 
int64(1), clientCalled, "HTTPClient should be called 1 times because syncer is stopped") +} + +func TestRemoteFileSyncerSync(t *testing.T) { + dir := t.TempDir() + readdir := filepath.Join(dir, "readonly") + os.MkdirAll(readdir, 0555) + + tests := []struct { + name string + client HTTPClient + saveFilePath string + hasErr bool + }{ + { + "Sync successful", + makeTestHTTPClient200(), + filepath.Join(dir, "foo"), + false, + }, + { + "Sync failed, client returns error", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return nil, assert.AnError + }), + filepath.Join(dir, "foo"), + true, + }, + { + "Sync failed, save file path is read-only", + makeTestHTTPClient200(), + readdir, + true, + }, + } + + syncer, _ := NewRemoteFileSyncer(Options{ + Processor: makeTestRemoteFileProcessorOK(), + Client: makeTestHTTPClient(nil), + DownloadURL: "http://example.com", + SaveFilePath: filepath.Join(dir, "foo"), + TmpFilePath: filepath.Join(dir, "tmp"), + RetryCount: 0, + RetryInterval: 10 * time.Second, + Timeout: 10 * time.Second, + UpdateInterval: 0, + }) + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + syncer.client = test.client + syncer.saveFilePath = test.saveFilePath + err := syncer.sync() + if test.hasErr { + assert.Error(t, err, "RemoteFileSyncer.sync should return error") + } else { + assert.NoError(t, err, "RemoteFileSyncer.sync should not return error. Error %v", err) + } + }) + } +} + +func TestRemoteFileSyncerProcessSavedFile(t *testing.T) { + dir := t.TempDir() + datapath, _ := createTempFile(dir, "imdata") + syncer := &RemoteFileSyncer{ + processor: makeTestRemoteFileProcessor(func(datapath string) error { + return assert.AnError + }), + saveFilePath: datapath, + } + err := syncer.processSavedFile() + assert.Error(t, err) + assert.NoFileExists(t, datapath, "should remove file if process failed") +} + +func TestRemoteFileSyncerUpdateIfNeeded(t *testing.T) { + dir := t.TempDir() + syncer, _ := NewRemoteFileSyncer(Options{ + Processor: makeTestRemoteFileProcessorOK(), + Client: makeTestHTTPClient(nil), + DownloadURL: "http://example.com", + SaveFilePath: filepath.Join(dir, "foo"), + TmpFilePath: filepath.Join(dir, "tmp"), + RetryCount: 0, + RetryInterval: 10 * time.Second, + Timeout: 10 * time.Second, + UpdateInterval: 0, + }) + + tests := []struct { + name string + client HTTPClient + saveFilePath string + saveFileData string + needed bool + hasError bool + }{ + { + "File not exists", + makeTestHTTPClient200(), + filepath.Join(dir, "foo"), + "", + true, + true, + }, + { + "File exists, get content length error", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return nil, assert.AnError + }), + filepath.Join(dir, "foo"), + "imdata", + false, + true, + }, + { + "File exists, content length is different", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return &http.Response{ContentLength: 1}, nil + }), + filepath.Join(dir, "foo"), + "imdata", + true, + true, + }, + { + "File exists, content length is the same", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return &http.Response{ContentLength: 6}, nil + }), + filepath.Join(dir, "foo"), + "imdata", + false, + false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + if test.saveFileData != "" { + createFile(test.saveFilePath, test.saveFileData) + } + syncer.client = test.client + syncer.saveFilePath = test.saveFilePath + syncer.syncing.Store(true) // let run return a known error for 
checking run is called + + err := syncer.updateIfNeeded() + if test.hasError { + assert.Error(t, err, "RemoteFileSyncer.updateIfNeeded should return error") + } else { + assert.NoError(t, err, "RemoteFileSyncer.updateIfNeeded should not return error. Error: %v", err) + } + if test.needed { + assert.Equal(t, ErrSyncInProgress, err) + } + }) + } +} + +func TestCreateAndCheckWritePermissionsFor(t *testing.T) { + dir := t.TempDir() + readdir := filepath.Join(dir, "readonly") + os.MkdirAll(readdir, 0555) + + tests := []struct { + name string + datapath string + hasError bool + }{ + { + "Directory write permissions are granted", + filepath.Join(dir, "foo"), + false, + }, + { + "Directory write permissions are granted to a nested directory", + filepath.Join(dir, "foo/bar"), + false, + }, + { + "Directory is read-only, can not create file", + filepath.Join(readdir, "foo"), + true, + }, + { + "Directory is read-only, can not create directory", + filepath.Join(readdir, "foo/bar"), + true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := createAndCheckWritePermissionsFor(test.datapath) + if test.hasError { + assert.Error(t, err, "should return error") + assert.NoFileExists(t, test.datapath, "should not create file") + } else { + assert.NoError(t, err, "should not return error. Error: %v", err) + } + }) + } +} + +func TestDownloadFileFromURL(t *testing.T) { + tests := []struct { + name string + client HTTPClient + url string + datapath string + timeout time.Duration + data string + hasError bool + }{ + { + "Succesful. Downloaded data is 'imdata'", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewReader([]byte("imdata"))), + }, nil + }), + "http://example.com", + filepath.Join(t.TempDir(), "foo"), + 1 * time.Second, + "imdata", + false, + }, + { + "Download from a invalid URL", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewReader([]byte("imdata"))), + }, nil + }), + "!@#$%^&*()", + filepath.Join(t.TempDir(), "foo"), + 1 * time.Second, + "", + true, + }, + { + "Download from a valid URL, returns error", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return nil, assert.AnError + }), + "http://example.com", + filepath.Join(t.TempDir(), "foo"), + 1 * time.Second, + "", + true, + }, + { + "Download from a valid URL, returns status code 404", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return &http.Response{StatusCode: 404}, nil + }), + "http://example.com", + filepath.Join(t.TempDir(), "foo"), + 1 * time.Second, + "", + true, + }, + { + "Download from a valid URL, save to a directory", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return &http.Response{ + StatusCode: 200, + Body: io.NopCloser(bytes.NewReader([]byte("imdata"))), + }, nil + }), + "http://example.com", + t.TempDir(), + 1 * time.Second, + "", + true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := downloadFileFromURL(test.client, test.url, test.datapath, test.timeout) + if test.hasError { + assert.Error(t, err, "DownloadFileFromURL should return error") + } else { + assert.NoError(t, err, "DownloadFileFromURL should not return error. 
Error: %v", err) + assertFileContent(t, test.datapath, test.data) + } + }) + } +} + +func TestRemoteFileSize(t *testing.T) { + tests := []struct { + name string + client HTTPClient + url string + timeout time.Duration + length int64 + hasError bool + }{ + { + "Successful. ContentLength is 100", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return &http.Response{ + ContentLength: 100, + }, nil + }), + "http://example.com", + 1 * time.Second, + 100, + false, + }, + { + "Request a invalid URL", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return &http.Response{ + ContentLength: 100, + }, nil + }), + "!@#$%^&*()", + 1 * time.Second, + 0, + true, + }, + { + "Request a valid URL, returns error", + makeTestHTTPClient(func(req *http.Request) (*http.Response, error) { + return nil, assert.AnError + }), + "http://example.com", + 1 * time.Second, + 0, + true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + length, err := remoteFileSize(test.client, test.url, test.timeout) + if test.hasError { + assert.Error(t, err, "RemoteFileSize should return error") + } else { + assert.NoError(t, err, "RemoteFileSize should not return error. Error: %v", err) + assert.Equal(t, test.length, length, "RemoteFileSize should return correct file size") + } + }) + } +} diff --git a/util/task/func_runner.go b/util/task/func_runner.go index fe6de614d2d..7a06246d188 100644 --- a/util/task/func_runner.go +++ b/util/task/func_runner.go @@ -2,14 +2,18 @@ package task import "time" -type funcRunner struct { +type FuncRunner struct { run func() error } -func (r funcRunner) Run() error { +func (r FuncRunner) Run() error { return r.run() } func NewTickerTaskFromFunc(interval time.Duration, runner func() error) *TickerTask { - return NewTickerTask(interval, funcRunner{run: runner}) + return NewTickerTask(interval, FuncRunner{run: runner}) +} + +func NewFuncRunner(f func() error) *FuncRunner { + return &FuncRunner{run: f} } diff --git a/util/task/func_runner_test.go b/util/task/func_runner_test.go index 86481e4ee5f..c8787f82aee 100644 --- a/util/task/func_runner_test.go +++ b/util/task/func_runner_test.go @@ -2,6 +2,7 @@ package task import ( "sync" + "sync/atomic" "testing" "time" @@ -26,3 +27,16 @@ func TestNewTickerTaskFromFunc(t *testing.T) { assert.NoError(t, err) assert.Equal(t, 1, runCount) } + +func TestNewFuncRunner(t *testing.T) { + var count int64 + + runner := NewFuncRunner(func() error { + atomic.AddInt64(&count, 1) + return nil + }) + + err := runner.Run() + assert.NoError(t, err) + assert.Equal(t, int64(1), count) +} diff --git a/util/task/ticker_task.go b/util/task/ticker_task.go index a8d523b75d5..5c1a6b1b454 100644 --- a/util/task/ticker_task.go +++ b/util/task/ticker_task.go @@ -9,23 +9,40 @@ type Runner interface { } type TickerTask struct { - interval time.Duration - runner Runner - done chan struct{} + interval time.Duration + runner Runner + skipInitialRun bool + done chan struct{} } func NewTickerTask(interval time.Duration, runner Runner) *TickerTask { + return NewTickerTaskWithOptions(Options{ + Interval: interval, + Runner: runner, + }) +} + +type Options struct { + Interval time.Duration + Runner Runner + SkipInitialRun bool +} + +func NewTickerTaskWithOptions(opt Options) *TickerTask { return &TickerTask{ - interval: interval, - runner: runner, - done: make(chan struct{}), + interval: opt.Interval, + runner: opt.Runner, + skipInitialRun: opt.SkipInitialRun, + done: make(chan struct{}), } } // Start runs the task 
immediately and then schedules the task to run periodically // if a positive fetching interval has been specified. func (t *TickerTask) Start() { - t.runner.Run() + if !t.skipInitialRun { + t.runner.Run() + } if t.interval > 0 { go t.runRecurring() @@ -37,6 +54,11 @@ func (t *TickerTask) Stop() { close(t.done) } +// Done exports readonly done channel +func (t *TickerTask) Done() <-chan struct{} { + return t.done +} + // run creates a ticker that ticks at the specified interval. On each tick, // the task is executed func (t *TickerTask) runRecurring() { diff --git a/util/task/ticker_task_test.go b/util/task/ticker_task_test.go index b1acb35ffa1..14e8156562d 100644 --- a/util/task/ticker_task_test.go +++ b/util/task/ticker_task_test.go @@ -85,3 +85,43 @@ func TestStartWithPeriodicRun(t *testing.T) { time.Sleep(50 * time.Millisecond) assert.Equal(t, expectedRuns, runner.RunCount(), "runner should not run after Stop is called") } + +func TestSkipInitialRun(t *testing.T) { + // Setup One Periodic Run: + expectedRuns := 0 + runner := NewMockRunner(expectedRuns) + interval := 0 * time.Millisecond + ticker := task.NewTickerTaskWithOptions(task.Options{ + Interval: interval, + Runner: runner, + SkipInitialRun: true, + }) + + // Execute: + ticker.Start() + + // Verify No Additional Runs After Stop: + time.Sleep(50 * time.Millisecond) + assert.Equal(t, expectedRuns, runner.RunCount(), "runner should not run") +} + +func TestChannelDone(t *testing.T) { + runner := NewMockRunner(1) + interval := 10 * time.Millisecond + ticker := task.NewTickerTask(interval, runner) + + // Execute: + ticker.Start() + + go func() { + time.Sleep(10 * time.Millisecond) + ticker.Stop() + }() + + select { + case <-ticker.Done(): + // Expected + case <-time.After(250 * time.Millisecond): + assert.Failf(t, "Ticker Done", "expected stop signal") + } +} diff --git a/util/timeutil/time.go b/util/timeutil/time.go index e8eaae7d61f..7e70b0b5017 100644 --- a/util/timeutil/time.go +++ b/util/timeutil/time.go @@ -1,7 +1,9 @@ package timeutil import ( + "sync" "time" + _ "time/tzdata" ) type Time interface { @@ -14,3 +16,49 @@ type RealTime struct{} func (c *RealTime) Now() time.Time { return time.Now() } + +type LocationCache struct { + cache map[string]*LocationCacheResult + mu sync.RWMutex +} + +func NewLocationCache() *LocationCache { + return &LocationCache{ + cache: make(map[string]*LocationCacheResult), + } +} + +type LocationCacheResult struct { + loc *time.Location + err error +} + +// LoadLocation wraps standard package time.LoadLocation, cache the results +func (l *LocationCache) LoadLocation(name string) (*time.Location, error) { + l.mu.RLock() + result, ok := l.cache[name] + l.mu.RUnlock() + + if ok { + return result.loc, result.err + } + + l.mu.Lock() + defer l.mu.Unlock() + + result, ok = l.cache[name] + if ok { + return result.loc, result.err + } + + loc, err := time.LoadLocation(name) + // cache it whether it succeeds or fails. avoid cache penetration caused by invalid timezones. 
+ l.cache[name] = &LocationCacheResult{loc: loc, err: err} + return loc, err +} + +var defaultLocationCache = NewLocationCache() + +func LoadLocation(name string) (*time.Location, error) { + return defaultLocationCache.LoadLocation(name) +} diff --git a/util/timeutil/time_test.go b/util/timeutil/time_test.go new file mode 100644 index 00000000000..229ebe13503 --- /dev/null +++ b/util/timeutil/time_test.go @@ -0,0 +1,64 @@ +package timeutil + +import ( + "testing" + "time" + _ "time/tzdata" + + "github.com/stretchr/testify/assert" +) + +func TestLocationCacheLoadLocation(t *testing.T) { + _, err := LoadLocation("Asia/Shanghai") + assert.Nil(t, err, "should load location Asia/Shanghai") + + c := NewLocationCache() + _, ok := c.cache["America/New_York"] + assert.False(t, ok, "cache should not contain America/New_York") + + newyork, err := c.LoadLocation("America/New_York") + assert.Nil(t, err) + assert.NotNil(t, newyork) + + _, ok = c.cache["America/New_York"] + assert.True(t, ok, "cache should contain America/New_York") + + cacheNewyork, _ := c.LoadLocation("America/New_York") + assert.Equal(t, newyork, cacheNewyork) +} + +func TestLocationCacheLoadLocationUnknown(t *testing.T) { + c := NewLocationCache() + _, ok := c.cache["America/Unknown"] + assert.False(t, ok, "cache should not contain America/Unknown") + + unknown, err := c.LoadLocation("America/Unknown") + assert.NotNil(t, err, "should return error") + assert.Nil(t, unknown, "should return nil location") + + result, ok := c.cache["America/Unknown"] + assert.True(t, ok, "cache should contain America/Unknown") + assert.NotNil(t, result.err, "cache should contain error") +} + +// goos: darwin +// goarch: amd64 +// pkg: github.com/prebid/prebid-server/v2/util/timeutil +// cpu: Intel(R) Core(TM) i5-8257U CPU @ 1.40GHz +// BenchmarkLocationCacheLoadLocation-8 66584589 18.2 ns/op 0 B/op 0 allocs/op +func BenchmarkLocationCacheLoadLocation(b *testing.B) { + for i := 0; i < b.N; i++ { + _, _ = LoadLocation("America/New_York") + } +} + +// goos: darwin +// goarch: amd64 +// pkg: github.com/prebid/prebid-server/v2/util/timeutil +// cpu: Intel(R) Core(TM) i5-8257U CPU @ 1.40GHz +// BenchmarkTimeLoadLocation-8 51571 23117 ns/op 8635 B/op 13 allocs/op +func BenchmarkTimeLoadLocation(b *testing.B) { + for i := 0; i < b.N; i++ { + _, _ = time.LoadLocation("America/New_York") + } +}
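
A minimal, self-contained sketch of how the new remotefilesyncer package can be driven outside of the router wiring shown above. The demoProcessor type, the download URL, and the file paths below are illustrative placeholders rather than part of this change; in Prebid Server itself the Processor is the maxmind geolocation reader built from cfg.GeoLocation.Maxmind.RemoteFileSyncer.

package main

import (
	"fmt"
	"net/http"
	"time"

	"github.com/prebid/prebid-server/v3/util/remotefilesyncer"
)

// demoProcessor stands in for a real consumer such as the maxmind reader;
// it only needs to satisfy remotefilesyncer.RemoteFileProcessor.
type demoProcessor struct{}

func (p *demoProcessor) SetDataPath(datapath string) error {
	fmt.Println("new data file ready at", datapath)
	return nil
}

func main() {
	syncer, err := remotefilesyncer.NewRemoteFileSyncer(remotefilesyncer.Options{
		Processor:      &demoProcessor{},
		Client:         &http.Client{},
		DownloadURL:    "https://example.com/GeoLite2-City.tar.gz", // hypothetical URL
		SaveFilePath:   "/var/tmp/demo/GeoLite2-City.tar.gz",       // hypothetical paths
		TmpFilePath:    "/var/tmp/demo/tmp/GeoLite2-City.tar.gz",
		RetryCount:     3,
		RetryInterval:  3 * time.Second,
		Timeout:        5 * time.Minute,
		UpdateInterval: 24 * time.Hour,
	})
	if err != nil {
		panic(err)
	}

	// Start performs the first download (or reuses an existing local copy),
	// hands the file to the processor, then checks for updates on a ticker.
	if err := syncer.Start(); err != nil {
		panic(err)
	}
	defer syncer.Stop()

	time.Sleep(time.Minute) // stand-in for the lifetime of the server
}

Note that Start is synchronous: it returns only after the file has been downloaded (or the saved copy processed), so a caller has a usable data file before serving traffic. Afterwards the background ticker re-downloads only when the remote Content-Length reported by a HEAD request differs from the local file size.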