diff --git a/build/generate/generate.go b/build/generate/generate.go index c2743851a..764e7d7c0 100644 --- a/build/generate/generate.go +++ b/build/generate/generate.go @@ -5,14 +5,13 @@ import ( ) func main() { - //go:generate esc -modtime 0 -o js/static.go -pkg js -include helpers\.js -ignore go -prefix js js conf := &embed.Config{ ModTime: "0", - OutputFile: "js/static.go", + OutputFile: "pkg/js/static.go", Package: "js", - Prefix: "js", + Prefix: "pkg/js", Private: true, - Files: []string{`js/helpers.js`}, + Files: []string{`pkg/js/helpers.js`}, } embed.Run(conf) } diff --git a/misc/convertzone/README.md b/cmd/convertzone/README.md similarity index 100% rename from misc/convertzone/README.md rename to cmd/convertzone/README.md diff --git a/misc/convertzone/main.go b/cmd/convertzone/main.go similarity index 100% rename from misc/convertzone/main.go rename to cmd/convertzone/main.go diff --git a/cmd/spftest/main.go b/cmd/spftest/main.go new file mode 100644 index 000000000..2e698621c --- /dev/null +++ b/cmd/spftest/main.go @@ -0,0 +1,66 @@ +package main + +import ( + "fmt" + "strings" + + "github.com/StackExchange/dnscontrol/pkg/dnsresolver" + "github.com/StackExchange/dnscontrol/pkg/spflib" +) + +func main() { + + h := dnsresolver.NewResolverLive("spf-store.json") + fmt.Println(h.GetTxt("_spf.google.com")) + fmt.Println(h.GetTxt("spf-basic.fogcreek.com")) + h.Close() + + i, err := dnsresolver.NewResolverPreloaded("spf-store.json") + if err != nil { + panic(err) + } + fmt.Println(i.GetTxt("_spf.google.com")) + fmt.Println(i.GetTxt("spf-basic.fogcreek.com")) + fmt.Println(i.GetTxt("wontbefound")) + + fmt.Println() + fmt.Println("---------------------") + fmt.Println() + + //res := dnsresolver.NewResolverLive("spf-store2.json") + res, err := dnsresolver.NewResolverPreloaded("spf-store2.json") + if err != nil { + panic(err) + } + rec, err := spflib.Parse(strings.Join([]string{"v=spf1", + "ip4:198.252.206.0/24", + "ip4:192.111.0.0/24", + "include:_spf.google.com", + "include:mailgun.org", + "include:spf-basic.fogcreek.com", + "include:mail.zendesk.com", + "include:servers.mcsv.net", + "include:sendgrid.net", + "include:spf.mtasv.net", + "~all"}, " "), res) + if err != nil { + panic(err) + } + spflib.DumpSPF(rec, "") + + fmt.Println() + fmt.Println("---------------------") + fmt.Println() + + var spf string + spf, err = spflib.Lookup("whatexit.org", res) + if err != nil { + panic(err) + } + rec, err = spflib.Parse(spf, res) + if err != nil { + panic(err) + } + spflib.DumpSPF(rec, "") + //res.Close() +} diff --git a/cmd/spftest/spf-store.json b/cmd/spftest/spf-store.json new file mode 100644 index 000000000..d1e1eaa81 --- /dev/null +++ b/cmd/spftest/spf-store.json @@ -0,0 +1,12 @@ +{ + "_spf.google.com": { + "txt": [ + "v=spf1 include:_netblocks.google.com include:_netblocks2.google.com include:_netblocks3.google.com ~all" + ] + }, + "spf-basic.fogcreek.com": { + "txt": [ + "v=spf1 ip4:64.34.80.172 -all" + ] + } +} diff --git a/cmd/spftest/spf-store2.json b/cmd/spftest/spf-store2.json new file mode 100644 index 000000000..8cddaa5a3 --- /dev/null +++ b/cmd/spftest/spf-store2.json @@ -0,0 +1,69 @@ +{ + "_netblocks.google.com": { + "txt": [ + "v=spf1 ip4:64.18.0.0/20 ip4:64.233.160.0/19 ip4:66.102.0.0/20 ip4:66.249.80.0/20 ip4:72.14.192.0/18 ip4:74.125.0.0/16 ip4:108.177.8.0/21 ip4:173.194.0.0/16 ip4:207.126.144.0/20 ip4:209.85.128.0/17 ip4:216.58.192.0/19 ip4:216.239.32.0/19 ~all" + ] + }, + "_netblocks2.google.com": { + "txt": [ + "v=spf1 ip6:2001:4860:4000::/36 ip6:2404:6800:4000::/36 
ip6:2607:f8b0:4000::/36 ip6:2800:3f0:4000::/36 ip6:2a00:1450:4000::/36 ip6:2c0f:fb50:4000::/36 ~all" + ] + }, + "_netblocks3.google.com": { + "txt": [ + "v=spf1 ip4:172.217.0.0/19 ip4:108.177.96.0/19 ~all" + ] + }, + "_spf.google.com": { + "txt": [ + "v=spf1 include:_netblocks.google.com include:_netblocks2.google.com include:_netblocks3.google.com ~all" + ] + }, + "mail.zendesk.com": { + "txt": [ + "v=spf1 ip4:192.161.144.0/20 ip4:185.12.80.0/22 ip4:96.46.150.192/27 ip4:174.137.46.0/24 ip4:188.172.128.0/20 ip4:216.198.0.0/18 ~all" + ] + }, + "mailgun.org": { + "txt": [ + "google-site-verification=FIGVOKZm6lQFDBJaiC2DdwvBy8TInunoGCt-1gnL4PA", + "v=spf1 include:spf1.mailgun.org include:spf2.mailgun.org ~all" + ] + }, + "sendgrid.net": { + "txt": [ + "v=spf1 ip4:167.89.0.0/17 ip4:208.117.48.0/20 ip4:50.31.32.0/19 ip4:198.37.144.0/20 ip4:198.21.0.0/21 ip4:192.254.112.0/20 ip4:168.245.0.0/17 ~all", + "google-site-verification=NxyooVvVaIgddVa23KTlOEuVPuhffcDqJFV8RzWrAys" + ] + }, + "servers.mcsv.net": { + "txt": [ + "v=spf1 ip4:205.201.128.0/20 ip4:198.2.128.0/18 ip4:148.105.8.0/21 ?all" + ] + }, + "spf-basic.fogcreek.com": { + "txt": [ + "v=spf1 ip4:64.34.80.172 -all" + ] + }, + "spf.mtasv.net": { + "txt": [ + "v=spf1 ip4:50.31.156.96/27 ip4:104.245.209.192/26 ~all" + ] + }, + "spf1.mailgun.org": { + "txt": [ + "v=spf1 ip4:173.193.210.32/27 ip4:50.23.218.192/27 ip4:174.37.226.64/27 ip4:208.43.239.136/30 ip4:184.173.105.0/24 ip4:184.173.153.0/24 ip4:104.130.122.0/23 ip4:146.20.112.0/26 ~all" + ] + }, + "spf2.mailgun.org": { + "txt": [ + "v=spf1 ip4:209.61.151.0/24 ip4:166.78.68.0/22 ip4:198.61.254.0/23 ip4:192.237.158.0/23 ip4:23.253.182.0/23 ip4:104.130.96.0/28 ip4:146.20.113.0/24 ip4:146.20.191.0/24 ~all" + ] + }, + "whatexit.org": { + "txt": [ + "v=spf1 ip6:2607:f2f8:a9c0::3 ip4:174.136.107.195 include:servers.mcsv.net include:_spf.google.com mx:evite.com -all" + ] + } +} diff --git a/docs/_providers/cloudflare.md b/docs/_providers/cloudflare.md index 246816f98..256c5bc7e 100644 --- a/docs/_providers/cloudflare.md +++ b/docs/_providers/cloudflare.md @@ -22,13 +22,14 @@ username and access token: ## Metadata Record level metadata availible: - * cloudflare_proxy ('on', 'off', or 'full') + * `cloudflare_proxy` ("on", "off", or "full") Domain level metadata availible: - * cloudflare_proxy_default ('on', 'off', or 'full') + * `cloudflare_proxy_default` ("on", "off", or "full") Provider level metadata availible: - * ip_conversions + * `ip_conversions` + * `manage_redirects`: set to `true` to manage page-rule based redirects What does on/off/full mean? @@ -100,3 +101,31 @@ DNSControl depends on a Cloudflare Global API Key that's available under "My Set If a domain does not exist in your CloudFlare account, DNSControl will *not* automatically add it. You'll need to do that via the control panel manually or via the `dnscontrol create-domains` command. + +## Redirects + +The cloudflare provider can manage Page-Rule based redirects for your domains. Simply use the `CF_REDIRECT` and `CF_TEMP_REDIRECT` functions to make redirects: + +{% highlight js %} + +// chiphacker.com is an alias for electronics.stackexchange.com + +D("chiphacker.com", REG_NAMECOM, DnsProvider(CFLARE), + // must have A records with orange cloud on. Otherwise page rule will never run. 
+ A("@","1.2.3.4", CF_PROXY_ON), + A("www", "1.2.3.4", CF_PROXY_ON) + A("meta", "1.2.3.4", CF_PROXY_ON), + + // 302 for meta subdomain + CF_TEMP_REDIRECT("meta.chiphacker.com/*", "https://electronics.meta.stackexchange.com/$1), + + // 301 all subdomains and preserve path + CF_REDIRECT("*chiphacker.com/*", "https://electronics.stackexchange.com/$2), +); +{%endhighlight%} + +Notice a few details: + +1. We need an A record with cloudflare proxy on, or the page rule will never run. +2. The IP address in those A records may be mostly irrelevant, as cloudflare should handle all requests (assuming some page rule matches). +3. Ordering matters for priority. CF_REDIRECT records will be added in the order they appear in your js. So put catch-alls at the bottom. diff --git a/integrationTest/integration_test.go b/integrationTest/integration_test.go index 6428abf94..ad6ba9f14 100644 --- a/integrationTest/integration_test.go +++ b/integrationTest/integration_test.go @@ -10,7 +10,7 @@ import ( "strings" "github.com/StackExchange/dnscontrol/models" - "github.com/StackExchange/dnscontrol/nameservers" + "github.com/StackExchange/dnscontrol/pkg/nameservers" "github.com/StackExchange/dnscontrol/providers" _ "github.com/StackExchange/dnscontrol/providers/_all" "github.com/StackExchange/dnscontrol/providers/config" diff --git a/js/error_tests/01-oldDSP.js b/js/error_tests/01-oldDSP.js deleted file mode 100644 index 4d47e66c4..000000000 --- a/js/error_tests/01-oldDSP.js +++ /dev/null @@ -1 +0,0 @@ -D("foo.com","reg","dsp") \ No newline at end of file diff --git a/js/parse_tests/008-import.js b/js/parse_tests/008-import.js deleted file mode 100644 index 99e27afef..000000000 --- a/js/parse_tests/008-import.js +++ /dev/null @@ -1,2 +0,0 @@ - -require("js/parse_tests/import.js") diff --git a/main.go b/main.go index 8665bdbbd..78f28783f 100644 --- a/main.go +++ b/main.go @@ -12,10 +12,10 @@ import ( "strings" "time" - "github.com/StackExchange/dnscontrol/js" "github.com/StackExchange/dnscontrol/models" - "github.com/StackExchange/dnscontrol/nameservers" - "github.com/StackExchange/dnscontrol/normalize" + "github.com/StackExchange/dnscontrol/pkg/js" + "github.com/StackExchange/dnscontrol/pkg/nameservers" + "github.com/StackExchange/dnscontrol/pkg/normalize" "github.com/StackExchange/dnscontrol/providers" _ "github.com/StackExchange/dnscontrol/providers/_all" "github.com/StackExchange/dnscontrol/providers/config" @@ -207,8 +207,8 @@ func main() { if !ok { log.Fatalf("Registrar %s not declared.", reg) } - if len(domain.Nameservers) == 0 { - //fmt.Printf("No nameservers declared; skipping registrar.\n") + if len(domain.Nameservers) == 0 && domain.Metadata["no_ns"] != "true" { + fmt.Printf("No nameservers declared; skipping registrar. Add {no_ns:'true'} to force.\n") continue } dc, err := domain.Copy() diff --git a/models/dns.go b/models/dns.go index 47c0bbb7b..7d95cb740 100644 --- a/models/dns.go +++ b/models/dns.go @@ -10,7 +10,7 @@ import ( "reflect" "strconv" - "github.com/StackExchange/dnscontrol/transform" + "github.com/StackExchange/dnscontrol/pkg/transform" "github.com/miekg/dns" "golang.org/x/net/idna" ) diff --git a/pkg/dnsresolver/dnscache.go b/pkg/dnsresolver/dnscache.go new file mode 100644 index 000000000..edb102d04 --- /dev/null +++ b/pkg/dnsresolver/dnscache.go @@ -0,0 +1,28 @@ +package dnsresolver + +// dnsCache implements a very simple DNS cache. +// It caches the entire answer (i.e. all TXT records), filtering +// out the non-SPF answers is done at a higher layer. 
+// At this time the only rtype is "TXT". Eventually we'll need +// to cache A/AAAA/CNAME records to to CNAME flattening. +type dnsCache map[string]map[string][]string // map[fqdn]map[rtype] -> answers + +func (c dnsCache) get(label, rtype string) ([]string, bool) { + v1, ok := c[label] + if !ok { + return nil, false + } + v2, ok := v1[rtype] + if !ok { + return nil, false + } + return v2, true +} + +func (c dnsCache) put(label, rtype string, answers []string) { + _, ok := c[label] + if !ok { + c[label] = make(map[string][]string) + } + c[label][rtype] = answers +} diff --git a/pkg/dnsresolver/dnscache_test.go b/pkg/dnsresolver/dnscache_test.go new file mode 100644 index 000000000..e4f32845c --- /dev/null +++ b/pkg/dnsresolver/dnscache_test.go @@ -0,0 +1,31 @@ +package dnsresolver + +import "testing" + +func TestDnsCache(t *testing.T) { + + cache := &dnsCache{} + cache.put("one", "txt", []string{"a", "b", "c"}) + cache.put("two", "txt", []string{"d", "e", "f"}) + + a, b := cache.get("one", "txt") + if !(b == true && len(a) == 3 && a[0] == "a" && a[1] == "b" && a[2] == "c") { + t.Errorf("one-txt didn't work") + } + + a, b = cache.get("two", "txt") + if !(b == true && len(a) == 3 && a[0] == "d" && a[1] == "e" && a[2] == "f") { + t.Errorf("one-txt didn't work") + } + + a, b = cache.get("three", "txt") + if !(b == false) { + t.Errorf("three-txt didn't work") + } + + a, b = cache.get("two", "not") + if !(b == false) { + t.Errorf("two-not didn't work") + } + +} diff --git a/pkg/dnsresolver/resolver.go b/pkg/dnsresolver/resolver.go new file mode 100644 index 000000000..706dda89d --- /dev/null +++ b/pkg/dnsresolver/resolver.go @@ -0,0 +1,83 @@ +package dnsresolver + +import ( + "encoding/json" + "io/ioutil" + "net" + + "github.com/pkg/errors" +) + +// This file includes all the DNS Resolvers used by package spf. + +// DnsResolver looks up txt strings associated with a FQDN. +type DnsResolver interface { + GetTxt(string) ([]string, error) // Given a DNS label, return the TXT values records. +} + +// The "Live DNS" Resolver: + +type dnsLive struct { + filename string + cache dnsCache +} + +func NewResolverLive(filename string) *dnsLive { + // Does live DNS lookups. Records them. Writes file on Close. + c := &dnsLive{filename: filename} + c.cache = dnsCache{} + return c +} + +func (c *dnsLive) GetTxt(label string) ([]string, error) { + // Try the cache. + txts, ok := c.cache.get(label, "txt") + if ok { + return txts, nil + } + + // Populate the cache: + t, err := net.LookupTXT(label) + if err == nil { + c.cache.put(label, "txt", t) + } + + return t, err +} + +func (c *dnsLive) Close() { + // Write out and close the file. + m, _ := json.MarshalIndent(c.cache, "", " ") + m = append(m, "\n"...) + ioutil.WriteFile(c.filename, m, 0666) +} + +// The "Pre-Cached DNS" Resolver: + +type dnsPreloaded struct { + cache dnsCache +} + +func NewResolverPreloaded(filename string) (*dnsPreloaded, error) { + c := &dnsPreloaded{} + c.cache = dnsCache{} + j, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + err = json.Unmarshal(j, &(*c).cache) + return c, err +} + +func (c *dnsPreloaded) DumpCache() dnsCache { + return c.cache +} + +func (c *dnsPreloaded) GetTxt(label string) ([]string, error) { + // Try the cache. 
+ txts, ok := c.cache.get(label, "txt") + if ok { + return txts, nil + } + return nil, errors.Errorf("No preloaded DNS entry for: %#v", label) +} diff --git a/js/helpers.js b/pkg/js/helpers.js similarity index 93% rename from js/helpers.js rename to pkg/js/helpers.js index ae5de6584..87dedb710 100644 --- a/js/helpers.js +++ b/pkg/js/helpers.js @@ -304,6 +304,7 @@ function num2dot(num) return d; } + // Cloudflare aliases: // Meta settings for individual records. @@ -315,3 +316,21 @@ var CF_PROXY_FULL = {'cloudflare_proxy': 'full'}; // Proxy+Railgun enabled. var CF_PROXY_DEFAULT_OFF = {'cloudflare_proxy_default': 'off'}; // Proxy default on for entire domain: var CF_PROXY_DEFAULT_ON = {'cloudflare_proxy_default': 'on'}; + +// CUSTOM, PROVIDER SPECIFIC RECORD TYPES +function CF_REDIRECT(src, dst) { + return function(d) { + if (src.indexOf(",") !== -1 || dst.indexOf(",") !== -1){ + throw("redirect src and dst must not have commas") + } + addRecord(d,"CF_REDIRECT","@",src+","+dst) + } +} +function CF_TEMP_REDIRECT(src, dst) { + return function(d) { + if (src.indexOf(",") !== -1 || dst.indexOf(",") !== -1){ + throw("redirect src and dst must not have commas") + } + addRecord(d,"CF_TEMP_REDIRECT","@",src+","+dst) + } +} diff --git a/js/js.go b/pkg/js/js.go similarity index 100% rename from js/js.go rename to pkg/js/js.go diff --git a/js/js_test.go b/pkg/js/js_test.go similarity index 85% rename from js/js_test.go rename to pkg/js/js_test.go index f6241520d..a2432011b 100644 --- a/js/js_test.go +++ b/pkg/js/js_test.go @@ -12,12 +12,12 @@ import ( ) const ( - testDir = "js/parse_tests" - errorDir = "js/error_tests" + testDir = "pkg/js/parse_tests" + errorDir = "pkg/js/error_tests" ) func init() { - os.Chdir("..") // go up a directory so we helpers.js is in a consistent place. + os.Chdir("../..") // go up a directory so we helpers.js is in a consistent place. 
} func TestParsedFiles(t *testing.T) { @@ -72,6 +72,8 @@ func TestErrors(t *testing.T) { {"old dsp style", `D("foo.com","reg","dsp")`}, {"MX no priority", `D("foo.com","reg",MX("@","test."))`}, {"MX reversed", `D("foo.com","reg",MX("@","test.", 5))`}, + {"CF_REDIRECT With comma", `D("foo.com","reg",CF_REDIRECT("foo.com,","baaa"))`}, + {"CF_TEMP_REDIRECT With comma", `D("foo.com","reg",CF_TEMP_REDIRECT("foo.com","baa,a"))`}, } for _, tst := range tests { t.Run(tst.desc, func(t *testing.T) { diff --git a/js/parse_tests/001-basic.js b/pkg/js/parse_tests/001-basic.js similarity index 100% rename from js/parse_tests/001-basic.js rename to pkg/js/parse_tests/001-basic.js diff --git a/js/parse_tests/001-basic.json b/pkg/js/parse_tests/001-basic.json similarity index 100% rename from js/parse_tests/001-basic.json rename to pkg/js/parse_tests/001-basic.json diff --git a/js/parse_tests/002-ttl.js b/pkg/js/parse_tests/002-ttl.js similarity index 100% rename from js/parse_tests/002-ttl.js rename to pkg/js/parse_tests/002-ttl.js diff --git a/js/parse_tests/002-ttl.json b/pkg/js/parse_tests/002-ttl.json similarity index 100% rename from js/parse_tests/002-ttl.json rename to pkg/js/parse_tests/002-ttl.json diff --git a/js/parse_tests/003-meta.js b/pkg/js/parse_tests/003-meta.js similarity index 100% rename from js/parse_tests/003-meta.js rename to pkg/js/parse_tests/003-meta.js diff --git a/js/parse_tests/003-meta.json b/pkg/js/parse_tests/003-meta.json similarity index 100% rename from js/parse_tests/003-meta.json rename to pkg/js/parse_tests/003-meta.json diff --git a/js/parse_tests/004-ips.js b/pkg/js/parse_tests/004-ips.js similarity index 100% rename from js/parse_tests/004-ips.js rename to pkg/js/parse_tests/004-ips.js diff --git a/js/parse_tests/004-ips.json b/pkg/js/parse_tests/004-ips.json similarity index 100% rename from js/parse_tests/004-ips.json rename to pkg/js/parse_tests/004-ips.json diff --git a/js/parse_tests/005-multipleDomains.js b/pkg/js/parse_tests/005-multipleDomains.js similarity index 100% rename from js/parse_tests/005-multipleDomains.js rename to pkg/js/parse_tests/005-multipleDomains.js diff --git a/js/parse_tests/005-multipleDomains.json b/pkg/js/parse_tests/005-multipleDomains.json similarity index 100% rename from js/parse_tests/005-multipleDomains.json rename to pkg/js/parse_tests/005-multipleDomains.json diff --git a/js/parse_tests/006-transforms.js b/pkg/js/parse_tests/006-transforms.js similarity index 100% rename from js/parse_tests/006-transforms.js rename to pkg/js/parse_tests/006-transforms.js diff --git a/js/parse_tests/006-transforms.json b/pkg/js/parse_tests/006-transforms.json similarity index 100% rename from js/parse_tests/006-transforms.json rename to pkg/js/parse_tests/006-transforms.json diff --git a/js/parse_tests/007-importTransformTTL.js b/pkg/js/parse_tests/007-importTransformTTL.js similarity index 100% rename from js/parse_tests/007-importTransformTTL.js rename to pkg/js/parse_tests/007-importTransformTTL.js diff --git a/js/parse_tests/007-importTransformTTL.json b/pkg/js/parse_tests/007-importTransformTTL.json similarity index 100% rename from js/parse_tests/007-importTransformTTL.json rename to pkg/js/parse_tests/007-importTransformTTL.json diff --git a/pkg/js/parse_tests/008-import.js b/pkg/js/parse_tests/008-import.js new file mode 100644 index 000000000..d1c689a5f --- /dev/null +++ b/pkg/js/parse_tests/008-import.js @@ -0,0 +1,2 @@ + +require("pkg/js/parse_tests/import.js") diff --git a/js/parse_tests/008-import.json 
b/pkg/js/parse_tests/008-import.json similarity index 100% rename from js/parse_tests/008-import.json rename to pkg/js/parse_tests/008-import.json diff --git a/js/parse_tests/010-alias.js b/pkg/js/parse_tests/010-alias.js similarity index 100% rename from js/parse_tests/010-alias.js rename to pkg/js/parse_tests/010-alias.js diff --git a/js/parse_tests/010-alias.json b/pkg/js/parse_tests/010-alias.json similarity index 100% rename from js/parse_tests/010-alias.json rename to pkg/js/parse_tests/010-alias.json diff --git a/pkg/js/parse_tests/011-cfRedirect.js b/pkg/js/parse_tests/011-cfRedirect.js new file mode 100644 index 000000000..9c9dad939 --- /dev/null +++ b/pkg/js/parse_tests/011-cfRedirect.js @@ -0,0 +1,4 @@ +D("foo.com","none", + CF_REDIRECT("test.foo.com","https://goo.com/$1"), + CF_TEMP_REDIRECT("test.foo.com","https://goo.com/$1") +); \ No newline at end of file diff --git a/pkg/js/parse_tests/011-cfRedirect.json b/pkg/js/parse_tests/011-cfRedirect.json new file mode 100644 index 000000000..729e2a0e7 --- /dev/null +++ b/pkg/js/parse_tests/011-cfRedirect.json @@ -0,0 +1,24 @@ +{ + "registrars": [], + "dns_providers": [], + "domains": [ + { + "name": "foo.com", + "registrar": "none", + "dnsProviders": {}, + "records": [ + { + "type": "CF_REDIRECT", + "name": "@", + "target": "test.foo.com,https://goo.com/$1" + }, + { + "type": "CF_TEMP_REDIRECT", + "name": "@", + "target": "test.foo.com,https://goo.com/$1" + } + ], + "keepunknown": false + } + ] + } \ No newline at end of file diff --git a/js/parse_tests/import.js b/pkg/js/parse_tests/import.js similarity index 100% rename from js/parse_tests/import.js rename to pkg/js/parse_tests/import.js diff --git a/js/static.go b/pkg/js/static.go similarity index 51% rename from js/static.go rename to pkg/js/static.go index 97d5a54f1..f6b9a67a7 100644 --- a/js/static.go +++ b/pkg/js/static.go @@ -189,57 +189,60 @@ func _escFSMustString(useLocal bool, name string) string { var _escData = map[string]*_escFile{ "/helpers.js": { - local: "js/helpers.js", - size: 8291, + local: "pkg/js/helpers.js", + size: 8855, modtime: 0, compressed: ` -H4sIAAAAAAAA/7xZ/W/bvPH/3X/FPQK+tfS1orz0aTbI9TCvSR4US5wgcbYMhmEwEm2zlUSBpJxmhfO3 -D3yRREl2kwLr+kNqiffyuePx7nhyCo6BC0Yi4Qx7vQ1iENFsCSP43gMAYHhFuGCI8RBmc1+9izO+yBnd -kBg3XtMUkUy96G2NrBgvUZGIMVtxGMFsPuz1lkUWCUIzIBkRBCXk39j1tLKG5n3af4CgjUI+b4caXAfI -1oIywU+3pSo3Qyn2xXOO/RQL5Bk4ZAmufOlV8OQTjEbgXI0n9+NLRyvaqr/SdoZX0hgpLgQlVLGE6q8P -Unio/hqI0vqgtjjIC752GV55Q7MTomCZEtQBf5bxG+MOt9akdVgGgKtMoEu1AKPRCPr08QuORN+Dd+/A -7ZN8EdFsgxknNON9IJmW4VmbIl8ETUIYwZKyFImFEO6Oda/lmpjnP++axqZr78Q8f807GX46UyGhHVP5 -16sCXDE2sFREYf3ToPq+lcsRZTEPZ3NfRuJNHYhy1UTadHoZwpGvJHLMpCfC2XzbBJczGmHOzxBbcTf1 -TfDazj48lJ4FjKI1pDQmS4KZL/eSCCAcUBAEDVojOYQIJYkkeiJibeTahIgx9ByWAKRJBeNkg5Nnm0oH -h9wKtsJKZSaockSMBKoo5dlYBIRfGO1u2giYMm5cY96wWtkCTjiu+McS1A5m6QFXxs0XFZBd2U0/zr7M -K1c2CLf7FF8rO3doXgT4m8BZbKAH0nQ/7Vpgc4k1o0/g/HN8O/k8+SM0SKrd03mjyHiR55QJHIfgDKA8 -lzAAB3TAqvdGr47r2o5tr3d4CGftmA7hE8NIYEBwNrkzcgK45xjEGkOOGEqxwIwD4mUYA8piCY4HdVx2 -BBsD1dnV5oz2nywNtNo0AiM4GgL5aCfhIMHZSqyHQAYDr/JeYx8t6hmZ+9aGbrsKTqQCxFZFijPRlG5t -jqROYQQV4YzMa7fuOY117tJpSBcYk4AMidmP84vx/eX0Dkya4oCAYwF0WZpeawZBAeV58qx+JAksC1Ew -XNavQMo7l6deHWRBa+FPJEkgSjBigLJnyBneEFpw2KCkwFwqtHfScJUltlsHd+/Vq66091K5wvapV9ZC -7Zfp9NLdeCHcYaHicDq9VCp1lOo4tDBr8mZ+LhddZoNggRAJjGDT1HdWpeCG2nIPSvXqnT4ilsNs3j0Y -4oYjgjrjt6BoMFZtdsr6NUEpdnw48kCSZPwTLTIVJ0eQYpRxiGnWFyCbM8pMEcJ6v62CEtjMGRVl3DEj -RLKjJLGt6zQKht0rm4SyQyjFqiahyGK8JBmO+/VZrSng4NjufV7zllUxZxLDXOYSLau5jWMNkeRlyb0y 
-KZQHQeDVRhk6ILmdp2RKgxGssKjY6hj1T7zXsaI4vlV63dh3xo5fopGSvSbS8fjNYCvSX4x3PP4x5MvP -4zvT6yK2wuI13DU9aIZfCV4qM+gNupYF0oRPk/HV+U+YYNH/ehOUsh+aIBPjw/Qn8FfUvx799GH6Gvar -Bw0mZ4QyIp7fZkPJBRVby5hojaOvsqq4M9mZ3QlGspUP8vekSB9l91u/n/t1QfXBuXoA/C3HkeCwT4vj -vdFl79/gMtU1qeJX6rE6Q9ufEprjg715PrRcWrmo9oD6xZWNXF4seOTVl1FUd1HwUTOVz1aSVs2oq1it -FL2jN2sIaLVlSt9vmmJG5kq1rPJes1mudQ0cOKh2BpwBGTjytiJLVEQZw5FQDa/jWS2tHVuTn8lMk/9Z -Wpr8OCdJ4OOr87vz23+c39oG2GBbBC3Qr9ROu/aruGteoZWo0Py/3RVb9S1dMJRx+bgQ6DExYw2ZkqT+ -2SyhTyEc+7Amq3UIJ77s9v+GOA7h/dwHvfx7ufxBLX++CeF0Ptdi1EXROYYXOIEXeA8vQ/gdXuADvAC8 -wKnT0xuUkAzrRrRnR+VIxiR8hBbIXb2oos9h1KatOntJoNDBCEgeqJ/D6hSpx0akWzdRvdiK8lLWIkhR -rkn8ar+I972cRBTpSUyFS7ytF3yhJHMd3453eW3cLbjk1NqHnSNiGSV3pDJLPjQMky9+YJpa7hpnZFbm -yef/moFGuGWiQrHfSHmVHsHMrFc68yChT57ffS0Dsn5v0PcsB6vfejSogs+M2eiTsQFewPGkGRKDMVUT -mvUhOOV97/PVzfXtdDG9HU/uLq5vr/ShSpD0lI7C+hJZHcG3M/lCJG9KDHraGMmLbaPotFU5Pjh/dSrx -lVv1v+/91hHqh+18YaP0tnOvUSAk2uaGMxyZC5oQSXePtRNv7m//OHctB+kXxsA4+DvG+X32NaNPGYxg -iRKOy2R7vegwV+/28AtW4EZGbNcG7nOB2K4qsvOyrIiH6r6896pctwll4ezeliRNczZob6Uai3Yqj1Eh -s+3SJH1VZU2bhDgvUiyTI4pjhjkPQI9kBRARVImi7qxcU4ts7EZsfWQNTXfYLcPvuz3F3V+afBkPoX1x -rjs1NTQ1o1Yz/d09A41xRGIMj4jjGGimB8gl/QFctCahXE9C5Z1fdxOAuHoq+4Ga9Xrn1FPSNiafilZ7 -LoTPF3D1UEvWnlfbURpWOdzeu0486WZMRcyeaAJrjiXpZmTeWHvbMBZSl+HISrzwE1NR0OaX0VSlDTXU -4qoz510GZXtQEcO7d2ANfeuFdk2qEFu8je8NFmuXcdt5Vc10ZXrqDHTfTtXyljlDqfqSUn8benB2eE/K -LONCbuNOwV0vRDTjVLZBdOXW8+WrvYNlx6/myj447t1XkuckW/3mOW1TdtbfODAj4vJTVNT82MJwNNSp -mORQf+2pihSHJaMprIXIw8NDLlD0lW4wWyb0KYhoeogO/3x89OFPvx8dHp8cn54eyZy+Iahk+II2iEeM -5CJAj7QQiichjwyx58PHhOQm/oK1SK3yeuPGVHg9a2ANI4ipCHieEOH2g37TClf9G8Szo7n3/ycfTr2B -fDiee9bTSePp/dxrfWMq25kiLRWTpXxS07NqeObZHzaVbqfx0bCMJH23VdK6LFmRtlJvrLPz/518ON1R -oN7LTvovKq8cHOjzYY3wJES4QmIdLBNKmdR5KO2sw8OSDgPoB30YQLxj3BebUIBPCS3iZYIYBpQQxDEP -9bwACzUNFzI7KIwki8mGxAVKym8Rgfpo/OlicXN7/fCvxfXFhSwq/agSucgZ/fbcD6FPl8v+dqggyiZC -voaYcNmZxG0xk/1SslKIJQZnu6Rc3F9e7pWzLJJESyqlDG4RSVZFVkuTK5gdlJ+DbHeEvdoGM6Smy6Wu -epkg1WcBcK05thc2AZpR/16vLQxf7b0dWrOu0n1qdnu1oUV6t/efAAAA//8jBMkCYyAAAA== +H4sIAAAAAAAA/9wZa2/byPG7fsUcgUZkRdOPXNKCOhVVbflg1JINWb76IAjCmlxJm/CF3aUcNyf/9mIf +JJekFDtA0w/NB0fcnffMzszOWjnDwDglAbf6nc4WUQjSZAUD+NoBAKB4TRiniDIf5gtXroUJW2Y03ZIQ +15bTGJFELnR2mlaIVyiP+JCuGQxgvuh3Oqs8CThJEyAJ4QRF5N/YdhSzGudD3L8hQVMK8b3rK+FaguwM +USb4aVqwshMUY5c/Z9iNMUeOFoeswBaLTime+ILBAKzxcHI/vLYUo538K3SneC2UEeR8kEQlii//uiCI ++/KvFlFo71Uae1nONjbFa6evPcFzmkhCLeEvEnarzWFXnBQPQwGwpQrpSm7AYDCAbvr4CQe868C7d2B3 +SbYM0mSLKSNpwrpAEkXDMZwiFrw6IAxgldIY8SXn9p59p2GakGXfb5qa05V1Qpa9Zp0EP13IkFCGKe3r +lAEuEWuylEB+9VNL9XUntoOUhsyfL1wRibdVIIpdHWmz2bUPJ66kyDAVlvDni11duIymAWbsAtE1s2NX +B69p7ONjYVnAKNhAnIZkRTB1hS8JB8IAeZ5Xg9WUfQhQFAmgJ8I3mq4JiChFz34hgFApp4xscfRsQqng +EK6gayxZJjyVhggRRyWkOBtLj7BLzd2OawFTxI2t1euXOzvAEcMl/lAItQdZWMAWcfNJBmSbdt2O80+L +0pQ1wN0hxjdSzz2clx7+wnESatE9obobtzUwsfiGpk9g/Ws4nVxNfvW1JKX3VN7IE5ZnWUo5Dn2welCc +S+iBBSpg5brmq+K60mPX6Rwfw0Uzpn04pxhxDAguJneajgf3DAPfYMgQRTHmmDJArAhjQEkohGNeFZct +wlpBeXaVOoPDJ0sJWjqNwABO+kB+MZOwF+FkzTd9IL2eU1qv5kcDek4WruHQXZvBmWCA6DqPccLr1A3n +COgYBlACzsmiMuuB01jlLpWGVIHRCUiDaH+MLof317M70GmKAQKGOaSrQvWKM/AUUJZFz/JHFMEq5znF +Rf3yBL2ROPXyIPO0Iv5EogiCCCMKKHmGjOItSXMGWxTlmAmGpic1VlFi23Vwv69eNaXpS2kK06ZOUQuV +XWaza3vr+HCHuYzD2exaslRRquLQkFmB1/NzsWlTUwjqcR7BALZ1fhdlCq6xLXxQsJdr6ogYBjNxD8gQ +1gzhVRm/IYoSxqjNVlG/JijGlgsnDgiQhJ2neSLj5ARijBIGYZp0OYjmLKW6CGHlb6OgeCZykvIi7qgm +ItBRFJnatRoFje4UTULRIRRkZZOQJyFekQSH3eqsVhBwdGr2Pq9Zy6iYcyHDQuQSRavuxqESkWRFyR3r +FMo8z3MqpTQckMzMUyKlwQDWmJdoVYy6Z87rsqIwnEq+duhaQ8stpBGUnbqkw+GbhS1Bf7C8w+G3Rb6+ 
+Gt7pXhfRNeavyV3Bg0L4kcILZlp6LV1DA6HC+WQ4Hn2HCgb8j1dBMvumCiIxPsy+Q/4S+sdLP3uYvSb7 ++EEJk1GSUsKf36ZDgQUlWkOZYIODz6Kq2HPRmd1xSpK1C+L3JI8fRfdbrS/cqqC6YI0fAH/JcMAZHOJi +OW802fs3mEx2TbL4FXyMztC0pxDNcsF0ngsNk5YmqiwgfzGpIxMXCxY41WUUVV0U/KKQim8jSctm1Jao +Rore05vVCDTaMsnvJwUxJwvJWlR5p94sV7x6FhyVngGrR3qWuK2IEhWklOKAy4bXcoyW1oytyfdkpsn/ +LC1Nvp2ThODD8ehuNP1tNDUVMIVtADSEfqV2mrVfxl39Ci1J+fr/3b7Yqm7pnKKEic8lR4+RHmuIlCT4 +z+dR+uTDqQsbst74cOaKbv8fiGEf3i9cUNs/F9sf5PbVrQ8fFwtFRl4UrVN4gTN4gffw0oef4QU+wAvA +C3y0OspBEUmwakQ7ZlQOREzCL9AQcl8vKuEzGDRhy85eAEjpYAAk8+TPfnmK5Gct0o2bqNpsRHlBa+nF +KFMgbukv4nwtJhF5fBam3CbOzvE+pSSxLdeMd3Ft3E+4wFTc+60jYiglPFKqJT5qiomFb6gmt9vKaZql +euL7v6agJm6oKKU4rKS4Sg9grvdLnpkXpU+O214WAVmta+k7hoHlbzUalMGnx2zpk9YBXsByhBpCBq2q +AtT7fbCK+97V+PZmOlvOpsPJ3eXNdKwOVYSEpVQUVpfI8gi+HcnlPHpTYlDTxkBcbGtFp8nKcsH6u1WS +L82q/n3tNo5Q12/mC1NKZ7dwagVCSFt3OMWBvqBxHrV9rIx4ez/9dWQbBlILWsHQ+yfG2X3yOUmfEhjA +CkUMF8n2ZtlCLtcO4HOa41pGbNYG5jKO6L4qsveyLIH78r588KpctQlF4WzflgRMfTZoulKORVuVR7MQ +2Xalk76ssrpNQozlMRbJEYUhxYx5oEayHAj3ykRRdVa2rkWm7JpsdWQ1THvYLcLvqznFPVyaXBEPvnlx +rjo1OTTVo1Y9/d0/Aw1xQEIMj4jhENJEDZAL+CO4bExCmZqEiju/6iYAMflV9AMV6s3eqaeArU0+Jayy +nA9XlzB+qCgry0t3FIqVBjd914on1YzJiDkQTWDMsQTcnCxqe28bxkJsUxwYiRe+YyoKSv0imsq0IYda +THbmrI0gdfdKYHj3Doyhb7XRrEmlxAZu7b3BQG0j7lpL5UxXpKfWQPftUA1r6TMUy5eU6m3owdpjPUGz +iAvhxr2E21YI0oSlog1K13Y1Xx4fHCxbbjlXdsGy7z6TLCPJ+ifHaqqyt/6Gnh4RF09RQf2xheKgr1Ix +yaB67SmLFIMVTWPYcJ75x8eMo+BzusV0FaVPXpDGx+j4r6cnH/7y88nx6dnpx48nIqdvCSoQPqEtYgEl +GffQY5pziRORR4ro8/FjRDIdf96Gx0Z5vbXDlDsdY2ANAwhT7rEsItzuet26Frb81wvnJwvnz2cfPjo9 +8XG6cIyvs9rX+4XTeGMq2pk8LhiTlfiS07NyeOaYD5uSt1V7NCwiSd1tJbU2SpLHjdQbquz8p7MPH/cU +qPeik/6bzCtHR+p8GCM8ISKMEd94qyhNqeB5LPSswsOgDj3oel3oQbhn3Bf2y7FMlObhKkIUA4oIYpj5 +amCAuRyHc5EepJAkCcmWhDmKiscIT74an18ub6c3D78vby4vRVXpBiXJZUbTL89dH7rpatXd9aWMoosQ +yxASJlqTsElmcphKUhAxyOBkH5XL++vrg3RWeRQpSgWV3hSRaJ0nFTWxg+lR8R5kmsPvVDroKXW6Wqmy +l3BSvguAbQyyHb8uoJ71H7TaUuNV1tvDNWkzPcRmv1VrXIR1VVDc381uxi7cTm9+u7oYTeHudnR+dXl1 +DtPR+c30Ama/347ujFnd5XI6uriajs5nNqOBCyF72yVZHCJGA48kIf5ys5KXEvhpMICjU/jjD0Fm39be +SYZFcUjksILRQD6ThYxDnDM1bN+gLYYgjWPEWoMMaI0DK30sVzThjAY9y7V6Qq+yHzbVn43Gt/93Nqgp +9Q1D/CcAAP//qJ7f15ciAAA= `, }, "/": { isDir: true, - local: "js", + local: "pkg/js", }, } diff --git a/nameservers/nameservers.go b/pkg/nameservers/nameservers.go similarity index 100% rename from nameservers/nameservers.go rename to pkg/nameservers/nameservers.go diff --git a/normalize/importTransform_test.go b/pkg/normalize/importTransform_test.go similarity index 100% rename from normalize/importTransform_test.go rename to pkg/normalize/importTransform_test.go diff --git a/normalize/validate.go b/pkg/normalize/validate.go similarity index 88% rename from normalize/validate.go rename to pkg/normalize/validate.go index 2bec1ea9f..6d917cdd6 100644 --- a/normalize/validate.go +++ b/pkg/normalize/validate.go @@ -6,8 +6,8 @@ import ( "strings" "github.com/StackExchange/dnscontrol/models" + "github.com/StackExchange/dnscontrol/pkg/transform" "github.com/StackExchange/dnscontrol/providers" - "github.com/StackExchange/dnscontrol/transform" "github.com/miekg/dns" "github.com/miekg/dns/dnsutil" ) @@ -44,7 +44,7 @@ func checkTarget(target string) error { } // validateRecordTypes list of valid rec.Type values. Returns true if this is a real DNS record type, false means it is a pseudo-type used internally. 
-func validateRecordTypes(rec *models.RecordConfig, domain string) error { +func validateRecordTypes(rec *models.RecordConfig, domain string, pTypes []string) error { var validTypes = map[string]bool{ "A": true, "AAAA": true, @@ -55,9 +55,22 @@ func validateRecordTypes(rec *models.RecordConfig, domain string) error { "NS": true, "ALIAS": false, } - - if _, ok := validTypes[rec.Type]; !ok { - return fmt.Errorf("Unsupported record type (%v) domain=%v name=%v", rec.Type, domain, rec.Name) + _, ok := validTypes[rec.Type] + if !ok { + cType := providers.GetCustomRecordType(rec.Type) + if cType == nil { + return fmt.Errorf("Unsupported record type (%v) domain=%v name=%v", rec.Type, domain, rec.Name) + } + for _, providerType := range pTypes { + if providerType != cType.Provider { + return fmt.Errorf("Custom record type %s is not compatible with provider type %s", rec.Type, providerType) + } + } + //it is ok. Lets replace the type with real type and add metadata to say we checked it + rec.Metadata["orig_custom_type"] = rec.Type + if cType.RealType != "" { + rec.Type = cType.RealType + } } return nil } @@ -128,6 +141,10 @@ func checkTargets(rec *models.RecordConfig, domain string) (errs []error) { check(checkTarget(target)) case "TXT", "IMPORT_TRANSFORM": default: + if rec.Metadata["orig_custom_type"] != "" { + //it is a valid custom type. We perform no validation on target + return + } errs = append(errs, fmt.Errorf("Unimplemented record type (%v) domain=%v name=%v", rec.Type, domain, rec.Name)) } @@ -207,21 +224,34 @@ type Warning struct { } func NormalizeAndValidateConfig(config *models.DNSConfig) (errs []error) { + ptypeMap := map[string]string{} + for _, p := range config.DNSProviders { + ptypeMap[p.Name] = p.Type + } for _, domain := range config.Domains { + pTypes := []string{} + for p := range domain.DNSProviders { + pType, ok := ptypeMap[p] + if !ok { + errs = append(errs, fmt.Errorf("%s uses undefined DNS provider %s", domain.Name, p)) + } else { + pTypes = append(pTypes, pType) + } + } + // Normalize Nameservers. for _, ns := range domain.Nameservers { ns.Name = dnsutil.AddOrigin(ns.Name, domain.Name) ns.Name = strings.TrimRight(ns.Name, ".") } - // Normalize Records. 
for _, rec := range domain.Records { if rec.TTL == 0 { rec.TTL = models.DefaultTTL } // Validate the unmodified inputs: - if err := validateRecordTypes(rec, domain.Name); err != nil { + if err := validateRecordTypes(rec, domain.Name, pTypes); err != nil { errs = append(errs, err) } if err := checkLabel(rec.Name, rec.Type, domain.Name); err != nil { diff --git a/normalize/validate_test.go b/pkg/normalize/validate_test.go similarity index 100% rename from normalize/validate_test.go rename to pkg/normalize/validate_test.go diff --git a/pkg/spflib/parse.go b/pkg/spflib/parse.go new file mode 100644 index 000000000..52a3c5ee4 --- /dev/null +++ b/pkg/spflib/parse.go @@ -0,0 +1,107 @@ +package spflib + +import ( + "fmt" + "strings" + + "github.com/StackExchange/dnscontrol/pkg/dnsresolver" +) + +type SPFRecord struct { + Lookups int + Parts []*SPFPart +} + +type SPFPart struct { + Text string + Lookups int + IncludeRecord *SPFRecord +} + +func Lookup(target string, dnsres dnsresolver.DnsResolver) (string, error) { + txts, err := dnsres.GetTxt(target) + if err != nil { + return "", err + } + var result []string + for _, txt := range txts { + if strings.HasPrefix(txt, "v=spf1 ") { + result = append(result, txt) + } + } + if len(result) == 0 { + return "", fmt.Errorf("%s has no spf TXT records", target) + } + if len(result) != 1 { + return "", fmt.Errorf("%s has multiple spf TXT records", target) + } + return result[0], nil +} + +var qualifiers = map[byte]bool{ + '?': true, + '~': true, + '-': true, + '+': true, +} + +func Parse(text string, dnsres dnsresolver.DnsResolver) (*SPFRecord, error) { + if !strings.HasPrefix(text, "v=spf1 ") { + return nil, fmt.Errorf("Not an spf record") + } + parts := strings.Split(text, " ") + rec := &SPFRecord{} + for _, part := range parts[1:] { + p := &SPFPart{Text: part} + if qualifiers[part[0]] { + part = part[1:] + } + rec.Parts = append(rec.Parts, p) + if part == "all" { + //all. nothing else matters. + break + } else if strings.HasPrefix(part, "a") || strings.HasPrefix(part, "mx") { + rec.Lookups++ + p.Lookups = 1 + } else if strings.HasPrefix(part, "ip4:") || strings.HasPrefix(part, "ip6:") { + //ip address, 0 lookups + continue + } else if strings.HasPrefix(part, "include:") { + rec.Lookups++ + includeTarget := strings.TrimPrefix(part, "include:") + subRecord, err := Lookup(includeTarget, dnsres) + if err != nil { + return nil, err + } + p.IncludeRecord, err = Parse(subRecord, dnsres) + if err != nil { + return nil, fmt.Errorf("In included spf: %s", err) + } + rec.Lookups += p.IncludeRecord.Lookups + p.Lookups = p.IncludeRecord.Lookups + 1 + } else { + return nil, fmt.Errorf("Unsupported spf part %s", part) + } + + } + return rec, nil +} + +// DumpSPF outputs an SPFRecord and related data for debugging purposes. 
+func DumpSPF(rec *SPFRecord, indent string) { + fmt.Printf("%sTotal Lookups: %d\n", indent, rec.Lookups) + fmt.Print(indent + "v=spf1") + for _, p := range rec.Parts { + fmt.Print(" " + p.Text) + } + fmt.Println() + indent += "\t" + for _, p := range rec.Parts { + if p.Lookups > 0 { + fmt.Println(indent + p.Text) + } + if p.IncludeRecord != nil { + DumpSPF(p.IncludeRecord, indent+"\t") + } + } +} diff --git a/pkg/spflib/parse_test.go b/pkg/spflib/parse_test.go new file mode 100644 index 000000000..b6c1513ee --- /dev/null +++ b/pkg/spflib/parse_test.go @@ -0,0 +1,30 @@ +package spflib + +import ( + "strings" + "testing" + + "github.com/StackExchange/dnscontrol/pkg/dnsresolver" +) + +func TestParse(t *testing.T) { + dnsres, err := dnsresolver.NewResolverPreloaded("testdata-dns1.json") + if err != nil { + t.Fatal(err) + } + rec, err := Parse(strings.Join([]string{"v=spf1", + "ip4:198.252.206.0/24", + "ip4:192.111.0.0/24", + "include:_spf.google.com", + "include:mailgun.org", + "include:spf-basic.fogcreek.com", + "include:mail.zendesk.com", + "include:servers.mcsv.net", + "include:sendgrid.net", + "include:spf.mtasv.net", + "~all"}, " "), dnsres) + if err != nil { + t.Fatal(err) + } + DumpSPF(rec, "") +} diff --git a/pkg/spflib/testdata-dns1.json b/pkg/spflib/testdata-dns1.json new file mode 100644 index 000000000..15f8266fe --- /dev/null +++ b/pkg/spflib/testdata-dns1.json @@ -0,0 +1,64 @@ +{ + "_netblocks.google.com": { + "txt": [ + "v=spf1 ip4:64.18.0.0/20 ip4:64.233.160.0/19 ip4:66.102.0.0/20 ip4:66.249.80.0/20 ip4:72.14.192.0/18 ip4:74.125.0.0/16 ip4:108.177.8.0/21 ip4:173.194.0.0/16 ip4:207.126.144.0/20 ip4:209.85.128.0/17 ip4:216.58.192.0/19 ip4:216.239.32.0/19 ~all" + ] + }, + "_netblocks2.google.com": { + "txt": [ + "v=spf1 ip6:2001:4860:4000::/36 ip6:2404:6800:4000::/36 ip6:2607:f8b0:4000::/36 ip6:2800:3f0:4000::/36 ip6:2a00:1450:4000::/36 ip6:2c0f:fb50:4000::/36 ~all" + ] + }, + "_netblocks3.google.com": { + "txt": [ + "v=spf1 ip4:172.217.0.0/19 ip4:108.177.96.0/19 ~all" + ] + }, + "_spf.google.com": { + "txt": [ + "v=spf1 include:_netblocks.google.com include:_netblocks2.google.com include:_netblocks3.google.com ~all" + ] + }, + "mail.zendesk.com": { + "txt": [ + "v=spf1 ip4:192.161.144.0/20 ip4:185.12.80.0/22 ip4:96.46.150.192/27 ip4:174.137.46.0/24 ip4:188.172.128.0/20 ip4:216.198.0.0/18 ~all" + ] + }, + "mailgun.org": { + "txt": [ + "google-site-verification=FIGVOKZm6lQFDBJaiC2DdwvBy8TInunoGCt-1gnL4PA", + "v=spf1 include:spf1.mailgun.org include:spf2.mailgun.org ~all" + ] + }, + "sendgrid.net": { + "txt": [ + "google-site-verification=NxyooVvVaIgddVa23KTlOEuVPuhffcDqJFV8RzWrAys", + "v=spf1 ip4:167.89.0.0/17 ip4:208.117.48.0/20 ip4:50.31.32.0/19 ip4:198.37.144.0/20 ip4:198.21.0.0/21 ip4:192.254.112.0/20 ip4:168.245.0.0/17 ~all" + ] + }, + "servers.mcsv.net": { + "txt": [ + "v=spf1 ip4:205.201.128.0/20 ip4:198.2.128.0/18 ip4:148.105.8.0/21 ?all" + ] + }, + "spf-basic.fogcreek.com": { + "txt": [ + "v=spf1 ip4:64.34.80.172 -all" + ] + }, + "spf.mtasv.net": { + "txt": [ + "v=spf1 ip4:50.31.156.96/27 ip4:104.245.209.192/26 ~all" + ] + }, + "spf1.mailgun.org": { + "txt": [ + "v=spf1 ip4:173.193.210.32/27 ip4:50.23.218.192/27 ip4:174.37.226.64/27 ip4:208.43.239.136/30 ip4:184.173.105.0/24 ip4:184.173.153.0/24 ip4:104.130.122.0/23 ip4:146.20.112.0/26 ~all" + ] + }, + "spf2.mailgun.org": { + "txt": [ + "v=spf1 ip4:209.61.151.0/24 ip4:166.78.68.0/22 ip4:198.61.254.0/23 ip4:192.237.158.0/23 ip4:23.253.182.0/23 ip4:104.130.96.0/28 ip4:146.20.113.0/24 ip4:146.20.191.0/24 ~all" + ] + } +} 
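
For context on the two new packages above: `pkg/dnsresolver` is a record/replay TXT resolver (`NewResolverLive` caches live answers and writes them to a JSON fixture on `Close`; `NewResolverPreloaded` replays that fixture with no network access), and `pkg/spflib` parses SPF records and counts lookups against either resolver. A minimal usage sketch follows, mirroring `cmd/spftest/main.go` and `pkg/spflib/parse_test.go`; the fixture filename `spf-fixture.json` is just an example, not a file added by this change.

```go
package main

import (
	"fmt"

	"github.com/StackExchange/dnscontrol/pkg/dnsresolver"
	"github.com/StackExchange/dnscontrol/pkg/spflib"
)

func main() {
	// Record mode: live TXT lookups are cached in memory and written to
	// the named JSON fixture when Close() is called.
	live := dnsresolver.NewResolverLive("spf-fixture.json") // example filename
	spf, err := spflib.Lookup("_spf.google.com", live)
	if err != nil {
		panic(err)
	}
	rec, err := spflib.Parse(spf, live)
	if err != nil {
		panic(err)
	}
	spflib.DumpSPF(rec, "") // prints the total lookup count and the include tree
	live.Close()            // writes spf-fixture.json

	// Replay mode: the fixture answers the same queries offline, which is
	// how parse_test.go uses testdata-dns1.json.
	pre, err := dnsresolver.NewResolverPreloaded("spf-fixture.json")
	if err != nil {
		panic(err)
	}
	fmt.Println(pre.GetTxt("_spf.google.com"))
}
```

Recording once and replaying from JSON keeps the SPF parsing test hermetic, which is why `parse_test.go` ships with `testdata-dns1.json` instead of hitting live DNS.
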
diff --git a/transform/transform.go b/pkg/transform/transform.go similarity index 100% rename from transform/transform.go rename to pkg/transform/transform.go diff --git a/transform/transform_test.go b/pkg/transform/transform_test.go similarity index 100% rename from transform/transform_test.go rename to pkg/transform/transform_test.go diff --git a/providers/cloudflare/cloudflareProvider.go b/providers/cloudflare/cloudflareProvider.go index 895eff201..a847cf592 100644 --- a/providers/cloudflare/cloudflareProvider.go +++ b/providers/cloudflare/cloudflareProvider.go @@ -9,9 +9,9 @@ import ( "time" "github.com/StackExchange/dnscontrol/models" + "github.com/StackExchange/dnscontrol/pkg/transform" "github.com/StackExchange/dnscontrol/providers" "github.com/StackExchange/dnscontrol/providers/diff" - "github.com/StackExchange/dnscontrol/transform" "github.com/miekg/dns/dnsutil" ) @@ -33,13 +33,20 @@ Domain level metadata available: - ip_conversions */ +func init() { + providers.RegisterDomainServiceProviderType("CLOUDFLAREAPI", newCloudflare, providers.CanUseAlias) + providers.RegisterCustomRecordType("CF_REDIRECT", "CLOUDFLAREAPI", "") + providers.RegisterCustomRecordType("CF_TEMP_REDIRECT", "CLOUDFLAREAPI", "") +} + type CloudflareApi struct { - ApiKey string `json:"apikey"` - ApiUser string `json:"apiuser"` - domainIndex map[string]string - nameservers map[string][]string - ipConversions []transform.IpConversion - ignoredLabels []string + ApiKey string `json:"apikey"` + ApiUser string `json:"apiuser"` + domainIndex map[string]string + nameservers map[string][]string + ipConversions []transform.IpConversion + ignoredLabels []string + manageRedirects bool } func labelMatches(label string, matches []string) bool { @@ -51,6 +58,7 @@ func labelMatches(label string, matches []string) bool { } return false } + func (c *CloudflareApi) GetNameservers(domain string) ([]*models.Nameserver, error) { if c.domainIndex == nil { if err := c.fetchDomainList(); err != nil { @@ -89,6 +97,13 @@ func (c *CloudflareApi) GetDomainCorrections(dc *models.DomainConfig) ([]*models records = append(records[:i], records[i+1:]...) } } + if c.manageRedirects { + prs, err := c.getPageRules(id, dc.Name) + if err != nil { + return nil, err + } + records = append(records, prs...) + } for _, rec := range dc.Records { if rec.Type == "ALIAS" { rec.Type = "CNAME" @@ -103,19 +118,45 @@ func (c *CloudflareApi) GetDomainCorrections(dc *models.DomainConfig) ([]*models corrections := []*models.Correction{} for _, d := range del { - corrections = append(corrections, c.deleteRec(d.Existing.Original.(*cfRecord), id)) + ex := d.Existing + if ex.Type == "PAGE_RULE" { + corrections = append(corrections, &models.Correction{ + Msg: d.String(), + F: func() error { return c.deletePageRule(ex.Original.(*pageRule).ID, id) }, + }) + + } else { + corrections = append(corrections, c.deleteRec(ex.Original.(*cfRecord), id)) + } } for _, d := range create { - corrections = append(corrections, c.createRec(d.Desired, id)...) + des := d.Desired + if des.Type == "PAGE_RULE" { + corrections = append(corrections, &models.Correction{ + Msg: d.String(), + F: func() error { return c.createPageRule(id, des.Target) }, + }) + } else { + corrections = append(corrections, c.createRec(des, id)...) 
+ } } for _, d := range mod { - e, rec := d.Existing.Original.(*cfRecord), d.Desired - proxy := e.Proxiable && rec.Metadata[metaProxy] != "off" - corrections = append(corrections, &models.Correction{ - Msg: d.String(), - F: func() error { return c.modifyRecord(id, e.ID, proxy, rec) }, - }) + rec := d.Desired + ex := d.Existing + if rec.Type == "PAGE_RULE" { + corrections = append(corrections, &models.Correction{ + Msg: d.String(), + F: func() error { return c.updatePageRule(ex.Original.(*pageRule).ID, id, rec.Target) }, + }) + } else { + e := ex.Original.(*cfRecord) + proxy := e.Proxiable && rec.Metadata[metaProxy] != "off" + corrections = append(corrections, &models.Correction{ + Msg: d.String(), + F: func() error { return c.modifyRecord(id, e.ID, proxy, rec) }, + }) + } } return corrections, nil } @@ -163,10 +204,14 @@ func (c *CloudflareApi) preprocessConfig(dc *models.DomainConfig) error { } } + currentPrPrio := 1 + // Normalize the proxy setting for each record. // A and CNAMEs: Validate. If null, set to default. // else: Make sure it wasn't set. Set to default. - for _, rec := range dc.Records { + // iterate backwards so first defined page rules have highest priority + for i := len(dc.Records) - 1; i >= 0; i-- { + rec := dc.Records[i] if rec.Metadata == nil { rec.Metadata = map[string]string{} } @@ -193,6 +238,23 @@ func (c *CloudflareApi) preprocessConfig(dc *models.DomainConfig) error { rec.Metadata[metaProxy] = val } } + // CF_REDIRECT record types. Encode target as $FROM,$TO,$PRIO,$CODE + if rec.Type == "CF_REDIRECT" || rec.Type == "CF_TEMP_REDIRECT" { + if !c.manageRedirects { + return fmt.Errorf("you must add 'manage_redirects: true' metadata to cloudflare provider to use CF_REDIRECT records") + } + parts := strings.Split(rec.Target, ",") + if len(parts) != 2 { + return fmt.Errorf("Invalid data specified for cloudflare redirect record") + } + code := 301 + if rec.Type == "CF_TEMP_REDIRECT" { + code = 302 + } + rec.Target = fmt.Sprintf("%s,%d,%d", rec.Target, currentPrPrio, code) + currentPrPrio++ + rec.Type = "PAGE_RULE" + } } // look for ip conversions and transform records @@ -224,7 +286,7 @@ func newCloudflare(m map[string]string, metadata json.RawMessage) (providers.DNS api.ApiUser, api.ApiKey = m["apiuser"], m["apikey"] // check api keys from creds json file if api.ApiKey == "" || api.ApiUser == "" { - return nil, fmt.Errorf("Cloudflare apikey and apiuser must be provided.") + return nil, fmt.Errorf("cloudflare apikey and apiuser must be provided") } err := api.fetchDomainList() @@ -234,30 +296,30 @@ func newCloudflare(m map[string]string, metadata json.RawMessage) (providers.DNS if len(metadata) > 0 { parsedMeta := &struct { - IPConversions string `json:"ip_conversions"` - IgnoredLabels []string `json:"ignored_labels"` + IPConversions string `json:"ip_conversions"` + IgnoredLabels []string `json:"ignored_labels"` + ManageRedirects bool `json:"manage_redirects"` }{} err := json.Unmarshal([]byte(metadata), parsedMeta) if err != nil { return nil, err } + api.manageRedirects = parsedMeta.ManageRedirects // ignored_labels: for _, l := range parsedMeta.IgnoredLabels { api.ignoredLabels = append(api.ignoredLabels, l) } // parse provider level metadata - api.ipConversions, err = transform.DecodeTransformTable(parsedMeta.IPConversions) - if err != nil { - return nil, err + if len(parsedMeta.IPConversions) > 0 { + api.ipConversions, err = transform.DecodeTransformTable(parsedMeta.IPConversions) + if err != nil { + return nil, err + } } } return api, nil } -func init() { - 
providers.RegisterDomainServiceProviderType("CLOUDFLAREAPI", newCloudflare, providers.CanUseAlias) -} - // Used on the "existing" records. type cfRecord struct { ID string `json:"id"` diff --git a/providers/cloudflare/preprocess_test.go b/providers/cloudflare/preprocess_test.go index b30396ef4..f23e458c0 100644 --- a/providers/cloudflare/preprocess_test.go +++ b/providers/cloudflare/preprocess_test.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/StackExchange/dnscontrol/models" - "github.com/StackExchange/dnscontrol/transform" + "github.com/StackExchange/dnscontrol/pkg/transform" ) func newDomainConfig() *models.DomainConfig { diff --git a/providers/cloudflare/rest.go b/providers/cloudflare/rest.go index 3a9e58ceb..eed8a92cf 100644 --- a/providers/cloudflare/rest.go +++ b/providers/cloudflare/rest.go @@ -5,15 +5,22 @@ import ( "encoding/json" "fmt" "net/http" + "time" + + "strings" + + "strconv" "github.com/StackExchange/dnscontrol/models" ) const ( - baseURL = "https://api.cloudflare.com/client/v4/" - zonesURL = baseURL + "zones/" - recordsURL = zonesURL + "%s/dns_records/" - singleRecordURL = recordsURL + "%s" + baseURL = "https://api.cloudflare.com/client/v4/" + zonesURL = baseURL + "zones/" + recordsURL = zonesURL + "%s/dns_records/" + pageRulesURL = zonesURL + "%s/pagerules/" + singlePageRuleURL = pageRulesURL + "%s" + singleRecordURL = recordsURL + "%s" ) // get list of domains for account. Cache so the ids can be looked up from domain name @@ -231,6 +238,99 @@ func (c *CloudflareApi) get(endpoint string, target interface{}) error { return decoder.Decode(target) } +func (c *CloudflareApi) getPageRules(id string, domain string) ([]*models.RecordConfig, error) { + url := fmt.Sprintf(pageRulesURL, id) + data := pageRuleResponse{} + if err := c.get(url, &data); err != nil { + return nil, fmt.Errorf("Error fetching page rule list from cloudflare: %s", err) + } + if !data.Success { + return nil, fmt.Errorf("Error fetching page rule list cloudflare: %s", stringifyErrors(data.Errors)) + } + recs := []*models.RecordConfig{} + for _, pr := range data.Result { + // only interested in forwarding rules. 
Lets be very specific, and skip anything else + if len(pr.Actions) != 1 || len(pr.Targets) != 1 { + continue + } + if pr.Actions[0].ID != "forwarding_url" { + continue + } + err := json.Unmarshal([]byte(pr.Actions[0].Value), &pr.ForwardingInfo) + if err != nil { + return nil, err + } + var thisPr = pr + recs = append(recs, &models.RecordConfig{ + Name: "@", + NameFQDN: domain, + Type: "PAGE_RULE", + //$FROM,$TO,$PRIO,$CODE + Target: fmt.Sprintf("%s,%s,%d,%d", pr.Targets[0].Constraint.Value, pr.ForwardingInfo.URL, pr.Priority, pr.ForwardingInfo.StatusCode), + Original: thisPr, + TTL: 1, + }) + } + return recs, nil +} + +func (c *CloudflareApi) deletePageRule(recordID, domainID string) error { + endpoint := fmt.Sprintf(singlePageRuleURL, domainID, recordID) + req, err := http.NewRequest("DELETE", endpoint, nil) + if err != nil { + return err + } + c.setHeaders(req) + _, err = handleActionResponse(http.DefaultClient.Do(req)) + return err +} + +func (c *CloudflareApi) updatePageRule(recordID, domainID string, target string) error { + if err := c.deletePageRule(recordID, domainID); err != nil { + return err + } + return c.createPageRule(domainID, target) +} + +func (c *CloudflareApi) createPageRule(domainID string, target string) error { + endpoint := fmt.Sprintf(pageRulesURL, domainID) + return c.sendPageRule(endpoint, "POST", target) +} + +func (c *CloudflareApi) sendPageRule(endpoint, method string, data string) error { + //from to priority code + parts := strings.Split(data, ",") + priority, _ := strconv.Atoi(parts[2]) + code, _ := strconv.Atoi(parts[3]) + fwdInfo := &pageRuleFwdInfo{ + StatusCode: code, + URL: parts[1], + } + dat, _ := json.Marshal(fwdInfo) + pr := &pageRule{ + Status: "active", + Priority: priority, + Targets: []pageRuleTarget{ + {Target: "url", Constraint: pageRuleConstraint{Operator: "matches", Value: parts[0]}}, + }, + Actions: []pageRuleAction{ + {ID: "forwarding_url", Value: json.RawMessage(dat)}, + }, + } + buf := &bytes.Buffer{} + enc := json.NewEncoder(buf) + if err := enc.Encode(pr); err != nil { + return err + } + req, err := http.NewRequest(method, endpoint, buf) + if err != nil { + return err + } + c.setHeaders(req) + _, err = handleActionResponse(http.DefaultClient.Do(req)) + return err +} + func stringifyErrors(errors []interface{}) string { dat, err := json.Marshal(errors) if err != nil { @@ -244,6 +344,7 @@ type recordsResponse struct { Result []*cfRecord `json:"result"` ResultInfo pagingInfo `json:"result_info"` } + type basicResponse struct { Success bool `json:"success"` Errors []interface{} `json:"errors"` @@ -253,6 +354,43 @@ type basicResponse struct { } `json:"result"` } +type pageRuleResponse struct { + basicResponse + Result []*pageRule `json:"result"` + ResultInfo pagingInfo `json:"result_info"` +} + +type pageRule struct { + ID string `json:"id,omitempty"` + Targets []pageRuleTarget `json:"targets"` + Actions []pageRuleAction `json:"actions"` + Priority int `json:"priority"` + Status string `json:"status"` + ModifiedOn time.Time `json:"modified_on,omitempty"` + CreatedOn time.Time `json:"created_on,omitempty"` + ForwardingInfo *pageRuleFwdInfo `json:"-"` +} + +type pageRuleTarget struct { + Target string `json:"target"` + Constraint pageRuleConstraint `json:"constraint"` +} + +type pageRuleConstraint struct { + Operator string `json:"operator"` + Value string `json:"value"` +} + +type pageRuleAction struct { + ID string `json:"id"` + Value json.RawMessage `json:"value"` +} + +type pageRuleFwdInfo struct { + URL string `json:"url"` + StatusCode 
int `json:"status_code"` +} + type zoneResponse struct { basicResponse Result []struct { diff --git a/providers/namedotcom/namedotcom.md b/providers/namedotcom/namedotcom.md deleted file mode 100644 index 9988c73eb..000000000 --- a/providers/namedotcom/namedotcom.md +++ /dev/null @@ -1,48 +0,0 @@ -## name.com Provider - -### required config - -In your providers config json file you must provide your name.com api username and access token: - -``` - "yourNameDotComProviderName":{ - "apikey": "yourApiKeyFromName.com-klasjdkljasdlk235235235235", - "apiuser": "yourUsername" - } -``` - -In order to get api access you need to [apply for access](https://www.name.com/reseller/apply) - -### example dns config js (registrar only): - -``` -var NAMECOM = NewRegistrar("myNameCom","NAMEDOTCOM"); - -var mynameServers = [ - NAMESERVER("bill.ns.cloudflare.com"), - NAMESERVER("fred.ns.cloudflare.com") -]; - -D("example.tld",NAMECOM,myNameServers - //records handled by another provider... -); -``` - -### example config (registrar and records managed by namedotcom) - -``` -var NAMECOM = NewRegistrar("myNameCom","NAMEDOTCOM"); -var NAMECOMDSP = NewDSP("myNameCom","NAMEDOTCOM") - -D("exammple.tld", NAMECOM, NAMECOMDSP, - //ns[1-4].name.com used by default as nameservers - - //override default ttl of 300s - DefaultTTL(3600), - - A("test","1.2.3.4"), - - //override ttl for one record only - CNAME("foo","some.otherdomain.tld.",TTL(100)) -) -``` \ No newline at end of file diff --git a/providers/providers.go b/providers/providers.go index cf9b3d9cc..6941422f4 100644 --- a/providers/providers.go +++ b/providers/providers.go @@ -138,3 +138,24 @@ func init() { return None{}, nil }) } + +type CustomRType struct { + Name string + Provider string + RealType string +} + +// RegisterCustomRecordType registers a record type that is only valid for one provider. +// provider is the registered type of provider this is valid with +// name is the record type as it will appear in the js. (should be something like $PROVIDER_FOO) +// realType is the record type it will be replaced with after validation +func RegisterCustomRecordType(name, provider, realType string) { + customRecordTypes[name] = &CustomRType{Name: name, Provider: provider, RealType: realType} +} + +// GetCustomRecordType returns a registered custom record type, or nil if none +func GetCustomRecordType(rType string) *CustomRType { + return customRecordTypes[rType] +} + +var customRecordTypes = map[string]*CustomRType{} diff --git a/vendor/github.com/robertkrimen/otto/README.markdown b/vendor/github.com/robertkrimen/otto/README.markdown index 30f734ea3..a1ae7d1ae 100644 --- a/vendor/github.com/robertkrimen/otto/README.markdown +++ b/vendor/github.com/robertkrimen/otto/README.markdown @@ -88,7 +88,7 @@ Set a Go function that returns something useful ```go vm.Set("twoPlus", func(call otto.FunctionCall) otto.Value { right, _ := call.Argument(0).ToInteger() - return, _ := vm.ToValue(2 + right) + result, _ := vm.ToValue(2 + right) return result }) ``` @@ -114,7 +114,7 @@ http://godoc.org/github.com/robertkrimen/otto/parser Parse and return an AST ```go -filenamee := "" // A filename is optional +filename := "" // A filename is optional src := ` // Sample xyzzy example (function(){ @@ -167,6 +167,7 @@ The following are some limitations with otto: * "use strict" will parse, but does nothing. * The regular expression engine (re2/regexp) is not fully compatible with the ECMA5 specification. + * Otto targets ES5. ES6 features (eg: Typed Arrays) are not supported. 
### Regular Expression Incompatibility diff --git a/vendor/github.com/robertkrimen/otto/builtin.go b/vendor/github.com/robertkrimen/otto/builtin.go index 83f715083..256ee3c55 100644 --- a/vendor/github.com/robertkrimen/otto/builtin.go +++ b/vendor/github.com/robertkrimen/otto/builtin.go @@ -70,7 +70,7 @@ func digitValue(chr rune) int { } func builtinGlobal_parseInt(call FunctionCall) Value { - input := strings.TrimSpace(call.Argument(0).string()) + input := strings.Trim(call.Argument(0).string(), builtinString_trim_whitespace) if len(input) == 0 { return NaNValue() } @@ -153,7 +153,8 @@ var parseFloat_matchValid = regexp.MustCompile(`[0-9eE\+\-\.]|Infinity`) func builtinGlobal_parseFloat(call FunctionCall) Value { // Caveat emptor: This implementation does NOT match the specification - input := strings.TrimSpace(call.Argument(0).string()) + input := strings.Trim(call.Argument(0).string(), builtinString_trim_whitespace) + if parseFloat_matchBadSpecial.MatchString(input) { return NaNValue() } diff --git a/vendor/github.com/robertkrimen/otto/otto.go b/vendor/github.com/robertkrimen/otto/otto.go index 7d9065851..b5b528d53 100644 --- a/vendor/github.com/robertkrimen/otto/otto.go +++ b/vendor/github.com/robertkrimen/otto/otto.go @@ -132,6 +132,7 @@ The following are some limitations with otto: * "use strict" will parse, but does nothing. * The regular expression engine (re2/regexp) is not fully compatible with the ECMA5 specification. + * Otto targets ES5. ES6 features (eg: Typed Arrays) are not supported. Regular Expression Incompatibility diff --git a/vendor/github.com/robertkrimen/otto/runtime.go b/vendor/github.com/robertkrimen/otto/runtime.go index 6ebb1f982..7d29ecca0 100644 --- a/vendor/github.com/robertkrimen/otto/runtime.go +++ b/vendor/github.com/robertkrimen/otto/runtime.go @@ -302,7 +302,11 @@ func (self *_runtime) convertCallParameter(v Value, t reflect.Type) reflect.Valu } if t.Kind() == reflect.Interface { - iv := reflect.ValueOf(v.export()) + e := v.export() + if e == nil { + return reflect.Zero(t) + } + iv := reflect.ValueOf(e) if iv.Type().AssignableTo(t) { return iv } @@ -352,20 +356,52 @@ func (self *_runtime) convertCallParameter(v Value, t reflect.Type) reflect.Valu tt := t.Elem() - for i := int64(0); i < l; i++ { - p, ok := o.property[strconv.FormatInt(i, 10)] - if !ok { - continue + if o.class == "Array" { + for i := int64(0); i < l; i++ { + p, ok := o.property[strconv.FormatInt(i, 10)] + if !ok { + continue + } + + e, ok := p.value.(Value) + if !ok { + continue + } + + ev := self.convertCallParameter(e, tt) + + s.Index(int(i)).Set(ev) + } + } else if o.class == "GoArray" { + + var gslice bool + switch o.value.(type) { + case *_goSliceObject: + gslice = true + case *_goArrayObject: + gslice = false } - e, ok := p.value.(Value) - if !ok { - continue + for i := int64(0); i < l; i++ { + var p *_property + if gslice { + p = goSliceGetOwnProperty(o, strconv.FormatInt(i, 10)) + } else { + p = goArrayGetOwnProperty(o, strconv.FormatInt(i, 10)) + } + if p == nil { + continue + } + + e, ok := p.value.(Value) + if !ok { + continue + } + + ev := self.convertCallParameter(e, tt) + + s.Index(int(i)).Set(ev) } - - ev := self.convertCallParameter(e, tt) - - s.Index(int(i)).Set(ev) } return s diff --git a/vendor/github.com/robertkrimen/otto/value_boolean.go b/vendor/github.com/robertkrimen/otto/value_boolean.go index 3040f4163..b631507b0 100644 --- a/vendor/github.com/robertkrimen/otto/value_boolean.go +++ b/vendor/github.com/robertkrimen/otto/value_boolean.go @@ -4,6 +4,7 @@ import ( 
"fmt" "math" "reflect" + "unicode/utf16" ) func (value Value) bool() bool { @@ -32,6 +33,8 @@ func (value Value) bool() bool { return true case string: return 0 != len(value) + case []uint16: + return 0 != len(utf16.Decode(value)) } if value.IsObject() { return true diff --git a/vendor/github.com/robertkrimen/otto/value_number.go b/vendor/github.com/robertkrimen/otto/value_number.go index 870bf115b..8cbf136d2 100644 --- a/vendor/github.com/robertkrimen/otto/value_number.go +++ b/vendor/github.com/robertkrimen/otto/value_number.go @@ -11,7 +11,7 @@ import ( var stringToNumberParseInteger = regexp.MustCompile(`^(?:0[xX])`) func parseNumber(value string) float64 { - value = strings.TrimSpace(value) + value = strings.Trim(value, builtinString_trim_whitespace) if value == "" { return 0 diff --git a/vendor/vendor.json b/vendor/vendor.json index 78b8eb23e..599669bfb 100644 --- a/vendor/vendor.json +++ b/vendor/vendor.json @@ -303,10 +303,10 @@ "revisionTime": "2016-04-18T18:49:04Z" }, { - "checksumSHA1": "UH75lsKCrVFdCZvJchkAPo2QXjw=", + "checksumSHA1": "EqyHXBcg5cWi4ERsMXN6g1opi1o=", "path": "github.com/robertkrimen/otto", - "revision": "7d9cbc2befca39869eb0e5bcb0f44c0692c2f8ff", - "revisionTime": "2016-07-28T22:04:12Z" + "revision": "21ec96599b1279b5673e4df0097dd56bb8360068", + "revisionTime": "2017-04-24T10:46:44Z" }, { "checksumSHA1": "qgziiO3/QDVJMKw2nGrUbC8QldY=",