Any suggestions are welcome!

Exercise: Loops and Functions

package main

import (
    "fmt"
    "math"
)

// Sqrt approximates the square root of x with Newton's method:
// start from a guess and repeat z -= (z*z - x) / (2*z) until the
// guess changes by less than 1e-8.
func Sqrt(x float64) float64 {
    z := 1.0
    y := x
    for ; ; z -= (z*z - x) / (2 * z) {
        if math.Abs(z-y) < 1e-8 {
            return z
        }
        y = z
    }
}

func main() {
    fmt.Println(Sqrt(2))
}
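
As an optional sanity check (not part of the exercise), one could swap in a main like the sketch below to print the hand-rolled result next to the standard library's:

func main() {
    for _, x := range []float64{1, 2, 3, 100} {
        // Sqrt and math.Sqrt should agree to many decimal places.
        fmt.Println(x, Sqrt(x), math.Sqrt(x))
    }
}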

Exercise: Slices

package main

import "golang.org/x/tour/pic"

// Pic returns a dy-by-dx image where each pixel value is (x+y)/2,
// one of the value functions suggested by the exercise.
func Pic(dx, dy int) [][]uint8 {
    s := make([][]uint8, dy)
    for y := range s {
        s[y] = make([]uint8, dx)
        for x := range s[y] {
            s[y][x] = uint8((x + y) / 2)
        }
    }
    return s
}

func main() {
    pic.Show(Pic)
}
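
The exercise also suggests x*y and x^y as value functions. A drop-in variant using the XOR pattern (PicXor is just an illustrative name) produces a very different image:

// PicXor uses the x^y value function from the exercise hint.
func PicXor(dx, dy int) [][]uint8 {
    s := make([][]uint8, dy)
    for y := range s {
        s[y] = make([]uint8, dx)
        for x := range s[y] {
            s[y][x] = uint8(x ^ y)
        }
    }
    return s
}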

Exercise: Maps

package main

import (
    "golang.org/x/tour/wc"
    "strings"
)

func WordCount(s string) map[string]int {
    m := make(map[string]int)
    for _, w := range strings.Fields(s) {
        m[w]++
    }
    return m
}

func main() {
    wc.Test(WordCount)
}
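
Since strings.Fields splits the input on any run of whitespace, a direct call such as WordCount("go go gopher") returns map[go:2 gopher:1].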

Exercise: Fibonacci closure

package main

import "fmt"

// fibonacci is a function that returns
// a function that returns an int.
func fibonacci() func() int {
    num0 := 0
    num1 := 1
    return func() int {
        num := num0
        num0, num1 = num1, num0+num1
        return num
    }
}

func main() {
    f := fibonacci()
    // Prints the first ten Fibonacci numbers: 0 1 1 2 3 5 8 13 21 34.
    for i := 0; i < 10; i++ {
        fmt.Println(f())
    }
}

Exercise: Stringers

package main

import "fmt"

type IPAddr [4]byte

// String implements fmt.Stringer, so Printf's %v verb prints the
// address in dotted-quad form.
func (ip IPAddr) String() string {
    return fmt.Sprintf("%d.%d.%d.%d", ip[0], ip[1], ip[2], ip[3])
}

func main() {
    hosts := map[string]IPAddr{
        "loopback":  {127, 0, 0, 1},
        "googleDNS": {8, 8, 8, 8},
    }
    for name, ip := range hosts {
        fmt.Printf("%v: %v\n", name, ip)
    }
}

Exercise: Errors

package main

import (
    "fmt"
    "math"
)

type ErrNegativeSqrt float64

func (e ErrNegativeSqrt) Error() string {
    // Convert e to float64 first: calling fmt.Sprint(e) directly would
    // invoke Error again and recurse forever.
    return fmt.Sprintf("cannot Sqrt negative number: %v", float64(e))
}

// Sqrt approximates the square root of x with Newton's method and
// returns an ErrNegativeSqrt error for negative input.
func Sqrt(x float64) (float64, error) {
    if x < 0 {
        return 0, ErrNegativeSqrt(x)
    }
    z := 1.0
    y := z
    for {
        z -= (z*z - x) / (2 * z)
        if math.Abs(z-y) < 1e-8 {
            return z, nil
        }
        y = z
    }
}

func main() {
    fmt.Println(Sqrt(2))
    fmt.Println(Sqrt(-2))
}

Exercise: Readers

package main

import "golang.org/x/tour/reader"

type MyReader struct{}

// Read emits an endless stream of the ASCII character 'A',
// one byte per call.
func (r MyReader) Read(b []byte) (int, error) {
    b[0] = 'A'
    return 1, nil
}

func main() {
    reader.Validate(MyReader{})
}
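
A common variant, equally valid for reader.Validate, fills the whole buffer on each call instead of writing a single byte:

// Alternative Read: fill the entire slice with 'A' per call.
func (r MyReader) Read(b []byte) (int, error) {
    for i := range b {
        b[i] = 'A'
    }
    return len(b), nil
}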

Exercise: rot13Reader

package main

import (
    "io"
    "os"
    "strings"
)

type rot13Reader struct {
    r io.Reader
}

// Read fills b from the wrapped reader and applies the ROT13
// substitution to the bytes that were actually read.
func (r rot13Reader) Read(b []byte) (int, error) {
    n, err := r.r.Read(b)
    for i, c := range b[:n] {
        if (c >= 'a' && c <= 'm') || (c >= 'A' && c <= 'M') {
            b[i] = c + 13
        } else if (c >= 'n' && c <= 'z') || (c >= 'N' && c <= 'Z') {
            b[i] = c - 13
        }
    }
    return n, err
}

func main() {
    s := strings.NewReader("Lbh penpxrq gur pbqr!")
    r := rot13Reader{s}
    io.Copy(os.Stdout, &r) // prints "You cracked the code!"
}

Exercise: Images

package main

import (
    "golang.org/x/tour/pic"
    "image"
    "image/color"
)

type Image struct {
    x, y int
}

func (r *Image) Bounds() image.Rectangle {
    return image.Rect(0, 0, r.x, r.y)
}

func (r *Image) ColorModel() color.Model {
    return color.RGBAModel
}

// At maps each pixel to a color that varies with its coordinates.
func (r *Image) At(x, y int) color.Color {
    return color.RGBA{uint8(x), uint8(y), 255, 255}
}

func main() {
    m := &Image{256, 256}
    pic.ShowImage(m)
}

Exercise: Equivalent Binary Trees

package main

import (
    "fmt"
    "golang.org/x/tour/tree"
)

// Walk walks the tree t sending all values
// from the tree to the channel ch, then closes ch
// so that callers can range over it.
func Walk(t *tree.Tree, ch chan int) {
    walk(t, ch)
    close(ch)
}

// walk recursively sends the tree's values to ch in sorted order.
func walk(t *tree.Tree, ch chan int) {
    if t == nil {
        return
    }
    walk(t.Left, ch)
    ch <- t.Value
    walk(t.Right, ch)
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
    ch1 := make(chan int)
    ch2 := make(chan int)

    go Walk(t1, ch1)
    go Walk(t2, ch2)

    for v1 := range ch1 {
        v2, ok := <-ch2
        if !ok {
            // t2 finished before t1.
            return false
        }
        if v1 != v2 {
            // Different values at the same position.
            return false
        }
    }

    // t1 is exhausted; the trees match only if t2 is exhausted too.
    _, ok := <-ch2
    return !ok
}

func main() {
    fmt.Println(Same(tree.New(1), tree.New(1)))
    fmt.Println(Same(tree.New(1), tree.New(2)))
}
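
For comparison, a less channel-centric sketch (SameSlices is a made-up name) drains both channels into slices and compares them; it still relies on Walk closing its channel:

func SameSlices(t1, t2 *tree.Tree) bool {
    ch1, ch2 := make(chan int), make(chan int)
    go Walk(t1, ch1)
    go Walk(t2, ch2)

    // Collect both sorted value streams.
    var v1, v2 []int
    for v := range ch1 {
        v1 = append(v1, v)
    }
    for v := range ch2 {
        v2 = append(v2, v)
    }
    if len(v1) != len(v2) {
        return false
    }
    for i := range v1 {
        if v1[i] != v2[i] {
            return false
        }
    }
    return true
}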

Exercise: Web Crawler

Thanks to everyone who contributed to this question!

package main

import (
    "fmt"
    "sync"
)

type Fetcher interface {
    // Fetch returns the body of URL and
    // a slice of URLs found on that page.
    Fetch(url string) (body string, urls []string, err error)
}

var (
    wg  sync.WaitGroup
    mux sync.Mutex // guards m, which is shared by every Crawl goroutine
)

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher, m map[string]int) {
    // DONE: Fetch URLs in parallel.
    // DONE: Don't fetch the same URL twice.
    defer wg.Done()
    if depth <= 0 {
        return
    }

    // Record the URL before fetching; skip it if it was already seen.
    mux.Lock()
    if _, ok := m[url]; ok {
        m[url]++
        mux.Unlock()
        return
    }
    m[url] = 1
    mux.Unlock()

    body, urls, err := fetcher.Fetch(url)
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("found: %s %q\n", url, body)

    for _, u := range urls {
        wg.Add(1)
        go Crawl(u, depth-1, fetcher, m)
    }
}

func main() {
    m := make(map[string]int)
    wg.Add(1)
    Crawl("https://golang.org/", 4, fetcher, m)
    wg.Wait()
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
    body string
    urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
    if res, ok := f[url]; ok {
        return res.body, res.urls, nil
    }
    return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
    "https://golang.org/": &fakeResult{
        "The Go Programming Language",
        []string{
            "https://golang.org/pkg/",
            "https://golang.org/cmd/",
        },
    },
    "https://golang.org/pkg/": &fakeResult{
        "Packages",
        []string{
            "https://golang.org/",
            "https://golang.org/cmd/",
            "https://golang.org/pkg/fmt/",
            "https://golang.org/pkg/os/",
        },
    },
    "https://golang.org/pkg/fmt/": &fakeResult{
        "Package fmt",
        []string{
            "https://golang.org/",
            "https://golang.org/pkg/",
        },
    },
    "https://golang.org/pkg/os/": &fakeResult{
        "Package os",
        []string{
            "https://golang.org/",
            "https://golang.org/pkg/",
        },
    },
}
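
An alternative way to organize the bookkeeping, sketched below with made-up names, is to bundle the visited map and its mutex into one type (much like the SafeCounter in the tour's sync.Mutex section), which avoids package-level globals:

// safeSet records which URLs have been fetched; safe for concurrent use.
type safeSet struct {
    mu   sync.Mutex
    seen map[string]bool
}

// firstVisit reports whether url is new, marking it as seen either way.
func (s *safeSet) firstVisit(url string) bool {
    s.mu.Lock()
    defer s.mu.Unlock()
    if s.seen[url] {
        return false
    }
    s.seen[url] = true
    return true
}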