Minimal parametric JSON file store in Go
package main

import (
	"encoding/json"
	"io"
	"os"
	"path/filepath"
	"sync"
)

type (
	Name    string
	Habitat []string
)

type Fish struct {
	ID         string
	Scientific Name
	English    Name
	Swedish    Name
	Habitat    Habitat
}

func main() {
	if err := run("fishes", os.Stdout); err != nil {
		panic(err)
	}
}

func run(dir string, out io.Writer) error {
	cacheDir, err := os.UserCacheDir()
	if err != nil {
		return err
	}

	path := filepath.Join(cacheDir, dir)

	fishes, err := Store[Fish](path)
	if err != nil {
		return err
	}

	// Data grabbed from https://raw.githubusercontent.com/You-now-Who/dataset/refs/heads/main/List%20of%20fishes%20found%20in%20Sweden/List%20of%20fishes%20found%20in%20Sweden.csv
	fishes.Set("abborre", Fish{
		ID:         "abborre",
		Scientific: "Perca fluviatilis",
		English:    "European perch",
		Swedish:    "Abborre",
		Habitat: Habitat{
			"fresh",
			"brackish",
		},
	})

	fishes.Set("gädda", Fish{
		ID:         "gädda",
		Scientific: "Esox lucius",
		English:    "Pike",
		Swedish:    "Gädda",
		Habitat: Habitat{
			"fresh",
			"brackish",
		},
	})

	fishes.Set("makrill", Fish{
		ID:         "makrill",
		Scientific: "Scomber scombrus",
		English:    "Atlantic mackerel",
		Swedish:    "Makrill",
		Habitat: Habitat{
			"marine",
		},
	})

	a, err := fishes.Get("abborre")
	if err != nil {
		return err
	}

	g, err := fishes.Get("gädda")
	if err != nil {
		return err
	}

	m, err := fishes.Get("makrill")
	if err != nil {
		return err
	}

	return encodeJSON(out, []Fish{a, g, m})
}

// Disk is a minimal on-disk store that keeps one JSON file per value of type T.
type Disk[T any] struct {
	mu   sync.RWMutex
	path string
}

// Store creates the directory at path (if needed) and returns a Disk for T.
func Store[T any](path string) (*Disk[T], error) {
	perm := os.ModeDir | os.ModePerm

	if err := os.MkdirAll(path, perm); err != nil {
		return nil, err
	}

	return &Disk[T]{path: path}, nil
}

// Set writes v to <path>/<id>.json, overwriting any existing file.
func (d *Disk[T]) Set(id string, v T) error {
	d.mu.Lock()
	defer d.mu.Unlock()

	f, err := os.Create(d.name(id))
	if err != nil {
		return err
	}
	defer f.Close()

	return encodeJSON(f, v)
}

// Get reads and decodes <path>/<id>.json into a value of type T.
func (d *Disk[T]) Get(id string) (v T, err error) {
	d.mu.RLock()
	defer d.mu.RUnlock()

	f, err := os.Open(d.name(id))
	if err != nil {
		return v, err
	}
	defer f.Close()

	return v, json.NewDecoder(f).Decode(&v)
}

func (d *Disk[T]) name(id string) string {
	return filepath.Join(d.path, id+".json")
}

func encodeJSON(w io.Writer, v any) error {
	enc := json.NewEncoder(w)
	enc.SetIndent("", " ")

	return enc.Encode(v)
}
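Since this first variant encodes the Fish value directly, with the exported field names as keys, ~/.cache/fishes/abborre.json should end up containing something along these lines:

{
 "ID": "abborre",
 "Scientific": "Perca fluviatilis",
 "English": "European perch",
 "Swedish": "Abborre",
 "Habitat": [
  "fresh",
  "brackish"
 ]
}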
A variant of the above where the .json files wrap the provided payload in an object that also carries an id and a created_at timestamp.
package main

import (
	"encoding/json"
	"io"
	"os"
	"path/filepath"
	"sync"
	"time"
)

type (
	Name    string
	Habitat []string
)

type Fish struct {
	Scientific Name
	English    Name
	Swedish    Name
	Habitat    Habitat
}

func main() {
	if err := run("fishes", os.Stdout); err != nil {
		panic(err)
	}
}

func run(dir string, out io.Writer) error {
	cacheDir, err := os.UserCacheDir()
	if err != nil {
		return err
	}

	path := filepath.Join(cacheDir, dir)

	fishes, err := Store[Fish](path)
	if err != nil {
		return err
	}

	// Data grabbed from https://raw.githubusercontent.com/You-now-Who/dataset/refs/heads/main/List%20of%20fishes%20found%20in%20Sweden/List%20of%20fishes%20found%20in%20Sweden.csv
	fishes.Set("abborre", Fish{
		Scientific: "Perca fluviatilis",
		English:    "European perch",
		Swedish:    "Abborre",
		Habitat: Habitat{
			"fresh",
			"brackish",
		},
	})

	fishes.Set("gädda", Fish{
		Scientific: "Esox lucius",
		English:    "Pike",
		Swedish:    "Gädda",
		Habitat: Habitat{
			"fresh",
			"brackish",
		},
	})

	fishes.Set("makrill", Fish{
		Scientific: "Scomber scombrus",
		English:    "Atlantic mackerel",
		Swedish:    "Makrill",
		Habitat: Habitat{
			"marine",
		},
	})

	a, err := fishes.Get("abborre")
	if err != nil {
		return err
	}

	g, err := fishes.Get("gädda")
	if err != nil {
		return err
	}

	m, err := fishes.Get("makrill")
	if err != nil {
		return err
	}

	return encodeJSON(out, []Fish{a, g, m})
}

// Value wraps the stored payload together with its id and creation time.
type Value[Data any] struct {
	ID        string    `json:"id"`
	CreatedAt time.Time `json:"created_at"`
	Data      Data      `json:"data"`
}

// Disk is a minimal on-disk store that keeps one JSON file per value.
type Disk[Data any] struct {
	mu   sync.RWMutex
	path string
	now  func() time.Time
}

// Store creates the directory at path (if needed) and returns a Disk for Data,
// applying any given options.
func Store[Data any](path string, options ...func(*Disk[Data])) (*Disk[Data], error) {
	perm := os.ModeDir | os.ModePerm

	if err := os.MkdirAll(path, perm); err != nil {
		return nil, err
	}

	d := &Disk[Data]{
		path: path,
		now:  time.Now,
	}

	for _, o := range options {
		o(d)
	}

	return d, nil
}

// Set wraps data in a Value and writes it to <path>/<id>.json.
func (d *Disk[Data]) Set(id string, data Data) error {
	d.mu.Lock()
	defer d.mu.Unlock()

	f, err := os.Create(d.name(id))
	if err != nil {
		return err
	}
	defer f.Close()

	return encodeJSON(f, Value[Data]{
		ID:        id,
		Data:      data,
		CreatedAt: d.now(),
	})
}

// Get returns just the payload stored under id.
func (d *Disk[Data]) Get(id string) (Data, error) {
	v, err := d.Val(id)

	return v.Data, err
}

// Val returns the full Value (id, created_at and data) stored under id.
func (d *Disk[Data]) Val(id string) (Value[Data], error) {
	d.mu.RLock()
	defer d.mu.RUnlock()

	var v Value[Data]

	f, err := os.Open(d.name(id))
	if err != nil {
		return v, err
	}
	defer f.Close()

	return v, json.NewDecoder(f).Decode(&v)
}

func (d *Disk[Data]) name(id string) string {
	return filepath.Join(d.path, id+".json")
}

func encodeJSON(w io.Writer, v any) error {
	enc := json.NewEncoder(w)
	enc.SetIndent("", " ")

	return enc.Encode(v)
}
With the resulting ~/.cache/fishes/abborre.json containing:

{
 "id": "abborre",
 "created_at": "2025-01-27T21:36:04.77682727+01:00",
 "data": {
  "Scientific": "Perca fluviatilis",
  "English": "European perch",
  "Swedish": "Abborre",
  "Habitat": [
   "fresh",
   "brackish"
  ]
 }
}
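Store accepts functional options, but the gist defines none. A sketch of a hypothetical option (not part of the original code) that swaps out the store's clock, handy for deterministic created_at values in tests, could be added to the package above like this:

// WithNow is a hypothetical option that replaces the clock used for CreatedAt.
func WithNow[Data any](now func() time.Time) func(*Disk[Data]) {
	return func(d *Disk[Data]) {
		d.now = now
	}
}

// Usage:
//
//	fixed := time.Date(2025, 1, 27, 21, 36, 4, 0, time.Local)
//	fishes, err := Store[Fish](path, WithNow[Fish](func() time.Time { return fixed }))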
And yet another variant with All and Vals methods:
package main

import (
	"encoding/json"
	"io"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"time"
)

type (
	Name    string
	Habitat []string
)

type Fish struct {
	Scientific Name
	English    Name
	Swedish    Name
	Habitat    Habitat
}

func main() {
	if err := run("fishes", os.Stdout); err != nil {
		panic(err)
	}
}

func run(dir string, out io.Writer) error {
	cacheDir, err := os.UserCacheDir()
	if err != nil {
		return err
	}

	path := filepath.Join(cacheDir, dir)

	fishes, err := Store[Fish](path)
	if err != nil {
		return err
	}

	// Data grabbed from https://raw.githubusercontent.com/You-now-Who/dataset/refs/heads/main/List%20of%20fishes%20found%20in%20Sweden/List%20of%20fishes%20found%20in%20Sweden.csv
	fishes.Set("abborre", Fish{
		Scientific: "Perca fluviatilis",
		English:    "European perch",
		Swedish:    "Abborre",
		Habitat: Habitat{
			"fresh",
			"brackish",
		},
	})

	fishes.Set("gädda", Fish{
		Scientific: "Esox lucius",
		English:    "Pike",
		Swedish:    "Gädda",
		Habitat: Habitat{
			"fresh",
			"brackish",
		},
	})

	fishes.Set("makrill", Fish{
		Scientific: "Scomber scombrus",
		English:    "Atlantic mackerel",
		Swedish:    "Makrill",
		Habitat: Habitat{
			"marine",
		},
	})

	list, err := fishes.All()
	if err != nil {
		return err
	}

	return encodeJSON(out, list)
}

// Value wraps the stored payload together with its id and creation time.
type Value[Data any] struct {
	ID        string    `json:"id"`
	CreatedAt time.Time `json:"created_at"`
	Data      Data      `json:"data"`
}

// Disk is a minimal on-disk store that keeps one JSON file per value.
type Disk[Data any] struct {
	mu   sync.RWMutex
	path string
	now  func() time.Time
}

// Store creates the directory at path (if needed) and returns a Disk for Data,
// applying any given options.
func Store[Data any](path string, options ...func(*Disk[Data])) (*Disk[Data], error) {
	perm := os.ModeDir | os.ModePerm

	if err := os.MkdirAll(path, perm); err != nil {
		return nil, err
	}

	d := &Disk[Data]{
		path: path,
		now:  time.Now,
	}

	for _, o := range options {
		o(d)
	}

	return d, nil
}

// Set wraps data in a Value and writes it to <path>/<id>.json.
func (d *Disk[Data]) Set(id string, data Data) error {
	d.mu.Lock()
	defer d.mu.Unlock()

	f, err := os.Create(d.name(id))
	if err != nil {
		return err
	}
	defer f.Close()

	return encodeJSON(f, Value[Data]{
		ID:        id,
		Data:      data,
		CreatedAt: d.now(),
	})
}

// Get returns just the payload stored under id.
func (d *Disk[Data]) Get(id string) (Data, error) {
	v, err := d.Val(id)

	return v.Data, err
}

// All returns the payloads of every value in the store.
func (d *Disk[Data]) All() ([]Data, error) {
	vs, err := d.Vals()
	if err != nil {
		return nil, err
	}

	ds := []Data{}

	for _, v := range vs {
		ds = append(ds, v.Data)
	}

	return ds, nil
}

// Vals reads every file in the store directory, skipping entries that fail to
// open or decode.
func (d *Disk[Data]) Vals() ([]Value[Data], error) {
	d.mu.RLock()
	defer d.mu.RUnlock()

	des, err := os.ReadDir(d.path)
	if err != nil {
		return nil, err
	}

	vs := []Value[Data]{}

	for _, de := range des {
		// Use the unlocked val here instead of Val, since recursive read
		// locking of a sync.RWMutex can deadlock against a waiting writer.
		if v, err := d.val(de.Name()); err == nil {
			vs = append(vs, v)
		}
	}

	return vs, nil
}

// Val returns the full Value (id, created_at and data) stored under id.
func (d *Disk[Data]) Val(id string) (Value[Data], error) {
	d.mu.RLock()
	defer d.mu.RUnlock()

	return d.val(id)
}

// val reads and decodes a single value without taking the lock.
func (d *Disk[Data]) val(id string) (Value[Data], error) {
	var v Value[Data]

	f, err := os.Open(d.name(id))
	if err != nil {
		return v, err
	}
	defer f.Close()

	return v, json.NewDecoder(f).Decode(&v)
}

func (d *Disk[Data]) name(id string) string {
	ext := ".json"

	return filepath.Join(d.path, strings.TrimSuffix(id, ext)+ext)
}

func encodeJSON(w io.Writer, v any) error {
	enc := json.NewEncoder(w)
	enc.SetIndent("", " ")

	return enc.Encode(v)
}
Go Playground: https://go.dev/play/p/b1eteN_FBh5

Now with support for All and Vals methods.
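The run function above only exercises All; a sketch of using Vals instead (as a fragment inside run, assuming the package above) would return the wrapped values, id and created_at included:

	vals, err := fishes.Vals()
	if err != nil {
		return err
	}

	// Each element is a Value[Fish] carrying id, created_at and data.
	return encodeJSON(out, vals)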
I asked deepseek-r1:14b to describe what the latest version does:
The provided Go program is designed to store and retrieve fish data locally in a structured manner. Here's a step-by-step explanation:

1. **Setup and Data Structure**:
   - The program defines a `Fish` struct to hold information about different fish species, including their scientific, English, and Swedish names, along with their habitats.
2. **Storage Mechanism**:
   - It uses a key-value store approach where each fish entry is stored as a separate JSON file in a cache directory (`$HOME/.cache/fishes` by default).
   - Each entry is uniquely identified by an ID (e.g., "abborre").
3. **Storing Fish Data**:
   - The program adds three fish entries to the store using their respective IDs and detailed information.
4. **Retrieving All Entries**:
   - After storing the entries, it retrieves all stored fish data, which includes reading each JSON file in the cache directory.
5. **Output**:
   - Finally, it prints all retrieved fish data as a single JSON array to standard output.

This program is useful for applications needing local persistence of fish data, demonstrating how to use Go's file system and JSON encoding/decoding capabilities effectively.
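Because Disk is parameterized over its data type, nothing in it is specific to Fish. A minimal sketch reusing the same package with a hypothetical Bird type (the type, function and sample data below are illustrative, not part of the gist) could look like this:

// Bird is a hypothetical type, used only to show reuse of the generic store.
type Bird struct {
	Scientific Name
	English    Name
	Swedish    Name
}

func birds() error {
	cacheDir, err := os.UserCacheDir()
	if err != nil {
		return err
	}

	// Separate directory, same Disk implementation.
	store, err := Store[Bird](filepath.Join(cacheDir, "birds"))
	if err != nil {
		return err
	}

	if err := store.Set("koltrast", Bird{
		Scientific: "Turdus merula",
		English:    "Common blackbird",
		Swedish:    "Koltrast",
	}); err != nil {
		return err
	}

	b, err := store.Get("koltrast")
	if err != nil {
		return err
	}

	return encodeJSON(os.Stdout, b)
}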
Thanks a lot