118

The ability to read (and write) a text file into and out of a string array is, I believe, a fairly common requirement. It is also quite useful when starting out with a language, since it removes the initial need to access a database. Does such a function exist in Go?
e.g.

func ReadLines(sFileName string, iMinLines int) ([]string, bool) {

and

func WriteLines(saBuff []string, sFileName string) bool {

I would prefer to use an existing one rather than duplicate.

– brianoh
    Use bufio.Scanner to read lines from a file, see http://stackoverflow.com/a/16615559/1136018 and http://golang.org/pkg/bufio/ – Jack Valmadre Jul 07 '13 at 04:41

6 Answers

155

As of the Go 1.1 release, there is a bufio.Scanner API that can easily read lines from a file. Consider the readLines/writeLines example from the other answers, rewritten with Scanner:

package main

import (
    "bufio"
    "fmt"
    "log"
    "os"
)

// readLines reads a whole file into memory
// and returns a slice of its lines.
func readLines(path string) ([]string, error) {
    file, err := os.Open(path)
    if err != nil {
        return nil, err
    }
    defer file.Close()

    var lines []string
    scanner := bufio.NewScanner(file)
    for scanner.Scan() {
        lines = append(lines, scanner.Text())
    }
    return lines, scanner.Err()
}

// writeLines writes the lines to the given file.
func writeLines(lines []string, path string) error {
    file, err := os.Create(path)
    if err != nil {
        return err
    }
    defer file.Close()

    w := bufio.NewWriter(file)
    for _, line := range lines {
        fmt.Fprintln(w, line)
    }
    return w.Flush()
}

func main() {
    lines, err := readLines("foo.in.txt")
    if err != nil {
        log.Fatalf("readLines: %s", err)
    }
    for i, line := range lines {
        fmt.Println(i, line)
    }

    if err := writeLines(lines, "foo.out.txt"); err != nil {
        log.Fatalf("writeLines: %s", err)
    }
}
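One caveat worth noting: bufio.Scanner limits line length to bufio.MaxScanTokenSize (64 KiB) by default, and Scan stops with bufio.ErrTooLong on longer lines. A minimal sketch of raising that limit inside readLines, assuming Go 1.6+ for Scanner.Buffer (the buffer sizes below are arbitrary):

scanner := bufio.NewScanner(file)
// Start with a 64 KiB buffer, but allow lines up to 1 MiB.
scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024)
for scanner.Scan() {
    lines = append(lines, scanner.Text())
}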
– Kyle Lemons
137

If the file isn't too large, this can be done with the ioutil.ReadFile/os.ReadFile and strings.Split functions like so:

Before Go 1.16

ioutil is deprecated as of Go 1.16.

content, err := ioutil.ReadFile(filename)
if err != nil {
    //Do something
}
lines := strings.Split(string(content), "\n")

Go 1.16 or later

Starting with Go 1.16, you can use the very same code, but with os instead of ioutil.

content, err := os.ReadFile(filename)
if err != nil {
    //Do something
}
lines := strings.Split(string(content), "\n")

You can read the documentation for the ioutil, os, and strings packages.
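Note that when the file ends with a newline, strings.Split leaves an empty string as the last element, and Windows-style line endings leave a trailing \r on every line. A small sketch of trimming the trailing newline before splitting (same assumption as above, i.e. Go 1.16+ for os.ReadFile; TrimRight would also drop genuinely blank lines at the very end of the file):

content, err := os.ReadFile(filename)
if err != nil {
    //Do something
}
// Trim the trailing "\n" (or "\r\n") so the last element is a real line.
lines := strings.Split(strings.TrimRight(string(content), "\r\n"), "\n")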

– yanatan16
34

I cannot update the first answer. Anyway, after the Go 1 release there were some breaking changes, so I have updated it as shown below:

package main

import (
    "bufio"
    "bytes"
    "fmt"
    "io"
    "os"
    "strings"
)

// Read a whole file into the memory and store it as array of lines
func readLines(path string) (lines []string, err error) {
    var (
        file *os.File
        part []byte
        prefix bool
    )
    if file, err = os.Open(path); err != nil {
        return
    }
    defer file.Close()

    reader := bufio.NewReader(file)
    buffer := bytes.NewBuffer(make([]byte, 0))
    for {
        if part, prefix, err = reader.ReadLine(); err != nil {
            break
        }
        buffer.Write(part)
        if !prefix {
            lines = append(lines, buffer.String())
            buffer.Reset()
        }
    }
    if err == io.EOF {
        err = nil
    }
    return
}

func writeLines(lines []string, path string) (err error) {
    var file *os.File

    if file, err = os.Create(path); err != nil {
        return
    }
    defer file.Close()

    for _, item := range lines {
        // Assign to the named return value so the caller sees the error.
        if _, err = file.WriteString(strings.TrimSpace(item) + "\n"); err != nil {
            fmt.Println(err)
            break
        }
    }
    return
}

func main() {
    lines, err := readLines("foo.txt")
    if err != nil {
        fmt.Println("Error: %s\n", err)
        return
    }
    for _, line := range lines {
        fmt.Println(line)
    }
    //array := []string{"7.0", "8.5", "9.1"}
    err = writeLines(lines, "foo2.txt")
    fmt.Println(err)
}
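Note that the writeLines above trims whitespace from every line before writing, which silently changes the data. A shorter sketch that writes the lines verbatim, assuming Go 1.16+ for os.WriteFile (the name writeLinesJoined is mine, not part of the original answer):

// Join the lines and write them in one call; the output always ends with a newline.
func writeLinesJoined(lines []string, path string) error {
    return os.WriteFile(path, []byte(strings.Join(lines, "\n")+"\n"), 0644)
}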
– Bill.Zhuang
19

You can use os.File (which implements the io.Reader interface) with the bufio package for that. However, those packages are built with fixed memory usage in mind (no matter how large the file is) and are quite fast.

Unfortunately this makes reading the whole file into memory a bit more complicated. You can use a bytes.Buffer to join the parts of a line if they exceed the line limit. Anyway, I recommend trying to use the line reader directly in your project (especially if you do not know how large the text file is!). But if the file is small, the following example might be sufficient for you:

package main

import (
    "os"
    "bufio"
    "bytes"
    "fmt"
)

// Read a whole file into the memory and store it as array of lines
func readLines(path string) (lines []string, err os.Error) {
    var (
        file *os.File
        part []byte
        prefix bool
    )
    if file, err = os.Open(path); err != nil {
        return
    }
    reader := bufio.NewReader(file)
    buffer := bytes.NewBuffer(make([]byte, 0, 1024))
    for {
        if part, prefix, err = reader.ReadLine(); err != nil {
            break
        }
        buffer.Write(part)
        if !prefix {
            lines = append(lines, buffer.String())
            buffer.Reset()
        }
    }
    if err == os.EOF {
        err = nil
    }
    return
}

func main() {
    lines, err := readLines("foo.txt")
    if err != nil {
        fmt.Println("Error: %s\n", err)
        return
    }
    for _, line := range lines {
        fmt.Println(line)
    }
}

Another alternative might be to use ioutil.ReadAll (from the io/ioutil package) to read in the complete file at once and do the slicing by line afterwards. I don't give you an explicit example of how to write the lines back to the file, but that's basically an os.Create() followed by a loop similar to the one in the example (see main()).
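A small sketch of that ReadAll alternative, written against today's API (io.ReadAll since Go 1.16, ioutil.ReadAll before that; the original answer predates Go 1, and readLinesAll is a hypothetical name):

// readLinesAll reads the complete file at once and slices it into lines afterwards.
func readLinesAll(path string) ([]string, error) {
    f, err := os.Open(path)
    if err != nil {
        return nil, err
    }
    defer f.Close()
    data, err := io.ReadAll(f) // ioutil.ReadAll on Go versions before 1.16
    if err != nil {
        return nil, err
    }
    return strings.Split(string(data), "\n"), nil
}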

hyperslug
  • 3,473
  • 1
  • 28
  • 29
tux21b
  • 90,183
  • 16
  • 117
  • 101
  • Thanks for that info. I was more interested in using an existing package to do the whole job, because I think it is quite useful. For example, I want to use Go with persistence of data without using a database initially. Some languages have this I believe. eg. I think Ruby has Readlines which reads an array of strings (from memory) - not that I am particularly a Ruby fan. It's no big deal I guess, I just don't like duplication, but maybe it's just me that wants it. Anyway, I've written a package to do it and perhaps I'll put it on github. These files are typically very small. – brianoh May 05 '11 at 05:53
  • If you want to simply persist any kind of go structures (e.g. an array of strings, integers, maps or more complicated structures), you can simply use the `gob.Encode()` for that. The result is a binary file instead of a newline separated text file. This file can contain all kind of data, can be parsed efficiently, the resulting file will be smaller and you do not have to deal with those newlines and dynamic allocation. So it's probably better suited for you if you just want to persist something for later usage with Go. – tux21b May 05 '11 at 15:35
  • What I want is an array of text lines so that I can change any line (field). These files are very small. When changes are made, the variable-length strings are then eventually written back. It is very flexible and fast for what I want to do. I need the newlines to separate the lines (fields). Perhaps there is a better way, but this appears OK for my purposes at present. I will look at what you suggest later and perhaps change it then. – brianoh May 07 '11 at 13:50
  • 2
    Note that as of r58 (July 2011), the encoding/line package has been removed. "Its functionality is now in bufio." – kristianp Sep 08 '11 at 06:13
4
func readToDisplayUsingFile1(f *os.File) []string {
    defer f.Close()
    reader := bufio.NewReader(f)
    contents, _ := ioutil.ReadAll(reader)
    // strings.Split takes a string separator ("\n"), not a rune.
    return strings.Split(string(contents), "\n")
}

or

func readToDisplayUsingFile1(f *os.File) []string {
    defer f.Close()
    slice := make([]string, 0)

    reader := bufio.NewReader(f)
    for {
        str, err := reader.ReadString('\n')
        if err == io.EOF {
            break
        }
        slice = append(slice, str)
    }
    return slice
}
– Muhammad Soliman
  • 5
    the more "modern" everyone keeps trying to say Go is, the more it just looks like 35 year old bare-minimum library binding code. :\ The fact that simply reading a line-based text file is such a mess only reinforces that Go has a long way to.... go... to be more general purpose. There is a LOT of text, line-based data out there still being very efficiently processed in other langs and platforms. $.02 – ChrisH Jul 28 '19 at 15:08
0

I prefer to write a simpler generic function that reads from an io.Reader (the generic interface implemented by any readable data stream, including files, memory buffers, strings and byte slices, HTTP request bodies, etc.):

func ReadLines(r io.Reader) ([]string, error) {
    var lines []string
    s := bufio.NewScanner(r)
    for s.Scan() {
        lines = append(lines, s.Text())
    }
    if err := s.Err(); err != nil {
        return nil, err
    }
    return lines, nil
}

Example of its use with a string:

const data = `
line 1
line 2
line 3  
`

lines, _ := ReadLines(strings.NewReader(data))
fmt.Println(lines)

run it here: https://go.dev/play/p/NcbEIVmGXpX
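Since it takes an io.Reader, the same function works unchanged with a file; a small sketch (foo.txt is just a placeholder name):

f, err := os.Open("foo.txt")
if err != nil {
    log.Fatal(err)
}
defer f.Close()

lines, err := ReadLines(f)
if err != nil {
    log.Fatal(err)
}
fmt.Println(lines)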

– DrGo