Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Option handling improvements #2991

Merged
merged 5 commits into from
Feb 12, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 21 additions & 5 deletions chunker/chunk.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,18 +48,19 @@ type rdfChunker struct{}
type jsonChunker struct{}

const (
RdfInput int = iota
JsonInput
UnknownFormat int = iota
RdfFormat
JsonFormat
)

func NewChunker(inputFormat int) Chunker {
switch inputFormat {
case RdfInput:
case RdfFormat:
return &rdfChunker{}
case JsonInput:
case JsonFormat:
return &jsonChunker{}
default:
panic("unknown chunker type")
panic("unknown input format")
}
}

Expand Down Expand Up @@ -311,3 +312,18 @@ func IsJSONData(r *bufio.Reader) (bool, error) {

return err == nil, nil
}

// DataFormat deduces a file's data format (RDF, JSON, or unknown) from the
// filename or from the user-provided format option. A trailing ".gz" on the
// filename is ignored, and the file extension takes precedence over the option.
func DataFormat(filename string, format string) int {
	opt := strings.ToLower(format)
	name := strings.TrimSuffix(strings.ToLower(filename), ".gz")

	if strings.HasSuffix(name, ".rdf") || opt == "rdf" {
		return RdfFormat
	}
	if strings.HasSuffix(name, ".json") || opt == "json" {
		return JsonFormat
	}
	return UnknownFormat
}
8 changes: 4 additions & 4 deletions chunker/chunk_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ func TestJSONLoadStart(t *testing.T) {
}

for _, test := range tests {
chunker := NewChunker(JsonInput)
chunker := NewChunker(JsonFormat)
require.Error(t, chunker.Begin(bufioReader(test.json)), test.desc)
}
}
Expand All @@ -64,7 +64,7 @@ func TestJSONLoadReadNext(t *testing.T) {
{"[{}", "malformed array"},
}
for _, test := range tests {
chunker := NewChunker(JsonInput)
chunker := NewChunker(JsonFormat)
reader := bufioReader(test.json)
require.NoError(t, chunker.Begin(reader), test.desc)

Expand Down Expand Up @@ -113,7 +113,7 @@ func TestJSONLoadSuccessFirst(t *testing.T) {
},
}
for _, test := range tests {
chunker := NewChunker(JsonInput)
chunker := NewChunker(JsonFormat)
reader := bufioReader(test.json)
require.NoError(t, chunker.Begin(reader), test.desc)

Expand Down Expand Up @@ -176,7 +176,7 @@ func TestJSONLoadSuccessAll(t *testing.T) {
}`,
}

chunker := NewChunker(JsonInput)
chunker := NewChunker(JsonFormat)
reader := bufioReader(testDoc)

var json *bytes.Buffer
Expand Down
35 changes: 17 additions & 18 deletions dgraph/cmd/bulk/loader.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ import (

"github.com/dgraph-io/badger"
bo "github.com/dgraph-io/badger/options"

"github.com/dgraph-io/dgraph/chunker"
"github.com/dgraph-io/dgraph/protos/pb"
"github.com/dgraph-io/dgraph/schema"
Expand All @@ -41,8 +42,8 @@ import (
)

type options struct {
RDFDir string
JSONDir string
DataFiles string
DataFormat string
SchemaFile string
DgraphsDir string
TmpDir string
Expand Down Expand Up @@ -162,29 +163,27 @@ func (ld *loader) mapStage() {
LRUSize: 1 << 19,
})

var dir, ext string
var loaderType int
if ld.opt.RDFDir != "" {
loaderType = chunker.RdfInput
dir = ld.opt.RDFDir
ext = ".rdf"
} else {
loaderType = chunker.JsonInput
dir = ld.opt.JSONDir
ext = ".json"

}
files := x.FindDataFiles(dir, []string{ext, ext + ".gz"})
files := x.FindDataFiles(ld.opt.DataFiles, []string{".rdf", ".rdf.gz", ".json", ".json.gz"})
if len(files) == 0 {
fmt.Printf("No *%s files found under %s.\n", ext, dir)
fmt.Printf("No data files found in %s.\n", ld.opt.DataFiles)
os.Exit(1)
}

// Because mappers must handle chunks that may be from different input files, they must all
// assume the same data format, either RDF or JSON. Use the one specified by the user or by
// the first load file.
loadType := chunker.DataFormat(files[0], ld.opt.DataFormat)
if loadType == chunker.UnknownFormat {
// Don't try to detect JSON input in bulk loader.
fmt.Printf("Need --format=rdf or --format=json to load %s", files[0])
os.Exit(1)
}

var mapperWg sync.WaitGroup
mapperWg.Add(len(ld.mappers))
for _, m := range ld.mappers {
go func(m *mapper) {
m.run(loaderType)
m.run(loadType)
mapperWg.Done()
}(m)
}
Expand All @@ -201,7 +200,7 @@ func (ld *loader) mapStage() {
r, cleanup := chunker.FileReader(file)
defer cleanup()

chunker := chunker.NewChunker(loaderType)
chunker := chunker.NewChunker(loadType)
x.Check(chunker.Begin(r))
for {
chunkBuf, err := chunker.Chunk(r)
Expand Down
22 changes: 8 additions & 14 deletions dgraph/cmd/bulk/run.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,13 +47,12 @@ func init() {
Bulk.EnvPrefix = "DGRAPH_BULK"

flag := Bulk.Cmd.Flags()
flag.StringP("rdfs", "r", "",
"Location of RDF data to load.")
// would be nice to use -j to match -r, but already used by --num_go_routines
flag.String("jsons", "",
"Location of JSON data to load.")
flag.StringP("files", "f", "",
"Location of *.rdf(.gz) or *.json(.gz) file(s) to load")
flag.StringP("schema_file", "s", "",
"Location of schema file to load.")
flag.String("format", "",
"Specify file format (rdf or json) instead of getting it from filename")
flag.String("out", "out",
"Location to write the final dgraph data directories.")
flag.String("tmp", "tmp",
Expand Down Expand Up @@ -95,8 +94,8 @@ func init() {

func run() {
opt := options{
RDFDir: Bulk.Conf.GetString("rdfs"),
JSONDir: Bulk.Conf.GetString("jsons"),
DataFiles: Bulk.Conf.GetString("files"),
DataFormat: Bulk.Conf.GetString("format"),
SchemaFile: Bulk.Conf.GetString("schema_file"),
DgraphsDir: Bulk.Conf.GetString("out"),
TmpDir: Bulk.Conf.GetString("tmp"),
Expand Down Expand Up @@ -124,13 +123,8 @@ func run() {
fmt.Fprint(os.Stderr, "Schema file must be specified.\n")
os.Exit(1)
}
if opt.RDFDir == "" && opt.JSONDir == "" {
fmt.Fprint(os.Stderr, "RDF or JSON file(s) must be specified.\n")
os.Exit(1)
}
if opt.RDFDir != "" && opt.JSONDir != "" {
fmt.Fprintf(os.Stderr, "Invalid flags: only one of rdfs(%q) of jsons(%q) may be specified.\n",
opt.RDFDir, opt.JSONDir)
if opt.DataFiles == "" {
fmt.Fprint(os.Stderr, "RDF or JSON file(s) location must be specified.\n")
os.Exit(1)
}
if opt.ReduceShards > opt.MapShards {
Expand Down
38 changes: 21 additions & 17 deletions dgraph/cmd/live/run.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import (
"bytes"
"compress/gzip"
"context"
"errors"
"fmt"
"io"
"io/ioutil"
Expand Down Expand Up @@ -49,6 +50,7 @@ import (

type options struct {
dataFiles string
dataFormat string
schemaFile string
dgraph string
zero string
Expand Down Expand Up @@ -82,6 +84,7 @@ func init() {
flag := Live.Cmd.Flags()
flag.StringP("files", "f", "", "Location of *.rdf(.gz) or *.json(.gz) file(s) to load")
flag.StringP("schema", "s", "", "Location of schema file")
flag.String("format", "", "Specify file format (rdf or json) instead of getting it from filename")
flag.StringP("dgraph", "d", "127.0.0.1:9080", "Dgraph alpha gRPC server address")
flag.StringP("zero", "z", "127.0.0.1:5080", "Dgraph zero gRPC server address")
flag.IntP("conc", "c", 10,
Expand Down Expand Up @@ -169,27 +172,24 @@ func (l *loader) uid(val string) string {
}

// processFile forwards a file to the RDF or JSON processor as appropriate
func (l *loader) processFile(ctx context.Context, file string) error {
fmt.Printf("Processing data file %q\n", file)
func (l *loader) processFile(ctx context.Context, filename string) error {
fmt.Printf("Processing data file %q\n", filename)

rd, cleanup := chunker.FileReader(file)
rd, cleanup := chunker.FileReader(filename)
defer cleanup()

var err error
var isJson bool
if strings.HasSuffix(file, ".rdf") || strings.HasSuffix(file, ".rdf.gz") {
err = l.processLoadFile(ctx, rd, chunker.NewChunker(chunker.RdfInput))
} else if strings.HasSuffix(file, ".json") || strings.HasSuffix(file, ".json.gz") {
err = l.processLoadFile(ctx, rd, chunker.NewChunker(chunker.JsonInput))
} else if isJson, err = chunker.IsJSONData(rd); err == nil {
if isJson {
err = l.processLoadFile(ctx, rd, chunker.NewChunker(chunker.JsonInput))
} else {
err = fmt.Errorf("Unable to determine file content format: %s", file)
loadType := chunker.DataFormat(filename, opt.dataFormat)
if loadType == chunker.UnknownFormat {
if isJson, err := chunker.IsJSONData(rd); err == nil {
if isJson {
loadType = chunker.JsonFormat
} else {
return fmt.Errorf("need --format=rdf or --format=json to load %s", filename)
}
}
}

return err
return l.processLoadFile(ctx, rd, chunker.NewChunker(loadType))
}

func (l *loader) processLoadFile(ctx context.Context, rd *bufio.Reader, ck chunker.Chunker) error {
Expand Down Expand Up @@ -287,6 +287,7 @@ func run() error {
x.PrintVersion()
opt = options{
dataFiles: Live.Conf.GetString("files"),
dataFormat: Live.Conf.GetString("format"),
schemaFile: Live.Conf.GetString("schema"),
dgraph: Live.Conf.GetString("dgraph"),
zero: Live.Conf.GetString("zero"),
Expand Down Expand Up @@ -346,11 +347,14 @@ func run() error {
fmt.Printf("Processed schema file %q\n\n", opt.schemaFile)
}

if opt.dataFiles == "" {
return errors.New("RDF or JSON file(s) location must be specified")
}

filesList := x.FindDataFiles(opt.dataFiles, []string{".rdf", ".rdf.gz", ".json", ".json.gz"})
totalFiles := len(filesList)
if totalFiles == 0 {
fmt.Printf("No data files to process\n")
return nil
return fmt.Errorf("No data files found in %s", opt.dataFiles)
} else {
fmt.Printf("Found %d data file(s) to process\n", totalFiles)
}
Expand Down
5 changes: 4 additions & 1 deletion dgraph/cmd/root.go
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,10 @@ func Execute() {
// https://github.com/kubernetes/kubernetes/issues/17162#issuecomment-225596212
x.Check(goflag.CommandLine.Parse([]string{}))

x.Check(RootCmd.Execute())
// Dumping the usage in case of an error makes the error messages harder to see.
RootCmd.SilenceUsage = true

x.CheckfNoLog(RootCmd.Execute())
}

var rootConf = viper.New()
Expand Down
9 changes: 9 additions & 0 deletions x/error.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ package x
import (
"fmt"
"log"
"os"

"github.com/pkg/errors"
)
Expand All @@ -49,12 +50,20 @@ func Checkf(err error, format string, args ...interface{}) {
}
}

// CheckfNoTrace is Checkf without a stack trace: if err is non-nil it logs the
// error message and exits the process. It is a no-op when err is nil.
func CheckfNoTrace(err error) {
	if err != nil {
		// Pass the message as an argument, not as the format string:
		// log.Fatalf(err.Error()) would misinterpret any '%' verbs that
		// happen to appear in the error text (flagged by go vet).
		log.Fatalf("%v", err)
	}
}

// CheckfNoLog exits on error without printing any message (the error is
// assumed to have been reported already, so logging it again would duplicate it).
func CheckfNoLog(err error) {
	if err == nil {
		return
	}
	os.Exit(1)
}

// Check2 acts as convenience wrapper around Check, using the 2nd argument as error.
func Check2(_ interface{}, err error) {
Check(err)
Expand Down