~egtann/json2sql

5da4a46e21c0afbda6c05985676c81e0258cbc5d — Evan Tann 8 months ago a538f49
add row tests, fix line count
5 files changed, 53 insertions(+), 5 deletions(-)

M json2sql.go
M json2sql_test.go
A testdata/rows.json
A testdata/rows.sql
A testdata/rows_debug.sql
M json2sql.go => json2sql.go +5 -5
@@ -81,7 +81,7 @@ func insertRows(
	scn *bufio.Scanner,
	lines int,
) error {
	line := fmt.Sprintf("\nINSERT INTO data (%s) VALUES",
	line := fmt.Sprintf("INSERT INTO data (%s) VALUES",
		escapeKeys(sortedKeys))
	write(out, line)
	for i := 1; scn.Scan(); i++ {


@@ -90,7 +90,7 @@ func insertRows(
			return fmt.Errorf("line %d: %w", i, err)
		}
		values := valuesForKey(sortedKeys, keyTypes, row)
-		if i < lines-1 {
+		if i < lines {
			line = fmt.Sprintf("\t(%s),", values)
		} else {
			line = fmt.Sprintf("\t(%s);", values)


@@ -202,7 +202,7 @@ func streamSchema(
	out io.Writer,
	skipMergeErr bool,
) (map[string]Type, int, error) {
-	lineCount := 1
+	var lineCount int
	keyTypes := map[string]Type{}
	scn := bufio.NewScanner(in)
	buf := make([]byte, 64*1024)


@@ -223,14 +223,14 @@ func streamSchema(
		// can start to work concurrently on different sections.
		row := map[string]interface{}{}
		if err := json.Unmarshal(scn.Bytes(), &row); err != nil {
-			return nil, 0, fmt.Errorf("line %d: %w", lineCount, err)
+			return nil, 0, fmt.Errorf("line %d: %w", lineCount+1, err)
		}
		rowTypes := typesForRow(row)
		if err := mergeTypes(keyTypes, rowTypes); err != nil {
			if skipMergeErr {
				continue
			}
-			return nil, 0, fmt.Errorf("line %d: %w", lineCount, err)
+			return nil, 0, fmt.Errorf("line %d: %w", lineCount+1, err)
		}
	}
	if err := scn.Err(); err != nil {

M json2sql_test.go => json2sql_test.go +41 -0
@@ -55,6 +55,47 @@ func TestStreamSchema(t *testing.T) {
	}
}

+func TestStreamRows(t *testing.T) {
+	t.Parallel()
+
+	testcases := []struct {
+		have  string
+		want  string
+		debug bool
+	}{
+		{have: "rows.json", want: "rows.sql", debug: false},
+		{have: "rows.json", want: "rows_debug.sql", debug: true},
+	}
+	for _, tc := range testcases {
+		tc := tc // Capture reference
+
+		t.Run(tc.have, func(t *testing.T) {
+			t.Parallel()
+
+			have := bytes.TrimSpace(fixture(tc.have))
+			in := bytes.NewReader(have)
+			buf := ""
+			out := bytes.NewBufferString(buf)
+			keyTypes := map[string]Type{
+				"a": TypeInteger,
+				"b": TypeFloat,
+				"c": TypeString,
+				"d": TypeBool,
+			}
+
+			err := streamRows(in, out, keyTypes, 2, tc.debug)
+			if err != nil {
+				t.Fatal(err)
+			}
+			got := out.String()
+			want := fixture(tc.want)
+			if got != string(want) {
+				t.Fatalf("have:\n%s\nwant:\n%s", got, want)
+			}
+		})
+	}
+}
+
// fixture reads from testdata or panics.
func fixture(pth string) []byte {
	pth = filepath.Join("testdata", pth)

A testdata/rows.json => testdata/rows.json +2 -0
@@ -0,0 +1,2 @@
{"a":1,"b":1.5,"c":"x","d":false}
{"a":1,"b":1.5,"c":"x","d":false}

A testdata/rows.sql => testdata/rows.sql +3 -0
@@ -0,0 +1,3 @@
+INSERT INTO data ('a', 'b', 'c', 'd') VALUES
+	(1, 1.5, 'x', 0),
+	(1, 1.5, 'x', 0);

A testdata/rows_debug.sql => testdata/rows_debug.sql +2 -0
@@ -0,0 +1,2 @@
+INSERT INTO data ('a', 'b', 'c', 'd') VALUES (1, 1.5, 'x', 0);
+INSERT INTO data ('a', 'b', 'c', 'd') VALUES (1, 1.5, 'x', 0);
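
For reference, the "i < lines" change above is what decides whether a row gets a trailing comma or the closing semicolon: with i counting scanned rows from 1, the final row is the one where i == lines. A minimal standalone sketch of that termination logic (hypothetical names, not the package's actual code), which reproduces testdata/rows.sql for two rows:

// termination_sketch.go: standalone illustration of the comma/semicolon
// logic behind the "i < lines" fix. Names and structure are hypothetical;
// this is not json2sql's actual code.
package main

import "fmt"

func main() {
	// Two rows, mirroring testdata/rows.json after type conversion.
	values := []string{"1, 1.5, 'x', 0", "1, 1.5, 'x', 0"}
	lines := len(values)

	fmt.Println("INSERT INTO data ('a', 'b', 'c', 'd') VALUES")
	for i := 1; i <= lines; i++ {
		if i < lines {
			// Not the last row: continue the VALUES list with a comma.
			fmt.Printf("\t(%s),\n", values[i-1])
		} else {
			// Last row: close the statement with a semicolon.
			fmt.Printf("\t(%s);\n", values[i-1])
		}
	}
}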