diff --git a/.github/workflows/compare_pr_to_master.v b/.github/workflows/compare_pr_to_master.v index 83af18d625893e..f9f4b35757af80 100755 --- a/.github/workflows/compare_pr_to_master.v +++ b/.github/workflows/compare_pr_to_master.v @@ -12,7 +12,11 @@ fn gcommit() string { } fn r(cmd string) { - os.system(cmd) + res := os.system(cmd) + if res != 0 { + eprintln('> failed running: `${cmd}`') + exit(1) + } } fn xtime(cmd string) { @@ -72,5 +76,6 @@ fn main() { } else { vcompare('./vold', './vnew') } + r('rm -rf hw nv ov hw.exe nv.exe ov.exe hw.c nv.c ov.c') println('Done.') } diff --git a/cmd/tools/vast/vast.v b/cmd/tools/vast/vast.v index 8d03e49963f6e9..accfb5932e38d3 100644 --- a/cmd/tools/vast/vast.v +++ b/cmd/tools/vast/vast.v @@ -1028,7 +1028,8 @@ fn (t Tree) comptime_call(node ast.ComptimeCall) &Node { obj.add_terse('method_name', t.string_node(node.method_name)) obj.add_terse('left', t.expr(node.left)) obj.add_terse('is_vweb', t.bool_node(node.is_vweb)) - obj.add_terse('vweb_tmpl', t.string_node(node.vweb_tmpl.path)) + obj.add_terse('is_veb', t.bool_node(node.is_veb)) + obj.add_terse('veb_tmpl', t.string_node(node.veb_tmpl.path)) obj.add_terse('args_var', t.string_node(node.args_var)) obj.add_terse('has_parens', t.bool_node(node.has_parens)) obj.add_terse('is_embed', t.bool_node(node.is_embed)) diff --git a/cmd/tools/vrepeat.v b/cmd/tools/vrepeat.v index 425cbf6edfcac2..c7361bba6801b8 100644 --- a/cmd/tools/vrepeat.v +++ b/cmd/tools/vrepeat.v @@ -325,11 +325,7 @@ fn (mut context Context) show_diff_summary() { } mut tcomparison := 'base ' if r.atiming.average != base { - if r.atiming.average < base { - tcomparison = '${base / r.atiming.average:4.2f}x ${c(tgreen, 'faster')}' - } else { - tcomparison = '${r.atiming.average / base:4.2f}x ${c(tcyan, 'slower')}' - } + tcomparison = readable_comparison(r.atiming.average, base, cpercent) } gcmd := c(tgreen, r.cmd) println(' ${first_marker}${(i + 1):3} ${comparison:7} ${tcomparison:5} ${r.atiming} `${gcmd}`') @@ -354,6 +350,22 @@ fn (mut context Context) show_diff_summary() { } } +fn readable_comparison(tcurrent f64, tbase f64, cpercent f64) string { + is_same := math.abs(cpercent) <= 0.15 + mut label := '~same~' + if tcurrent < tbase { + if !is_same { + label = c(tgreen, 'faster') + } + return '${tbase / tcurrent:4.2f}x ${label}' + } else { + if !is_same { + label = c(tcyan, 'slower') + } + return '${tcurrent / tbase:4.2f}x ${label}' + } +} + fn (mut context Context) show_summary_title(line string) { mut msg := [line] if context.nmins > 0 { diff --git a/vlib/orm/orm_insert_test.v b/vlib/orm/orm_insert_test.v index 9d8967265c94d6..063aac4a16a9b9 100644 --- a/vlib/orm/orm_insert_test.v +++ b/vlib/orm/orm_insert_test.v @@ -442,6 +442,7 @@ fn test_the_result_of_insert_should_be_the_last_insert_id() { insert address into Address } or { panic(err) } dump(aid1) + assert aid1 == 1 aid2 := sql db { insert address into Address } or { panic(err) } diff --git a/vlib/time/parse.c.v b/vlib/time/parse.c.v index 7d6315fef5b5dc..5dacf5b4ec8af9 100644 --- a/vlib/time/parse.c.v +++ b/vlib/time/parse.c.v @@ -5,6 +5,127 @@ module time import strconv +const date_format_buffer = [u8(`0`), `0`, `0`, `0`, `-`, `0`, `0`, `-`, `0`, `0`]! +const time_format_buffer = [u8(`0`), `0`, `:`, `0`, `0`, `:`, `0`, `0`]! + +fn validate_time_bounds(hour int, minute int, second int, nanosecond int) ! 
{ + if hour < 0 || hour > 23 { + return error('invalid hour: ${hour}') + } + if minute < 0 || minute > 59 { + return error('invalid minute: ${minute}') + } + if second < 0 || second > 59 { + return error('invalid second: ${second}') + } + if nanosecond < 0 || nanosecond > 1_000_000_000 { + return error('invalid nanosecond: ${nanosecond}') + } +} + +fn check_and_extract_time(s string) !(int, int, int, int) { + mut hour_ := 0 + mut minute_ := 0 + mut second_ := 0 + mut nanosecond_ := 0 + + // Check that the string starts in the format "HH:MM:SS" + for i := 0; i < time_format_buffer.len; i++ { + if time_format_buffer[i] == u8(`0`) { + if s[i] < u8(`0`) || s[i] > u8(`9`) { + return error('`HH:MM:SS` match error: expected digit, not `${s[i]}` in position ${i}') + } else { + if i < 2 { + hour_ = hour_ * 10 + (s[i] - u8(`0`)) + } else if i < 5 { + minute_ = minute_ * 10 + (s[i] - u8(`0`)) + } else { + second_ = second_ * 10 + (s[i] - u8(`0`)) + } + } + } else if time_format_buffer[i] != s[i] { + return error('time separator error: expected `:`, not `${[s[i]].bytestr()}` in position ${i}') + } + } + + if s.len == time_format_buffer.len + 1 { + if s[time_format_buffer.len] !in [u8(`Z`), `z`] { + return error('timezone error: expected "Z" or "z" at the end of the string') + } + validate_time_bounds(hour_, minute_, second_, nanosecond_)! + return hour_, minute_, second_, nanosecond_ + } + + if s.len < time_format_buffer.len + 1 { + return error('datetime string is too short') + } + + if s[time_format_buffer.len] == u8(`.`) { + // Check if the string contains the nanoseconds part after the time part + if s.len < time_format_buffer.len + 1 { + return error('datetime string is too short') + } + // Check that the fractional part is in the format ".NNNNNNNNN" + mut nanosecond_digits := 0 + for i := time_format_buffer.len + 1; i < s.len; i++ { + if s[i] < u8(`0`) || s[i] > u8(`9`) { + if s[i] in [u8(`Z`), `z`] { + if i != s.len - 1 { + return error('timezone error: "Z" or "z" can only be at the end of the string') + } + break + } else if s[i] in [u8(`+`), `-`] { + break + } + return error('nanoseconds error: expected digit, not `${s[i]}` in position ${i}') + } + if i < time_format_buffer.len + 1 + 9 { + // the nanoseconds part is limited to 9 digits; extra digits are ignored + nanosecond_ = nanosecond_ * 10 + (s[i] - u8(`0`)) + nanosecond_digits++ + } + } + // pad to 9 digits, so e.g. ".1234" becomes 123400000 nanoseconds + if nanosecond_digits < 9 { + for i := 0; i < 9 - nanosecond_digits; i++ { + nanosecond_ *= 10 + } + } + } + validate_time_bounds(hour_, minute_, second_, nanosecond_)! + return hour_, minute_, second_, nanosecond_ +} + +fn check_and_extract_date(s string) !(int, int, int) { + mut year := 0 + mut month := 0 + mut day := 0 + // Check that the string starts in the format "YYYY-MM-DD" + for i := 0; i < date_format_buffer.len; i++ { + if date_format_buffer[i] == u8(`0`) { + if s[i] < u8(`0`) || s[i] > u8(`9`) { + return error('`YYYY-MM-DD` match error: expected digit, not `${s[i]}` in position ${i}') + } else { + if i < 4 { + year = year * 10 + (s[i] - u8(`0`)) + } else if i < 7 { + month = month * 10 + (s[i] - u8(`0`)) + } else { + day = day * 10 + (s[i] - u8(`0`)) + } + } + } else if date_format_buffer[i] != s[i] { + return error('date separator error: expected "${date_format_buffer[i]}", not `${s[i]}` in position ${i}') + } + } + if month < 1 || month > 12 { + return error('date error: invalid month ${month}') + } + if day < 1 || day > 31 { + return error('date error: invalid day ${day}') + } + return year, month, day +} + // parse_rfc3339 returns the time from a date string in RFC 3339 datetime format. 
// See also https://ijmacd.github.io/rfc3339-iso8601/ for a visual reference of // the differences between ISO-8601 and RFC 3339. @@ -12,48 +133,152 @@ pub fn parse_rfc3339(s string) !Time { if s == '' { return error_invalid_time(0, 'datetime string is empty') } - // Normalize the input before parsing. Good since iso8601 doesn't permit lower case `t` and `z`. - sn := s.replace_each(['t', 'T', 'z', 'Z']) - mut t := parse_iso8601(sn) or { Time{} } - // If parse_iso8601 DID NOT result in default values (i.e. date was parsed correctly) - if t != Time{} { - return t - } - - t_i := sn.index('T') or { -1 } - parts := if t_i != -1 { [sn[..t_i], sn[t_i + 1..]] } else { sn.split(' ') } - - // Check if sn is date only - if !parts[0].contains_any(' Z') && parts[0].contains('-') { - year, month, day := parse_iso8601_date(sn)! - t = new(Time{ - year: year - month: month - day: day - }) - return t - } - // Check if sn is time only - if !parts[0].contains('-') && parts[0].contains(':') { - mut hour_, mut minute_, mut second_, mut microsecond_, mut nanosecond_, mut unix_offset, mut is_local_time := 0, 0, 0, 0, 0, i64(0), true - hour_, minute_, second_, microsecond_, nanosecond_, unix_offset, is_local_time = parse_iso8601_time(parts[0])! - t = new(Time{ - hour: hour_ - minute: minute_ - second: second_ - nanosecond: nanosecond_ - }) - if is_local_time { - return t // Time is already local time + + if s.len < time_format_buffer.len { + return error('string is too short to parse') + } + + mut year, mut month, mut day := 0, 0, 0 + mut hour_, mut minute_, mut second_, mut nanosecond_ := 0, 0, 0, 0 + + is_time := if s.len >= time_format_buffer.len { + s[2] == u8(`:`) && s[5] == u8(`:`) + } else { + false + } + if is_time { + return error('missing date part of RFC 3339') + } + + is_date := if s.len >= date_format_buffer.len { + s[4] == u8(`-`) && s[7] == u8(`-`) + } else { + false + } + + if is_date { + year, month, day = check_and_extract_date(s)! + if s.len == date_format_buffer.len { + return new(Time{ + year: year + month: month + day: day + is_local: false + }) + } + } + + is_datetime := if s.len >= date_format_buffer.len + 1 + time_format_buffer.len + 1 { + is_date && s[10] == u8(`T`) + } else { + false + } + if is_datetime { + // year, month, day := check_and_extract_date(s)! + hour_, minute_, second_, nanosecond_ = check_and_extract_time(s[date_format_buffer.len + 1..])! 
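+ // the slice above skips the "YYYY-MM-DD" date and the "T" separator, so check_and_extract_time only sees the "HH:MM:SS..." part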
+ } + + mut timezone_start_position := 0 + + if is_datetime || is_time { + timezone_start_position = date_format_buffer.len + 1 + time_format_buffer.len + if s[timezone_start_position] == u8(`.`) { + timezone_start_position++ + + for s[timezone_start_position] !in [u8(`Z`), `z`, `+`, `-`] { + timezone_start_position++ + if timezone_start_position == s.len { + return error('timezone error: expected "Z", "z", "+" or "-" after the fractional seconds, but reached the end of the string') + } + } } - mut unix_time := t.unix - if unix_offset < 0 { - unix_time -= (-unix_offset) - } else if unix_offset > 0 { - unix_time += unix_offset + } + + pos := date_format_buffer.len + time_format_buffer.len + 1 + if pos >= s.len { + return error('timezone error: datetime string is too short') + } + if s[pos] !in [u8(`Z`), `z`, `+`, `-`, `.`] { + // RFC 3339 needs a timezone + return error('timezone error: expected "Z" or "z" or "+" or "-" in position ${pos}, not "${[ + s[pos], + ].bytestr()}"') + } else { + if s[s.len - 1] in [u8(`Z`), `z`] { + return new(Time{ + year: year + month: month + day: day + hour: hour_ + minute: minute_ + second: second_ + nanosecond: nanosecond_ + is_local: false + }) + } else { + // Check that the string ends with a numeric timezone offset like "+00:00" + if s.len < date_format_buffer.len + 1 + time_format_buffer.len + 6 { + return error('datetime string is too short') + } + if s[s.len - 3] != u8(`:`) { + return error('timezone separator error: expected ":", not `${[ + s[s.len - 3], + ].bytestr()}` in position ${s.len - 3}') + } + + // Check if it is UTC time + if unsafe { vmemcmp(s.str + s.len - 5, '00:00'.str, 5) == 0 } { + return new(Time{ + year: year + month: month + day: day + hour: hour_ + minute: minute_ + second: second_ + nanosecond: nanosecond_ + is_local: false + }) + } + + is_negative := s[s.len - 6] == u8(`-`) + + // Normalize to UTC by applying the parsed offset via add_seconds below + mut offset_in_minutes := 0 + mut offset_in_hours := 0 + // offset hours + for i := 0; i < 2; i++ { + offset_in_hours = offset_in_hours * 10 + (s[s.len - 5 + i] - u8(`0`)) + } + + // offset minutes + for i := 0; i < 2; i++ { + offset_in_minutes = offset_in_minutes * 10 + (s[s.len - 2 + i] - u8(`0`)) + } + + offset_in_minutes += offset_in_hours * 60 + + if !is_negative { + offset_in_minutes *= -1 + } + + mut time_to_be_returned := new(Time{ + year: year + month: month + day: day + hour: hour_ + minute: minute_ + second: second_ + nanosecond: nanosecond_ + is_local: false + }) + + time_to_be_returned = time_to_be_returned.add_seconds(offset_in_minutes * 60) + + return time_to_be_returned } - t = unix_nanosecond(i64(unix_time), t.nanosecond) - return t } return error_invalid_time(9, 'malformed date') @@ -310,6 +535,6 @@ fn parse_iso8601_time(s string) !(int, int, int, int, int, i64, bool) { if plus_min_z == `+` { unix_offset *= -1 } - // eprintln('parse_iso8601_time s: $s | hour_: $hour_ | minute_: $minute_ | second_: $second_ | microsecond_: $microsecond_ | nanosecond_: $nanosecond_ | unix_offset: $unix_offset | is_local_time: $is_local_time') + // eprintln('parse_iso8601_time s: $s | hour_: $hour_ | minute_: $minute_ | second_: $second_ | microsecond_: $microsecond_ | nanosecond_: $nanosecond_ | unix_offset: $unix_offset | is_local: 
$is_local_time') return hour_, minute_, second_, microsecond_, nanosecond_, unix_offset, is_local_time } diff --git a/vlib/time/parse_test.v b/vlib/time/parse_test.v index 1e76384e9fff8a..26e8480ee33e72 100644 --- a/vlib/time/parse_test.v +++ b/vlib/time/parse_test.v @@ -3,8 +3,7 @@ import time fn test_parse() { s := '2018-01-27 12:48:34' t := time.parse(s) or { - eprintln('> failing format: ${s} | err: ${err}') - assert false + assert false, '> failing format: ${s} | err: ${err}' return } assert t.year == 2018 && t.month == 1 && t.day == 27 && t.hour == 12 && t.minute == 48 @@ -27,8 +26,7 @@ fn test_parse_invalid() { fn test_parse_rfc2822() { s1 := 'Thu, 12 Dec 2019 06:07:45 GMT' t1 := time.parse_rfc2822(s1) or { - eprintln('> failing format: ${s1} | err: ${err}') - assert false + assert false, '> failing format: ${s1} | err: ${err}' return } assert t1.year == 2019 && t1.month == 12 && t1.day == 12 && t1.hour == 6 && t1.minute == 7 @@ -36,8 +34,7 @@ fn test_parse_rfc2822() { assert t1.unix() == 1576130865 s2 := 'Thu 12 Dec 2019 06:07:45 +0800' t2 := time.parse_rfc2822(s2) or { - eprintln('> failing format: ${s2} | err: ${err}') - assert false + assert false, '> failing format: ${s2} | err: ${err}' return } assert t2.year == 2019 && t2.month == 12 && t2.day == 12 && t2.hour == 6 && t2.minute == 7 @@ -73,8 +70,7 @@ fn test_parse_iso8601() { ] for i, format in formats { t := time.parse_iso8601(format) or { - eprintln('>>> failing format: ${format} | err: ${err}') - assert false + assert false, '>>> failing format: ${format} | err: ${err}' continue } year := times[i][0] @@ -97,8 +93,7 @@ fn test_parse_iso8601() { fn test_parse_iso8601_local() { format := '2020-06-05T15:38:06.015959' t := time.parse_iso8601(format) or { - eprintln('> failing format: ${format} | err: ${err}') - assert false + assert false, '> failing format: ${format} | err: ${err}' return } assert t.year == 2020 @@ -135,8 +130,7 @@ fn test_parse_iso8601_invalid() { fn test_parse_iso8601_date_only() { format := '2020-06-05' t := time.parse_iso8601(format) or { - eprintln('> failing format: ${format} | err: ${err}') - assert false + assert false, '> failing format: ${format} | err: ${err}' return } assert t.year == 2020 @@ -150,12 +144,21 @@ fn test_parse_iso8601_date_only() { fn check_invalid_date(s string) { if date := time.parse(s) { - eprintln('invalid date: "${s}" => "${date}"') - assert false + assert false, 'invalid date: "${s}" => "${date}"' } assert true } +fn invalid_rfc3339(s string) string { + if date := time.parse_rfc3339(s) { + assert false, 'invalid date: "${s}" => "${date}"' + } else { + assert true + return err.str() + } + return '' +} + fn test_invalid_dates_should_error_during_parse() { check_invalid_date('-99999-12-20 00:00:00') check_invalid_date('99999-12-20 00:00:00') @@ -175,17 +178,48 @@ fn test_parse_rfc3339() { pairs := [ ['2015-01-06T15:47:32.080254511Z', '2015-01-06 15:47:32.080254'], ['2015-01-06T15:47:32.072697474Z', '2015-01-06 15:47:32.072697'], + ['2015-01-06T15:47:32.1234Z', '2015-01-06 15:47:32.123400'], + ['2015-01-06T15:47:32.001234Z', '2015-01-06 15:47:32.001234'], + ['2015-01-06T15:47:32Z', '2015-01-06 15:47:32.000000'], + ['2015-01-06T15:47:32+00:00', '2015-01-06 15:47:32.000000'], + ['2015-01-06T15:47:32-00:00', '2015-01-06 15:47:32.000000'], + ['2015-01-06T15:47:32-01:00', '2015-01-06 16:47:32.000000'], + ['2015-01-06T15:47:32+01:00', '2015-01-06 14:47:32.000000'], + ['2015-01-06T15:47:32-01:10', '2015-01-06 16:57:32.000000'], + ['2015-01-06T15:47:32+01:10', '2015-01-06 
14:37:32.000000'], + ['2015-01-06T15:47:32.1234-00:00', '2015-01-06 15:47:32.123400'], + ['2015-01-06T15:47:32.1234+01:00', '2015-01-06 14:47:32.123400'], + ['2015-01-06T15:47:32.1234-01:00', '2015-01-06 16:47:32.123400'], + ['2015-01-06T22:59:59-00:10', '2015-01-06 23:09:59.000000'], + ['1979-05-27T07:32:00-08:00', '1979-05-27 15:32:00.000000'], + ['2024-10-19T22:47:08-00:00', '2024-10-19 22:47:08.000000'], + ['2024-10-19T22:47:08.9+00:00', '2024-10-19 22:47:08.900000'], + ['2024-10-20T01:47:08+03:00', '2024-10-19 22:47:08.000000'], + ['2024-10-20T01:47:08.981+03:00', '2024-10-19 22:47:08.981000'], ] for pair in pairs { input, expected := pair[0], pair[1] res := time.parse_rfc3339(input) or { - eprintln('>>> failing input: ${input} | err: ${err}') - assert false + assert false, '>>> failing input: ${input} | err: ${err}' return } output := res.format_ss_micro() assert expected == output } + assert invalid_rfc3339('22:47:08Z') == 'missing date part of RFC 3339' + assert invalid_rfc3339('01:47:08.981+03:00') == 'missing date part of RFC 3339' + assert invalid_rfc3339('2006-01-00') == 'date error: invalid day 0' + assert invalid_rfc3339('2006-01-32') == 'date error: invalid day 32' + assert invalid_rfc3339('2006-01-88') == 'date error: invalid day 88' + assert invalid_rfc3339('2006-00-01') == 'date error: invalid month 0' + assert invalid_rfc3339('2006-13-01') == 'date error: invalid month 13' + assert invalid_rfc3339('2006-77-01') == 'date error: invalid month 77' + assert invalid_rfc3339('2006-01-01T24:47:08Z') == 'invalid hour: 24' + assert invalid_rfc3339('2006-01-01T99:47:08Z') == 'invalid hour: 99' + assert invalid_rfc3339('2006-01-01T23:60:08Z') == 'invalid minute: 60' + assert invalid_rfc3339('2006-01-01T23:99:08Z') == 'invalid minute: 99' + assert invalid_rfc3339('2006-01-01T23:59:60Z') == 'invalid second: 60' + assert invalid_rfc3339('2006-01-01T23:59:99Z') == 'invalid second: 99' } fn test_ad_second_to_parse_result_in_2001() { @@ -205,8 +239,7 @@ fn test_ad_second_to_parse_result_pre_2001() { fn test_parse_format() { mut s := '2018-01-27 12:48:34' mut t := time.parse_format(s, 'YYYY-MM-DD HH:mm:ss') or { - eprintln('> failing format: ${s} | err: ${err}') - assert false + assert false, '> failing format: ${s} | err: ${err}' return } assert t.year == 2018 && t.month == 1 && t.day == 27 && t.hour == 12 && t.minute == 48 @@ -214,8 +247,7 @@ fn test_parse_format() { s = '2018-November-27 12:48:20' t = time.parse_format(s, 'YYYY-MMMM-DD HH:mm:ss') or { - eprintln('> failing format: ${s} | err: ${err}') - assert false + assert false, '> failing format: ${s} | err: ${err}' return } assert t.year == 2018 && t.month == 11 && t.day == 27 && t.hour == 12 && t.minute == 48 @@ -223,8 +255,7 @@ fn test_parse_format() { s = '18-1-2 0:8:2' t = time.parse_format(s, 'YY-M-D H:m:s') or { - eprintln('> failing format: ${s} | err: ${err}') - assert false + assert false, '> failing format: ${s} | err: ${err}' return } assert t.year == 2018 && t.month == 1 && t.day == 2 && t.hour == 0 && t.minute == 8 @@ -233,6 +264,6 @@ fn test_parse_format() { // This should always fail, because we test if M and D allow for a 01 value which they shouldn't s = '2018-01-02 1:8:2' t = time.parse_format(s, 'YYYY-M-D H:m:s') or { return } - eprintln('> failing for datetime: ${s}, the datetime string should not have passed the format "YYYY-M-D H:m:s"') - assert false + + assert false, '> failing for datetime: ${s}, the datetime string should not have passed the format "YYYY-M-D H:m:s"' } diff --git 
a/vlib/toml/checker/checker.v b/vlib/toml/checker/checker.v index 305200b722873f..15ae7ec67555d6 100644 --- a/vlib/toml/checker/checker.v +++ b/vlib/toml/checker/checker.v @@ -17,6 +17,14 @@ pub const allowed_basic_escape_chars = [`u`, `U`, `b`, `t`, `n`, `f`, `r`, `"`, // utf8_max is the largest inclusive value of the Unicodes scalar value ranges. const utf8_max = 0x10FFFF +fn toml_parse_time(s string) !time.Time { + if s.len > 3 && s[2] == `:` { + // complete the partial time, with an arbitrary date: + return time.parse_rfc3339('0001-01-01T' + s) + } + return time.parse_rfc3339(s)! +} + // Checker checks a tree of TOML `ast.Value`'s for common errors. pub struct Checker { pub: @@ -318,8 +326,21 @@ fn (c &Checker) check_date_time(dt ast.DateTime) ! { col: dt.pos.col + split[0].len } })! - // Use V's builtin functionality to validate the string - time.parse_rfc3339(lit) or { + // Append a time offset if one is missing, so the value can still be validated: TOML allows local (offset-less) date-times, while RFC 3339 requires an offset. + mut has_time_offset := false + for ch in lit#[19..] { + if ch in [u8(`-`), `+`, `Z`] { + has_time_offset = true + break + } + } + + mut lit_with_offset := lit + if !has_time_offset { + lit_with_offset += 'Z' + } + + toml_parse_time(lit_with_offset) or { return error(@MOD + '.' + @STRUCT + '.' + @FN + ' "${lit}" is not a valid RFC 3339 Date-Time format string "${err}". In ...${c.excerpt(dt.pos)}...') } @@ -352,8 +373,7 @@ fn (c &Checker) check_date(date ast.Date) ! { return error(@MOD + '.' + @STRUCT + '.' + @FN + ' "${lit}" does not have a valid RFC 3339 day indication in ...${c.excerpt(date.pos)}...') } - // Use V's builtin functionality to validate the string - time.parse_rfc3339(lit) or { + toml_parse_time(lit) or { return error(@MOD + '.' + @STRUCT + '.' + @FN + ' "${lit}" is not a valid RFC 3339 Date format string "${err}". In ...${c.excerpt(date.pos)}...') } @@ -380,8 +400,22 @@ fn (c &Checker) check_time(t ast.Time) ! { return error(@MOD + '.' + @STRUCT + '.' + @FN + ' "${lit}" is not a valid RFC 3339 Time format string in ...${c.excerpt(t.pos)}...') } - // Use V's builtin functionality to validate the time string - time.parse_rfc3339(parts[0]) or { + + // Append a time offset if one is missing, so the value can still be validated: TOML allows local (offset-less) times, while RFC 3339 requires an offset. + mut has_time_offset := false + for ch in parts[0]#[8..] { + if ch in [u8(`-`), `+`, `Z`] { + has_time_offset = true + break + } + } + + mut part_with_offset := parts[0] + if !has_time_offset { + part_with_offset += 'Z' + } + + toml_parse_time(part_with_offset) or { + return error(@MOD + '.' + @STRUCT + '.' + @FN + ' "${lit}" is not a valid RFC 3339 Time format string "${err}". 
In ...${c.excerpt(t.pos)}...') } diff --git a/vlib/v/ast/ast.v b/vlib/v/ast/ast.v index 03e94d1ca37878..ca0830eda0b754 100644 --- a/vlib/v/ast/ast.v +++ b/vlib/v/ast/ast.v @@ -1989,7 +1989,7 @@ pub: mut: is_d_resolved bool pub mut: - vweb_tmpl File + veb_tmpl File left Expr left_type Type result_type Type diff --git a/vlib/v/checker/comptime.v b/vlib/v/checker/comptime.v index 9ce43873d9a7a8..fdd9cc772f5339 100644 --- a/vlib/v/checker/comptime.v +++ b/vlib/v/checker/comptime.v @@ -100,7 +100,7 @@ fn (mut c Checker) comptime_call(mut node ast.ComptimeCall) ast.Type { } mut c2 := new_checker(c.table, pref2) c2.comptime_call_pos = node.pos.pos - c2.check(mut node.vweb_tmpl) + c2.check(mut node.veb_tmpl) c.warnings << c2.warnings c.errors << c2.errors c.notices << c2.notices diff --git a/vlib/v/checker/errors.v b/vlib/v/checker/errors.v index 92c85a3d7fa4f5..2d0df6d5a1115c 100644 --- a/vlib/v/checker/errors.v +++ b/vlib/v/checker/errors.v @@ -50,7 +50,7 @@ fn (mut c Checker) error(message string, pos token.Pos) { mut msg := message.replace('`Array_', '`[]') if c.pref.is_vweb { // Show in which veb action the error occurred (for easier debugging) - veb_action := c.table.cur_fn.name.replace('vweb_tmpl_', '') + veb_action := c.table.cur_fn.name.replace('veb_tmpl_', '') mut j := 0 for _, ch in veb_action { if ch.is_digit() { diff --git a/vlib/v/gen/c/assign.v b/vlib/v/gen/c/assign.v index f2f559bb3f48d8..d1c62c668664e1 100644 --- a/vlib/v/gen/c/assign.v +++ b/vlib/v/gen/c/assign.v @@ -723,10 +723,13 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) { g.write(', ') } mut cloned := false - if g.is_autofree && right_sym.kind in [.array, .string] - && !unwrapped_val_type.has_flag(.shared_f) { - if g.gen_clone_assignment(var_type, val, unwrapped_val_type, false) { - cloned = true + if g.is_autofree { + if right_sym.kind in [.array, .string] && !unwrapped_val_type.has_flag(.shared_f) { + if g.gen_clone_assignment(var_type, val, unwrapped_val_type, false) { + cloned = true + } + } else if right_sym.info is ast.Interface && var_type != ast.error_type { + g.register_free_method(var_type) } } if !cloned { diff --git a/vlib/v/gen/c/auto_free_methods.v b/vlib/v/gen/c/auto_free_methods.v index d45fe97e581c0c..881bdf53aa5f89 100644 --- a/vlib/v/gen/c/auto_free_methods.v +++ b/vlib/v/gen/c/auto_free_methods.v @@ -5,8 +5,19 @@ module c import v.ast import strings +@[inline] +fn (mut g Gen) register_free_method(typ ast.Type) { + if typ.has_flag(.shared_f) { + g.get_free_method(typ.clear_flag(.shared_f).set_nr_muls(0)) + } else { + g.get_free_method(typ) + } +} + fn (mut g Gen) get_free_method(typ ast.Type) string { - g.autofree_methods[typ] = true + if typ in g.autofree_methods { + return g.autofree_methods[typ] + } mut sym := g.table.sym(g.unwrap_generic(typ)) if mut sym.info is ast.Alias { if sym.info.is_import { @@ -16,8 +27,10 @@ fn (mut g Gen) get_free_method(typ ast.Type) string { styp := g.styp(typ).replace('*', '') fn_name := styp_to_free_fn_name(styp) if sym.has_method_with_generic_parent('free') { + g.autofree_methods[typ] = fn_name return fn_name } + g.autofree_methods[typ] = fn_name return fn_name } @@ -43,7 +56,7 @@ fn (mut g Gen) gen_free_method(typ ast.Type) string { sym = g.table.sym(sym.info.parent_type) } } - if sym.has_method_with_generic_parent('free') { + if sym.kind != .interface && sym.has_method_with_generic_parent('free') { return fn_name } @@ -57,6 +70,9 @@ fn (mut g Gen) gen_free_method(typ ast.Type) string { ast.Map { g.gen_free_for_map(objtyp, sym.info, styp, fn_name) } + 
ast.Interface { + g.gen_free_for_interface(sym, sym.info, styp, fn_name) + } else { println(g.table.type_str(typ)) // print_backtrace() @@ -67,6 +83,28 @@ fn (mut g Gen) gen_free_method(typ ast.Type) string { return fn_name } +fn (mut g Gen) gen_free_for_interface(sym ast.TypeSymbol, info ast.Interface, styp string, fn_name string) { + g.definitions.writeln('${g.static_modifier} void ${fn_name}(${styp}* it); // auto') + mut fn_builder := strings.new_builder(128) + defer { + g.auto_fn_definitions << fn_builder.str() + } + fn_builder.writeln('${g.static_modifier} void ${fn_name}(${styp}* it) {') + for t in info.types { + typ_ := g.unwrap_generic(t) + sub_sym := g.table.sym(typ_) + if sub_sym.kind !in [.string, .array, .map, .struct] { + continue + } + if !sub_sym.has_method_with_generic_parent('free') { + continue + } + type_styp := g.gen_type_name_for_free_call(typ_) + fn_builder.writeln('\tif (it->_typ == _${sym.cname}_${sub_sym.cname}_index) { ${type_styp}_free(it->_${sub_sym.cname}); return; }') + } + fn_builder.writeln('}') +} + fn (mut g Gen) gen_free_for_struct(typ ast.Type, info ast.Struct, styp string, fn_name string) { g.definitions.writeln('${g.static_modifier} void ${fn_name}(${styp}* it); // auto') mut fn_builder := strings.new_builder(128) @@ -81,13 +119,9 @@ fn (mut g Gen) gen_free_for_struct(typ ast.Type, info ast.Struct, styp string, f if sym.kind !in [.string, .array, .map, .struct] { continue } - mut field_styp := g.styp(field.typ.set_nr_muls(0).clear_flag(.option)).replace('*', - '') - is_shared := field_styp.starts_with('__shared') + + field_styp := g.gen_type_name_for_free_call(field.typ) is_struct_option := typ.has_flag(.option) - if is_shared { - field_styp = field_styp.all_after('__shared__') - } field_styp_fn_name := if sym.has_method('free') { '${field_styp}_free' } else { @@ -95,7 +129,7 @@ fn (mut g Gen) gen_free_for_struct(typ ast.Type, info ast.Struct, styp string, f } is_field_option := field.typ.has_flag(.option) expects_opt := field_styp_fn_name.starts_with('_option_') - if is_shared { + if field.typ.has_flag(.shared_f) { fn_builder.writeln('\t${field_styp_fn_name}(&(it->${field_name}->val));') } else if is_struct_option { opt_styp := g.base_type(typ) @@ -134,6 +168,14 @@ fn (mut g Gen) gen_free_for_struct(typ ast.Type, info ast.Struct, styp string, f fn_builder.writeln('}') } +fn (mut g Gen) gen_type_name_for_free_call(typ ast.Type) string { + mut styp := g.typ(typ.set_nr_muls(0).clear_flag(.option)).replace('*', '') + if styp.starts_with('__shared') { + styp = styp.all_after('__shared__') + } + return styp +} + fn (mut g Gen) gen_free_for_array(info ast.Array, styp string, fn_name string) { g.definitions.writeln('${g.static_modifier} void ${fn_name}(${styp}* it); // auto') mut fn_builder := strings.new_builder(128) diff --git a/vlib/v/gen/c/cgen.v b/vlib/v/gen/c/cgen.v index 5a5a920cbe369c..8cbd8ee3828da5 100644 --- a/vlib/v/gen/c/cgen.v +++ b/vlib/v/gen/c/cgen.v @@ -245,7 +245,7 @@ mut: cur_fn &ast.FnDecl = unsafe { nil } // same here cur_lock ast.LockExpr cur_struct_init_typ ast.Type - autofree_methods map[ast.Type]bool + autofree_methods map[ast.Type]string generated_free_methods map[ast.Type]bool autofree_scope_stmts []string use_segfault_handler bool = true diff --git a/vlib/v/gen/c/comptime.v b/vlib/v/gen/c/comptime.v index 787eee740c43a5..d8dbe16efd6dd2 100644 --- a/vlib/v/gen/c/comptime.v +++ b/vlib/v/gen/c/comptime.v @@ -87,9 +87,9 @@ fn (mut g Gen) comptime_call(mut node ast.ComptimeCall) { is_x_vweb := ret_sym.cname == 'x__vweb__Result' is_veb 
:= ret_sym.cname == 'veb__Result' - for stmt in node.vweb_tmpl.stmts { + for stmt in node.veb_tmpl.stmts { if stmt is ast.FnDecl { - if stmt.name.starts_with('main.vweb_tmpl') { + if stmt.name.starts_with('main.veb_tmpl') { if is_html { g.inside_vweb_tmpl = true if is_veb { diff --git a/vlib/v/gen/c/fn.v b/vlib/v/gen/c/fn.v index 60fefa16668017..38eab7ea65dc11 100644 --- a/vlib/v/gen/c/fn.v +++ b/vlib/v/gen/c/fn.v @@ -1734,11 +1734,7 @@ fn (mut g Gen) method_call(node ast.CallExpr) { return } } else if node.name == 'free' { - mut rec_type := node.receiver_type - if rec_type.has_flag(.shared_f) { - rec_type = rec_type.clear_flag(.shared_f).set_nr_muls(0) - } - g.get_free_method(rec_type) + g.register_free_method(node.receiver_type) is_free_method = true } mut cast_n := 0 diff --git a/vlib/v/gen/c/testdata/interface_auto_free.c.must_have b/vlib/v/gen/c/testdata/interface_auto_free.c.must_have new file mode 100644 index 00000000000000..41d8ea445035d7 --- /dev/null +++ b/vlib/v/gen/c/testdata/interface_auto_free.c.must_have @@ -0,0 +1,7 @@ +void main__IFoo_free(main__IFoo* it) { + if (it->_typ == _main__IFoo_main__Foo_index) { main__Foo_free(it->_main__Foo); return; } + if (it->_typ == _main__IFoo_array_index) { array_free(it->_array); return; } + if (it->_typ == _main__IFoo_map_index) { map_free(it->_map); return; } + if (it->_typ == _main__IFoo_VAssertMetaInfo_index) { VAssertMetaInfo_free(it->_VAssertMetaInfo); return; } + if (it->_typ == _main__IFoo_MessageError_index) { MessageError_free(it->_MessageError); return; } +} \ No newline at end of file diff --git a/vlib/v/gen/c/testdata/interface_auto_free.vv b/vlib/v/gen/c/testdata/interface_auto_free.vv new file mode 100644 index 00000000000000..c33f61558eca8f --- /dev/null +++ b/vlib/v/gen/c/testdata/interface_auto_free.vv @@ -0,0 +1,20 @@ +// vtest vflags: -autofree +module main + +interface IFoo { + free() +} + +struct Bar { + a int +} + +struct Foo implements IFoo { + Bar +} + +fn (f &Foo) free() {} + +fn main() { + a := IFoo(Foo{}) +} diff --git a/vlib/v/markused/walker.v b/vlib/v/markused/walker.v index accdb62c9b7c8a..8eefe1bfd631f8 100644 --- a/vlib/v/markused/walker.v +++ b/vlib/v/markused/walker.v @@ -286,7 +286,7 @@ fn (mut w Walker) expr(node_ ast.Expr) { ast.ComptimeCall { w.expr(node.left) if node.is_vweb { - w.stmts(node.vweb_tmpl.stmts) + w.stmts(node.veb_tmpl.stmts) } } ast.DumpExpr { diff --git a/vlib/v/parser/comptime.v b/vlib/v/parser/comptime.v index 02ec191964b3d6..493f8fce975870 100644 --- a/vlib/v/parser/comptime.v +++ b/vlib/v/parser/comptime.v @@ -324,7 +324,7 @@ fn (mut p Parser) comptime_call() ast.ComptimeCall { scope: unsafe { nil } is_vweb: true is_veb: is_veb - vweb_tmpl: file + veb_tmpl: file method_name: method_name args_var: literal_string_param args: [arg] diff --git a/vlib/v/parser/parse_type.v b/vlib/v/parser/parse_type.v index a60ce7c64fc734..378751b78de956 100644 --- a/vlib/v/parser/parse_type.v +++ b/vlib/v/parser/parse_type.v @@ -176,7 +176,7 @@ fn (mut p Parser) parse_map_type() ast.Type { return 0 } if value_type.idx() == ast.void_type_idx { - p.error_with_pos('map value type cannot be void', p.tok.pos()) + p.error_with_pos('map value type is missing: use `map[KeyType]ValueType`', p.tok.pos()) return 0 } idx := p.table.find_or_register_map(key_type, value_type) diff --git a/vlib/v/parser/tests/map_init_void.out b/vlib/v/parser/tests/map_init_void.out index fd76d7ee0f0aba..b60477eba58536 100644 --- a/vlib/v/parser/tests/map_init_void.out +++ b/vlib/v/parser/tests/map_init_void.out @@ -1,4 +1,4 @@ 
-vlib/v/parser/tests/map_init_void.vv:2:18: error: map value type cannot be void +vlib/v/parser/tests/map_init_void.vv:2:18: error: map value type is missing: use `map[KeyType]ValueType` 1 | fn main() { 2 | m := map[string]{} | ^ diff --git a/vlib/v/parser/tmpl.v b/vlib/v/parser/tmpl.v index 650e07e5e3f6a2..799bfcbd3c08a0 100644 --- a/vlib/v/parser/tmpl.v +++ b/vlib/v/parser/tmpl.v @@ -75,7 +75,7 @@ fn is_html_open_tag(name string, s string) bool { fn insert_template_code(fn_name string, tmpl_str_start string, line string) string { // HTML, may include `@var` - // escaped by cgen, unless it's a `vweb.RawHtml` string + // escaped by cgen, unless it's a `veb.RawHtml` string trailing_bs := tmpl_str_end + 'sb_${fn_name}.write_u8(92)\n' + tmpl_str_start replace_pairs := ['\\', '\\\\', r"'", "\\'", r'@@', r'@', r'@', r'$', r'$$', r'\@'] mut rline := line.replace_each(replace_pairs) @@ -225,8 +225,9 @@ pub fn (mut p Parser) compile_template_file(template_file string, fn_name string mut source := strings.new_builder(1000) source.writeln(' import strings -// === vweb html template === -fn vweb_tmpl_${fn_name}() string { +import veb as _ +// === veb html template === +fn veb_tmpl_${fn_name}() string { mut sb_${fn_name} := strings.new_builder(${lstartlength})\n ') @@ -438,7 +439,7 @@ fn vweb_tmpl_${fn_name}() string { key := line[pos + 1..end] println('GOT tr key line="${line}" key="${key}"') // source.writeln('\${tr("${key}")}') - line_ = line.replace('%${key}', '\${tr("${key}")}') + line_ = line.replace('%${key}', '\${veb.tr(ctx.lang.str(), "${key}")}') // i += key.len } // println(source.str()) @@ -454,7 +455,7 @@ fn vweb_tmpl_${fn_name}() string { source.writeln('\t_tmpl_res_${fn_name} := sb_${fn_name}.str() ') source.writeln('\treturn _tmpl_res_${fn_name}') source.writeln('}') - source.writeln('// === end of vweb html template_file: ${template_file} ===') + source.writeln('// === end of veb html template_file: ${template_file} ===') result := source.str() $if trace_tmpl_expansion ? { diff --git a/vlib/veb/tr.v b/vlib/veb/tr.v new file mode 100644 index 00000000000000..b3606522166e47 --- /dev/null +++ b/vlib/veb/tr.v @@ -0,0 +1,95 @@ +// Copyright (c) 2019-2024 Alexander Medvednikov. All rights reserved. +// Use of this source code is governed by an MIT license +// that can be found in the LICENSE file. +module veb + +import os + +const tr_map = load_tr_map() + +pub fn raw(s string) RawHtml { + return RawHtml(s) +} + +/* +struct TrData { + data +} + m map[string]TrData + */ + +// This function is run once, on app startup, to set the `tr_map` const. +// m['en']['house'] == 'House' +fn load_tr_map() map[string]map[string]string { + // Find all translation files to figure out how many languages we have and to load the translation map + files := os.walk_ext('translations/', '.tr') + mut res := map[string]map[string]string{} + for tr_path in files { + lang := fetch_lang_from_tr_path(tr_path) + text := os.read_file(tr_path) or { + eprintln('translation file "${tr_path}" failed to load') + return {} + } + x := text.split('-----\n') + for s in x { + // println('val="${val}"') + nl_pos := s.index('\n') or { continue } + key := s[..nl_pos] + val := s[nl_pos..] 
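+ // `key` is the first line of each '-----' separated block; `val` is the rest and still starts with the newline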
+ // v := vals[i + 1] + // println('key="${key}" => val="${v}"') + res[lang][key] = val + // println(val) + } + } + return res +} + +fn fetch_lang_from_tr_path(path string) string { + return path.find_between('/', '.') +} + +// Used by %key in templates +pub fn tr(lang string, key string) string { + res := tr_map[lang][key] + if res == '' { + eprintln('NO TRANSLATION FOR KEY "${key}"') + return key + } + return RawHtml(res) +} + +pub fn tr_plural(lang string, key string, amount int) string { + s := tr_map[lang][key] + if s == '' { + eprintln('NO TRANSLATION FOR KEY "${key}"') + return key + } + if s.contains('|') { + //----- + // goods + // товар|а|ов + vals := s.split('|') + if vals.len != 3 { + return s + } + amount_str := amount.str() + // 1, 21, 121 товар + ending := if amount % 10 == 1 && !amount_str.ends_with('11') { // vals[0] + '' + // 2, 3, 4, 22 товара + } else if amount % 10 == 2 && !amount_str.ends_with('12') { + vals[1] + } else if amount % 10 == 3 && !amount_str.ends_with('13') { + vals[1] + } else if amount % 10 == 4 && !amount_str.ends_with('14') { + vals[1] + } else { + // 5 товаров, 11 товаров etc + vals[2] + } + return vals[0] + ending + } else { + return s + } +} diff --git a/vlib/veb/veb.v b/vlib/veb/veb.v index 5302794fb14848..b2e23c3041c2a8 100644 --- a/vlib/veb/veb.v +++ b/vlib/veb/veb.v @@ -1,3 +1,6 @@ +// Copyright (c) 2019-2024 Alexander Medvednikov. All rights reserved. +// Use of this source code is governed by an MIT license +// that can be found in the LICENSE file. module veb import io