Closed
51 commits
8a1176c
Replace `core:crypto` usage of `core:os` with `core:os/os2`
Kelimion Oct 27, 2025
0f56ee3
Handle `clone_to_cstring` error on OpenBSD
Kelimion Oct 27, 2025
1f9d2b3
Stub out `core:os/os2` for js_wasm
Kelimion Oct 27, 2025
10a2879
`core:os` -> `core:os/os` for CSV, INI, and XML
Kelimion Oct 27, 2025
4dba43f
Reenable `core:encoding/ini` tests
Kelimion Oct 27, 2025
a2adab1
Convert `core:encoding/hxa`
Kelimion Oct 27, 2025
9b33a35
core:os -> core:os/os for core:image
Kelimion Oct 28, 2025
aad945a
core:os -> core:os/os for core:math/big
Kelimion Oct 28, 2025
6322804
Address wasi errors
Kelimion Oct 28, 2025
39ca418
Add updated PNG example
Kelimion Oct 28, 2025
2cb97ca
Add `loc := #caller_location` to `read_entire_file`
Kelimion Oct 28, 2025
87d3d57
eprintf
Kelimion Oct 28, 2025
9ea18f8
`core:os` -> `core:os/os2` in `core:terminal`
Kelimion Oct 28, 2025
7462ef5
In the middle of porting core:testing
Kelimion Oct 28, 2025
b7b0af1
core:testing Darwin
Kelimion Oct 28, 2025
d1bb592
BSDs
Kelimion Oct 28, 2025
a0719aa
core:text/i18n -> core:os/os2
Kelimion Oct 29, 2025
abc776f
core:text/regex -> core:os/os2
Kelimion Oct 29, 2025
393aa3f
core:text/table -> core:os/os2
Kelimion Oct 29, 2025
ce7e714
gzip -> os2
Kelimion Oct 29, 2025
26e78f3
mem.virtual -> os2
Kelimion Oct 29, 2025
e7fdaa9
core:net -> os2
Kelimion Oct 29, 2025
24536a7
Merge branch 'master' into core_os2_revamp
Kelimion Oct 30, 2025
20e1f54
Fix doc tester
Kelimion Oct 30, 2025
e53f937
Port doc tester to os2 itself as well
Kelimion Oct 30, 2025
d075dc1
core:time/timezone -> os2
Kelimion Oct 30, 2025
b270922
core\unicode\tools -> os2
Kelimion Oct 30, 2025
077f94d
tests/core/io -> os2
Kelimion Oct 30, 2025
3e929c5
core:path/filepath -> os2
Kelimion Oct 30, 2025
3f74851
Merge branch 'master' into core_os2_revamp
Kelimion Oct 30, 2025
b4c2cee
Merge branch 'master' into core_os2_revamp
Kelimion Oct 30, 2025
803883f
Merge branch 'master' into core_os2_revamp
Kelimion Oct 30, 2025
b1e8574
Add more `filepath` to `os2`
Kelimion Oct 30, 2025
2fd35d5
Add `glob` + `match` to os2
Kelimion Oct 31, 2025
92c96e7
merge master
Kelimion Oct 31, 2025
f5e0d85
Merge branch 'master' into core_os2_revamp
Kelimion Oct 31, 2025
79d3806
Fix local tz
Kelimion Oct 31, 2025
bd73433
Fix js_wasm
Kelimion Nov 1, 2025
23c4fec
Start of glob test
Kelimion Nov 1, 2025
8dd4f93
Add tests for glob + match
Kelimion Nov 1, 2025
a9901a6
Link more of path/filepath to os2
Kelimion Nov 1, 2025
6933b02
Merge branch 'master' into core_os2_revamp
Kelimion Nov 1, 2025
ce19267
Link some more of filepath to os2
Kelimion Nov 1, 2025
68d9ea6
Port vendor:OpenGL and vendor:fontstash
Kelimion Nov 1, 2025
af29c6d
Update example
Kelimion Nov 1, 2025
abdcdb7
Port core:flags to os2
Kelimion Nov 2, 2025
d62e77f
require
Kelimion Nov 2, 2025
f8afec5
Fix
Kelimion Nov 2, 2025
a119884
Merge branch 'master' into core_os2_revamp
Kelimion Nov 2, 2025
2e3bece
Convert fmt_js.odin to not use `core:os*`
Kelimion Nov 2, 2025
2ebf564
remove import
Kelimion Nov 2, 2025
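The common thread in these commits is that `core:os` procedures which report failure with a boolean are replaced by `core:os/os2` counterparts that take an explicit allocator and return an error value. A minimal sketch of the read-side pattern (the procedure name and path are illustrative, not from this PR):

import os "core:os/os2"

// core:os:      data, ok  := os.read_entire_file(path)              // boolean result
// core:os/os2:  data, err := os.read_entire_file(path, allocator)   // error value, nil on success
load_settings :: proc(path: string) -> (data: []byte, ok: bool) {
	contents, err := os.read_entire_file(path, context.allocator)
	if err != nil {
		return nil, false
	}
	return contents, true
}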
54 changes: 25 additions & 29 deletions core/compress/gzip/doc.odin
@@ -2,10 +2,11 @@
A small `GZIP` unpacker.

Example:
import "core:bytes"
import "core:os"
import "core:compress"
import "core:fmt"
import "core:bytes"
import os "core:os/os2"
import "core:compress"
import "core:compress/gzip"
import "core:fmt"

// Small GZIP file with fextra, fname and fcomment present.
@private
@@ -22,7 +23,8 @@ Example:

main :: proc() {
// Set up output buffer.
buf := bytes.Buffer{}
buf: bytes.Buffer
defer bytes.buffer_destroy(&buf)

stdout :: proc(s: string) {
os.write_string(os.stdout, s)
@@ -31,51 +33,45 @@ Example:
os.write_string(os.stderr, s)
}

args := os.args

if len(args) < 2 {
if len(os.args) < 2 {
stderr("No input file specified.\n")
err := load(data=TEST, buf=&buf, known_gzip_size=len(TEST))
err := gzip.load(data=TEST, buf=&buf, known_gzip_size=len(TEST))
if err == nil {
stdout("Displaying test vector: ")
stdout("Displaying test vector: \"")
stdout(bytes.buffer_to_string(&buf))
stdout("\n")
stdout("\"\n")
} else {
fmt.printf("gzip.load returned %v\n", err)
}
bytes.buffer_destroy(&buf)
os.exit(0)
}

// The rest are all files.
args = args[1:]
err: Error
for file in os.args[1:] {
err: gzip.Error

for file in args {
if file == "-" {
// Read from stdin
s := os.stream_from_handle(os.stdin)
ctx := &compress.Context_Stream_Input{
input = s,
input = os.stdin.stream,
}
err = load(ctx, &buf)
err = gzip.load(ctx, &buf)
} else {
err = load(file, &buf)
err = gzip.load(file, &buf)
}
if err != nil {
if err != E_General.File_Not_Found {
stderr("File not found: ")
stderr(file)
stderr("\n")
os.exit(1)
}
switch err {
case nil:
stdout(bytes.buffer_to_string(&buf))
case gzip.E_General.File_Not_Found:
stderr("File not found: ")
stderr(file)
stderr("\n")
os.exit(1)
case:
stderr("GZIP returned an error.\n")
bytes.buffer_destroy(&buf)
os.exit(2)
}
stdout(bytes.buffer_to_string(&buf))
}
bytes.buffer_destroy(&buf)
}
*/
package compress_gzip
22 changes: 9 additions & 13 deletions core/compress/gzip/gzip.odin
@@ -14,12 +14,12 @@ package compress_gzip
to be the input to a complementary TAR implementation.
*/

import "core:compress/zlib"
import "core:compress"
import "core:os"
import "core:io"
import "core:bytes"
import "core:hash"
import "core:compress/zlib"
import "core:compress"
import os "core:os/os2"
import "core:io"
import "core:bytes"
import "core:hash"

Magic :: enum u16le {
GZIP = 0x8b << 8 | 0x1f,
@@ -107,14 +107,10 @@ load :: proc{load_from_bytes, load_from_file, load_from_context}
load_from_file :: proc(filename: string, buf: ^bytes.Buffer, expected_output_size := -1, allocator := context.allocator) -> (err: Error) {
context.allocator = allocator

data, ok := os.read_entire_file(filename)
defer delete(data)
file_data, file_err := os.read_entire_file(filename, allocator)
defer delete(file_data)

err = E_General.File_Not_Found
if ok {
err = load_from_bytes(data, buf, len(data), expected_output_size)
}
return
return load_from_bytes(file_data, buf, len(file_data), expected_output_size) if file_err == nil else E_General.File_Not_Found
}

load_from_bytes :: proc(data: []byte, buf: ^bytes.Buffer, known_gzip_size := -1, expected_output_size := -1, allocator := context.allocator) -> (err: Error) {
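A short usage sketch for the updated loader, assuming an illustrative helper and file name; a missing file now surfaces through the error result rather than a boolean:

import "core:bytes"
import "core:compress/gzip"

decompress_one :: proc(path: string) -> (ok: bool) {
	buf: bytes.Buffer
	defer bytes.buffer_destroy(&buf)
	if err := gzip.load_from_file(path, &buf); err != nil {
		// E_General.File_Not_Found is returned when the os2 read fails.
		return false
	}
	// The decompressed contents are now in `buf`.
	return true
}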
41 changes: 30 additions & 11 deletions core/crypto/hash/hash_os.odin
@@ -2,37 +2,56 @@
package crypto_hash

import "core:io"
import "core:os"
import os "core:os/os2"

// hash_file will read the file provided by the given handle and return the
// `hash_file` will read the file provided by the given handle and return the
// computed digest in a newly allocated slice.
hash_file :: proc(
algorithm: Algorithm,
hd: os.Handle,
hash_file_by_handle :: proc(
algorithm: Algorithm,
handle: ^os.File,
load_at_once := false,
allocator := context.allocator,
allocator := context.allocator,
) -> (
[]byte,
io.Error,
) {
if !load_at_once {
return hash_stream(algorithm, os.stream_from_handle(hd), allocator)
return hash_stream(algorithm, handle.stream, allocator)
}

buf, ok := os.read_entire_file(hd, allocator)
if !ok {
buf, err := os.read_entire_file(handle, allocator)
if err != nil {
return nil, io.Error.Unknown
}
defer delete(buf, allocator)

return hash_bytes(algorithm, buf, allocator), io.Error.None
}

hash_file_by_name :: proc(
algorithm: Algorithm,
filename: string,
load_at_once := false,
allocator := context.allocator,
) -> (
[]byte,
io.Error,
) {
handle, err := os.open(filename)
defer os.close(handle)

if err != nil {
return {}, io.Error.Unknown
}
return hash_file_by_handle(algorithm, handle, load_at_once, allocator)
}


hash :: proc {
hash_stream,
hash_file,
hash_file_by_handle,
hash_bytes,
hash_string,
hash_bytes_to_buffer,
hash_string_to_buffer,
}
}
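A usage sketch for the new by-name variant; the helper, algorithm choice, and path are illustrative:

import "core:crypto/hash"
import "core:fmt"

print_digest :: proc(path: string) {
	digest, err := hash.hash_file_by_name(.SHA256, path)
	if err != .None {
		fmt.eprintfln("hashing %v failed: %v", path, err)
		return
	}
	defer delete(digest)
	fmt.printfln("%x  %s", digest, path)
}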
13 changes: 7 additions & 6 deletions core/crypto/rand_windows.odin
@@ -1,21 +1,22 @@
package crypto

import win32 "core:sys/windows"
import "core:os"
import "core:fmt"

HAS_RAND_BYTES :: true

@(private)
_rand_bytes :: proc(dst: []byte) {
ret := os.Platform_Error(win32.BCryptGenRandom(nil, raw_data(dst), u32(len(dst)), win32.BCRYPT_USE_SYSTEM_PREFERRED_RNG))
if ret != nil {
#partial switch ret {
case os.ERROR_INVALID_HANDLE:
// NOTE(Jeroen) We don't actually use anything `core:os`-specific here.
// So let's just evaluate `win32`'s return values without first wrapping them.
ret := win32.DWORD(win32.BCryptGenRandom(nil, raw_data(dst), u32(len(dst)), win32.BCRYPT_USE_SYSTEM_PREFERRED_RNG))
if ret != win32.ERROR_SUCCESS {
switch ret {
case win32.ERROR_INVALID_HANDLE:
// The handle to the first parameter is invalid.
// This should not happen here, since we explicitly pass nil to it
panic("crypto: BCryptGenRandom Invalid handle for hAlgorithm")
case os.ERROR_INVALID_PARAMETER:
case win32.ERROR_INVALID_PARAMETER:
// One of the parameters was invalid
panic("crypto: BCryptGenRandom Invalid parameter")
case:
37 changes: 20 additions & 17 deletions core/encoding/csv/doc.odin
@@ -6,7 +6,7 @@ Example:

import "core:fmt"
import "core:encoding/csv"
import "core:os"
import os "core:os/os2"

// Requires keeping the entire CSV file in memory at once
iterate_csv_from_string :: proc(filename: string) {
@@ -16,14 +16,15 @@ Example:
r.reuse_record_buffer = true // Without it you have to delete each of the fields within it
defer csv.reader_destroy(&r)

csv_data, ok := os.read_entire_file(filename)
if ok {
csv_data, csv_err := os.read_entire_file(filename, context.allocator)
defer delete(csv_data)

if csv_err == nil {
csv.reader_init_with_string(&r, string(csv_data))
} else {
fmt.printfln("Unable to open file: %v", filename)
fmt.eprintfln("Unable to open file: %v. Error: %v", filename, csv_err)
return
}
defer delete(csv_data)

for r, i, err in csv.iterator_next(&r) {
if err != nil { /* Do something with error */ }
@@ -39,16 +40,16 @@ Example:
r: csv.Reader
r.trim_leading_space = true
r.reuse_record = true // Without it you have to delete(record)
r.reuse_record_buffer = true // Without it you have to each of the fields within it
r.reuse_record_buffer = true // Without it you have to delete each of the fields within it
defer csv.reader_destroy(&r)

handle, err := os.open(filename)
defer os.close(handle)
if err != nil {
fmt.eprintfln("Error opening file: %v", filename)
fmt.eprintfln("Unable to open file: %v. Error: %v", filename, err)
return
}
defer os.close(handle)
csv.reader_init(&r, os.stream_from_handle(handle))
csv.reader_init(&r, handle.stream)

for r, i in csv.iterator_next(&r) {
for f, j in r {
@@ -64,21 +65,23 @@ Example:
r.trim_leading_space = true
defer csv.reader_destroy(&r)

csv_data, ok := os.read_entire_file(filename)
if ok {
csv.reader_init_with_string(&r, string(csv_data))
} else {
fmt.printfln("Unable to open file: %v", filename)
csv_data, csv_err := os.read_entire_file(filename, context.allocator)
defer delete(csv_data, context.allocator)
if csv_err != nil {
fmt.eprintfln("Unable to open file: %v. Error: %v", filename, csv_err)
return
}
defer delete(csv_data)
csv.reader_init_with_string(&r, string(csv_data))

records, err := csv.read_all(&r)
if err != nil { /* Do something with CSV parse error */ }

defer {
for rec in records {
delete(rec)
for record in records {
for field in record {
delete(field)
}
delete(record)
}
delete(records)
}
10 changes: 5 additions & 5 deletions core/encoding/hxa/read.odin
@@ -1,7 +1,7 @@
package encoding_hxa

import "core:fmt"
import "core:os"
import os "core:os/os2"
import "core:mem"

Read_Error :: enum {
@@ -14,13 +14,13 @@ Read_Error :: enum {
read_from_file :: proc(filename: string, print_error := false, allocator := context.allocator, loc := #caller_location) -> (file: File, err: Read_Error) {
context.allocator = allocator

data, ok := os.read_entire_file(filename, allocator, loc)
if !ok {
data, data_err := os.read_entire_file(filename, allocator, loc)
if data_err != nil {
err = .Unable_To_Read_File
delete(data, allocator, loc)
delete(data, allocator)
return
}
file, err = read(data, filename, print_error, allocator, loc)
file, err = read(data, filename, print_error, allocator)
file.backing = data
return
}
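A minimal calling sketch for the converted reader; the wrapper and path are illustrative, and cleanup of the returned `File` is elided:

import "core:encoding/hxa"
import "core:fmt"

load_model :: proc(path: string) -> (model: hxa.File, err: hxa.Read_Error) {
	// Only the internal reading changed to core:os/os2; a failed read is
	// still reported as .Unable_To_Read_File.
	model, err = hxa.read_from_file(path, print_error = true)
	if err == .Unable_To_Read_File {
		fmt.eprintfln("could not read %v", path)
	}
	return
}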
6 changes: 3 additions & 3 deletions core/encoding/hxa/write.odin
@@ -1,7 +1,7 @@
package encoding_hxa

import "core:os"
import "core:mem"
import os "core:os/os2"
import "core:mem"

Write_Error :: enum {
None,
@@ -18,7 +18,7 @@ write_to_file :: proc(filepath: string, file: File) -> (err: Write_Error) {
defer delete(buf)

write_internal(&Writer{data = buf}, file)
if !os.write_entire_file(filepath, buf) {
if os.write_entire_file(filepath, buf) != nil {
err = .Failed_File_Write
}
return
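The write side follows the same bool-to-error change; a sketch with an illustrative helper:

import os "core:os/os2"

save_bytes :: proc(path: string, contents: []byte) -> bool {
	// core:os returned a bool here; core:os/os2 returns an error value, nil on success.
	return os.write_entire_file(path, contents) == nil
}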
21 changes: 12 additions & 9 deletions core/encoding/ini/ini.odin
@@ -1,13 +1,13 @@
// Reader and writer for a variant of the `.ini` file format with `key = value` entries in `[sections]`.
package encoding_ini

import "base:runtime"
import "base:intrinsics"
import "core:strings"
import "core:strconv"
import "core:io"
import "core:os"
import "core:fmt"
import "base:runtime"
import "base:intrinsics"
import "core:strings"
import "core:strconv"
import "core:io"
import os "core:os/os2"
import "core:fmt"
_ :: fmt

Options :: struct {
@@ -121,8 +121,11 @@ load_map_from_string :: proc(src: string, allocator: runtime.Allocator, options
}

load_map_from_path :: proc(path: string, allocator: runtime.Allocator, options := DEFAULT_OPTIONS) -> (m: Map, err: runtime.Allocator_Error, ok: bool) {
data := os.read_entire_file(path, allocator) or_return
data, data_err := os.read_entire_file(path, allocator)
defer delete(data, allocator)
if data_err != nil {
return
}
m, err = load_map_from_string(string(data), allocator, options)
ok = err == nil
defer if !ok {
@@ -191,4 +194,4 @@ write_map :: proc(w: io.Writer, m: Map) -> (n: int, err: io.Error) {
section_index += 1
}
return
}
}
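A usage sketch for the converted INI loader (helper and file name are illustrative); the `(m, err, ok)` return shape is unchanged, only the file read now goes through `core:os/os2`:

import ini "core:encoding/ini"
import "core:fmt"

load_config :: proc(path: string) -> (m: ini.Map, ok: bool) {
	config, alloc_err, loaded := ini.load_map_from_path(path, context.allocator)
	if !loaded {
		fmt.eprintfln("could not load %v (allocator error: %v)", path, alloc_err)
		return
	}
	return config, true
}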