error: Error;
done: bool; // True if an .EOF was reached.
+
+ read_all :: read_all;
+ read_byte :: read_byte;
+ unread_byte :: unread_byte;
+ read_bytes :: read_bytes;
+ read_string :: read_string;
+ read_i32 :: read_i32;
+ read_i64 :: read_i64;
+ read_u32 :: read_u32;
+ read_u64 :: read_u64;
+ read_line :: read_line;
+ read_word :: read_word;
+ read_until :: read_until;
+ peek_byte :: peek_byte;
+ advance_line :: advance_line;
+ skip_whitespace :: skip_whitespace;
+ skip_bytes :: skip_bytes;
}
reader_make :: (s: ^Stream, buffer_size := 4096, allocator := context.allocator) -> Reader {
output := array.make(u8, 128, allocator=allocator);
while !reader_empty(reader) {
- if err := reader_read_next_chunk(reader); err != .None do break;
- if error != .None {
+ if err := reader_read_next_chunk(reader); err != .None && err != .ReadPending {
+ break;
+ }
+
+ if error != .None && error != .ReadPending {
reader_consume_error(reader);
break;
}
}
#local reader_read_next_chunk :: (use reader: ^Reader) -> Error {
- if done do return .None;
-
if start > 0 {
// This assumes that memory.copy behaves like memmove, in that the
// buffer may overlap, but it should do the right thing.
memory.copy(buffer.data, buffer.data + start, end - start);
end -= start;
start = 0;
}
+ if done do return .None;
+
if end >= buffer.count {
return .BufferFull;
}
return buffer[0 .. total_copied];
},
- (into: ^[..] u8, s: str) -> str {
- array.ensure_capacity(into, into.count + s.count);
- memory.copy(into.data, s.data, into.count);
- into.count += s.count;
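+ // Appends each string in `strings` to `into`, growing the array as needed,
+ // and returns the accumulated contents as a str.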
+ (into: ^[..] u8, strings: ..str) -> str {
+ for s: strings {
+ array.ensure_capacity(into, into.count + s.count);
+ memory.copy(into.data + into.count, s.data, s.count);
+ into.count += s.count;
+ }
return .{ into.data, into.count };
}
}
--- /dev/null
+
+# HTTP library
+A minimal implementation of the HTTP/1.0 and HTTP/1.1 protocols.
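+
+## Example
+
+A minimal usage sketch. The load directive, the empty array literal for the
+query parameters, and the entry-point signature are assumptions that may need
+adjusting for your Onyx setup.
+
+```onyx
+#load "core/std"
+
+use package core
+use package http
+
+main :: (args: [] cstr) {
+    // HTTPS is not supported, so connect with a plain http:// URL.
+    conn, err := connect("http://example.com");
+    if err != .None do return;
+
+    // Build and send a GET request for "/" with no query parameters.
+    req := Request.get("/", .[]);
+    res := request(^conn, ^req);
+
+    printf("Status: {}\n", res.status);
+    printf("{}\n", cast(str) res.body);
+}
+```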
\ No newline at end of file
--- /dev/null
+package http
+
+//
+// These are named so that the string representation of each value
+// is itself a valid request method.
+Method :: enum {
+ GET;
+ HEAD;
+ POST;
+ PUT;
+ DELETE;
+ CONNECT;
+ OPTIONS;
+ TRACE;
+ PATCH;
+}
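+
+// For example, `Method.GET` formats as the string "GET", which can be written
+// directly into the request line of an HTTP request.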
+
+HTTP_Header :: enum {
+ Content_Length;
+ Content_Type;
+ Expect;
+ TransferEncoding;
+ Server;
+ Accept;
+ AcceptEncoding;
+}
+
+header_from_str :: (s: str) -> (HTTP_Header, success: bool) {
+ switch s |> string.strip_whitespace() |> string.to_lowercase() {
+ case "content-length" do return .Content_Length, true;
+ case "content-type" do return .Content_Type, true;
+ case "expect" do return .Expect, true;
+ case "transfer-encoding" do return .TransferEncoding, true;
+ case "accept" do return .Accept, true;
+ case "accept-encoding" do return .AcceptEncoding, true;
+ case #default do return ~~ -1, false;
+ }
+}
+
+#match string.as_str (v: HTTP_Header) -> str {
+ switch v {
+ case .Content_Length do return "Content-Length";
+ case .Content_Type do return "Content-Type";
+ case .Expect do return "Expect";
+ case .TransferEncoding do return "Transfer-Encoding";
+ case .Server do return "Server";
+ case .Accept do return "Accept";
+ case .AcceptEncoding do return "Accept-Encoding";
+ case #default do return "";
+ }
+}
+
+MediaType :: enum {
+ PlainText;
+ ApplicationJson;
+}
+
+media_type_from_str :: (s: str) -> (MediaType, success: bool) {
+ switch s |> string.strip_whitespace() |> string.to_lowercase() {
+ case "text/plain" do return .PlainText, true;
+ case "application/json" do return .ApplicationJson, true;
+ case #default do return ~~ -1, false;
+ }
+}
+
+#match string.as_str (v: MediaType) -> str {
+ switch v {
+ case .PlainText do return "text/plain";
+ case .ApplicationJson do return "application/json";
+ case #default do return "";
+ }
+}
+
+
+HTTP_Headers :: struct {
+ content_length: u32;
+ expect: bool;
+ chunked: bool;
+ accept: MediaType;
+ custom_entries: Map(str, str);
+
+ parse_header_line :: (use this: ^HTTP_Headers, line_: [] u8) -> bool {
+ line := line_;
+ header, content := do {
+ header := string.read_until(^line, #char ":");
+ string.advance(^line, 1);
+ string.strip_leading_whitespace(^line);
+ return header, line;
+ };
+
+ header_type: HTTP_Header;
+ if header_type', success := header_from_str(header); !success {
+ custom_entries[header] = string.strip_whitespace(content);
+ return false;
+ }
+
+ switch header_type {
+ case .Content_Length {
+ content_length = ~~ conv.str_to_i64(content);
+ return true;
+ }
+
+ case .Content_Type {
+ _, success := media_type_from_str(content);
+ return success;
+ }
+
+ case .Accept {
+ accept', worked := media_type_from_str(content);
+ return worked;
+ }
+
+ case .TransferEncoding {
+ switch string.strip_whitespace(content) {
+ case "chunked" do chunked = true;
+ case "identity" ---
+ case #default do return false;
+ }
+
+ return true;
+ }
+
+ case .Expect {
+ switch string.strip_whitespace(content) {
+ case "100-continue" {
+ expect = true;
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ case .AcceptEncoding {
+ @TODO // The accepted encodings should be restricted to ones this library supports.
+ return true;
+ }
+
+ case .Server {
+ return true;
+ }
+ }
+
+ return false;
+ }
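+
+ // Hypothetical example: headers->parse_header_line("Content-Length: 42") sets
+ // content_length to 42 and returns true, while an unrecognized header such as
+ // "X-Custom: abc" is stored verbatim in custom_entries and returns false.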
+}
\ No newline at end of file
--- /dev/null
+package http
+
+#local {
+ HTTP_VERSION_STRING :: "HTTP/1.1"
+ USER_AGENT :: "onyx/0.1.0"
+}
+
+Request :: struct {
+ method: Method;
+ headers := __zero_value(Map(str, str));
+ host: str;
+ resource: str;
+ body: [] u8;
+
+ //
+ // Methods
+ //
+ // Send a request to a writer
+ send :: request_write;
+
+
+
+ //
+ // "Factory" that constructs a GET request
+ get :: (resource: str, params: [] struct {key, value: str;}) -> Request {
+ req := init(Request);
+ req.method = .GET;
+
+ res: [..] u8;
+ string.concat(^res, resource);
+ if params.count > 0 {
+ string.concat(^res, "?");
+
+ for^ p: params {
+ string.concat(^res, p.key, "=", p.value, "&");
+ }
+ }
+
+ req.resource = res;
+ req.body = null_str;
+ return req;
+ }
+}
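+
+// Hypothetical usage of the `get` factory above (not part of the package):
+//
+//     req := Request.get("/search", .[ .{ "q", "onyx" } ]);
+//
+// req.resource becomes "/search?q=onyx&" (each parameter is followed by a '&').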
+
+request_write :: (use req: ^Request, writer: ^io.Writer) {
+ io.write_format(writer, "{} {} {}\r\n", method, resource, HTTP_VERSION_STRING);
+ io.write_format(writer, "User-Agent: {}\r\n", USER_AGENT);
+ io.write_format(writer, "Host: {}\r\n", host);
+ io.write_format(writer, "Accept-Language: en\r\n");
+
+ for^ headers.entries {
+ io.write_format(writer, "{}: {}\r\n", it.key, it.value);
+ }
+
+ io.write(writer, "\r\n");
+ if body.count > 0 {
+ io.write(writer, body);
+ }
+}
+
+
+Response :: struct {
+ status: i32;
+ headers: HTTP_Headers;
+ body: [] u8;
+}
+
+
+
+Connection :: struct {
+ url: str;
+ port: u16;
+ socket: ^net.Socket;
+ r: io.Reader;
+ w: io.Writer;
+}
+
+Connection_Error :: enum {
+ None;
+ Protocol_Not_Supported;
+ Socket_Creation_Failed;
+ Connection_Failed;
+}
+
+connect :: (url_: str, port: u16 = 80) -> (Connection, Connection_Error) {
+ url := url_;
+ //
+ // HTTPS is not supported.
+ if string.starts_with(url, "https://") {
+ return __zero_value(Connection), .Protocol_Not_Supported;
+ }
+
+ //
+ // Remove the "http://" scheme prefix if present.
+ if string.starts_with(url, "http://") {
+ url = url[7 .. url.count];
+ }
+
+ conn: Connection;
+ conn.url = url;
+ conn.port = port;
+ conn.socket = new(net.Socket);
+
+ err: net.SocketError;
+ *conn.socket, err = net.socket_create(.Inet, .Stream);
+ if err != .None {
+ return conn, .Socket_Creation_Failed;
+ }
+
+ if err := conn.socket->connect(url, port); err != .None {
+ return conn, .Connection_Failed;
+ }
+
+ // When using an io.Reader on a net.Socket, non-blocking reads must be enabled,
+ // because the Reader assumes it can call stream_read without blocking. If no
+ // data is available but the stream is still alive, stream_read should return
+ // io.Error.ReadPending. Without non-blocking mode, the program can hang
+ // waiting for data to arrive on the socket.
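+ // (The read loop in io.Reader, for example, treats .ReadPending as
+ // "try again later" rather than as a fatal error.)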
+ conn.socket->setting(.NonBlocking, 1);
+
+ conn.r = io.reader_make(conn.socket);
+ conn.w = io.writer_make(conn.socket);
+
+ return conn, .None;
+}
+
+request :: (connection: ^Connection, req: ^Request) -> Response {
+ req.host = connection.url;
+ req->send(^connection.w);
+
+ res: Response;
+ res.status = 500;
+
+ status_line := io.read_line(^connection.r, consume_newline=true, inplace=true);
+ if string.starts_with(status_line, "HTTP/1.1")
+ || string.starts_with(status_line, "HTTP/1.0") {
+ res.status = ~~ conv.str_to_i64(status_line[HTTP_VERSION_STRING.count+1 .. status_line.count]);
+ }
+
+ while true {
+ line := io.read_line(^connection.r, consume_newline=true);
+ if line == "\r\n" do break;
+ res.headers->parse_header_line(line);
+ }
+
+ data: [..] u8;
+ if res.headers.chunked {
+ read_chunked_body();
+ } else {
+ read_simple_body();
+ }
+
+ res.body = data;
+ return res;
+
+ read_chunked_body :: macro () {
+ while true {
+ line := io.read_line(^connection.r, consume_newline=true, inplace=true);
+ chunk_size := cast(u32) (line |> conv.str_to_i64(base=16));
+ if chunk_size == 0 do break;
+
+ array.ensure_capacity(^data, data.count + chunk_size);
+ read_n_bytes(chunk_size);
+
+ io.advance_line(^connection.r);
+ }
+ }
+
+ read_simple_body :: macro () {
+ array.ensure_capacity(^data, res.headers.content_length);
+ read_n_bytes(res.headers.content_length);
+ }
+
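+ // Note: this busy-waits on the (non-blocking) socket until all `n` bytes have
+ // arrived; any .ReadPending error from io.read_bytes is simply ignored and the
+ // read is retried on the next loop iteration.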
+ read_n_bytes :: macro (n: u32) {
+ to_read := n;
+ while to_read > 0 {
+ bytes_read, err := io.read_bytes(^connection.r, data.data[data.count .. data.count + to_read]);
+ to_read -= bytes_read;
+ data.count += bytes_read;
+ }
+ }
+}
\ No newline at end of file
--- /dev/null
+package http
+
+#package {
+ string :: package core.string
+ conv :: package core.conv
+ io :: package core.io
+ net :: package core.net
+ iter :: package core.iter
+ array :: package core.array
+
+ init :: (package core.intrinsics.onyx).init
+ __zero_value :: (package core.intrinsics.onyx).__zero_value
+}
+
+#load_all "./."
\ No newline at end of file